diff --git a/go.mod b/go.mod
index 94f515069217878d947d743c79b2be184cdadd02..f3cd7c7bbac2737b980226543895e527e9024641 100644
--- a/go.mod
+++ b/go.mod
@@ -8,8 +8,8 @@ require (
 	github.com/blang/semver v3.5.1+incompatible
 	github.com/go-logr/logr v1.4.1
 	github.com/google/uuid v1.6.0
-	github.com/onsi/ginkgo/v2 v2.17.1
-	github.com/onsi/gomega v1.32.0
+	github.com/onsi/ginkgo/v2 v2.17.2
+	github.com/onsi/gomega v1.33.1
 	github.com/openshift/api v0.0.0-20240904015708-69df64132c91
 	github.com/openshift/client-go v0.0.0-20240904134955-cd42fd3d7408
 	github.com/openshift/library-go v0.0.0-20240903143724-7c5c5d305ac1
@@ -35,8 +35,9 @@ require (
 )
 
 require (
-	github.com/golangci/golangci-lint v1.54.2
+	github.com/golangci/golangci-lint v1.58.2
 	github.com/openshift/cluster-api-actuator-pkg/testutils v0.0.0-20240626103413-ddea9c7c0aca
+	github.com/openshift/cluster-control-plane-machine-set-operator v0.0.0-20240909043600-373ac49835bf
 	golang.org/x/time v0.5.0
 	k8s.io/component-base v0.30.1
 	sigs.k8s.io/kube-storage-version-migrator v0.0.6-0.20230721195810-5c8923c5ff96
@@ -45,18 +46,21 @@ require (
 require (
 	4d63.com/gocheckcompilerdirectives v1.2.1 // indirect
 	4d63.com/gochecknoglobals v0.2.1 // indirect
-	github.com/4meepo/tagalign v1.3.2 // indirect
-	github.com/Abirdcfly/dupword v0.0.12 // indirect
-	github.com/Antonboom/errname v0.1.12 // indirect
-	github.com/Antonboom/nilnil v0.1.7 // indirect
+	github.com/4meepo/tagalign v1.3.4 // indirect
+	github.com/Abirdcfly/dupword v0.0.14 // indirect
+	github.com/Antonboom/errname v0.1.13 // indirect
+	github.com/Antonboom/nilnil v0.1.9 // indirect
+	github.com/Antonboom/testifylint v1.2.0 // indirect
 	github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect
 	github.com/BurntSushi/toml v1.3.2 // indirect
+	github.com/Crocmagnon/fatcontext v0.2.2 // indirect
 	github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 // indirect
-	github.com/GaijinEntertainment/go-exhaustruct/v3 v3.1.0 // indirect
+	github.com/GaijinEntertainment/go-exhaustruct/v3 v3.2.0 // indirect
 	github.com/MakeNowJust/heredoc v1.0.0 // indirect
-	github.com/Masterminds/semver v1.5.0 // indirect
-	github.com/OpenPeeDeeP/depguard/v2 v2.1.0 // indirect
-	github.com/alexkohler/nakedret/v2 v2.0.2 // indirect
+	github.com/Masterminds/semver/v3 v3.2.1 // indirect
+	github.com/OpenPeeDeeP/depguard/v2 v2.2.0 // indirect
+	github.com/alecthomas/go-check-sumtype v0.1.4 // indirect
+	github.com/alexkohler/nakedret/v2 v2.0.4 // indirect
 	github.com/alexkohler/prealloc v1.0.0 // indirect
 	github.com/alingse/asasalint v0.0.11 // indirect
 	github.com/ashanbrown/forbidigo v1.6.0 // indirect
@@ -65,97 +69,100 @@ require (
 	github.com/bkielbasa/cyclop v1.2.1 // indirect
 	github.com/blang/semver/v4 v4.0.0 // indirect
 	github.com/blizzy78/varnamelen v0.8.0 // indirect
-	github.com/bombsimon/wsl/v3 v3.4.0 // indirect
-	github.com/breml/bidichk v0.2.4 // indirect
-	github.com/breml/errchkjson v0.3.1 // indirect
-	github.com/butuzov/ireturn v0.2.0 // indirect
-	github.com/butuzov/mirror v1.1.0 // indirect
-	github.com/ccojocar/zxcvbn-go v1.0.1 // indirect
+	github.com/bombsimon/wsl/v4 v4.2.1 // indirect
+	github.com/breml/bidichk v0.2.7 // indirect
+	github.com/breml/errchkjson v0.3.6 // indirect
+	github.com/butuzov/ireturn v0.3.0 // indirect
+	github.com/butuzov/mirror v1.2.0 // indirect
+	github.com/catenacyber/perfsprint v0.7.1 // indirect
+	github.com/ccojocar/zxcvbn-go v1.0.2 // indirect
 	github.com/cespare/xxhash/v2 v2.2.0 // indirect
 	github.com/chai2010/gettext-go v1.0.2 // indirect
 	github.com/charithe/durationcheck v0.0.10 // indirect
-	github.com/chavacava/garif v0.0.0-20230227094218-b8c73b2037b8 // indirect
+	github.com/chavacava/garif v0.1.0 // indirect
+	github.com/ckaznocha/intrange v0.1.2 // indirect
 	github.com/curioswitch/go-reassign v0.2.0 // indirect
-	github.com/daixiang0/gci v0.11.0 // indirect
+	github.com/daixiang0/gci v0.13.4 // indirect
 	github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
-	github.com/denis-tingaikin/go-header v0.4.3 // indirect
+	github.com/denis-tingaikin/go-header v0.5.0 // indirect
 	github.com/emicklei/go-restful/v3 v3.12.0 // indirect
-	github.com/esimonov/ifshort v1.0.4 // indirect
-	github.com/ettle/strcase v0.1.1 // indirect
+	github.com/ettle/strcase v0.2.0 // indirect
 	github.com/evanphx/json-patch v5.6.0+incompatible // indirect
 	github.com/evanphx/json-patch/v5 v5.9.0 // indirect
 	github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d // indirect
-	github.com/fatih/color v1.16.0 // indirect
+	github.com/fatih/color v1.17.0 // indirect
 	github.com/fatih/structtag v1.2.0 // indirect
-	github.com/firefart/nonamedreturns v1.0.4 // indirect
+	github.com/firefart/nonamedreturns v1.0.5 // indirect
 	github.com/fsnotify/fsnotify v1.7.0 // indirect
 	github.com/fzipp/gocyclo v0.6.0 // indirect
-	github.com/go-critic/go-critic v0.9.0 // indirect
+	github.com/ghostiam/protogetter v0.3.6 // indirect
+	github.com/go-critic/go-critic v0.11.3 // indirect
 	github.com/go-errors/errors v1.4.2 // indirect
 	github.com/go-logr/zapr v1.3.0 // indirect
 	github.com/go-openapi/jsonpointer v0.21.0 // indirect
 	github.com/go-openapi/jsonreference v0.21.0 // indirect
 	github.com/go-openapi/swag v0.23.0 // indirect
-	github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 // indirect
+	github.com/go-task/slim-sprig/v3 v3.0.0 // indirect
+	github.com/go-test/deep v1.1.0 // indirect
 	github.com/go-toolsmith/astcast v1.1.0 // indirect
 	github.com/go-toolsmith/astcopy v1.1.0 // indirect
-	github.com/go-toolsmith/astequal v1.1.0 // indirect
+	github.com/go-toolsmith/astequal v1.2.0 // indirect
 	github.com/go-toolsmith/astfmt v1.1.0 // indirect
 	github.com/go-toolsmith/astp v1.1.0 // indirect
 	github.com/go-toolsmith/strparse v1.1.0 // indirect
 	github.com/go-toolsmith/typep v1.1.0 // indirect
+	github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1 // indirect
 	github.com/go-xmlfmt/xmlfmt v1.1.2 // indirect
 	github.com/gobwas/glob v0.2.3 // indirect
 	github.com/gofrs/flock v0.8.1 // indirect
 	github.com/gogo/protobuf v1.3.2 // indirect
 	github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
 	github.com/golang/protobuf v1.5.4 // indirect
-	github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2 // indirect
 	github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a // indirect
-	github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe // indirect
-	github.com/golangci/gofmt v0.0.0-20220901101216-f2edd75033f2 // indirect
-	github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 // indirect
-	github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca // indirect
-	github.com/golangci/misspell v0.4.1 // indirect
-	github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6 // indirect
-	github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 // indirect
+	github.com/golangci/gofmt v0.0.0-20231018234816-f50ced29576e // indirect
+	github.com/golangci/misspell v0.5.1 // indirect
+	github.com/golangci/modinfo v0.3.4 // indirect
+	github.com/golangci/plugin-module-register v0.1.1 // indirect
+	github.com/golangci/revgrep v0.5.3 // indirect
+	github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed // indirect
 	github.com/google/btree v1.0.1 // indirect
 	github.com/google/gnostic-models v0.6.8 // indirect
 	github.com/google/go-cmp v0.6.0 // indirect
 	github.com/google/gofuzz v1.2.0 // indirect
-	github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1 // indirect
+	github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6 // indirect
 	github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
-	github.com/gordonklaus/ineffassign v0.0.0-20230610083614-0e73809eb601 // indirect
+	github.com/gordonklaus/ineffassign v0.1.0 // indirect
 	github.com/gorilla/websocket v1.5.0 // indirect
 	github.com/gostaticanalysis/analysisutil v0.7.1 // indirect
 	github.com/gostaticanalysis/comment v1.4.2 // indirect
 	github.com/gostaticanalysis/forcetypeassert v0.1.0 // indirect
 	github.com/gostaticanalysis/nilerr v0.1.1 // indirect
 	github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7 // indirect
-	github.com/hashicorp/errwrap v1.0.0 // indirect
-	github.com/hashicorp/go-multierror v1.1.1 // indirect
 	github.com/hashicorp/go-version v1.6.0 // indirect
 	github.com/hashicorp/hcl v1.0.0 // indirect
 	github.com/hexops/gotextdiff v1.0.3 // indirect
 	github.com/imdario/mergo v0.3.15 // indirect
 	github.com/inconshreveable/mousetrap v1.1.0 // indirect
-	github.com/jgautheron/goconst v1.5.1 // indirect
+	github.com/jgautheron/goconst v1.7.1 // indirect
 	github.com/jingyugao/rowserrcheck v1.1.1 // indirect
 	github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af // indirect
+	github.com/jjti/go-spancheck v0.6.1 // indirect
 	github.com/josharian/intern v1.0.0 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
 	github.com/julz/importas v0.1.0 // indirect
-	github.com/kisielk/errcheck v1.6.3 // indirect
-	github.com/kisielk/gotool v1.0.0 // indirect
-	github.com/kkHAIKE/contextcheck v1.1.4 // indirect
+	github.com/karamaru-alpha/copyloopvar v1.1.0 // indirect
+	github.com/kisielk/errcheck v1.7.0 // indirect
+	github.com/kkHAIKE/contextcheck v1.1.5 // indirect
 	github.com/kulti/thelper v0.6.3 // indirect
-	github.com/kunwardeep/paralleltest v1.0.8 // indirect
+	github.com/kunwardeep/paralleltest v1.0.10 // indirect
 	github.com/kyoh86/exportloopref v0.1.11 // indirect
-	github.com/ldez/gomoddirectives v0.2.3 // indirect
+	github.com/lasiar/canonicalheader v1.1.1 // indirect
+	github.com/ldez/gomoddirectives v0.2.4 // indirect
 	github.com/ldez/tagliatelle v0.5.0 // indirect
-	github.com/leonklingele/grouper v1.1.1 // indirect
+	github.com/leonklingele/grouper v1.1.2 // indirect
 	github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect
 	github.com/lufeee/execinquery v1.2.1 // indirect
+	github.com/macabu/inamedparam v0.1.3 // indirect
 	github.com/magiconair/properties v1.8.7 // indirect
 	github.com/mailru/easyjson v0.7.7 // indirect
 	github.com/maratori/testableexamples v1.0.0 // indirect
@@ -165,8 +172,7 @@ require (
 	github.com/mattn/go-isatty v0.0.20 // indirect
 	github.com/mattn/go-runewidth v0.0.14 // indirect
 	github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 // indirect
-	github.com/mbilski/exhaustivestruct v1.2.0 // indirect
-	github.com/mgechev/revive v1.3.2 // indirect
+	github.com/mgechev/revive v1.3.7 // indirect
 	github.com/mitchellh/go-homedir v1.1.0 // indirect
 	github.com/mitchellh/go-wordwrap v1.0.1 // indirect
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
@@ -179,42 +185,43 @@ require (
 	github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
 	github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f // indirect
 	github.com/nakabonne/nestif v0.3.1 // indirect
-	github.com/nishanths/exhaustive v0.11.0 // indirect
+	github.com/nishanths/exhaustive v0.12.0 // indirect
 	github.com/nishanths/predeclared v0.2.2 // indirect
-	github.com/nunnatsa/ginkgolinter v0.13.5 // indirect
+	github.com/nunnatsa/ginkgolinter v0.16.2 // indirect
 	github.com/olekukonko/tablewriter v0.0.5 // indirect
-	github.com/pelletier/go-toml/v2 v2.1.0 // indirect
+	github.com/pelletier/go-toml/v2 v2.2.2 // indirect
 	github.com/peterbourgon/diskv v2.0.1+incompatible // indirect
 	github.com/pkg/errors v0.9.1 // indirect
 	github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
-	github.com/polyfloyd/go-errorlint v1.4.4 // indirect
+	github.com/polyfloyd/go-errorlint v1.5.1 // indirect
 	github.com/prometheus/client_model v0.5.0 // indirect
 	github.com/prometheus/common v0.45.0 // indirect
 	github.com/prometheus/procfs v0.12.0 // indirect
-	github.com/quasilyte/go-ruleguard v0.4.0 // indirect
+	github.com/quasilyte/go-ruleguard v0.4.2 // indirect
+	github.com/quasilyte/go-ruleguard/dsl v0.3.22 // indirect
 	github.com/quasilyte/gogrep v0.5.0 // indirect
 	github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect
 	github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect
 	github.com/rivo/uniseg v0.4.2 // indirect
 	github.com/robfig/cron v1.2.0 // indirect
 	github.com/russross/blackfriday/v2 v2.1.0 // indirect
-	github.com/ryancurrah/gomodguard v1.3.0 // indirect
-	github.com/ryanrolds/sqlclosecheck v0.4.0 // indirect
+	github.com/ryancurrah/gomodguard v1.3.2 // indirect
+	github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect
 	github.com/sagikazarmark/locafero v0.3.0 // indirect
 	github.com/sagikazarmark/slog-shim v0.1.0 // indirect
 	github.com/sanposhiho/wastedassign/v2 v2.0.7 // indirect
+	github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 // indirect
 	github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
-	github.com/sashamelentyev/usestdlibvars v1.24.0 // indirect
-	github.com/securego/gosec/v2 v2.17.0 // indirect
+	github.com/sashamelentyev/usestdlibvars v1.25.0 // indirect
+	github.com/securego/gosec/v2 v2.20.0 // indirect
 	github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c // indirect
 	github.com/sirupsen/logrus v1.9.3 // indirect
 	github.com/sivchari/containedctx v1.0.3 // indirect
-	github.com/sivchari/nosnakecase v1.7.0 // indirect
 	github.com/sivchari/tenv v1.7.1 // indirect
 	github.com/sonatard/noctx v0.0.2 // indirect
 	github.com/sourcegraph/conc v0.3.0 // indirect
 	github.com/sourcegraph/go-diff v0.7.0 // indirect
-	github.com/spf13/afero v1.10.0 // indirect
+	github.com/spf13/afero v1.11.0 // indirect
 	github.com/spf13/cast v1.5.1 // indirect
 	github.com/spf13/viper v1.17.0 // indirect
 	github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect
@@ -223,33 +230,35 @@ require (
 	github.com/subosito/gotenv v1.6.0 // indirect
 	github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c // indirect
 	github.com/tdakkota/asciicheck v0.2.0 // indirect
-	github.com/tetafro/godot v1.4.14 // indirect
+	github.com/tetafro/godot v1.4.16 // indirect
 	github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 // indirect
 	github.com/timonwong/loggercheck v0.9.4 // indirect
-	github.com/tomarrell/wrapcheck/v2 v2.8.1 // indirect
+	github.com/tomarrell/wrapcheck/v2 v2.8.3 // indirect
 	github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
 	github.com/ultraware/funlen v0.1.0 // indirect
-	github.com/ultraware/whitespace v0.0.5 // indirect
-	github.com/uudashr/gocognit v1.0.7 // indirect
-	github.com/xen0n/gosmopolitan v1.2.1 // indirect
+	github.com/ultraware/whitespace v0.1.1 // indirect
+	github.com/uudashr/gocognit v1.1.2 // indirect
+	github.com/xen0n/gosmopolitan v1.2.2 // indirect
 	github.com/xlab/treeprint v1.2.0 // indirect
 	github.com/yagipy/maintidx v1.0.0 // indirect
-	github.com/yeya24/promlinter v0.2.0 // indirect
-	github.com/ykadowak/zerologlint v0.1.3 // indirect
-	gitlab.com/bosi/decorder v0.4.0 // indirect
+	github.com/yeya24/promlinter v0.3.0 // indirect
+	github.com/ykadowak/zerologlint v0.1.5 // indirect
+	gitlab.com/bosi/decorder v0.4.2 // indirect
+	go-simpler.org/musttag v0.12.2 // indirect
+	go-simpler.org/sloglint v0.6.0 // indirect
 	go.starlark.net v0.0.0-20230525235612-a134d8f9ddca // indirect
-	go.tmz.dev/musttag v0.7.2 // indirect
+	go.uber.org/automaxprocs v1.5.3 // indirect
 	go.uber.org/multierr v1.11.0 // indirect
 	go.uber.org/zap v1.26.0 // indirect
-	golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect
-	golang.org/x/exp/typeparams v0.0.0-20230307190834-24139beb5833 // indirect
-	golang.org/x/mod v0.16.0 // indirect
+	golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc // indirect
+	golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f // indirect
+	golang.org/x/mod v0.17.0 // indirect
 	golang.org/x/oauth2 v0.18.0 // indirect
-	golang.org/x/sync v0.6.0 // indirect
+	golang.org/x/sync v0.7.0 // indirect
 	golang.org/x/sys v0.20.0 // indirect
 	golang.org/x/term v0.20.0 // indirect
 	golang.org/x/text v0.15.0 // indirect
-	golang.org/x/tools v0.19.0 // indirect
+	golang.org/x/tools v0.21.0 // indirect
 	gomodules.xyz/jsonpatch/v2 v2.4.0 // indirect
 	google.golang.org/appengine v1.6.8 // indirect
 	google.golang.org/protobuf v1.33.0 // indirect
@@ -258,15 +267,13 @@ require (
 	gopkg.in/warnings.v0 v0.1.2 // indirect
 	gopkg.in/yaml.v2 v2.4.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
-	honnef.co/go/tools v0.4.5 // indirect
+	honnef.co/go/tools v0.4.7 // indirect
 	k8s.io/apiextensions-apiserver v0.30.1 // indirect
 	k8s.io/cli-runtime v0.30.0 // indirect
 	k8s.io/kube-aggregator v0.30.1 // indirect
 	k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340 // indirect
-	mvdan.cc/gofumpt v0.5.0 // indirect
-	mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed // indirect
-	mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b // indirect
-	mvdan.cc/unparam v0.0.0-20221223090309-7455f1af531d // indirect
+	mvdan.cc/gofumpt v0.6.0 // indirect
+	mvdan.cc/unparam v0.0.0-20240427195214-063aff900ca1 // indirect
 	sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect
 	sigs.k8s.io/kustomize/api v0.13.5-0.20230601165947-6ce0bf390ce3 // indirect
 	sigs.k8s.io/kustomize/kyaml v0.14.3-0.20230601165947-6ce0bf390ce3 // indirect
diff --git a/go.sum b/go.sum
index 783748204e7deb089f91b8725997bbc8b33a0bf3..32e7cfb2d57733ddebaa02221305f485bed99b23 100644
--- a/go.sum
+++ b/go.sum
@@ -3,74 +3,41 @@
 4d63.com/gochecknoglobals v0.2.1 h1:1eiorGsgHOFOuoOiJDy2psSrQbRdIHrlge0IJIkUgDc=
 4d63.com/gochecknoglobals v0.2.1/go.mod h1:KRE8wtJB3CXCsb1xy421JfTHIIbmT3U5ruxw2Qu8fSU=
 cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
-cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
-cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
-cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
-cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
-cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
-cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
-cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
-cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
-cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=
-cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
-cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
-cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc=
-cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=
-cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI=
-cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk=
-cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY=
-cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
-cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
-cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
-cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
-cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
-cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
-cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
-cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
-cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
-cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
-cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
-cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
-cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
-cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
-cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
-cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
-cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
-cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo=
-dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
-github.com/4meepo/tagalign v1.3.2 h1:1idD3yxlRGV18VjqtDbqYvQ5pXqQS0wO2dn6M3XstvI=
-github.com/4meepo/tagalign v1.3.2/go.mod h1:Q9c1rYMZJc9dPRkbQPpcBNCLEmY2njbAsXhQOZFE2dE=
-github.com/Abirdcfly/dupword v0.0.12 h1:56NnOyrXzChj07BDFjeRA+IUzSz01jmzEq+G4kEgFhc=
-github.com/Abirdcfly/dupword v0.0.12/go.mod h1:+us/TGct/nI9Ndcbcp3rgNcQzctTj68pq7TcgNpLfdI=
-github.com/Antonboom/errname v0.1.12 h1:oh9ak2zUtsLp5oaEd/erjB4GPu9w19NyoIskZClDcQY=
-github.com/Antonboom/errname v0.1.12/go.mod h1:bK7todrzvlaZoQagP1orKzWXv59X/x0W0Io2XT1Ssro=
-github.com/Antonboom/nilnil v0.1.7 h1:ofgL+BA7vlA1K2wNQOsHzLJ2Pw5B5DpWRLdDAVvvTow=
-github.com/Antonboom/nilnil v0.1.7/go.mod h1:TP+ScQWVEq0eSIxqU8CbdT5DFWoHp0MbP+KMUO1BKYQ=
+github.com/4meepo/tagalign v1.3.4 h1:P51VcvBnf04YkHzjfclN6BbsopfJR5rxs1n+5zHt+w8=
+github.com/4meepo/tagalign v1.3.4/go.mod h1:M+pnkHH2vG8+qhE5bVc/zeP7HS/j910Fwa9TUSyZVI0=
+github.com/Abirdcfly/dupword v0.0.14 h1:3U4ulkc8EUo+CaT105/GJ1BQwtgyj6+VaBVbAX11Ba8=
+github.com/Abirdcfly/dupword v0.0.14/go.mod h1:VKDAbxdY8YbKUByLGg8EETzYSuC4crm9WwI6Y3S0cLI=
+github.com/Antonboom/errname v0.1.13 h1:JHICqsewj/fNckzrfVSe+T33svwQxmjC+1ntDsHOVvM=
+github.com/Antonboom/errname v0.1.13/go.mod h1:uWyefRYRN54lBg6HseYCFhs6Qjcy41Y3Jl/dVhA87Ns=
+github.com/Antonboom/nilnil v0.1.9 h1:eKFMejSxPSA9eLSensFmjW2XTgTwJMjZ8hUHtV4s/SQ=
+github.com/Antonboom/nilnil v0.1.9/go.mod h1:iGe2rYwCq5/Me1khrysB4nwI7swQvjclR8/YRPl5ihQ=
+github.com/Antonboom/testifylint v1.2.0 h1:015bxD8zc5iY8QwTp4+RG9I4kIbqwvGX9TrBbb7jGdM=
+github.com/Antonboom/testifylint v1.2.0/go.mod h1:rkmEqjqVnHDRNsinyN6fPSLnoajzFwsCcguJgwADBkw=
 github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0=
 github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8=
 github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
-github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
+github.com/Crocmagnon/fatcontext v0.2.2 h1:OrFlsDdOj9hW/oBEJBNSuH7QWf+E9WPVHw+x52bXVbk=
+github.com/Crocmagnon/fatcontext v0.2.2/go.mod h1:WSn/c/+MMNiD8Pri0ahRj0o9jVpeowzavOQplBJw6u0=
 github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM=
 github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs=
-github.com/GaijinEntertainment/go-exhaustruct/v3 v3.1.0 h1:3ZBs7LAezy8gh0uECsA6CGU43FF3zsx5f4eah5FxTMA=
-github.com/GaijinEntertainment/go-exhaustruct/v3 v3.1.0/go.mod h1:rZLTje5A9kFBe0pzhpe2TdhRniBF++PRHQuRpR8esVc=
+github.com/GaijinEntertainment/go-exhaustruct/v3 v3.2.0 h1:sATXp1x6/axKxz2Gjxv8MALP0bXaNRfQinEwyfMcx8c=
+github.com/GaijinEntertainment/go-exhaustruct/v3 v3.2.0/go.mod h1:Nl76DrGNJTA1KJ0LePKBw/vznBX1EHbAZX8mwjR82nI=
 github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ=
 github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE=
-github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=
-github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
-github.com/OpenPeeDeeP/depguard/v2 v2.1.0 h1:aQl70G173h/GZYhWf36aE5H0KaujXfVMnn/f1kSDVYY=
-github.com/OpenPeeDeeP/depguard/v2 v2.1.0/go.mod h1:PUBgk35fX4i7JDmwzlJwJ+GMe6NfO1723wmJMgPThNQ=
-github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
-github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
-github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
-github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
-github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
-github.com/alexkohler/nakedret/v2 v2.0.2 h1:qnXuZNvv3/AxkAb22q/sEsEpcA99YxLFACDtEw9TPxE=
-github.com/alexkohler/nakedret/v2 v2.0.2/go.mod h1:2b8Gkk0GsOrqQv/gPWjNLDSKwG8I5moSXG1K4VIBcTQ=
+github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
+github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
+github.com/OpenPeeDeeP/depguard/v2 v2.2.0 h1:vDfG60vDtIuf0MEOhmLlLLSzqaRM8EMcgJPdp74zmpA=
+github.com/OpenPeeDeeP/depguard/v2 v2.2.0/go.mod h1:CIzddKRvLBC4Au5aYP/i3nyaWQ+ClszLIuVocRiCYFQ=
+github.com/alecthomas/assert/v2 v2.2.2 h1:Z/iVC0xZfWTaFNE6bA3z07T86hd45Xe2eLt6WVy2bbk=
+github.com/alecthomas/assert/v2 v2.2.2/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ=
+github.com/alecthomas/go-check-sumtype v0.1.4 h1:WCvlB3l5Vq5dZQTFmodqL2g68uHiSwwlWcT5a2FGK0c=
+github.com/alecthomas/go-check-sumtype v0.1.4/go.mod h1:WyYPfhfkdhyrdaligV6svFopZV8Lqdzn5pyVBaV6jhQ=
+github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk=
+github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
+github.com/alexkohler/nakedret/v2 v2.0.4 h1:yZuKmjqGi0pSmjGpOC016LtPJysIL0WEUiaXW5SUnNg=
+github.com/alexkohler/nakedret/v2 v2.0.4/go.mod h1:bF5i0zF2Wo2o4X4USt9ntUWve6JbFv02Ff4vlkmS/VU=
 github.com/alexkohler/prealloc v1.0.0 h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pOcUuw=
 github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE=
 github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw=
@@ -81,8 +48,6 @@ github.com/ashanbrown/forbidigo v1.6.0 h1:D3aewfM37Yb3pxHujIPSpTf6oQk9sc9WZi8ger
 github.com/ashanbrown/forbidigo v1.6.0/go.mod h1:Y8j9jy9ZYAEHXdu723cUlraTqbzjKF1MUyfOKL+AjcU=
 github.com/ashanbrown/makezero v1.1.1 h1:iCQ87C0V0vSyO+M9E/FZYbu65auqH0lnsOkf5FcB28s=
 github.com/ashanbrown/makezero v1.1.1/go.mod h1:i1bJLCRSCHOcOa9Y6MyF2FTfMZMFdHvxKHxgO5Z1axI=
-github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
-github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
 github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
 github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
 github.com/bkielbasa/cyclop v1.2.1 h1:AeF71HZDob1P2/pRm1so9cd1alZnrpyc4q2uP2l0gJY=
@@ -93,92 +58,78 @@ github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM
 github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
 github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M=
 github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k=
-github.com/bombsimon/wsl/v3 v3.4.0 h1:RkSxjT3tmlptwfgEgTgU+KYKLI35p/tviNXNXiL2aNU=
-github.com/bombsimon/wsl/v3 v3.4.0/go.mod h1:KkIB+TXkqy6MvK9BDZVbZxKNYsE1/oLRJbIFtf14qqo=
-github.com/breml/bidichk v0.2.4 h1:i3yedFWWQ7YzjdZJHnPo9d/xURinSq3OM+gyM43K4/8=
-github.com/breml/bidichk v0.2.4/go.mod h1:7Zk0kRFt1LIZxtQdl9W9JwGAcLTTkOs+tN7wuEYGJ3s=
-github.com/breml/errchkjson v0.3.1 h1:hlIeXuspTyt8Y/UmP5qy1JocGNR00KQHgfaNtRAjoxQ=
-github.com/breml/errchkjson v0.3.1/go.mod h1:XroxrzKjdiutFyW3nWhw34VGg7kiMsDQox73yWCGI2U=
-github.com/butuzov/ireturn v0.2.0 h1:kCHi+YzC150GE98WFuZQu9yrTn6GEydO2AuPLbTgnO4=
-github.com/butuzov/ireturn v0.2.0/go.mod h1:Wh6Zl3IMtTpaIKbmwzqi6olnM9ptYQxxVacMsOEFPoc=
-github.com/butuzov/mirror v1.1.0 h1:ZqX54gBVMXu78QLoiqdwpl2mgmoOJTk7s4p4o+0avZI=
-github.com/butuzov/mirror v1.1.0/go.mod h1:8Q0BdQU6rC6WILDiBM60DBfvV78OLJmMmixe7GF45AE=
-github.com/ccojocar/zxcvbn-go v1.0.1 h1:+sxrANSCj6CdadkcMnvde/GWU1vZiiXRbqYSCalV4/4=
-github.com/ccojocar/zxcvbn-go v1.0.1/go.mod h1:g1qkXtUSvHP8lhHp5GrSmTz6uWALGRMQdw6Qnz/hi60=
+github.com/bombsimon/wsl/v4 v4.2.1 h1:Cxg6u+XDWff75SIFFmNsqnIOgob+Q9hG6y/ioKbRFiM=
+github.com/bombsimon/wsl/v4 v4.2.1/go.mod h1:Xu/kDxGZTofQcDGCtQe9KCzhHphIe0fDuyWTxER9Feo=
+github.com/breml/bidichk v0.2.7 h1:dAkKQPLl/Qrk7hnP6P+E0xOodrq8Us7+U0o4UBOAlQY=
+github.com/breml/bidichk v0.2.7/go.mod h1:YodjipAGI9fGcYM7II6wFvGhdMYsC5pHDlGzqvEW3tQ=
+github.com/breml/errchkjson v0.3.6 h1:VLhVkqSBH96AvXEyclMR37rZslRrY2kcyq+31HCsVrA=
+github.com/breml/errchkjson v0.3.6/go.mod h1:jhSDoFheAF2RSDOlCfhHO9KqhZgAYLyvHe7bRCX8f/U=
+github.com/butuzov/ireturn v0.3.0 h1:hTjMqWw3y5JC3kpnC5vXmFJAWI/m31jaCYQqzkS6PL0=
+github.com/butuzov/ireturn v0.3.0/go.mod h1:A09nIiwiqzN/IoVo9ogpa0Hzi9fex1kd9PSD6edP5ZA=
+github.com/butuzov/mirror v1.2.0 h1:9YVK1qIjNspaqWutSv8gsge2e/Xpq1eqEkslEUHy5cs=
+github.com/butuzov/mirror v1.2.0/go.mod h1:DqZZDtzm42wIAIyHXeN8W/qb1EPlb9Qn/if9icBOpdQ=
+github.com/catenacyber/perfsprint v0.7.1 h1:PGW5G/Kxn+YrN04cRAZKC+ZuvlVwolYMrIyyTJ/rMmc=
+github.com/catenacyber/perfsprint v0.7.1/go.mod h1:/wclWYompEyjUD2FuIIDVKNkqz7IgBIWXIH3V0Zol50=
+github.com/ccojocar/zxcvbn-go v1.0.2 h1:na/czXU8RrhXO4EZme6eQJLR4PzcGsahsBOAwU6I3Vg=
+github.com/ccojocar/zxcvbn-go v1.0.2/go.mod h1:g1qkXtUSvHP8lhHp5GrSmTz6uWALGRMQdw6Qnz/hi60=
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
-github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
 github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/chai2010/gettext-go v1.0.2 h1:1Lwwip6Q2QGsAdl/ZKPCwTe9fe0CjlUbqj5bFNSjIRk=
 github.com/chai2010/gettext-go v1.0.2/go.mod h1:y+wnP2cHYaVj19NZhYKAwEMH2CI1gNHeQQ+5AjwawxA=
 github.com/charithe/durationcheck v0.0.10 h1:wgw73BiocdBDQPik+zcEoBG/ob8uyBHf2iyoHGPf5w4=
 github.com/charithe/durationcheck v0.0.10/go.mod h1:bCWXb7gYRysD1CU3C+u4ceO49LoGOY1C1L6uouGNreQ=
-github.com/chavacava/garif v0.0.0-20230227094218-b8c73b2037b8 h1:W9o46d2kbNL06lq7UNDPV0zYLzkrde/bjIqO02eoll0=
-github.com/chavacava/garif v0.0.0-20230227094218-b8c73b2037b8/go.mod h1:gakxgyXaaPkxvLw1XQxNGK4I37ys9iBRzNUx/B7pUCo=
+github.com/chavacava/garif v0.1.0 h1:2JHa3hbYf5D9dsgseMKAmc/MZ109otzgNFk5s87H9Pc=
+github.com/chavacava/garif v0.1.0/go.mod h1:XMyYCkEL58DF0oyW4qDjjnPWONs2HBqYKI+UIPD+Gww=
 github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
 github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
 github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
+github.com/ckaznocha/intrange v0.1.2 h1:3Y4JAxcMntgb/wABQ6e8Q8leMd26JbX2790lIss9MTI=
+github.com/ckaznocha/intrange v0.1.2/go.mod h1:RWffCw/vKBwHeOEwWdCikAtY0q4gGt8VhJZEEA5n+RE=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
-github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
-github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
-github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
 github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY=
 github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
 github.com/curioswitch/go-reassign v0.2.0 h1:G9UZyOcpk/d7Gd6mqYgd8XYWFMw/znxwGDUstnC9DIo=
 github.com/curioswitch/go-reassign v0.2.0/go.mod h1:x6OpXuWvgfQaMGks2BZybTngWjT84hqJfKoO8Tt/Roc=
-github.com/daixiang0/gci v0.11.0 h1:XeQbFKkCRxvVyn06EOuNY6LPGBLVuB/W130c8FrnX6A=
-github.com/daixiang0/gci v0.11.0/go.mod h1:xtHP9N7AHdNvtRNfcx9gwTDfw7FRJx4bZUsiEfiNNAI=
+github.com/daixiang0/gci v0.13.4 h1:61UGkmpoAcxHM2hhNkZEf5SzwQtWJXTSws7jaPyqwlw=
+github.com/daixiang0/gci v0.13.4/go.mod h1:12etP2OniiIdP4q+kjUGrC/rUagga7ODbqsom5Eo5Yk=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/denis-tingaikin/go-header v0.4.3 h1:tEaZKAlqql6SKCY++utLmkPLd6K8IBM20Ha7UVm+mtU=
-github.com/denis-tingaikin/go-header v0.4.3/go.mod h1:0wOCWuN71D5qIgE2nz9KrKmuYBAC2Mra5RassOIQ2/c=
+github.com/denis-tingaikin/go-header v0.5.0 h1:SRdnP5ZKvcO9KKRP1KJrhFR3RrlGuD+42t4429eC9k8=
+github.com/denis-tingaikin/go-header v0.5.0/go.mod h1:mMenU5bWrok6Wl2UsZjy+1okegmwQ3UgWl4V1D8gjlY=
 github.com/emicklei/go-restful/v3 v3.12.0 h1:y2DdzBAURM29NFF94q6RaY4vjIH1rtwDapwQtU84iWk=
 github.com/emicklei/go-restful/v3 v3.12.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
-github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
 github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
-github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
-github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po=
-github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
 github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
-github.com/esimonov/ifshort v1.0.4 h1:6SID4yGWfRae/M7hkVDVVyppy8q/v9OuxNdmjLQStBA=
-github.com/esimonov/ifshort v1.0.4/go.mod h1:Pe8zjlRrJ80+q2CxHLfEOfTwxCZ4O+MuhcHcfgNWTk0=
-github.com/ettle/strcase v0.1.1 h1:htFueZyVeE1XNnMEfbqp5r67qAN/4r6ya1ysq8Q+Zcw=
-github.com/ettle/strcase v0.1.1/go.mod h1:hzDLsPC7/lwKyBOywSHEP89nt2pDgdy+No1NBA9o9VY=
+github.com/ettle/strcase v0.2.0 h1:fGNiVF21fHXpX1niBgk0aROov1LagYsOwV/xqKDKR/Q=
+github.com/ettle/strcase v0.2.0/go.mod h1:DajmHElDSaX76ITe3/VHVyMin4LWSJN5Z909Wp+ED1A=
 github.com/evanphx/json-patch v5.6.0+incompatible h1:jBYDEEiFBPxA0v50tFdvOzQQTCvpL6mnFh5mB2/l16U=
 github.com/evanphx/json-patch v5.6.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
 github.com/evanphx/json-patch/v5 v5.9.0 h1:kcBlZQbplgElYIlo/n1hJbls2z/1awpXxpRi0/FOJfg=
 github.com/evanphx/json-patch/v5 v5.9.0/go.mod h1:VNkHZ/282BpEyt/tObQO8s5CMPmYYq14uClGH4abBuQ=
 github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d h1:105gxyaGwCFad8crR9dcMQWvV9Hvulu6hwUh4tWPJnM=
 github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d/go.mod h1:ZZMPRZwes7CROmyNKgQzC3XPs6L/G2EJLHddWejkmf4=
-github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=
-github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
+github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4=
+github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI=
 github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4=
 github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94=
-github.com/firefart/nonamedreturns v1.0.4 h1:abzI1p7mAEPYuR4A+VLKn4eNDOycjYo2phmY9sfv40Y=
-github.com/firefart/nonamedreturns v1.0.4/go.mod h1:TDhe/tjI1BXo48CmYbUduTV7BdIga8MAO/xbKdcVsGI=
-github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY=
-github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
+github.com/firefart/nonamedreturns v1.0.5 h1:tM+Me2ZaXs8tfdDw3X6DOX++wMCOqzYUho6tUTYIdRA=
+github.com/firefart/nonamedreturns v1.0.5/go.mod h1:gHJjDqhGM4WyPt639SOZs+G89Ko7QKH5R5BhnO6xJhw=
+github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
+github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
 github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
 github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
 github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo=
 github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA=
-github.com/go-critic/go-critic v0.9.0 h1:Pmys9qvU3pSML/3GEQ2Xd9RZ/ip+aXHKILuxczKGV/U=
-github.com/go-critic/go-critic v0.9.0/go.mod h1:5P8tdXL7m/6qnyG6oRAlYLORvoXH0WDypYgAEmagT40=
+github.com/ghostiam/protogetter v0.3.6 h1:R7qEWaSgFCsy20yYHNIJsU9ZOb8TziSRRxuAOTVKeOk=
+github.com/ghostiam/protogetter v0.3.6/go.mod h1:7lpeDnEJ1ZjL/YtyoN99ljO4z0pd3H0d18/t2dPBxHw=
+github.com/go-critic/go-critic v0.11.3 h1:SJbYD/egY1noYjTMNTlhGaYlfQ77rQmrNH7h+gtn0N0=
+github.com/go-critic/go-critic v0.11.3/go.mod h1:Je0h5Obm1rR5hAGA9mP2PDiOOk53W+n7pyvXErFKIgI=
 github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=
 github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og=
-github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
-github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
-github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
-github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
-github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
-github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
-github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
-github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
-github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
 github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
 github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
 github.com/go-logr/zapr v1.3.0 h1:XGdV8XW8zdwFiwOA2Dryh1gj2KRQyOOoNmBy4EplIcQ=
@@ -189,16 +140,18 @@ github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF
 github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4=
 github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE=
 github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ=
-github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
-github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
-github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
+github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
+github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
+github.com/go-test/deep v1.1.0 h1:WOcxcdHcvdgThNXjw0t76K42FXTU7HpNQWHpA2HHNlg=
+github.com/go-test/deep v1.1.0/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE=
 github.com/go-toolsmith/astcast v1.1.0 h1:+JN9xZV1A+Re+95pgnMgDboWNVnIMMQXwfBwLRPgSC8=
 github.com/go-toolsmith/astcast v1.1.0/go.mod h1:qdcuFWeGGS2xX5bLM/c3U9lewg7+Zu4mr+xPwZIB4ZU=
 github.com/go-toolsmith/astcopy v1.1.0 h1:YGwBN0WM+ekI/6SS6+52zLDEf8Yvp3n2seZITCUBt5s=
 github.com/go-toolsmith/astcopy v1.1.0/go.mod h1:hXM6gan18VA1T/daUEHCFcYiW8Ai1tIwIzHY6srfEAw=
 github.com/go-toolsmith/astequal v1.0.3/go.mod h1:9Ai4UglvtR+4up+bAD4+hCj7iTo4m/OXVTSLnCyTAx4=
-github.com/go-toolsmith/astequal v1.1.0 h1:kHKm1AWqClYn15R0K1KKE4RG614D46n+nqUQ06E1dTw=
 github.com/go-toolsmith/astequal v1.1.0/go.mod h1:sedf7VIdCL22LD8qIvv7Nn9MuWJruQA/ysswh64lffQ=
+github.com/go-toolsmith/astequal v1.2.0 h1:3Fs3CYZ1k9Vo4FzFhwwewC3CHISHDnVUPC4x0bI2+Cw=
+github.com/go-toolsmith/astequal v1.2.0/go.mod h1:c8NZ3+kSFtFY/8lPso4v8LuJjdJiUFVnSuU3s0qrrDY=
 github.com/go-toolsmith/astfmt v1.1.0 h1:iJVPDPp6/7AaeLJEruMsBUlOYCmvg0MoCfJprsOmcco=
 github.com/go-toolsmith/astfmt v1.1.0/go.mod h1:OrcLlRwu0CuiIBp/8b5PYF9ktGVZUjlNMV634mhwuQ4=
 github.com/go-toolsmith/astp v1.1.0 h1:dXPuCl6u2llURjdPLLDxJeZInAeZ0/eZwFJmqZMnpQA=
@@ -210,68 +163,48 @@ github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQi
 github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ=
 github.com/go-toolsmith/typep v1.1.0 h1:fIRYDyF+JywLfqzyhdiHzRop/GQDxxNhLGQ6gFUNHus=
 github.com/go-toolsmith/typep v1.1.0/go.mod h1:fVIw+7zjdsMxDA3ITWnH1yOiw1rnTQKCsF/sk2H/qig=
+github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1 h1:TQcrn6Wq+sKGkpyPvppOz99zsMBaUOKXq6HSv655U1c=
+github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
 github.com/go-xmlfmt/xmlfmt v1.1.2 h1:Nea7b4icn8s57fTx1M5AI4qQT5HEM3rVUO8MuE6g80U=
 github.com/go-xmlfmt/xmlfmt v1.1.2/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
 github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
 github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
 github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw=
 github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
-github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
 github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
 github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
-github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
-github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
-github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
 github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
-github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
-github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
-github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
-github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
-github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
-github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=
 github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
 github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
-github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
-github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
 github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
 github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
 github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
 github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
 github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
 github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
-github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
-github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
 github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
 github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
 github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
 github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
-github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2 h1:23T5iq8rbUYlhpt5DB4XJkc6BU31uODLD1o1gKvZmD0=
-github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4=
 github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM=
 github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk=
-github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe h1:6RGUuS7EGotKx6J5HIP8ZtyMdiDscjMLfRBSPuzVVeo=
-github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe/go.mod h1:gjqyPShc/m8pEMpk0a3SeagVb0kaqvhscv+i9jI5ZhQ=
-github.com/golangci/gofmt v0.0.0-20220901101216-f2edd75033f2 h1:amWTbTGqOZ71ruzrdA+Nx5WA3tV1N0goTspwmKCQvBY=
-github.com/golangci/gofmt v0.0.0-20220901101216-f2edd75033f2/go.mod h1:9wOXstvyDRshQ9LggQuzBCGysxs3b6Uo/1MvYCR2NMs=
-github.com/golangci/golangci-lint v1.54.2 h1:oR9zxfWYxt7hFqk6+fw6Enr+E7F0SN2nqHhJYyIb0yo=
-github.com/golangci/golangci-lint v1.54.2/go.mod h1:vnsaCTPKCI2wreL9tv7RkHDwUrz3htLjed6+6UsvcwU=
-github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA=
-github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg=
-github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA=
-github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZqDe4LMs4ZHD0oMUlt9G2LWuDGoisJTBzLMV9o=
-github.com/golangci/misspell v0.4.1 h1:+y73iSicVy2PqyX7kmUefHusENlrP9YwuHZHPLGQj/g=
-github.com/golangci/misspell v0.4.1/go.mod h1:9mAN1quEo3DlpbaIKKyEvRxK1pwqR9s/Sea1bJCtlNI=
-github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6 h1:DIPQnGy2Gv2FSA4B/hh8Q7xx3B7AIDk3DAMeHclH1vQ=
-github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6/go.mod h1:0AKcRCkMoKvUvlf89F6O7H2LYdhr1zBh736mBItOdRs=
-github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys=
-github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ=
-github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
-github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
+github.com/golangci/gofmt v0.0.0-20231018234816-f50ced29576e h1:ULcKCDV1LOZPFxGZaA6TlQbiM3J2GCPnkx/bGF6sX/g=
+github.com/golangci/gofmt v0.0.0-20231018234816-f50ced29576e/go.mod h1:Pm5KhLPA8gSnQwrQ6ukebRcapGb/BG9iUkdaiCcGHJM=
+github.com/golangci/golangci-lint v1.58.2 h1:YHfceEW3CmHmJTPkmVNlqEa4xcjIulYIaVIiB5sNbC4=
+github.com/golangci/golangci-lint v1.58.2/go.mod h1:QH/aRLQIdNuOqEiki+4En6/k2SmeOsRJU/oiYffM1Ow=
+github.com/golangci/misspell v0.5.1 h1:/SjR1clj5uDjNLwYzCahHwIOPmQgoH04AyQIiWGbhCM=
+github.com/golangci/misspell v0.5.1/go.mod h1:keMNyY6R9isGaSAu+4Q8NMBwMPkh15Gtc8UCVoDtAWo=
+github.com/golangci/modinfo v0.3.4 h1:oU5huX3fbxqQXdfspamej74DFX0kyGLkw1ppvXoJ8GA=
+github.com/golangci/modinfo v0.3.4/go.mod h1:wytF1M5xl9u0ij8YSvhkEVPP3M5Mc7XLl1pxH3B2aUM=
+github.com/golangci/plugin-module-register v0.1.1 h1:TCmesur25LnyJkpsVrupv1Cdzo+2f7zX0H6Jkw1Ol6c=
+github.com/golangci/plugin-module-register v0.1.1/go.mod h1:TTpqoB6KkwOJMV8u7+NyXMrkwwESJLOkfl9TxR1DGFc=
+github.com/golangci/revgrep v0.5.3 h1:3tL7c1XBMtWHHqVpS5ChmiAAoe4PF/d5+ULzV9sLAzs=
+github.com/golangci/revgrep v0.5.3/go.mod h1:U4R/s9dlXZsg8uJmaR1GrloUr14D7qDl8gi2iPXJH8k=
+github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed h1:IURFTjxeTfNFP0hTEi1YKjB/ub8zkpaOqFFMApi2EAs=
+github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed/go.mod h1:XLXN8bNw4CGRPaqgl3bv/lhz7bsGPh4/xSaMTbo2vkQ=
 github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4=
 github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA=
 github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I=
@@ -280,7 +213,6 @@ github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5a
 github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
 github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
 github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
@@ -294,32 +226,14 @@ github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
 github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
-github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
-github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
-github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
-github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
-github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
-github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
-github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
-github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
-github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
-github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
-github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
-github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
-github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
-github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1 h1:K6RDEckDVWvDI9JAJYCmNdQXq6neHJOYx3V6jnqNEec=
-github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
-github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
+github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6 h1:k7nVchz72niMH6YLQNvHSdIE7iqsQxK1P41mySCvssg=
+github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw=
 github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
 github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
-github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
 github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
-github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
-github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
-github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
-github.com/gordonklaus/ineffassign v0.0.0-20230610083614-0e73809eb601 h1:mrEEilTAUmaAORhssPPkxj84TsHrPMLBGW2Z4SoTxm8=
-github.com/gordonklaus/ineffassign v0.0.0-20230610083614-0e73809eb601/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it82ghYcEXIAk+QT0=
+github.com/gordonklaus/ineffassign v0.1.0 h1:y2Gd/9I7MdY1oEIt+n+rowjBNDcLQq3RsH5hwJd0f9s=
+github.com/gordonklaus/ineffassign v0.1.0/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it82ghYcEXIAk+QT0=
 github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
 github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
 github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
@@ -337,79 +251,63 @@ github.com/gostaticanalysis/testutil v0.4.0 h1:nhdCmubdmDF6VEatUNjgUZBJKWRqugoIS
 github.com/gostaticanalysis/testutil v0.4.0/go.mod h1:bLIoPefWXrRi/ssLFWX1dx7Repi5x3CuviD3dgAZaBU=
 github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7 h1:pdN6V1QBWetyv/0+wjACpqVH+eVULgEjkurDLq3goeM=
 github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
-github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
-github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
-github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
-github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
 github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
 github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek=
 github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
-github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
-github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
 github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
 github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
 github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
 github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
-github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
-github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
 github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM=
 github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
 github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
 github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
-github.com/jgautheron/goconst v1.5.1 h1:HxVbL1MhydKs8R8n/HE5NPvzfaYmQJA3o879lE4+WcM=
-github.com/jgautheron/goconst v1.5.1/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4=
+github.com/jgautheron/goconst v1.7.1 h1:VpdAG7Ca7yvvJk5n8dMwQhfEZJh95kl/Hl9S1OI5Jkk=
+github.com/jgautheron/goconst v1.7.1/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4=
 github.com/jingyugao/rowserrcheck v1.1.1 h1:zibz55j/MJtLsjP1OF4bSdgXxwL1b+Vn7Tjzq7gFzUs=
 github.com/jingyugao/rowserrcheck v1.1.1/go.mod h1:4yvlZSDb3IyDTUZJUmpZfm2Hwok+Dtp+nu2qOq+er9c=
 github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af h1:KA9BjwUk7KlCh6S9EAGWBt1oExIUv9WyNCiRz5amv48=
 github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0=
+github.com/jjti/go-spancheck v0.6.1 h1:ZK/wE5Kyi1VX3PJpUO2oEgeoI4FWOUm7Shb2Gbv5obI=
+github.com/jjti/go-spancheck v0.6.1/go.mod h1:vF1QkOO159prdo6mHRxak2CpzDpHAfKiPUDP/NeRnX8=
 github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
 github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
-github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
-github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
-github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
-github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
 github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
 github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
-github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
-github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
-github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
-github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
 github.com/julz/importas v0.1.0 h1:F78HnrsjY3cR7j0etXy5+TU1Zuy7Xt08X/1aJnH5xXY=
 github.com/julz/importas v0.1.0/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0=
+github.com/karamaru-alpha/copyloopvar v1.1.0 h1:x7gNyKcC2vRBO1H2Mks5u1VxQtYvFiym7fCjIP8RPos=
+github.com/karamaru-alpha/copyloopvar v1.1.0/go.mod h1:u7CIfztblY0jZLOQZgH3oYsJzpC2A7S6u/lfgSXHy0k=
 github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
-github.com/kisielk/errcheck v1.6.3 h1:dEKh+GLHcWm2oN34nMvDzn1sqI0i0WxPvrgiJA5JuM8=
-github.com/kisielk/errcheck v1.6.3/go.mod h1:nXw/i/MfnvRHqXa7XXmQMUB0oNFGuBrNI8d8NLy0LPw=
-github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg=
+github.com/kisielk/errcheck v1.7.0 h1:+SbscKmWJ5mOK/bO1zS60F5I9WwZDWOfRsC4RwfwRV0=
+github.com/kisielk/errcheck v1.7.0/go.mod h1:1kLL+jV4e+CFfueBmI1dSK2ADDyQnlrnrY/FqKluHJQ=
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
-github.com/kkHAIKE/contextcheck v1.1.4 h1:B6zAaLhOEEcjvUgIYEqystmnFk1Oemn8bvJhbt0GMb8=
-github.com/kkHAIKE/contextcheck v1.1.4/go.mod h1:1+i/gWqokIa+dm31mqGLZhZJ7Uh44DJGZVmr6QRBNJg=
-github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
-github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
-github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
-github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
-github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kkHAIKE/contextcheck v1.1.5 h1:CdnJh63tcDe53vG+RebdpdXJTc9atMgGqdx8LXxiilg=
+github.com/kkHAIKE/contextcheck v1.1.5/go.mod h1:O930cpht4xb1YQpK+1+AgoM3mFsvxr7uyFptcnWTYUA=
 github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
 github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
-github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
-github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
 github.com/kulti/thelper v0.6.3 h1:ElhKf+AlItIu+xGnI990no4cE2+XaSu1ULymV2Yulxs=
 github.com/kulti/thelper v0.6.3/go.mod h1:DsqKShOvP40epevkFrvIwkCMNYxMeTNjdWL4dqWHZ6I=
-github.com/kunwardeep/paralleltest v1.0.8 h1:Ul2KsqtzFxTlSU7IP0JusWlLiNqQaloB9vguyjbE558=
-github.com/kunwardeep/paralleltest v1.0.8/go.mod h1:2C7s65hONVqY7Q5Efj5aLzRCNLjw2h4eMc9EcypGjcY=
+github.com/kunwardeep/paralleltest v1.0.10 h1:wrodoaKYzS2mdNVnc4/w31YaXFtsc21PCTdvWJ/lDDs=
+github.com/kunwardeep/paralleltest v1.0.10/go.mod h1:2C7s65hONVqY7Q5Efj5aLzRCNLjw2h4eMc9EcypGjcY=
 github.com/kyoh86/exportloopref v0.1.11 h1:1Z0bcmTypkL3Q4k+IDHMWTcnCliEZcaPiIe0/ymEyhQ=
 github.com/kyoh86/exportloopref v0.1.11/go.mod h1:qkV4UF1zGl6EkF1ox8L5t9SwyeBAZ3qLMd6up458uqA=
-github.com/ldez/gomoddirectives v0.2.3 h1:y7MBaisZVDYmKvt9/l1mjNCiSA1BVn34U0ObUcJwlhA=
-github.com/ldez/gomoddirectives v0.2.3/go.mod h1:cpgBogWITnCfRq2qGoDkKMEVSaarhdBr6g8G04uz6d0=
+github.com/lasiar/canonicalheader v1.1.1 h1:wC+dY9ZfiqiPwAexUApFush/csSPXeIi4QqyxXmng8I=
+github.com/lasiar/canonicalheader v1.1.1/go.mod h1:cXkb3Dlk6XXy+8MVQnF23CYKWlyA7kfQhSw2CcZtZb0=
+github.com/ldez/gomoddirectives v0.2.4 h1:j3YjBIjEBbqZ0NKtBNzr8rtMHTOrLPeiwTkfUJZ3alg=
+github.com/ldez/gomoddirectives v0.2.4/go.mod h1:oWu9i62VcQDYp9EQ0ONTfqLNh+mDLWWDO+SO0qSQw5g=
 github.com/ldez/tagliatelle v0.5.0 h1:epgfuYt9v0CG3fms0pEgIMNPuFf/LpPIfjk4kyqSioo=
 github.com/ldez/tagliatelle v0.5.0/go.mod h1:rj1HmWiL1MiKQuOONhd09iySTEkUuE/8+5jtPYz9xa4=
-github.com/leonklingele/grouper v1.1.1 h1:suWXRU57D4/Enn6pXR0QVqqWWrnJ9Osrz+5rjt8ivzU=
-github.com/leonklingele/grouper v1.1.1/go.mod h1:uk3I3uDfi9B6PeUjsCKi6ndcf63Uy7snXgR4yDYQVDY=
+github.com/leonklingele/grouper v1.1.2 h1:o1ARBDLOmmasUaNDesWqWCIFH3u7hoFlM84YrjT3mIY=
+github.com/leonklingele/grouper v1.1.2/go.mod h1:6D0M/HVkhs2yRKRFZUoGjeDy7EZTfFBE9gl4kjmIGkA=
 github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0=
 github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE=
 github.com/lufeee/execinquery v1.2.1 h1:hf0Ems4SHcUGBxpGN7Jz78z1ppVkP/837ZlETPCEtOM=
 github.com/lufeee/execinquery v1.2.1/go.mod h1:EC7DrEKView09ocscGHC+apXMIaorh4xqSxS/dy8SbM=
+github.com/macabu/inamedparam v0.1.3 h1:2tk/phHkMlEL/1GNe/Yf6kkR/hkcUdAEY3L0hjYV1Mk=
+github.com/macabu/inamedparam v0.1.3/go.mod h1:93FLICAIk/quk7eaPPQvbzihUdn/QkGDwIZEoLtpH6I=
 github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
 github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
 github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
@@ -430,13 +328,10 @@ github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D
 github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
 github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU=
 github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
-github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
 github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 h1:jWpvCLoY8Z/e3VKvlsiIGKtc+UG6U5vzxaoagmhXfyg=
 github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0/go.mod h1:QUyp042oQthUoa9bqDv0ER0wrtXnBruoNd7aNjkbP+k=
-github.com/mbilski/exhaustivestruct v1.2.0 h1:wCBmUnSYufAHO6J4AVWY6ff+oxWxsVFrwgOdMUQePUo=
-github.com/mbilski/exhaustivestruct v1.2.0/go.mod h1:OeTBVxQWoEmB2J2JCHmXWPJ0aksxSUOUy+nvtVEfzXc=
-github.com/mgechev/revive v1.3.2 h1:Wb8NQKBaALBJ3xrrj4zpwJwqwNA6nDpyJSEQWcCka6U=
-github.com/mgechev/revive v1.3.2/go.mod h1:UCLtc7o5vg5aXCwdUTU1kEBQ1v+YXPAkYDIDXbrs5I0=
+github.com/mgechev/revive v1.3.7 h1:502QY0vQGe9KtYJ9FpxMz9rL+Fc/P13CI5POL4uHCcE=
+github.com/mgechev/revive v1.3.7/go.mod h1:RJ16jUbF0OWC3co/+XTxmFNgEpUPwnnA0BRllX2aDNA=
 github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
 github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
 github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
@@ -450,8 +345,6 @@ github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3
 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
 github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
 github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
-github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
-github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
 github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
 github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
 github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 h1:n6/2gBQ3RWajuToeY6ZtZTIKv2v7ThUy5KKusIT0yc0=
@@ -460,82 +353,67 @@ github.com/moricho/tparallel v0.3.1 h1:fQKD4U1wRMAYNngDonW5XupoB/ZGJHdpzrWqgyg9k
 github.com/moricho/tparallel v0.3.1/go.mod h1:leENX2cUv7Sv2qDgdi0D0fCftN8fRC67Bcn8pqzeYNI=
 github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
 github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
-github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
-github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
 github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus=
 github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw=
 github.com/nakabonne/nestif v0.3.1 h1:wm28nZjhQY5HyYPx+weN3Q65k6ilSBxDb8v5S81B81U=
 github.com/nakabonne/nestif v0.3.1/go.mod h1:9EtoZochLn5iUprVDmDjqGKPofoUEBL8U4Ngq6aY7OE=
-github.com/nishanths/exhaustive v0.11.0 h1:T3I8nUGhl/Cwu5Z2hfc92l0e04D2GEW6e0l8pzda2l0=
-github.com/nishanths/exhaustive v0.11.0/go.mod h1:RqwDsZ1xY0dNdqHho2z6X+bgzizwbLYOWnZbbl2wLB4=
+github.com/nishanths/exhaustive v0.12.0 h1:vIY9sALmw6T/yxiASewa4TQcFsVYZQQRUQJhKRf3Swg=
+github.com/nishanths/exhaustive v0.12.0/go.mod h1:mEZ95wPIZW+x8kC4TgC+9YCUgiST7ecevsVDTgc2obs=
 github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk=
 github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c=
-github.com/nunnatsa/ginkgolinter v0.13.5 h1:fOsPB4CEZOPkyMqF4B9hoqOpooFWU7vWSVkCSscVpgU=
-github.com/nunnatsa/ginkgolinter v0.13.5/go.mod h1:OBHy4536xtuX3102NM63XRtOyxqZOO02chsaeDWXVO8=
+github.com/nunnatsa/ginkgolinter v0.16.2 h1:8iLqHIZvN4fTLDC0Ke9tbSZVcyVHoBs0HIbnVSxfHJk=
+github.com/nunnatsa/ginkgolinter v0.16.2/go.mod h1:4tWRinDN1FeJgU+iJANW/kz7xKN5nYRAOfJDQUS9dOQ=
 github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
 github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
-github.com/onsi/ginkgo/v2 v2.17.1 h1:V++EzdbhI4ZV4ev0UTIj0PzhzOcReJFyJaLjtSF55M8=
-github.com/onsi/ginkgo/v2 v2.17.1/go.mod h1:llBI3WDLL9Z6taip6f33H76YcWtJv+7R3HigUjbIBOs=
-github.com/onsi/gomega v1.32.0 h1:JRYU78fJ1LPxlckP6Txi/EYqJvjtMrDC04/MM5XRHPk=
-github.com/onsi/gomega v1.32.0/go.mod h1:a4x4gW6Pz2yK1MAmvluYme5lvYTn61afQ2ETw/8n4Lg=
+github.com/onsi/ginkgo/v2 v2.17.2 h1:7eMhcy3GimbsA3hEnVKdw/PQM9XN9krpKVXsZdph0/g=
+github.com/onsi/ginkgo/v2 v2.17.2/go.mod h1:nP2DPOQoNsQmsVyv5rDA8JkXQoCs6goXIvr/PRJ1eCc=
+github.com/onsi/gomega v1.33.1 h1:dsYjIxxSR755MDmKVsaFQTE22ChNBcuuTWgkUDSubOk=
+github.com/onsi/gomega v1.33.1/go.mod h1:U4R44UsT+9eLIaYRB2a5qajjtQYn0hauxvRm16AVYg0=
 github.com/openshift/api v0.0.0-20240904015708-69df64132c91 h1:PfPpMwHR8iAxQuLpQt+x9f3PAIeSPG2BLQd69p+NQHw=
 github.com/openshift/api v0.0.0-20240904015708-69df64132c91/go.mod h1:OOh6Qopf21pSzqNVCB5gomomBXb8o5sGKZxG2KNpaXM=
 github.com/openshift/client-go v0.0.0-20240904134955-cd42fd3d7408 h1:xHOmkHjN0AFEXo4qCpSAdntIGZfD9UShjnAer+IGXUo=
 github.com/openshift/client-go v0.0.0-20240904134955-cd42fd3d7408/go.mod h1:D3yfotGqRY1LWaHcoSCeadhdyvfrQWYJXui1M+JqKvY=
 github.com/openshift/cluster-api-actuator-pkg/testutils v0.0.0-20240626103413-ddea9c7c0aca h1:HFFzJznFQVudyVSpMa+hZwXZpc0ZvFCIpiMNFdf21g8=
 github.com/openshift/cluster-api-actuator-pkg/testutils v0.0.0-20240626103413-ddea9c7c0aca/go.mod h1:osVq9/R6qKHBQxDP4cYTvkgXVBKOMs1SOfPLFfn0m7A=
+github.com/openshift/cluster-control-plane-machine-set-operator v0.0.0-20240909043600-373ac49835bf h1:mfMmaD9+vZIZQq3MGXsS/AGHXekj4wIn3zc1Cs1EY8M=
+github.com/openshift/cluster-control-plane-machine-set-operator v0.0.0-20240909043600-373ac49835bf/go.mod h1:2fZsjZ3QSPkoMUc8QntXfeBb8AnvW+WIYwwQX8vmgvQ=
 github.com/openshift/library-go v0.0.0-20240903143724-7c5c5d305ac1 h1:qEMLKQXdGlUiK1KC2k6QK8og38eveU2/gcNBnQAwwmk=
 github.com/openshift/library-go v0.0.0-20240903143724-7c5c5d305ac1/go.mod h1:/wmao3qtqOQ484HDka9cWP7SIvOQOdzpmhyXkF2YdzE=
-github.com/otiai10/copy v1.2.0 h1:HvG945u96iNadPoG2/Ja2+AUJeW5YuFQMixq9yirC+k=
 github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw=
+github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU=
+github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w=
 github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE=
 github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs=
 github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo=
 github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc=
-github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
-github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
+github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
+github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
 github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI=
 github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
-github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
 github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/polyfloyd/go-errorlint v1.4.4 h1:A9gytp+p6TYqeALTYRoxJESYP8wJRETRX2xzGWFsEBU=
-github.com/polyfloyd/go-errorlint v1.4.4/go.mod h1:ry5NqF7l9Q77V+XqAfUg1zfryrEtyac3G5+WVpIK0xU=
+github.com/polyfloyd/go-errorlint v1.5.1 h1:5gHxDjLyyWij7fhfrjYNNlHsUNQeyx0LFQKUelO3RBo=
+github.com/polyfloyd/go-errorlint v1.5.1/go.mod h1:sH1QC1pxxi0fFecsVIzBmxtrgd9IF/SkJpA6wqyKAJs=
+github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g=
+github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U=
 github.com/prometheus-operator/prometheus-operator/pkg/apis/monitoring v0.74.0 h1:AHzMWDxNiAVscJL6+4wkvFRTpMnJqiaZFEKA/osaBXE=
 github.com/prometheus-operator/prometheus-operator/pkg/apis/monitoring v0.74.0/go.mod h1:wAR5JopumPtAZnu0Cjv2PSqV4p4QB09LMhc6fZZTXuA=
-github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
-github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
-github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
-github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
-github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY=
 github.com/prometheus/client_golang v1.18.0 h1:HzFfmkOzH5Q8L8G+kSJKUx5dtG87sewO+FoDDqP5Tbk=
 github.com/prometheus/client_golang v1.18.0/go.mod h1:T+GXkCk5wSJyOqMIzVgvvjFDlkOQntgjkJWKrN5txjA=
-github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
-github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
-github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw=
 github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI=
-github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
-github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
-github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
-github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls=
 github.com/prometheus/common v0.45.0 h1:2BGz0eBc2hdMDLnO/8n0jeB3oPrt2D08CekT0lneoxM=
 github.com/prometheus/common v0.45.0/go.mod h1:YJmSTw9BoKxJplESWWxlbyttQR4uaEcGyv9MZjVOJsY=
-github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
-github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
-github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
-github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
-github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
 github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo=
 github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo=
-github.com/quasilyte/go-ruleguard v0.4.0 h1:DyM6r+TKL+xbKB4Nm7Afd1IQh9kEUKQs2pboWGKtvQo=
-github.com/quasilyte/go-ruleguard v0.4.0/go.mod h1:Eu76Z/R8IXtViWUIHkE3p8gdH3/PKk1eh3YGfaEof10=
+github.com/quasilyte/go-ruleguard v0.4.2 h1:htXcXDK6/rO12kiTHKfHuqR4kr3Y4M0J0rOL6CH/BYs=
+github.com/quasilyte/go-ruleguard v0.4.2/go.mod h1:GJLgqsLeo4qgavUoL8JeGFNS7qcisx3awV/w9eWTmNI=
+github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE=
+github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU=
 github.com/quasilyte/gogrep v0.5.0 h1:eTKODPXbI8ffJMN+W2aE0+oL0z/nh8/5eNdiO34SOAo=
 github.com/quasilyte/gogrep v0.5.0/go.mod h1:Cm9lpz9NZjEoL1tgZ2OgeUKPIxL1meE7eo60Z6Sk+Ng=
 github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 h1:TCg2WBOl980XxGFEZSS6KlBGIV0diGdySzxATTWoqaU=
@@ -547,42 +425,38 @@ github.com/rivo/uniseg v0.4.2 h1:YwD0ulJSJytLpiaWua0sBDusfsCZohxjxzVTYjwxfV8=
 github.com/rivo/uniseg v0.4.2/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
 github.com/robfig/cron v1.2.0 h1:ZjScXvvxeQ63Dbyxy76Fj3AT3Ut0aKsyd2/tl3DTMuQ=
 github.com/robfig/cron v1.2.0/go.mod h1:JGuDeoQd7Z6yL4zQhZ3OPEVHB7fL6Ka6skscFHfmt2k=
-github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
-github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
-github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
+github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
+github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
 github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
-github.com/ryancurrah/gomodguard v1.3.0 h1:q15RT/pd6UggBXVBuLps8BXRvl5GPBcwVA7BJHMLuTw=
-github.com/ryancurrah/gomodguard v1.3.0/go.mod h1:ggBxb3luypPEzqVtq33ee7YSN35V28XeGnid8dnni50=
-github.com/ryanrolds/sqlclosecheck v0.4.0 h1:i8SX60Rppc1wRuyQjMciLqIzV3xnoHB7/tXbr6RGYNI=
-github.com/ryanrolds/sqlclosecheck v0.4.0/go.mod h1:TBRRjzL31JONc9i4XMinicuo+s+E8yKZ5FN8X3G6CKQ=
+github.com/ryancurrah/gomodguard v1.3.2 h1:CuG27ulzEB1Gu5Dk5gP8PFxSOZ3ptSdP5iI/3IXxM18=
+github.com/ryancurrah/gomodguard v1.3.2/go.mod h1:LqdemiFomEjcxOqirbQCb3JFvSxH2JUYMerTFd3sF2o=
+github.com/ryanrolds/sqlclosecheck v0.5.1 h1:dibWW826u0P8jNLsLN+En7+RqWWTYrjCB9fJfSfdyCU=
+github.com/ryanrolds/sqlclosecheck v0.5.1/go.mod h1:2g3dUjoS6AL4huFdv6wn55WpLIDjY7ZgUR4J8HOO/XQ=
 github.com/sagikazarmark/locafero v0.3.0 h1:zT7VEGWC2DTflmccN/5T1etyKvxSxpHsjb9cJvm4SvQ=
 github.com/sagikazarmark/locafero v0.3.0/go.mod h1:w+v7UsPNFwzF1cHuOajOOzoq4U7v/ig1mpRjqV+Bu1U=
 github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
 github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
 github.com/sanposhiho/wastedassign/v2 v2.0.7 h1:J+6nrY4VW+gC9xFzUc+XjPD3g3wF3je/NsJFwFK7Uxc=
 github.com/sanposhiho/wastedassign/v2 v2.0.7/go.mod h1:KyZ0MWTwxxBmfwn33zh3k1dmsbF2ud9pAAGfoLfjhtI=
+github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4=
+github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY=
 github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tMEOsumirXcOJqAw=
 github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ=
-github.com/sashamelentyev/usestdlibvars v1.24.0 h1:MKNzmXtGh5N0y74Z/CIaJh4GlB364l0K1RUT08WSWAc=
-github.com/sashamelentyev/usestdlibvars v1.24.0/go.mod h1:9cYkq+gYJ+a5W2RPdhfaSCnTVUC1OQP/bSiiBhq3OZE=
-github.com/securego/gosec/v2 v2.17.0 h1:ZpAStTDKY39insEG9OH6kV3IkhQZPTq9a9eGOLOjcdI=
-github.com/securego/gosec/v2 v2.17.0/go.mod h1:lt+mgC91VSmriVoJLentrMkRCYs+HLTBnUFUBuhV2hc=
+github.com/sashamelentyev/usestdlibvars v1.25.0 h1:IK8SI2QyFzy/2OD2PYnhy84dpfNo9qADrRt6LH8vSzU=
+github.com/sashamelentyev/usestdlibvars v1.25.0/go.mod h1:9nl0jgOfHKWNFS43Ojw0i7aRoS4j6EBye3YBhmAIRF8=
+github.com/securego/gosec/v2 v2.20.0 h1:z/d5qp1niWa2avgFyUIglYTYYuGq2LrJwNj1HRVXsqc=
+github.com/securego/gosec/v2 v2.20.0/go.mod h1:hkiArbBZLwK1cehBcg3oFWUlYPWTBffPwwJVWChu83o=
 github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
 github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
 github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c h1:W65qqJCIOVP4jpqPQ0YvHYKwcMEMVWIzWC5iNQQfBTU=
 github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAxekIXwN8qQyfc5gl2NlkB3CQlkizAbOkeBs=
 github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
 github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ=
-github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
-github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
-github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
 github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
 github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
 github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE=
 github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4=
-github.com/sivchari/nosnakecase v1.7.0 h1:7QkpWIRMe8x25gckkFd2A5Pi6Ymo0qgr4JrhGt95do8=
-github.com/sivchari/nosnakecase v1.7.0/go.mod h1:CwDzrzPea40/GB6uynrNLiorAlgFRvRbFSgJx2Gs+QY=
 github.com/sivchari/tenv v1.7.1 h1:PSpuD4bu6fSmtWMxSGWcvqUUgIn7k3yOJhOIzVWn8Ak=
 github.com/sivchari/tenv v1.7.1/go.mod h1:64yStXKSOxDfX47NlhVwND4dHwfZDdbp2Lyl018Icvg=
 github.com/sonatard/noctx v0.0.2 h1:L7Dz4De2zDQhW8S0t+KUjY0MAQJd6SgVwhzNIc4ok00=
@@ -591,8 +465,8 @@ github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9yS
 github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
 github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0=
 github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs=
-github.com/spf13/afero v1.10.0 h1:EaGW2JJh15aKOejeuJ+wpFSHnbd7GE6Wvp3TsNhb6LY=
-github.com/spf13/afero v1.10.0/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ=
+github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
+github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
 github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA=
 github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48=
 github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0=
@@ -606,20 +480,16 @@ github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRk
 github.com/stbenjam/no-sprintf-host-port v0.1.1 h1:tYugd/yrm1O0dV+ThCbaKZh195Dfm07ysF0U6JQXczc=
 github.com/stbenjam/no-sprintf-host-port v0.1.1/go.mod h1:TLhvtIvONRzdmkFiio4O8LHsN9N74I+PhRquPsxpL0I=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
 github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
 github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
 github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
-github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
 github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
-github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
-github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
 github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
 github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
 github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
@@ -633,34 +503,34 @@ github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA
 github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0=
 github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpRQGxTSkNYKJ51yaw6ChIqO+Je8UqsTKN/cDag=
 github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY=
-github.com/tetafro/godot v1.4.14 h1:ScO641OHpf9UpHPk8fCknSuXNMpi4iFlwuWoBs3L+1s=
-github.com/tetafro/godot v1.4.14/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio=
+github.com/tetafro/godot v1.4.16 h1:4ChfhveiNLk4NveAZ9Pu2AN8QZ2nkUGFuadM9lrr5D0=
+github.com/tetafro/godot v1.4.16/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio=
 github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 h1:quvGphlmUVU+nhpFa4gg4yJyTRJ13reZMDHrKwYw53M=
 github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966/go.mod h1:27bSVNWSBOHm+qRp1T9qzaIpsWEP6TbUnei/43HK+PQ=
 github.com/timonwong/loggercheck v0.9.4 h1:HKKhqrjcVj8sxL7K77beXh0adEm6DLjV/QOGeMXEVi4=
 github.com/timonwong/loggercheck v0.9.4/go.mod h1:caz4zlPcgvpEkXgVnAJGowHAMW2NwHaNlpS8xDbVhTg=
-github.com/tomarrell/wrapcheck/v2 v2.8.1 h1:HxSqDSN0sAt0yJYsrcYVoEeyM4aI9yAm3KQpIXDJRhQ=
-github.com/tomarrell/wrapcheck/v2 v2.8.1/go.mod h1:/n2Q3NZ4XFT50ho6Hbxg+RV1uyo2Uow/Vdm9NQcl5SE=
+github.com/tomarrell/wrapcheck/v2 v2.8.3 h1:5ov+Cbhlgi7s/a42BprYoxsr73CbdMUTzE3bRDFASUs=
+github.com/tomarrell/wrapcheck/v2 v2.8.3/go.mod h1:g9vNIyhb5/9TQgumxQyOEqDHsmGYcGsVMOx/xGkqdMo=
 github.com/tommy-muehle/go-mnd/v2 v2.5.1 h1:NowYhSdyE/1zwK9QCLeRb6USWdoif80Ie+v+yU8u1Zw=
 github.com/tommy-muehle/go-mnd/v2 v2.5.1/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw=
 github.com/ultraware/funlen v0.1.0 h1:BuqclbkY6pO+cvxoq7OsktIXZpgBSkYTQtmwhAK81vI=
 github.com/ultraware/funlen v0.1.0/go.mod h1:XJqmOQja6DpxarLj6Jj1U7JuoS8PvL4nEqDaQhy22p4=
-github.com/ultraware/whitespace v0.0.5 h1:hh+/cpIcopyMYbZNVov9iSxvJU3OYQg78Sfaqzi/CzI=
-github.com/ultraware/whitespace v0.0.5/go.mod h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA=
-github.com/uudashr/gocognit v1.0.7 h1:e9aFXgKgUJrQ5+bs61zBigmj7bFJ/5cC6HmMahVzuDo=
-github.com/uudashr/gocognit v1.0.7/go.mod h1:nAIUuVBnYU7pcninia3BHOvQkpQCeO76Uscky5BOwcY=
+github.com/ultraware/whitespace v0.1.1 h1:bTPOGejYFulW3PkcrqkeQwOd6NKOOXvmGD9bo/Gk8VQ=
+github.com/ultraware/whitespace v0.1.1/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8=
+github.com/uudashr/gocognit v1.1.2 h1:l6BAEKJqQH2UpKAPKdMfZf5kE4W/2xk8pfU1OVLvniI=
+github.com/uudashr/gocognit v1.1.2/go.mod h1:aAVdLURqcanke8h3vg35BC++eseDm66Z7KmchI5et4k=
 github.com/vmware/govmomi v0.37.2 h1:5ANLoaTxWv600ZnoosJ2zXbM3A+EaxqGheEZbRN8YVE=
 github.com/vmware/govmomi v0.37.2/go.mod h1:mtGWtM+YhTADHlCgJBiskSRPOZRsN9MSjPzaZLte/oQ=
-github.com/xen0n/gosmopolitan v1.2.1 h1:3pttnTuFumELBRSh+KQs1zcz4fN6Zy7aB0xlnQSn1Iw=
-github.com/xen0n/gosmopolitan v1.2.1/go.mod h1:JsHq/Brs1o050OOdmzHeOr0N7OtlnKRAGAsElF8xBQA=
+github.com/xen0n/gosmopolitan v1.2.2 h1:/p2KTnMzwRexIW8GlKawsTWOxn7UHA+jCMF/V8HHtvU=
+github.com/xen0n/gosmopolitan v1.2.2/go.mod h1:7XX7Mj61uLYrj0qmeN0zi7XDon9JRAEhYQqAPLVNTeg=
 github.com/xlab/treeprint v1.2.0 h1:HzHnuAF1plUN2zGlAFHbSQP2qJ0ZAD3XF5XD7OesXRQ=
 github.com/xlab/treeprint v1.2.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd/WEJu0=
 github.com/yagipy/maintidx v1.0.0 h1:h5NvIsCz+nRDapQ0exNv4aJ0yXSI0420omVANTv3GJM=
 github.com/yagipy/maintidx v1.0.0/go.mod h1:0qNf/I/CCZXSMhsRsrEPDZ+DkekpKLXAJfsTACwgXLk=
-github.com/yeya24/promlinter v0.2.0 h1:xFKDQ82orCU5jQujdaD8stOHiv8UN68BSdn2a8u8Y3o=
-github.com/yeya24/promlinter v0.2.0/go.mod h1:u54lkmBOZrpEbQQ6gox2zWKKLKu2SGe+2KOiextY+IA=
-github.com/ykadowak/zerologlint v0.1.3 h1:TLy1dTW3Nuc+YE3bYRPToG1Q9Ej78b5UUN6bjbGdxPE=
-github.com/ykadowak/zerologlint v0.1.3/go.mod h1:KaUskqF3e/v59oPmdq1U1DnKcuHokl2/K1U4pmIELKg=
+github.com/yeya24/promlinter v0.3.0 h1:JVDbMp08lVCP7Y6NP3qHroGAO6z2yGKQtS5JsjqtoFs=
+github.com/yeya24/promlinter v0.3.0/go.mod h1:cDfJQQYv9uYciW60QT0eeHlFodotkYZlL+YcPQN+mW4=
+github.com/ykadowak/zerologlint v0.1.5 h1:Gy/fMz1dFQN9JZTPjv1hxEk+sRWm05row04Yoolgdiw=
+github.com/ykadowak/zerologlint v0.1.5/go.mod h1:KaUskqF3e/v59oPmdq1U1DnKcuHokl2/K1U4pmIELKg=
 github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
 github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
 github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
@@ -668,74 +538,41 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec
 github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-gitlab.com/bosi/decorder v0.4.0 h1:HWuxAhSxIvsITcXeP+iIRg9d1cVfvVkmlF7M68GaoDY=
-gitlab.com/bosi/decorder v0.4.0/go.mod h1:xarnteyUoJiOTEldDysquWKTVDCKo2TOIOIibSuWqOg=
-go-simpler.org/assert v0.6.0 h1:QxSrXa4oRuo/1eHMXSBFHKvJIpWABayzKldqZyugG7E=
-go-simpler.org/assert v0.6.0/go.mod h1:74Eqh5eI6vCK6Y5l3PI8ZYFXG4Sa+tkr70OIPJAUr28=
-go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
-go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
-go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
-go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
-go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
-go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
+gitlab.com/bosi/decorder v0.4.2 h1:qbQaV3zgwnBZ4zPMhGLW4KZe7A7NwxEhJx39R3shffo=
+gitlab.com/bosi/decorder v0.4.2/go.mod h1:muuhHoaJkA9QLcYHq4Mj8FJUwDZ+EirSHRiaTcTf6T8=
+go-simpler.org/assert v0.9.0 h1:PfpmcSvL7yAnWyChSjOz6Sp6m9j5lyK8Ok9pEL31YkQ=
+go-simpler.org/assert v0.9.0/go.mod h1:74Eqh5eI6vCK6Y5l3PI8ZYFXG4Sa+tkr70OIPJAUr28=
+go-simpler.org/musttag v0.12.2 h1:J7lRc2ysXOq7eM8rwaTYnNrHd5JwjppzB6mScysB2Cs=
+go-simpler.org/musttag v0.12.2/go.mod h1:uN1DVIasMTQKk6XSik7yrJoEysGtR2GRqvWnI9S7TYM=
+go-simpler.org/sloglint v0.6.0 h1:0YcqSVG7LI9EVBfRPhgPec79BH6X6mwjFuUR5Mr7j1M=
+go-simpler.org/sloglint v0.6.0/go.mod h1:+kJJtebtPePWyG5boFwY46COydAggADDOHM22zOvzBk=
 go.starlark.net v0.0.0-20230525235612-a134d8f9ddca h1:VdD38733bfYv5tUZwEIskMM93VanwNIi5bIKnDrJdEY=
 go.starlark.net v0.0.0-20230525235612-a134d8f9ddca/go.mod h1:jxU+3+j+71eXOW14274+SmmuW82qJzl6iZSeqEtTGds=
-go.tmz.dev/musttag v0.7.2 h1:1J6S9ipDbalBSODNT5jCep8dhZyMr4ttnjQagmGYR5s=
-go.tmz.dev/musttag v0.7.2/go.mod h1:m6q5NiiSKMnQYokefa2xGoyoXnrswCbJ0AWYzf4Zs28=
+go.uber.org/automaxprocs v1.5.3 h1:kWazyxZUrS3Gs4qUpbwo5kEIMGe/DAvi5Z4tl2NW4j8=
+go.uber.org/automaxprocs v1.5.3/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnwa1WM0=
 go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
 go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
 go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
 go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
 go.uber.org/zap v1.26.0 h1:sI7k6L95XOKS281NhVKOFCUNIvv9e0w4BF8N3u+tCRo=
 go.uber.org/zap v1.26.0/go.mod h1:dtElttAiwGvoJ/vj4IwHBS/gXsEu/pZ50mUIRWuG0so=
-golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
-golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
 golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
-golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
-golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
-golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
-golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
-golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
-golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
-golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
-golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
-golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
-golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g=
-golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k=
+golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc h1:ao2WRsKSzW6KuUY9IWPwWahcHCgR0s52IfwutMfEbdM=
+golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc/go.mod h1:iRJReGqOEeBhDZGkGbynYwcHlctCvnjTYIamk7uXpHI=
 golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
 golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
-golang.org/x/exp/typeparams v0.0.0-20230307190834-24139beb5833 h1:jWGQJV4niP+CCmFW9ekjA9Zx8vYORzOUH2/Nl5WPuLQ=
-golang.org/x/exp/typeparams v0.0.0-20230307190834-24139beb5833/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
-golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
-golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f h1:phY1HzDcf18Aq9A8KkmRtY9WvOFIxN8wgfvy6Zm1DV8=
+golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
 golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
 golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
-golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
 golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
-golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
-golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
-golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
-golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
-golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
-golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
-golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
-golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
-golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
-golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
-golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
-golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
-golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
 golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
-golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
@@ -744,45 +581,20 @@ golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91
 golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI=
 golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
-golang.org/x/mod v0.16.0 h1:QX4fJ0Rr5cPQCF7O9lh9Se4pmwfwskqZfq5moyldzic=
-golang.org/x/mod v0.16.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
+golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
-golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
 golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
 golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
-golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
-golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
-golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
-golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
 golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
-golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
-golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
 golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
-golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
-golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
-golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
 golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
 golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
-golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
-golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
 golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
 golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
@@ -791,81 +603,34 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
 golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
 golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
-golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
-golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
-golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
-golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
-golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
 golang.org/x/oauth2 v0.18.0 h1:09qnuIAgzdx1XplqJvW6CQqMCtGZykZWcXzPMPUusvI=
 golang.org/x/oauth2 v0.18.0/go.mod h1:Wf7knwG0MPoWIMMBgFlEaSUDaKskp0dCfrlJRJXbBi8=
 golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
-golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
+golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220702020025-31831981b65f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -887,12 +652,9 @@ golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
 golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
 golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw=
 golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
-golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
@@ -901,68 +663,24 @@ golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
 golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
 golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
 golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
-golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
-golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
-golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
 golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
 golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
 golang.org/x/tools v0.0.0-20190321232350-e250d351ecad/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
-golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
 golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
-golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
-golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
-golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
-golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
-golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
 golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
 golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
-golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
-golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
-golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
-golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
-golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
 golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
 golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
-golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
-golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
 golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
-golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
-golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE=
-golang.org/x/tools v0.0.0-20201001104356-43ebab892c4c/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU=
 golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
-golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
-golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
-golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
-golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
 golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
-golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
 golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
 golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU=
 golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU=
@@ -976,115 +694,39 @@ golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA=
 golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k=
 golang.org/x/tools v0.5.0/go.mod h1:N+Kgy78s5I24c24dU8OfWNEotWjutIs8SnJvn5IDq+k=
 golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
-golang.org/x/tools v0.19.0 h1:tfGCXNR1OsFG+sVdLAitlpjAvD/I6dHDKnYrpEZUHkw=
-golang.org/x/tools v0.19.0/go.mod h1:qoJWxmGSIBmAeriMx19ogtrEPrGtDbPK634QFIcLAhc=
+golang.org/x/tools v0.21.0 h1:qc0xYgIbsSDt9EyWz05J5wfa7LOVW0YTLOXrqdLAWIw=
+golang.org/x/tools v0.21.0/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 gomodules.xyz/jsonpatch/v2 v2.4.0 h1:Ci3iUJyx9UeRx7CeFN8ARgGbkESwJK+KB9lLcWxY/Zw=
 gomodules.xyz/jsonpatch/v2 v2.4.0/go.mod h1:AH3dM2RI6uoBZxn3LVrfvJ3E0/9dG4cSrbuBJT4moAY=
-google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
-google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
-google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
-google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
-google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
-google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
-google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
-google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
-google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
-google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
-google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
-google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
-google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
-google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
-google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM=
-google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc=
-google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg=
-google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE=
-google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8=
 google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
 google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
-google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
-google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
-google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
-google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
-google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
 google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
 google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
 google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
-google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
-google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
-google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
-google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
-google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
 google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
-google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
-google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=
-google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
 google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
-google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
-google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
 google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
-google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
-google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
 google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
-google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
-google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
 google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
-google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
-google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
-google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
-google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
-google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
-google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
-google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
-google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=
-google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
 google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
 google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
 google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
 google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
 google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
 google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
-google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
 google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
-google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
 google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
 google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
 google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
 google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
-gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
-gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
 gopkg.in/gcfg.v1 v1.2.3 h1:m8OOJ4ccYHnx2f4gQwpno8nAX5OGOh7RLaaz0pj3Ogs=
 gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o=
 gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
@@ -1093,26 +735,17 @@ gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
 gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
 gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
 gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
-gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
 gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
 honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
-honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
-honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
-honnef.co/go/tools v0.4.5 h1:YGD4H+SuIOOqsyoLOpZDWcieM28W47/zRO7f+9V3nvo=
-honnef.co/go/tools v0.4.5/go.mod h1:GUV+uIBCLpdf0/v6UhHHG/yzI/z6qPskBeQCjcNB96k=
+honnef.co/go/tools v0.4.7 h1:9MDAWxMoSnB6QoSqiVr7P5mtkT9pOc1kSxchzPCnqJs=
+honnef.co/go/tools v0.4.7/go.mod h1:+rnGS1THNh8zMwnd2oVOTL9QF6vmfyG6ZXBULae2uc0=
 k8s.io/api v0.30.1 h1:kCm/6mADMdbAxmIh0LBjS54nQBE+U4KmbCfIkF5CpJY=
 k8s.io/api v0.30.1/go.mod h1:ddbN2C0+0DIiPntan/bye3SW3PdwLa11/0yqwvuRrJM=
 k8s.io/apiextensions-apiserver v0.30.1 h1:4fAJZ9985BmpJG6PkoxVRpXv9vmPUOVzl614xarePws=
@@ -1139,17 +772,10 @@ k8s.io/kubectl v0.30.0 h1:xbPvzagbJ6RNYVMVuiHArC1grrV5vSmmIcSZuCdzRyk=
 k8s.io/kubectl v0.30.0/go.mod h1:zgolRw2MQXLPwmic2l/+iHs239L49fhSeICuMhQQXTI=
 k8s.io/utils v0.0.0-20240310230437-4693a0247e57 h1:gbqbevonBh57eILzModw6mrkbwM0gQBEuevE/AaBsHY=
 k8s.io/utils v0.0.0-20240310230437-4693a0247e57/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
-mvdan.cc/gofumpt v0.5.0 h1:0EQ+Z56k8tXjj/6TQD25BFNKQXpCvT0rnansIc7Ug5E=
-mvdan.cc/gofumpt v0.5.0/go.mod h1:HBeVDtMKRZpXyxFciAirzdKklDlGu8aAy1wEbH5Y9js=
-mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I=
-mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc=
-mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo=
-mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4=
-mvdan.cc/unparam v0.0.0-20221223090309-7455f1af531d h1:3rvTIIM22r9pvXk+q3swxUQAQOxksVMGK7sml4nG57w=
-mvdan.cc/unparam v0.0.0-20221223090309-7455f1af531d/go.mod h1:IeHQjmn6TOD+e4Z3RFiZMMsLVL+A96Nvptar8Fj71is=
-rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
-rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
-rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
+mvdan.cc/gofumpt v0.6.0 h1:G3QvahNDmpD+Aek/bNOLrFR2XC6ZAdo62dZu65gmwGo=
+mvdan.cc/gofumpt v0.6.0/go.mod h1:4L0wf+kgIPZtcCWXynNS2e6bhmj73umwnuXSZarixzA=
+mvdan.cc/unparam v0.0.0-20240427195214-063aff900ca1 h1:Nykk7fggxChwLK4rUPYESzeIwqsuxXXlFEAh5YhaMRo=
+mvdan.cc/unparam v0.0.0-20240427195214-063aff900ca1/go.mod h1:ZzZjEpJDOmx8TdVU6umamY3Xy0UAQUI2DHbf05USVbI=
 sigs.k8s.io/cluster-api v1.6.1 h1:I34p/fwgRlEhs+o9cUhKXDwNNfPS3no0yJsd2bJyQVc=
 sigs.k8s.io/cluster-api v1.6.1/go.mod h1:DaxwruDvSaEYq5q6FREDaGzX6UsAVUCA99Sp8vfMHyQ=
 sigs.k8s.io/controller-runtime v0.18.3 h1:B5Wmmo8WMWK7izei+2LlXLVDGzMwAHBNLX68lwtlSR4=
diff --git a/vendor/github.com/4meepo/tagalign/README.md b/vendor/github.com/4meepo/tagalign/README.md
index 262a2e429f59b1e366a09111da1f130355f31776..9d04dccbf2c81b7a274a788607ca2f4177475662 100644
--- a/vendor/github.com/4meepo/tagalign/README.md
+++ b/vendor/github.com/4meepo/tagalign/README.md
@@ -48,7 +48,7 @@ By default tagalign will only align tags, but not sort them. But alignment and [
 * As a Golangci Linter (Recommended)
 
     Tagalign is a built-in linter in [Golangci Lint](https://golangci-lint.run/usage/linters/#tagalign) since `v1.53`.
-    > Note: In order to have the best experience,  add the `--fix` flag to `golangci-lint` to enabled the aufofix feature.
+    > Note: In order to have the best experience,  add the `--fix` flag to `golangci-lint` to enable the autofix feature.
 
 * Standalone Mode
 
@@ -117,7 +117,7 @@ type StrictStyleExample struct {
 }
 ```
 
-> Note: The strict style can't run without the align or sort feature enabled.
+> ⚠️Note: The strict style can't run without the align or sort feature enabled.
 
 ## References
 
diff --git a/vendor/github.com/4meepo/tagalign/tagalign.go b/vendor/github.com/4meepo/tagalign/tagalign.go
index c998510360bc9187a5abbcea7d56abc2006f325f..4734b56661ccad1c17ef13d78e19da3a04e6da40 100644
--- a/vendor/github.com/4meepo/tagalign/tagalign.go
+++ b/vendor/github.com/4meepo/tagalign/tagalign.go
@@ -29,6 +29,10 @@ const (
 	StrictStyle
 )
 
+const (
+	errTagValueSyntax = "bad syntax for struct tag value"
+)
+
 func NewAnalyzer(options ...Option) *analysis.Analyzer {
 	return &analysis.Analyzer{
 		Name: "tagalign",
@@ -208,16 +212,25 @@ func (w *Helper) Process(pass *analysis.Pass) { //nolint:gocognit
 			uniqueKeys = append(uniqueKeys, k)
 		}
 
-		for i, field := range fields {
-			offsets[i] = pass.Fset.Position(field.Tag.Pos()).Column
+		for i := 0; i < len(fields); {
+			field := fields[i]
+			column := pass.Fset.Position(field.Tag.Pos()).Column - 1
+			offsets[i] = column
+
 			tag, err := strconv.Unquote(field.Tag.Value)
 			if err != nil {
-				break
+				// if tag value is not a valid string, report it directly
+				w.report(pass, field, column, errTagValueSyntax, field.Tag.Value)
+				fields = removeField(fields, i)
+				continue
 			}
 
 			tags, err := structtag.Parse(tag)
 			if err != nil {
-				break
+				// if tag value is not a valid struct tag, report it directly
+				w.report(pass, field, column, err.Error(), field.Tag.Value)
+				fields = removeField(fields, i)
+				continue
 			}
 
 			maxTagNum = max(maxTagNum, tags.Len())
@@ -234,6 +247,8 @@ func (w *Helper) Process(pass *analysis.Pass) { //nolint:gocognit
 				addKey(t.Key)
 			}
 			tagsGroup = append(tagsGroup, tags.Tags())
+
+			i++
 		}
 
 		if w.sort && StrictStyle == w.style {
@@ -325,19 +340,22 @@ func (w *Helper) Process(pass *analysis.Pass) { //nolint:gocognit
 
 			msg := "tag is not aligned, should be: " + unquoteTag
 
-			w.report(pass, field, offsets[i]-1, msg, newTagValue)
+			w.report(pass, field, offsets[i], msg, newTagValue)
 		}
 	}
 
 	// process single fields
 	for _, field := range w.singleFields {
+		column := pass.Fset.Position(field.Tag.Pos()).Column - 1
 		tag, err := strconv.Unquote(field.Tag.Value)
 		if err != nil {
+			w.report(pass, field, column, errTagValueSyntax, field.Tag.Value)
 			continue
 		}
 
 		tags, err := structtag.Parse(tag)
 		if err != nil {
+			w.report(pass, field, column, err.Error(), field.Tag.Value)
 			continue
 		}
 		originalTags := append([]*structtag.Tag(nil), tags.Tags()...)
@@ -353,7 +371,7 @@ func (w *Helper) Process(pass *analysis.Pass) { //nolint:gocognit
 
 		msg := "tag is not aligned , should be: " + tags.String()
 
-		w.report(pass, field, pass.Fset.Position(field.Tag.Pos()).Column-1, msg, newTagValue)
+		w.report(pass, field, column, msg, newTagValue)
 	}
 }
 
@@ -431,3 +449,11 @@ func max(a, b int) int {
 	}
 	return b
 }
+
+func removeField(fields []*ast.Field, index int) []*ast.Field {
+	if index < 0 || index >= len(fields) {
+		return fields
+	}
+
+	return append(fields[:index], fields[index+1:]...)
+}
diff --git a/vendor/github.com/Abirdcfly/dupword/README.md b/vendor/github.com/Abirdcfly/dupword/README.md
index 6917acae255bbc612d296442aa0adf23613b1d65..e6c5b919faaffb1c6446d5ae6f20755ccc267a2b 100644
--- a/vendor/github.com/Abirdcfly/dupword/README.md
+++ b/vendor/github.com/Abirdcfly/dupword/README.md
@@ -109,10 +109,12 @@ Flags:
         apply all suggested fixes
   -flags
         print analyzer flags in JSON
+  -ignore value
+        ignore words
   -json
         emit JSON output
   -keyword value
-        key words for detecting duplicate words
+        keywords for detecting duplicate words
   -memprofile string
         write memory profile to this file
   -source
@@ -128,7 +130,7 @@ Flags:
 
 ### 5. my advice
 
-use `--keyword=the,and,a` and `-fix` together. I personally think that specifying only common repeated prepositions can effectively avoid false positives. 
+use `--keyword=the,and,a` and `-fix` together. I think that specifying only commonly repeated prepositions can effectively avoid false positives. 
 
 see [dupword#4](https://github.com/Abirdcfly/dupword/issues/4) for real code example.
 
diff --git a/vendor/github.com/Abirdcfly/dupword/dupword.go b/vendor/github.com/Abirdcfly/dupword/dupword.go
index 508caca52fbdad2130e9f8a85c8903a7f003caa5..9a78fb6ccaa07079ae9099357bad3240eac3ed47 100644
--- a/vendor/github.com/Abirdcfly/dupword/dupword.go
+++ b/vendor/github.com/Abirdcfly/dupword/dupword.go
@@ -52,6 +52,7 @@ This analyzer checks miswritten duplicate words in comments or package doc or st
 var (
 	defaultWord = []string{}
 	// defaultWord = []string{"the", "and", "a"}
+	ignoreWord = map[string]bool{}
 )
 
 type analyzer struct {
@@ -70,7 +71,31 @@ func (a *analyzer) Set(w string) error {
 	return nil
 }
 
+type ignore struct {
+}
+
+func (a *ignore) String() string {
+	t := make([]string, 0, len(ignoreWord))
+	for k := range ignoreWord {
+		t = append(t, k)
+	}
+	return strings.Join(t, ",")
+}
+
+func (a *ignore) Set(w string) error {
+	for _, k := range strings.Split(w, ",") {
+		ignoreWord[k] = true
+	}
+	return nil
+}
+
+// for test only
+func ClearIgnoreWord() {
+	ignoreWord = map[string]bool{}
+}
+
 func NewAnalyzer() *analysis.Analyzer {
+	ignore := &ignore{}
 	analyzer := &analyzer{KeyWord: defaultWord}
 	a := &analysis.Analyzer{
 		Name:             Name,
@@ -80,7 +105,8 @@ func NewAnalyzer() *analysis.Analyzer {
 		RunDespiteErrors: true,
 	}
 	a.Flags.Init(Name, flag.ExitOnError)
-	a.Flags.Var(analyzer, "keyword", "key words for detecting duplicate words")
+	a.Flags.Var(analyzer, "keyword", "keywords for detecting duplicate words")
+	a.Flags.Var(ignore, "ignore", "ignore words")
 	a.Flags.Var(version{}, "V", "print version and exit")
 	return a
 }
@@ -176,7 +202,7 @@ func (a *analyzer) fixDuplicateWordInString(pass *analysis.Pass, lit *ast.BasicL
 	}
 }
 
-// CheckOneKey use to check there is defined duplicate word in a string.
+// CheckOneKey use to check there is a defined duplicate word in a string.
 // raw is checked line. key is the keyword to check. empty means just check duplicate word.
 func CheckOneKey(raw, key string) (new string, findWord string, find bool) {
 	if key == "" {
@@ -298,5 +324,8 @@ func ExcludeWords(word string) (exclude bool) {
 	if unicode.IsSymbol(firstRune) {
 		return true
 	}
+	if _, exist := ignoreWord[word]; exist {
+		return true
+	}
 	return false
 }
diff --git a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go
index e980db5462038b63bcd48c59f7d437b18d774a08..5646ee90948e0e93001a37acaa83de1972a8adb2 100644
--- a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go
+++ b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go
@@ -2,6 +2,9 @@ package analyzer
 
 import (
 	"go/ast"
+	"go/token"
+	"go/types"
+	"strconv"
 
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/analysis/passes/inspect"
@@ -40,29 +43,15 @@ func newNilNil() *nilNil {
 	}
 }
 
-var (
-	types = []ast.Node{(*ast.TypeSpec)(nil)}
-
-	funcAndReturns = []ast.Node{
-		(*ast.FuncDecl)(nil),
-		(*ast.FuncLit)(nil),
-		(*ast.ReturnStmt)(nil),
-	}
-)
-
-type typeSpecByName map[string]typer
+var funcAndReturns = []ast.Node{
+	(*ast.FuncDecl)(nil),
+	(*ast.FuncLit)(nil),
+	(*ast.ReturnStmt)(nil),
+}
 
 func (n *nilNil) run(pass *analysis.Pass) (interface{}, error) {
 	insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
 
-	typeSpecs := typeSpecByName{
-		"any": newTyper(new(ast.InterfaceType)),
-	}
-	insp.Preorder(types, func(node ast.Node) {
-		t := node.(*ast.TypeSpec)
-		typeSpecs[t.Name.Name] = newTyper(t.Type)
-	})
-
 	var fs funcTypeStack
 	insp.Nodes(funcAndReturns, func(node ast.Node, push bool) (proceed bool) {
 		switch v := node.(type) {
@@ -87,13 +76,32 @@ func (n *nilNil) run(pass *analysis.Pass) (interface{}, error) {
 				return false
 			}
 
-			fRes1, fRes2 := ft.Results.List[0], ft.Results.List[1]
-			if !(n.isDangerNilField(fRes1, typeSpecs) && n.isErrorField(fRes2)) {
+			fRes1Type := pass.TypesInfo.TypeOf(ft.Results.List[0].Type)
+			if fRes1Type == nil {
 				return false
 			}
 
-			rRes1, rRes2 := v.Results[0], v.Results[1]
-			if isNil(rRes1) && isNil(rRes2) {
+			fRes2Type := pass.TypesInfo.TypeOf(ft.Results.List[1].Type)
+			if fRes2Type == nil {
+				return false
+			}
+
+			ok, zv := n.isDangerNilType(fRes1Type)
+			if !(ok && isErrorType(fRes2Type)) {
+				return false
+			}
+
+			retVal, retErr := v.Results[0], v.Results[1]
+
+			var needWarn bool
+			switch zv {
+			case zeroValueNil:
+				needWarn = isNil(pass, retVal) && isNil(pass, retErr)
+			case zeroValueZero:
+				needWarn = isZero(retVal) && isNil(pass, retErr)
+			}
+
+			if needWarn {
 				pass.Reportf(v.Pos(), reportMsg)
 			}
 		}
@@ -104,55 +112,73 @@ func (n *nilNil) run(pass *analysis.Pass) (interface{}, error) {
 	return nil, nil //nolint:nilnil
 }
 
-func (n *nilNil) isDangerNilField(f *ast.Field, typeSpecs typeSpecByName) bool {
-	return n.isDangerNilType(f.Type, typeSpecs)
-}
+type zeroValue int
 
-func (n *nilNil) isDangerNilType(t ast.Expr, typeSpecs typeSpecByName) bool {
+const (
+	zeroValueNil = iota + 1
+	zeroValueZero
+)
+
+func (n *nilNil) isDangerNilType(t types.Type) (bool, zeroValue) {
 	switch v := t.(type) {
-	case *ast.StarExpr:
-		return n.checkedTypes.Contains(ptrType)
+	case *types.Pointer:
+		return n.checkedTypes.Contains(ptrType), zeroValueNil
 
-	case *ast.FuncType:
-		return n.checkedTypes.Contains(funcType)
+	case *types.Signature:
+		return n.checkedTypes.Contains(funcType), zeroValueNil
 
-	case *ast.InterfaceType:
-		return n.checkedTypes.Contains(ifaceType)
+	case *types.Interface:
+		return n.checkedTypes.Contains(ifaceType), zeroValueNil
 
-	case *ast.MapType:
-		return n.checkedTypes.Contains(mapType)
+	case *types.Map:
+		return n.checkedTypes.Contains(mapType), zeroValueNil
 
-	case *ast.ChanType:
-		return n.checkedTypes.Contains(chanType)
+	case *types.Chan:
+		return n.checkedTypes.Contains(chanType), zeroValueNil
 
-	case *ast.Ident:
-		if t, ok := typeSpecs[v.Name]; ok {
-			return n.isDangerNilType(t.Type(), typeSpecs)
+	case *types.Basic:
+		if v.Kind() == types.Uintptr {
+			return n.checkedTypes.Contains(uintptrType), zeroValueZero
+		}
+		if v.Kind() == types.UnsafePointer {
+			return n.checkedTypes.Contains(unsafeptrType), zeroValueNil
 		}
+
+	case *types.Named:
+		return n.isDangerNilType(v.Underlying())
 	}
-	return false
+	return false, 0
 }
 
-func (n *nilNil) isErrorField(f *ast.Field) bool {
-	return isIdent(f.Type, "error")
-}
+var errorIface = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
 
-func isNil(e ast.Expr) bool {
-	return isIdent(e, "nil")
+func isErrorType(t types.Type) bool {
+	_, ok := t.Underlying().(*types.Interface)
+	return ok && types.Implements(t, errorIface)
 }
 
-func isIdent(n ast.Node, name string) bool {
-	i, ok := n.(*ast.Ident)
+func isNil(pass *analysis.Pass, e ast.Expr) bool {
+	i, ok := e.(*ast.Ident)
 	if !ok {
 		return false
 	}
-	return i.Name == name
-}
 
-type typer interface {
-	Type() ast.Expr
+	_, ok = pass.TypesInfo.ObjectOf(i).(*types.Nil)
+	return ok
 }
 
-func newTyper(t ast.Expr) typer     { return typerImpl{t: t} } //
-type typerImpl struct{ t ast.Expr } //
-func (ti typerImpl) Type() ast.Expr { return ti.t }
+func isZero(e ast.Expr) bool {
+	bl, ok := e.(*ast.BasicLit)
+	if !ok {
+		return false
+	}
+	if bl.Kind != token.INT {
+		return false
+	}
+
+	v, err := strconv.ParseInt(bl.Value, 0, 64)
+	if err != nil {
+		return false
+	}
+	return v == 0
+}
diff --git a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go
index 520b813a54f132e648ea09464b0c5e570ead2ce1..c9b8e3eedcb848a04cd88d0529737af6693f3cb3 100644
--- a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go
+++ b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go
@@ -8,11 +8,13 @@ import (
 
 func newDefaultCheckedTypes() checkedTypes {
 	return checkedTypes{
-		ptrType:   struct{}{},
-		funcType:  struct{}{},
-		ifaceType: struct{}{},
-		mapType:   struct{}{},
-		chanType:  struct{}{},
+		ptrType:       {},
+		funcType:      {},
+		ifaceType:     {},
+		mapType:       {},
+		chanType:      {},
+		uintptrType:   {},
+		unsafeptrType: {},
 	}
 }
 
@@ -25,15 +27,15 @@ func (t typeName) S() string {
 }
 
 const (
-	ptrType   typeName = "ptr"
-	funcType  typeName = "func"
-	ifaceType typeName = "iface"
-	mapType   typeName = "map"
-	chanType  typeName = "chan"
+	ptrType       typeName = "ptr"
+	funcType      typeName = "func"
+	ifaceType     typeName = "iface"
+	mapType       typeName = "map"
+	chanType      typeName = "chan"
+	uintptrType   typeName = "uintptr"
+	unsafeptrType typeName = "unsafeptr"
 )
 
-var knownTypes = []typeName{ptrType, funcType, ifaceType, mapType, chanType}
-
 type checkedTypes map[typeName]struct{}
 
 func (c checkedTypes) Contains(t typeName) bool {
@@ -60,7 +62,7 @@ func (c checkedTypes) Set(s string) error {
 	c.disableAll()
 	for _, t := range types {
 		switch tt := typeName(t); tt {
-		case ptrType, funcType, ifaceType, mapType, chanType:
+		case ptrType, funcType, ifaceType, mapType, chanType, uintptrType, unsafeptrType:
 			c[tt] = struct{}{}
 		default:
 			return fmt.Errorf("unknown checked type name %q (see help)", t)
diff --git a/vendor/github.com/mbilski/exhaustivestruct/LICENSE b/vendor/github.com/Antonboom/testifylint/LICENSE
similarity index 96%
rename from vendor/github.com/mbilski/exhaustivestruct/LICENSE
rename to vendor/github.com/Antonboom/testifylint/LICENSE
index 893eb73b9fc44e3e8cec496a41f8e9252a0b2db4..9b1cf3a393fc2333bd4f912a4f311235487f33f0 100644
--- a/vendor/github.com/mbilski/exhaustivestruct/LICENSE
+++ b/vendor/github.com/Antonboom/testifylint/LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2020 Mateusz Bilski
+Copyright (c) 2022 Anton Telyshev
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
diff --git a/vendor/github.com/Antonboom/testifylint/analyzer/analyzer.go b/vendor/github.com/Antonboom/testifylint/analyzer/analyzer.go
new file mode 100644
index 0000000000000000000000000000000000000000..84d7e815dc9930279ffa57ef013dcad2050c727e
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/analyzer/analyzer.go
@@ -0,0 +1,93 @@
+package analyzer
+
+import (
+	"fmt"
+	"go/ast"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/ast/inspector"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+	"github.com/Antonboom/testifylint/internal/checkers"
+	"github.com/Antonboom/testifylint/internal/config"
+	"github.com/Antonboom/testifylint/internal/testify"
+)
+
+const (
+	name = "testifylint"
+	doc  = "Checks usage of " + testify.ModulePath + "."
+	url  = "https://github.com/antonboom/" + name
+)
+
+// New returns new instance of testifylint analyzer.
+func New() *analysis.Analyzer {
+	cfg := config.NewDefault()
+
+	analyzer := &analysis.Analyzer{
+		Name: name,
+		Doc:  doc,
+		URL:  url,
+		Run: func(pass *analysis.Pass) (any, error) {
+			regularCheckers, advancedCheckers, err := newCheckers(cfg)
+			if err != nil {
+				return nil, fmt.Errorf("build checkers: %v", err)
+			}
+
+			tl := &testifyLint{
+				regularCheckers:  regularCheckers,
+				advancedCheckers: advancedCheckers,
+			}
+			return tl.run(pass)
+		},
+	}
+	config.BindToFlags(&cfg, &analyzer.Flags)
+
+	return analyzer
+}
+
+type testifyLint struct {
+	regularCheckers  []checkers.RegularChecker
+	advancedCheckers []checkers.AdvancedChecker
+}
+
+func (tl *testifyLint) run(pass *analysis.Pass) (any, error) {
+	filesToAnalysis := make([]*ast.File, 0, len(pass.Files))
+	for _, f := range pass.Files {
+		if !analysisutil.Imports(f, testify.AssertPkgPath, testify.RequirePkgPath, testify.SuitePkgPath) {
+			continue
+		}
+		filesToAnalysis = append(filesToAnalysis, f)
+	}
+
+	insp := inspector.New(filesToAnalysis)
+
+	// Regular checkers.
+	insp.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, func(node ast.Node) {
+		tl.regularCheck(pass, node.(*ast.CallExpr))
+	})
+
+	// Advanced checkers.
+	for _, ch := range tl.advancedCheckers {
+		for _, d := range ch.Check(pass, insp) {
+			pass.Report(d)
+		}
+	}
+
+	return nil, nil
+}
+
+func (tl *testifyLint) regularCheck(pass *analysis.Pass, ce *ast.CallExpr) {
+	call := checkers.NewCallMeta(pass, ce)
+	if nil == call {
+		return
+	}
+
+	for _, ch := range tl.regularCheckers {
+		if d := ch.Check(pass, call); d != nil {
+			pass.Report(*d)
+			// NOTE(a.telyshev): I'm not interested in multiple diagnostics per assertion.
+			// This simplifies the code and also makes the linter more efficient.
+			return
+		}
+	}
+}
diff --git a/vendor/github.com/Antonboom/testifylint/analyzer/checkers_factory.go b/vendor/github.com/Antonboom/testifylint/analyzer/checkers_factory.go
new file mode 100644
index 0000000000000000000000000000000000000000..77573e3952ad52a55b52f8909366402b2fa65ee7
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/analyzer/checkers_factory.go
@@ -0,0 +1,74 @@
+package analyzer
+
+import (
+	"fmt"
+
+	"github.com/Antonboom/testifylint/internal/checkers"
+	"github.com/Antonboom/testifylint/internal/config"
+)
+
+// newCheckers accepts linter config and returns slices of enabled checkers sorted by priority.
+func newCheckers(cfg config.Config) ([]checkers.RegularChecker, []checkers.AdvancedChecker, error) {
+	if err := cfg.Validate(); err != nil {
+		return nil, nil, err
+	}
+
+	enabledCheckersSet := make(map[string]struct{})
+
+	if cfg.EnableAll {
+		for _, checker := range checkers.All() {
+			enabledCheckersSet[checker] = struct{}{}
+		}
+	} else if !cfg.DisableAll {
+		for _, checker := range checkers.EnabledByDefault() {
+			enabledCheckersSet[checker] = struct{}{}
+		}
+	}
+
+	for _, checker := range cfg.EnabledCheckers {
+		enabledCheckersSet[checker] = struct{}{}
+	}
+
+	for _, checker := range cfg.DisabledCheckers {
+		delete(enabledCheckersSet, checker)
+	}
+
+	enabledCheckers := make([]string, 0, len(enabledCheckersSet))
+	for v := range enabledCheckersSet {
+		enabledCheckers = append(enabledCheckers, v)
+	}
+	checkers.SortByPriority(enabledCheckers)
+
+	regularCheckers := make([]checkers.RegularChecker, 0, len(enabledCheckers))
+	advancedCheckers := make([]checkers.AdvancedChecker, 0, len(enabledCheckers)/2)
+
+	for _, name := range enabledCheckers {
+		ch, ok := checkers.Get(name)
+		if !ok {
+			return nil, nil, fmt.Errorf("unknown checker %q", name)
+		}
+
+		switch c := ch.(type) {
+		case *checkers.BoolCompare:
+			c.SetIgnoreCustomTypes(cfg.BoolCompare.IgnoreCustomTypes)
+
+		case *checkers.ExpectedActual:
+			c.SetExpVarPattern(cfg.ExpectedActual.ExpVarPattern.Regexp)
+
+		case *checkers.RequireError:
+			c.SetFnPattern(cfg.RequireError.FnPattern.Regexp)
+
+		case *checkers.SuiteExtraAssertCall:
+			c.SetMode(cfg.SuiteExtraAssertCall.Mode)
+		}
+
+		switch casted := ch.(type) {
+		case checkers.RegularChecker:
+			regularCheckers = append(regularCheckers, casted)
+		case checkers.AdvancedChecker:
+			advancedCheckers = append(advancedCheckers, casted)
+		}
+	}
+
+	return regularCheckers, advancedCheckers, nil
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/doc.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/doc.go
new file mode 100644
index 0000000000000000000000000000000000000000..b57cbd93844552a37be1d97a7d9fb7f278d0f25d
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/doc.go
@@ -0,0 +1,9 @@
+// Package analysisutil contains functions common for `analyzer` and `internal/checkers` packages.
+// In addition, it is intended to "lighten" these packages.
+//
+// If the function is common to several packages, or it makes sense to test it separately without
+// "polluting" the target package with tests of private functionality, then you can put function in this package.
+//
+// It's important to avoid dependency on `golang.org/x/tools/go/analysis` in the helpers API.
+// This makes the API "narrower" and also allows you to test functions without some "abstraction leaks".
+package analysisutil
diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go
new file mode 100644
index 0000000000000000000000000000000000000000..3fc1f42b8639bd00b9ebc7542344fa7415ef1134
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go
@@ -0,0 +1,28 @@
+package analysisutil
+
+import (
+	"go/ast"
+	"strconv"
+)
+
+// Imports tells if the file imports at least one of the packages.
+// If no packages provided then function returns false.
+func Imports(file *ast.File, pkgs ...string) bool {
+	for _, i := range file.Imports {
+		if i.Path == nil {
+			continue
+		}
+
+		path, err := strconv.Unquote(i.Path.Value)
+		if err != nil {
+			continue
+		}
+		// NOTE(a.telyshev): Don't use `slices.Contains` to keep the minimum module version 1.20.
+		for _, pkg := range pkgs { // Small O(n).
+			if pkg == path {
+				return true
+			}
+		}
+	}
+	return false
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/format.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/format.go
new file mode 100644
index 0000000000000000000000000000000000000000..fcb4b847f64663336c649738450f1d248c69a6ef
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/format.go
@@ -0,0 +1,34 @@
+package analysisutil
+
+import (
+	"bytes"
+	"go/ast"
+	"go/format"
+	"go/token"
+)
+
+// NodeString is a more powerful analogue of types.ExprString.
+// Return empty string if node AST is invalid.
+func NodeString(fset *token.FileSet, node ast.Node) string {
+	if v := formatNode(fset, node); v != nil {
+		return v.String()
+	}
+	return ""
+}
+
+// NodeBytes works as NodeString but returns a byte slice.
+// Return nil if node AST is invalid.
+func NodeBytes(fset *token.FileSet, node ast.Node) []byte {
+	if v := formatNode(fset, node); v != nil {
+		return v.Bytes()
+	}
+	return nil
+}
+
+func formatNode(fset *token.FileSet, node ast.Node) *bytes.Buffer {
+	buf := new(bytes.Buffer)
+	if err := format.Node(buf, fset, node); err != nil {
+		return nil
+	}
+	return buf
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/object.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/object.go
new file mode 100644
index 0000000000000000000000000000000000000000..4e0346d2bae7124712ae89d41a34dbbf85e483f6
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/object.go
@@ -0,0 +1,34 @@
+package analysisutil
+
+import (
+	"go/ast"
+	"go/types"
+)
+
+// ObjectOf works in context of Golang package and returns types.Object for the given object's package and name.
+// The search is based on the provided package and its dependencies (imports).
+// Returns nil if the object is not found.
+func ObjectOf(pkg *types.Package, objPkg, objName string) types.Object {
+	if pkg.Path() == objPkg {
+		return pkg.Scope().Lookup(objName)
+	}
+
+	for _, i := range pkg.Imports() {
+		if trimVendor(i.Path()) == objPkg {
+			return i.Scope().Lookup(objName)
+		}
+	}
+	return nil
+}
+
+// IsObj returns true if expression is identifier which notes to given types.Object.
+// Useful in combination with types.Universe objects.
+func IsObj(typesInfo *types.Info, expr ast.Expr, expected types.Object) bool {
+	id, ok := expr.(*ast.Ident)
+	if !ok {
+		return false
+	}
+
+	obj := typesInfo.ObjectOf(id)
+	return obj == expected
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/pkg.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/pkg.go
new file mode 100644
index 0000000000000000000000000000000000000000..d34be5d34179a057c97528893685dc0bbba980a1
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/pkg.go
@@ -0,0 +1,19 @@
+package analysisutil
+
+import (
+	"go/types"
+	"strings"
+)
+
+// IsPkg checks that package has corresponding objName and path.
+// Supports vendored packages.
+func IsPkg(pkg *types.Package, name, path string) bool {
+	return pkg.Name() == name && trimVendor(pkg.Path()) == path
+}
+
+func trimVendor(path string) string {
+	if strings.HasPrefix(path, "vendor/") {
+		return path[len("vendor/"):]
+	}
+	return path
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go
new file mode 100644
index 0000000000000000000000000000000000000000..403691e270e6499cd476fa79fbe0e3ce44b0478e
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go
@@ -0,0 +1,69 @@
+package checkers
+
+import (
+	"fmt"
+	"strconv"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/ast/inspector"
+
+	"github.com/Antonboom/testifylint/internal/testify"
+)
+
+// BlankImport detects useless blank imports of testify's packages.
+// These imports are useless since testify doesn't do any magic with init() function.
+//
+// The checker detects situations like
+//
+//	import (
+//		"testing"
+//
+//		_ "github.com/stretchr/testify"
+//		_ "github.com/stretchr/testify/assert"
+//		_ "github.com/stretchr/testify/http"
+//		_ "github.com/stretchr/testify/mock"
+//		_ "github.com/stretchr/testify/require"
+//		_ "github.com/stretchr/testify/suite"
+//	)
+//
+// and requires
+//
+//	import (
+//		"testing"
+//	)
+type BlankImport struct{}
+
+// NewBlankImport constructs BlankImport checker.
+func NewBlankImport() BlankImport { return BlankImport{} }
+func (BlankImport) Name() string  { return "blank-import" }
+
+func (checker BlankImport) Check(pass *analysis.Pass, _ *inspector.Inspector) (diagnostics []analysis.Diagnostic) {
+	for _, file := range pass.Files {
+		for _, imp := range file.Imports {
+			if imp.Name == nil || imp.Name.Name != "_" {
+				continue
+			}
+
+			pkg, err := strconv.Unquote(imp.Path.Value)
+			if err != nil {
+				continue
+			}
+			if _, ok := packagesNotIntendedForBlankImport[pkg]; !ok {
+				continue
+			}
+
+			msg := fmt.Sprintf("avoid blank import of %s as it does nothing", pkg)
+			diagnostics = append(diagnostics, *newDiagnostic(checker.Name(), imp, msg, nil))
+		}
+	}
+	return diagnostics
+}
+
+var packagesNotIntendedForBlankImport = map[string]struct{}{
+	testify.ModulePath:     {},
+	testify.AssertPkgPath:  {},
+	testify.HTTPPkgPath:    {},
+	testify.MockPkgPath:    {},
+	testify.RequirePkgPath: {},
+	testify.SuitePkgPath:   {},
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go
new file mode 100644
index 0000000000000000000000000000000000000000..43907123bdb4249fa175e49161308370e3f74244
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go
@@ -0,0 +1,304 @@
+package checkers
+
+import (
+	"go/ast"
+	"go/token"
+	"go/types"
+
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+// BoolCompare detects situations like
+//
+//	assert.Equal(t, false, result)
+//	assert.EqualValues(t, false, result)
+//	assert.Exactly(t, false, result)
+//	assert.NotEqual(t, true, result)
+//	assert.NotEqualValues(t, true, result)
+//	assert.False(t, !result)
+//	assert.True(t, result == true)
+//	...
+//
+// and requires
+//
+//	assert.False(t, result)
+//	assert.True(t, result)
+type BoolCompare struct {
+	ignoreCustomTypes bool
+}
+
+// NewBoolCompare constructs BoolCompare checker.
+func NewBoolCompare() *BoolCompare { return new(BoolCompare) }
+func (BoolCompare) Name() string   { return "bool-compare" }
+
+func (checker *BoolCompare) SetIgnoreCustomTypes(v bool) *BoolCompare {
+	checker.ignoreCustomTypes = v
+	return checker
+}
+
+func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+	newBoolCast := func(e ast.Expr) ast.Expr {
+		return &ast.CallExpr{Fun: &ast.Ident{Name: "bool"}, Args: []ast.Expr{e}}
+	}
+
+	newUseFnDiagnostic := func(proposed string, survivingArg ast.Expr, replaceStart, replaceEnd token.Pos) *analysis.Diagnostic {
+		if !isBuiltinBool(pass, survivingArg) {
+			if checker.ignoreCustomTypes {
+				return nil
+			}
+			survivingArg = newBoolCast(survivingArg)
+		}
+		return newUseFunctionDiagnostic(checker.Name(), call, proposed,
+			newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+				Pos:     replaceStart,
+				End:     replaceEnd,
+				NewText: analysisutil.NodeBytes(pass.Fset, survivingArg),
+			}),
+		)
+	}
+
+	newUseTrueDiagnostic := func(survivingArg ast.Expr, replaceStart, replaceEnd token.Pos) *analysis.Diagnostic {
+		return newUseFnDiagnostic("True", survivingArg, replaceStart, replaceEnd)
+	}
+
+	newUseFalseDiagnostic := func(survivingArg ast.Expr, replaceStart, replaceEnd token.Pos) *analysis.Diagnostic {
+		return newUseFnDiagnostic("False", survivingArg, replaceStart, replaceEnd)
+	}
+
+	newNeedSimplifyDiagnostic := func(survivingArg ast.Expr, replaceStart, replaceEnd token.Pos) *analysis.Diagnostic {
+		if !isBuiltinBool(pass, survivingArg) {
+			if checker.ignoreCustomTypes {
+				return nil
+			}
+			survivingArg = newBoolCast(survivingArg)
+		}
+		return newDiagnostic(checker.Name(), call, "need to simplify the assertion",
+			&analysis.SuggestedFix{
+				Message: "Simplify the assertion",
+				TextEdits: []analysis.TextEdit{{
+					Pos:     replaceStart,
+					End:     replaceEnd,
+					NewText: analysisutil.NodeBytes(pass.Fset, survivingArg),
+				}},
+			},
+		)
+	}
+
+	switch call.Fn.NameFTrimmed {
+	case "Equal", "EqualValues", "Exactly":
+		if len(call.Args) < 2 {
+			return nil
+		}
+
+		arg1, arg2 := call.Args[0], call.Args[1]
+		if anyCondSatisfaction(pass, isEmptyInterface, arg1, arg2) {
+			return nil
+		}
+		if anyCondSatisfaction(pass, isBoolOverride, arg1, arg2) {
+			return nil
+		}
+
+		t1, t2 := isUntypedTrue(pass, arg1), isUntypedTrue(pass, arg2)
+		f1, f2 := isUntypedFalse(pass, arg1), isUntypedFalse(pass, arg2)
+
+		switch {
+		case xor(t1, t2):
+			survivingArg, _ := anyVal([]bool{t1, t2}, arg2, arg1)
+			if call.Fn.NameFTrimmed == "Exactly" && !isBuiltinBool(pass, survivingArg) {
+				// NOTE(a.telyshev): `Exactly` assumes no type casting.
+				return nil
+			}
+			return newUseTrueDiagnostic(survivingArg, arg1.Pos(), arg2.End())
+
+		case xor(f1, f2):
+			survivingArg, _ := anyVal([]bool{f1, f2}, arg2, arg1)
+			if call.Fn.NameFTrimmed == "Exactly" && !isBuiltinBool(pass, survivingArg) {
+				// NOTE(a.telyshev): `Exactly` assumes no type casting.
+				return nil
+			}
+			return newUseFalseDiagnostic(survivingArg, arg1.Pos(), arg2.End())
+		}
+
+	case "NotEqual", "NotEqualValues":
+		if len(call.Args) < 2 {
+			return nil
+		}
+
+		arg1, arg2 := call.Args[0], call.Args[1]
+		if anyCondSatisfaction(pass, isEmptyInterface, arg1, arg2) {
+			return nil
+		}
+		if anyCondSatisfaction(pass, isBoolOverride, arg1, arg2) {
+			return nil
+		}
+
+		t1, t2 := isUntypedTrue(pass, arg1), isUntypedTrue(pass, arg2)
+		f1, f2 := isUntypedFalse(pass, arg1), isUntypedFalse(pass, arg2)
+
+		switch {
+		case xor(t1, t2):
+			survivingArg, _ := anyVal([]bool{t1, t2}, arg2, arg1)
+			return newUseFalseDiagnostic(survivingArg, arg1.Pos(), arg2.End())
+
+		case xor(f1, f2):
+			survivingArg, _ := anyVal([]bool{f1, f2}, arg2, arg1)
+			return newUseTrueDiagnostic(survivingArg, arg1.Pos(), arg2.End())
+		}
+
+	case "True":
+		if len(call.Args) < 1 {
+			return nil
+		}
+		expr := call.Args[0]
+
+		{
+			arg1, ok1 := isComparisonWithTrue(pass, expr, token.EQL)
+			arg2, ok2 := isComparisonWithFalse(pass, expr, token.NEQ)
+
+			survivingArg, ok := anyVal([]bool{ok1, ok2}, arg1, arg2)
+			if ok && !isEmptyInterface(pass, survivingArg) {
+				return newNeedSimplifyDiagnostic(survivingArg, expr.Pos(), expr.End())
+			}
+		}
+
+		{
+			arg1, ok1 := isComparisonWithTrue(pass, expr, token.NEQ)
+			arg2, ok2 := isComparisonWithFalse(pass, expr, token.EQL)
+			arg3, ok3 := isNegation(expr)
+
+			survivingArg, ok := anyVal([]bool{ok1, ok2, ok3}, arg1, arg2, arg3)
+			if ok && !isEmptyInterface(pass, survivingArg) {
+				return newUseFalseDiagnostic(survivingArg, expr.Pos(), expr.End())
+			}
+		}
+
+	case "False":
+		if len(call.Args) < 1 {
+			return nil
+		}
+		expr := call.Args[0]
+
+		{
+			arg1, ok1 := isComparisonWithTrue(pass, expr, token.EQL)
+			arg2, ok2 := isComparisonWithFalse(pass, expr, token.NEQ)
+
+			survivingArg, ok := anyVal([]bool{ok1, ok2}, arg1, arg2)
+			if ok && !isEmptyInterface(pass, survivingArg) {
+				return newNeedSimplifyDiagnostic(survivingArg, expr.Pos(), expr.End())
+			}
+		}
+
+		{
+			arg1, ok1 := isComparisonWithTrue(pass, expr, token.NEQ)
+			arg2, ok2 := isComparisonWithFalse(pass, expr, token.EQL)
+			arg3, ok3 := isNegation(expr)
+
+			survivingArg, ok := anyVal([]bool{ok1, ok2, ok3}, arg1, arg2, arg3)
+			if ok && !isEmptyInterface(pass, survivingArg) {
+				return newUseTrueDiagnostic(survivingArg, expr.Pos(), expr.End())
+			}
+		}
+	}
+	return nil
+}
+
+func isEmptyInterface(pass *analysis.Pass, expr ast.Expr) bool {
+	t, ok := pass.TypesInfo.Types[expr]
+	if !ok {
+		return false
+	}
+
+	iface, ok := t.Type.Underlying().(*types.Interface)
+	return ok && iface.NumMethods() == 0
+}
+
+func isBuiltinBool(pass *analysis.Pass, e ast.Expr) bool {
+	basicType, ok := pass.TypesInfo.TypeOf(e).(*types.Basic)
+	return ok && basicType.Kind() == types.Bool
+}
+
+func isBoolOverride(pass *analysis.Pass, e ast.Expr) bool {
+	namedType, ok := pass.TypesInfo.TypeOf(e).(*types.Named)
+	return ok && namedType.Obj().Name() == "bool"
+}
+
+var (
+	falseObj = types.Universe.Lookup("false")
+	trueObj  = types.Universe.Lookup("true")
+)
+
+func isUntypedTrue(pass *analysis.Pass, e ast.Expr) bool {
+	return analysisutil.IsObj(pass.TypesInfo, e, trueObj)
+}
+
+func isUntypedFalse(pass *analysis.Pass, e ast.Expr) bool {
+	return analysisutil.IsObj(pass.TypesInfo, e, falseObj)
+}
+
+func isComparisonWithTrue(pass *analysis.Pass, e ast.Expr, op token.Token) (ast.Expr, bool) {
+	return isComparisonWith(pass, e, isUntypedTrue, op)
+}
+
+func isComparisonWithFalse(pass *analysis.Pass, e ast.Expr, op token.Token) (ast.Expr, bool) {
+	return isComparisonWith(pass, e, isUntypedFalse, op)
+}
+
+type predicate func(pass *analysis.Pass, e ast.Expr) bool
+
+func isComparisonWith(pass *analysis.Pass, e ast.Expr, predicate predicate, op token.Token) (ast.Expr, bool) {
+	be, ok := e.(*ast.BinaryExpr)
+	if !ok {
+		return nil, false
+	}
+	if be.Op != op {
+		return nil, false
+	}
+
+	t1, t2 := predicate(pass, be.X), predicate(pass, be.Y)
+	if xor(t1, t2) {
+		if t1 {
+			return be.Y, true
+		}
+		return be.X, true
+	}
+	return nil, false
+}
+
+func isNegation(e ast.Expr) (ast.Expr, bool) {
+	ue, ok := e.(*ast.UnaryExpr)
+	if !ok {
+		return nil, false
+	}
+	return ue.X, ue.Op == token.NOT
+}
+
+func xor(a, b bool) bool {
+	return a != b
+}
+
+// anyVal returns the first vals[i] for which bools[i] is true.
+func anyVal[T any](bools []bool, vals ...T) (T, bool) {
+	if len(bools) != len(vals) {
+		panic("inconsistent usage of anyVal") //nolint:forbidigo // Does not depend on the code being analyzed.
+	}
+
+	for i, b := range bools {
+		if b {
+			return vals[i], true
+		}
+	}
+
+	var _default T
+	return _default, false
+}
+
+func anyCondSatisfaction(pass *analysis.Pass, p predicate, vals ...ast.Expr) bool {
+	for _, v := range vals {
+		if p(pass, v) {
+			return true
+		}
+	}
+	return false
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/call_meta.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/call_meta.go
new file mode 100644
index 0000000000000000000000000000000000000000..44eed49a621fe6358b70e4b9d98f07b9c13f1db4
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/call_meta.go
@@ -0,0 +1,136 @@
+package checkers
+
+import (
+	"go/ast"
+	"go/types"
+	"strings"
+
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+	"github.com/Antonboom/testifylint/internal/testify"
+)
+
+// CallMeta stores meta info about an assertion function/method call, for example
+//
+//	assert.Equal(t, 42, result, "helpful comment")
+type CallMeta struct {
+	// Range contains the start and end positions of the assertion call.
+	analysis.Range
+	// IsPkg is true if this is a package (not object) call.
+	IsPkg bool
+	// IsAssert is true if this is a "testify/assert" package (or object) call.
+	IsAssert bool
+	// Selector is the AST expression of "assert.Equal".
+	Selector *ast.SelectorExpr
+	// SelectorXStr is a string representation of Selector's left part, i.e. the value before the dot, e.g. "assert".
+	SelectorXStr string
+	// Fn stores meta info about the assertion function itself.
+	Fn FnMeta
+	// Args stores the assertion call arguments without the `t *testing.T` argument.
+	// E.g. [42, result, "helpful comment"].
+	Args []ast.Expr
+	// ArgsRaw stores the initial assertion call arguments.
+	// E.g. [t, 42, result, "helpful comment"].
+	ArgsRaw []ast.Expr
+}
+
+func (c CallMeta) String() string {
+	return c.SelectorXStr + "." + c.Fn.Name
+}
+
+// FnMeta stores meta info about the assertion function itself, for example "Equal".
+type FnMeta struct {
+	// Range contains the start and end positions of the function Name.
+	analysis.Range
+	// Name is the function name.
+	Name string
+	// NameFTrimmed is the function name without the "f" suffix.
+	NameFTrimmed string
+	// IsFmt is true if the function is a formatting variant, e.g. "Equalf".
+	IsFmt bool
+}
+
+// NewCallMeta returns meta information about a testify assertion call.
+// Returns nil if the ast.CallExpr is not a testify call.
+func NewCallMeta(pass *analysis.Pass, ce *ast.CallExpr) *CallMeta {
+	se, ok := ce.Fun.(*ast.SelectorExpr)
+	if !ok || se.Sel == nil {
+		return nil
+	}
+	fnName := se.Sel.Name
+
+	initiatorPkg, isPkgCall := func() (*types.Package, bool) {
+		// Examples:
+		// s.Assert         -> method of *suite.Suite        -> package suite ("vendor/github.com/stretchr/testify/suite")
+		// s.Assert().Equal -> method of *assert.Assertions  -> package assert ("vendor/github.com/stretchr/testify/assert")
+		// s.Equal          -> method of *assert.Assertions  -> package assert ("vendor/github.com/stretchr/testify/assert")
+		// reqObj.Falsef    -> method of *require.Assertions -> package require ("vendor/github.com/stretchr/testify/require")
+		if sel, ok := pass.TypesInfo.Selections[se]; ok {
+			return sel.Obj().Pkg(), false
+		}
+
+		// Examples:
+		// assert.False      -> assert  -> package assert ("vendor/github.com/stretchr/testify/assert")
+		// require.NotEqualf -> require -> package require ("vendor/github.com/stretchr/testify/require")
+		if id, ok := se.X.(*ast.Ident); ok {
+			if selObj := pass.TypesInfo.ObjectOf(id); selObj != nil {
+				if pkg, ok := selObj.(*types.PkgName); ok {
+					return pkg.Imported(), true
+				}
+			}
+		}
+		return nil, false
+	}()
+	if initiatorPkg == nil {
+		return nil
+	}
+
+	isAssert := analysisutil.IsPkg(initiatorPkg, testify.AssertPkgName, testify.AssertPkgPath)
+	isRequire := analysisutil.IsPkg(initiatorPkg, testify.RequirePkgName, testify.RequirePkgPath)
+	if !(isAssert || isRequire) {
+		return nil
+	}
+
+	return &CallMeta{
+		Range:        ce,
+		IsPkg:        isPkgCall,
+		IsAssert:     isAssert,
+		Selector:     se,
+		SelectorXStr: analysisutil.NodeString(pass.Fset, se.X),
+		Fn: FnMeta{
+			Range:        se.Sel,
+			Name:         fnName,
+			NameFTrimmed: strings.TrimSuffix(fnName, "f"),
+			IsFmt:        strings.HasSuffix(fnName, "f"),
+		},
+		Args:    trimTArg(pass, ce.Args),
+		ArgsRaw: ce.Args,
+	}
+}
+
+func trimTArg(pass *analysis.Pass, args []ast.Expr) []ast.Expr {
+	if len(args) == 0 {
+		return args
+	}
+
+	if isTestingTPtr(pass, args[0]) {
+		return args[1:]
+	}
+	return args
+}
+
+func isTestingTPtr(pass *analysis.Pass, arg ast.Expr) bool {
+	assertTestingTObj := analysisutil.ObjectOf(pass.Pkg, testify.AssertPkgPath, "TestingT")
+	requireTestingTObj := analysisutil.ObjectOf(pass.Pkg, testify.RequirePkgPath, "TestingT")
+
+	argType := pass.TypesInfo.TypeOf(arg)
+	if argType == nil {
+		return false
+	}
+
+	return ((assertTestingTObj != nil) &&
+		types.Implements(argType, assertTestingTObj.Type().Underlying().(*types.Interface))) ||
+		((requireTestingTObj != nil) &&
+			types.Implements(argType, requireTestingTObj.Type().Underlying().(*types.Interface)))
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/checker.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/checker.go
new file mode 100644
index 0000000000000000000000000000000000000000..ac23af6f6fc2681ef799d942efacb04c19f18247
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/checker.go
@@ -0,0 +1,23 @@
+package checkers
+
+import (
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/ast/inspector"
+)
+
+// Checker describes a named checker.
+type Checker interface {
+	Name() string
+}
+
+// RegularChecker checks an assertion call presented in CallMeta form.
+type RegularChecker interface {
+	Checker
+	Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic
+}
+
+// AdvancedChecker implements complex Check logic that goes beyond a trivial CallMeta check.
+type AdvancedChecker interface {
+	Checker
+	Check(pass *analysis.Pass, inspector *inspector.Inspector) []analysis.Diagnostic
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go
new file mode 100644
index 0000000000000000000000000000000000000000..e34a21bf9c7767187e138ca7b6c6076a87d27a95
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go
@@ -0,0 +1,105 @@
+package checkers
+
+import (
+	"sort"
+)
+
+// registry stores checkers' meta information in priority order.
+var registry = checkersRegistry{
+	// Regular checkers.
+	{factory: asCheckerFactory(NewFloatCompare), enabledByDefault: true},
+	{factory: asCheckerFactory(NewBoolCompare), enabledByDefault: true},
+	{factory: asCheckerFactory(NewEmpty), enabledByDefault: true},
+	{factory: asCheckerFactory(NewLen), enabledByDefault: true},
+	{factory: asCheckerFactory(NewCompares), enabledByDefault: true},
+	{factory: asCheckerFactory(NewErrorNil), enabledByDefault: true},
+	{factory: asCheckerFactory(NewNilCompare), enabledByDefault: true},
+	{factory: asCheckerFactory(NewErrorIsAs), enabledByDefault: true},
+	{factory: asCheckerFactory(NewExpectedActual), enabledByDefault: true},
+	{factory: asCheckerFactory(NewSuiteExtraAssertCall), enabledByDefault: true},
+	{factory: asCheckerFactory(NewSuiteDontUsePkg), enabledByDefault: true},
+	{factory: asCheckerFactory(NewUselessAssert), enabledByDefault: true},
+	// Advanced checkers.
+	{factory: asCheckerFactory(NewBlankImport), enabledByDefault: true},
+	{factory: asCheckerFactory(NewGoRequire), enabledByDefault: true},
+	{factory: asCheckerFactory(NewRequireError), enabledByDefault: true},
+	{factory: asCheckerFactory(NewSuiteTHelper), enabledByDefault: false},
+}
+
+type checkersRegistry []checkerMeta
+
+type checkerMeta struct {
+	factory          checkerFactory
+	enabledByDefault bool
+}
+
+type checkerFactory func() Checker
+
+func asCheckerFactory[T Checker](fn func() T) checkerFactory {
+	return func() Checker {
+		return fn()
+	}
+}
+
+func (r checkersRegistry) get(name string) (m checkerMeta, priority int, found bool) {
+	for i, meta := range r {
+		if meta.factory().Name() == name {
+			return meta, i, true
+		}
+	}
+	return checkerMeta{}, 0, false
+}
+
+// All returns all checker names sorted by priority.
+func All() []string {
+	result := make([]string, 0, len(registry))
+	for _, meta := range registry {
+		result = append(result, meta.factory().Name())
+	}
+	return result
+}
+
+// EnabledByDefault returns the names of checkers enabled by default, sorted by priority.
+func EnabledByDefault() []string {
+	result := make([]string, 0, len(registry))
+	for _, meta := range registry {
+		if meta.enabledByDefault {
+			result = append(result, meta.factory().Name())
+		}
+	}
+	return result
+}
+
+// Get returns a new checker instance by name.
+func Get(name string) (Checker, bool) {
+	meta, _, ok := registry.get(name)
+	if ok {
+		return meta.factory(), true
+	}
+	return nil, false
+}
+
+// IsKnown checks if there is a checker with that name.
+func IsKnown(name string) bool {
+	_, _, ok := registry.get(name)
+	return ok
+}
+
+// IsEnabledByDefault returns true if a checker is enabled by default.
+// Returns false if there is no such checker in the registry.
+// For pre-validation use Get or IsKnown.
+func IsEnabledByDefault(name string) bool {
+	meta, _, ok := registry.get(name)
+	return ok && meta.enabledByDefault
+}
+
+// SortByPriority sorts the input checker names in place, in checker priority order.
+// Ignores unknown checkers. For pre-validation use Get or IsKnown.
+func SortByPriority(checkers []string) {
+	sort.Slice(checkers, func(i, j int) bool {
+		lhs, rhs := checkers[i], checkers[j]
+		_, lhsPriority, _ := registry.get(lhs)
+		_, rhsPriority, _ := registry.get(rhs)
+		return lhsPriority < rhsPriority
+	})
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go
new file mode 100644
index 0000000000000000000000000000000000000000..336a34512416a9f2749933ba12786c447b3ec68e
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go
@@ -0,0 +1,96 @@
+package checkers
+
+import (
+	"bytes"
+	"go/ast"
+	"go/token"
+
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+// Compares detects situations like
+//
+//	assert.True(t, a == b)
+//	assert.True(t, a != b)
+//	assert.True(t, a > b)
+//	assert.True(t, a >= b)
+//	assert.True(t, a < b)
+//	assert.True(t, a <= b)
+//	assert.False(t, a == b)
+//	...
+//
+// and requires
+//
+//	assert.Equal(t, a, b)
+//	assert.NotEqual(t, a, b)
+//	assert.Greater(t, a, b)
+//	assert.GreaterOrEqual(t, a, b)
+//	assert.Less(t, a, b)
+//	assert.LessOrEqual(t, a, b)
+type Compares struct{}
+
+// NewCompares constructs Compares checker.
+func NewCompares() Compares   { return Compares{} }
+func (Compares) Name() string { return "compares" }
+
+func (checker Compares) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+	if len(call.Args) < 1 {
+		return nil
+	}
+
+	be, ok := call.Args[0].(*ast.BinaryExpr)
+	if !ok {
+		return nil
+	}
+
+	var tokenToProposedFn map[token.Token]string
+
+	switch call.Fn.NameFTrimmed {
+	case "True":
+		tokenToProposedFn = tokenToProposedFnInsteadOfTrue
+	case "False":
+		tokenToProposedFn = tokenToProposedFnInsteadOfFalse
+	default:
+		return nil
+	}
+
+	if proposedFn, ok := tokenToProposedFn[be.Op]; ok {
+		a, b := be.X, be.Y
+		return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
+			newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+				Pos:     be.X.Pos(),
+				End:     be.Y.End(),
+				NewText: formatAsCallArgs(pass, a, b),
+			}),
+		)
+	}
+	return nil
+}
+
+var tokenToProposedFnInsteadOfTrue = map[token.Token]string{
+	token.EQL: "Equal",
+	token.NEQ: "NotEqual",
+	token.GTR: "Greater",
+	token.GEQ: "GreaterOrEqual",
+	token.LSS: "Less",
+	token.LEQ: "LessOrEqual",
+}
+
+var tokenToProposedFnInsteadOfFalse = map[token.Token]string{
+	token.EQL: "NotEqual",
+	token.NEQ: "Equal",
+	token.GTR: "LessOrEqual",
+	token.GEQ: "Less",
+	token.LSS: "GreaterOrEqual",
+	token.LEQ: "Greater",
+}
+
+// formatAsCallArgs joins a and b and returns bytes like `a, b`.
+func formatAsCallArgs(pass *analysis.Pass, a, b ast.Node) []byte {
+	return bytes.Join([][]byte{
+		analysisutil.NodeBytes(pass.Fset, a),
+		analysisutil.NodeBytes(pass.Fset, b),
+	}, []byte(", "))
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/diagnostic.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/diagnostic.go
new file mode 100644
index 0000000000000000000000000000000000000000..4ab69c69bb5f004af8f5e6061feecf06792d4645
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/diagnostic.go
@@ -0,0 +1,60 @@
+package checkers
+
+import (
+	"fmt"
+
+	"golang.org/x/tools/go/analysis"
+)
+
+func newUseFunctionDiagnostic(
+	checker string,
+	call *CallMeta,
+	proposedFn string,
+	fix *analysis.SuggestedFix,
+) *analysis.Diagnostic {
+	f := proposedFn
+	if call.Fn.IsFmt {
+		f += "f"
+	}
+	msg := fmt.Sprintf("use %s.%s", call.SelectorXStr, f)
+
+	return newDiagnostic(checker, call, msg, fix)
+}
+
+func newDiagnostic(
+	checker string,
+	rng analysis.Range,
+	msg string,
+	fix *analysis.SuggestedFix,
+) *analysis.Diagnostic {
+	d := analysis.Diagnostic{
+		Pos:      rng.Pos(),
+		End:      rng.End(),
+		Category: checker,
+		Message:  checker + ": " + msg,
+	}
+	if fix != nil {
+		d.SuggestedFixes = []analysis.SuggestedFix{*fix}
+	}
+	return &d
+}
+
+func newSuggestedFuncReplacement(
+	call *CallMeta,
+	proposedFn string,
+	additionalEdits ...analysis.TextEdit,
+) *analysis.SuggestedFix {
+	if call.Fn.IsFmt {
+		proposedFn += "f"
+	}
+	return &analysis.SuggestedFix{
+		Message: fmt.Sprintf("Replace `%s` with `%s`", call.Fn.Name, proposedFn),
+		TextEdits: append([]analysis.TextEdit{
+			{
+				Pos:     call.Fn.Pos(),
+				End:     call.Fn.End(),
+				NewText: []byte(proposedFn),
+			},
+		}, additionalEdits...),
+	}
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go
new file mode 100644
index 0000000000000000000000000000000000000000..5ad371bb4fbfe01bb8f43dd4995321610163ceb2
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go
@@ -0,0 +1,172 @@
+package checkers
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"go/types"
+
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+// Empty detects situations like
+//
+//	assert.Len(t, arr, 0)
+//	assert.Equal(t, 0, len(arr))
+//	assert.EqualValues(t, 0, len(arr))
+//	assert.Exactly(t, 0, len(arr))
+//	assert.LessOrEqual(t, len(arr), 0)
+//	assert.GreaterOrEqual(t, 0, len(arr))
+//	assert.Less(t, len(arr), 0)
+//	assert.Greater(t, 0, len(arr))
+//	assert.Less(t, len(arr), 1)
+//	assert.Greater(t, 1, len(arr))
+//
+//	assert.NotEqual(t, 0, len(arr))
+//	assert.NotEqualValues(t, 0, len(arr))
+//	assert.Less(t, 0, len(arr))
+//	assert.Greater(t, len(arr), 0)
+//
+// and requires
+//
+//	assert.Empty(t, arr)
+//	assert.NotEmpty(t, arr)
+type Empty struct{}
+
+// NewEmpty constructs Empty checker.
+func NewEmpty() Empty      { return Empty{} }
+func (Empty) Name() string { return "empty" }
+
+func (checker Empty) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+	if d := checker.checkEmpty(pass, call); d != nil {
+		return d
+	}
+	return checker.checkNotEmpty(pass, call)
+}
+
+func (checker Empty) checkEmpty(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { //nolint:gocognit
+	newUseEmptyDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic {
+		const proposed = "Empty"
+		return newUseFunctionDiagnostic(checker.Name(), call, proposed,
+			newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+				Pos:     replaceStart,
+				End:     replaceEnd,
+				NewText: analysisutil.NodeBytes(pass.Fset, replaceWith),
+			}),
+		)
+	}
+
+	if len(call.Args) < 2 {
+		return nil
+	}
+	a, b := call.Args[0], call.Args[1]
+
+	switch call.Fn.NameFTrimmed {
+	case "Len":
+		if isZero(b) {
+			return newUseEmptyDiagnostic(a.Pos(), b.End(), a)
+		}
+
+	case "Equal", "EqualValues", "Exactly":
+		arg1, ok1 := isLenCallAndZero(pass, a, b)
+		arg2, ok2 := isLenCallAndZero(pass, b, a)
+
+		if lenArg, ok := anyVal([]bool{ok1, ok2}, arg1, arg2); ok {
+			return newUseEmptyDiagnostic(a.Pos(), b.End(), lenArg)
+		}
+
+	case "LessOrEqual":
+		if lenArg, ok := isBuiltinLenCall(pass, a); ok && isZero(b) {
+			return newUseEmptyDiagnostic(a.Pos(), b.End(), lenArg)
+		}
+
+	case "GreaterOrEqual":
+		if lenArg, ok := isBuiltinLenCall(pass, b); ok && isZero(a) {
+			return newUseEmptyDiagnostic(a.Pos(), b.End(), lenArg)
+		}
+
+	case "Less":
+		if lenArg, ok := isBuiltinLenCall(pass, a); ok && (isOne(b) || isZero(b)) {
+			return newUseEmptyDiagnostic(a.Pos(), b.End(), lenArg)
+		}
+
+	case "Greater":
+		if lenArg, ok := isBuiltinLenCall(pass, b); ok && (isOne(a) || isZero(a)) {
+			return newUseEmptyDiagnostic(a.Pos(), b.End(), lenArg)
+		}
+	}
+	return nil
+}
+
+func (checker Empty) checkNotEmpty(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { //nolint:gocognit
+	newUseNotEmptyDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic {
+		const proposed = "NotEmpty"
+		return newUseFunctionDiagnostic(checker.Name(), call, proposed,
+			newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+				Pos:     replaceStart,
+				End:     replaceEnd,
+				NewText: analysisutil.NodeBytes(pass.Fset, replaceWith),
+			}),
+		)
+	}
+
+	if len(call.Args) < 2 {
+		return nil
+	}
+	a, b := call.Args[0], call.Args[1]
+
+	switch call.Fn.NameFTrimmed {
+	case "NotEqual", "NotEqualValues":
+		arg1, ok1 := isLenCallAndZero(pass, a, b)
+		arg2, ok2 := isLenCallAndZero(pass, b, a)
+
+		if lenArg, ok := anyVal([]bool{ok1, ok2}, arg1, arg2); ok {
+			return newUseNotEmptyDiagnostic(a.Pos(), b.End(), lenArg)
+		}
+
+	case "Less":
+		if lenArg, ok := isBuiltinLenCall(pass, b); ok && isZero(a) {
+			return newUseNotEmptyDiagnostic(a.Pos(), b.End(), lenArg)
+		}
+
+	case "Greater":
+		if lenArg, ok := isBuiltinLenCall(pass, a); ok && isZero(b) {
+			return newUseNotEmptyDiagnostic(a.Pos(), b.End(), lenArg)
+		}
+	}
+	return nil
+}
+
+var lenObj = types.Universe.Lookup("len")
+
+func isLenCallAndZero(pass *analysis.Pass, a, b ast.Expr) (ast.Expr, bool) {
+	lenArg, ok := isBuiltinLenCall(pass, a)
+	return lenArg, ok && isZero(b)
+}
+
+func isBuiltinLenCall(pass *analysis.Pass, e ast.Expr) (ast.Expr, bool) {
+	ce, ok := e.(*ast.CallExpr)
+	if !ok {
+		return nil, false
+	}
+
+	if analysisutil.IsObj(pass.TypesInfo, ce.Fun, lenObj) && len(ce.Args) == 1 {
+		return ce.Args[0], true
+	}
+	return nil, false
+}
+
+func isZero(e ast.Expr) bool {
+	return isIntNumber(e, 0)
+}
+
+func isOne(e ast.Expr) bool {
+	return isIntNumber(e, 1)
+}
+
+func isIntNumber(e ast.Expr, v int) bool {
+	bl, ok := e.(*ast.BasicLit)
+	return ok && bl.Kind == token.INT && bl.Value == fmt.Sprintf("%d", v)
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go
new file mode 100644
index 0000000000000000000000000000000000000000..0363873a63a9422bc375ecd948d3e28268766cde
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go
@@ -0,0 +1,166 @@
+package checkers
+
+import (
+	"fmt"
+	"go/ast"
+	"go/types"
+
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+// ErrorIsAs detects situations like
+//
+//	assert.Error(t, err, errSentinel)
+//	assert.NoError(t, err, errSentinel)
+//	assert.True(t, errors.Is(err, errSentinel))
+//	assert.False(t, errors.Is(err, errSentinel))
+//	assert.True(t, errors.As(err, &target))
+//
+// and requires
+//
+//	assert.ErrorIs(t, err, errSentinel)
+//	assert.NotErrorIs(t, err, errSentinel)
+//	assert.ErrorAs(t, err, &target)
+//
+// Also ErrorIsAs repeats go vet's "errorsas" check logic.
+type ErrorIsAs struct{}
+
+// NewErrorIsAs constructs ErrorIsAs checker.
+func NewErrorIsAs() ErrorIsAs  { return ErrorIsAs{} }
+func (ErrorIsAs) Name() string { return "error-is-as" }
+
+func (checker ErrorIsAs) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+	switch call.Fn.NameFTrimmed {
+	case "Error":
+		if len(call.Args) >= 2 && isError(pass, call.Args[1]) {
+			const proposed = "ErrorIs"
+			msg := fmt.Sprintf("invalid usage of %[1]s.Error, use %[1]s.%[2]s instead", call.SelectorXStr, proposed)
+			return newDiagnostic(checker.Name(), call, msg, newSuggestedFuncReplacement(call, proposed))
+		}
+
+	case "NoError":
+		if len(call.Args) >= 2 && isError(pass, call.Args[1]) {
+			const proposed = "NotErrorIs"
+			msg := fmt.Sprintf("invalid usage of %[1]s.NoError, use %[1]s.%[2]s instead", call.SelectorXStr, proposed)
+			return newDiagnostic(checker.Name(), call, msg, newSuggestedFuncReplacement(call, proposed))
+		}
+
+	case "True":
+		if len(call.Args) < 1 {
+			return nil
+		}
+
+		ce, ok := call.Args[0].(*ast.CallExpr)
+		if !ok {
+			return nil
+		}
+		if len(ce.Args) != 2 {
+			return nil
+		}
+
+		var proposed string
+		switch {
+		case isErrorsIsCall(pass, ce):
+			proposed = "ErrorIs"
+		case isErrorsAsCall(pass, ce):
+			proposed = "ErrorAs"
+		}
+		if proposed != "" {
+			return newUseFunctionDiagnostic(checker.Name(), call, proposed,
+				newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+					Pos:     ce.Pos(),
+					End:     ce.End(),
+					NewText: formatAsCallArgs(pass, ce.Args[0], ce.Args[1]),
+				}),
+			)
+		}
+
+	case "False":
+		if len(call.Args) < 1 {
+			return nil
+		}
+
+		ce, ok := call.Args[0].(*ast.CallExpr)
+		if !ok {
+			return nil
+		}
+		if len(ce.Args) != 2 {
+			return nil
+		}
+
+		if isErrorsIsCall(pass, ce) {
+			const proposed = "NotErrorIs"
+			return newUseFunctionDiagnostic(checker.Name(), call, proposed,
+				newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+					Pos:     ce.Pos(),
+					End:     ce.End(),
+					NewText: formatAsCallArgs(pass, ce.Args[0], ce.Args[1]),
+				}),
+			)
+		}
+
+	case "ErrorAs":
+		if len(call.Args) < 2 {
+			return nil
+		}
+
+		// NOTE(a.telyshev): Logic below must be consistent with
+		// https://cs.opensource.google/go/x/tools/+/master:go/analysis/passes/errorsas/errorsas.go
+
+		var (
+			defaultReport  = fmt.Sprintf("second argument to %s must be a non-nil pointer to either a type that implements error, or to any interface type", call) //nolint:lll
+			errorPtrReport = fmt.Sprintf("second argument to %s should not be *error", call)
+		)
+
+		target := call.Args[1]
+
+		if isEmptyInterface(pass, target) {
+			// `any` interface case. It is always allowed, since it often indicates
+			// a value forwarded from another source.
+			return nil
+		}
+
+		tv, ok := pass.TypesInfo.Types[target]
+		if !ok {
+			return nil
+		}
+
+		pt, ok := tv.Type.Underlying().(*types.Pointer)
+		if !ok {
+			return newDiagnostic(checker.Name(), call, defaultReport, nil)
+		}
+		if pt.Elem() == errorType {
+			return newDiagnostic(checker.Name(), call, errorPtrReport, nil)
+		}
+
+		_, isInterface := pt.Elem().Underlying().(*types.Interface)
+		if !isInterface && !types.Implements(pt.Elem(), errorIface) {
+			return newDiagnostic(checker.Name(), call, defaultReport, nil)
+		}
+	}
+	return nil
+}
+
+func isErrorsIsCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+	return isErrorsPkgFnCall(pass, ce, "Is")
+}
+
+func isErrorsAsCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+	return isErrorsPkgFnCall(pass, ce, "As")
+}
+
+func isErrorsPkgFnCall(pass *analysis.Pass, ce *ast.CallExpr, fn string) bool {
+	se, ok := ce.Fun.(*ast.SelectorExpr)
+	if !ok {
+		return false
+	}
+
+	errorsIsObj := analysisutil.ObjectOf(pass.Pkg, "errors", fn)
+	if errorsIsObj == nil {
+		return false
+	}
+
+	return analysisutil.IsObj(pass.TypesInfo, se.Sel, errorsIsObj)
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go
new file mode 100644
index 0000000000000000000000000000000000000000..5b0af7458f682783b3cf8bef09e25941b8f428d5
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go
@@ -0,0 +1,113 @@
+package checkers
+
+import (
+	"go/ast"
+	"go/token"
+	"go/types"
+
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+// ErrorNil detects situations like
+//
+//	assert.Nil(t, err)
+//	assert.NotNil(t, err)
+//	assert.Equal(t, nil, err)
+//	assert.EqualValues(t, nil, err)
+//	assert.Exactly(t, nil, err)
+//	assert.ErrorIs(t, err, nil)
+//
+//	assert.NotEqual(t, nil, err)
+//	assert.NotEqualValues(t, nil, err)
+//	assert.NotErrorIs(t, err, nil)
+//
+// and requires
+//
+//	assert.NoError(t, err)
+//	assert.Error(t, err)
+type ErrorNil struct{}
+
+// NewErrorNil constructs ErrorNil checker.
+func NewErrorNil() ErrorNil   { return ErrorNil{} }
+func (ErrorNil) Name() string { return "error-nil" }
+
+func (checker ErrorNil) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+	const (
+		errorFn   = "Error"
+		noErrorFn = "NoError"
+	)
+
+	proposedFn, survivingArg, replacementEndPos := func() (string, ast.Expr, token.Pos) {
+		switch call.Fn.NameFTrimmed {
+		case "Nil":
+			if len(call.Args) >= 1 && isError(pass, call.Args[0]) {
+				return noErrorFn, call.Args[0], call.Args[0].End()
+			}
+
+		case "NotNil":
+			if len(call.Args) >= 1 && isError(pass, call.Args[0]) {
+				return errorFn, call.Args[0], call.Args[0].End()
+			}
+
+		case "Equal", "EqualValues", "Exactly", "ErrorIs":
+			if len(call.Args) < 2 {
+				return "", nil, token.NoPos
+			}
+			a, b := call.Args[0], call.Args[1]
+
+			switch {
+			case isError(pass, a) && isNil(b):
+				return noErrorFn, a, b.End()
+			case isNil(a) && isError(pass, b):
+				return noErrorFn, b, b.End()
+			}
+
+		case "NotEqual", "NotEqualValues", "NotErrorIs":
+			if len(call.Args) < 2 {
+				return "", nil, token.NoPos
+			}
+			a, b := call.Args[0], call.Args[1]
+
+			switch {
+			case isError(pass, a) && isNil(b):
+				return errorFn, a, b.End()
+			case isNil(a) && isError(pass, b):
+				return errorFn, b, b.End()
+			}
+		}
+		return "", nil, token.NoPos
+	}()
+
+	if proposedFn != "" {
+		return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
+			newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+				Pos:     call.Args[0].Pos(),
+				End:     replacementEndPos,
+				NewText: analysisutil.NodeBytes(pass.Fset, survivingArg),
+			}),
+		)
+	}
+	return nil
+}
+
+var (
+	errorType  = types.Universe.Lookup("error").Type()
+	errorIface = errorType.Underlying().(*types.Interface)
+)
+
+func isError(pass *analysis.Pass, expr ast.Expr) bool {
+	t := pass.TypesInfo.TypeOf(expr)
+	if t == nil {
+		return false
+	}
+
+	_, ok := t.Underlying().(*types.Interface)
+	return ok && types.Implements(t, errorIface)
+}
+
+func isNil(expr ast.Expr) bool {
+	ident, ok := expr.(*ast.Ident)
+	return ok && ident.Name == "nil"
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go
new file mode 100644
index 0000000000000000000000000000000000000000..e6825eaa67a9dad62fa15d47812122230c5260b9
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go
@@ -0,0 +1,215 @@
+package checkers
+
+import (
+	"go/ast"
+	"go/token"
+	"go/types"
+	"regexp"
+
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+// DefaultExpectedVarPattern matches variable names with an "expected" or "wanted" prefix or suffix.
+var DefaultExpectedVarPattern = regexp.MustCompile(
+	`(^(exp(ected)?|want(ed)?)([A-Z]\w*)?$)|(^(\w*[a-z])?(Exp(ected)?|Want(ed)?)$)`)
+
+// ExpectedActual detects situations like
+//
+//	assert.Equal(t, result, expected)
+//	assert.EqualExportedValues(t, resultObj, User{Name: "Anton"})
+//	assert.EqualValues(t, result, 42)
+//	assert.Exactly(t, result, int64(42))
+//	assert.JSONEq(t, result, `{"version": 3}`)
+//	assert.InDelta(t, result, 42.42, 1.0)
+//	assert.InDeltaMapValues(t, result, map[string]float64{"score": 0.99}, 1.0)
+//	assert.InDeltaSlice(t, result, []float64{0.98, 0.99}, 1.0)
+//	assert.InEpsilon(t, result, 42.42, 0.0001)
+//	assert.InEpsilonSlice(t, result, []float64{0.9801, 0.9902}, 0.0001)
+//	assert.IsType(t, result, (*User)(nil))
+//	assert.NotEqual(t, result, "expected")
+//	assert.NotEqualValues(t, result, "expected")
+//	assert.NotSame(t, resultPtr, &value)
+//	assert.Same(t, resultPtr, &value)
+//	assert.WithinDuration(t, resultTime, time.Date(2023, 01, 12, 11, 46, 33, 0, nil), time.Second)
+//	assert.YAMLEq(t, result, "version: '3'")
+//
+// and requires
+//
+//	assert.Equal(t, expected, result)
+//	assert.EqualExportedValues(t, User{Name: "Anton"}, resultObj)
+//	assert.EqualValues(t, 42, result)
+//	...
+type ExpectedActual struct {
+	expVarPattern *regexp.Regexp
+}
+
+// NewExpectedActual constructs ExpectedActual checker using DefaultExpectedVarPattern.
+func NewExpectedActual() *ExpectedActual {
+	return &ExpectedActual{expVarPattern: DefaultExpectedVarPattern}
+}
+
+func (ExpectedActual) Name() string { return "expected-actual" }
+
+func (checker *ExpectedActual) SetExpVarPattern(p *regexp.Regexp) *ExpectedActual {
+	if p != nil {
+		checker.expVarPattern = p
+	}
+	return checker
+}
+
+func (checker ExpectedActual) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+	switch call.Fn.NameFTrimmed {
+	case "Equal",
+		"EqualExportedValues",
+		"EqualValues",
+		"Exactly",
+		"InDelta",
+		"InDeltaMapValues",
+		"InDeltaSlice",
+		"InEpsilon",
+		"InEpsilonSlice",
+		"IsType",
+		"JSONEq",
+		"NotEqual",
+		"NotEqualValues",
+		"NotSame",
+		"Same",
+		"WithinDuration",
+		"YAMLEq":
+	default:
+		return nil
+	}
+
+	if len(call.Args) < 2 {
+		return nil
+	}
+	first, second := call.Args[0], call.Args[1]
+
+	if checker.isWrongExpectedActualOrder(pass, first, second) {
+		return newDiagnostic(checker.Name(), call, "need to reverse actual and expected values", &analysis.SuggestedFix{
+			Message: "Reverse actual and expected values",
+			TextEdits: []analysis.TextEdit{
+				{
+					Pos:     first.Pos(),
+					End:     second.End(),
+					NewText: formatAsCallArgs(pass, second, first),
+				},
+			},
+		})
+	}
+	return nil
+}
+
+func (checker ExpectedActual) isWrongExpectedActualOrder(pass *analysis.Pass, first, second ast.Expr) bool {
+	leftIsCandidate := checker.isExpectedValueCandidate(pass, first)
+	rightIsCandidate := checker.isExpectedValueCandidate(pass, second)
+	return rightIsCandidate && !leftIsCandidate
+}
+
+func (checker ExpectedActual) isExpectedValueCandidate(pass *analysis.Pass, expr ast.Expr) bool {
+	switch v := expr.(type) {
+	case *ast.ParenExpr:
+		return checker.isExpectedValueCandidate(pass, v.X)
+
+	case *ast.StarExpr: // *value
+		return checker.isExpectedValueCandidate(pass, v.X)
+
+	case *ast.UnaryExpr:
+		return (v.Op == token.AND) && checker.isExpectedValueCandidate(pass, v.X) // &value
+
+	case *ast.CompositeLit:
+		return true
+
+	case *ast.CallExpr:
+		return isParenExpr(v) ||
+			isCastedBasicLitOrExpectedValue(v, checker.expVarPattern) ||
+			isExpectedValueFactory(pass, v, checker.expVarPattern)
+	}
+
+	return isBasicLit(expr) ||
+		isUntypedConst(pass, expr) ||
+		isTypedConst(pass, expr) ||
+		isIdentNamedAsExpected(checker.expVarPattern, expr) ||
+		isStructVarNamedAsExpected(checker.expVarPattern, expr) ||
+		isStructFieldNamedAsExpected(checker.expVarPattern, expr)
+}
+
+func isParenExpr(ce *ast.CallExpr) bool {
+	_, ok := ce.Fun.(*ast.ParenExpr)
+	return ok
+}
+
+func isCastedBasicLitOrExpectedValue(ce *ast.CallExpr, pattern *regexp.Regexp) bool {
+	if len(ce.Args) != 1 {
+		return false
+	}
+
+	fn, ok := ce.Fun.(*ast.Ident)
+	if !ok {
+		return false
+	}
+
+	switch fn.Name {
+	case "complex64", "complex128":
+		return true
+
+	case "uint", "uint8", "uint16", "uint32", "uint64",
+		"int", "int8", "int16", "int32", "int64",
+		"float32", "float64",
+		"rune", "string":
+		return isBasicLit(ce.Args[0]) || isIdentNamedAsExpected(pattern, ce.Args[0])
+	}
+	return false
+}
+
+func isExpectedValueFactory(pass *analysis.Pass, ce *ast.CallExpr, pattern *regexp.Regexp) bool {
+	switch fn := ce.Fun.(type) {
+	case *ast.Ident:
+		return pattern.MatchString(fn.Name)
+
+	case *ast.SelectorExpr:
+		timeDateFn := analysisutil.ObjectOf(pass.Pkg, "time", "Date")
+		if timeDateFn != nil && analysisutil.IsObj(pass.TypesInfo, fn.Sel, timeDateFn) {
+			return true
+		}
+		return pattern.MatchString(fn.Sel.Name)
+	}
+	return false
+}
+
+func isBasicLit(e ast.Expr) bool {
+	_, ok := e.(*ast.BasicLit)
+	return ok
+}
+
+func isUntypedConst(p *analysis.Pass, e ast.Expr) bool {
+	t := p.TypesInfo.TypeOf(e)
+	if t == nil {
+		return false
+	}
+
+	b, ok := t.(*types.Basic)
+	return ok && b.Info()&types.IsUntyped > 0
+}
+
+func isTypedConst(p *analysis.Pass, e ast.Expr) bool {
+	tt, ok := p.TypesInfo.Types[e]
+	return ok && tt.IsValue() && tt.Value != nil
+}
+
+func isIdentNamedAsExpected(pattern *regexp.Regexp, e ast.Expr) bool {
+	id, ok := e.(*ast.Ident)
+	return ok && pattern.MatchString(id.Name)
+}
+
+func isStructVarNamedAsExpected(pattern *regexp.Regexp, e ast.Expr) bool {
+	s, ok := e.(*ast.SelectorExpr)
+	return ok && isIdentNamedAsExpected(pattern, s.X)
+}
+
+func isStructFieldNamedAsExpected(pattern *regexp.Regexp, e ast.Expr) bool {
+	s, ok := e.(*ast.SelectorExpr)
+	return ok && isIdentNamedAsExpected(pattern, s.Sel)
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go
new file mode 100644
index 0000000000000000000000000000000000000000..10b1330de8909a2b155092359f72777df99c773d
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go
@@ -0,0 +1,70 @@
+package checkers
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"go/types"
+
+	"golang.org/x/tools/go/analysis"
+)
+
+// FloatCompare detects situations like
+//
+//	assert.Equal(t, 42.42, result)
+//	assert.EqualValues(t, 42.42, result)
+//	assert.Exactly(t, 42.42, result)
+//	assert.True(t, result == 42.42)
+//	assert.False(t, result != 42.42)
+//
+// and requires
+//
+//	assert.InEpsilon(t, 42.42, result, 0.0001) // Or assert.InDelta
+type FloatCompare struct{}
+
+// NewFloatCompare constructs FloatCompare checker.
+func NewFloatCompare() FloatCompare { return FloatCompare{} }
+func (FloatCompare) Name() string   { return "float-compare" }
+
+func (checker FloatCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+	invalid := func() bool {
+		switch call.Fn.NameFTrimmed {
+		case "Equal", "EqualValues", "Exactly":
+			return len(call.Args) > 1 && (isFloat(pass, call.Args[0]) || isFloat(pass, call.Args[1]))
+
+		case "True":
+			return len(call.Args) > 0 && isFloatCompare(pass, call.Args[0], token.EQL)
+
+		case "False":
+			return len(call.Args) > 0 && isFloatCompare(pass, call.Args[0], token.NEQ)
+		}
+		return false
+	}()
+
+	if invalid {
+		format := "use %s.InEpsilon (or InDelta)"
+		if call.Fn.IsFmt {
+			format = "use %s.InEpsilonf (or InDeltaf)"
+		}
+		return newDiagnostic(checker.Name(), call, fmt.Sprintf(format, call.SelectorXStr), nil)
+	}
+	return nil
+}
+
+func isFloat(pass *analysis.Pass, expr ast.Expr) bool {
+	t := pass.TypesInfo.TypeOf(expr)
+	if t == nil {
+		return false
+	}
+
+	bt, ok := t.Underlying().(*types.Basic)
+	return ok && (bt.Info()&types.IsFloat > 0)
+}
+
+func isFloatCompare(p *analysis.Pass, e ast.Expr, op token.Token) bool {
+	be, ok := e.(*ast.BinaryExpr)
+	if !ok {
+		return false
+	}
+	return be.Op == op && (isFloat(p, be.X) || isFloat(p, be.Y))
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go
new file mode 100644
index 0000000000000000000000000000000000000000..0844f15a0930a7ceb2de3a47d08bb742712832d0
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go
@@ -0,0 +1,327 @@
+package checkers
+
+import (
+	"fmt"
+	"go/ast"
+	"go/types"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/ast/inspector"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+const (
+	goRequireFnReportFormat   = "%s contains assertions that must only be used in the goroutine running the test function"
+	goRequireCallReportFormat = "%s must only be used in the goroutine running the test function"
+)
+
+// GoRequire takes its idea from go vet's "testinggoroutine" check
+// and detects usage of the require package's functions or assert.FailNow in non-test goroutines
+//
+//	go func() {
+//		conn, err = lis.Accept()
+//		require.NoError(t, err)
+//
+//		if assert.Error(err) {
+//			assert.FailNow(t, msg)
+//		}
+//	}()
+type GoRequire struct{}
+
+// NewGoRequire constructs GoRequire checker.
+func NewGoRequire() GoRequire  { return GoRequire{} }
+func (GoRequire) Name() string { return "go-require" }
+
+// Check should be consistent with
+// https://cs.opensource.google/go/x/tools/+/master:go/analysis/passes/testinggoroutine/testinggoroutine.go
+//
+// But because the Check covers cases missed by go vet,
+// the implementation turned out to be terribly complicated.
+//
+// In simple words, the algorithm is as follows:
+//   - we walk along the call tree and store the status, whether we are in the test goroutine or not;
+//   - if we are in a test goroutine, then require is allowed, otherwise not;
+//   - when we encounter the launch of a subtest or `go` statement, the status changes;
+//   - in order to correctly handle the return to the correct status when exiting the current function,
+//     we have to store a stack of statuses (inGoroutineRunningTestFunc).
+//
+// Other test functions called from the test function are also analyzed in order to reach a verdict about the current function.
+// This leads to recursion, whose impact the cache of processed functions (processedFuncs) helps to reduce.
+// Also, because of this, we have to pre-collect the list of test function declarations (testsDecls).
+func (checker GoRequire) Check(pass *analysis.Pass, inspector *inspector.Inspector) (diagnostics []analysis.Diagnostic) {
+	testsDecls := make(funcDeclarations)
+	inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil)}, func(node ast.Node) {
+		fd := node.(*ast.FuncDecl)
+
+		if isTestingFuncOrMethod(pass, fd) {
+			if tf, ok := pass.TypesInfo.ObjectOf(fd.Name).(*types.Func); ok {
+				testsDecls[tf] = fd
+			}
+		}
+	})
+
+	var inGoroutineRunningTestFunc boolStack
+	processedFuncs := make(map[*ast.FuncDecl]goRequireVerdict)
+
+	nodesFilter := []ast.Node{
+		(*ast.FuncDecl)(nil),
+		(*ast.FuncType)(nil),
+		(*ast.GoStmt)(nil),
+		(*ast.CallExpr)(nil),
+	}
+	inspector.Nodes(nodesFilter, func(node ast.Node, push bool) bool {
+		if fd, ok := node.(*ast.FuncDecl); ok {
+			if !isTestingFuncOrMethod(pass, fd) {
+				return false
+			}
+
+			if push {
+				inGoroutineRunningTestFunc.Push(true)
+			} else {
+				inGoroutineRunningTestFunc.Pop()
+			}
+			return true
+		}
+
+		if ft, ok := node.(*ast.FuncType); ok {
+			if !isTestingAnonymousFunc(pass, ft) {
+				return false
+			}
+
+			if push {
+				inGoroutineRunningTestFunc.Push(true)
+			} else {
+				inGoroutineRunningTestFunc.Pop()
+			}
+			return true
+		}
+
+		if _, ok := node.(*ast.GoStmt); ok {
+			if push {
+				inGoroutineRunningTestFunc.Push(false)
+			} else {
+				inGoroutineRunningTestFunc.Pop()
+			}
+			return true
+		}
+
+		ce := node.(*ast.CallExpr)
+		if isSubTestRun(pass, ce) {
+			if push {
+				// t.Run spawns a new testing goroutine and suppresses
+				// possible warnings from the previous "simple" goroutine.
+				inGoroutineRunningTestFunc.Push(true)
+			} else {
+				inGoroutineRunningTestFunc.Pop()
+			}
+			return true
+		}
+
+		if !push {
+			return false
+		}
+		if inGoroutineRunningTestFunc.Len() == 0 {
+			// Insufficient info.
+			return true
+		}
+		if inGoroutineRunningTestFunc.Last() {
+			// We are in testing goroutine and can skip any assertion checks.
+			return true
+		}
+
+		testifyCall := NewCallMeta(pass, ce)
+		if testifyCall != nil {
+			switch checker.checkCall(testifyCall) {
+			case goRequireVerdictRequire:
+				d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, "require"), nil)
+				diagnostics = append(diagnostics, *d)
+
+			case goRequireVerdictAssertFailNow:
+				d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, testifyCall), nil)
+				diagnostics = append(diagnostics, *d)
+
+			case goRequireVerdictNoExit:
+			}
+			return false
+		}
+
+		// Case of nested function call.
+		{
+			calledFd := testsDecls.Get(pass, ce)
+			if calledFd == nil {
+				return true
+			}
+
+			if v := checker.checkFunc(pass, calledFd, testsDecls, processedFuncs); v != goRequireVerdictNoExit {
+				caller := analysisutil.NodeString(pass.Fset, ce.Fun)
+				d := newDiagnostic(checker.Name(), ce, fmt.Sprintf(goRequireFnReportFormat, caller), nil)
+				diagnostics = append(diagnostics, *d)
+			}
+		}
+		return true
+	})
+
+	return diagnostics
+}
+
+func (checker GoRequire) checkFunc(
+	pass *analysis.Pass,
+	fd *ast.FuncDecl,
+	testsDecls funcDeclarations,
+	processedFuncs map[*ast.FuncDecl]goRequireVerdict,
+) (result goRequireVerdict) {
+	if v, ok := processedFuncs[fd]; ok {
+		return v
+	}
+
+	ast.Inspect(fd, func(node ast.Node) bool {
+		if result != goRequireVerdictNoExit {
+			return false
+		}
+
+		if _, ok := node.(*ast.GoStmt); ok {
+			return false
+		}
+
+		ce, ok := node.(*ast.CallExpr)
+		if !ok {
+			return true
+		}
+
+		testifyCall := NewCallMeta(pass, ce)
+		if testifyCall != nil {
+			if v := checker.checkCall(testifyCall); v != goRequireVerdictNoExit {
+				result, processedFuncs[fd] = v, v
+			}
+			return false
+		}
+
+		// Case of nested function call.
+		{
+			calledFd := testsDecls.Get(pass, ce)
+			if calledFd == nil {
+				return true
+			}
+			if calledFd == fd {
+				// Recursion.
+				return true
+			}
+
+			if v := checker.checkFunc(pass, calledFd, testsDecls, processedFuncs); v != goRequireVerdictNoExit {
+				result = v
+				return false
+			}
+			return true
+		}
+	})
+
+	return result
+}
+
+type goRequireVerdict int
+
+const (
+	goRequireVerdictNoExit goRequireVerdict = iota
+	goRequireVerdictRequire
+	goRequireVerdictAssertFailNow
+)
+
+func (checker GoRequire) checkCall(call *CallMeta) goRequireVerdict {
+	if !call.IsAssert {
+		return goRequireVerdictRequire
+	}
+	if call.Fn.NameFTrimmed == "FailNow" {
+		return goRequireVerdictAssertFailNow
+	}
+	return goRequireVerdictNoExit
+}
+
+type funcDeclarations map[*types.Func]*ast.FuncDecl
+
+// Get returns the declaration of a called function or method.
+// Currently, only static calls within the same package are supported, otherwise returns nil.
+func (fd funcDeclarations) Get(pass *analysis.Pass, ce *ast.CallExpr) *ast.FuncDecl {
+	var obj types.Object
+
+	switch fun := ce.Fun.(type) {
+	case *ast.SelectorExpr:
+		obj = pass.TypesInfo.ObjectOf(fun.Sel)
+
+	case *ast.Ident:
+		obj = pass.TypesInfo.ObjectOf(fun)
+
+	case *ast.IndexExpr:
+		if id, ok := fun.X.(*ast.Ident); ok {
+			obj = pass.TypesInfo.ObjectOf(id)
+		}
+
+	case *ast.IndexListExpr:
+		if id, ok := fun.X.(*ast.Ident); ok {
+			obj = pass.TypesInfo.ObjectOf(id)
+		}
+	}
+
+	if tf, ok := obj.(*types.Func); ok {
+		return fd[tf]
+	}
+	return nil
+}
+
+type boolStack []bool
+
+func (s boolStack) Len() int {
+	return len(s)
+}
+
+func (s *boolStack) Push(v bool) {
+	*s = append(*s, v)
+}
+
+func (s *boolStack) Pop() bool {
+	n := len(*s)
+	if n == 0 {
+		return false
+	}
+
+	last := (*s)[n-1]
+	*s = (*s)[:n-1]
+	return last
+}
+
+func (s boolStack) Last() bool {
+	n := len(s)
+	if n == 0 {
+		return false
+	}
+	return s[n-1]
+}
+
+func isSubTestRun(pass *analysis.Pass, ce *ast.CallExpr) bool {
+	se, ok := ce.Fun.(*ast.SelectorExpr)
+	if !ok || se.Sel == nil {
+		return false
+	}
+	return (isTestingTPtr(pass, se.X) || implementsTestifySuiteIface(pass, se.X)) && se.Sel.Name == "Run"
+}
+
+func isTestingFuncOrMethod(pass *analysis.Pass, fd *ast.FuncDecl) bool {
+	return hasTestingTParam(pass, fd.Type) || isTestifySuiteMethod(pass, fd)
+}
+
+func isTestingAnonymousFunc(pass *analysis.Pass, ft *ast.FuncType) bool {
+	return hasTestingTParam(pass, ft)
+}
+
+func hasTestingTParam(pass *analysis.Pass, ft *ast.FuncType) bool {
+	if ft == nil || ft.Params == nil {
+		return false
+	}
+
+	for _, param := range ft.Params.List {
+		if isTestingTPtr(pass, param.Type) {
+			return true
+		}
+	}
+	return false
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go
new file mode 100644
index 0000000000000000000000000000000000000000..d4e6a48b5b612c86e9e9a708f4a4bf43c1f14d2a
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go
@@ -0,0 +1,97 @@
+package checkers
+
+import (
+	"go/ast"
+	"go/token"
+
+	"golang.org/x/tools/go/analysis"
+)
+
+// Len detects situations like
+//
+//	assert.Equal(t, 3, len(arr))
+//	assert.EqualValues(t, 3, len(arr))
+//	assert.Exactly(t, 3, len(arr))
+//	assert.True(t, len(arr) == 3)
+//
+// and requires
+//
+//	assert.Len(t, arr, 3)
+type Len struct{}
+
+// NewLen constructs Len checker.
+func NewLen() Len        { return Len{} }
+func (Len) Name() string { return "len" }
+
+func (checker Len) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+	const proposedFn = "Len"
+
+	switch call.Fn.NameFTrimmed {
+	case "Equal", "EqualValues", "Exactly":
+		if len(call.Args) < 2 {
+			return nil
+		}
+		a, b := call.Args[0], call.Args[1]
+
+		if lenArg, expectedLen, ok := xorLenCall(pass, a, b); ok {
+			if expectedLen == b && !isIntBasicLit(expectedLen) {
+				// https://github.com/Antonboom/testifylint/issues/9
+				return nil
+			}
+			return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
+				newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+					Pos:     a.Pos(),
+					End:     b.End(),
+					NewText: formatAsCallArgs(pass, lenArg, expectedLen),
+				}),
+			)
+		}
+
+	case "True":
+		if len(call.Args) < 1 {
+			return nil
+		}
+		expr := call.Args[0]
+
+		if lenArg, expectedLen, ok := isLenEquality(pass, expr); ok && isIntBasicLit(expectedLen) {
+			return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
+				newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+					Pos:     expr.Pos(),
+					End:     expr.End(),
+					NewText: formatAsCallArgs(pass, lenArg, expectedLen),
+				}),
+			)
+		}
+	}
+	return nil
+}
+
+func xorLenCall(pass *analysis.Pass, a, b ast.Expr) (lenArg ast.Expr, expectedLen ast.Expr, ok bool) {
+	arg1, ok1 := isBuiltinLenCall(pass, a)
+	arg2, ok2 := isBuiltinLenCall(pass, b)
+
+	if xor(ok1, ok2) {
+		if ok1 {
+			return arg1, b, true
+		}
+		return arg2, a, true
+	}
+	return nil, nil, false
+}
+
+func isLenEquality(pass *analysis.Pass, e ast.Expr) (ast.Expr, ast.Expr, bool) {
+	be, ok := e.(*ast.BinaryExpr)
+	if !ok {
+		return nil, nil, false
+	}
+
+	if be.Op != token.EQL {
+		return nil, nil, false
+	}
+	return xorLenCall(pass, be.X, be.Y)
+}
+
+func isIntBasicLit(e ast.Expr) bool {
+	bl, ok := e.(*ast.BasicLit)
+	return ok && bl.Kind == token.INT
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go
new file mode 100644
index 0000000000000000000000000000000000000000..89680a069967fe663d3dc35719aaabff5fdfbf27
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go
@@ -0,0 +1,69 @@
+package checkers
+
+import (
+	"go/ast"
+
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+// NilCompare detects situations like
+//
+//	assert.Equal(t, nil, value)
+//	assert.EqualValues(t, nil, value)
+//	assert.Exactly(t, nil, value)
+//
+//	assert.NotEqual(t, nil, value)
+//	assert.NotEqualValues(t, nil, value)
+//
+// and requires
+//
+//	assert.Nil(t, value)
+//	assert.NotNil(t, value)
+type NilCompare struct{}
+
+// NewNilCompare constructs NilCompare checker.
+func NewNilCompare() NilCompare { return NilCompare{} }
+func (NilCompare) Name() string { return "nil-compare" }
+
+func (checker NilCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+	if len(call.Args) < 2 {
+		return nil
+	}
+
+	survivingArg, ok := xorNil(call.Args[0], call.Args[1])
+	if !ok {
+		return nil
+	}
+
+	var proposedFn string
+
+	switch call.Fn.NameFTrimmed {
+	case "Equal", "EqualValues", "Exactly":
+		proposedFn = "Nil"
+	case "NotEqual", "NotEqualValues":
+		proposedFn = "NotNil"
+	default:
+		return nil
+	}
+
+	return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
+		newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+			Pos:     call.Args[0].Pos(),
+			End:     call.Args[1].End(),
+			NewText: analysisutil.NodeBytes(pass.Fset, survivingArg),
+		}),
+	)
+}
+
+func xorNil(first, second ast.Expr) (ast.Expr, bool) {
+	a, b := isNil(first), isNil(second)
+	if xor(a, b) {
+		if a {
+			return second, true
+		}
+		return first, true
+	}
+	return nil, false
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go
new file mode 100644
index 0000000000000000000000000000000000000000..ab09dd4478a6f4e6d7d74be68a23bfb82d248b87
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go
@@ -0,0 +1,338 @@
+package checkers
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"regexp"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/ast/inspector"
+)
+
+const requireErrorReport = "for error assertions use require"
+
+// RequireError detects error assertions like
+//
+//	assert.Error(t, err) // s.Error(err), s.Assert().Error(err)
+//	assert.ErrorIs(t, err, io.EOF)
+//	assert.ErrorAs(t, err, &target)
+//	assert.EqualError(t, err, "end of file")
+//	assert.ErrorContains(t, err, "end of file")
+//	assert.NoError(t, err)
+//	assert.NotErrorIs(t, err, io.EOF)
+//
+// and requires
+//
+//	require.Error(t, err) // s.Require().Error(err)
+//	require.ErrorIs(t, err, io.EOF)
+//	require.ErrorAs(t, err, &target)
+//	...
+//
+// RequireError ignores:
+// - assertions in the `if` condition;
+// - the entire `if-else[-if]` block, if there is an assertion in any `if` condition;
+// - the last assertion in the block, if there are no method/function calls after it;
+// - assertions in an explicit goroutine;
+// - assertions in an explicit testing cleanup function or suite teardown methods;
+// - sequences of NoError assertions.
+type RequireError struct {
+	fnPattern *regexp.Regexp
+}
+
+// NewRequireError constructs RequireError checker.
+func NewRequireError() *RequireError { return new(RequireError) }
+func (RequireError) Name() string    { return "require-error" }
+
+func (checker *RequireError) SetFnPattern(p *regexp.Regexp) *RequireError {
+	if p != nil {
+		checker.fnPattern = p
+	}
+	return checker
+}
+
+func (checker RequireError) Check(pass *analysis.Pass, inspector *inspector.Inspector) []analysis.Diagnostic {
+	callsByFunc := make(map[funcID][]*callMeta)
+
+	// Stage 1. Collect meta information about any calls inside functions.
+
+	inspector.WithStack([]ast.Node{(*ast.CallExpr)(nil)}, func(node ast.Node, push bool, stack []ast.Node) bool {
+		if !push {
+			return false
+		}
+		if len(stack) < 3 {
+			return true
+		}
+
+		fID := findSurroundingFunc(pass, stack)
+		if fID == nil {
+			return true
+		}
+
+		_, prevIsIfStmt := stack[len(stack)-2].(*ast.IfStmt)
+		_, prevIsAssignStmt := stack[len(stack)-2].(*ast.AssignStmt)
+		_, prevPrevIsIfStmt := stack[len(stack)-3].(*ast.IfStmt)
+		inIfCond := prevIsIfStmt || (prevPrevIsIfStmt && prevIsAssignStmt)
+
+		callExpr := node.(*ast.CallExpr)
+		testifyCall := NewCallMeta(pass, callExpr)
+
+		call := &callMeta{
+			call:         callExpr,
+			testifyCall:  testifyCall,
+			rootIf:       findRootIf(stack),
+			parentIf:     findNearestNode[*ast.IfStmt](stack),
+			parentBlock:  findNearestNode[*ast.BlockStmt](stack),
+			inIfCond:     inIfCond,
+			inNoErrorSeq: false, // Will be filled in below.
+		}
+
+		callsByFunc[*fID] = append(callsByFunc[*fID], call)
+		return testifyCall == nil // Do not support asserts in asserts.
+	})
+
+	// Stage 2. Analyze calls and block context.
+
+	var diagnostics []analysis.Diagnostic
+
+	callsByBlock := map[*ast.BlockStmt][]*callMeta{}
+	for _, calls := range callsByFunc {
+		for _, c := range calls {
+			if b := c.parentBlock; b != nil {
+				callsByBlock[b] = append(callsByBlock[b], c)
+			}
+		}
+	}
+
+	markCallsInNoErrorSequence(callsByBlock)
+
+	for funcInfo, calls := range callsByFunc {
+		for i, c := range calls {
+			if funcInfo.isTestCleanup {
+				continue
+			}
+			if funcInfo.isGoroutine {
+				continue
+			}
+
+			if c.testifyCall == nil {
+				continue
+			}
+			if !c.testifyCall.IsAssert {
+				continue
+			}
+			switch c.testifyCall.Fn.NameFTrimmed {
+			default:
+				continue
+			case "Error", "ErrorIs", "ErrorAs", "EqualError", "ErrorContains", "NoError", "NotErrorIs":
+			}
+
+			if needToSkipBasedOnContext(c, i, calls, callsByBlock) {
+				continue
+			}
+			if p := checker.fnPattern; p != nil && !p.MatchString(c.testifyCall.Fn.Name) {
+				continue
+			}
+
+			diagnostics = append(diagnostics,
+				*newDiagnostic(checker.Name(), c.testifyCall, requireErrorReport, nil))
+		}
+	}
+
+	return diagnostics
+}
+
+func needToSkipBasedOnContext(
+	currCall *callMeta,
+	currCallIndex int,
+	otherCalls []*callMeta,
+	callsByBlock map[*ast.BlockStmt][]*callMeta,
+) bool {
+	if currCall.inNoErrorSeq {
+		// Skip `assert.NoError` sequence.
+		return true
+	}
+
+	if currCall.inIfCond {
+		// Skip assertions in the "if condition".
+		return true
+	}
+
+	if currCall.rootIf != nil {
+		for _, rootCall := range otherCalls {
+			if (rootCall.rootIf == currCall.rootIf) && rootCall.inIfCond {
+				// Skip assertions in the entire if-else[-if] block if any of its "if conditions" contains an assertion.
+				return true
+			}
+		}
+	}
+
+	block := currCall.parentBlock
+	blockCalls := callsByBlock[block]
+	isLastCallInBlock := blockCalls[len(blockCalls)-1] == currCall
+
+	noCallsAfter := true
+
+	_, blockEndWithReturn := block.List[len(block.List)-1].(*ast.ReturnStmt)
+	if !blockEndWithReturn {
+		for i := currCallIndex + 1; i < len(otherCalls); i++ {
+			nextCall := otherCalls[i]
+			nextCallInElseBlock := false
+
+			if pIf := currCall.parentIf; pIf != nil && pIf.Else != nil {
+				ast.Inspect(pIf.Else, func(n ast.Node) bool {
+					if n == nextCall.call {
+						nextCallInElseBlock = true
+						return false
+					}
+					return true
+				})
+			}
+
+			if !nextCallInElseBlock {
+				noCallsAfter = false
+				break
+			}
+		}
+	}
+
+	// Skip assertion if this is the last operation in the test.
+	return isLastCallInBlock && noCallsAfter
+}
+
+func findSurroundingFunc(pass *analysis.Pass, stack []ast.Node) *funcID {
+	for i := len(stack) - 2; i >= 0; i-- {
+		var fType *ast.FuncType
+		var fName string
+		var isTestCleanup bool
+		var isGoroutine bool
+
+		switch fd := stack[i].(type) {
+		case *ast.FuncDecl:
+			fType, fName = fd.Type, fd.Name.Name
+
+			if isTestifySuiteMethod(pass, fd) {
+				if ident := fd.Name; ident != nil && isAfterTestMethod(ident.Name) {
+					isTestCleanup = true
+				}
+			}
+
+		case *ast.FuncLit:
+			fType, fName = fd.Type, "anonymous"
+
+			if i >= 2 { //nolint:nestif
+				if ce, ok := stack[i-1].(*ast.CallExpr); ok {
+					if se, ok := ce.Fun.(*ast.SelectorExpr); ok {
+						isTestCleanup = isTestingTPtr(pass, se.X) && se.Sel != nil && (se.Sel.Name == "Cleanup")
+					}
+
+					if _, ok := stack[i-2].(*ast.GoStmt); ok {
+						isGoroutine = true
+					}
+				}
+			}
+
+		default:
+			continue
+		}
+
+		return &funcID{
+			pos:           fType.Pos(),
+			posStr:        pass.Fset.Position(fType.Pos()).String(),
+			name:          fName,
+			isTestCleanup: isTestCleanup,
+			isGoroutine:   isGoroutine,
+		}
+	}
+	return nil
+}
+
+func findRootIf(stack []ast.Node) *ast.IfStmt {
+	nearestIf, i := findNearestNodeWithIdx[*ast.IfStmt](stack)
+	for ; i > 0; i-- {
+		parent, ok := stack[i-1].(*ast.IfStmt)
+		if ok {
+			nearestIf = parent
+		} else {
+			break
+		}
+	}
+	return nearestIf
+}
+
+func findNearestNode[T ast.Node](stack []ast.Node) (v T) {
+	v, _ = findNearestNodeWithIdx[T](stack)
+	return
+}
+
+func findNearestNodeWithIdx[T ast.Node](stack []ast.Node) (v T, index int) {
+	for i := len(stack) - 2; i >= 0; i-- {
+		if n, ok := stack[i].(T); ok {
+			return n, i
+		}
+	}
+	return
+}
+
+func markCallsInNoErrorSequence(callsByBlock map[*ast.BlockStmt][]*callMeta) {
+	for _, calls := range callsByBlock {
+		for i, c := range calls {
+			if c.testifyCall == nil {
+				continue
+			}
+
+			var prevIsNoError bool
+			if i > 0 {
+				if prev := calls[i-1].testifyCall; prev != nil {
+					prevIsNoError = isNoErrorAssertion(prev.Fn.Name)
+				}
+			}
+
+			var nextIsNoError bool
+			if i < len(calls)-1 {
+				if next := calls[i+1].testifyCall; next != nil {
+					nextIsNoError = isNoErrorAssertion(next.Fn.Name)
+				}
+			}
+
+			if isNoErrorAssertion(c.testifyCall.Fn.Name) && (prevIsNoError || nextIsNoError) {
+				calls[i].inNoErrorSeq = true
+			}
+		}
+	}
+}
+
+type callMeta struct {
+	call         *ast.CallExpr
+	testifyCall  *CallMeta
+	rootIf       *ast.IfStmt // The root `if` in if-else[-if] chain.
+	parentIf     *ast.IfStmt // The nearest `if`, can be equal with rootIf.
+	parentBlock  *ast.BlockStmt
+	inIfCond     bool // True for code like `if assert.ErrorAs(t, err, &target) {`.
+	inNoErrorSeq bool // True for sequence of `assert.NoError` assertions.
+}
+
+type funcID struct {
+	pos           token.Pos
+	posStr        string
+	name          string
+	isTestCleanup bool
+	isGoroutine   bool
+}
+
+func (id funcID) String() string {
+	return fmt.Sprintf("%s at %s", id.name, id.posStr)
+}
+
+func isAfterTestMethod(name string) bool {
+	// https://github.com/stretchr/testify/blob/master/suite/interfaces.go
+	switch name {
+	case "TearDownSuite", "TearDownTest", "AfterTest", "HandleStats", "TearDownSubTest":
+		return true
+	}
+	return false
+}
+
+func isNoErrorAssertion(fnName string) bool {
+	return (fnName == "NoError") || (fnName == "NoErrorf")
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go
new file mode 100644
index 0000000000000000000000000000000000000000..bf84f6378e062df9f6184203ed0ade0948d9dfeb
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go
@@ -0,0 +1,96 @@
+package checkers
+
+import (
+	"fmt"
+	"go/ast"
+	"go/types"
+
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+	"github.com/Antonboom/testifylint/internal/testify"
+)
+
+// SuiteDontUsePkg detects situations like
+//
+//	func (s *MySuite) TestSomething() {
+//		assert.Equal(s.T(), 42, value)
+//	}
+//
+// and requires
+//
+//	func (s *MySuite) TestSomething() {
+//		s.Equal(42, value)
+//	}
+type SuiteDontUsePkg struct{}
+
+// NewSuiteDontUsePkg constructs SuiteDontUsePkg checker.
+func NewSuiteDontUsePkg() SuiteDontUsePkg { return SuiteDontUsePkg{} }
+func (SuiteDontUsePkg) Name() string      { return "suite-dont-use-pkg" }
+
+func (checker SuiteDontUsePkg) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+	if !call.IsPkg {
+		return nil
+	}
+
+	args := call.ArgsRaw
+	if len(args) < 2 {
+		return nil
+	}
+	t := args[0]
+
+	ce, ok := t.(*ast.CallExpr)
+	if !ok {
+		return nil
+	}
+	se, ok := ce.Fun.(*ast.SelectorExpr)
+	if !ok {
+		return nil
+	}
+	if se.X == nil || !implementsTestifySuiteIface(pass, se.X) {
+		return nil
+	}
+	if se.Sel == nil || se.Sel.Name != "T" {
+		return nil
+	}
+	rcv, ok := se.X.(*ast.Ident) // At this point we ensure that `s.T()` is used as the first argument of assertion.
+	if !ok {
+		return nil
+	}
+
+	newSelector := rcv.Name
+	if !call.IsAssert {
+		newSelector += "." + "Require()"
+	}
+
+	msg := fmt.Sprintf("use %s.%s", newSelector, call.Fn.Name)
+	return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{
+		Message: fmt.Sprintf("Replace `%s` with `%s`", call.SelectorXStr, newSelector),
+		TextEdits: []analysis.TextEdit{
+			// Replace package function with suite method.
+			{
+				Pos:     call.Selector.X.Pos(),
+				End:     call.Selector.X.End(),
+				NewText: []byte(newSelector),
+			},
+			// Remove `s.T()`.
+			{
+				Pos:     t.Pos(),
+				End:     args[1].Pos(),
+				NewText: []byte(""),
+			},
+		},
+	})
+}
+
+func implementsTestifySuiteIface(pass *analysis.Pass, rcv ast.Expr) bool {
+	suiteIface := analysisutil.ObjectOf(pass.Pkg, testify.SuitePkgPath, "TestingSuite")
+	if suiteIface == nil {
+		return false
+	}
+
+	return types.Implements(
+		pass.TypesInfo.TypeOf(rcv),
+		suiteIface.Type().Underlying().(*types.Interface),
+	)
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go
new file mode 100644
index 0000000000000000000000000000000000000000..791488b651e9485b95068db29c268c96fdff0a69
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go
@@ -0,0 +1,99 @@
+package checkers
+
+import (
+	"fmt"
+	"go/ast"
+
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+// SuiteExtraAssertCallMode reflects the different modes of operation of the SuiteExtraAssertCall checker.
+type SuiteExtraAssertCallMode int
+
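+// SuiteExtraAssertCallModeRemove simplifies `s.Assert().Equal(...)` to `s.Equal(...)`,
+// while SuiteExtraAssertCallModeRequire enforces the explicit `s.Assert().Equal(...)` form.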
+const (
+	SuiteExtraAssertCallModeRemove SuiteExtraAssertCallMode = iota
+	SuiteExtraAssertCallModeRequire
+)
+
+const DefaultSuiteExtraAssertCallMode = SuiteExtraAssertCallModeRemove
+
+// SuiteExtraAssertCall detects situations like
+//
+//	func (s *MySuite) TestSomething() {
+//		s.Assert().Equal(42, value)
+//	}
+//
+// and requires
+//
+//	func (s *MySuite) TestSomething() {
+//		s.Equal(42, value)
+//	}
+//
+// or vice versa (depending on the configurable mode).
+type SuiteExtraAssertCall struct {
+	mode SuiteExtraAssertCallMode
+}
+
+// NewSuiteExtraAssertCall constructs SuiteExtraAssertCall checker.
+func NewSuiteExtraAssertCall() *SuiteExtraAssertCall {
+	return &SuiteExtraAssertCall{mode: DefaultSuiteExtraAssertCallMode}
+}
+
+func (SuiteExtraAssertCall) Name() string { return "suite-extra-assert-call" }
+
+func (checker *SuiteExtraAssertCall) SetMode(m SuiteExtraAssertCallMode) *SuiteExtraAssertCall {
+	checker.mode = m
+	return checker
+}
+
+func (checker SuiteExtraAssertCall) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+	if call.IsPkg {
+		return nil
+	}
+
+	switch checker.mode {
+	case SuiteExtraAssertCallModeRequire:
+		x, ok := call.Selector.X.(*ast.Ident) // s.True
+		if !ok || x == nil || !implementsTestifySuiteIface(pass, x) {
+			return nil
+		}
+
+		msg := fmt.Sprintf("use an explicit %s.Assert().%s", analysisutil.NodeString(pass.Fset, x), call.Fn.Name)
+		return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{
+			Message: "Add `Assert()` call",
+			TextEdits: []analysis.TextEdit{{
+				Pos:     x.End(),
+				End:     x.End(), // Pure insertion.
+				NewText: []byte(".Assert()"),
+			}},
+		})
+
+	case SuiteExtraAssertCallModeRemove:
+		x, ok := call.Selector.X.(*ast.CallExpr) // s.Assert().True
+		if !ok {
+			return nil
+		}
+
+		se, ok := x.Fun.(*ast.SelectorExpr)
+		if !ok || se == nil || !implementsTestifySuiteIface(pass, se.X) {
+			return nil
+		}
+		if se.Sel == nil || se.Sel.Name != "Assert" {
+			return nil
+		}
+
+		msg := fmt.Sprintf("need to simplify the assertion to %s.%s", analysisutil.NodeString(pass.Fset, se.X), call.Fn.Name)
+		return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{
+			Message: "Remove `Assert()` call",
+			TextEdits: []analysis.TextEdit{{
+				Pos:     se.Sel.Pos(),
+				End:     x.End() + 1, // +1 for dot.
+				NewText: []byte(""),
+			}},
+		})
+	}
+
+	return nil
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go
new file mode 100644
index 0000000000000000000000000000000000000000..5cadc93ada60e4652b477acc3d727344c6805525
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go
@@ -0,0 +1,130 @@
+package checkers
+
+import (
+	"fmt"
+	"go/ast"
+	"strings"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/ast/inspector"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+	"github.com/Antonboom/testifylint/internal/testify"
+)
+
+// SuiteTHelper requires t.Helper() call in suite helpers:
+//
+//	func (s *RoomSuite) assertRoomRound(roundID RoundID) {
+//		s.T().Helper()
+//		s.Equal(roundID, s.getRoom().CurrentRound.ID)
+//	}
+type SuiteTHelper struct{}
+
+// NewSuiteTHelper constructs SuiteTHelper checker.
+func NewSuiteTHelper() SuiteTHelper { return SuiteTHelper{} }
+func (SuiteTHelper) Name() string   { return "suite-thelper" }
+
+func (checker SuiteTHelper) Check(pass *analysis.Pass, inspector *inspector.Inspector) (diagnostics []analysis.Diagnostic) {
+	inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil)}, func(node ast.Node) {
+		fd := node.(*ast.FuncDecl)
+		if !isTestifySuiteMethod(pass, fd) {
+			return
+		}
+
+		if ident := fd.Name; ident == nil || isTestMethod(ident.Name) || isServiceMethod(ident.Name) {
+			return
+		}
+
+		if !containsSuiteAssertions(pass, fd) {
+			return
+		}
+
+		rcv := fd.Recv.List[0]
+		if len(rcv.Names) != 1 || rcv.Names[0] == nil {
+			return
+		}
+		rcvName := rcv.Names[0].Name
+
+		helperCallStr := fmt.Sprintf("%s.T().Helper()", rcvName)
+
+		firstStmt := fd.Body.List[0]
+		if analysisutil.NodeString(pass.Fset, firstStmt) == helperCallStr {
+			return
+		}
+
+		msg := "suite helper method must start with " + helperCallStr
+		d := newDiagnostic(checker.Name(), fd, msg, &analysis.SuggestedFix{
+			Message: fmt.Sprintf("Insert `%s`", helperCallStr),
+			TextEdits: []analysis.TextEdit{
+				{
+					Pos:     firstStmt.Pos(),
+					End:     firstStmt.Pos(), // Pure insertion.
+					NewText: []byte(helperCallStr + "\n\n"),
+				},
+			},
+		})
+		diagnostics = append(diagnostics, *d)
+	})
+	return diagnostics
+}
+
+func isTestifySuiteMethod(pass *analysis.Pass, fDecl *ast.FuncDecl) bool {
+	if fDecl.Recv == nil || len(fDecl.Recv.List) != 1 {
+		return false
+	}
+
+	rcv := fDecl.Recv.List[0]
+	return implementsTestifySuiteIface(pass, rcv.Type)
+}
+
+func isTestMethod(name string) bool {
+	return strings.HasPrefix(name, "Test")
+}
+
+func isServiceMethod(name string) bool {
+	// https://github.com/stretchr/testify/blob/master/suite/interfaces.go
+	switch name {
+	case "T", "SetT", "SetS", "SetupSuite", "SetupTest", "TearDownSuite", "TearDownTest",
+		"BeforeTest", "AfterTest", "HandleStats", "SetupSubTest", "TearDownSubTest":
+		return true
+	}
+	return false
+}
+
+func containsSuiteAssertions(pass *analysis.Pass, fn *ast.FuncDecl) bool {
+	if fn.Body == nil {
+		return false
+	}
+
+	for _, s := range fn.Body.List {
+		if isSuiteAssertion(pass, s) {
+			return true
+		}
+	}
+	return false
+}
+
+func isSuiteAssertion(pass *analysis.Pass, stmt ast.Stmt) bool {
+	expr, ok := stmt.(*ast.ExprStmt)
+	if !ok {
+		return false
+	}
+
+	ce, ok := expr.X.(*ast.CallExpr)
+	if !ok {
+		return false
+	}
+
+	se, ok := ce.Fun.(*ast.SelectorExpr)
+	if !ok || se.Sel == nil {
+		return false
+	}
+
+	if sel, ok := pass.TypesInfo.Selections[se]; ok {
+		pkg := sel.Obj().Pkg()
+		isAssert := analysisutil.IsPkg(pkg, testify.AssertPkgName, testify.AssertPkgPath)
+		isRequire := analysisutil.IsPkg(pkg, testify.RequirePkgName, testify.RequirePkgPath)
+		return isAssert || isRequire
+	}
+	return false
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go
new file mode 100644
index 0000000000000000000000000000000000000000..669f9d187d9c0c2840e1a24000ec7c7d6556c412
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go
@@ -0,0 +1,71 @@
+package checkers
+
+import (
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+// UselessAssert detects useless asserts like
+//
+// 1) Asserting of the same variable
+//
+//	assert.Equal(t, tt.value, tt.value)
+//	assert.ElementsMatch(t, users, users)
+//	...
+//
+// 2) Open for contribution...
+type UselessAssert struct{}
+
+// NewUselessAssert constructs UselessAssert checker.
+func NewUselessAssert() UselessAssert { return UselessAssert{} }
+func (UselessAssert) Name() string    { return "useless-assert" }
+
+func (checker UselessAssert) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+	switch call.Fn.NameFTrimmed {
+	case
+		"Contains",
+		"ElementsMatch",
+		"Equal",
+		"EqualExportedValues",
+		"EqualValues",
+		"ErrorAs",
+		"ErrorIs",
+		"Exactly",
+		"Greater",
+		"GreaterOrEqual",
+		"Implements",
+		"InDelta",
+		"InDeltaMapValues",
+		"InDeltaSlice",
+		"InEpsilon",
+		"InEpsilonSlice",
+		"IsType",
+		"JSONEq",
+		"Less",
+		"LessOrEqual",
+		"NotEqual",
+		"NotEqualValues",
+		"NotErrorIs",
+		"NotRegexp",
+		"NotSame",
+		"NotSubset",
+		"Regexp",
+		"Same",
+		"Subset",
+		"WithinDuration",
+		"YAMLEq":
+	default:
+		return nil
+	}
+
+	if len(call.Args) < 2 {
+		return nil
+	}
+	first, second := call.Args[0], call.Args[1]
+
+	if analysisutil.NodeString(pass.Fset, first) == analysisutil.NodeString(pass.Fset, second) {
+		return newDiagnostic(checker.Name(), call, "asserting of the same variable", nil)
+	}
+	return nil
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/config/config.go b/vendor/github.com/Antonboom/testifylint/internal/config/config.go
new file mode 100644
index 0000000000000000000000000000000000000000..7eba0ea3285249172dcb59e4869025b5c472f312
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/config/config.go
@@ -0,0 +1,111 @@
+package config
+
+import (
+	"errors"
+	"flag"
+	"fmt"
+
+	"github.com/Antonboom/testifylint/internal/checkers"
+)
+
+// NewDefault builds default testifylint config.
+func NewDefault() Config {
+	return Config{
+		EnableAll:        false,
+		DisabledCheckers: nil,
+		DisableAll:       false,
+		EnabledCheckers:  nil,
+		ExpectedActual: ExpectedActualConfig{
+			ExpVarPattern: RegexpValue{checkers.DefaultExpectedVarPattern},
+		},
+		RequireError: RequireErrorConfig{
+			FnPattern: RegexpValue{nil},
+		},
+		SuiteExtraAssertCall: SuiteExtraAssertCallConfig{
+			Mode: checkers.DefaultSuiteExtraAssertCallMode,
+		},
+	}
+}
+
+// Config implements testifylint configuration.
+type Config struct {
+	EnableAll        bool
+	DisabledCheckers KnownCheckersValue
+	DisableAll       bool
+	EnabledCheckers  KnownCheckersValue
+
+	BoolCompare          BoolCompareConfig
+	ExpectedActual       ExpectedActualConfig
+	RequireError         RequireErrorConfig
+	SuiteExtraAssertCall SuiteExtraAssertCallConfig
+}
+
+// BoolCompareConfig implements configuration of checkers.BoolCompare.
+type BoolCompareConfig struct {
+	IgnoreCustomTypes bool
+}
+
+// ExpectedActualConfig implements configuration of checkers.ExpectedActual.
+type ExpectedActualConfig struct {
+	ExpVarPattern RegexpValue
+}
+
+// RequireErrorConfig implements configuration of checkers.RequireError.
+type RequireErrorConfig struct {
+	FnPattern RegexpValue
+}
+
+// SuiteExtraAssertCallConfig implements configuration of checkers.SuiteExtraAssertCall.
+type SuiteExtraAssertCallConfig struct {
+	Mode checkers.SuiteExtraAssertCallMode
+}
+
+func (cfg Config) Validate() error {
+	if cfg.EnableAll {
+		if cfg.DisableAll {
+			return errors.New("enable-all and disable-all options must not be combined")
+		}
+
+		if len(cfg.EnabledCheckers) != 0 {
+			return errors.New("enable-all and enable options must not be combined")
+		}
+	}
+
+	if cfg.DisableAll {
+		if len(cfg.DisabledCheckers) != 0 {
+			return errors.New("disable-all and disable options must not be combined")
+		}
+
+		if len(cfg.EnabledCheckers) == 0 {
+			return errors.New("all checkers were disabled, but none were enabled: at least one checker must be enabled")
+		}
+	}
+
+	for _, checker := range cfg.DisabledCheckers {
+		if cfg.EnabledCheckers.Contains(checker) {
+			return fmt.Errorf("checker %q is both disabled and enabled", checker)
+		}
+	}
+
+	return nil
+}
+
+// BindToFlags binds Config fields to the corresponding flags.
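+//
+// A minimal usage sketch (not part of the upstream package):
+//
+//	cfg := NewDefault()
+//	fs := flag.NewFlagSet("testifylint", flag.ContinueOnError)
+//	BindToFlags(&cfg, fs)
+//	if err := fs.Parse([]string{"-enable-all", "-suite-extra-assert-call.mode=require"}); err != nil {
+//		// handle the flag parsing error
+//	}
+//	if err := cfg.Validate(); err != nil {
+//		// handle conflicting options, e.g. enable-all combined with disable-all
+//	}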
+func BindToFlags(cfg *Config, fs *flag.FlagSet) {
+	fs.BoolVar(&cfg.EnableAll, "enable-all", false, "enable all checkers")
+	fs.Var(&cfg.DisabledCheckers, "disable", "comma separated list of disabled checkers (to exclude from enabled by default)")
+	fs.BoolVar(&cfg.DisableAll, "disable-all", false, "disable all checkers")
+	fs.Var(&cfg.EnabledCheckers, "enable", "comma separated list of enabled checkers (in addition to enabled by default)")
+
+	fs.BoolVar(&cfg.BoolCompare.IgnoreCustomTypes, "bool-compare.ignore-custom-types", false,
+		"ignore user defined types (over builtin bool)")
+	fs.Var(&cfg.ExpectedActual.ExpVarPattern, "expected-actual.pattern", "regexp for expected variable name")
+	fs.Var(&cfg.RequireError.FnPattern, "require-error.fn-pattern", "regexp for error assertions that should be analyzed (others are skipped)")
+	fs.Var(NewEnumValue(suiteExtraAssertCallModeAsString, &cfg.SuiteExtraAssertCall.Mode),
+		"suite-extra-assert-call.mode", "to require or remove extra Assert() call")
+}
+
+var suiteExtraAssertCallModeAsString = map[string]checkers.SuiteExtraAssertCallMode{
+	"remove":  checkers.SuiteExtraAssertCallModeRemove,
+	"require": checkers.SuiteExtraAssertCallModeRequire,
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/config/flag_value_types.go b/vendor/github.com/Antonboom/testifylint/internal/config/flag_value_types.go
new file mode 100644
index 0000000000000000000000000000000000000000..5b08ec47b1739966edc7be2d3575ea25022f2283
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/config/flag_value_types.go
@@ -0,0 +1,114 @@
+package config
+
+import (
+	"flag"
+	"fmt"
+	"regexp"
+	"sort"
+	"strings"
+
+	"github.com/Antonboom/testifylint/internal/checkers"
+)
+
+var (
+	_ flag.Value = (*KnownCheckersValue)(nil)
+	_ flag.Value = (*RegexpValue)(nil)
+	_ flag.Value = (*EnumValue[checkers.SuiteExtraAssertCallMode])(nil)
+)
+
+// KnownCheckersValue implements a comma-separated list of testify checkers.
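+//
+// A minimal usage sketch (checker names are validated against checkers.IsKnown):
+//
+//	var v KnownCheckersValue
+//	if err := v.Set("require-error,suite-thelper"); err != nil {
+//		// one of the names is not a known checker
+//	}
+//	_ = v.Contains("require-error") // true if Set succeeded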
+type KnownCheckersValue []string
+
+func (kcv KnownCheckersValue) String() string {
+	return strings.Join(kcv, ",")
+}
+
+func (kcv *KnownCheckersValue) Set(v string) error {
+	chckrs := strings.Split(v, ",")
+	for _, checkerName := range chckrs {
+		if ok := checkers.IsKnown(checkerName); !ok {
+			return fmt.Errorf("unknown checker %q", checkerName)
+		}
+	}
+
+	*kcv = chckrs
+	return nil
+}
+
+func (kcv KnownCheckersValue) Contains(v string) bool {
+	for _, checker := range kcv {
+		if checker == v {
+			return true
+		}
+	}
+	return false
+}
+
+// RegexpValue is a wrapper that allows a regexp.Regexp to be used as a flag.FlagSet value.
+// The original regexp is available through RegexpValue.Regexp.
+type RegexpValue struct {
+	*regexp.Regexp
+}
+
+func (rv RegexpValue) String() string {
+	if rv.Regexp == nil {
+		return ""
+	}
+	return rv.Regexp.String()
+}
+
+func (rv *RegexpValue) Set(v string) error {
+	compiled, err := regexp.Compile(v)
+	if err != nil {
+		return err
+	}
+
+	rv.Regexp = compiled
+	return nil
+}
+
+// EnumValue is a generic type that allows user-defined constants to be used as flag.FlagSet values.
+type EnumValue[EnumT comparable] struct {
+	mapping map[string]EnumT
+	keys    []string
+	dst     *EnumT
+}
+
+// NewEnumValue takes the "enum-value-name to enum-value" mapping and a destination for the value passed through the CLI.
+// Returns an EnumValue instance suitable for flag.FlagSet.Var.
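+//
+// A usage sketch mirroring how config.BindToFlags wires the mode flag:
+//
+//	var mode checkers.SuiteExtraAssertCallMode
+//	fs := flag.NewFlagSet("testifylint", flag.ContinueOnError)
+//	fs.Var(NewEnumValue(map[string]checkers.SuiteExtraAssertCallMode{
+//		"remove":  checkers.SuiteExtraAssertCallModeRemove,
+//		"require": checkers.SuiteExtraAssertCallModeRequire,
+//	}, &mode), "suite-extra-assert-call.mode", "to require or remove extra Assert() call")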
+func NewEnumValue[EnumT comparable](mapping map[string]EnumT, dst *EnumT) *EnumValue[EnumT] {
+	keys := make([]string, 0, len(mapping))
+	for k := range mapping {
+		keys = append(keys, k)
+	}
+	sort.Strings(keys)
+
+	return &EnumValue[EnumT]{
+		mapping: mapping,
+		keys:    keys,
+		dst:     dst,
+	}
+}
+
+func (e EnumValue[EnumT]) String() string {
+	if e.dst == nil {
+		return ""
+	}
+
+	for k, v := range e.mapping {
+		if v == *e.dst {
+			return k
+		}
+	}
+	return ""
+}
+
+func (e *EnumValue[EnumT]) Set(s string) error {
+	v, ok := e.mapping[s]
+	if !ok {
+		return fmt.Errorf("use one of (%v)", strings.Join(e.keys, " | "))
+	}
+
+	*e.dst = v
+	return nil
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/testify/const.go b/vendor/github.com/Antonboom/testifylint/internal/testify/const.go
new file mode 100644
index 0000000000000000000000000000000000000000..3476e40402bdcd379b603ff5950908cb4f79aa49
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/testify/const.go
@@ -0,0 +1,17 @@
+package testify
+
+const (
+	ModulePath = "github.com/stretchr/testify"
+
+	AssertPkgName  = "assert"
+	HTTPPkgName    = "http"
+	MockPkgName    = "mock"
+	RequirePkgName = "require"
+	SuitePkgName   = "suite"
+
+	AssertPkgPath  = ModulePath + "/" + AssertPkgName
+	HTTPPkgPath    = ModulePath + "/" + HTTPPkgName
+	MockPkgPath    = ModulePath + "/" + MockPkgName
+	RequirePkgPath = ModulePath + "/" + RequirePkgName
+	SuitePkgPath   = ModulePath + "/" + SuitePkgName
+)
diff --git a/vendor/github.com/Crocmagnon/fatcontext/LICENSE b/vendor/github.com/Crocmagnon/fatcontext/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..96f153ca4216f1d93ea9f9a13b834005bf6b7954
--- /dev/null
+++ b/vendor/github.com/Crocmagnon/fatcontext/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 Gabriel Augendre
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/Crocmagnon/fatcontext/pkg/analyzer/analyzer.go b/vendor/github.com/Crocmagnon/fatcontext/pkg/analyzer/analyzer.go
new file mode 100644
index 0000000000000000000000000000000000000000..7bb8fd42828f667d80785466750b7635e92f1cd2
--- /dev/null
+++ b/vendor/github.com/Crocmagnon/fatcontext/pkg/analyzer/analyzer.go
@@ -0,0 +1,113 @@
+package analyzer
+
+import (
+	"bytes"
+	"errors"
+	"fmt"
+	"go/ast"
+	"go/printer"
+	"go/token"
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/inspector"
+)
+
+var Analyzer = &analysis.Analyzer{
+	Name:     "fatcontext",
+	Doc:      "detects nested contexts in loops",
+	Run:      run,
+	Requires: []*analysis.Analyzer{inspect.Analyzer},
+}
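+
+// Illustrative example of the pattern the analyzer reports (a sketch, not
+// upstream documentation):
+//
+//	ctx := context.Background()
+//	for _, msg := range messages {
+//		ctx = context.WithValue(ctx, loggerKey, msg) // nested context in loop
+//	}
+//
+// The suggested fix replaces `=` with `:=`, so the derived context is scoped to
+// a single iteration instead of accumulating across the whole loop.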
+
+var errUnknown = errors.New("unknown node type")
+
+func run(pass *analysis.Pass) (interface{}, error) {
+	inspctr := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+	nodeFilter := []ast.Node{
+		(*ast.ForStmt)(nil),
+		(*ast.RangeStmt)(nil),
+	}
+
+	inspctr.Preorder(nodeFilter, func(node ast.Node) {
+		body, err := getBody(node)
+		if err != nil {
+			return
+		}
+
+		for _, stmt := range body.List {
+			assignStmt, ok := stmt.(*ast.AssignStmt)
+			if !ok {
+				continue
+			}
+
+			t := pass.TypesInfo.TypeOf(assignStmt.Lhs[0])
+			if t == nil {
+				continue
+			}
+
+			if t.String() != "context.Context" {
+				continue
+			}
+
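+			// A `:=` definition introduces a new variable scoped to the loop body,
+			// so it is not the reassignment pattern this analyzer reports; stop
+			// inspecting this loop.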
+			if assignStmt.Tok == token.DEFINE {
+				break
+			}
+
+			suggestedStmt := ast.AssignStmt{
+				Lhs:    assignStmt.Lhs,
+				TokPos: assignStmt.TokPos,
+				Tok:    token.DEFINE,
+				Rhs:    assignStmt.Rhs,
+			}
+			suggested, err := render(pass.Fset, &suggestedStmt)
+
+			var fixes []analysis.SuggestedFix
+			if err == nil {
+				fixes = append(fixes, analysis.SuggestedFix{
+					Message: "replace `=` with `:=`",
+					TextEdits: []analysis.TextEdit{
+						{
+							Pos:     assignStmt.Pos(),
+							End:     assignStmt.End(),
+							NewText: []byte(suggested),
+						},
+					},
+				})
+			}
+
+			pass.Report(analysis.Diagnostic{
+				Pos:            assignStmt.Pos(),
+				Message:        "nested context in loop",
+				SuggestedFixes: fixes,
+			})
+
+			break
+		}
+	})
+
+	return nil, nil
+}
+
+func getBody(node ast.Node) (*ast.BlockStmt, error) {
+	forStmt, ok := node.(*ast.ForStmt)
+	if ok {
+		return forStmt.Body, nil
+	}
+
+	rangeStmt, ok := node.(*ast.RangeStmt)
+	if ok {
+		return rangeStmt.Body, nil
+	}
+
+	return nil, errUnknown
+}
+
+// render returns the pretty-printed source of the given node.
+func render(fset *token.FileSet, x interface{}) (string, error) {
+	var buf bytes.Buffer
+	if err := printer.Fprint(&buf, fset, x); err != nil {
+		return "", fmt.Errorf("printing node: %w", err)
+	}
+	return buf.String(), nil
+}
diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer/analyzer.go b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer/analyzer.go
index d0cd2d5bb46ba2bb3edaffce9a7a7137b5c7c0a8..b490f1c640e1eb387b61b1c6059a0be77b63b08a 100644
--- a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer/analyzer.go
+++ b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer/analyzer.go
@@ -12,16 +12,17 @@ import (
 	"golang.org/x/tools/go/analysis/passes/inspect"
 	"golang.org/x/tools/go/ast/inspector"
 
-	"github.com/GaijinEntertainment/go-exhaustruct/v3/internal/fields"
+	"github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment"
 	"github.com/GaijinEntertainment/go-exhaustruct/v3/internal/pattern"
+	"github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure"
 )
 
 type analyzer struct {
 	include pattern.List `exhaustruct:"optional"`
 	exclude pattern.List `exhaustruct:"optional"`
 
-	fieldsCache   map[types.Type]fields.StructFields
-	fieldsCacheMu sync.RWMutex `exhaustruct:"optional"`
+	structFields structure.FieldsCache `exhaustruct:"optional"`
+	comments     comment.Cache         `exhaustruct:"optional"`
 
 	typeProcessingNeed   map[string]bool
 	typeProcessingNeedMu sync.RWMutex `exhaustruct:"optional"`
@@ -29,8 +30,8 @@ type analyzer struct {
 
 func NewAnalyzer(include, exclude []string) (*analysis.Analyzer, error) {
 	a := analyzer{
-		fieldsCache:        make(map[types.Type]fields.StructFields),
 		typeProcessingNeed: make(map[string]bool),
+		comments:           comment.Cache{},
 	}
 
 	var err error
@@ -74,12 +75,7 @@ Anonymous structs can be matched by '<anonymous>' alias.
 func (a *analyzer) run(pass *analysis.Pass) (any, error) {
 	insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) //nolint:forcetypeassert
 
-	insp.WithStack(
-		[]ast.Node{
-			(*ast.CompositeLit)(nil),
-		},
-		a.newVisitor(pass),
-	)
+	insp.WithStack([]ast.Node{(*ast.CompositeLit)(nil)}, a.newVisitor(pass))
 
 	return nil, nil //nolint:nilnil
 }
@@ -115,7 +111,10 @@ func (a *analyzer) newVisitor(pass *analysis.Pass) func(n ast.Node, push bool, s
 			}
 		}
 
-		pos, msg := a.processStruct(pass, lit, structTyp, typeInfo)
+		file := a.comments.Get(pass.Fset, stack[0].(*ast.File)) //nolint:forcetypeassert
+		rc := getCompositeLitRelatedComments(stack, file)
+		pos, msg := a.processStruct(pass, lit, structTyp, typeInfo, rc)
+
 		if pos != nil {
 			pass.Reportf(*pos, msg)
 		}
@@ -124,6 +123,35 @@ func (a *analyzer) newVisitor(pass *analysis.Pass) func(n ast.Node, push bool, s
 	}
 }
 
+// getCompositeLitRelatedComments returns all comments that are related to the checked node. We
+// have to traverse the stack manually, as go/ast does not associate comments with
+// [ast.CompositeLit].
+func getCompositeLitRelatedComments(stack []ast.Node, cm ast.CommentMap) []*ast.CommentGroup {
+	comments := make([]*ast.CommentGroup, 0)
+
+	for i := len(stack) - 1; i >= 0; i-- {
+		node := stack[i]
+
+		switch node.(type) {
+		case *ast.CompositeLit, // stack[len(stack)-1]
+			*ast.ReturnStmt, // return ...
+			*ast.IndexExpr,  // map[enum]...{...}[key]
+			*ast.CallExpr,   // myfunc(map...)
+			*ast.UnaryExpr,  // &map...
+			*ast.AssignStmt, // variable assignment (without var keyword)
+			*ast.DeclStmt,   // var declaration, parent of *ast.GenDecl
+			*ast.GenDecl,    // var declaration, parent of *ast.ValueSpec
+			*ast.ValueSpec:  // var declaration
+			comments = append(comments, cm[node]...)
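+// findRootIf returns the outermost `if` of the if-else[-if] chain enclosing the
+// node on top of the stack, or nil if the node is not inside an `if` at all.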
+
+		default:
+			return comments
+		}
+	}
+
+	return comments
+}
+
 func getStructType(pass *analysis.Pass, lit *ast.CompositeLit) (*types.Struct, *TypeInfo, bool) {
 	switch typ := pass.TypesInfo.TypeOf(lit).(type) {
 	case *types.Named: // named type
@@ -179,8 +207,15 @@ func (a *analyzer) processStruct(
 	lit *ast.CompositeLit,
 	structTyp *types.Struct,
 	info *TypeInfo,
+	comments []*ast.CommentGroup,
 ) (*token.Pos, string) {
-	if !a.shouldProcessType(info) {
+	shouldProcess := a.shouldProcessType(info)
+
+	if shouldProcess && comment.HasDirective(comments, comment.DirectiveIgnore) {
+		return nil, ""
+	}
+
+	if !shouldProcess && !comment.HasDirective(comments, comment.DirectiveEnforce) {
 		return nil, ""
 	}
 
@@ -233,24 +268,12 @@ func (a *analyzer) shouldProcessType(info *TypeInfo) bool {
 	return res
 }
 
-//revive:disable-next-line:unused-receiver
 func (a *analyzer) litSkippedFields(
 	lit *ast.CompositeLit,
 	typ *types.Struct,
 	onlyExported bool,
-) fields.StructFields {
-	a.fieldsCacheMu.RLock()
-	f, ok := a.fieldsCache[typ]
-	a.fieldsCacheMu.RUnlock()
-
-	if !ok {
-		a.fieldsCacheMu.Lock()
-		f = fields.NewStructFields(typ)
-		a.fieldsCache[typ] = f
-		a.fieldsCacheMu.Unlock()
-	}
-
-	return f.SkippedFields(lit, onlyExported)
+) structure.Fields {
+	return a.structFields.Get(typ).Skipped(lit, onlyExported)
 }
 
 type TypeInfo struct {
diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment/cache.go b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment/cache.go
new file mode 100644
index 0000000000000000000000000000000000000000..88edef638a2da9daeb169976cc5f0df35e49d324
--- /dev/null
+++ b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment/cache.go
@@ -0,0 +1,35 @@
+package comment
+
+import (
+	"go/ast"
+	"go/token"
+	"sync"
+)
+
+type Cache struct {
+	comments map[*ast.File]ast.CommentMap
+	mu       sync.RWMutex
+}
+
+// Get returns the comment map for a given file. If the map has not been built
+// yet, it creates and caches a new one.
+func (c *Cache) Get(fset *token.FileSet, f *ast.File) ast.CommentMap {
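+	// Fast path: return an already-built comment map under the read lock.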
+	c.mu.RLock()
+	if cm, ok := c.comments[f]; ok {
+		c.mu.RUnlock()
+		return cm
+	}
+	c.mu.RUnlock()
+
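+	// Slow path: build the map under the write lock. A concurrent caller that
+	// also missed may rebuild it, which is harmless since the result is equivalent.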
+	c.mu.Lock()
+	defer c.mu.Unlock()
+
+	if c.comments == nil {
+		c.comments = make(map[*ast.File]ast.CommentMap)
+	}
+
+	cm := ast.NewCommentMap(fset, f, f.Comments)
+	c.comments[f] = cm
+
+	return cm
+}
diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment/directive.go b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment/directive.go
new file mode 100644
index 0000000000000000000000000000000000000000..a39a8076fa61003d62d030d3e99069165fcc69b0
--- /dev/null
+++ b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment/directive.go
@@ -0,0 +1,28 @@
+package comment
+
+import (
+	"go/ast"
+	"strings"
+)
+
+type Directive string
+
+const (
+	prefix                     = `//exhaustruct:`
+	DirectiveIgnore  Directive = prefix + `ignore`
+	DirectiveEnforce Directive = prefix + `enforce`
+)
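+
+// Directives are plain comments placed on or directly above the statement that
+// contains the composite literal, for example (illustrative):
+//
+//	//exhaustruct:ignore
+//	cfg := Config{Host: "localhost"}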
+
+// HasDirective reports whether the given list of comments contains the expected directive.
+func HasDirective(comments []*ast.CommentGroup, expected Directive) bool {
+	for _, cg := range comments {
+		for _, commentLine := range cg.List {
+			if strings.HasPrefix(commentLine.Text, string(expected)) {
+				return true
+			}
+		}
+	}
+
+	return false
+}
diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure/fields-cache.go b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure/fields-cache.go
new file mode 100644
index 0000000000000000000000000000000000000000..12a3796926b3920dfc5dd5d846ac3cdb1b3101ec
--- /dev/null
+++ b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure/fields-cache.go
@@ -0,0 +1,35 @@
+package structure
+
+import (
+	"go/types"
+	"sync"
+)
+
+type FieldsCache struct {
+	fields map[*types.Struct]Fields
+	mu     sync.RWMutex
+}
+
+// Get returns the struct fields for a given type. If the fields have not been
+// cached yet, it creates them from the type definition.
+func (c *FieldsCache) Get(typ *types.Struct) Fields {
+	c.mu.RLock()
+	fields, ok := c.fields[typ]
+	c.mu.RUnlock()
+
+	if ok {
+		return fields
+	}
+
+	c.mu.Lock()
+	defer c.mu.Unlock()
+
+	if c.fields == nil {
+		c.fields = make(map[*types.Struct]Fields)
+	}
+
+	fields = NewFields(typ)
+	c.fields[typ] = fields
+
+	return fields
+}
diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/fields/struct.go b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure/fields.go
similarity index 64%
rename from vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/fields/struct.go
rename to vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure/fields.go
index af2390e8749779342878c327040d39c5b01a04d0..b6b1a48c87292298e02829edd29f1b5f97db8c7a 100644
--- a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/fields/struct.go
+++ b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure/fields.go
@@ -1,33 +1,34 @@
-package fields
+package structure
 
 import (
 	"go/ast"
 	"go/types"
 	"reflect"
+	"strings"
 )
 
 const (
-	TagName          = "exhaustruct"
-	OptionalTagValue = "optional"
+	tagName          = "exhaustruct"
+	optionalTagValue = "optional"
 )
 
-type StructField struct {
+type Field struct {
 	Name     string
 	Exported bool
 	Optional bool
 }
 
-type StructFields []*StructField
+type Fields []*Field
 
-// NewStructFields creates a new [StructFields] from a given struct type.
-// StructFields items are listed in order they appear in the struct.
-func NewStructFields(strct *types.Struct) StructFields {
-	sf := make(StructFields, 0, strct.NumFields())
+// NewFields creates a new [Fields] from a given struct type.
+// Fields items are listed in order they appear in the struct.
+func NewFields(strct *types.Struct) Fields {
+	sf := make(Fields, 0, strct.NumFields())
 
 	for i := 0; i < strct.NumFields(); i++ {
 		f := strct.Field(i)
 
-		sf = append(sf, &StructField{
+		sf = append(sf, &Field{
 			Name:     f.Name(),
 			Exported: f.Exported(),
 			Optional: HasOptionalTag(strct.Tag(i)),
@@ -38,27 +39,29 @@ func NewStructFields(strct *types.Struct) StructFields {
 }
 
 func HasOptionalTag(tags string) bool {
-	return reflect.StructTag(tags).Get(TagName) == OptionalTagValue
+	return reflect.StructTag(tags).Get(tagName) == optionalTagValue
 }
 
 // String returns a comma-separated list of field names.
-func (sf StructFields) String() (res string) {
+func (sf Fields) String() string {
+	b := strings.Builder{}
+
 	for i := 0; i < len(sf); i++ {
-		if res != "" {
-			res += ", "
+		if b.Len() != 0 {
+			b.WriteString(", ")
 		}
 
-		res += sf[i].Name
+		b.WriteString(sf[i].Name)
 	}
 
-	return res
+	return b.String()
 }
 
-// SkippedFields returns a list of fields that are not present in the given
+// Skipped returns a list of fields that are not present in the given
 // literal, but expected to.
 //
 //revive:disable-next-line:cyclomatic
-func (sf StructFields) SkippedFields(lit *ast.CompositeLit, onlyExported bool) StructFields {
+func (sf Fields) Skipped(lit *ast.CompositeLit, onlyExported bool) Fields {
 	if len(lit.Elts) != 0 && !isNamedLiteral(lit) {
 		if len(lit.Elts) == len(sf) {
 			return nil
@@ -68,7 +71,7 @@ func (sf StructFields) SkippedFields(lit *ast.CompositeLit, onlyExported bool) S
 	}
 
 	em := sf.existenceMap()
-	res := make(StructFields, 0, len(sf))
+	res := make(Fields, 0, len(sf))
 
 	for i := 0; i < len(lit.Elts); i++ {
 		kv, ok := lit.Elts[i].(*ast.KeyValueExpr)
@@ -99,7 +102,7 @@ func (sf StructFields) SkippedFields(lit *ast.CompositeLit, onlyExported bool) S
 	return res
 }
 
-func (sf StructFields) existenceMap() map[string]bool {
+func (sf Fields) existenceMap() map[string]bool {
 	m := make(map[string]bool, len(sf))
 
 	for i := 0; i < len(sf); i++ {
diff --git a/vendor/github.com/Masterminds/semver/.travis.yml b/vendor/github.com/Masterminds/semver/.travis.yml
deleted file mode 100644
index 096369d44d9c2ca384cbb0ee05cb6278112fe022..0000000000000000000000000000000000000000
--- a/vendor/github.com/Masterminds/semver/.travis.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-language: go
-
-go:
-  - 1.6.x
-  - 1.7.x
-  - 1.8.x
-  - 1.9.x
-  - 1.10.x
-  - 1.11.x
-  - 1.12.x
-  - tip
-
-# Setting sudo access to false will let Travis CI use containers rather than
-# VMs to run the tests. For more details see:
-# - http://docs.travis-ci.com/user/workers/container-based-infrastructure/
-# - http://docs.travis-ci.com/user/workers/standard-infrastructure/
-sudo: false
-
-script:
-  - make setup
-  - make test
-
-notifications:
-  webhooks:
-    urls:
-      - https://webhooks.gitter.im/e/06e3328629952dabe3e0
-    on_success: change  # options: [always|never|change] default: always
-    on_failure: always  # options: [always|never|change] default: always
-    on_start: never     # options: [always|never|change] default: always
diff --git a/vendor/github.com/Masterminds/semver/CHANGELOG.md b/vendor/github.com/Masterminds/semver/CHANGELOG.md
deleted file mode 100644
index e405c9a84d9056264d1ed8a740f7f92bb2e89978..0000000000000000000000000000000000000000
--- a/vendor/github.com/Masterminds/semver/CHANGELOG.md
+++ /dev/null
@@ -1,109 +0,0 @@
-# 1.5.0 (2019-09-11)
-
-## Added
-
-- #103: Add basic fuzzing for `NewVersion()` (thanks @jesse-c)
-
-## Changed
-
-- #82: Clarify wildcard meaning in range constraints and update tests for it (thanks @greysteil)
-- #83: Clarify caret operator range for pre-1.0.0 dependencies (thanks @greysteil)
-- #72: Adding docs comment pointing to vert for a cli
-- #71: Update the docs on pre-release comparator handling
-- #89: Test with new go versions (thanks @thedevsaddam)
-- #87: Added $ to ValidPrerelease for better validation (thanks @jeremycarroll)
-
-## Fixed
-
-- #78: Fix unchecked error in example code (thanks @ravron)
-- #70: Fix the handling of pre-releases and the 0.0.0 release edge case
-- #97: Fixed copyright file for proper display on GitHub
-- #107: Fix handling prerelease when sorting alphanum and num 
-- #109: Fixed where Validate sometimes returns wrong message on error
-
-# 1.4.2 (2018-04-10)
-
-## Changed
-- #72: Updated the docs to point to vert for a console appliaction
-- #71: Update the docs on pre-release comparator handling
-
-## Fixed
-- #70: Fix the handling of pre-releases and the 0.0.0 release edge case
-
-# 1.4.1 (2018-04-02)
-
-## Fixed
-- Fixed #64: Fix pre-release precedence issue (thanks @uudashr)
-
-# 1.4.0 (2017-10-04)
-
-## Changed
-- #61: Update NewVersion to parse ints with a 64bit int size (thanks @zknill)
-
-# 1.3.1 (2017-07-10)
-
-## Fixed
-- Fixed #57: number comparisons in prerelease sometimes inaccurate
-
-# 1.3.0 (2017-05-02)
-
-## Added
-- #45: Added json (un)marshaling support (thanks @mh-cbon)
-- Stability marker. See https://masterminds.github.io/stability/
-
-## Fixed
-- #51: Fix handling of single digit tilde constraint (thanks @dgodd)
-
-## Changed
-- #55: The godoc icon moved from png to svg
-
-# 1.2.3 (2017-04-03)
-
-## Fixed
-- #46: Fixed 0.x.x and 0.0.x in constraints being treated as *
-
-# Release 1.2.2 (2016-12-13)
-
-## Fixed
-- #34: Fixed issue where hyphen range was not working with pre-release parsing.
-
-# Release 1.2.1 (2016-11-28)
-
-## Fixed
-- #24: Fixed edge case issue where constraint "> 0" does not handle "0.0.1-alpha"
-  properly.
-
-# Release 1.2.0 (2016-11-04)
-
-## Added
-- #20: Added MustParse function for versions (thanks @adamreese)
-- #15: Added increment methods on versions (thanks @mh-cbon)
-
-## Fixed
-- Issue #21: Per the SemVer spec (section 9) a pre-release is unstable and
-  might not satisfy the intended compatibility. The change here ignores pre-releases
-  on constraint checks (e.g., ~ or ^) when a pre-release is not part of the
-  constraint. For example, `^1.2.3` will ignore pre-releases while
-  `^1.2.3-alpha` will include them.
-
-# Release 1.1.1 (2016-06-30)
-
-## Changed
-- Issue #9: Speed up version comparison performance (thanks @sdboyer)
-- Issue #8: Added benchmarks (thanks @sdboyer)
-- Updated Go Report Card URL to new location
-- Updated Readme to add code snippet formatting (thanks @mh-cbon)
-- Updating tagging to v[SemVer] structure for compatibility with other tools.
-
-# Release 1.1.0 (2016-03-11)
-
-- Issue #2: Implemented validation to provide reasons a versions failed a
-  constraint.
-
-# Release 1.0.1 (2015-12-31)
-
-- Fixed #1: * constraint failing on valid versions.
-
-# Release 1.0.0 (2015-10-20)
-
-- Initial release
diff --git a/vendor/github.com/Masterminds/semver/Makefile b/vendor/github.com/Masterminds/semver/Makefile
deleted file mode 100644
index a7a1b4e36de966056eb74a4d46c98d8d7d0d8144..0000000000000000000000000000000000000000
--- a/vendor/github.com/Masterminds/semver/Makefile
+++ /dev/null
@@ -1,36 +0,0 @@
-.PHONY: setup
-setup:
-	go get -u gopkg.in/alecthomas/gometalinter.v1
-	gometalinter.v1 --install
-
-.PHONY: test
-test: validate lint
-	@echo "==> Running tests"
-	go test -v
-
-.PHONY: validate
-validate:
-	@echo "==> Running static validations"
-	@gometalinter.v1 \
-	  --disable-all \
-	  --enable deadcode \
-	  --severity deadcode:error \
-	  --enable gofmt \
-	  --enable gosimple \
-	  --enable ineffassign \
-	  --enable misspell \
-	  --enable vet \
-	  --tests \
-	  --vendor \
-	  --deadline 60s \
-	  ./... || exit_code=1
-
-.PHONY: lint
-lint:
-	@echo "==> Running linters"
-	@gometalinter.v1 \
-	  --disable-all \
-	  --enable golint \
-	  --vendor \
-	  --deadline 60s \
-	  ./... || :
diff --git a/vendor/github.com/Masterminds/semver/README.md b/vendor/github.com/Masterminds/semver/README.md
deleted file mode 100644
index 1b52d2f4362eaf28df7cfb6b3b41f5a2677e310a..0000000000000000000000000000000000000000
--- a/vendor/github.com/Masterminds/semver/README.md
+++ /dev/null
@@ -1,194 +0,0 @@
-# SemVer
-
-The `semver` package provides the ability to work with [Semantic Versions](http://semver.org) in Go. Specifically it provides the ability to:
-
-* Parse semantic versions
-* Sort semantic versions
-* Check if a semantic version fits within a set of constraints
-* Optionally work with a `v` prefix
-
-[![Stability:
-Active](https://masterminds.github.io/stability/active.svg)](https://masterminds.github.io/stability/active.html)
-[![Build Status](https://travis-ci.org/Masterminds/semver.svg)](https://travis-ci.org/Masterminds/semver) [![Build status](https://ci.appveyor.com/api/projects/status/jfk66lib7hb985k8/branch/master?svg=true&passingText=windows%20build%20passing&failingText=windows%20build%20failing)](https://ci.appveyor.com/project/mattfarina/semver/branch/master) [![GoDoc](https://godoc.org/github.com/Masterminds/semver?status.svg)](https://godoc.org/github.com/Masterminds/semver) [![Go Report Card](https://goreportcard.com/badge/github.com/Masterminds/semver)](https://goreportcard.com/report/github.com/Masterminds/semver)
-
-If you are looking for a command line tool for version comparisons please see
-[vert](https://github.com/Masterminds/vert) which uses this library.
-
-## Parsing Semantic Versions
-
-To parse a semantic version use the `NewVersion` function. For example,
-
-```go
-    v, err := semver.NewVersion("1.2.3-beta.1+build345")
-```
-
-If there is an error the version wasn't parseable. The version object has methods
-to get the parts of the version, compare it to other versions, convert the
-version back into a string, and get the original string. For more details
-please see the [documentation](https://godoc.org/github.com/Masterminds/semver).
-
-## Sorting Semantic Versions
-
-A set of versions can be sorted using the [`sort`](https://golang.org/pkg/sort/)
-package from the standard library. For example,
-
-```go
-    raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",}
-    vs := make([]*semver.Version, len(raw))
-	for i, r := range raw {
-		v, err := semver.NewVersion(r)
-		if err != nil {
-			t.Errorf("Error parsing version: %s", err)
-		}
-
-		vs[i] = v
-	}
-
-	sort.Sort(semver.Collection(vs))
-```
-
-## Checking Version Constraints
-
-Checking a version against version constraints is one of the most featureful
-parts of the package.
-
-```go
-    c, err := semver.NewConstraint(">= 1.2.3")
-    if err != nil {
-        // Handle constraint not being parseable.
-    }
-
-    v, _ := semver.NewVersion("1.3")
-    if err != nil {
-        // Handle version not being parseable.
-    }
-    // Check if the version meets the constraints. The a variable will be true.
-    a := c.Check(v)
-```
-
-## Basic Comparisons
-
-There are two elements to the comparisons. First, a comparison string is a list
-of comma separated and comparisons. These are then separated by || separated or
-comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a
-comparison that's greater than or equal to 1.2 and less than 3.0.0 or is
-greater than or equal to 4.2.3.
-
-The basic comparisons are:
-
-* `=`: equal (aliased to no operator)
-* `!=`: not equal
-* `>`: greater than
-* `<`: less than
-* `>=`: greater than or equal to
-* `<=`: less than or equal to
-
-## Working With Pre-release Versions
-
-Pre-releases, for those not familiar with them, are used for software releases
-prior to stable or generally available releases. Examples of pre-releases include
-development, alpha, beta, and release candidate releases. A pre-release may be
-a version such as `1.2.3-beta.1` while the stable release would be `1.2.3`. In the
-order of precidence, pre-releases come before their associated releases. In this
-example `1.2.3-beta.1 < 1.2.3`.
-
-According to the Semantic Version specification pre-releases may not be
-API compliant with their release counterpart. It says,
-
-> A pre-release version indicates that the version is unstable and might not satisfy the intended compatibility requirements as denoted by its associated normal version.
-
-SemVer comparisons without a pre-release comparator will skip pre-release versions.
-For example, `>=1.2.3` will skip pre-releases when looking at a list of releases
-while `>=1.2.3-0` will evaluate and find pre-releases.
-
-The reason for the `0` as a pre-release version in the example comparison is
-because pre-releases can only contain ASCII alphanumerics and hyphens (along with
-`.` separators), per the spec. Sorting happens in ASCII sort order, again per the spec. The lowest character is a `0` in ASCII sort order (see an [ASCII Table](http://www.asciitable.com/))
-
-Understanding ASCII sort ordering is important because A-Z comes before a-z. That
-means `>=1.2.3-BETA` will return `1.2.3-alpha`. What you might expect from case
-sensitivity doesn't apply here. This is due to ASCII sort ordering which is what
-the spec specifies.
-
-## Hyphen Range Comparisons
-
-There are multiple methods to handle ranges and the first is hyphens ranges.
-These look like:
-
-* `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5`
-* `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5`
-
-## Wildcards In Comparisons
-
-The `x`, `X`, and `*` characters can be used as a wildcard character. This works
-for all comparison operators. When used on the `=` operator it falls
-back to the pack level comparison (see tilde below). For example,
-
-* `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
-* `>= 1.2.x` is equivalent to `>= 1.2.0`
-* `<= 2.x` is equivalent to `< 3`
-* `*` is equivalent to `>= 0.0.0`
-
-## Tilde Range Comparisons (Patch)
-
-The tilde (`~`) comparison operator is for patch level ranges when a minor
-version is specified and major level changes when the minor number is missing.
-For example,
-
-* `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0`
-* `~1` is equivalent to `>= 1, < 2`
-* `~2.3` is equivalent to `>= 2.3, < 2.4`
-* `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
-* `~1.x` is equivalent to `>= 1, < 2`
-
-## Caret Range Comparisons (Major)
-
-The caret (`^`) comparison operator is for major level changes. This is useful
-when comparisons of API versions as a major change is API breaking. For example,
-
-* `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0`
-* `^0.0.1` is equivalent to `>= 0.0.1, < 1.0.0`
-* `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0`
-* `^2.3` is equivalent to `>= 2.3, < 3`
-* `^2.x` is equivalent to `>= 2.0.0, < 3`
-
-# Validation
-
-In addition to testing a version against a constraint, a version can be validated
-against a constraint. When validation fails a slice of errors containing why a
-version didn't meet the constraint is returned. For example,
-
-```go
-    c, err := semver.NewConstraint("<= 1.2.3, >= 1.4")
-    if err != nil {
-        // Handle constraint not being parseable.
-    }
-
-    v, _ := semver.NewVersion("1.3")
-    if err != nil {
-        // Handle version not being parseable.
-    }
-
-    // Validate a version against a constraint.
-    a, msgs := c.Validate(v)
-    // a is false
-    for _, m := range msgs {
-        fmt.Println(m)
-
-        // Loops over the errors which would read
-        // "1.3 is greater than 1.2.3"
-        // "1.3 is less than 1.4"
-    }
-```
-
-# Fuzzing
-
- [dvyukov/go-fuzz](https://github.com/dvyukov/go-fuzz) is used for fuzzing.
-
-1. `go-fuzz-build`
-2. `go-fuzz -workdir=fuzz`
-
-# Contribute
-
-If you find an issue or want to contribute please file an [issue](https://github.com/Masterminds/semver/issues)
-or [create a pull request](https://github.com/Masterminds/semver/pulls).
diff --git a/vendor/github.com/Masterminds/semver/appveyor.yml b/vendor/github.com/Masterminds/semver/appveyor.yml
deleted file mode 100644
index b2778df15a4061a767650d26d88457065f3e61c1..0000000000000000000000000000000000000000
--- a/vendor/github.com/Masterminds/semver/appveyor.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-version: build-{build}.{branch}
-
-clone_folder: C:\gopath\src\github.com\Masterminds\semver
-shallow_clone: true
-
-environment:
-  GOPATH: C:\gopath
-
-platform:
-  - x64
-
-install:
-  - go version
-  - go env
-  - go get -u gopkg.in/alecthomas/gometalinter.v1
-  - set PATH=%PATH%;%GOPATH%\bin
-  - gometalinter.v1.exe --install
-
-build_script:
-  - go install -v ./...
-
-test_script:
-  - "gometalinter.v1 \
-    --disable-all \
-    --enable deadcode \
-    --severity deadcode:error \
-    --enable gofmt \
-    --enable gosimple \
-    --enable ineffassign \
-    --enable misspell \
-    --enable vet \
-    --tests \
-    --vendor \
-    --deadline 60s \
-    ./... || exit_code=1"
-  - "gometalinter.v1 \
-    --disable-all \
-    --enable golint \
-    --vendor \
-    --deadline 60s \
-    ./... || :"
-  - go test -v
-
-deploy: off
diff --git a/vendor/github.com/Masterminds/semver/constraints.go b/vendor/github.com/Masterminds/semver/constraints.go
deleted file mode 100644
index b94b93413f361f806fea4b9e3d150bc29019ba2c..0000000000000000000000000000000000000000
--- a/vendor/github.com/Masterminds/semver/constraints.go
+++ /dev/null
@@ -1,423 +0,0 @@
-package semver
-
-import (
-	"errors"
-	"fmt"
-	"regexp"
-	"strings"
-)
-
-// Constraints is one or more constraint that a semantic version can be
-// checked against.
-type Constraints struct {
-	constraints [][]*constraint
-}
-
-// NewConstraint returns a Constraints instance that a Version instance can
-// be checked against. If there is a parse error it will be returned.
-func NewConstraint(c string) (*Constraints, error) {
-
-	// Rewrite - ranges into a comparison operation.
-	c = rewriteRange(c)
-
-	ors := strings.Split(c, "||")
-	or := make([][]*constraint, len(ors))
-	for k, v := range ors {
-		cs := strings.Split(v, ",")
-		result := make([]*constraint, len(cs))
-		for i, s := range cs {
-			pc, err := parseConstraint(s)
-			if err != nil {
-				return nil, err
-			}
-
-			result[i] = pc
-		}
-		or[k] = result
-	}
-
-	o := &Constraints{constraints: or}
-	return o, nil
-}
-
-// Check tests if a version satisfies the constraints.
-func (cs Constraints) Check(v *Version) bool {
-	// loop over the ORs and check the inner ANDs
-	for _, o := range cs.constraints {
-		joy := true
-		for _, c := range o {
-			if !c.check(v) {
-				joy = false
-				break
-			}
-		}
-
-		if joy {
-			return true
-		}
-	}
-
-	return false
-}
-
-// Validate checks if a version satisfies a constraint. If not a slice of
-// reasons for the failure are returned in addition to a bool.
-func (cs Constraints) Validate(v *Version) (bool, []error) {
-	// loop over the ORs and check the inner ANDs
-	var e []error
-
-	// Capture the prerelease message only once. When it happens the first time
-	// this var is marked
-	var prerelesase bool
-	for _, o := range cs.constraints {
-		joy := true
-		for _, c := range o {
-			// Before running the check handle the case there the version is
-			// a prerelease and the check is not searching for prereleases.
-			if c.con.pre == "" && v.pre != "" {
-				if !prerelesase {
-					em := fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
-					e = append(e, em)
-					prerelesase = true
-				}
-				joy = false
-
-			} else {
-
-				if !c.check(v) {
-					em := fmt.Errorf(c.msg, v, c.orig)
-					e = append(e, em)
-					joy = false
-				}
-			}
-		}
-
-		if joy {
-			return true, []error{}
-		}
-	}
-
-	return false, e
-}
-
-var constraintOps map[string]cfunc
-var constraintMsg map[string]string
-var constraintRegex *regexp.Regexp
-
-func init() {
-	constraintOps = map[string]cfunc{
-		"":   constraintTildeOrEqual,
-		"=":  constraintTildeOrEqual,
-		"!=": constraintNotEqual,
-		">":  constraintGreaterThan,
-		"<":  constraintLessThan,
-		">=": constraintGreaterThanEqual,
-		"=>": constraintGreaterThanEqual,
-		"<=": constraintLessThanEqual,
-		"=<": constraintLessThanEqual,
-		"~":  constraintTilde,
-		"~>": constraintTilde,
-		"^":  constraintCaret,
-	}
-
-	constraintMsg = map[string]string{
-		"":   "%s is not equal to %s",
-		"=":  "%s is not equal to %s",
-		"!=": "%s is equal to %s",
-		">":  "%s is less than or equal to %s",
-		"<":  "%s is greater than or equal to %s",
-		">=": "%s is less than %s",
-		"=>": "%s is less than %s",
-		"<=": "%s is greater than %s",
-		"=<": "%s is greater than %s",
-		"~":  "%s does not have same major and minor version as %s",
-		"~>": "%s does not have same major and minor version as %s",
-		"^":  "%s does not have same major version as %s",
-	}
-
-	ops := make([]string, 0, len(constraintOps))
-	for k := range constraintOps {
-		ops = append(ops, regexp.QuoteMeta(k))
-	}
-
-	constraintRegex = regexp.MustCompile(fmt.Sprintf(
-		`^\s*(%s)\s*(%s)\s*$`,
-		strings.Join(ops, "|"),
-		cvRegex))
-
-	constraintRangeRegex = regexp.MustCompile(fmt.Sprintf(
-		`\s*(%s)\s+-\s+(%s)\s*`,
-		cvRegex, cvRegex))
-}
-
-// An individual constraint
-type constraint struct {
-	// The callback function for the restraint. It performs the logic for
-	// the constraint.
-	function cfunc
-
-	msg string
-
-	// The version used in the constraint check. For example, if a constraint
-	// is '<= 2.0.0' the con a version instance representing 2.0.0.
-	con *Version
-
-	// The original parsed version (e.g., 4.x from != 4.x)
-	orig string
-
-	// When an x is used as part of the version (e.g., 1.x)
-	minorDirty bool
-	dirty      bool
-	patchDirty bool
-}
-
-// Check if a version meets the constraint
-func (c *constraint) check(v *Version) bool {
-	return c.function(v, c)
-}
-
-type cfunc func(v *Version, c *constraint) bool
-
-func parseConstraint(c string) (*constraint, error) {
-	m := constraintRegex.FindStringSubmatch(c)
-	if m == nil {
-		return nil, fmt.Errorf("improper constraint: %s", c)
-	}
-
-	ver := m[2]
-	orig := ver
-	minorDirty := false
-	patchDirty := false
-	dirty := false
-	if isX(m[3]) {
-		ver = "0.0.0"
-		dirty = true
-	} else if isX(strings.TrimPrefix(m[4], ".")) || m[4] == "" {
-		minorDirty = true
-		dirty = true
-		ver = fmt.Sprintf("%s.0.0%s", m[3], m[6])
-	} else if isX(strings.TrimPrefix(m[5], ".")) {
-		dirty = true
-		patchDirty = true
-		ver = fmt.Sprintf("%s%s.0%s", m[3], m[4], m[6])
-	}
-
-	con, err := NewVersion(ver)
-	if err != nil {
-
-		// The constraintRegex should catch any regex parsing errors. So,
-		// we should never get here.
-		return nil, errors.New("constraint Parser Error")
-	}
-
-	cs := &constraint{
-		function:   constraintOps[m[1]],
-		msg:        constraintMsg[m[1]],
-		con:        con,
-		orig:       orig,
-		minorDirty: minorDirty,
-		patchDirty: patchDirty,
-		dirty:      dirty,
-	}
-	return cs, nil
-}
-
-// Constraint functions
-func constraintNotEqual(v *Version, c *constraint) bool {
-	if c.dirty {
-
-		// If there is a pre-release on the version but the constraint isn't looking
-		// for them assume that pre-releases are not compatible. See issue 21 for
-		// more details.
-		if v.Prerelease() != "" && c.con.Prerelease() == "" {
-			return false
-		}
-
-		if c.con.Major() != v.Major() {
-			return true
-		}
-		if c.con.Minor() != v.Minor() && !c.minorDirty {
-			return true
-		} else if c.minorDirty {
-			return false
-		}
-
-		return false
-	}
-
-	return !v.Equal(c.con)
-}
-
-func constraintGreaterThan(v *Version, c *constraint) bool {
-
-	// If there is a pre-release on the version but the constraint isn't looking
-	// for them assume that pre-releases are not compatible. See issue 21 for
-	// more details.
-	if v.Prerelease() != "" && c.con.Prerelease() == "" {
-		return false
-	}
-
-	return v.Compare(c.con) == 1
-}
-
-func constraintLessThan(v *Version, c *constraint) bool {
-	// If there is a pre-release on the version but the constraint isn't looking
-	// for them assume that pre-releases are not compatible. See issue 21 for
-	// more details.
-	if v.Prerelease() != "" && c.con.Prerelease() == "" {
-		return false
-	}
-
-	if !c.dirty {
-		return v.Compare(c.con) < 0
-	}
-
-	if v.Major() > c.con.Major() {
-		return false
-	} else if v.Minor() > c.con.Minor() && !c.minorDirty {
-		return false
-	}
-
-	return true
-}
-
-func constraintGreaterThanEqual(v *Version, c *constraint) bool {
-
-	// If there is a pre-release on the version but the constraint isn't looking
-	// for them assume that pre-releases are not compatible. See issue 21 for
-	// more details.
-	if v.Prerelease() != "" && c.con.Prerelease() == "" {
-		return false
-	}
-
-	return v.Compare(c.con) >= 0
-}
-
-func constraintLessThanEqual(v *Version, c *constraint) bool {
-	// If there is a pre-release on the version but the constraint isn't looking
-	// for them assume that pre-releases are not compatible. See issue 21 for
-	// more details.
-	if v.Prerelease() != "" && c.con.Prerelease() == "" {
-		return false
-	}
-
-	if !c.dirty {
-		return v.Compare(c.con) <= 0
-	}
-
-	if v.Major() > c.con.Major() {
-		return false
-	} else if v.Minor() > c.con.Minor() && !c.minorDirty {
-		return false
-	}
-
-	return true
-}
-
-// ~*, ~>* --> >= 0.0.0 (any)
-// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0, <3.0.0
-// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0, <2.1.0
-// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0, <1.3.0
-// ~1.2.3, ~>1.2.3 --> >=1.2.3, <1.3.0
-// ~1.2.0, ~>1.2.0 --> >=1.2.0, <1.3.0
-func constraintTilde(v *Version, c *constraint) bool {
-	// If there is a pre-release on the version but the constraint isn't looking
-	// for them assume that pre-releases are not compatible. See issue 21 for
-	// more details.
-	if v.Prerelease() != "" && c.con.Prerelease() == "" {
-		return false
-	}
-
-	if v.LessThan(c.con) {
-		return false
-	}
-
-	// ~0.0.0 is a special case where all constraints are accepted. It's
-	// equivalent to >= 0.0.0.
-	if c.con.Major() == 0 && c.con.Minor() == 0 && c.con.Patch() == 0 &&
-		!c.minorDirty && !c.patchDirty {
-		return true
-	}
-
-	if v.Major() != c.con.Major() {
-		return false
-	}
-
-	if v.Minor() != c.con.Minor() && !c.minorDirty {
-		return false
-	}
-
-	return true
-}
-
-// When there is a .x (dirty) status it automatically opts in to ~. Otherwise
-// it's a straight =
-func constraintTildeOrEqual(v *Version, c *constraint) bool {
-	// If there is a pre-release on the version but the constraint isn't looking
-	// for them assume that pre-releases are not compatible. See issue 21 for
-	// more details.
-	if v.Prerelease() != "" && c.con.Prerelease() == "" {
-		return false
-	}
-
-	if c.dirty {
-		c.msg = constraintMsg["~"]
-		return constraintTilde(v, c)
-	}
-
-	return v.Equal(c.con)
-}
-
-// ^* --> (any)
-// ^2, ^2.x, ^2.x.x --> >=2.0.0, <3.0.0
-// ^2.0, ^2.0.x --> >=2.0.0, <3.0.0
-// ^1.2, ^1.2.x --> >=1.2.0, <2.0.0
-// ^1.2.3 --> >=1.2.3, <2.0.0
-// ^1.2.0 --> >=1.2.0, <2.0.0
-func constraintCaret(v *Version, c *constraint) bool {
-	// If there is a pre-release on the version but the constraint isn't looking
-	// for them assume that pre-releases are not compatible. See issue 21 for
-	// more details.
-	if v.Prerelease() != "" && c.con.Prerelease() == "" {
-		return false
-	}
-
-	if v.LessThan(c.con) {
-		return false
-	}
-
-	if v.Major() != c.con.Major() {
-		return false
-	}
-
-	return true
-}
-
-var constraintRangeRegex *regexp.Regexp
-
-const cvRegex string = `v?([0-9|x|X|\*]+)(\.[0-9|x|X|\*]+)?(\.[0-9|x|X|\*]+)?` +
-	`(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` +
-	`(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?`
-
-func isX(x string) bool {
-	switch x {
-	case "x", "*", "X":
-		return true
-	default:
-		return false
-	}
-}
-
-func rewriteRange(i string) string {
-	m := constraintRangeRegex.FindAllStringSubmatch(i, -1)
-	if m == nil {
-		return i
-	}
-	o := i
-	for _, v := range m {
-		t := fmt.Sprintf(">= %s, <= %s", v[1], v[11])
-		o = strings.Replace(o, v[0], t, 1)
-	}
-
-	return o
-}
diff --git a/vendor/github.com/Masterminds/semver/doc.go b/vendor/github.com/Masterminds/semver/doc.go
deleted file mode 100644
index 6a6c24c6d6e2a80986d40bc08eb3f93ae79369d9..0000000000000000000000000000000000000000
--- a/vendor/github.com/Masterminds/semver/doc.go
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
-Package semver provides the ability to work with Semantic Versions (http://semver.org) in Go.
-
-Specifically it provides the ability to:
-
-    * Parse semantic versions
-    * Sort semantic versions
-    * Check if a semantic version fits within a set of constraints
-    * Optionally work with a `v` prefix
-
-Parsing Semantic Versions
-
-To parse a semantic version use the `NewVersion` function. For example,
-
-    v, err := semver.NewVersion("1.2.3-beta.1+build345")
-
-If there is an error the version wasn't parseable. The version object has methods
-to get the parts of the version, compare it to other versions, convert the
-version back into a string, and get the original string. For more details
-please see the documentation at https://godoc.org/github.com/Masterminds/semver.
-
-Sorting Semantic Versions
-
-A set of versions can be sorted using the `sort` package from the standard library.
-For example,
-
-    raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",}
-    vs := make([]*semver.Version, len(raw))
-	for i, r := range raw {
-		v, err := semver.NewVersion(r)
-		if err != nil {
-			t.Errorf("Error parsing version: %s", err)
-		}
-
-		vs[i] = v
-	}
-
-	sort.Sort(semver.Collection(vs))
-
-Checking Version Constraints
-
-Checking a version against version constraints is one of the most featureful
-parts of the package.
-
-    c, err := semver.NewConstraint(">= 1.2.3")
-    if err != nil {
-        // Handle constraint not being parseable.
-    }
-
-    v, err := semver.NewVersion("1.3")
-    if err != nil {
-        // Handle version not being parseable.
-    }
-    // Check if the version meets the constraints. The a variable will be true.
-    a := c.Check(v)
-
-Basic Comparisons
-
-There are two elements to the comparisons. First, a comparison string is a list
-of comma separated and comparisons. These are then separated by || separated or
-comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a
-comparison that's greater than or equal to 1.2 and less than 3.0.0 or is
-greater than or equal to 4.2.3.
-
-The basic comparisons are:
-
-    * `=`: equal (aliased to no operator)
-    * `!=`: not equal
-    * `>`: greater than
-    * `<`: less than
-    * `>=`: greater than or equal to
-    * `<=`: less than or equal to
-
-Hyphen Range Comparisons
-
-There are multiple methods to handle ranges and the first is hyphens ranges.
-These look like:
-
-    * `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5`
-    * `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5`
-
-Wildcards In Comparisons
-
-The `x`, `X`, and `*` characters can be used as a wildcard character. This works
-for all comparison operators. When used on the `=` operator it falls
-back to the pack level comparison (see tilde below). For example,
-
-    * `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
-    * `>= 1.2.x` is equivalent to `>= 1.2.0`
-    * `<= 2.x` is equivalent to `<= 3`
-    * `*` is equivalent to `>= 0.0.0`
-
-Tilde Range Comparisons (Patch)
-
-The tilde (`~`) comparison operator is for patch level ranges when a minor
-version is specified and major level changes when the minor number is missing.
-For example,
-
-    * `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0`
-    * `~1` is equivalent to `>= 1, < 2`
-    * `~2.3` is equivalent to `>= 2.3, < 2.4`
-    * `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
-    * `~1.x` is equivalent to `>= 1, < 2`
-
-Caret Range Comparisons (Major)
-
-The caret (`^`) comparison operator is for major level changes. This is useful
-when comparisons of API versions as a major change is API breaking. For example,
-
-    * `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0`
-    * `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0`
-    * `^2.3` is equivalent to `>= 2.3, < 3`
-    * `^2.x` is equivalent to `>= 2.0.0, < 3`
-*/
-package semver
diff --git a/vendor/github.com/Masterminds/semver/v3/.gitignore b/vendor/github.com/Masterminds/semver/v3/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..6b061e6174b3e6ba815c2b0407ba43733c184777
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/.gitignore
@@ -0,0 +1 @@
+_fuzz/
\ No newline at end of file
diff --git a/vendor/github.com/Masterminds/semver/v3/.golangci.yml b/vendor/github.com/Masterminds/semver/v3/.golangci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..fbc6332592f6bce3cca068ea3ef34aef59a40ab8
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/.golangci.yml
@@ -0,0 +1,27 @@
+run:
+  deadline: 2m
+
+linters:
+  disable-all: true
+  enable:
+    - misspell
+    - govet
+    - staticcheck
+    - errcheck
+    - unparam
+    - ineffassign
+    - nakedret
+    - gocyclo
+    - dupl
+    - goimports
+    - revive
+    - gosec
+    - gosimple
+    - typecheck
+    - unused
+
+linters-settings:
+  gofmt:
+    simplify: true
+  dupl:
+    threshold: 600
diff --git a/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md b/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md
new file mode 100644
index 0000000000000000000000000000000000000000..f12626423a3e4d631d1e87e1ea4f730515851f0e
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md
@@ -0,0 +1,214 @@
+# Changelog
+
+## 3.2.0 (2022-11-28)
+
+### Added
+
+- #190: Added text marshaling and unmarshaling
+- #167: Added JSON marshalling for constraints (thanks @SimonTheLeg)
+- #173: Implement encoding.TextMarshaler and encoding.TextUnmarshaler on Version (thanks @MarkRosemaker)
+- #179: Added New() version constructor (thanks @kazhuravlev)
+
+### Changed
+
+- #182/#183: Updated CI testing setup
+
+### Fixed
+
+- #186: Fixing issue where validation of constraint section gave false positives
+- #176: Fix constraints check with *-0 (thanks @mtt0)
+- #181: Fixed Caret operator (^) gives unexpected results when the minor version in constraint is 0 (thanks @arshchimni)
+- #161: Fixed godoc (thanks @afirth)
+
+## 3.1.1 (2020-11-23)
+
+### Fixed
+
+- #158: Fixed issue with generated regex operation order that could cause problem
+
+## 3.1.0 (2020-04-15)
+
+### Added
+
+- #131: Add support for serializing/deserializing SQL (thanks @ryancurrah)
+
+### Changed
+
+- #148: More accurate validation messages on constraints
+
+## 3.0.3 (2019-12-13)
+
+### Fixed
+
+- #141: Fixed issue with <= comparison
+
+## 3.0.2 (2019-11-14)
+
+### Fixed
+
+- #134: Fixed broken constraint checking with ^0.0 (thanks @krmichelos)
+
+## 3.0.1 (2019-09-13)
+
+### Fixed
+
+- #125: Fixes issue with module path for v3
+
+## 3.0.0 (2019-09-12)
+
+This is a major release of the semver package which includes API changes. The Go
+API is compatible with ^1. The Go API was not changed because many people are using
+`go get` without Go modules for their applications and API breaking changes cause
+errors which we have or would need to support.
+
+The changes in this release are the handling based on the data passed into the
+functions. These are described in the added and changed sections below.
+
+### Added
+
+- StrictNewVersion function. This is similar to NewVersion but will return an
+  error if the version passed in is not a strict semantic version. For example,
+  1.2.3 would pass but v1.2.3 or 1.2 would fail because they are not strictly
+  speaking semantic versions. This function is faster, performs fewer operations,
+  and uses fewer allocations than NewVersion.
+- Fuzzing has been performed on NewVersion, StrictNewVersion, and NewConstraint.
+  The Makefile contains the operations used. For more information you can start
+  on Wikipedia at https://en.wikipedia.org/wiki/Fuzzing
+- Now using Go modules
+
+### Changed
+
+- NewVersion has proper prerelease and metadata validation with error messages
+  to signal an issue with either of them
+- ^ now operates using a similar set of rules to npm/js and Rust/Cargo. If the
+  version is >=1 the ^ ranges works the same as v1. For major versions of 0 the
+  rules have changed. The minor version is treated as the stable version unless
+  a patch is specified and then it is equivalent to =. One difference from npm/js
+  is that prereleases there are only to a specific version (e.g. 1.2.3).
+  Prereleases here look over multiple versions and follow semantic version
+  ordering rules. This pattern now follows along with the expected and requested
+  handling of this packaged by numerous users.
+
+## 1.5.0 (2019-09-11)
+
+### Added
+
+- #103: Add basic fuzzing for `NewVersion()` (thanks @jesse-c)
+
+### Changed
+
+- #82: Clarify wildcard meaning in range constraints and update tests for it (thanks @greysteil)
+- #83: Clarify caret operator range for pre-1.0.0 dependencies (thanks @greysteil)
+- #72: Adding docs comment pointing to vert for a cli
+- #71: Update the docs on pre-release comparator handling
+- #89: Test with new go versions (thanks @thedevsaddam)
+- #87: Added $ to ValidPrerelease for better validation (thanks @jeremycarroll)
+
+### Fixed
+
+- #78: Fix unchecked error in example code (thanks @ravron)
+- #70: Fix the handling of pre-releases and the 0.0.0 release edge case
+- #97: Fixed copyright file for proper display on GitHub
+- #107: Fix handling prerelease when sorting alphanum and num 
+- #109: Fixed where Validate sometimes returns wrong message on error
+
+## 1.4.2 (2018-04-10)
+
+### Changed
+
+- #72: Updated the docs to point to vert for a console application
+- #71: Update the docs on pre-release comparator handling
+
+### Fixed
+
+- #70: Fix the handling of pre-releases and the 0.0.0 release edge case
+
+## 1.4.1 (2018-04-02)
+
+### Fixed
+
+- Fixed #64: Fix pre-release precedence issue (thanks @uudashr)
+
+## 1.4.0 (2017-10-04)
+
+### Changed
+
+- #61: Update NewVersion to parse ints with a 64bit int size (thanks @zknill)
+
+## 1.3.1 (2017-07-10)
+
+### Fixed
+
+- Fixed #57: number comparisons in prerelease sometimes inaccurate
+
+## 1.3.0 (2017-05-02)
+
+### Added
+
+- #45: Added json (un)marshaling support (thanks @mh-cbon)
+- Stability marker. See https://masterminds.github.io/stability/
+
+### Fixed
+
+- #51: Fix handling of single digit tilde constraint (thanks @dgodd)
+
+### Changed
+
+- #55: The godoc icon moved from png to svg
+
+## 1.2.3 (2017-04-03)
+
+### Fixed
+
+- #46: Fixed 0.x.x and 0.0.x in constraints being treated as *
+
+## Release 1.2.2 (2016-12-13)
+
+### Fixed
+
+- #34: Fixed issue where hyphen range was not working with pre-release parsing.
+
+## Release 1.2.1 (2016-11-28)
+
+### Fixed
+
+- #24: Fixed edge case issue where constraint "> 0" does not handle "0.0.1-alpha"
+  properly.
+
+## Release 1.2.0 (2016-11-04)
+
+### Added
+
+- #20: Added MustParse function for versions (thanks @adamreese)
+- #15: Added increment methods on versions (thanks @mh-cbon)
+
+### Fixed
+
+- Issue #21: Per the SemVer spec (section 9) a pre-release is unstable and
+  might not satisfy the intended compatibility. The change here ignores pre-releases
+  on constraint checks (e.g., ~ or ^) when a pre-release is not part of the
+  constraint. For example, `^1.2.3` will ignore pre-releases while
+  `^1.2.3-alpha` will include them.
+
+## Release 1.1.1 (2016-06-30)
+
+### Changed
+
+- Issue #9: Speed up version comparison performance (thanks @sdboyer)
+- Issue #8: Added benchmarks (thanks @sdboyer)
+- Updated Go Report Card URL to new location
+- Updated Readme to add code snippet formatting (thanks @mh-cbon)
+- Updating tagging to v[SemVer] structure for compatibility with other tools.
+
+## Release 1.1.0 (2016-03-11)
+
+- Issue #2: Implemented validation to provide reasons a versions failed a
+  constraint.
+
+## Release 1.0.1 (2015-12-31)
+
+- Fixed #1: * constraint failing on valid versions.
+
+## Release 1.0.0 (2015-10-20)
+
+- Initial release
diff --git a/vendor/github.com/Masterminds/semver/LICENSE.txt b/vendor/github.com/Masterminds/semver/v3/LICENSE.txt
similarity index 100%
rename from vendor/github.com/Masterminds/semver/LICENSE.txt
rename to vendor/github.com/Masterminds/semver/v3/LICENSE.txt
diff --git a/vendor/github.com/Masterminds/semver/v3/Makefile b/vendor/github.com/Masterminds/semver/v3/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..0e7b5c7138ec2bb30c7887ac4dbfc084ca7aaa9a
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/Makefile
@@ -0,0 +1,30 @@
+GOPATH=$(shell go env GOPATH)
+GOLANGCI_LINT=$(GOPATH)/bin/golangci-lint
+
+.PHONY: lint
+lint: $(GOLANGCI_LINT)
+	@echo "==> Linting codebase"
+	@$(GOLANGCI_LINT) run
+
+.PHONY: test
+test:
+	@echo "==> Running tests"
+	GO111MODULE=on go test -v
+
+.PHONY: test-cover
+test-cover:
+	@echo "==> Running Tests with coverage"
+	GO111MODULE=on go test -cover .
+
+.PHONY: fuzz
+fuzz:
+	@echo "==> Running Fuzz Tests"
+	go test -fuzz=FuzzNewVersion -fuzztime=15s .
+	go test -fuzz=FuzzStrictNewVersion -fuzztime=15s .
+	go test -fuzz=FuzzNewConstraint -fuzztime=15s .
+
+$(GOLANGCI_LINT):
+	# Install golangci-lint. The configuration for it is in the .golangci.yml
+	# file in the root of the repository
+	echo ${GOPATH}
+	curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(GOPATH)/bin v1.17.1
diff --git a/vendor/github.com/Masterminds/semver/v3/README.md b/vendor/github.com/Masterminds/semver/v3/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..eab8cac3b7f9c828da76104b1437b29fc79c2386
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/README.md
@@ -0,0 +1,258 @@
+# SemVer
+
+The `semver` package provides the ability to work with [Semantic Versions](http://semver.org) in Go. Specifically it provides the ability to:
+
+* Parse semantic versions
+* Sort semantic versions
+* Check if a semantic version fits within a set of constraints
+* Optionally work with a `v` prefix
+
+[![Stability:
+Active](https://masterminds.github.io/stability/active.svg)](https://masterminds.github.io/stability/active.html)
+[![](https://github.com/Masterminds/semver/workflows/Tests/badge.svg)](https://github.com/Masterminds/semver/actions)
+[![GoDoc](https://img.shields.io/static/v1?label=godoc&message=reference&color=blue)](https://pkg.go.dev/github.com/Masterminds/semver/v3)
+[![Go Report Card](https://goreportcard.com/badge/github.com/Masterminds/semver)](https://goreportcard.com/report/github.com/Masterminds/semver)
+
+If you are looking for a command line tool for version comparisons please see
+[vert](https://github.com/Masterminds/vert) which uses this library.
+
+## Package Versions
+
+Note, import `github.com/Masterminds/semver/v3` to use the latest version.
+
+There are three major versions of the `semver` package.
+
+* 3.x.x is the stable and active version. This version is focused on constraint
+  compatibility for range handling in other tools from other languages. It has
+  a similar API to the v1 releases. The development of this version is on the master
+  branch. The documentation for this version is below.
+* 2.x was developed primarily for [dep](https://github.com/golang/dep). There are
+  no tagged releases and the development was performed by [@sdboyer](https://github.com/sdboyer).
+  There are API breaking changes from v1. This version lives on the [2.x branch](https://github.com/Masterminds/semver/tree/2.x).
+* 1.x.x is the original release. It is no longer maintained. You should use the
+  v3 release instead. You can read the documentation for the 1.x.x release
+  [here](https://github.com/Masterminds/semver/blob/release-1/README.md).
+
+## Parsing Semantic Versions
+
+There are two functions that can parse semantic versions. The `StrictNewVersion`
+function only parses valid version 2 semantic versions as outlined in the
+specification. The `NewVersion` function attempts to coerce a version into a
+semantic version and parse it. For example, if there is a leading v or a version
+listed without all 3 parts (e.g. `v1.2`) it will attempt to coerce it into a valid
+semantic version (e.g., 1.2.0). In both cases a `Version` object is returned
+that can be sorted, compared, and used in constraints.
+
+When parsing a version an error is returned if there is an issue parsing the
+version. For example,
+
+    v, err := semver.NewVersion("1.2.3-beta.1+build345")
+
+The version object has methods to get the parts of the version, compare it to
+other versions, convert the version back into a string, and get the original
+string. Getting the original string is useful if the semantic version was coerced
+into a valid form.
+
+## Sorting Semantic Versions
+
+A set of versions can be sorted using the `sort` package from the standard library.
+For example,
+
+```go
+raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",}
+vs := make([]*semver.Version, len(raw))
+for i, r := range raw {
+    v, err := semver.NewVersion(r)
+    if err != nil {
+        t.Errorf("Error parsing version: %s", err)
+    }
+
+    vs[i] = v
+}
+
+sort.Sort(semver.Collection(vs))
+```
+
+## Checking Version Constraints
+
+There are two methods for comparing versions. One uses comparison methods on
+`Version` instances and the other uses `Constraints`. There are some important
+differences to note between these two methods of comparison.
+
+1. When two versions are compared using functions such as `Compare`, `LessThan`,
+   and others it will follow the specification and always include prereleases
+   within the comparison. It will provide an answer that is valid with the
+   comparison section of the spec at https://semver.org/#spec-item-11
+2. When constraint checking is used for checks or validation it will follow a
+   different set of rules that are common for ranges with tools like npm/js
+   and Rust/Cargo. This includes considering prereleases to be invalid if the
+   range does not include one. If you want to have it include pre-releases a
+   simple solution is to include `-0` in your range.
+3. Constraint ranges can have some complex rules including the shorthand use of
+   ~ and ^. For more details on those see the options below.
+
+There are differences between the two methods of checking versions because the
+comparison methods on `Version` follow the specification while comparison ranges
+are not part of the specification. Different packages and tools have taken it
+upon themselves to come up with range rules. This has resulted in differences.
+For example, npm/js and Cargo/Rust follow similar patterns while PHP has a
+different pattern for ^. The comparison features in this package follow the
+npm/js and Cargo/Rust lead because applications using it have followed similar
+patterns with their versions.
+
+Checking a version against version constraints is one of the most featureful
+parts of the package.
+
+```go
+c, err := semver.NewConstraint(">= 1.2.3")
+if err != nil {
+    // Handle constraint not being parsable.
+}
+
+v, err := semver.NewVersion("1.3")
+if err != nil {
+    // Handle version not being parsable.
+}
+// Check if the version meets the constraints. The a variable will be true.
+a := c.Check(v)
+```
+
+### Basic Comparisons
+
+There are two elements to the comparisons. First, a comparison string is a list
+of space or comma separated AND comparisons. These are then separated by || (OR)
+comparisons. For example, `">= 1.2 < 3.0.0 || >= 4.2.3"` is looking for a
+comparison that's greater than or equal to 1.2 and less than 3.0.0 or is
+greater than or equal to 4.2.3.
+
+The basic comparisons are:
+
+* `=`: equal (aliased to no operator)
+* `!=`: not equal
+* `>`: greater than
+* `<`: less than
+* `>=`: greater than or equal to
+* `<=`: less than or equal to
+
+### Working With Prerelease Versions
+
+Pre-releases, for those not familiar with them, are used for software releases
+prior to stable or generally available releases. Examples of prereleases include
+development, alpha, beta, and release candidate releases. A prerelease may be
+a version such as `1.2.3-beta.1` while the stable release would be `1.2.3`. In the
+order of precedence, prereleases come before their associated releases. In this
+example `1.2.3-beta.1 < 1.2.3`.
+
+According to the Semantic Version specification prereleases may not be
+API compliant with their release counterpart. It says,
+
+> A pre-release version indicates that the version is unstable and might not satisfy the intended compatibility requirements as denoted by its associated normal version.
+
+SemVer comparisons using constraints without a prerelease comparator will skip
+prerelease versions. For example, `>=1.2.3` will skip prereleases when looking
+at a list of releases while `>=1.2.3-0` will evaluate and find prereleases.
+
+The reason for the `0` as a pre-release version in the example comparison is
+because pre-releases can only contain ASCII alphanumerics and hyphens (along with
+`.` separators), per the spec. Sorting happens in ASCII sort order, again per the
+spec. The lowest character is a `0` in ASCII sort order
+(see an [ASCII Table](http://www.asciitable.com/))
+
+Understanding ASCII sort ordering is important because A-Z comes before a-z. That
+means `>=1.2.3-BETA` will return `1.2.3-alpha`. What you might expect from case
+sensitivity doesn't apply here. This is due to ASCII sort ordering which is what
+the spec specifies.
+
+### Hyphen Range Comparisons
+
+There are multiple methods to handle ranges and the first is hyphens ranges.
+These look like:
+
+* `1.2 - 1.4.5` which is equivalent to `>= 1.2 <= 1.4.5`
+* `2.3.4 - 4.5` which is equivalent to `>= 2.3.4 <= 4.5`
+
+### Wildcards In Comparisons
+
+The `x`, `X`, and `*` characters can be used as a wildcard character. This works
+for all comparison operators. When used on the `=` operator it falls
+back to the patch level comparison (see tilde below). For example,
+
+* `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
+* `>= 1.2.x` is equivalent to `>= 1.2.0`
+* `<= 2.x` is equivalent to `< 3`
+* `*` is equivalent to `>= 0.0.0`
+
+### Tilde Range Comparisons (Patch)
+
+The tilde (`~`) comparison operator is for patch level ranges when a minor
+version is specified and major level changes when the minor number is missing.
+For example,
+
+* `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0`
+* `~1` is equivalent to `>= 1, < 2`
+* `~2.3` is equivalent to `>= 2.3, < 2.4`
+* `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
+* `~1.x` is equivalent to `>= 1, < 2`
+
+### Caret Range Comparisons (Major)
+
+The caret (`^`) comparison operator is for major level changes once a stable
+(1.0.0) release has occurred. Prior to a 1.0.0 release the minor version acts
+as the API stability level. This is useful when comparing API versions, as a
+major change is API breaking. For example,
+
+* `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0`
+* `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0`
+* `^2.3` is equivalent to `>= 2.3, < 3`
+* `^2.x` is equivalent to `>= 2.0.0, < 3`
+* `^0.2.3` is equivalent to `>=0.2.3 <0.3.0`
+* `^0.2` is equivalent to `>=0.2.0 <0.3.0`
+* `^0.0.3` is equivalent to `>=0.0.3 <0.0.4`
+* `^0.0` is equivalent to `>=0.0.0 <0.1.0`
+* `^0` is equivalent to `>=0.0.0 <1.0.0`
+
+## Validation
+
+In addition to testing a version against a constraint, a version can be validated
+against a constraint. When validation fails a slice of errors containing why a
+version didn't meet the constraint is returned. For example,
+
+```go
+c, err := semver.NewConstraint("<= 1.2.3, >= 1.4")
+if err != nil {
+    // Handle constraint not being parseable.
+}
+
+v, err := semver.NewVersion("1.3")
+if err != nil {
+    // Handle version not being parseable.
+}
+
+// Validate a version against a constraint.
+a, msgs := c.Validate(v)
+// a is false
+for _, m := range msgs {
+    fmt.Println(m)
+
+    // Loops over the errors which would read
+    // "1.3 is greater than 1.2.3"
+    // "1.3 is less than 1.4"
+}
+```
+
+## Contribute
+
+If you find an issue or want to contribute please file an [issue](https://github.com/Masterminds/semver/issues)
+or [create a pull request](https://github.com/Masterminds/semver/pulls).
+
+## Security
+
+Security is an important consideration for this project. The project currently
+uses the following tools to help discover security issues:
+
+* [CodeQL](https://github.com/Masterminds/semver)
+* [gosec](https://github.com/securego/gosec)
+* Daily Fuzz testing
+
+If you believe you have found a security vulnerability you can privately disclose
+it through the [GitHub security page](https://github.com/Masterminds/semver/security).
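For reviewers, a quick sketch (not part of the vendored README) of the prerelease behaviour described above, using the `NewConstraint`, `NewVersion`, and `Check` APIs from this v3 copy and its import path:

```go
package main

import (
	"fmt"

	"github.com/Masterminds/semver/v3"
)

func main() {
	plain, err := semver.NewConstraint(">= 1.2.3")
	if err != nil {
		panic(err)
	}
	withPre, err := semver.NewConstraint(">= 1.2.3-0")
	if err != nil {
		panic(err)
	}

	v, err := semver.NewVersion("1.2.4-beta.1")
	if err != nil {
		panic(err)
	}

	fmt.Println(plain.Check(v))   // false: ranges without a prerelease comparator skip prereleases
	fmt.Println(withPre.Check(v)) // true: the "-0" comparator opts prereleases in
}
```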
diff --git a/vendor/github.com/Masterminds/semver/v3/SECURITY.md b/vendor/github.com/Masterminds/semver/v3/SECURITY.md
new file mode 100644
index 0000000000000000000000000000000000000000..a30a66b1f7477ea00705bb62bb1023c93cc46213
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/SECURITY.md
@@ -0,0 +1,19 @@
+# Security Policy
+
+## Supported Versions
+
+The following versions of semver are currently supported:
+
+| Version | Supported          |
+| ------- | ------------------ |
+| 3.x     | :white_check_mark: |
+| 2.x     | :x:                |
+| 1.x     | :x:                |
+
+Fixes are only released for the latest minor version in the form of a patch release.
+
+## Reporting a Vulnerability
+
+You can privately disclose a vulnerability through GitHub's
+[private vulnerability reporting](https://github.com/Masterminds/semver/security/advisories)
+mechanism.
diff --git a/vendor/github.com/Masterminds/semver/collection.go b/vendor/github.com/Masterminds/semver/v3/collection.go
similarity index 100%
rename from vendor/github.com/Masterminds/semver/collection.go
rename to vendor/github.com/Masterminds/semver/v3/collection.go
diff --git a/vendor/github.com/Masterminds/semver/v3/constraints.go b/vendor/github.com/Masterminds/semver/v3/constraints.go
new file mode 100644
index 0000000000000000000000000000000000000000..8461c7ed9038883a1b3d9e796af7a4827ed4971a
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/constraints.go
@@ -0,0 +1,594 @@
+package semver
+
+import (
+	"bytes"
+	"errors"
+	"fmt"
+	"regexp"
+	"strings"
+)
+
+// Constraints is one or more constraint that a semantic version can be
+// checked against.
+type Constraints struct {
+	constraints [][]*constraint
+}
+
+// NewConstraint returns a Constraints instance that a Version instance can
+// be checked against. If there is a parse error it will be returned.
+func NewConstraint(c string) (*Constraints, error) {
+
+	// Rewrite - ranges into a comparison operation.
+	c = rewriteRange(c)
+
+	ors := strings.Split(c, "||")
+	or := make([][]*constraint, len(ors))
+	for k, v := range ors {
+
+		// TODO: Find a way to validate and fetch all the constraints in a simpler form
+
+		// Validate the segment
+		if !validConstraintRegex.MatchString(v) {
+			return nil, fmt.Errorf("improper constraint: %s", v)
+		}
+
+		cs := findConstraintRegex.FindAllString(v, -1)
+		if cs == nil {
+			cs = append(cs, v)
+		}
+		result := make([]*constraint, len(cs))
+		for i, s := range cs {
+			pc, err := parseConstraint(s)
+			if err != nil {
+				return nil, err
+			}
+
+			result[i] = pc
+		}
+		or[k] = result
+	}
+
+	o := &Constraints{constraints: or}
+	return o, nil
+}
+
+// Check tests if a version satisfies the constraints.
+func (cs Constraints) Check(v *Version) bool {
+	// TODO(mattfarina): For v4 of this library consolidate the Check and Validate
+	// functions as the underlying functions make that possible now.
+	// loop over the ORs and check the inner ANDs
+	for _, o := range cs.constraints {
+		joy := true
+		for _, c := range o {
+			if check, _ := c.check(v); !check {
+				joy = false
+				break
+			}
+		}
+
+		if joy {
+			return true
+		}
+	}
+
+	return false
+}
+
+// Validate checks if a version satisfies a constraint. If not a slice of
+// reasons for the failure are returned in addition to a bool.
+func (cs Constraints) Validate(v *Version) (bool, []error) {
+	// loop over the ORs and check the inner ANDs
+	var e []error
+
+	// Capture the prerelease message only once. When it happens the first time
+	// this var is marked
+	var prerelesase bool
+	for _, o := range cs.constraints {
+		joy := true
+		for _, c := range o {
+			// Before running the check handle the case where the version is
+			// a prerelease and the check is not searching for prereleases.
+			if c.con.pre == "" && v.pre != "" {
+				if !prerelesase {
+					em := fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+					e = append(e, em)
+					prerelesase = true
+				}
+				joy = false
+
+			} else {
+
+				if _, err := c.check(v); err != nil {
+					e = append(e, err)
+					joy = false
+				}
+			}
+		}
+
+		if joy {
+			return true, []error{}
+		}
+	}
+
+	return false, e
+}
+
+func (cs Constraints) String() string {
+	buf := make([]string, len(cs.constraints))
+	var tmp bytes.Buffer
+
+	for k, v := range cs.constraints {
+		tmp.Reset()
+		vlen := len(v)
+		for kk, c := range v {
+			tmp.WriteString(c.string())
+
+			// Space separate the AND conditions
+			if vlen > 1 && kk < vlen-1 {
+				tmp.WriteString(" ")
+			}
+		}
+		buf[k] = tmp.String()
+	}
+
+	return strings.Join(buf, " || ")
+}
+
+// UnmarshalText implements the encoding.TextUnmarshaler interface.
+func (cs *Constraints) UnmarshalText(text []byte) error {
+	temp, err := NewConstraint(string(text))
+	if err != nil {
+		return err
+	}
+
+	*cs = *temp
+
+	return nil
+}
+
+// MarshalText implements the encoding.TextMarshaler interface.
+func (cs Constraints) MarshalText() ([]byte, error) {
+	return []byte(cs.String()), nil
+}
+
+var constraintOps map[string]cfunc
+var constraintRegex *regexp.Regexp
+var constraintRangeRegex *regexp.Regexp
+
+// Used to find individual constraints within a multi-constraint string
+var findConstraintRegex *regexp.Regexp
+
+// Used to validate that a segment of ANDs is valid
+var validConstraintRegex *regexp.Regexp
+
+const cvRegex string = `v?([0-9|x|X|\*]+)(\.[0-9|x|X|\*]+)?(\.[0-9|x|X|\*]+)?` +
+	`(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` +
+	`(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?`
+
+func init() {
+	constraintOps = map[string]cfunc{
+		"":   constraintTildeOrEqual,
+		"=":  constraintTildeOrEqual,
+		"!=": constraintNotEqual,
+		">":  constraintGreaterThan,
+		"<":  constraintLessThan,
+		">=": constraintGreaterThanEqual,
+		"=>": constraintGreaterThanEqual,
+		"<=": constraintLessThanEqual,
+		"=<": constraintLessThanEqual,
+		"~":  constraintTilde,
+		"~>": constraintTilde,
+		"^":  constraintCaret,
+	}
+
+	ops := `=||!=|>|<|>=|=>|<=|=<|~|~>|\^`
+
+	constraintRegex = regexp.MustCompile(fmt.Sprintf(
+		`^\s*(%s)\s*(%s)\s*$`,
+		ops,
+		cvRegex))
+
+	constraintRangeRegex = regexp.MustCompile(fmt.Sprintf(
+		`\s*(%s)\s+-\s+(%s)\s*`,
+		cvRegex, cvRegex))
+
+	findConstraintRegex = regexp.MustCompile(fmt.Sprintf(
+		`(%s)\s*(%s)`,
+		ops,
+		cvRegex))
+
+	// The first time a constraint shows up will look slightly different from
+	// future times it shows up due to a leading space or comma in a given
+	// string.
+	validConstraintRegex = regexp.MustCompile(fmt.Sprintf(
+		`^(\s*(%s)\s*(%s)\s*)((?:\s+|,\s*)(%s)\s*(%s)\s*)*$`,
+		ops,
+		cvRegex,
+		ops,
+		cvRegex))
+}
+
+// An individual constraint
+type constraint struct {
+	// The version used in the constraint check. For example, if a constraint
+	// is '<= 2.0.0' then con holds a Version instance representing 2.0.0.
+	con *Version
+
+	// The original parsed version (e.g., 4.x from != 4.x)
+	orig string
+
+	// The original operator for the constraint
+	origfunc string
+
+	// When an x is used as part of the version (e.g., 1.x)
+	minorDirty bool
+	dirty      bool
+	patchDirty bool
+}
+
+// Check if a version meets the constraint
+func (c *constraint) check(v *Version) (bool, error) {
+	return constraintOps[c.origfunc](v, c)
+}
+
+// String prints an individual constraint into a string
+func (c *constraint) string() string {
+	return c.origfunc + c.orig
+}
+
+type cfunc func(v *Version, c *constraint) (bool, error)
+
+func parseConstraint(c string) (*constraint, error) {
+	if len(c) > 0 {
+		m := constraintRegex.FindStringSubmatch(c)
+		if m == nil {
+			return nil, fmt.Errorf("improper constraint: %s", c)
+		}
+
+		cs := &constraint{
+			orig:     m[2],
+			origfunc: m[1],
+		}
+
+		ver := m[2]
+		minorDirty := false
+		patchDirty := false
+		dirty := false
+		if isX(m[3]) || m[3] == "" {
+			ver = fmt.Sprintf("0.0.0%s", m[6])
+			dirty = true
+		} else if isX(strings.TrimPrefix(m[4], ".")) || m[4] == "" {
+			minorDirty = true
+			dirty = true
+			ver = fmt.Sprintf("%s.0.0%s", m[3], m[6])
+		} else if isX(strings.TrimPrefix(m[5], ".")) || m[5] == "" {
+			dirty = true
+			patchDirty = true
+			ver = fmt.Sprintf("%s%s.0%s", m[3], m[4], m[6])
+		}
+
+		con, err := NewVersion(ver)
+		if err != nil {
+
+			// The constraintRegex should catch any regex parsing errors. So,
+			// we should never get here.
+			return nil, errors.New("constraint Parser Error")
+		}
+
+		cs.con = con
+		cs.minorDirty = minorDirty
+		cs.patchDirty = patchDirty
+		cs.dirty = dirty
+
+		return cs, nil
+	}
+
+	// The rest is the special case where an empty string was passed in which
+	// is equivalent to * or >=0.0.0
+	con, err := StrictNewVersion("0.0.0")
+	if err != nil {
+
+		// The constraintRegex should catch any regex parsing errors. So,
+		// we should never get here.
+		return nil, errors.New("constraint Parser Error")
+	}
+
+	cs := &constraint{
+		con:        con,
+		orig:       c,
+		origfunc:   "",
+		minorDirty: false,
+		patchDirty: false,
+		dirty:      true,
+	}
+	return cs, nil
+}
+
+// Constraint functions
+func constraintNotEqual(v *Version, c *constraint) (bool, error) {
+	if c.dirty {
+
+		// If there is a pre-release on the version but the constraint isn't looking
+		// for them assume that pre-releases are not compatible. See issue 21 for
+		// more details.
+		if v.Prerelease() != "" && c.con.Prerelease() == "" {
+			return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+		}
+
+		if c.con.Major() != v.Major() {
+			return true, nil
+		}
+		if c.con.Minor() != v.Minor() && !c.minorDirty {
+			return true, nil
+		} else if c.minorDirty {
+			return false, fmt.Errorf("%s is equal to %s", v, c.orig)
+		} else if c.con.Patch() != v.Patch() && !c.patchDirty {
+			return true, nil
+		} else if c.patchDirty {
+			// Need to handle prereleases if present
+			if v.Prerelease() != "" || c.con.Prerelease() != "" {
+				eq := comparePrerelease(v.Prerelease(), c.con.Prerelease()) != 0
+				if eq {
+					return true, nil
+				}
+				return false, fmt.Errorf("%s is equal to %s", v, c.orig)
+			}
+			return false, fmt.Errorf("%s is equal to %s", v, c.orig)
+		}
+	}
+
+	eq := v.Equal(c.con)
+	if eq {
+		return false, fmt.Errorf("%s is equal to %s", v, c.orig)
+	}
+
+	return true, nil
+}
+
+func constraintGreaterThan(v *Version, c *constraint) (bool, error) {
+
+	// If there is a pre-release on the version but the constraint isn't looking
+	// for them assume that pre-releases are not compatible. See issue 21 for
+	// more details.
+	if v.Prerelease() != "" && c.con.Prerelease() == "" {
+		return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+	}
+
+	var eq bool
+
+	if !c.dirty {
+		eq = v.Compare(c.con) == 1
+		if eq {
+			return true, nil
+		}
+		return false, fmt.Errorf("%s is less than or equal to %s", v, c.orig)
+	}
+
+	if v.Major() > c.con.Major() {
+		return true, nil
+	} else if v.Major() < c.con.Major() {
+		return false, fmt.Errorf("%s is less than or equal to %s", v, c.orig)
+	} else if c.minorDirty {
+		// This is a range case such as >11. When the version is something like
+		// 11.1.0 it is not > 11. For that we would need 12 or higher
+		return false, fmt.Errorf("%s is less than or equal to %s", v, c.orig)
+	} else if c.patchDirty {
+		// This is for ranges such as >11.1. A version of 11.1.1 is not greater,
+		// while one of 11.2.1 is.
+		eq = v.Minor() > c.con.Minor()
+		if eq {
+			return true, nil
+		}
+		return false, fmt.Errorf("%s is less than or equal to %s", v, c.orig)
+	}
+
+	// If we have gotten here we are not comparing pre-releases and can use the
+	// Compare function to accomplish that.
+	eq = v.Compare(c.con) == 1
+	if eq {
+		return true, nil
+	}
+	return false, fmt.Errorf("%s is less than or equal to %s", v, c.orig)
+}
+
+func constraintLessThan(v *Version, c *constraint) (bool, error) {
+	// If there is a pre-release on the version but the constraint isn't looking
+	// for them assume that pre-releases are not compatible. See issue 21 for
+	// more details.
+	if v.Prerelease() != "" && c.con.Prerelease() == "" {
+		return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+	}
+
+	eq := v.Compare(c.con) < 0
+	if eq {
+		return true, nil
+	}
+	return false, fmt.Errorf("%s is greater than or equal to %s", v, c.orig)
+}
+
+func constraintGreaterThanEqual(v *Version, c *constraint) (bool, error) {
+
+	// If there is a pre-release on the version but the constraint isn't looking
+	// for them assume that pre-releases are not compatible. See issue 21 for
+	// more details.
+	if v.Prerelease() != "" && c.con.Prerelease() == "" {
+		return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+	}
+
+	eq := v.Compare(c.con) >= 0
+	if eq {
+		return true, nil
+	}
+	return false, fmt.Errorf("%s is less than %s", v, c.orig)
+}
+
+func constraintLessThanEqual(v *Version, c *constraint) (bool, error) {
+	// If there is a pre-release on the version but the constraint isn't looking
+	// for them assume that pre-releases are not compatible. See issue 21 for
+	// more details.
+	if v.Prerelease() != "" && c.con.Prerelease() == "" {
+		return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+	}
+
+	var eq bool
+
+	if !c.dirty {
+		eq = v.Compare(c.con) <= 0
+		if eq {
+			return true, nil
+		}
+		return false, fmt.Errorf("%s is greater than %s", v, c.orig)
+	}
+
+	if v.Major() > c.con.Major() {
+		return false, fmt.Errorf("%s is greater than %s", v, c.orig)
+	} else if v.Major() == c.con.Major() && v.Minor() > c.con.Minor() && !c.minorDirty {
+		return false, fmt.Errorf("%s is greater than %s", v, c.orig)
+	}
+
+	return true, nil
+}
+
+// ~*, ~>* --> >= 0.0.0 (any)
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0, <3.0.0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0, <2.1.0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0, <1.3.0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3, <1.3.0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0, <1.3.0
+func constraintTilde(v *Version, c *constraint) (bool, error) {
+	// If there is a pre-release on the version but the constraint isn't looking
+	// for them assume that pre-releases are not compatible. See issue 21 for
+	// more details.
+	if v.Prerelease() != "" && c.con.Prerelease() == "" {
+		return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+	}
+
+	if v.LessThan(c.con) {
+		return false, fmt.Errorf("%s is less than %s", v, c.orig)
+	}
+
+	// ~0.0.0 is a special case where all constraints are accepted. It's
+	// equivalent to >= 0.0.0.
+	if c.con.Major() == 0 && c.con.Minor() == 0 && c.con.Patch() == 0 &&
+		!c.minorDirty && !c.patchDirty {
+		return true, nil
+	}
+
+	if v.Major() != c.con.Major() {
+		return false, fmt.Errorf("%s does not have same major version as %s", v, c.orig)
+	}
+
+	if v.Minor() != c.con.Minor() && !c.minorDirty {
+		return false, fmt.Errorf("%s does not have same major and minor version as %s", v, c.orig)
+	}
+
+	return true, nil
+}
+
+// When there is a .x (dirty) status it automatically opts in to ~. Otherwise
+// it's a straight =
+func constraintTildeOrEqual(v *Version, c *constraint) (bool, error) {
+	// If there is a pre-release on the version but the constraint isn't looking
+	// for them assume that pre-releases are not compatible. See issue 21 for
+	// more details.
+	if v.Prerelease() != "" && c.con.Prerelease() == "" {
+		return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+	}
+
+	if c.dirty {
+		return constraintTilde(v, c)
+	}
+
+	eq := v.Equal(c.con)
+	if eq {
+		return true, nil
+	}
+
+	return false, fmt.Errorf("%s is not equal to %s", v, c.orig)
+}
+
+// ^*      -->  (any)
+// ^1.2.3  -->  >=1.2.3 <2.0.0
+// ^1.2    -->  >=1.2.0 <2.0.0
+// ^1      -->  >=1.0.0 <2.0.0
+// ^0.2.3  -->  >=0.2.3 <0.3.0
+// ^0.2    -->  >=0.2.0 <0.3.0
+// ^0.0.3  -->  >=0.0.3 <0.0.4
+// ^0.0    -->  >=0.0.0 <0.1.0
+// ^0      -->  >=0.0.0 <1.0.0
+func constraintCaret(v *Version, c *constraint) (bool, error) {
+	// If there is a pre-release on the version but the constraint isn't looking
+	// for them assume that pre-releases are not compatible. See issue 21 for
+	// more details.
+	if v.Prerelease() != "" && c.con.Prerelease() == "" {
+		return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+	}
+
+	// This less than handles prereleases
+	if v.LessThan(c.con) {
+		return false, fmt.Errorf("%s is less than %s", v, c.orig)
+	}
+
+	var eq bool
+
+	// ^ when the major > 0 is >=x.y.z < x+1
+	if c.con.Major() > 0 || c.minorDirty {
+
+		// ^ has to be within a major range for > 0. Everything less than was
+		// filtered out with the LessThan call above. This filters out those
+		// that are greater but not within the same major range.
+		eq = v.Major() == c.con.Major()
+		if eq {
+			return true, nil
+		}
+		return false, fmt.Errorf("%s does not have same major version as %s", v, c.orig)
+	}
+
+	// ^ when the major is 0 and minor > 0 is >=0.y.z < 0.y+1
+	if c.con.Major() == 0 && v.Major() > 0 {
+		return false, fmt.Errorf("%s does not have same major version as %s", v, c.orig)
+	}
+	// If the con Minor is > 0 it is not dirty
+	if c.con.Minor() > 0 || c.patchDirty {
+		eq = v.Minor() == c.con.Minor()
+		if eq {
+			return true, nil
+		}
+		return false, fmt.Errorf("%s does not have same minor version as %s. Expected minor versions to match when constraint major version is 0", v, c.orig)
+	}
+	// ^ when the minor is 0 and minor > 0 is =0.0.z
+	if c.con.Minor() == 0 && v.Minor() > 0 {
+		return false, fmt.Errorf("%s does not have same minor version as %s", v, c.orig)
+	}
+
+	// At this point the major is 0, the minor is 0, and neither the minor nor
+	// the patch is dirty, so the patch versions must be equal for the
+	// constraint to hold.
+	eq = c.con.Patch() == v.Patch()
+	if eq {
+		return true, nil
+	}
+	return false, fmt.Errorf("%s does not equal %s. Expect version and constraint to equal when major and minor versions are 0", v, c.orig)
+}
+
+func isX(x string) bool {
+	switch x {
+	case "x", "*", "X":
+		return true
+	default:
+		return false
+	}
+}
+
+func rewriteRange(i string) string {
+	m := constraintRangeRegex.FindAllStringSubmatch(i, -1)
+	if m == nil {
+		return i
+	}
+	o := i
+	for _, v := range m {
+		t := fmt.Sprintf(">= %s, <= %s ", v[1], v[11])
+		o = strings.Replace(o, v[0], t, 1)
+	}
+
+	return o
+}
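Relative to the removed v1 copy, `Constraints` gains `String`, `MarshalText`, and `UnmarshalText` above (the text marshaling listed under 3.2.0 in the CHANGELOG). A minimal, illustrative round trip, assuming this vendored v3 import path:

```go
package main

import (
	"fmt"

	"github.com/Masterminds/semver/v3"
)

func main() {
	c, err := semver.NewConstraint(">= 1.2, < 3.0.0 || >= 4.2.3")
	if err != nil {
		panic(err)
	}

	// MarshalText serialises via String(): AND terms are space separated and
	// OR groups are joined with " || ".
	text, err := c.MarshalText()
	if err != nil {
		panic(err)
	}
	fmt.Println(string(text))

	// UnmarshalText parses back into an existing value, so Constraints can be
	// embedded in structs decoded by packages honouring encoding.TextUnmarshaler.
	var roundTripped semver.Constraints
	if err := roundTripped.UnmarshalText(text); err != nil {
		panic(err)
	}
	fmt.Println(roundTripped.String())
}
```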
diff --git a/vendor/github.com/Masterminds/semver/v3/doc.go b/vendor/github.com/Masterminds/semver/v3/doc.go
new file mode 100644
index 0000000000000000000000000000000000000000..74f97caa57f80fe5274f5cdf8d7c9ffe40e179d2
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/doc.go
@@ -0,0 +1,184 @@
+/*
+Package semver provides the ability to work with Semantic Versions (http://semver.org) in Go.
+
+Specifically it provides the ability to:
+
+  - Parse semantic versions
+  - Sort semantic versions
+  - Check if a semantic version fits within a set of constraints
+  - Optionally work with a `v` prefix
+
+# Parsing Semantic Versions
+
+There are two functions that can parse semantic versions. The `StrictNewVersion`
+function only parses valid version 2 semantic versions as outlined in the
+specification. The `NewVersion` function attempts to coerce a version into a
+semantic version and parse it. For example, if there is a leading v or a version
+listed without all 3 parts (e.g. 1.2) it will attempt to coerce it into a valid
+semantic version (e.g., 1.2.0). In both cases a `Version` object is returned
+that can be sorted, compared, and used in constraints.
+
+When parsing a version an error is returned if there is an issue
+parsing the version. For example,
+
+	v, err := semver.NewVersion("1.2.3-beta.1+b345")
+
+The version object has methods to get the parts of the version, compare it to
+other versions, convert the version back into a string, and get the original
+string. For more details please see the documentation
+at https://godoc.org/github.com/Masterminds/semver.
+
+# Sorting Semantic Versions
+
+A set of versions can be sorted using the `sort` package from the standard library.
+For example,
+
+	    raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",}
+	    vs := make([]*semver.Version, len(raw))
+		for i, r := range raw {
+			v, err := semver.NewVersion(r)
+			if err != nil {
+				t.Errorf("Error parsing version: %s", err)
+			}
+
+			vs[i] = v
+		}
+
+		sort.Sort(semver.Collection(vs))
+
+# Checking Version Constraints and Comparing Versions
+
+There are two methods for comparing versions. One uses comparison methods on
+`Version` instances and the other uses Constraints. There are some important
+differences to note between these two methods of comparison.
+
+ 1. When two versions are compared using functions such as `Compare`, `LessThan`,
+    and others it will follow the specification and always include prereleases
+    within the comparison. It will provide an answer valid with the comparison
+    spec section at https://semver.org/#spec-item-11
+ 2. When constraint checking is used for checks or validation it will follow a
+    different set of rules that are common for ranges with tools like npm/js
+    and Rust/Cargo. This includes considering prereleases to be invalid if the
+    range does not include one. If you want to have it include pre-releases a
+    simple solution is to include `-0` in your range.
+ 3. Constraint ranges can have some complex rules including the shorthand use of
+    ~ and ^. For more details on those see the options below.
+
+There are differences between the two methods of checking versions because the
+comparison methods on `Version` follow the specification while comparison ranges
+are not part of the specification. Different packages and tools have taken it
+upon themselves to come up with range rules. This has resulted in differences.
+For example, npm/js and Cargo/Rust follow similar patterns while PHP has a
+different pattern for ^. The comparison features in this package follow the
+npm/js and Cargo/Rust lead because applications using it have followed similar
+patterns with their versions.
+
+Checking a version against version constraints is one of the most featureful
+parts of the package.
+
+	c, err := semver.NewConstraint(">= 1.2.3")
+	if err != nil {
+	    // Handle constraint not being parsable.
+	}
+
+	v, err := semver.NewVersion("1.3")
+	if err != nil {
+	    // Handle version not being parsable.
+	}
+	// Check if the version meets the constraints. The a variable will be true.
+	a := c.Check(v)
+
+# Basic Comparisons
+
+There are two elements to the comparisons. First, a comparison string is a list
+of comma or space separated AND comparisons. These are then separated by || (OR)
+comparisons. For example, `">= 1.2 < 3.0.0 || >= 4.2.3"` is looking for a
+comparison that's greater than or equal to 1.2 and less than 3.0.0 or is
+greater than or equal to 4.2.3. This can also be written as
+`">= 1.2, < 3.0.0 || >= 4.2.3"`
+
+The basic comparisons are:
+
+  - `=`: equal (aliased to no operator)
+  - `!=`: not equal
+  - `>`: greater than
+  - `<`: less than
+  - `>=`: greater than or equal to
+  - `<=`: less than or equal to
+
+# Hyphen Range Comparisons
+
+There are multiple methods to handle ranges and the first is hyphens ranges.
+These look like:
+
+  - `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5`
+  - `2.3.4 - 4.5` which is equivalent to `>= 2.3.4 <= 4.5`
+
+# Wildcards In Comparisons
+
+The `x`, `X`, and `*` characters can be used as a wildcard character. This works
+for all comparison operators. When used on the `=` operator it falls
+back to the tilde operation. For example,
+
+  - `1.2.x` is equivalent to `>= 1.2.0 < 1.3.0`
+  - `>= 1.2.x` is equivalent to `>= 1.2.0`
+  - `<= 2.x` is equivalent to `< 3`
+  - `*` is equivalent to `>= 0.0.0`
+
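+For instance, a small sketch of a wildcard in use (error handling omitted):
+
+	c, _ := semver.NewConstraint("1.2.x")
+	_ = c.Check(semver.MustParse("1.2.9")) // true, any patch release of 1.2
+	_ = c.Check(semver.MustParse("1.3.0")) // false, outside the 1.2 series
+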
+# Tilde Range Comparisons (Patch)
+
+The tilde (`~`) comparison operator is for patch level ranges when a minor
+version is specified and major level changes when the minor number is missing.
+For example,
+
+  - `~1.2.3` is equivalent to `>= 1.2.3 < 1.3.0`
+  - `~1` is equivalent to `>= 1, < 2`
+  - `~2.3` is equivalent to `>= 2.3 < 2.4`
+  - `~1.2.x` is equivalent to `>= 1.2.0 < 1.3.0`
+  - `~1.x` is equivalent to `>= 1 < 2`
+
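+A short sketch of the tilde operator in use (error handling omitted):
+
+	c, _ := semver.NewConstraint("~1.2.3")
+	_ = c.Check(semver.MustParse("1.2.9")) // true, patch level change
+	_ = c.Check(semver.MustParse("1.3.0")) // false, minor level change
+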
+# Caret Range Comparisons (Major)
+
+The caret (`^`) comparison operator is for major level changes once a stable
+(1.0.0) release has occurred. Prior to a 1.0.0 release the minor version acts
+as the API stability level. This is useful when comparing API versions, as a
+major change is API breaking. For example,
+
+  - `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0`
+  - `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0`
+  - `^2.3` is equivalent to `>= 2.3, < 3`
+  - `^2.x` is equivalent to `>= 2.0.0, < 3`
+  - `^0.2.3` is equivalent to `>=0.2.3 <0.3.0`
+  - `^0.2` is equivalent to `>=0.2.0 <0.3.0`
+  - `^0.0.3` is equivalent to `>=0.0.3 <0.0.4`
+  - `^0.0` is equivalent to `>=0.0.0 <0.1.0`
+  - `^0` is equivalent to `>=0.0.0 <1.0.0`
+
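+A short sketch of the caret operator in use (error handling omitted):
+
+	c, _ := semver.NewConstraint("^1.2.3")
+	_ = c.Check(semver.MustParse("1.9.0")) // true, minor changes are allowed
+	_ = c.Check(semver.MustParse("2.0.0")) // false, major change is breaking
+
+	c, _ = semver.NewConstraint("^0.2.3")
+	_ = c.Check(semver.MustParse("0.2.9")) // true
+	_ = c.Check(semver.MustParse("0.3.0")) // false, pre-1.0 minor acts as major
+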
+# Validation
+
+In addition to testing a version against a constraint, a version can be validated
+against a constraint. When validation fails, a slice of errors explaining why the
+version didn't meet the constraint is returned. For example,
+
+	c, err := semver.NewConstraint("<= 1.2.3, >= 1.4")
+	if err != nil {
+	    // Handle constraint not being parseable.
+	}
+
+	v, err := semver.NewVersion("1.3")
+	if err != nil {
+	    // Handle version not being parseable.
+	}
+
+	// Validate a version against a constraint.
+	a, msgs := c.Validate(v)
+	// a is false
+	for _, m := range msgs {
+	    fmt.Println(m)
+
+	    // Loops over the errors which would read
+	    // "1.3 is greater than 1.2.3"
+	    // "1.3 is less than 1.4"
+	}
+*/
+package semver
diff --git a/vendor/github.com/Masterminds/semver/version.go b/vendor/github.com/Masterminds/semver/v3/version.go
similarity index 55%
rename from vendor/github.com/Masterminds/semver/version.go
rename to vendor/github.com/Masterminds/semver/v3/version.go
index 400d4f93412351f88ba9da19e7052c9a2db89341..7c4bed33474cf59868432057621158364489a239 100644
--- a/vendor/github.com/Masterminds/semver/version.go
+++ b/vendor/github.com/Masterminds/semver/v3/version.go
@@ -2,6 +2,7 @@ package semver
 
 import (
 	"bytes"
+	"database/sql/driver"
 	"encoding/json"
 	"errors"
 	"fmt"
@@ -13,13 +14,23 @@ import (
 // The compiled version of the regex created at init() is cached here so it
 // only needs to be created once.
 var versionRegex *regexp.Regexp
-var validPrereleaseRegex *regexp.Regexp
 
 var (
 	// ErrInvalidSemVer is returned when a version is found to be invalid while
 	// being parsed.
 	ErrInvalidSemVer = errors.New("Invalid Semantic Version")
 
+	// ErrEmptyString is returned when an empty string is passed in for parsing.
+	ErrEmptyString = errors.New("Version string empty")
+
+	// ErrInvalidCharacters is returned when invalid characters are found as
+	// part of a version
+	ErrInvalidCharacters = errors.New("Invalid characters in version")
+
+	// ErrSegmentStartsZero is returned when a version segment starts with 0.
+	// This is invalid in SemVer.
+	ErrSegmentStartsZero = errors.New("Version segment starts with 0")
+
 	// ErrInvalidMetadata is returned when the metadata is an invalid format
 	ErrInvalidMetadata = errors.New("Invalid Metadata string")
 
@@ -27,30 +38,123 @@ var (
 	ErrInvalidPrerelease = errors.New("Invalid Prerelease string")
 )
 
-// SemVerRegex is the regular expression used to parse a semantic version.
-const SemVerRegex string = `v?([0-9]+)(\.[0-9]+)?(\.[0-9]+)?` +
+// semVerRegex is the regular expression used to parse a semantic version.
+const semVerRegex string = `v?([0-9]+)(\.[0-9]+)?(\.[0-9]+)?` +
 	`(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` +
 	`(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?`
 
-// ValidPrerelease is the regular expression which validates
-// both prerelease and metadata values.
-const ValidPrerelease string = `^([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*)$`
-
 // Version represents a single semantic version.
 type Version struct {
-	major, minor, patch int64
+	major, minor, patch uint64
 	pre                 string
 	metadata            string
 	original            string
 }
 
 func init() {
-	versionRegex = regexp.MustCompile("^" + SemVerRegex + "$")
-	validPrereleaseRegex = regexp.MustCompile(ValidPrerelease)
+	versionRegex = regexp.MustCompile("^" + semVerRegex + "$")
+}
+
+const (
+	num     string = "0123456789"
+	allowed string = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-" + num
+)
+
+// StrictNewVersion parses a given version and returns an instance of Version or
+// an error if unable to parse the version. Only parses valid semantic versions.
+// Performs checking that can find errors within the version.
+// If you want to coerce a version such as 1 or 1.2 and parse it as the 1.x
+// releases of semver did, use the NewVersion() function.
+func StrictNewVersion(v string) (*Version, error) {
+	// Parsing here does not use RegEx in order to increase performance and reduce
+	// allocations.
+
+	if len(v) == 0 {
+		return nil, ErrEmptyString
+	}
+
+	// Split the parts into [0]major, [1]minor, and [2]patch,prerelease,build
+	parts := strings.SplitN(v, ".", 3)
+	if len(parts) != 3 {
+		return nil, ErrInvalidSemVer
+	}
+
+	sv := &Version{
+		original: v,
+	}
+
+	// check for prerelease or build metadata
+	var extra []string
+	if strings.ContainsAny(parts[2], "-+") {
+		// Start with the build metadata first as it needs to be on the right
+		extra = strings.SplitN(parts[2], "+", 2)
+		if len(extra) > 1 {
+			// build metadata found
+			sv.metadata = extra[1]
+			parts[2] = extra[0]
+		}
+
+		extra = strings.SplitN(parts[2], "-", 2)
+		if len(extra) > 1 {
+			// prerelease found
+			sv.pre = extra[1]
+			parts[2] = extra[0]
+		}
+	}
+
+	// Validate the number segments are valid. This includes only having positive
+	// numbers and no leading 0's.
+	for _, p := range parts {
+		if !containsOnly(p, num) {
+			return nil, ErrInvalidCharacters
+		}
+
+		if len(p) > 1 && p[0] == '0' {
+			return nil, ErrSegmentStartsZero
+		}
+	}
+
+	// Extract the major, minor, and patch elements onto the returned Version
+	var err error
+	sv.major, err = strconv.ParseUint(parts[0], 10, 64)
+	if err != nil {
+		return nil, err
+	}
+
+	sv.minor, err = strconv.ParseUint(parts[1], 10, 64)
+	if err != nil {
+		return nil, err
+	}
+
+	sv.patch, err = strconv.ParseUint(parts[2], 10, 64)
+	if err != nil {
+		return nil, err
+	}
+
+	// No prerelease or build metadata found so returning now as a fastpath.
+	if sv.pre == "" && sv.metadata == "" {
+		return sv, nil
+	}
+
+	if sv.pre != "" {
+		if err = validatePrerelease(sv.pre); err != nil {
+			return nil, err
+		}
+	}
+
+	if sv.metadata != "" {
+		if err = validateMetadata(sv.metadata); err != nil {
+			return nil, err
+		}
+	}
+
+	return sv, nil
 }
 
 // NewVersion parses a given version and returns an instance of Version or
-// an error if unable to parse the version.
+// an error if unable to parse the version. If the version is SemVer-ish it
+// attempts to convert it to SemVer. If you want to validate it was a strict
+// semantic version at parse time see StrictNewVersion().
 func NewVersion(v string) (*Version, error) {
 	m := versionRegex.FindStringSubmatch(v)
 	if m == nil {
@@ -63,36 +167,65 @@ func NewVersion(v string) (*Version, error) {
 		original: v,
 	}
 
-	var temp int64
-	temp, err := strconv.ParseInt(m[1], 10, 64)
+	var err error
+	sv.major, err = strconv.ParseUint(m[1], 10, 64)
 	if err != nil {
 		return nil, fmt.Errorf("Error parsing version segment: %s", err)
 	}
-	sv.major = temp
 
 	if m[2] != "" {
-		temp, err = strconv.ParseInt(strings.TrimPrefix(m[2], "."), 10, 64)
+		sv.minor, err = strconv.ParseUint(strings.TrimPrefix(m[2], "."), 10, 64)
 		if err != nil {
 			return nil, fmt.Errorf("Error parsing version segment: %s", err)
 		}
-		sv.minor = temp
 	} else {
 		sv.minor = 0
 	}
 
 	if m[3] != "" {
-		temp, err = strconv.ParseInt(strings.TrimPrefix(m[3], "."), 10, 64)
+		sv.patch, err = strconv.ParseUint(strings.TrimPrefix(m[3], "."), 10, 64)
 		if err != nil {
 			return nil, fmt.Errorf("Error parsing version segment: %s", err)
 		}
-		sv.patch = temp
 	} else {
 		sv.patch = 0
 	}
 
+	// Perform some basic due diligence on the extra parts to ensure they are
+	// valid.
+
+	if sv.pre != "" {
+		if err = validatePrerelease(sv.pre); err != nil {
+			return nil, err
+		}
+	}
+
+	if sv.metadata != "" {
+		if err = validateMetadata(sv.metadata); err != nil {
+			return nil, err
+		}
+	}
+
 	return sv, nil
 }
 
+// New creates a new instance of Version with each of the parts passed in as
+// arguments instead of parsing a version string.
+func New(major, minor, patch uint64, pre, metadata string) *Version {
+	v := Version{
+		major:    major,
+		minor:    minor,
+		patch:    patch,
+		pre:      pre,
+		metadata: metadata,
+		original: "",
+	}
+
+	v.original = v.String()
+
+	return &v
+}
+
 // MustParse parses a given version and panics on error.
 func MustParse(v string) *Version {
 	sv, err := NewVersion(v)
@@ -107,7 +240,7 @@ func MustParse(v string) *Version {
 // See the Original() method to retrieve the original value. Semantic Versions
 // don't contain a leading v per the spec. Instead it's optional on
 // implementation.
-func (v *Version) String() string {
+func (v Version) String() string {
 	var buf bytes.Buffer
 
 	fmt.Fprintf(&buf, "%d.%d.%d", v.major, v.minor, v.patch)
@@ -127,33 +260,32 @@ func (v *Version) Original() string {
 }
 
 // Major returns the major version.
-func (v *Version) Major() int64 {
+func (v Version) Major() uint64 {
 	return v.major
 }
 
 // Minor returns the minor version.
-func (v *Version) Minor() int64 {
+func (v Version) Minor() uint64 {
 	return v.minor
 }
 
 // Patch returns the patch version.
-func (v *Version) Patch() int64 {
+func (v Version) Patch() uint64 {
 	return v.patch
 }
 
 // Prerelease returns the pre-release version.
-func (v *Version) Prerelease() string {
+func (v Version) Prerelease() string {
 	return v.pre
 }
 
 // Metadata returns the metadata on the version.
-func (v *Version) Metadata() string {
+func (v Version) Metadata() string {
 	return v.metadata
 }
 
 // originalVPrefix returns the original 'v' prefix if any.
-func (v *Version) originalVPrefix() string {
-
+func (v Version) originalVPrefix() string {
 	// Note, only lowercase v is supported as a prefix by the parser.
 	if v.original != "" && v.original[:1] == "v" {
 		return v.original[:1]
@@ -165,7 +297,7 @@ func (v *Version) originalVPrefix() string {
 // If the current version does not have prerelease/metadata information,
 // it unsets metadata and prerelease values, increments patch number.
 // If the current version has any of prerelease or metadata information,
-// it unsets both values and keeps curent patch value
+// it unsets both values and keeps current patch value
 func (v Version) IncPatch() Version {
 	vNext := v
 	// according to http://semver.org/#spec-item-9
@@ -217,11 +349,13 @@ func (v Version) IncMajor() Version {
 }
 
 // SetPrerelease defines the prerelease value.
-// Value must not include the required 'hypen' prefix.
+// Value must not include the required 'hyphen' prefix.
 func (v Version) SetPrerelease(prerelease string) (Version, error) {
 	vNext := v
-	if len(prerelease) > 0 && !validPrereleaseRegex.MatchString(prerelease) {
-		return vNext, ErrInvalidPrerelease
+	if len(prerelease) > 0 {
+		if err := validatePrerelease(prerelease); err != nil {
+			return vNext, err
+		}
 	}
 	vNext.pre = prerelease
 	vNext.original = v.originalVPrefix() + "" + vNext.String()
@@ -232,8 +366,10 @@ func (v Version) SetPrerelease(prerelease string) (Version, error) {
 // Value must not include the required 'plus' prefix.
 func (v Version) SetMetadata(metadata string) (Version, error) {
 	vNext := v
-	if len(metadata) > 0 && !validPrereleaseRegex.MatchString(metadata) {
-		return vNext, ErrInvalidMetadata
+	if len(metadata) > 0 {
+		if err := validateMetadata(metadata); err != nil {
+			return vNext, err
+		}
 	}
 	vNext.metadata = metadata
 	vNext.original = v.originalVPrefix() + "" + vNext.String()
@@ -261,7 +397,9 @@ func (v *Version) Equal(o *Version) bool {
 // the version smaller, equal, or larger than the other version.
 //
 // Versions are compared by X.Y.Z. Build metadata is ignored. Prerelease is
-// lower than the version without a prerelease.
+// lower than the version without a prerelease. Compare always takes into account
+// prereleases. If you want to work with ranges using typical range syntaxes that
+// skip prereleases if the range is not looking for them use constraints.
 func (v *Version) Compare(o *Version) int {
 	// Compare the major, minor, and patch version for differences. If a
 	// difference is found return the comparison.
@@ -308,16 +446,54 @@ func (v *Version) UnmarshalJSON(b []byte) error {
 	v.pre = temp.pre
 	v.metadata = temp.metadata
 	v.original = temp.original
-	temp = nil
 	return nil
 }
 
 // MarshalJSON implements JSON.Marshaler interface.
-func (v *Version) MarshalJSON() ([]byte, error) {
+func (v Version) MarshalJSON() ([]byte, error) {
 	return json.Marshal(v.String())
 }
 
-func compareSegment(v, o int64) int {
+// UnmarshalText implements the encoding.TextUnmarshaler interface.
+func (v *Version) UnmarshalText(text []byte) error {
+	temp, err := NewVersion(string(text))
+	if err != nil {
+		return err
+	}
+
+	*v = *temp
+
+	return nil
+}
+
+// MarshalText implements the encoding.TextMarshaler interface.
+func (v Version) MarshalText() ([]byte, error) {
+	return []byte(v.String()), nil
+}
+
+// Scan implements the sql.Scanner interface.
+func (v *Version) Scan(value interface{}) error {
+	var s string
+	s, _ = value.(string)
+	temp, err := NewVersion(s)
+	if err != nil {
+		return err
+	}
+	v.major = temp.major
+	v.minor = temp.minor
+	v.patch = temp.patch
+	v.pre = temp.pre
+	v.metadata = temp.metadata
+	v.original = temp.original
+	return nil
+}
+
+// Value implements the driver.Valuer interface.
+func (v Version) Value() (driver.Value, error) {
+	return v.String(), nil
+}
+
+func compareSegment(v, o uint64) int {
 	if v < o {
 		return -1
 	}
@@ -329,7 +505,6 @@ func compareSegment(v, o int64) int {
 }
 
 func comparePrerelease(v, o string) int {
-
 	// split the prerelease versions by their part. The separator, per the spec,
 	// is a .
 	sparts := strings.Split(v, ".")
@@ -421,5 +596,44 @@ func comparePrePart(s, o string) int {
 		return 1
 	}
 	return -1
+}
+
+// containsOnly is like strings.ContainsAny but checks that s contains only
+// characters from comp instead of any.
+func containsOnly(s string, comp string) bool {
+	return strings.IndexFunc(s, func(r rune) bool {
+		return !strings.ContainsRune(comp, r)
+	}) == -1
+}
+
+// From the spec, "Identifiers MUST comprise only
+// ASCII alphanumerics and hyphen [0-9A-Za-z-]. Identifiers MUST NOT be empty.
+// Numeric identifiers MUST NOT include leading zeroes.". These segments can
+// be dot separated.
+func validatePrerelease(p string) error {
+	eparts := strings.Split(p, ".")
+	for _, p := range eparts {
+		if containsOnly(p, num) {
+			if len(p) > 1 && p[0] == '0' {
+				return ErrSegmentStartsZero
+			}
+		} else if !containsOnly(p, allowed) {
+			return ErrInvalidPrerelease
+		}
+	}
 
+	return nil
+}
+
+// From the spec, "Build metadata MAY be denoted by
+// appending a plus sign and a series of dot separated identifiers immediately
+// following the patch or pre-release version. Identifiers MUST comprise only
+// ASCII alphanumerics and hyphen [0-9A-Za-z-]. Identifiers MUST NOT be empty."
+func validateMetadata(m string) error {
+	eparts := strings.Split(m, ".")
+	for _, p := range eparts {
+		if !containsOnly(p, allowed) {
+			return ErrInvalidMetadata
+		}
+	}
+	return nil
 }
diff --git a/vendor/github.com/Masterminds/semver/version_fuzz.go b/vendor/github.com/Masterminds/semver/version_fuzz.go
deleted file mode 100644
index b42bcd62b954d25d9d628a7a2d21d8a84989902c..0000000000000000000000000000000000000000
--- a/vendor/github.com/Masterminds/semver/version_fuzz.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// +build gofuzz
-
-package semver
-
-func Fuzz(data []byte) int {
-	if _, err := NewVersion(string(data)); err != nil {
-		return 0
-	}
-	return 1
-}
diff --git a/vendor/github.com/OpenPeeDeeP/depguard/v2/.gitignore b/vendor/github.com/OpenPeeDeeP/depguard/v2/.gitignore
index 97cca67c6785b0968bcc0b011d87c53387d1f6ab..e189bdb220458febebcb230346717cef5db8866e 100644
--- a/vendor/github.com/OpenPeeDeeP/depguard/v2/.gitignore
+++ b/vendor/github.com/OpenPeeDeeP/depguard/v2/.gitignore
@@ -12,3 +12,4 @@
 *.out
 
 .idea
+.null-ls*.go
diff --git a/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md b/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md
index 3de3f631774809f6577731a7cd0dfe3fac1e24f1..2ccfa22c5904a4a14e07eb445ef4f2b39685e1a4 100644
--- a/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md
+++ b/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md
@@ -7,13 +7,12 @@ allow specific packages within a repository.
 ## Install
 
 ```bash
-go get github.com/OpenPeeDeeP/depguard/v2
+go install github.com/OpenPeeDeeP/depguard@latest
 ```
 
 ## Config
 
-The Depguard binary looks for a file named `^\.?depguard\.(yaml|yml|json|toml)$` in the current
-current working directory. Examples include (`.depguard.yml` or `depguard.toml`).
+The Depguard binary looks for a file named `^\.?depguard\.(yaml|yml|json|toml)$` in the current working directory. Examples include (`.depguard.yml` or `depguard.toml`).
 
 The following is an example configuration file.
 
@@ -24,6 +23,7 @@ The following is an example configuration file.
       "$all",
       "!$test"
     ],
+    "listMode": "Strict",
     "allow": [
       "$gostd",
       "github.com/OpenPeeDeeP"
@@ -36,6 +36,7 @@ The following is an example configuration file.
     "files": [
       "$test"
     ],
+    "listMode": "Lax",
     "deny": {
       "github.com/stretchr/testify": "Please use standard library for tests"
     }
@@ -48,6 +49,7 @@ the linter's output.
 - `files` - list of file globs that will match this list of settings to compare against
 - `allow` - list of allowed packages
 - `deny` - map of packages that are not allowed where the value is a suggestion
+- `listMode` - the mode to use for package matching
 
 Files are matched using [Globs](https://github.com/gobwas/glob). If the files 
 list is empty, then all files will match that list. Prefixing a file
@@ -67,6 +69,21 @@ A Prefix List just means that a package will match a value, if the value is a
 prefix of the package. Example `github.com/OpenPeeDeeP/depguard` package will match
 a value of `github.com/OpenPeeDeeP` but won't match `github.com/OpenPeeDeeP/depguard/v2`.
 
+ListMode is used to determine the package matching priority. There are three
+different modes: Original, Strict, and Lax.
+
+Original is the way the package originally behaved. It is not recommended and is
+only kept for backwards compatibility.
+
+Strict means everything is denied unless it is explicitly allowed.
+
+Lax means everything is allowed unless it is explicitly denied.
+
+There are cases where a package can be matched in both the allow and deny lists.
+You may allow a subpackage but deny the root or vice versa. The `settings_tests.go` file
+has many scenarios listed out under `TestListImportAllowed`. These tests will stay
+up to date as features are added.
+
 ### Variables
 
 There are variable replacements for each type of list (file or package). This is
@@ -74,7 +91,7 @@ to reduce repetition and tedious behaviors.
 
 #### File Variables
 
-> you can still use and exclamation mark `!` in front of a variable to say not to 
+> you can still use an exclamation mark `!` in front of a variable to say not to 
 use it. Example `!$test` will match any file that is not a go test file.
 
 - `$all` - matches all go files
diff --git a/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go b/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go
index 440f329850a778082cae535b3234cbc0819c1457..311cacc88901341b42938a0ce25110da8ba07aa6 100644
--- a/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go
+++ b/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go
@@ -11,12 +11,22 @@ import (
 )
 
 type List struct {
-	Files []string          `json:"files" yaml:"files" toml:"files" mapstructure:"files"`
-	Allow []string          `json:"allow" yaml:"allow" toml:"allow" mapstructure:"allow"`
-	Deny  map[string]string `json:"deny" yaml:"deny" toml:"deny" mapstructure:"deny"`
+	ListMode string            `json:"listMode" yaml:"listMode" toml:"listMode" mapstructure:"listMode"`
+	Files    []string          `json:"files" yaml:"files" toml:"files" mapstructure:"files"`
+	Allow    []string          `json:"allow" yaml:"allow" toml:"allow" mapstructure:"allow"`
+	Deny     map[string]string `json:"deny" yaml:"deny" toml:"deny" mapstructure:"deny"`
 }
 
+type listMode int
+
+const (
+	listModeOriginal listMode = iota
+	listModeStrict
+	listModeLax
+)
+
 type list struct {
+	listMode    listMode
 	name        string
 	files       []glob.Glob
 	negFiles    []glob.Glob
@@ -33,6 +43,20 @@ func (l *List) compile() (*list, error) {
 	var errs utils.MultiError
 	var err error
 
+	// Determine List Mode
+	switch strings.ToLower(l.ListMode) {
+	case "":
+		li.listMode = listModeOriginal
+	case "original":
+		li.listMode = listModeOriginal
+	case "strict":
+		li.listMode = listModeStrict
+	case "lax":
+		li.listMode = listModeLax
+	default:
+		errs = append(errs, fmt.Errorf("%s is not a known list mode", l.ListMode))
+	}
+
 	// Compile Files
 	for _, f := range l.Files {
 		var negate bool
@@ -113,16 +137,25 @@ func (l *list) fileMatch(fileName string) bool {
 }
 
 func (l *list) importAllowed(imp string) (bool, string) {
-	inAllowed := len(l.allow) == 0
-	if !inAllowed {
-		inAllowed, _ = strInPrefixList(imp, l.allow)
+	inAllowed, aIdx := strInPrefixList(imp, l.allow)
+	inDenied, dIdx := strInPrefixList(imp, l.deny)
+	var allowed bool
+	switch l.listMode {
+	case listModeOriginal:
+		inAllowed = len(l.allow) == 0 || inAllowed
+		allowed = inAllowed && !inDenied
+	case listModeStrict:
+		allowed = inAllowed && (!inDenied || len(l.allow[aIdx]) > len(l.deny[dIdx]))
+	case listModeLax:
+		allowed = !inDenied || (inAllowed && len(l.allow[aIdx]) > len(l.deny[dIdx]))
+	default:
+		allowed = false
 	}
-	inDenied, suggIdx := strInPrefixList(imp, l.deny)
 	sugg := ""
-	if inDenied && suggIdx != -1 {
-		sugg = l.suggestions[suggIdx]
+	if !allowed && inDenied && dIdx != -1 {
+		sugg = l.suggestions[dIdx]
 	}
-	return inAllowed && !inDenied, sugg
+	return allowed, sugg
 }
 
 type LinterSettings map[string]*List
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/.goreleaser.yml b/vendor/github.com/alecthomas/go-check-sumtype/.goreleaser.yml
new file mode 100644
index 0000000000000000000000000000000000000000..33bd03d060a7118e439c548ae9380a421be1d217
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/.goreleaser.yml
@@ -0,0 +1,32 @@
+project_name: go-check-sumtype
+release:
+  github:
+    owner: alecthomas
+    name: go-check-sumtype
+env:
+  - CGO_ENABLED=0
+builds:
+- goos:
+    - linux
+    - darwin
+    - windows
+  goarch:
+    - arm64
+    - amd64
+    - "386"
+  goarm:
+    - "6"
+  main: ./cmd/go-check-sumtype
+  binary: go-check-sumtype
+archives:
+  -
+    format: tar.gz
+    name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{
+    .Arm }}{{ end }}'
+    files:
+      - COPYING
+      - README*
+snapshot:
+  name_template: SNAPSHOT-{{ .Commit }}
+checksum:
+  name_template: '{{ .ProjectName }}-{{ .Version }}-checksums.txt'
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/COPYING b/vendor/github.com/alecthomas/go-check-sumtype/COPYING
new file mode 100644
index 0000000000000000000000000000000000000000..bb9c20a094e41b7632d63bcff20c0b4b95e80777
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/COPYING
@@ -0,0 +1,3 @@
+This project is dual-licensed under the Unlicense and MIT licenses.
+
+You may use this code under the terms of either license.
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/LICENSE-MIT b/vendor/github.com/alecthomas/go-check-sumtype/LICENSE-MIT
new file mode 100644
index 0000000000000000000000000000000000000000..3b0a5dc09c1e16357459ddc9182a50f360f3cdba
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/LICENSE-MIT
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Andrew Gallant
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/README.md b/vendor/github.com/alecthomas/go-check-sumtype/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..36614ef4006c805f2178db4a8461e30a3802ab10
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/README.md
@@ -0,0 +1,120 @@
+**Note: This is a fork of the great project [go-sumtype](https://github.com/BurntSushi/go-sumtype) by BurntSushi.**
+**The original seems largely unmaintained, and the changes in this fork are backwards incompatible.**
+
+# go-check-sumtype [![CI](https://github.com/alecthomas/go-check-sumtype/actions/workflows/ci.yml/badge.svg)](https://github.com/alecthomas/go-check-sumtype/actions/workflows/ci.yml)
+A simple utility for running exhaustiveness checks on type switch statements.
+Exhaustiveness checks are only run on interfaces that are declared to be
+"sum types."
+
+Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org).
+
+This work was inspired by our code at
+[Diffeo](https://diffeo.com).
+
+## Installation
+
+```
+$ go get github.com/alecthomas/go-check-sumtype
+```
+
+For usage info, just run the command:
+
+```
+$ go-check-sumtype
+```
+
+Typical usage might look like this:
+
+```
+$ go-check-sumtype $(go list ./... | grep -v vendor)
+```
+
+## Usage
+
+`go-check-sumtype` takes a list of Go package paths or files and looks for sum type
+declarations in each package/file provided. Exhaustiveness checks are then
+performed for each use of a declared sum type in a type switch statement.
+Namely, `go-check-sumtype` will report an error for any type switch statement that
+either lacks a `default` clause or does not account for all possible variants.
+
+Declarations are provided in comments like so:
+
+```
+//sumtype:decl
+type MySumType interface { ... }
+```
+
+`MySumType` must be *sealed*. That is, part of its interface definition
+contains an unexported method.
+
+`go-check-sumtype` will produce an error if any of the above is not true.
+
+For valid declarations, `go-check-sumtype` will look for all occurrences in which a
+value of type `MySumType` participates in a type switch statement. In those
+occurrences, it will attempt to detect whether the type switch is exhaustive
+or not. If it's not, `go-check-sumtype` will report an error. For example, running
+`go-check-sumtype` on this source file:
+
+```go
+package main
+
+//sumtype:decl
+type MySumType interface {
+        sealed()
+}
+
+type VariantA struct{}
+
+func (*VariantA) sealed() {}
+
+type VariantB struct{}
+
+func (*VariantB) sealed() {}
+
+func main() {
+        switch MySumType(nil).(type) {
+        case *VariantA:
+        }
+}
+```
+
+produces the following:
+
+```
+$ go-check-sumtype mysumtype.go
+mysumtype.go:18:2: exhaustiveness check failed for sum type 'MySumType': missing cases for VariantB
+```
+
+Adding either a `default` clause or a clause to handle `*VariantB` will cause
+exhaustive checks to pass.
+
+As a special case, if the type switch statement contains a `default` clause
+that always panics, then exhaustiveness checks are still performed.
+
+## Details and motivation
+
+Sum types are otherwise known as discriminated unions. That is, a sum type is
+a finite set of disjoint values. In type systems that support sum types, the
+language will guarantee that if one has a sum type `T`, then its value must
+be one of its variants.
+
+Go's type system does not support sum types. A typical proxy for representing
+sum types in Go is to use an interface with an unexported method and define
+each variant of the sum type in the same package to satisfy said interface.
+This guarantees that the set of types that satisfy the interface is closed
+at compile time. Performing case analysis on these types is then done with
+a type switch statement, e.g., `switch x.(type) { ... }`. Each clause of the
+type switch corresponds to a *variant* of the sum type. The downside of this
+approach is that Go's type system is not aware of the set of variants, so it
+cannot tell you whether case analysis over a sum type is complete or not.
+
+The `go-check-sumtype` command recognizes this pattern, but it needs a small amount
+of help to recognize which interfaces should be treated as sum types, which
+is why the `//sumtype:decl` annotation is required. `go-check-sumtype` will
+figure out all of the variants of a sum type by finding the set of types
+defined in the same package that satisfy the interface specified by the
+declaration.
+
+The `go-check-sumtype` command will prove its worth when you need to add a variant
+to an existing sum type. Running `go-check-sumtype` will tell you immediately which
+case analyses need to be updated to account for the new variant.
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/UNLICENSE b/vendor/github.com/alecthomas/go-check-sumtype/UNLICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..68a49daad8ff7e35068f2b7a97d643aab440eaec
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/UNLICENSE
@@ -0,0 +1,24 @@
+This is free and unencumbered software released into the public domain.
+
+Anyone is free to copy, modify, publish, use, compile, sell, or
+distribute this software, either in source code form or as a compiled
+binary, for any purpose, commercial or non-commercial, and by any
+means.
+
+In jurisdictions that recognize copyright laws, the author or authors
+of this software dedicate any and all copyright interest in the
+software to the public domain. We make this dedication for the benefit
+of the public at large and to the detriment of our heirs and
+successors. We intend this dedication to be an overt act of
+relinquishment in perpetuity of all present and future rights to this
+software under copyright law.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+For more information, please refer to <http://unlicense.org/>
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/check.go b/vendor/github.com/alecthomas/go-check-sumtype/check.go
new file mode 100644
index 0000000000000000000000000000000000000000..21d751af42d1519b395eee9b946f37ff39f1c94f
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/check.go
@@ -0,0 +1,184 @@
+package gochecksumtype
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"go/types"
+	"sort"
+	"strings"
+
+	"golang.org/x/tools/go/packages"
+)
+
+// inexhaustiveError is returned from check for each occurrence of inexhaustive
+// case analysis in a Go type switch statement.
+type inexhaustiveError struct {
+	Position token.Position
+	Def      sumTypeDef
+	Missing  []types.Object
+}
+
+func (e inexhaustiveError) Pos() token.Position { return e.Position }
+func (e inexhaustiveError) Error() string {
+	return fmt.Sprintf(
+		"%s: exhaustiveness check failed for sum type %q (from %s): missing cases for %s",
+		e.Pos(), e.Def.Decl.TypeName, e.Def.Decl.Pos, strings.Join(e.Names(), ", "))
+}
+
+// Names returns a sorted list of names corresponding to the missing variant
+// cases.
+func (e inexhaustiveError) Names() []string {
+	var list []string
+	for _, o := range e.Missing {
+		list = append(list, o.Name())
+	}
+	sort.Strings(list)
+	return list
+}
+
+// check does exhaustiveness checking for the given sum type definitions in the
+// given package. Every instance of inexhaustive case analysis is returned.
+func check(pkg *packages.Package, defs []sumTypeDef) []error {
+	var errs []error
+	for _, astfile := range pkg.Syntax {
+		ast.Inspect(astfile, func(n ast.Node) bool {
+			swtch, ok := n.(*ast.TypeSwitchStmt)
+			if !ok {
+				return true
+			}
+			if err := checkSwitch(pkg, defs, swtch); err != nil {
+				errs = append(errs, err)
+			}
+			return true
+		})
+	}
+	return errs
+}
+
+// checkSwitch performs an exhaustiveness check on the given type switch
+// statement. If the type switch is used on a sum type and does not cover
+// all variants of that sum type, then an error is returned indicating which
+// variants were missed.
+//
+// Note that if the type switch contains a non-panicking default case, then
+// exhaustiveness checks are disabled.
+func checkSwitch(
+	pkg *packages.Package,
+	defs []sumTypeDef,
+	swtch *ast.TypeSwitchStmt,
+) error {
+	def, missing := missingVariantsInSwitch(pkg, defs, swtch)
+	if len(missing) > 0 {
+		return inexhaustiveError{
+			Position: pkg.Fset.Position(swtch.Pos()),
+			Def:      *def,
+			Missing:  missing,
+		}
+	}
+	return nil
+}
+
+// missingVariantsInSwitch returns a list of missing variants corresponding to
+// the given switch statement. The corresponding sum type definition is also
+// returned. (If no sum type definition could be found, then no exhaustiveness
+// checks are performed, and therefore, no missing variants are returned.)
+func missingVariantsInSwitch(
+	pkg *packages.Package,
+	defs []sumTypeDef,
+	swtch *ast.TypeSwitchStmt,
+) (*sumTypeDef, []types.Object) {
+	asserted := findTypeAssertExpr(swtch)
+	ty := pkg.TypesInfo.TypeOf(asserted)
+	def := findDef(defs, ty)
+	if def == nil {
+		// We couldn't find a corresponding sum type, so there's
+		// nothing we can do to check it.
+		return nil, nil
+	}
+	variantExprs, hasDefault := switchVariants(swtch)
+	if hasDefault && !defaultClauseAlwaysPanics(swtch) {
+		// A catch-all case defeats all exhaustiveness checks.
+		return def, nil
+	}
+	var variantTypes []types.Type
+	for _, expr := range variantExprs {
+		variantTypes = append(variantTypes, pkg.TypesInfo.TypeOf(expr))
+	}
+	return def, def.missing(variantTypes)
+}
+
+// switchVariants returns all case expressions found in a type switch. This
+// includes expressions from cases that have a list of expressions.
+func switchVariants(swtch *ast.TypeSwitchStmt) (exprs []ast.Expr, hasDefault bool) {
+	for _, stmt := range swtch.Body.List {
+		clause := stmt.(*ast.CaseClause)
+		if clause.List == nil {
+			hasDefault = true
+		} else {
+			exprs = append(exprs, clause.List...)
+		}
+	}
+	return
+}
+
+// defaultClauseAlwaysPanics returns true if the given switch statement has a
+// default clause that always panics. Note that this is done on a best-effort
+// basis. While there will never be any false positives, there may be false
+// negatives.
+//
+// If the given switch statement has no default clause, then this function
+// panics.
+func defaultClauseAlwaysPanics(swtch *ast.TypeSwitchStmt) bool {
+	var clause *ast.CaseClause
+	for _, stmt := range swtch.Body.List {
+		c := stmt.(*ast.CaseClause)
+		if c.List == nil {
+			clause = c
+			break
+		}
+	}
+	if clause == nil {
+		panic("switch statement has no default clause")
+	}
+	if len(clause.Body) != 1 {
+		return false
+	}
+	exprStmt, ok := clause.Body[0].(*ast.ExprStmt)
+	if !ok {
+		return false
+	}
+	callExpr, ok := exprStmt.X.(*ast.CallExpr)
+	if !ok {
+		return false
+	}
+	fun, ok := callExpr.Fun.(*ast.Ident)
+	if !ok {
+		return false
+	}
+	return fun.Name == "panic"
+}
+
+// findTypeAssertExpr extracts the expression that is being type asserted from a
+// type swtich statement.
+func findTypeAssertExpr(swtch *ast.TypeSwitchStmt) ast.Expr {
+	var expr ast.Expr
+	if assign, ok := swtch.Assign.(*ast.AssignStmt); ok {
+		expr = assign.Rhs[0]
+	} else {
+		expr = swtch.Assign.(*ast.ExprStmt).X
+	}
+	return expr.(*ast.TypeAssertExpr).X
+}
+
+// findDef returns the sum type definition corresponding to the given type. If
+// no such sum type definition exists, then nil is returned.
+func findDef(defs []sumTypeDef, needle types.Type) *sumTypeDef {
+	for i := range defs {
+		def := &defs[i]
+		if types.Identical(needle.Underlying(), def.Ty) {
+			return def
+		}
+	}
+	return nil
+}
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/decl.go b/vendor/github.com/alecthomas/go-check-sumtype/decl.go
new file mode 100644
index 0000000000000000000000000000000000000000..9dec9eefd5716c700b717931371d891ccd02fb88
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/decl.go
@@ -0,0 +1,69 @@
+package gochecksumtype
+
+import (
+	"go/ast"
+	"go/token"
+	"strings"
+
+	"golang.org/x/tools/go/packages"
+)
+
+// sumTypeDecl is a declaration of a sum type in a Go source file.
+type sumTypeDecl struct {
+	// The package path that contains this decl.
+	Package *packages.Package
+	// The type named by this decl.
+	TypeName string
+	// Position where the declaration was found.
+	Pos token.Position
+}
+
+// Location returns a short string describing where this declaration was found.
+func (d sumTypeDecl) Location() string {
+	return d.Pos.String()
+}
+
+// findSumTypeDecls searches every package given for sum type declarations of
+// the form `sumtype:decl`.
+func findSumTypeDecls(pkgs []*packages.Package) ([]sumTypeDecl, error) {
+	var decls []sumTypeDecl
+	var retErr error
+	for _, pkg := range pkgs {
+		for _, file := range pkg.Syntax {
+			ast.Inspect(file, func(node ast.Node) bool {
+				if node == nil {
+					return true
+				}
+				decl, ok := node.(*ast.GenDecl)
+				if !ok || decl.Doc == nil {
+					return true
+				}
+				var tspec *ast.TypeSpec
+				for _, spec := range decl.Specs {
+					ts, ok := spec.(*ast.TypeSpec)
+					if !ok {
+						continue
+					}
+					tspec = ts
+				}
+				for _, line := range decl.Doc.List {
+					if !strings.HasPrefix(line.Text, "//sumtype:decl") {
+						continue
+					}
+					pos := pkg.Fset.Position(decl.Pos())
+					if tspec == nil {
+						retErr = notFoundError{Decl: sumTypeDecl{Package: pkg, Pos: pos}}
+						return false
+					}
+					pos = pkg.Fset.Position(tspec.Pos())
+					decl := sumTypeDecl{Package: pkg, TypeName: tspec.Name.Name, Pos: pos}
+					debugf("found sum type decl: %s.%s", decl.Package.PkgPath, decl.TypeName)
+					decls = append(decls, decl)
+					break
+				}
+				return true
+			})
+		}
+	}
+	return decls, retErr
+}
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/def.go b/vendor/github.com/alecthomas/go-check-sumtype/def.go
new file mode 100644
index 0000000000000000000000000000000000000000..24729ac01bf66902389b46714762aa62dc55561a
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/def.go
@@ -0,0 +1,173 @@
+package gochecksumtype
+
+import (
+	"flag"
+	"fmt"
+	"go/token"
+	"go/types"
+	"log"
+)
+
+var debug = flag.Bool("debug", false, "enable debug logging")
+
+func debugf(format string, args ...interface{}) {
+	if *debug {
+		log.Printf(format, args...)
+	}
+}
+
+// Error as returned by Run()
+type Error interface {
+	error
+	Pos() token.Position
+}
+
+// unsealedError corresponds to a declared sum type whose interface is not
+// sealed. A sealed interface requires at least one unexported method.
+type unsealedError struct {
+	Decl sumTypeDecl
+}
+
+func (e unsealedError) Pos() token.Position { return e.Decl.Pos }
+func (e unsealedError) Error() string {
+	return fmt.Sprintf(
+		"%s: interface '%s' is not sealed "+
+			"(sealing requires at least one unexported method)",
+		e.Decl.Location(), e.Decl.TypeName)
+}
+
+// notFoundError corresponds to a declared sum type whose type definition
+// could not be found in the same Go package.
+type notFoundError struct {
+	Decl sumTypeDecl
+}
+
+func (e notFoundError) Pos() token.Position { return e.Decl.Pos }
+func (e notFoundError) Error() string {
+	return fmt.Sprintf("%s: type '%s' is not defined", e.Decl.Location(), e.Decl.TypeName)
+}
+
+// notInterfaceError corresponds to a declared sum type that does not
+// correspond to an interface.
+type notInterfaceError struct {
+	Decl sumTypeDecl
+}
+
+func (e notInterfaceError) Pos() token.Position { return e.Decl.Pos }
+func (e notInterfaceError) Error() string {
+	return fmt.Sprintf("%s: type '%s' is not an interface", e.Decl.Location(), e.Decl.TypeName)
+}
+
+// sumTypeDef corresponds to the definition of a Go interface that is
+// interpreted as a sum type. Its variants are determined by finding all types
+// that implement said interface in the same package.
+type sumTypeDef struct {
+	Decl     sumTypeDecl
+	Ty       *types.Interface
+	Variants []types.Object
+}
+
+// findSumTypeDefs attempts to find a Go type definition for each of the given
+// sum type declarations. If no such sum type definition could be found for
+// any of the given declarations, then an error is returned.
+func findSumTypeDefs(decls []sumTypeDecl) ([]sumTypeDef, []error) {
+	var defs []sumTypeDef
+	var errs []error
+	for _, decl := range decls {
+		def, err := newSumTypeDef(decl.Package.Types, decl)
+		if err != nil {
+			errs = append(errs, err)
+			continue
+		}
+		if def == nil {
+			errs = append(errs, notFoundError{decl})
+			continue
+		}
+		defs = append(defs, *def)
+	}
+	return defs, errs
+}
+
+// newSumTypeDef attempts to extract a sum type definition from a single
+// package. If no such type corresponds to the given decl, then this function
+// returns a nil def and a nil error.
+//
+// If the decl corresponds to a type that isn't an interface containing at
+// least one unexported method, then this returns an error.
+func newSumTypeDef(pkg *types.Package, decl sumTypeDecl) (*sumTypeDef, error) {
+	obj := pkg.Scope().Lookup(decl.TypeName)
+	if obj == nil {
+		return nil, nil
+	}
+	iface, ok := obj.Type().Underlying().(*types.Interface)
+	if !ok {
+		return nil, notInterfaceError{decl}
+	}
+	hasUnexported := false
+	for i := 0; i < iface.NumMethods(); i++ {
+		if !iface.Method(i).Exported() {
+			hasUnexported = true
+			break
+		}
+	}
+	if !hasUnexported {
+		return nil, unsealedError{decl}
+	}
+	def := &sumTypeDef{
+		Decl: decl,
+		Ty:   iface,
+	}
+	debugf("searching for variants of %s.%s\n", pkg.Path(), decl.TypeName)
+	for _, name := range pkg.Scope().Names() {
+		obj, ok := pkg.Scope().Lookup(name).(*types.TypeName)
+		if !ok {
+			continue
+		}
+		ty := obj.Type()
+		if types.Identical(ty.Underlying(), iface) {
+			continue
+		}
+		// Skip generic types.
+		if named, ok := ty.(*types.Named); ok && named.TypeParams() != nil {
+			continue
+		}
+		if types.Implements(ty, iface) || types.Implements(types.NewPointer(ty), iface) {
+			debugf("  found variant: %s.%s\n", pkg.Path(), obj.Name())
+			def.Variants = append(def.Variants, obj)
+		}
+	}
+	return def, nil
+}
+
+func (def *sumTypeDef) String() string {
+	return def.Decl.TypeName
+}
+
+// missing returns a list of variants in this sum type that are not in the
+// given list of types.
+func (def *sumTypeDef) missing(tys []types.Type) []types.Object {
+	// TODO(ag): This is O(n^2). Fix that. /shrug
+	var missing []types.Object
+	for _, v := range def.Variants {
+		found := false
+		varty := indirect(v.Type())
+		for _, ty := range tys {
+			ty = indirect(ty)
+			if types.Identical(varty, ty) {
+				found = true
+			}
+		}
+		if !found {
+			missing = append(missing, v)
+		}
+	}
+	return missing
+}
+
+// indirect dereferences through an arbitrary number of pointer types.
+func indirect(ty types.Type) types.Type {
+	if ty, ok := ty.(*types.Pointer); ok {
+		return indirect(ty.Elem())
+	}
+	return ty
+}
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/doc.go b/vendor/github.com/alecthomas/go-check-sumtype/doc.go
new file mode 100644
index 0000000000000000000000000000000000000000..2b6e86764edc20b69faf2cf980500269970fb576
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/doc.go
@@ -0,0 +1,53 @@
+/*
+sumtype takes a list of Go package paths or files and looks for sum type
+declarations in each package/file provided. Exhaustiveness checks are then
+performed for each use of a declared sum type in a type switch statement.
+Namely, sumtype will report an error for any type switch statement that
+either lacks a default clause or does not account for all possible variants.
+
+Declarations are provided in comments like so:
+
+	//sumtype:decl
+	type MySumType interface { ... }
+
+MySumType must be *sealed*. That is, part of its interface definition contains
+an unexported method.
+
+sumtype will produce an error if any of the above is not true.
+
+For valid declarations, sumtype will look for all occurrences in which a
+value of type MySumType participates in a type switch statement. In those
+occurrences, it will attempt to detect whether the type switch is exhaustive
+or not. If it's not, sumtype will report an error. For example:
+
+	$ cat mysumtype.go
+	package gochecksumtype
+
+	//sumtype:decl
+	type MySumType interface {
+		sealed()
+	}
+
+	type VariantA struct{}
+
+	func (a *VariantA) sealed() {}
+
+	type VariantB struct{}
+
+	func (b *VariantB) sealed() {}
+
+	func main() {
+		switch MySumType(nil).(type) {
+		case *VariantA:
+		}
+	}
+	$ sumtype mysumtype.go
+	mysumtype.go:18:2: exhaustiveness check failed for sum type 'MySumType': missing cases for VariantB
+
+Adding either a default clause or a clause to handle *VariantB will cause
+exhaustive checks to pass.
+
+As a special case, if the type switch statement contains a default clause
+that always panics, then exhaustiveness checks are still performed.
+*/
+package gochecksumtype
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/run.go b/vendor/github.com/alecthomas/go-check-sumtype/run.go
new file mode 100644
index 0000000000000000000000000000000000000000..fdcb643c5dc094a13296f3ab5963ae57d442b7f5
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/run.go
@@ -0,0 +1,26 @@
+package gochecksumtype
+
+import "golang.org/x/tools/go/packages"
+
+// Run sumtype checking on the given packages.
+func Run(pkgs []*packages.Package) []error {
+	var errs []error
+
+	decls, err := findSumTypeDecls(pkgs)
+	if err != nil {
+		return []error{err}
+	}
+
+	defs, defErrs := findSumTypeDefs(decls)
+	errs = append(errs, defErrs...)
+	if len(defs) == 0 {
+		return errs
+	}
+
+	for _, pkg := range pkgs {
+		if pkgErrs := check(pkg, defs); pkgErrs != nil {
+			errs = append(errs, pkgErrs...)
+		}
+	}
+	return errs
+}
diff --git a/vendor/github.com/bombsimon/wsl/v3/.gitignore b/vendor/github.com/bombsimon/wsl/v4/.gitignore
similarity index 100%
rename from vendor/github.com/bombsimon/wsl/v3/.gitignore
rename to vendor/github.com/bombsimon/wsl/v4/.gitignore
diff --git a/vendor/github.com/bombsimon/wsl/v3/.golangci.yml b/vendor/github.com/bombsimon/wsl/v4/.golangci.yml
similarity index 96%
rename from vendor/github.com/bombsimon/wsl/v3/.golangci.yml
rename to vendor/github.com/bombsimon/wsl/v4/.golangci.yml
index 336ad4bc86388a04ea73f58bda3dae0991bb6792..543012008fdd012f1c2fa1fbe1b807a0f0b12b75 100644
--- a/vendor/github.com/bombsimon/wsl/v3/.golangci.yml
+++ b/vendor/github.com/bombsimon/wsl/v4/.golangci.yml
@@ -39,6 +39,7 @@ linters:
   disable:
     - cyclop
     - deadcode
+    - depguard
     - dupl
     - dupword
     - exhaustivestruct
@@ -62,11 +63,13 @@ linters:
     - nosnakecase
     - paralleltest
     - prealloc
+    - rowserrcheck
     - scopelint
     - structcheck
     - testpackage
     - varcheck
     - varnamelen
+    - wastedassign
   fast: false
 
 
diff --git a/vendor/github.com/bombsimon/wsl/v3/LICENSE b/vendor/github.com/bombsimon/wsl/v4/LICENSE
similarity index 100%
rename from vendor/github.com/bombsimon/wsl/v3/LICENSE
rename to vendor/github.com/bombsimon/wsl/v4/LICENSE
diff --git a/vendor/github.com/bombsimon/wsl/v3/README.md b/vendor/github.com/bombsimon/wsl/v4/README.md
similarity index 69%
rename from vendor/github.com/bombsimon/wsl/v3/README.md
rename to vendor/github.com/bombsimon/wsl/v4/README.md
index 8ff74392bd582bc7770f7ba6ba7516d58484d7af..0bcf01d96a3c1226828290e006c13dfc1c0739aa 100644
--- a/vendor/github.com/bombsimon/wsl/v3/README.md
+++ b/vendor/github.com/bombsimon/wsl/v4/README.md
@@ -1,4 +1,4 @@
-# WSL - Whitespace Linter
+# wsl - Whitespace Linter
 
 [![forthebadge](https://forthebadge.com/images/badges/made-with-go.svg)](https://forthebadge.com)
 [![forthebadge](https://forthebadge.com/images/badges/built-with-love.svg)](https://forthebadge.com)
@@ -6,83 +6,58 @@
 [![GitHub Actions](https://github.com/bombsimon/wsl/actions/workflows/go.yml/badge.svg)](https://github.com/bombsimon/wsl/actions/workflows/go.yml)
 [![Coverage Status](https://coveralls.io/repos/github/bombsimon/wsl/badge.svg?branch=master)](https://coveralls.io/github/bombsimon/wsl?branch=master)
 
-WSL is a linter that enforces a very **non scientific** vision of how to make
+`wsl` is a linter that enforces a very **non scientific** vision of how to make
 code more readable by enforcing empty lines at the right places.
 
-I think too much code out there is to cuddly and a bit too warm for it's own
-good, making it harder for other people to read and understand. The linter will
-warn about newlines in and around blocks, in the beginning of files and other
-places in the code.
-
-**I know this linter is aggressive** and a lot of projects I've tested it on
-have failed miserably. For this linter to be useful at all I want to be open to
-new ideas, configurations and discussions! Also note that some of the warnings
-might be bugs or unintentional false positives so I would love an
+**This linter is aggressive** and a lot of projects I've tested it on have
+failed miserably. For this linter to be useful at all I want to be open to new
+ideas, configurations and discussions! Also note that some of the warnings might
+be bugs or unintentional false positives so I would love an
 [issue](https://github.com/bombsimon/wsl/issues/new) to fix, discuss, change or
 make something configurable!
 
 ## Installation
 
-### By `go get` (local installation)
-
-You can do that by using:
-
 ```sh
-go get -u github.com/bombsimon/wsl/v3/cmd/...
-```
-
-### By golangci-lint (CI automation)
+# Latest release
+go install github.com/bombsimon/wsl/v4/cmd/wsl
 
-`wsl` is already integrated with
-[golangci-lint](https://github.com/golangci/golangci-lint). Please refer to the
-instructions there.
+# Main branch
+go install github.com/bombsimon/wsl/v4/cmd/wsl@master
+```
 
 ## Usage
 
-How to use depends on how you install `wsl`.
+> **Note**: This linter provides a fixer that can fix most issues with the
+> `--fix` flag. However, currently `golangci-lint` [does not support suggested
+> fixes](https://github.com/golangci/golangci-lint/issues/1779) so the `--fix`
+> flag in `golangci-lint` will **not** work.
 
-### With local binary
-
-The general command format for `wsl` is:
+`wsl` uses the [analysis](https://pkg.go.dev/golang.org/x/tools/go/analysis)
+package, meaning it operates at the package level with the default analysis
+flags and way of working.
 
 ```sh
-$ wsl [flags] <file1> [files...]
-$ wsl [flags] </path/to/package/...>
-
-# Examples
+wsl --help
+wsl [flags] </path/to/package/...>
 
-$ wsl ./main.go
-$ wsl --no-test ./main.go
-$ wsl --allow-cuddle-declarations ./main.go
-$ wsl --no-test --allow-cuddle-declaration ./main.go
-$ wsl --no-test --allow-trailing-comment ./myProject/...
+wsl --allow-cuddle-declarations --fix ./...
 ```
 
-The "..." wildcard is not used like other `go` commands but instead can only
-be to a relative or absolute path.
-
-By default, the linter will run on `./...` which means all go files in the
-current path and all subsequent paths, including test files. To disable linting
-test files, use `-n` or `--no-test`.
-
-### By `golangci-lint` (CI automation)
-
-The recommended command is:
+`wsl` is also integrated in [`golangci-lint`](https://golangci-lint.run).
 
 ```sh
-golangci-lint run --disable-all --enable wsl
+golangci-lint run --no-config --disable-all --enable wsl
 ```
 
-For more information, please refer to
-[golangci-lint](https://github.com/golangci/golangci-lint)'s documentation.
-
 ## Issues and configuration
 
 The linter supports a few ways to configure it to satisfy more than one kind of
 code style. These settings could be set either with flags or with YAML
 configuration if used via `golangci-lint`.
 
-The supported configuration can be found [in the documentation](doc/configuration.md).
+The supported configuration can be found [in the
+documentation](doc/configuration.md).
 
 Below is the available checklist for any hit from `wsl`. If you do not see any,
 feel free to raise an [issue](https://github.com/bombsimon/wsl/issues/new).
@@ -106,14 +81,11 @@ feel free to raise an [issue](https://github.com/bombsimon/wsl/issues/new).
 * [For statements should only be cuddled with assignments used in the iteration](doc/rules.md#for-statements-should-only-be-cuddled-with-assignments-used-in-the-iteration)
 * [Go statements can only invoke functions assigned on line above](doc/rules.md#go-statements-can-only-invoke-functions-assigned-on-line-above)
 * [If statements should only be cuddled with assignments](doc/rules.md#if-statements-should-only-be-cuddled-with-assignments)
-* [If statements should only be cuddled with assignments used in the if
-  statement
-  itself](doc/rules.md#if-statements-should-only-be-cuddled-with-assignments-used-in-the-if-statement-itself)
+* [If statements should only be cuddled with assignments used in the if statement itself](doc/rules.md#if-statements-should-only-be-cuddled-with-assignments-used-in-the-if-statement-itself)
 * [If statements that check an error must be cuddled with the statement that assigned the error](doc/rules.md#if-statements-that-check-an-error-must-be-cuddled-with-the-statement-that-assigned-the-error)
-* [Only cuddled expressions if assigning variable or using from line
-  above](doc/rules.md#only-cuddled-expressions-if-assigning-variable-or-using-from-line-above)
+* [Only cuddled expressions if assigning variable or using from line above](doc/rules.md#only-cuddled-expressions-if-assigning-variable-or-using-from-line-above)
 * [Only one cuddle assignment allowed before defer statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-defer-statement)
-* [Only one cuddle assginment allowed before for statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-for-statement)
+* [Only one cuddle assignment allowed before for statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-for-statement)
 * [Only one cuddle assignment allowed before go statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-go-statement)
 * [Only one cuddle assignment allowed before if statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-if-statement)
 * [Only one cuddle assignment allowed before range statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-range-statement)
diff --git a/vendor/github.com/bombsimon/wsl/v4/analyzer.go b/vendor/github.com/bombsimon/wsl/v4/analyzer.go
new file mode 100644
index 0000000000000000000000000000000000000000..b8eac15875d0504d506154ce52cbddd3ae70c88d
--- /dev/null
+++ b/vendor/github.com/bombsimon/wsl/v4/analyzer.go
@@ -0,0 +1,141 @@
+package wsl
+
+import (
+	"flag"
+	"strings"
+
+	"golang.org/x/tools/go/analysis"
+)
+
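+// NewAnalyzer returns the wsl analysis.Analyzer. Passing a nil configuration
+// makes the analyzer register its command line flags and fall back to
+// defaultConfig. A minimal standalone wiring (a sketch, assuming the
+// golang.org/x/tools/go/analysis/singlechecker helper) could look like:
+//
+//	func main() {
+//		singlechecker.Main(wsl.NewAnalyzer(nil))
+//	}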
+func NewAnalyzer(config *Configuration) *analysis.Analyzer {
+	wa := &wslAnalyzer{config: config}
+
+	return &analysis.Analyzer{
+		Name:             "wsl",
+		Doc:              "add or remove empty lines",
+		Flags:            wa.flags(),
+		Run:              wa.run,
+		RunDespiteErrors: true,
+	}
+}
+
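+// defaultConfig returns the configuration used when no configuration is
+// passed to NewAnalyzer, i.e. when the analyzer is driven by its own flags.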
+func defaultConfig() *Configuration {
+	return &Configuration{
+		AllowAssignAndAnythingCuddle:     false,
+		AllowAssignAndCallCuddle:         true,
+		AllowCuddleDeclaration:           false,
+		AllowMultiLineAssignCuddle:       true,
+		AllowSeparatedLeadingComment:     false,
+		AllowTrailingComment:             false,
+		ForceCuddleErrCheckAndAssign:     false,
+		ForceExclusiveShortDeclarations:  false,
+		StrictAppend:                     true,
+		AllowCuddleWithCalls:             []string{"Lock", "RLock"},
+		AllowCuddleWithRHS:               []string{"Unlock", "RUnlock"},
+		ErrorVariableNames:               []string{"err"},
+		ForceCaseTrailingWhitespaceLimit: 0,
+	}
+}
+
+// wslAnalyzer wraps the configuration so it can be set when the analyzer is
+// created while still allowing the flags and the run method to be wired up
+// later.
+type wslAnalyzer struct {
+	config *Configuration
+}
+
+func (wa *wslAnalyzer) flags() flag.FlagSet {
+	flags := flag.NewFlagSet("", flag.ExitOnError)
+
+	// If we have a configuration set we're not running from the command line,
+	// so we don't use any flags.
+	if wa.config != nil {
+		return *flags
+	}
+
+	wa.config = defaultConfig()
+
+	flags.BoolVar(&wa.config.AllowAssignAndAnythingCuddle, "allow-assign-and-anything", false, "Allow assignments and anything to be cuddled")
+	flags.BoolVar(&wa.config.AllowAssignAndCallCuddle, "allow-assign-and-call", true, "Allow assignments and calls to be cuddled (if using same variable/type)")
+	flags.BoolVar(&wa.config.AllowCuddleDeclaration, "allow-cuddle-declarations", false, "Allow declarations to be cuddled")
+	flags.BoolVar(&wa.config.AllowMultiLineAssignCuddle, "allow-multi-line-assign", true, "Allow cuddling with multi line assignments")
+	flags.BoolVar(&wa.config.AllowSeparatedLeadingComment, "allow-separated-leading-comment", false, "Allow empty newlines in leading comments")
+	flags.BoolVar(&wa.config.AllowTrailingComment, "allow-trailing-comment", false, "Allow blocks to end with a comment")
+	flags.BoolVar(&wa.config.ForceCuddleErrCheckAndAssign, "force-err-cuddling", false, "Force cuddling of error checks with error var assignment")
+	flags.BoolVar(&wa.config.ForceExclusiveShortDeclarations, "force-short-decl-cuddling", false, "Force short declarations to cuddle by themselves")
+	flags.BoolVar(&wa.config.StrictAppend, "strict-append", true, "Strict rules for append")
+	flags.IntVar(&wa.config.ForceCaseTrailingWhitespaceLimit, "force-case-trailing-whitespace", 0, "Force newlines for case blocks > this number.")
+
+	flags.Var(&multiStringValue{slicePtr: &wa.config.AllowCuddleWithCalls}, "allow-cuddle-with-calls", "Comma separated list of idents that can have cuddles after")
+	flags.Var(&multiStringValue{slicePtr: &wa.config.AllowCuddleWithRHS}, "allow-cuddle-with-rhs", "Comma separated list of idents that can have cuddles before")
+	flags.Var(&multiStringValue{slicePtr: &wa.config.ErrorVariableNames}, "error-variable-names", "Comma separated list of error variable names")
+
+	return *flags
+}
+
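+// run lints every Go file in the pass and reports each recorded result as a
+// diagnostic whose suggested fix inserts a newline at the recorded ranges.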
+func (wa *wslAnalyzer) run(pass *analysis.Pass) (interface{}, error) {
+	for _, file := range pass.Files {
+		filename := pass.Fset.PositionFor(file.Pos(), false).Filename
+		if !strings.HasSuffix(filename, ".go") {
+			continue
+		}
+
+		processor := newProcessorWithConfig(file, pass.Fset, wa.config)
+		processor.parseAST()
+
+		for pos, fix := range processor.result {
+			textEdits := []analysis.TextEdit{}
+			for _, f := range fix.fixRanges {
+				textEdits = append(textEdits, analysis.TextEdit{
+					Pos:     f.fixRangeStart,
+					End:     f.fixRangeEnd,
+					NewText: []byte("\n"),
+				})
+			}
+
+			pass.Report(analysis.Diagnostic{
+				Pos:      pos,
+				Category: "whitespace",
+				Message:  fix.reason,
+				SuggestedFixes: []analysis.SuggestedFix{
+					{
+						TextEdits: textEdits,
+					},
+				},
+			})
+		}
+	}
+
+	//nolint:nilnil // A pass doesn't need to return anything.
+	return nil, nil
+}
+
+// multiStringValue is a flag that supports multiple values. It's implemented to
+// contain a pointer to a string slice that will be overwritten when the flag's
+// `Set` method is called.
+type multiStringValue struct {
+	slicePtr *[]string
+}
+
+// Set implements the flag.Value interface and overwrites the slice behind the
+// pointer with the comma-separated values from the flag.
+func (m *multiStringValue) Set(value string) error {
+	s := []string{}
+
+	for _, v := range strings.Split(value, ",") {
+		s = append(s, strings.TrimSpace(v))
+	}
+
+	*m.slicePtr = s
+
+	return nil
+}
+
+// String implements the flag.Value interface.
+func (m *multiStringValue) String() string {
+	if m.slicePtr == nil {
+		return ""
+	}
+
+	return strings.Join(*m.slicePtr, ", ")
+}
diff --git a/vendor/github.com/bombsimon/wsl/v3/wsl.go b/vendor/github.com/bombsimon/wsl/v4/wsl.go
similarity index 63%
rename from vendor/github.com/bombsimon/wsl/v3/wsl.go
rename to vendor/github.com/bombsimon/wsl/v4/wsl.go
index 1b139c0476da6c2d35a3742a32f298018751a707..6fd33335a1ea1f91c2c54bc5c768e3a72b3fd734 100644
--- a/vendor/github.com/bombsimon/wsl/v3/wsl.go
+++ b/vendor/github.com/bombsimon/wsl/v4/wsl.go
@@ -3,45 +3,44 @@ package wsl
 import (
 	"fmt"
 	"go/ast"
-	"go/parser"
 	"go/token"
-	"os"
 	"reflect"
+	"sort"
 	"strings"
 )
 
 // Error reason strings.
 const (
-	reasonMustCuddleErrCheck             = "if statements that check an error must be cuddled with the statement that assigned the error"
-	reasonOnlyCuddleIfWithAssign         = "if statements should only be cuddled with assignments"
-	reasonOnlyOneCuddle                  = "only one cuddle assignment allowed before if statement"
-	reasonOnlyCuddleWithUsedAssign       = "if statements should only be cuddled with assignments used in the if statement itself"
-	reasonOnlyCuddle2LineReturn          = "return statements should not be cuddled if block has more than two lines"
-	reasonMultiLineBranchCuddle          = "branch statements should not be cuddled if block has more than two lines"
+	reasonAnonSwitchCuddled              = "anonymous switch statements should never be cuddled"
 	reasonAppendCuddledWithoutUse        = "append only allowed to cuddle with appended value"
 	reasonAssignsCuddleAssign            = "assignments should only be cuddled with other assignments"
-	reasonNeverCuddleDeclare             = "declarations should never be cuddled"
-	reasonExpressionCuddledWithDeclOrRet = "expressions should not be cuddled with declarations or returns"
-	reasonExpressionCuddledWithBlock     = "expressions should not be cuddled with blocks"
-	reasonExprCuddlingNonAssignedVar     = "only cuddled expressions if assigning variable or using from line above"
-	reasonOneCuddleBeforeRange           = "only one cuddle assignment allowed before range statement"
-	reasonRangeCuddledWithoutUse         = "ranges should only be cuddled with assignments used in the iteration"
-	reasonOneCuddleBeforeDefer           = "only one cuddle assignment allowed before defer statement"
+	reasonBlockEndsWithWS                = "block should not end with a whitespace (or comment)"
+	reasonBlockStartsWithWS              = "block should not start with a whitespace"
+	reasonCaseBlockTooCuddly             = "case block should end with newline at this size"
 	reasonDeferCuddledWithOtherVar       = "defer statements should only be cuddled with expressions on same variable"
-	reasonForWithoutCondition            = "for statement without condition should never be cuddled"
-	reasonForWithMoreThanOneCuddle       = "only one cuddle assignment allowed before for statement"
+	reasonExprCuddlingNonAssignedVar     = "only cuddled expressions if assigning variable or using from line above"
+	reasonExpressionCuddledWithBlock     = "expressions should not be cuddled with blocks"
+	reasonExpressionCuddledWithDeclOrRet = "expressions should not be cuddled with declarations or returns"
 	reasonForCuddledAssignWithoutUse     = "for statements should only be cuddled with assignments used in the iteration"
-	reasonOneCuddleBeforeGo              = "only one cuddle assignment allowed before go statement"
+	reasonForWithoutCondition            = "for statement without condition should never be cuddled"
 	reasonGoFuncWithoutAssign            = "go statements can only invoke functions assigned on line above"
-	reasonSwitchManyCuddles              = "only one cuddle assignment allowed before switch statement"
-	reasonAnonSwitchCuddled              = "anonymous switch statements should never be cuddled"
+	reasonMultiLineBranchCuddle          = "branch statements should not be cuddled if block has more than two lines"
+	reasonMustCuddleErrCheck             = "if statements that check an error must be cuddled with the statement that assigned the error"
+	reasonNeverCuddleDeclare             = "declarations should never be cuddled"
+	reasonOnlyCuddle2LineReturn          = "return statements should not be cuddled if block has more than two lines"
+	reasonOnlyCuddleIfWithAssign         = "if statements should only be cuddled with assignments"
+	reasonOnlyCuddleWithUsedAssign       = "if statements should only be cuddled with assignments used in the if statement itself"
+	reasonOnlyOneCuddleBeforeDefer       = "only one cuddle assignment allowed before defer statement"
+	reasonOnlyOneCuddleBeforeFor         = "only one cuddle assignment allowed before for statement"
+	reasonOnlyOneCuddleBeforeGo          = "only one cuddle assignment allowed before go statement"
+	reasonOnlyOneCuddleBeforeIf          = "only one cuddle assignment allowed before if statement"
+	reasonOnlyOneCuddleBeforeRange       = "only one cuddle assignment allowed before range statement"
+	reasonOnlyOneCuddleBeforeSwitch      = "only one cuddle assignment allowed before switch statement"
+	reasonOnlyOneCuddleBeforeTypeSwitch  = "only one cuddle assignment allowed before type switch statement"
+	reasonRangeCuddledWithoutUse         = "ranges should only be cuddled with assignments used in the iteration"
+	reasonShortDeclNotExclusive          = "short declaration should cuddle only with other short declarations"
 	reasonSwitchCuddledWithoutUse        = "switch statements should only be cuddled with variables switched"
-	reasonTypeSwitchTooCuddled           = "only one cuddle assignment allowed before type switch statement"
 	reasonTypeSwitchCuddledWithoutUse    = "type switch statements should only be cuddled with variables switched"
-	reasonBlockStartsWithWS              = "block should not start with a whitespace"
-	reasonBlockEndsWithWS                = "block should not end with a whitespace (or comment)"
-	reasonCaseBlockTooCuddly             = "case block should end with newline at this size"
-	reasonShortDeclNotExclusive          = "short declaration should cuddle only with other short declarations"
 )
 
 // Warning strings.
@@ -54,6 +53,7 @@ const (
 	warnUnknownRHS                 = "UNKNOWN RHS"
 )
 
+// Configuration represents configurable settings for the linter.
 type Configuration struct {
 	// StrictAppend will do strict checking when assigning from append (x =
 	// append(x, y)). If this is set to true the append call must append either
@@ -82,7 +82,7 @@ type Configuration struct {
 	//  x = AnotherAssign()
 	AllowAssignAndCallCuddle bool
 
-	// AllowAssignAndCallCuddle allows assignments to be cuddled with anything.
+	// AllowAssignAndAnythingCuddle allows assignments to be cuddled with anything.
 	// Example supported with this set to true:
 	//  if x == 1 {
 	//  	x = 0
@@ -176,94 +176,40 @@ type Configuration struct {
 	ForceExclusiveShortDeclarations bool
 }
 
-// DefaultConfig returns default configuration.
-func DefaultConfig() Configuration {
-	return Configuration{
-		StrictAppend:                     true,
-		AllowAssignAndCallCuddle:         true,
-		AllowAssignAndAnythingCuddle:     false,
-		AllowMultiLineAssignCuddle:       true,
-		AllowTrailingComment:             false,
-		AllowSeparatedLeadingComment:     false,
-		ForceCuddleErrCheckAndAssign:     false,
-		ForceExclusiveShortDeclarations:  false,
-		ForceCaseTrailingWhitespaceLimit: 0,
-		AllowCuddleWithCalls:             []string{"Lock", "RLock"},
-		AllowCuddleWithRHS:               []string{"Unlock", "RUnlock"},
-		ErrorVariableNames:               []string{"err"},
-	}
+// fix is a range to fix up.
+type fix struct {
+	fixRangeStart token.Pos
+	fixRangeEnd   token.Pos
 }
 
-// Result represents the result of one error.
-type Result struct {
-	FileName   string
-	LineNumber int
-	Position   token.Position
-	Reason     string
+// result represents the result of one error.
+type result struct {
+	fixRanges []fix
+	reason    string
 }
 
-// String returns the filename, line number and reason of a Result.
-func (r *Result) String() string {
-	return fmt.Sprintf("%s:%d: %s", r.FileName, r.LineNumber, r.Reason)
-}
-
-type Processor struct {
-	config   Configuration
-	result   []Result
-	warnings []string
-	fileSet  *token.FileSet
+// processor is the type that keeps track of the file and fileset and holds the
+// results from parsing the AST.
+type processor struct {
+	config   *Configuration
 	file     *ast.File
+	fileSet  *token.FileSet
+	result   map[token.Pos]result
+	warnings []string
 }
 
-// NewProcessor will create a Processor.
-//
-//nolint:gocritic // It's fine to copy config struct
-func NewProcessorWithConfig(cfg Configuration) *Processor {
-	return &Processor{
-		result: []Result{},
-		config: cfg,
-	}
-}
-
-// NewProcessor will create a Processor.
-func NewProcessor() *Processor {
-	return NewProcessorWithConfig(DefaultConfig())
-}
-
-// ProcessFiles takes a string slice with file names (full paths) and lints
-// them.
-//
-//nolint:gocritic // Don't want named returns
-func (p *Processor) ProcessFiles(filenames []string) ([]Result, []string) {
-	for _, filename := range filenames {
-		data, err := os.ReadFile(filename)
-		if err != nil {
-			panic(err)
-		}
-
-		p.process(filename, data)
+// newProcessorWithConfig will create a processor with the passed configuration.
+func newProcessorWithConfig(file *ast.File, fileSet *token.FileSet, cfg *Configuration) *processor {
+	return &processor{
+		config:  cfg,
+		file:    file,
+		fileSet: fileSet,
+		result:  make(map[token.Pos]result),
 	}
-
-	return p.result, p.warnings
 }
 
-func (p *Processor) process(filename string, data []byte) {
-	fileSet := token.NewFileSet()
-	file, err := parser.ParseFile(fileSet, filename, data, parser.ParseComments)
-	// If the file is not parsable let's add a syntax error and move on.
-	if err != nil {
-		p.result = append(p.result, Result{
-			FileName:   filename,
-			LineNumber: 0,
-			Reason:     fmt.Sprintf("invalid syntax, file cannot be linted (%s)", err.Error()),
-		})
-
-		return
-	}
-
-	p.fileSet = fileSet
-	p.file = file
-
+// parseAST will parse the AST attached to the processor instance.
+func (p *processor) parseAST() {
 	for _, d := range p.file.Decls {
 		switch v := d.(type) {
 		case *ast.FuncDecl:
@@ -279,7 +225,7 @@ func (p *Processor) process(filename string, data []byte) {
 
 // parseBlockBody will parse any kind of block statements such as switch cases
 // and if statements. A list of Result is returned.
-func (p *Processor) parseBlockBody(ident *ast.Ident, block *ast.BlockStmt) {
+func (p *processor) parseBlockBody(ident *ast.Ident, block *ast.BlockStmt) {
 	// Nothing to do if there's no value.
 	if reflect.ValueOf(block).IsNil() {
 		return
@@ -294,7 +240,7 @@ func (p *Processor) parseBlockBody(ident *ast.Ident, block *ast.BlockStmt) {
 
 // parseBlockStatements will parse all the statements found in the body of a
 // node. A list of Result is returned.
-func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
+func (p *processor) parseBlockStatements(statements []ast.Stmt) {
 	for i, stmt := range statements {
 		// Start by checking if this statement is another block (other than if,
 		// for and range). This could be assignment to a function, defer or go
@@ -380,14 +326,38 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 			continue
 		}
 
-		moreThanOneStatementAbove := func() bool {
-			if i < 2 {
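+		// nStatementsBefore reports whether the n statements above the current
+		// one directly follow each other, i.e. there is no blank line between
+		// any of them.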
+		nStatementsBefore := func(n int) bool {
+			if i < n {
 				return false
 			}
 
-			statementBeforePreviousStatement := statements[i-2]
+			for j := 1; j < n; j++ {
+				s1 := statements[i-j]
+				s2 := statements[i-(j+1)]
+
+				if p.nodeStart(s1)-1 != p.nodeEnd(s2) {
+					return false
+				}
+			}
 
-			return p.nodeStart(previousStatement)-1 == p.nodeEnd(statementBeforePreviousStatement)
+			return true
+		}
+
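+		// nStatementsAfter reports whether the current statement is directly
+		// followed by at least n statements with no blank lines in between.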
+		nStatementsAfter := func(n int) bool {
+			if len(statements)-1 < i+n {
+				return false
+			}
+
+			for j := 0; j < n; j++ {
+				s1 := statements[i+j]
+				s2 := statements[i+j+1]
+
+				if p.nodeEnd(s1)+1 != p.nodeStart(s2) {
+					return false
+				}
+			}
+
+			return true
 		}
 
 		isLastStatementInBlockOfOnlyTwoLines := func() bool {
@@ -413,9 +383,30 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 		// it was and use *that* statement's position
 		if p.config.ForceExclusiveShortDeclarations && cuddledWithLastStmt {
 			if p.isShortDecl(stmt) && !p.isShortDecl(previousStatement) {
-				p.addError(stmt.Pos(), reasonShortDeclNotExclusive)
+				var reportNode ast.Node = previousStatement
+
+				cm := ast.NewCommentMap(p.fileSet, stmt, p.file.Comments)
+				if cg, ok := cm[stmt]; ok && len(cg) > 0 {
+					for _, c := range cg {
+						if c.Pos() > previousStatement.End() && c.End() < stmt.Pos() {
+							reportNode = c
+						}
+					}
+				}
+
+				p.addErrorRange(
+					stmt.Pos(),
+					reportNode.End(),
+					reportNode.End(),
+					reasonShortDeclNotExclusive,
+				)
 			} else if p.isShortDecl(previousStatement) && !p.isShortDecl(stmt) {
-				p.addError(previousStatement.Pos(), reasonShortDeclNotExclusive)
+				p.addErrorRange(
+					previousStatement.Pos(),
+					stmt.Pos(),
+					stmt.Pos(),
+					reasonShortDeclNotExclusive,
+				)
 			}
 		}
 
@@ -428,6 +419,31 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 			}
 		}
 
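+		// reportNewlineTwoLinesAbove reports reason at n1 and decides where the
+		// suggested fix should insert the newline: before n2 when only the
+		// assignment directly above is the one being used (so that cuddle is
+		// kept), otherwise directly before n1.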
+		reportNewlineTwoLinesAbove := func(n1, n2 ast.Node, reason string) {
+			if atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) ||
+				atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) {
+				// If both the assignment on the line above _and_ the assignment
+				// two lines above is part of line or first in block, add the
+				// newline as if non were.
+				_, isAssignmentTwoLinesAbove := statements[i-2].(*ast.AssignStmt)
+				assignedTwoLinesAbove := p.findLHS(statements[i-2])
+
+				if isAssignmentTwoLinesAbove &&
+					(atLeastOneInListsMatch(rightAndLeftHandSide, assignedTwoLinesAbove) ||
+						atLeastOneInListsMatch(assignedTwoLinesAbove, calledOrAssignedFirstInBlock)) {
+					p.addWhitespaceBeforeError(n1, reason)
+				} else {
+					// If the variable on the line above is allowed to be
+					// cuddled, break two lines above so we keep the proper
+					// cuddling.
+					p.addErrorRange(n1.Pos(), n2.Pos(), n2.Pos(), reason)
+				}
+			} else {
+				// If not, break here so we separate the cuddled variable.
+				p.addWhitespaceBeforeError(n1, reason)
+			}
+		}
+
 		switch t := stmt.(type) {
 		case *ast.IfStmt:
 			checkingErrInitializedInline := func() bool {
@@ -460,7 +476,12 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 					}
 
 					if atLeastOneInListsMatch(assignedOnLineAbove, p.config.ErrorVariableNames) {
-						p.addError(t.Pos(), reasonMustCuddleErrCheck)
+						p.addErrorRange(
+							stmt.Pos(),
+							previousStatement.End(),
+							stmt.Pos(),
+							reasonMustCuddleErrCheck,
+						)
 					}
 				}
 
@@ -468,12 +489,12 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 			}
 
 			if len(assignedOnLineAbove) == 0 {
-				p.addError(t.Pos(), reasonOnlyCuddleIfWithAssign)
+				p.addWhitespaceBeforeError(t, reasonOnlyCuddleIfWithAssign)
 				continue
 			}
 
-			if moreThanOneStatementAbove() {
-				p.addError(t.Pos(), reasonOnlyOneCuddle)
+			if nStatementsBefore(2) {
+				reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeIf)
 				continue
 			}
 
@@ -485,33 +506,34 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 				continue
 			}
 
-			p.addError(t.Pos(), reasonOnlyCuddleWithUsedAssign)
+			p.addWhitespaceBeforeError(t, reasonOnlyCuddleWithUsedAssign)
 		case *ast.ReturnStmt:
 			if isLastStatementInBlockOfOnlyTwoLines() {
 				continue
 			}
 
-			p.addError(t.Pos(), reasonOnlyCuddle2LineReturn)
+			p.addWhitespaceBeforeError(t, reasonOnlyCuddle2LineReturn)
 		case *ast.BranchStmt:
 			if isLastStatementInBlockOfOnlyTwoLines() {
 				continue
 			}
 
-			p.addError(t.Pos(), reasonMultiLineBranchCuddle)
+			p.addWhitespaceBeforeError(t, reasonMultiLineBranchCuddle)
 		case *ast.AssignStmt:
 			// append is usually an assignment but should not be allowed to be
 			// cuddled with anything not appended.
 			if len(rightHandSide) > 0 && rightHandSide[len(rightHandSide)-1] == "append" {
 				if p.config.StrictAppend {
 					if !atLeastOneInListsMatch(calledOrAssignedOnLineAbove, rightHandSide) {
-						p.addError(t.Pos(), reasonAppendCuddledWithoutUse)
+						p.addWhitespaceBeforeError(t, reasonAppendCuddledWithoutUse)
 					}
 				}
 
 				continue
 			}
 
-			if _, ok := previousStatement.(*ast.AssignStmt); ok {
+			switch previousStatement.(type) {
+			case *ast.AssignStmt, *ast.IncDecStmt:
 				continue
 			}
 
@@ -535,10 +557,18 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 				}
 			}
 
-			p.addError(t.Pos(), reasonAssignsCuddleAssign)
+			p.addWhitespaceBeforeError(t, reasonAssignsCuddleAssign)
+		case *ast.IncDecStmt:
+			switch previousStatement.(type) {
+			case *ast.AssignStmt, *ast.IncDecStmt:
+				continue
+			}
+
+			p.addWhitespaceBeforeError(t, reasonAssignsCuddleAssign)
+
 		case *ast.DeclStmt:
 			if !p.config.AllowCuddleDeclaration {
-				p.addError(t.Pos(), reasonNeverCuddleDeclare)
+				p.addWhitespaceBeforeError(t, reasonNeverCuddleDeclare)
 			}
 		case *ast.ExprStmt:
 			switch previousStatement.(type) {
@@ -547,9 +577,9 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 					continue
 				}
 
-				p.addError(t.Pos(), reasonExpressionCuddledWithDeclOrRet)
+				p.addWhitespaceBeforeError(t, reasonExpressionCuddledWithDeclOrRet)
 			case *ast.IfStmt, *ast.RangeStmt, *ast.SwitchStmt:
-				p.addError(t.Pos(), reasonExpressionCuddledWithBlock)
+				p.addWhitespaceBeforeError(t, reasonExpressionCuddledWithBlock)
 			}
 
 			// If the expression is called on a type or variable used or
@@ -567,17 +597,17 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 			// If we assigned variables on the line above but didn't use them in
 			// this expression there should probably be a newline between them.
 			if len(assignedOnLineAbove) > 0 && !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) {
-				p.addError(t.Pos(), reasonExprCuddlingNonAssignedVar)
+				p.addWhitespaceBeforeError(t, reasonExprCuddlingNonAssignedVar)
 			}
 		case *ast.RangeStmt:
-			if moreThanOneStatementAbove() {
-				p.addError(t.Pos(), reasonOneCuddleBeforeRange)
+			if nStatementsBefore(2) {
+				reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeRange)
 				continue
 			}
 
 			if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) {
 				if !atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) {
-					p.addError(t.Pos(), reasonRangeCuddledWithoutUse)
+					p.addWhitespaceBeforeError(t, reasonRangeCuddledWithoutUse)
 				}
 			}
 		case *ast.DeferStmt:
@@ -586,27 +616,38 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 				continue
 			}
 
-			// Special treatment of deferring body closes after error checking
-			// according to best practices. See
-			// https://github.com/bombsimon/wsl/issues/31 which links to
-			// discussion about error handling after HTTP requests. This is hard
-			// coded and very specific but for now this is to be seen as a
-			// special case. What this does is that it *only* allows a defer
-			// statement with `Close` on the right hand side to be cuddled with
-			// an if-statement to support this:
-			//  resp, err := client.Do(req)
-			//  if err != nil {
-			//      return err
-			//  }
-			//  defer resp.Body.Close()
-			if _, ok := previousStatement.(*ast.IfStmt); ok {
-				if atLeastOneInListsMatch(rightHandSide, []string{"Close"}) {
-					continue
+			if nStatementsBefore(2) {
+				// We allow cuddling defer if the defer references something
+				// used two lines above.
+				// There are several reasons why we do this.
+				// Originally there was a special use case only for "Close"
+				//
+				// https://github.com/bombsimon/wsl/issues/31 which links to this example:
+				//  resp, err := client.Do(req)
+				//  if err != nil {
+				//      return err
+				//  }
+				//  defer resp.Body.Close()
+				//
+				// After a discussion in a follow-up issue it makes sense to not
+				// hard code only `Close` but to allow anything that's referenced
+				// two statements above.
+				//
+				// https://github.com/bombsimon/wsl/issues/85
+				//  db, err := OpenDB()
+				//  require.NoError(t, err)
+				//  defer db.Close()
+				//
+				// All of this is only allowed if there are exactly three cuddled
+				// statements; otherwise the regular rules apply.
+				if !nStatementsBefore(3) && !nStatementsAfter(1) {
+					variablesTwoLinesAbove := append(p.findLHS(statements[i-2]), p.findRHS(statements[i-2])...)
+					if atLeastOneInListsMatch(rightHandSide, variablesTwoLinesAbove) {
+						continue
+					}
 				}
-			}
 
-			if moreThanOneStatementAbove() {
-				p.addError(t.Pos(), reasonOneCuddleBeforeDefer)
+				reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeDefer)
 
 				continue
 			}
@@ -620,7 +661,7 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 			}
 
 			// Allow cuddling defer func literals with usages on the line
-			// abouve. Example:
+			// above. Example:
 			// b := getB()
 			// defer func() {
 			//     makesSenseToUse(b)
@@ -638,18 +679,16 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 			}
 
 			if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) {
-				p.addError(t.Pos(), reasonDeferCuddledWithOtherVar)
+				p.addWhitespaceBeforeError(t, reasonDeferCuddledWithOtherVar)
 			}
 		case *ast.ForStmt:
 			if len(rightAndLeftHandSide) == 0 {
-				p.addError(t.Pos(), reasonForWithoutCondition)
-
+				p.addWhitespaceBeforeError(t, reasonForWithoutCondition)
 				continue
 			}
 
-			if moreThanOneStatementAbove() {
-				p.addError(t.Pos(), reasonForWithMoreThanOneCuddle)
-
+			if nStatementsBefore(2) {
+				reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeFor)
 				continue
 			}
 
@@ -658,7 +697,7 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 			// first line in the block for details.
 			if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) {
 				if !atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) {
-					p.addError(t.Pos(), reasonForCuddledAssignWithoutUse)
+					p.addWhitespaceBeforeError(t, reasonForCuddledAssignWithoutUse)
 				}
 			}
 		case *ast.GoStmt:
@@ -666,9 +705,8 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 				continue
 			}
 
-			if moreThanOneStatementAbove() {
-				p.addError(t.Pos(), reasonOneCuddleBeforeGo)
-
+			if nStatementsBefore(2) {
+				reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeGo)
 				continue
 			}
 
@@ -689,26 +727,24 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 			}
 
 			if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) {
-				p.addError(t.Pos(), reasonGoFuncWithoutAssign)
+				p.addWhitespaceBeforeError(t, reasonGoFuncWithoutAssign)
 			}
 		case *ast.SwitchStmt:
-			if moreThanOneStatementAbove() {
-				p.addError(t.Pos(), reasonSwitchManyCuddles)
-
+			if nStatementsBefore(2) {
+				reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeSwitch)
 				continue
 			}
 
 			if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) {
 				if len(rightAndLeftHandSide) == 0 {
-					p.addError(t.Pos(), reasonAnonSwitchCuddled)
+					p.addWhitespaceBeforeError(t, reasonAnonSwitchCuddled)
 				} else {
-					p.addError(t.Pos(), reasonSwitchCuddledWithoutUse)
+					p.addWhitespaceBeforeError(t, reasonSwitchCuddledWithoutUse)
 				}
 			}
 		case *ast.TypeSwitchStmt:
-			if moreThanOneStatementAbove() {
-				p.addError(t.Pos(), reasonTypeSwitchTooCuddled)
-
+			if nStatementsBefore(2) {
+				reportNewlineTwoLinesAbove(t, statements[i-1], reasonOnlyOneCuddleBeforeTypeSwitch)
 				continue
 			}
 
@@ -717,11 +753,11 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 				// Allow type assertion on variables used in the first case
 				// immediately.
 				if !atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) {
-					p.addError(t.Pos(), reasonTypeSwitchCuddledWithoutUse)
+					p.addWhitespaceBeforeError(t, reasonTypeSwitchCuddledWithoutUse)
 				}
 			}
 		case *ast.CaseClause, *ast.CommClause:
-			// Case clauses will be checked by not allowing leading ot trailing
+			// Case clauses will be checked by not allowing leading or trailing
 			// whitespaces within the block. There's nothing in the case itself
 			// that may be cuddled.
 		default:
@@ -735,7 +771,7 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) {
 // directly as the first argument inside a body.
 // The body will then be parsed as a *ast.BlockStmt (regular block) or as a list
 // of []ast.Stmt (case block).
-func (p *Processor) firstBodyStatement(i int, allStmt []ast.Stmt) ast.Node {
+func (p *processor) firstBodyStatement(i int, allStmt []ast.Stmt) ast.Node {
 	stmt := allStmt[i]
 
 	// Start by checking if the statement has a body (probably if-statement,
@@ -766,7 +802,14 @@ func (p *Processor) firstBodyStatement(i int, allStmt []ast.Stmt) ast.Node {
 			}
 		}
 
-		p.parseBlockBody(nil, statementBodyContent)
+		// If statement bodies will already have been parsed when finding block
+		// bodies. The reason is that if/else-if/else chains are nested in the
+		// AST where the else branch is part of the if statement. Since if
+		// statements are the only statements that can be chained like this we
+		// exclude them from being parsed again here.
+		if _, ok := stmt.(*ast.IfStmt); !ok {
+			p.parseBlockBody(nil, statementBodyContent)
+		}
 	case []ast.Stmt:
 		// The Body field for an *ast.CaseClause or *ast.CommClause is of type
 		// []ast.Stmt. We must check leading and trailing whitespaces and then
@@ -791,7 +834,7 @@ func (p *Processor) firstBodyStatement(i int, allStmt []ast.Stmt) ast.Node {
 	return firstBodyStatement
 }
 
-func (p *Processor) findLHS(node ast.Node) []string {
+func (p *processor) findLHS(node ast.Node) []string {
 	var lhs []string
 
 	if node == nil {
@@ -850,7 +893,7 @@ func (p *Processor) findLHS(node ast.Node) []string {
 	return lhs
 }
 
-func (p *Processor) findRHS(node ast.Node) []string {
+func (p *processor) findRHS(node ast.Node) []string {
 	var rhs []string
 
 	if node == nil {
@@ -934,7 +977,7 @@ func (p *Processor) findRHS(node ast.Node) []string {
 	return rhs
 }
 
-func (p *Processor) isShortDecl(node ast.Node) bool {
+func (p *processor) isShortDecl(node ast.Node) bool {
 	if t, ok := node.(*ast.AssignStmt); ok {
 		return t.Tok == token.DEFINE
 	}
@@ -942,16 +985,22 @@ func (p *Processor) isShortDecl(node ast.Node) bool {
 	return false
 }
 
-func (p *Processor) findBlockStmt(node ast.Node) []*ast.BlockStmt {
+func (p *processor) findBlockStmt(node ast.Node) []*ast.BlockStmt {
 	var blocks []*ast.BlockStmt
 
 	switch t := node.(type) {
+	case *ast.BlockStmt:
+		return []*ast.BlockStmt{t}
 	case *ast.AssignStmt:
 		for _, x := range t.Rhs {
 			blocks = append(blocks, p.findBlockStmt(x)...)
 		}
 	case *ast.CallExpr:
 		blocks = append(blocks, p.findBlockStmt(t.Fun)...)
+
+		for _, x := range t.Args {
+			blocks = append(blocks, p.findBlockStmt(x)...)
+		}
 	case *ast.FuncLit:
 		blocks = append(blocks, t.Body)
 	case *ast.ExprStmt:
@@ -964,6 +1013,8 @@ func (p *Processor) findBlockStmt(node ast.Node) []*ast.BlockStmt {
 		blocks = append(blocks, p.findBlockStmt(t.Call)...)
 	case *ast.GoStmt:
 		blocks = append(blocks, p.findBlockStmt(t.Call)...)
+	case *ast.IfStmt:
+		blocks = append([]*ast.BlockStmt{t.Body}, p.findBlockStmt(t.Else)...)
 	}
 
 	return blocks
@@ -1020,15 +1071,15 @@ func atLeastOneInListsMatch(listOne, listTwo []string) bool {
 // findLeadingAndTrailingWhitespaces will find leading and trailing whitespaces
 // in a node. The method takes comments into consideration, which makes the
 // parser more lenient.
-func (p *Processor) findLeadingAndTrailingWhitespaces(ident *ast.Ident, stmt, nextStatement ast.Node) {
+func (p *processor) findLeadingAndTrailingWhitespaces(ident *ast.Ident, stmt, nextStatement ast.Node) {
 	var (
-		allowedLinesBeforeFirstStatement = 1
-		commentMap                       = ast.NewCommentMap(p.fileSet, stmt, p.file.Comments)
-		blockStatements                  []ast.Stmt
-		blockStartLine                   int
-		blockEndLine                     int
-		blockStartPos                    token.Pos
-		blockEndPos                      token.Pos
+		commentMap      = ast.NewCommentMap(p.fileSet, stmt, p.file.Comments)
+		blockStatements []ast.Stmt
+		blockStartLine  int
+		blockEndLine    int
+		blockStartPos   token.Pos
+		blockEndPos     token.Pos
+		isCase          bool
 	)
 
 	// Depending on the block type, get the statements in the block and where
@@ -1041,9 +1092,11 @@ func (p *Processor) findLeadingAndTrailingWhitespaces(ident *ast.Ident, stmt, ne
 	case *ast.CaseClause:
 		blockStatements = t.Body
 		blockStartPos = t.Colon
+		isCase = true
 	case *ast.CommClause:
 		blockStatements = t.Body
 		blockStartPos = t.Colon
+		isCase = true
 	default:
 		p.addWarning(warnWSNodeTypeNotImplemented, stmt.Pos(), stmt)
 
@@ -1055,8 +1108,8 @@ func (p *Processor) findLeadingAndTrailingWhitespaces(ident *ast.Ident, stmt, ne
 		return
 	}
 
-	blockStartLine = p.fileSet.Position(blockStartPos).Line
-	blockEndLine = p.fileSet.Position(blockEndPos).Line
+	blockStartLine = p.fileSet.PositionFor(blockStartPos, false).Line
+	blockEndLine = p.fileSet.PositionFor(blockEndPos, false).Line
 
 	// No whitespace possible if LBrace and RBrace is on the same line.
 	if blockStartLine == blockEndLine {
@@ -1064,152 +1117,238 @@ func (p *Processor) findLeadingAndTrailingWhitespaces(ident *ast.Ident, stmt, ne
 	}
 
 	var (
-		firstStatement    = blockStatements[0]
-		lastStatement     = blockStatements[len(blockStatements)-1]
-		seenCommentGroups = 0
+		firstStatement = blockStatements[0]
+		lastStatement  = blockStatements[len(blockStatements)-1]
 	)
 
-	// Get the comment related to the first statement, we do allow commends in
+	// Get the comment related to the first statement; we do allow comments in
 	// the beginning of a block before the first statement.
-	if c, ok := commentMap[firstStatement]; ok {
-		for _, commentGroup := range c {
-			// If the comment group is on the same line as the block start
-			// (LBrace) we should not consider it.
-			if p.nodeStart(commentGroup) == blockStartLine {
+	var (
+		openingNodePos     = blockStartPos + 1
+		lastLeadingComment ast.Node
+	)
+
+	var (
+		firstStatementCommentGroups []*ast.CommentGroup
+		lastStatementCommentGroups  []*ast.CommentGroup
+	)
+
+	if cg, ok := commentMap[firstStatement]; ok && !isCase {
+		firstStatementCommentGroups = cg
+	} else {
+	// TODO: Just like with trailing whitespaces, comments in a case block are
+	// tied to the last token of the first statement. For now we iterate over
+	// all comments in the stmt and grab those that are after the colon and
+	// before the first statement.
+		for _, cg := range commentMap {
+			if len(cg) < 1 {
 				continue
 			}
 
-			// We only care about comments before our statement from the comment
-			// map. As soon as we hit comments after our statement let's break
-			// out!
-			if commentGroup.Pos() > firstStatement.Pos() {
-				break
+			// If we have comments and the last comment ends before the first
+			// statement and the node is after the colon, this must be the node
+			// mapped to comments.
+			for _, c := range cg {
+				if c.End() < firstStatement.Pos() && c.Pos() > blockStartPos {
+					firstStatementCommentGroups = append(firstStatementCommentGroups, c)
+				}
 			}
 
-			// We store number of seen comment groups because we allow multiple
-			// groups with a newline between them; but if the first one has WS
-			// before it, we're not going to count it to force an error.
-			if p.config.AllowSeparatedLeadingComment {
-				cg := p.fileSet.Position(commentGroup.Pos()).Line
+			// And the same if we have comments where the first comment is after
+			// the last statement but before the next statement (next case). As
+			// with the other checks, if there is no next statement there is no
+			// next case and the logic will be handled when parsing the block.
+			if nextStatement == nil {
+				continue
+			}
 
-				if seenCommentGroups > 0 || cg == blockStartLine+1 {
-					seenCommentGroups++
+			for _, c := range cg {
+				if c.Pos() > lastStatement.End() && c.End() < nextStatement.Pos() {
+					lastStatementCommentGroups = append(lastStatementCommentGroups, c)
 				}
-			} else {
-				seenCommentGroups++
 			}
+		}
+
+		// Since the comments come from a map they might not be ordered meaning
+		// that the last and first comment groups can be in the wrong order. We
+		// fix this by sorting all comments by pos after adding them all to the
+		// slice.
+		sort.Slice(firstStatementCommentGroups, func(i, j int) bool {
+			return firstStatementCommentGroups[i].Pos() < firstStatementCommentGroups[j].Pos()
+		})
+
+		sort.Slice(lastStatementCommentGroups, func(i, j int) bool {
+			return lastStatementCommentGroups[i].Pos() < lastStatementCommentGroups[j].Pos()
+		})
+	}
 
-			// Support both /* multiline */ and //single line comments
-			for _, c := range commentGroup.List {
-				allowedLinesBeforeFirstStatement += len(strings.Split(c.Text, "\n"))
+	for _, commentGroup := range firstStatementCommentGroups {
+		// If the comment group is on the same line as the block start
+		// (LBrace) we should not consider it.
+		if p.nodeEnd(commentGroup) == blockStartLine {
+			openingNodePos = commentGroup.End()
+			continue
+		}
+
+		// We only care about comments before our statement from the comment
+		// map. As soon as we hit comments after our statement let's break
+		// out!
+		if commentGroup.Pos() > firstStatement.Pos() {
+			break
+		}
+
+		// We never allow leading whitespace for the first comment.
+		if lastLeadingComment == nil && p.nodeStart(commentGroup)-1 != blockStartLine {
+			p.addErrorRange(
+				openingNodePos,
+				openingNodePos,
+				commentGroup.Pos(),
+				reasonBlockStartsWithWS,
+			)
+		}
+
+		// If lastLeadingComment is set this is not the first comment so we
+		// should remove whitespace between them if we don't explicitly
+		// allow it.
+		if lastLeadingComment != nil && !p.config.AllowSeparatedLeadingComment {
+			if p.nodeStart(commentGroup)+1 != p.nodeEnd(lastLeadingComment) {
+				p.addErrorRange(
+					openingNodePos,
+					lastLeadingComment.End(),
+					commentGroup.Pos(),
+					reasonBlockStartsWithWS,
+				)
 			}
 		}
+
+		lastLeadingComment = commentGroup
 	}
 
-	// If we allow separated comments, allow for a space after each group
-	if p.config.AllowSeparatedLeadingComment {
-		if seenCommentGroups > 1 {
-			allowedLinesBeforeFirstStatement += seenCommentGroups - 1
-		} else if seenCommentGroups == 1 {
-			allowedLinesBeforeFirstStatement++
-		}
+	lastNodePos := openingNodePos
+	if lastLeadingComment != nil {
+		lastNodePos = lastLeadingComment.End()
+		blockStartLine = p.nodeEnd(lastLeadingComment)
 	}
 
-	// And now if the first statement is passed the number of allowed lines,
-	// then we had extra WS, possibly before the first comment group.
-	if p.nodeStart(firstStatement) > blockStartLine+allowedLinesBeforeFirstStatement {
-		p.addError(
-			blockStartPos,
+	// Check if there is whitespace between the first statement and the last
+	// node before it, which can be the LBrace, a comment on the same line, or
+	// the last leading comment inside the block. This is never allowed.
+	if p.nodeStart(firstStatement)-1 != blockStartLine {
+		p.addErrorRange(
+			openingNodePos,
+			lastNodePos,
+			firstStatement.Pos(),
 			reasonBlockStartsWithWS,
 		)
 	}
 
 	// If the blockEndLine is not 0 we're a regular block (not case).
 	if blockEndLine != 0 {
-		if p.config.AllowTrailingComment {
-			if lastComment, ok := commentMap[lastStatement]; ok {
-				var (
-					lastCommentGroup = lastComment[len(lastComment)-1]
-					lastCommentLine  = lastCommentGroup.List[len(lastCommentGroup.List)-1]
-					countNewlines    = 0
-				)
+		// We don't want to reject example functions since they have to end with
+		// a comment.
+		if isExampleFunc(ident) {
+			return
+		}
 
-				countNewlines += len(strings.Split(lastCommentLine.Text, "\n"))
+		var (
+			lastNode         ast.Node = lastStatement
+			trailingComments []ast.Node
+		)
 
-				// No newlines between trailing comments and end of block.
-				if p.nodeStart(lastCommentLine)+countNewlines != blockEndLine-1 {
-					return
+		// Check if we have any comments _after_ the last statement and update
+		// the last node if so.
+		if c, ok := commentMap[lastStatement]; ok {
+			lastComment := c[len(c)-1]
+			if lastComment.Pos() > lastStatement.End() && lastComment.Pos() < stmt.End() {
+				lastNode = lastComment
+			}
+		}
+
+		// TODO: This should be improved.
+		// The trailing comments are mapped to the last statement item which can
+		// be anything depending on what the last statement is.
+		// In `fmt.Println("hello")`, trailing comments will be mapped to
+		// `*ast.BasicLit` for the "hello" string.
+		// A short term improvement can be to cache this but for now we naively
+		// iterate over all items when we check a block.
+		for _, commentGroups := range commentMap {
+			for _, commentGroup := range commentGroups {
+				if commentGroup.Pos() < lastNode.End() || commentGroup.End() > stmt.End() {
+					continue
 				}
+
+				trailingComments = append(trailingComments, commentGroup)
 			}
 		}
 
-		if p.nodeEnd(lastStatement) != blockEndLine-1 && !isExampleFunc(ident) {
-			p.addError(blockEndPos, reasonBlockEndsWithWS)
+		// TODO: Should this be relaxed?
+		// Given the old code we only allowed trailing newline if it was
+		// directly tied to the last statement so for backwards compatibility
+		// we'll do the same. This means we fail all but the last whitespace
+		// even when allowing trailing comments.
+		for _, comment := range trailingComments {
+			if p.nodeStart(comment)-p.nodeEnd(lastNode) > 1 {
+				p.addErrorRange(
+					blockEndPos,
+					lastNode.End(),
+					comment.Pos(),
+					reasonBlockEndsWithWS,
+				)
+			}
+
+			lastNode = comment
+		}
+
+		if !p.config.AllowTrailingComment && p.nodeEnd(stmt)-1 != p.nodeEnd(lastStatement) {
+			p.addErrorRange(
+				blockEndPos,
+				lastNode.End(),
+				stmt.End()-1,
+				reasonBlockEndsWithWS,
+			)
 		}
 
 		return
 	}
 
+	// Nothing to do if we're not looking for enforced newline.
+	if p.config.ForceCaseTrailingWhitespaceLimit == 0 {
+		return
+	}
+
 	// If we don't have any nextStatement the trailing whitespace will be
 	// handled when parsing the switch. If we do have a next statement we can
 	// see where it starts by getting its colon position. We set the end of the
 	// current case to the position of the next case.
-	switch n := nextStatement.(type) {
-	case *ast.CaseClause:
-		blockEndPos = n.Case
-	case *ast.CommClause:
-		blockEndPos = n.Case
+	switch nextStatement.(type) {
+	case *ast.CaseClause, *ast.CommClause:
 	default:
 		// No more cases
 		return
 	}
 
-	blockEndLine = p.fileSet.Position(blockEndPos).Line - 1
-
-	var (
-		blockSize                = blockEndLine - blockStartLine
-		caseTrailingCommentLines int
-	)
-
-	// TODO: I don't know what comments are bound to in cases. For regular
-	// blocks the last comment is bound to the last statement but for cases
-	// they are bound to the case clause expression. This will however get us all
-	// comments and depending on the case expression this gets tricky.
-	//
-	// To handle this I get the comment map from the current statement (the case
-	// itself) and iterate through all groups and all comment within all groups.
-	// I then get the comments after the last statement but before the next case
-	// clause and just map each line of comment that way.
-	for _, commentGroups := range commentMap {
-		for _, commentGroup := range commentGroups {
-			for _, comment := range commentGroup.List {
-				commentLine := p.fileSet.Position(comment.Pos()).Line
-
-				// Ignore comments before the last statement.
-				if commentLine <= p.nodeStart(lastStatement) {
-					continue
-				}
-
-				// Ignore comments after the end of this case.
-				if commentLine > blockEndLine {
-					continue
-				}
-
-				// This allows /* multiline */ comments with newlines as well
-				// as regular (//) ones
-				caseTrailingCommentLines += len(strings.Split(comment.Text, "\n"))
-			}
-		}
+	var closingNode ast.Node = lastStatement
+	for _, commentGroup := range lastStatementCommentGroups {
+		// TODO: In future versions we might want to close the gaps between
+		// comments. However this is not currently reported in v3 so we
+		// won't add this for now.
+		// if p.nodeStart(commentGroup)-1 != p.nodeEnd(closingNode) {}
+		closingNode = commentGroup
 	}
 
-	hasTrailingWhitespace := p.nodeEnd(lastStatement)+caseTrailingCommentLines != blockEndLine
+	totalRowsInCase := p.nodeEnd(closingNode) - blockStartLine
+	if totalRowsInCase < p.config.ForceCaseTrailingWhitespaceLimit {
+		return
+	}
 
-	// If the force trailing limit is configured and we don't end with a newline.
-	if p.config.ForceCaseTrailingWhitespaceLimit > 0 && !hasTrailingWhitespace {
-		// Check if the block size is too big to miss the newline.
-		if blockSize >= p.config.ForceCaseTrailingWhitespaceLimit {
-			p.addError(lastStatement.Pos(), reasonCaseBlockTooCuddly)
-		}
+	if p.nodeEnd(closingNode)+1 == p.nodeStart(nextStatement) {
+		p.addErrorRange(
+			closingNode.Pos(),
+			closingNode.End(),
+			closingNode.End(),
+			reasonCaseBlockTooCuddly,
+		)
 	}
 }
 
@@ -1217,15 +1356,15 @@ func isExampleFunc(ident *ast.Ident) bool {
 	return ident != nil && strings.HasPrefix(ident.Name, "Example")
 }
 
-func (p *Processor) nodeStart(node ast.Node) int {
-	return p.fileSet.Position(node.Pos()).Line
+func (p *processor) nodeStart(node ast.Node) int {
+	return p.fileSet.PositionFor(node.Pos(), false).Line
 }
 
-func (p *Processor) nodeEnd(node ast.Node) int {
-	line := p.fileSet.Position(node.End()).Line
+func (p *processor) nodeEnd(node ast.Node) int {
+	line := p.fileSet.PositionFor(node.End(), false).Line
 
 	if isEmptyLabeledStmt(node) {
-		return p.fileSet.Position(node.Pos()).Line
+		return p.fileSet.PositionFor(node.Pos(), false).Line
 	}
 
 	return line
@@ -1242,21 +1381,29 @@ func isEmptyLabeledStmt(node ast.Node) bool {
 	return empty
 }
 
-// Add an error for the file and line number for the current token.Pos with the
-// given reason.
-func (p *Processor) addError(pos token.Pos, reason string) {
-	position := p.fileSet.Position(pos)
+func (p *processor) addWhitespaceBeforeError(node ast.Node, reason string) {
+	p.addErrorRange(node.Pos(), node.Pos(), node.Pos(), reason)
+}
+
+func (p *processor) addErrorRange(reportAt, start, end token.Pos, reason string) {
+	report, ok := p.result[reportAt]
+	if !ok {
+		report = result{
+			reason:    reason,
+			fixRanges: []fix{},
+		}
+	}
 
-	p.result = append(p.result, Result{
-		FileName:   position.Filename,
-		LineNumber: position.Line,
-		Position:   position,
-		Reason:     reason,
+	report.fixRanges = append(report.fixRanges, fix{
+		fixRangeStart: start,
+		fixRangeEnd:   end,
 	})
+
+	p.result[reportAt] = report
 }
 
-func (p *Processor) addWarning(w string, pos token.Pos, t interface{}) {
-	position := p.fileSet.Position(pos)
+func (p *processor) addWarning(w string, pos token.Pos, t interface{}) {
+	position := p.fileSet.PositionFor(pos, false)
 
 	p.warnings = append(p.warnings,
 		fmt.Sprintf("%s:%d: %s (%T)", position.Filename, position.Line, w, t),
diff --git a/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go b/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go
index 2e1e899349d11a429df751c78ed641aaf132453d..f1bf20faba24800aa85d52a2cf0e0d90d247bb01 100644
--- a/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go
+++ b/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go
@@ -15,7 +15,7 @@ import (
 
 const (
 	doc           = "bidichk detects dangerous unicode character sequences"
-	disallowedDoc = `coma separated list of disallowed runes (full name or short name)
+	disallowedDoc = `comma separated list of disallowed runes (full name or short name)
 
 Supported runes
 
diff --git a/vendor/github.com/breml/errchkjson/.goreleaser.yml b/vendor/github.com/breml/errchkjson/.goreleaser.yml
index 5f23690f15973228d847b023d851a447e3d9ee18..a05c172cb6aecaaa8ad6a164926122d95bc3677b 100644
--- a/vendor/github.com/breml/errchkjson/.goreleaser.yml
+++ b/vendor/github.com/breml/errchkjson/.goreleaser.yml
@@ -14,13 +14,14 @@ builds:
       - windows
       - darwin
 archives:
-  - name_template: "{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}"
-    replacements:
-      darwin: Darwin
-      linux: Linux
-      windows: Windows
-      386: i386
-      amd64: x86_64
+  - name_template: >-
+      {{- .Binary }}_
+      {{- .Version }}_
+      {{- title .Os }}_
+      {{- if eq .Arch "amd64" }}x86_64
+      {{- else if eq .Arch "386" }}i386
+      {{- else }}{{ .Arch }}{{ end }}
+      {{- if .Arm }}v{{ .Arm }}{{ end -}}
 snapshot:
   name_template: "{{ .Tag }}-next"
 changelog:
diff --git a/vendor/github.com/breml/errchkjson/errchkjson.go b/vendor/github.com/breml/errchkjson/errchkjson.go
index 746709c76365e400bef97b533ebda64698ba30db..4a23929cf246af3916ed11115e56f73e64fa3692 100644
--- a/vendor/github.com/breml/errchkjson/errchkjson.go
+++ b/vendor/github.com/breml/errchkjson/errchkjson.go
@@ -308,14 +308,14 @@ func (e *errchkjson) inspectArgs(pass *analysis.Pass, args []ast.Expr) {
 }
 
 // Construct *types.Interface for interface encoding.TextMarshaler
-//     type TextMarshaler interface {
-//         MarshalText() (text []byte, err error)
-//     }
 //
+//	type TextMarshaler interface {
+//	    MarshalText() (text []byte, err error)
+//	}
 func textMarshalerInterface() *types.Interface {
 	textMarshalerInterface := types.NewInterfaceType([]*types.Func{
-		types.NewFunc(token.NoPos, nil, "MarshalText", types.NewSignature(
-			nil, nil, types.NewTuple(
+		types.NewFunc(token.NoPos, nil, "MarshalText", types.NewSignatureType(
+			nil, nil, nil, nil, types.NewTuple(
 				types.NewVar(token.NoPos, nil, "text",
 					types.NewSlice(
 						types.Universe.Lookup("byte").Type())),
@@ -328,14 +328,14 @@ func textMarshalerInterface() *types.Interface {
 }
 
 // Construct *types.Interface for interface json.Marshaler
-//     type Marshaler interface {
-//         MarshalJSON() ([]byte, error)
-//     }
 //
+//	type Marshaler interface {
+//	    MarshalJSON() ([]byte, error)
+//	}
 func jsonMarshalerInterface() *types.Interface {
 	textMarshalerInterface := types.NewInterfaceType([]*types.Func{
-		types.NewFunc(token.NoPos, nil, "MarshalJSON", types.NewSignature(
-			nil, nil, types.NewTuple(
+		types.NewFunc(token.NoPos, nil, "MarshalJSON", types.NewSignatureType(
+			nil, nil, nil, nil, types.NewTuple(
 				types.NewVar(token.NoPos, nil, "",
 					types.NewSlice(
 						types.Universe.Lookup("byte").Type())),
diff --git a/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go b/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go
index 3a0bf7402dfb97a3068e81f3d57a977606d1d57f..f68170fb3187f8ac68c5ca50757e8a09209cd515 100644
--- a/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go
+++ b/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go
@@ -4,6 +4,7 @@ import (
 	"flag"
 	"go/ast"
 	gotypes "go/types"
+	"runtime"
 	"strings"
 	"sync"
 
@@ -22,9 +23,11 @@ type validator interface {
 }
 
 type analyzer struct {
-	once    sync.Once
-	handler validator
-	err     error
+	once          sync.Once
+	mu            sync.RWMutex
+	handler       validator
+	err           error
+	diabledNolint bool
 
 	found []analysis.Diagnostic
 }
@@ -60,8 +63,7 @@ func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) {
 		}
 
 		// 003. Is it allowed to be checked?
-		// TODO(butuzov): add inline comment
-		if hasDisallowDirective(f.Doc) {
+		if !a.diabledNolint && hasDisallowDirective(f.Doc) {
 			return
 		}
 
@@ -69,7 +71,6 @@ func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) {
 
 		// 004. Filtering Results.
 		for _, issue := range filterInterfaces(pass, f.Type, dotImportedStd) {
-
 			if a.handler.IsValid(issue) {
 				continue
 			}
@@ -83,11 +84,13 @@ func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) {
 			}
 			seen[key] = true
 
-			a.found = append(a.found, issue.ExportDiagnostic())
+			a.addDiagnostic(issue.ExportDiagnostic())
 		}
 	})
 
 	// 02. Printing reports.
+	a.mu.RLock()
+	defer a.mu.RUnlock()
 	for i := range a.found {
 		pass.Report(a.found[i])
 	}
@@ -95,6 +98,13 @@ func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) {
 	return nil, nil
 }
 
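+// addDiagnostic appends to the shared found slice under the write lock,
+// matching the read lock taken before reporting in run.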
+func (a *analyzer) addDiagnostic(d analysis.Diagnostic) {
+	a.mu.Lock()
+	defer a.mu.Unlock()
+
+	a.found = append(a.found, d)
+}
+
 func (a *analyzer) readConfiguration(fs *flag.FlagSet) {
 	cnf, err := config.New(fs)
 	if err != nil {
@@ -102,6 +112,13 @@ func (a *analyzer) readConfiguration(fs *flag.FlagSet) {
 		return
 	}
 
+	// First: check the nonolint directive.
+	val := fs.Lookup("nonolint")
+	if val != nil {
+		a.diabledNolint = fs.Lookup("nonolint").Value.String() == "true"
+	}
+
+	// Second: use the validator implementation from the configuration, if any.
 	if validatorImpl, ok := cnf.(validator); ok {
 		a.handler = validatorImpl
 		return
@@ -126,6 +143,7 @@ func flags() flag.FlagSet {
 	set := flag.NewFlagSet("", flag.PanicOnError)
 	set.String("allow", "", "accept-list of the comma-separated interfaces")
 	set.String("reject", "", "reject-list of the comma-separated interfaces")
+	set.Bool("nonolint", false, "disable nolint checks")
 	return *set
 }
 
@@ -136,13 +154,10 @@ func filterInterfaces(p *analysis.Pass, ft *ast.FuncType, di map[string]struct{}
 		return results
 	}
 
-	tp := newTypeParams(ft.TypeParams)
-
 	for _, el := range ft.Results.List {
 		switch v := el.Type.(type) {
 		// ----- empty or anonymous interfaces
 		case *ast.InterfaceType:
-
 			if len(v.Methods.List) == 0 {
 				results = append(results, types.NewIssue("interface{}", types.EmptyInterface))
 				continue
@@ -154,35 +169,65 @@ func filterInterfaces(p *analysis.Pass, ft *ast.FuncType, di map[string]struct{}
 		case *ast.Ident:
 
 			t1 := p.TypesInfo.TypeOf(el.Type)
-			if !gotypes.IsInterface(t1.Underlying()) {
+			val, ok := t1.Underlying().(*gotypes.Interface)
+			if !ok {
 				continue
 			}
 
-			word := t1.String()
-			// only build in interface is error
-			if obj := gotypes.Universe.Lookup(word); obj != nil {
-				results = append(results, types.NewIssue(obj.Name(), types.ErrorInterface))
+			var (
+				name    = t1.String()
+				isNamed = strings.Contains(name, ".")
+				isEmpty = val.Empty()
+			)
+
+			// catching any
+			if isEmpty && name == "any" {
+				results = append(results, types.NewIssue(name, types.EmptyInterface))
 				continue
 			}
 
-			// found in type params
-			if tp.In(word) {
-				results = append(results, types.NewIssue(word, types.Generic))
+			// NOTE: FIXED!
+			if name == "error" {
+				results = append(results, types.NewIssue(name, types.ErrorInterface))
+				continue
+			}
+
+			if !isNamed {
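+			// An unnamed, non-empty interface at this point comes from a generic
+			// type parameter; strip the interface{...} wrapper so only the
+			// constraint itself is reported as the type parameter's type.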
+
+				typeParams := val.String()
+				prefix, suffix := "interface{", "}"
+				if strings.HasPrefix(typeParams, prefix) { // nolint: gosimple
+					typeParams = typeParams[len(prefix):]
+				}
+				if strings.HasSuffix(typeParams, suffix) {
+					typeParams = typeParams[:len(typeParams)-1]
+				}
+
+				goVersion := runtime.Version()
+				if strings.HasPrefix(goVersion, "go1.18") || strings.HasPrefix(goVersion, "go1.19") {
+					typeParams = strings.ReplaceAll(typeParams, "|", " | ")
+				}
+
+				results = append(results, types.IFace{
+					Name:   name,
+					Type:   types.Generic,
+					OfType: typeParams,
+				})
 				continue
 			}
 
 			// is it dot-imported package?
 			// handling cases when stdlib package imported via "." dot-import
 			if len(di) > 0 {
-				name := stdPkgInterface(word)
-				if _, ok := di[name]; ok {
-					results = append(results, types.NewIssue(word, types.NamedStdInterface))
+				pkgName := stdPkgInterface(name)
+				if _, ok := di[pkgName]; ok {
+					results = append(results, types.NewIssue(name, types.NamedStdInterface))
 
 					continue
 				}
 			}
 
-			results = append(results, types.NewIssue(word, types.NamedInterface))
+			results = append(results, types.NewIssue(name, types.NamedInterface))
 
 		// ------- standard library and 3rd party interfaces
 		case *ast.SelectorExpr:
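For context: the removed typeparams.go helper matched type-parameter names purely by identifier, while the rewritten `*ast.Ident` branch above asks go/types whether the result's underlying type is an interface and, for unnamed ones, records the constraint as `OfType`. A minimal sketch, with hypothetical names, of a declaration the new logic classifies as a generic interface return:

```go
package demo

// Pick is illustrative only: its result type T is a type parameter, and T's
// underlying type is the constraint interface, so the updated filterInterfaces
// reports a types.Generic issue with OfType set to roughly "~int | ~string".
func Pick[T ~int | ~string](a, b T, first bool) T {
	if first {
		return a
	}
	return b
}
```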
diff --git a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/config.go b/vendor/github.com/butuzov/ireturn/analyzer/internal/config/config.go
index e2f1aef6e95c814e56d53f0c2db913f7e23f3e2f..46c73170ae4efd8cc36e12292e8d70c3bf85c229 100644
--- a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/config.go
+++ b/vendor/github.com/butuzov/ireturn/analyzer/internal/config/config.go
@@ -2,6 +2,7 @@ package config
 
 import (
 	"regexp"
+	"sync"
 
 	"github.com/butuzov/ireturn/analyzer/internal/types"
 )
@@ -13,16 +14,13 @@ type defaultConfig struct {
 	List []string
 
 	// private fields (for search optimization look ups)
-	init  bool
+	once  sync.Once
 	quick uint8
 	list  []*regexp.Regexp
 }
 
 func (config *defaultConfig) Has(i types.IFace) bool {
-	if !config.init {
-		config.compileList()
-		config.init = true
-	}
+	config.once.Do(config.compileList)
 
 	if config.quick&uint8(i.Type) > 0 {
 		return true
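For context: swapping the unsynchronized `init bool` guard for `sync.Once` makes the lazy `compileList` step safe if `Has` is reached from multiple goroutines. A self-contained sketch of the same pattern, with made-up names:

```go
package demo

import (
	"regexp"
	"sync"
)

// lazyConfig mirrors the shape of defaultConfig for illustration only.
type lazyConfig struct {
	once sync.Once
	list []*regexp.Regexp
}

// compileList is the one-time initialization step.
func (c *lazyConfig) compileList() {
	c.list = append(c.list, regexp.MustCompile(`^Must`))
}

// Has is safe for concurrent use: once.Do runs compileList exactly once,
// and later callers block until that first run has completed.
func (c *lazyConfig) Has(name string) bool {
	c.once.Do(c.compileList)
	for _, re := range c.list {
		if re.MatchString(name) {
			return true
		}
	}
	return false
}
```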
diff --git a/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go b/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go
index 13f19a3e250f9f5ad74c5594fd5890727bb5c500..5e576374d54c62486089ae1aaefe79989126e7b8 100644
--- a/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go
+++ b/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go
@@ -14,6 +14,7 @@ type IFace struct {
 
 	Pos      token.Pos // Token Position
 	FuncName string    //
+	OfType   string
 }
 
 func NewIssue(name string, interfaceType IType) IFace {
@@ -30,11 +31,15 @@ func (i *IFace) Enrich(f *ast.FuncDecl) {
 }
 
 func (i IFace) String() string {
-	if i.Type == Generic {
-		return fmt.Sprintf("%s returns generic interface (%s)", i.FuncName, i.Name)
+	if i.Type != Generic {
+		return fmt.Sprintf("%s returns interface (%s)", i.FuncName, i.Name)
 	}
 
-	return fmt.Sprintf("%s returns interface (%s)", i.FuncName, i.Name)
+	if i.OfType != "" {
+		return fmt.Sprintf("%s returns generic interface (%s) of type param %s", i.FuncName, i.Name, i.OfType)
+	}
+
+	return fmt.Sprintf("%s returns generic interface (%s)", i.FuncName, i.Name)
 }
 
 func (i IFace) HashString() string {
diff --git a/vendor/github.com/butuzov/ireturn/analyzer/std.go b/vendor/github.com/butuzov/ireturn/analyzer/std.go
index ec361cd442132c6f94f42fac7baccfc9a94d1a2f..cac46461264607f90b6ebfd5568567afb2dab2d9 100644
--- a/vendor/github.com/butuzov/ireturn/analyzer/std.go
+++ b/vendor/github.com/butuzov/ireturn/analyzer/std.go
@@ -191,4 +191,13 @@ var std = map[string]struct{}{
 	// added in Go v1.20 in compare to v1.19 (docker image)
 	"crypto/ecdh":      {},
 	"runtime/coverage": {},
+	// added in Go v1.21 in compare to v1.20 (docker image)
+	"cmp":              {},
+	"log/slog":         {},
+	"maps":             {},
+	"slices":           {},
+	"testing/slogtest": {},
+	// added in Go v1.22 in compare to v1.21 (docker image)
+	"go/version":   {},
+	"math/rand/v2": {},
 }
diff --git a/vendor/github.com/butuzov/ireturn/analyzer/typeparams.go b/vendor/github.com/butuzov/ireturn/analyzer/typeparams.go
deleted file mode 100644
index 14193c355b9c827ac171d3a7c62dc989e6e95466..0000000000000000000000000000000000000000
--- a/vendor/github.com/butuzov/ireturn/analyzer/typeparams.go
+++ /dev/null
@@ -1,38 +0,0 @@
-package analyzer
-
-import (
-	"go/ast"
-)
-
-type typeParams struct {
-	found []string
-}
-
-func newTypeParams(fl *ast.FieldList) typeParams {
-	tp := typeParams{}
-
-	if fl == nil {
-		return tp
-	}
-
-	for _, el := range fl.List {
-		if el == nil {
-			continue
-		}
-
-		for _, name := range el.Names {
-			tp.found = append(tp.found, name.Name)
-		}
-	}
-
-	return tp
-}
-
-func (tp typeParams) In(t string) bool {
-	for _, i := range tp.found {
-		if i == t {
-			return true
-		}
-	}
-	return false
-}
diff --git a/vendor/github.com/butuzov/mirror/.act b/vendor/github.com/butuzov/mirror/.act
deleted file mode 100644
index 8182d703ae0bf921a4cb41e4ee434863bcdcba39..0000000000000000000000000000000000000000
--- a/vendor/github.com/butuzov/mirror/.act
+++ /dev/null
@@ -1,2 +0,0 @@
---platform ubuntu-latest=butuzov/act-go:latest
---env DRY_RUN=1
diff --git a/vendor/github.com/butuzov/mirror/.goreleaser.yaml b/vendor/github.com/butuzov/mirror/.goreleaser.yaml
index 11749ed2b310a9035e72831b57e042ab55178036..fa91fa97e2ddb06c3551d5ff4de96724f5717ac0 100644
--- a/vendor/github.com/butuzov/mirror/.goreleaser.yaml
+++ b/vendor/github.com/butuzov/mirror/.goreleaser.yaml
@@ -5,33 +5,17 @@ builds:
   - binary: mirror
     env:
     - CGO_ENABLED=0
+    main: ./cmd/mirror/
+    flags:
+      - -trimpath
+    ldflags: -s -w
     goos:
-      - darwin
       - linux
+      - darwin
       - windows
     goarch:
       - amd64
-      - 386
       - arm64
-      - arm
-    goarm:
-      - 6
-    ignore:
-      - goos: windows
-        goarm: 6
-      - goos: windows
-        goarch: arm64
-      - goos: linux
-        goarm: 6
-      - goos: darwin
-        goarch: 386
-    main: ./cmd/mirror/
-    flags:
-      - -trimpath
-    ldflags: -s -w
-
-checksum:
-  name_template: 'checksums.txt'
 
 changelog:
   sort: asc
@@ -45,14 +29,15 @@ changelog:
       - Merge pull request
       - Merge branch
 
+
+checksum:
+  name_template: 'checksums.txt'
+
+
 archives:
-  - name_template: '{{ .ProjectName }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}{{ if .Mips }}_{{ .Mips }}{{ end }}'
-    replacements:
-      darwin: darwin
-      linux: linux
-      windows: windows
-      386: i386
-      amd64: x86_64
+  - format: tar.gz
+    name_template: >-
+        {{ .ProjectName }}_{{- tolower .Os }}_{{ .Arch }}
     format_overrides:
       - goos: windows
         format: zip
diff --git a/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md b/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md
index 776816e5141961c4b3b8627f6f4ab360c80aa067..3dcc01e9605378512a46a4fd2c944cfe4642c69b 100644
--- a/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md
+++ b/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md
@@ -1,150 +1,201 @@
-<table><tr>
-<td><code>func (b *bufio.Writer) WriteString(s string) (int, error)</code></td>
-<td>
-  <code>func (b *bufio.Writer) Write(p []byte) (int, error)</code>
-  <code>func (b *bufio.Writer) WriteRune(r rune) (int, error)</code>
-</td>
+<tr>
+<td><code>func (*bufio.Writer) Write([]byte) (int, error)</code></td>
+<td><code>func (*bufio.Writer) WriteString(string) (int, error)</code></td>
+</tr>
+<tr>
+<td><code>func (*bufio.Writer) WriteRune(rune) (int, error)</code></td>
+<td><code>func (*bufio.Writer) WriteString(string) (int, error)</code></td>
+</tr>
+<tr>
+<td><code>func (*bytes.Buffer) Write([]byte) (int, error)</code></td>
+<td><code>func (*bytes.Buffer) WriteString(string) (int, error)</code></td>
+</tr>
+<tr>
+<td><code>func (*bytes.Buffer) WriteRune(rune) (int, error)</code></td>
+<td><code>func (*bytes.Buffer) WriteString(string) (int, error)</code></td>
+</tr>
+<tr>
+<td><code>func bytes.Compare([]byte, []byte) int</code></td>
+<td><code>func strings.Compare(string, string) int</code></td>
+</tr>
+<tr>
+<td><code>func bytes.Contains([]byte, []byte) bool</code></td>
+<td><code>func strings.Contains(string, string) bool</code></td>
+</tr>
+<tr>
+<td><code>func bytes.ContainsAny([]byte, string) bool</code></td>
+<td><code>func strings.ContainsAny(string, string) bool</code></td>
+</tr>
+<tr>
+<td><code>func bytes.ContainsRune([]byte, byte) bool</code></td>
+<td><code>func strings.ContainsRune(string, byte) bool</code></td>
+</tr>
+<tr>
+<td><code>func bytes.Count([]byte, []byte) int</code></td>
+<td><code>func strings.Count(string, string) int</code></td>
+</tr>
+<tr>
+<td><code>func bytes.EqualFold([]byte, []byte) bool</code></td>
+<td><code>func strings.EqualFold(string, string) bool</code></td>
+</tr>
+<tr>
+<td><code>func bytes.HasPrefix([]byte, []byte) bool</code></td>
+<td><code>func strings.HasPrefix(string, string) bool</code></td>
+</tr>
+<tr>
+<td><code>func bytes.HasSuffix([]byte, []byte) bool</code></td>
+<td><code>func strings.HasSuffix(string, string) bool</code></td>
+</tr>
+<tr>
+<td><code>func bytes.Index([]byte, []byte) int</code></td>
+<td><code>func strings.Index(string, string) int</code></td>
+</tr>
+<tr>
+<td><code>func bytes.IndexAny([]byte, string) int</code></td>
+<td><code>func strings.IndexAny(string, string) int</code></td>
+</tr>
+<tr>
+<td><code>func bytes.IndexByte([]byte, byte) int</code></td>
+<td><code>func strings.IndexByte(string, byte) int</code></td>
+</tr>
+<tr>
+<td><code>func bytes.IndexFunc([]byte, func(rune) bool) int</code></td>
+<td><code>func strings.IndexFunc(string, func(rune) bool) int</code></td>
 </tr>
 <tr>
-<td><code>func (b *bytes.Buffer) WriteString(s string) (int, error)</code></td>
-<td>
-  <code>func (b *bytes.Buffer) Write(p []byte) (int, error)</code>
-  <code>func (b *bytes.Buffer) WriteRune(r rune) (int, error)</code>
- </td>
+<td><code>func bytes.IndexRune([]byte, rune) int</code></td>
+<td><code>func strings.IndexRune(string, rune) int</code></td>
 </tr>
 <tr>
-<td><code>func strings.Compare(a, b string) int</code></td>
-<td><code>func bytes.Compare(a, b []byte) int</code></td>
+<td><code>func bytes.LastIndex([]byte, []byte) int</code></td>
+<td><code>func strings.LastIndex(string, string) int</code></td>
 </tr>
 <tr>
-<td><code>func strings.Contains(s, substr string) bool</code></td>
-<td><code>func bytes.Contains(b, subslice []byte) bool</code></td>
+<td><code>func bytes.LastIndexAny([]byte, string) int</code></td>
+<td><code>func strings.LastIndexAny(string, string) int</code></td>
 </tr>
 <tr>
-<td><code>func strings.ContainsAny(s, chars string) bool</code></td>
-<td><code>func bytes.ContainsAny(b []byte, chars string) bool</code></td>
+<td><code>func bytes.LastIndexByte([]byte, byte) int</code></td>
+<td><code>func strings.LastIndexByte(string, byte) int</code></td>
 </tr>
 <tr>
-<td><code>func strings.ContainsRune(s string, r rune) bool</code></td>
-<td><code>func bytes.ContainsRune(b []byte, r rune) bool</code></td>
+<td><code>func bytes.LastIndexFunc([]byte, func(rune) bool) int</code></td>
+<td><code>func strings.LastIndexFunc(string, func(rune) bool) int</code></td>
 </tr>
 <tr>
-<td><code>func strings.Count(s, substr string) int</code></td>
-<td><code>func bytes.Count(s, sep []byte) int</code></td>
+<td><code>func bytes.NewBuffer([]byte) *bytes.Buffer</code></td>
+<td><code>func bytes.NewBufferString(string) *bytes.Buffer</code></td>
 </tr>
 <tr>
-<td><code>func strings.EqualFold(s, t string) bool</code></td>
-<td><code>func bytes.EqualFold(s, t []byte) bool</code></td>
+<td><code>func (*httptest.ResponseRecorder) Write([]byte) (int, error)</code></td>
+<td><code>func (*httptest.ResponseRecorder) WriteString(string) (int, error)</code></td>
 </tr>
 <tr>
-<td><code>func strings.HasPrefix(s, prefix string) bool</code></td>
-<td><code>func bytes.HasPrefix(s, prefix []byte) bool</code></td>
+<td><code>func (*maphash.Hash) Write([]byte) (int, error)</code></td>
+<td><code>func (*maphash.Hash) WriteString(string) (int, error)</code></td>
 </tr>
 <tr>
-<td><code>func strings.HasSuffix(s, suffix string) bool</code></td>
-<td><code>func bytes.HasSuffix(s, suffix []byte) bool</code></td>
+<td><code>func (*os.File) Write([]byte) (int, error)</code></td>
+<td><code>func (*os.File) WriteString(string) (int, error)</code></td>
 </tr>
 <tr>
-<td><code>func strings.Index(s, substr string) int</code></td>
-<td><code>func bytes.Index(s, sep []byte) int</code></td>
+<td><code>func regexp.Match(string, []byte) (bool, error)</code></td>
+<td><code>func regexp.MatchString(string, string) (bool, error)</code></td>
 </tr>
 <tr>
-<td><code>func strings.IndexAny(s, chars string) int</code></td>
-<td><code>func bytes.IndexAny(s []byte, chars string) int</code></td>
+<td><code>func (*regexp.Regexp) FindAllIndex([]byte, int) [][]int</code></td>
+<td><code>func (*regexp.Regexp) FindAllStringIndex(string, int) [][]int</code></td>
 </tr>
 <tr>
-<td><code>func strings.IndexByte(s string, c byte) int</code></td>
-<td><code>func bytes.IndexByte(b []byte, c byte) int</code></td>
+<td><code>func (*regexp.Regexp) FindAllSubmatchIndex([]byte, int) [][]int</code></td>
+<td><code>func (*regexp.Regexp) FindAllStringSubmatchIndex(string, int) [][]int</code></td>
 </tr>
 <tr>
-<td><code>func strings.IndexFunc(s string, f func(rune) bool) int</code></td>
-<td><code>func bytes.IndexFunc(s []byte, f func(r rune) bool) int</code></td>
+<td><code>func (*regexp.Regexp) FindIndex([]byte) []int</code></td>
+<td><code>func (*regexp.Regexp) FindStringIndex(string) []int</code></td>
 </tr>
 <tr>
-<td><code>func strings.IndexRune(s string, r rune) int</code></td>
-<td><code>func bytes.IndexRune(s []byte, r rune) int</code></td>
+<td><code>func (*regexp.Regexp) FindSubmatchIndex([]byte) []int</code></td>
+<td><code>func (*regexp.Regexp) FindStringSubmatchIndex(string) []int</code></td>
 </tr>
 <tr>
-<td><code>func strings.LastIndex(s, sep string) int</code></td>
-<td><code>func bytes.LastIndex(s, sep []byte) int</code></td>
+<td><code>func (*regexp.Regexp) Match([]byte) bool</code></td>
+<td><code>func (*regexp.Regexp) MatchString(string) bool</code></td>
 </tr>
 <tr>
-<td><code>func strings.LastIndexAny(s, chars string) int</code></td>
-<td><code>func bytes.LastIndexAny(s []byte, chars string) int</code></td>
+<td><code>func (*strings.Builder) Write([]byte) (int, error)</code></td>
+<td><code>func (*strings.Builder) WriteString(string) (int, error)</code></td>
 </tr>
 <tr>
-<td><code>func strings.LastIndexByte(s string, c byte) int</code></td>
-<td><code>func bytes.LastIndexByte(s []byte, c byte) int</code></td>
+<td><code>func (*strings.Builder) WriteRune(rune) (int, error)</code></td>
+<td><code>func (*strings.Builder) WriteString(string) (int, error)</code></td>
 </tr>
 <tr>
-<td><code>func strings.LastIndexFunc(s string, f func(rune) bool) int</code></td>
-<td><code>func bytes.LastIndexFunc(s []byte, f func(r rune) bool) int</code></td>
+<td><code>func strings.Compare(string) int</code></td>
+<td><code>func bytes.Compare([]byte) int</code></td>
 </tr>
 <tr>
-<td><code>func bytes.NewBufferString(s string) *bytes.Buffer</code></td>
-<td><code>func bytes.NewBuffer(buf []byte *bytes.Buffer</code></td>
+<td><code>func strings.Contains(string) bool</code></td>
+<td><code>func bytes.Contains([]byte) bool</code></td>
 </tr>
 <tr>
-<td><code>func (h *hash/maphash.Hash) WriteString(s string) (int, error)</code></td>
-<td><code>func (h *hash/maphash.Hash) Write(b []byte) (int, error)</code></td>
+<td><code>func strings.ContainsAny(string) bool</code></td>
+<td><code>func bytes.ContainsAny([]byte) bool</code></td>
 </tr>
 <tr>
-<td><code>func (rw *net/http/httptest.ResponseRecorder) WriteString(str string) (int, error)</code></td>
-<td><code>func (rw *net/http/httptest.ResponseRecorder) Write(buf []byte) (int, error)</code></td>
+<td><code>func strings.ContainsRune(string) bool</code></td>
+<td><code>func bytes.ContainsRune([]byte) bool</code></td>
 </tr>
 <tr>
-<td><code>func (f *os.File) WriteString(s string) (n int, err error)</code></td>
-<td><code>func (f *os.File) Write(b []byte) (n int, err error)</code></td>
+<td><code>func strings.EqualFold(string) bool</code></td>
+<td><code>func bytes.EqualFold([]byte) bool</code></td>
 </tr>
 <tr>
-<td><code>func regexp.MatchString(pattern string, s string) (bool, error)</code></td>
-<td><code>func regexp.Match(pattern string, b []byte) (bool, error)</code></td>
+<td><code>func strings.HasPrefix(string) bool</code></td>
+<td><code>func bytes.HasPrefix([]byte) bool</code></td>
 </tr>
 <tr>
-<td><code>func (re *regexp.Regexp) FindAllStringIndex(s string, n int) [][]int</code></td>
-<td><code>func (re *regexp.Regexp) FindAllIndex(b []byte, n int) [][]int</code></td>
+<td><code>func strings.HasSuffix(string) bool</code></td>
+<td><code>func bytes.HasSuffix([]byte) bool</code></td>
 </tr>
 <tr>
-<td><code>func (re *regexp.Regexp) FindAllStringSubmatch(s string, n int) [][]string</code></td>
-<td><code>func (re *regexp.Regexp) FindAllSubmatch(b []byte, n int) [][][]byte</code></td>
+<td><code>func strings.Index(string) int</code></td>
+<td><code>func bytes.Index([]byte) int</code></td>
 </tr>
 <tr>
-<td><code>func (re *regexp.Regexp) FindStringIndex(s string) (loc []int)</code></td>
-<td><code>func (re *regexp.Regexp) FindIndex(b []byte) (loc []int)</code></td>
+<td><code>func strings.IndexFunc(string, func(r rune) bool) int</code></td>
+<td><code>func bytes.IndexFunc([]byte, func(r rune) bool) int</code></td>
 </tr>
 <tr>
-<td><code>func (re *regexp.Regexp) FindStringSubmatchIndex(s string) []int</code></td>
-<td><code>func (re *regexp.Regexp) FindSubmatchIndex(b []byte) []int</code></td>
+<td><code>func strings.LastIndex(string) int</code></td>
+<td><code>func bytes.LastIndex([]byte) int</code></td>
 </tr>
 <tr>
-<td><code>func (re *regexp.Regexp) MatchString(s string) bool</code></td>
-<td><code>func (re *regexp.Regexp) Match(b []byte) bool</code></td>
+<td><code>func strings.LastIndexAny(string) int</code></td>
+<td><code>func bytes.LastIndexAny([]byte) int</code></td>
 </tr>
 <tr>
-<td><code>func (b *strings.Builder) WriteString(s string) error</code></td>
-<td>
-  <code>func (b *strings.Builder) Write(p []byte) (int, error)</code>
-  <code>func (b *strings.Builder) WriteRune(r rune) (int, error)</code>
- </td>
+<td><code>func strings.LastIndexFunc(string, func(r rune) bool) int</code></td>
+<td><code>func bytes.LastIndexFunc([]byte, func(r rune) bool) int</code></td>
 </tr>
 <tr>
-<td><code>func utf8.ValidString(s string) bool</code></td>
-<td><code>func utf8.Valid(p []byte) bool</code></td>
+<td><code>func utf8.DecodeLastRune([]byte) (rune, int)</code></td>
+<td><code>func utf8.DecodeLastRuneInString(string) (rune, int)</code></td>
 </tr>
 <tr>
-<td><code>func utf8.FullRuneInString(s string) bool</code></td>
-<td><code>func utf8.FullRune(p []byte) bool</code></td>
+<td><code>func utf8.DecodeRune([]byte) (rune, int)</code></td>
+<td><code>func utf8.DecodeRuneInString(string) (rune, int)</code></td>
 </tr>
 <tr>
-<td><code>func utf8.RuneCountInString(s string) (n int)</code></td>
-<td><code>func utf8.RuneCount(p []byte) int</code></td>
+<td><code>func utf8.FullRune([]byte) bool</code></td>
+<td><code>func utf8.FullRuneInString(string) bool</code></td>
 </tr>
 <tr>
-<td><code>func utf8.DecodeLastRuneInString(s string) (rune, int)</code></td>
-<td><code>func utf8.DecodeLastRune(p []byte) (rune, int)</code></td>
+<td><code>func utf8.RuneCount([]byte) int</code></td>
+<td><code>func utf8.RuneCountInString(string) int</code></td>
 </tr>
 <tr>
-<td><code>func utf8.DecodeRuneInString(s string) (une, int)</code></td>
-<td><code>func utf8.DecodeRune(p []byte) (rune, int)</code></td>
+<td><code>func utf8.Valid([]byte) bool</code></td>
+<td><code>func utf8.ValidString(string) bool</code></td>
 </tr>
-</table>
+
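For context: each row of the regenerated table pairs a `[]byte`-based call with its string counterpart. A small runnable sketch, with arbitrary sample strings, of the substitution `mirror` suggests:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	var b strings.Builder

	// mirror flags this form: the []byte conversion allocates just to feed Write.
	b.Write([]byte("hello, "))

	// the mirrored method accepts the string directly, avoiding the copy.
	b.WriteString("world")

	fmt.Println(b.String())
}
```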
diff --git a/vendor/github.com/butuzov/mirror/Makefile b/vendor/github.com/butuzov/mirror/Makefile
index b4b952b012a7754b619f37a779b794de6ef37e91..ac267208fb991de87cd77db12cd476d432bc24af 100644
--- a/vendor/github.com/butuzov/mirror/Makefile
+++ b/vendor/github.com/butuzov/mirror/Makefile
@@ -2,57 +2,130 @@
 export PATH   := $(PWD)/bin:$(PATH)                    # ./bin to $PATH
 export SHELL  := bash                                  # Default Shell
 
-GOPKGS := $(shell go list ./... | grep -vE "(cmd|sandbox|testdata)" | tr -s '\n' ',' | sed 's/.\{1\}$$//' )
+define install_go_bin
+	@ which $(1) 2>&1 1>/dev/null || GOBIN=$(PWD)/bin go install $(2)
+endef
 
+.DEFAULT_GOAL := help
 
-build:
-	@ go build -trimpath -ldflags="-w -s" \
-		-o bin/mirror ./cmd/mirror/
+# Generate Artifacts ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+generate: ## Generate Assets
+	$(MAKE)
 
-build-race:
-	@ go build -race -trimpath -ldflags="-w -s" \
-		-o bin/mirror ./cmd/mirror/
+generate-tests: ## Generates Assets at testdata
+	go run ./cmd/internal/tests/ "$(PWD)/testdata"
 
-tests:
-	go test -v -count=1 -race \
+generate-mirror-table: ## Generate Asset MIRROR_FUNCS.md
+	go run ./cmd/internal/mirror-table/ > "$(PWD)/MIRROR_FUNCS.md"
+
+
+# Build Artifacts ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+build: ## Build binary
+	@ go build -trimpath -ldflags="-w -s" -o bin/mirror ./cmd/mirror/
+
+build-race: ## Build binary with race flag
+	@ go build -race -trimpath -ldflags="-w -s" -o bin/mirror ./cmd/mirror/
+
+install: ## Installs binary
+	@ go install -trimpath -v -ldflags="-w -s" ./cmd/mirror
+
+# Run Tests ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+tests: ## Run Tests (Summary)
+	@ go test -v -count=1 -race \
 		-failfast \
 		-parallel=2 \
 		-timeout=1m \
 		-covermode=atomic \
-		-coverpkg=$(GOPKGS) -coverprofile=coverage.cov ./...
+	    -coverprofile=coverage.cov ./...
 
-tests-summary:
-	go test -v -count=1 -race \
+tests-summary: ## Run Tests, but shows summary
+tests-summary: bin/tparse
+	@ go test -v -count=1 -race \
 		-failfast \
 		-parallel=2 \
 		-timeout=1m \
 		-covermode=atomic \
-		-coverpkg=$(GOPKGS) -coverprofile=coverage.cov --json ./... | tparse -all
+		-coverprofile=coverage.cov --json ./... | tparse -all
 
-test-generate:
-	go run ./cmd/internal/generate-tests/ "$(PWD)/testdata"
+# Linter ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
+lints: ## Run golangci-lint
+lints: bin/golangci-lint
 lints:
-	golangci-lint run --no-config ./... -D deadcode --skip-dirs "^(cmd|sandbox|testdata)"
+	golangci-lint run --no-config ./... --skip-dirs "^(cmd|testdata)"
 
 
-cover:
-	go tool cover -html=coverage.cov
+cover: ## Run Coverage
+	@ go tool cover -html=coverage.cov
+
+# Other ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+test-release: bin/goreleaser
+	goreleaser release --help
+	goreleaser release --skip=publish --skip=validate --clean
+
+# Install  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-install:
-	go install -trimpath -v -ldflags="-w -s" \
-		./cmd/mirror
+bin/tparse: ## Installs tparse@v0.13.2 (if not exists)
+bin/tparse: INSTALL_URL=github.com/mfridman/tparse@v0.13.2
+bin/tparse:
+	$(call install_go_bin, tparse, $(INSTALL_URL))
 
-funcs:
-	echo "" > "out/results.txt"
-	go list std | grep -v "vendor" | grep -v "internal" | \
-		xargs -I {} sh -c 'go doc -all {} > out/$(basename {}).txt'
+bin/golangci-lint: ## Installs golangci-lint@v1.55.2 (if not exists)
+bin/golangci-lint: INSTALL_URL=github.com/golangci/golangci-lint@v1.55.2
+bin/golangci-lint:
+	$(call install_go_bin, golangci-lint, $(INSTALL_URL))
 
+bin/goreleaser: ## Installs goreleaser@v1.24.0 (if not exists)
+bin/goreleaser: INSTALL_URL=github.com/goreleaser/goreleaser@v1.24.0
 bin/goreleaser:
-	@curl -Ls https://github.com/goreleaser/goreleaser/releases/download/v1.17.2/goreleaser_Darwin_all.tar.gz | tar -zOxf - goreleaser > ./bin/goreleaser
-	chmod 0755 ./bin/goreleaser
+	$(call install_go_bin, goreleaser, $(INSTALL_URL))
 
-test-release: bin/goreleaser
-	goreleaser release --help
-	goreleaser release -f .goreleaser.yaml \
-		--skip-validate --skip-publish --clean
+# Help ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+help: dep-gawk
+	@ echo "=============================================================================="
+	@ echo " Makefile: github.com/butuzov/mirror                                   "
+	@ echo "=============================================================================="
+	@ cat $(MAKEFILE_LIST) | \
+		grep -E '^# ~~~ .*? [~]+$$|^[a-zA-Z0-9_-]+:.*?## .*$$' | \
+		gawk '{if ( $$1=="#" ) { \
+			match($$0, /^# ~~~ (.+?) [~]+$$/, a);\
+			{print "\n", a[1], ""}\
+		} else { \
+			match($$0, /^([a-zA-Z/_-]+):.*?## (.*)$$/, a); \
+			{printf "  - \033[32m%-20s\033[0m %s\n",   a[1], a[2]} \
+ 		}}'
+	@ echo ""
+
+
+# Helper Methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+dep-gawk:
+	@ if [ -z "$(shell command -v gawk)" ]; then  \
+		if [ -x /usr/local/bin/brew ]; then $(MAKE) _brew_gawk_install; exit 0; fi; \
+		if [ -x /usr/bin/apt-get ]; then $(MAKE) _ubuntu_gawk_install; exit 0; fi; \
+		if [ -x /usr/bin/yum ]; then  $(MAKE) _centos_gawk_install; exit 0; fi; \
+		if [ -x /sbin/apk ]; then  $(MAKE) _alpine_gawk_install; exit 0; fi; \
+		echo  "GNU Awk Required.";\
+		exit 1; \
+	fi
+
+_brew_gawk_install:
+	@ echo "Instaling gawk using brew... "
+	@ brew install gawk --quiet
+	@ echo "done"
+
+_ubuntu_gawk_install:
+	@ echo "Instaling gawk using apt-get... "
+	@ apt-get -q install gawk -y
+	@ echo "done"
+
+_alpine_gawk_install:
+	@ echo "Instaling gawk using yum... "
+	@ apk add --update --no-cache gawk
+	@ echo "done"
+
+_centos_gawk_install:
+	@ echo "Instaling gawk using yum... "
+	@ yum install -q -y gawk;
+	@ echo "done"
diff --git a/vendor/github.com/butuzov/mirror/Taskfile.yml b/vendor/github.com/butuzov/mirror/Taskfile.yml
index 26c9ba257153cedbb64487d657f45595e2319d44..4bc7cfeda524bd2199d7fc084d4798bbb7d2036d 100644
--- a/vendor/github.com/butuzov/mirror/Taskfile.yml
+++ b/vendor/github.com/butuzov/mirror/Taskfile.yml
@@ -1,28 +1,73 @@
 version: '3'
 
 tasks:
-  default:
-    sources:
-    - "./**/*.go"
-    method: timestamp
-    cmds:
-    - clear
-    - make build
-    - make build-race
-    - task: lints
-    # - make test-generate
-    - task: tests
-    - cmd: go run ./cmd/mirror/ --with-tests --with-debug ./sandbox
-      ignore_error: true
-
-  testcase: go test -v -failfast -count=1 -run "TestAll/{{ .Case }}" ./...
-
-  tests:
-    cmds:
-    - cmd: make tests
-      ignore_error: true
-
-  lints:
-    cmds:
-    - cmd: make lints
-      ignore_error: true
+    default: task --list-all
+
+    # Continuous Development ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    watcher:
+        desc: watcher
+        sources:
+        - ./**/*.go
+        method: timestamp
+        cmds:
+        - task: lints
+        - task: test-summary
+        - task: build-race
+
+    # Generating assets ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    generate:
+        desc: Generate Assets
+        sources:
+        - ./checkers_*.go
+        - ./cmd/internal/**/*.go
+        method: timestamp
+        cmds:
+        - task generate-mirror-table
+        - task generate-tests
+
+    generate-mirror-table:
+        desc: Generate Asset MIRROR_FUNCS.md
+        cmd: make generate-mirror-table
+
+    generate-tests:
+        desc: Generates Assets at testdata
+        cmd: make generate-tests
+
+    # Run Tests ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    tests:
+        desc: Run Tests
+        cmd: make tests
+        ignore_error: true
+
+    test-summary:
+        desc: Run Tests (Summary)
+        cmd: make tests-summary
+        ignore_error: true
+
+    testcase: go test -v -failfast -count=1 -run "TestAll/{{ .Case }}" ./...
+
+    # Build Artifacts ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    build:
+        desc: Build binary
+        cmd: make build
+
+    build-race:
+        desc: Build binary with race flag
+        cmd: make build-race
+
+    install:
+        desc: Install binary
+        cmd: make install
+
+    # Linter ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    lints:
+        cmd: make lints
+
+    # Other
+    cover:
+        desc: Run Coverage
+        cmd: make cover
+
+    test-release:
+        desc: Testing Release
+        cmd: make test-release
diff --git a/vendor/github.com/butuzov/mirror/checkers_bufio.go b/vendor/github.com/butuzov/mirror/checkers_bufio.go
index 292ed269aaa136963c7467b1d2c5b3de387dad28..0985edad324887ffd7e59a13565e746c4b67bec0 100644
--- a/vendor/github.com/butuzov/mirror/checkers_bufio.go
+++ b/vendor/github.com/butuzov/mirror/checkers_bufio.go
@@ -15,7 +15,7 @@ var BufioMethods = []checker.Violation{
 		Generate: &checker.Generate{
 			PreCondition: `b := bufio.Writer{}`,
 			Pattern:      `Write($0)`,
-			Returns:      2,
+			Returns:      []string{"int", "error"},
 		},
 	},
 	{ // (*bufio.Writer).WriteString
@@ -30,7 +30,7 @@ var BufioMethods = []checker.Violation{
 		Generate: &checker.Generate{
 			PreCondition: `b := bufio.Writer{}`,
 			Pattern:      `WriteString($0)`,
-			Returns:      2,
+			Returns:      []string{"int", "error"},
 		},
 	},
 	{ // (*bufio.Writer).WriteString -> (*bufio.Writer).WriteRune
@@ -42,15 +42,11 @@ var BufioMethods = []checker.Violation{
 		Args:      []int{0},
 		ArgsType:  checker.Rune,
 		AltCaller: "WriteRune",
+
+		Generate: &checker.Generate{
+			SkipGenerate: true,
+			Pattern:      `WriteString($0)`,
+			Returns:      []string{"int", "error"},
+		},
 	},
-	// { // (*bufio.Writer).WriteString -> (*bufio.Writer).WriteByte
-	// 	Targets:   checker.Strings,
-	// 	Type:      checker.Method,
-	// 	Package:   "strings",
-	// 	Struct:    "Builder",
-	// 	Caller:    "WriteString",
-	// 	Args:      []int{0},
-	// 	ArgsType:  checker.Byte,
-	// 	AltCaller: "WriteByte", // byte
-	// },
 }
diff --git a/vendor/github.com/butuzov/mirror/checkers_bytes.go b/vendor/github.com/butuzov/mirror/checkers_bytes.go
index c490a3784ed81841e75a51a4d06010804e485057..b8819879ccce282de8e0db53eee83abafe197f9e 100644
--- a/vendor/github.com/butuzov/mirror/checkers_bytes.go
+++ b/vendor/github.com/butuzov/mirror/checkers_bytes.go
@@ -14,7 +14,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `NewBuffer($0)`,
-				Returns: 1,
+				Returns: []string{"*bytes.Buffer"},
 			},
 		},
 		{ // bytes.NewBufferString
@@ -27,7 +27,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `NewBufferString($0)`,
-				Returns: 1,
+				Returns: []string{"*bytes.Buffer"},
 			},
 		},
 		{ // bytes.Compare:
@@ -41,7 +41,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `Compare($0, $1)`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // bytes.Contains:
@@ -55,7 +55,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `Contains($0, $1)`,
-				Returns: 1,
+				Returns: []string{"bool"},
 			},
 		},
 		{ // bytes.ContainsAny
@@ -69,7 +69,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `ContainsAny($0, "f")`,
-				Returns: 1,
+				Returns: []string{"bool"},
 			},
 		},
 		{ // bytes.ContainsRune
@@ -83,7 +83,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `ContainsRune($0, 'ф')`,
-				Returns: 1,
+				Returns: []string{"bool"},
 			},
 		},
 		{ // bytes.Count
@@ -97,7 +97,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `Count($0, $1)`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // bytes.EqualFold
@@ -111,7 +111,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `EqualFold($0, $1)`,
-				Returns: 1,
+				Returns: []string{"bool"},
 			},
 		},
 
@@ -126,7 +126,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `HasPrefix($0, $1)`,
-				Returns: 1,
+				Returns: []string{"bool"},
 			},
 		},
 		{ // bytes.HasSuffix
@@ -140,7 +140,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `HasSuffix($0, $1)`,
-				Returns: 1,
+				Returns: []string{"bool"},
 			},
 		},
 		{ // bytes.Index
@@ -154,7 +154,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `Index($0, $1)`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // bytes.IndexAny
@@ -168,7 +168,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `IndexAny($0, "f")`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // bytes.IndexByte
@@ -182,7 +182,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `IndexByte($0, 'f')`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // bytes.IndexFunc
@@ -196,7 +196,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `IndexFunc($0, func(rune) bool {return true })`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // bytes.IndexRune
@@ -210,7 +210,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `IndexRune($0, rune('ф'))`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // bytes.LastIndex
@@ -224,7 +224,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `LastIndex($0, $1)`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // bytes.LastIndexAny
@@ -238,7 +238,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `LastIndexAny($0, "Ñ„")`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // bytes.LastIndexByte
@@ -252,7 +252,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `LastIndexByte($0, 'f')`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // bytes.LastIndexFunc
@@ -266,7 +266,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `LastIndexFunc($0, func(rune) bool {return true })`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 	}
@@ -284,7 +284,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `bb := bytes.Buffer{}`,
 				Pattern:      `Write($0)`,
-				Returns:      2,
+				Returns:      []string{"int", "error"},
 			},
 		},
 		{ // (*bytes.Buffer).WriteString
@@ -299,7 +299,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `bb := bytes.Buffer{}`,
 				Pattern:      `WriteString($0)`,
-				Returns:      2,
+				Returns:      []string{"int", "error"},
 			},
 		},
 		{ // (*bytes.Buffer).WriteString -> (*bytes.Buffer).WriteRune
@@ -311,6 +311,11 @@ var (
 			Args:      []int{0},
 			ArgsType:  checker.Rune,
 			AltCaller: "WriteRune",
+			Generate: &checker.Generate{
+				SkipGenerate: true,
+				Pattern:      `WriteString($0)`,
+				Returns:      []string{"int", "error"},
+			},
 		},
 		// { // (*bytes.Buffer).WriteString -> (*bytes.Buffer).WriteByte
 		// 	Targets:   checker.Strings,
diff --git a/vendor/github.com/butuzov/mirror/checkers_httptest.go b/vendor/github.com/butuzov/mirror/checkers_httptest.go
index ae675093007084e3cd8e3208363a56da06900c31..c28bb1ade4317ad6793b2345cc4d809890a92b4b 100644
--- a/vendor/github.com/butuzov/mirror/checkers_httptest.go
+++ b/vendor/github.com/butuzov/mirror/checkers_httptest.go
@@ -15,7 +15,7 @@ var HTTPTestMethods = []checker.Violation{
 		Generate: &checker.Generate{
 			PreCondition: `h := httptest.ResponseRecorder{}`,
 			Pattern:      `Write($0)`,
-			Returns:      2,
+			Returns:      []string{"int", "error"},
 		},
 	},
 	{ // (*net/http/httptest.ResponseRecorder).WriteString
@@ -30,7 +30,7 @@ var HTTPTestMethods = []checker.Violation{
 		Generate: &checker.Generate{
 			PreCondition: `h := httptest.ResponseRecorder{}`,
 			Pattern:      `WriteString($0)`,
-			Returns:      2,
+			Returns:      []string{"int", "error"},
 		},
 	},
 }
diff --git a/vendor/github.com/butuzov/mirror/checkers_maphash.go b/vendor/github.com/butuzov/mirror/checkers_maphash.go
index 4d184d2a956a444f9b70e3c68683296b09b3b17b..0aa43ff7bbee7f303fce061b09366e639874216d 100644
--- a/vendor/github.com/butuzov/mirror/checkers_maphash.go
+++ b/vendor/github.com/butuzov/mirror/checkers_maphash.go
@@ -15,7 +15,7 @@ var MaphashMethods = []checker.Violation{
 		Generate: &checker.Generate{
 			PreCondition: `h := maphash.Hash{}`,
 			Pattern:      `Write($0)`,
-			Returns:      2,
+			Returns:      []string{"int", "error"},
 		},
 	},
 	{ // (*hash/maphash).WriteString
@@ -30,7 +30,7 @@ var MaphashMethods = []checker.Violation{
 		Generate: &checker.Generate{
 			PreCondition: `h := maphash.Hash{}`,
 			Pattern:      `WriteString($0)`,
-			Returns:      2,
+			Returns:      []string{"int", "error"},
 		},
 	},
 }
diff --git a/vendor/github.com/butuzov/mirror/checkers_os.go b/vendor/github.com/butuzov/mirror/checkers_os.go
index 09f5a18e58c50161185fd95a0e05096c0489f25e..40973576b6de17c925f0a9cc2fd539c8c4e91583 100644
--- a/vendor/github.com/butuzov/mirror/checkers_os.go
+++ b/vendor/github.com/butuzov/mirror/checkers_os.go
@@ -15,7 +15,7 @@ var OsFileMethods = []checker.Violation{
 		Generate: &checker.Generate{
 			PreCondition: `f := &os.File{}`,
 			Pattern:      `Write($0)`,
-			Returns:      2,
+			Returns:      []string{"int", "error"},
 		},
 	},
 	{ // (*os.File).WriteString
@@ -30,7 +30,7 @@ var OsFileMethods = []checker.Violation{
 		Generate: &checker.Generate{
 			PreCondition: `f := &os.File{}`,
 			Pattern:      `WriteString($0)`,
-			Returns:      2,
+			Returns:      []string{"int", "error"},
 		},
 	},
 }
diff --git a/vendor/github.com/butuzov/mirror/checkers_regexp.go b/vendor/github.com/butuzov/mirror/checkers_regexp.go
index 17175e0286f59bbe903406b8a63905b93fdb8666..2cd4dc9f80e97cc1722e40b3a43243230e96eaa1 100644
--- a/vendor/github.com/butuzov/mirror/checkers_regexp.go
+++ b/vendor/github.com/butuzov/mirror/checkers_regexp.go
@@ -14,7 +14,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `Match("foo", $0)`,
-				Returns: 2,
+				Returns: []string{"bool", "error"},
 			},
 		},
 		{ // regexp.MatchString
@@ -27,7 +27,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `MatchString("foo", $0)`,
-				Returns: 2,
+				Returns: []string{"bool", "error"},
 			},
 		},
 	}
@@ -45,7 +45,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `re := regexp.MustCompile(".*")`,
 				Pattern:      `Match($0)`,
-				Returns:      1,
+				Returns:      []string{"bool"},
 			},
 		},
 		{ // (*regexp.Regexp).MatchString
@@ -60,7 +60,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `re := regexp.MustCompile(".*")`,
 				Pattern:      `MatchString($0)`,
-				Returns:      1,
+				Returns:      []string{"bool"},
 			},
 		},
 		{ // (*regexp.Regexp).FindAllIndex
@@ -75,7 +75,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `re := regexp.MustCompile(".*")`,
 				Pattern:      `FindAllIndex($0, 1)`,
-				Returns:      1,
+				Returns:      []string{"[][]int"},
 			},
 		},
 		{ // (*regexp.Regexp).FindAllStringIndex
@@ -90,7 +90,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `re := regexp.MustCompile(".*")`,
 				Pattern:      `FindAllStringIndex($0, 1)`,
-				Returns:      1,
+				Returns:      []string{"[][]int"},
 			},
 		},
 		{ // (*regexp.Regexp).FindAllSubmatchIndex
@@ -105,7 +105,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `re := regexp.MustCompile(".*")`,
 				Pattern:      `FindAllSubmatchIndex($0, 1)`,
-				Returns:      1,
+				Returns:      []string{"[][]int"},
 			},
 		},
 		{ // (*regexp.Regexp).FindAllStringSubmatchIndex
@@ -120,7 +120,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `re := regexp.MustCompile(".*")`,
 				Pattern:      `FindAllStringSubmatchIndex($0, 1)`,
-				Returns:      1,
+				Returns:      []string{"[][]int"},
 			},
 		},
 		{ // (*regexp.Regexp).FindIndex
@@ -135,7 +135,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `re := regexp.MustCompile(".*")`,
 				Pattern:      `FindIndex($0)`,
-				Returns:      1,
+				Returns:      []string{"[]int"},
 			},
 		},
 		{ // (*regexp.Regexp).FindStringIndex
@@ -150,7 +150,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `re := regexp.MustCompile(".*")`,
 				Pattern:      `FindStringIndex($0)`,
-				Returns:      1,
+				Returns:      []string{"[]int"},
 			},
 		},
 		{ // (*regexp.Regexp).FindSubmatchIndex
@@ -165,7 +165,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `re := regexp.MustCompile(".*")`,
 				Pattern:      `FindSubmatchIndex($0)`,
-				Returns:      1,
+				Returns:      []string{"[]int"},
 			},
 		},
 		{ // (*regexp.Regexp).FindStringSubmatchIndex
@@ -180,7 +180,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `re := regexp.MustCompile(".*")`,
 				Pattern:      `FindStringSubmatchIndex($0)`,
-				Returns:      1,
+				Returns:      []string{"[]int"},
 			},
 		},
 	}
diff --git a/vendor/github.com/butuzov/mirror/checkers_strings.go b/vendor/github.com/butuzov/mirror/checkers_strings.go
index ead7e9cc7ea170e593723deda6d85d564f66e2ed..3bd59a62fad865800f3b5555e3238d661b218ed5 100644
--- a/vendor/github.com/butuzov/mirror/checkers_strings.go
+++ b/vendor/github.com/butuzov/mirror/checkers_strings.go
@@ -15,7 +15,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `Compare($0,$1)`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // strings.Contains
@@ -29,7 +29,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `Contains($0,$1)`,
-				Returns: 1,
+				Returns: []string{"bool"},
 			},
 		},
 		{ // strings.ContainsAny
@@ -43,7 +43,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `ContainsAny($0,"foobar")`,
-				Returns: 1,
+				Returns: []string{"bool"},
 			},
 		},
 		{ // strings.ContainsRune
@@ -57,7 +57,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `ContainsRune($0,'ф')`,
-				Returns: 1,
+				Returns: []string{"bool"},
 			},
 		},
 		{ // 	strings.Count
@@ -71,7 +71,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `Count($0, $1)`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // strings.EqualFold
@@ -85,7 +85,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `EqualFold($0,$1)`,
-				Returns: 1,
+				Returns: []string{"bool"},
 			},
 		},
 		{ // strings.HasPrefix
@@ -99,7 +99,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `HasPrefix($0,$1)`,
-				Returns: 1,
+				Returns: []string{"bool"},
 			},
 		},
 		{ // strings.HasSuffix
@@ -113,7 +113,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `HasSuffix($0,$1)`,
-				Returns: 1,
+				Returns: []string{"bool"},
 			},
 		},
 		{ // strings.Index
@@ -127,7 +127,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `Index($0,$1)`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // strings.IndexAny
@@ -141,7 +141,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `IndexAny($0, "f")`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // strings.IndexByte
@@ -154,8 +154,8 @@ var (
 			AltCaller:  "IndexByte",
 
 			Generate: &checker.Generate{
-				Pattern: `IndexByte($0, byte('f'))`,
-				Returns: 1,
+				Pattern: `IndexByte($0, 'f')`,
+				Returns: []string{"int"},
 			},
 		},
 		{ // strings.IndexFunc
@@ -168,8 +168,8 @@ var (
 			AltCaller:  "IndexFunc",
 
 			Generate: &checker.Generate{
-				Pattern: `IndexFunc($0,func(r rune) bool { return true })`,
-				Returns: 1,
+				Pattern: `IndexFunc($0, func(r rune) bool { return true })`,
+				Returns: []string{"int"},
 			},
 		},
 		{ // strings.IndexRune
@@ -183,7 +183,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `IndexRune($0, rune('ф'))`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // strings.LastIndex
@@ -197,7 +197,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `LastIndex($0,$1)`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // strings.LastIndexAny
@@ -211,7 +211,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `LastIndexAny($0,"f")`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 		{ // strings.LastIndexByte
@@ -224,8 +224,8 @@ var (
 			AltCaller:  "LastIndexByte",
 
 			Generate: &checker.Generate{
-				Pattern: `LastIndexByte($0, byte('f'))`,
-				Returns: 1,
+				Pattern: `LastIndexByte($0, 'f')`,
+				Returns: []string{"int"},
 			},
 		},
 		{ // strings.LastIndexFunc
@@ -239,7 +239,7 @@ var (
 
 			Generate: &checker.Generate{
 				Pattern: `LastIndexFunc($0, func(r rune) bool { return true })`,
-				Returns: 1,
+				Returns: []string{"int"},
 			},
 		},
 	}
@@ -257,7 +257,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `builder := strings.Builder{}`,
 				Pattern:      `Write($0)`,
-				Returns:      2,
+				Returns:      []string{"int", "error"},
 			},
 		},
 		{ // (*strings.Builder).WriteString
@@ -272,7 +272,7 @@ var (
 			Generate: &checker.Generate{
 				PreCondition: `builder := strings.Builder{}`,
 				Pattern:      `WriteString($0)`,
-				Returns:      2,
+				Returns:      []string{"int", "error"},
 			},
 		},
 		{ // (*strings.Builder).WriteString -> (*strings.Builder).WriteRune
@@ -284,6 +284,11 @@ var (
 			Args:      []int{0},
 			ArgsType:  checker.Rune,
 			AltCaller: "WriteRune",
+			Generate: &checker.Generate{
+				SkipGenerate: true,
+				Pattern:      `WriteString($0)`,
+				Returns:      []string{"int", "error"},
+			},
 		},
 		// { // (*strings.Builder).WriteString -> (*strings.Builder).WriteByte
 		// 	Targets:   checker.Strings,
diff --git a/vendor/github.com/butuzov/mirror/checkers_utf8.go b/vendor/github.com/butuzov/mirror/checkers_utf8.go
index e7c4d5ba4da83313dd5c6a8204ebaa8c4020ea8e..fd3010c37e385632329daa74368f42f3e58bc356 100644
--- a/vendor/github.com/butuzov/mirror/checkers_utf8.go
+++ b/vendor/github.com/butuzov/mirror/checkers_utf8.go
@@ -13,7 +13,7 @@ var UTF8Functions = []checker.Violation{
 
 		Generate: &checker.Generate{
 			Pattern: `Valid($0)`,
-			Returns: 1,
+			Returns: []string{"bool"},
 		},
 	},
 	{ // utf8.ValidString
@@ -26,7 +26,7 @@ var UTF8Functions = []checker.Violation{
 
 		Generate: &checker.Generate{
 			Pattern: `ValidString($0)`,
-			Returns: 1,
+			Returns: []string{"bool"},
 		},
 	},
 	{ // utf8.FullRune
@@ -39,7 +39,7 @@ var UTF8Functions = []checker.Violation{
 
 		Generate: &checker.Generate{
 			Pattern: `FullRune($0)`,
-			Returns: 1,
+			Returns: []string{"bool"},
 		},
 	},
 	{ // utf8.FullRuneInString
@@ -52,7 +52,7 @@ var UTF8Functions = []checker.Violation{
 
 		Generate: &checker.Generate{
 			Pattern: `FullRuneInString($0)`,
-			Returns: 1,
+			Returns: []string{"bool"},
 		},
 	},
 
@@ -66,7 +66,7 @@ var UTF8Functions = []checker.Violation{
 
 		Generate: &checker.Generate{
 			Pattern: `RuneCount($0)`,
-			Returns: 1,
+			Returns: []string{"int"},
 		},
 	},
 	{ // bytes.RuneCountInString
@@ -79,7 +79,7 @@ var UTF8Functions = []checker.Violation{
 
 		Generate: &checker.Generate{
 			Pattern: `RuneCountInString($0)`,
-			Returns: 1,
+			Returns: []string{"int"},
 		},
 	},
 
@@ -93,7 +93,7 @@ var UTF8Functions = []checker.Violation{
 
 		Generate: &checker.Generate{
 			Pattern: `DecodeLastRune($0)`,
-			Returns: 2,
+			Returns: []string{"rune", "int"},
 		},
 	},
 	{ // utf8.DecodeLastRuneInString
@@ -106,7 +106,7 @@ var UTF8Functions = []checker.Violation{
 
 		Generate: &checker.Generate{
 			Pattern: `DecodeLastRuneInString($0)`,
-			Returns: 2,
+			Returns: []string{"rune", "int"},
 		},
 	},
 	{ // utf8.DecodeRune
@@ -119,7 +119,7 @@ var UTF8Functions = []checker.Violation{
 
 		Generate: &checker.Generate{
 			Pattern: `DecodeRune($0)`,
-			Returns: 2,
+			Returns: []string{"rune", "int"},
 		},
 	},
 	{ // utf8.DecodeRuneInString
@@ -132,7 +132,7 @@ var UTF8Functions = []checker.Violation{
 
 		Generate: &checker.Generate{
 			Pattern: `DecodeRuneInString($0)`,
-			Returns: 2,
+			Returns: []string{"rune", "int"},
 		},
 	},
 }
diff --git a/vendor/github.com/butuzov/mirror/internal/checker/violation.go b/vendor/github.com/butuzov/mirror/internal/checker/violation.go
index 375d3c8e65b7c083a318cbd82e1c3ff36af90aef..3d8acf1415b3f74898a52e5b0ee87c2b1f7700aa 100644
--- a/vendor/github.com/butuzov/mirror/internal/checker/violation.go
+++ b/vendor/github.com/butuzov/mirror/internal/checker/violation.go
@@ -52,9 +52,10 @@ type Violation struct {
 
 // Tests (generation) related struct.
 type Generate struct {
-	PreCondition string // Precondition we want to be generated
-	Pattern      string // Generate pattern (for the `want` message)
-	Returns      int    // Expected to return n elements
+	SkipGenerate bool
+	PreCondition string   // Precondition we want to be generated
+	Pattern      string   // Generate pattern (for the `want` message)
+	Returns      []string // ReturnTypes as slice
 }
 
 func (v *Violation) With(base []byte, e *ast.CallExpr, args map[int]ast.Expr) *Violation {
@@ -165,7 +166,7 @@ type GolangIssue struct {
 	Original  string
 }
 
-// Issue inteanded to be used only with golangci-lint, bu you can use use it
+// Issue is intended to be used only within `golangci-lint`, but you can use it
 // alongside Diagnostic if you wish.
 func (v *Violation) Issue(fSet *token.FileSet) GolangIssue {
 	issue := GolangIssue{
@@ -174,7 +175,7 @@ func (v *Violation) Issue(fSet *token.FileSet) GolangIssue {
 		Message: v.Message(),
 	}
 
-	// original expression (useful for debug & requied for replace)
+	// original expression (useful for debug & required for replace)
 	var buf bytes.Buffer
 	printer.Fprint(&buf, fSet, v.callExpr)
 	issue.Original = buf.String()
diff --git a/vendor/github.com/butuzov/mirror/readme.md b/vendor/github.com/butuzov/mirror/readme.md
index fcfd1de11a39e536de50decc56bb983b4c36124a..f830ea72eae6856543ab43a152236a12f0ea8205 100644
--- a/vendor/github.com/butuzov/mirror/readme.md
+++ b/vendor/github.com/butuzov/mirror/readme.md
@@ -1,10 +1,7 @@
-# `mirror` [![Code Coverage](https://coveralls.io/repos/github/butuzov/mirror/badge.svg?branch=main)](https://coveralls.io/github/butuzov/mirror?branch=main) [![build status](https://github.com/butuzov/mirror/actions/workflows/main.yaml/badge.svg?branch=main)]()
+# `mirror` [![Stand with Ukraine](https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/badges/StandWithUkraine.svg)](https://u24.gov.ua/) [![Code Coverage](https://coveralls.io/repos/github/butuzov/mirror/badge.svg?branch=main)](https://coveralls.io/github/butuzov/mirror?branch=main) [![build status](https://github.com/butuzov/mirror/actions/workflows/main.yaml/badge.svg?branch=main)]()
 
 `mirror` suggests use of alternative functions/methods in order to gain performance boosts by avoiding unnecessary `[]byte/string` conversion calls. See [MIRROR_FUNCS.md](MIRROR_FUNCS.md) list of mirror functions you can use in go's stdlib.
 
-## 🇺🇦 PLEASE HELP ME 🇺🇦
-Fundrise for scout drone **DJI Matrice 30T** for my squad (Ukrainian Forces). See more details at [butuzov/README.md](https://github.com/butuzov/butuzov/)
-
 ## Linter Use Cases
 
 ### `github.com/argoproj/argo-cd`
@@ -35,6 +32,10 @@ func IsValidHostname(hostname string, fqdn bool) bool {
 go install github.com/butuzov/mirror/cmd/mirror@latest
 ```
 
+### `golangci-lint`
+`golangci-lint` has supported `mirror` since `v1.53.0`.
+
+
 ## How to use
 
 You run `mirror` with [`go vet`](https://pkg.go.dev/cmd/vet):
@@ -55,6 +56,59 @@ mirror ./...
 /air/runner/util.go:173:14: avoid allocations with (*os.File).WriteString
 ```
 
+With [`golangci-lint`](https://github.com/golangci/golangci-lint)
+
+```
+golangci-lint run --no-config --disable-all -Emirror
+# github.com/argoproj/argo-cd
+test/e2e/fixture/app/actions.go:83:11: avoid allocations with (*os.File).WriteString (mirror)
+	_, err = tmpFile.Write([]byte(data))
+	         ^
+server/server.go:1166:9: avoid allocations with (*regexp.Regexp).MatchString (mirror)
+	return mainJsBundleRegex.Match([]byte(filename))
+	       ^
+server/account/account.go:91:6: avoid allocations with (*regexp.Regexp).MatchString (mirror)
+	if !validPasswordRegexp.Match([]byte(q.NewPassword)) {
+	    ^
+server/badge/badge.go:52:20: avoid allocations with (*regexp.Regexp).FindAllStringSubmatchIndex (mirror)
+	for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) {
+	                  ^
+util/cert/cert.go:82:10: avoid allocations with (*regexp.Regexp).MatchString (mirror)
+		return validHostNameRegexp.Match([]byte(hostname)) || validIPv6Regexp.Match([]byte(hostname))
+```
+
 ## Command line
 
 - You can add checks for `_test.go` files with cli option `--with-tests`
+
+### `golangci-lint`
+  With `golangci-lint`, tests are checked by default; they can be turned off in any of the usual `golangci-lint` ways:
+
+  - flag `--tests` (e.g. `--tests=false`)
+  - flag `--skip-files` (e.g. `--skip-files="_test.go"`)
+  - yaml configuration `run.skip-files`:
+    ```yaml
+    run:
+      skip-files:
+        - '(.+)_test\.go'
+    ```
+  - yaml configuration `issues.exclude-rules`:
+    ```yaml
+      issues:
+        exclude-rules:
+          - path: '(.+)_test\.go'
+            linters:
+              - mirror
+    ```
+
+
+## Contributing
+
+```shell
+# Update Assets (testdata/(strings|bytes|os|utf8|maphash|regexp|bufio).go)
+(task|make) generate
+# Run Tests
+(task|make) tests
+# Lint Code
+(task|make) lints
+```
diff --git a/vendor/github.com/sivchari/nosnakecase/LICENSE b/vendor/github.com/catenacyber/perfsprint/LICENSE
similarity index 97%
rename from vendor/github.com/sivchari/nosnakecase/LICENSE
rename to vendor/github.com/catenacyber/perfsprint/LICENSE
index fb412767740e3468921affe4cf12b33a5ac83783..14c2b9e737667aad930a01b1e65cec9178511563 100644
--- a/vendor/github.com/sivchari/nosnakecase/LICENSE
+++ b/vendor/github.com/catenacyber/perfsprint/LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2022 sivchari
+Copyright (c) 2023 Catena cyber
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
diff --git a/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go b/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go
new file mode 100644
index 0000000000000000000000000000000000000000..543b4bdbc70d0234610208a979ecb198a82a6d02
--- /dev/null
+++ b/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go
@@ -0,0 +1,603 @@
+package analyzer
+
+import (
+	"bytes"
+	"go/ast"
+	"go/format"
+	"go/token"
+	"go/types"
+	"sort"
+	"strconv"
+	"strings"
+
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/inspector"
+
+	"golang.org/x/tools/go/analysis"
+)
+
+type perfSprint struct {
+	intConv    bool
+	errError   bool
+	errorf     bool
+	sprintf1   bool
+	fiximports bool
+	strconcat  bool
+}
+
+func newPerfSprint() *perfSprint {
+	return &perfSprint{
+		intConv:    true,
+		errError:   false,
+		errorf:     true,
+		sprintf1:   true,
+		fiximports: true,
+		strconcat:  true,
+	}
+}
+
+func New() *analysis.Analyzer {
+	n := newPerfSprint()
+	r := &analysis.Analyzer{
+		Name:     "perfsprint",
+		Doc:      "Checks that fmt.Sprintf can be replaced with a faster alternative.",
+		Run:      n.run,
+		Requires: []*analysis.Analyzer{inspect.Analyzer},
+	}
+	r.Flags.BoolVar(&n.intConv, "int-conversion", true, "optimizes even if it requires an int or uint type cast")
+	r.Flags.BoolVar(&n.errError, "err-error", false, "optimizes into err.Error() even if it is only equivalent for non-nil errors")
+	r.Flags.BoolVar(&n.errorf, "errorf", true, "optimizes fmt.Errorf")
+	r.Flags.BoolVar(&n.sprintf1, "sprintf1", true, "optimizes fmt.Sprintf with only one argument")
+	r.Flags.BoolVar(&n.fiximports, "fiximports", true, "fix needed imports from other fixes")
+	r.Flags.BoolVar(&n.strconcat, "strconcat", true, "optimizes into strings concatenation")
+	return r
+}
+
+// true if verb is a format string that could be replaced with concatenation.
+func isConcatable(verb string) bool {
+	hasPrefix :=
+		(strings.HasPrefix(verb, "%s") && !strings.Contains(verb, "%[1]s")) ||
+			(strings.HasPrefix(verb, "%[1]s") && !strings.Contains(verb, "%s"))
+	hasSuffix :=
+		(strings.HasSuffix(verb, "%s") && !strings.Contains(verb, "%[1]s")) ||
+			(strings.HasSuffix(verb, "%[1]s") && !strings.Contains(verb, "%s"))
+
+	if strings.Count(verb, "%[1]s") > 1 {
+		return false
+	}
+	return (hasPrefix || hasSuffix) && !(hasPrefix && hasSuffix)
+}
+
+func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
+	var fmtSprintObj, fmtSprintfObj, fmtErrorfObj types.Object
+	for _, pkg := range pass.Pkg.Imports() {
+		if pkg.Path() == "fmt" {
+			fmtSprintObj = pkg.Scope().Lookup("Sprint")
+			fmtSprintfObj = pkg.Scope().Lookup("Sprintf")
+			fmtErrorfObj = pkg.Scope().Lookup("Errorf")
+		}
+	}
+	if fmtSprintfObj == nil && fmtSprintObj == nil && fmtErrorfObj == nil {
+		return nil, nil
+	}
+	removedFmtUsages := make(map[string]int)
+	neededPackages := make(map[string]map[string]bool)
+
+	insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+	nodeFilter := []ast.Node{
+		(*ast.CallExpr)(nil),
+	}
+	insp.Preorder(nodeFilter, func(node ast.Node) {
+		call := node.(*ast.CallExpr)
+		called, ok := call.Fun.(*ast.SelectorExpr)
+		if !ok {
+			return
+		}
+		calledObj := pass.TypesInfo.ObjectOf(called.Sel)
+
+		var (
+			fn    string
+			verb  string
+			value ast.Expr
+			err   error
+		)
+		switch {
+		case calledObj == fmtErrorfObj && len(call.Args) == 1:
+			if n.errorf {
+				fn = "fmt.Errorf"
+				verb = "%s"
+				value = call.Args[0]
+			} else {
+				return
+			}
+
+		case calledObj == fmtSprintObj && len(call.Args) == 1:
+			fn = "fmt.Sprint"
+			verb = "%v"
+			value = call.Args[0]
+
+		case calledObj == fmtSprintfObj && len(call.Args) == 1:
+			if n.sprintf1 {
+				fn = "fmt.Sprintf"
+				verb = "%s"
+				value = call.Args[0]
+			} else {
+				return
+			}
+
+		case calledObj == fmtSprintfObj && len(call.Args) == 2:
+			verbLit, ok := call.Args[0].(*ast.BasicLit)
+			if !ok {
+				return
+			}
+			verb, err = strconv.Unquote(verbLit.Value)
+			if err != nil {
+				// Probably unreachable.
+				return
+			}
+			// one single explicit arg is simplified
+			if strings.HasPrefix(verb, "%[1]") {
+				verb = "%" + verb[4:]
+			}
+
+			fn = "fmt.Sprintf"
+			value = call.Args[1]
+
+		default:
+			return
+		}
+
+		switch verb {
+		default:
+			if fn == "fmt.Sprintf" && isConcatable(verb) && n.strconcat {
+				break
+			}
+			return
+		case "%d", "%v", "%x", "%t", "%s":
+		}
+
+		valueType := pass.TypesInfo.TypeOf(value)
+		a, isArray := valueType.(*types.Array)
+		s, isSlice := valueType.(*types.Slice)
+
+		var d *analysis.Diagnostic
+		switch {
+		case isBasicType(valueType, types.String) && oneOf(verb, "%v", "%s"):
+			fname := pass.Fset.File(call.Pos()).Name()
+			_, ok := neededPackages[fname]
+			if !ok {
+				neededPackages[fname] = make(map[string]bool)
+			}
+			removedFmtUsages[fname]++
+			if fn == "fmt.Errorf" {
+				neededPackages[fname]["errors"] = true
+				d = &analysis.Diagnostic{
+					Pos:     call.Pos(),
+					End:     call.End(),
+					Message: fn + " can be replaced with errors.New",
+					SuggestedFixes: []analysis.SuggestedFix{
+						{
+							Message: "Use errors.New",
+							TextEdits: []analysis.TextEdit{{
+								Pos:     call.Pos(),
+								End:     value.Pos(),
+								NewText: []byte("errors.New("),
+							}},
+						},
+					},
+				}
+			} else {
+				d = &analysis.Diagnostic{
+					Pos:     call.Pos(),
+					End:     call.End(),
+					Message: fn + " can be replaced with just using the string",
+					SuggestedFixes: []analysis.SuggestedFix{
+						{
+							Message: "Just use string value",
+							TextEdits: []analysis.TextEdit{{
+								Pos:     call.Pos(),
+								End:     call.End(),
+								NewText: []byte(formatNode(pass.Fset, value)),
+							}},
+						},
+					},
+				}
+			}
+		case types.Implements(valueType, errIface) && oneOf(verb, "%v", "%s") && n.errError:
+			// known false positive if this error is nil
+			// fmt.Sprint(nil) does not panic like nil.Error() does
+			errMethodCall := formatNode(pass.Fset, value) + ".Error()"
+			fname := pass.Fset.File(call.Pos()).Name()
+			removedFmtUsages[fname]++
+			d = &analysis.Diagnostic{
+				Pos:     call.Pos(),
+				End:     call.End(),
+				Message: fn + " can be replaced with " + errMethodCall,
+				SuggestedFixes: []analysis.SuggestedFix{
+					{
+						Message: "Use " + errMethodCall,
+						TextEdits: []analysis.TextEdit{{
+							Pos:     call.Pos(),
+							End:     call.End(),
+							NewText: []byte(errMethodCall),
+						}},
+					},
+				},
+			}
+
+		case isBasicType(valueType, types.Bool) && oneOf(verb, "%v", "%t"):
+			fname := pass.Fset.File(call.Pos()).Name()
+			removedFmtUsages[fname]++
+			_, ok := neededPackages[fname]
+			if !ok {
+				neededPackages[fname] = make(map[string]bool)
+			}
+			neededPackages[fname]["strconv"] = true
+			d = &analysis.Diagnostic{
+				Pos:     call.Pos(),
+				End:     call.End(),
+				Message: fn + " can be replaced with faster strconv.FormatBool",
+				SuggestedFixes: []analysis.SuggestedFix{
+					{
+						Message: "Use strconv.FormatBool",
+						TextEdits: []analysis.TextEdit{{
+							Pos:     call.Pos(),
+							End:     value.Pos(),
+							NewText: []byte("strconv.FormatBool("),
+						}},
+					},
+				},
+			}
+
+		case isArray && isBasicType(a.Elem(), types.Uint8) && oneOf(verb, "%x"):
+			if _, ok := value.(*ast.Ident); !ok {
+				// Doesn't support array literals.
+				return
+			}
+
+			fname := pass.Fset.File(call.Pos()).Name()
+			removedFmtUsages[fname]++
+			_, ok := neededPackages[fname]
+			if !ok {
+				neededPackages[fname] = make(map[string]bool)
+			}
+			neededPackages[fname]["encoding/hex"] = true
+			d = &analysis.Diagnostic{
+				Pos:     call.Pos(),
+				End:     call.End(),
+				Message: fn + " can be replaced with faster hex.EncodeToString",
+				SuggestedFixes: []analysis.SuggestedFix{
+					{
+						Message: "Use hex.EncodeToString",
+						TextEdits: []analysis.TextEdit{
+							{
+								Pos:     call.Pos(),
+								End:     value.Pos(),
+								NewText: []byte("hex.EncodeToString("),
+							},
+							{
+								Pos:     value.End(),
+								End:     value.End(),
+								NewText: []byte("[:]"),
+							},
+						},
+					},
+				},
+			}
+		case isSlice && isBasicType(s.Elem(), types.Uint8) && oneOf(verb, "%x"):
+			fname := pass.Fset.File(call.Pos()).Name()
+			removedFmtUsages[fname]++
+			_, ok := neededPackages[fname]
+			if !ok {
+				neededPackages[fname] = make(map[string]bool)
+			}
+			neededPackages[fname]["encoding/hex"] = true
+			d = &analysis.Diagnostic{
+				Pos:     call.Pos(),
+				End:     call.End(),
+				Message: fn + " can be replaced with faster hex.EncodeToString",
+				SuggestedFixes: []analysis.SuggestedFix{
+					{
+						Message: "Use hex.EncodeToString",
+						TextEdits: []analysis.TextEdit{{
+							Pos:     call.Pos(),
+							End:     value.Pos(),
+							NewText: []byte("hex.EncodeToString("),
+						}},
+					},
+				},
+			}
+
+		case isBasicType(valueType, types.Int8, types.Int16, types.Int32) && oneOf(verb, "%v", "%d") && n.intConv:
+			fname := pass.Fset.File(call.Pos()).Name()
+			removedFmtUsages[fname]++
+			_, ok := neededPackages[fname]
+			if !ok {
+				neededPackages[fname] = make(map[string]bool)
+			}
+			neededPackages[fname]["strconv"] = true
+			d = &analysis.Diagnostic{
+				Pos:     call.Pos(),
+				End:     call.End(),
+				Message: fn + " can be replaced with faster strconv.Itoa",
+				SuggestedFixes: []analysis.SuggestedFix{
+					{
+						Message: "Use strconv.Itoa",
+						TextEdits: []analysis.TextEdit{
+							{
+								Pos:     call.Pos(),
+								End:     value.Pos(),
+								NewText: []byte("strconv.Itoa(int("),
+							},
+							{
+								Pos:     value.End(),
+								End:     value.End(),
+								NewText: []byte(")"),
+							},
+						},
+					},
+				},
+			}
+		case isBasicType(valueType, types.Int) && oneOf(verb, "%v", "%d"):
+			fname := pass.Fset.File(call.Pos()).Name()
+			removedFmtUsages[fname]++
+			_, ok := neededPackages[fname]
+			if !ok {
+				neededPackages[fname] = make(map[string]bool)
+			}
+			neededPackages[fname]["strconv"] = true
+			d = &analysis.Diagnostic{
+				Pos:     call.Pos(),
+				End:     call.End(),
+				Message: fn + " can be replaced with faster strconv.Itoa",
+				SuggestedFixes: []analysis.SuggestedFix{
+					{
+						Message: "Use strconv.Itoa",
+						TextEdits: []analysis.TextEdit{{
+							Pos:     call.Pos(),
+							End:     value.Pos(),
+							NewText: []byte("strconv.Itoa("),
+						}},
+					},
+				},
+			}
+		case isBasicType(valueType, types.Int64) && oneOf(verb, "%v", "%d"):
+			fname := pass.Fset.File(call.Pos()).Name()
+			removedFmtUsages[fname]++
+			_, ok := neededPackages[fname]
+			if !ok {
+				neededPackages[fname] = make(map[string]bool)
+			}
+			neededPackages[fname]["strconv"] = true
+			d = &analysis.Diagnostic{
+				Pos:     call.Pos(),
+				End:     call.End(),
+				Message: fn + " can be replaced with faster strconv.FormatInt",
+				SuggestedFixes: []analysis.SuggestedFix{
+					{
+						Message: "Use strconv.FormatInt",
+						TextEdits: []analysis.TextEdit{
+							{
+								Pos:     call.Pos(),
+								End:     value.Pos(),
+								NewText: []byte("strconv.FormatInt("),
+							},
+							{
+								Pos:     value.End(),
+								End:     value.End(),
+								NewText: []byte(", 10"),
+							},
+						},
+					},
+				},
+			}
+
+		case isBasicType(valueType, types.Uint8, types.Uint16, types.Uint32, types.Uint) && oneOf(verb, "%v", "%d", "%x") && n.intConv:
+			base := []byte("), 10")
+			if verb == "%x" {
+				base = []byte("), 16")
+			}
+			fname := pass.Fset.File(call.Pos()).Name()
+			removedFmtUsages[fname]++
+			_, ok := neededPackages[fname]
+			if !ok {
+				neededPackages[fname] = make(map[string]bool)
+			}
+			neededPackages[fname]["strconv"] = true
+			d = &analysis.Diagnostic{
+				Pos:     call.Pos(),
+				End:     call.End(),
+				Message: fn + " can be replaced with faster strconv.FormatUint",
+				SuggestedFixes: []analysis.SuggestedFix{
+					{
+						Message: "Use strconv.FormatUint",
+						TextEdits: []analysis.TextEdit{
+							{
+								Pos:     call.Pos(),
+								End:     value.Pos(),
+								NewText: []byte("strconv.FormatUint(uint64("),
+							},
+							{
+								Pos:     value.End(),
+								End:     value.End(),
+								NewText: base,
+							},
+						},
+					},
+				},
+			}
+		case isBasicType(valueType, types.Uint64) && oneOf(verb, "%v", "%d", "%x"):
+			base := []byte(", 10")
+			if verb == "%x" {
+				base = []byte(", 16")
+			}
+			fname := pass.Fset.File(call.Pos()).Name()
+			removedFmtUsages[fname]++
+			_, ok := neededPackages[fname]
+			if !ok {
+				neededPackages[fname] = make(map[string]bool)
+			}
+			neededPackages[fname]["strconv"] = true
+			d = &analysis.Diagnostic{
+				Pos:     call.Pos(),
+				End:     call.End(),
+				Message: fn + " can be replaced with faster strconv.FormatUint",
+				SuggestedFixes: []analysis.SuggestedFix{
+					{
+						Message: "Use strconv.FormatUint",
+						TextEdits: []analysis.TextEdit{
+							{
+								Pos:     call.Pos(),
+								End:     value.Pos(),
+								NewText: []byte("strconv.FormatUint("),
+							},
+							{
+								Pos:     value.End(),
+								End:     value.End(),
+								NewText: base,
+							},
+						},
+					},
+				},
+			}
+		case isBasicType(valueType, types.String) && fn == "fmt.Sprintf" && isConcatable(verb):
+			var fix string
+			if strings.HasSuffix(verb, "%s") {
+				fix = strconv.Quote(verb[:len(verb)-2]) + "+" + formatNode(pass.Fset, value)
+			} else if strings.HasSuffix(verb, "%[1]s") {
+				fix = strconv.Quote(verb[:len(verb)-5]) + "+" + formatNode(pass.Fset, value)
+			} else if strings.HasPrefix(verb, "%s") {
+				fix = formatNode(pass.Fset, value) + "+" + strconv.Quote(verb[2:])
+			} else {
+				fix = formatNode(pass.Fset, value) + "+" + strconv.Quote(verb[5:])
+			}
+			fname := pass.Fset.File(call.Pos()).Name()
+			removedFmtUsages[fname]++
+			d = &analysis.Diagnostic{
+				Pos:     call.Pos(),
+				End:     call.End(),
+				Message: fn + " can be replaced with string concatenation",
+				SuggestedFixes: []analysis.SuggestedFix{
+					{
+						Message: "Use string concatenation",
+						TextEdits: []analysis.TextEdit{{
+							Pos:     call.Pos(),
+							End:     call.End(),
+							NewText: []byte(fix),
+						}},
+					},
+				},
+			}
+		}
+
+		if d != nil {
+			pass.Report(*d)
+		}
+	})
+
+	if len(removedFmtUsages) > 0 && n.fiximports {
+		for _, pkg := range pass.Pkg.Imports() {
+			if pkg.Path() == "fmt" {
+				insp = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+				nodeFilter = []ast.Node{
+					(*ast.SelectorExpr)(nil),
+				}
+				insp.Preorder(nodeFilter, func(node ast.Node) {
+					selec := node.(*ast.SelectorExpr)
+					selecok, ok := selec.X.(*ast.Ident)
+					if ok {
+						pkgname, ok := pass.TypesInfo.ObjectOf(selecok).(*types.PkgName)
+						if ok && pkgname.Name() == pkg.Name() {
+							fname := pass.Fset.File(pkgname.Pos()).Name()
+							removedFmtUsages[fname]--
+						}
+					}
+				})
+			} else if pkg.Path() == "errors" || pkg.Path() == "strconv" || pkg.Path() == "encoding/hex" {
+				insp = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+				nodeFilter = []ast.Node{
+					(*ast.ImportSpec)(nil),
+				}
+				insp.Preorder(nodeFilter, func(node ast.Node) {
+					gd := node.(*ast.ImportSpec)
+					if gd.Path.Value == strconv.Quote(pkg.Path()) {
+						fname := pass.Fset.File(gd.Pos()).Name()
+						_, ok := neededPackages[fname]
+						if ok {
+							delete(neededPackages[fname], pkg.Path())
+						}
+					}
+				})
+			}
+		}
+		insp = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+		nodeFilter = []ast.Node{
+			(*ast.ImportSpec)(nil),
+		}
+		insp.Preorder(nodeFilter, func(node ast.Node) {
+			gd := node.(*ast.ImportSpec)
+			if gd.Path.Value == `"fmt"` {
+				fix := ""
+				fname := pass.Fset.File(gd.Pos()).Name()
+				if removedFmtUsages[fname] < 0 {
+					fix += `"fmt"`
+					if len(neededPackages[fname]) == 0 {
+						return
+					}
+				}
+				keys := make([]string, 0, len(neededPackages[fname]))
+				for k := range neededPackages[fname] {
+					keys = append(keys, k)
+				}
+				sort.Strings(keys)
+				for _, k := range keys {
+					fix = fix + "\n\t\"" + k + `"`
+				}
+				pass.Report(analysis.Diagnostic{
+					Pos:     gd.Pos(),
+					End:     gd.End(),
+					Message: "Fix imports",
+					SuggestedFixes: []analysis.SuggestedFix{
+						{
+							Message: "Fix imports",
+							TextEdits: []analysis.TextEdit{{
+								Pos:     gd.Pos(),
+								End:     gd.End(),
+								NewText: []byte(fix),
+							}},
+						},
+					}})
+			}
+		})
+	}
+
+	return nil, nil
+}
+
+var errIface = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
+
+func isBasicType(lhs types.Type, expected ...types.BasicKind) bool {
+	for _, rhs := range expected {
+		if types.Identical(lhs, types.Typ[rhs]) {
+			return true
+		}
+	}
+	return false
+}
+
+func formatNode(fset *token.FileSet, node ast.Node) string {
+	buf := new(bytes.Buffer)
+	if err := format.Node(buf, fset, node); err != nil {
+		return ""
+	}
+	return buf.String()
+}
+
+func oneOf[T comparable](v T, expected ...T) bool {
+	for _, rhs := range expected {
+		if v == rhs {
+			return true
+		}
+	}
+	return false
+}
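Note (not part of the patch): the analyzer above flags fmt.Sprint, fmt.Sprintf and fmt.Errorf calls whose single argument already has a cheaper conversion. A minimal, self-contained sketch of the pairs it targets, with purely illustrative variable names, might look like this:

```go
package main

import (
	"encoding/hex"
	"errors"
	"fmt"
	"strconv"
)

func main() {
	s, n, u, ok := "ready", 42, uint64(255), true
	raw := []byte{0xde, 0xad}

	// Each left-hand call is what perfsprint reports; each right-hand
	// expression is what its suggested fix rewrites the call into.
	fmt.Println(fmt.Sprintf("%s", s) == s)                                // just use the string value
	fmt.Println(fmt.Sprint(n) == strconv.Itoa(n))                         // strconv.Itoa for int
	fmt.Println(fmt.Sprintf("%x", u) == strconv.FormatUint(u, 16))        // strconv.FormatUint for uint64
	fmt.Println(fmt.Sprintf("%t", ok) == strconv.FormatBool(ok))          // strconv.FormatBool for bool
	fmt.Println(fmt.Sprintf("%x", raw) == hex.EncodeToString(raw))        // hex.EncodeToString for []byte
	fmt.Println(fmt.Sprintf("id-%s", s) == "id-"+s)                       // plain string concatenation
	fmt.Println(fmt.Errorf("boom").Error() == errors.New("boom").Error()) // errors.New for constant messages
}
```

When a fix removes the last fmt usage in a file, the fiximports pass above rewrites the import block to drop "fmt" and add whichever of errors, strconv or encoding/hex the replacements need.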
diff --git a/vendor/github.com/ccojocar/zxcvbn-go/match/match.go b/vendor/github.com/ccojocar/zxcvbn-go/match/match.go
index 998dde1112f65cd987ed207140d2d84ab6af7e3a..da3e894ece8545f76d7ff56f9eaa79d5396ec4ea 100644
--- a/vendor/github.com/ccojocar/zxcvbn-go/match/match.go
+++ b/vendor/github.com/ccojocar/zxcvbn-go/match/match.go
@@ -16,9 +16,8 @@ func (s Matches) Less(i, j int) bool {
 		return true
 	} else if s[i].I == s[j].I {
 		return s[i].J < s[j].J
-	} else {
-		return false
 	}
+	return false
 }
 
 // Match represents different matches
diff --git a/vendor/github.com/ccojocar/zxcvbn-go/renovate.json b/vendor/github.com/ccojocar/zxcvbn-go/renovate.json
new file mode 100644
index 0000000000000000000000000000000000000000..58ee1e0ea8cc20d5ab87e2bf5e586fcf4eaed23e
--- /dev/null
+++ b/vendor/github.com/ccojocar/zxcvbn-go/renovate.json
@@ -0,0 +1,25 @@
+{
+  "dependencyDashboard": true,
+  "dependencyDashboardTitle" : "Renovate(bot) : dependency dashboard",
+  "vulnerabilityAlerts": {
+    "enabled": true
+  },
+  "extends": [
+    ":preserveSemverRanges",
+    "group:all",
+    "schedule:weekly"
+  ],
+  "lockFileMaintenance": {
+    "commitMessageAction": "Update",
+    "enabled": true,
+    "extends": [
+      "group:all",
+      "schedule:weekly"
+    ]
+  },
+  "postUpdateOptions": [
+    "gomodTidy",
+    "gomodUpdateImportPaths"
+  ],
+  "separateMajorMinor": false
+}
diff --git a/vendor/github.com/ccojocar/zxcvbn-go/scoring/scoring.go b/vendor/github.com/ccojocar/zxcvbn-go/scoring/scoring.go
index dbe3318848362a6b41a7ac39b342b5700b670713..f25606a8d60135c797665567d628b45142489058 100644
--- a/vendor/github.com/ccojocar/zxcvbn-go/scoring/scoring.go
+++ b/vendor/github.com/ccojocar/zxcvbn-go/scoring/scoring.go
@@ -161,9 +161,8 @@ func displayTime(seconds float64) string {
 		return fmt.Sprintf(formater, (1 + math.Ceil(seconds/month)), "months")
 	} else if seconds < century {
 		return fmt.Sprintf(formater, (1 + math.Ceil(seconds/century)), "years")
-	} else {
-		return "centuries"
 	}
+	return "centuries"
 }
 
 func crackTimeToScore(seconds float64) int {
diff --git a/vendor/github.com/chavacava/garif/enums.go b/vendor/github.com/chavacava/garif/enums.go
new file mode 100644
index 0000000000000000000000000000000000000000..dea2daf131b505bb166ddf5e3e9d78b39e2c738e
--- /dev/null
+++ b/vendor/github.com/chavacava/garif/enums.go
@@ -0,0 +1,41 @@
+package garif
+
+type ResultKind string
+
+// declare JSON values
+const (
+	_pass          ResultKind = "pass"
+	_open          ResultKind = "open"
+	_informational ResultKind = "informational"
+	_notApplicable ResultKind = "notApplicable"
+	_review        ResultKind = "review"
+	_fail          ResultKind = "fail"
+)
+
+// create public visible constants with a namespace as enums
+const (
+	ResultKind_Pass          ResultKind = _pass
+	ResultKind_Open          ResultKind = _open
+	ResultKind_Informational ResultKind = _informational
+	ResultKind_NotApplicable ResultKind = _notApplicable
+	ResultKind_Review        ResultKind = _review
+	ResultKind_Fail          ResultKind = _fail
+)
+
+type ResultLevel string
+
+// declare JSON values
+const (
+	_warning ResultLevel = "warning"
+	_error   ResultLevel = "error"
+	_note    ResultLevel = "note"
+	_none    ResultLevel = "none"
+)
+
+// create public visible constants with a namespace as enums
+const (
+	ResultLevel_Warning ResultLevel = _warning
+	ResultLevel_Error   ResultLevel = _error
+	ResultLevel_Note    ResultLevel = _note
+	ResultLevel_None    ResultLevel = _none
+)
diff --git a/vendor/github.com/chavacava/garif/models.go b/vendor/github.com/chavacava/garif/models.go
index 3668436a3c949a73dae118a668cdc3eeb1557542..f16a86136e5401bebd30b93f5d82c4e10cf2df7c 100644
--- a/vendor/github.com/chavacava/garif/models.go
+++ b/vendor/github.com/chavacava/garif/models.go
@@ -935,10 +935,10 @@ type Result struct {
 	HostedViewerUri string `json:"hostedViewerUri,omitempty"`
 
 	// A value that categorizes results by evaluation state.
-	Kind interface{} `json:"kind,omitempty"`
+	Kind ResultKind `json:"kind,omitempty"`
 
 	// A value specifying the severity level of the result.
-	Level interface{} `json:"level,omitempty"`
+	Level ResultLevel `json:"level,omitempty"`
 
 	// The set of locations where the result was detected. Specify only one location unless the problem indicated by the result can only be corrected by making a change at every specified location.
 	Locations []*Location `json:"locations,omitempty"`
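A short illustration (not part of the patch, assuming the vendored import path github.com/chavacava/garif) of what the typing change buys: Kind and Level now only accept the enum constants declared in enums.go, so a mistyped value fails to compile instead of slipping through as interface{}.

```go
package main

import (
	"fmt"

	"github.com/chavacava/garif"
)

func main() {
	// With the typed enums, only ResultKind/ResultLevel values compile here.
	r := garif.Result{
		Kind:  garif.ResultKind_Fail,
		Level: garif.ResultLevel_Warning,
	}
	fmt.Println(r.Kind, r.Level) // fail warning
}
```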
diff --git a/vendor/github.com/ckaznocha/intrange/.gitignore b/vendor/github.com/ckaznocha/intrange/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..cfcb676e15b471505c5ea2c91b64162c49ca357e
--- /dev/null
+++ b/vendor/github.com/ckaznocha/intrange/.gitignore
@@ -0,0 +1,191 @@
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, built with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+go.work.sum
+
+.vscode/*
+!.vscode/settings.json
+!.vscode/tasks.json
+!.vscode/launch.json
+!.vscode/extensions.json
+!.vscode/*.code-snippets
+
+# Local History for Visual Studio Code
+.history/
+
+# Built Visual Studio Code Extensions
+*.vsix
+
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/**/usage.statistics.xml
+.idea/**/dictionaries
+.idea/**/shelf
+
+# AWS User-specific
+.idea/**/aws.xml
+
+# Generated files
+.idea/**/contentModel.xml
+
+# Sensitive or high-churn files
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+.idea/**/dbnavigator.xml
+
+# Gradle
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# Gradle and Maven with auto-import
+# When using Gradle or Maven with auto-import, you should exclude module files,
+# since they will be recreated, and may cause churn.  Uncomment if using
+# auto-import.
+# .idea/artifacts
+# .idea/compiler.xml
+# .idea/jarRepositories.xml
+# .idea/modules.xml
+# .idea/*.iml
+# .idea/modules
+# *.iml
+# *.ipr
+
+# CMake
+cmake-build-*/
+
+# Mongo Explorer plugin
+.idea/**/mongoSettings.xml
+
+# File-based project format
+*.iws
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# SonarLint plugin
+.idea/sonarlint/
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+# Editor-based Rest Client
+.idea/httpRequests
+
+# Android studio 3.1+ serialized cache file
+.idea/caches/build_file_checksums.ser
+
+# Swap
+[._]*.s[a-v][a-z]
+!*.svg  # comment out if you don't need vector files
+[._]*.sw[a-p]
+[._]s[a-rt-v][a-z]
+[._]ss[a-gi-z]
+[._]sw[a-p]
+
+# Session
+Session.vim
+Sessionx.vim
+
+# Temporary
+.netrwhist
+*~
+# Auto-generated tag files
+tags
+# Persistent undo
+[._]*.un~
+
+# Windows thumbnail cache files
+Thumbs.db
+Thumbs.db:encryptable
+ehthumbs.db
+ehthumbs_vista.db
+
+# Dump file
+*.stackdump
+
+# Folder config file
+[Dd]esktop.ini
+
+# Recycle Bin used on file shares
+$RECYCLE.BIN/
+
+# Windows Installer files
+*.cab
+*.msi
+*.msix
+*.msm
+*.msp
+
+# Windows shortcuts
+*.lnk
+
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+*~
+
+# temporary files which can be created if a process still has a handle open of a deleted file
+.fuse_hidden*
+
+# KDE directory preferences
+.directory
+
+# Linux trash folder which might appear on any partition or disk
+.Trash-*
+
+# .nfs files are created when an open file is removed but is still being accessed
+.nfs*
diff --git a/vendor/github.com/ckaznocha/intrange/.golangci.yml b/vendor/github.com/ckaznocha/intrange/.golangci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..2ad830d1b2a1d72f0289c197905ff00a8a126cea
--- /dev/null
+++ b/vendor/github.com/ckaznocha/intrange/.golangci.yml
@@ -0,0 +1,99 @@
+linters-settings:
+  gci:
+    local-prefixes: github.com/ckaznocha/intrange
+  gocritic:
+    enabled-tags:
+      - diagnostic
+      - experimental
+      - opinionated
+      - performance
+      - style
+  goimports:
+    local-prefixes: github.com/ckaznocha/intrange
+  golint:
+    min-confidence: 0
+  govet:
+    check-shadowing: true
+    enable:
+      - asmdecl
+      - assign
+      - atomic
+      - atomicalign
+      - bools
+      - buildtag
+      - cgocall
+      - composite
+      - copylock
+      - deepequalerrors
+      - errorsas
+      - fieldalignment
+      - findcall
+      - framepointer
+      - httpresponse
+      - ifaceassert
+      - loopclosure
+      - lostcancel
+      - nilfunc
+      - nilness
+      - printf
+      - shadow
+      - shift
+      - sortslice
+      - stdmethods
+      - stringintconv
+      - structtag
+      - testinggoroutine
+      - tests
+      - unmarshal
+      - unreachable
+      - unsafeptr
+      - unusedresult
+  misspell:
+    locale: US
+linters:
+  disable-all: true
+  enable:
+    - asciicheck
+    - dupl
+    - errcheck
+    - errorlint
+    - exportloopref
+    - gci
+    - gochecknoinits
+    - goconst
+    - gocritic
+    - godot
+    - godox
+    - goerr113
+    - gofmt
+    - gofumpt
+    - goimports
+    - gomnd
+    - goprintffuncname
+    - gosec
+    - gosimple
+    - govet
+    - ineffassign
+    - lll
+    - misspell
+    - nakedret
+    - nestif
+    - nilerr
+    - nlreturn
+    - noctx
+    - nolintlint
+    - prealloc
+    - predeclared
+    - revive
+    - rowserrcheck
+    - staticcheck
+    - stylecheck
+    - typecheck
+    - unconvert
+    - unused
+    - wastedassign
+    - whitespace
+    - wsl
+run:
+  skip-dirs:
+    - testdata/
diff --git a/vendor/github.com/ckaznocha/intrange/CONTRIBUTING.md b/vendor/github.com/ckaznocha/intrange/CONTRIBUTING.md
new file mode 100644
index 0000000000000000000000000000000000000000..541cf2c54ea1c7b534f2674711b3daf9144401f0
--- /dev/null
+++ b/vendor/github.com/ckaznocha/intrange/CONTRIBUTING.md
@@ -0,0 +1,25 @@
+# Contributing
+Enhancements or fixes are welcome
+
+## Issues
+Check if a ticket for your issue already exists in GitHub issues. If you don't
+find a ticket, submit a new one.
+
+## Pull Requests
+1. Fork the repo
+1. Make your changes.
+1. Commit and push them to your fork.
+    1. Extra credit if you squash your commits first.
+1. Submit a pull request.
+
+### Style
+- Your code should pass golint.
+- Follow the existing conventions.
+
+### Tests
+- If you add any functionality, be sure to also add a test for it.
+- All regression tests need to pass before your pull request can be accepted.
+
+## License
+By contributing to intrange you agree that your contributions will be
+licensed under its MIT license.
diff --git a/vendor/github.com/ckaznocha/intrange/LICENSE b/vendor/github.com/ckaznocha/intrange/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..b68bde54b587012bd5ceda3ceb5fc243bcb6580f
--- /dev/null
+++ b/vendor/github.com/ckaznocha/intrange/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2024 Clifton Kaznocha
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/ckaznocha/intrange/README.md b/vendor/github.com/ckaznocha/intrange/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..9cac46220bba053f8921efa88b5215b1122ffb6b
--- /dev/null
+++ b/vendor/github.com/ckaznocha/intrange/README.md
@@ -0,0 +1,90 @@
+# intrange
+
+[![Build Status](https://github.com/ckaznocha/intrange/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/ckaznocha/intrange/actions/workflows/ci.yml)
+[![Release](http://img.shields.io/github/release/ckaznocha/intrange.svg)](https://github.com/ckaznocha/intrange/releases/latest)
+[![GoDoc](https://godoc.org/github.com/ckaznocha/intrange?status.svg)](https://godoc.org/github.com/ckaznocha/intrange)
+
+intrange is a program for checking for loops that could use the [Go 1.22](https://go.dev/ref/spec#Go_1.22) integer
+range feature.
+
+## Installation
+
+```bash
+go install github.com/ckaznocha/intrange/cmd/intrange@latest
+```
+
+## Usage
+
+```bash
+go vet -vettool=$(which intrange) ./...
+```
+
+## Examples
+
+### A loop that uses the value of the loop variable
+
+```go
+package main
+
+import "fmt"
+
+func main() {
+    for i := 0; i < 10; i++ {
+        fmt.Println(i)
+    }
+}
+```
+
+Running `intrange` on the above code will produce the following output:
+
+```bash
+main.go:5:2: for loop can be changed to use an integer range (Go 1.22+)
+```
+
+The loop can be rewritten as:
+
+```go
+package main
+
+import "fmt"
+
+func main() {
+    for i := range 10 {
+        fmt.Println(i)
+    }
+}
+```
+
+### A loop that does not use the value of the loop variable
+
+```go
+package main
+
+import "fmt"
+
+func main() {
+    for i := 0; i < 10; i++ {
+        fmt.Println("Hello again!")
+    }
+}
+```
+
+Running `intrange` on the above code will produce the following output:
+
+```bash
+main.go:5:2: for loop can be changed to use an integer range (Go 1.22+)
+```
+
+The loop can be rewritten as:
+
+```go
+package main
+
+import "fmt"
+
+func main() {
+    for range 10 {
+        fmt.Println("Hello again!")
+    }
+}
+```
diff --git a/vendor/github.com/ckaznocha/intrange/SECURITY.md b/vendor/github.com/ckaznocha/intrange/SECURITY.md
new file mode 100644
index 0000000000000000000000000000000000000000..e2c44c4e213f6a3c08abe1f3f979ec7af93a7970
--- /dev/null
+++ b/vendor/github.com/ckaznocha/intrange/SECURITY.md
@@ -0,0 +1,5 @@
+# Security Policy
+
+## Reporting a Vulnerability
+
+Please open a [GitHub issue](https://github.com/ckaznocha/intrange/issues).
diff --git a/vendor/github.com/ckaznocha/intrange/go.work b/vendor/github.com/ckaznocha/intrange/go.work
new file mode 100644
index 0000000000000000000000000000000000000000..f41a04a2fb7d1b0b84ed9199783d0e13b672c1a8
--- /dev/null
+++ b/vendor/github.com/ckaznocha/intrange/go.work
@@ -0,0 +1,6 @@
+go 1.22.0
+
+use (
+	.
+	./testdata
+)
diff --git a/vendor/github.com/ckaznocha/intrange/intrange.go b/vendor/github.com/ckaznocha/intrange/intrange.go
new file mode 100644
index 0000000000000000000000000000000000000000..fac4e3deae42d9629eb10291d33034f9c5538e8e
--- /dev/null
+++ b/vendor/github.com/ckaznocha/intrange/intrange.go
@@ -0,0 +1,388 @@
+package intrange
+
+import (
+	"errors"
+	"fmt"
+	"go/ast"
+	"go/token"
+	"strconv"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/inspector"
+)
+
+var (
+	Analyzer = &analysis.Analyzer{
+		Name:     "intrange",
+		Doc:      "intrange is a linter to find places where for loops could make use of an integer range.",
+		Run:      run,
+		Requires: []*analysis.Analyzer{inspect.Analyzer},
+	}
+
+	errFailedAnalysis = errors.New("failed analysis")
+)
+
+const msg = "for loop can be changed to use an integer range (Go 1.22+)"
+
+func run(pass *analysis.Pass) (any, error) {
+	result, ok := pass.ResultOf[inspect.Analyzer]
+	if !ok {
+		return nil, fmt.Errorf(
+			"%w: %s",
+			errFailedAnalysis,
+			inspect.Analyzer.Name,
+		)
+	}
+
+	resultInspector, ok := result.(*inspector.Inspector)
+	if !ok {
+		return nil, fmt.Errorf(
+			"%w: %s",
+			errFailedAnalysis,
+			inspect.Analyzer.Name,
+		)
+	}
+
+	resultInspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, check(pass))
+
+	return nil, nil
+}
+
+func check(pass *analysis.Pass) func(node ast.Node) {
+	return func(node ast.Node) {
+		forStmt, ok := node.(*ast.ForStmt)
+		if !ok {
+			return
+		}
+
+		if forStmt.Init == nil || forStmt.Cond == nil || forStmt.Post == nil {
+			return
+		}
+
+		// i := 0;;
+		init, ok := forStmt.Init.(*ast.AssignStmt)
+		if !ok {
+			return
+		}
+
+		if len(init.Lhs) != 1 || len(init.Rhs) != 1 {
+			return
+		}
+
+		initIdent, ok := init.Lhs[0].(*ast.Ident)
+		if !ok {
+			return
+		}
+
+		if !compareNumberLit(init.Rhs[0], 0) {
+			return
+		}
+
+		cond, ok := forStmt.Cond.(*ast.BinaryExpr)
+		if !ok {
+			return
+		}
+
+		var nExpr ast.Expr
+
+		switch cond.Op {
+		case token.LSS: // ;i < n;
+			if isBenchmark(cond.Y) {
+				return
+			}
+
+			nExpr = findNExpr(cond.Y)
+
+			x, ok := cond.X.(*ast.Ident)
+			if !ok {
+				return
+			}
+
+			if x.Name != initIdent.Name {
+				return
+			}
+		case token.GTR: // ;n > i;
+			if isBenchmark(cond.X) {
+				return
+			}
+
+			nExpr = findNExpr(cond.X)
+
+			y, ok := cond.Y.(*ast.Ident)
+			if !ok {
+				return
+			}
+
+			if y.Name != initIdent.Name {
+				return
+			}
+		default:
+			return
+		}
+
+		switch post := forStmt.Post.(type) {
+		case *ast.IncDecStmt: // ;;i++
+			if post.Tok != token.INC {
+				return
+			}
+
+			ident, ok := post.X.(*ast.Ident)
+			if !ok {
+				return
+			}
+
+			if ident.Name != initIdent.Name {
+				return
+			}
+		case *ast.AssignStmt:
+			switch post.Tok {
+			case token.ADD_ASSIGN: // ;;i += 1
+				if len(post.Lhs) != 1 {
+					return
+				}
+
+				ident, ok := post.Lhs[0].(*ast.Ident)
+				if !ok {
+					return
+				}
+
+				if ident.Name != initIdent.Name {
+					return
+				}
+
+				if len(post.Rhs) != 1 {
+					return
+				}
+
+				if !compareNumberLit(post.Rhs[0], 1) {
+					return
+				}
+			case token.ASSIGN: // ;;i = i + 1 && ;;i = 1 + i
+				if len(post.Lhs) != 1 || len(post.Rhs) != 1 {
+					return
+				}
+
+				ident, ok := post.Lhs[0].(*ast.Ident)
+				if !ok {
+					return
+				}
+
+				if ident.Name != initIdent.Name {
+					return
+				}
+
+				bin, ok := post.Rhs[0].(*ast.BinaryExpr)
+				if !ok {
+					return
+				}
+
+				if bin.Op != token.ADD {
+					return
+				}
+
+				switch x := bin.X.(type) {
+				case *ast.Ident: // ;;i = i + 1
+					if x.Name != initIdent.Name {
+						return
+					}
+
+					if !compareNumberLit(bin.Y, 1) {
+						return
+					}
+				case *ast.BasicLit: // ;;i = 1 + i
+					if !compareNumberLit(x, 1) {
+						return
+					}
+
+					ident, ok := bin.Y.(*ast.Ident)
+					if !ok {
+						return
+					}
+
+					if ident.Name != initIdent.Name {
+						return
+					}
+				default:
+					return
+				}
+			default:
+				return
+			}
+		default:
+			return
+		}
+
+		bc := &bodyChecker{
+			initIdent: initIdent,
+			nExpr:     nExpr,
+		}
+
+		ast.Inspect(forStmt.Body, bc.check)
+
+		if bc.modified {
+			return
+		}
+
+		pass.Report(analysis.Diagnostic{
+			Pos:     forStmt.Pos(),
+			Message: msg,
+		})
+	}
+}
+
+func findNExpr(expr ast.Expr) ast.Expr {
+	switch e := expr.(type) {
+	case *ast.CallExpr:
+		if fun, ok := e.Fun.(*ast.Ident); ok && fun.Name == "len" && len(e.Args) == 1 {
+			return findNExpr(e.Args[0])
+		}
+
+		return nil
+	case *ast.BasicLit:
+		return nil
+	case *ast.Ident:
+		return e
+	case *ast.SelectorExpr:
+		return e
+	case *ast.IndexExpr:
+		return e
+	default:
+		return nil
+	}
+}
+
+func isBenchmark(expr ast.Expr) bool {
+	selectorExpr, ok := expr.(*ast.SelectorExpr)
+	if !ok {
+		return false
+	}
+
+	if selectorExpr.Sel.Name != "N" {
+		return false
+	}
+
+	ident, ok := selectorExpr.X.(*ast.Ident)
+	if !ok {
+		return false
+	}
+
+	if ident.Name == "b" {
+		return true
+	}
+
+	return false
+}
+
+func identEqual(a, b ast.Expr) bool {
+	if a == nil || b == nil {
+		return false
+	}
+
+	switch aT := a.(type) {
+	case *ast.Ident:
+		identB, ok := b.(*ast.Ident)
+		if !ok {
+			return false
+		}
+
+		return aT.Name == identB.Name
+	case *ast.SelectorExpr:
+		selectorB, ok := b.(*ast.SelectorExpr)
+		if !ok {
+			return false
+		}
+
+		return identEqual(aT.Sel, selectorB.Sel) && identEqual(aT.X, selectorB.X)
+	case *ast.IndexExpr:
+		indexB, ok := b.(*ast.IndexExpr)
+		if ok {
+			return identEqual(aT.X, indexB.X) && identEqual(aT.Index, indexB.Index)
+		}
+
+		return identEqual(aT.X, b)
+	case *ast.BasicLit:
+		litB, ok := b.(*ast.BasicLit)
+		if !ok {
+			return false
+		}
+
+		return aT.Value == litB.Value
+	default:
+		return false
+	}
+}
+
+type bodyChecker struct {
+	initIdent *ast.Ident
+	nExpr     ast.Expr
+	modified  bool
+}
+
+func (b *bodyChecker) check(n ast.Node) bool {
+	switch stmt := n.(type) {
+	case *ast.AssignStmt:
+		for _, lhs := range stmt.Lhs {
+			if identEqual(lhs, b.initIdent) || identEqual(lhs, b.nExpr) {
+				b.modified = true
+
+				return false
+			}
+		}
+	case *ast.IncDecStmt:
+		if identEqual(stmt.X, b.initIdent) || identEqual(stmt.X, b.nExpr) {
+			b.modified = true
+
+			return false
+		}
+	}
+
+	return true
+}
+
+func compareNumberLit(exp ast.Expr, val int) bool {
+	switch lit := exp.(type) {
+	case *ast.BasicLit:
+		if lit.Kind != token.INT {
+			return false
+		}
+
+		n := strconv.Itoa(val)
+
+		switch lit.Value {
+		case n, "0x" + n, "0X" + n:
+			return true
+		default:
+			return false
+		}
+	case *ast.CallExpr:
+		switch fun := lit.Fun.(type) {
+		case *ast.Ident:
+			switch fun.Name {
+			case
+				"int",
+				"int8",
+				"int16",
+				"int32",
+				"int64",
+				"uint",
+				"uint8",
+				"uint16",
+				"uint32",
+				"uint64":
+			default:
+				return false
+			}
+		default:
+			return false
+		}
+
+		if len(lit.Args) != 1 {
+			return false
+		}
+
+		return compareNumberLit(lit.Args[0], val)
+	default:
+		return false
+	}
+}
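To show (outside the patch) which loops the analyzer reports: a counted loop whose body leaves both the index and the bound untouched is flagged, while a loop that mutates either one is skipped via bodyChecker. Variable names below are illustrative.

```go
package main

import "fmt"

func main() {
	n := 3

	// Flagged: i and n are never written in the body, so the loop can become
	// `for i := range n` on Go 1.22+.
	for i := 0; i < n; i++ {
		fmt.Println(i)
	}

	// Not flagged: the body mutates the bound, so bodyChecker marks the loop
	// as modified and no diagnostic is emitted.
	for i := 0; i < n; i++ {
		n--
		fmt.Println(i, n)
	}
}
```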
diff --git a/vendor/github.com/daixiang0/gci/pkg/config/config.go b/vendor/github.com/daixiang0/gci/pkg/config/config.go
index 120e787e91cad4ff4f63175dd0f260d402b70f24..cc43f2fa07f0425f4e821ae5d8f5e3c887466326 100644
--- a/vendor/github.com/daixiang0/gci/pkg/config/config.go
+++ b/vendor/github.com/daixiang0/gci/pkg/config/config.go
@@ -1,7 +1,6 @@
 package config
 
 import (
-	"io/ioutil"
 	"sort"
 	"strings"
 
@@ -11,11 +10,13 @@ import (
 )
 
 var defaultOrder = map[string]int{
-	section.StandardType: 0,
-	section.DefaultType:  1,
-	section.CustomType:   2,
-	section.BlankType:    3,
-	section.DotType:      4,
+	section.StandardType:    0,
+	section.DefaultType:     1,
+	section.CustomType:      2,
+	section.BlankType:       3,
+	section.DotType:         4,
+	section.AliasType:       5,
+	section.LocalModuleType: 6,
 }
 
 type BoolConfig struct {
@@ -37,6 +38,10 @@ type YamlConfig struct {
 	Cfg                     BoolConfig `yaml:",inline"`
 	SectionStrings          []string   `yaml:"sections"`
 	SectionSeparatorStrings []string   `yaml:"sectionseparators"`
+
+	// For historical reasons, golangci-lint needs an Analyzer to run, so GCI adds an Analyzer layer to integrate with it.
+	// The ModPath param is only set from analyzer.go; there is no need to set it anywhere else.
+	ModPath string `yaml:"-"`
 }
 
 func (g YamlConfig) Parse() (*Config, error) {
@@ -49,6 +54,9 @@ func (g YamlConfig) Parse() (*Config, error) {
 	if sections == nil {
 		sections = section.DefaultSections()
 	}
+	if err := configureSections(sections, g.ModPath); err != nil {
+		return nil, err
+	}
 
 	// if default order sorted sections
 	if !g.Cfg.CustomOrder {
@@ -73,19 +81,33 @@ func (g YamlConfig) Parse() (*Config, error) {
 	return &Config{g.Cfg, sections, sectionSeparators}, nil
 }
 
-func InitializeGciConfigFromYAML(filePath string) (*Config, error) {
+func ParseConfig(in string) (*Config, error) {
 	config := YamlConfig{}
-	yamlData, err := ioutil.ReadFile(filePath)
-	if err != nil {
-		return nil, err
-	}
-	err = yaml.Unmarshal(yamlData, &config)
+
+	err := yaml.Unmarshal([]byte(in), &config)
 	if err != nil {
 		return nil, err
 	}
+
 	gciCfg, err := config.Parse()
 	if err != nil {
 		return nil, err
 	}
+
 	return gciCfg, nil
 }
+
+// configureSections currently only handles Go module path discovery.
+// For historical reasons, golangci-lint needs an Analyzer to run, so GCI adds an Analyzer layer to integrate with it.
+// The path param comes from analyzer.go; all other callers should pass an empty string.
+func configureSections(sections section.SectionList, path string) error {
+	for _, sec := range sections {
+		switch s := sec.(type) {
+		case *section.LocalModule:
+			if err := s.Configure(path); err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
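A hedged sketch of how the new ModPath plumbing is meant to be used (the module path below is hypothetical, and the import path assumes the vendored package): golangci-lint's analyzer layer fills ModPath so the localmodule section can be configured without reading go.mod from the working directory.

```go
package main

import (
	"fmt"
	"log"

	"github.com/daixiang0/gci/pkg/config"
)

func main() {
	yamlCfg := config.YamlConfig{
		SectionStrings: []string{"Standard", "Default", "LocalModule"},
		ModPath:        "example.com/mymodule", // hypothetical; normally injected by analyzer.go
	}

	cfg, err := yamlCfg.Parse() // configureSections passes ModPath to LocalModule.Configure
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("parsed %T\n", cfg) // *config.Config
}
```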
diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/gci.go b/vendor/github.com/daixiang0/gci/pkg/gci/gci.go
index e84a166761effde378d7d24fa5dd91733864a03d..163e95a861e43877c141091e45853e5e47c8094d 100644
--- a/vendor/github.com/daixiang0/gci/pkg/gci/gci.go
+++ b/vendor/github.com/daixiang0/gci/pkg/gci/gci.go
@@ -127,11 +127,17 @@ func LoadFormatGoFile(file io.FileObj, cfg config.Config) (src, dist []byte, err
 		return nil, nil, err
 	}
 
+	return LoadFormat(src, file.Path(), cfg)
+}
+
+func LoadFormat(in []byte, path string, cfg config.Config) (src, dist []byte, err error) {
+	src = in
+
 	if cfg.SkipGenerated && parse.IsGeneratedFileByComment(string(src)) {
 		return src, src, nil
 	}
 
-	imports, headEnd, tailStart, cStart, cEnd, err := parse.ParseFile(src, file.Path())
+	imports, headEnd, tailStart, cStart, cEnd, err := parse.ParseFile(src, path)
 	if err != nil {
 		if errors.Is(err, parse.NoImportError{}) {
 			return src, src, nil
@@ -201,6 +207,10 @@ func LoadFormatGoFile(file io.FileObj, cfg config.Config) (src, dist []byte, err
 	for _, s := range slices {
 		i += copy(dist[i:], s)
 	}
+
+	// remove ^M: normalize Windows (\r\n) line breaks to Unix (\n)
+	dist = bytes.ReplaceAll(dist, []byte{utils.WinLinebreak}, []byte{utils.Linebreak})
+
 	log.L().Debug(fmt.Sprintf("raw:\n%s", dist))
 	dist, err = goFormat.Source(dist)
 	if err != nil {
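The new LoadFormat entry point works on an in-memory buffer rather than a file, which is what the golangci-lint integration needs. A minimal, self-contained sketch (the file name and source text are illustrative only):

```go
package main

import (
	"fmt"
	"log"

	"github.com/daixiang0/gci/pkg/config"
	"github.com/daixiang0/gci/pkg/gci"
)

func main() {
	cfg, err := config.ParseConfig("sections:\n  - Standard\n  - Default\n")
	if err != nil {
		log.Fatal(err)
	}

	src := []byte("package main\n\nimport (\n\t\"os\"\n\t\"fmt\"\n)\n\nfunc main() { fmt.Println(os.Args) }\n")

	// The path argument is only used for parser diagnostics, so a placeholder
	// name is fine when formatting an in-memory buffer.
	_, formatted, err := gci.LoadFormat(src, "example.go", *cfg)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(string(formatted))
}
```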
diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/testdata.go b/vendor/github.com/daixiang0/gci/pkg/gci/testdata.go
new file mode 100644
index 0000000000000000000000000000000000000000..866ae84c49fe385dadef85b5d9a9c90f16f0a6d2
--- /dev/null
+++ b/vendor/github.com/daixiang0/gci/pkg/gci/testdata.go
@@ -0,0 +1,1298 @@
+package gci
+
+type Cases struct {
+	name, config, in, out string
+}
+
+var commonConfig = `sections:
+  - Standard
+  - Default
+  - Prefix(github.com/daixiang0)
+`
+
+var testCases = []Cases{
+	{
+		"already-good",
+
+		commonConfig,
+
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"blank-format",
+
+		commonConfig,
+
+		`package main
+import (
+	"fmt"
+
+  // comment
+	g  "github.com/golang"    // comment
+
+	"github.com/daixiang0/gci"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	// comment
+	g "github.com/golang" // comment
+
+	"github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"cgo-block",
+
+		commonConfig,
+
+		`package main
+
+import (
+	/*
+	#include "types.h"
+	*/
+	"C"
+)
+`,
+		`package main
+
+import (
+	/*
+	#include "types.h"
+	*/
+	"C"
+)
+`,
+	},
+	{
+		"cgo-block-after-import",
+
+		commonConfig,
+
+		`package main
+
+import (
+	"fmt"
+
+	"github.com/daixiang0/gci"
+	g "github.com/golang"
+)
+
+// #cgo CFLAGS: -DPNG_DEBUG=1
+// #cgo amd64 386 CFLAGS: -DX86=1
+// #cgo LDFLAGS: -lpng
+// #include <png.h>
+import "C"
+`,
+		`package main
+
+// #cgo CFLAGS: -DPNG_DEBUG=1
+// #cgo amd64 386 CFLAGS: -DX86=1
+// #cgo LDFLAGS: -lpng
+// #include <png.h>
+import "C"
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"cgo-block-before-import",
+
+		commonConfig,
+
+		`package main
+
+// #cgo CFLAGS: -DPNG_DEBUG=1
+// #cgo amd64 386 CFLAGS: -DX86=1
+// #cgo LDFLAGS: -lpng
+// #include <png.h>
+import "C"
+
+import (
+	"fmt"
+
+	"github.com/daixiang0/gci"
+
+	g "github.com/golang"
+)
+`,
+		`package main
+
+// #cgo CFLAGS: -DPNG_DEBUG=1
+// #cgo amd64 386 CFLAGS: -DX86=1
+// #cgo LDFLAGS: -lpng
+// #include <png.h>
+import "C"
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"cgo-block-mixed",
+
+		commonConfig,
+
+		`package main
+
+import (
+	/* #include "types.h"
+	*/"C"
+)
+`,
+		`package main
+
+import (
+	/* #include "types.h"
+	*/"C"
+)
+`,
+	},
+	{
+		"cgo-block-mixed-with-content",
+
+		commonConfig,
+
+		`package main
+
+import (
+	/* #include "types.h"
+	#include "other.h" */"C"
+)
+`,
+		`package main
+
+import (
+	/* #include "types.h"
+	#include "other.h" */"C"
+)
+`,
+	},
+	{
+		"cgo-block-prefix",
+
+		commonConfig,
+
+		`package main
+
+import (
+	/* #include "types.h" */ "C"
+)
+`,
+		`package main
+
+import (
+	/* #include "types.h" */ "C"
+)
+`,
+	},
+	{
+		"cgo-block-single-line",
+
+		commonConfig,
+
+		`package main
+
+import (
+	/* #include "types.h" */
+	"C"
+)
+`,
+		`package main
+
+import (
+	/* #include "types.h" */
+	"C"
+)
+`,
+	},
+	{
+		"cgo-line",
+
+		commonConfig,
+
+		`package main
+
+import (
+	// #include "types.h"
+	"C"
+)
+`,
+		`package main
+
+import (
+	// #include "types.h"
+	"C"
+)
+`,
+	},
+	{
+		"cgo-multiline",
+
+		commonConfig,
+
+		`package main
+
+import (
+	// #include "types.h"
+	// #include "other.h"
+	"C"
+)
+`,
+		`package main
+
+import (
+	// #include "types.h"
+	// #include "other.h"
+	"C"
+)
+`,
+	},
+	{
+		"cgo-single",
+
+		commonConfig,
+
+		`package main
+
+import (
+	"fmt"
+
+	"github.com/daixiang0/gci"
+)
+
+import "C"
+
+import "github.com/golang"
+
+import (
+  "github.com/daixiang0/gci"
+)
+`,
+		`package main
+
+import "C"
+
+import (
+	"fmt"
+
+	"github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"comment",
+
+		commonConfig,
+
+		`package main
+import (
+	//Do not forget to run Gci
+	"fmt"
+)
+`,
+		`package main
+import (
+	//Do not forget to run Gci
+	"fmt"
+)
+`,
+	},
+	{
+		"comment-before-import",
+
+		commonConfig,
+
+		`package main
+
+// comment
+import (
+	"fmt"
+	"os"
+
+	"github.com/daixiang0/gci"
+)
+`,
+		`package main
+
+// comment
+import (
+	"fmt"
+	"os"
+
+	"github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"comment-in-the-tail",
+
+		`sections:
+  - Standard
+  - Default
+  - Prefix(github.com/daixiang0)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+
+type test int
+
+// test
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+
+type test int
+
+// test
+`,
+	},
+	{
+		"comment-top",
+
+		commonConfig,
+
+		`package main
+
+import (
+	"os" // https://pkg.go.dev/os
+	// https://pkg.go.dev/fmt
+	"fmt"
+)
+`,
+		`package main
+
+import (
+	// https://pkg.go.dev/fmt
+	"fmt"
+	"os" // https://pkg.go.dev/os
+)
+`,
+	},
+	{
+		"comment-without-whitespace",
+
+		commonConfig,
+
+		`package proc
+
+import (
+	"context"// no separating whitespace here //nolint:confusion
+)
+`,
+		`package proc
+
+import (
+	"context"// no separating whitespace here //nolint:confusion
+)
+`,
+	},
+	{
+		"comment-with-slashslash",
+
+		commonConfig,
+
+		`package main
+
+import (
+	"fmt" // https://pkg.go.dev/fmt
+)
+`,
+		`package main
+
+import (
+	"fmt" // https://pkg.go.dev/fmt
+)
+`,
+	},
+	{
+		"custom-order",
+
+		`customOrder: true
+sections:
+  - Prefix(github.com/daixiang0)
+  - Default
+  - Standard
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/a"
+)
+`,
+		`package main
+
+import (
+	"github.com/daixiang0/a"
+
+	g "github.com/golang"
+
+	"fmt"
+)
+`,
+	},
+	{
+		"default-order",
+
+		`sections:
+  - Standard
+  - Prefix(github.com/daixiang0)
+  - Default
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/a"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/a"
+)
+`,
+	},
+	{
+		"dot-and-blank",
+
+		`sections:
+  - Standard
+  - Default
+  - Prefix(github.com/daixiang0)
+  - Blank
+  - Dot
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+	. "github.com/golang/dot"
+	_ "github.com/golang/blank"
+
+	"github.com/daixiang0/a"
+	"github.com/daixiang0/gci"
+	"github.com/daixiang0/gci/subtest"
+	. "github.com/daixiang0/gci/dot"
+	_ "github.com/daixiang0/gci/blank"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/a"
+	"github.com/daixiang0/gci"
+	"github.com/daixiang0/gci/subtest"
+
+	_ "github.com/daixiang0/gci/blank"
+	_ "github.com/golang/blank"
+
+	. "github.com/daixiang0/gci/dot"
+	. "github.com/golang/dot"
+)
+`,
+	},
+	{
+		"duplicate-imports",
+
+		`sections:
+  - Standard
+  - Default
+  - Prefix(github.com/daixiang0)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	a "github.com/daixiang0/gci"
+	"github.com/daixiang0/gci"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+	a "github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"grouped-multiple-custom",
+
+		`sections:
+  - Standard
+  - Default
+  - Prefix(github.com/daixiang0,gitlab.com/daixiang0,daixiang0)
+`,
+		`package main
+
+import (
+	"daixiang0/lib1"
+	"fmt"
+	"github.com/daixiang0/gci"
+	"gitlab.com/daixiang0/gci"
+	g "github.com/golang"
+	"github.com/daixiang0/gci/subtest"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"daixiang0/lib1"
+	"github.com/daixiang0/gci"
+	"github.com/daixiang0/gci/subtest"
+	"gitlab.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"leading-comment",
+
+		commonConfig,
+
+		`package main
+
+import (
+	// foo
+	"fmt"
+)
+`,
+		`package main
+
+import (
+	// foo
+	"fmt"
+)
+`,
+	},
+	{
+		"linebreak",
+
+		`sections:
+  - Standard
+  - Default
+  - Prefix(github.com/daixiang0)
+`,
+		`package main
+
+import (
+	g "github.com/golang"
+
+	"fmt"
+
+	"github.com/daixiang0/gci"
+
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"linebreak-no-custom",
+
+		`sections:
+  - Standard
+  - Default
+  - Prefix(github.com/daixiang0)
+`,
+		`package main
+
+import (
+	g "github.com/golang"
+
+	"fmt"
+
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+)
+`,
+	},
+	{
+		"mismatch-section",
+
+		`sections:
+  - Standard
+  - Default
+  - Prefix(github.com/daixiang0)
+  - Prefix(github.com/daixiang0/gci)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"multiple-custom",
+
+		`sections:
+  - Standard
+  - Default
+  - Prefix(github.com/daixiang0)
+  - Prefix(github.com/daixiang0/gci)
+  - Prefix(github.com/daixiang0/gci/subtest)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/a"
+	"github.com/daixiang0/gci"
+	"github.com/daixiang0/gci/subtest"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/a"
+
+	"github.com/daixiang0/gci"
+
+	"github.com/daixiang0/gci/subtest"
+)
+`,
+	},
+	{
+		"multiple-imports",
+
+		commonConfig,
+
+		`package main
+
+import "fmt"
+
+import "context"
+
+import (
+	"os"
+
+	"github.com/daixiang0/test"
+)
+
+import "math"
+
+
+// main
+func main() {
+}
+`,
+		`package main
+
+import (
+	"context"
+	"fmt"
+	"math"
+	"os"
+
+	"github.com/daixiang0/test"
+)
+
+// main
+func main() {
+}
+`,
+	},
+	{
+		"multiple-line-comment",
+
+		commonConfig,
+
+		`package proc
+
+import (
+	"context" // in-line comment
+	"fmt"
+	"os"
+
+	//nolint:depguard // A multi-line comment explaining why in
+	// this one case it's OK to use os/exec even though depguard
+	// is configured to force us to use dlib/exec instead.
+	"os/exec"
+
+	"golang.org/x/sys/unix"
+	"github.com/local/dlib/dexec"
+)
+`,
+		`package proc
+
+import (
+	"context" // in-line comment
+	"fmt"
+	"os"
+	//nolint:depguard // A multi-line comment explaining why in
+	// this one case it's OK to use os/exec even though depguard
+	// is configured to force us to use dlib/exec instead.
+	"os/exec"
+
+	"github.com/local/dlib/dexec"
+	"golang.org/x/sys/unix"
+)
+`,
+	},
+	{
+		"nochar-after-import",
+
+		commonConfig,
+
+		`package main
+
+import (
+	"fmt"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+)
+`,
+	},
+	{
+		"no-format",
+
+		commonConfig,
+
+		`package main
+
+import(
+"fmt"
+
+g "github.com/golang"
+
+"github.com/daixiang0/gci"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"nolint",
+
+		commonConfig,
+
+		`package main
+
+import (
+	"fmt"
+
+	"github.com/forbidden/pkg" //nolint:depguard
+
+	_ "github.com/daixiang0/gci" //nolint:depguard
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	"github.com/forbidden/pkg" //nolint:depguard
+
+	_ "github.com/daixiang0/gci" //nolint:depguard
+)
+`,
+	},
+	{
+		"number-in-alias",
+
+		commonConfig,
+
+		`package main
+
+import (
+	"fmt"
+
+	go_V1 "github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	go_V1 "github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"one-import",
+
+		commonConfig,
+
+		`package main
+import (
+	"fmt"
+)
+
+func main() {
+}
+`,
+		`package main
+import (
+	"fmt"
+)
+
+func main() {
+}
+`,
+	},
+	{
+		"one-import-one-line",
+
+		commonConfig,
+
+		`package main
+
+import "fmt"
+
+func main() {
+}
+`,
+		`package main
+
+import "fmt"
+
+func main() {
+}
+`,
+	},
+	{
+		"one-line-import-after-import",
+
+		`sections:
+  - Standard
+  - Default
+  - Prefix(github.com/daixiang0)
+`,
+		`package main
+
+import (
+	"fmt"
+	"os"
+
+	"github.com/daixiang0/test"
+)
+
+import "context"
+`,
+		`package main
+
+import (
+	"context"
+	"fmt"
+	"os"
+
+	"github.com/daixiang0/test"
+)
+`,
+	},
+	{
+		"same-prefix-custom",
+
+		`sections:
+  - Standard
+  - Default
+  - Prefix(github.com/daixiang0/gci)
+  - Prefix(github.com/daixiang0/gci/subtest)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+	"github.com/daixiang0/gci/subtest"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+
+	"github.com/daixiang0/gci/subtest"
+)
+`,
+	},
+	{
+		"simple-case",
+
+		commonConfig,
+
+		`package main
+
+import (
+	"golang.org/x/tools"
+
+	"fmt"
+
+	"github.com/daixiang0/gci"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	"golang.org/x/tools"
+
+	"github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"whitespace-test",
+
+		commonConfig,
+
+		`package main
+
+import (
+	"fmt"
+	"github.com/golang" // golang
+	alias "github.com/daixiang0/gci"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	"github.com/golang" // golang
+
+	alias "github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"with-above-comment-and-alias",
+
+		commonConfig,
+
+		`package main
+
+import (
+	"fmt"
+	// golang
+	_ "github.com/golang"
+	"github.com/daixiang0/gci"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	// golang
+	_ "github.com/golang"
+
+	"github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"with-comment-and-alias",
+
+		commonConfig,
+
+		`package main
+
+import (
+	"fmt"
+	_ "github.com/golang" // golang
+	"github.com/daixiang0/gci"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	_ "github.com/golang" // golang
+
+	"github.com/daixiang0/gci"
+)
+`,
+	},
+	{
+		"same-prefix-custom",
+
+		`sections:
+  - Standard
+  - Default
+  - Prefix(github.com/daixiang0/gci)
+  - Prefix(github.com/daixiang0/gci/subtest)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+	"github.com/daixiang0/gci/subtest"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+
+	"github.com/daixiang0/gci/subtest"
+)
+`,
+	},
+	{
+		"same-prefix-custom",
+
+		`sections:
+  - Standard
+  - Default
+  - Prefix(github.com/daixiang0/gci)
+  - Prefix(github.com/daixiang0/gci/subtest)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+	"github.com/daixiang0/gci/subtest"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+
+	"github.com/daixiang0/gci/subtest"
+)
+`,
+	},
+	{
+		"blank-in-config",
+
+		`sections:
+  - Standard
+  - Default
+  - Prefix(  github.com/daixiang0/gci,   github.com/daixiang0/gci/subtest  )
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+	"github.com/daixiang0/gci/subtest"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+	"github.com/daixiang0/gci/subtest"
+)
+`,
+	},
+	{
+		"alias",
+
+		`sections:
+  - Standard
+  - Default
+  - Alias
+`,
+		`package main
+
+import (
+	testing "github.com/daixiang0/test"
+	"fmt"
+
+	g "github.com/golang"
+
+	"github.com/daixiang0/gci"
+	"github.com/daixiang0/gci/subtest"
+)
+`,
+		`package main
+
+import (
+	"fmt"
+
+	"github.com/daixiang0/gci"
+	"github.com/daixiang0/gci/subtest"
+
+	testing "github.com/daixiang0/test"
+	g "github.com/golang"
+)
+`,
+	},
+	{
+		"no-trailing-newline",
+
+		`sections:
+  - Standard
+`,
+		`package main
+
+import (
+	"net"
+	"fmt"
+)`,
+		`package main
+
+import (
+	"fmt"
+	"net"
+)
+`,
+	},
+}
diff --git a/vendor/github.com/daixiang0/gci/pkg/parse/parse.go b/vendor/github.com/daixiang0/gci/pkg/parse/parse.go
index 33d6e1705d391afaf1cd73cf5383c406fb7c7c66..e8532f850de6e37aff1f64985e3d5e857e4d47f0 100644
--- a/vendor/github.com/daixiang0/gci/pkg/parse/parse.go
+++ b/vendor/github.com/daixiang0/gci/pkg/parse/parse.go
@@ -111,6 +111,9 @@ func ParseFile(src []byte, filename string) (ImportList, int, int, int, int, err
 					headEnd = int(decl.Pos()) - 1
 				}
 				tailStart = int(decl.End())
+				if tailStart > len(src) {
+					tailStart = len(src)
+				}
 
 				for _, spec := range genDecl.Specs {
 					imp := spec.(*ast.ImportSpec)
diff --git a/vendor/github.com/daixiang0/gci/pkg/section/alias.go b/vendor/github.com/daixiang0/gci/pkg/section/alias.go
new file mode 100644
index 0000000000000000000000000000000000000000..423e96acf0da762ed89d68a775602ffcb3673836
--- /dev/null
+++ b/vendor/github.com/daixiang0/gci/pkg/section/alias.go
@@ -0,0 +1,25 @@
+package section
+
+import (
+	"github.com/daixiang0/gci/pkg/parse"
+	"github.com/daixiang0/gci/pkg/specificity"
+)
+
+type Alias struct{}
+
+const AliasType = "alias"
+
+func (b Alias) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity {
+	if spec.Name != "." && spec.Name != "_" && spec.Name != "" {
+		return specificity.NameMatch{}
+	}
+	return specificity.MisMatch{}
+}
+
+func (b Alias) String() string {
+	return AliasType
+}
+
+func (b Alias) Type() string {
+	return AliasType
+}
diff --git a/vendor/github.com/daixiang0/gci/pkg/section/local_module.go b/vendor/github.com/daixiang0/gci/pkg/section/local_module.go
new file mode 100644
index 0000000000000000000000000000000000000000..50f41e50179a2f50f01ef5dd13a32f096fd2faae
--- /dev/null
+++ b/vendor/github.com/daixiang0/gci/pkg/section/local_module.go
@@ -0,0 +1,59 @@
+package section
+
+import (
+	"fmt"
+	"os"
+	"strings"
+
+	"golang.org/x/mod/modfile"
+
+	"github.com/daixiang0/gci/pkg/parse"
+	"github.com/daixiang0/gci/pkg/specificity"
+)
+
+const LocalModuleType = "localmodule"
+
+type LocalModule struct {
+	Path string
+}
+
+func (m *LocalModule) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity {
+	if spec.Path == m.Path || strings.HasPrefix(spec.Path, m.Path+"/") {
+		return specificity.LocalModule{}
+	}
+
+	return specificity.MisMatch{}
+}
+
+func (m *LocalModule) String() string {
+	return LocalModuleType
+}
+
+func (m *LocalModule) Type() string {
+	return LocalModuleType
+}
+
+// Configure configures the module section by finding the module
+// for the current path
+func (m *LocalModule) Configure(path string) error {
+	if path != "" {
+		m.Path = path
+	} else {
+		path, err := findLocalModule()
+		if err != nil {
+			return fmt.Errorf("finding local modules for `localModule` configuration: %w", err)
+		}
+		m.Path = path
+	}
+
+	return nil
+}
+
+func findLocalModule() (string, error) {
+	b, err := os.ReadFile("go.mod")
+	if err != nil {
+		return "", fmt.Errorf("reading go.mod: %w", err)
+	}
+
+	return modfile.ModulePath(b), nil
+}
diff --git a/vendor/github.com/daixiang0/gci/pkg/section/parser.go b/vendor/github.com/daixiang0/gci/pkg/section/parser.go
index 9834dcd139c297ad902da862c64f9a326719ec6d..62ed1582af9a863be14060cd664b67dd6a3df7b0 100644
--- a/vendor/github.com/daixiang0/gci/pkg/section/parser.go
+++ b/vendor/github.com/daixiang0/gci/pkg/section/parser.go
@@ -33,6 +33,11 @@ func Parse(data []string) (SectionList, error) {
 			list = append(list, Dot{})
 		} else if s == "blank" {
 			list = append(list, Blank{})
+		} else if s == "alias" {
+			list = append(list, Alias{})
+		} else if s == "localmodule" {
+			// pointer because we need to mutate the section at configuration time
+			list = append(list, &LocalModule{})
 		} else {
 			errString += fmt.Sprintf(" %s", s)
 		}
diff --git a/vendor/github.com/daixiang0/gci/pkg/section/prefix.go b/vendor/github.com/daixiang0/gci/pkg/section/prefix.go
index a274347cddef548aee8464cffd5efc4919a30c88..30bdd8f4ea5f4de376e403182f1b28fa8cdba9fb 100644
--- a/vendor/github.com/daixiang0/gci/pkg/section/prefix.go
+++ b/vendor/github.com/daixiang0/gci/pkg/section/prefix.go
@@ -20,6 +20,7 @@ const CustomType = "custom"
 
 func (c Custom) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity {
 	for _, prefix := range strings.Split(c.Prefix, CustomSeparator) {
+		prefix = strings.TrimSpace(prefix)
 		if strings.HasPrefix(spec.Path, prefix) {
 			return specificity.Match{Length: len(prefix)}
 		}
diff --git a/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go b/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go
index 05e799939648c59bfce46a1cfabf409c03df70a7..a2cd0a6dd21dacd22e2b2197830f4f002bb8d66c 100644
--- a/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go
+++ b/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go
@@ -1,6 +1,6 @@
 package section
 
-// Code generated based on go1.21.0 X:arenas. DO NOT EDIT.
+// Code generated based on go1.22.0 X:boringcrypto,arenas. DO NOT EDIT.
 
 var standardPackages = map[string]struct{}{
 	"archive/tar":          {},
@@ -20,6 +20,7 @@ var standardPackages = map[string]struct{}{
 	"context":              {},
 	"crypto":               {},
 	"crypto/aes":           {},
+	"crypto/boring":        {},
 	"crypto/cipher":        {},
 	"crypto/des":           {},
 	"crypto/dsa":           {},
@@ -37,6 +38,7 @@ var standardPackages = map[string]struct{}{
 	"crypto/sha512":        {},
 	"crypto/subtle":        {},
 	"crypto/tls":           {},
+	"crypto/tls/fipsonly":  {},
 	"crypto/x509":          {},
 	"crypto/x509/pkix":     {},
 	"database/sql":         {},
@@ -78,6 +80,7 @@ var standardPackages = map[string]struct{}{
 	"go/scanner":           {},
 	"go/token":             {},
 	"go/types":             {},
+	"go/version":           {},
 	"hash":                 {},
 	"hash/adler32":         {},
 	"hash/crc32":           {},
@@ -106,6 +109,7 @@ var standardPackages = map[string]struct{}{
 	"math/bits":            {},
 	"math/cmplx":           {},
 	"math/rand":            {},
+	"math/rand/v2":         {},
 	"mime":                 {},
 	"mime/multipart":       {},
 	"mime/quotedprintable": {},
diff --git a/vendor/github.com/daixiang0/gci/pkg/specificity/local_module.go b/vendor/github.com/daixiang0/gci/pkg/specificity/local_module.go
new file mode 100644
index 0000000000000000000000000000000000000000..ae482fec4721f428bb0c39d836ca0afdb77c35f7
--- /dev/null
+++ b/vendor/github.com/daixiang0/gci/pkg/specificity/local_module.go
@@ -0,0 +1,15 @@
+package specificity
+
+type LocalModule struct{}
+
+func (m LocalModule) IsMoreSpecific(than MatchSpecificity) bool {
+	return isMoreSpecific(m, than)
+}
+
+func (m LocalModule) Equal(to MatchSpecificity) bool {
+	return equalSpecificity(m, to)
+}
+
+func (LocalModule) class() specificityClass {
+	return LocalModuleClass
+}
diff --git a/vendor/github.com/daixiang0/gci/pkg/specificity/specificity.go b/vendor/github.com/daixiang0/gci/pkg/specificity/specificity.go
index 842da185797d1e14bdf6054476513478aa573463..4a188b3bb4dc49d8e7815cd04e853e08dbac9e36 100644
--- a/vendor/github.com/daixiang0/gci/pkg/specificity/specificity.go
+++ b/vendor/github.com/daixiang0/gci/pkg/specificity/specificity.go
@@ -3,11 +3,12 @@ package specificity
 type specificityClass int
 
 const (
-	MisMatchClass = 0
-	DefaultClass  = 10
-	StandardClass = 20
-	MatchClass    = 30
-	NameClass     = 40
+	MisMatchClass    = 0
+	DefaultClass     = 10
+	StandardClass    = 20
+	MatchClass       = 30
+	NameClass        = 40
+	LocalModuleClass = 50
 )
 
 // MatchSpecificity is used to determine which section matches an import best
diff --git a/vendor/github.com/daixiang0/gci/pkg/utils/constants.go b/vendor/github.com/daixiang0/gci/pkg/utils/constants.go
index 0e7cce7576fa4219752c6c96fd4bdad113c50e73..2fafbc32ccd843e33edc2f9c72b5942d99e4aac1 100644
--- a/vendor/github.com/daixiang0/gci/pkg/utils/constants.go
+++ b/vendor/github.com/daixiang0/gci/pkg/utils/constants.go
@@ -1,8 +1,9 @@
 package utils
 
 const (
-	Indent    = '\t'
-	Linebreak = '\n'
+	Indent       = '\t'
+	Linebreak    = '\n'
+	WinLinebreak = '\r'
 
 	Colon = ":"
 
diff --git a/vendor/github.com/denis-tingaikin/go-header/.go-header.yml b/vendor/github.com/denis-tingaikin/go-header/.go-header.yml
index 3f87c8798dc4cce45bdaa99f203818b31679975d..3aa6d060dbd0cc911ebfdb82665f52ee4ced40ab 100644
--- a/vendor/github.com/denis-tingaikin/go-header/.go-header.yml
+++ b/vendor/github.com/denis-tingaikin/go-header/.go-header.yml
@@ -1,6 +1,6 @@
 values:
   regexp:
-    copyright-holder: Copyright \(c\) {{year-range}} Denis Tingaikin
+    copyright-holder: Copyright \(c\) {{mod-year-range}} Denis Tingaikin
 template: |
   {{copyright-holder}}
 
diff --git a/vendor/github.com/denis-tingaikin/go-header/README.md b/vendor/github.com/denis-tingaikin/go-header/README.md
index c2044ec96edba655602964bd7142002eb49714cb..fcddad1fa116e58a302fb04eb5b9a6f80d4ef010 100644
--- a/vendor/github.com/denis-tingaikin/go-header/README.md
+++ b/vendor/github.com/denis-tingaikin/go-header/README.md
@@ -8,7 +8,7 @@ Go source code linter providing checks for license headers.
 For installation you can simply use `go get`.
 
 ```bash
-go get github.com/denis-tingaikin/go-header/cmd/go-header
+go install github.com/denis-tingaikin/go-header/cmd/go-header
 ```
 
 ## Configuration
@@ -38,6 +38,8 @@ values:
 
 ## Bult-in values
 
+- **MOD-YEAR** - Returns the year when the file was modified.
+- **MOD-YEAR-RANGE** - Returns a year-range where the range starts from the year when the file was modified.
 - **YEAR** - Expects current year. Example header value: `2020`.  Example of template using: `{{YEAR}}` or `{{year}}`.
 - **YEAR-RANGE** - Expects any valid year interval or current year. Example header value: `2020` or `2000-2020`. Example of template using: `{{year-range}}` or `{{YEAR-RANGE}}`.
 
diff --git a/vendor/github.com/denis-tingaikin/go-header/analyzer.go b/vendor/github.com/denis-tingaikin/go-header/analyzer.go
index 785a02e79de89e8a227fbb6fc8deb3364ff0431c..c6b361f01dea72af52a44f0d2bef481768fa4688 100644
--- a/vendor/github.com/denis-tingaikin/go-header/analyzer.go
+++ b/vendor/github.com/denis-tingaikin/go-header/analyzer.go
@@ -1,4 +1,4 @@
-// Copyright (c) 2020-2022 Denis Tingaikin
+// Copyright (c) 2020-2024 Denis Tingaikin
 //
 // SPDX-License-Identifier: Apache-2.0
 //
@@ -52,15 +52,31 @@ type Analyzer struct {
 	template string
 }
 
-func (a *Analyzer) Analyze(target *Target) Issue {
+func (a *Analyzer) processPerTargetValues(target *Target) error {
+	a.values["mod-year"] = a.values["year"]
+	a.values["mod-year-range"] = a.values["year-range"]
+	if t, err := target.ModTime(); err == nil {
+		a.values["mod-year"] = &ConstValue{RawValue: fmt.Sprint(t.Year())}
+		a.values["mod-year-range"] = &RegexpValue{RawValue: `((20\d\d\-{{mod-year}})|({{mod-year}}))`}
+	}
+
+	for _, v := range a.values {
+		if err := v.Calculate(a.values); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func (a *Analyzer) Analyze(target *Target) (i Issue) {
 	if a.template == "" {
 		return NewIssue("Missed template for check")
 	}
-	if t, err := target.ModTime(); err == nil {
-		if t.Year() != time.Now().Year() {
-			return nil
-		}
+
+	if err := a.processPerTargetValues(target); err != nil {
+		return &issue{msg: err.Error()}
 	}
+
 	file := target.File
 	var header string
 	var offset = Location{
@@ -74,6 +90,16 @@ func (a *Analyzer) Analyze(target *Target) Issue {
 			offset.Position += 3
 		}
 	}
+	defer func() {
+		if i == nil {
+			return
+		}
+		fix, ok := a.generateFix(i, file, header)
+		if !ok {
+			return
+		}
+		i = NewIssueWithFix(i.Message(), i.Location(), fix)
+	}()
 	header = strings.TrimSpace(header)
 	if header == "" {
 		return NewIssue("Missed header for check")
@@ -132,15 +158,99 @@ func (a *Analyzer) readField(reader *Reader) string {
 }
 
 func New(options ...Option) *Analyzer {
-	a := &Analyzer{}
+	a := &Analyzer{values: make(map[string]Value)}
 	for _, o := range options {
 		o.apply(a)
 	}
-	for _, v := range a.values {
-		err := v.Calculate(a.values)
-		if err != nil {
-			panic(err.Error())
+	return a
+}
+
+func (a *Analyzer) generateFix(i Issue, file *ast.File, header string) (Fix, bool) {
+	var expect string
+	t := NewReader(a.template)
+	for !t.Done() {
+		ch := t.Peek()
+		if ch == '{' {
+			f := a.values[a.readField(t)]
+			if f == nil {
+				return Fix{}, false
+			}
+			if f.Calculate(a.values) != nil {
+				return Fix{}, false
+			}
+			expect += f.Get()
+			continue
 		}
+
+		expect += string(ch)
+		t.Next()
 	}
-	return a
+
+	fix := Fix{Expected: strings.Split(expect, "\n")}
+	if !(len(file.Comments) > 0 && file.Comments[0].Pos() < file.Package) {
+		for i := range fix.Expected {
+			fix.Expected[i] = "// " + fix.Expected[i]
+		}
+		return fix, true
+	}
+
+	actual := file.Comments[0].List[0].Text
+	if !strings.HasPrefix(actual, "/*") {
+		for i := range fix.Expected {
+			fix.Expected[i] = "// " + fix.Expected[i]
+		}
+		for _, c := range file.Comments[0].List {
+			fix.Actual = append(fix.Actual, c.Text)
+		}
+		i = NewIssueWithFix(i.Message(), i.Location(), fix)
+		return fix, true
+	}
+
+	gets := func(i int, end bool) string {
+		if i < 0 {
+			return header
+		}
+		if end {
+			return header[i+1:]
+		}
+		return header[:i]
+	}
+	start := strings.Index(actual, gets(strings.IndexByte(header, '\n'), false))
+	if start < 0 {
+		return Fix{}, false // Should be impossible
+	}
+	nl := strings.LastIndexByte(actual[:start], '\n')
+	if nl >= 0 {
+		fix.Actual = strings.Split(actual[:nl], "\n")
+		fix.Expected = append(fix.Actual, fix.Expected...)
+		actual = actual[nl+1:]
+		start -= nl + 1
+	}
+
+	prefix := actual[:start]
+	if nl < 0 {
+		fix.Expected[0] = prefix + fix.Expected[0]
+	} else {
+		n := len(fix.Actual)
+		for i := range fix.Expected[n:] {
+			fix.Expected[n+i] = prefix + fix.Expected[n+i]
+		}
+	}
+
+	last := gets(strings.LastIndexByte(header, '\n'), true)
+	end := strings.Index(actual, last)
+	if end < 0 {
+		return Fix{}, false // Should be impossible
+	}
+
+	trailing := actual[end+len(last):]
+	if i := strings.IndexRune(trailing, '\n'); i < 0 {
+		fix.Expected[len(fix.Expected)-1] += trailing
+	} else {
+		fix.Expected[len(fix.Expected)-1] += trailing[:i]
+		fix.Expected = append(fix.Expected, strings.Split(trailing[i+1:], "\n")...)
+	}
+
+	fix.Actual = append(fix.Actual, strings.Split(actual, "\n")...)
+	return fix, true
 }
diff --git a/vendor/github.com/denis-tingaikin/go-header/config.go b/vendor/github.com/denis-tingaikin/go-header/config.go
index 9576b949fd11f3a8043c963b8ec2d58d436e504a..c881b63acdc48166c0158cf227ccb4a6186fd411 100644
--- a/vendor/github.com/denis-tingaikin/go-header/config.go
+++ b/vendor/github.com/denis-tingaikin/go-header/config.go
@@ -1,4 +1,4 @@
-// Copyright (c) 2020-2022 Denis Tingaikin
+// Copyright (c) 2020-2024 Denis Tingaikin
 //
 // SPDX-License-Identifier: Apache-2.0
 //
@@ -19,7 +19,7 @@ package goheader
 import (
 	"errors"
 	"fmt"
-	"io/ioutil"
+	"os"
 	"strings"
 	"time"
 
@@ -40,7 +40,7 @@ func (c *Configuration) builtInValues() map[string]Value {
 	var result = make(map[string]Value)
 	year := fmt.Sprint(time.Now().Year())
 	result["year-range"] = &RegexpValue{
-		RawValue: strings.ReplaceAll(`((20\d\d\-YEAR)|(YEAR))`, "YEAR", year),
+		RawValue: `((20\d\d\-{{YEAR}})|({{YEAR}}))`,
 	}
 	result["year"] = &ConstValue{
 		RawValue: year,
@@ -82,7 +82,7 @@ func (c *Configuration) GetTemplate() (string, error) {
 	if c.TemplatePath == "" {
 		return "", errors.New("template has not passed")
 	}
-	if b, err := ioutil.ReadFile(c.TemplatePath); err != nil {
+	if b, err := os.ReadFile(c.TemplatePath); err != nil {
 		return "", err
 	} else {
 		c.Template = strings.TrimSpace(string(b))
@@ -91,7 +91,7 @@ func (c *Configuration) GetTemplate() (string, error) {
 }
 
 func (c *Configuration) Parse(p string) error {
-	b, err := ioutil.ReadFile(p)
+	b, err := os.ReadFile(p)
 	if err != nil {
 		return err
 	}
diff --git a/vendor/github.com/denis-tingaikin/go-header/issue.go b/vendor/github.com/denis-tingaikin/go-header/issue.go
index 0ada0d62cf54546612082180f758a867b2a85150..e92279793cb5f2cfd3f034d58aaad8b641aafb40 100644
--- a/vendor/github.com/denis-tingaikin/go-header/issue.go
+++ b/vendor/github.com/denis-tingaikin/go-header/issue.go
@@ -1,4 +1,4 @@
-// Copyright (c) 2020-2022 Denis Tingaikin
+// Copyright (c) 2020-2024 Denis Tingaikin
 //
 // SPDX-License-Identifier: Apache-2.0
 //
@@ -19,11 +19,18 @@ package goheader
 type Issue interface {
 	Location() Location
 	Message() string
+	Fix() *Fix
 }
 
 type issue struct {
 	msg      string
 	location Location
+	fix      *Fix
+}
+
+type Fix struct {
+	Actual   []string
+	Expected []string
 }
 
 func (i *issue) Location() Location {
@@ -34,6 +41,10 @@ func (i *issue) Message() string {
 	return i.msg
 }
 
+func (i *issue) Fix() *Fix {
+	return i.fix
+}
+
 func NewIssueWithLocation(msg string, location Location) Issue {
 	return &issue{
 		msg:      msg,
@@ -41,6 +52,14 @@ func NewIssueWithLocation(msg string, location Location) Issue {
 	}
 }
 
+func NewIssueWithFix(msg string, location Location, fix Fix) Issue {
+	return &issue{
+		msg:      msg,
+		location: location,
+		fix:      &fix,
+	}
+}
+
 func NewIssue(msg string) Issue {
 	return &issue{
 		msg: msg,
diff --git a/vendor/github.com/denis-tingaikin/go-header/value.go b/vendor/github.com/denis-tingaikin/go-header/value.go
index dcb206d353919e70fb39513c1d47abb422791406..706a84f18ae5855159f25b34a3d94d9a4c076f6b 100644
--- a/vendor/github.com/denis-tingaikin/go-header/value.go
+++ b/vendor/github.com/denis-tingaikin/go-header/value.go
@@ -1,4 +1,4 @@
-// Copyright (c) 2020-2022 Denis Tingaikin
+// Copyright (c) 2020-2024 Denis Tingaikin
 //
 // SPDX-License-Identifier: Apache-2.0
 //
@@ -26,6 +26,7 @@ import (
 type Calculable interface {
 	Calculate(map[string]Value) error
 	Get() string
+	Raw() string
 }
 
 type Value interface {
@@ -35,7 +36,7 @@ type Value interface {
 
 func calculateValue(calculable Calculable, values map[string]Value) (string, error) {
 	sb := strings.Builder{}
-	r := calculable.Get()
+	r := calculable.Raw()
 	var endIndex int
 	var startIndex int
 	for startIndex = strings.Index(r, "{{"); startIndex >= 0; startIndex = strings.Index(r, "{{") {
@@ -61,7 +62,7 @@ func calculateValue(calculable Calculable, values map[string]Value) (string, err
 }
 
 type ConstValue struct {
-	RawValue string
+	RawValue, Value string
 }
 
 func (c *ConstValue) Calculate(values map[string]Value) error {
@@ -69,14 +70,25 @@ func (c *ConstValue) Calculate(values map[string]Value) error {
 	if err != nil {
 		return err
 	}
-	c.RawValue = v
+	c.Value = v
 	return nil
 }
 
+func (c *ConstValue) Raw() string {
+	return c.RawValue
+}
+
 func (c *ConstValue) Get() string {
+	if c.Value != "" {
+		return c.Value
+	}
 	return c.RawValue
 }
 
+func (c *ConstValue) String() string {
+	return c.Get()
+}
+
 func (c *ConstValue) Read(s *Reader) Issue {
 	l := s.Location()
 	p := s.Position()
@@ -94,7 +106,7 @@ func (c *ConstValue) Read(s *Reader) Issue {
 }
 
 type RegexpValue struct {
-	RawValue string
+	RawValue, Value string
 }
 
 func (r *RegexpValue) Calculate(values map[string]Value) error {
@@ -102,14 +114,24 @@ func (r *RegexpValue) Calculate(values map[string]Value) error {
 	if err != nil {
 		return err
 	}
-	r.RawValue = v
+	r.Value = v
 	return nil
 }
 
+func (r *RegexpValue) Raw() string {
+	return r.RawValue
+}
 func (r *RegexpValue) Get() string {
+	if r.Value != "" {
+		return r.Value
+	}
 	return r.RawValue
 }
 
+func (r *RegexpValue) String() string {
+	return r.Get()
+}
+
 func (r *RegexpValue) Read(s *Reader) Issue {
 	l := s.Location()
 	p := regexp.MustCompile(r.Get())
diff --git a/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go b/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go
deleted file mode 100644
index b2d06881d7aeaf0f423e360b50f489a8949e37d2..0000000000000000000000000000000000000000
--- a/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go
+++ /dev/null
@@ -1,280 +0,0 @@
-package analyzer
-
-import (
-	"go/ast"
-	"go/token"
-
-	"golang.org/x/tools/go/analysis"
-	"golang.org/x/tools/go/analysis/passes/inspect"
-	"golang.org/x/tools/go/ast/inspector"
-)
-
-var maxDeclChars, maxDeclLines int
-
-const (
-	maxDeclLinesUsage = `maximum length of variable declaration measured in number of lines, after which the linter won't suggest using short syntax.
-Has precedence over max-decl-chars.`
-	maxDeclCharsUsage = `maximum length of variable declaration measured in number of characters, after which the linter won't suggest using short syntax.`
-)
-
-func init() {
-	Analyzer.Flags.IntVar(&maxDeclLines, "max-decl-lines", 1, maxDeclLinesUsage)
-	Analyzer.Flags.IntVar(&maxDeclChars, "max-decl-chars", 30, maxDeclCharsUsage)
-}
-
-// Analyzer is an analysis.Analyzer instance for ifshort linter.
-var Analyzer = &analysis.Analyzer{
-	Name:     "ifshort",
-	Doc:      "Checks that your code uses short syntax for if-statements whenever possible.",
-	Run:      run,
-	Requires: []*analysis.Analyzer{inspect.Analyzer},
-}
-
-func run(pass *analysis.Pass) (interface{}, error) {
-	inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
-	nodeFilter := []ast.Node{
-		(*ast.FuncDecl)(nil),
-	}
-
-	inspector.Preorder(nodeFilter, func(node ast.Node) {
-		fdecl := node.(*ast.FuncDecl)
-
-		/*if fdecl.Name.Name != "notUsed_BinaryExpressionInIndex_OK" {
-			return
-		}*/
-
-		if fdecl == nil || fdecl.Body == nil {
-			return
-		}
-
-		candidates := getNamedOccurrenceMap(fdecl, pass)
-
-		for _, stmt := range fdecl.Body.List {
-			candidates.checkStatement(stmt, token.NoPos)
-		}
-
-		for varName := range candidates {
-			for marker, occ := range candidates[varName] {
-				//  If two or more vars with the same scope marker - skip them.
-				if candidates.isFoundByScopeMarker(marker) {
-					continue
-				}
-
-				pass.Reportf(occ.declarationPos,
-					"variable '%s' is only used in the if-statement (%s); consider using short syntax",
-					varName, pass.Fset.Position(occ.ifStmtPos))
-			}
-		}
-	})
-	return nil, nil
-}
-
-func (nom namedOccurrenceMap) checkStatement(stmt ast.Stmt, ifPos token.Pos) {
-	switch v := stmt.(type) {
-	case *ast.AssignStmt:
-		for _, el := range v.Rhs {
-			nom.checkExpression(el, ifPos)
-		}
-		if isAssign(v.Tok) {
-			for _, el := range v.Lhs {
-				nom.checkExpression(el, ifPos)
-			}
-		}
-	case *ast.DeferStmt:
-		for _, a := range v.Call.Args {
-			nom.checkExpression(a, ifPos)
-		}
-	case *ast.ExprStmt:
-		switch v.X.(type) {
-		case *ast.CallExpr, *ast.UnaryExpr:
-			nom.checkExpression(v.X, ifPos)
-		}
-	case *ast.ForStmt:
-		for _, el := range v.Body.List {
-			nom.checkStatement(el, ifPos)
-		}
-
-		if bexpr, ok := v.Cond.(*ast.BinaryExpr); ok {
-			nom.checkExpression(bexpr.X, ifPos)
-			nom.checkExpression(bexpr.Y, ifPos)
-		}
-
-		nom.checkStatement(v.Post, ifPos)
-	case *ast.GoStmt:
-		for _, a := range v.Call.Args {
-			nom.checkExpression(a, ifPos)
-		}
-	case *ast.IfStmt:
-		for _, el := range v.Body.List {
-			nom.checkStatement(el, v.If)
-		}
-		if elseBlock, ok := v.Else.(*ast.BlockStmt); ok {
-			for _, el := range elseBlock.List {
-				nom.checkStatement(el, v.If)
-			}
-		}
-
-		switch cond := v.Cond.(type) {
-		case *ast.UnaryExpr:
-			nom.checkExpression(cond.X, v.If)
-		case *ast.BinaryExpr:
-			nom.checkExpression(cond.X, v.If)
-			nom.checkExpression(cond.Y, v.If)
-		case *ast.CallExpr:
-			nom.checkExpression(cond, v.If)
-		}
-
-		if init, ok := v.Init.(*ast.AssignStmt); ok {
-			for _, e := range init.Rhs {
-				nom.checkExpression(e, v.If)
-			}
-		}
-	case *ast.IncDecStmt:
-		nom.checkExpression(v.X, ifPos)
-	case *ast.RangeStmt:
-		nom.checkExpression(v.X, ifPos)
-		if v.Body != nil {
-			for _, e := range v.Body.List {
-				nom.checkStatement(e, ifPos)
-			}
-		}
-	case *ast.ReturnStmt:
-		for _, r := range v.Results {
-			nom.checkExpression(r, ifPos)
-		}
-	case *ast.SendStmt:
-		nom.checkExpression(v.Chan, ifPos)
-		nom.checkExpression(v.Value, ifPos)
-	case *ast.SwitchStmt:
-		nom.checkExpression(v.Tag, ifPos)
-
-		for _, el := range v.Body.List {
-			clauses, ok := el.(*ast.CaseClause)
-			if !ok {
-				continue
-			}
-
-			for _, c := range clauses.List {
-				switch v := c.(type) {
-				case *ast.BinaryExpr:
-					nom.checkExpression(v.X, ifPos)
-					nom.checkExpression(v.Y, ifPos)
-				case *ast.Ident:
-					nom.checkExpression(v, ifPos)
-				}
-			}
-
-			for _, c := range clauses.Body {
-				switch v := c.(type) {
-				case *ast.AssignStmt:
-					for _, el := range v.Lhs {
-						nom.checkExpression(el, ifPos)
-					}
-					for _, el := range v.Rhs {
-						nom.checkExpression(el, ifPos)
-					}
-				case *ast.ExprStmt:
-					nom.checkExpression(v.X, ifPos)
-				}
-			}
-		}
-	case *ast.SelectStmt:
-		for _, el := range v.Body.List {
-			clause := el.(*ast.CommClause)
-
-			nom.checkStatement(clause.Comm, ifPos)
-
-			for _, c := range clause.Body {
-				switch v := c.(type) {
-				case *ast.AssignStmt:
-					for _, el := range v.Lhs {
-						nom.checkExpression(el, ifPos)
-					}
-					for _, el := range v.Rhs {
-						nom.checkExpression(el, ifPos)
-					}
-				case *ast.ExprStmt:
-					nom.checkExpression(v.X, ifPos)
-				}
-			}
-		}
-	case *ast.LabeledStmt:
-		nom.checkStatement(v.Stmt, ifPos)
-	}
-}
-
-func (nom namedOccurrenceMap) checkExpression(candidate ast.Expr, ifPos token.Pos) {
-	switch v := candidate.(type) {
-	case *ast.BinaryExpr:
-		nom.checkExpression(v.X, ifPos)
-		nom.checkExpression(v.Y, ifPos)
-	case *ast.CallExpr:
-		for _, arg := range v.Args {
-			nom.checkExpression(arg, ifPos)
-		}
-		nom.checkExpression(v.Fun, ifPos)
-		if fun, ok := v.Fun.(*ast.SelectorExpr); ok {
-			nom.checkExpression(fun.X, ifPos)
-		}
-	case *ast.CompositeLit:
-		for _, el := range v.Elts {
-			switch v := el.(type) {
-			case *ast.Ident, *ast.CompositeLit:
-				nom.checkExpression(v, ifPos)
-			case *ast.KeyValueExpr:
-				nom.checkExpression(v.Key, ifPos)
-				nom.checkExpression(v.Value, ifPos)
-			case *ast.SelectorExpr:
-				nom.checkExpression(v.X, ifPos)
-			}
-		}
-	case *ast.FuncLit:
-		for _, el := range v.Body.List {
-			nom.checkStatement(el, ifPos)
-		}
-	case *ast.Ident:
-		if _, ok := nom[v.Name]; !ok || nom[v.Name].isEmponymousKey(ifPos) {
-			return
-		}
-
-		scopeMarker1 := nom[v.Name].getScopeMarkerForPosition(v.Pos())
-
-		delete(nom[v.Name], scopeMarker1)
-
-		for k := range nom {
-			for scopeMarker2 := range nom[k] {
-				if scopeMarker1 == scopeMarker2 {
-					delete(nom[k], scopeMarker2)
-				}
-			}
-		}
-	case *ast.StarExpr:
-		nom.checkExpression(v.X, ifPos)
-	case *ast.IndexExpr:
-		nom.checkExpression(v.X, ifPos)
-		switch index := v.Index.(type) {
-		case *ast.BinaryExpr:
-			nom.checkExpression(index.X, ifPos)
-		case *ast.Ident:
-			nom.checkExpression(index, ifPos)
-		}
-	case *ast.SelectorExpr:
-		nom.checkExpression(v.X, ifPos)
-	case *ast.SliceExpr:
-		nom.checkExpression(v.High, ifPos)
-		nom.checkExpression(v.Low, ifPos)
-		nom.checkExpression(v.X, ifPos)
-	case *ast.TypeAssertExpr:
-		nom.checkExpression(v.X, ifPos)
-	case *ast.UnaryExpr:
-		nom.checkExpression(v.X, ifPos)
-	}
-}
-
-func isAssign(tok token.Token) bool {
-	return (tok == token.ASSIGN ||
-		tok == token.ADD_ASSIGN || tok == token.SUB_ASSIGN ||
-		tok == token.MUL_ASSIGN || tok == token.QUO_ASSIGN || tok == token.REM_ASSIGN ||
-		tok == token.AND_ASSIGN || tok == token.OR_ASSIGN || tok == token.XOR_ASSIGN || tok == token.AND_NOT_ASSIGN ||
-		tok == token.SHL_ASSIGN || tok == token.SHR_ASSIGN)
-}
diff --git a/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go b/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go
deleted file mode 100644
index 0d3793a57e81ef4a680049f96057e25d45ca52ec..0000000000000000000000000000000000000000
--- a/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go
+++ /dev/null
@@ -1,268 +0,0 @@
-package analyzer
-
-import (
-	"go/ast"
-	"go/token"
-	"time"
-
-	"golang.org/x/tools/go/analysis"
-)
-
-// occurrence is a variable occurrence.
-type occurrence struct {
-	declarationPos token.Pos
-	ifStmtPos      token.Pos
-}
-
-func (occ *occurrence) isComplete() bool {
-	return occ.ifStmtPos != token.NoPos && occ.declarationPos != token.NoPos
-}
-
-// scopeMarkeredOccurences is a map of scope markers to variable occurrences.
-type scopeMarkeredOccurences map[int64]occurrence
-
-func (smo scopeMarkeredOccurences) getGreatestMarker() int64 {
-	var maxScopeMarker int64
-
-	for marker := range smo {
-		if marker > maxScopeMarker {
-			maxScopeMarker = marker
-		}
-	}
-	return maxScopeMarker
-}
-
-// find scope marker of the greatest token.Pos that is smaller than provided.
-func (smo scopeMarkeredOccurences) getScopeMarkerForPosition(pos token.Pos) int64 {
-	var m int64
-	var foundPos token.Pos
-
-	for marker, occ := range smo {
-		if occ.declarationPos < pos && occ.declarationPos >= foundPos {
-			m = marker
-			foundPos = occ.declarationPos
-		}
-	}
-	return m
-}
-
-func (smo scopeMarkeredOccurences) isEmponymousKey(pos token.Pos) bool {
-	if pos == token.NoPos {
-		return false
-	}
-
-	for _, occ := range smo {
-		if occ.ifStmtPos == pos {
-			return true
-		}
-	}
-	return false
-}
-
-// namedOccurrenceMap is a map of variable names to scopeMarkeredOccurences.
-type namedOccurrenceMap map[string]scopeMarkeredOccurences
-
-func getNamedOccurrenceMap(fdecl *ast.FuncDecl, pass *analysis.Pass) namedOccurrenceMap {
-	nom := namedOccurrenceMap(map[string]scopeMarkeredOccurences{})
-
-	if fdecl == nil || fdecl.Body == nil {
-		return nom
-	}
-
-	for _, stmt := range fdecl.Body.List {
-		switch v := stmt.(type) {
-		case *ast.AssignStmt:
-			nom.addFromAssignment(pass, v)
-		case *ast.IfStmt:
-			nom.addFromCondition(v)
-			nom.addFromIfClause(v)
-			nom.addFromElseClause(v)
-		}
-	}
-
-	candidates := namedOccurrenceMap(map[string]scopeMarkeredOccurences{})
-
-	for varName, markeredOccs := range nom {
-		for marker, occ := range markeredOccs {
-			if !occ.isComplete() && !nom.isFoundByScopeMarker(marker) {
-				continue
-			}
-			if _, ok := candidates[varName]; !ok {
-				candidates[varName] = scopeMarkeredOccurences{
-					marker: occ,
-				}
-			} else {
-				candidates[varName][marker] = occ
-			}
-		}
-	}
-	return candidates
-}
-
-func (nom namedOccurrenceMap) isFoundByScopeMarker(scopeMarker int64) bool {
-	var i int
-
-	for _, markeredOccs := range nom {
-		for marker := range markeredOccs {
-			if marker == scopeMarker {
-				i++
-			}
-		}
-	}
-	return i >= 2
-}
-
-func (nom namedOccurrenceMap) addFromAssignment(pass *analysis.Pass, assignment *ast.AssignStmt) {
-	if assignment.Tok != token.DEFINE {
-		return
-	}
-
-	scopeMarker := time.Now().UnixNano()
-
-	for i, el := range assignment.Lhs {
-		ident, ok := el.(*ast.Ident)
-		if !ok {
-			continue
-		}
-
-		if ident.Name == "_" || ident.Obj == nil || isUnshortenableAssignment(ident.Obj.Decl) {
-			continue
-		}
-
-		if markeredOccs, ok := nom[ident.Name]; ok {
-			markeredOccs[scopeMarker] = occurrence{
-				declarationPos: ident.Pos(),
-			}
-			nom[ident.Name] = markeredOccs
-		} else {
-			newOcc := occurrence{}
-			if areFlagSettingsSatisfied(pass, assignment, i) {
-				newOcc.declarationPos = ident.Pos()
-			}
-			nom[ident.Name] = scopeMarkeredOccurences{scopeMarker: newOcc}
-		}
-	}
-}
-
-func isUnshortenableAssignment(decl interface{}) bool {
-	assign, ok := decl.(*ast.AssignStmt)
-	if !ok {
-		return false
-	}
-
-	for _, el := range assign.Rhs {
-		u, ok := el.(*ast.UnaryExpr)
-		if !ok {
-			continue
-		}
-
-		if u.Op == token.AND {
-			if _, ok := u.X.(*ast.CompositeLit); ok {
-				return true
-			}
-		}
-	}
-	return false
-}
-
-func areFlagSettingsSatisfied(pass *analysis.Pass, assignment *ast.AssignStmt, i int) bool {
-	lh := assignment.Lhs[i]
-	rh := assignment.Rhs[len(assignment.Rhs)-1]
-
-	if len(assignment.Rhs) == len(assignment.Lhs) {
-		rh = assignment.Rhs[i]
-	}
-
-	if pass.Fset.Position(rh.End()).Line-pass.Fset.Position(rh.Pos()).Line > maxDeclLines {
-		return false
-	}
-	if int(rh.End()-lh.Pos()) > maxDeclChars {
-		return false
-	}
-	return true
-}
-
-func (nom namedOccurrenceMap) addFromCondition(stmt *ast.IfStmt) {
-	switch v := stmt.Cond.(type) {
-	case *ast.BinaryExpr:
-		for _, v := range [2]ast.Expr{v.X, v.Y} {
-			switch e := v.(type) {
-			case *ast.CallExpr:
-				nom.addFromCallExpr(stmt.If, e)
-			case *ast.Ident:
-				nom.addFromIdent(stmt.If, e)
-			case *ast.SelectorExpr:
-				nom.addFromIdent(stmt.If, e.X)
-			}
-		}
-	case *ast.CallExpr:
-		for _, a := range v.Args {
-			switch e := a.(type) {
-			case *ast.Ident:
-				nom.addFromIdent(stmt.If, e)
-			case *ast.CallExpr:
-				nom.addFromCallExpr(stmt.If, e)
-			}
-		}
-	case *ast.Ident:
-		nom.addFromIdent(stmt.If, v)
-	case *ast.UnaryExpr:
-		switch e := v.X.(type) {
-		case *ast.Ident:
-			nom.addFromIdent(stmt.If, e)
-		case *ast.SelectorExpr:
-			nom.addFromIdent(stmt.If, e.X)
-		}
-	}
-}
-
-func (nom namedOccurrenceMap) addFromIfClause(stmt *ast.IfStmt) {
-	nom.addFromBlockStmt(stmt.Body, stmt.If)
-}
-
-func (nom namedOccurrenceMap) addFromElseClause(stmt *ast.IfStmt) {
-	nom.addFromBlockStmt(stmt.Else, stmt.If)
-}
-
-func (nom namedOccurrenceMap) addFromBlockStmt(stmt ast.Stmt, ifPos token.Pos) {
-	blockStmt, ok := stmt.(*ast.BlockStmt)
-	if !ok {
-		return
-	}
-
-	for _, el := range blockStmt.List {
-		exptStmt, ok := el.(*ast.ExprStmt)
-		if !ok {
-			continue
-		}
-
-		if callExpr, ok := exptStmt.X.(*ast.CallExpr); ok {
-			nom.addFromCallExpr(ifPos, callExpr)
-		}
-	}
-}
-
-func (nom namedOccurrenceMap) addFromCallExpr(ifPos token.Pos, callExpr *ast.CallExpr) {
-	for _, arg := range callExpr.Args {
-		nom.addFromIdent(ifPos, arg)
-	}
-}
-
-func (nom namedOccurrenceMap) addFromIdent(ifPos token.Pos, v ast.Expr) {
-	ident, ok := v.(*ast.Ident)
-	if !ok {
-		return
-	}
-
-	if markeredOccs, ok := nom[ident.Name]; ok {
-		marker := nom[ident.Name].getGreatestMarker()
-
-		occ := markeredOccs[marker]
-		if occ.isComplete() {
-			return
-		}
-
-		occ.ifStmtPos = ifPos
-		nom[ident.Name][marker] = occ
-	}
-}
diff --git a/vendor/github.com/ettle/strcase/.golangci.yml b/vendor/github.com/ettle/strcase/.golangci.yml
index 4d31fcc5b42271c3f21fdbf673c3fd98bc8fa7b1..b7ce85d4241bb44cd29afea2772b3c41d499acef 100644
--- a/vendor/github.com/ettle/strcase/.golangci.yml
+++ b/vendor/github.com/ettle/strcase/.golangci.yml
@@ -14,8 +14,6 @@ linters-settings:
       - ifElseChain
       - whyNoLint
       - wrapperFunc
-  golint:
-    min-confidence: 0.5
   govet:
     check-shadowing: true
   lll:
@@ -37,7 +35,6 @@ linters:
   disable-all: true
   enable:
     - bodyclose
-    - deadcode
     - depguard
     - dogsled
     - dupl
@@ -47,26 +44,23 @@ linters:
     - gocyclo
     - gofmt
     - goimports
-    - golint
     - goprintffuncname
     - gosec
     - gosimple
     - govet
     - ineffassign
-    - interfacer
     - lll
     - misspell
     - nakedret
     - nolintlint
+    - revive
     - rowserrcheck
     - staticcheck
-    - structcheck
     - stylecheck
     - typecheck
     - unconvert
     - unparam
     - unused
-    - varcheck
     - whitespace
 
   # don't enable:
diff --git a/vendor/github.com/ettle/strcase/.readme.tmpl b/vendor/github.com/ettle/strcase/.readme.tmpl
index 135765c40a50fc678d82198ada61ec35f3cf7b3f..4d7a894f0e6efcafa68749f8702d080db1300929 100644
--- a/vendor/github.com/ettle/strcase/.readme.tmpl
+++ b/vendor/github.com/ettle/strcase/.readme.tmpl
@@ -16,10 +16,10 @@ Convert strings to `snake_case`, `camelCase`, `PascalCase`, `kebab-case` and mor
 ## <a name="pkg-index">Index</a>{{if .Consts}}
 * [Constants](#pkg-constants){{end}}{{if .Vars}}
 * [Variables](#pkg-variables){{end}}{{- range .Funcs -}}{{$name_html := html .Name}}
-* [{{node_html $ .Decl false | sanitize}}](#{{$name_html}}){{- end}}{{- range .Types}}{{$tname_html := html .Name}}
-* [type {{$tname_html}}](#{{$tname_html}}){{- range .Funcs}}{{$name_html := html .Name}}
-  * [{{node_html $ .Decl false | sanitize}}](#{{$name_html}}){{- end}}{{- range .Methods}}{{$name_html := html .Name}}
-  * [{{node_html $ .Decl false | sanitize}}](#{{$tname_html}}.{{$name_html}}){{- end}}{{- end}}{{- if $.Notes}}{{- range $marker, $item := $.Notes}}
+* [{{node_html $ .Decl false | sanitize}}](#func-{{$name_html}}){{- end}}{{- range .Types}}{{$tname_html := html .Name}}
+* [type {{$tname_html}}](#type-{{$tname_html}}){{- range .Funcs}}{{$name_html := html .Name}}
+  * [{{node_html $ .Decl false | sanitize}}](#func-{{$name_html}}){{- end}}{{- range .Methods}}{{$name_html := html .Name}}
+  * [{{node_html $ .Decl false | sanitize}}](#type-{{$tname_html}}.{{$name_html}}){{- end}}{{- end}}{{- if $.Notes}}{{- range $marker, $item := $.Notes}}
 * [{{noteTitle $marker | html}}s](#pkg-note-{{$marker}}){{end}}{{end}}
 {{if $.Examples}}
 #### <a name="pkg-examples">Examples</a>{{- range $.Examples}}
diff --git a/vendor/github.com/ettle/strcase/Makefile b/vendor/github.com/ettle/strcase/Makefile
index 462f8b473a7dfbe9214628bc00390c2643849dc1..ac98b4aa54a214edd835cd18f7cd76dbb666fced 100644
--- a/vendor/github.com/ettle/strcase/Makefile
+++ b/vendor/github.com/ettle/strcase/Makefile
@@ -1,16 +1,19 @@
 .PHONY: benchmark docs lint test
 
 docs:
-	which godoc2ghmd || ( go get github.com/DevotedHealth/godoc2ghmd && go mod tidy )
+	which godoc2ghmd || go get github.com/DevotedHealth/godoc2ghmd
 	godoc2ghmd -template .readme.tmpl github.com/ettle/strcase > README.md
+	go mod tidy
 
 test:
 	go test -cover ./...
 
 lint:
-	which golangci-lint || ( go get github.com/golangci/golangci-lint/cmd/golangci-lint@v1.27.0 && go mod tidy )
+	which golangci-lint || go get github.com/golangci/golangci-lint/cmd/golangci-lint@v1.50.1
 	golangci-lint run
 	golangci-lint run benchmark/*.go
+	go mod tidy
 
 benchmark:
-	cd benchmark && go test -bench=. -test.benchmem && go mod tidy
+	cd benchmark && go test -bench=. -test.benchmem
+	go mod tidy
diff --git a/vendor/github.com/ettle/strcase/README.md b/vendor/github.com/ettle/strcase/README.md
index ee165e3e5e8d61770ef86c26b228bb68a36195fe..a984da80da4521c4e02a8aee290d39eef3575b64 100644
--- a/vendor/github.com/ettle/strcase/README.md
+++ b/vendor/github.com/ettle/strcase/README.md
@@ -32,21 +32,24 @@ Example usage
 	strcase.ToSnake("FOOBar")          // foo_bar
 	
 	// Support Go initialisms
-	strcase.ToGoCamel("http_response") // HTTPResponse
+	strcase.ToGoPascal("http_response") // HTTPResponse
 	
 	// Specify case and delimiter
 	strcase.ToCase("HelloWorld", strcase.UpperCase, '.') // HELLO.WORLD
 
-### Why this package
+## Why this package
+
 String strcase is pretty straight forward and there are a number of methods to
 do it. This package is fully featured, more customizable, better tested, and
-faster* than other packages and what you would probably whip up yourself.
+faster than other packages and what you would probably whip up yourself.
 
 ### Unicode support
+
 We work for with unicode strings and pay very little performance penalty for it
 as we optimized for the common use case of ASCII only strings.
 
 ### Customization
+
 You can create a custom caser that changes the behavior to what you want. This
 customization also reduces the pressure for us to change the default behavior
 which means that things are more stable for everyone involved.  The goal is to
@@ -71,19 +74,22 @@ make the common path easy and fast, while making the uncommon path possible.
 	 assert.Equal(t, "http_200", c.ToSnake("http200"))
 
 ### Initialism support
+
 By default, we use the golint intialisms list. You can customize and override
 the initialisms if you wish to add additional ones, such as "SSL" or "CMS" or
 domain specific ones to your industry.
 
 
-	ToGoCamel("http_response") // HTTPResponse
+	ToGoPascal("http_response") // HTTPResponse
 	ToGoSnake("http_response") // HTTP_response
 
 ### Test coverage
+
 We have a wide ranging test suite to make sure that we understand our behavior.
 Test coverage isn't everything, but we aim for 100% coverage.
 
 ### Fast
+
 Optimized to reduce memory allocations with Builder. Benchmarked and optimized
 around common cases.
 
@@ -96,56 +102,57 @@ Hopefully I was fair to each library and happy to rerun benchmarks differently
 or reword my commentary based on suggestions or updates.
 
 
-	// This package
-	// Go intialisms and custom casers are slower
-	BenchmarkToTitle-4                992491              1559 ns/op              32 B/op          1 allocs/op
-	BenchmarkToSnake-4               1000000              1475 ns/op              32 B/op          1 allocs/op
-	BenchmarkToSNAKE-4               1000000              1609 ns/op              32 B/op          1 allocs/op
-	BenchmarkToGoSnake-4              275010              3697 ns/op              44 B/op          4 allocs/op
-	BenchmarkToCustomCaser-4          342704              4191 ns/op              56 B/op          4 allocs/op
+	// This package - faster than almost all libraries
+	// Initialisms are more complicated and slightly slower, but still fast
+	BenchmarkToTitle-96                      9617142               125.7 ns/op            16 B/op          1 allocs/op
+	BenchmarkToSnake-96                     10659919               120.7 ns/op            16 B/op          1 allocs/op
+	BenchmarkToSNAKE-96                      9018282               126.4 ns/op            16 B/op          1 allocs/op
+	BenchmarkToGoSnake-96                    4903687               254.5 ns/op            26 B/op          4 allocs/op
+	BenchmarkToCustomCaser-96                4434489               265.0 ns/op            28 B/op          4 allocs/op
 	
 	// Segment has very fast snake case and camel case libraries
 	// No features or customization, but very very fast
-	BenchmarkSegment-4               1303809               938 ns/op              16 B/op          1 allocs/op
+	BenchmarkSegment-96                     33625734                35.54 ns/op           16 B/op          1 allocs/op
 	
-	// Stdlib strings.Title for comparison, even though it only splits on spaces
-	BenchmarkToTitleStrings-4        1213467              1164 ns/op              16 B/op          1 allocs/op
+	// Iancoleman has gotten some performance improvements, but remains
+	// without unicode support and lacks fine-grained customization
+	BenchmarkToSnakeIan-96                  13141522                92.99 ns/op           16 B/op          1 allocs/op
+	
+	// Stdlib strings.Title is deprecated; using golang.org/x.text
+	BenchmarkGolangOrgXTextCases-96          4665676               262.5 ns/op           272 B/op          2 allocs/op
 	
 	// Other libraries or code snippets
 	// - Most are slower, by up to an order of magnitude
-	// - None support initialisms or customization
+	// - No support for initialisms or customization
 	// - Some generate only camelCase or snake_case
 	// - Many lack unicode support
-	BenchmarkToSnakeStoewer-4         973200              2075 ns/op              64 B/op          2 allocs/op
+	BenchmarkToSnakeStoewer-96               8095468               148.9 ns/op            64 B/op          2 allocs/op
 	// Copying small rune arrays is slow
-	BenchmarkToSnakeSiongui-4         264315              4229 ns/op              48 B/op         10 allocs/op
-	BenchmarkGoValidator-4            206811              5152 ns/op             184 B/op          9 allocs/op
+	BenchmarkToSnakeSiongui-96               2912593               401.7 ns/op           112 B/op         19 allocs/op
+	BenchmarkGoValidator-96                  3493800               342.6 ns/op           184 B/op          9 allocs/op
 	// String alloction is slow
-	BenchmarkToSnakeFatih-4            82675             12280 ns/op             392 B/op         26 allocs/op
-	BenchmarkToSnakeIanColeman-4       83276             13903 ns/op             145 B/op         13 allocs/op
+	BenchmarkToSnakeFatih-96                 1282648               945.1 ns/op           616 B/op         26 allocs/op
 	// Regexp is slow
-	BenchmarkToSnakeGolangPrograms-4   74448             18586 ns/op             176 B/op         11 allocs/op
+	BenchmarkToSnakeGolangPrograms-96         778674              1495 ns/op             227 B/op         11 allocs/op
 	
 	// These results aren't a surprise - my initial version of this library was
 	// painfully slow. I think most of us, without spending some time with
 	// profilers and benchmarks, would write also something on the slower side.
 
-### Why not this package
+### Zero dependencies
+
+That's right - zero. We only import the Go standard library. No hassles with
+dependencies, licensing, security alerts.
+
+## Why not this package
+
 If every nanosecond matters and this is used in a tight loop, use segment.io's
 libraries (<a href="https://github.com/segmentio/go-snakecase">https://github.com/segmentio/go-snakecase</a> and
 <a href="https://github.com/segmentio/go-camelcase">https://github.com/segmentio/go-camelcase</a>). They lack features, but make up for
-it by being blazing fast. Alternatively, if you need your code to work slightly
-differently, fork them and tailor it for your use case.
-
-If you don't like having external imports, I get it. This package only imports
-packages for testing, otherwise it only uses the standard library. If that's
-not enough, you can use this repo as the foundation for your own. MIT Licensed.
+it by being blazing fast.
 
-This package is still relatively new and while I've used it for a while
-personally, it doesn't have the miles that other packages do. I've tested this
-code agains't their test cases to make sure that there aren't any surprises.
+## Migrating from other packages
 
-### Migrating from other packages
 If you are migrating from from another package, you may find slight differences
 in output. To reduce the delta, you may find it helpful to use the following
 custom casers to mimic the behavior of the other package.
@@ -161,32 +168,32 @@ custom casers to mimic the behavior of the other package.
 
 
 ## <a name="pkg-index">Index</a>
-* [func ToCamel(s string) string](#ToCamel)
-* [func ToCase(s string, wordCase WordCase, delimiter rune) string](#ToCase)
-* [func ToGoCamel(s string) string](#ToGoCamel)
-* [func ToGoCase(s string, wordCase WordCase, delimiter rune) string](#ToGoCase)
-* [func ToGoKebab(s string) string](#ToGoKebab)
-* [func ToGoPascal(s string) string](#ToGoPascal)
-* [func ToGoSnake(s string) string](#ToGoSnake)
-* [func ToKEBAB(s string) string](#ToKEBAB)
-* [func ToKebab(s string) string](#ToKebab)
-* [func ToPascal(s string) string](#ToPascal)
-* [func ToSNAKE(s string) string](#ToSNAKE)
-* [func ToSnake(s string) string](#ToSnake)
-* [type Caser](#Caser)
-  * [func NewCaser(goInitialisms bool, initialismOverrides map[string]bool, splitFn SplitFn) *Caser](#NewCaser)
-  * [func (c *Caser) ToCamel(s string) string](#Caser.ToCamel)
-  * [func (c *Caser) ToCase(s string, wordCase WordCase, delimiter rune) string](#Caser.ToCase)
-  * [func (c *Caser) ToKEBAB(s string) string](#Caser.ToKEBAB)
-  * [func (c *Caser) ToKebab(s string) string](#Caser.ToKebab)
-  * [func (c *Caser) ToPascal(s string) string](#Caser.ToPascal)
-  * [func (c *Caser) ToSNAKE(s string) string](#Caser.ToSNAKE)
-  * [func (c *Caser) ToSnake(s string) string](#Caser.ToSnake)
-* [type SplitAction](#SplitAction)
-* [type SplitFn](#SplitFn)
-  * [func NewSplitFn(delimiters []rune, splitOptions ...SplitOption) SplitFn](#NewSplitFn)
-* [type SplitOption](#SplitOption)
-* [type WordCase](#WordCase)
+* [func ToCamel(s string) string](#func-ToCamel)
+* [func ToCase(s string, wordCase WordCase, delimiter rune) string](#func-ToCase)
+* [func ToGoCamel(s string) string](#func-ToGoCamel)
+* [func ToGoCase(s string, wordCase WordCase, delimiter rune) string](#func-ToGoCase)
+* [func ToGoKebab(s string) string](#func-ToGoKebab)
+* [func ToGoPascal(s string) string](#func-ToGoPascal)
+* [func ToGoSnake(s string) string](#func-ToGoSnake)
+* [func ToKEBAB(s string) string](#func-ToKEBAB)
+* [func ToKebab(s string) string](#func-ToKebab)
+* [func ToPascal(s string) string](#func-ToPascal)
+* [func ToSNAKE(s string) string](#func-ToSNAKE)
+* [func ToSnake(s string) string](#func-ToSnake)
+* [type Caser](#type-Caser)
+  * [func NewCaser(goInitialisms bool, initialismOverrides map[string]bool, splitFn SplitFn) *Caser](#func-NewCaser)
+  * [func (c *Caser) ToCamel(s string) string](#type-Caser.ToCamel)
+  * [func (c *Caser) ToCase(s string, wordCase WordCase, delimiter rune) string](#type-Caser.ToCase)
+  * [func (c *Caser) ToKEBAB(s string) string](#type-Caser.ToKEBAB)
+  * [func (c *Caser) ToKebab(s string) string](#type-Caser.ToKebab)
+  * [func (c *Caser) ToPascal(s string) string](#type-Caser.ToPascal)
+  * [func (c *Caser) ToSNAKE(s string) string](#type-Caser.ToSNAKE)
+  * [func (c *Caser) ToSnake(s string) string](#type-Caser.ToSnake)
+* [type SplitAction](#type-SplitAction)
+* [type SplitFn](#type-SplitFn)
+  * [func NewSplitFn(delimiters []rune, splitOptions ...SplitOption) SplitFn](#func-NewSplitFn)
+* [type SplitOption](#type-SplitOption)
+* [type WordCase](#type-WordCase)
 
 
 
@@ -201,7 +208,7 @@ Also known as lowerCamelCase or mixedCase.
 
 
 
-## <a name="ToCase">func</a> [ToCase](./strcase.go#L70)
+## <a name="ToCase">func</a> [ToCase](./strcase.go#L72)
 ``` go
 func ToCase(s string, wordCase WordCase, delimiter rune) string
 ```
@@ -209,18 +216,20 @@ ToCase returns words in given case and delimiter.
 
 
 
-## <a name="ToGoCamel">func</a> [ToGoCamel](./strcase.go#L65)
+## <a name="ToGoCamel">func</a> [ToGoCamel](./strcase.go#L67)
 ``` go
 func ToGoCamel(s string) string
 ```
 ToGoCamel returns words in camelCase (capitalized words concatenated together, with first word lower case).
 Also known as lowerCamelCase or mixedCase.
 
-Respects Go's common initialisms (e.g. httpResponse -> HTTPResponse).
+Respects Go's common initialisms, but first word remains lowercased which is
+important for code generator use cases (e.g. toJson -> toJSON, httpResponse
+-> httpResponse).
 
 
 
-## <a name="ToGoCase">func</a> [ToGoCase](./strcase.go#L77)
+## <a name="ToGoCase">func</a> [ToGoCase](./strcase.go#L79)
 ``` go
 func ToGoCase(s string, wordCase WordCase, delimiter rune) string
 ```
@@ -415,7 +424,7 @@ ToSnake returns words in snake_case (lower case words with underscores).
 
 
 
-## <a name="SplitAction">type</a> [SplitAction](./split.go#L110)
+## <a name="SplitAction">type</a> [SplitAction](./split.go#L111)
 ``` go
 type SplitAction int
 ```
@@ -457,7 +466,7 @@ SplitFn defines how to split a string into words
 
 
 
-### <a name="NewSplitFn">func</a> [NewSplitFn](./split.go#L14-L17)
+### <a name="NewSplitFn">func</a> [NewSplitFn](./split.go#L15-L18)
 ``` go
 func NewSplitFn(
     delimiters []rune,
@@ -469,13 +478,12 @@ NewSplitFn returns a SplitFn based on the options provided.
 NewSplitFn covers the majority of common options that other strcase
 libraries provide and should allow you to simply create a custom caser.
 For more complicated use cases, feel free to write your own SplitFn
-nolint:gocyclo
 
 
 
 
 
-## <a name="SplitOption">type</a> [SplitOption](./split.go#L93)
+## <a name="SplitOption">type</a> [SplitOption](./split.go#L94)
 ``` go
 type SplitOption int
 ```
@@ -524,6 +532,9 @@ const (
     // TitleCase - Only first letter upper cased (Example)
     TitleCase
     // CamelCase - TitleCase except lower case first word (exampleText)
+    // Notably, even if the first word is an initialism, it will be lower
+    // cased. This is important for code generators where capital letters
+    // mean exported functions. i.e. jsonString(), not JSONString()
     CamelCase
 )
 ```
diff --git a/vendor/github.com/ettle/strcase/assert.go b/vendor/github.com/ettle/strcase/assert.go
new file mode 100644
index 0000000000000000000000000000000000000000..09344e40f2b2a53ce18715b5edcade68ab0474ea
--- /dev/null
+++ b/vendor/github.com/ettle/strcase/assert.go
@@ -0,0 +1,24 @@
+package strcase
+
+// We use a lightweight replacement for testify/assert to reduce dependencies
+
+// testingT interface allows us to test our assert functions
+type testingT interface {
+	Logf(format string, args ...interface{})
+	Fail()
+}
+
+// assertTrue will fail if the value is not true
+func assertTrue(t testingT, value bool) {
+	if !value {
+		t.Fail()
+	}
+}
+
+// assertEqual will fail if the two strings are not equal
+func assertEqual(t testingT, expected, actual string) {
+	if expected != actual {
+		t.Logf("Expected: %s Actual: %s", expected, actual)
+		t.Fail()
+	}
+}
diff --git a/vendor/github.com/ettle/strcase/caser.go b/vendor/github.com/ettle/strcase/caser.go
index 891a6718970ad9f142d3190be67dfc8dd971bb36..2e7eb955ba587254bf5ecad3c767db6074491dbe 100644
--- a/vendor/github.com/ettle/strcase/caser.go
+++ b/vendor/github.com/ettle/strcase/caser.go
@@ -10,17 +10,17 @@ type Caser struct {
 //
 // A Caser should be created when you want fine grained control over how the words are split.
 //
-//  Notes on function arguments
+//	Notes on function arguments
 //
-//  goInitialisms: Whether to use Golint's intialisms
+//	goInitialisms: Whether to use Golint's intialisms
 //
-//  initialismOverrides: A mapping of extra initialisms
-//  Keys must be in ALL CAPS. Merged with Golint's if goInitialisms is set.
-//  Setting a key to false will override Golint's.
+//	initialismOverrides: A mapping of extra initialisms
+//	Keys must be in ALL CAPS. Merged with Golint's if goInitialisms is set.
+//	Setting a key to false will override Golint's.
 //
-//  splitFn: How to separate words
-//  Override the default split function. Consider using NewSplitFn to
-//  configure one instead of writing your own.
+//	splitFn: How to separate words
+//	Override the default split function. Consider using NewSplitFn to
+//	configure one instead of writing your own.
 func NewCaser(goInitialisms bool, initialismOverrides map[string]bool, splitFn SplitFn) *Caser {
 	c := &Caser{
 		initialisms: golintInitialisms,
diff --git a/vendor/github.com/ettle/strcase/convert.go b/vendor/github.com/ettle/strcase/convert.go
index 70fedb1449a7842b48330adb890438d2fa078ae1..cb901d079d6f82e5a844127681e0f3d9b6102b69 100644
--- a/vendor/github.com/ettle/strcase/convert.go
+++ b/vendor/github.com/ettle/strcase/convert.go
@@ -29,6 +29,7 @@ const (
 // Case 2: UpperCase words, which don't need to support initialisms since everything is in upper case
 
 // convertWithoutInitialims only works for to UpperCase and LowerCase
+//
 //nolint:gocyclo
 func convertWithoutInitialisms(input string, delimiter rune, wordCase WordCase) string {
 	input = strings.TrimSpace(input)
@@ -38,7 +39,7 @@ func convertWithoutInitialisms(input string, delimiter rune, wordCase WordCase)
 	}
 
 	var b strings.Builder
-	b.Grow(len(input) * 2) // In case we need to write delimiters where they weren't before
+	b.Grow(len(input) + 4) // In case we need to write delimiters where they weren't before
 
 	var prev, curr rune
 	next := runes[0] // 0 length will have already returned so safe to index
@@ -90,13 +91,14 @@ func convertWithoutInitialisms(input string, delimiter rune, wordCase WordCase)
 			// Must be original case
 			b.WriteRune(curr)
 		}
-		inWord = inWord || true
+		inWord = true
 	}
 	return b.String()
 }
 
 // convertWithGoInitialisms changes a input string to a certain case with a
 // delimiter, respecting go initialisms but not skip runes
+//
 //nolint:gocyclo
 func convertWithGoInitialisms(input string, delimiter rune, wordCase WordCase) string {
 	input = strings.TrimSpace(input)
@@ -106,7 +108,7 @@ func convertWithGoInitialisms(input string, delimiter rune, wordCase WordCase) s
 	}
 
 	var b strings.Builder
-	b.Grow(len(input) * 2) // In case we need to write delimiters where they weren't before
+	b.Grow(len(input) + 4) // In case we need to write delimiters where they weren't before
 
 	firstWord := true
 
@@ -122,10 +124,15 @@ func convertWithGoInitialisms(input string, delimiter rune, wordCase WordCase) s
 		// Don't bother with initialisms if the word is longer than 5
 		// A quick proxy to avoid the extra memory allocations
 		if end-start <= 5 {
-			key := strings.ToUpper(string(runes[start:end]))
-			if golintInitialisms[key] {
+			var word strings.Builder
+			word.Grow(end - start)
+			for i := start; i < end; i++ {
+				word.WriteRune(toUpper(runes[i]))
+			}
+			w := word.String()
+			if golintInitialisms[w] {
 				if !firstWord || wordCase != CamelCase {
-					b.WriteString(key)
+					b.WriteString(w)
 					firstWord = false
 					return
 				}
@@ -188,6 +195,7 @@ func convertWithGoInitialisms(input string, delimiter rune, wordCase WordCase) s
 
 // convert changes a input string to a certain case with a delimiter,
 // respecting arbitrary initialisms and skip characters
+//
 //nolint:gocyclo
 func convert(input string, fn SplitFn, delimiter rune, wordCase WordCase,
 	initialisms map[string]bool) string {
@@ -198,7 +206,7 @@ func convert(input string, fn SplitFn, delimiter rune, wordCase WordCase,
 	}
 
 	var b strings.Builder
-	b.Grow(len(input) * 2) // In case we need to write delimiters where they weren't before
+	b.Grow(len(input) + 4) // In case we need to write delimiters where they weren't before
 
 	firstWord := true
 	var skipIndexes []int
@@ -221,13 +229,14 @@ func convert(input string, fn SplitFn, delimiter rune, wordCase WordCase,
 		// I'm open to it if there is a use case
 		if initialisms != nil {
 			var word strings.Builder
+			word.Grow(end - start)
 			for i := start; i < end; i++ {
 				word.WriteRune(toUpper(runes[i]))
 			}
-			key := word.String()
-			if initialisms[key] {
+			w := word.String()
+			if initialisms[w] {
 				if !firstWord || wordCase != CamelCase {
-					b.WriteString(key)
+					b.WriteString(w)
 					firstWord = false
 					return
 				}
diff --git a/vendor/github.com/ettle/strcase/doc.go b/vendor/github.com/ettle/strcase/doc.go
index b898a4e45f15bd5f06ef8bb5e1d1b68e828e6cb9..c3bf14a8f5a190c3651a2a25687739d135f1109f 100644
--- a/vendor/github.com/ettle/strcase/doc.go
+++ b/vendor/github.com/ettle/strcase/doc.go
@@ -2,78 +2,78 @@
 Package strcase is a package for converting strings into various word cases
 (e.g. snake_case, camelCase)
 
- go get -u github.com/ettle/strcase
+	go get -u github.com/ettle/strcase
 
 Example usage
 
- strcase.ToSnake("Hello World")     // hello_world
- strcase.ToSNAKE("Hello World")     // HELLO_WORLD
+	strcase.ToSnake("Hello World")     // hello_world
+	strcase.ToSNAKE("Hello World")     // HELLO_WORLD
 
- strcase.ToKebab("helloWorld")      // hello-world
- strcase.ToKEBAB("helloWorld")      // HELLO-WORLD
+	strcase.ToKebab("helloWorld")      // hello-world
+	strcase.ToKEBAB("helloWorld")      // HELLO-WORLD
 
- strcase.ToPascal("hello-world")    // HelloWorld
- strcase.ToCamel("hello-world")     // helloWorld
+	strcase.ToPascal("hello-world")    // HelloWorld
+	strcase.ToCamel("hello-world")     // helloWorld
 
- // Handle odd cases
- strcase.ToSnake("FOOBar")          // foo_bar
+	// Handle odd cases
+	strcase.ToSnake("FOOBar")          // foo_bar
 
- // Support Go initialisms
- strcase.ToGoPascal("http_response") // HTTPResponse
+	// Support Go initialisms
+	strcase.ToGoPascal("http_response") // HTTPResponse
 
- // Specify case and delimiter
- strcase.ToCase("HelloWorld", strcase.UpperCase, '.') // HELLO.WORLD
+	// Specify case and delimiter
+	strcase.ToCase("HelloWorld", strcase.UpperCase, '.') // HELLO.WORLD
 
-Why this package
+## Why this package
 
 String casing is pretty straightforward and there are a number of methods to
 do it. This package is fully featured, more customizable, better tested, and
-faster* than other packages and what you would probably whip up yourself.
+faster than other packages and what you would probably whip up yourself.
 
-Unicode support
+### Unicode support
 
 We work with unicode strings and pay very little performance penalty for it
 as we optimized for the common use case of ASCII only strings.
 
-Customization
+### Customization
 
 You can create a custom caser that changes the behavior to what you want. This
 customization also reduces the pressure for us to change the default behavior
 which means that things are more stable for everyone involved.  The goal is to
 make the common path easy and fast, while making the uncommon path possible.
 
- c := NewCaser(
-	// Use Go's default initialisms e.g. ID, HTML
- 	true,
-	// Override initialisms (e.g. don't initialize HTML but initialize SSL
- 	map[string]bool{"SSL": true, "HTML": false},
-	// Write your own custom SplitFn
-	//
- 	NewSplitFn(
- 		[]rune{'*', '.', ','},
- 		SplitCase,
- 		SplitAcronym,
- 		PreserveNumberFormatting,
- 		SplitBeforeNumber,
- 		SplitAfterNumber,
- 	))
- assert.Equal(t, "http_200", c.ToSnake("http200"))
-
-Initialism support
+	 c := NewCaser(
+		// Use Go's default initialisms e.g. ID, HTML
+	 	true,
+		// Override initialisms (e.g. don't treat HTML as an initialism, but do treat SSL as one)
+	 	map[string]bool{"SSL": true, "HTML": false},
+		// Write your own custom SplitFn
+		//
+	 	NewSplitFn(
+	 		[]rune{'*', '.', ','},
+	 		SplitCase,
+	 		SplitAcronym,
+	 		PreserveNumberFormatting,
+	 		SplitBeforeNumber,
+	 		SplitAfterNumber,
+	 	))
+	 assert.Equal(t, "http_200", c.ToSnake("http200"))
+
+### Initialism support
 
 By default, we use the golint initialisms list. You can customize and override
 the initialisms if you wish to add additional ones, such as "SSL" or "CMS" or
domain-specific ones for your industry.
 
-  ToGoPascal("http_response") // HTTPResponse
-  ToGoSnake("http_response") // HTTP_response
+	ToGoPascal("http_response") // HTTPResponse
+	ToGoSnake("http_response") // HTTP_response
 
-Test coverage
+### Test coverage
 
 We have a wide ranging test suite to make sure that we understand our behavior.
 Test coverage isn't everything, but we aim for 100% coverage.
 
-Fast
+### Fast
 
 Optimized to reduce memory allocations with Builder. Benchmarked and optimized
 around common cases.
@@ -86,70 +86,65 @@ common cases have a large performance impact.
 Hopefully I was fair to each library, and I'm happy to rerun benchmarks differently
 or reword my commentary based on suggestions or updates.
 
-  // This package - faster then almost all libraries
-  // Initialisms are more complicated and slightly slower, but still faster then other libraries that do less
-  BenchmarkToTitle-4                       7821166               221 ns/op              32 B/op          1 allocs/op
-  BenchmarkToSnake-4                       9378589               202 ns/op              32 B/op          1 allocs/op
-  BenchmarkToSNAKE-4                       6174453               223 ns/op              32 B/op          1 allocs/op
-  BenchmarkToGoSnake-4                     3114266               434 ns/op              44 B/op          4 allocs/op
-  BenchmarkToCustomCaser-4                 2973855               448 ns/op              56 B/op          4 allocs/op
-
-  // Segment has very fast snake case and camel case libraries
-  // No features or customization, but very very fast
-  BenchmarkSegment-4                      24003495                64.9 ns/op            16 B/op          1 allocs/op
-
-  // Stdlib strings.Title for comparison, even though it only splits on spaces
-  BenchmarkToTitleStrings-4               11259376               161 ns/op              16 B/op          1 allocs/op
-
-  // Other libraries or code snippets
-  // - Most are slower, by up to an order of magnitude
-  // - None support initialisms or customization
-  // - Some generate only camelCase or snake_case
-  // - Many lack unicode support
-  BenchmarkToSnakeStoewer-4                7103268               297 ns/op              64 B/op          2 allocs/op
-  // Copying small rune arrays is slow
-  BenchmarkToSnakeSiongui-4                3710768               413 ns/op              48 B/op         10 allocs/op
-  BenchmarkGoValidator-4                   2416479              1049 ns/op             184 B/op          9 allocs/op
-  // String alloction is slow
-  BenchmarkToSnakeFatih-4                  1000000              2407 ns/op             624 B/op         26 allocs/op
-  BenchmarkToSnakeIanColeman-4             1005766              1426 ns/op             160 B/op         13 allocs/op
-  // Regexp is slow
-  BenchmarkToSnakeGolangPrograms-4          614689              2237 ns/op             225 B/op         11 allocs/op
-
-
-
-  // These results aren't a surprise - my initial version of this library was
-  // painfully slow. I think most of us, without spending some time with
-  // profilers and benchmarks, would write also something on the slower side.
-
-
-Why not this package
+	// This package - faster than almost all libraries
+	// Initialisms are more complicated and slightly slower, but still fast
+	BenchmarkToTitle-96                      9617142               125.7 ns/op            16 B/op          1 allocs/op
+	BenchmarkToSnake-96                     10659919               120.7 ns/op            16 B/op          1 allocs/op
+	BenchmarkToSNAKE-96                      9018282               126.4 ns/op            16 B/op          1 allocs/op
+	BenchmarkToGoSnake-96                    4903687               254.5 ns/op            26 B/op          4 allocs/op
+	BenchmarkToCustomCaser-96                4434489               265.0 ns/op            28 B/op          4 allocs/op
+
+	// Segment has very fast snake case and camel case libraries
+	// No features or customization, but very very fast
+	BenchmarkSegment-96                     33625734                35.54 ns/op           16 B/op          1 allocs/op
+
+	// Iancoleman has gotten some performance improvements, but remains
+	// without unicode support and lacks fine-grained customization
+	BenchmarkToSnakeIan-96                  13141522                92.99 ns/op           16 B/op          1 allocs/op
+
+	// Stdlib strings.Title is deprecated; using golang.org/x/text
+	BenchmarkGolangOrgXTextCases-96          4665676               262.5 ns/op           272 B/op          2 allocs/op
+
+	// Other libraries or code snippets
+	// - Most are slower, by up to an order of magnitude
+	// - No support for initialisms or customization
+	// - Some generate only camelCase or snake_case
+	// - Many lack unicode support
+	BenchmarkToSnakeStoewer-96               8095468               148.9 ns/op            64 B/op          2 allocs/op
+	// Copying small rune arrays is slow
+	BenchmarkToSnakeSiongui-96               2912593               401.7 ns/op           112 B/op         19 allocs/op
+	BenchmarkGoValidator-96                  3493800               342.6 ns/op           184 B/op          9 allocs/op
+	// String allocation is slow
+	BenchmarkToSnakeFatih-96                 1282648               945.1 ns/op           616 B/op         26 allocs/op
+	// Regexp is slow
+	BenchmarkToSnakeGolangPrograms-96         778674              1495 ns/op             227 B/op         11 allocs/op
+
+	// These results aren't a surprise - my initial version of this library was
+	// painfully slow. I think most of us, without spending some time with
+	// profilers and benchmarks, would also write something on the slower side.
+
+### Zero dependencies
+
+That's right - zero. We only import the Go standard library. No hassles with
+dependencies, licensing, or security alerts.
+
+## Why not this package
 
 If every nanosecond matters and this is used in a tight loop, use segment.io's
 libraries (https://github.com/segmentio/go-snakecase and
 https://github.com/segmentio/go-camelcase). They lack features, but make up for
-it by being blazing fast. Alternatively, if you need your code to work slightly
-differently, fork them and tailor it for your use case.
-
-If you don't like having external imports, I get it. This package only imports
-packages for testing, otherwise it only uses the standard library. If that's
-not enough, you can use this repo as the foundation for your own. MIT Licensed.
+it by being blazing fast.
 
-This package is still relatively new and while I've used it for a while
-personally, it doesn't have the miles that other packages do. I've tested this
-code agains't their test cases to make sure that there aren't any surprises.
-
-Migrating from other packages
+## Migrating from other packages
 
 If you are migrating from another package, you may find slight differences
 in output. To reduce the delta, you may find it helpful to use the following
 custom casers to mimic the behavior of the other package.
 
-  // From https://github.com/iancoleman/strcase
-  var c = NewCaser(false, nil, NewSplitFn([]rune{'_', '-', '.'}, SplitCase, SplitAcronym, SplitBeforeNumber))
-
-  // From https://github.com/stoewer/go-strcase
-  var c = NewCaser(false, nil, NewSplitFn([]rune{'_', '-'}, SplitCase), SplitAcronym)
+	// From https://github.com/iancoleman/strcase
+	var c = NewCaser(false, nil, NewSplitFn([]rune{'_', '-', '.'}, SplitCase, SplitAcronym, SplitBeforeNumber))
 
+	// From https://github.com/stoewer/go-strcase
+	var c = NewCaser(false, nil, NewSplitFn([]rune{'_', '-'}, SplitCase, SplitAcronym))
 */
 package strcase
diff --git a/vendor/github.com/ettle/strcase/split.go b/vendor/github.com/ettle/strcase/split.go
index 84381106bc698065e5a784d5d61090bd7fc80eab..32bc29759a7aa03adb101a1cf32492ef4fb8b20b 100644
--- a/vendor/github.com/ettle/strcase/split.go
+++ b/vendor/github.com/ettle/strcase/split.go
@@ -10,6 +10,7 @@ type SplitFn func(prev, curr, next rune) SplitAction
 // NewSplitFn covers the majority of common options that other strcase
 // libraries provide and should allow you to simply create a custom caser.
 // For more complicated use cases, feel free to write your own SplitFn
+//
 //nolint:gocyclo
 func NewSplitFn(
 	delimiters []rune,
diff --git a/vendor/github.com/fatih/color/color.go b/vendor/github.com/fatih/color/color.go
index c4234287dcc0aaba32bb6915711531e67d214486..81094e87c56cde4b772176c841b84d2b9a713a26 100644
--- a/vendor/github.com/fatih/color/color.go
+++ b/vendor/github.com/fatih/color/color.go
@@ -269,7 +269,7 @@ func (c *Color) Printf(format string, a ...interface{}) (n int, err error) {
 // On Windows, users should wrap w with colorable.NewColorable() if w is of
 // type *os.File.
 func (c *Color) Fprintln(w io.Writer, a ...interface{}) (n int, err error) {
-	return fmt.Fprintln(w, c.wrap(fmt.Sprint(a...)))
+	return fmt.Fprintln(w, c.wrap(sprintln(a...)))
 }
 
 // Println formats using the default formats for its operands and writes to
@@ -278,7 +278,7 @@ func (c *Color) Fprintln(w io.Writer, a ...interface{}) (n int, err error) {
 // encountered. This is the standard fmt.Print() method wrapped with the given
 // color.
 func (c *Color) Println(a ...interface{}) (n int, err error) {
-	return fmt.Fprintln(Output, c.wrap(fmt.Sprint(a...)))
+	return fmt.Fprintln(Output, c.wrap(sprintln(a...)))
 }
 
 // Sprint is just like Print, but returns a string instead of printing it.
@@ -288,7 +288,7 @@ func (c *Color) Sprint(a ...interface{}) string {
 
 // Sprintln is just like Println, but returns a string instead of printing it.
 func (c *Color) Sprintln(a ...interface{}) string {
-	return fmt.Sprintln(c.Sprint(a...))
+	return c.wrap(sprintln(a...)) + "\n"
 }
 
 // Sprintf is just like Printf, but returns a string instead of printing it.
@@ -370,7 +370,7 @@ func (c *Color) SprintfFunc() func(format string, a ...interface{}) string {
 // string. Windows users should use this in conjunction with color.Output.
 func (c *Color) SprintlnFunc() func(a ...interface{}) string {
 	return func(a ...interface{}) string {
-		return fmt.Sprintln(c.Sprint(a...))
+		return c.wrap(sprintln(a...)) + "\n"
 	}
 }
 
@@ -648,3 +648,8 @@ func HiCyanString(format string, a ...interface{}) string { return colorString(f
 func HiWhiteString(format string, a ...interface{}) string {
 	return colorString(format, FgHiWhite, a...)
 }
+
+// sprintln is a helper function to format a string with fmt.Sprintln and trim the trailing newline.
+func sprintln(a ...interface{}) string {
+	return strings.TrimSuffix(fmt.Sprintln(a...), "\n")
+}
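For context, a minimal sketch of the behavioural effect of the new sprintln helper: Println/Sprintln now separate their operands with spaces, as fmt.Println does, and append exactly one newline after the colour reset. The escape sequences in the comments are illustrative only and depend on the configured attributes and terminal.

```go
package main

import (
	"fmt"

	"github.com/fatih/color"
)

func main() {
	c := color.New(color.FgRed)

	// Before this change, fmt.Sprint was used internally, so adjacent string
	// operands were concatenated without a separating space:
	//   "\x1b[31mhelloworld\x1b[0m\n"
	// With sprintln, operands are space-separated like fmt.Println and a
	// single newline follows the reset code:
	//   "\x1b[31mhello world\x1b[0m\n"
	fmt.Print(c.Sprintln("hello", "world"))
}
```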
diff --git a/vendor/github.com/firefart/nonamedreturns/analyzer/analyzer.go b/vendor/github.com/firefart/nonamedreturns/analyzer/analyzer.go
index 6ad97ab491a8cea4caa80c3a4d6ce6fb96c93f66..ecd4915a8b2830541f1809ab913d6cccd2684e49 100644
--- a/vendor/github.com/firefart/nonamedreturns/analyzer/analyzer.go
+++ b/vendor/github.com/firefart/nonamedreturns/analyzer/analyzer.go
@@ -53,6 +53,11 @@ func run(pass *analysis.Pass) (interface{}, error) {
 			return
 		}
 
+		// Function without body, ex: https://github.com/golang/go/blob/master/src/internal/syscall/unix/net.go
+		if funcBody == nil {
+			return
+		}
+
 		// no return values
 		if funcResults == nil {
 			return
diff --git a/vendor/github.com/ghostiam/protogetter/.goreleaser.yaml b/vendor/github.com/ghostiam/protogetter/.goreleaser.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a70d0fb00692213fe806a87f2c925ad4b1986b46
--- /dev/null
+++ b/vendor/github.com/ghostiam/protogetter/.goreleaser.yaml
@@ -0,0 +1,24 @@
+before:
+  hooks:
+    - go mod tidy
+builds:
+  - id: protogetter
+    main: ./cmd/protogetter
+    binary: protogetter
+    env:
+      - CGO_ENABLED=0
+    goos:
+      - linux
+      - windows
+      - darwin
+checksum:
+  name_template: 'checksums.txt'
+snapshot:
+  name_template: "{{ incpatch .Version }}-next"
+changelog:
+  sort: asc
+  filters:
+    exclude:
+      - '^docs:'
+      - '^test:'
+      - '^ci:'
\ No newline at end of file
diff --git a/vendor/github.com/ghostiam/protogetter/LICENSE b/vendor/github.com/ghostiam/protogetter/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..b4449661b7091d3c401541cb4c112d57eefcab5b
--- /dev/null
+++ b/vendor/github.com/ghostiam/protogetter/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 Vladislav Fursov (GhostIAm)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/vendor/github.com/ghostiam/protogetter/Makefile b/vendor/github.com/ghostiam/protogetter/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..4c2a62af18bb3d7908fcdb42ece1eff99c4bb40a
--- /dev/null
+++ b/vendor/github.com/ghostiam/protogetter/Makefile
@@ -0,0 +1,9 @@
+.PHONY: test
+test:
+	$(MAKE) -C testdata vendor
+	go test -v ./...
+
+.PHONY: install
+install:
+	go install ./cmd/protogetter
+	@echo "Installed in $(shell which protogetter)"
diff --git a/vendor/github.com/ghostiam/protogetter/README.md b/vendor/github.com/ghostiam/protogetter/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..c033e9597ff3e48b7b7c89abf6f042da56073a35
--- /dev/null
+++ b/vendor/github.com/ghostiam/protogetter/README.md
@@ -0,0 +1,73 @@
+# Protogetter
+Welcome to the Protogetter project!
+
+## Overview
+Protogetter is a linter developed specifically for Go programmers working with nested `protobuf` types.\
+It's designed to aid developers in preventing `invalid memory address or nil pointer dereference` errors arising from direct access of nested `protobuf` fields.
+
+When working with `protobuf`, it's quite common to have complex structures where a message field is contained within another message, which itself can be part of another message, and so on.
+If these fields are accessed directly and some field in the call chain is not initialized, it can result in an application panic.
+
+Protogetter addresses this issue by suggesting the use of getter methods for field access.
+
+## How does it work?
+Protogetter analyzes your Go code and helps detect direct `protobuf` field accesses that could give rise to panic.\
+The linter suggests using getters:
+```go
+m.GetFoo().GetBar().GetBaz()
+```
+instead of direct field access:
+```go
+m.Foo.Bar.Baz
+```
+
+And you will then only need to perform a nil check after the final call:
+```go
+if m.GetFoo().GetBar().GetBaz() != nil {
+    // Do something with m.GetFoo().GetBar().GetBaz()
+}
+```
+instead of:
+```go
+if m.Foo != nil {
+    if m.Foo.Bar != nil {
+        if m.Foo.Bar.Baz != nil {
+            // Do something with m.Foo.Bar.Baz
+        }
+    }
+}
+```
+
+or use zero values:
+
+```go
+// If one of the methods returns `nil` we will receive 0 instead of a panic.
+v := m.GetFoo().GetBar().GetBaz().GetInt()
+```
+
+instead of panic:
+
+```go
+// If at least one structure in the chain is not initialized, we will get a panic.
+v := m.Foo.Bar.Baz.Int
+```
+
+which simplifies the code and makes it more reliable.
+
+## Installation
+
+```bash
+go install github.com/ghostiam/protogetter/cmd/protogetter@latest
+```
+
+## Usage
+
+To run the linter:
+```bash
+protogetter ./...
+```
+
+Or to apply suggested fixes directly:
+```bash
+protogetter --fix ./...
+```
diff --git a/vendor/github.com/ghostiam/protogetter/posfilter.go b/vendor/github.com/ghostiam/protogetter/posfilter.go
new file mode 100644
index 0000000000000000000000000000000000000000..82075ccb16c22a87b9d739c80252df287758ea5e
--- /dev/null
+++ b/vendor/github.com/ghostiam/protogetter/posfilter.go
@@ -0,0 +1,65 @@
+package protogetter
+
+import (
+	"go/token"
+)
+
+type PosFilter struct {
+	positions       map[token.Pos]struct{}
+	alreadyReplaced map[string]map[int][2]int // map[filename][line][start, end]
+}
+
+func NewPosFilter() *PosFilter {
+	return &PosFilter{
+		positions:       make(map[token.Pos]struct{}),
+		alreadyReplaced: make(map[string]map[int][2]int),
+	}
+}
+
+func (f *PosFilter) IsFiltered(pos token.Pos) bool {
+	_, ok := f.positions[pos]
+	return ok
+}
+
+func (f *PosFilter) AddPos(pos token.Pos) {
+	f.positions[pos] = struct{}{}
+}
+
+func (f *PosFilter) IsAlreadyReplaced(fset *token.FileSet, pos, end token.Pos) bool {
+	filePos := fset.Position(pos)
+	fileEnd := fset.Position(end)
+
+	lines, ok := f.alreadyReplaced[filePos.Filename]
+	if !ok {
+		return false
+	}
+
+	lineRange, ok := lines[filePos.Line]
+	if !ok {
+		return false
+	}
+
+	if lineRange[0] <= filePos.Offset && fileEnd.Offset <= lineRange[1] {
+		return true
+	}
+
+	return false
+}
+
+func (f *PosFilter) AddAlreadyReplaced(fset *token.FileSet, pos, end token.Pos) {
+	filePos := fset.Position(pos)
+	fileEnd := fset.Position(end)
+
+	lines, ok := f.alreadyReplaced[filePos.Filename]
+	if !ok {
+		lines = make(map[int][2]int)
+		f.alreadyReplaced[filePos.Filename] = lines
+	}
+
+	lineRange, ok := lines[filePos.Line]
+	if ok && lineRange[0] <= filePos.Offset && fileEnd.Offset <= lineRange[1] {
+		return
+	}
+
+	lines[filePos.Line] = [2]int{filePos.Offset, fileEnd.Offset}
+}
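A minimal, self-contained sketch of how the PosFilter above is meant to be used (the real analyzer wiring appears in protogetter.go further down; the parsed expression here is only a stand-in for AST nodes taken from an analysis.Pass):

```go
package main

import (
	"fmt"
	"go/parser"
	"go/token"

	"github.com/ghostiam/protogetter"
)

func main() {
	fset := token.NewFileSet()

	// Parse a tiny expression so we have real token positions to work with;
	// in the analyzer these come from the files of an analysis.Pass.
	expr, err := parser.ParseExprFrom(fset, "example.go", "m.Foo.Bar", 0)
	if err != nil {
		panic(err)
	}

	filter := protogetter.NewPosFilter()

	// Positions recorded with AddPos are skipped by later checks.
	filter.AddPos(expr.Pos())
	fmt.Println(filter.IsFiltered(expr.Pos())) // true

	// Recording a replaced range prevents duplicate fixes for sub-expressions
	// that fall inside the same range on the same line.
	filter.AddAlreadyReplaced(fset, expr.Pos(), expr.End())
	fmt.Println(filter.IsAlreadyReplaced(fset, expr.Pos(), expr.End())) // true
}
```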
diff --git a/vendor/github.com/ghostiam/protogetter/processor.go b/vendor/github.com/ghostiam/protogetter/processor.go
new file mode 100644
index 0000000000000000000000000000000000000000..eca82939d82158e2f92c6e57d0c5c64c3aa9a6e3
--- /dev/null
+++ b/vendor/github.com/ghostiam/protogetter/processor.go
@@ -0,0 +1,351 @@
+package protogetter
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"go/types"
+	"reflect"
+	"strings"
+)
+
+type processor struct {
+	info   *types.Info
+	filter *PosFilter
+	cfg    *Config
+
+	to   strings.Builder
+	from strings.Builder
+	err  error
+}
+
+func Process(info *types.Info, filter *PosFilter, n ast.Node, cfg *Config) (*Result, error) {
+	p := &processor{
+		info:   info,
+		filter: filter,
+		cfg:    cfg,
+	}
+
+	return p.process(n)
+}
+
+func (c *processor) process(n ast.Node) (*Result, error) {
+	switch x := n.(type) {
+	case *ast.AssignStmt:
+		// Skip any assignment to the field.
+		for _, s := range x.Lhs {
+			c.filter.AddPos(s.Pos())
+
+			if se, ok := s.(*ast.StarExpr); ok {
+				c.filter.AddPos(se.X.Pos())
+			}
+		}
+
+	case *ast.IncDecStmt:
+		// Skip any increment/decrement to the field.
+		c.filter.AddPos(x.X.Pos())
+
+	case *ast.UnaryExpr:
+		if x.Op == token.AND {
+			// Skip all expressions when the field is used as a pointer.
+			// Because this is not direct reading, but most likely writing by pointer (for example like sql.Scan).
+			c.filter.AddPos(x.X.Pos())
+		}
+
+	case *ast.CallExpr:
+		if !c.cfg.ReplaceFirstArgInAppend && len(x.Args) > 0 {
+			if v, ok := x.Fun.(*ast.Ident); ok && v.Name == "append" {
+				// Skip first argument of append function.
+				c.filter.AddPos(x.Args[0].Pos())
+				break
+			}
+		}
+
+		f, ok := x.Fun.(*ast.SelectorExpr)
+		if !ok {
+			return &Result{}, nil
+		}
+
+		if !isProtoMessage(c.info, f.X) {
+			return &Result{}, nil
+		}
+
+		c.processInner(x)
+
+	case *ast.SelectorExpr:
+		if !isProtoMessage(c.info, x.X) {
+			// If the selector is not on a proto message, skip it.
+			return &Result{}, nil
+		}
+
+		c.processInner(x)
+
+	case *ast.StarExpr:
+		f, ok := x.X.(*ast.SelectorExpr)
+		if !ok {
+			return &Result{}, nil
+		}
+
+		if !isProtoMessage(c.info, f.X) {
+			return &Result{}, nil
+		}
+
+		// proto2 generates fields as pointers. Hence, the indirection
+		// must be removed when generating the fix for the case.
+		// The `*` is retained in `c.from`, but excluded from the fix
+		// present in the `c.to`.
+		c.writeFrom("*")
+		c.processInner(x.X)
+
+	case *ast.BinaryExpr:
+		// Check if the expression is a comparison.
+		if x.Op != token.EQL && x.Op != token.NEQ {
+			return &Result{}, nil
+		}
+
+		// Check if one of the operands is nil.
+
+		xIdent, xOk := x.X.(*ast.Ident)
+		yIdent, yOk := x.Y.(*ast.Ident)
+
+		xIsNil := xOk && xIdent.Name == "nil"
+		yIsNil := yOk && yIdent.Name == "nil"
+
+		if !xIsNil && !yIsNil {
+			return &Result{}, nil
+		}
+
+		// Extract the non-nil operand for further checks
+
+		var expr ast.Expr
+		if xIsNil {
+			expr = x.Y
+		} else {
+			expr = x.X
+		}
+
+		se, ok := expr.(*ast.SelectorExpr)
+		if !ok {
+			return &Result{}, nil
+		}
+
+		if !isProtoMessage(c.info, se.X) {
+			return &Result{}, nil
+		}
+
+		// Check if the Getter function of the protobuf message returns a pointer.
+		hasPointer, ok := getterResultHasPointer(c.info, se.X, se.Sel.Name)
+		if !ok || hasPointer {
+			return &Result{}, nil
+		}
+
+		c.filter.AddPos(x.X.Pos())
+
+	default:
+		return nil, fmt.Errorf("not implemented for type: %s (%s)", reflect.TypeOf(x), formatNode(n))
+	}
+
+	if c.err != nil {
+		return nil, c.err
+	}
+
+	return &Result{
+		From: c.from.String(),
+		To:   c.to.String(),
+	}, nil
+}
+
+func (c *processor) processInner(expr ast.Expr) {
+	switch x := expr.(type) {
+	case *ast.Ident:
+		c.write(x.Name)
+
+	case *ast.BasicLit:
+		c.write(x.Value)
+
+	case *ast.UnaryExpr:
+		if x.Op == token.AND {
+			c.write(formatNode(x))
+			return
+		}
+
+		c.write(x.Op.String())
+		c.processInner(x.X)
+
+	case *ast.SelectorExpr:
+		c.processInner(x.X)
+		c.write(".")
+
+		// If getter exists, use it.
+		if methodIsExists(c.info, x.X, "Get"+x.Sel.Name) {
+			c.writeFrom(x.Sel.Name)
+			c.writeTo("Get" + x.Sel.Name + "()")
+			return
+		}
+
+		// If the selector is not a proto-message or the method has already been called, we leave it unchanged.
+		// This approach is significantly more efficient than verifying the presence of methods in all cases.
+		c.write(x.Sel.Name)
+
+	case *ast.CallExpr:
+		c.processInner(x.Fun)
+		c.write("(")
+		for i, arg := range x.Args {
+			if i > 0 {
+				c.write(",")
+			}
+			c.processInner(arg)
+		}
+		c.write(")")
+
+	case *ast.IndexExpr:
+		c.processInner(x.X)
+		c.write("[")
+		c.processInner(x.Index)
+		c.write("]")
+
+	case *ast.BinaryExpr:
+		c.processInner(x.X)
+		c.write(x.Op.String())
+		c.processInner(x.Y)
+
+	case *ast.ParenExpr:
+		c.write("(")
+		c.processInner(x.X)
+		c.write(")")
+
+	case *ast.StarExpr:
+		c.write("*")
+		c.processInner(x.X)
+
+	case *ast.CompositeLit, *ast.TypeAssertExpr, *ast.ArrayType, *ast.FuncLit:
+		// Process the node as is.
+		c.write(formatNode(x))
+
+	default:
+		c.err = fmt.Errorf("processInner: not implemented for type: %s", reflect.TypeOf(x))
+	}
+}
+
+func (c *processor) write(s string) {
+	c.writeTo(s)
+	c.writeFrom(s)
+}
+
+func (c *processor) writeTo(s string) {
+	c.to.WriteString(s)
+}
+
+func (c *processor) writeFrom(s string) {
+	c.from.WriteString(s)
+}
+
+// Result contains source code (from) and suggested change (to)
+type Result struct {
+	From string
+	To   string
+}
+
+func (r *Result) Skipped() bool {
+	// If from and to are the same, skip it.
+	return r.From == r.To
+}
+
+func isProtoMessage(info *types.Info, expr ast.Expr) bool {
+	// First, we are checking for the presence of the ProtoReflect method which is currently being generated
+	// and corresponds to v2 version.
+	// https://pkg.go.dev/google.golang.org/protobuf@v1.31.0/proto#Message
+	const protoV2Method = "ProtoReflect"
+	ok := methodIsExists(info, expr, protoV2Method)
+	if ok {
+		return true
+	}
+
+	// Afterwards, we are checking the ProtoMessage method. All the structures that implement the proto.Message interface
+	// have a ProtoMessage method and are proto-structures. This interface has been generated since version 1.0.0 and
+	// continues to exist for compatibility.
+	// https://pkg.go.dev/github.com/golang/protobuf/proto?utm_source=godoc#Message
+	const protoV1Method = "ProtoMessage"
+	ok = methodIsExists(info, expr, protoV1Method)
+	if ok {
+		// Since there is a protoc-gen-gogo generator that implements the proto.Message interface, but may not generate
+		// getters or generate from without checking for nil, so even if getters exist, we skip them.
+		const protocGenGoGoMethod = "MarshalToSizedBuffer"
+		return !methodIsExists(info, expr, protocGenGoGoMethod)
+	}
+
+	return false
+}
+
+func typesNamed(info *types.Info, x ast.Expr) (*types.Named, bool) {
+	if info == nil {
+		return nil, false
+	}
+
+	t := info.TypeOf(x)
+	if t == nil {
+		return nil, false
+	}
+
+	ptr, ok := t.Underlying().(*types.Pointer)
+	if ok {
+		t = ptr.Elem()
+	}
+
+	named, ok := t.(*types.Named)
+	if !ok {
+		return nil, false
+	}
+
+	return named, true
+}
+
+func methodIsExists(info *types.Info, x ast.Expr, name string) bool {
+	named, ok := typesNamed(info, x)
+	if !ok {
+		return false
+	}
+
+	for i := 0; i < named.NumMethods(); i++ {
+		if named.Method(i).Name() == name {
+			return true
+		}
+	}
+
+	return false
+}
+
+func getterResultHasPointer(info *types.Info, x ast.Expr, name string) (hasPointer, ok bool) {
+	named, ok := typesNamed(info, x)
+	if !ok {
+		return false, false
+	}
+
+	for i := 0; i < named.NumMethods(); i++ {
+		method := named.Method(i)
+		if method.Name() != "Get"+name {
+			continue
+		}
+
+		var sig *types.Signature
+		sig, ok = method.Type().(*types.Signature)
+		if !ok {
+			return false, false
+		}
+
+		results := sig.Results()
+		if results.Len() == 0 {
+			return false, false
+		}
+
+		firstType := results.At(0)
+		_, ok = firstType.Type().(*types.Pointer)
+		if !ok {
+			return false, true
+		}
+
+		return true, true
+	}
+
+	return false, false
+}
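Illustrative only: a Result pairs the original source text (From) with the suggested getter chain (To), and Skipped reports whether the rewrite would be a no-op. The values below mirror the README example; in practice a Result is produced by Process from a real *types.Info and AST node.

```go
package main

import (
	"fmt"

	"github.com/ghostiam/protogetter"
)

func main() {
	r := protogetter.Result{
		From: "m.Foo.Bar.Baz",
		To:   "m.GetFoo().GetBar().GetBaz()",
	}
	fmt.Println(r.Skipped()) // false: the suggested getter chain differs from the source
}
```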
diff --git a/vendor/github.com/ghostiam/protogetter/protogetter.go b/vendor/github.com/ghostiam/protogetter/protogetter.go
new file mode 100644
index 0000000000000000000000000000000000000000..31eee8572ad2c0843be9eaad2de358a6e59de901
--- /dev/null
+++ b/vendor/github.com/ghostiam/protogetter/protogetter.go
@@ -0,0 +1,279 @@
+package protogetter
+
+import (
+	"bytes"
+	"flag"
+	"fmt"
+	"go/ast"
+	"go/format"
+	"go/token"
+	"log"
+	"path/filepath"
+	"strings"
+
+	"github.com/gobwas/glob"
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/ast/inspector"
+)
+
+type Mode int
+
+const (
+	StandaloneMode Mode = iota
+	GolangciLintMode
+)
+
+const msgFormat = "avoid direct access to proto field %s, use %s instead"
+
+func NewAnalyzer(cfg *Config) *analysis.Analyzer {
+	if cfg == nil {
+		cfg = &Config{}
+	}
+
+	return &analysis.Analyzer{
+		Name:  "protogetter",
+		Doc:   "Reports direct reads from proto message fields when getters should be used",
+		Flags: flags(cfg),
+		Run: func(pass *analysis.Pass) (any, error) {
+			_, err := Run(pass, cfg)
+			return nil, err
+		},
+	}
+}
+
+func flags(opts *Config) flag.FlagSet {
+	fs := flag.NewFlagSet("protogetter", flag.ContinueOnError)
+
+	fs.Func("skip-generated-by", "skip files generated with the given prefixes", func(s string) error {
+		for _, prefix := range strings.Split(s, ",") {
+			opts.SkipGeneratedBy = append(opts.SkipGeneratedBy, prefix)
+		}
+		return nil
+	})
+	fs.Func("skip-files", "skip files with the given glob patterns", func(s string) error {
+		for _, pattern := range strings.Split(s, ",") {
+			opts.SkipFiles = append(opts.SkipFiles, pattern)
+		}
+		return nil
+	})
+	fs.BoolVar(&opts.SkipAnyGenerated, "skip-any-generated", false, "skip any generated files")
+
+	return *fs
+}
+
+type Config struct {
+	Mode                    Mode // Zero value is StandaloneMode.
+	SkipGeneratedBy         []string
+	SkipFiles               []string
+	SkipAnyGenerated        bool
+	ReplaceFirstArgInAppend bool
+}
+
+func Run(pass *analysis.Pass, cfg *Config) ([]Issue, error) {
+	skipGeneratedBy := make([]string, 0, len(cfg.SkipGeneratedBy)+3)
+	// Always skip files generated by protoc-gen-go, protoc-gen-go-grpc and protoc-gen-grpc-gateway.
+	skipGeneratedBy = append(skipGeneratedBy, "protoc-gen-go", "protoc-gen-go-grpc", "protoc-gen-grpc-gateway")
+	for _, s := range cfg.SkipGeneratedBy {
+		s = strings.TrimSpace(s)
+		if s == "" {
+			continue
+		}
+		skipGeneratedBy = append(skipGeneratedBy, s)
+	}
+
+	skipFilesGlobPatterns := make([]glob.Glob, 0, len(cfg.SkipFiles))
+	for _, s := range cfg.SkipFiles {
+		s = strings.TrimSpace(s)
+		if s == "" {
+			continue
+		}
+
+		compile, err := glob.Compile(s)
+		if err != nil {
+			return nil, fmt.Errorf("invalid glob pattern: %w", err)
+		}
+
+		skipFilesGlobPatterns = append(skipFilesGlobPatterns, compile)
+	}
+
+	nodeTypes := []ast.Node{
+		(*ast.AssignStmt)(nil),
+		(*ast.BinaryExpr)(nil),
+		(*ast.CallExpr)(nil),
+		(*ast.SelectorExpr)(nil),
+		(*ast.StarExpr)(nil),
+		(*ast.IncDecStmt)(nil),
+		(*ast.UnaryExpr)(nil),
+	}
+
+	// Skip filtered files.
+	var files []*ast.File
+	for _, f := range pass.Files {
+		if skipGeneratedFile(f, skipGeneratedBy, cfg.SkipAnyGenerated) {
+			continue
+		}
+
+		if skipFilesByGlob(pass.Fset.File(f.Pos()).Name(), skipFilesGlobPatterns) {
+			continue
+		}
+
+		files = append(files, f)
+
+		// ast.Print(pass.Fset, f)
+	}
+
+	ins := inspector.New(files)
+
+	var issues []Issue
+
+	filter := NewPosFilter()
+	ins.Preorder(nodeTypes, func(node ast.Node) {
+		report := analyse(pass, filter, node, cfg)
+		if report == nil {
+			return
+		}
+
+		switch cfg.Mode {
+		case StandaloneMode:
+			pass.Report(report.ToDiagReport())
+		case GolangciLintMode:
+			issues = append(issues, report.ToIssue(pass.Fset))
+		}
+	})
+
+	return issues, nil
+}
+
+func analyse(pass *analysis.Pass, filter *PosFilter, n ast.Node, cfg *Config) *Report {
+	// fmt.Printf("\n>>> check: %s\n", formatNode(n))
+	// ast.Print(pass.Fset, n)
+	if filter.IsFiltered(n.Pos()) {
+		// fmt.Printf(">>> filtered\n")
+		return nil
+	}
+
+	result, err := Process(pass.TypesInfo, filter, n, cfg)
+	if err != nil {
+		pass.Report(analysis.Diagnostic{
+			Pos:     n.Pos(),
+			End:     n.End(),
+			Message: fmt.Sprintf("error: %v", err),
+		})
+
+		return nil
+	}
+
+	// If the position is already in the filter, skip it.
+	if filter.IsFiltered(n.Pos()) {
+		return nil
+	}
+
+	if result.Skipped() {
+		return nil
+	}
+
+	// If the expression has already been replaced, skip it.
+	if filter.IsAlreadyReplaced(pass.Fset, n.Pos(), n.End()) {
+		return nil
+	}
+	// Add the expression to the filter.
+	filter.AddAlreadyReplaced(pass.Fset, n.Pos(), n.End())
+
+	return &Report{
+		node:   n,
+		result: result,
+	}
+}
+
+// Issue is used to integrate with golangci-lint's inline auto fix.
+type Issue struct {
+	Pos       token.Position
+	Message   string
+	InlineFix InlineFix
+}
+
+type InlineFix struct {
+	StartCol  int // zero-based
+	Length    int
+	NewString string
+}
+
+type Report struct {
+	node   ast.Node
+	result *Result
+}
+
+func (r *Report) ToDiagReport() analysis.Diagnostic {
+	msg := fmt.Sprintf(msgFormat, r.result.From, r.result.To)
+
+	return analysis.Diagnostic{
+		Pos:     r.node.Pos(),
+		End:     r.node.End(),
+		Message: msg,
+		SuggestedFixes: []analysis.SuggestedFix{
+			{
+				Message: msg,
+				TextEdits: []analysis.TextEdit{
+					{
+						Pos:     r.node.Pos(),
+						End:     r.node.End(),
+						NewText: []byte(r.result.To),
+					},
+				},
+			},
+		},
+	}
+}
+
+func (r *Report) ToIssue(fset *token.FileSet) Issue {
+	msg := fmt.Sprintf(msgFormat, r.result.From, r.result.To)
+	return Issue{
+		Pos:     fset.Position(r.node.Pos()),
+		Message: msg,
+		InlineFix: InlineFix{
+			StartCol:  fset.Position(r.node.Pos()).Column - 1,
+			Length:    len(r.result.From),
+			NewString: r.result.To,
+		},
+	}
+}
+
+func skipGeneratedFile(f *ast.File, prefixes []string, skipAny bool) bool {
+	if len(f.Comments) == 0 {
+		return false
+	}
+	firstComment := f.Comments[0].Text()
+
+	// https://golang.org/s/generatedcode
+	if skipAny && strings.HasPrefix(firstComment, "Code generated") {
+		return true
+	}
+
+	for _, prefix := range prefixes {
+		if strings.HasPrefix(firstComment, "Code generated by "+prefix) {
+			return true
+		}
+	}
+
+	return false
+}
+
+func skipFilesByGlob(filename string, patterns []glob.Glob) bool {
+	for _, pattern := range patterns {
+		if pattern.Match(filename) || pattern.Match(filepath.Base(filename)) {
+			return true
+		}
+	}
+
+	return false
+}
+
+func formatNode(node ast.Node) string {
+	buf := new(bytes.Buffer)
+	if err := format.Node(buf, token.NewFileSet(), node); err != nil {
+		log.Printf("Error formatting expression: %v", err)
+		return ""
+	}
+
+	return buf.String()
+}
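A hypothetical standalone driver for the analyzer above (the real entry point lives in cmd/protogetter, which is not vendored here); this sketch assumes the standard golang.org/x/tools singlechecker wiring:

```go
package main

import (
	"github.com/ghostiam/protogetter"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// The zero-value Config selects StandaloneMode, so diagnostics and
	// suggested fixes are reported directly through the analysis.Pass.
	singlechecker.Main(protogetter.NewAnalyzer(&protogetter.Config{}))
}
```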
diff --git a/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go b/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go
index 5a9564a0f9e436d33f5e6fac9313b959ae99d33a..b834158ecaf6cbdf74bb87fb77ce0f0fff62cbfe 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go
@@ -27,12 +27,16 @@ func init() {
 			"//nolint",
 		}
 		parts := []string{
-			"//go:generate ",  // e.g.: go:generate value
-			"//line /",        // e.g.: line /path/to/file:123
-			"//nolint ",       // e.g.: nolint
-			"//noinspection ", // e.g.: noinspection ALL, some GoLand and friends versions
-			"//export ",       // e.g.: export Foo
-			"///",             // e.g.: vertical breaker /////////////
+			"//go:generate ",   // e.g.: go:generate value
+			"//line /",         // e.g.: line /path/to/file:123
+			"//nolint ",        // e.g.: nolint
+			"//noinspection ",  // e.g.: noinspection ALL, some GoLand and friends versions
+			"//region",         // e.g.: region awawa, used by GoLand and friends for custom folding
+			"//endregion",      // e.g.: endregion awawa or endregion, closes GoLand regions
+			"//<editor-fold",   // e.g.: <editor-fold desc="awawa"> or <editor-fold>, used by VSCode for custom folding
+			"//</editor-fold>", // e.g.: </editor-fold>, closes VSCode regions
+			"//export ",        // e.g.: export Foo
+			"///",              // e.g.: vertical breaker /////////////
 			"//+",
 			"//#",
 			"//-",
diff --git a/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go b/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go
index 402ba33066e8a94598d0f2e773b9aaf1a27c8247..8595b79515f08fc9d2d904ad8a824008ea51b727 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go
@@ -6,6 +6,7 @@ import (
 	"go/token"
 	"regexp"
 	"strings"
+	"unicode/utf8"
 
 	"github.com/go-critic/go-critic/checkers/internal/astwalk"
 	"github.com/go-critic/go-critic/linter"
@@ -18,6 +19,12 @@ func init() {
 	info.Name = "commentedOutCode"
 	info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag}
 	info.Summary = "Detects commented-out code inside function bodies"
+	info.Params = linter.CheckerParams{
+		"minLength": {
+			Value: 15,
+			Usage: "min length of the comment that triggers a warning",
+		},
+	}
 	info.Before = `
 // fmt.Println("Debugging hard")
 foo(1, 2)`
@@ -27,6 +34,7 @@ foo(1, 2)`
 		return astwalk.WalkerForLocalComment(&commentedOutCodeChecker{
 			ctx:              ctx,
 			notQuiteFuncCall: regexp.MustCompile(`\w+\s+\([^)]*\)\s*$`),
+			minLength:        info.Params.Int("minLength"),
 		}), nil
 	})
 }
@@ -37,6 +45,7 @@ type commentedOutCodeChecker struct {
 	fn  *ast.FuncDecl
 
 	notQuiteFuncCall *regexp.Regexp
+	minLength        int
 }
 
 func (c *commentedOutCodeChecker) EnterFunc(fn *ast.FuncDecl) bool {
@@ -69,7 +78,7 @@ func (c *commentedOutCodeChecker) VisitLocalComment(cg *ast.CommentGroup) {
 	// Some very short comment that can be skipped.
 	// Usually triggering on these results in false positive.
 	// Unless there is a very popular call like print/println.
-	cond := len(s) < len("quite too short") &&
+	cond := utf8.RuneCountInString(s) < c.minLength &&
 		!strings.Contains(s, "print") &&
 		!strings.Contains(s, "fmt.") &&
 		!strings.Contains(s, "log.")
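For illustration, how the new rune-based minLength threshold (default 15) plays out; this is a hedged sketch, not a case from the checker's test suite. The first comment ("x = 1") is below the threshold and mentions no print/fmt./log call, so it is skipped; the second is long enough to be analysed as possible commented-out code. The blank lines keep the two comments in separate comment groups.

```go
package demo

func example() {
	x := 0

	// x = 1

	// x = computeNextValue(x, defaultStep)

	_ = x
}
```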
diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go
index 19079871f7f0ba1029abbe9c6b2b4ecd9b03f602..ed674eb85c20e2e69cb8b94e3d82224e32d0c3c9 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go
@@ -15,7 +15,7 @@ func init() {
 	info.Before = `
 import (
 	"fmt"
-	priting "fmt" // Imported the second time
+	printing "fmt" // Imported the second time
 )`
 	info.After = `
 import(
diff --git a/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go b/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go
index 98b76e2618ef03ad7a0953ab860b9e1abb0fab49..701066860867ffb971b89c95f780d411ccc514d1 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go
@@ -64,7 +64,7 @@ func (c *flagNameChecker) checkFlagName(call *ast.CallExpr, arg ast.Expr) {
 	case name == "":
 		c.warnEmpty(call)
 	case strings.HasPrefix(name, "-"):
-		c.warnHypenPrefix(call, name)
+		c.warnHyphenPrefix(call, name)
 	case strings.Contains(name, "="):
 		c.warnEq(call, name)
 	case strings.Contains(name, " "):
@@ -76,8 +76,8 @@ func (c *flagNameChecker) warnEmpty(cause ast.Node) {
 	c.ctx.Warn(cause, "empty flag name")
 }
 
-func (c *flagNameChecker) warnHypenPrefix(cause ast.Node, name string) {
-	c.ctx.Warn(cause, "flag name %q should not start with a hypen", name)
+func (c *flagNameChecker) warnHyphenPrefix(cause ast.Node, name string) {
+	c.ctx.Warn(cause, "flag name %q should not start with a hyphen", name)
 }
 
 func (c *flagNameChecker) warnEq(cause ast.Node, name string) {
diff --git a/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go b/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go
index 3b7f1d12b3bff0650a03a24c7a012d877fe39f9e..7b7a3c538b1044786103881c5252b13d2a81b17e 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go
@@ -5,6 +5,8 @@ import (
 
 	"github.com/go-critic/go-critic/checkers/internal/astwalk"
 	"github.com/go-critic/go-critic/linter"
+
+	"github.com/go-toolsmith/astcast"
 )
 
 func init() {
@@ -39,12 +41,30 @@ type hugeParamChecker struct {
 func (c *hugeParamChecker) VisitFuncDecl(decl *ast.FuncDecl) {
 	// TODO(quasilyte): maybe it's worthwhile to permit skipping
 	// test files for this checker?
+	if c.isImplementStringer(decl) {
+		return
+	}
+
 	if decl.Recv != nil {
 		c.checkParams(decl.Recv.List)
 	}
 	c.checkParams(decl.Type.Params.List)
 }
 
+// isImplementStringer reports whether the method has the fmt.Stringer signature: String() string.
+func (*hugeParamChecker) isImplementStringer(decl *ast.FuncDecl) bool {
+	if decl.Recv != nil &&
+		decl.Name.Name == "String" &&
+		decl.Type != nil &&
+		len(decl.Type.Params.List) == 0 &&
+		len(decl.Type.Results.List) == 1 &&
+		astcast.ToIdent(decl.Type.Results.List[0].Type).Name == "string" {
+		return true
+	}
+
+	return false
+}
+
 func (c *hugeParamChecker) checkParams(params []*ast.Field) {
 	for _, p := range params {
 		for _, id := range p.Names {
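A sketch of what the new exemption means in practice (illustrative types; hugeParam's size threshold is configurable, and the 8 KiB array below is comfortably above it):

```go
package demo

import "fmt"

// Matrix is deliberately large so that passing it by value would normally
// be flagged by hugeParam.
type Matrix struct {
	cells [1024]float64
}

// Exempt after this change: the signature is exactly `String() string`,
// i.e. the fmt.Stringer method, which isImplementStringer now skips.
func (m Matrix) String() string {
	return fmt.Sprintf("Matrix(%d cells)", len(m.cells))
}

// Still a hugeParam candidate: a large struct passed by value where a
// pointer parameter would avoid the copy.
func sum(m Matrix) float64 {
	var total float64
	for _, v := range m.cells {
		total += v
	}
	return total
}
```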
diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astflow.go b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astflow.go
index 63d181e5eb2b7ee94454d44991505188fadf5c54..f64907d69b8db114fceee8be58bd1ecb7e355087 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astflow.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astflow.go
@@ -18,7 +18,7 @@ import (
 //
 // If proven really useful, can be moved to go-toolsmith library.
 
-// IsImmutable reports whether n can be midified through any operation.
+// IsImmutable reports whether n can be modified through any operation.
 func IsImmutable(info *types.Info, n ast.Expr) bool {
 	if astp.IsBasicLit(n) {
 		return true
diff --git a/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go b/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go
index ebc61c12a489dc5e0880b22eef92a4b49fe4bab9..2885dc72547fdac60517606c1b55d312354e4442 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go
@@ -117,7 +117,7 @@ func (c *mapKeyChecker) checkWhitespace(lit *ast.CompositeLit) {
 }
 
 func (c *mapKeyChecker) warnWhitespace(key ast.Node) {
-	c.ctx.Warn(key, "suspucious whitespace in %s key", key)
+	c.ctx.Warn(key, "suspicious whitespace in %s key", key)
 }
 
 func (c *mapKeyChecker) warnDupKey(key ast.Node) {
diff --git a/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go b/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go
index 503118c7ecbf2a92b10075f79be1cd6cc82aa635..4ab31076fc4317a79c392500f12aa4429bd0a027 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go
@@ -363,7 +363,7 @@ var PrecompiledRules = &ir.File{
 			Line:        114,
 			Name:        "sloppyLen",
 			MatcherName: "m",
-			DocTags:     []string{"style"},
+			DocTags:     []string{"diagnostic"},
 			DocSummary:  "Detects usage of `len` when result is obvious or doesn't make sense",
 			DocBefore:   "len(arr) <= 0",
 			DocAfter:    "len(arr) == 0",
@@ -493,21 +493,45 @@ var PrecompiledRules = &ir.File{
 					},
 				},
 				{
-					Line:           164,
-					SyntaxPatterns: []ir.PatternString{{Line: 164, Value: "len($s) == 0"}},
+					Line:           163,
+					SyntaxPatterns: []ir.PatternString{{Line: 163, Value: "len($s) > 0"}},
+					ReportTemplate: "replace `$$` with `$s != \"\"`",
+					WhereExpr: ir.FilterExpr{
+						Line:  164,
+						Op:    ir.FilterVarTypeIsOp,
+						Src:   "m[\"s\"].Type.Is(`string`)",
+						Value: "s",
+						Args:  []ir.FilterExpr{{Line: 164, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
+					},
+				},
+				{
+					Line:           167,
+					SyntaxPatterns: []ir.PatternString{{Line: 167, Value: "len($s) == 0"}},
+					ReportTemplate: "replace `$$` with `$s == \"\"`",
+					WhereExpr: ir.FilterExpr{
+						Line:  168,
+						Op:    ir.FilterVarTypeIsOp,
+						Src:   "m[\"s\"].Type.Is(`string`)",
+						Value: "s",
+						Args:  []ir.FilterExpr{{Line: 168, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
+					},
+				},
+				{
+					Line:           170,
+					SyntaxPatterns: []ir.PatternString{{Line: 170, Value: "len($s) <= 0"}},
 					ReportTemplate: "replace `$$` with `$s == \"\"`",
 					WhereExpr: ir.FilterExpr{
-						Line:  165,
+						Line:  171,
 						Op:    ir.FilterVarTypeIsOp,
 						Src:   "m[\"s\"].Type.Is(`string`)",
 						Value: "s",
-						Args:  []ir.FilterExpr{{Line: 165, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
+						Args:  []ir.FilterExpr{{Line: 171, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
 					},
 				},
 			},
 		},
 		{
-			Line:        173,
+			Line:        179,
 			Name:        "stringXbytes",
 			MatcherName: "m",
 			DocTags:     []string{"performance"},
@@ -516,180 +540,180 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "copy(b, s)",
 			Rules: []ir.Rule{
 				{
-					Line:           174,
-					SyntaxPatterns: []ir.PatternString{{Line: 174, Value: "copy($_, []byte($s))"}},
+					Line:           180,
+					SyntaxPatterns: []ir.PatternString{{Line: 180, Value: "copy($_, []byte($s))"}},
 					ReportTemplate: "can simplify `[]byte($s)` to `$s`",
 				},
 				{
-					Line:            176,
-					SyntaxPatterns:  []ir.PatternString{{Line: 176, Value: "string($b) == \"\""}},
+					Line:            182,
+					SyntaxPatterns:  []ir.PatternString{{Line: 182, Value: "string($b) == \"\""}},
 					ReportTemplate:  "suggestion: len($b) == 0",
 					SuggestTemplate: "len($b) == 0",
 					WhereExpr: ir.FilterExpr{
-						Line:  176,
+						Line:  182,
 						Op:    ir.FilterVarTypeIsOp,
 						Src:   "m[\"b\"].Type.Is(`[]byte`)",
 						Value: "b",
-						Args:  []ir.FilterExpr{{Line: 176, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
+						Args:  []ir.FilterExpr{{Line: 182, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
 					},
 				},
 				{
-					Line:            177,
-					SyntaxPatterns:  []ir.PatternString{{Line: 177, Value: "string($b) != \"\""}},
+					Line:            183,
+					SyntaxPatterns:  []ir.PatternString{{Line: 183, Value: "string($b) != \"\""}},
 					ReportTemplate:  "suggestion: len($b) != 0",
 					SuggestTemplate: "len($b) != 0",
 					WhereExpr: ir.FilterExpr{
-						Line:  177,
+						Line:  183,
 						Op:    ir.FilterVarTypeIsOp,
 						Src:   "m[\"b\"].Type.Is(`[]byte`)",
 						Value: "b",
-						Args:  []ir.FilterExpr{{Line: 177, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
+						Args:  []ir.FilterExpr{{Line: 183, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
 					},
 				},
 				{
-					Line:            179,
-					SyntaxPatterns:  []ir.PatternString{{Line: 179, Value: "len(string($b))"}},
+					Line:            185,
+					SyntaxPatterns:  []ir.PatternString{{Line: 185, Value: "len(string($b))"}},
 					ReportTemplate:  "suggestion: len($b)",
 					SuggestTemplate: "len($b)",
 					WhereExpr: ir.FilterExpr{
-						Line:  179,
+						Line:  185,
 						Op:    ir.FilterVarTypeIsOp,
 						Src:   "m[\"b\"].Type.Is(`[]byte`)",
 						Value: "b",
-						Args:  []ir.FilterExpr{{Line: 179, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
+						Args:  []ir.FilterExpr{{Line: 185, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
 					},
 				},
 				{
-					Line:            181,
-					SyntaxPatterns:  []ir.PatternString{{Line: 181, Value: "string($x) == string($y)"}},
+					Line:            187,
+					SyntaxPatterns:  []ir.PatternString{{Line: 187, Value: "string($x) == string($y)"}},
 					ReportTemplate:  "suggestion: bytes.Equal($x, $y)",
 					SuggestTemplate: "bytes.Equal($x, $y)",
 					WhereExpr: ir.FilterExpr{
-						Line: 182,
+						Line: 188,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"x\"].Type.Is(`[]byte`) && m[\"y\"].Type.Is(`[]byte`)",
 						Args: []ir.FilterExpr{
 							{
-								Line:  182,
+								Line:  188,
 								Op:    ir.FilterVarTypeIsOp,
 								Src:   "m[\"x\"].Type.Is(`[]byte`)",
 								Value: "x",
-								Args:  []ir.FilterExpr{{Line: 182, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
+								Args:  []ir.FilterExpr{{Line: 188, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
 							},
 							{
-								Line:  182,
+								Line:  188,
 								Op:    ir.FilterVarTypeIsOp,
 								Src:   "m[\"y\"].Type.Is(`[]byte`)",
 								Value: "y",
-								Args:  []ir.FilterExpr{{Line: 182, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
+								Args:  []ir.FilterExpr{{Line: 188, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
 							},
 						},
 					},
 				},
 				{
-					Line:            185,
-					SyntaxPatterns:  []ir.PatternString{{Line: 185, Value: "string($x) != string($y)"}},
+					Line:            191,
+					SyntaxPatterns:  []ir.PatternString{{Line: 191, Value: "string($x) != string($y)"}},
 					ReportTemplate:  "suggestion: !bytes.Equal($x, $y)",
 					SuggestTemplate: "!bytes.Equal($x, $y)",
 					WhereExpr: ir.FilterExpr{
-						Line: 186,
+						Line: 192,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"x\"].Type.Is(`[]byte`) && m[\"y\"].Type.Is(`[]byte`)",
 						Args: []ir.FilterExpr{
 							{
-								Line:  186,
+								Line:  192,
 								Op:    ir.FilterVarTypeIsOp,
 								Src:   "m[\"x\"].Type.Is(`[]byte`)",
 								Value: "x",
-								Args:  []ir.FilterExpr{{Line: 186, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
+								Args:  []ir.FilterExpr{{Line: 192, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
 							},
 							{
-								Line:  186,
+								Line:  192,
 								Op:    ir.FilterVarTypeIsOp,
 								Src:   "m[\"y\"].Type.Is(`[]byte`)",
 								Value: "y",
-								Args:  []ir.FilterExpr{{Line: 186, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
+								Args:  []ir.FilterExpr{{Line: 192, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}},
 							},
 						},
 					},
 				},
 				{
-					Line:            189,
-					SyntaxPatterns:  []ir.PatternString{{Line: 189, Value: "$re.Match([]byte($s))"}},
+					Line:            195,
+					SyntaxPatterns:  []ir.PatternString{{Line: 195, Value: "$re.Match([]byte($s))"}},
 					ReportTemplate:  "suggestion: $re.MatchString($s)",
 					SuggestTemplate: "$re.MatchString($s)",
 					WhereExpr: ir.FilterExpr{
-						Line: 190,
+						Line: 196,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)",
 						Args: []ir.FilterExpr{
 							{
-								Line:  190,
+								Line:  196,
 								Op:    ir.FilterVarTypeIsOp,
 								Src:   "m[\"re\"].Type.Is(`*regexp.Regexp`)",
 								Value: "re",
-								Args:  []ir.FilterExpr{{Line: 190, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}},
+								Args:  []ir.FilterExpr{{Line: 196, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}},
 							},
 							{
-								Line:  190,
+								Line:  196,
 								Op:    ir.FilterVarTypeIsOp,
 								Src:   "m[\"s\"].Type.Is(`string`)",
 								Value: "s",
-								Args:  []ir.FilterExpr{{Line: 190, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
+								Args:  []ir.FilterExpr{{Line: 196, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
 							},
 						},
 					},
 				},
 				{
-					Line:            193,
-					SyntaxPatterns:  []ir.PatternString{{Line: 193, Value: "$re.FindIndex([]byte($s))"}},
+					Line:            199,
+					SyntaxPatterns:  []ir.PatternString{{Line: 199, Value: "$re.FindIndex([]byte($s))"}},
 					ReportTemplate:  "suggestion: $re.FindStringIndex($s)",
 					SuggestTemplate: "$re.FindStringIndex($s)",
 					WhereExpr: ir.FilterExpr{
-						Line: 194,
+						Line: 200,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)",
 						Args: []ir.FilterExpr{
 							{
-								Line:  194,
+								Line:  200,
 								Op:    ir.FilterVarTypeIsOp,
 								Src:   "m[\"re\"].Type.Is(`*regexp.Regexp`)",
 								Value: "re",
-								Args:  []ir.FilterExpr{{Line: 194, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}},
+								Args:  []ir.FilterExpr{{Line: 200, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}},
 							},
 							{
-								Line:  194,
+								Line:  200,
 								Op:    ir.FilterVarTypeIsOp,
 								Src:   "m[\"s\"].Type.Is(`string`)",
 								Value: "s",
-								Args:  []ir.FilterExpr{{Line: 194, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
+								Args:  []ir.FilterExpr{{Line: 200, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
 							},
 						},
 					},
 				},
 				{
-					Line:            197,
-					SyntaxPatterns:  []ir.PatternString{{Line: 197, Value: "$re.FindAllIndex([]byte($s), $n)"}},
+					Line:            203,
+					SyntaxPatterns:  []ir.PatternString{{Line: 203, Value: "$re.FindAllIndex([]byte($s), $n)"}},
 					ReportTemplate:  "suggestion: $re.FindAllStringIndex($s, $n)",
 					SuggestTemplate: "$re.FindAllStringIndex($s, $n)",
 					WhereExpr: ir.FilterExpr{
-						Line: 198,
+						Line: 204,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)",
 						Args: []ir.FilterExpr{
 							{
-								Line:  198,
+								Line:  204,
 								Op:    ir.FilterVarTypeIsOp,
 								Src:   "m[\"re\"].Type.Is(`*regexp.Regexp`)",
 								Value: "re",
-								Args:  []ir.FilterExpr{{Line: 198, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}},
+								Args:  []ir.FilterExpr{{Line: 204, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}},
 							},
 							{
-								Line:  198,
+								Line:  204,
 								Op:    ir.FilterVarTypeIsOp,
 								Src:   "m[\"s\"].Type.Is(`string`)",
 								Value: "s",
-								Args:  []ir.FilterExpr{{Line: 198, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
+								Args:  []ir.FilterExpr{{Line: 204, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
 							},
 						},
 					},
@@ -697,7 +721,7 @@ var PrecompiledRules = &ir.File{
 			},
 		},
 		{
-			Line:        207,
+			Line:        213,
 			Name:        "indexAlloc",
 			MatcherName: "m",
 			DocTags:     []string{"performance"},
@@ -706,22 +730,22 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "bytes.Index(x, []byte(y))",
 			DocNote:     "See Go issue for details: https://github.com/golang/go/issues/25864",
 			Rules: []ir.Rule{{
-				Line:           208,
-				SyntaxPatterns: []ir.PatternString{{Line: 208, Value: "strings.Index(string($x), $y)"}},
+				Line:           214,
+				SyntaxPatterns: []ir.PatternString{{Line: 214, Value: "strings.Index(string($x), $y)"}},
 				ReportTemplate: "consider replacing $$ with bytes.Index($x, []byte($y))",
 				WhereExpr: ir.FilterExpr{
-					Line: 209,
+					Line: 215,
 					Op:   ir.FilterAndOp,
 					Src:  "m[\"x\"].Pure && m[\"y\"].Pure",
 					Args: []ir.FilterExpr{
-						{Line: 209, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
-						{Line: 209, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"},
+						{Line: 215, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+						{Line: 215, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"},
 					},
 				},
 			}},
 		},
 		{
-			Line:        217,
+			Line:        223,
 			Name:        "wrapperFunc",
 			MatcherName: "m",
 			DocTags:     []string{"style"},
@@ -730,169 +754,169 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "wg.Done()",
 			Rules: []ir.Rule{
 				{
-					Line:           218,
-					SyntaxPatterns: []ir.PatternString{{Line: 218, Value: "$wg.Add(-1)"}},
+					Line:           224,
+					SyntaxPatterns: []ir.PatternString{{Line: 224, Value: "$wg.Add(-1)"}},
 					ReportTemplate: "use WaitGroup.Done method in `$$`",
 					WhereExpr: ir.FilterExpr{
-						Line:  219,
+						Line:  225,
 						Op:    ir.FilterVarTypeIsOp,
 						Src:   "m[\"wg\"].Type.Is(`sync.WaitGroup`)",
 						Value: "wg",
-						Args:  []ir.FilterExpr{{Line: 219, Op: ir.FilterStringOp, Src: "`sync.WaitGroup`", Value: "sync.WaitGroup"}},
+						Args:  []ir.FilterExpr{{Line: 225, Op: ir.FilterStringOp, Src: "`sync.WaitGroup`", Value: "sync.WaitGroup"}},
 					},
 				},
 				{
-					Line:           222,
-					SyntaxPatterns: []ir.PatternString{{Line: 222, Value: "$buf.Truncate(0)"}},
+					Line:           228,
+					SyntaxPatterns: []ir.PatternString{{Line: 228, Value: "$buf.Truncate(0)"}},
 					ReportTemplate: "use Buffer.Reset method in `$$`",
 					WhereExpr: ir.FilterExpr{
-						Line:  223,
+						Line:  229,
 						Op:    ir.FilterVarTypeIsOp,
 						Src:   "m[\"buf\"].Type.Is(`bytes.Buffer`)",
 						Value: "buf",
-						Args:  []ir.FilterExpr{{Line: 223, Op: ir.FilterStringOp, Src: "`bytes.Buffer`", Value: "bytes.Buffer"}},
+						Args:  []ir.FilterExpr{{Line: 229, Op: ir.FilterStringOp, Src: "`bytes.Buffer`", Value: "bytes.Buffer"}},
 					},
 				},
 				{
-					Line:           226,
-					SyntaxPatterns: []ir.PatternString{{Line: 226, Value: "http.HandlerFunc(http.NotFound)"}},
+					Line:           232,
+					SyntaxPatterns: []ir.PatternString{{Line: 232, Value: "http.HandlerFunc(http.NotFound)"}},
 					ReportTemplate: "use http.NotFoundHandler method in `$$`",
 				},
 				{
-					Line:           228,
-					SyntaxPatterns: []ir.PatternString{{Line: 228, Value: "strings.SplitN($_, $_, -1)"}},
+					Line:           234,
+					SyntaxPatterns: []ir.PatternString{{Line: 234, Value: "strings.SplitN($_, $_, -1)"}},
 					ReportTemplate: "use strings.Split method in `$$`",
 				},
 				{
-					Line:           229,
-					SyntaxPatterns: []ir.PatternString{{Line: 229, Value: "strings.Replace($_, $_, $_, -1)"}},
+					Line:           235,
+					SyntaxPatterns: []ir.PatternString{{Line: 235, Value: "strings.Replace($_, $_, $_, -1)"}},
 					ReportTemplate: "use strings.ReplaceAll method in `$$`",
 				},
 				{
-					Line:           230,
-					SyntaxPatterns: []ir.PatternString{{Line: 230, Value: "strings.Map(unicode.ToTitle, $_)"}},
+					Line:           236,
+					SyntaxPatterns: []ir.PatternString{{Line: 236, Value: "strings.Map(unicode.ToTitle, $_)"}},
 					ReportTemplate: "use strings.ToTitle method in `$$`",
 				},
 				{
-					Line: 231,
+					Line: 237,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 231, Value: "strings.Index($s1, $s2) >= 0"},
-						{Line: 231, Value: "strings.Index($s1, $s2) != -1"},
+						{Line: 237, Value: "strings.Index($s1, $s2) >= 0"},
+						{Line: 237, Value: "strings.Index($s1, $s2) != -1"},
 					},
 					ReportTemplate:  "suggestion: strings.Contains($s1, $s2)",
 					SuggestTemplate: "strings.Contains($s1, $s2)",
 				},
 				{
-					Line: 232,
+					Line: 238,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 232, Value: "strings.IndexAny($s1, $s2) >= 0"},
-						{Line: 232, Value: "strings.IndexAny($s1, $s2) != -1"},
+						{Line: 238, Value: "strings.IndexAny($s1, $s2) >= 0"},
+						{Line: 238, Value: "strings.IndexAny($s1, $s2) != -1"},
 					},
 					ReportTemplate:  "suggestion: strings.ContainsAny($s1, $s2)",
 					SuggestTemplate: "strings.ContainsAny($s1, $s2)",
 				},
 				{
-					Line: 233,
+					Line: 239,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 233, Value: "strings.IndexRune($s1, $s2) >= 0"},
-						{Line: 233, Value: "strings.IndexRune($s1, $s2) != -1"},
+						{Line: 239, Value: "strings.IndexRune($s1, $s2) >= 0"},
+						{Line: 239, Value: "strings.IndexRune($s1, $s2) != -1"},
 					},
 					ReportTemplate:  "suggestion: strings.ContainsRune($s1, $s2)",
 					SuggestTemplate: "strings.ContainsRune($s1, $s2)",
 				},
 				{
-					Line: 235,
+					Line: 241,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 235, Value: "$i := strings.Index($s, $sep); $*_; $x, $y = $s[:$i], $s[$i+1:]"},
-						{Line: 236, Value: "$i := strings.Index($s, $sep); $*_; $x = $s[:$i]; $*_; $y = $s[$i+1:]"},
+						{Line: 241, Value: "$i := strings.Index($s, $sep); $*_; $x, $y = $s[:$i], $s[$i+1:]"},
+						{Line: 242, Value: "$i := strings.Index($s, $sep); $*_; $x = $s[:$i]; $*_; $y = $s[$i+1:]"},
 					},
 					ReportTemplate:  "suggestion: $x, $y, _ = strings.Cut($s, $sep)",
 					SuggestTemplate: "$x, $y, _ = strings.Cut($s, $sep)",
 					WhereExpr: ir.FilterExpr{
-						Line:  237,
+						Line:  243,
 						Op:    ir.FilterGoVersionGreaterEqThanOp,
 						Src:   "m.GoVersion().GreaterEqThan(\"1.18\")",
 						Value: "1.18",
 					},
 				},
 				{
-					Line: 240,
+					Line: 246,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 241, Value: "if $i := strings.Index($s, $sep); $i != -1 { $*_; $x, $y = $s[:$i], $s[$i+1:]; $*_ }"},
-						{Line: 242, Value: "if $i := strings.Index($s, $sep); $i != -1 { $*_; $x = $s[:$i]; $*_; $y = $s[$i+1:]; $*_ }"},
-						{Line: 243, Value: "if $i := strings.Index($s, $sep); $i >= 0 { $*_; $x, $y = $s[:$i], $s[$i+1:]; $*_ }"},
-						{Line: 244, Value: "if $i := strings.Index($s, $sep); $i >= 0 { $*_; $x = $s[:$i]; $*_; $y = $s[$i+1:]; $*_ }"},
+						{Line: 247, Value: "if $i := strings.Index($s, $sep); $i != -1 { $*_; $x, $y = $s[:$i], $s[$i+1:]; $*_ }"},
+						{Line: 248, Value: "if $i := strings.Index($s, $sep); $i != -1 { $*_; $x = $s[:$i]; $*_; $y = $s[$i+1:]; $*_ }"},
+						{Line: 249, Value: "if $i := strings.Index($s, $sep); $i >= 0 { $*_; $x, $y = $s[:$i], $s[$i+1:]; $*_ }"},
+						{Line: 250, Value: "if $i := strings.Index($s, $sep); $i >= 0 { $*_; $x = $s[:$i]; $*_; $y = $s[$i+1:]; $*_ }"},
 					},
 					ReportTemplate:  "suggestion: if $x, $y, ok = strings.Cut($s, $sep); ok { ... }",
 					SuggestTemplate: "if $x, $y, ok = strings.Cut($s, $sep); ok { ... }",
 					WhereExpr: ir.FilterExpr{
-						Line:  245,
+						Line:  251,
 						Op:    ir.FilterGoVersionGreaterEqThanOp,
 						Src:   "m.GoVersion().GreaterEqThan(\"1.18\")",
 						Value: "1.18",
 					},
 				},
 				{
-					Line:           248,
-					SyntaxPatterns: []ir.PatternString{{Line: 248, Value: "bytes.SplitN(b, []byte(\".\"), -1)"}},
+					Line:           254,
+					SyntaxPatterns: []ir.PatternString{{Line: 254, Value: "bytes.SplitN(b, []byte(\".\"), -1)"}},
 					ReportTemplate: "use bytes.Split method in `$$`",
 				},
 				{
-					Line:           249,
-					SyntaxPatterns: []ir.PatternString{{Line: 249, Value: "bytes.Replace($_, $_, $_, -1)"}},
+					Line:           255,
+					SyntaxPatterns: []ir.PatternString{{Line: 255, Value: "bytes.Replace($_, $_, $_, -1)"}},
 					ReportTemplate: "use bytes.ReplaceAll method in `$$`",
 				},
 				{
-					Line:           250,
-					SyntaxPatterns: []ir.PatternString{{Line: 250, Value: "bytes.Map(unicode.ToUpper, $_)"}},
+					Line:           256,
+					SyntaxPatterns: []ir.PatternString{{Line: 256, Value: "bytes.Map(unicode.ToUpper, $_)"}},
 					ReportTemplate: "use bytes.ToUpper method in `$$`",
 				},
 				{
-					Line:           251,
-					SyntaxPatterns: []ir.PatternString{{Line: 251, Value: "bytes.Map(unicode.ToLower, $_)"}},
+					Line:           257,
+					SyntaxPatterns: []ir.PatternString{{Line: 257, Value: "bytes.Map(unicode.ToLower, $_)"}},
 					ReportTemplate: "use bytes.ToLower method in `$$`",
 				},
 				{
-					Line:           252,
-					SyntaxPatterns: []ir.PatternString{{Line: 252, Value: "bytes.Map(unicode.ToTitle, $_)"}},
+					Line:           258,
+					SyntaxPatterns: []ir.PatternString{{Line: 258, Value: "bytes.Map(unicode.ToTitle, $_)"}},
 					ReportTemplate: "use bytes.ToTitle method in `$$`",
 				},
 				{
-					Line: 253,
+					Line: 259,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 253, Value: "bytes.Index($b1, $b2) >= 0"},
-						{Line: 253, Value: "bytes.Index($b1, $b2) != -1"},
+						{Line: 259, Value: "bytes.Index($b1, $b2) >= 0"},
+						{Line: 259, Value: "bytes.Index($b1, $b2) != -1"},
 					},
 					ReportTemplate:  "suggestion: bytes.Contains($b1, $b2)",
 					SuggestTemplate: "bytes.Contains($b1, $b2)",
 				},
 				{
-					Line: 254,
+					Line: 260,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 254, Value: "bytes.IndexAny($b1, $b2) >= 0"},
-						{Line: 254, Value: "bytes.IndexAny($b1, $b2) != -1"},
+						{Line: 260, Value: "bytes.IndexAny($b1, $b2) >= 0"},
+						{Line: 260, Value: "bytes.IndexAny($b1, $b2) != -1"},
 					},
 					ReportTemplate:  "suggestion: bytes.ContainsAny($b1, $b2)",
 					SuggestTemplate: "bytes.ContainsAny($b1, $b2)",
 				},
 				{
-					Line: 255,
+					Line: 261,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 255, Value: "bytes.IndexRune($b1, $b2) >= 0"},
-						{Line: 255, Value: "bytes.IndexRune($b1, $b2) != -1"},
+						{Line: 261, Value: "bytes.IndexRune($b1, $b2) >= 0"},
+						{Line: 261, Value: "bytes.IndexRune($b1, $b2) != -1"},
 					},
 					ReportTemplate:  "suggestion: bytes.ContainsRune($b1, $b2)",
 					SuggestTemplate: "bytes.ContainsRune($b1, $b2)",
 				},
 				{
-					Line:           257,
-					SyntaxPatterns: []ir.PatternString{{Line: 257, Value: "draw.DrawMask($_, $_, $_, $_, nil, image.Point{}, $_)"}},
+					Line:           263,
+					SyntaxPatterns: []ir.PatternString{{Line: 263, Value: "draw.DrawMask($_, $_, $_, $_, nil, image.Point{}, $_)"}},
 					ReportTemplate: "use draw.Draw method in `$$`",
 				},
 			},
 		},
 		{
-			Line:        265,
+			Line:        271,
 			Name:        "regexpMust",
 			MatcherName: "m",
 			DocTags:     []string{"style"},
@@ -901,22 +925,22 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "re := regexp.MustCompile(\"const pattern\")",
 			Rules: []ir.Rule{
 				{
-					Line:           266,
-					SyntaxPatterns: []ir.PatternString{{Line: 266, Value: "regexp.Compile($pat)"}},
+					Line:           272,
+					SyntaxPatterns: []ir.PatternString{{Line: 272, Value: "regexp.Compile($pat)"}},
 					ReportTemplate: "for const patterns like $pat, use regexp.MustCompile",
 					WhereExpr: ir.FilterExpr{
-						Line:  267,
+						Line:  273,
 						Op:    ir.FilterVarConstOp,
 						Src:   "m[\"pat\"].Const",
 						Value: "pat",
 					},
 				},
 				{
-					Line:           270,
-					SyntaxPatterns: []ir.PatternString{{Line: 270, Value: "regexp.CompilePOSIX($pat)"}},
+					Line:           276,
+					SyntaxPatterns: []ir.PatternString{{Line: 276, Value: "regexp.CompilePOSIX($pat)"}},
 					ReportTemplate: "for const patterns like $pat, use regexp.MustCompilePOSIX",
 					WhereExpr: ir.FilterExpr{
-						Line:  271,
+						Line:  277,
 						Op:    ir.FilterVarConstOp,
 						Src:   "m[\"pat\"].Const",
 						Value: "pat",
@@ -925,7 +949,7 @@ var PrecompiledRules = &ir.File{
 			},
 		},
 		{
-			Line:        279,
+			Line:        285,
 			Name:        "badCall",
 			MatcherName: "m",
 			DocTags:     []string{"diagnostic"},
@@ -934,22 +958,22 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "strings.Replace(s, from, to, -1)",
 			Rules: []ir.Rule{
 				{
-					Line:           280,
-					SyntaxPatterns: []ir.PatternString{{Line: 280, Value: "strings.Replace($_, $_, $_, $zero)"}},
+					Line:           286,
+					SyntaxPatterns: []ir.PatternString{{Line: 286, Value: "strings.Replace($_, $_, $_, $zero)"}},
 					ReportTemplate: "suspicious arg 0, probably meant -1",
 					WhereExpr: ir.FilterExpr{
-						Line: 281,
+						Line: 287,
 						Op:   ir.FilterEqOp,
 						Src:  "m[\"zero\"].Value.Int() == 0",
 						Args: []ir.FilterExpr{
 							{
-								Line:  281,
+								Line:  287,
 								Op:    ir.FilterVarValueIntOp,
 								Src:   "m[\"zero\"].Value.Int()",
 								Value: "zero",
 							},
 							{
-								Line:  281,
+								Line:  287,
 								Op:    ir.FilterIntOp,
 								Src:   "0",
 								Value: int64(0),
@@ -959,22 +983,22 @@ var PrecompiledRules = &ir.File{
 					LocationVar: "zero",
 				},
 				{
-					Line:           283,
-					SyntaxPatterns: []ir.PatternString{{Line: 283, Value: "bytes.Replace($_, $_, $_, $zero)"}},
+					Line:           289,
+					SyntaxPatterns: []ir.PatternString{{Line: 289, Value: "bytes.Replace($_, $_, $_, $zero)"}},
 					ReportTemplate: "suspicious arg 0, probably meant -1",
 					WhereExpr: ir.FilterExpr{
-						Line: 284,
+						Line: 290,
 						Op:   ir.FilterEqOp,
 						Src:  "m[\"zero\"].Value.Int() == 0",
 						Args: []ir.FilterExpr{
 							{
-								Line:  284,
+								Line:  290,
 								Op:    ir.FilterVarValueIntOp,
 								Src:   "m[\"zero\"].Value.Int()",
 								Value: "zero",
 							},
 							{
-								Line:  284,
+								Line:  290,
 								Op:    ir.FilterIntOp,
 								Src:   "0",
 								Value: int64(0),
@@ -984,22 +1008,22 @@ var PrecompiledRules = &ir.File{
 					LocationVar: "zero",
 				},
 				{
-					Line:           287,
-					SyntaxPatterns: []ir.PatternString{{Line: 287, Value: "strings.SplitN($_, $_, $zero)"}},
+					Line:           293,
+					SyntaxPatterns: []ir.PatternString{{Line: 293, Value: "strings.SplitN($_, $_, $zero)"}},
 					ReportTemplate: "suspicious arg 0, probably meant -1",
 					WhereExpr: ir.FilterExpr{
-						Line: 288,
+						Line: 294,
 						Op:   ir.FilterEqOp,
 						Src:  "m[\"zero\"].Value.Int() == 0",
 						Args: []ir.FilterExpr{
 							{
-								Line:  288,
+								Line:  294,
 								Op:    ir.FilterVarValueIntOp,
 								Src:   "m[\"zero\"].Value.Int()",
 								Value: "zero",
 							},
 							{
-								Line:  288,
+								Line:  294,
 								Op:    ir.FilterIntOp,
 								Src:   "0",
 								Value: int64(0),
@@ -1009,22 +1033,22 @@ var PrecompiledRules = &ir.File{
 					LocationVar: "zero",
 				},
 				{
-					Line:           290,
-					SyntaxPatterns: []ir.PatternString{{Line: 290, Value: "bytes.SplitN($_, $_, $zero)"}},
+					Line:           296,
+					SyntaxPatterns: []ir.PatternString{{Line: 296, Value: "bytes.SplitN($_, $_, $zero)"}},
 					ReportTemplate: "suspicious arg 0, probably meant -1",
 					WhereExpr: ir.FilterExpr{
-						Line: 291,
+						Line: 297,
 						Op:   ir.FilterEqOp,
 						Src:  "m[\"zero\"].Value.Int() == 0",
 						Args: []ir.FilterExpr{
 							{
-								Line:  291,
+								Line:  297,
 								Op:    ir.FilterVarValueIntOp,
 								Src:   "m[\"zero\"].Value.Int()",
 								Value: "zero",
 							},
 							{
-								Line:  291,
+								Line:  297,
 								Op:    ir.FilterIntOp,
 								Src:   "0",
 								Value: int64(0),
@@ -1034,19 +1058,19 @@ var PrecompiledRules = &ir.File{
 					LocationVar: "zero",
 				},
 				{
-					Line:           294,
-					SyntaxPatterns: []ir.PatternString{{Line: 294, Value: "append($_)"}},
+					Line:           300,
+					SyntaxPatterns: []ir.PatternString{{Line: 300, Value: "append($_)"}},
 					ReportTemplate: "no-op append call, probably missing arguments",
 				},
 				{
-					Line:           296,
-					SyntaxPatterns: []ir.PatternString{{Line: 296, Value: "filepath.Join($_)"}},
+					Line:           302,
+					SyntaxPatterns: []ir.PatternString{{Line: 302, Value: "filepath.Join($_)"}},
 					ReportTemplate: "suspicious Join on 1 argument",
 				},
 			},
 		},
 		{
-			Line:        303,
+			Line:        309,
 			Name:        "assignOp",
 			MatcherName: "m",
 			DocTags:     []string{"style"},
@@ -1055,87 +1079,87 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "x *= 2",
 			Rules: []ir.Rule{
 				{
-					Line:           304,
-					SyntaxPatterns: []ir.PatternString{{Line: 304, Value: "$x = $x + 1"}},
+					Line:           310,
+					SyntaxPatterns: []ir.PatternString{{Line: 310, Value: "$x = $x + 1"}},
 					ReportTemplate: "replace `$$` with `$x++`",
-					WhereExpr:      ir.FilterExpr{Line: 304, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 310, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 				{
-					Line:           305,
-					SyntaxPatterns: []ir.PatternString{{Line: 305, Value: "$x = $x - 1"}},
+					Line:           311,
+					SyntaxPatterns: []ir.PatternString{{Line: 311, Value: "$x = $x - 1"}},
 					ReportTemplate: "replace `$$` with `$x--`",
-					WhereExpr:      ir.FilterExpr{Line: 305, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 311, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 				{
-					Line:           307,
-					SyntaxPatterns: []ir.PatternString{{Line: 307, Value: "$x = $x + $y"}},
+					Line:           313,
+					SyntaxPatterns: []ir.PatternString{{Line: 313, Value: "$x = $x + $y"}},
 					ReportTemplate: "replace `$$` with `$x += $y`",
-					WhereExpr:      ir.FilterExpr{Line: 307, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 313, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 				{
-					Line:           308,
-					SyntaxPatterns: []ir.PatternString{{Line: 308, Value: "$x = $x - $y"}},
+					Line:           314,
+					SyntaxPatterns: []ir.PatternString{{Line: 314, Value: "$x = $x - $y"}},
 					ReportTemplate: "replace `$$` with `$x -= $y`",
-					WhereExpr:      ir.FilterExpr{Line: 308, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 314, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 				{
-					Line:           310,
-					SyntaxPatterns: []ir.PatternString{{Line: 310, Value: "$x = $x * $y"}},
+					Line:           316,
+					SyntaxPatterns: []ir.PatternString{{Line: 316, Value: "$x = $x * $y"}},
 					ReportTemplate: "replace `$$` with `$x *= $y`",
-					WhereExpr:      ir.FilterExpr{Line: 310, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 316, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 				{
-					Line:           311,
-					SyntaxPatterns: []ir.PatternString{{Line: 311, Value: "$x = $x / $y"}},
+					Line:           317,
+					SyntaxPatterns: []ir.PatternString{{Line: 317, Value: "$x = $x / $y"}},
 					ReportTemplate: "replace `$$` with `$x /= $y`",
-					WhereExpr:      ir.FilterExpr{Line: 311, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 317, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 				{
-					Line:           312,
-					SyntaxPatterns: []ir.PatternString{{Line: 312, Value: "$x = $x % $y"}},
+					Line:           318,
+					SyntaxPatterns: []ir.PatternString{{Line: 318, Value: "$x = $x % $y"}},
 					ReportTemplate: "replace `$$` with `$x %= $y`",
-					WhereExpr:      ir.FilterExpr{Line: 312, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 318, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 				{
-					Line:           313,
-					SyntaxPatterns: []ir.PatternString{{Line: 313, Value: "$x = $x & $y"}},
+					Line:           319,
+					SyntaxPatterns: []ir.PatternString{{Line: 319, Value: "$x = $x & $y"}},
 					ReportTemplate: "replace `$$` with `$x &= $y`",
-					WhereExpr:      ir.FilterExpr{Line: 313, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 319, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 				{
-					Line:           314,
-					SyntaxPatterns: []ir.PatternString{{Line: 314, Value: "$x = $x | $y"}},
+					Line:           320,
+					SyntaxPatterns: []ir.PatternString{{Line: 320, Value: "$x = $x | $y"}},
 					ReportTemplate: "replace `$$` with `$x |= $y`",
-					WhereExpr:      ir.FilterExpr{Line: 314, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 320, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 				{
-					Line:           315,
-					SyntaxPatterns: []ir.PatternString{{Line: 315, Value: "$x = $x ^ $y"}},
+					Line:           321,
+					SyntaxPatterns: []ir.PatternString{{Line: 321, Value: "$x = $x ^ $y"}},
 					ReportTemplate: "replace `$$` with `$x ^= $y`",
-					WhereExpr:      ir.FilterExpr{Line: 315, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 321, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 				{
-					Line:           316,
-					SyntaxPatterns: []ir.PatternString{{Line: 316, Value: "$x = $x << $y"}},
+					Line:           322,
+					SyntaxPatterns: []ir.PatternString{{Line: 322, Value: "$x = $x << $y"}},
 					ReportTemplate: "replace `$$` with `$x <<= $y`",
-					WhereExpr:      ir.FilterExpr{Line: 316, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 322, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 				{
-					Line:           317,
-					SyntaxPatterns: []ir.PatternString{{Line: 317, Value: "$x = $x >> $y"}},
+					Line:           323,
+					SyntaxPatterns: []ir.PatternString{{Line: 323, Value: "$x = $x >> $y"}},
 					ReportTemplate: "replace `$$` with `$x >>= $y`",
-					WhereExpr:      ir.FilterExpr{Line: 317, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 323, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 				{
-					Line:           318,
-					SyntaxPatterns: []ir.PatternString{{Line: 318, Value: "$x = $x &^ $y"}},
+					Line:           324,
+					SyntaxPatterns: []ir.PatternString{{Line: 324, Value: "$x = $x &^ $y"}},
 					ReportTemplate: "replace `$$` with `$x &^= $y`",
-					WhereExpr:      ir.FilterExpr{Line: 318, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 324, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 			},
 		},
 		{
-			Line:        325,
+			Line:        331,
 			Name:        "preferWriteByte",
 			MatcherName: "m",
 			DocTags:     []string{"performance", "experimental", "opinionated"},
@@ -1143,45 +1167,45 @@ var PrecompiledRules = &ir.File{
 			DocBefore:   "w.WriteRune('\\n')",
 			DocAfter:    "w.WriteByte('\\n')",
 			Rules: []ir.Rule{{
-				Line:           329,
-				SyntaxPatterns: []ir.PatternString{{Line: 329, Value: "$w.WriteRune($c)"}},
+				Line:           335,
+				SyntaxPatterns: []ir.PatternString{{Line: 335, Value: "$w.WriteRune($c)"}},
 				ReportTemplate: "consider writing single byte rune $c with $w.WriteByte($c)",
 				WhereExpr: ir.FilterExpr{
-					Line: 330,
+					Line: 336,
 					Op:   ir.FilterAndOp,
 					Src:  "m[\"w\"].Type.Implements(\"io.ByteWriter\") && (m[\"c\"].Const && m[\"c\"].Value.Int() < runeSelf)",
 					Args: []ir.FilterExpr{
 						{
-							Line:  330,
+							Line:  336,
 							Op:    ir.FilterVarTypeImplementsOp,
 							Src:   "m[\"w\"].Type.Implements(\"io.ByteWriter\")",
 							Value: "w",
-							Args:  []ir.FilterExpr{{Line: 330, Op: ir.FilterStringOp, Src: "\"io.ByteWriter\"", Value: "io.ByteWriter"}},
+							Args:  []ir.FilterExpr{{Line: 336, Op: ir.FilterStringOp, Src: "\"io.ByteWriter\"", Value: "io.ByteWriter"}},
 						},
 						{
-							Line: 330,
+							Line: 336,
 							Op:   ir.FilterAndOp,
 							Src:  "(m[\"c\"].Const && m[\"c\"].Value.Int() < runeSelf)",
 							Args: []ir.FilterExpr{
 								{
-									Line:  330,
+									Line:  336,
 									Op:    ir.FilterVarConstOp,
 									Src:   "m[\"c\"].Const",
 									Value: "c",
 								},
 								{
-									Line: 330,
+									Line: 336,
 									Op:   ir.FilterLtOp,
 									Src:  "m[\"c\"].Value.Int() < runeSelf",
 									Args: []ir.FilterExpr{
 										{
-											Line:  330,
+											Line:  336,
 											Op:    ir.FilterVarValueIntOp,
 											Src:   "m[\"c\"].Value.Int()",
 											Value: "c",
 										},
 										{
-											Line:  330,
+											Line:  336,
 											Op:    ir.FilterIntOp,
 											Src:   "runeSelf",
 											Value: int64(128),
@@ -1195,7 +1219,7 @@ var PrecompiledRules = &ir.File{
 			}},
 		},
 		{
-			Line:        338,
+			Line:        344,
 			Name:        "preferFprint",
 			MatcherName: "m",
 			DocTags:     []string{"performance", "experimental"},
@@ -1204,139 +1228,139 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "fmt.Fprintf(w, \"%x\", 10)",
 			Rules: []ir.Rule{
 				{
-					Line:            339,
-					SyntaxPatterns:  []ir.PatternString{{Line: 339, Value: "$w.Write([]byte(fmt.Sprint($*args)))"}},
+					Line:            345,
+					SyntaxPatterns:  []ir.PatternString{{Line: 345, Value: "$w.Write([]byte(fmt.Sprint($*args)))"}},
 					ReportTemplate:  "fmt.Fprint($w, $args) should be preferred to the $$",
 					SuggestTemplate: "fmt.Fprint($w, $args)",
 					WhereExpr: ir.FilterExpr{
-						Line:  340,
+						Line:  346,
 						Op:    ir.FilterVarTypeImplementsOp,
 						Src:   "m[\"w\"].Type.Implements(\"io.Writer\")",
 						Value: "w",
-						Args:  []ir.FilterExpr{{Line: 340, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}},
+						Args:  []ir.FilterExpr{{Line: 346, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}},
 					},
 				},
 				{
-					Line:            344,
-					SyntaxPatterns:  []ir.PatternString{{Line: 344, Value: "$w.Write([]byte(fmt.Sprintf($*args)))"}},
+					Line:            350,
+					SyntaxPatterns:  []ir.PatternString{{Line: 350, Value: "$w.Write([]byte(fmt.Sprintf($*args)))"}},
 					ReportTemplate:  "fmt.Fprintf($w, $args) should be preferred to the $$",
 					SuggestTemplate: "fmt.Fprintf($w, $args)",
 					WhereExpr: ir.FilterExpr{
-						Line:  345,
+						Line:  351,
 						Op:    ir.FilterVarTypeImplementsOp,
 						Src:   "m[\"w\"].Type.Implements(\"io.Writer\")",
 						Value: "w",
-						Args:  []ir.FilterExpr{{Line: 345, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}},
+						Args:  []ir.FilterExpr{{Line: 351, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}},
 					},
 				},
 				{
-					Line:            349,
-					SyntaxPatterns:  []ir.PatternString{{Line: 349, Value: "$w.Write([]byte(fmt.Sprintln($*args)))"}},
+					Line:            355,
+					SyntaxPatterns:  []ir.PatternString{{Line: 355, Value: "$w.Write([]byte(fmt.Sprintln($*args)))"}},
 					ReportTemplate:  "fmt.Fprintln($w, $args) should be preferred to the $$",
 					SuggestTemplate: "fmt.Fprintln($w, $args)",
 					WhereExpr: ir.FilterExpr{
-						Line:  350,
+						Line:  356,
 						Op:    ir.FilterVarTypeImplementsOp,
 						Src:   "m[\"w\"].Type.Implements(\"io.Writer\")",
 						Value: "w",
-						Args:  []ir.FilterExpr{{Line: 350, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}},
+						Args:  []ir.FilterExpr{{Line: 356, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}},
 					},
 				},
 				{
-					Line:            354,
-					SyntaxPatterns:  []ir.PatternString{{Line: 354, Value: "io.WriteString($w, fmt.Sprint($*args))"}},
+					Line:            360,
+					SyntaxPatterns:  []ir.PatternString{{Line: 360, Value: "io.WriteString($w, fmt.Sprint($*args))"}},
 					ReportTemplate:  "suggestion: fmt.Fprint($w, $args)",
 					SuggestTemplate: "fmt.Fprint($w, $args)",
 				},
 				{
-					Line:            355,
-					SyntaxPatterns:  []ir.PatternString{{Line: 355, Value: "io.WriteString($w, fmt.Sprintf($*args))"}},
+					Line:            361,
+					SyntaxPatterns:  []ir.PatternString{{Line: 361, Value: "io.WriteString($w, fmt.Sprintf($*args))"}},
 					ReportTemplate:  "suggestion: fmt.Fprintf($w, $args)",
 					SuggestTemplate: "fmt.Fprintf($w, $args)",
 				},
 				{
-					Line:            356,
-					SyntaxPatterns:  []ir.PatternString{{Line: 356, Value: "io.WriteString($w, fmt.Sprintln($*args))"}},
+					Line:            362,
+					SyntaxPatterns:  []ir.PatternString{{Line: 362, Value: "io.WriteString($w, fmt.Sprintln($*args))"}},
 					ReportTemplate:  "suggestion: fmt.Fprintln($w, $args)",
 					SuggestTemplate: "fmt.Fprintln($w, $args)",
 				},
 				{
-					Line:            358,
-					SyntaxPatterns:  []ir.PatternString{{Line: 358, Value: "$w.WriteString(fmt.Sprint($*args))"}},
+					Line:            364,
+					SyntaxPatterns:  []ir.PatternString{{Line: 364, Value: "$w.WriteString(fmt.Sprint($*args))"}},
 					ReportTemplate:  "suggestion: fmt.Fprint($w, $args)",
 					SuggestTemplate: "fmt.Fprint($w, $args)",
 					WhereExpr: ir.FilterExpr{
-						Line: 359,
+						Line: 365,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"w\"].Type.Implements(\"io.Writer\") && m[\"w\"].Type.Implements(\"io.StringWriter\")",
 						Args: []ir.FilterExpr{
 							{
-								Line:  359,
+								Line:  365,
 								Op:    ir.FilterVarTypeImplementsOp,
 								Src:   "m[\"w\"].Type.Implements(\"io.Writer\")",
 								Value: "w",
-								Args:  []ir.FilterExpr{{Line: 359, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}},
+								Args:  []ir.FilterExpr{{Line: 365, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}},
 							},
 							{
-								Line:  359,
+								Line:  365,
 								Op:    ir.FilterVarTypeImplementsOp,
 								Src:   "m[\"w\"].Type.Implements(\"io.StringWriter\")",
 								Value: "w",
-								Args:  []ir.FilterExpr{{Line: 359, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}},
+								Args:  []ir.FilterExpr{{Line: 365, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}},
 							},
 						},
 					},
 				},
 				{
-					Line:            361,
-					SyntaxPatterns:  []ir.PatternString{{Line: 361, Value: "$w.WriteString(fmt.Sprintf($*args))"}},
+					Line:            367,
+					SyntaxPatterns:  []ir.PatternString{{Line: 367, Value: "$w.WriteString(fmt.Sprintf($*args))"}},
 					ReportTemplate:  "suggestion: fmt.Fprintf($w, $args)",
 					SuggestTemplate: "fmt.Fprintf($w, $args)",
 					WhereExpr: ir.FilterExpr{
-						Line: 362,
+						Line: 368,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"w\"].Type.Implements(\"io.Writer\") && m[\"w\"].Type.Implements(\"io.StringWriter\")",
 						Args: []ir.FilterExpr{
 							{
-								Line:  362,
+								Line:  368,
 								Op:    ir.FilterVarTypeImplementsOp,
 								Src:   "m[\"w\"].Type.Implements(\"io.Writer\")",
 								Value: "w",
-								Args:  []ir.FilterExpr{{Line: 362, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}},
+								Args:  []ir.FilterExpr{{Line: 368, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}},
 							},
 							{
-								Line:  362,
+								Line:  368,
 								Op:    ir.FilterVarTypeImplementsOp,
 								Src:   "m[\"w\"].Type.Implements(\"io.StringWriter\")",
 								Value: "w",
-								Args:  []ir.FilterExpr{{Line: 362, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}},
+								Args:  []ir.FilterExpr{{Line: 368, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}},
 							},
 						},
 					},
 				},
 				{
-					Line:            364,
-					SyntaxPatterns:  []ir.PatternString{{Line: 364, Value: "$w.WriteString(fmt.Sprintln($*args))"}},
+					Line:            370,
+					SyntaxPatterns:  []ir.PatternString{{Line: 370, Value: "$w.WriteString(fmt.Sprintln($*args))"}},
 					ReportTemplate:  "suggestion: fmt.Fprintln($w, $args)",
 					SuggestTemplate: "fmt.Fprintln($w, $args)",
 					WhereExpr: ir.FilterExpr{
-						Line: 365,
+						Line: 371,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"w\"].Type.Implements(\"io.Writer\") && m[\"w\"].Type.Implements(\"io.StringWriter\")",
 						Args: []ir.FilterExpr{
 							{
-								Line:  365,
+								Line:  371,
 								Op:    ir.FilterVarTypeImplementsOp,
 								Src:   "m[\"w\"].Type.Implements(\"io.Writer\")",
 								Value: "w",
-								Args:  []ir.FilterExpr{{Line: 365, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}},
+								Args:  []ir.FilterExpr{{Line: 371, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}},
 							},
 							{
-								Line:  365,
+								Line:  371,
 								Op:    ir.FilterVarTypeImplementsOp,
 								Src:   "m[\"w\"].Type.Implements(\"io.StringWriter\")",
 								Value: "w",
-								Args:  []ir.FilterExpr{{Line: 365, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}},
+								Args:  []ir.FilterExpr{{Line: 371, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}},
 							},
 						},
 					},
@@ -1344,7 +1368,7 @@ var PrecompiledRules = &ir.File{
 			},
 		},
 		{
-			Line:        373,
+			Line:        379,
 			Name:        "dupArg",
 			MatcherName: "m",
 			DocTags:     []string{"diagnostic"},
@@ -1353,62 +1377,62 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "copy(dst, src)",
 			Rules: []ir.Rule{
 				{
-					Line: 374,
+					Line: 380,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 374, Value: "$x.Equal($x)"},
-						{Line: 374, Value: "$x.Equals($x)"},
-						{Line: 374, Value: "$x.Compare($x)"},
-						{Line: 374, Value: "$x.Cmp($x)"},
+						{Line: 380, Value: "$x.Equal($x)"},
+						{Line: 380, Value: "$x.Equals($x)"},
+						{Line: 380, Value: "$x.Compare($x)"},
+						{Line: 380, Value: "$x.Cmp($x)"},
 					},
 					ReportTemplate: "suspicious method call with the same argument and receiver",
-					WhereExpr:      ir.FilterExpr{Line: 375, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 381, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 				{
-					Line: 378,
+					Line: 384,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 378, Value: "copy($x, $x)"},
-						{Line: 379, Value: "math.Max($x, $x)"},
-						{Line: 380, Value: "math.Min($x, $x)"},
-						{Line: 381, Value: "reflect.Copy($x, $x)"},
-						{Line: 382, Value: "reflect.DeepEqual($x, $x)"},
-						{Line: 383, Value: "strings.Contains($x, $x)"},
-						{Line: 384, Value: "strings.Compare($x, $x)"},
-						{Line: 385, Value: "strings.EqualFold($x, $x)"},
-						{Line: 386, Value: "strings.HasPrefix($x, $x)"},
-						{Line: 387, Value: "strings.HasSuffix($x, $x)"},
-						{Line: 388, Value: "strings.Index($x, $x)"},
-						{Line: 389, Value: "strings.LastIndex($x, $x)"},
-						{Line: 390, Value: "strings.Split($x, $x)"},
-						{Line: 391, Value: "strings.SplitAfter($x, $x)"},
-						{Line: 392, Value: "strings.SplitAfterN($x, $x, $_)"},
-						{Line: 393, Value: "strings.SplitN($x, $x, $_)"},
-						{Line: 394, Value: "strings.Replace($_, $x, $x, $_)"},
-						{Line: 395, Value: "strings.ReplaceAll($_, $x, $x)"},
-						{Line: 396, Value: "bytes.Contains($x, $x)"},
-						{Line: 397, Value: "bytes.Compare($x, $x)"},
-						{Line: 398, Value: "bytes.Equal($x, $x)"},
-						{Line: 399, Value: "bytes.EqualFold($x, $x)"},
-						{Line: 400, Value: "bytes.HasPrefix($x, $x)"},
-						{Line: 401, Value: "bytes.HasSuffix($x, $x)"},
-						{Line: 402, Value: "bytes.Index($x, $x)"},
-						{Line: 403, Value: "bytes.LastIndex($x, $x)"},
-						{Line: 404, Value: "bytes.Split($x, $x)"},
-						{Line: 405, Value: "bytes.SplitAfter($x, $x)"},
-						{Line: 406, Value: "bytes.SplitAfterN($x, $x, $_)"},
-						{Line: 407, Value: "bytes.SplitN($x, $x, $_)"},
-						{Line: 408, Value: "bytes.Replace($_, $x, $x, $_)"},
-						{Line: 409, Value: "bytes.ReplaceAll($_, $x, $x)"},
-						{Line: 410, Value: "types.Identical($x, $x)"},
-						{Line: 411, Value: "types.IdenticalIgnoreTags($x, $x)"},
-						{Line: 412, Value: "draw.Draw($x, $_, $x, $_, $_)"},
+						{Line: 384, Value: "copy($x, $x)"},
+						{Line: 385, Value: "math.Max($x, $x)"},
+						{Line: 386, Value: "math.Min($x, $x)"},
+						{Line: 387, Value: "reflect.Copy($x, $x)"},
+						{Line: 388, Value: "reflect.DeepEqual($x, $x)"},
+						{Line: 389, Value: "strings.Contains($x, $x)"},
+						{Line: 390, Value: "strings.Compare($x, $x)"},
+						{Line: 391, Value: "strings.EqualFold($x, $x)"},
+						{Line: 392, Value: "strings.HasPrefix($x, $x)"},
+						{Line: 393, Value: "strings.HasSuffix($x, $x)"},
+						{Line: 394, Value: "strings.Index($x, $x)"},
+						{Line: 395, Value: "strings.LastIndex($x, $x)"},
+						{Line: 396, Value: "strings.Split($x, $x)"},
+						{Line: 397, Value: "strings.SplitAfter($x, $x)"},
+						{Line: 398, Value: "strings.SplitAfterN($x, $x, $_)"},
+						{Line: 399, Value: "strings.SplitN($x, $x, $_)"},
+						{Line: 400, Value: "strings.Replace($_, $x, $x, $_)"},
+						{Line: 401, Value: "strings.ReplaceAll($_, $x, $x)"},
+						{Line: 402, Value: "bytes.Contains($x, $x)"},
+						{Line: 403, Value: "bytes.Compare($x, $x)"},
+						{Line: 404, Value: "bytes.Equal($x, $x)"},
+						{Line: 405, Value: "bytes.EqualFold($x, $x)"},
+						{Line: 406, Value: "bytes.HasPrefix($x, $x)"},
+						{Line: 407, Value: "bytes.HasSuffix($x, $x)"},
+						{Line: 408, Value: "bytes.Index($x, $x)"},
+						{Line: 409, Value: "bytes.LastIndex($x, $x)"},
+						{Line: 410, Value: "bytes.Split($x, $x)"},
+						{Line: 411, Value: "bytes.SplitAfter($x, $x)"},
+						{Line: 412, Value: "bytes.SplitAfterN($x, $x, $_)"},
+						{Line: 413, Value: "bytes.SplitN($x, $x, $_)"},
+						{Line: 414, Value: "bytes.Replace($_, $x, $x, $_)"},
+						{Line: 415, Value: "bytes.ReplaceAll($_, $x, $x)"},
+						{Line: 416, Value: "types.Identical($x, $x)"},
+						{Line: 417, Value: "types.IdenticalIgnoreTags($x, $x)"},
+						{Line: 418, Value: "draw.Draw($x, $_, $x, $_, $_)"},
 					},
 					ReportTemplate: "suspicious duplicated args in $$",
-					WhereExpr:      ir.FilterExpr{Line: 413, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+					WhereExpr:      ir.FilterExpr{Line: 419, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 				},
 			},
 		},
 		{
-			Line:        421,
+			Line:        427,
 			Name:        "returnAfterHttpError",
 			MatcherName: "m",
 			DocTags:     []string{"diagnostic", "experimental"},
@@ -1416,14 +1440,14 @@ var PrecompiledRules = &ir.File{
 			DocBefore:   "if err != nil { http.Error(...); }",
 			DocAfter:    "if err != nil { http.Error(...); return; }",
 			Rules: []ir.Rule{{
-				Line:           422,
-				SyntaxPatterns: []ir.PatternString{{Line: 422, Value: "if $_ { $*_; http.Error($w, $err, $code) }"}},
+				Line:           428,
+				SyntaxPatterns: []ir.PatternString{{Line: 428, Value: "if $_ { $*_; http.Error($w, $err, $code) }"}},
 				ReportTemplate: "Possibly return is missed after the http.Error call",
 				LocationVar:    "w",
 			}},
 		},
 		{
-			Line:        431,
+			Line:        437,
 			Name:        "preferFilepathJoin",
 			MatcherName: "m",
 			DocTags:     []string{"style", "experimental"},
@@ -1431,35 +1455,35 @@ var PrecompiledRules = &ir.File{
 			DocBefore:   "x + string(os.PathSeparator) + y",
 			DocAfter:    "filepath.Join(x, y)",
 			Rules: []ir.Rule{{
-				Line:            432,
-				SyntaxPatterns:  []ir.PatternString{{Line: 432, Value: "$x + string(os.PathSeparator) + $y"}},
+				Line:            438,
+				SyntaxPatterns:  []ir.PatternString{{Line: 438, Value: "$x + string(os.PathSeparator) + $y"}},
 				ReportTemplate:  "filepath.Join($x, $y) should be preferred to the $$",
 				SuggestTemplate: "filepath.Join($x, $y)",
 				WhereExpr: ir.FilterExpr{
-					Line: 433,
+					Line: 439,
 					Op:   ir.FilterAndOp,
 					Src:  "m[\"x\"].Type.Is(`string`) && m[\"y\"].Type.Is(`string`)",
 					Args: []ir.FilterExpr{
 						{
-							Line:  433,
+							Line:  439,
 							Op:    ir.FilterVarTypeIsOp,
 							Src:   "m[\"x\"].Type.Is(`string`)",
 							Value: "x",
-							Args:  []ir.FilterExpr{{Line: 433, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
+							Args:  []ir.FilterExpr{{Line: 439, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
 						},
 						{
-							Line:  433,
+							Line:  439,
 							Op:    ir.FilterVarTypeIsOp,
 							Src:   "m[\"y\"].Type.Is(`string`)",
 							Value: "y",
-							Args:  []ir.FilterExpr{{Line: 433, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
+							Args:  []ir.FilterExpr{{Line: 439, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
 						},
 					},
 				},
 			}},
 		},
 		{
-			Line:        442,
+			Line:        448,
 			Name:        "preferStringWriter",
 			MatcherName: "m",
 			DocTags:     []string{"performance", "experimental"},
@@ -1468,35 +1492,35 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "w.WriteString(\"foo\")",
 			Rules: []ir.Rule{
 				{
-					Line:            443,
-					SyntaxPatterns:  []ir.PatternString{{Line: 443, Value: "$w.Write([]byte($s))"}},
+					Line:            449,
+					SyntaxPatterns:  []ir.PatternString{{Line: 449, Value: "$w.Write([]byte($s))"}},
 					ReportTemplate:  "$w.WriteString($s) should be preferred to the $$",
 					SuggestTemplate: "$w.WriteString($s)",
 					WhereExpr: ir.FilterExpr{
-						Line:  444,
+						Line:  450,
 						Op:    ir.FilterVarTypeImplementsOp,
 						Src:   "m[\"w\"].Type.Implements(\"io.StringWriter\")",
 						Value: "w",
-						Args:  []ir.FilterExpr{{Line: 444, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}},
+						Args:  []ir.FilterExpr{{Line: 450, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}},
 					},
 				},
 				{
-					Line:            448,
-					SyntaxPatterns:  []ir.PatternString{{Line: 448, Value: "io.WriteString($w, $s)"}},
+					Line:            454,
+					SyntaxPatterns:  []ir.PatternString{{Line: 454, Value: "io.WriteString($w, $s)"}},
 					ReportTemplate:  "$w.WriteString($s) should be preferred to the $$",
 					SuggestTemplate: "$w.WriteString($s)",
 					WhereExpr: ir.FilterExpr{
-						Line:  449,
+						Line:  455,
 						Op:    ir.FilterVarTypeImplementsOp,
 						Src:   "m[\"w\"].Type.Implements(\"io.StringWriter\")",
 						Value: "w",
-						Args:  []ir.FilterExpr{{Line: 449, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}},
+						Args:  []ir.FilterExpr{{Line: 455, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}},
 					},
 				},
 			},
 		},
 		{
-			Line:        458,
+			Line:        464,
 			Name:        "sliceClear",
 			MatcherName: "m",
 			DocTags:     []string{"performance", "experimental"},
@@ -1504,22 +1528,22 @@ var PrecompiledRules = &ir.File{
 			DocBefore:   "for i := 0; i < len(buf); i++ { buf[i] = 0 }",
 			DocAfter:    "for i := range buf { buf[i] = 0 }",
 			Rules: []ir.Rule{{
-				Line:           459,
-				SyntaxPatterns: []ir.PatternString{{Line: 459, Value: "for $i := 0; $i < len($xs); $i++ { $xs[$i] = $zero }"}},
+				Line:           465,
+				SyntaxPatterns: []ir.PatternString{{Line: 465, Value: "for $i := 0; $i < len($xs); $i++ { $xs[$i] = $zero }"}},
 				ReportTemplate: "rewrite as for-range so compiler can recognize this pattern",
 				WhereExpr: ir.FilterExpr{
-					Line: 460,
+					Line: 466,
 					Op:   ir.FilterEqOp,
 					Src:  "m[\"zero\"].Value.Int() == 0",
 					Args: []ir.FilterExpr{
 						{
-							Line:  460,
+							Line:  466,
 							Op:    ir.FilterVarValueIntOp,
 							Src:   "m[\"zero\"].Value.Int()",
 							Value: "zero",
 						},
 						{
-							Line:  460,
+							Line:  466,
 							Op:    ir.FilterIntOp,
 							Src:   "0",
 							Value: int64(0),
@@ -1529,7 +1553,7 @@ var PrecompiledRules = &ir.File{
 			}},
 		},
 		{
-			Line:        468,
+			Line:        474,
 			Name:        "syncMapLoadAndDelete",
 			MatcherName: "m",
 			DocTags:     []string{"diagnostic", "experimental"},
@@ -1537,33 +1561,33 @@ var PrecompiledRules = &ir.File{
 			DocBefore:   "v, ok := m.Load(k); if ok { m.Delete($k); f(v); }",
 			DocAfter:    "v, deleted := m.LoadAndDelete(k); if deleted { f(v) }",
 			Rules: []ir.Rule{{
-				Line:           469,
-				SyntaxPatterns: []ir.PatternString{{Line: 469, Value: "$_, $ok := $m.Load($k); if $ok { $m.Delete($k); $*_ }"}},
+				Line:           475,
+				SyntaxPatterns: []ir.PatternString{{Line: 475, Value: "$_, $ok := $m.Load($k); if $ok { $m.Delete($k); $*_ }"}},
 				ReportTemplate: "use $m.LoadAndDelete to perform load+delete operations atomically",
 				WhereExpr: ir.FilterExpr{
-					Line: 470,
+					Line: 476,
 					Op:   ir.FilterAndOp,
 					Src:  "m.GoVersion().GreaterEqThan(\"1.15\") &&\n\tm[\"m\"].Type.Is(`*sync.Map`)",
 					Args: []ir.FilterExpr{
 						{
-							Line:  470,
+							Line:  476,
 							Op:    ir.FilterGoVersionGreaterEqThanOp,
 							Src:   "m.GoVersion().GreaterEqThan(\"1.15\")",
 							Value: "1.15",
 						},
 						{
-							Line:  471,
+							Line:  477,
 							Op:    ir.FilterVarTypeIsOp,
 							Src:   "m[\"m\"].Type.Is(`*sync.Map`)",
 							Value: "m",
-							Args:  []ir.FilterExpr{{Line: 471, Op: ir.FilterStringOp, Src: "`*sync.Map`", Value: "*sync.Map"}},
+							Args:  []ir.FilterExpr{{Line: 477, Op: ir.FilterStringOp, Src: "`*sync.Map`", Value: "*sync.Map"}},
 						},
 					},
 				},
 			}},
 		},
 		{
-			Line:        479,
+			Line:        485,
 			Name:        "sprintfQuotedString",
 			MatcherName: "m",
 			DocTags:     []string{"diagnostic", "experimental"},
@@ -1571,34 +1595,34 @@ var PrecompiledRules = &ir.File{
 			DocBefore:   "fmt.Sprintf(`\"%s\"`, s)",
 			DocAfter:    "fmt.Sprintf(`%q`, s)",
 			Rules: []ir.Rule{{
-				Line:           480,
-				SyntaxPatterns: []ir.PatternString{{Line: 480, Value: "fmt.Sprintf($s, $*_)"}},
+				Line:           486,
+				SyntaxPatterns: []ir.PatternString{{Line: 486, Value: "fmt.Sprintf($s, $*_)"}},
 				ReportTemplate: "use %q instead of \"%s\" for quoted strings",
 				WhereExpr: ir.FilterExpr{
-					Line: 481,
+					Line: 487,
 					Op:   ir.FilterOrOp,
 					Src:  "m[\"s\"].Text.Matches(\"^`.*\\\"%s\\\".*`$\") ||\n\tm[\"s\"].Text.Matches(`^\".*\\\\\"%s\\\\\".*\"$`)",
 					Args: []ir.FilterExpr{
 						{
-							Line:  481,
+							Line:  487,
 							Op:    ir.FilterVarTextMatchesOp,
 							Src:   "m[\"s\"].Text.Matches(\"^`.*\\\"%s\\\".*`$\")",
 							Value: "s",
-							Args:  []ir.FilterExpr{{Line: 481, Op: ir.FilterStringOp, Src: "\"^`.*\\\"%s\\\".*`$\"", Value: "^`.*\"%s\".*`$"}},
+							Args:  []ir.FilterExpr{{Line: 487, Op: ir.FilterStringOp, Src: "\"^`.*\\\"%s\\\".*`$\"", Value: "^`.*\"%s\".*`$"}},
 						},
 						{
-							Line:  482,
+							Line:  488,
 							Op:    ir.FilterVarTextMatchesOp,
 							Src:   "m[\"s\"].Text.Matches(`^\".*\\\\\"%s\\\\\".*\"$`)",
 							Value: "s",
-							Args:  []ir.FilterExpr{{Line: 482, Op: ir.FilterStringOp, Src: "`^\".*\\\\\"%s\\\\\".*\"$`", Value: "^\".*\\\\\"%s\\\\\".*\"$"}},
+							Args:  []ir.FilterExpr{{Line: 488, Op: ir.FilterStringOp, Src: "`^\".*\\\\\"%s\\\\\".*\"$`", Value: "^\".*\\\\\"%s\\\\\".*\"$"}},
 						},
 					},
 				},
 			}},
 		},
 		{
-			Line:        490,
+			Line:        496,
 			Name:        "offBy1",
 			MatcherName: "m",
 			DocTags:     []string{"diagnostic"},
@@ -1607,80 +1631,80 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "xs[len(xs)-1]",
 			Rules: []ir.Rule{
 				{
-					Line:            491,
-					SyntaxPatterns:  []ir.PatternString{{Line: 491, Value: "$x[len($x)]"}},
+					Line:            497,
+					SyntaxPatterns:  []ir.PatternString{{Line: 497, Value: "$x[len($x)]"}},
 					ReportTemplate:  "index expr always panics; maybe you wanted $x[len($x)-1]?",
 					SuggestTemplate: "$x[len($x)-1]",
 					WhereExpr: ir.FilterExpr{
-						Line: 492,
+						Line: 498,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"x\"].Pure && m[\"x\"].Type.Is(`[]$_`)",
 						Args: []ir.FilterExpr{
-							{Line: 492, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+							{Line: 498, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
 							{
-								Line:  492,
+								Line:  498,
 								Op:    ir.FilterVarTypeIsOp,
 								Src:   "m[\"x\"].Type.Is(`[]$_`)",
 								Value: "x",
-								Args:  []ir.FilterExpr{{Line: 492, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}},
+								Args:  []ir.FilterExpr{{Line: 498, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}},
 							},
 						},
 					},
 				},
 				{
-					Line: 499,
+					Line: 505,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 500, Value: "$i := strings.Index($s, $_); $_ := $slicing[$i:]"},
-						{Line: 501, Value: "$i := strings.Index($s, $_); $_ = $slicing[$i:]"},
-						{Line: 502, Value: "$i := bytes.Index($s, $_); $_ := $slicing[$i:]"},
-						{Line: 503, Value: "$i := bytes.Index($s, $_); $_ = $slicing[$i:]"},
+						{Line: 506, Value: "$i := strings.Index($s, $_); $_ := $slicing[$i:]"},
+						{Line: 507, Value: "$i := strings.Index($s, $_); $_ = $slicing[$i:]"},
+						{Line: 508, Value: "$i := bytes.Index($s, $_); $_ := $slicing[$i:]"},
+						{Line: 509, Value: "$i := bytes.Index($s, $_); $_ = $slicing[$i:]"},
 					},
 					ReportTemplate: "Index() can return -1; maybe you wanted to do $s[$i+1:]",
 					WhereExpr: ir.FilterExpr{
-						Line: 504,
+						Line: 510,
 						Op:   ir.FilterEqOp,
 						Src:  "m[\"s\"].Text == m[\"slicing\"].Text",
 						Args: []ir.FilterExpr{
-							{Line: 504, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"},
-							{Line: 504, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"},
+							{Line: 510, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"},
+							{Line: 510, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"},
 						},
 					},
 					LocationVar: "slicing",
 				},
 				{
-					Line: 508,
+					Line: 514,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 509, Value: "$i := strings.Index($s, $_); $_ := $slicing[:$i]"},
-						{Line: 510, Value: "$i := strings.Index($s, $_); $_ = $slicing[:$i]"},
-						{Line: 511, Value: "$i := bytes.Index($s, $_); $_ := $slicing[:$i]"},
-						{Line: 512, Value: "$i := bytes.Index($s, $_); $_ = $slicing[:$i]"},
+						{Line: 515, Value: "$i := strings.Index($s, $_); $_ := $slicing[:$i]"},
+						{Line: 516, Value: "$i := strings.Index($s, $_); $_ = $slicing[:$i]"},
+						{Line: 517, Value: "$i := bytes.Index($s, $_); $_ := $slicing[:$i]"},
+						{Line: 518, Value: "$i := bytes.Index($s, $_); $_ = $slicing[:$i]"},
 					},
 					ReportTemplate: "Index() can return -1; maybe you wanted to do $s[:$i+1]",
 					WhereExpr: ir.FilterExpr{
-						Line: 513,
+						Line: 519,
 						Op:   ir.FilterEqOp,
 						Src:  "m[\"s\"].Text == m[\"slicing\"].Text",
 						Args: []ir.FilterExpr{
-							{Line: 513, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"},
-							{Line: 513, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"},
+							{Line: 519, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"},
+							{Line: 519, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"},
 						},
 					},
 					LocationVar: "slicing",
 				},
 				{
-					Line: 517,
+					Line: 523,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 518, Value: "$s[strings.Index($s, $_):]"},
-						{Line: 519, Value: "$s[:strings.Index($s, $_)]"},
-						{Line: 520, Value: "$s[bytes.Index($s, $_):]"},
-						{Line: 521, Value: "$s[:bytes.Index($s, $_)]"},
+						{Line: 524, Value: "$s[strings.Index($s, $_):]"},
+						{Line: 525, Value: "$s[:strings.Index($s, $_)]"},
+						{Line: 526, Value: "$s[bytes.Index($s, $_):]"},
+						{Line: 527, Value: "$s[:bytes.Index($s, $_)]"},
 					},
 					ReportTemplate: "Index() can return -1; maybe you wanted to do Index()+1",
 				},
 			},
 		},
 		{
-			Line:        529,
+			Line:        535,
 			Name:        "unslice",
 			MatcherName: "m",
 			DocTags:     []string{"style"},
@@ -1688,35 +1712,35 @@ var PrecompiledRules = &ir.File{
 			DocBefore:   "copy(b[:], values...)",
 			DocAfter:    "copy(b, values...)",
 			Rules: []ir.Rule{{
-				Line:            530,
-				SyntaxPatterns:  []ir.PatternString{{Line: 530, Value: "$s[:]"}},
+				Line:            536,
+				SyntaxPatterns:  []ir.PatternString{{Line: 536, Value: "$s[:]"}},
 				ReportTemplate:  "could simplify $$ to $s",
 				SuggestTemplate: "$s",
 				WhereExpr: ir.FilterExpr{
-					Line: 531,
+					Line: 537,
 					Op:   ir.FilterOrOp,
 					Src:  "m[\"s\"].Type.Is(`string`) || m[\"s\"].Type.Is(`[]$_`)",
 					Args: []ir.FilterExpr{
 						{
-							Line:  531,
+							Line:  537,
 							Op:    ir.FilterVarTypeIsOp,
 							Src:   "m[\"s\"].Type.Is(`string`)",
 							Value: "s",
-							Args:  []ir.FilterExpr{{Line: 531, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
+							Args:  []ir.FilterExpr{{Line: 537, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}},
 						},
 						{
-							Line:  531,
+							Line:  537,
 							Op:    ir.FilterVarTypeIsOp,
 							Src:   "m[\"s\"].Type.Is(`[]$_`)",
 							Value: "s",
-							Args:  []ir.FilterExpr{{Line: 531, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}},
+							Args:  []ir.FilterExpr{{Line: 537, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}},
 						},
 					},
 				},
 			}},
 		},
 		{
-			Line:        540,
+			Line:        546,
 			Name:        "yodaStyleExpr",
 			MatcherName: "m",
 			DocTags:     []string{"style", "experimental"},
@@ -1725,105 +1749,105 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "return ptr != nil",
 			Rules: []ir.Rule{
 				{
-					Line:           541,
-					SyntaxPatterns: []ir.PatternString{{Line: 541, Value: "$constval != $x"}},
+					Line:           547,
+					SyntaxPatterns: []ir.PatternString{{Line: 547, Value: "$constval != $x"}},
 					ReportTemplate: "consider to change order in expression to $x != $constval",
 					WhereExpr: ir.FilterExpr{
-						Line: 541,
+						Line: 547,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"constval\"].Node.Is(`BasicLit`) && !m[\"x\"].Node.Is(`BasicLit`)",
 						Args: []ir.FilterExpr{
 							{
-								Line:  541,
+								Line:  547,
 								Op:    ir.FilterVarNodeIsOp,
 								Src:   "m[\"constval\"].Node.Is(`BasicLit`)",
 								Value: "constval",
-								Args:  []ir.FilterExpr{{Line: 541, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}},
+								Args:  []ir.FilterExpr{{Line: 547, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}},
 							},
 							{
-								Line: 541,
+								Line: 547,
 								Op:   ir.FilterNotOp,
 								Src:  "!m[\"x\"].Node.Is(`BasicLit`)",
 								Args: []ir.FilterExpr{{
-									Line:  541,
+									Line:  547,
 									Op:    ir.FilterVarNodeIsOp,
 									Src:   "m[\"x\"].Node.Is(`BasicLit`)",
 									Value: "x",
-									Args:  []ir.FilterExpr{{Line: 541, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}},
+									Args:  []ir.FilterExpr{{Line: 547, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}},
 								}},
 							},
 						},
 					},
 				},
 				{
-					Line:           543,
-					SyntaxPatterns: []ir.PatternString{{Line: 543, Value: "$constval == $x"}},
+					Line:           549,
+					SyntaxPatterns: []ir.PatternString{{Line: 549, Value: "$constval == $x"}},
 					ReportTemplate: "consider to change order in expression to $x == $constval",
 					WhereExpr: ir.FilterExpr{
-						Line: 543,
+						Line: 549,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"constval\"].Node.Is(`BasicLit`) && !m[\"x\"].Node.Is(`BasicLit`)",
 						Args: []ir.FilterExpr{
 							{
-								Line:  543,
+								Line:  549,
 								Op:    ir.FilterVarNodeIsOp,
 								Src:   "m[\"constval\"].Node.Is(`BasicLit`)",
 								Value: "constval",
-								Args:  []ir.FilterExpr{{Line: 543, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}},
+								Args:  []ir.FilterExpr{{Line: 549, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}},
 							},
 							{
-								Line: 543,
+								Line: 549,
 								Op:   ir.FilterNotOp,
 								Src:  "!m[\"x\"].Node.Is(`BasicLit`)",
 								Args: []ir.FilterExpr{{
-									Line:  543,
+									Line:  549,
 									Op:    ir.FilterVarNodeIsOp,
 									Src:   "m[\"x\"].Node.Is(`BasicLit`)",
 									Value: "x",
-									Args:  []ir.FilterExpr{{Line: 543, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}},
+									Args:  []ir.FilterExpr{{Line: 549, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}},
 								}},
 							},
 						},
 					},
 				},
 				{
-					Line:           546,
-					SyntaxPatterns: []ir.PatternString{{Line: 546, Value: "nil != $x"}},
+					Line:           552,
+					SyntaxPatterns: []ir.PatternString{{Line: 552, Value: "nil != $x"}},
 					ReportTemplate: "consider to change order in expression to $x != nil",
 					WhereExpr: ir.FilterExpr{
-						Line: 546,
+						Line: 552,
 						Op:   ir.FilterNotOp,
 						Src:  "!m[\"x\"].Node.Is(`BasicLit`)",
 						Args: []ir.FilterExpr{{
-							Line:  546,
+							Line:  552,
 							Op:    ir.FilterVarNodeIsOp,
 							Src:   "m[\"x\"].Node.Is(`BasicLit`)",
 							Value: "x",
-							Args:  []ir.FilterExpr{{Line: 546, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}},
+							Args:  []ir.FilterExpr{{Line: 552, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}},
 						}},
 					},
 				},
 				{
-					Line:           548,
-					SyntaxPatterns: []ir.PatternString{{Line: 548, Value: "nil == $x"}},
+					Line:           554,
+					SyntaxPatterns: []ir.PatternString{{Line: 554, Value: "nil == $x"}},
 					ReportTemplate: "consider to change order in expression to $x == nil",
 					WhereExpr: ir.FilterExpr{
-						Line: 548,
+						Line: 554,
 						Op:   ir.FilterNotOp,
 						Src:  "!m[\"x\"].Node.Is(`BasicLit`)",
 						Args: []ir.FilterExpr{{
-							Line:  548,
+							Line:  554,
 							Op:    ir.FilterVarNodeIsOp,
 							Src:   "m[\"x\"].Node.Is(`BasicLit`)",
 							Value: "x",
-							Args:  []ir.FilterExpr{{Line: 548, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}},
+							Args:  []ir.FilterExpr{{Line: 554, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}},
 						}},
 					},
 				},
 			},
 		},
 		{
-			Line:        556,
+			Line:        562,
 			Name:        "equalFold",
 			MatcherName: "m",
 			DocTags:     []string{"performance", "experimental"},
@@ -1832,114 +1856,114 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "strings.EqualFold(x, y)",
 			Rules: []ir.Rule{
 				{
-					Line: 565,
+					Line: 571,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 566, Value: "strings.ToLower($x) == $y"},
-						{Line: 567, Value: "strings.ToLower($x) == strings.ToLower($y)"},
-						{Line: 568, Value: "$x == strings.ToLower($y)"},
-						{Line: 569, Value: "strings.ToUpper($x) == $y"},
-						{Line: 570, Value: "strings.ToUpper($x) == strings.ToUpper($y)"},
-						{Line: 571, Value: "$x == strings.ToUpper($y)"},
+						{Line: 572, Value: "strings.ToLower($x) == $y"},
+						{Line: 573, Value: "strings.ToLower($x) == strings.ToLower($y)"},
+						{Line: 574, Value: "$x == strings.ToLower($y)"},
+						{Line: 575, Value: "strings.ToUpper($x) == $y"},
+						{Line: 576, Value: "strings.ToUpper($x) == strings.ToUpper($y)"},
+						{Line: 577, Value: "$x == strings.ToUpper($y)"},
 					},
 					ReportTemplate:  "consider replacing with strings.EqualFold($x, $y)",
 					SuggestTemplate: "strings.EqualFold($x, $y)",
 					WhereExpr: ir.FilterExpr{
-						Line: 572,
+						Line: 578,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text",
 						Args: []ir.FilterExpr{
 							{
-								Line: 572,
+								Line: 578,
 								Op:   ir.FilterAndOp,
 								Src:  "m[\"x\"].Pure && m[\"y\"].Pure",
 								Args: []ir.FilterExpr{
-									{Line: 572, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
-									{Line: 572, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"},
+									{Line: 578, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+									{Line: 578, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"},
 								},
 							},
 							{
-								Line: 572,
+								Line: 578,
 								Op:   ir.FilterNeqOp,
 								Src:  "m[\"x\"].Text != m[\"y\"].Text",
 								Args: []ir.FilterExpr{
-									{Line: 572, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"},
-									{Line: 572, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"},
+									{Line: 578, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"},
+									{Line: 578, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"},
 								},
 							},
 						},
 					},
 				},
 				{
-					Line: 577,
+					Line: 583,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 578, Value: "strings.ToLower($x) != $y"},
-						{Line: 579, Value: "strings.ToLower($x) != strings.ToLower($y)"},
-						{Line: 580, Value: "$x != strings.ToLower($y)"},
-						{Line: 581, Value: "strings.ToUpper($x) != $y"},
-						{Line: 582, Value: "strings.ToUpper($x) != strings.ToUpper($y)"},
-						{Line: 583, Value: "$x != strings.ToUpper($y)"},
+						{Line: 584, Value: "strings.ToLower($x) != $y"},
+						{Line: 585, Value: "strings.ToLower($x) != strings.ToLower($y)"},
+						{Line: 586, Value: "$x != strings.ToLower($y)"},
+						{Line: 587, Value: "strings.ToUpper($x) != $y"},
+						{Line: 588, Value: "strings.ToUpper($x) != strings.ToUpper($y)"},
+						{Line: 589, Value: "$x != strings.ToUpper($y)"},
 					},
 					ReportTemplate:  "consider replacing with !strings.EqualFold($x, $y)",
 					SuggestTemplate: "!strings.EqualFold($x, $y)",
 					WhereExpr: ir.FilterExpr{
-						Line: 584,
+						Line: 590,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text",
 						Args: []ir.FilterExpr{
 							{
-								Line: 584,
+								Line: 590,
 								Op:   ir.FilterAndOp,
 								Src:  "m[\"x\"].Pure && m[\"y\"].Pure",
 								Args: []ir.FilterExpr{
-									{Line: 584, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
-									{Line: 584, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"},
+									{Line: 590, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+									{Line: 590, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"},
 								},
 							},
 							{
-								Line: 584,
+								Line: 590,
 								Op:   ir.FilterNeqOp,
 								Src:  "m[\"x\"].Text != m[\"y\"].Text",
 								Args: []ir.FilterExpr{
-									{Line: 584, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"},
-									{Line: 584, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"},
+									{Line: 590, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"},
+									{Line: 590, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"},
 								},
 							},
 						},
 					},
 				},
 				{
-					Line: 589,
+					Line: 595,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 590, Value: "bytes.Equal(bytes.ToLower($x), $y)"},
-						{Line: 591, Value: "bytes.Equal(bytes.ToLower($x), bytes.ToLower($y))"},
-						{Line: 592, Value: "bytes.Equal($x, bytes.ToLower($y))"},
-						{Line: 593, Value: "bytes.Equal(bytes.ToUpper($x), $y)"},
-						{Line: 594, Value: "bytes.Equal(bytes.ToUpper($x), bytes.ToUpper($y))"},
-						{Line: 595, Value: "bytes.Equal($x, bytes.ToUpper($y))"},
+						{Line: 596, Value: "bytes.Equal(bytes.ToLower($x), $y)"},
+						{Line: 597, Value: "bytes.Equal(bytes.ToLower($x), bytes.ToLower($y))"},
+						{Line: 598, Value: "bytes.Equal($x, bytes.ToLower($y))"},
+						{Line: 599, Value: "bytes.Equal(bytes.ToUpper($x), $y)"},
+						{Line: 600, Value: "bytes.Equal(bytes.ToUpper($x), bytes.ToUpper($y))"},
+						{Line: 601, Value: "bytes.Equal($x, bytes.ToUpper($y))"},
 					},
 					ReportTemplate:  "consider replacing with bytes.EqualFold($x, $y)",
 					SuggestTemplate: "bytes.EqualFold($x, $y)",
 					WhereExpr: ir.FilterExpr{
-						Line: 596,
+						Line: 602,
 						Op:   ir.FilterAndOp,
 						Src:  "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text",
 						Args: []ir.FilterExpr{
 							{
-								Line: 596,
+								Line: 602,
 								Op:   ir.FilterAndOp,
 								Src:  "m[\"x\"].Pure && m[\"y\"].Pure",
 								Args: []ir.FilterExpr{
-									{Line: 596, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
-									{Line: 596, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"},
+									{Line: 602, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"},
+									{Line: 602, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"},
 								},
 							},
 							{
-								Line: 596,
+								Line: 602,
 								Op:   ir.FilterNeqOp,
 								Src:  "m[\"x\"].Text != m[\"y\"].Text",
 								Args: []ir.FilterExpr{
-									{Line: 596, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"},
-									{Line: 596, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"},
+									{Line: 602, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"},
+									{Line: 602, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"},
 								},
 							},
 						},
@@ -1948,7 +1972,7 @@ var PrecompiledRules = &ir.File{
 			},
 		},
 		{
-			Line:        605,
+			Line:        611,
 			Name:        "argOrder",
 			MatcherName: "m",
 			DocTags:     []string{"diagnostic"},
@@ -1956,45 +1980,45 @@ var PrecompiledRules = &ir.File{
 			DocBefore:   "strings.HasPrefix(\"#\", userpass)",
 			DocAfter:    "strings.HasPrefix(userpass, \"#\")",
 			Rules: []ir.Rule{{
-				Line: 606,
+				Line: 612,
 				SyntaxPatterns: []ir.PatternString{
-					{Line: 607, Value: "strings.HasPrefix($lit, $s)"},
-					{Line: 608, Value: "bytes.HasPrefix($lit, $s)"},
-					{Line: 609, Value: "strings.HasSuffix($lit, $s)"},
-					{Line: 610, Value: "bytes.HasSuffix($lit, $s)"},
-					{Line: 611, Value: "strings.Contains($lit, $s)"},
-					{Line: 612, Value: "bytes.Contains($lit, $s)"},
-					{Line: 613, Value: "strings.TrimPrefix($lit, $s)"},
-					{Line: 614, Value: "bytes.TrimPrefix($lit, $s)"},
-					{Line: 615, Value: "strings.TrimSuffix($lit, $s)"},
-					{Line: 616, Value: "bytes.TrimSuffix($lit, $s)"},
-					{Line: 617, Value: "strings.Split($lit, $s)"},
-					{Line: 618, Value: "bytes.Split($lit, $s)"},
+					{Line: 613, Value: "strings.HasPrefix($lit, $s)"},
+					{Line: 614, Value: "bytes.HasPrefix($lit, $s)"},
+					{Line: 615, Value: "strings.HasSuffix($lit, $s)"},
+					{Line: 616, Value: "bytes.HasSuffix($lit, $s)"},
+					{Line: 617, Value: "strings.Contains($lit, $s)"},
+					{Line: 618, Value: "bytes.Contains($lit, $s)"},
+					{Line: 619, Value: "strings.TrimPrefix($lit, $s)"},
+					{Line: 620, Value: "bytes.TrimPrefix($lit, $s)"},
+					{Line: 621, Value: "strings.TrimSuffix($lit, $s)"},
+					{Line: 622, Value: "bytes.TrimSuffix($lit, $s)"},
+					{Line: 623, Value: "strings.Split($lit, $s)"},
+					{Line: 624, Value: "bytes.Split($lit, $s)"},
 				},
 				ReportTemplate: "$lit and $s arguments order looks reversed",
 				WhereExpr: ir.FilterExpr{
-					Line: 619,
+					Line: 625,
 					Op:   ir.FilterAndOp,
 					Src:  "(m[\"lit\"].Const || m[\"lit\"].ConstSlice) &&\n\t!(m[\"s\"].Const || m[\"s\"].ConstSlice) &&\n\t!m[\"lit\"].Node.Is(`Ident`)",
 					Args: []ir.FilterExpr{
 						{
-							Line: 619,
+							Line: 625,
 							Op:   ir.FilterAndOp,
 							Src:  "(m[\"lit\"].Const || m[\"lit\"].ConstSlice) &&\n\t!(m[\"s\"].Const || m[\"s\"].ConstSlice)",
 							Args: []ir.FilterExpr{
 								{
-									Line: 619,
+									Line: 625,
 									Op:   ir.FilterOrOp,
 									Src:  "(m[\"lit\"].Const || m[\"lit\"].ConstSlice)",
 									Args: []ir.FilterExpr{
 										{
-											Line:  619,
+											Line:  625,
 											Op:    ir.FilterVarConstOp,
 											Src:   "m[\"lit\"].Const",
 											Value: "lit",
 										},
 										{
-											Line:  619,
+											Line:  625,
 											Op:    ir.FilterVarConstSliceOp,
 											Src:   "m[\"lit\"].ConstSlice",
 											Value: "lit",
@@ -2002,22 +2026,22 @@ var PrecompiledRules = &ir.File{
 									},
 								},
 								{
-									Line: 620,
+									Line: 626,
 									Op:   ir.FilterNotOp,
 									Src:  "!(m[\"s\"].Const || m[\"s\"].ConstSlice)",
 									Args: []ir.FilterExpr{{
-										Line: 620,
+										Line: 626,
 										Op:   ir.FilterOrOp,
 										Src:  "(m[\"s\"].Const || m[\"s\"].ConstSlice)",
 										Args: []ir.FilterExpr{
 											{
-												Line:  620,
+												Line:  626,
 												Op:    ir.FilterVarConstOp,
 												Src:   "m[\"s\"].Const",
 												Value: "s",
 											},
 											{
-												Line:  620,
+												Line:  626,
 												Op:    ir.FilterVarConstSliceOp,
 												Src:   "m[\"s\"].ConstSlice",
 												Value: "s",
@@ -2028,15 +2052,15 @@ var PrecompiledRules = &ir.File{
 							},
 						},
 						{
-							Line: 621,
+							Line: 627,
 							Op:   ir.FilterNotOp,
 							Src:  "!m[\"lit\"].Node.Is(`Ident`)",
 							Args: []ir.FilterExpr{{
-								Line:  621,
+								Line:  627,
 								Op:    ir.FilterVarNodeIsOp,
 								Src:   "m[\"lit\"].Node.Is(`Ident`)",
 								Value: "lit",
-								Args:  []ir.FilterExpr{{Line: 621, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}},
+								Args:  []ir.FilterExpr{{Line: 627, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}},
 							}},
 						},
 					},
@@ -2044,7 +2068,7 @@ var PrecompiledRules = &ir.File{
 			}},
 		},
 		{
-			Line:        629,
+			Line:        635,
 			Name:        "stringConcatSimplify",
 			MatcherName: "m",
 			DocTags:     []string{"style", "experimental"},
@@ -2053,27 +2077,27 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "x + \"_\" + y",
 			Rules: []ir.Rule{
 				{
-					Line:            630,
-					SyntaxPatterns:  []ir.PatternString{{Line: 630, Value: "strings.Join([]string{$x, $y}, \"\")"}},
+					Line:            636,
+					SyntaxPatterns:  []ir.PatternString{{Line: 636, Value: "strings.Join([]string{$x, $y}, \"\")"}},
 					ReportTemplate:  "suggestion: $x + $y",
 					SuggestTemplate: "$x + $y",
 				},
 				{
-					Line:            631,
-					SyntaxPatterns:  []ir.PatternString{{Line: 631, Value: "strings.Join([]string{$x, $y, $z}, \"\")"}},
+					Line:            637,
+					SyntaxPatterns:  []ir.PatternString{{Line: 637, Value: "strings.Join([]string{$x, $y, $z}, \"\")"}},
 					ReportTemplate:  "suggestion: $x + $y + $z",
 					SuggestTemplate: "$x + $y + $z",
 				},
 				{
-					Line:            632,
-					SyntaxPatterns:  []ir.PatternString{{Line: 632, Value: "strings.Join([]string{$x, $y}, $glue)"}},
+					Line:            638,
+					SyntaxPatterns:  []ir.PatternString{{Line: 638, Value: "strings.Join([]string{$x, $y}, $glue)"}},
 					ReportTemplate:  "suggestion: $x + $glue + $y",
 					SuggestTemplate: "$x + $glue + $y",
 				},
 			},
 		},
 		{
-			Line:        639,
+			Line:        645,
 			Name:        "timeExprSimplify",
 			MatcherName: "m",
 			DocTags:     []string{"style", "experimental"},
@@ -2082,39 +2106,39 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "t.UnixMilli()",
 			Rules: []ir.Rule{
 				{
-					Line:            644,
-					SyntaxPatterns:  []ir.PatternString{{Line: 644, Value: "$t.Unix() / 1000"}},
+					Line:            650,
+					SyntaxPatterns:  []ir.PatternString{{Line: 650, Value: "$t.Unix() / 1000"}},
 					ReportTemplate:  "use $t.UnixMilli() instead of $$",
 					SuggestTemplate: "$t.UnixMilli()",
 					WhereExpr: ir.FilterExpr{
-						Line: 645,
+						Line: 651,
 						Op:   ir.FilterAndOp,
 						Src:  "m.GoVersion().GreaterEqThan(\"1.17\") && isTime(m[\"t\"])",
 						Args: []ir.FilterExpr{
 							{
-								Line:  645,
+								Line:  651,
 								Op:    ir.FilterGoVersionGreaterEqThanOp,
 								Src:   "m.GoVersion().GreaterEqThan(\"1.17\")",
 								Value: "1.17",
 							},
 							{
-								Line: 645,
+								Line: 651,
 								Op:   ir.FilterOrOp,
 								Src:  "isTime(m[\"t\"])",
 								Args: []ir.FilterExpr{
 									{
-										Line:  645,
+										Line:  651,
 										Op:    ir.FilterVarTypeIsOp,
 										Src:   "m[\"t\"].Type.Is(`time.Time`)",
 										Value: "t",
-										Args:  []ir.FilterExpr{{Line: 641, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}},
+										Args:  []ir.FilterExpr{{Line: 647, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}},
 									},
 									{
-										Line:  645,
+										Line:  651,
 										Op:    ir.FilterVarTypeIsOp,
 										Src:   "m[\"t\"].Type.Is(`*time.Time`)",
 										Value: "t",
-										Args:  []ir.FilterExpr{{Line: 641, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}},
+										Args:  []ir.FilterExpr{{Line: 647, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}},
 									},
 								},
 							},
@@ -2122,39 +2146,39 @@ var PrecompiledRules = &ir.File{
 					},
 				},
 				{
-					Line:            649,
-					SyntaxPatterns:  []ir.PatternString{{Line: 649, Value: "$t.UnixNano() * 1000"}},
+					Line:            655,
+					SyntaxPatterns:  []ir.PatternString{{Line: 655, Value: "$t.UnixNano() * 1000"}},
 					ReportTemplate:  "use $t.UnixMicro() instead of $$",
 					SuggestTemplate: "$t.UnixMicro()",
 					WhereExpr: ir.FilterExpr{
-						Line: 650,
+						Line: 656,
 						Op:   ir.FilterAndOp,
 						Src:  "m.GoVersion().GreaterEqThan(\"1.17\") && isTime(m[\"t\"])",
 						Args: []ir.FilterExpr{
 							{
-								Line:  650,
+								Line:  656,
 								Op:    ir.FilterGoVersionGreaterEqThanOp,
 								Src:   "m.GoVersion().GreaterEqThan(\"1.17\")",
 								Value: "1.17",
 							},
 							{
-								Line: 650,
+								Line: 656,
 								Op:   ir.FilterOrOp,
 								Src:  "isTime(m[\"t\"])",
 								Args: []ir.FilterExpr{
 									{
-										Line:  650,
+										Line:  656,
 										Op:    ir.FilterVarTypeIsOp,
 										Src:   "m[\"t\"].Type.Is(`time.Time`)",
 										Value: "t",
-										Args:  []ir.FilterExpr{{Line: 641, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}},
+										Args:  []ir.FilterExpr{{Line: 647, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}},
 									},
 									{
-										Line:  650,
+										Line:  656,
 										Op:    ir.FilterVarTypeIsOp,
 										Src:   "m[\"t\"].Type.Is(`*time.Time`)",
 										Value: "t",
-										Args:  []ir.FilterExpr{{Line: 641, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}},
+										Args:  []ir.FilterExpr{{Line: 647, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}},
 									},
 								},
 							},
@@ -2164,72 +2188,7 @@ var PrecompiledRules = &ir.File{
 			},
 		},
 		{
-			Line:        659,
-			Name:        "timeCmpSimplify",
-			MatcherName: "m",
-			DocTags:     []string{"style", "experimental"},
-			DocSummary:  "Detects Before/After call of time.Time that can be simplified",
-			DocBefore:   "!t.Before(tt)",
-			DocAfter:    "t.After(tt)",
-			Rules: []ir.Rule{
-				{
-					Line:            664,
-					SyntaxPatterns:  []ir.PatternString{{Line: 664, Value: "!$t.Before($tt)"}},
-					ReportTemplate:  "suggestion: $t.After($tt)",
-					SuggestTemplate: "$t.After($tt)",
-					WhereExpr: ir.FilterExpr{
-						Line: 665,
-						Op:   ir.FilterOrOp,
-						Src:  "isTime(m[\"t\"])",
-						Args: []ir.FilterExpr{
-							{
-								Line:  665,
-								Op:    ir.FilterVarTypeIsOp,
-								Src:   "m[\"t\"].Type.Is(`time.Time`)",
-								Value: "t",
-								Args:  []ir.FilterExpr{{Line: 661, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}},
-							},
-							{
-								Line:  665,
-								Op:    ir.FilterVarTypeIsOp,
-								Src:   "m[\"t\"].Type.Is(`*time.Time`)",
-								Value: "t",
-								Args:  []ir.FilterExpr{{Line: 661, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}},
-							},
-						},
-					},
-				},
-				{
-					Line:            668,
-					SyntaxPatterns:  []ir.PatternString{{Line: 668, Value: "!$t.After($tt)"}},
-					ReportTemplate:  "suggestion: $t.Before($tt)",
-					SuggestTemplate: "$t.Before($tt)",
-					WhereExpr: ir.FilterExpr{
-						Line: 669,
-						Op:   ir.FilterOrOp,
-						Src:  "isTime(m[\"t\"])",
-						Args: []ir.FilterExpr{
-							{
-								Line:  669,
-								Op:    ir.FilterVarTypeIsOp,
-								Src:   "m[\"t\"].Type.Is(`time.Time`)",
-								Value: "t",
-								Args:  []ir.FilterExpr{{Line: 661, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}},
-							},
-							{
-								Line:  669,
-								Op:    ir.FilterVarTypeIsOp,
-								Src:   "m[\"t\"].Type.Is(`*time.Time`)",
-								Value: "t",
-								Args:  []ir.FilterExpr{{Line: 661, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}},
-							},
-						},
-					},
-				},
-			},
-		},
-		{
-			Line:        677,
+			Line:        665,
 			Name:        "exposedSyncMutex",
 			MatcherName: "m",
 			DocTags:     []string{"style", "experimental"},
@@ -2238,57 +2197,57 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "type Foo struct{ ...; mu sync.Mutex; ... }",
 			Rules: []ir.Rule{
 				{
-					Line:           682,
-					SyntaxPatterns: []ir.PatternString{{Line: 682, Value: "type $x struct { $*_; sync.Mutex; $*_ }"}},
+					Line:           670,
+					SyntaxPatterns: []ir.PatternString{{Line: 670, Value: "type $x struct { $*_; sync.Mutex; $*_ }"}},
 					ReportTemplate: "don't embed sync.Mutex",
 					WhereExpr: ir.FilterExpr{
-						Line:  683,
+						Line:  671,
 						Op:    ir.FilterVarTextMatchesOp,
 						Src:   "isExported(m[\"x\"])",
 						Value: "x",
-						Args:  []ir.FilterExpr{{Line: 679, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}},
+						Args:  []ir.FilterExpr{{Line: 667, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}},
 					},
 				},
 				{
-					Line:           686,
-					SyntaxPatterns: []ir.PatternString{{Line: 686, Value: "type $x struct { $*_; *sync.Mutex; $*_ }"}},
+					Line:           674,
+					SyntaxPatterns: []ir.PatternString{{Line: 674, Value: "type $x struct { $*_; *sync.Mutex; $*_ }"}},
 					ReportTemplate: "don't embed *sync.Mutex",
 					WhereExpr: ir.FilterExpr{
-						Line:  687,
+						Line:  675,
 						Op:    ir.FilterVarTextMatchesOp,
 						Src:   "isExported(m[\"x\"])",
 						Value: "x",
-						Args:  []ir.FilterExpr{{Line: 679, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}},
+						Args:  []ir.FilterExpr{{Line: 667, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}},
 					},
 				},
 				{
-					Line:           690,
-					SyntaxPatterns: []ir.PatternString{{Line: 690, Value: "type $x struct { $*_; sync.RWMutex; $*_ }"}},
+					Line:           678,
+					SyntaxPatterns: []ir.PatternString{{Line: 678, Value: "type $x struct { $*_; sync.RWMutex; $*_ }"}},
 					ReportTemplate: "don't embed sync.RWMutex",
 					WhereExpr: ir.FilterExpr{
-						Line:  691,
+						Line:  679,
 						Op:    ir.FilterVarTextMatchesOp,
 						Src:   "isExported(m[\"x\"])",
 						Value: "x",
-						Args:  []ir.FilterExpr{{Line: 679, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}},
+						Args:  []ir.FilterExpr{{Line: 667, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}},
 					},
 				},
 				{
-					Line:           694,
-					SyntaxPatterns: []ir.PatternString{{Line: 694, Value: "type $x struct { $*_; *sync.RWMutex; $*_ }"}},
+					Line:           682,
+					SyntaxPatterns: []ir.PatternString{{Line: 682, Value: "type $x struct { $*_; *sync.RWMutex; $*_ }"}},
 					ReportTemplate: "don't embed *sync.RWMutex",
 					WhereExpr: ir.FilterExpr{
-						Line:  695,
+						Line:  683,
 						Op:    ir.FilterVarTextMatchesOp,
 						Src:   "isExported(m[\"x\"])",
 						Value: "x",
-						Args:  []ir.FilterExpr{{Line: 679, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}},
+						Args:  []ir.FilterExpr{{Line: 667, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}},
 					},
 				},
 			},
 		},
 		{
-			Line:        703,
+			Line:        691,
 			Name:        "badSorting",
 			MatcherName: "m",
 			DocTags:     []string{"diagnostic", "experimental"},
@@ -2297,83 +2256,83 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "sort.Strings(xs)",
 			Rules: []ir.Rule{
 				{
-					Line:            704,
-					SyntaxPatterns:  []ir.PatternString{{Line: 704, Value: "$x = sort.IntSlice($x)"}},
+					Line:            692,
+					SyntaxPatterns:  []ir.PatternString{{Line: 692, Value: "$x = sort.IntSlice($x)"}},
 					ReportTemplate:  "suspicious sort.IntSlice usage, maybe sort.Ints was intended?",
 					SuggestTemplate: "sort.Ints($x)",
 					WhereExpr: ir.FilterExpr{
-						Line:  705,
+						Line:  693,
 						Op:    ir.FilterVarTypeIsOp,
 						Src:   "m[\"x\"].Type.Is(`[]int`)",
 						Value: "x",
-						Args:  []ir.FilterExpr{{Line: 705, Op: ir.FilterStringOp, Src: "`[]int`", Value: "[]int"}},
+						Args:  []ir.FilterExpr{{Line: 693, Op: ir.FilterStringOp, Src: "`[]int`", Value: "[]int"}},
 					},
 				},
 				{
-					Line:            709,
-					SyntaxPatterns:  []ir.PatternString{{Line: 709, Value: "$x = sort.Float64Slice($x)"}},
+					Line:            697,
+					SyntaxPatterns:  []ir.PatternString{{Line: 697, Value: "$x = sort.Float64Slice($x)"}},
 					ReportTemplate:  "suspicious sort.Float64s usage, maybe sort.Float64s was intended?",
 					SuggestTemplate: "sort.Float64s($x)",
 					WhereExpr: ir.FilterExpr{
-						Line:  710,
+						Line:  698,
 						Op:    ir.FilterVarTypeIsOp,
 						Src:   "m[\"x\"].Type.Is(`[]float64`)",
 						Value: "x",
-						Args:  []ir.FilterExpr{{Line: 710, Op: ir.FilterStringOp, Src: "`[]float64`", Value: "[]float64"}},
+						Args:  []ir.FilterExpr{{Line: 698, Op: ir.FilterStringOp, Src: "`[]float64`", Value: "[]float64"}},
 					},
 				},
 				{
-					Line:            714,
-					SyntaxPatterns:  []ir.PatternString{{Line: 714, Value: "$x = sort.StringSlice($x)"}},
+					Line:            702,
+					SyntaxPatterns:  []ir.PatternString{{Line: 702, Value: "$x = sort.StringSlice($x)"}},
 					ReportTemplate:  "suspicious sort.StringSlice usage, maybe sort.Strings was intended?",
 					SuggestTemplate: "sort.Strings($x)",
 					WhereExpr: ir.FilterExpr{
-						Line:  715,
+						Line:  703,
 						Op:    ir.FilterVarTypeIsOp,
 						Src:   "m[\"x\"].Type.Is(`[]string`)",
 						Value: "x",
-						Args:  []ir.FilterExpr{{Line: 715, Op: ir.FilterStringOp, Src: "`[]string`", Value: "[]string"}},
+						Args:  []ir.FilterExpr{{Line: 703, Op: ir.FilterStringOp, Src: "`[]string`", Value: "[]string"}},
 					},
 				},
 			},
 		},
 		{
-			Line:        724,
+			Line:        712,
 			Name:        "externalErrorReassign",
 			MatcherName: "m",
 			DocTags:     []string{"diagnostic", "experimental"},
-			DocSummary:  "Detects suspicious reassigment of error from another package",
+			DocSummary:  "Detects suspicious reassignment of error from another package",
 			DocBefore:   "io.EOF = nil",
 			DocAfter:    "/* don't do it */",
 			Rules: []ir.Rule{{
-				Line:           725,
-				SyntaxPatterns: []ir.PatternString{{Line: 725, Value: "$pkg.$err = $x"}},
-				ReportTemplate: "suspicious reassigment of error from another package",
+				Line:           713,
+				SyntaxPatterns: []ir.PatternString{{Line: 713, Value: "$pkg.$err = $x"}},
+				ReportTemplate: "suspicious reassignment of error from another package",
 				WhereExpr: ir.FilterExpr{
-					Line: 726,
+					Line: 714,
 					Op:   ir.FilterAndOp,
 					Src:  "m[\"err\"].Type.Is(`error`) && m[\"pkg\"].Object.Is(`PkgName`)",
 					Args: []ir.FilterExpr{
 						{
-							Line:  726,
+							Line:  714,
 							Op:    ir.FilterVarTypeIsOp,
 							Src:   "m[\"err\"].Type.Is(`error`)",
 							Value: "err",
-							Args:  []ir.FilterExpr{{Line: 726, Op: ir.FilterStringOp, Src: "`error`", Value: "error"}},
+							Args:  []ir.FilterExpr{{Line: 714, Op: ir.FilterStringOp, Src: "`error`", Value: "error"}},
 						},
 						{
-							Line:  726,
+							Line:  714,
 							Op:    ir.FilterVarObjectIsOp,
 							Src:   "m[\"pkg\"].Object.Is(`PkgName`)",
 							Value: "pkg",
-							Args:  []ir.FilterExpr{{Line: 726, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}},
+							Args:  []ir.FilterExpr{{Line: 714, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}},
 						},
 					},
 				},
 			}},
 		},
 		{
-			Line:        734,
+			Line:        722,
 			Name:        "emptyDecl",
 			MatcherName: "m",
 			DocTags:     []string{"diagnostic", "experimental"},
@@ -2382,24 +2341,24 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "/* nothing */",
 			Rules: []ir.Rule{
 				{
-					Line:           735,
-					SyntaxPatterns: []ir.PatternString{{Line: 735, Value: "var()"}},
+					Line:           723,
+					SyntaxPatterns: []ir.PatternString{{Line: 723, Value: "var()"}},
 					ReportTemplate: "empty var() block",
 				},
 				{
-					Line:           736,
-					SyntaxPatterns: []ir.PatternString{{Line: 736, Value: "const()"}},
+					Line:           724,
+					SyntaxPatterns: []ir.PatternString{{Line: 724, Value: "const()"}},
 					ReportTemplate: "empty const() block",
 				},
 				{
-					Line:           737,
-					SyntaxPatterns: []ir.PatternString{{Line: 737, Value: "type()"}},
+					Line:           725,
+					SyntaxPatterns: []ir.PatternString{{Line: 725, Value: "type()"}},
 					ReportTemplate: "empty type() block",
 				},
 			},
 		},
 		{
-			Line:        744,
+			Line:        732,
 			Name:        "dynamicFmtString",
 			MatcherName: "m",
 			DocTags:     []string{"diagnostic", "experimental"},
@@ -2408,16 +2367,16 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "fmt.Errorf(\"%s\", msg)",
 			Rules: []ir.Rule{
 				{
-					Line:            745,
-					SyntaxPatterns:  []ir.PatternString{{Line: 745, Value: "fmt.Errorf($f)"}},
+					Line:            733,
+					SyntaxPatterns:  []ir.PatternString{{Line: 733, Value: "fmt.Errorf($f)"}},
 					ReportTemplate:  "use errors.New($f) or fmt.Errorf(\"%s\", $f) instead",
 					SuggestTemplate: "errors.New($f)",
 					WhereExpr: ir.FilterExpr{
-						Line: 746,
+						Line: 734,
 						Op:   ir.FilterNotOp,
 						Src:  "!m[\"f\"].Const",
 						Args: []ir.FilterExpr{{
-							Line:  746,
+							Line:  734,
 							Op:    ir.FilterVarConstOp,
 							Src:   "m[\"f\"].Const",
 							Value: "f",
@@ -2425,15 +2384,15 @@ var PrecompiledRules = &ir.File{
 					},
 				},
 				{
-					Line:            750,
-					SyntaxPatterns:  []ir.PatternString{{Line: 750, Value: "fmt.Errorf($f($*args))"}},
+					Line:            738,
+					SyntaxPatterns:  []ir.PatternString{{Line: 738, Value: "fmt.Errorf($f($*args))"}},
 					ReportTemplate:  "use errors.New($f($*args)) or fmt.Errorf(\"%s\", $f($*args)) instead",
 					SuggestTemplate: "errors.New($f($*args))",
 				},
 			},
 		},
 		{
-			Line:        759,
+			Line:        747,
 			Name:        "stringsCompare",
 			MatcherName: "m",
 			DocTags:     []string{"style", "experimental"},
@@ -2442,25 +2401,25 @@ var PrecompiledRules = &ir.File{
 			DocAfter:    "x < y",
 			Rules: []ir.Rule{
 				{
-					Line:            760,
-					SyntaxPatterns:  []ir.PatternString{{Line: 760, Value: "strings.Compare($s1, $s2) == 0"}},
+					Line:            748,
+					SyntaxPatterns:  []ir.PatternString{{Line: 748, Value: "strings.Compare($s1, $s2) == 0"}},
 					ReportTemplate:  "suggestion: $s1 == $s2",
 					SuggestTemplate: "$s1 == $s2",
 				},
 				{
-					Line: 763,
+					Line: 751,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 763, Value: "strings.Compare($s1, $s2) == -1"},
-						{Line: 764, Value: "strings.Compare($s1, $s2) < 0"},
+						{Line: 751, Value: "strings.Compare($s1, $s2) == -1"},
+						{Line: 752, Value: "strings.Compare($s1, $s2) < 0"},
 					},
 					ReportTemplate:  "suggestion: $s1 < $s2",
 					SuggestTemplate: "$s1 < $s2",
 				},
 				{
-					Line: 767,
+					Line: 755,
 					SyntaxPatterns: []ir.PatternString{
-						{Line: 767, Value: "strings.Compare($s1, $s2) == 1"},
-						{Line: 768, Value: "strings.Compare($s1, $s2) > 0"},
+						{Line: 755, Value: "strings.Compare($s1, $s2) == 1"},
+						{Line: 756, Value: "strings.Compare($s1, $s2) > 0"},
 					},
 					ReportTemplate:  "suggestion: $s1 > $s2",
 					SuggestTemplate: "$s1 > $s2",
@@ -2468,7 +2427,7 @@ var PrecompiledRules = &ir.File{
 			},
 		},
 		{
-			Line:        776,
+			Line:        764,
 			Name:        "uncheckedInlineErr",
 			MatcherName: "m",
 			DocTags:     []string{"diagnostic", "experimental"},
@@ -2476,47 +2435,47 @@ var PrecompiledRules = &ir.File{
 			DocBefore:   "if err := expr(); err2 != nil { /*...*/ }",
 			DocAfter:    "if err := expr(); err != nil { /*...*/ }",
 			Rules: []ir.Rule{{
-				Line: 777,
+				Line: 765,
 				SyntaxPatterns: []ir.PatternString{
-					{Line: 778, Value: "if $err := $_($*_); $err2 != nil { $*_ }"},
-					{Line: 779, Value: "if $err = $_($*_); $err2 != nil { $*_ }"},
-					{Line: 780, Value: "if $*_, $err := $_($*_); $err2 != nil { $*_ }"},
-					{Line: 781, Value: "if $*_, $err = $_($*_); $err2 != nil { $*_ }"},
+					{Line: 766, Value: "if $err := $_($*_); $err2 != nil { $*_ }"},
+					{Line: 767, Value: "if $err = $_($*_); $err2 != nil { $*_ }"},
+					{Line: 768, Value: "if $*_, $err := $_($*_); $err2 != nil { $*_ }"},
+					{Line: 769, Value: "if $*_, $err = $_($*_); $err2 != nil { $*_ }"},
 				},
 				ReportTemplate: "$err error is unchecked, maybe intended to check it instead of $err2",
 				WhereExpr: ir.FilterExpr{
-					Line: 782,
+					Line: 770,
 					Op:   ir.FilterAndOp,
 					Src:  "m[\"err\"].Type.Implements(\"error\") && m[\"err2\"].Type.Implements(\"error\") &&\n\tm[\"err\"].Text != m[\"err2\"].Text",
 					Args: []ir.FilterExpr{
 						{
-							Line: 782,
+							Line: 770,
 							Op:   ir.FilterAndOp,
 							Src:  "m[\"err\"].Type.Implements(\"error\") && m[\"err2\"].Type.Implements(\"error\")",
 							Args: []ir.FilterExpr{
 								{
-									Line:  782,
+									Line:  770,
 									Op:    ir.FilterVarTypeImplementsOp,
 									Src:   "m[\"err\"].Type.Implements(\"error\")",
 									Value: "err",
-									Args:  []ir.FilterExpr{{Line: 782, Op: ir.FilterStringOp, Src: "\"error\"", Value: "error"}},
+									Args:  []ir.FilterExpr{{Line: 770, Op: ir.FilterStringOp, Src: "\"error\"", Value: "error"}},
 								},
 								{
-									Line:  782,
+									Line:  770,
 									Op:    ir.FilterVarTypeImplementsOp,
 									Src:   "m[\"err2\"].Type.Implements(\"error\")",
 									Value: "err2",
-									Args:  []ir.FilterExpr{{Line: 782, Op: ir.FilterStringOp, Src: "\"error\"", Value: "error"}},
+									Args:  []ir.FilterExpr{{Line: 770, Op: ir.FilterStringOp, Src: "\"error\"", Value: "error"}},
 								},
 							},
 						},
 						{
-							Line: 783,
+							Line: 771,
 							Op:   ir.FilterNeqOp,
 							Src:  "m[\"err\"].Text != m[\"err2\"].Text",
 							Args: []ir.FilterExpr{
-								{Line: 783, Op: ir.FilterVarTextOp, Src: "m[\"err\"].Text", Value: "err"},
-								{Line: 783, Op: ir.FilterVarTextOp, Src: "m[\"err2\"].Text", Value: "err2"},
+								{Line: 771, Op: ir.FilterVarTextOp, Src: "m[\"err\"].Text", Value: "err"},
+								{Line: 771, Op: ir.FilterVarTextOp, Src: "m[\"err2\"].Text", Value: "err2"},
 							},
 						},
 					},
@@ -2525,108 +2484,34 @@ var PrecompiledRules = &ir.File{
 			}},
 		},
 		{
-			Line:        792,
-			Name:        "sloppyTestFuncName",
+			Line:        780,
+			Name:        "badSyncOnceFunc",
 			MatcherName: "m",
 			DocTags:     []string{"diagnostic", "experimental"},
-			DocSummary:  "Detects unsupported test and benchmark funcs",
-			DocBefore:   "func TessstUnit(t *testing.T)",
-			DocAfter:    "func TestUnit(t *testing.T)",
+			DocSummary:  "Detects bad usage of sync.OnceFunc",
+			DocBefore:   "sync.OnceFunc(foo)()",
+			DocAfter:    "fooOnce := sync.OnceFunc(foo); ...; fooOnce()",
 			Rules: []ir.Rule{
 				{
-					Line:           793,
-					SyntaxPatterns: []ir.PatternString{{Line: 793, Value: "func $test($_ *testing.T) { $*_ }"}},
-					ReportTemplate: "function $test should be of form TestXXX(t *testing.T)",
+					Line:           781,
+					SyntaxPatterns: []ir.PatternString{{Line: 781, Value: "$*_; sync.OnceFunc($x); $*_;"}},
+					ReportTemplate: "possible sync.OnceFunc misuse, sync.OnceFunc($x) result is not used",
 					WhereExpr: ir.FilterExpr{
-						Line: 794,
-						Op:   ir.FilterAndOp,
-						Src:  "!m[\"test\"].Text.Matches(\"Test.*\") &&\n\t!m[\"test\"].Text.Matches(\"test.*\")",
-						Args: []ir.FilterExpr{
-							{
-								Line: 794,
-								Op:   ir.FilterNotOp,
-								Src:  "!m[\"test\"].Text.Matches(\"Test.*\")",
-								Args: []ir.FilterExpr{{
-									Line:  794,
-									Op:    ir.FilterVarTextMatchesOp,
-									Src:   "m[\"test\"].Text.Matches(\"Test.*\")",
-									Value: "test",
-									Args:  []ir.FilterExpr{{Line: 794, Op: ir.FilterStringOp, Src: "\"Test.*\"", Value: "Test.*"}},
-								}},
-							},
-							{
-								Line: 795,
-								Op:   ir.FilterNotOp,
-								Src:  "!m[\"test\"].Text.Matches(\"test.*\")",
-								Args: []ir.FilterExpr{{
-									Line:  795,
-									Op:    ir.FilterVarTextMatchesOp,
-									Src:   "m[\"test\"].Text.Matches(\"test.*\")",
-									Value: "test",
-									Args:  []ir.FilterExpr{{Line: 795, Op: ir.FilterStringOp, Src: "\"test.*\"", Value: "test.*"}},
-								}},
-							},
-						},
-					},
-				},
-				{
-					Line:           798,
-					SyntaxPatterns: []ir.PatternString{{Line: 798, Value: "func $bench($_ *testing.B) { $*_ }"}},
-					ReportTemplate: "function $bench should be of form BenchmarkXXX(b *testing.B)",
-					WhereExpr: ir.FilterExpr{
-						Line: 799,
-						Op:   ir.FilterAndOp,
-						Src:  "!m[\"bench\"].Text.Matches(\"Benchmark.*\") &&\n\t!m[\"bench\"].Text.Matches(\"bench.*\")",
-						Args: []ir.FilterExpr{
-							{
-								Line: 799,
-								Op:   ir.FilterNotOp,
-								Src:  "!m[\"bench\"].Text.Matches(\"Benchmark.*\")",
-								Args: []ir.FilterExpr{{
-									Line:  799,
-									Op:    ir.FilterVarTextMatchesOp,
-									Src:   "m[\"bench\"].Text.Matches(\"Benchmark.*\")",
-									Value: "bench",
-									Args:  []ir.FilterExpr{{Line: 799, Op: ir.FilterStringOp, Src: "\"Benchmark.*\"", Value: "Benchmark.*"}},
-								}},
-							},
-							{
-								Line: 800,
-								Op:   ir.FilterNotOp,
-								Src:  "!m[\"bench\"].Text.Matches(\"bench.*\")",
-								Args: []ir.FilterExpr{{
-									Line:  800,
-									Op:    ir.FilterVarTextMatchesOp,
-									Src:   "m[\"bench\"].Text.Matches(\"bench.*\")",
-									Value: "bench",
-									Args:  []ir.FilterExpr{{Line: 800, Op: ir.FilterStringOp, Src: "\"bench.*\"", Value: "bench.*"}},
-								}},
-							},
-						},
-					},
-				},
-				{
-					Line:           803,
-					SyntaxPatterns: []ir.PatternString{{Line: 803, Value: "func $test($_ *testing.T) { $*_ }"}},
-					ReportTemplate: "function $test looks like a test helper, consider to change 1st param to 'tb testing.TB'",
-					WhereExpr: ir.FilterExpr{
-						Line:  804,
-						Op:    ir.FilterVarTextMatchesOp,
-						Src:   "m[\"test\"].Text.Matches(\"^test.*\")",
-						Value: "test",
-						Args:  []ir.FilterExpr{{Line: 804, Op: ir.FilterStringOp, Src: "\"^test.*\"", Value: "^test.*"}},
+						Line:  783,
+						Op:    ir.FilterGoVersionGreaterEqThanOp,
+						Src:   "m.GoVersion().GreaterEqThan(\"1.21\")",
+						Value: "1.21",
 					},
 				},
 				{
-					Line:           807,
-					SyntaxPatterns: []ir.PatternString{{Line: 807, Value: "func $bench($_ *testing.B) { $*_ }"}},
-					ReportTemplate: "function $bench looks like a benchmark helper, consider to change 1st param to 'tb testing.TB'",
+					Line:           785,
+					SyntaxPatterns: []ir.PatternString{{Line: 785, Value: "sync.OnceFunc($x)()"}},
+					ReportTemplate: "possible sync.OnceFunc misuse, consider to assign sync.OnceFunc($x) to a variable",
 					WhereExpr: ir.FilterExpr{
-						Line:  808,
-						Op:    ir.FilterVarTextMatchesOp,
-						Src:   "m[\"bench\"].Text.Matches(\"^bench(mark)?.*\")",
-						Value: "bench",
-						Args:  []ir.FilterExpr{{Line: 808, Op: ir.FilterStringOp, Src: "\"^bench(mark)?.*\"", Value: "^bench(mark)?.*"}},
+						Line:  787,
+						Op:    ir.FilterGoVersionGreaterEqThanOp,
+						Src:   "m.GoVersion().GreaterEqThan(\"1.21\")",
+						Value: "1.21",
 					},
 				},
 			},
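The badSyncOnceFunc rules above target a Go 1.21+ pitfall: building the sync.OnceFunc wrapper and invoking (or discarding) it in the same expression, which throws the wrapper away and defeats the once-only guarantee. A minimal sketch of both shapes, assuming an illustrative initConfig function that is not part of this patch:

```go
package main

import (
	"fmt"
	"sync"
)

func initConfig() { fmt.Println("loading config") }

func main() {
	// Flagged pattern: a fresh wrapper is created and called in one expression,
	// so every call site gets its own Once and initConfig can run repeatedly.
	sync.OnceFunc(initConfig)()

	// Preferred pattern: keep one wrapper and call it; initConfig runs at most once.
	initOnce := sync.OnceFunc(initConfig)
	initOnce()
	initOnce() // no-op, already ran
}
```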
diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go
index d270268bd0a85ce3084b2927c755dff9771f1f26..e2e225ebf2c7b1b8e61b133078a11e158edf070c 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go
@@ -14,7 +14,7 @@ func init() {
 	var info linter.CheckerInfo
 	info.Name = "typeUnparen"
 	info.Tags = []string{linter.StyleTag, linter.OpinionatedTag}
-	info.Summary = "Detects unneded parenthesis inside type expressions and suggests to remove them"
+	info.Summary = "Detects unneeded parenthesis inside type expressions and suggests to remove them"
 	info.Before = `type foo [](func([](func())))`
 	info.After = `type foo []func([]func())`
 
diff --git a/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go
index bcfe5a0c47dde84d7b72aa0f14f3e0187a553953..0401bf5d37efdb1210f356cad33e9a725fccc874 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go
@@ -91,7 +91,7 @@ func (c *unlambdaChecker) VisitExpr(x ast.Expr) {
 		}
 	}
 
-	if len(result.Args) == n {
+	if c.lenArgs(result.Args) == n {
 		c.warn(fn, callable)
 	}
 }
@@ -99,3 +99,20 @@ func (c *unlambdaChecker) VisitExpr(x ast.Expr) {
 func (c *unlambdaChecker) warn(cause ast.Node, suggestion string) {
 	c.ctx.Warn(cause, "replace `%s` with `%s`", cause, suggestion)
 }
+
+func (c *unlambdaChecker) lenArgs(args []ast.Expr) int {
+	lenArgs := len(args)
+
+	for _, arg := range args {
+		callExp, ok := arg.(*ast.CallExpr)
+		if !ok {
+			continue
+		}
+
+	// Don't count the function call itself, only its args.
+		lenArgs--
+		lenArgs += c.lenArgs(callExp.Args)
+	}
+
+	return lenArgs
+}
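For context on the unlambda change above: the checker flags function literals that merely forward their parameters to another callable and suggests passing that callable directly, and the new lenArgs helper appears to make the argument count robust when an argument is itself a nested call by counting only leaf arguments. A rough sketch of the pattern the checker targets, with mapStrings invented purely for the example:

```go
package main

import "strings"

// mapStrings is a throwaway helper for the example, not part of go-critic.
func mapStrings(in []string, f func(string) string) []string {
	out := make([]string, 0, len(in))
	for _, s := range in {
		out = append(out, f(s))
	}
	return out
}

func main() {
	words := []string{"Alpha", "Beta"}

	// unlambda would flag this literal: it only forwards s to strings.ToLower...
	lowered := mapStrings(words, func(s string) string { return strings.ToLower(s) })

	// ...and the suggested replacement passes the function value directly.
	lowered = mapStrings(words, strings.ToLower)

	_ = lowered
}
```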
diff --git a/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go
index 4358ab1713476be5a2bc70307cf90df08326c681..4c1ed41f6f5aaeb3aa21ab7ca38b844186d264fc 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go
@@ -37,7 +37,7 @@ type unnecessaryDeferChecker struct {
 // Visit implements the ast.Visitor. This visitor keeps track of the block
 // statement belongs to a function or any other block. If the block is not a
 // function and ends with a defer statement that should be OK since it's
-// defering the outer function.
+// deferring the outer function.
 func (c *unnecessaryDeferChecker) Visit(node ast.Node) ast.Visitor {
 	switch n := node.(type) {
 	case *ast.FuncDecl, *ast.FuncLit:
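Reading the comment fixed above: unnecessaryDefer reports a defer that is the final statement of a function body, where deferring gains nothing over a direct call, while a defer that merely closes an inner block (an if or for) is left alone because it still defers work to the enclosing function's exit. A small sketch under that reading, with the names invented for the example:

```go
package main

import (
	"fmt"
	"os"
)

func cleanupExample(name string) {
	fmt.Println("processing", name)
	// Likely flagged: the defer is the last statement in the function body,
	// so deferring gains nothing over calling os.Remove(name) directly here.
	defer os.Remove(name)
}

func main() {
	cleanupExample("example.tmp")
}
```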
diff --git a/vendor/github.com/go-critic/go-critic/checkers/utils.go b/vendor/github.com/go-critic/go-critic/checkers/utils.go
index e9123352d27d4289b59130443e7433eb98812725..6e12cf9b3093aed7adb41109e3ce77f100adc4e7 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/utils.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/utils.go
@@ -223,6 +223,8 @@ var goBuiltins = map[string]bool{
 	"imag":    true,
 	"len":     true,
 	"make":    true,
+	"min":     true,
+	"max":     true,
 	"new":     true,
 	"panic":   true,
 	"print":   true,
@@ -260,7 +262,7 @@ func isUnitTestFunc(ctx *linter.CheckerContext, fn *ast.FuncDecl) bool {
 	return false
 }
 
-// qualifiedName returns called expr fully-quallified name.
+// qualifiedName returns called expr fully-qualified name.
 //
 // It works for simple identifiers like f => "f" and identifiers
 // from other package like pkg.f => "pkg.f".
diff --git a/vendor/github.com/go-task/slim-sprig/.editorconfig b/vendor/github.com/go-task/slim-sprig/v3/.editorconfig
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/.editorconfig
rename to vendor/github.com/go-task/slim-sprig/v3/.editorconfig
diff --git a/vendor/github.com/go-task/slim-sprig/.gitattributes b/vendor/github.com/go-task/slim-sprig/v3/.gitattributes
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/.gitattributes
rename to vendor/github.com/go-task/slim-sprig/v3/.gitattributes
diff --git a/vendor/github.com/go-task/slim-sprig/.gitignore b/vendor/github.com/go-task/slim-sprig/v3/.gitignore
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/.gitignore
rename to vendor/github.com/go-task/slim-sprig/v3/.gitignore
diff --git a/vendor/github.com/go-task/slim-sprig/CHANGELOG.md b/vendor/github.com/go-task/slim-sprig/v3/CHANGELOG.md
similarity index 95%
rename from vendor/github.com/go-task/slim-sprig/CHANGELOG.md
rename to vendor/github.com/go-task/slim-sprig/v3/CHANGELOG.md
index 61d8ebffc375a5d7aeed921936d1126851bd0576..2ce45dd4eca6368a23814a2d55360261c57b1ad9 100644
--- a/vendor/github.com/go-task/slim-sprig/CHANGELOG.md
+++ b/vendor/github.com/go-task/slim-sprig/v3/CHANGELOG.md
@@ -1,5 +1,24 @@
 # Changelog
 
+## Release 3.2.3 (2022-11-29)
+
+### Changed
+
+- Updated docs (thanks @book987 @aJetHorn @neelayu @pellizzetti @apricote @SaigyoujiYuyuko233 @AlekSi)
+- #348: Updated huandu/xstrings which fixed a snake case bug (thanks @yxxhero)
+- #353: Updated masterminds/semver which included bug fixes
+- #354: Updated golang.org/x/crypto which included bug fixes
+
+## Release 3.2.2 (2021-02-04)
+
+This is a re-release of 3.2.1 to satisfy a requirement of the Go module system.

+
+## Release 3.2.1 (2021-02-04)
+
+### Changed
+
+- Upgraded `Masterminds/goutils` to `v1.1.1`. see the [Security Advisory](https://github.com/Masterminds/goutils/security/advisories/GHSA-xg2h-wx96-xgxr)
+
 ## Release 3.2.0 (2020-12-14)
 
 ### Added
diff --git a/vendor/github.com/go-task/slim-sprig/LICENSE.txt b/vendor/github.com/go-task/slim-sprig/v3/LICENSE.txt
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/LICENSE.txt
rename to vendor/github.com/go-task/slim-sprig/v3/LICENSE.txt
diff --git a/vendor/github.com/go-task/slim-sprig/README.md b/vendor/github.com/go-task/slim-sprig/v3/README.md
similarity index 88%
rename from vendor/github.com/go-task/slim-sprig/README.md
rename to vendor/github.com/go-task/slim-sprig/v3/README.md
index 72579471ff0e239d84515644ef9800f6b168632c..b5ab564254f44ccd9016e829208db73a09b52d4c 100644
--- a/vendor/github.com/go-task/slim-sprig/README.md
+++ b/vendor/github.com/go-task/slim-sprig/v3/README.md
@@ -1,4 +1,4 @@
-# Slim-Sprig: Template functions for Go templates [![GoDoc](https://godoc.org/github.com/go-task/slim-sprig?status.svg)](https://godoc.org/github.com/go-task/slim-sprig) [![Go Report Card](https://goreportcard.com/badge/github.com/go-task/slim-sprig)](https://goreportcard.com/report/github.com/go-task/slim-sprig)
+# Slim-Sprig: Template functions for Go templates [![Go Reference](https://pkg.go.dev/badge/github.com/go-task/slim-sprig/v3.svg)](https://pkg.go.dev/github.com/go-task/slim-sprig/v3)
 
 Slim-Sprig is a fork of [Sprig](https://github.com/Masterminds/sprig), but with
 all functions that depend on external (non standard library) or crypto packages
diff --git a/vendor/github.com/go-task/slim-sprig/Taskfile.yml b/vendor/github.com/go-task/slim-sprig/v3/Taskfile.yml
similarity index 89%
rename from vendor/github.com/go-task/slim-sprig/Taskfile.yml
rename to vendor/github.com/go-task/slim-sprig/v3/Taskfile.yml
index cdcfd223b719e7ed90055545e5e054ff202eab55..8e6346bb19eb04b29709770402bb3518db6a0d19 100644
--- a/vendor/github.com/go-task/slim-sprig/Taskfile.yml
+++ b/vendor/github.com/go-task/slim-sprig/v3/Taskfile.yml
@@ -1,6 +1,6 @@
 # https://taskfile.dev
 
-version: '2'
+version: '3'
 
 tasks:
   default:
diff --git a/vendor/github.com/go-task/slim-sprig/crypto.go b/vendor/github.com/go-task/slim-sprig/v3/crypto.go
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/crypto.go
rename to vendor/github.com/go-task/slim-sprig/v3/crypto.go
diff --git a/vendor/github.com/go-task/slim-sprig/date.go b/vendor/github.com/go-task/slim-sprig/v3/date.go
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/date.go
rename to vendor/github.com/go-task/slim-sprig/v3/date.go
diff --git a/vendor/github.com/go-task/slim-sprig/defaults.go b/vendor/github.com/go-task/slim-sprig/v3/defaults.go
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/defaults.go
rename to vendor/github.com/go-task/slim-sprig/v3/defaults.go
diff --git a/vendor/github.com/go-task/slim-sprig/dict.go b/vendor/github.com/go-task/slim-sprig/v3/dict.go
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/dict.go
rename to vendor/github.com/go-task/slim-sprig/v3/dict.go
diff --git a/vendor/github.com/go-task/slim-sprig/doc.go b/vendor/github.com/go-task/slim-sprig/v3/doc.go
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/doc.go
rename to vendor/github.com/go-task/slim-sprig/v3/doc.go
diff --git a/vendor/github.com/go-task/slim-sprig/functions.go b/vendor/github.com/go-task/slim-sprig/v3/functions.go
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/functions.go
rename to vendor/github.com/go-task/slim-sprig/v3/functions.go
diff --git a/vendor/github.com/go-task/slim-sprig/list.go b/vendor/github.com/go-task/slim-sprig/v3/list.go
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/list.go
rename to vendor/github.com/go-task/slim-sprig/v3/list.go
diff --git a/vendor/github.com/go-task/slim-sprig/network.go b/vendor/github.com/go-task/slim-sprig/v3/network.go
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/network.go
rename to vendor/github.com/go-task/slim-sprig/v3/network.go
diff --git a/vendor/github.com/go-task/slim-sprig/numeric.go b/vendor/github.com/go-task/slim-sprig/v3/numeric.go
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/numeric.go
rename to vendor/github.com/go-task/slim-sprig/v3/numeric.go
diff --git a/vendor/github.com/go-task/slim-sprig/reflect.go b/vendor/github.com/go-task/slim-sprig/v3/reflect.go
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/reflect.go
rename to vendor/github.com/go-task/slim-sprig/v3/reflect.go
diff --git a/vendor/github.com/go-task/slim-sprig/regex.go b/vendor/github.com/go-task/slim-sprig/v3/regex.go
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/regex.go
rename to vendor/github.com/go-task/slim-sprig/v3/regex.go
diff --git a/vendor/github.com/go-task/slim-sprig/strings.go b/vendor/github.com/go-task/slim-sprig/v3/strings.go
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/strings.go
rename to vendor/github.com/go-task/slim-sprig/v3/strings.go
diff --git a/vendor/github.com/go-task/slim-sprig/url.go b/vendor/github.com/go-task/slim-sprig/v3/url.go
similarity index 100%
rename from vendor/github.com/go-task/slim-sprig/url.go
rename to vendor/github.com/go-task/slim-sprig/v3/url.go
diff --git a/vendor/github.com/go-test/deep/.gitignore b/vendor/github.com/go-test/deep/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..53f12f0f0e813770f45ed049a680c325d9b7f6db
--- /dev/null
+++ b/vendor/github.com/go-test/deep/.gitignore
@@ -0,0 +1,2 @@
+*.swp
+*.out
diff --git a/vendor/github.com/go-test/deep/CHANGES.md b/vendor/github.com/go-test/deep/CHANGES.md
new file mode 100644
index 0000000000000000000000000000000000000000..98f8c18b18f93e42abea825646bcb67fb4e32631
--- /dev/null
+++ b/vendor/github.com/go-test/deep/CHANGES.md
@@ -0,0 +1,53 @@
+# go-test/deep Changelog
+
+## v1.0.9 released 2022-12-09
+
+* Fixed issue #45: Panic when comparing errors in unexported fields (PR #54) (@seveas)
+* Fixed issue #46: Functions are handled differently from reflect.DeepEqual (PR #55) (@countcb)
+* Updated matrix to go1.17, go1.18, and go1.19 and moved testing to GitHub Actions
+
+## v1.0.8 released 2021-10-13
+
+* Updated matrix to go1.15, go1.16, and go1.17
+* Added SECURITY.md and GitHub code analysis
+
+## v1.0.7 released 2020-07-11
+
+* Fixed issue #39: Confusing diff when comparing distinct types with the same name (PR #44)
+
+## v1.0.6 released 2020-04-21
+
+* Added `NilMapsAreEmpty` variable which causes a nil map to equal an empty map (PR #43) (@yalegko)
+
+## v1.0.5 released 2020-01-16
+
+* Added `NilSlicesAreEmpty` variable which causes a nil slice to be equal to an empty slice (PR #27) (@Anaminus)
+
+## v1.0.4 released 2019-09-15
+
+* Added \`deep:"-"\` structure field tag to ignore field (PR #38) (@flga)
+
+## v1.0.3 released 2019-08-18
+
+* Fixed issue #31: panic on typed primitives that implement error interface
+
+## v1.0.2 released 2019-07-14
+
+* Enabled Go module (@radeksimko)
+* Changed supported and tested Go versions: 1.10, 1.11, and 1.12 (dropped 1.9)
+* Changed Error equality: additional struct fields are compared too (PR #29) (@andrewmostello)
+* Fixed typos and ineffassign issues (PR #25) (@tariq1890)
+* Fixed diff order for nil comparison (PR #16) (@gmarik)
+* Fixed slice equality when slices are extracted from the same array (PR #11) (@risteli)
+* Fixed test spelling and messages (PR #19) (@sofuture)
+* Fixed issue #15: panic on comparing struct with anonymous time.Time
+* Fixed issue #18: Panic when comparing structs with time.Time value and CompareUnexportedFields is true
+* Fixed issue #21: Set default MaxDepth = 0 (disabled) (PR #23)
+
+## v1.0.1 released 2018-01-28
+
+* Fixed issue #12: Arrays are not properly compared (@samlitowitz)
+
+## v1.0.0 released 2017-10-27
+
+* First release
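A couple of the features listed in this changelog are package-level switches and a struct tag rather than new functions; here is a small, hedged sketch of how they are typically used (the Account type and field names are invented for the example):

```go
package example

import (
	"fmt"

	"github.com/go-test/deep"
)

type Account struct {
	Name      string
	Tags      []string
	UpdatedAt string `deep:"-"` // field ignored by deep.Equal via the v1.0.4 struct tag
}

// ReportDiff prints any differences between two accounts.
func ReportDiff(a, b Account) {
	// Treat nil and empty slices as equal (package-level option added in v1.0.5).
	deep.NilSlicesAreEmpty = true

	if diff := deep.Equal(a, b); diff != nil {
		fmt.Println(diff)
	}
}
```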
diff --git a/vendor/github.com/go-test/deep/LICENSE b/vendor/github.com/go-test/deep/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..228ef16f74e89a812de8b2cc4e4d7e8efd52c5c9
--- /dev/null
+++ b/vendor/github.com/go-test/deep/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright 2015-2017 Daniel Nichter
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/go-test/deep/README.md b/vendor/github.com/go-test/deep/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..08a86070d1a5646b3c6601b13e089ebb954cb0ee
--- /dev/null
+++ b/vendor/github.com/go-test/deep/README.md
@@ -0,0 +1,53 @@
+# Deep Variable Equality for Humans
+
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-test/deep)](https://goreportcard.com/report/github.com/go-test/deep)
+[![Coverage Status](https://coveralls.io/repos/github/go-test/deep/badge.svg?branch=master)](https://coveralls.io/github/go-test/deep?branch=master)
+[![Go Reference](https://pkg.go.dev/badge/github.com/go-test/deep.svg)](https://pkg.go.dev/github.com/go-test/deep)
+
+This package provides a single function: `deep.Equal`. It's like [reflect.DeepEqual](http://golang.org/pkg/reflect/#DeepEqual) but much friendlier to humans (or any sentient being) for two reasons:
+
+* `deep.Equal` returns a list of differences
+* `deep.Equal` does not compare unexported fields (by default)
+
+`reflect.DeepEqual` is good (like all things Golang!), but it's a game of [Hunt the Wumpus](https://en.wikipedia.org/wiki/Hunt_the_Wumpus). For large maps, slices, and structs, finding the difference is difficult.
+
+`deep.Equal` doesn't play games with you; it lists the differences:
+
+```go
+package main_test
+
+import (
+	"testing"
+	"github.com/go-test/deep"
+)
+
+type T struct {
+	Name    string
+	Numbers []float64
+}
+
+func TestDeepEqual(t *testing.T) {
+	// Can you spot the difference?
+	t1 := T{
+		Name:    "Isabella",
+		Numbers: []float64{1.13459, 2.29343, 3.010100010},
+	}
+	t2 := T{
+		Name:    "Isabella",
+		Numbers: []float64{1.13459, 2.29843, 3.010100010},
+	}
+
+	if diff := deep.Equal(t1, t2); diff != nil {
+		t.Error(diff)
+	}
+}
+```
+
+
+```
+$ go test
+--- FAIL: TestDeepEqual (0.00s)
+        main_test.go:25: [Numbers.slice[1]: 2.29343 != 2.29843]
+```
+
+The difference is in `Numbers.slice[1]`: the two values aren't equal using Go `==`.
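+
+Comparison behaviour can be tuned through package-level variables such as
+`FloatPrecision`, `MaxDiff`, and `NilSlicesAreEmpty`. A small sketch (the
+values below are only examples):
+
+```go
+deep.FloatPrecision = 6       // round floats to 6 decimal places before comparing
+deep.MaxDiff = 3              // report at most 3 differences
+deep.NilSlicesAreEmpty = true // treat a nil slice as equal to an empty slice
+```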
diff --git a/vendor/github.com/go-test/deep/SECURITY.md b/vendor/github.com/go-test/deep/SECURITY.md
new file mode 100644
index 0000000000000000000000000000000000000000..845584a9a9ef4f9e95d735bd3d1d0527d79772eb
--- /dev/null
+++ b/vendor/github.com/go-test/deep/SECURITY.md
@@ -0,0 +1,17 @@
+# Security Policy
+
+## Supported Versions
+
+For security patches, the latest release is supported:
+
+| Version | Supported          |
+| ------- | ------------------ |
+| 1.0.x   | :white_check_mark: |
+
+## Reporting a Vulnerability
+
+To report a vulnerability, [create an issue](https://github.com/go-test/deep/issues) with the _security_ label.
+
+This project is developed and maintained by volunteers during their free time,
+so there is no SLA or ETA for fixing vulnerabilities (or any issues).
+Please help by submitting a PR to fix an issue.
diff --git a/vendor/github.com/go-test/deep/deep.go b/vendor/github.com/go-test/deep/deep.go
new file mode 100644
index 0000000000000000000000000000000000000000..4aab66a8b467e8770e3b1c508067f2367d7dfbeb
--- /dev/null
+++ b/vendor/github.com/go-test/deep/deep.go
@@ -0,0 +1,504 @@
+// Package deep provides function deep.Equal which is like reflect.DeepEqual but
+// returns a list of differences. This is helpful when comparing complex types
+// like structures and maps.
+package deep
+
+import (
+	"errors"
+	"fmt"
+	"log"
+	"reflect"
+	"strings"
+)
+
+var (
+	// FloatPrecision is the number of decimal places to round float values
+	// to when comparing.
+	FloatPrecision = 10
+
+	// MaxDiff specifies the maximum number of differences to return.
+	MaxDiff = 10
+
+	// MaxDepth specifies the maximum levels of a struct to recurse into,
+	// if greater than zero. If zero, there is no limit.
+	MaxDepth = 0
+
+	// LogErrors causes errors to be logged to STDERR when true.
+	LogErrors = false
+
+	// CompareUnexportedFields causes unexported struct fields, like s in
+	// T{s int}, to be compared when true. This does not work for comparing
+	// error or Time types on unexported fields because methods on unexported
+	// fields cannot be called.
+	CompareUnexportedFields = false
+
+	// CompareFunctions compares functions the same as reflect.DeepEqual:
+	// only two nil functions are equal. Every other combination is not equal.
+	// This is disabled by default because previous versions of this package
+	// ignored functions. Enabling it can possibly report new diffs.
+	CompareFunctions = false
+
+	// NilSlicesAreEmpty causes a nil slice to be equal to an empty slice.
+	NilSlicesAreEmpty = false
+
+	// NilMapsAreEmpty causes a nil map to be equal to an empty map.
+	NilMapsAreEmpty = false
+)
+
+var (
+	// ErrMaxRecursion is logged when MaxDepth is reached.
+	ErrMaxRecursion = errors.New("recursed to MaxDepth")
+
+	// ErrTypeMismatch is logged when Equal is passed two values of different types.
+	ErrTypeMismatch = errors.New("variables are different reflect.Type")
+
+	// ErrNotHandled is logged when a primitive Go kind is not handled.
+	ErrNotHandled = errors.New("cannot compare the reflect.Kind")
+)
+
+const (
+	// FLAG_NONE is a placeholder for default Equal behavior. You don't have to
+	// pass it to Equal; if you do, it does nothing.
+	FLAG_NONE byte = iota
+
+	// FLAG_IGNORE_SLICE_ORDER causes Equal to ignore slice order so that
+	// []int{1, 2} and []int{2, 1} are equal. Only slices of primitive scalars
+	// like numbers and strings are supported. Slices of complex types,
+	// like []T where T is a struct, are undefined because Equal does not
+	// recurse into the slice value when this flag is enabled.
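+	//
+	// The flag is passed as a trailing argument to Equal, e.g. (illustrative):
+	//   Equal([]string{"a", "b"}, []string{"b", "a"}, FLAG_IGNORE_SLICE_ORDER)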
+	FLAG_IGNORE_SLICE_ORDER
+)
+
+type cmp struct {
+	diff        []string
+	buff        []string
+	floatFormat string
+	flag        map[byte]bool
+}
+
+var errorType = reflect.TypeOf((*error)(nil)).Elem()
+
+// Equal compares variables a and b, recursing into their structure up to
+// MaxDepth levels deep (if greater than zero), and returns a list of differences,
+// or nil if there are none. Some differences may not be found if an error is
+// also returned.
+//
+// If a type has an Equal method, like time.Equal, it is called to check for
+// equality.
+//
+// When comparing a struct, if a field has the tag `deep:"-"` then it will be
+// ignored.
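+//
+// For example (illustrative):
+//
+//	type Config struct {
+//	    Name     string
+//	    Internal string `deep:"-"` // ignored by Equal
+//	}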
+func Equal(a, b interface{}, flags ...interface{}) []string {
+	aVal := reflect.ValueOf(a)
+	bVal := reflect.ValueOf(b)
+	c := &cmp{
+		diff:        []string{},
+		buff:        []string{},
+		floatFormat: fmt.Sprintf("%%.%df", FloatPrecision),
+		flag:        map[byte]bool{},
+	}
+	for i := range flags {
+		c.flag[flags[i].(byte)] = true
+	}
+	if a == nil && b == nil {
+		return nil
+	} else if a == nil && b != nil {
+		c.saveDiff("<nil pointer>", b)
+	} else if a != nil && b == nil {
+		c.saveDiff(a, "<nil pointer>")
+	}
+	if len(c.diff) > 0 {
+		return c.diff
+	}
+
+	c.equals(aVal, bVal, 0)
+	if len(c.diff) > 0 {
+		return c.diff // diffs
+	}
+	return nil // no diffs
+}
+
+func (c *cmp) equals(a, b reflect.Value, level int) {
+	if MaxDepth > 0 && level > MaxDepth {
+		logError(ErrMaxRecursion)
+		return
+	}
+
+	// Check if one value is nil, e.g. T{x: *X} and T.x is nil
+	if !a.IsValid() || !b.IsValid() {
+		if a.IsValid() && !b.IsValid() {
+			c.saveDiff(a.Type(), "<nil pointer>")
+		} else if !a.IsValid() && b.IsValid() {
+			c.saveDiff("<nil pointer>", b.Type())
+		}
+		return
+	}
+
+	// If different types, they can't be equal
+	aType := a.Type()
+	bType := b.Type()
+	if aType != bType {
+		// Built-in types don't have a name, so don't report [3]int != [2]int as " != "
+		if aType.Name() == "" || aType.Name() != bType.Name() {
+			c.saveDiff(aType, bType)
+		} else {
+			// Type names can be the same, e.g. pkg/v1.Error and pkg/v2.Error
+			// are both exported as pkg, so unless we include the full pkg path
+			// the diff will be "pkg.Error != pkg.Error"
+			// https://github.com/go-test/deep/issues/39
+			aFullType := aType.PkgPath() + "." + aType.Name()
+			bFullType := bType.PkgPath() + "." + bType.Name()
+			c.saveDiff(aFullType, bFullType)
+		}
+		logError(ErrTypeMismatch)
+		return
+	}
+
+	// Primitive https://golang.org/pkg/reflect/#Kind
+	aKind := a.Kind()
+	bKind := b.Kind()
+
+	// Do a and b have underlying elements? Yes if they're ptr or interface.
+	aElem := aKind == reflect.Ptr || aKind == reflect.Interface
+	bElem := bKind == reflect.Ptr || bKind == reflect.Interface
+
+	// If both types implement the error interface, compare the error strings.
+	// This must be done before dereferencing because errors.New() returns a
+	// pointer to a struct that implements the interface:
+	//   func (e *errorString) Error() string {
+	// And we check CanInterface as a hack to make sure the underlying method
+	// is callable because https://github.com/golang/go/issues/32438
+	// Issues:
+	//   https://github.com/go-test/deep/issues/31
+	//   https://github.com/go-test/deep/issues/45
+	if (aType.Implements(errorType) && bType.Implements(errorType)) &&
+		((!aElem || !a.IsNil()) && (!bElem || !b.IsNil())) &&
+		(a.CanInterface() && b.CanInterface()) {
+		aString := a.MethodByName("Error").Call(nil)[0].String()
+		bString := b.MethodByName("Error").Call(nil)[0].String()
+		if aString != bString {
+			c.saveDiff(aString, bString)
+		}
+		return
+	}
+
+	// Dereference pointers and interface{}
+	if aElem || bElem {
+		if aElem {
+			a = a.Elem()
+		}
+		if bElem {
+			b = b.Elem()
+		}
+		c.equals(a, b, level+1)
+		return
+	}
+
+	switch aKind {
+
+	/////////////////////////////////////////////////////////////////////
+	// Iterable kinds
+	/////////////////////////////////////////////////////////////////////
+
+	case reflect.Struct:
+		/*
+			The variables are structs like:
+				type T struct {
+					FirstName string
+					LastName  string
+				}
+			Type = <pkg>.T, Kind = reflect.Struct
+
+			Iterate through the fields (FirstName, LastName), recurse into their values.
+		*/
+
+		// Types with an Equal() method, like time.Time, only if struct field
+		// is exported (CanInterface)
+		if eqFunc := a.MethodByName("Equal"); eqFunc.IsValid() && eqFunc.CanInterface() {
+			// Handle https://github.com/go-test/deep/issues/15:
+			// Don't call T.Equal if the method is from an embedded struct, like:
+			//   type Foo struct { time.Time }
+			// First, we'll encounter Equal(Ttime, time.Time) but if we pass b
+			// as the 2nd arg we'll panic: "Call using pkg.Foo as type time.Time"
+			// As far as I can tell, there's no way to see that the method is from
+			// time.Time not Foo. So we check the type of the 1st (0) arg and skip
+			// unless it's b's type. Later, we'll encounter the time.Time anonymous/
+			// embedded field and then we'll have Equal(time.Time, time.Time).
+			funcType := eqFunc.Type()
+			if funcType.NumIn() == 1 && funcType.In(0) == bType {
+				retVals := eqFunc.Call([]reflect.Value{b})
+				if !retVals[0].Bool() {
+					c.saveDiff(a, b)
+				}
+				return
+			}
+		}
+
+		for i := 0; i < a.NumField(); i++ {
+			if aType.Field(i).PkgPath != "" && !CompareUnexportedFields {
+				continue // skip unexported field, e.g. s in type T struct {s string}
+			}
+
+			if aType.Field(i).Tag.Get("deep") == "-" {
+				continue // field wants to be ignored
+			}
+
+			c.push(aType.Field(i).Name) // push field name to buff
+
+			// Get the Value for each field, e.g. FirstName has Type = string,
+			// Kind = reflect.String.
+			af := a.Field(i)
+			bf := b.Field(i)
+
+			// Recurse to compare the field values
+			c.equals(af, bf, level+1)
+
+			c.pop() // pop field name from buff
+
+			if len(c.diff) >= MaxDiff {
+				break
+			}
+		}
+	case reflect.Map:
+		/*
+			The variables are maps like:
+				map[string]int{
+					"foo": 1,
+					"bar": 2,
+				}
+			Type = map[string]int, Kind = reflect.Map
+
+			Or:
+				type T map[string]int{}
+			Type = <pkg>.T, Kind = reflect.Map
+
+			Iterate through the map keys (foo, bar), recurse into their values.
+		*/
+
+		if a.IsNil() || b.IsNil() {
+			if NilMapsAreEmpty {
+				if a.IsNil() && b.Len() != 0 {
+					c.saveDiff("<nil map>", b)
+					return
+				} else if a.Len() != 0 && b.IsNil() {
+					c.saveDiff(a, "<nil map>")
+					return
+				}
+			} else {
+				if a.IsNil() && !b.IsNil() {
+					c.saveDiff("<nil map>", b)
+				} else if !a.IsNil() && b.IsNil() {
+					c.saveDiff(a, "<nil map>")
+				}
+			}
+			return
+		}
+
+		if a.Pointer() == b.Pointer() {
+			return
+		}
+
+		for _, key := range a.MapKeys() {
+			c.push(fmt.Sprintf("map[%v]", key))
+
+			aVal := a.MapIndex(key)
+			bVal := b.MapIndex(key)
+			if bVal.IsValid() {
+				c.equals(aVal, bVal, level+1)
+			} else {
+				c.saveDiff(aVal, "<does not have key>")
+			}
+
+			c.pop()
+
+			if len(c.diff) >= MaxDiff {
+				return
+			}
+		}
+
+		for _, key := range b.MapKeys() {
+			if aVal := a.MapIndex(key); aVal.IsValid() {
+				continue
+			}
+
+			c.push(fmt.Sprintf("map[%v]", key))
+			c.saveDiff("<does not have key>", b.MapIndex(key))
+			c.pop()
+			if len(c.diff) >= MaxDiff {
+				return
+			}
+		}
+	case reflect.Array:
+		n := a.Len()
+		for i := 0; i < n; i++ {
+			c.push(fmt.Sprintf("array[%d]", i))
+			c.equals(a.Index(i), b.Index(i), level+1)
+			c.pop()
+			if len(c.diff) >= MaxDiff {
+				break
+			}
+		}
+	case reflect.Slice:
+		if NilSlicesAreEmpty {
+			if a.IsNil() && b.Len() != 0 {
+				c.saveDiff("<nil slice>", b)
+				return
+			} else if a.Len() != 0 && b.IsNil() {
+				c.saveDiff(a, "<nil slice>")
+				return
+			}
+		} else {
+			if a.IsNil() && !b.IsNil() {
+				c.saveDiff("<nil slice>", b)
+				return
+			} else if !a.IsNil() && b.IsNil() {
+				c.saveDiff(a, "<nil slice>")
+				return
+			}
+		}
+
+		// Equal if same underlying pointer and same length; the latter handles
+		//   foo := []int{1, 2, 3, 4}
+		//   a := foo[0:2] // == {1,2}
+		//   b := foo[2:4] // == {3,4}
+		// a and b are same pointer but different slices (lengths) of the underlying
+		// array, so not equal.
+		aLen := a.Len()
+		bLen := b.Len()
+		if a.Pointer() == b.Pointer() && aLen == bLen {
+			return
+		}
+
+		if c.flag[FLAG_IGNORE_SLICE_ORDER] {
+			// Compare slices by value and value count; ignore order.
+			// Value equality is implicitly established by the maps:
+			// any value v1 will hash to the same map key if it's equal
+			// to another value v2. Then equality is determined by value
+			// count: presuming v1 == v2, the slices are equal if there
+			// are equal numbers of v1 in each slice.
+			am := map[interface{}]int{}
+			for i := 0; i < a.Len(); i++ {
+				am[a.Index(i).Interface()] += 1
+			}
+			bm := map[interface{}]int{}
+			for i := 0; i < b.Len(); i++ {
+				bm[b.Index(i).Interface()] += 1
+			}
+			c.cmpMapValueCounts(a, b, am, bm, true)  // a cmp b
+			c.cmpMapValueCounts(b, a, bm, am, false) // b cmp a
+		} else {
+			// Compare slices by order
+			n := aLen
+			if bLen > aLen {
+				n = bLen
+			}
+			for i := 0; i < n; i++ {
+				c.push(fmt.Sprintf("slice[%d]", i))
+				if i < aLen && i < bLen {
+					c.equals(a.Index(i), b.Index(i), level+1)
+				} else if i < aLen {
+					c.saveDiff(a.Index(i), "<no value>")
+				} else {
+					c.saveDiff("<no value>", b.Index(i))
+				}
+				c.pop()
+				if len(c.diff) >= MaxDiff {
+					break
+				}
+			}
+		}
+
+	/////////////////////////////////////////////////////////////////////
+	// Primitive kinds
+	/////////////////////////////////////////////////////////////////////
+
+	case reflect.Float32, reflect.Float64:
+		// Round floats to FloatPrecision decimal places to compare with
+		// user-defined precision. As is commonly known, floats have "imprecision"
+		// such that 0.1 becomes 0.100000001490116119384765625. This cannot
+		// be avoided; it can only be handled. Issue 30 suggested that floats
+		// be compared using an epsilon: equal = |a-b| < epsilon.
+		// In many cases the result is the same, but I think epsilon is a little
+		// less clear for users to reason about. See issue 30 for details.
+		aval := fmt.Sprintf(c.floatFormat, a.Float())
+		bval := fmt.Sprintf(c.floatFormat, b.Float())
+		if aval != bval {
+			c.saveDiff(a.Float(), b.Float())
+		}
+	case reflect.Bool:
+		if a.Bool() != b.Bool() {
+			c.saveDiff(a.Bool(), b.Bool())
+		}
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+		if a.Int() != b.Int() {
+			c.saveDiff(a.Int(), b.Int())
+		}
+	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+		if a.Uint() != b.Uint() {
+			c.saveDiff(a.Uint(), b.Uint())
+		}
+	case reflect.String:
+		if a.String() != b.String() {
+			c.saveDiff(a.String(), b.String())
+		}
+	case reflect.Func:
+		if CompareFunctions {
+			if !a.IsNil() || !b.IsNil() {
+				aVal, bVal := "nil func", "nil func"
+				if !a.IsNil() {
+					aVal = "func"
+				}
+				if !b.IsNil() {
+					bVal = "func"
+				}
+				c.saveDiff(aVal, bVal)
+			}
+		}
+	default:
+		logError(ErrNotHandled)
+	}
+}
+
+func (c *cmp) push(name string) {
+	c.buff = append(c.buff, name)
+}
+
+func (c *cmp) pop() {
+	if len(c.buff) > 0 {
+		c.buff = c.buff[0 : len(c.buff)-1]
+	}
+}
+
+func (c *cmp) saveDiff(aval, bval interface{}) {
+	if len(c.buff) > 0 {
+		varName := strings.Join(c.buff, ".")
+		c.diff = append(c.diff, fmt.Sprintf("%s: %v != %v", varName, aval, bval))
+	} else {
+		c.diff = append(c.diff, fmt.Sprintf("%v != %v", aval, bval))
+	}
+}
+
+func (c *cmp) cmpMapValueCounts(a, b reflect.Value, am, bm map[interface{}]int, a2b bool) {
+	for v := range am {
+		aCount, _ := am[v]
+		bCount, _ := bm[v]
+
+		if aCount != bCount {
+			c.push(fmt.Sprintf("(unordered) slice[]=%v: value count", v))
+			if a2b {
+				c.saveDiff(fmt.Sprintf("%d", aCount), fmt.Sprintf("%d", bCount))
+			} else {
+				c.saveDiff(fmt.Sprintf("%d", bCount), fmt.Sprintf("%d", aCount))
+			}
+			c.pop()
+		}
+		delete(am, v)
+		delete(bm, v)
+	}
+}
+
+func logError(err error) {
+	if LogErrors {
+		log.Println(err)
+	}
+}
diff --git a/vendor/github.com/go-toolsmith/astequal/astequal.go b/vendor/github.com/go-toolsmith/astequal/astequal.go
index 3d8db4af902e94f073e1b8c14fdf5492c53a67bd..d1a04e942737315b7e42c7fee05c5a4ef998ba76 100644
--- a/vendor/github.com/go-toolsmith/astequal/astequal.go
+++ b/vendor/github.com/go-toolsmith/astequal/astequal.go
@@ -4,8 +4,6 @@ package astequal
 import (
 	"go/ast"
 	"go/token"
-
-	"golang.org/x/exp/typeparams"
 )
 
 // Node reports whether two AST nodes are structurally (deep) equal.
@@ -109,8 +107,8 @@ func astExprEq(x, y ast.Expr) bool {
 		y, ok := y.(*ast.IndexExpr)
 		return ok && astIndexExprEq(x, y)
 
-	case *typeparams.IndexListExpr:
-		y, ok := y.(*typeparams.IndexListExpr)
+	case *ast.IndexListExpr:
+		y, ok := y.(*ast.IndexListExpr)
 		return ok && astIndexListExprEq(x, y)
 
 	case *ast.SliceExpr:
@@ -323,7 +321,7 @@ func astFuncTypeEq(x, y *ast.FuncType) bool {
 	}
 	return astFieldListEq(x.Params, y.Params) &&
 		astFieldListEq(x.Results, y.Results) &&
-		astFieldListEq(typeparams.ForFuncType(x), typeparams.ForFuncType(y))
+		astFieldListEq(forFuncType(x), forFuncType(y))
 }
 
 func astBasicLitEq(x, y *ast.BasicLit) bool {
@@ -378,7 +376,7 @@ func astIndexExprEq(x, y *ast.IndexExpr) bool {
 	return astExprEq(x.X, y.X) && astExprEq(x.Index, y.Index)
 }
 
-func astIndexListExprEq(x, y *typeparams.IndexListExpr) bool {
+func astIndexListExprEq(x, y *ast.IndexListExpr) bool {
 	if x == nil || y == nil {
 		return x == y
 	}
@@ -690,7 +688,7 @@ func astTypeSpecEq(x, y *ast.TypeSpec) bool {
 		return x == y
 	}
 	return astIdentEq(x.Name, y.Name) && astExprEq(x.Type, y.Type) &&
-		astFieldListEq(typeparams.ForTypeSpec(x), typeparams.ForTypeSpec(y))
+		astFieldListEq(forTypeSpec(x), forTypeSpec(y))
 }
 
 func astValueSpecEq(x, y *ast.ValueSpec) bool {
@@ -755,3 +753,19 @@ func astExprSliceEq(xs, ys []ast.Expr) bool {
 	}
 	return true
 }
+
+// forTypeSpec returns n.TypeParams.
+func forTypeSpec(n *ast.TypeSpec) *ast.FieldList {
+	if n == nil {
+		return nil
+	}
+	return n.TypeParams
+}
+
+// forFuncType returns n.TypeParams.
+func forFuncType(n *ast.FuncType) *ast.FieldList {
+	if n == nil {
+		return nil
+	}
+	return n.TypeParams
+}
diff --git a/vendor/github.com/go-toolsmith/astequal/diff.go b/vendor/github.com/go-toolsmith/astequal/diff.go
new file mode 100644
index 0000000000000000000000000000000000000000..cd69b4525055782590c7a229fe60857f2e51220c
--- /dev/null
+++ b/vendor/github.com/go-toolsmith/astequal/diff.go
@@ -0,0 +1,23 @@
+package astequal
+
+import (
+	"bytes"
+	"go/ast"
+	"go/format"
+	"go/token"
+
+	"github.com/google/go-cmp/cmp"
+)
+
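+// Diff formats both nodes with go/format and returns a textual diff of the
+// two renderings (empty when they format identically).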
+func Diff(x, y ast.Node) string {
+	var buf bytes.Buffer
+	format.Node(&buf, token.NewFileSet(), x)
+	s1 := buf.String()
+
+	buf.Reset()
+	format.Node(&buf, token.NewFileSet(), y)
+	s2 := buf.String()
+
+	// TODO(cristaloleg): replace with a more lightweight diff impl.
+	return cmp.Diff(s1, s2)
+}
diff --git a/vendor/github.com/go-viper/mapstructure/v2/.editorconfig b/vendor/github.com/go-viper/mapstructure/v2/.editorconfig
new file mode 100644
index 0000000000000000000000000000000000000000..1f664d13a5f6a6a4a8c7f6d2861e6bd2a2196880
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/.editorconfig
@@ -0,0 +1,18 @@
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+indent_size = 4
+indent_style = space
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[*.go]
+indent_style = tab
+
+[{Makefile,*.mk}]
+indent_style = tab
+
+[*.nix]
+indent_size = 2
diff --git a/vendor/github.com/go-viper/mapstructure/v2/.envrc b/vendor/github.com/go-viper/mapstructure/v2/.envrc
new file mode 100644
index 0000000000000000000000000000000000000000..c66fc0d3548dedb6fc5094293289b48113800f71
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/.envrc
@@ -0,0 +1,4 @@
+if ! has nix_direnv_version || ! nix_direnv_version 3.0.1; then
+  source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/3.0.1/direnvrc" "sha256-17G+Mvt/JsyJrwsf7bqMr7ho7liHP+0Lo4RMIHgp0F8="
+fi
+use flake . --impure
diff --git a/vendor/github.com/go-viper/mapstructure/v2/.gitignore b/vendor/github.com/go-viper/mapstructure/v2/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..470e7ca2bd2b86450327aa15b37c3b1d6de21829
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/.gitignore
@@ -0,0 +1,6 @@
+/.devenv/
+/.direnv/
+/.pre-commit-config.yaml
+/bin/
+/build/
+/var/
diff --git a/vendor/github.com/go-viper/mapstructure/v2/.golangci.yaml b/vendor/github.com/go-viper/mapstructure/v2/.golangci.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..763143aa77a17f07f78c1a3afac4a4ac5b2e7232
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/.golangci.yaml
@@ -0,0 +1,23 @@
+run:
+  timeout: 5m
+
+linters-settings:
+  gci:
+    sections:
+      - standard
+      - default
+      - prefix(github.com/go-viper/mapstructure)
+  golint:
+    min-confidence: 0
+  goimports:
+    local-prefixes: github.com/go-viper/mapstructure
+
+linters:
+  disable-all: true
+  enable:
+    - gci
+    - gofmt
+    - gofumpt
+    - goimports
+    - staticcheck
+    # - stylecheck
diff --git a/vendor/github.com/go-viper/mapstructure/v2/CHANGELOG.md b/vendor/github.com/go-viper/mapstructure/v2/CHANGELOG.md
new file mode 100644
index 0000000000000000000000000000000000000000..ae634d1cc08ff5696c900243eadc013409cfd6b6
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/CHANGELOG.md
@@ -0,0 +1,101 @@
+## 1.5.1
+
+* Wrap errors so they're compatible with `errors.Is` and `errors.As` [GH-282]
+* Fix map of slices not decoding properly in certain cases. [GH-266]
+
+## 1.5.0
+
+* New option `IgnoreUntaggedFields` to ignore decoding to any fields
+  without `mapstructure` (or the configured tag name) set [GH-277]
+* New option `ErrorUnset` which makes it an error if any fields
+  in a target struct are not set by the decoding process. [GH-225]
+* New function `OrComposeDecodeHookFunc` to help compose decode hooks. [GH-240]
+* Decoding to slice from array no longer crashes [GH-265]
+* Decode nested struct pointers to map [GH-271]
+* Fix issue where `,squash` was ignored if `Squash` option was set. [GH-280]
+* Fix issue where fields with `,omitempty` would sometimes decode
+  into a map with an empty string key [GH-281]
+
+## 1.4.3
+
+* Fix cases where `json.Number` didn't decode properly [GH-261]
+
+## 1.4.2
+
+* Custom name matchers to support any sort of casing, formatting, etc. for
+  field names. [GH-250]
+* Fix possible panic in ComposeDecodeHookFunc [GH-251]
+
+## 1.4.1
+
+* Fix regression where `*time.Time` value would be set to empty and not be sent
+  to decode hooks properly [GH-232]
+
+## 1.4.0
+
+* A new decode hook type `DecodeHookFuncValue` has been added that has
+  access to the full values. [GH-183]
+* Squash is now supported with embedded fields that are struct pointers [GH-205]
+* Empty strings will convert to 0 for all numeric types when weakly decoding [GH-206]
+
+## 1.3.3
+
+* Decoding maps from maps creates a settable value for decode hooks [GH-203]
+
+## 1.3.2
+
+* Decode into interface type with a struct value is supported [GH-187]
+
+## 1.3.1
+
+* Squash should only squash embedded structs. [GH-194]
+
+## 1.3.0
+
+* Added `",omitempty"` support. This will ignore zero values in the source
+  structure when encoding. [GH-145]
+
+## 1.2.3
+
+* Fix duplicate entries in Keys list with pointer values. [GH-185]
+
+## 1.2.2
+
+* Do not add unsettable (unexported) values to the unused metadata key
+  or "remain" value. [GH-150]
+
+## 1.2.1
+
+* Go modules checksum mismatch fix
+
+## 1.2.0
+
+* Added support to capture unused values in a field using the `",remain"` value
+  in the mapstructure tag. There is an example to showcase usage.
+* Added `DecoderConfig` option to always squash embedded structs
+* `json.Number` can decode into `uint` types
+* Empty slices are preserved and not replaced with nil slices
+* Fix panic that can occur when decoding a map into a nil slice of structs
+* Improved package documentation for godoc
+
+## 1.1.2
+
+* Fix error when decode hook decodes interface implementation into interface
+  type. [GH-140]
+
+## 1.1.1
+
+* Fix panic that can happen in `decodePtr`
+
+## 1.1.0
+
+* Added `StringToIPHookFunc` to convert `string` to `net.IP` and `net.IPNet` [GH-133]
+* Support struct to struct decoding [GH-137]
+* If source map value is nil, then destination map value is nil (instead of empty)
+* If source slice value is nil, then destination slice value is nil (instead of empty)
+* If source pointer is nil, then destination pointer is set to nil (instead of
+  allocated zero value of type)
+
+## 1.0.0
+
+* Initial tagged stable release.
diff --git a/vendor/github.com/go-viper/mapstructure/v2/LICENSE b/vendor/github.com/go-viper/mapstructure/v2/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..f9c841a51e0d11ec20c19ff7600e88da826867fa
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2013 Mitchell Hashimoto
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/go-viper/mapstructure/v2/README.md b/vendor/github.com/go-viper/mapstructure/v2/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..2b28db89489b0a885a4ec3c41a6efca5e0758d2d
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/README.md
@@ -0,0 +1,59 @@
+# mapstructure
+
+[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/go-viper/mapstructure/ci.yaml?branch=main&style=flat-square)](https://github.com/go-viper/mapstructure/actions?query=workflow%3ACI)
+[![go.dev reference](https://img.shields.io/badge/go.dev-reference-007d9c?logo=go&logoColor=white&style=flat-square)](https://pkg.go.dev/mod/github.com/go-viper/mapstructure/v2)
+![Go Version](https://img.shields.io/badge/go%20version-%3E=1.18-61CFDD.svg?style=flat-square)
+
+mapstructure is a Go library for decoding generic map values to structures
+and vice versa, while providing helpful error handling.
+
+This library is most useful when decoding values from some data stream (JSON,
+Gob, etc.) where you don't _quite_ know the structure of the underlying data
+until you read a part of it. You can therefore read a `map[string]interface{}`
+and use this library to decode it into the proper underlying native Go
+structure.
+
+## Installation
+
+```shell
+go get github.com/go-viper/mapstructure/v2
+```
+
+## Usage & Example
+
+For usage and examples see the [documentation](https://pkg.go.dev/mod/github.com/go-viper/mapstructure/v2).
+
+The `Decode` function has examples associated with it there.
+
+## But Why?!
+
+Go offers fantastic standard libraries for decoding formats such as JSON.
+The standard method is to have a struct pre-created, and populate that struct
+from the bytes of the encoded format. This is great, but the problem is if
+you have configuration or an encoding that changes slightly depending on
+specific fields. For example, consider this JSON:
+
+```json
+{
+  "type": "person",
+  "name": "Mitchell"
+}
+```
+
+Perhaps we can't populate a specific structure without first reading
+the "type" field from the JSON. We could always do two passes over the
+decoding of the JSON (reading the "type" first, and the rest later).
+However, it is much simpler to just decode this into a `map[string]interface{}`
+structure, read the "type" key, then use something like this library
+to decode it into the proper structure.
+
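+As a rough sketch of that flow (the `Person` type and the JSON input below are
+made up for illustration), it might look like this:
+
+```go
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+
+	"github.com/go-viper/mapstructure/v2"
+)
+
+type Person struct {
+	Name string
+}
+
+func main() {
+	input := []byte(`{"type": "person", "name": "Mitchell"}`)
+
+	// First pass: decode into a generic map to inspect the "type" key.
+	var raw map[string]interface{}
+	if err := json.Unmarshal(input, &raw); err != nil {
+		panic(err)
+	}
+
+	if raw["type"] == "person" {
+		// Second pass: decode the same map into the concrete struct.
+		var p Person
+		if err := mapstructure.Decode(raw, &p); err != nil {
+			panic(err)
+		}
+		fmt.Println(p.Name) // Mitchell
+	}
+}
+```
+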
+## Credits
+
+Mapstructure was originally created by [@mitchellh](https://github.com/mitchellh).
+This is a maintained fork of the original library.
+
+Read more about the reasons for the fork [here](https://github.com/mitchellh/mapstructure/issues/349).
+
+## License
+
+The project is licensed under the [MIT License](LICENSE).
diff --git a/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go b/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go
new file mode 100644
index 0000000000000000000000000000000000000000..840d6adce0e628ccb93befc76af628ae81e3c03a
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go
@@ -0,0 +1,334 @@
+package mapstructure
+
+import (
+	"encoding"
+	"errors"
+	"fmt"
+	"net"
+	"net/netip"
+	"reflect"
+	"strconv"
+	"strings"
+	"time"
+)
+
+// typedDecodeHook takes a raw DecodeHookFunc (an interface{}) and turns
+// it into the proper DecodeHookFunc type, such as DecodeHookFuncType.
+func typedDecodeHook(h DecodeHookFunc) DecodeHookFunc {
+	// Create variables here so we can reference them with the reflect pkg
+	var f1 DecodeHookFuncType
+	var f2 DecodeHookFuncKind
+	var f3 DecodeHookFuncValue
+
+	// Fill in the variables into this interface and the rest is done
+	// automatically using the reflect package.
+	potential := []interface{}{f1, f2, f3}
+
+	v := reflect.ValueOf(h)
+	vt := v.Type()
+	for _, raw := range potential {
+		pt := reflect.ValueOf(raw).Type()
+		if vt.ConvertibleTo(pt) {
+			return v.Convert(pt).Interface()
+		}
+	}
+
+	return nil
+}
+
+// DecodeHookExec executes the given decode hook. This should be used
+// since it'll naturally degrade to the older backwards compatible DecodeHookFunc
+// that took reflect.Kind instead of reflect.Type.
+func DecodeHookExec(
+	raw DecodeHookFunc,
+	from reflect.Value, to reflect.Value,
+) (interface{}, error) {
+	switch f := typedDecodeHook(raw).(type) {
+	case DecodeHookFuncType:
+		return f(from.Type(), to.Type(), from.Interface())
+	case DecodeHookFuncKind:
+		return f(from.Kind(), to.Kind(), from.Interface())
+	case DecodeHookFuncValue:
+		return f(from, to)
+	default:
+		return nil, errors.New("invalid decode hook signature")
+	}
+}
+
+// ComposeDecodeHookFunc creates a single DecodeHookFunc that
+// automatically composes multiple DecodeHookFuncs.
+//
+// The composed funcs are called in order, with the result of the
+// previous transformation.
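+//
+// Example (illustrative):
+//
+//	hook := ComposeDecodeHookFunc(
+//		StringToTimeDurationHookFunc(),
+//		StringToSliceHookFunc(","),
+//	)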
+func ComposeDecodeHookFunc(fs ...DecodeHookFunc) DecodeHookFunc {
+	return func(f reflect.Value, t reflect.Value) (interface{}, error) {
+		var err error
+		data := f.Interface()
+
+		newFrom := f
+		for _, f1 := range fs {
+			data, err = DecodeHookExec(f1, newFrom, t)
+			if err != nil {
+				return nil, err
+			}
+			newFrom = reflect.ValueOf(data)
+		}
+
+		return data, nil
+	}
+}
+
+// OrComposeDecodeHookFunc executes all input hook functions until one of them returns no error. In that case its value is returned.
+// If all hooks return an error, OrComposeDecodeHookFunc returns an error concatenating all error messages.
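+//
+// Example (illustrative): try RFC 3339 first, then fall back to a date-only layout.
+//
+//	hook := OrComposeDecodeHookFunc(
+//		StringToTimeHookFunc(time.RFC3339),
+//		StringToTimeHookFunc("2006-01-02"),
+//	)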
+func OrComposeDecodeHookFunc(ff ...DecodeHookFunc) DecodeHookFunc {
+	return func(a, b reflect.Value) (interface{}, error) {
+		var allErrs string
+		var out interface{}
+		var err error
+
+		for _, f := range ff {
+			out, err = DecodeHookExec(f, a, b)
+			if err != nil {
+				allErrs += err.Error() + "\n"
+				continue
+			}
+
+			return out, nil
+		}
+
+		return nil, errors.New(allErrs)
+	}
+}
+
+// StringToSliceHookFunc returns a DecodeHookFunc that converts
+// string to []string by splitting on the given sep.
+func StringToSliceHookFunc(sep string) DecodeHookFunc {
+	return func(
+		f reflect.Type,
+		t reflect.Type,
+		data interface{},
+	) (interface{}, error) {
+		if f.Kind() != reflect.String {
+			return data, nil
+		}
+		if t != reflect.SliceOf(f) {
+			return data, nil
+		}
+
+		raw := data.(string)
+		if raw == "" {
+			return []string{}, nil
+		}
+
+		return strings.Split(raw, sep), nil
+	}
+}
+
+// StringToTimeDurationHookFunc returns a DecodeHookFunc that converts
+// strings to time.Duration.
+func StringToTimeDurationHookFunc() DecodeHookFunc {
+	return func(
+		f reflect.Type,
+		t reflect.Type,
+		data interface{},
+	) (interface{}, error) {
+		if f.Kind() != reflect.String {
+			return data, nil
+		}
+		if t != reflect.TypeOf(time.Duration(5)) {
+			return data, nil
+		}
+
+		// Convert it by parsing
+		return time.ParseDuration(data.(string))
+	}
+}
+
+// StringToIPHookFunc returns a DecodeHookFunc that converts
+// strings to net.IP
+func StringToIPHookFunc() DecodeHookFunc {
+	return func(
+		f reflect.Type,
+		t reflect.Type,
+		data interface{},
+	) (interface{}, error) {
+		if f.Kind() != reflect.String {
+			return data, nil
+		}
+		if t != reflect.TypeOf(net.IP{}) {
+			return data, nil
+		}
+
+		// Convert it by parsing
+		ip := net.ParseIP(data.(string))
+		if ip == nil {
+			return net.IP{}, fmt.Errorf("failed parsing ip %v", data)
+		}
+
+		return ip, nil
+	}
+}
+
+// StringToIPNetHookFunc returns a DecodeHookFunc that converts
+// strings to net.IPNet
+func StringToIPNetHookFunc() DecodeHookFunc {
+	return func(
+		f reflect.Type,
+		t reflect.Type,
+		data interface{},
+	) (interface{}, error) {
+		if f.Kind() != reflect.String {
+			return data, nil
+		}
+		if t != reflect.TypeOf(net.IPNet{}) {
+			return data, nil
+		}
+
+		// Convert it by parsing
+		_, net, err := net.ParseCIDR(data.(string))
+		return net, err
+	}
+}
+
+// StringToTimeHookFunc returns a DecodeHookFunc that converts
+// strings to time.Time.
+func StringToTimeHookFunc(layout string) DecodeHookFunc {
+	return func(
+		f reflect.Type,
+		t reflect.Type,
+		data interface{},
+	) (interface{}, error) {
+		if f.Kind() != reflect.String {
+			return data, nil
+		}
+		if t != reflect.TypeOf(time.Time{}) {
+			return data, nil
+		}
+
+		// Convert it by parsing
+		return time.Parse(layout, data.(string))
+	}
+}
+
+// WeaklyTypedHook is a DecodeHookFunc which adds support for weak typing to
+// the decoder.
+//
+// Note that this is significantly different from the WeaklyTypedInput option
+// of the DecoderConfig.
+func WeaklyTypedHook(
+	f reflect.Kind,
+	t reflect.Kind,
+	data interface{},
+) (interface{}, error) {
+	dataVal := reflect.ValueOf(data)
+	switch t {
+	case reflect.String:
+		switch f {
+		case reflect.Bool:
+			if dataVal.Bool() {
+				return "1", nil
+			}
+			return "0", nil
+		case reflect.Float32:
+			return strconv.FormatFloat(dataVal.Float(), 'f', -1, 64), nil
+		case reflect.Int:
+			return strconv.FormatInt(dataVal.Int(), 10), nil
+		case reflect.Slice:
+			dataType := dataVal.Type()
+			elemKind := dataType.Elem().Kind()
+			if elemKind == reflect.Uint8 {
+				return string(dataVal.Interface().([]uint8)), nil
+			}
+		case reflect.Uint:
+			return strconv.FormatUint(dataVal.Uint(), 10), nil
+		}
+	}
+
+	return data, nil
+}
+
+func RecursiveStructToMapHookFunc() DecodeHookFunc {
+	return func(f reflect.Value, t reflect.Value) (interface{}, error) {
+		if f.Kind() != reflect.Struct {
+			return f.Interface(), nil
+		}
+
+		var i interface{} = struct{}{}
+		if t.Type() != reflect.TypeOf(&i).Elem() {
+			return f.Interface(), nil
+		}
+
+		m := make(map[string]interface{})
+		t.Set(reflect.ValueOf(m))
+
+		return f.Interface(), nil
+	}
+}
+
+// TextUnmarshallerHookFunc returns a DecodeHookFunc that applies
+// strings to the UnmarshalText function when the target type
+// implements the encoding.TextUnmarshaler interface.
+func TextUnmarshallerHookFunc() DecodeHookFuncType {
+	return func(
+		f reflect.Type,
+		t reflect.Type,
+		data interface{},
+	) (interface{}, error) {
+		if f.Kind() != reflect.String {
+			return data, nil
+		}
+		result := reflect.New(t).Interface()
+		unmarshaller, ok := result.(encoding.TextUnmarshaler)
+		if !ok {
+			return data, nil
+		}
+		str, ok := data.(string)
+		if !ok {
+			str = reflect.Indirect(reflect.ValueOf(&data)).Elem().String()
+		}
+		if err := unmarshaller.UnmarshalText([]byte(str)); err != nil {
+			return nil, err
+		}
+		return result, nil
+	}
+}
+
+// StringToNetIPAddrHookFunc returns a DecodeHookFunc that converts
+// strings to netip.Addr.
+func StringToNetIPAddrHookFunc() DecodeHookFunc {
+	return func(
+		f reflect.Type,
+		t reflect.Type,
+		data interface{},
+	) (interface{}, error) {
+		if f.Kind() != reflect.String {
+			return data, nil
+		}
+		if t != reflect.TypeOf(netip.Addr{}) {
+			return data, nil
+		}
+
+		// Convert it by parsing
+		return netip.ParseAddr(data.(string))
+	}
+}
+
+// StringToNetIPAddrPortHookFunc returns a DecodeHookFunc that converts
+// strings to netip.AddrPort.
+func StringToNetIPAddrPortHookFunc() DecodeHookFunc {
+	return func(
+		f reflect.Type,
+		t reflect.Type,
+		data interface{},
+	) (interface{}, error) {
+		if f.Kind() != reflect.String {
+			return data, nil
+		}
+		if t != reflect.TypeOf(netip.AddrPort{}) {
+			return data, nil
+		}
+
+		// Convert it by parsing
+		return netip.ParseAddrPort(data.(string))
+	}
+}
diff --git a/vendor/github.com/go-viper/mapstructure/v2/error.go b/vendor/github.com/go-viper/mapstructure/v2/error.go
new file mode 100644
index 0000000000000000000000000000000000000000..47a99e5af3f1b700db374eca24b48d9d8fc21647
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/error.go
@@ -0,0 +1,50 @@
+package mapstructure
+
+import (
+	"errors"
+	"fmt"
+	"sort"
+	"strings"
+)
+
+// Error implements the error interface and can represent multiple
+// errors that occur in the course of a single decode.
+type Error struct {
+	Errors []string
+}
+
+func (e *Error) Error() string {
+	points := make([]string, len(e.Errors))
+	for i, err := range e.Errors {
+		points[i] = fmt.Sprintf("* %s", err)
+	}
+
+	sort.Strings(points)
+	return fmt.Sprintf(
+		"%d error(s) decoding:\n\n%s",
+		len(e.Errors), strings.Join(points, "\n"))
+}
+
+// WrappedErrors implements the errwrap.Wrapper interface to make this
+// return value more useful with the errwrap and go-multierror libraries.
+func (e *Error) WrappedErrors() []error {
+	if e == nil {
+		return nil
+	}
+
+	result := make([]error, len(e.Errors))
+	for i, e := range e.Errors {
+		result[i] = errors.New(e)
+	}
+
+	return result
+}
+
+func appendErrors(errors []string, err error) []string {
+	switch e := err.(type) {
+	case *Error:
+		return append(errors, e.Errors...)
+	default:
+		return append(errors, e.Error())
+	}
+}
diff --git a/vendor/github.com/go-viper/mapstructure/v2/flake.lock b/vendor/github.com/go-viper/mapstructure/v2/flake.lock
new file mode 100644
index 0000000000000000000000000000000000000000..5a387d32994a78fadd5a7e7f1a295fd356efec04
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/flake.lock
@@ -0,0 +1,273 @@
+{
+  "nodes": {
+    "devenv": {
+      "inputs": {
+        "flake-compat": "flake-compat",
+        "nix": "nix",
+        "nixpkgs": "nixpkgs",
+        "pre-commit-hooks": "pre-commit-hooks"
+      },
+      "locked": {
+        "lastModified": 1702549996,
+        "narHash": "sha256-mEN+8gjWUXRxBCcixeth+jlDNuzxbpFwZNOEc4K22vw=",
+        "owner": "cachix",
+        "repo": "devenv",
+        "rev": "e681a99ffe2d2882f413a5d771129223c838ddce",
+        "type": "github"
+      },
+      "original": {
+        "owner": "cachix",
+        "repo": "devenv",
+        "type": "github"
+      }
+    },
+    "flake-compat": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1673956053,
+        "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
+        "owner": "edolstra",
+        "repo": "flake-compat",
+        "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
+        "type": "github"
+      },
+      "original": {
+        "owner": "edolstra",
+        "repo": "flake-compat",
+        "type": "github"
+      }
+    },
+    "flake-parts": {
+      "inputs": {
+        "nixpkgs-lib": "nixpkgs-lib"
+      },
+      "locked": {
+        "lastModified": 1701473968,
+        "narHash": "sha256-YcVE5emp1qQ8ieHUnxt1wCZCC3ZfAS+SRRWZ2TMda7E=",
+        "owner": "hercules-ci",
+        "repo": "flake-parts",
+        "rev": "34fed993f1674c8d06d58b37ce1e0fe5eebcb9f5",
+        "type": "github"
+      },
+      "original": {
+        "owner": "hercules-ci",
+        "repo": "flake-parts",
+        "type": "github"
+      }
+    },
+    "flake-utils": {
+      "inputs": {
+        "systems": "systems"
+      },
+      "locked": {
+        "lastModified": 1685518550,
+        "narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=",
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef",
+        "type": "github"
+      },
+      "original": {
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "type": "github"
+      }
+    },
+    "gitignore": {
+      "inputs": {
+        "nixpkgs": [
+          "devenv",
+          "pre-commit-hooks",
+          "nixpkgs"
+        ]
+      },
+      "locked": {
+        "lastModified": 1660459072,
+        "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=",
+        "owner": "hercules-ci",
+        "repo": "gitignore.nix",
+        "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73",
+        "type": "github"
+      },
+      "original": {
+        "owner": "hercules-ci",
+        "repo": "gitignore.nix",
+        "type": "github"
+      }
+    },
+    "lowdown-src": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1633514407,
+        "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=",
+        "owner": "kristapsdz",
+        "repo": "lowdown",
+        "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8",
+        "type": "github"
+      },
+      "original": {
+        "owner": "kristapsdz",
+        "repo": "lowdown",
+        "type": "github"
+      }
+    },
+    "nix": {
+      "inputs": {
+        "lowdown-src": "lowdown-src",
+        "nixpkgs": [
+          "devenv",
+          "nixpkgs"
+        ],
+        "nixpkgs-regression": "nixpkgs-regression"
+      },
+      "locked": {
+        "lastModified": 1676545802,
+        "narHash": "sha256-EK4rZ+Hd5hsvXnzSzk2ikhStJnD63odF7SzsQ8CuSPU=",
+        "owner": "domenkozar",
+        "repo": "nix",
+        "rev": "7c91803598ffbcfe4a55c44ac6d49b2cf07a527f",
+        "type": "github"
+      },
+      "original": {
+        "owner": "domenkozar",
+        "ref": "relaxed-flakes",
+        "repo": "nix",
+        "type": "github"
+      }
+    },
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1678875422,
+        "narHash": "sha256-T3o6NcQPwXjxJMn2shz86Chch4ljXgZn746c2caGxd8=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "126f49a01de5b7e35a43fd43f891ecf6d3a51459",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixpkgs-unstable",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "nixpkgs-lib": {
+      "locked": {
+        "dir": "lib",
+        "lastModified": 1701253981,
+        "narHash": "sha256-ztaDIyZ7HrTAfEEUt9AtTDNoCYxUdSd6NrRHaYOIxtk=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "e92039b55bcd58469325ded85d4f58dd5a4eaf58",
+        "type": "github"
+      },
+      "original": {
+        "dir": "lib",
+        "owner": "NixOS",
+        "ref": "nixos-unstable",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "nixpkgs-regression": {
+      "locked": {
+        "lastModified": 1643052045,
+        "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
+        "type": "github"
+      }
+    },
+    "nixpkgs-stable": {
+      "locked": {
+        "lastModified": 1685801374,
+        "narHash": "sha256-otaSUoFEMM+LjBI1XL/xGB5ao6IwnZOXc47qhIgJe8U=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "c37ca420157f4abc31e26f436c1145f8951ff373",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixos-23.05",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "nixpkgs_2": {
+      "locked": {
+        "lastModified": 1702539185,
+        "narHash": "sha256-KnIRG5NMdLIpEkZTnN5zovNYc0hhXjAgv6pfd5Z4c7U=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "aa9d4729cbc99dabacb50e3994dcefb3ea0f7447",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixpkgs-unstable",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "pre-commit-hooks": {
+      "inputs": {
+        "flake-compat": [
+          "devenv",
+          "flake-compat"
+        ],
+        "flake-utils": "flake-utils",
+        "gitignore": "gitignore",
+        "nixpkgs": [
+          "devenv",
+          "nixpkgs"
+        ],
+        "nixpkgs-stable": "nixpkgs-stable"
+      },
+      "locked": {
+        "lastModified": 1688056373,
+        "narHash": "sha256-2+SDlNRTKsgo3LBRiMUcoEUb6sDViRNQhzJquZ4koOI=",
+        "owner": "cachix",
+        "repo": "pre-commit-hooks.nix",
+        "rev": "5843cf069272d92b60c3ed9e55b7a8989c01d4c7",
+        "type": "github"
+      },
+      "original": {
+        "owner": "cachix",
+        "repo": "pre-commit-hooks.nix",
+        "type": "github"
+      }
+    },
+    "root": {
+      "inputs": {
+        "devenv": "devenv",
+        "flake-parts": "flake-parts",
+        "nixpkgs": "nixpkgs_2"
+      }
+    },
+    "systems": {
+      "locked": {
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}
diff --git a/vendor/github.com/go-viper/mapstructure/v2/flake.nix b/vendor/github.com/go-viper/mapstructure/v2/flake.nix
new file mode 100644
index 0000000000000000000000000000000000000000..4ed0f53311bf025e42436f41c2feffa569a02f0f
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/flake.nix
@@ -0,0 +1,39 @@
+{
+  inputs = {
+    nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
+    flake-parts.url = "github:hercules-ci/flake-parts";
+    devenv.url = "github:cachix/devenv";
+  };
+
+  outputs = inputs@{ flake-parts, ... }:
+    flake-parts.lib.mkFlake { inherit inputs; } {
+      imports = [
+        inputs.devenv.flakeModule
+      ];
+
+      systems = [ "x86_64-linux" "x86_64-darwin" "aarch64-darwin" ];
+
+      perSystem = { config, self', inputs', pkgs, system, ... }: rec {
+        devenv.shells = {
+          default = {
+            languages = {
+              go.enable = true;
+            };
+
+            pre-commit.hooks = {
+              nixpkgs-fmt.enable = true;
+            };
+
+            packages = with pkgs; [
+              golangci-lint
+            ];
+
+            # https://github.com/cachix/devenv/issues/528#issuecomment-1556108767
+            containers = pkgs.lib.mkForce { };
+          };
+
+          ci = devenv.shells.default;
+        };
+      };
+    };
+}
diff --git a/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go b/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go
new file mode 100644
index 0000000000000000000000000000000000000000..27f21bc7219203895df3cb5ef0491209a82ba0cd
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go
@@ -0,0 +1,1562 @@
+// Package mapstructure exposes functionality to convert one arbitrary
+// Go type into another, typically to convert a map[string]interface{}
+// into a native Go structure.
+//
+// The Go structure can be arbitrarily complex, containing slices,
+// other structs, etc. and the decoder will properly decode nested
+// maps and so on into the proper structures in the native Go struct.
+// See the examples to see what the decoder is capable of.
+//
+// The simplest function to start with is Decode.
+//
+// # Field Tags
+//
+// When decoding to a struct, mapstructure will use the field name by
+// default to perform the mapping. For example, if a struct has a field
+// "Username" then mapstructure will look for a key in the source value
+// of "username" (case insensitive).
+//
+//	type User struct {
+//	    Username string
+//	}
+//
+// You can change the behavior of mapstructure by using struct tags.
+// The default struct tag that mapstructure looks for is "mapstructure"
+// but you can customize it using DecoderConfig.
+//
+// # Renaming Fields
+//
+// To rename the key that mapstructure looks for, use the "mapstructure"
+// tag and set a value directly. For example, to change the "username" example
+// above to "user":
+//
+//	type User struct {
+//	    Username string `mapstructure:"user"`
+//	}
+//
+// # Embedded Structs and Squashing
+//
+// Embedded structs are treated as if they're another field with that name.
+// By default, the two structs below are equivalent when decoding with
+// mapstructure:
+//
+//	type Person struct {
+//	    Name string
+//	}
+//
+//	type Friend struct {
+//	    Person
+//	}
+//
+//	type Friend struct {
+//	    Person Person
+//	}
+//
+// This would require an input that looks like below:
+//
+//	map[string]interface{}{
+//	    "person": map[string]interface{}{"name": "alice"},
+//	}
+//
+// If your "person" value is NOT nested, then you can append ",squash" to
+// your tag value and mapstructure will treat it as if the embedded struct
+// were part of the struct directly. Example:
+//
+//	type Friend struct {
+//	    Person `mapstructure:",squash"`
+//	}
+//
+// Now the following input would be accepted:
+//
+//	map[string]interface{}{
+//	    "name": "alice",
+//	}
+//
+// When decoding from a struct to a map, the squash tag squashes the struct
+// fields into a single map. Using the example structs from above:
+//
+//	Friend{Person: Person{Name: "alice"}}
+//
+// Will be decoded into a map:
+//
+//	map[string]interface{}{
+//	    "name": "alice",
+//	}
+//
+// DecoderConfig has a field that changes the behavior of mapstructure
+// to always squash embedded structs.
+//
+// # Remainder Values
+//
+// If there are any unmapped keys in the source value, mapstructure by
+// default will silently ignore them. You can error by setting ErrorUnused
+// in DecoderConfig. If you're using Metadata you can also maintain a slice
+// of the unused keys.
+//
+// You can also use the ",remain" suffix on your tag to collect all unused
+// values in a map. The field with this tag MUST be a map type and should
+// probably be a "map[string]interface{}" or "map[interface{}]interface{}".
+// See example below:
+//
+//	type Friend struct {
+//	    Name  string
+//	    Other map[string]interface{} `mapstructure:",remain"`
+//	}
+//
+// Given the input below, Other would be populated with the other
+// values that weren't used (everything but "name"):
+//
+//	map[string]interface{}{
+//	    "name":    "bob",
+//	    "address": "123 Maple St.",
+//	}
+//
+// # Omit Empty Values
+//
+// When decoding from a struct to any other value, you may use the
+// ",omitempty" suffix on your tag to omit that value if it equates to
+// the zero value. The zero value of all types is specified in the Go
+// specification.
+//
+// For example, the zero value of a numeric type is zero ("0"). If the struct
+// field value is zero and a numeric type, the field is empty, and it won't
+// be encoded into the destination type.
+//
+//	type Source struct {
+//	    Age int `mapstructure:",omitempty"`
+//	}
+//
+// # Unexported fields
+//
+// Since unexported (private) struct fields cannot be set outside the package
+// where they are defined, the decoder will simply skip them.
+//
+// For this output type definition:
+//
+//	type Exported struct {
+//	    private string // this unexported field will be skipped
+//	    Public string
+//	}
+//
+// Using this map as input:
+//
+//	map[string]interface{}{
+//	    "private": "I will be ignored",
+//	    "Public":  "I made it through!",
+//	}
+//
+// The following struct will be decoded:
+//
+//	type Exported struct {
+//	    private: "" // field is left with an empty string (zero value)
+//	    Public: "I made it through!"
+//	}
+//
+// # Other Configuration
+//
+// mapstructure is highly configurable. See the DecoderConfig struct
+// for other features and options that are supported.
+package mapstructure
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+	"reflect"
+	"sort"
+	"strconv"
+	"strings"
+)
+
+// DecodeHookFunc is the callback function that can be used for
+// data transformations. See "DecodeHook" in the DecoderConfig
+// struct.
+//
+// The type must be one of DecodeHookFuncType, DecodeHookFuncKind, or
+// DecodeHookFuncValue.
+// Values are a superset of Types (Values can return types), and Types are a
+// superset of Kinds (Types can return Kinds) and are generally a richer thing
+// to use, but Kinds are simpler if you only need those.
+//
+// The reason DecodeHookFunc is multi-typed is for backwards compatibility:
+// we started with Kinds and then realized Types were the better solution,
+// but have a promise to not break backwards compat so we now support
+// both.
+type DecodeHookFunc interface{}
+
+// DecodeHookFuncType is a DecodeHookFunc which has complete information about
+// the source and target types.
+type DecodeHookFuncType func(reflect.Type, reflect.Type, interface{}) (interface{}, error)
+
+// DecodeHookFuncKind is a DecodeHookFunc which knows only the Kinds of the
+// source and target types.
+type DecodeHookFuncKind func(reflect.Kind, reflect.Kind, interface{}) (interface{}, error)
+
+// DecodeHookFuncValue is a DecodeHookFunc which has complete access to both the source and target
+// values.
+type DecodeHookFuncValue func(from reflect.Value, to reflect.Value) (interface{}, error)
+
+// DecoderConfig is the configuration that is used to create a new decoder
+// and allows customization of various aspects of decoding.
+type DecoderConfig struct {
+	// DecodeHook, if set, will be called before any decoding and any
+	// type conversion (if WeaklyTypedInput is on). This lets you modify
+	// the values before they're set down onto the resulting struct. The
+	// DecodeHook is called for every map and value in the input. This means
+	// that if a struct has embedded fields with squash tags the decode hook
+	// is called only once with all of the input data, not once for each
+	// embedded struct.
+	//
+	// If an error is returned, the entire decode will fail with that error.
+	DecodeHook DecodeHookFunc
+
+	// If ErrorUnused is true, then it is an error for there to exist
+	// keys in the original map that were unused in the decoding process
+	// (extra keys).
+	ErrorUnused bool
+
+	// If ErrorUnset is true, then it is an error for there to exist
+	// fields in the result that were not set in the decoding process
+	// (extra fields). This only applies to decoding to a struct. This
+	// will affect all nested structs as well.
+	ErrorUnset bool
+
+	// ZeroFields, if set to true, will zero fields before writing them.
+	// For example, a map will be emptied before decoded values are put in
+	// it. If this is false, a map will be merged.
+	ZeroFields bool
+
+	// If WeaklyTypedInput is true, the decoder will make the following
+	// "weak" conversions:
+	//
+	//   - bools to string (true = "1", false = "0")
+	//   - numbers to string (base 10)
+	//   - bools to int/uint (true = 1, false = 0)
+	//   - strings to int/uint (base implied by prefix)
+	//   - int to bool (true if value != 0)
+	//   - string to bool (accepts: 1, t, T, TRUE, true, True, 0, f, F,
+	//     FALSE, false, False. Anything else is an error)
+	//   - empty array = empty map and vice versa
+	//   - negative numbers to overflowed uint values (base 10)
+	//   - slice of maps to a merged map
+	//   - single values are converted to slices if required. Each
+	//     element is weakly decoded. For example: "4" can become []int{4}
+	//     if the target type is an int slice.
+	//
+	WeaklyTypedInput bool
+
+	// Squash will squash embedded structs.  A squash tag may also be
+	// added to an individual struct field using a tag.  For example:
+	//
+	//  type Parent struct {
+	//      Child `mapstructure:",squash"`
+	//  }
+	Squash bool
+
+	// Metadata is the struct that will contain extra metadata about
+	// the decoding. If this is nil, then no metadata will be tracked.
+	Metadata *Metadata
+
+	// Result is a pointer to the struct that will contain the decoded
+	// value.
+	Result interface{}
+
+	// The tag name that mapstructure reads for field names. This
+	// defaults to "mapstructure"
+	TagName string
+
+	// IgnoreUntaggedFields ignores all struct fields without explicit
+	// TagName, comparable to `mapstructure:"-"` as default behaviour.
+	IgnoreUntaggedFields bool
+
+	// MatchName is the function used to match the map key to the struct
+	// field name or tag. Defaults to `strings.EqualFold`. This can be used
+	// to implement case-sensitive tag values, support snake casing, etc.
+	MatchName func(mapKey, fieldName string) bool
+}
+
+// A Decoder takes a raw interface value and turns it into structured
+// data, keeping track of rich error information along the way in case
+// anything goes wrong. Unlike the basic top-level Decode method, you can
+// more finely control how the Decoder behaves using the DecoderConfig
+// structure. The top-level Decode method is just a convenience that sets
+// up the most basic Decoder.
+type Decoder struct {
+	config *DecoderConfig
+}
+
+// Metadata contains information about decoding a structure that
+// is tedious or difficult to get otherwise.
+type Metadata struct {
+	// Keys are the keys of the structure which were successfully decoded
+	Keys []string
+
+	// Unused is a slice of keys that were found in the raw value but
+	// weren't decoded since there was no matching field in the result interface
+	Unused []string
+
+	// Unset is a slice of field names that were found in the result interface
+	// but weren't set in the decoding process since there was no matching value
+	// in the input
+	Unset []string
+}
+
+// Decode takes an input structure and uses reflection to translate it to
+// the output structure. output must be a pointer to a map or struct.
+func Decode(input interface{}, output interface{}) error {
+	config := &DecoderConfig{
+		Metadata: nil,
+		Result:   output,
+	}
+
+	decoder, err := NewDecoder(config)
+	if err != nil {
+		return err
+	}
+
+	return decoder.Decode(input)
+}
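
A minimal caller-side sketch of Decode, with invented type and variable names and the usual package import assumed:

	type person struct {
		Name string `mapstructure:"name"`
		Age  int    `mapstructure:"age"`
	}

	input := map[string]interface{}{"name": "alice", "age": 30}

	var p person
	if err := mapstructure.Decode(input, &p); err != nil {
		// handle error
	}
	// p is now person{Name: "alice", Age: 30}
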
+
+// WeakDecode is the same as Decode but is shorthand to enable
+// WeaklyTypedInput. See DecoderConfig for more info.
+func WeakDecode(input, output interface{}) error {
+	config := &DecoderConfig{
+		Metadata:         nil,
+		Result:           output,
+		WeaklyTypedInput: true,
+	}
+
+	decoder, err := NewDecoder(config)
+	if err != nil {
+		return err
+	}
+
+	return decoder.Decode(input)
+}
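
The same idea through WeakDecode, leaning on the "weak" conversions listed under WeaklyTypedInput above (names invented):

	type settings struct {
		Port    int  `mapstructure:"port"`
		Verbose bool `mapstructure:"verbose"`
	}

	var s settings
	in := map[string]interface{}{"port": "8080", "verbose": 1}
	if err := mapstructure.WeakDecode(in, &s); err != nil {
		// handle error
	}
	// s is now settings{Port: 8080, Verbose: true}
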
+
+// DecodeMetadata is the same as Decode, but is shorthand to
+// enable metadata collection. See DecoderConfig for more info.
+func DecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error {
+	config := &DecoderConfig{
+		Metadata: metadata,
+		Result:   output,
+	}
+
+	decoder, err := NewDecoder(config)
+	if err != nil {
+		return err
+	}
+
+	return decoder.Decode(input)
+}
+
+// WeakDecodeMetadata is the same as Decode, but is shorthand to
+// enable both WeaklyTypedInput and metadata collection. See
+// DecoderConfig for more info.
+func WeakDecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error {
+	config := &DecoderConfig{
+		Metadata:         metadata,
+		Result:           output,
+		WeaklyTypedInput: true,
+	}
+
+	decoder, err := NewDecoder(config)
+	if err != nil {
+		return err
+	}
+
+	return decoder.Decode(input)
+}
+
+// NewDecoder returns a new decoder for the given configuration. Once
+// a decoder has been returned, the same configuration must not be used
+// again.
+func NewDecoder(config *DecoderConfig) (*Decoder, error) {
+	val := reflect.ValueOf(config.Result)
+	if val.Kind() != reflect.Ptr {
+		return nil, errors.New("result must be a pointer")
+	}
+
+	val = val.Elem()
+	if !val.CanAddr() {
+		return nil, errors.New("result must be addressable (a pointer)")
+	}
+
+	if config.Metadata != nil {
+		if config.Metadata.Keys == nil {
+			config.Metadata.Keys = make([]string, 0)
+		}
+
+		if config.Metadata.Unused == nil {
+			config.Metadata.Unused = make([]string, 0)
+		}
+
+		if config.Metadata.Unset == nil {
+			config.Metadata.Unset = make([]string, 0)
+		}
+	}
+
+	if config.TagName == "" {
+		config.TagName = "mapstructure"
+	}
+
+	if config.MatchName == nil {
+		config.MatchName = strings.EqualFold
+	}
+
+	result := &Decoder{
+		config: config,
+	}
+
+	return result, nil
+}
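
When the package-level helpers are not flexible enough, the decoder can be built by hand from a DecoderConfig; a hedged sketch with invented names, showing Metadata and a custom TagName together:

	type target struct {
		Host string `json:"host"`
	}

	input := map[string]interface{}{"host": "example.com", "extra": true}

	var md mapstructure.Metadata
	var out target

	dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		Metadata: &md,
		Result:   &out,
		TagName:  "json", // read `json:"..."` tags instead of `mapstructure:"..."`
	})
	if err != nil {
		// handle error
	}
	if err := dec.Decode(input); err != nil {
		// handle error
	}
	// md.Keys now lists the decoded keys ("host") and md.Unused records
	// the leftover "extra" key.
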
+
+// Decode decodes the given raw interface to the target pointer specified
+// by the configuration.
+func (d *Decoder) Decode(input interface{}) error {
+	return d.decode("", input, reflect.ValueOf(d.config.Result).Elem())
+}
+
+// Decodes an unknown data type into a specific reflection value.
+func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) error {
+	var inputVal reflect.Value
+	if input != nil {
+		inputVal = reflect.ValueOf(input)
+
+		// We need to check here if input is a typed nil. Typed nils won't
+		// match the "input == nil" below so we check that here.
+		if inputVal.Kind() == reflect.Ptr && inputVal.IsNil() {
+			input = nil
+		}
+	}
+
+	if input == nil {
+		// If the data is nil, then we don't set anything, unless ZeroFields is set
+		// to true.
+		if d.config.ZeroFields {
+			outVal.Set(reflect.Zero(outVal.Type()))
+
+			if d.config.Metadata != nil && name != "" {
+				d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
+			}
+		}
+		return nil
+	}
+
+	if !inputVal.IsValid() {
+		// If the input value is invalid, then we just set the value
+		// to be the zero value.
+		outVal.Set(reflect.Zero(outVal.Type()))
+		if d.config.Metadata != nil && name != "" {
+			d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
+		}
+		return nil
+	}
+
+	if d.config.DecodeHook != nil {
+		// We have a DecodeHook, so let's pre-process the input.
+		var err error
+		input, err = DecodeHookExec(d.config.DecodeHook, inputVal, outVal)
+		if err != nil {
+			return fmt.Errorf("error decoding '%s': %w", name, err)
+		}
+	}
+
+	var err error
+	outputKind := getKind(outVal)
+	addMetaKey := true
+	switch outputKind {
+	case reflect.Bool:
+		err = d.decodeBool(name, input, outVal)
+	case reflect.Interface:
+		err = d.decodeBasic(name, input, outVal)
+	case reflect.String:
+		err = d.decodeString(name, input, outVal)
+	case reflect.Int:
+		err = d.decodeInt(name, input, outVal)
+	case reflect.Uint:
+		err = d.decodeUint(name, input, outVal)
+	case reflect.Float32:
+		err = d.decodeFloat(name, input, outVal)
+	case reflect.Struct:
+		err = d.decodeStruct(name, input, outVal)
+	case reflect.Map:
+		err = d.decodeMap(name, input, outVal)
+	case reflect.Ptr:
+		addMetaKey, err = d.decodePtr(name, input, outVal)
+	case reflect.Slice:
+		err = d.decodeSlice(name, input, outVal)
+	case reflect.Array:
+		err = d.decodeArray(name, input, outVal)
+	case reflect.Func:
+		err = d.decodeFunc(name, input, outVal)
+	default:
+		// If we reached this point then we weren't able to decode it
+		return fmt.Errorf("%s: unsupported type: %s", name, outputKind)
+	}
+
+	// If we reached here, then we successfully decoded SOMETHING, so
+	// mark the key as used if we're tracking metadata.
+	if addMetaKey && d.config.Metadata != nil && name != "" {
+		d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
+	}
+
+	return err
+}
+
+// This decodes a basic type (bool, int, string, etc.) and sets the
+// value to "data" of that type.
+func (d *Decoder) decodeBasic(name string, data interface{}, val reflect.Value) error {
+	if val.IsValid() && val.Elem().IsValid() {
+		elem := val.Elem()
+
+		// If we can't address this element, then it's not writable. Instead,
+		// we make a copy of the value (which is a pointer and therefore
+		// writable), decode into that, and replace the whole value.
+		copied := false
+		if !elem.CanAddr() {
+			copied = true
+
+			// Make *T
+			copy := reflect.New(elem.Type())
+
+			// *T = elem
+			copy.Elem().Set(elem)
+
+			// Set elem so we decode into it
+			elem = copy
+		}
+
+		// Decode. If we have an error then return. We also return right
+		// away if we're not a copy because that means we decoded directly.
+		if err := d.decode(name, data, elem); err != nil || !copied {
+			return err
+		}
+
+		// If we're a copy, we need to set the final result
+		val.Set(elem.Elem())
+		return nil
+	}
+
+	dataVal := reflect.ValueOf(data)
+
+	// If the input data is a pointer, and the assigned type is the dereference
+	// of that exact pointer, then indirect it so that we can assign it.
+	// Example: *string to string
+	if dataVal.Kind() == reflect.Ptr && dataVal.Type().Elem() == val.Type() {
+		dataVal = reflect.Indirect(dataVal)
+	}
+
+	if !dataVal.IsValid() {
+		dataVal = reflect.Zero(val.Type())
+	}
+
+	dataValType := dataVal.Type()
+	if !dataValType.AssignableTo(val.Type()) {
+		return fmt.Errorf(
+			"'%s' expected type '%s', got '%s'",
+			name, val.Type(), dataValType)
+	}
+
+	val.Set(dataVal)
+	return nil
+}
+
+func (d *Decoder) decodeString(name string, data interface{}, val reflect.Value) error {
+	dataVal := reflect.Indirect(reflect.ValueOf(data))
+	dataKind := getKind(dataVal)
+
+	converted := true
+	switch {
+	case dataKind == reflect.String:
+		val.SetString(dataVal.String())
+	case dataKind == reflect.Bool && d.config.WeaklyTypedInput:
+		if dataVal.Bool() {
+			val.SetString("1")
+		} else {
+			val.SetString("0")
+		}
+	case dataKind == reflect.Int && d.config.WeaklyTypedInput:
+		val.SetString(strconv.FormatInt(dataVal.Int(), 10))
+	case dataKind == reflect.Uint && d.config.WeaklyTypedInput:
+		val.SetString(strconv.FormatUint(dataVal.Uint(), 10))
+	case dataKind == reflect.Float32 && d.config.WeaklyTypedInput:
+		val.SetString(strconv.FormatFloat(dataVal.Float(), 'f', -1, 64))
+	case dataKind == reflect.Slice && d.config.WeaklyTypedInput,
+		dataKind == reflect.Array && d.config.WeaklyTypedInput:
+		dataType := dataVal.Type()
+		elemKind := dataType.Elem().Kind()
+		switch elemKind {
+		case reflect.Uint8:
+			var uints []uint8
+			if dataKind == reflect.Array {
+				uints = make([]uint8, dataVal.Len(), dataVal.Len())
+				for i := range uints {
+					uints[i] = dataVal.Index(i).Interface().(uint8)
+				}
+			} else {
+				uints = dataVal.Interface().([]uint8)
+			}
+			val.SetString(string(uints))
+		default:
+			converted = false
+		}
+	default:
+		converted = false
+	}
+
+	if !converted {
+		return fmt.Errorf(
+			"'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
+			name, val.Type(), dataVal.Type(), data)
+	}
+
+	return nil
+}
+
+func (d *Decoder) decodeInt(name string, data interface{}, val reflect.Value) error {
+	dataVal := reflect.Indirect(reflect.ValueOf(data))
+	dataKind := getKind(dataVal)
+	dataType := dataVal.Type()
+
+	switch {
+	case dataKind == reflect.Int:
+		val.SetInt(dataVal.Int())
+	case dataKind == reflect.Uint:
+		val.SetInt(int64(dataVal.Uint()))
+	case dataKind == reflect.Float32:
+		val.SetInt(int64(dataVal.Float()))
+	case dataKind == reflect.Bool && d.config.WeaklyTypedInput:
+		if dataVal.Bool() {
+			val.SetInt(1)
+		} else {
+			val.SetInt(0)
+		}
+	case dataKind == reflect.String && d.config.WeaklyTypedInput:
+		str := dataVal.String()
+		if str == "" {
+			str = "0"
+		}
+
+		i, err := strconv.ParseInt(str, 0, val.Type().Bits())
+		if err == nil {
+			val.SetInt(i)
+		} else {
+			return fmt.Errorf("cannot parse '%s' as int: %s", name, err)
+		}
+	case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number":
+		jn := data.(json.Number)
+		i, err := jn.Int64()
+		if err != nil {
+			return fmt.Errorf(
+				"error decoding json.Number into %s: %s", name, err)
+		}
+		val.SetInt(i)
+	default:
+		return fmt.Errorf(
+			"'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
+			name, val.Type(), dataVal.Type(), data)
+	}
+
+	return nil
+}
+
+func (d *Decoder) decodeUint(name string, data interface{}, val reflect.Value) error {
+	dataVal := reflect.Indirect(reflect.ValueOf(data))
+	dataKind := getKind(dataVal)
+	dataType := dataVal.Type()
+
+	switch {
+	case dataKind == reflect.Int:
+		i := dataVal.Int()
+		if i < 0 && !d.config.WeaklyTypedInput {
+			return fmt.Errorf("cannot parse '%s', %d overflows uint",
+				name, i)
+		}
+		val.SetUint(uint64(i))
+	case dataKind == reflect.Uint:
+		val.SetUint(dataVal.Uint())
+	case dataKind == reflect.Float32:
+		f := dataVal.Float()
+		if f < 0 && !d.config.WeaklyTypedInput {
+			return fmt.Errorf("cannot parse '%s', %f overflows uint",
+				name, f)
+		}
+		val.SetUint(uint64(f))
+	case dataKind == reflect.Bool && d.config.WeaklyTypedInput:
+		if dataVal.Bool() {
+			val.SetUint(1)
+		} else {
+			val.SetUint(0)
+		}
+	case dataKind == reflect.String && d.config.WeaklyTypedInput:
+		str := dataVal.String()
+		if str == "" {
+			str = "0"
+		}
+
+		i, err := strconv.ParseUint(str, 0, val.Type().Bits())
+		if err == nil {
+			val.SetUint(i)
+		} else {
+			return fmt.Errorf("cannot parse '%s' as uint: %s", name, err)
+		}
+	case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number":
+		jn := data.(json.Number)
+		i, err := strconv.ParseUint(string(jn), 0, 64)
+		if err != nil {
+			return fmt.Errorf(
+				"error decoding json.Number into %s: %s", name, err)
+		}
+		val.SetUint(i)
+	default:
+		return fmt.Errorf(
+			"'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
+			name, val.Type(), dataVal.Type(), data)
+	}
+
+	return nil
+}
+
+func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) error {
+	dataVal := reflect.Indirect(reflect.ValueOf(data))
+	dataKind := getKind(dataVal)
+
+	switch {
+	case dataKind == reflect.Bool:
+		val.SetBool(dataVal.Bool())
+	case dataKind == reflect.Int && d.config.WeaklyTypedInput:
+		val.SetBool(dataVal.Int() != 0)
+	case dataKind == reflect.Uint && d.config.WeaklyTypedInput:
+		val.SetBool(dataVal.Uint() != 0)
+	case dataKind == reflect.Float32 && d.config.WeaklyTypedInput:
+		val.SetBool(dataVal.Float() != 0)
+	case dataKind == reflect.String && d.config.WeaklyTypedInput:
+		b, err := strconv.ParseBool(dataVal.String())
+		if err == nil {
+			val.SetBool(b)
+		} else if dataVal.String() == "" {
+			val.SetBool(false)
+		} else {
+			return fmt.Errorf("cannot parse '%s' as bool: %s", name, err)
+		}
+	default:
+		return fmt.Errorf(
+			"'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
+			name, val.Type(), dataVal.Type(), data)
+	}
+
+	return nil
+}
+
+func (d *Decoder) decodeFloat(name string, data interface{}, val reflect.Value) error {
+	dataVal := reflect.Indirect(reflect.ValueOf(data))
+	dataKind := getKind(dataVal)
+	dataType := dataVal.Type()
+
+	switch {
+	case dataKind == reflect.Int:
+		val.SetFloat(float64(dataVal.Int()))
+	case dataKind == reflect.Uint:
+		val.SetFloat(float64(dataVal.Uint()))
+	case dataKind == reflect.Float32:
+		val.SetFloat(dataVal.Float())
+	case dataKind == reflect.Bool && d.config.WeaklyTypedInput:
+		if dataVal.Bool() {
+			val.SetFloat(1)
+		} else {
+			val.SetFloat(0)
+		}
+	case dataKind == reflect.String && d.config.WeaklyTypedInput:
+		str := dataVal.String()
+		if str == "" {
+			str = "0"
+		}
+
+		f, err := strconv.ParseFloat(str, val.Type().Bits())
+		if err == nil {
+			val.SetFloat(f)
+		} else {
+			return fmt.Errorf("cannot parse '%s' as float: %s", name, err)
+		}
+	case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number":
+		jn := data.(json.Number)
+		i, err := jn.Float64()
+		if err != nil {
+			return fmt.Errorf(
+				"error decoding json.Number into %s: %s", name, err)
+		}
+		val.SetFloat(i)
+	default:
+		return fmt.Errorf(
+			"'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
+			name, val.Type(), dataVal.Type(), data)
+	}
+
+	return nil
+}
+
+func (d *Decoder) decodeMap(name string, data interface{}, val reflect.Value) error {
+	valType := val.Type()
+	valKeyType := valType.Key()
+	valElemType := valType.Elem()
+
+	// By default we overwrite keys in the current map
+	valMap := val
+
+	// If the map is nil or we're purposely zeroing fields, make a new map
+	if valMap.IsNil() || d.config.ZeroFields {
+		// Make a new map to hold our result
+		mapType := reflect.MapOf(valKeyType, valElemType)
+		valMap = reflect.MakeMap(mapType)
+	}
+
+	dataVal := reflect.ValueOf(data)
+
+	// Resolve any levels of indirection
+	for dataVal.Kind() == reflect.Pointer {
+		dataVal = reflect.Indirect(dataVal)
+	}
+
+	// Check input type and based on the input type jump to the proper func
+	switch dataVal.Kind() {
+	case reflect.Map:
+		return d.decodeMapFromMap(name, dataVal, val, valMap)
+
+	case reflect.Struct:
+		return d.decodeMapFromStruct(name, dataVal, val, valMap)
+
+	case reflect.Array, reflect.Slice:
+		if d.config.WeaklyTypedInput {
+			return d.decodeMapFromSlice(name, dataVal, val, valMap)
+		}
+
+		fallthrough
+
+	default:
+		return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind())
+	}
+}
+
+func (d *Decoder) decodeMapFromSlice(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error {
+	// Special case for BC reasons (covered by tests)
+	if dataVal.Len() == 0 {
+		val.Set(valMap)
+		return nil
+	}
+
+	for i := 0; i < dataVal.Len(); i++ {
+		err := d.decode(
+			name+"["+strconv.Itoa(i)+"]",
+			dataVal.Index(i).Interface(), val)
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (d *Decoder) decodeMapFromMap(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error {
+	valType := val.Type()
+	valKeyType := valType.Key()
+	valElemType := valType.Elem()
+
+	// Accumulate errors
+	errors := make([]string, 0)
+
+	// If the input data is empty, then we just match what the input data is.
+	if dataVal.Len() == 0 {
+		if dataVal.IsNil() {
+			if !val.IsNil() {
+				val.Set(dataVal)
+			}
+		} else {
+			// Set to empty allocated value
+			val.Set(valMap)
+		}
+
+		return nil
+	}
+
+	for _, k := range dataVal.MapKeys() {
+		fieldName := name + "[" + k.String() + "]"
+
+		// First decode the key into the proper type
+		currentKey := reflect.Indirect(reflect.New(valKeyType))
+		if err := d.decode(fieldName, k.Interface(), currentKey); err != nil {
+			errors = appendErrors(errors, err)
+			continue
+		}
+
+		// Next decode the data into the proper type
+		v := dataVal.MapIndex(k).Interface()
+		currentVal := reflect.Indirect(reflect.New(valElemType))
+		if err := d.decode(fieldName, v, currentVal); err != nil {
+			errors = appendErrors(errors, err)
+			continue
+		}
+
+		valMap.SetMapIndex(currentKey, currentVal)
+	}
+
+	// Set the built up map to the value
+	val.Set(valMap)
+
+	// If we had errors, return those
+	if len(errors) > 0 {
+		return &Error{errors}
+	}
+
+	return nil
+}
+
+func (d *Decoder) decodeMapFromStruct(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error {
+	typ := dataVal.Type()
+	for i := 0; i < typ.NumField(); i++ {
+		// Get the StructField first since this is a cheap operation. If the
+		// field is unexported, then ignore it.
+		f := typ.Field(i)
+		if f.PkgPath != "" {
+			continue
+		}
+
+		// Next get the actual value of this field and verify it is assignable
+		// to the map value.
+		v := dataVal.Field(i)
+		if !v.Type().AssignableTo(valMap.Type().Elem()) {
+			return fmt.Errorf("cannot assign type '%s' to map value field of type '%s'", v.Type(), valMap.Type().Elem())
+		}
+
+		tagValue := f.Tag.Get(d.config.TagName)
+		keyName := f.Name
+
+		if tagValue == "" && d.config.IgnoreUntaggedFields {
+			continue
+		}
+
+		// If Squash is set in the config, we squash the field down.
+		squash := d.config.Squash && v.Kind() == reflect.Struct && f.Anonymous
+
+		v = dereferencePtrToStructIfNeeded(v, d.config.TagName)
+
+		// Determine the name of the key in the map
+		if index := strings.Index(tagValue, ","); index != -1 {
+			if tagValue[:index] == "-" {
+				continue
+			}
+			// If "omitempty" is specified in the tag, it ignores empty values.
+			if strings.Index(tagValue[index+1:], "omitempty") != -1 && isEmptyValue(v) {
+				continue
+			}
+
+			// If "squash" is specified in the tag, we squash the field down.
+			squash = squash || strings.Index(tagValue[index+1:], "squash") != -1
+			if squash {
+				// When squashing, the embedded type can be a pointer to a struct.
+				if v.Kind() == reflect.Ptr && v.Elem().Kind() == reflect.Struct {
+					v = v.Elem()
+				}
+
+				// The final type must be a struct
+				if v.Kind() != reflect.Struct {
+					return fmt.Errorf("cannot squash non-struct type '%s'", v.Type())
+				}
+			} else {
+				if strings.Index(tagValue[index+1:], "remain") != -1 {
+					if v.Kind() != reflect.Map {
+						return fmt.Errorf("error remain-tag field with invalid type: '%s'", v.Type())
+					}
+
+					ptr := v.MapRange()
+					for ptr.Next() {
+						valMap.SetMapIndex(ptr.Key(), ptr.Value())
+					}
+					continue
+				}
+			}
+			if keyNameTagValue := tagValue[:index]; keyNameTagValue != "" {
+				keyName = keyNameTagValue
+			}
+		} else if len(tagValue) > 0 {
+			if tagValue == "-" {
+				continue
+			}
+			keyName = tagValue
+		}
+
+		switch v.Kind() {
+		// this is an embedded struct, so handle it differently
+		case reflect.Struct:
+			x := reflect.New(v.Type())
+			x.Elem().Set(v)
+
+			vType := valMap.Type()
+			vKeyType := vType.Key()
+			vElemType := vType.Elem()
+			mType := reflect.MapOf(vKeyType, vElemType)
+			vMap := reflect.MakeMap(mType)
+
+			// Creating a pointer to a map so that other methods can completely
+			// overwrite the map if need be (looking at you decodeMapFromMap). The
+			// indirection allows the underlying map to be settable (CanSet() == true)
+			// whereas reflect.MakeMap returns an unsettable map.
+			addrVal := reflect.New(vMap.Type())
+			reflect.Indirect(addrVal).Set(vMap)
+
+			err := d.decode(keyName, x.Interface(), reflect.Indirect(addrVal))
+			if err != nil {
+				return err
+			}
+
+			// the underlying map may have been completely overwritten so pull
+			// it indirectly out of the enclosing value.
+			vMap = reflect.Indirect(addrVal)
+
+			if squash {
+				for _, k := range vMap.MapKeys() {
+					valMap.SetMapIndex(k, vMap.MapIndex(k))
+				}
+			} else {
+				valMap.SetMapIndex(reflect.ValueOf(keyName), vMap)
+			}
+
+		default:
+			valMap.SetMapIndex(reflect.ValueOf(keyName), v)
+		}
+	}
+
+	if val.CanAddr() {
+		val.Set(valMap)
+	}
+
+	return nil
+}
+
+func (d *Decoder) decodePtr(name string, data interface{}, val reflect.Value) (bool, error) {
+	// If the input data is nil, then we want to just set the output
+	// pointer to be nil as well.
+	isNil := data == nil
+	if !isNil {
+		switch v := reflect.Indirect(reflect.ValueOf(data)); v.Kind() {
+		case reflect.Chan,
+			reflect.Func,
+			reflect.Interface,
+			reflect.Map,
+			reflect.Ptr,
+			reflect.Slice:
+			isNil = v.IsNil()
+		}
+	}
+	if isNil {
+		if !val.IsNil() && val.CanSet() {
+			nilValue := reflect.New(val.Type()).Elem()
+			val.Set(nilValue)
+		}
+
+		return true, nil
+	}
+
+	// Create an element of the concrete (non pointer) type and decode
+	// into that. Then set the value of the pointer to this type.
+	valType := val.Type()
+	valElemType := valType.Elem()
+	if val.CanSet() {
+		realVal := val
+		if realVal.IsNil() || d.config.ZeroFields {
+			realVal = reflect.New(valElemType)
+		}
+
+		if err := d.decode(name, data, reflect.Indirect(realVal)); err != nil {
+			return false, err
+		}
+
+		val.Set(realVal)
+	} else {
+		if err := d.decode(name, data, reflect.Indirect(val)); err != nil {
+			return false, err
+		}
+	}
+	return false, nil
+}
+
+func (d *Decoder) decodeFunc(name string, data interface{}, val reflect.Value) error {
+	// Create an element of the concrete (non pointer) type and decode
+	// into that. Then set the value of the pointer to this type.
+	dataVal := reflect.Indirect(reflect.ValueOf(data))
+	if val.Type() != dataVal.Type() {
+		return fmt.Errorf(
+			"'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
+			name, val.Type(), dataVal.Type(), data)
+	}
+	val.Set(dataVal)
+	return nil
+}
+
+func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) error {
+	dataVal := reflect.Indirect(reflect.ValueOf(data))
+	dataValKind := dataVal.Kind()
+	valType := val.Type()
+	valElemType := valType.Elem()
+	sliceType := reflect.SliceOf(valElemType)
+
+	// If we have a non array/slice type then we first attempt to convert.
+	if dataValKind != reflect.Array && dataValKind != reflect.Slice {
+		if d.config.WeaklyTypedInput {
+			switch {
+			// Slice and array we use the normal logic
+			case dataValKind == reflect.Slice, dataValKind == reflect.Array:
+				break
+
+			// Empty maps turn into empty slices
+			case dataValKind == reflect.Map:
+				if dataVal.Len() == 0 {
+					val.Set(reflect.MakeSlice(sliceType, 0, 0))
+					return nil
+				}
+				// Create slice of maps of other sizes
+				return d.decodeSlice(name, []interface{}{data}, val)
+
+			case dataValKind == reflect.String && valElemType.Kind() == reflect.Uint8:
+				return d.decodeSlice(name, []byte(dataVal.String()), val)
+
+			// All other types we try to convert to the slice type
+			// and "lift" it into it. i.e. a string becomes a string slice.
+			default:
+				// Just re-try this function with data as a slice.
+				return d.decodeSlice(name, []interface{}{data}, val)
+			}
+		}
+
+		return fmt.Errorf(
+			"'%s': source data must be an array or slice, got %s", name, dataValKind)
+	}
+
+	// If the input value is nil, then don't allocate since empty != nil
+	if dataValKind != reflect.Array && dataVal.IsNil() {
+		return nil
+	}
+
+	valSlice := val
+	if valSlice.IsNil() || d.config.ZeroFields {
+		// Make a new slice to hold our result, same size as the original data.
+		valSlice = reflect.MakeSlice(sliceType, dataVal.Len(), dataVal.Len())
+	} else if valSlice.Len() > dataVal.Len() {
+		valSlice = valSlice.Slice(0, dataVal.Len())
+	}
+
+	// Accumulate any errors
+	errors := make([]string, 0)
+
+	for i := 0; i < dataVal.Len(); i++ {
+		currentData := dataVal.Index(i).Interface()
+		for valSlice.Len() <= i {
+			valSlice = reflect.Append(valSlice, reflect.Zero(valElemType))
+		}
+		currentField := valSlice.Index(i)
+
+		fieldName := name + "[" + strconv.Itoa(i) + "]"
+		if err := d.decode(fieldName, currentData, currentField); err != nil {
+			errors = appendErrors(errors, err)
+		}
+	}
+
+	// Finally, set the value to the slice we built up
+	val.Set(valSlice)
+
+	// If there were errors, we return those
+	if len(errors) > 0 {
+		return &Error{errors}
+	}
+
+	return nil
+}
+
+func (d *Decoder) decodeArray(name string, data interface{}, val reflect.Value) error {
+	dataVal := reflect.Indirect(reflect.ValueOf(data))
+	dataValKind := dataVal.Kind()
+	valType := val.Type()
+	valElemType := valType.Elem()
+	arrayType := reflect.ArrayOf(valType.Len(), valElemType)
+
+	valArray := val
+
+	if isComparable(valArray) && valArray.Interface() == reflect.Zero(valArray.Type()).Interface() || d.config.ZeroFields {
+		// Check input type
+		if dataValKind != reflect.Array && dataValKind != reflect.Slice {
+			if d.config.WeaklyTypedInput {
+				switch {
+				// Empty maps turn into empty arrays
+				case dataValKind == reflect.Map:
+					if dataVal.Len() == 0 {
+						val.Set(reflect.Zero(arrayType))
+						return nil
+					}
+
+				// All other types we try to convert to the array type
+				// and "lift" it into it. i.e. a string becomes a string array.
+				default:
+					// Just re-try this function with data as a slice.
+					return d.decodeArray(name, []interface{}{data}, val)
+				}
+			}
+
+			return fmt.Errorf(
+				"'%s': source data must be an array or slice, got %s", name, dataValKind)
+
+		}
+		if dataVal.Len() > arrayType.Len() {
+			return fmt.Errorf(
+				"'%s': expected source data to have length less or equal to %d, got %d", name, arrayType.Len(), dataVal.Len())
+		}
+
+		// Make a new array to hold our result, same size as the original data.
+		valArray = reflect.New(arrayType).Elem()
+	}
+
+	// Accumulate any errors
+	errors := make([]string, 0)
+
+	for i := 0; i < dataVal.Len(); i++ {
+		currentData := dataVal.Index(i).Interface()
+		currentField := valArray.Index(i)
+
+		fieldName := name + "[" + strconv.Itoa(i) + "]"
+		if err := d.decode(fieldName, currentData, currentField); err != nil {
+			errors = appendErrors(errors, err)
+		}
+	}
+
+	// Finally, set the value to the array we built up
+	val.Set(valArray)
+
+	// If there were errors, we return those
+	if len(errors) > 0 {
+		return &Error{errors}
+	}
+
+	return nil
+}
+
+func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value) error {
+	dataVal := reflect.Indirect(reflect.ValueOf(data))
+
+	// If the type of the value to write to and the data match directly,
+	// then we just set it directly instead of recursing into the structure.
+	if dataVal.Type() == val.Type() {
+		val.Set(dataVal)
+		return nil
+	}
+
+	dataValKind := dataVal.Kind()
+	switch dataValKind {
+	case reflect.Map:
+		return d.decodeStructFromMap(name, dataVal, val)
+
+	case reflect.Struct:
+		// Not the most efficient way to do this but we can optimize later if
+		// we want to. To convert from struct to struct we go to map first
+		// as an intermediary.
+
+		// Make a new map to hold our result
+		mapType := reflect.TypeOf((map[string]interface{})(nil))
+		mval := reflect.MakeMap(mapType)
+
+		// Creating a pointer to a map so that other methods can completely
+		// overwrite the map if need be (looking at you decodeMapFromMap). The
+		// indirection allows the underlying map to be settable (CanSet() == true)
+		// whereas reflect.MakeMap returns an unsettable map.
+		addrVal := reflect.New(mval.Type())
+
+		reflect.Indirect(addrVal).Set(mval)
+		if err := d.decodeMapFromStruct(name, dataVal, reflect.Indirect(addrVal), mval); err != nil {
+			return err
+		}
+
+		result := d.decodeStructFromMap(name, reflect.Indirect(addrVal), val)
+		return result
+
+	default:
+		return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind())
+	}
+}
+
+func (d *Decoder) decodeStructFromMap(name string, dataVal, val reflect.Value) error {
+	dataValType := dataVal.Type()
+	if kind := dataValType.Key().Kind(); kind != reflect.String && kind != reflect.Interface {
+		return fmt.Errorf(
+			"'%s' needs a map with string keys, has '%s' keys",
+			name, dataValType.Key().Kind())
+	}
+
+	dataValKeys := make(map[reflect.Value]struct{})
+	dataValKeysUnused := make(map[interface{}]struct{})
+	for _, dataValKey := range dataVal.MapKeys() {
+		dataValKeys[dataValKey] = struct{}{}
+		dataValKeysUnused[dataValKey.Interface()] = struct{}{}
+	}
+
+	targetValKeysUnused := make(map[interface{}]struct{})
+	errors := make([]string, 0)
+
+	// This slice will keep track of all the structs we'll be decoding.
+	// There can be more than one struct if there are embedded structs
+	// that are squashed.
+	structs := make([]reflect.Value, 1, 5)
+	structs[0] = val
+
+	// Compile the list of all the fields that we're going to be decoding
+	// from all the structs.
+	type field struct {
+		field reflect.StructField
+		val   reflect.Value
+	}
+
+	// remainField is set to a valid field set with the "remain" tag if
+	// we are keeping track of remaining values.
+	var remainField *field
+
+	fields := []field{}
+	for len(structs) > 0 {
+		structVal := structs[0]
+		structs = structs[1:]
+
+		structType := structVal.Type()
+
+		for i := 0; i < structType.NumField(); i++ {
+			fieldType := structType.Field(i)
+			fieldVal := structVal.Field(i)
+			if fieldVal.Kind() == reflect.Ptr && fieldVal.Elem().Kind() == reflect.Struct {
+				// Handle embedded struct pointers as embedded structs.
+				fieldVal = fieldVal.Elem()
+			}
+
+			// If "squash" is specified in the tag, we squash the field down.
+			squash := d.config.Squash && fieldVal.Kind() == reflect.Struct && fieldType.Anonymous
+			remain := false
+
+			// We always parse the tags cause we're looking for other tags too
+			tagParts := strings.Split(fieldType.Tag.Get(d.config.TagName), ",")
+			for _, tag := range tagParts[1:] {
+				if tag == "squash" {
+					squash = true
+					break
+				}
+
+				if tag == "remain" {
+					remain = true
+					break
+				}
+			}
+
+			if squash {
+				if fieldVal.Kind() != reflect.Struct {
+					errors = appendErrors(errors,
+						fmt.Errorf("%s: unsupported type for squash: %s", fieldType.Name, fieldVal.Kind()))
+				} else {
+					structs = append(structs, fieldVal)
+				}
+				continue
+			}
+
+			// Build our field
+			if remain {
+				remainField = &field{fieldType, fieldVal}
+			} else {
+				// Normal struct field, store it away
+				fields = append(fields, field{fieldType, fieldVal})
+			}
+		}
+	}
+
+	// for fieldType, field := range fields {
+	for _, f := range fields {
+		field, fieldValue := f.field, f.val
+		fieldName := field.Name
+
+		tagValue := field.Tag.Get(d.config.TagName)
+		if tagValue == "" && d.config.IgnoreUntaggedFields {
+			continue
+		}
+		tagValue = strings.SplitN(tagValue, ",", 2)[0]
+		if tagValue != "" {
+			fieldName = tagValue
+		}
+
+		rawMapKey := reflect.ValueOf(fieldName)
+		rawMapVal := dataVal.MapIndex(rawMapKey)
+		if !rawMapVal.IsValid() {
+			// Do a slower search by iterating over each key and
+			// doing case-insensitive search.
+			for dataValKey := range dataValKeys {
+				mK, ok := dataValKey.Interface().(string)
+				if !ok {
+					// Not a string key
+					continue
+				}
+
+				if d.config.MatchName(mK, fieldName) {
+					rawMapKey = dataValKey
+					rawMapVal = dataVal.MapIndex(dataValKey)
+					break
+				}
+			}
+
+			if !rawMapVal.IsValid() {
+				// There was no matching key in the map for the value in
+				// the struct. Remember it for potential errors and metadata.
+				targetValKeysUnused[fieldName] = struct{}{}
+				continue
+			}
+		}
+
+		if !fieldValue.IsValid() {
+			// This should never happen
+			panic("field is not valid")
+		}
+
+		// If we can't set the field, then it is unexported or something,
+		// and we just continue onwards.
+		if !fieldValue.CanSet() {
+			continue
+		}
+
+		// Delete the key we're using from the unused map so we stop tracking
+		delete(dataValKeysUnused, rawMapKey.Interface())
+
+		// If the name is empty string, then we're at the root, and we
+		// don't dot-join the fields.
+		if name != "" {
+			fieldName = name + "." + fieldName
+		}
+
+		if err := d.decode(fieldName, rawMapVal.Interface(), fieldValue); err != nil {
+			errors = appendErrors(errors, err)
+		}
+	}
+
+	// If we have a "remain"-tagged field and we have unused keys then
+	// we put the unused keys directly into the remain field.
+	if remainField != nil && len(dataValKeysUnused) > 0 {
+		// Build a map of only the unused values
+		remain := map[interface{}]interface{}{}
+		for key := range dataValKeysUnused {
+			remain[key] = dataVal.MapIndex(reflect.ValueOf(key)).Interface()
+		}
+
+		// Decode it as-if we were just decoding this map onto our map.
+		if err := d.decodeMap(name, remain, remainField.val); err != nil {
+			errors = appendErrors(errors, err)
+		}
+
+		// Set the unused-key map to nil so that the ErrorUnused check below
+		// does not report the keys we just moved into the remain field.
+		dataValKeysUnused = nil
+	}
+
+	if d.config.ErrorUnused && len(dataValKeysUnused) > 0 {
+		keys := make([]string, 0, len(dataValKeysUnused))
+		for rawKey := range dataValKeysUnused {
+			keys = append(keys, rawKey.(string))
+		}
+		sort.Strings(keys)
+
+		err := fmt.Errorf("'%s' has invalid keys: %s", name, strings.Join(keys, ", "))
+		errors = appendErrors(errors, err)
+	}
+
+	if d.config.ErrorUnset && len(targetValKeysUnused) > 0 {
+		keys := make([]string, 0, len(targetValKeysUnused))
+		for rawKey := range targetValKeysUnused {
+			keys = append(keys, rawKey.(string))
+		}
+		sort.Strings(keys)
+
+		err := fmt.Errorf("'%s' has unset fields: %s", name, strings.Join(keys, ", "))
+		errors = appendErrors(errors, err)
+	}
+
+	if len(errors) > 0 {
+		return &Error{errors}
+	}
+
+	// Add the unused keys to the list of unused keys if we're tracking metadata
+	if d.config.Metadata != nil {
+		for rawKey := range dataValKeysUnused {
+			key := rawKey.(string)
+			if name != "" {
+				key = name + "." + key
+			}
+
+			d.config.Metadata.Unused = append(d.config.Metadata.Unused, key)
+		}
+		for rawKey := range targetValKeysUnused {
+			key := rawKey.(string)
+			if name != "" {
+				key = name + "." + key
+			}
+
+			d.config.Metadata.Unset = append(d.config.Metadata.Unset, key)
+		}
+	}
+
+	return nil
+}
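
The ",remain" handling implemented above is easiest to see from the caller's side; a sketch with invented names:

	type config struct {
		Name  string                 `mapstructure:"name"`
		Other map[string]interface{} `mapstructure:",remain"`
	}

	var c config
	in := map[string]interface{}{"name": "svc", "retries": 3, "debug": true}
	if err := mapstructure.Decode(in, &c); err != nil {
		// handle error
	}
	// c.Name  == "svc"
	// c.Other == map[string]interface{}{"retries": 3, "debug": true}
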
+
+func isEmptyValue(v reflect.Value) bool {
+	switch getKind(v) {
+	case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
+		return v.Len() == 0
+	case reflect.Bool:
+		return !v.Bool()
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+		return v.Int() == 0
+	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+		return v.Uint() == 0
+	case reflect.Float32, reflect.Float64:
+		return v.Float() == 0
+	case reflect.Interface, reflect.Ptr:
+		return v.IsNil()
+	}
+	return false
+}
+
+func getKind(val reflect.Value) reflect.Kind {
+	kind := val.Kind()
+
+	switch {
+	case kind >= reflect.Int && kind <= reflect.Int64:
+		return reflect.Int
+	case kind >= reflect.Uint && kind <= reflect.Uint64:
+		return reflect.Uint
+	case kind >= reflect.Float32 && kind <= reflect.Float64:
+		return reflect.Float32
+	default:
+		return kind
+	}
+}
+
+func isStructTypeConvertibleToMap(typ reflect.Type, checkMapstructureTags bool, tagName string) bool {
+	for i := 0; i < typ.NumField(); i++ {
+		f := typ.Field(i)
+		if f.PkgPath == "" && !checkMapstructureTags { // check for unexported fields
+			return true
+		}
+		if checkMapstructureTags && f.Tag.Get(tagName) != "" { // check for mapstructure tags inside
+			return true
+		}
+	}
+	return false
+}
+
+func dereferencePtrToStructIfNeeded(v reflect.Value, tagName string) reflect.Value {
+	if v.Kind() != reflect.Ptr || v.Elem().Kind() != reflect.Struct {
+		return v
+	}
+	deref := v.Elem()
+	derefT := deref.Type()
+	if isStructTypeConvertibleToMap(derefT, true, tagName) {
+		return deref
+	}
+	return v
+}
diff --git a/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_19.go b/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_19.go
new file mode 100644
index 0000000000000000000000000000000000000000..d0913fff6c7db9a811ec24fa0b6f6c685a8b8dfd
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_19.go
@@ -0,0 +1,44 @@
+//go:build !go1.20
+
+package mapstructure
+
+import "reflect"
+
+func isComparable(v reflect.Value) bool {
+	k := v.Kind()
+	switch k {
+	case reflect.Invalid:
+		return false
+
+	case reflect.Array:
+		switch v.Type().Elem().Kind() {
+		case reflect.Interface, reflect.Array, reflect.Struct:
+			for i := 0; i < v.Type().Len(); i++ {
+				// if !v.Index(i).Comparable() {
+				if !isComparable(v.Index(i)) {
+					return false
+				}
+			}
+			return true
+		}
+		return v.Type().Comparable()
+
+	case reflect.Interface:
+		// return v.Elem().Comparable()
+		return isComparable(v.Elem())
+
+	case reflect.Struct:
+		for i := 0; i < v.NumField(); i++ {
+			return false
+
+			// if !v.Field(i).Comparable() {
+			if !isComparable(v.Field(i)) {
+				return false
+			}
+		}
+		return true
+
+	default:
+		return v.Type().Comparable()
+	}
+}
diff --git a/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_20.go b/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_20.go
new file mode 100644
index 0000000000000000000000000000000000000000..f8255a1b174b790025394aa0a65a9d76070a20d6
--- /dev/null
+++ b/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_20.go
@@ -0,0 +1,10 @@
+//go:build go1.20
+
+package mapstructure
+
+import "reflect"
+
+// TODO: remove once we drop support for Go <1.20
+func isComparable(v reflect.Value) bool {
+	return v.Comparable()
+}
diff --git a/vendor/github.com/golangci/check/cmd/structcheck/structcheck.go b/vendor/github.com/golangci/check/cmd/structcheck/structcheck.go
deleted file mode 100644
index 5dc5f838040b84eab429e97efa551e0448bbd1f6..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/check/cmd/structcheck/structcheck.go
+++ /dev/null
@@ -1,193 +0,0 @@
-// structcheck
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-// GNU General Public License for more details.
-//
-// You should have received a copy of the GNU General Public License
-// along with this program.  If not, see <http://www.gnu.org/licenses/>.
-
-package structcheck
-
-import (
-	"flag"
-	"fmt"
-	"go/ast"
-	"go/token"
-	"go/types"
-
-	"golang.org/x/tools/go/loader"
-)
-
-var (
-	assignmentsOnly = flag.Bool("structcheck.a", false, "Count assignments only")
-	loadTestFiles   = flag.Bool("structcheck.t", false, "Load test files too")
-	buildTags       = flag.String("structcheck.tags", "", "Build tags")
-)
-
-type visitor struct {
-	prog *loader.Program
-	pkg  *loader.PackageInfo
-	m    map[types.Type]map[string]int
-	skip map[types.Type]struct{}
-}
-
-func (v *visitor) decl(t types.Type, fieldName string) {
-	if _, ok := v.m[t]; !ok {
-		v.m[t] = make(map[string]int)
-	}
-	if _, ok := v.m[t][fieldName]; !ok {
-		v.m[t][fieldName] = 0
-	}
-}
-
-func (v *visitor) assignment(t types.Type, fieldName string) {
-	if _, ok := v.m[t]; !ok {
-		v.m[t] = make(map[string]int)
-	}
-	if _, ok := v.m[t][fieldName]; ok {
-		v.m[t][fieldName]++
-	} else {
-		v.m[t][fieldName] = 1
-	}
-}
-
-func (v *visitor) typeSpec(node *ast.TypeSpec) {
-	if strukt, ok := node.Type.(*ast.StructType); ok {
-		t := v.pkg.Info.Defs[node.Name].Type()
-		for _, f := range strukt.Fields.List {
-			if len(f.Names) > 0 {
-				fieldName := f.Names[0].Name
-				v.decl(t, fieldName)
-			}
-		}
-	}
-}
-
-func (v *visitor) typeAndFieldName(expr *ast.SelectorExpr) (types.Type, string, bool) {
-	selection := v.pkg.Info.Selections[expr]
-	if selection == nil {
-		return nil, "", false
-	}
-	recv := selection.Recv()
-	if ptr, ok := recv.(*types.Pointer); ok {
-		recv = ptr.Elem()
-	}
-	return recv, selection.Obj().Name(), true
-}
-
-func (v *visitor) assignStmt(node *ast.AssignStmt) {
-	for _, lhs := range node.Lhs {
-		var selector *ast.SelectorExpr
-		switch expr := lhs.(type) {
-		case *ast.SelectorExpr:
-			selector = expr
-		case *ast.IndexExpr:
-			if expr, ok := expr.X.(*ast.SelectorExpr); ok {
-				selector = expr
-			}
-		}
-		if selector != nil {
-			if t, fn, ok := v.typeAndFieldName(selector); ok {
-				v.assignment(t, fn)
-			}
-		}
-	}
-}
-
-func (v *visitor) compositeLiteral(node *ast.CompositeLit) {
-	t := v.pkg.Info.Types[node.Type].Type
-	for _, expr := range node.Elts {
-		if kv, ok := expr.(*ast.KeyValueExpr); ok {
-			if ident, ok := kv.Key.(*ast.Ident); ok {
-				v.assignment(t, ident.Name)
-			}
-		} else {
-			// Struct literal with positional values.
-			// All the fields are assigned.
-			v.skip[t] = struct{}{}
-			break
-		}
-	}
-}
-
-func (v *visitor) Visit(node ast.Node) ast.Visitor {
-	switch node := node.(type) {
-	case *ast.TypeSpec:
-		v.typeSpec(node)
-
-	case *ast.AssignStmt:
-		if *assignmentsOnly {
-			v.assignStmt(node)
-		}
-
-	case *ast.SelectorExpr:
-		if !*assignmentsOnly {
-			if t, fn, ok := v.typeAndFieldName(node); ok {
-				v.assignment(t, fn)
-			}
-		}
-
-	case *ast.CompositeLit:
-		v.compositeLiteral(node)
-	}
-
-	return v
-}
-
-type Issue struct {
-	Pos       token.Position
-	Type      string
-	FieldName string
-}
-
-func Run(program *loader.Program, reportExported bool) []Issue {
-	var issues []Issue
-	for _, pkg := range program.InitialPackages() {
-		visitor := &visitor{
-			m:    make(map[types.Type]map[string]int),
-			skip: make(map[types.Type]struct{}),
-			prog: program,
-			pkg:  pkg,
-		}
-		for _, f := range pkg.Files {
-			ast.Walk(visitor, f)
-		}
-
-		for t := range visitor.m {
-			if _, skip := visitor.skip[t]; skip {
-				continue
-			}
-			for fieldName, v := range visitor.m[t] {
-				if !reportExported && ast.IsExported(fieldName) {
-					continue
-				}
-				if v == 0 {
-					field, _, _ := types.LookupFieldOrMethod(t, false, pkg.Pkg, fieldName)
-					if field == nil {
-						fmt.Printf("%s: unknown field or method: %s.%s\n", pkg.Pkg.Path(), t, fieldName)
-						continue
-					}
-					if fieldName == "XMLName" {
-						if named, ok := field.Type().(*types.Named); ok && named.Obj().Pkg().Path() == "encoding/xml" {
-							continue
-						}
-					}
-					pos := program.Fset.Position(field.Pos())
-					issues = append(issues, Issue{
-						Pos:       pos,
-						Type:      types.TypeString(t, nil),
-						FieldName: fieldName,
-					})
-				}
-			}
-		}
-	}
-
-	return issues
-}
diff --git a/vendor/github.com/golangci/check/cmd/varcheck/varcheck.go b/vendor/github.com/golangci/check/cmd/varcheck/varcheck.go
deleted file mode 100644
index 8e93e0473b815d7157b9be79d8cee1d339a25c8a..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/check/cmd/varcheck/varcheck.go
+++ /dev/null
@@ -1,163 +0,0 @@
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-// GNU General Public License for more details.
-//
-// You should have received a copy of the GNU General Public License
-// along with this program.  If not, see <http://www.gnu.org/licenses/>.
-
-package varcheck
-
-import (
-	"flag"
-	"go/ast"
-	"go/token"
-	"strings"
-
-	"go/types"
-
-	"golang.org/x/tools/go/loader"
-)
-
-var (
-	buildTags = flag.String("varcheck.tags", "", "Build tags")
-)
-
-type object struct {
-	pkgPath string
-	name    string
-}
-
-type visitor struct {
-	prog       *loader.Program
-	pkg        *loader.PackageInfo
-	uses       map[object]int
-	positions  map[object]token.Position
-	insideFunc bool
-}
-
-func getKey(obj types.Object) object {
-	if obj == nil {
-		return object{}
-	}
-
-	pkg := obj.Pkg()
-	pkgPath := ""
-	if pkg != nil {
-		pkgPath = pkg.Path()
-	}
-
-	return object{
-		pkgPath: pkgPath,
-		name:    obj.Name(),
-	}
-}
-
-func (v *visitor) decl(obj types.Object) {
-	key := getKey(obj)
-	if _, ok := v.uses[key]; !ok {
-		v.uses[key] = 0
-	}
-	if _, ok := v.positions[key]; !ok {
-		v.positions[key] = v.prog.Fset.Position(obj.Pos())
-	}
-}
-
-func (v *visitor) use(obj types.Object) {
-	key := getKey(obj)
-	if _, ok := v.uses[key]; ok {
-		v.uses[key]++
-	} else {
-		v.uses[key] = 1
-	}
-}
-
-func isReserved(name string) bool {
-	return name == "_" || strings.HasPrefix(strings.ToLower(name), "_cgo_")
-}
-
-func (v *visitor) Visit(node ast.Node) ast.Visitor {
-	switch node := node.(type) {
-	case *ast.Ident:
-		v.use(v.pkg.Info.Uses[node])
-
-	case *ast.ValueSpec:
-		if !v.insideFunc {
-			for _, ident := range node.Names {
-				if !isReserved(ident.Name) {
-					v.decl(v.pkg.Info.Defs[ident])
-				}
-			}
-		}
-		for _, val := range node.Values {
-			ast.Walk(v, val)
-		}
-		if node.Type != nil {
-			ast.Walk(v, node.Type)
-		}
-		return nil
-
-	case *ast.FuncDecl:
-		if node.Body != nil {
-			v.insideFunc = true
-			ast.Walk(v, node.Body)
-			v.insideFunc = false
-		}
-
-		if node.Recv != nil {
-			ast.Walk(v, node.Recv)
-		}
-		if node.Type != nil {
-			ast.Walk(v, node.Type)
-		}
-
-		return nil
-	}
-
-	return v
-}
-
-type Issue struct {
-	Pos     token.Position
-	VarName string
-}
-
-func Run(program *loader.Program, reportExported bool) []Issue {
-	var issues []Issue
-	uses := make(map[object]int)
-	positions := make(map[object]token.Position)
-
-	for _, pkgInfo := range program.InitialPackages() {
-		if pkgInfo.Pkg.Path() == "unsafe" {
-			continue
-		}
-
-		v := &visitor{
-			prog:      program,
-			pkg:       pkgInfo,
-			uses:      uses,
-			positions: positions,
-		}
-
-		for _, f := range v.pkg.Files {
-			ast.Walk(v, f)
-		}
-	}
-
-	for obj, useCount := range uses {
-		if useCount == 0 && (reportExported || !ast.IsExported(obj.name)) {
-			pos := positions[obj]
-			issues = append(issues, Issue{
-				Pos:     pos,
-				VarName: obj.name,
-			})
-		}
-	}
-
-	return issues
-}
diff --git a/vendor/github.com/golangci/go-misc/LICENSE b/vendor/github.com/golangci/go-misc/LICENSE
deleted file mode 100644
index cc42dd45d1b00bd1c390ba3b84094e75e16c1906..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/go-misc/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright (c) 2012 Rémy Oudompheng. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-   * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-   * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-   * The name of Rémy Oudompheng may not be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
diff --git a/vendor/github.com/golangci/go-misc/deadcode/README.md b/vendor/github.com/golangci/go-misc/deadcode/README.md
deleted file mode 100644
index 5504231281055b46cf0486475b814f6bd90b983e..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/go-misc/deadcode/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# deadcode
-
-`deadcode` is a very simple utility which detects unused declarations in a Go package.
-
-## Usage
-```
-deadcode [-test] [packages]
-
-    -test     Include test files
-    packages  A list of packages using the same conventions as the go tool
-```
-
-## Limitations
-
-* Self-referential unused code is not currently reported
-* A single package can be tested at a time
-* Unused methods are not reported
-
diff --git a/vendor/github.com/golangci/go-misc/deadcode/deadcode.go b/vendor/github.com/golangci/go-misc/deadcode/deadcode.go
deleted file mode 100644
index c154a576b3206064d723e8b68b45a753efaba531..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/go-misc/deadcode/deadcode.go
+++ /dev/null
@@ -1,138 +0,0 @@
-package deadcode
-
-import (
-	"fmt"
-	"go/ast"
-	"go/token"
-	"go/types"
-	"os"
-	"path/filepath"
-	"sort"
-	"strings"
-
-	"golang.org/x/tools/go/loader"
-)
-
-var exitCode int
-
-var (
-	withTestFiles bool
-)
-
-type Issue struct {
-	Pos             token.Position
-	UnusedIdentName string
-}
-
-func Run(program *loader.Program) ([]Issue, error) {
-	ctx := &Context{
-		program: program,
-	}
-	report := ctx.Process()
-	var issues []Issue
-	for _, obj := range report {
-		issues = append(issues, Issue{
-			Pos:             program.Fset.Position(obj.Pos()),
-			UnusedIdentName: obj.Name(),
-		})
-	}
-
-	return issues, nil
-}
-
-func fatalf(format string, args ...interface{}) {
-	panic(fmt.Errorf(format, args...))
-}
-
-type Context struct {
-	cwd       string
-	withTests bool
-
-	program *loader.Program
-}
-
-// pos resolves a compact position encoding into a verbose one
-func (ctx *Context) pos(pos token.Pos) token.Position {
-	if ctx.cwd == "" {
-		ctx.cwd, _ = os.Getwd()
-	}
-	p := ctx.program.Fset.Position(pos)
-	f, err := filepath.Rel(ctx.cwd, p.Filename)
-	if err == nil {
-		p.Filename = f
-	}
-	return p
-}
-
-// error formats the error to standard error, adding program
-// identification and a newline
-func (ctx *Context) errorf(pos token.Pos, format string, args ...interface{}) {
-	p := ctx.pos(pos)
-	fmt.Fprintf(os.Stderr, p.String()+": "+format+"\n", args...)
-	exitCode = 2
-}
-
-func (ctx *Context) Load(args ...string) {
-	// TODO
-}
-
-func (ctx *Context) Process() []types.Object {
-	prog := ctx.program
-	var allUnused []types.Object
-	for _, pkg := range prog.Imported {
-		unused := ctx.doPackage(prog, pkg)
-		allUnused = append(allUnused, unused...)
-	}
-	for _, pkg := range prog.Created {
-		unused := ctx.doPackage(prog, pkg)
-		allUnused = append(allUnused, unused...)
-	}
-	sort.Sort(objects(allUnused))
-	return allUnused
-}
-
-func isTestFuncByName(name string) bool {
-	return strings.HasPrefix(name, "Test") ||
-		strings.HasPrefix(name, "Benchmark") ||
-		strings.HasPrefix(name, "Fuzz") ||
-		strings.HasPrefix(name, "Example")
-}
-
-func (ctx *Context) doPackage(prog *loader.Program, pkg *loader.PackageInfo) []types.Object {
-	used := make(map[types.Object]bool)
-	for _, file := range pkg.Files {
-		ast.Inspect(file, func(n ast.Node) bool {
-			id, ok := n.(*ast.Ident)
-			if !ok {
-				return true
-			}
-			obj := pkg.Info.Uses[id]
-			if obj != nil {
-				used[obj] = true
-			}
-			return false
-		})
-	}
-
-	global := pkg.Pkg.Scope()
-	var unused []types.Object
-	for _, name := range global.Names() {
-		if pkg.Pkg.Name() == "main" && name == "main" {
-			continue
-		}
-		obj := global.Lookup(name)
-		_, isSig := obj.Type().(*types.Signature)
-		pos := ctx.pos(obj.Pos())
-		isTestMethod := isSig && isTestFuncByName(obj.Name()) && strings.HasSuffix(pos.Filename, "_test.go")
-		if !used[obj] && ((pkg.Pkg.Name() == "main" && !isTestMethod) || !ast.IsExported(name)) {
-			unused = append(unused, obj)
-		}
-	}
-	return unused
-}
-
-type objects []types.Object
-
-func (s objects) Len() int           { return len(s) }
-func (s objects) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }
-func (s objects) Less(i, j int) bool { return s[i].Pos() < s[j].Pos() }
diff --git a/vendor/github.com/golangci/gofmt/gofmt/doc.go b/vendor/github.com/golangci/gofmt/gofmt/doc.go
index da0c8581ddea46ff22cae9ec246791f33fcadf82..d0a4580219c012c9b6fc717d1c43f4df9e0a566d 100644
--- a/vendor/github.com/golangci/gofmt/gofmt/doc.go
+++ b/vendor/github.com/golangci/gofmt/gofmt/doc.go
@@ -13,9 +13,11 @@ that directory, recursively.  (Files starting with a period are ignored.)
 By default, gofmt prints the reformatted sources to standard output.
 
 Usage:
+
 	gofmt [flags] [path ...]
 
 The flags are:
+
 	-d
 		Do not print reformatted sources to standard output.
 		If a file's formatting is different than gofmt's, print diffs
@@ -37,10 +39,10 @@ The flags are:
 		the original file is restored from an automatic backup.
 
 Debugging support:
+
 	-cpuprofile filename
 		Write cpu profile to the specified file.
 
-
 The rewrite rule specified with the -r flag must be a string of the form:
 
 	pattern -> replacement
@@ -57,7 +59,7 @@ such a fragment, gofmt preserves leading indentation as well as leading
 and trailing spaces, so that individual sections of a Go program can be
 formatted by piping them through gofmt.
 
-Examples
+# Examples
 
 To check files for unnecessary parentheses:
 
@@ -71,7 +73,7 @@ To convert the package tree from explicit slice upper bounds to implicit ones:
 
 	gofmt -r 'α[β:len(α)] -> α[β:]' -w $GOROOT/src
 
-The simplify command
+# The simplify command
 
 When invoked with -s gofmt will make the following source transformations where possible.
 
diff --git a/vendor/github.com/golangci/gofmt/gofmt/gofmt.go b/vendor/github.com/golangci/gofmt/gofmt/gofmt.go
index e7612afae0312fdd836f14ed5ec969cbdcc07abc..be046f34cf93cb59457315dfa494b1ffdf94771b 100644
--- a/vendor/github.com/golangci/gofmt/gofmt/gofmt.go
+++ b/vendor/github.com/golangci/gofmt/gofmt/gofmt.go
@@ -76,6 +76,11 @@ func initParserMode() {
 	if *allErrors {
 		parserMode |= parser.AllErrors
 	}
+	// It's only -r that makes use of go/ast's object resolution,
+	// so avoid the unnecessary work if the flag isn't used.
+	if *rewriteRule == "" {
+		parserMode |= parser.SkipObjectResolution
+	}
 }
 
 func isGoFile(f fs.DirEntry) bool {
@@ -286,12 +291,9 @@ func processFile(filename string, info fs.FileInfo, in io.Reader, r *reporter) e
 			}
 		}
 		if *doDiff {
-			data, err := diffWithReplaceTempFile(src, res, filename)
-			if err != nil {
-				return fmt.Errorf("computing diff: %s", err)
-			}
-			fmt.Fprintf(r, "diff -u %s %s\n", filepath.ToSlash(filename+".orig"), filepath.ToSlash(filename))
-			r.Write(data)
+			newName := filepath.ToSlash(filename)
+			oldName := newName + ".orig"
+			r.Write(diff.Diff(oldName, src, newName, res))
 		}
 	}
 
@@ -350,7 +352,12 @@ func readFile(filename string, info fs.FileInfo, in io.Reader) ([]byte, error) {
 	// stop to avoid corrupting it.)
 	src := make([]byte, size+1)
 	n, err := io.ReadFull(in, src)
-	if err != nil && err != io.ErrUnexpectedEOF {
+	switch err {
+	case nil, io.EOF, io.ErrUnexpectedEOF:
+		// io.ReadFull returns io.EOF (for an empty file) or io.ErrUnexpectedEOF
+		// (for a non-empty file) if the file was changed unexpectedly. Continue
+		// with comparing file sizes in those cases.
+	default:
 		return nil, err
 	}
 	if n < size {
@@ -463,43 +470,6 @@ func fileWeight(path string, info fs.FileInfo) int64 {
 	return info.Size()
 }
 
-func diffWithReplaceTempFile(b1, b2 []byte, filename string) ([]byte, error) {
-	data, err := diff.Diff("gofmt", b1, b2)
-	if len(data) > 0 {
-		return replaceTempFilename(data, filename)
-	}
-	return data, err
-}
-
-// replaceTempFilename replaces temporary filenames in diff with actual one.
-//
-// --- /tmp/gofmt316145376	2017-02-03 19:13:00.280468375 -0500
-// +++ /tmp/gofmt617882815	2017-02-03 19:13:00.280468375 -0500
-// ...
-// ->
-// --- path/to/file.go.orig	2017-02-03 19:13:00.280468375 -0500
-// +++ path/to/file.go	2017-02-03 19:13:00.280468375 -0500
-// ...
-func replaceTempFilename(diff []byte, filename string) ([]byte, error) {
-	bs := bytes.SplitN(diff, []byte{'\n'}, 3)
-	if len(bs) < 3 {
-		return nil, fmt.Errorf("got unexpected diff for %s", filename)
-	}
-	// Preserve timestamps.
-	var t0, t1 []byte
-	if i := bytes.LastIndexByte(bs[0], '\t'); i != -1 {
-		t0 = bs[0][i:]
-	}
-	if i := bytes.LastIndexByte(bs[1], '\t'); i != -1 {
-		t1 = bs[1][i:]
-	}
-	// Always print filepath with slash separator.
-	f := filepath.ToSlash(filename)
-	bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0))
-	bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1))
-	return bytes.Join(bs, []byte{'\n'}), nil
-}
-
 const chmodSupported = runtime.GOOS != "windows"
 
 // backupFile writes data to a new file named filename<number> with permissions perm,
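Note on the SkipObjectResolution hunk above: object resolution is only needed when a -r rewrite rule is in play, so the parser can skip it otherwise. A minimal, self-contained sketch of that mode selection using only the standard go/parser package (the rewriteRule variable and the demo source are placeholders, not part of the vendored code):

package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

func main() {
	// Mirror the vendored logic: skip go/ast object resolution unless a
	// rewrite rule actually needs it.
	rewriteRule := "" // hypothetical: empty means no -r flag was given
	mode := parser.ParseComments
	if rewriteRule == "" {
		mode |= parser.SkipObjectResolution
	}

	fset := token.NewFileSet()
	src := []byte("package demo\n\nfunc Demo() {}\n")
	file, err := parser.ParseFile(fset, "demo.go", src, mode)
	if err != nil {
		panic(err)
	}
	fmt.Println(file.Name.Name) // "demo"
}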
diff --git a/vendor/github.com/golangci/gofmt/gofmt/golangci.go b/vendor/github.com/golangci/gofmt/gofmt/golangci.go
index c9c3fe2ae5ff9ba7af22c025739e9a749cffdd6c..a69611e1d31fa80103781a23ed6867105afe5a62 100644
--- a/vendor/github.com/golangci/gofmt/gofmt/golangci.go
+++ b/vendor/github.com/golangci/gofmt/gofmt/golangci.go
@@ -8,8 +8,14 @@ import (
 	"go/printer"
 	"go/token"
 	"os"
+	"path/filepath"
+	"sync"
+
+	"github.com/golangci/gofmt/gofmt/internal/diff"
 )
 
+var parserModeMu sync.RWMutex
+
 type RewriteRule struct {
 	Pattern     string
 	Replacement string
@@ -31,7 +37,9 @@ func RunRewrite(filename string, needSimplify bool, rewriteRules []RewriteRule)
 
 	fset := token.NewFileSet()
 
+	parserModeMu.Lock()
 	initParserMode()
+	parserModeMu.Unlock()
 
 	file, sourceAdj, indentAdj, err := parse(fset, filename, src, false)
 	if err != nil {
@@ -59,12 +67,10 @@ func RunRewrite(filename string, needSimplify bool, rewriteRules []RewriteRule)
 	}
 
 	// formatting has changed
-	data, err := diffWithReplaceTempFile(src, res, filename)
-	if err != nil {
-		return nil, fmt.Errorf("error computing diff: %s", err)
-	}
+	newName := filepath.ToSlash(filename)
+	oldName := newName + ".orig"
 
-	return data, nil
+	return diff.Diff(oldName, src, newName, res), nil
 }
 
 func rewriteFileContent(fset *token.FileSet, file *ast.File, rewriteRules []RewriteRule) (*ast.File, error) {
diff --git a/vendor/github.com/golangci/gofmt/gofmt/internal.go b/vendor/github.com/golangci/gofmt/gofmt/internal.go
index 1abbdd69892567e73a06bdf1cfe2d2c6de4d4ab7..31a825bf833a1940a9eeeaf05fc4953196abcf13 100644
--- a/vendor/github.com/golangci/gofmt/gofmt/internal.go
+++ b/vendor/github.com/golangci/gofmt/gofmt/internal.go
@@ -26,6 +26,13 @@ func parse(fset *token.FileSet, filename string, src []byte, fragmentOk bool) (
 	indentAdj int,
 	err error,
 ) {
+
+	// START - Change related to usage inside golangci-lint
+	parserModeMu.Lock()
+	parserMode := parserMode
+	parserModeMu.Unlock()
+	// END - Change related to usage inside golangci-lint
+
 	// Try as whole source file.
 	file, err = parser.ParseFile(fset, filename, src, parserMode)
 	// If there's no error, return. If the error is that the source file didn't begin with a
diff --git a/vendor/github.com/golangci/gofmt/gofmt/internal/diff/diff.go b/vendor/github.com/golangci/gofmt/gofmt/internal/diff/diff.go
index cbd0529ec6ce1fd50a403d6455a3a51619582fa3..47b285671454ed9ace89cb7b24568abc47c48243 100644
--- a/vendor/github.com/golangci/gofmt/gofmt/internal/diff/diff.go
+++ b/vendor/github.com/golangci/gofmt/gofmt/internal/diff/diff.go
@@ -1,79 +1,261 @@
-// Copyright 2019 The Go Authors. All rights reserved.
+// Copyright 2022 The Go Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-// Package diff implements a Diff function that compare two inputs
-// using the 'diff' tool.
 package diff
 
 import (
 	"bytes"
-	"io/ioutil"
-	"os"
-	"runtime"
-
-	exec "github.com/golangci/gofmt/gofmt/internal/execabs"
+	"fmt"
+	"sort"
+	"strings"
 )
 
-// Returns diff of two arrays of bytes in diff tool format.
-func Diff(prefix string, b1, b2 []byte) ([]byte, error) {
-	f1, err := writeTempFile(prefix, b1)
-	if err != nil {
-		return nil, err
+// A pair is a pair of values tracked for both the x and y side of a diff.
+// It is typically a pair of line indexes.
+type pair struct{ x, y int }
+
+// Diff returns an anchored diff of the two texts old and new
+// in the “unified diff” format. If old and new are identical,
+// Diff returns a nil slice (no output).
+//
+// Unix diff implementations typically look for a diff with
+// the smallest number of lines inserted and removed,
+// which can in the worst case take time quadratic in the
+// number of lines in the texts. As a result, many implementations
+// either can be made to run for a long time or cut off the search
+// after a predetermined amount of work.
+//
+// In contrast, this implementation looks for a diff with the
+// smallest number of “unique” lines inserted and removed,
+// where unique means a line that appears just once in both old and new.
+// We call this an “anchored diff” because the unique lines anchor
+// the chosen matching regions. An anchored diff is usually clearer
+// than a standard diff, because the algorithm does not try to
+// reuse unrelated blank lines or closing braces.
+// The algorithm also guarantees to run in O(n log n) time
+// instead of the standard O(n²) time.
+//
+// Some systems call this approach a “patience diff,” named for
+// the “patience sorting” algorithm, itself named for a solitaire card game.
+// We avoid that name for two reasons. First, the name has been used
+// for a few different variants of the algorithm, so it is imprecise.
+// Second, the name is frequently interpreted as meaning that you have
+// to wait longer (to be patient) for the diff, meaning that it is a slower algorithm,
+// when in fact the algorithm is faster than the standard one.
+func Diff(oldName string, old []byte, newName string, new []byte) []byte {
+	if bytes.Equal(old, new) {
+		return nil
 	}
-	defer os.Remove(f1)
+	x := lines(old)
+	y := lines(new)
+
+	// Print diff header.
+	var out bytes.Buffer
+	fmt.Fprintf(&out, "diff %s %s\n", oldName, newName)
+	fmt.Fprintf(&out, "--- %s\n", oldName)
+	fmt.Fprintf(&out, "+++ %s\n", newName)
+
+	// Loop over matches to consider,
+	// expanding each match to include surrounding lines,
+	// and then printing diff chunks.
+	// To avoid setup/teardown cases outside the loop,
+	// tgs returns a leading {0,0} and trailing {len(x), len(y)} pair
+	// in the sequence of matches.
+	var (
+		done  pair     // printed up to x[:done.x] and y[:done.y]
+		chunk pair     // start lines of current chunk
+		count pair     // number of lines from each side in current chunk
+		ctext []string // lines for current chunk
+	)
+	for _, m := range tgs(x, y) {
+		if m.x < done.x {
+			// Already handled scanning forward from earlier match.
+			continue
+		}
 
-	f2, err := writeTempFile(prefix, b2)
-	if err != nil {
-		return nil, err
+		// Expand matching lines as far as possible,
+		// establishing that x[start.x:end.x] == y[start.y:end.y].
+		// Note that on the first (or last) iteration we may (or definitely do)
+		// have an empty match: start.x==end.x and start.y==end.y.
+		start := m
+		for start.x > done.x && start.y > done.y && x[start.x-1] == y[start.y-1] {
+			start.x--
+			start.y--
+		}
+		end := m
+		for end.x < len(x) && end.y < len(y) && x[end.x] == y[end.y] {
+			end.x++
+			end.y++
+		}
+
+		// Emit the mismatched lines before start into this chunk.
+		// (No effect on first sentinel iteration, when start = {0,0}.)
+		for _, s := range x[done.x:start.x] {
+			ctext = append(ctext, "-"+s)
+			count.x++
+		}
+		for _, s := range y[done.y:start.y] {
+			ctext = append(ctext, "+"+s)
+			count.y++
+		}
+
+		// If we're not at EOF and have too few common lines,
+		// the chunk includes all the common lines and continues.
+		const C = 3 // number of context lines
+		if (end.x < len(x) || end.y < len(y)) &&
+			(end.x-start.x < C || (len(ctext) > 0 && end.x-start.x < 2*C)) {
+			for _, s := range x[start.x:end.x] {
+				ctext = append(ctext, " "+s)
+				count.x++
+				count.y++
+			}
+			done = end
+			continue
+		}
+
+		// End chunk with common lines for context.
+		if len(ctext) > 0 {
+			n := end.x - start.x
+			if n > C {
+				n = C
+			}
+			for _, s := range x[start.x : start.x+n] {
+				ctext = append(ctext, " "+s)
+				count.x++
+				count.y++
+			}
+			done = pair{start.x + n, start.y + n}
+
+			// Format and emit chunk.
+			// Convert line numbers to 1-indexed.
+			// Special case: empty file shows up as 0,0 not 1,0.
+			if count.x > 0 {
+				chunk.x++
+			}
+			if count.y > 0 {
+				chunk.y++
+			}
+			fmt.Fprintf(&out, "@@ -%d,%d +%d,%d @@\n", chunk.x, count.x, chunk.y, count.y)
+			for _, s := range ctext {
+				out.WriteString(s)
+			}
+			count.x = 0
+			count.y = 0
+			ctext = ctext[:0]
+		}
+
+		// If we reached EOF, we're done.
+		if end.x >= len(x) && end.y >= len(y) {
+			break
+		}
+
+		// Otherwise start a new chunk.
+		chunk = pair{end.x - C, end.y - C}
+		for _, s := range x[chunk.x:end.x] {
+			ctext = append(ctext, " "+s)
+			count.x++
+			count.y++
+		}
+		done = end
 	}
-	defer os.Remove(f2)
 
-	cmd := "diff"
-	if runtime.GOOS == "plan9" {
-		cmd = "/bin/ape/diff"
+	return out.Bytes()
+}
+
+// lines returns the lines in the file x, including newlines.
+// If the file does not end in a newline, one is supplied
+// along with a warning about the missing newline.
+func lines(x []byte) []string {
+	l := strings.SplitAfter(string(x), "\n")
+	if l[len(l)-1] == "" {
+		l = l[:len(l)-1]
+	} else {
+		// Treat last line as having a message about the missing newline attached,
+		// using the same text as BSD/GNU diff (including the leading backslash).
+		l[len(l)-1] += "\n\\ No newline at end of file\n"
 	}
+	return l
+}
 
-	data, err := exec.Command(cmd, "-u", f1, f2).CombinedOutput()
-	if len(data) > 0 {
-		// diff exits with a non-zero status when the files don't match.
-		// Ignore that failure as long as we get output.
-		err = nil
+// tgs returns the pairs of indexes of the longest common subsequence
+// of unique lines in x and y, where a unique line is one that appears
+// once in x and once in y.
+//
+// The longest common subsequence algorithm is as described in
+// Thomas G. Szymanski, “A Special Case of the Maximal Common
+// Subsequence Problem,” Princeton TR #170 (January 1975),
+// available at https://research.swtch.com/tgs170.pdf.
+func tgs(x, y []string) []pair {
+	// Count the number of times each string appears in x and y.
+	// We only care about 0, 1, many, counted as 0, -1, -2
+	// for the x side and 0, -4, -8 for the y side.
+	// Using negative numbers now lets us distinguish positive line numbers later.
+	m := make(map[string]int)
+	for _, s := range x {
+		if c := m[s]; c > -2 {
+			m[s] = c - 1
+		}
+	}
+	for _, s := range y {
+		if c := m[s]; c > -8 {
+			m[s] = c - 4
+		}
 	}
 
-	// If we are on Windows and the diff is Cygwin diff,
-	// machines can get into a state where every Cygwin
-	// command works fine but prints a useless message like:
+	// Now unique strings can be identified by m[s] = -1+-4.
 	//
-	//	Cygwin WARNING:
-	//	  Couldn't compute FAST_CWD pointer.  This typically occurs if you're using
-	//	  an older Cygwin version on a newer Windows.  Please update to the latest
-	//	  available Cygwin version from https://cygwin.com/.  If the problem persists,
-	//	  please see https://cygwin.com/problems.html
-	//
-	// Skip over that message and just return the actual diff.
-	if len(data) > 0 && !bytes.HasPrefix(data, []byte("--- ")) {
-		i := bytes.Index(data, []byte("\n--- "))
-		if i >= 0 && i < 80*10 && bytes.Contains(data[:i], []byte("://cygwin.com/")) {
-			data = data[i+1:]
+	// Gather the indexes of those strings in x and y, building:
+	//	xi[i] = increasing indexes of unique strings in x.
+	//	yi[i] = increasing indexes of unique strings in y.
+	//	inv[i] = index j such that x[xi[i]] = y[yi[j]].
+	var xi, yi, inv []int
+	for i, s := range y {
+		if m[s] == -1+-4 {
+			m[s] = len(yi)
+			yi = append(yi, i)
+		}
+	}
+	for i, s := range x {
+		if j, ok := m[s]; ok && j >= 0 {
+			xi = append(xi, i)
+			inv = append(inv, j)
 		}
 	}
 
-	return data, err
-}
-
-func writeTempFile(prefix string, data []byte) (string, error) {
-	file, err := ioutil.TempFile("", prefix)
-	if err != nil {
-		return "", err
+	// Apply Algorithm A from Szymanski's paper.
+	// In those terms, A = J = inv and B = [0, n).
+	// We add sentinel pairs {0,0}, and {len(x),len(y)}
+	// to the returned sequence, to help the processing loop.
+	J := inv
+	n := len(xi)
+	T := make([]int, n)
+	L := make([]int, n)
+	for i := range T {
+		T[i] = n + 1
+	}
+	for i := 0; i < n; i++ {
+		k := sort.Search(n, func(k int) bool {
+			return T[k] >= J[i]
+		})
+		T[k] = J[i]
+		L[i] = k + 1
 	}
-	_, err = file.Write(data)
-	if err1 := file.Close(); err == nil {
-		err = err1
+	k := 0
+	for _, v := range L {
+		if k < v {
+			k = v
+		}
 	}
-	if err != nil {
-		os.Remove(file.Name())
-		return "", err
+	seq := make([]pair, 2+k)
+	seq[1+k] = pair{len(x), len(y)} // sentinel at end
+	lastj := n
+	for i := n - 1; i >= 0; i-- {
+		if L[i] == k && J[i] < lastj {
+			seq[k] = pair{xi[i], yi[J[i]]}
+			k--
+		}
 	}
-	return file.Name(), nil
+	seq[0] = pair{0, 0} // sentinel at start
+	return seq
 }
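Note on the rewritten diff package above: it replaces shelling out to an external diff binary with a pure-Go anchored diff. Because the package lives under an internal/ path it is only importable from inside the gofmt fork, so the sketch below is illustrative only; it assumes a reachable copy of the same Diff(oldName, old, newName, new) API, and the file names are placeholders:

package main

import (
	"fmt"

	// Hypothetical import: in practice this internal package is only
	// reachable from within github.com/golangci/gofmt itself.
	"github.com/golangci/gofmt/gofmt/internal/diff"
)

func main() {
	oldSrc := []byte("a\nb\nc\n")
	newSrc := []byte("a\nB\nc\n")

	// Identical inputs yield nil; otherwise a unified diff with the given names.
	out := diff.Diff("file.go.orig", oldSrc, "file.go", newSrc)
	fmt.Printf("%s", out)
	// Output begins with:
	//   diff file.go.orig file.go
	//   --- file.go.orig
	//   +++ file.go
	//   @@ -1,3 +1,3 @@
}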
diff --git a/vendor/github.com/golangci/gofmt/gofmt/internal/execabs/execabs.go b/vendor/github.com/golangci/gofmt/gofmt/internal/execabs/execabs.go
deleted file mode 100644
index 9a05d971dadb9e49f632ba7622f88371e1b3a27d..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/gofmt/gofmt/internal/execabs/execabs.go
+++ /dev/null
@@ -1,70 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package execabs is a drop-in replacement for os/exec
-// that requires PATH lookups to find absolute paths.
-// That is, execabs.Command("cmd") runs the same PATH lookup
-// as exec.Command("cmd"), but if the result is a path
-// which is relative, the Run and Start methods will report
-// an error instead of running the executable.
-package execabs
-
-import (
-	"context"
-	"fmt"
-	"os/exec"
-	"path/filepath"
-	"reflect"
-	"unsafe"
-)
-
-var ErrNotFound = exec.ErrNotFound
-
-type (
-	Cmd       = exec.Cmd
-	Error     = exec.Error
-	ExitError = exec.ExitError
-)
-
-func relError(file, path string) error {
-	return fmt.Errorf("%s resolves to executable relative to current directory (.%c%s)", file, filepath.Separator, path)
-}
-
-func LookPath(file string) (string, error) {
-	path, err := exec.LookPath(file)
-	if err != nil {
-		return "", err
-	}
-	if filepath.Base(file) == file && !filepath.IsAbs(path) {
-		return "", relError(file, path)
-	}
-	return path, nil
-}
-
-func fixCmd(name string, cmd *exec.Cmd) {
-	if filepath.Base(name) == name && !filepath.IsAbs(cmd.Path) {
-		// exec.Command was called with a bare binary name and
-		// exec.LookPath returned a path which is not absolute.
-		// Set cmd.lookPathErr and clear cmd.Path so that it
-		// cannot be run.
-		lookPathErr := (*error)(unsafe.Pointer(reflect.ValueOf(cmd).Elem().FieldByName("lookPathErr").Addr().Pointer()))
-		if *lookPathErr == nil {
-			*lookPathErr = relError(name, cmd.Path)
-		}
-		cmd.Path = ""
-	}
-}
-
-func CommandContext(ctx context.Context, name string, arg ...string) *exec.Cmd {
-	cmd := exec.CommandContext(ctx, name, arg...)
-	fixCmd(name, cmd)
-	return cmd
-
-}
-
-func Command(name string, arg ...string) *exec.Cmd {
-	cmd := exec.Command(name, arg...)
-	fixCmd(name, cmd)
-	return cmd
-}
diff --git a/vendor/github.com/golangci/gofmt/gofmt/readme.md b/vendor/github.com/golangci/gofmt/gofmt/readme.md
index 36a716d81979f8a45e5e877c43e343cc9328350b..c2faaab82d2b684e713e0e63cfb45dfceb161f1f 100644
--- a/vendor/github.com/golangci/gofmt/gofmt/readme.md
+++ b/vendor/github.com/golangci/gofmt/gofmt/readme.md
@@ -1,3 +1,5 @@
 # Hard Fork of gofmt
 
 2022-08-31: Sync with go1.18.5
+2023-10-04: Sync with go1.19.13
+2023-10-04: Sync with go1.20.8
diff --git a/vendor/github.com/golangci/gofmt/gofmt/simplify.go b/vendor/github.com/golangci/gofmt/gofmt/simplify.go
index 2c75495a6951062c429dc99ea5e50a7bb228399c..3b34d562bab5cabccd63c4f8ba52a19658bfd841 100644
--- a/vendor/github.com/golangci/gofmt/gofmt/simplify.go
+++ b/vendor/github.com/golangci/gofmt/gofmt/simplify.go
@@ -53,22 +53,26 @@ func (s simplifier) Visit(node ast.Node) ast.Visitor {
 		// can be simplified to: s[a:]
 		// if s is "simple enough" (for now we only accept identifiers)
 		//
-		// Note: This may not be correct because len may have been redeclared in another
-		//       file belonging to the same package. However, this is extremely unlikely
-		//       and so far (April 2016, after years of supporting this rewrite feature)
+		// Note: This may not be correct because len may have been redeclared in
+		//       the same package. However, this is extremely unlikely and so far
+		//       (April 2022, after years of supporting this rewrite feature)
 		//       has never come up, so let's keep it working as is (see also #15153).
+		//
+		// Also note that this code used to use go/ast's object tracking,
+		// which was removed in exchange for go/parser.Mode.SkipObjectResolution.
+		// False positives are extremely unlikely as described above,
+		// and go/ast's object tracking is incomplete in any case.
 		if n.Max != nil {
 			// - 3-index slices always require the 2nd and 3rd index
 			break
 		}
-		if s, _ := n.X.(*ast.Ident); s != nil && s.Obj != nil {
-			// the array/slice object is a single, resolved identifier
+		if s, _ := n.X.(*ast.Ident); s != nil {
+			// the array/slice object is a single identifier
 			if call, _ := n.High.(*ast.CallExpr); call != nil && len(call.Args) == 1 && !call.Ellipsis.IsValid() {
 				// the high expression is a function call with a single argument
-				if fun, _ := call.Fun.(*ast.Ident); fun != nil && fun.Name == "len" && fun.Obj == nil {
-					// the function called is "len" and it is not locally defined; and
-					// because we don't have dot imports, it must be the predefined len()
-					if arg, _ := call.Args[0].(*ast.Ident); arg != nil && arg.Obj == s.Obj {
+				if fun, _ := call.Fun.(*ast.Ident); fun != nil && fun.Name == "len" {
+					// the function called is "len"
+					if arg, _ := call.Args[0].(*ast.Ident); arg != nil && arg.Name == s.Name {
 						// the len argument is the array/slice object
 						n.High = nil
 					}
diff --git a/vendor/github.com/golangci/gofmt/goimports/goimports.go b/vendor/github.com/golangci/gofmt/goimports/goimports.go
index 1fa3328f8a3b984a1bbf606b1b498d9bdf08df34..20d92e119c70d4e8d4c38619e7210ef2536dc0ed 100644
--- a/vendor/github.com/golangci/gofmt/goimports/goimports.go
+++ b/vendor/github.com/golangci/gofmt/goimports/goimports.go
@@ -14,7 +14,7 @@ import (
 	"runtime"
 )
 
-// Extracted from golang.org/x/tools@v0.1.12/cmd/goimports/goimports.go
+// Extracted from golang.org/x/tools@v0.13.0/cmd/goimports/goimports.go
 
 func writeTempFile(dir, prefix string, data []byte) (string, error) {
 	file, err := ioutil.TempFile(dir, prefix)
diff --git a/vendor/github.com/golangci/gofmt/goimports/golangci.go b/vendor/github.com/golangci/gofmt/goimports/golangci.go
index 7edc37937cb7bf03d0caf965e0c54b1bbd975e17..6ff286ae0613b8163e3bf12000ab514f691e6289 100644
--- a/vendor/github.com/golangci/gofmt/goimports/golangci.go
+++ b/vendor/github.com/golangci/gofmt/goimports/golangci.go
@@ -3,7 +3,7 @@ package goimports
 import (
 	"bytes"
 	"fmt"
-	"io/ioutil"
+	"os"
 
 	"golang.org/x/tools/imports"
 )
@@ -11,7 +11,7 @@ import (
 // Run runs goimports.
 // The local prefixes (comma separated) must be defined through the global variable imports.LocalPrefix.
 func Run(filename string) ([]byte, error) {
-	src, err := ioutil.ReadFile(filename)
+	src, err := os.ReadFile(filename)
 	if err != nil {
 		return nil, err
 	}
diff --git a/vendor/github.com/golangci/gofmt/goimports/readme.md b/vendor/github.com/golangci/gofmt/goimports/readme.md
index 6c793eb7d13fa5f93f1d9019468cf3b301ddb0f4..e57ed550b1e964f9261253d78fb09ffb5b426085 100644
--- a/vendor/github.com/golangci/gofmt/goimports/readme.md
+++ b/vendor/github.com/golangci/gofmt/goimports/readme.md
@@ -1,3 +1,4 @@
 # Hard Fork of goimports
 
 2022-08-31: Sync with golang.org/x/tools v0.1.12
+2023-10-04: Sync with golang.org/x/tools v0.13.0
diff --git a/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go b/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go
index 9d1daa81df92f61a66899023e954a8cc441ea8e2..413e071d65f29f6473c92928ed1a158272d55582 100644
--- a/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go
+++ b/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go
@@ -13,33 +13,69 @@ var (
 	goVersion = "unknown"
 
 	// Populated by goreleaser during build
-	version = "master"
+	version = "unknown"
 	commit  = "?"
 	date    = ""
 )
 
 func main() {
-	if buildInfo, available := debug.ReadBuildInfo(); available {
-		goVersion = buildInfo.GoVersion
+	info := createBuildInfo()
 
-		if date == "" {
-			version = buildInfo.Main.Version
-			commit = fmt.Sprintf("(unknown, mod sum: %q)", buildInfo.Main.Sum)
-			date = "(unknown)"
-		}
+	if err := commands.Execute(info); err != nil {
+		_, _ = fmt.Fprintf(os.Stderr, "Failed executing command with error: %v\n", err)
+		os.Exit(exitcodes.Failure)
 	}
+}
 
+func createBuildInfo() commands.BuildInfo {
 	info := commands.BuildInfo{
-		GoVersion: goVersion,
-		Version:   version,
 		Commit:    commit,
+		Version:   version,
+		GoVersion: goVersion,
 		Date:      date,
 	}
 
-	e := commands.NewExecutor(info)
+	buildInfo, available := debug.ReadBuildInfo()
+	if !available {
+		return info
+	}
 
-	if err := e.Execute(); err != nil {
-		fmt.Fprintf(os.Stderr, "failed executing command with error %v\n", err)
-		os.Exit(exitcodes.Failure)
+	info.GoVersion = buildInfo.GoVersion
+
+	if date != "" {
+		return info
+	}
+
+	info.Version = buildInfo.Main.Version
+
+	var revision string
+	var modified string
+	for _, setting := range buildInfo.Settings {
+		// The `vcs.xxx` information is only available with `go build`.
+		// This information is not available with `go install` or `go run`.
+		switch setting.Key {
+		case "vcs.time":
+			info.Date = setting.Value
+		case "vcs.revision":
+			revision = setting.Value
+		case "vcs.modified":
+			modified = setting.Value
+		}
 	}
+
+	if revision == "" {
+		revision = "unknown"
+	}
+
+	if modified == "" {
+		modified = "?"
+	}
+
+	if info.Date == "" {
+		info.Date = "(unknown)"
+	}
+
+	info.Commit = fmt.Sprintf("(%s, modified: %s, mod sum: %q)", revision, modified, buildInfo.Main.Sum)
+
+	return info
 }
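Note on createBuildInfo above: when goreleaser did not stamp a date, the command falls back to runtime/debug build metadata, and the vcs.* settings are only present for binaries produced by `go build` inside a VCS checkout. A small stand-alone sketch of reading the same fields (output labels are illustrative only):

package main

import (
	"fmt"
	"runtime/debug"
)

func main() {
	info, ok := debug.ReadBuildInfo()
	if !ok {
		fmt.Println("no build info (binary not built from a module)")
		return
	}

	fmt.Println("go:", info.GoVersion)
	fmt.Println("module version:", info.Main.Version)

	// vcs.* settings are stamped by `go build` in a checkout,
	// not by `go install module@version` or `go run`.
	for _, s := range info.Settings {
		switch s.Key {
		case "vcs.revision", "vcs.time", "vcs.modified":
			fmt.Printf("%s = %s\n", s.Key, s.Value)
		}
	}
}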
diff --git a/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/plugins.go b/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/plugins.go
new file mode 100644
index 0000000000000000000000000000000000000000..541ff76242711cdd1352021204d6851137f30e85
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/plugins.go
@@ -0,0 +1,3 @@
+package main
+
+// This file is used to declare module plugins.
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go
index 6fdaebaede8fd6ccd5aa47d4d0de1f8ddab267ca..cc6c0eacd557bfb7f0e231277c88dbf5a8c1786f 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go
@@ -12,7 +12,13 @@ import (
 	"github.com/golangci/golangci-lint/pkg/logutils"
 )
 
-func (e *Executor) initCache() {
+type cacheCommand struct {
+	cmd *cobra.Command
+}
+
+func newCacheCommand() *cacheCommand {
+	c := &cacheCommand{}
+
 	cacheCmd := &cobra.Command{
 		Use:   "cache",
 		Short: "Cache control and information",
@@ -21,28 +27,32 @@ func (e *Executor) initCache() {
 			return cmd.Help()
 		},
 	}
-	e.rootCmd.AddCommand(cacheCmd)
 
-	cacheCmd.AddCommand(&cobra.Command{
-		Use:               "clean",
-		Short:             "Clean cache",
-		Args:              cobra.NoArgs,
-		ValidArgsFunction: cobra.NoFileCompletions,
-		RunE:              e.executeCleanCache,
-	})
-	cacheCmd.AddCommand(&cobra.Command{
-		Use:               "status",
-		Short:             "Show cache status",
-		Args:              cobra.NoArgs,
-		ValidArgsFunction: cobra.NoFileCompletions,
-		Run:               e.executeCacheStatus,
-	})
+	cacheCmd.AddCommand(
+		&cobra.Command{
+			Use:               "clean",
+			Short:             "Clean cache",
+			Args:              cobra.NoArgs,
+			ValidArgsFunction: cobra.NoFileCompletions,
+			RunE:              c.executeClean,
+		},
+		&cobra.Command{
+			Use:               "status",
+			Short:             "Show cache status",
+			Args:              cobra.NoArgs,
+			ValidArgsFunction: cobra.NoFileCompletions,
+			Run:               c.executeStatus,
+		},
+	)
 
-	// TODO: add trim command?
+	c.cmd = cacheCmd
+
+	return c
 }
 
-func (e *Executor) executeCleanCache(_ *cobra.Command, _ []string) error {
+func (*cacheCommand) executeClean(_ *cobra.Command, _ []string) error {
 	cacheDir := cache.DefaultDir()
+
 	if err := os.RemoveAll(cacheDir); err != nil {
 		return fmt.Errorf("failed to remove dir %s: %w", cacheDir, err)
 	}
@@ -50,13 +60,13 @@ func (e *Executor) executeCleanCache(_ *cobra.Command, _ []string) error {
 	return nil
 }
 
-func (e *Executor) executeCacheStatus(_ *cobra.Command, _ []string) {
+func (*cacheCommand) executeStatus(_ *cobra.Command, _ []string) {
 	cacheDir := cache.DefaultDir()
-	fmt.Fprintf(logutils.StdOut, "Dir: %s\n", cacheDir)
+	_, _ = fmt.Fprintf(logutils.StdOut, "Dir: %s\n", cacheDir)
 
 	cacheSizeBytes, err := dirSizeBytes(cacheDir)
 	if err == nil {
-		fmt.Fprintf(logutils.StdOut, "Size: %s\n", fsutils.PrettifyBytesCount(cacheSizeBytes))
+		_, _ = fmt.Fprintf(logutils.StdOut, "Size: %s\n", fsutils.PrettifyBytesCount(cacheSizeBytes))
 	}
 }
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go
index a16ef63106c1895723e6b22491dae0a4ed8429eb..935ec5e8643c9d706dd88826b7f441815659d21c 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go
@@ -4,58 +4,119 @@ import (
 	"fmt"
 	"os"
 
+	"github.com/fatih/color"
 	"github.com/spf13/cobra"
 	"github.com/spf13/viper"
 
+	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/exitcodes"
 	"github.com/golangci/golangci-lint/pkg/fsutils"
+	"github.com/golangci/golangci-lint/pkg/logutils"
 )
 
-func (e *Executor) initConfig() {
-	cmd := &cobra.Command{
+type configCommand struct {
+	viper *viper.Viper
+	cmd   *cobra.Command
+
+	opts       config.LoaderOptions
+	verifyOpts verifyOptions
+
+	buildInfo BuildInfo
+
+	log logutils.Log
+}
+
+func newConfigCommand(log logutils.Log, info BuildInfo) *configCommand {
+	c := &configCommand{
+		viper:     viper.New(),
+		log:       log,
+		buildInfo: info,
+	}
+
+	configCmd := &cobra.Command{
 		Use:   "config",
-		Short: "Config",
+		Short: "Config file information",
 		Args:  cobra.NoArgs,
 		RunE: func(cmd *cobra.Command, _ []string) error {
 			return cmd.Help()
 		},
+		PersistentPreRunE: c.preRunE,
 	}
-	e.rootCmd.AddCommand(cmd)
 
-	pathCmd := &cobra.Command{
-		Use:               "path",
-		Short:             "Print used config path",
+	verifyCommand := &cobra.Command{
+		Use:               "verify",
+		Short:             "Verify configuration against JSON schema",
 		Args:              cobra.NoArgs,
 		ValidArgsFunction: cobra.NoFileCompletions,
-		Run:               e.executePathCmd,
+		RunE:              c.executeVerify,
+		SilenceUsage:      true,
+		SilenceErrors:     true,
 	}
-	e.initRunConfiguration(pathCmd) // allow --config
-	cmd.AddCommand(pathCmd)
+
+	configCmd.AddCommand(
+		&cobra.Command{
+			Use:               "path",
+			Short:             "Print used config path",
+			Args:              cobra.NoArgs,
+			ValidArgsFunction: cobra.NoFileCompletions,
+			Run:               c.executePath,
+		},
+		verifyCommand,
+	)
+
+	flagSet := configCmd.PersistentFlags()
+	flagSet.SortFlags = false // sort them as they are defined here
+
+	setupConfigFileFlagSet(flagSet, &c.opts)
+
+	// ex: --schema jsonschema/golangci.next.jsonschema.json
+	verifyFlagSet := verifyCommand.Flags()
+	verifyFlagSet.StringVar(&c.verifyOpts.schemaURL, "schema", "", color.GreenString("JSON schema URL"))
+	_ = verifyFlagSet.MarkHidden("schema")
+
+	c.cmd = configCmd
+
+	return c
 }
 
-// getUsedConfig returns the resolved path to the golangci config file, or the empty string
-// if no configuration could be found.
-func (e *Executor) getUsedConfig() string {
-	usedConfigFile := viper.ConfigFileUsed()
-	if usedConfigFile == "" {
-		return ""
-	}
+func (c *configCommand) preRunE(cmd *cobra.Command, args []string) error {
+	// The command doesn't depend on the real configuration.
+	// It only needs to know the path of the configuration file.
+	cfg := config.NewDefault()
 
-	prettyUsedConfigFile, err := fsutils.ShortestRelPath(usedConfigFile, "")
+	loader := config.NewLoader(c.log.Child(logutils.DebugKeyConfigReader), c.viper, cmd.Flags(), c.opts, cfg, args)
+
+	err := loader.Load(config.LoadOptions{})
 	if err != nil {
-		e.log.Warnf("Can't pretty print config file path: %s", err)
-		return usedConfigFile
+		return fmt.Errorf("can't load config: %w", err)
 	}
 
-	return prettyUsedConfigFile
+	return nil
 }
 
-func (e *Executor) executePathCmd(_ *cobra.Command, _ []string) {
-	usedConfigFile := e.getUsedConfig()
+func (c *configCommand) executePath(cmd *cobra.Command, _ []string) {
+	usedConfigFile := c.getUsedConfig()
 	if usedConfigFile == "" {
-		e.log.Warnf("No config file detected")
+		c.log.Warnf("No config file detected")
 		os.Exit(exitcodes.NoConfigFileDetected)
 	}
 
-	fmt.Println(usedConfigFile)
+	cmd.Println(usedConfigFile)
+}
+
+// getUsedConfig returns the resolved path to the golangci config file,
+// or the empty string if no configuration could be found.
+func (c *configCommand) getUsedConfig() string {
+	usedConfigFile := c.viper.ConfigFileUsed()
+	if usedConfigFile == "" {
+		return ""
+	}
+
+	prettyUsedConfigFile, err := fsutils.ShortestRelPath(usedConfigFile, "")
+	if err != nil {
+		c.log.Warnf("Can't pretty print config file path: %s", err)
+		return usedConfigFile
+	}
+
+	return prettyUsedConfigFile
 }
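Note on the config command above: it follows the new per-command pattern that replaces the shared Executor, where each subcommand owns a small struct holding its *cobra.Command plus whatever state it needs. A trimmed, hypothetical analogue of that wiring (pathCommand and the printed path are placeholders, not the real implementation):

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

// pathCommand is a toy analogue of the per-command structs used above:
// the command owns its own cobra.Command and state instead of sharing an Executor.
type pathCommand struct {
	cmd *cobra.Command
}

func newPathCommand() *pathCommand {
	c := &pathCommand{}
	c.cmd = &cobra.Command{
		Use:   "path",
		Short: "Print used config path",
		Args:  cobra.NoArgs,
		Run:   c.execute,
	}
	return c
}

func (c *pathCommand) execute(cmd *cobra.Command, _ []string) {
	cmd.Println(".golangci.yml") // placeholder; the real command resolves this via viper
}

func main() {
	root := &cobra.Command{Use: "demo"}
	root.AddCommand(newPathCommand().cmd)

	root.SetArgs([]string{"path"})
	if err := root.Execute(); err != nil {
		fmt.Println(err)
	}
}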
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/config_verify.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/config_verify.go
new file mode 100644
index 0000000000000000000000000000000000000000..a44050b593b3a7e817d16e034cf2a1309f86c87a
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/config_verify.go
@@ -0,0 +1,180 @@
+package commands
+
+import (
+	"errors"
+	"fmt"
+	"net/http"
+	"os"
+	"path/filepath"
+	"strings"
+	"time"
+
+	hcversion "github.com/hashicorp/go-version"
+	"github.com/pelletier/go-toml/v2"
+	"github.com/santhosh-tekuri/jsonschema/v5"
+	"github.com/santhosh-tekuri/jsonschema/v5/httploader"
+	"github.com/spf13/cobra"
+	"github.com/spf13/pflag"
+	"gopkg.in/yaml.v3"
+
+	"github.com/golangci/golangci-lint/pkg/exitcodes"
+)
+
+type verifyOptions struct {
+	schemaURL string // For debugging purpose only (Flag only).
+}
+
+func (c *configCommand) executeVerify(cmd *cobra.Command, _ []string) error {
+	usedConfigFile := c.getUsedConfig()
+	if usedConfigFile == "" {
+		c.log.Warnf("No config file detected")
+		os.Exit(exitcodes.NoConfigFileDetected)
+	}
+
+	schemaURL, err := createSchemaURL(cmd.Flags(), c.buildInfo)
+	if err != nil {
+		return fmt.Errorf("get JSON schema: %w", err)
+	}
+
+	err = validateConfiguration(schemaURL, usedConfigFile)
+	if err != nil {
+		var v *jsonschema.ValidationError
+		if !errors.As(err, &v) {
+			return fmt.Errorf("[%s] validate: %w", usedConfigFile, err)
+		}
+
+		detail := v.DetailedOutput()
+
+		printValidationDetail(cmd, &detail)
+
+		return errors.New("the configuration contains invalid elements")
+	}
+
+	return nil
+}
+
+func createSchemaURL(flags *pflag.FlagSet, buildInfo BuildInfo) (string, error) {
+	schemaURL, err := flags.GetString("schema")
+	if err != nil {
+		return "", fmt.Errorf("get schema flag: %w", err)
+	}
+
+	if schemaURL != "" {
+		return schemaURL, nil
+	}
+
+	switch {
+	case buildInfo.Version != "" && buildInfo.Version != "(devel)":
+		version, err := hcversion.NewVersion(buildInfo.Version)
+		if err != nil {
+			return "", fmt.Errorf("parse version: %w", err)
+		}
+
+		schemaURL = fmt.Sprintf("https://golangci-lint.run/jsonschema/golangci.v%d.%d.jsonschema.json",
+			version.Segments()[0], version.Segments()[1])
+
+	case buildInfo.Commit != "" && buildInfo.Commit != "?":
+		if buildInfo.Commit == "unknown" {
+			return "", errors.New("unknown commit information")
+		}
+
+		commit := buildInfo.Commit
+
+		if strings.HasPrefix(commit, "(") {
+			c, _, ok := strings.Cut(strings.TrimPrefix(commit, "("), ",")
+			if !ok {
+				return "", errors.New("commit information not found")
+			}
+
+			commit = c
+		}
+
+		schemaURL = fmt.Sprintf("https://raw.githubusercontent.com/golangci/golangci-lint/%s/jsonschema/golangci.next.jsonschema.json",
+			commit)
+
+	default:
+		return "", errors.New("version not found")
+	}
+
+	return schemaURL, nil
+}
+
+func validateConfiguration(schemaPath, targetFile string) error {
+	httploader.Client = &http.Client{Timeout: 2 * time.Second}
+
+	compiler := jsonschema.NewCompiler()
+	compiler.Draft = jsonschema.Draft7
+
+	schema, err := compiler.Compile(schemaPath)
+	if err != nil {
+		return fmt.Errorf("compile schema: %w", err)
+	}
+
+	var m any
+
+	switch strings.ToLower(filepath.Ext(targetFile)) {
+	case ".yaml", ".yml", ".json":
+		m, err = decodeYamlFile(targetFile)
+		if err != nil {
+			return err
+		}
+
+	case ".toml":
+		m, err = decodeTomlFile(targetFile)
+		if err != nil {
+			return err
+		}
+
+	default:
+		// unsupported
+		return errors.New("unsupported configuration format")
+	}
+
+	return schema.Validate(m)
+}
+
+func printValidationDetail(cmd *cobra.Command, detail *jsonschema.Detailed) {
+	if detail.Error != "" {
+		cmd.PrintErrf("jsonschema: %q does not validate with %q: %s\n",
+			strings.ReplaceAll(strings.TrimPrefix(detail.InstanceLocation, "/"), "/", "."), detail.KeywordLocation, detail.Error)
+	}
+
+	for _, d := range detail.Errors {
+		d := d
+		printValidationDetail(cmd, &d)
+	}
+}
+
+func decodeYamlFile(filename string) (any, error) {
+	file, err := os.Open(filename)
+	if err != nil {
+		return nil, fmt.Errorf("[%s] file open: %w", filename, err)
+	}
+
+	defer func() { _ = file.Close() }()
+
+	var m any
+	err = yaml.NewDecoder(file).Decode(&m)
+	if err != nil {
+		return nil, fmt.Errorf("[%s] YAML decode: %w", filename, err)
+	}
+
+	return m, nil
+}
+
+func decodeTomlFile(filename string) (any, error) {
+	file, err := os.Open(filename)
+	if err != nil {
+		return nil, fmt.Errorf("[%s] file open: %w", filename, err)
+	}
+
+	defer func() { _ = file.Close() }()
+
+	var m any
+	err = toml.NewDecoder(file).Decode(&m)
+	if err != nil {
+		return nil, fmt.Errorf("[%s] TOML decode: %w", filename, err)
+	}
+
+	return m, nil
+}
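Note on config verify above: it compiles a JSON schema and validates the decoded configuration against it. A condensed sketch of the same flow using the libraries referenced in the vendored code; the inline schema and YAML document are toy stand-ins for the published golangci-lint schema and a real config file, and error handling is reduced to panics for brevity:

package main

import (
	"fmt"
	"strings"

	"github.com/santhosh-tekuri/jsonschema/v5"
	"gopkg.in/yaml.v3"
)

func main() {
	// Toy schema standing in for the published golangci-lint schema;
	// the real command compiles it from a versioned URL instead.
	const schemaJSON = `{
		"type": "object",
		"properties": {
			"run": {
				"type": "object",
				"properties": {"timeout": {"type": "string"}}
			}
		}
	}`

	compiler := jsonschema.NewCompiler()
	compiler.Draft = jsonschema.Draft7
	if err := compiler.AddResource("golangci.schema.json", strings.NewReader(schemaJSON)); err != nil {
		panic(err)
	}
	schema, err := compiler.Compile("golangci.schema.json")
	if err != nil {
		panic(err)
	}

	// Decode the YAML config into generic values, as decodeYamlFile does above.
	const configYAML = "run:\n  timeout: 5m\n"
	var doc any
	if err := yaml.NewDecoder(strings.NewReader(configYAML)).Decode(&doc); err != nil {
		panic(err)
	}

	if err := schema.Validate(doc); err != nil {
		fmt.Println("invalid configuration:", err)
		return
	}
	fmt.Println("configuration is valid")
}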
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/custom.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/custom.go
new file mode 100644
index 0000000000000000000000000000000000000000..1bc9f9014615d55e2f652a67d33158ffdd49318a
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/custom.go
@@ -0,0 +1,79 @@
+package commands
+
+import (
+	"fmt"
+	"log"
+	"os"
+
+	"github.com/spf13/cobra"
+
+	"github.com/golangci/golangci-lint/pkg/commands/internal"
+	"github.com/golangci/golangci-lint/pkg/logutils"
+)
+
+const envKeepTempFiles = "CUSTOM_GCL_KEEP_TEMP_FILES"
+
+type customCommand struct {
+	cmd *cobra.Command
+
+	cfg *internal.Configuration
+
+	log logutils.Log
+}
+
+func newCustomCommand(logger logutils.Log) *customCommand {
+	c := &customCommand{log: logger}
+
+	customCmd := &cobra.Command{
+		Use:          "custom",
+		Short:        "Build a version of golangci-lint with custom linters",
+		Args:         cobra.NoArgs,
+		PreRunE:      c.preRunE,
+		RunE:         c.runE,
+		SilenceUsage: true,
+	}
+
+	c.cmd = customCmd
+
+	return c
+}
+
+func (c *customCommand) preRunE(_ *cobra.Command, _ []string) error {
+	cfg, err := internal.LoadConfiguration()
+	if err != nil {
+		return err
+	}
+
+	err = cfg.Validate()
+	if err != nil {
+		return err
+	}
+
+	c.cfg = cfg
+
+	return nil
+}
+
+func (c *customCommand) runE(cmd *cobra.Command, _ []string) error {
+	tmp, err := os.MkdirTemp(os.TempDir(), "custom-gcl")
+	if err != nil {
+		return fmt.Errorf("create temporary directory: %w", err)
+	}
+
+	defer func() {
+		if os.Getenv(envKeepTempFiles) != "" {
+			log.Printf("WARN: The env var %s has been detected: the temporary directory is preserved: %s", envKeepTempFiles, tmp)
+
+			return
+		}
+
+		_ = os.RemoveAll(tmp)
+	}()
+
+	err = internal.NewBuilder(c.log, c.cfg, tmp).Build(cmd.Context())
+	if err != nil {
+		return fmt.Errorf("build process: %w", err)
+	}
+
+	return nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go
deleted file mode 100644
index 61e221cb8bf04d15a635a8a4e14c77f6bbf1df1a..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go
+++ /dev/null
@@ -1,253 +0,0 @@
-package commands
-
-import (
-	"bytes"
-	"context"
-	"crypto/sha256"
-	"fmt"
-	"io"
-	"os"
-	"path/filepath"
-	"strings"
-	"time"
-
-	"github.com/fatih/color"
-	"github.com/gofrs/flock"
-	"github.com/spf13/cobra"
-	"github.com/spf13/pflag"
-	"gopkg.in/yaml.v3"
-
-	"github.com/golangci/golangci-lint/internal/cache"
-	"github.com/golangci/golangci-lint/internal/pkgcache"
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/fsutils"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load"
-	"github.com/golangci/golangci-lint/pkg/goutil"
-	"github.com/golangci/golangci-lint/pkg/lint"
-	"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
-	"github.com/golangci/golangci-lint/pkg/logutils"
-	"github.com/golangci/golangci-lint/pkg/report"
-	"github.com/golangci/golangci-lint/pkg/timeutils"
-)
-
-type BuildInfo struct {
-	GoVersion string `json:"goVersion"`
-	Version   string `json:"version"`
-	Commit    string `json:"commit"`
-	Date      string `json:"date"`
-}
-
-type Executor struct {
-	rootCmd    *cobra.Command
-	runCmd     *cobra.Command
-	lintersCmd *cobra.Command
-
-	exitCode  int
-	buildInfo BuildInfo
-
-	cfg               *config.Config // cfg is the unmarshaled data from the golangci config file.
-	log               logutils.Log
-	reportData        report.Data
-	DBManager         *lintersdb.Manager
-	EnabledLintersSet *lintersdb.EnabledSet
-	contextLoader     *lint.ContextLoader
-	goenv             *goutil.Env
-	fileCache         *fsutils.FileCache
-	lineCache         *fsutils.LineCache
-	pkgCache          *pkgcache.Cache
-	debugf            logutils.DebugFunc
-	sw                *timeutils.Stopwatch
-
-	loadGuard *load.Guard
-	flock     *flock.Flock
-}
-
-// NewExecutor creates and initializes a new command executor.
-func NewExecutor(buildInfo BuildInfo) *Executor {
-	startedAt := time.Now()
-	e := &Executor{
-		cfg:       config.NewDefault(),
-		buildInfo: buildInfo,
-		DBManager: lintersdb.NewManager(nil, nil),
-		debugf:    logutils.Debug(logutils.DebugKeyExec),
-	}
-
-	e.debugf("Starting execution...")
-	e.log = report.NewLogWrapper(logutils.NewStderrLog(logutils.DebugKeyEmpty), &e.reportData)
-
-	// to setup log level early we need to parse config from command line extra time to
-	// find `-v` option
-	commandLineCfg, err := e.getConfigForCommandLine()
-	if err != nil && err != pflag.ErrHelp {
-		e.log.Fatalf("Can't get config for command line: %s", err)
-	}
-	if commandLineCfg != nil {
-		logutils.SetupVerboseLog(e.log, commandLineCfg.Run.IsVerbose)
-
-		switch commandLineCfg.Output.Color {
-		case "always":
-			color.NoColor = false
-		case "never":
-			color.NoColor = true
-		case "auto":
-			// nothing
-		default:
-			e.log.Fatalf("invalid value %q for --color; must be 'always', 'auto', or 'never'", commandLineCfg.Output.Color)
-		}
-	}
-
-	// init of commands must be done before config file reading because
-	// init sets config with the default values of flags
-	e.initRoot()
-	e.initRun()
-	e.initHelp()
-	e.initLinters()
-	e.initConfig()
-	e.initVersion()
-	e.initCache()
-
-	// init e.cfg by values from config: flags parse will see these values
-	// like the default ones. It will overwrite them only if the same option
-	// is found in command-line: it's ok, command-line has higher priority.
-
-	r := config.NewFileReader(e.cfg, commandLineCfg, e.log.Child(logutils.DebugKeyConfigReader))
-	if err = r.Read(); err != nil {
-		e.log.Fatalf("Can't read config: %s", err)
-	}
-
-	if (commandLineCfg == nil || commandLineCfg.Run.Go == "") && e.cfg != nil && e.cfg.Run.Go == "" {
-		e.cfg.Run.Go = config.DetectGoVersion()
-	}
-
-	// recreate after getting config
-	e.DBManager = lintersdb.NewManager(e.cfg, e.log)
-
-	// Slice options must be explicitly set for proper merging of config and command-line options.
-	fixSlicesFlags(e.runCmd.Flags())
-	fixSlicesFlags(e.lintersCmd.Flags())
-
-	e.EnabledLintersSet = lintersdb.NewEnabledSet(e.DBManager,
-		lintersdb.NewValidator(e.DBManager), e.log.Child(logutils.DebugKeyLintersDB), e.cfg)
-	e.goenv = goutil.NewEnv(e.log.Child(logutils.DebugKeyGoEnv))
-	e.fileCache = fsutils.NewFileCache()
-	e.lineCache = fsutils.NewLineCache(e.fileCache)
-
-	e.sw = timeutils.NewStopwatch("pkgcache", e.log.Child(logutils.DebugKeyStopwatch))
-	e.pkgCache, err = pkgcache.NewCache(e.sw, e.log.Child(logutils.DebugKeyPkgCache))
-	if err != nil {
-		e.log.Fatalf("Failed to build packages cache: %s", err)
-	}
-	e.loadGuard = load.NewGuard()
-	e.contextLoader = lint.NewContextLoader(e.cfg, e.log.Child(logutils.DebugKeyLoader), e.goenv,
-		e.lineCache, e.fileCache, e.pkgCache, e.loadGuard)
-	if err = e.initHashSalt(buildInfo.Version); err != nil {
-		e.log.Fatalf("Failed to init hash salt: %s", err)
-	}
-	e.debugf("Initialized executor in %s", time.Since(startedAt))
-	return e
-}
-
-func (e *Executor) Execute() error {
-	return e.rootCmd.Execute()
-}
-
-func (e *Executor) initHashSalt(version string) error {
-	binSalt, err := computeBinarySalt(version)
-	if err != nil {
-		return fmt.Errorf("failed to calculate binary salt: %w", err)
-	}
-
-	configSalt, err := computeConfigSalt(e.cfg)
-	if err != nil {
-		return fmt.Errorf("failed to calculate config salt: %w", err)
-	}
-
-	b := bytes.NewBuffer(binSalt)
-	b.Write(configSalt)
-	cache.SetSalt(b.Bytes())
-	return nil
-}
-
-func computeBinarySalt(version string) ([]byte, error) {
-	if version != "" && version != "(devel)" {
-		return []byte(version), nil
-	}
-
-	if logutils.HaveDebugTag(logutils.DebugKeyBinSalt) {
-		return []byte("debug"), nil
-	}
-
-	p, err := os.Executable()
-	if err != nil {
-		return nil, err
-	}
-	f, err := os.Open(p)
-	if err != nil {
-		return nil, err
-	}
-	defer f.Close()
-	h := sha256.New()
-	if _, err := io.Copy(h, f); err != nil {
-		return nil, err
-	}
-	return h.Sum(nil), nil
-}
-
-func computeConfigSalt(cfg *config.Config) ([]byte, error) {
-	// We don't hash all config fields to reduce meaningless cache
-	// invalidations. At least, it has a huge impact on tests speed.
-
-	lintersSettingsBytes, err := yaml.Marshal(cfg.LintersSettings)
-	if err != nil {
-		return nil, fmt.Errorf("failed to json marshal config linter settings: %w", err)
-	}
-
-	configData := bytes.NewBufferString("linters-settings=")
-	configData.Write(lintersSettingsBytes)
-	configData.WriteString("\nbuild-tags=%s" + strings.Join(cfg.Run.BuildTags, ","))
-
-	h := sha256.New()
-	if _, err := h.Write(configData.Bytes()); err != nil {
-		return nil, err
-	}
-	return h.Sum(nil), nil
-}
-
-func (e *Executor) acquireFileLock() bool {
-	if e.cfg.Run.AllowParallelRunners {
-		e.debugf("Parallel runners are allowed, no locking")
-		return true
-	}
-
-	lockFile := filepath.Join(os.TempDir(), "golangci-lint.lock")
-	e.debugf("Locking on file %s...", lockFile)
-	f := flock.New(lockFile)
-	const retryDelay = time.Second
-
-	ctx := context.Background()
-	if !e.cfg.Run.AllowSerialRunners {
-		const totalTimeout = 5 * time.Second
-		var cancel context.CancelFunc
-		ctx, cancel = context.WithTimeout(ctx, totalTimeout)
-		defer cancel()
-	}
-	if ok, _ := f.TryLockContext(ctx, retryDelay); !ok {
-		return false
-	}
-
-	e.flock = f
-	return true
-}
-
-func (e *Executor) releaseFileLock() {
-	if e.cfg.Run.AllowParallelRunners {
-		return
-	}
-
-	if err := e.flock.Unlock(); err != nil {
-		e.debugf("Failed to unlock on file: %s", err)
-	}
-	if err := os.Remove(e.flock.Path()); err != nil {
-		e.debugf("Failed to remove lock file: %s", err)
-	}
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/flagsets.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/flagsets.go
new file mode 100644
index 0000000000000000000000000000000000000000..f70474cad4a68bdddc05734050fda061e6860d8d
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/flagsets.go
@@ -0,0 +1,143 @@
+package commands
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/fatih/color"
+	"github.com/spf13/pflag"
+	"github.com/spf13/viper"
+
+	"github.com/golangci/golangci-lint/pkg/commands/internal"
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/exitcodes"
+	"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
+	"github.com/golangci/golangci-lint/pkg/result/processors"
+)
+
+const defaultMaxIssuesPerLinter = 50
+
+func setupLintersFlagSet(v *viper.Viper, fs *pflag.FlagSet) {
+	internal.AddHackedStringSliceP(fs, "disable", "D", color.GreenString("Disable specific linter"))
+	internal.AddFlagAndBind(v, fs, fs.Bool, "disable-all", "linters.disable-all", false, color.GreenString("Disable all linters"))
+
+	internal.AddHackedStringSliceP(fs, "enable", "E", color.GreenString("Enable specific linter"))
+	internal.AddFlagAndBind(v, fs, fs.Bool, "enable-all", "linters.enable-all", false, color.GreenString("Enable all linters"))
+
+	internal.AddFlagAndBind(v, fs, fs.Bool, "fast", "linters.fast", false,
+		color.GreenString("Enable only fast linters from enabled linters set (first run won't be fast)"))
+
+	internal.AddHackedStringSliceP(fs, "presets", "p",
+		color.GreenString(fmt.Sprintf("Enable presets (%s) of linters.\n"+
+			"Run 'golangci-lint help linters' to see them.\n"+
+			"This option implies option --disable-all",
+			strings.Join(lintersdb.AllPresets(), "|"),
+		)))
+
+	fs.StringSlice("enable-only", nil,
+		color.GreenString("Override linters configuration section to only run the specific linter(s)")) // Flags only.
+}
+
+func setupRunFlagSet(v *viper.Viper, fs *pflag.FlagSet) {
+	internal.AddFlagAndBindP(v, fs, fs.IntP, "concurrency", "j", "run.concurrency", getDefaultConcurrency(),
+		color.GreenString("Number of CPUs to use (Default: number of logical CPUs)"))
+
+	internal.AddFlagAndBind(v, fs, fs.String, "modules-download-mode", "run.modules-download-mode", "",
+		color.GreenString("Modules download mode. If not empty, passed as -mod=<mode> to go tools"))
+	internal.AddFlagAndBind(v, fs, fs.Int, "issues-exit-code", "run.issues-exit-code", exitcodes.IssuesFound,
+		color.GreenString("Exit code when issues were found"))
+	internal.AddFlagAndBind(v, fs, fs.String, "go", "run.go", "", color.GreenString("Targeted Go version"))
+	internal.AddHackedStringSlice(fs, "build-tags", color.GreenString("Build tags"))
+
+	internal.AddFlagAndBind(v, fs, fs.Duration, "timeout", "run.timeout", defaultTimeout, color.GreenString("Timeout for total work"))
+
+	internal.AddFlagAndBind(v, fs, fs.Bool, "tests", "run.tests", true, color.GreenString("Analyze tests (*_test.go)"))
+
+	internal.AddDeprecatedHackedStringSlice(fs, "skip-files", color.GreenString("Regexps of files to skip"))
+	internal.AddDeprecatedHackedStringSlice(fs, "skip-dirs", color.GreenString("Regexps of directories to skip"))
+	internal.AddDeprecatedFlagAndBind(v, fs, fs.Bool, "skip-dirs-use-default", "run.skip-dirs-use-default", true,
+		getDefaultDirectoryExcludeHelp())
+
+	const allowParallelDesc = "Allow multiple parallel golangci-lint instances running.\n" +
+		"If false (default) - golangci-lint acquires file lock on start."
+	internal.AddFlagAndBind(v, fs, fs.Bool, "allow-parallel-runners", "run.allow-parallel-runners", false,
+		color.GreenString(allowParallelDesc))
+	const allowSerialDesc = "Allow multiple golangci-lint instances running, but serialize them around a lock.\n" +
+		"If false (default) - golangci-lint exits with an error if it fails to acquire file lock on start."
+	internal.AddFlagAndBind(v, fs, fs.Bool, "allow-serial-runners", "run.allow-serial-runners", false, color.GreenString(allowSerialDesc))
+}
+
+func setupOutputFlagSet(v *viper.Viper, fs *pflag.FlagSet) {
+	internal.AddFlagAndBind(v, fs, fs.String, "out-format", "output.formats", config.OutFormatColoredLineNumber,
+		color.GreenString(fmt.Sprintf("Formats of output: %s", strings.Join(config.AllOutputFormats, "|"))))
+	internal.AddFlagAndBind(v, fs, fs.Bool, "print-issued-lines", "output.print-issued-lines", true,
+		color.GreenString("Print lines of code with issue"))
+	internal.AddFlagAndBind(v, fs, fs.Bool, "print-linter-name", "output.print-linter-name", true,
+		color.GreenString("Print linter name in issue line"))
+	internal.AddFlagAndBind(v, fs, fs.Bool, "uniq-by-line", "output.uniq-by-line", true,
+		color.GreenString("Make issues output unique by line"))
+	internal.AddFlagAndBind(v, fs, fs.Bool, "sort-results", "output.sort-results", false,
+		color.GreenString("Sort linter results"))
+	internal.AddFlagAndBind(v, fs, fs.StringSlice, "sort-order", "output.sort-order", nil,
+		color.GreenString("Sort order of linter results"))
+	internal.AddFlagAndBind(v, fs, fs.String, "path-prefix", "output.path-prefix", "",
+		color.GreenString("Path prefix to add to output"))
+	internal.AddFlagAndBind(v, fs, fs.Bool, "show-stats", "output.show-stats", false, color.GreenString("Show statistics per linter"))
+}
+
+//nolint:gomnd // magic numbers here is ok
+func setupIssuesFlagSet(v *viper.Viper, fs *pflag.FlagSet) {
+	internal.AddHackedStringSliceP(fs, "exclude", "e", color.GreenString("Exclude issue by regexp"))
+	internal.AddFlagAndBind(v, fs, fs.Bool, "exclude-use-default", "issues.exclude-use-default", true,
+		getDefaultIssueExcludeHelp())
+	internal.AddFlagAndBind(v, fs, fs.Bool, "exclude-case-sensitive", "issues.exclude-case-sensitive", false,
+		color.GreenString("If set to true exclude and exclude rules regular expressions are case-sensitive"))
+
+	internal.AddFlagAndBind(v, fs, fs.Int, "max-issues-per-linter", "issues.max-issues-per-linter", defaultMaxIssuesPerLinter,
+		color.GreenString("Maximum issues count per one linter. Set to 0 to disable"))
+	internal.AddFlagAndBind(v, fs, fs.Int, "max-same-issues", "issues.max-same-issues", 3,
+		color.GreenString("Maximum count of issues with the same text. Set to 0 to disable"))
+
+	internal.AddHackedStringSlice(fs, "exclude-files", color.GreenString("Regexps of files to exclude"))
+	internal.AddHackedStringSlice(fs, "exclude-dirs", color.GreenString("Regexps of directories to exclude"))
+	internal.AddFlagAndBind(v, fs, fs.Bool, "exclude-dirs-use-default", "issues.exclude-dirs-use-default", true,
+		getDefaultDirectoryExcludeHelp())
+
+	const newDesc = "Show only new issues: if there are unstaged changes or untracked files, only those changes " +
+		"are analyzed, else only changes in HEAD~ are analyzed.\nIt's a super-useful option for integration " +
+		"of golangci-lint into existing large codebase.\nIt's not practical to fix all existing issues at " +
+		"the moment of integration: much better to not allow issues in new code.\nFor CI setups, prefer " +
+		"--new-from-rev=HEAD~, as --new can skip linting the current patch if any scripts generate " +
+		"unstaged files before golangci-lint runs."
+	internal.AddFlagAndBindP(v, fs, fs.BoolP, "new", "n", "issues.new", false, color.GreenString(newDesc))
+	internal.AddFlagAndBind(v, fs, fs.String, "new-from-rev", "issues.new-from-rev", "",
+		color.GreenString("Show only new issues created after git revision `REV`"))
+	internal.AddFlagAndBind(v, fs, fs.String, "new-from-patch", "issues.new-from-patch", "",
+		color.GreenString("Show only new issues created in git patch with file path `PATH`"))
+	internal.AddFlagAndBind(v, fs, fs.Bool, "whole-files", "issues.whole-files", false,
+		color.GreenString("Show issues in any part of update files (requires new-from-rev or new-from-patch)"))
+	internal.AddFlagAndBind(v, fs, fs.Bool, "fix", "issues.fix", false,
+		color.GreenString("Fix found issues (if it's supported by the linter)"))
+}
+
+func getDefaultIssueExcludeHelp() string {
+	parts := []string{color.GreenString("Use or not use default excludes:")}
+
+	for _, ep := range config.DefaultExcludePatterns {
+		parts = append(parts,
+			fmt.Sprintf("  - %s (%s): %s", color.BlueString(ep.ID), color.CyanString(ep.Linter), ep.Why),
+			fmt.Sprintf(`    Pattern: %s`, color.YellowString(`'`+ep.Pattern+`'`)),
+		)
+	}
+
+	return strings.Join(parts, "\n")
+}
+
+func getDefaultDirectoryExcludeHelp() string {
+	parts := []string{color.GreenString("Use or not use default excluded directories:")}
+	for _, dir := range processors.StdExcludeDirRegexps {
+		parts = append(parts, fmt.Sprintf("  - %s", color.YellowString(dir)))
+	}
+	parts = append(parts, "")
+	return strings.Join(parts, "\n")
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go
index a06d508f27c2fb5f693b45c483dc38f90c9a650d..094e5d19054848da261e22ffa4cbca3907de38ab 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go
@@ -2,17 +2,30 @@ package commands
 
 import (
 	"fmt"
+	"slices"
 	"sort"
 	"strings"
 
 	"github.com/fatih/color"
 	"github.com/spf13/cobra"
 
+	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
+	"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 )
 
-func (e *Executor) initHelp() {
+type helpCommand struct {
+	cmd *cobra.Command
+
+	dbManager *lintersdb.Manager
+
+	log logutils.Log
+}
+
+func newHelpCommand(logger logutils.Log) *helpCommand {
+	c := &helpCommand{log: logger}
+
 	helpCmd := &cobra.Command{
 		Use:   "help",
 		Short: "Help",
@@ -21,48 +34,39 @@ func (e *Executor) initHelp() {
 			return cmd.Help()
 		},
 	}
-	e.rootCmd.SetHelpCommand(helpCmd)
-
-	lintersHelpCmd := &cobra.Command{
-		Use:               "linters",
-		Short:             "Help about linters",
-		Args:              cobra.NoArgs,
-		ValidArgsFunction: cobra.NoFileCompletions,
-		Run:               e.executeLintersHelp,
-	}
-	helpCmd.AddCommand(lintersHelpCmd)
-}
 
-func printLinterConfigs(lcs []*linter.Config) {
-	sort.Slice(lcs, func(i, j int) bool {
-		return lcs[i].Name() < lcs[j].Name()
-	})
-	for _, lc := range lcs {
-		altNamesStr := ""
-		if len(lc.AlternativeNames) != 0 {
-			altNamesStr = fmt.Sprintf(" (%s)", strings.Join(lc.AlternativeNames, ", "))
-		}
+	helpCmd.AddCommand(
+		&cobra.Command{
+			Use:               "linters",
+			Short:             "Help about linters",
+			Args:              cobra.NoArgs,
+			ValidArgsFunction: cobra.NoFileCompletions,
+			Run:               c.execute,
+			PreRunE:           c.preRunE,
+		},
+	)
 
-		// If the linter description spans multiple lines, truncate everything following the first newline
-		linterDescription := lc.Linter.Desc()
-		firstNewline := strings.IndexRune(linterDescription, '\n')
-		if firstNewline > 0 {
-			linterDescription = linterDescription[:firstNewline]
-		}
+	c.cmd = helpCmd
 
-		deprecatedMark := ""
-		if lc.IsDeprecated() {
-			deprecatedMark = " [" + color.RedString("deprecated") + "]"
-		}
+	return c
+}
 
-		fmt.Fprintf(logutils.StdOut, "%s%s%s: %s [fast: %t, auto-fix: %t]\n", color.YellowString(lc.Name()),
-			altNamesStr, deprecatedMark, linterDescription, !lc.IsSlowLinter(), lc.CanAutoFix)
+func (c *helpCommand) preRunE(_ *cobra.Command, _ []string) error {
+	// The command doesn't depend on the real configuration.
+	// It just needs the list of all plugins and all presets.
+	dbManager, err := lintersdb.NewManager(c.log.Child(logutils.DebugKeyLintersDB), config.NewDefault(), lintersdb.NewLinterBuilder())
+	if err != nil {
+		return err
 	}
+
+	c.dbManager = dbManager
+
+	return nil
 }
 
-func (e *Executor) executeLintersHelp(_ *cobra.Command, _ []string) {
+func (c *helpCommand) execute(_ *cobra.Command, _ []string) {
 	var enabledLCs, disabledLCs []*linter.Config
-	for _, lc := range e.DBManager.GetAllSupportedLinterConfigs() {
+	for _, lc := range c.dbManager.GetAllSupportedLinterConfigs() {
 		if lc.Internal {
 			continue
 		}
@@ -75,13 +79,19 @@ func (e *Executor) executeLintersHelp(_ *cobra.Command, _ []string) {
 	}
 
 	color.Green("Enabled by default linters:\n")
-	printLinterConfigs(enabledLCs)
+	printLinters(enabledLCs)
+
 	color.Red("\nDisabled by default linters:\n")
-	printLinterConfigs(disabledLCs)
+	printLinters(disabledLCs)
 
 	color.Green("\nLinters presets:")
-	for _, p := range e.DBManager.AllPresets() {
-		linters := e.DBManager.GetAllLinterConfigsForPreset(p)
+	c.printPresets()
+}
+
+func (c *helpCommand) printPresets() {
+	for _, p := range lintersdb.AllPresets() {
+		linters := c.dbManager.GetAllLinterConfigsForPreset(p)
+
 		var linterNames []string
 		for _, lc := range linters {
 			if lc.Internal {
@@ -91,6 +101,42 @@ func (e *Executor) executeLintersHelp(_ *cobra.Command, _ []string) {
 			linterNames = append(linterNames, lc.Name())
 		}
 		sort.Strings(linterNames)
-		fmt.Fprintf(logutils.StdOut, "%s: %s\n", color.YellowString(p), strings.Join(linterNames, ", "))
+
+		_, _ = fmt.Fprintf(logutils.StdOut, "%s: %s\n", color.YellowString(p), strings.Join(linterNames, ", "))
+	}
+}
+
+func printLinters(lcs []*linter.Config) {
+	slices.SortFunc(lcs, func(a, b *linter.Config) int {
+		if a.IsDeprecated() && b.IsDeprecated() {
+			return strings.Compare(a.Name(), b.Name())
+		}
+
+		if a.IsDeprecated() {
+			return 1
+		}
+
+		if b.IsDeprecated() {
+			return -1
+		}
+
+		return strings.Compare(a.Name(), b.Name())
+	})
+
+	for _, lc := range lcs {
+		// If the linter description spans multiple lines, truncate everything following the first newline
+		linterDescription := lc.Linter.Desc()
+		firstNewline := strings.IndexRune(linterDescription, '\n')
+		if firstNewline > 0 {
+			linterDescription = linterDescription[:firstNewline]
+		}
+
+		deprecatedMark := ""
+		if lc.IsDeprecated() {
+			deprecatedMark = " [" + color.RedString("deprecated") + "]"
+		}
+
+		_, _ = fmt.Fprintf(logutils.StdOut, "%s%s: %s [fast: %t, auto-fix: %t]\n",
+			color.YellowString(lc.Name()), deprecatedMark, linterDescription, !lc.IsSlowLinter(), lc.CanAutoFix)
 	}
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/builder.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/builder.go
new file mode 100644
index 0000000000000000000000000000000000000000..7253615a45e551163110b07911bdc8a10c6ac376
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/builder.go
@@ -0,0 +1,250 @@
+package internal
+
+import (
+	"context"
+	"fmt"
+	"io"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"runtime"
+	"strings"
+	"time"
+	"unicode"
+
+	"github.com/golangci/golangci-lint/pkg/logutils"
+)
+
+// Builder runs all the required commands to build a binary.
+type Builder struct {
+	cfg *Configuration
+
+	log logutils.Log
+
+	root string
+	repo string
+}
+
+// NewBuilder creates a new Builder.
+func NewBuilder(logger logutils.Log, cfg *Configuration, root string) *Builder {
+	return &Builder{
+		cfg:  cfg,
+		log:  logger,
+		root: root,
+		repo: filepath.Join(root, "golangci-lint"),
+	}
+}
+
+// Build builds the custom binary.
+func (b Builder) Build(ctx context.Context) error {
+	b.log.Infof("Cloning golangci-lint repository")
+
+	err := b.clone(ctx)
+	if err != nil {
+		return fmt.Errorf("clone golangci-lint: %w", err)
+	}
+
+	b.log.Infof("Adding plugin imports")
+
+	err = b.updatePluginsFile()
+	if err != nil {
+		return fmt.Errorf("update plugin file: %w", err)
+	}
+
+	b.log.Infof("Adding replace directives")
+
+	err = b.addToGoMod(ctx)
+	if err != nil {
+		return fmt.Errorf("add to go.mod: %w", err)
+	}
+
+	b.log.Infof("Running go mod tidy")
+
+	err = b.goModTidy(ctx)
+	if err != nil {
+		return fmt.Errorf("go mod tidy: %w", err)
+	}
+
+	b.log.Infof("Building golangci-lint binary")
+
+	binaryName := b.getBinaryName()
+
+	err = b.goBuild(ctx, binaryName)
+	if err != nil {
+		return fmt.Errorf("build golangci-lint binary: %w", err)
+	}
+
+	b.log.Infof("Moving golangci-lint binary")
+
+	err = b.copyBinary(binaryName)
+	if err != nil {
+		return fmt.Errorf("move golangci-lint binary: %w", err)
+	}
+
+	return nil
+}
+
+func (b Builder) clone(ctx context.Context) error {
+	//nolint:gosec // the variable is sanitized.
+	cmd := exec.CommandContext(ctx,
+		"git", "clone", "--branch", sanitizeVersion(b.cfg.Version),
+		"--single-branch", "--depth", "1", "-c advice.detachedHead=false", "-q",
+		"https://github.com/golangci/golangci-lint.git",
+	)
+	cmd.Dir = b.root
+
+	output, err := cmd.CombinedOutput()
+	if err != nil {
+		b.log.Infof(string(output))
+
+		return fmt.Errorf("%s: %w", strings.Join(cmd.Args, " "), err)
+	}
+
+	return nil
+}
+
+func (b Builder) addToGoMod(ctx context.Context) error {
+	for _, plugin := range b.cfg.Plugins {
+		if plugin.Path != "" {
+			err := b.addReplaceDirective(ctx, plugin)
+			if err != nil {
+				return err
+			}
+
+			continue
+		}
+
+		err := b.goGet(ctx, plugin)
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (b Builder) goGet(ctx context.Context, plugin *Plugin) error {
+	//nolint:gosec // the variables are user related.
+	cmd := exec.CommandContext(ctx, "go", "get", plugin.Module+"@"+plugin.Version)
+	cmd.Dir = b.repo
+
+	b.log.Infof("run: %s", strings.Join(cmd.Args, " "))
+
+	output, err := cmd.CombinedOutput()
+	if err != nil {
+		b.log.Warnf(string(output))
+
+		return fmt.Errorf("%s: %w", strings.Join(cmd.Args, " "), err)
+	}
+
+	return nil
+}
+
+func (b Builder) addReplaceDirective(ctx context.Context, plugin *Plugin) error {
+	replace := fmt.Sprintf("%s=%s", plugin.Module, plugin.Path)
+
+	cmd := exec.CommandContext(ctx, "go", "mod", "edit", "-replace", replace)
+	cmd.Dir = b.repo
+
+	b.log.Infof("run: %s", strings.Join(cmd.Args, " "))
+
+	output, err := cmd.CombinedOutput()
+	if err != nil {
+		b.log.Warnf(string(output))
+
+		return fmt.Errorf("%s: %w", strings.Join(cmd.Args, " "), err)
+	}
+
+	return nil
+}
+
+func (b Builder) goModTidy(ctx context.Context) error {
+	cmd := exec.CommandContext(ctx, "go", "mod", "tidy")
+	cmd.Dir = b.repo
+
+	output, err := cmd.CombinedOutput()
+	if err != nil {
+		b.log.Warnf(string(output))
+
+		return fmt.Errorf("%s: %w", strings.Join(cmd.Args, " "), err)
+	}
+
+	return nil
+}
+
+func (b Builder) goBuild(ctx context.Context, binaryName string) error {
+	//nolint:gosec // the variable is sanitized.
+	cmd := exec.CommandContext(ctx, "go", "build",
+		"-ldflags",
+		fmt.Sprintf(
+			"-s -w -X 'main.version=%s-custom-gcl' -X 'main.date=%s'",
+			sanitizeVersion(b.cfg.Version), time.Now().UTC().String(),
+		),
+		"-o", binaryName,
+		"./cmd/golangci-lint",
+	)
+	cmd.Dir = b.repo
+
+	output, err := cmd.CombinedOutput()
+	if err != nil {
+		b.log.Warnf(string(output))
+
+		return fmt.Errorf("%s: %w", strings.Join(cmd.Args, " "), err)
+	}
+
+	return nil
+}
+
+func (b Builder) copyBinary(binaryName string) error {
+	src := filepath.Join(b.repo, binaryName)
+
+	source, err := os.Open(filepath.Clean(src))
+	if err != nil {
+		return fmt.Errorf("open source file: %w", err)
+	}
+
+	defer func() { _ = source.Close() }()
+
+	info, err := source.Stat()
+	if err != nil {
+		return fmt.Errorf("stat source file: %w", err)
+	}
+
+	if b.cfg.Destination != "" {
+		err = os.MkdirAll(b.cfg.Destination, os.ModePerm)
+		if err != nil {
+			return fmt.Errorf("create destination directory: %w", err)
+		}
+	}
+
+	dst, err := os.OpenFile(filepath.Join(b.cfg.Destination, binaryName), os.O_RDWR|os.O_CREATE|os.O_TRUNC, info.Mode())
+	if err != nil {
+		return fmt.Errorf("create destination file: %w", err)
+	}
+
+	defer func() { _ = dst.Close() }()
+
+	_, err = io.Copy(dst, source)
+	if err != nil {
+		return fmt.Errorf("copy source to destination: %w", err)
+	}
+
+	return nil
+}
+
+func (b Builder) getBinaryName() string {
+	name := b.cfg.Name
+	if runtime.GOOS == "windows" {
+		name += ".exe"
+	}
+
+	return name
+}
+
+func sanitizeVersion(v string) string {
+	fn := func(c rune) bool {
+		return !(unicode.IsLetter(c) || unicode.IsNumber(c) || c == '.' || c == '/')
+	}
+
+	return strings.Join(strings.FieldsFunc(v, fn), "")
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/configuration.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/configuration.go
new file mode 100644
index 0000000000000000000000000000000000000000..f9de4c47a7375a37206a5e5ef8562ee82ed5e315
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/configuration.go
@@ -0,0 +1,140 @@
+package internal
+
+import (
+	"errors"
+	"fmt"
+	"os"
+	"path/filepath"
+	"strings"
+
+	"gopkg.in/yaml.v3"
+)
+
+const base = ".custom-gcl"
+
+const defaultBinaryName = "custom-gcl"
+
+// Configuration represents the configuration file.
+type Configuration struct {
+	// golangci-lint version.
+	Version string `yaml:"version"`
+
+	// Name of the binary.
+	Name string `yaml:"name,omitempty"`
+
+	// Destination is the path to a directory to store the binary.
+	Destination string `yaml:"destination,omitempty"`
+
+	// Plugins information.
+	Plugins []*Plugin `yaml:"plugins,omitempty"`
+}
+
+// Validate checks and cleans the configuration.
+func (c *Configuration) Validate() error {
+	if strings.TrimSpace(c.Version) == "" {
+		return errors.New("root field 'version' is required")
+	}
+
+	if strings.TrimSpace(c.Name) == "" {
+		c.Name = defaultBinaryName
+	}
+
+	if len(c.Plugins) == 0 {
+		return errors.New("no plugins defined")
+	}
+
+	for _, plugin := range c.Plugins {
+		if strings.TrimSpace(plugin.Module) == "" {
+			return errors.New("field 'module' is required")
+		}
+
+		if strings.TrimSpace(plugin.Import) == "" {
+			plugin.Import = plugin.Module
+		}
+
+		if strings.TrimSpace(plugin.Path) == "" && strings.TrimSpace(plugin.Version) == "" {
+			return errors.New("missing information: 'version' or 'path' should be provided")
+		}
+
+		if strings.TrimSpace(plugin.Path) != "" && strings.TrimSpace(plugin.Version) != "" {
+			return errors.New("invalid configuration: 'version' and 'path' should not be provided at the same time")
+		}
+
+		if strings.TrimSpace(plugin.Path) == "" {
+			continue
+		}
+
+		abs, err := filepath.Abs(plugin.Path)
+		if err != nil {
+			return err
+		}
+
+		plugin.Path = abs
+	}
+
+	return nil
+}
+
+// Plugin represents information about a plugin.
+type Plugin struct {
+	// Module name.
+	Module string `yaml:"module"`
+
+	// Import to use.
+	Import string `yaml:"import,omitempty"`
+
+	// Version of the module.
+	// Only for modules available through a Go proxy.
+	Version string `yaml:"version,omitempty"`
+
+	// Path to the local module.
+	// Only for local module.
+	Path string `yaml:"path,omitempty"`
+}
+
+func LoadConfiguration() (*Configuration, error) {
+	configFilePath, err := findConfigurationFile()
+	if err != nil {
+		return nil, fmt.Errorf("file %s not found: %w", configFilePath, err)
+	}
+
+	file, err := os.Open(configFilePath)
+	if err != nil {
+		return nil, fmt.Errorf("file %s open: %w", configFilePath, err)
+	}
+
+	defer func() { _ = file.Close() }()
+
+	var cfg Configuration
+
+	err = yaml.NewDecoder(file).Decode(&cfg)
+	if err != nil {
+		return nil, fmt.Errorf("YAML decoding: %w", err)
+	}
+
+	return &cfg, nil
+}
+
+func findConfigurationFile() (string, error) {
+	entries, err := os.ReadDir(".")
+	if err != nil {
+		return "", fmt.Errorf("read directory: %w", err)
+	}
+
+	for _, entry := range entries {
+		ext := filepath.Ext(entry.Name())
+
+		switch strings.ToLower(strings.TrimPrefix(ext, ".")) {
+		case "yml", "yaml", "json":
+			if isConf(ext, entry.Name()) {
+				return entry.Name(), nil
+			}
+		}
+	}
+
+	return "", errors.New("configuration file not found")
+}
+
+func isConf(ext, name string) bool {
+	return base+ext == name
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/imports.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/imports.go
new file mode 100644
index 0000000000000000000000000000000000000000..3bebf596b1abe9e5e4f2c660781063e370049b53
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/imports.go
@@ -0,0 +1,69 @@
+package internal
+
+import (
+	"bytes"
+	"fmt"
+	"go/format"
+	"os"
+	"path/filepath"
+	"text/template"
+)
+
+const importsTemplate = `
+package main
+
+import (
+{{range .Imports -}}
+	_ "{{.}}"
+{{end -}}
+)
+`
+
+func (b Builder) updatePluginsFile() error {
+	importsDest := filepath.Join(b.repo, "cmd", "golangci-lint", "plugins.go")
+
+	info, err := os.Stat(importsDest)
+	if err != nil {
+		return fmt.Errorf("file %s not found: %w", importsDest, err)
+	}
+
+	source, err := generateImports(b.cfg)
+	if err != nil {
+		return fmt.Errorf("generate imports: %w", err)
+	}
+
+	b.log.Infof("generated imports info %s:\n%s\n", importsDest, source)
+
+	err = os.WriteFile(filepath.Clean(importsDest), source, info.Mode())
+	if err != nil {
+		return fmt.Errorf("write file %s: %w", importsDest, err)
+	}
+
+	return nil
+}
+
+func generateImports(cfg *Configuration) ([]byte, error) {
+	impTmpl, err := template.New("plugins.go").Parse(importsTemplate)
+	if err != nil {
+		return nil, fmt.Errorf("parse template: %w", err)
+	}
+
+	var imps []string
+	for _, plugin := range cfg.Plugins {
+		imps = append(imps, plugin.Import)
+	}
+
+	buf := &bytes.Buffer{}
+
+	err = impTmpl.Execute(buf, map[string]any{"Imports": imps})
+	if err != nil {
+		return nil, fmt.Errorf("execute template: %w", err)
+	}
+
+	source, err := format.Source(buf.Bytes())
+	if err != nil {
+		return nil, fmt.Errorf("format source: %w", err)
+	}
+
+	return source, nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/vibra.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/vibra.go
new file mode 100644
index 0000000000000000000000000000000000000000..ece2483fe02e9cfff5c28c7a1af868965489b77b
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/vibra.go
@@ -0,0 +1,59 @@
+package internal
+
+import (
+	"fmt"
+
+	"github.com/spf13/pflag"
+	"github.com/spf13/viper"
+)
+
+type FlagFunc[T any] func(name string, value T, usage string) *T
+
+type FlagPFunc[T any] func(name, shorthand string, value T, usage string) *T
+
+// AddFlagAndBind adds a Cobra/pflag flag and binds it with Viper.
+func AddFlagAndBind[T any](v *viper.Viper, fs *pflag.FlagSet, pfn FlagFunc[T], name, bind string, value T, usage string) {
+	pfn(name, value, usage)
+
+	err := v.BindPFlag(bind, fs.Lookup(name))
+	if err != nil {
+		panic(fmt.Sprintf("failed to bind flag %s: %v", name, err))
+	}
+}
+
+// AddFlagAndBindP adds a Cobra/pflag flag and binds it with Viper.
+func AddFlagAndBindP[T any](v *viper.Viper, fs *pflag.FlagSet, pfn FlagPFunc[T], name, shorthand, bind string, value T, usage string) {
+	pfn(name, shorthand, value, usage)
+
+	err := v.BindPFlag(bind, fs.Lookup(name))
+	if err != nil {
+		panic(fmt.Sprintf("failed to bind flag %s: %v", name, err))
+	}
+}
+
+// AddDeprecatedFlagAndBind is similar to AddFlagAndBind but deprecates the flag.
+func AddDeprecatedFlagAndBind[T any](v *viper.Viper, fs *pflag.FlagSet, pfn FlagFunc[T], name, bind string, value T, usage string) {
+	AddFlagAndBind(v, fs, pfn, name, bind, value, usage)
+	deprecateFlag(fs, name)
+}
+
+// AddHackedStringSliceP is a hack for slice flags; see Loader.applyStringSliceHack.
+func AddHackedStringSliceP(fs *pflag.FlagSet, name, shorthand, usage string) {
+	fs.StringSliceP(name, shorthand, nil, usage)
+}
+
+// AddHackedStringSlice is a hack for slice flags; see Loader.applyStringSliceHack.
+func AddHackedStringSlice(fs *pflag.FlagSet, name, usage string) {
+	AddHackedStringSliceP(fs, name, "", usage)
+}
+
+// AddDeprecatedHackedStringSlice is similar to AddHackedStringSlice but deprecates the flag.
+func AddDeprecatedHackedStringSlice(fs *pflag.FlagSet, name, usage string) {
+	AddHackedStringSlice(fs, name, usage)
+	deprecateFlag(fs, name)
+}
+
+func deprecateFlag(fs *pflag.FlagSet, name string) {
+	_ = fs.MarkHidden(name)
+	_ = fs.MarkDeprecated(name, "check the documentation for more information.")
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go
index 292713ec900231a36d9cec0dde2c6e33ce2e94a1..a93814f0f8510b4525f51758589bdbd54036bdd7 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go
@@ -5,53 +5,103 @@ import (
 
 	"github.com/fatih/color"
 	"github.com/spf13/cobra"
+	"github.com/spf13/viper"
 
+	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
+	"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
+	"github.com/golangci/golangci-lint/pkg/logutils"
 )
 
-func (e *Executor) initLinters() {
-	e.lintersCmd = &cobra.Command{
+type lintersOptions struct {
+	config.LoaderOptions
+}
+
+type lintersCommand struct {
+	viper *viper.Viper
+	cmd   *cobra.Command
+
+	opts lintersOptions
+
+	cfg *config.Config
+
+	log logutils.Log
+
+	dbManager *lintersdb.Manager
+}
+
+func newLintersCommand(logger logutils.Log) *lintersCommand {
+	c := &lintersCommand{
+		viper: viper.New(),
+		cfg:   config.NewDefault(),
+		log:   logger,
+	}
+
+	lintersCmd := &cobra.Command{
 		Use:               "linters",
 		Short:             "List current linters configuration",
 		Args:              cobra.NoArgs,
 		ValidArgsFunction: cobra.NoFileCompletions,
-		RunE:              e.executeLinters,
+		RunE:              c.execute,
+		PreRunE:           c.preRunE,
+		SilenceUsage:      true,
 	}
-	e.rootCmd.AddCommand(e.lintersCmd)
-	e.initRunConfiguration(e.lintersCmd)
+
+	fs := lintersCmd.Flags()
+	fs.SortFlags = false // sort them as they are defined here
+
+	setupConfigFileFlagSet(fs, &c.opts.LoaderOptions)
+	setupLintersFlagSet(c.viper, fs)
+
+	c.cmd = lintersCmd
+
+	return c
 }
 
-// executeLinters runs the 'linters' CLI command, which displays the supported linters.
-func (e *Executor) executeLinters(_ *cobra.Command, _ []string) error {
-	enabledLintersMap, err := e.EnabledLintersSet.GetEnabledLintersMap()
+func (c *lintersCommand) preRunE(cmd *cobra.Command, args []string) error {
+	loader := config.NewLoader(c.log.Child(logutils.DebugKeyConfigReader), c.viper, cmd.Flags(), c.opts.LoaderOptions, c.cfg, args)
+
+	err := loader.Load(config.LoadOptions{Validation: true})
 	if err != nil {
-		return fmt.Errorf("can't get enabled linters: %w", err)
+		return fmt.Errorf("can't load config: %w", err)
 	}
 
-	color.Green("Enabled by your configuration linters:\n")
-	var enabledLinters []*linter.Config
-	for _, lc := range enabledLintersMap {
-		if lc.Internal {
-			continue
-		}
+	dbManager, err := lintersdb.NewManager(c.log.Child(logutils.DebugKeyLintersDB), c.cfg,
+		lintersdb.NewLinterBuilder(), lintersdb.NewPluginModuleBuilder(c.log), lintersdb.NewPluginGoBuilder(c.log))
+	if err != nil {
+		return err
+	}
+
+	c.dbManager = dbManager
 
-		enabledLinters = append(enabledLinters, lc)
+	return nil
+}
+
+func (c *lintersCommand) execute(_ *cobra.Command, _ []string) error {
+	enabledLintersMap, err := c.dbManager.GetEnabledLintersMap()
+	if err != nil {
+		return fmt.Errorf("can't get enabled linters: %w", err)
 	}
-	printLinterConfigs(enabledLinters)
 
+	var enabledLinters []*linter.Config
 	var disabledLCs []*linter.Config
-	for _, lc := range e.DBManager.GetAllSupportedLinterConfigs() {
+
+	for _, lc := range c.dbManager.GetAllSupportedLinterConfigs() {
 		if lc.Internal {
 			continue
 		}
 
 		if enabledLintersMap[lc.Name()] == nil {
 			disabledLCs = append(disabledLCs, lc)
+		} else {
+			enabledLinters = append(enabledLinters, lc)
 		}
 	}
 
+	color.Green("Enabled by your configuration linters:\n")
+	printLinters(enabledLinters)
 	color.Red("\nDisabled by your configuration linters:\n")
-	printLinterConfigs(disabledLCs)
+	printLinters(disabledLCs)
 
 	return nil
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go
index 5fe4c784d61b873657e79e4676942c838e30c484..cbb838aac2bc70cb7893d4ee44c7a711c6b227c2 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go
@@ -1,174 +1,167 @@
 package commands
 
 import (
+	"errors"
 	"fmt"
 	"os"
-	"runtime"
-	"runtime/pprof"
-	"runtime/trace"
-	"strconv"
+	"slices"
 
+	"github.com/fatih/color"
 	"github.com/spf13/cobra"
 	"github.com/spf13/pflag"
 
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/exitcodes"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 )
 
-const (
-	// envHelpRun value: "1".
-	envHelpRun        = "HELP_RUN"
-	envMemProfileRate = "GL_MEM_PROFILE_RATE"
-)
+func Execute(info BuildInfo) error {
+	return newRootCommand(info).Execute()
+}
 
-func (e *Executor) persistentPreRun(_ *cobra.Command, _ []string) error {
-	if e.cfg.Run.PrintVersion {
-		_ = printVersion(logutils.StdOut, e.buildInfo)
-		os.Exit(exitcodes.Success) // a return nil is not enough to stop the process because we are inside the `preRun`.
-	}
+type rootOptions struct {
+	PrintVersion bool // Flag only.
 
-	runtime.GOMAXPROCS(e.cfg.Run.Concurrency)
+	Verbose bool   // Flag only.
+	Color   string // Flag only.
+}
 
-	if e.cfg.Run.CPUProfilePath != "" {
-		f, err := os.Create(e.cfg.Run.CPUProfilePath)
-		if err != nil {
-			return fmt.Errorf("can't create file %s: %w", e.cfg.Run.CPUProfilePath, err)
-		}
-		if err := pprof.StartCPUProfile(f); err != nil {
-			return fmt.Errorf("can't start CPU profiling: %w", err)
-		}
-	}
+type rootCommand struct {
+	cmd  *cobra.Command
+	opts rootOptions
 
-	if e.cfg.Run.MemProfilePath != "" {
-		if rate := os.Getenv(envMemProfileRate); rate != "" {
-			runtime.MemProfileRate, _ = strconv.Atoi(rate)
-		}
-	}
+	log logutils.Log
+}
 
-	if e.cfg.Run.TracePath != "" {
-		f, err := os.Create(e.cfg.Run.TracePath)
-		if err != nil {
-			return fmt.Errorf("can't create file %s: %w", e.cfg.Run.TracePath, err)
-		}
-		if err = trace.Start(f); err != nil {
-			return fmt.Errorf("can't start tracing: %w", err)
-		}
-	}
+func newRootCommand(info BuildInfo) *rootCommand {
+	c := &rootCommand{}
 
-	return nil
-}
+	rootCmd := &cobra.Command{
+		Use:   "golangci-lint",
+		Short: "golangci-lint is a smart linters runner.",
+		Long:  `Smart, fast linters runner.`,
+		Args:  cobra.NoArgs,
+		RunE: func(cmd *cobra.Command, _ []string) error {
+			if c.opts.PrintVersion {
+				_ = printVersion(logutils.StdOut, info)
+				return nil
+			}
 
-func (e *Executor) persistentPostRun(_ *cobra.Command, _ []string) error {
-	if e.cfg.Run.CPUProfilePath != "" {
-		pprof.StopCPUProfile()
+			return cmd.Help()
+		},
 	}
 
-	if e.cfg.Run.MemProfilePath != "" {
-		f, err := os.Create(e.cfg.Run.MemProfilePath)
-		if err != nil {
-			return fmt.Errorf("can't create file %s: %w", e.cfg.Run.MemProfilePath, err)
-		}
+	fs := rootCmd.Flags()
+	fs.BoolVar(&c.opts.PrintVersion, "version", false, color.GreenString("Print version"))
 
-		var ms runtime.MemStats
-		runtime.ReadMemStats(&ms)
-		printMemStats(&ms, e.log)
+	setupRootPersistentFlags(rootCmd.PersistentFlags(), &c.opts)
 
-		if err := pprof.WriteHeapProfile(f); err != nil {
-			return fmt.Errorf("cCan't write heap profile: %w", err)
-		}
-		_ = f.Close()
-	}
+	log := logutils.NewStderrLog(logutils.DebugKeyEmpty)
 
-	if e.cfg.Run.TracePath != "" {
-		trace.Stop()
-	}
+	// Each command uses a dedicated configuration structure to avoid side effects of bindings.
+	rootCmd.AddCommand(
+		newLintersCommand(log).cmd,
+		newRunCommand(log, info).cmd,
+		newCacheCommand().cmd,
+		newConfigCommand(log, info).cmd,
+		newVersionCommand(info).cmd,
+		newCustomCommand(log).cmd,
+	)
 
-	os.Exit(e.exitCode)
+	rootCmd.SetHelpCommand(newHelpCommand(log).cmd)
 
-	return nil
+	c.log = log
+	c.cmd = rootCmd
+
+	return c
 }
 
-func printMemStats(ms *runtime.MemStats, logger logutils.Log) {
-	logger.Infof("Mem stats: alloc=%s total_alloc=%s sys=%s "+
-		"heap_alloc=%s heap_sys=%s heap_idle=%s heap_released=%s heap_in_use=%s "+
-		"stack_in_use=%s stack_sys=%s "+
-		"mspan_sys=%s mcache_sys=%s buck_hash_sys=%s gc_sys=%s other_sys=%s "+
-		"mallocs_n=%d frees_n=%d heap_objects_n=%d gc_cpu_fraction=%.2f",
-		formatMemory(ms.Alloc), formatMemory(ms.TotalAlloc), formatMemory(ms.Sys),
-		formatMemory(ms.HeapAlloc), formatMemory(ms.HeapSys),
-		formatMemory(ms.HeapIdle), formatMemory(ms.HeapReleased), formatMemory(ms.HeapInuse),
-		formatMemory(ms.StackInuse), formatMemory(ms.StackSys),
-		formatMemory(ms.MSpanSys), formatMemory(ms.MCacheSys), formatMemory(ms.BuckHashSys),
-		formatMemory(ms.GCSys), formatMemory(ms.OtherSys),
-		ms.Mallocs, ms.Frees, ms.HeapObjects, ms.GCCPUFraction)
+func (c *rootCommand) Execute() error {
+	err := setupLogger(c.log)
+	if err != nil {
+		return err
+	}
+
+	return c.cmd.Execute()
 }
 
-func formatMemory(memBytes uint64) string {
-	const Kb = 1024
-	const Mb = Kb * 1024
+func setupRootPersistentFlags(fs *pflag.FlagSet, opts *rootOptions) {
+	fs.BoolP("help", "h", false, color.GreenString("Help for a command"))
+	fs.BoolVarP(&opts.Verbose, "verbose", "v", false, color.GreenString("Verbose output"))
+	fs.StringVar(&opts.Color, "color", "auto", color.GreenString("Use color when printing; can be 'always', 'auto', or 'never'"))
+}
 
-	if memBytes < Kb {
-		return fmt.Sprintf("%db", memBytes)
+func setupLogger(logger logutils.Log) error {
+	opts, err := forceRootParsePersistentFlags()
+	if err != nil && !errors.Is(err, pflag.ErrHelp) {
+		return err
 	}
-	if memBytes < Mb {
-		return fmt.Sprintf("%dkb", memBytes/Kb)
+
+	if opts == nil {
+		return nil
 	}
-	return fmt.Sprintf("%dmb", memBytes/Mb)
-}
 
-func getDefaultConcurrency() int {
-	if os.Getenv(envHelpRun) == "1" {
-		// Make stable concurrency for README help generating builds.
-		const prettyConcurrency = 8
-		return prettyConcurrency
+	logutils.SetupVerboseLog(logger, opts.Verbose)
+
+	switch opts.Color {
+	case "always":
+		color.NoColor = false
+	case "never":
+		color.NoColor = true
+	case "auto":
+		// nothing
+	default:
+		logger.Fatalf("invalid value %q for --color; must be 'always', 'auto', or 'never'", opts.Color)
 	}
 
-	return runtime.NumCPU()
+	return nil
 }
 
-func (e *Executor) initRoot() {
-	rootCmd := &cobra.Command{
-		Use:   "golangci-lint",
-		Short: "golangci-lint is a smart linters runner.",
-		Long:  `Smart, fast linters runner.`,
-		Args:  cobra.NoArgs,
-		RunE: func(cmd *cobra.Command, _ []string) error {
-			return cmd.Help()
-		},
-		PersistentPreRunE:  e.persistentPreRun,
-		PersistentPostRunE: e.persistentPostRun,
-	}
+func forceRootParsePersistentFlags() (*rootOptions, error) {
+	// We use another pflag.FlagSet here to not set `changed` flag on cmd.Flags() options.
+	// Otherwise, string slice options will be duplicated.
+	fs := pflag.NewFlagSet("config flag set", pflag.ContinueOnError)
 
-	initRootFlagSet(rootCmd.PersistentFlags(), e.cfg, e.needVersionOption())
-	e.rootCmd = rootCmd
-}
+	// Ignore unknown flags because we will parse the command flags later.
+	fs.ParseErrorsWhitelist = pflag.ParseErrorsWhitelist{UnknownFlags: true}
 
-func (e *Executor) needVersionOption() bool {
-	return e.buildInfo.Date != ""
-}
+	opts := &rootOptions{}
 
-func initRootFlagSet(fs *pflag.FlagSet, cfg *config.Config, needVersionOption bool) {
-	fs.BoolVarP(&cfg.Run.IsVerbose, "verbose", "v", false, wh("verbose output"))
+	// Don't do `fs.AddFlagSet(cmd.Flags())` because it shares flag representations:
+	// the `changed` variable inside string slice vars would be shared.
+	// Use a separate options variable here,
+	// so that parsing only these options doesn't affect the main flag parsing.
+	setupRootPersistentFlags(fs, opts)
 
-	var silent bool
-	fs.BoolVarP(&silent, "silent", "s", false, wh("disables congrats outputs"))
-	if err := fs.MarkHidden("silent"); err != nil {
-		panic(err)
-	}
-	err := fs.MarkDeprecated("silent",
-		"now golangci-lint by default is silent: it doesn't print Congrats message")
-	if err != nil {
-		panic(err)
+	fs.Usage = func() {} // otherwise, help text will be printed twice
+
+	if err := fs.Parse(safeArgs(fs, os.Args)); err != nil {
+		if errors.Is(err, pflag.ErrHelp) {
+			return nil, err
+		}
+
+		return nil, fmt.Errorf("can't parse args: %w", err)
 	}
 
-	fs.StringVar(&cfg.Run.CPUProfilePath, "cpu-profile-path", "", wh("Path to CPU profile output file"))
-	fs.StringVar(&cfg.Run.MemProfilePath, "mem-profile-path", "", wh("Path to memory profile output file"))
-	fs.StringVar(&cfg.Run.TracePath, "trace-path", "", wh("Path to trace output file"))
-	fs.IntVarP(&cfg.Run.Concurrency, "concurrency", "j", getDefaultConcurrency(), wh("Concurrency (default NumCPU)"))
-	if needVersionOption {
-		fs.BoolVar(&cfg.Run.PrintVersion, "version", false, wh("Print version"))
+	return opts, nil
+}
+
+// Shorthands are a problem because pflag, with UnknownFlags, will try to parse all the letters as options.
+// A shorthand can aggregate several letters (e.g. `ps -aux`).
+// The function replaces unsupported shorthands with a placeholder flag.
+func safeArgs(fs *pflag.FlagSet, args []string) []string {
+	var shorthands []string
+	fs.VisitAll(func(flag *pflag.Flag) {
+		shorthands = append(shorthands, flag.Shorthand)
+	})
+
+	var cleanArgs []string
+	for _, arg := range args {
+		if len(arg) > 1 && arg[0] == '-' && arg[1] != '-' && !slices.Contains(shorthands, string(arg[1])) {
+			cleanArgs = append(cleanArgs, "--potato")
+			continue
+		}
+
+		cleanArgs = append(cleanArgs, arg)
 	}
 
-	fs.StringVar(&cfg.Output.Color, "color", "auto", wh("Use color when printing; can be 'always', 'auto', or 'never'"))
+	return cleanArgs
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go
index 9149b177bcee225920f347a90914519c8e21fadf..dc323f1017685c8d2750dc69ba5b4cf5ee1622fd 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go
@@ -1,31 +1,50 @@
 package commands
 
 import (
+	"bytes"
 	"context"
+	"crypto/sha256"
 	"errors"
 	"fmt"
 	"io"
 	"log"
 	"os"
+	"path/filepath"
 	"runtime"
+	"runtime/pprof"
+	"runtime/trace"
+	"sort"
+	"strconv"
 	"strings"
 	"time"
 
 	"github.com/fatih/color"
+	"github.com/gofrs/flock"
 	"github.com/spf13/cobra"
 	"github.com/spf13/pflag"
+	"github.com/spf13/viper"
+	"go.uber.org/automaxprocs/maxprocs"
+	"golang.org/x/exp/maps"
+	"gopkg.in/yaml.v3"
 
+	"github.com/golangci/golangci-lint/internal/cache"
+	"github.com/golangci/golangci-lint/internal/pkgcache"
 	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/exitcodes"
+	"github.com/golangci/golangci-lint/pkg/fsutils"
+	"github.com/golangci/golangci-lint/pkg/goanalysis/load"
+	"github.com/golangci/golangci-lint/pkg/goutil"
 	"github.com/golangci/golangci-lint/pkg/lint"
+	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
 	"github.com/golangci/golangci-lint/pkg/logutils"
-	"github.com/golangci/golangci-lint/pkg/packages"
 	"github.com/golangci/golangci-lint/pkg/printers"
+	"github.com/golangci/golangci-lint/pkg/report"
 	"github.com/golangci/golangci-lint/pkg/result"
+	"github.com/golangci/golangci-lint/pkg/timeutils"
 )
 
-const defaultFileMode = 0644
+const defaultTimeout = time.Minute
 
 const (
 	// envFailOnWarnings value: "1"
@@ -34,526 +53,468 @@ const (
 	envMemLogEvery = "GL_MEM_LOG_EVERY"
 )
 
-func getDefaultIssueExcludeHelp() string {
-	parts := []string{"Use or not use default excludes:"}
-	for _, ep := range config.DefaultExcludePatterns {
-		parts = append(parts,
-			fmt.Sprintf("  # %s %s: %s", ep.ID, ep.Linter, ep.Why),
-			fmt.Sprintf("  - %s", color.YellowString(ep.Pattern)),
-			"",
-		)
-	}
-	return strings.Join(parts, "\n")
-}
+const (
+	// envHelpRun value: "1".
+	envHelpRun        = "HELP_RUN"
+	envMemProfileRate = "GL_MEM_PROFILE_RATE"
+)
 
-func getDefaultDirectoryExcludeHelp() string {
-	parts := []string{"Use or not use default excluded directories:"}
-	for _, dir := range packages.StdExcludeDirRegexps {
-		parts = append(parts, fmt.Sprintf("  - %s", color.YellowString(dir)))
-	}
-	parts = append(parts, "")
-	return strings.Join(parts, "\n")
+type runOptions struct {
+	config.LoaderOptions
+
+	CPUProfilePath string // Flag only.
+	MemProfilePath string // Flag only.
+	TracePath      string // Flag only.
+
+	PrintResourcesUsage bool // Flag only.
 }
 
-func wh(text string) string {
-	return color.GreenString(text)
+type runCommand struct {
+	viper *viper.Viper
+	cmd   *cobra.Command
+
+	opts runOptions
+
+	cfg *config.Config
+
+	buildInfo BuildInfo
+
+	dbManager *lintersdb.Manager
+
+	printer *printers.Printer
+
+	log        logutils.Log
+	debugf     logutils.DebugFunc
+	reportData *report.Data
+
+	contextBuilder *lint.ContextBuilder
+	goenv          *goutil.Env
+
+	fileCache *fsutils.FileCache
+	lineCache *fsutils.LineCache
+
+	flock *flock.Flock
+
+	exitCode int
 }
 
-const defaultTimeout = time.Minute
+func newRunCommand(logger logutils.Log, info BuildInfo) *runCommand {
+	reportData := &report.Data{}
 
-//nolint:funlen,gomnd
-func initFlagSet(fs *pflag.FlagSet, cfg *config.Config, m *lintersdb.Manager, isFinalInit bool) {
-	hideFlag := func(name string) {
-		if err := fs.MarkHidden(name); err != nil {
-			panic(err)
-		}
+	c := &runCommand{
+		viper:      viper.New(),
+		log:        report.NewLogWrapper(logger, reportData),
+		debugf:     logutils.Debug(logutils.DebugKeyExec),
+		cfg:        config.NewDefault(),
+		reportData: reportData,
+		buildInfo:  info,
+	}
 
-		// we run initFlagSet multiple times, but we wouldn't like to see deprecation message multiple times
-		if isFinalInit {
-			const deprecateMessage = "flag will be removed soon, please, use .golangci.yml config"
-			if err := fs.MarkDeprecated(name, deprecateMessage); err != nil {
-				panic(err)
-			}
-		}
+	runCmd := &cobra.Command{
+		Use:                "run",
+		Short:              "Run the linters",
+		Run:                c.execute,
+		PreRunE:            c.preRunE,
+		PostRun:            c.postRun,
+		PersistentPreRunE:  c.persistentPreRunE,
+		PersistentPostRunE: c.persistentPostRunE,
+		SilenceUsage:       true,
 	}
 
-	// Output config
-	oc := &cfg.Output
-	fs.StringVar(&oc.Format, "out-format",
-		config.OutFormatColoredLineNumber,
-		wh(fmt.Sprintf("Format of output: %s", strings.Join(config.OutFormats, "|"))))
-	fs.BoolVar(&oc.PrintIssuedLine, "print-issued-lines", true, wh("Print lines of code with issue"))
-	fs.BoolVar(&oc.PrintLinterName, "print-linter-name", true, wh("Print linter name in issue line"))
-	fs.BoolVar(&oc.UniqByLine, "uniq-by-line", true, wh("Make issues output unique by line"))
-	fs.BoolVar(&oc.SortResults, "sort-results", false, wh("Sort linter results"))
-	fs.BoolVar(&oc.PrintWelcomeMessage, "print-welcome", false, wh("Print welcome message"))
-	fs.StringVar(&oc.PathPrefix, "path-prefix", "", wh("Path prefix to add to output"))
-	hideFlag("print-welcome") // no longer used
-
-	fs.BoolVar(&cfg.InternalCmdTest, "internal-cmd-test", false, wh("Option is used only for testing golangci-lint command, don't use it"))
-	if err := fs.MarkHidden("internal-cmd-test"); err != nil {
-		panic(err)
-	}
-
-	// Run config
-	rc := &cfg.Run
-	fs.StringVar(&rc.ModulesDownloadMode, "modules-download-mode", "",
-		"Modules download mode. If not empty, passed as -mod=<mode> to go tools")
-	fs.IntVar(&rc.ExitCodeIfIssuesFound, "issues-exit-code",
-		exitcodes.IssuesFound, wh("Exit code when issues were found"))
-	fs.StringVar(&rc.Go, "go", "", wh("Targeted Go version"))
-	fs.StringSliceVar(&rc.BuildTags, "build-tags", nil, wh("Build tags"))
-
-	fs.DurationVar(&rc.Timeout, "deadline", defaultTimeout, wh("Deadline for total work"))
-	if err := fs.MarkHidden("deadline"); err != nil {
-		panic(err)
-	}
-	fs.DurationVar(&rc.Timeout, "timeout", defaultTimeout, wh("Timeout for total work"))
-
-	fs.BoolVar(&rc.AnalyzeTests, "tests", true, wh("Analyze tests (*_test.go)"))
-	fs.BoolVar(&rc.PrintResourcesUsage, "print-resources-usage", false,
-		wh("Print avg and max memory usage of golangci-lint and total time"))
-	fs.StringVarP(&rc.Config, "config", "c", "", wh("Read config from file path `PATH`"))
-	fs.BoolVar(&rc.NoConfig, "no-config", false, wh("Don't read config"))
-	fs.StringSliceVar(&rc.SkipDirs, "skip-dirs", nil, wh("Regexps of directories to skip"))
-	fs.BoolVar(&rc.UseDefaultSkipDirs, "skip-dirs-use-default", true, getDefaultDirectoryExcludeHelp())
-	fs.StringSliceVar(&rc.SkipFiles, "skip-files", nil, wh("Regexps of files to skip"))
-
-	const allowParallelDesc = "Allow multiple parallel golangci-lint instances running. " +
-		"If false (default) - golangci-lint acquires file lock on start."
-	fs.BoolVar(&rc.AllowParallelRunners, "allow-parallel-runners", false, wh(allowParallelDesc))
-	const allowSerialDesc = "Allow multiple golangci-lint instances running, but serialize them	around a lock. " +
-		"If false (default) - golangci-lint exits with an error if it fails to acquire file lock on start."
-	fs.BoolVar(&rc.AllowSerialRunners, "allow-serial-runners", false, wh(allowSerialDesc))
-
-	// Linters settings config
-	lsc := &cfg.LintersSettings
-
-	// Hide all linters settings flags: they were initially visible,
-	// but when number of linters started to grow it became obvious that
-	// we can't fill 90% of flags by linters settings: common flags became hard to find.
-	// New linters settings should be done only through config file.
-	fs.BoolVar(&lsc.Errcheck.CheckTypeAssertions, "errcheck.check-type-assertions",
-		false, "Errcheck: check for ignored type assertion results")
-	hideFlag("errcheck.check-type-assertions")
-	fs.BoolVar(&lsc.Errcheck.CheckAssignToBlank, "errcheck.check-blank", false,
-		"Errcheck: check for errors assigned to blank identifier: _ = errFunc()")
-	hideFlag("errcheck.check-blank")
-	fs.StringVar(&lsc.Errcheck.Exclude, "errcheck.exclude", "",
-		"Path to a file containing a list of functions to exclude from checking")
-	hideFlag("errcheck.exclude")
-	fs.StringVar(&lsc.Errcheck.Ignore, "errcheck.ignore", "fmt:.*",
-		`Comma-separated list of pairs of the form pkg:regex. The regex is used to ignore names within pkg`)
-	hideFlag("errcheck.ignore")
-
-	fs.BoolVar(&lsc.Govet.CheckShadowing, "govet.check-shadowing", false,
-		"Govet: check for shadowed variables")
-	hideFlag("govet.check-shadowing")
-
-	fs.Float64Var(&lsc.Golint.MinConfidence, "golint.min-confidence", 0.8,
-		"Golint: minimum confidence of a problem to print it")
-	hideFlag("golint.min-confidence")
-
-	fs.BoolVar(&lsc.Gofmt.Simplify, "gofmt.simplify", true, "Gofmt: simplify code")
-	hideFlag("gofmt.simplify")
-
-	fs.IntVar(&lsc.Gocyclo.MinComplexity, "gocyclo.min-complexity",
-		30, "Minimal complexity of function to report it")
-	hideFlag("gocyclo.min-complexity")
-
-	fs.BoolVar(&lsc.Maligned.SuggestNewOrder, "maligned.suggest-new", false,
-		"Maligned: print suggested more optimal struct fields ordering")
-	hideFlag("maligned.suggest-new")
-
-	fs.IntVar(&lsc.Dupl.Threshold, "dupl.threshold",
-		150, "Dupl: Minimal threshold to detect copy-paste")
-	hideFlag("dupl.threshold")
-
-	fs.BoolVar(&lsc.Goconst.MatchWithConstants, "goconst.match-constant",
-		true, "Goconst: look for existing constants matching the values")
-	hideFlag("goconst.match-constant")
-	fs.IntVar(&lsc.Goconst.MinStringLen, "goconst.min-len",
-		3, "Goconst: minimum constant string length")
-	hideFlag("goconst.min-len")
-	fs.IntVar(&lsc.Goconst.MinOccurrencesCount, "goconst.min-occurrences",
-		3, "Goconst: minimum occurrences of constant string count to trigger issue")
-	hideFlag("goconst.min-occurrences")
-	fs.BoolVar(&lsc.Goconst.ParseNumbers, "goconst.numbers",
-		false, "Goconst: search also for duplicated numbers")
-	hideFlag("goconst.numbers")
-	fs.IntVar(&lsc.Goconst.NumberMin, "goconst.min",
-		3, "minimum value, only works with goconst.numbers")
-	hideFlag("goconst.min")
-	fs.IntVar(&lsc.Goconst.NumberMax, "goconst.max",
-		3, "maximum value, only works with goconst.numbers")
-	hideFlag("goconst.max")
-	fs.BoolVar(&lsc.Goconst.IgnoreCalls, "goconst.ignore-calls",
-		true, "Goconst: ignore when constant is not used as function argument")
-	hideFlag("goconst.ignore-calls")
-
-	fs.IntVar(&lsc.Lll.TabWidth, "lll.tab-width", 1,
-		"Lll: tab width in spaces")
-	hideFlag("lll.tab-width")
-
-	// Linters config
-	lc := &cfg.Linters
-	fs.StringSliceVarP(&lc.Enable, "enable", "E", nil, wh("Enable specific linter"))
-	fs.StringSliceVarP(&lc.Disable, "disable", "D", nil, wh("Disable specific linter"))
-	fs.BoolVar(&lc.EnableAll, "enable-all", false, wh("Enable all linters"))
-
-	fs.BoolVar(&lc.DisableAll, "disable-all", false, wh("Disable all linters"))
-	fs.StringSliceVarP(&lc.Presets, "presets", "p", nil,
-		wh(fmt.Sprintf("Enable presets (%s) of linters. Run 'golangci-lint linters' to see "+
-			"them. This option implies option --disable-all", strings.Join(m.AllPresets(), "|"))))
-	fs.BoolVar(&lc.Fast, "fast", false, wh("Run only fast linters from enabled linters set (first run won't be fast)"))
-
-	// Issues config
-	ic := &cfg.Issues
-	fs.StringSliceVarP(&ic.ExcludePatterns, "exclude", "e", nil, wh("Exclude issue by regexp"))
-	fs.BoolVar(&ic.UseDefaultExcludes, "exclude-use-default", true, getDefaultIssueExcludeHelp())
-	fs.BoolVar(&ic.ExcludeCaseSensitive, "exclude-case-sensitive", false, wh("If set to true exclude "+
-		"and exclude rules regular expressions are case sensitive"))
-
-	fs.IntVar(&ic.MaxIssuesPerLinter, "max-issues-per-linter", 50,
-		wh("Maximum issues count per one linter. Set to 0 to disable"))
-	fs.IntVar(&ic.MaxSameIssues, "max-same-issues", 3,
-		wh("Maximum count of issues with the same text. Set to 0 to disable"))
-
-	fs.BoolVarP(&ic.Diff, "new", "n", false,
-		wh("Show only new issues: if there are unstaged changes or untracked files, only those changes "+
-			"are analyzed, else only changes in HEAD~ are analyzed.\nIt's a super-useful option for integration "+
-			"of golangci-lint into existing large codebase.\nIt's not practical to fix all existing issues at "+
-			"the moment of integration: much better to not allow issues in new code.\nFor CI setups, prefer "+
-			"--new-from-rev=HEAD~, as --new can skip linting the current patch if any scripts generate "+
-			"unstaged files before golangci-lint runs."))
-	fs.StringVar(&ic.DiffFromRevision, "new-from-rev", "",
-		wh("Show only new issues created after git revision `REV`"))
-	fs.StringVar(&ic.DiffPatchFilePath, "new-from-patch", "",
-		wh("Show only new issues created in git patch with file path `PATH`"))
-	fs.BoolVar(&ic.WholeFiles, "whole-files", false,
-		wh("Show issues in any part of update files (requires new-from-rev or new-from-patch)"))
-	fs.BoolVar(&ic.NeedFix, "fix", false, "Fix found issues (if it's supported by the linter)")
-}
+	runCmd.SetOut(logutils.StdOut) // use custom output to properly color it in Windows terminals
+	runCmd.SetErr(logutils.StdErr)
 
-func (e *Executor) initRunConfiguration(cmd *cobra.Command) {
-	fs := cmd.Flags()
+	fs := runCmd.Flags()
 	fs.SortFlags = false // sort them as they are defined here
-	initFlagSet(fs, e.cfg, e.DBManager, true)
-}
 
-func (e *Executor) getConfigForCommandLine() (*config.Config, error) {
-	// We use another pflag.FlagSet here to not set `changed` flag
-	// on cmd.Flags() options. Otherwise, string slice options will be duplicated.
-	fs := pflag.NewFlagSet("config flag set", pflag.ContinueOnError)
-
-	var cfg config.Config
-	// Don't do `fs.AddFlagSet(cmd.Flags())` because it shares flags representations:
-	// `changed` variable inside string slice vars will be shared.
-	// Use another config variable here, not e.cfg, to not
-	// affect main parsing by this parsing of only config option.
-	initFlagSet(fs, &cfg, e.DBManager, false)
-	initVersionFlagSet(fs, &cfg)
-
-	// Parse max options, even force version option: don't want
-	// to get access to Executor here: it's error-prone to use
-	// cfg vs e.cfg.
-	initRootFlagSet(fs, &cfg, true)
-
-	fs.Usage = func() {} // otherwise, help text will be printed twice
-	if err := fs.Parse(os.Args); err != nil {
-		if err == pflag.ErrHelp {
-			return nil, err
-		}
+	// Only for testing purposes.
+	// Don't add other flags here.
+	fs.BoolVar(&c.cfg.InternalCmdTest, "internal-cmd-test", false,
+		color.GreenString("Option is used only for testing golangci-lint command, don't use it"))
+	_ = fs.MarkHidden("internal-cmd-test")
 
-		return nil, fmt.Errorf("can't parse args: %s", err)
-	}
+	setupConfigFileFlagSet(fs, &c.opts.LoaderOptions)
+
+	setupLintersFlagSet(c.viper, fs)
+	setupRunFlagSet(c.viper, fs)
+	setupOutputFlagSet(c.viper, fs)
+	setupIssuesFlagSet(c.viper, fs)
+
+	setupRunPersistentFlags(runCmd.PersistentFlags(), &c.opts)
+
+	c.cmd = runCmd
 
-	return &cfg, nil
+	return c
 }
 
-func (e *Executor) initRun() {
-	e.runCmd = &cobra.Command{
-		Use:   "run",
-		Short: "Run the linters",
-		Run:   e.executeRun,
-		PreRunE: func(_ *cobra.Command, _ []string) error {
-			if ok := e.acquireFileLock(); !ok {
-				return errors.New("parallel golangci-lint is running")
-			}
-			return nil
-		},
-		PostRun: func(_ *cobra.Command, _ []string) {
-			e.releaseFileLock()
-		},
+func (c *runCommand) persistentPreRunE(cmd *cobra.Command, args []string) error {
+	if err := c.startTracing(); err != nil {
+		return err
 	}
-	e.rootCmd.AddCommand(e.runCmd)
 
-	e.runCmd.SetOut(logutils.StdOut) // use custom output to properly color it in Windows terminals
-	e.runCmd.SetErr(logutils.StdErr)
+	loader := config.NewLoader(c.log.Child(logutils.DebugKeyConfigReader), c.viper, cmd.Flags(), c.opts.LoaderOptions, c.cfg, args)
 
-	e.initRunConfiguration(e.runCmd)
-}
+	err := loader.Load(config.LoadOptions{CheckDeprecation: true, Validation: true})
+	if err != nil {
+		return fmt.Errorf("can't load config: %w", err)
+	}
 
-func fixSlicesFlags(fs *pflag.FlagSet) {
-	// It's a dirty hack to set flag.Changed to true for every string slice flag.
-	// It's necessary to merge config and command-line slices: otherwise command-line
-	// flags will always overwrite ones from the config.
-	fs.VisitAll(func(f *pflag.Flag) {
-		if f.Value.Type() != "stringSlice" {
-			return
-		}
+	if c.cfg.Run.Concurrency == 0 {
+		backup := runtime.GOMAXPROCS(0)
 
-		s, err := fs.GetStringSlice(f.Name)
+		// Automatically set GOMAXPROCS to match Linux container CPU quota.
+		_, err := maxprocs.Set(maxprocs.Logger(c.log.Infof))
 		if err != nil {
-			return
+			runtime.GOMAXPROCS(backup)
 		}
+	} else {
+		runtime.GOMAXPROCS(c.cfg.Run.Concurrency)
+	}
 
-		if s == nil { // assume that every string slice flag has nil as the default
-			return
-		}
+	return c.startTracing()
+}
 
-		var safe []string
-		for _, v := range s {
-			// add quotes to escape comma because spf13/pflag use a CSV parser:
-			// https://github.com/spf13/pflag/blob/85dd5c8bc61cfa382fecd072378089d4e856579d/string_slice.go#L43
-			safe = append(safe, `"`+v+`"`)
-		}
+func (c *runCommand) persistentPostRunE(_ *cobra.Command, _ []string) error {
+	if err := c.stopTracing(); err != nil {
+		return err
+	}
 
-		// calling Set sets Changed to true: next Set calls will append, not overwrite
-		_ = f.Value.Set(strings.Join(safe, ","))
-	})
+	os.Exit(c.exitCode)
+
+	return nil
 }
 
-// runAnalysis executes the linters that have been enabled in the configuration.
-func (e *Executor) runAnalysis(ctx context.Context, args []string) ([]result.Issue, error) {
-	e.cfg.Run.Args = args
+func (c *runCommand) preRunE(_ *cobra.Command, args []string) error {
+	dbManager, err := lintersdb.NewManager(c.log.Child(logutils.DebugKeyLintersDB), c.cfg,
+		lintersdb.NewLinterBuilder(), lintersdb.NewPluginModuleBuilder(c.log), lintersdb.NewPluginGoBuilder(c.log))
+	if err != nil {
+		return err
+	}
+
+	c.dbManager = dbManager
 
-	lintersToRun, err := e.EnabledLintersSet.GetOptimizedLinters()
+	printer, err := printers.NewPrinter(c.log, &c.cfg.Output, c.reportData)
 	if err != nil {
-		return nil, err
+		return err
 	}
 
-	enabledLintersMap, err := e.EnabledLintersSet.GetEnabledLintersMap()
+	c.printer = printer
+
+	c.goenv = goutil.NewEnv(c.log.Child(logutils.DebugKeyGoEnv))
+
+	c.fileCache = fsutils.NewFileCache()
+	c.lineCache = fsutils.NewLineCache(c.fileCache)
+
+	sw := timeutils.NewStopwatch("pkgcache", c.log.Child(logutils.DebugKeyStopwatch))
+
+	pkgCache, err := pkgcache.NewCache(sw, c.log.Child(logutils.DebugKeyPkgCache))
 	if err != nil {
-		return nil, err
+		return fmt.Errorf("failed to build packages cache: %w", err)
 	}
 
-	for _, lc := range e.DBManager.GetAllSupportedLinterConfigs() {
-		isEnabled := enabledLintersMap[lc.Name()] != nil
-		e.reportData.AddLinter(lc.Name(), isEnabled, lc.EnabledByDefault)
+	guard := load.NewGuard()
+
+	pkgLoader := lint.NewPackageLoader(c.log.Child(logutils.DebugKeyLoader), c.cfg, args, c.goenv, guard)
+
+	c.contextBuilder = lint.NewContextBuilder(c.cfg, pkgLoader, c.fileCache, pkgCache, guard)
+
+	if err = initHashSalt(c.buildInfo.Version, c.cfg); err != nil {
+		return fmt.Errorf("failed to init hash salt: %w", err)
 	}
 
-	lintCtx, err := e.contextLoader.Load(ctx, lintersToRun)
-	if err != nil {
-		return nil, fmt.Errorf("context loading failed: %w", err)
+	if ok := c.acquireFileLock(); !ok {
+		return errors.New("parallel golangci-lint is running")
 	}
-	lintCtx.Log = e.log.Child(logutils.DebugKeyLintersContext)
 
-	runner, err := lint.NewRunner(e.cfg, e.log.Child(logutils.DebugKeyRunner),
-		e.goenv, e.EnabledLintersSet, e.lineCache, e.fileCache, e.DBManager, lintCtx.Packages)
-	if err != nil {
-		return nil, err
+	return nil
+}
+
+func (c *runCommand) postRun(_ *cobra.Command, _ []string) {
+	c.releaseFileLock()
+}
+
+func (c *runCommand) execute(_ *cobra.Command, args []string) {
+	needTrackResources := logutils.IsVerbose() || c.opts.PrintResourcesUsage
+
+	trackResourcesEndCh := make(chan struct{})
+	defer func() { // XXX: this defer must be before ctx.cancel defer
+		if needTrackResources { // wait until resource tracking has finished so the output prints properly
+			<-trackResourcesEndCh
+		}
+	}()
+
+	ctx, cancel := context.WithTimeout(context.Background(), c.cfg.Run.Timeout)
+	defer cancel()
+
+	if needTrackResources {
+		go watchResources(ctx, trackResourcesEndCh, c.log, c.debugf)
 	}
 
-	return runner.Run(ctx, lintersToRun, lintCtx)
+	if err := c.runAndPrint(ctx, args); err != nil {
+		c.log.Errorf("Running error: %s", err)
+		if c.exitCode == exitcodes.Success {
+			var exitErr *exitcodes.ExitError
+			if errors.As(err, &exitErr) {
+				c.exitCode = exitErr.Code
+			} else {
+				c.exitCode = exitcodes.Failure
+			}
+		}
+	}
+
+	c.setupExitCode(ctx)
 }
 
-func (e *Executor) setOutputToDevNull() (savedStdout, savedStderr *os.File) {
-	savedStdout, savedStderr = os.Stdout, os.Stderr
-	devNull, err := os.Open(os.DevNull)
-	if err != nil {
-		e.log.Warnf("Can't open null device %q: %s", os.DevNull, err)
-		return
+func (c *runCommand) startTracing() error {
+	if c.opts.CPUProfilePath != "" {
+		f, err := os.Create(c.opts.CPUProfilePath)
+		if err != nil {
+			return fmt.Errorf("can't create file %s: %w", c.opts.CPUProfilePath, err)
+		}
+		if err := pprof.StartCPUProfile(f); err != nil {
+			return fmt.Errorf("can't start CPU profiling: %w", err)
+		}
 	}
 
-	os.Stdout, os.Stderr = devNull, devNull
-	return
+	if c.opts.MemProfilePath != "" {
+		if rate := os.Getenv(envMemProfileRate); rate != "" {
+			runtime.MemProfileRate, _ = strconv.Atoi(rate)
+		}
+	}
+
+	if c.opts.TracePath != "" {
+		f, err := os.Create(c.opts.TracePath)
+		if err != nil {
+			return fmt.Errorf("can't create file %s: %w", c.opts.TracePath, err)
+		}
+		if err = trace.Start(f); err != nil {
+			return fmt.Errorf("can't start tracing: %w", err)
+		}
+	}
+
+	return nil
 }
 
-func (e *Executor) setExitCodeIfIssuesFound(issues []result.Issue) {
-	if len(issues) != 0 {
-		e.exitCode = e.cfg.Run.ExitCodeIfIssuesFound
+func (c *runCommand) stopTracing() error {
+	if c.opts.CPUProfilePath != "" {
+		pprof.StopCPUProfile()
+	}
+
+	if c.opts.MemProfilePath != "" {
+		f, err := os.Create(c.opts.MemProfilePath)
+		if err != nil {
+			return fmt.Errorf("can't create file %s: %w", c.opts.MemProfilePath, err)
+		}
+
+		var ms runtime.MemStats
+		runtime.ReadMemStats(&ms)
+		printMemStats(&ms, c.log)
+
+		if err := pprof.WriteHeapProfile(f); err != nil {
+			return fmt.Errorf("can't write heap profile: %w", err)
+		}
+		_ = f.Close()
 	}
+
+	if c.opts.TracePath != "" {
+		trace.Stop()
+	}
+
+	return nil
 }
 
-func (e *Executor) runAndPrint(ctx context.Context, args []string) error {
-	if err := e.goenv.Discover(ctx); err != nil {
-		e.log.Warnf("Failed to discover go env: %s", err)
+func (c *runCommand) runAndPrint(ctx context.Context, args []string) error {
+	if err := c.goenv.Discover(ctx); err != nil {
+		c.log.Warnf("Failed to discover go env: %s", err)
 	}
 
 	if !logutils.HaveDebugTag(logutils.DebugKeyLintersOutput) {
 		// Don't allow linters and loader to print anything
 		log.SetOutput(io.Discard)
-		savedStdout, savedStderr := e.setOutputToDevNull()
+		savedStdout, savedStderr := c.setOutputToDevNull()
 		defer func() {
 			os.Stdout, os.Stderr = savedStdout, savedStderr
 		}()
 	}
 
-	issues, err := e.runAnalysis(ctx, args)
+	enabledLintersMap, err := c.dbManager.GetEnabledLintersMap()
 	if err != nil {
-		return err // XXX: don't loose type
+		return err
 	}
 
-	formats := strings.Split(e.cfg.Output.Format, ",")
-	for _, format := range formats {
-		out := strings.SplitN(format, ":", 2)
-		if len(out) < 2 {
-			out = append(out, "")
-		}
+	c.printDeprecatedLinterMessages(enabledLintersMap)
 
-		err := e.printReports(issues, out[1], out[0])
-		if err != nil {
-			return err
-		}
+	issues, err := c.runAnalysis(ctx, args)
+	if err != nil {
+		return err // XXX: don't lose type
+	}
+
+	// Fills linters information for the JSON printer.
+	for _, lc := range c.dbManager.GetAllSupportedLinterConfigs() {
+		isEnabled := enabledLintersMap[lc.Name()] != nil
+		c.reportData.AddLinter(lc.Name(), isEnabled, lc.EnabledByDefault)
 	}
 
-	e.setExitCodeIfIssuesFound(issues)
+	err = c.printer.Print(issues)
+	if err != nil {
+		return err
+	}
 
-	e.fileCache.PrintStats(e.log)
+	c.printStats(issues)
+
+	c.setExitCodeIfIssuesFound(issues)
+
+	c.fileCache.PrintStats(c.log)
 
 	return nil
 }
 
-func (e *Executor) printReports(issues []result.Issue, path, format string) error {
-	w, shouldClose, err := e.createWriter(path)
+// runAnalysis executes the linters that have been enabled in the configuration.
+func (c *runCommand) runAnalysis(ctx context.Context, args []string) ([]result.Issue, error) {
+	lintersToRun, err := c.dbManager.GetOptimizedLinters()
 	if err != nil {
-		return fmt.Errorf("can't create output for %s: %w", path, err)
+		return nil, err
 	}
 
-	p, err := e.createPrinter(format, w)
+	lintCtx, err := c.contextBuilder.Build(ctx, c.log.Child(logutils.DebugKeyLintersContext), lintersToRun)
 	if err != nil {
-		if file, ok := w.(io.Closer); shouldClose && ok {
-			_ = file.Close()
-		}
-		return err
-	}
-
-	if err = p.Print(issues); err != nil {
-		if file, ok := w.(io.Closer); shouldClose && ok {
-			_ = file.Close()
-		}
-		return fmt.Errorf("can't print %d issues: %s", len(issues), err)
+		return nil, fmt.Errorf("context loading failed: %w", err)
 	}
 
-	if file, ok := w.(io.Closer); shouldClose && ok {
-		_ = file.Close()
+	runner, err := lint.NewRunner(c.log.Child(logutils.DebugKeyRunner), c.cfg, args,
+		c.goenv, c.lineCache, c.fileCache, c.dbManager, lintCtx)
+	if err != nil {
+		return nil, err
 	}
 
-	return nil
+	return runner.Run(ctx, lintersToRun)
 }
 
-func (e *Executor) createWriter(path string) (io.Writer, bool, error) {
-	if path == "" || path == "stdout" {
-		return logutils.StdOut, false, nil
-	}
-	if path == "stderr" {
-		return logutils.StdErr, false, nil
-	}
-	f, err := os.OpenFile(path, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, defaultFileMode)
+func (c *runCommand) setOutputToDevNull() (savedStdout, savedStderr *os.File) {
+	savedStdout, savedStderr = os.Stdout, os.Stderr
+	devNull, err := os.Open(os.DevNull)
 	if err != nil {
-		return nil, false, err
+		c.log.Warnf("Can't open null device %q: %s", os.DevNull, err)
+		return
 	}
-	return f, true, nil
+
+	os.Stdout, os.Stderr = devNull, devNull
+	return
 }
 
-func (e *Executor) createPrinter(format string, w io.Writer) (printers.Printer, error) {
-	var p printers.Printer
-	switch format {
-	case config.OutFormatJSON:
-		p = printers.NewJSON(&e.reportData, w)
-	case config.OutFormatColoredLineNumber, config.OutFormatLineNumber:
-		p = printers.NewText(e.cfg.Output.PrintIssuedLine,
-			format == config.OutFormatColoredLineNumber, e.cfg.Output.PrintLinterName,
-			e.log.Child(logutils.DebugKeyTextPrinter), w)
-	case config.OutFormatTab, config.OutFormatColoredTab:
-		p = printers.NewTab(e.cfg.Output.PrintLinterName,
-			format == config.OutFormatColoredTab,
-			e.log.Child(logutils.DebugKeyTabPrinter), w)
-	case config.OutFormatCheckstyle:
-		p = printers.NewCheckstyle(w)
-	case config.OutFormatCodeClimate:
-		p = printers.NewCodeClimate(w)
-	case config.OutFormatHTML:
-		p = printers.NewHTML(w)
-	case config.OutFormatJunitXML:
-		p = printers.NewJunitXML(w)
-	case config.OutFormatGithubActions:
-		p = printers.NewGithub(w)
-	case config.OutFormatTeamCity:
-		p = printers.NewTeamCity(w)
-	default:
-		return nil, fmt.Errorf("unknown output format %s", format)
-	}
-
-	return p, nil
+func (c *runCommand) setExitCodeIfIssuesFound(issues []result.Issue) {
+	if len(issues) != 0 {
+		c.exitCode = c.cfg.Run.ExitCodeIfIssuesFound
+	}
 }
 
-// executeRun executes the 'run' CLI command, which runs the linters.
-func (e *Executor) executeRun(_ *cobra.Command, args []string) {
-	needTrackResources := e.cfg.Run.IsVerbose || e.cfg.Run.PrintResourcesUsage
-	trackResourcesEndCh := make(chan struct{})
-	defer func() { // XXX: this defer must be before ctx.cancel defer
-		if needTrackResources { // wait until resource tracking finished to print properly
-			<-trackResourcesEndCh
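+// printDeprecatedLinterMessages warns about each enabled linter that is marked as deprecated,
+// mentioning its replacement when one exists; it is skipped during internal test runs.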
+func (c *runCommand) printDeprecatedLinterMessages(enabledLinters map[string]*linter.Config) {
+	if c.cfg.InternalCmdTest || os.Getenv(logutils.EnvTestRun) == "1" {
+		return
+	}
+
+	for name, lc := range enabledLinters {
+		if !lc.IsDeprecated() {
+			continue
 		}
-	}()
 
-	e.setTimeoutToDeadlineIfOnlyDeadlineIsSet()
-	ctx, cancel := context.WithTimeout(context.Background(), e.cfg.Run.Timeout)
-	defer cancel()
+		var extra string
+		if lc.Deprecation.Replacement != "" {
+			extra = fmt.Sprintf("Replaced by %s.", lc.Deprecation.Replacement)
+		}
 
-	if needTrackResources {
-		go watchResources(ctx, trackResourcesEndCh, e.log, e.debugf)
+		c.log.Warnf("The linter '%s' is deprecated (since %s) due to: %s %s", name, lc.Deprecation.Since, lc.Deprecation.Message, extra)
 	}
+}
 
-	if err := e.runAndPrint(ctx, args); err != nil {
-		e.log.Errorf("Running error: %s", err)
-		if e.exitCode == exitcodes.Success {
-			var exitErr *exitcodes.ExitError
-			if errors.As(err, &exitErr) {
-				e.exitCode = exitErr.Code
-			} else {
-				e.exitCode = exitcodes.Failure
-			}
-		}
+func (c *runCommand) printStats(issues []result.Issue) {
+	if !c.cfg.Output.ShowStats {
+		return
 	}
 
-	e.setupExitCode(ctx)
-}
+	if len(issues) == 0 {
+		c.cmd.Println("0 issues.")
+		return
+	}
 
-// to be removed when deadline is finally decommissioned
-func (e *Executor) setTimeoutToDeadlineIfOnlyDeadlineIsSet() {
-	deadlineValue := e.cfg.Run.Deadline
-	if deadlineValue != 0 && e.cfg.Run.Timeout == defaultTimeout {
-		e.cfg.Run.Timeout = deadlineValue
+	stats := map[string]int{}
+	for idx := range issues {
+		stats[issues[idx].FromLinter]++
+	}
+
+	c.cmd.Printf("%d issues:\n", len(issues))
+
+	keys := maps.Keys(stats)
+	sort.Strings(keys)
+
+	for _, key := range keys {
+		c.cmd.Printf("* %s: %d\n", key, stats[key])
 	}
 }
 
-func (e *Executor) setupExitCode(ctx context.Context) {
+func (c *runCommand) setupExitCode(ctx context.Context) {
 	if ctx.Err() != nil {
-		e.exitCode = exitcodes.Timeout
-		e.log.Errorf("Timeout exceeded: try increasing it by passing --timeout option")
+		c.exitCode = exitcodes.Timeout
+		c.log.Errorf("Timeout exceeded: try increasing it by passing --timeout option")
 		return
 	}
 
-	if e.exitCode != exitcodes.Success {
+	if c.exitCode != exitcodes.Success {
 		return
 	}
 
-	needFailOnWarnings := os.Getenv(lintersdb.EnvTestRun) == "1" || os.Getenv(envFailOnWarnings) == "1"
-	if needFailOnWarnings && len(e.reportData.Warnings) != 0 {
-		e.exitCode = exitcodes.WarningInTest
+	needFailOnWarnings := os.Getenv(logutils.EnvTestRun) == "1" || os.Getenv(envFailOnWarnings) == "1"
+	if needFailOnWarnings && len(c.reportData.Warnings) != 0 {
+		c.exitCode = exitcodes.WarningInTest
 		return
 	}
 
-	if e.reportData.Error != "" {
+	if c.reportData.Error != "" {
 		// it's the case when e.g. the typecheck linter couldn't parse the code and just logged an error
-		e.exitCode = exitcodes.ErrorWasLogged
+		c.exitCode = exitcodes.ErrorWasLogged
 		return
 	}
 }
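+// acquireFileLock takes a lock file in the OS temp dir so concurrent golangci-lint runs
+// don't race each other; unless serial runners are allowed, it gives up after a few seconds
+// instead of waiting indefinitely. It is a no-op when parallel runners are allowed.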
 
+func (c *runCommand) acquireFileLock() bool {
+	if c.cfg.Run.AllowParallelRunners {
+		c.debugf("Parallel runners are allowed, no locking")
+		return true
+	}
+
+	lockFile := filepath.Join(os.TempDir(), "golangci-lint.lock")
+	c.debugf("Locking on file %s...", lockFile)
+	f := flock.New(lockFile)
+	const retryDelay = time.Second
+
+	ctx := context.Background()
+	if !c.cfg.Run.AllowSerialRunners {
+		const totalTimeout = 5 * time.Second
+		var cancel context.CancelFunc
+		ctx, cancel = context.WithTimeout(ctx, totalTimeout)
+		defer cancel()
+	}
+	if ok, _ := f.TryLockContext(ctx, retryDelay); !ok {
+		return false
+	}
+
+	c.flock = f
+	return true
+}
+
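+// releaseFileLock unlocks and removes the lock file taken by acquireFileLock.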
+func (c *runCommand) releaseFileLock() {
+	if c.cfg.Run.AllowParallelRunners {
+		return
+	}
+
+	if err := c.flock.Unlock(); err != nil {
+		c.debugf("Failed to unlock on file: %s", err)
+	}
+	if err := os.Remove(c.flock.Path()); err != nil {
+		c.debugf("Failed to remove lock file: %s", err)
+	}
+}
+
 func watchResources(ctx context.Context, done chan struct{}, logger logutils.Log, debugf logutils.DebugFunc) {
 	startedAt := time.Now()
 	debugf("Started tracking time")
@@ -609,3 +570,120 @@ func watchResources(ctx context.Context, done chan struct{}, logger logutils.Log
 	logger.Infof("Execution took %s", time.Since(startedAt))
 	close(done)
 }
+
+func setupConfigFileFlagSet(fs *pflag.FlagSet, cfg *config.LoaderOptions) {
+	fs.StringVarP(&cfg.Config, "config", "c", "", color.GreenString("Read config from file path `PATH`"))
+	fs.BoolVar(&cfg.NoConfig, "no-config", false, color.GreenString("Don't read config file"))
+}
+
+func setupRunPersistentFlags(fs *pflag.FlagSet, opts *runOptions) {
+	fs.BoolVar(&opts.PrintResourcesUsage, "print-resources-usage", false,
+		color.GreenString("Print avg and max memory usage of golangci-lint and total time"))
+
+	fs.StringVar(&opts.CPUProfilePath, "cpu-profile-path", "", color.GreenString("Path to CPU profile output file"))
+	fs.StringVar(&opts.MemProfilePath, "mem-profile-path", "", color.GreenString("Path to memory profile output file"))
+	fs.StringVar(&opts.TracePath, "trace-path", "", color.GreenString("Path to trace output file"))
+}
+
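+// getDefaultConcurrency returns a fixed concurrency when generating help documentation
+// and the number of available CPUs otherwise.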
+func getDefaultConcurrency() int {
+	if os.Getenv(envHelpRun) == "1" {
+		// Make stable concurrency for generating help documentation.
+		const prettyConcurrency = 8
+		return prettyConcurrency
+	}
+
+	return runtime.NumCPU()
+}
+
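+// printMemStats logs a snapshot of the Go runtime memory statistics in human-readable units.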
+func printMemStats(ms *runtime.MemStats, logger logutils.Log) {
+	logger.Infof("Mem stats: alloc=%s total_alloc=%s sys=%s "+
+		"heap_alloc=%s heap_sys=%s heap_idle=%s heap_released=%s heap_in_use=%s "+
+		"stack_in_use=%s stack_sys=%s "+
+		"mspan_sys=%s mcache_sys=%s buck_hash_sys=%s gc_sys=%s other_sys=%s "+
+		"mallocs_n=%d frees_n=%d heap_objects_n=%d gc_cpu_fraction=%.2f",
+		formatMemory(ms.Alloc), formatMemory(ms.TotalAlloc), formatMemory(ms.Sys),
+		formatMemory(ms.HeapAlloc), formatMemory(ms.HeapSys),
+		formatMemory(ms.HeapIdle), formatMemory(ms.HeapReleased), formatMemory(ms.HeapInuse),
+		formatMemory(ms.StackInuse), formatMemory(ms.StackSys),
+		formatMemory(ms.MSpanSys), formatMemory(ms.MCacheSys), formatMemory(ms.BuckHashSys),
+		formatMemory(ms.GCSys), formatMemory(ms.OtherSys),
+		ms.Mallocs, ms.Frees, ms.HeapObjects, ms.GCCPUFraction)
+}
+
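+// formatMemory renders a byte count as whole b, kb, or mb units (integer division, no rounding).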
+func formatMemory(memBytes uint64) string {
+	const Kb = 1024
+	const Mb = Kb * 1024
+
+	if memBytes < Kb {
+		return fmt.Sprintf("%db", memBytes)
+	}
+	if memBytes < Mb {
+		return fmt.Sprintf("%dkb", memBytes/Kb)
+	}
+	return fmt.Sprintf("%dmb", memBytes/Mb)
+}
+
+// Related to cache.
+
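+// initHashSalt seeds the analysis cache salt from the binary and the relevant parts of the
+// configuration, so cached results are invalidated when either changes.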
+func initHashSalt(version string, cfg *config.Config) error {
+	binSalt, err := computeBinarySalt(version)
+	if err != nil {
+		return fmt.Errorf("failed to calculate binary salt: %w", err)
+	}
+
+	configSalt, err := computeConfigSalt(cfg)
+	if err != nil {
+		return fmt.Errorf("failed to calculate config salt: %w", err)
+	}
+
+	b := bytes.NewBuffer(binSalt)
+	b.Write(configSalt)
+	cache.SetSalt(b.Bytes())
+	return nil
+}
+
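+// computeBinarySalt uses the release version as the salt and falls back to a SHA-256
+// of the running executable for development builds.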
+func computeBinarySalt(version string) ([]byte, error) {
+	if version != "" && version != "(devel)" {
+		return []byte(version), nil
+	}
+
+	if logutils.HaveDebugTag(logutils.DebugKeyBinSalt) {
+		return []byte("debug"), nil
+	}
+
+	p, err := os.Executable()
+	if err != nil {
+		return nil, err
+	}
+	f, err := os.Open(p)
+	if err != nil {
+		return nil, err
+	}
+	defer f.Close()
+	h := sha256.New()
+	if _, err := io.Copy(h, f); err != nil {
+		return nil, err
+	}
+	return h.Sum(nil), nil
+}
+
+// computeConfigSalt computes configuration hash.
+// We don't hash all config fields to reduce meaningless cache invalidations.
+// In particular, this has a huge impact on test speed.
+// Fields: `LintersSettings` and `Run.BuildTags`.
+func computeConfigSalt(cfg *config.Config) ([]byte, error) {
+	lintersSettingsBytes, err := yaml.Marshal(cfg.LintersSettings)
+	if err != nil {
+		return nil, fmt.Errorf("failed to yaml marshal config linter settings: %w", err)
+	}
+
+	configData := bytes.NewBufferString("linters-settings=")
+	configData.Write(lintersSettingsBytes)
+	configData.WriteString("\nbuild-tags=" + strings.Join(cfg.Run.BuildTags, ","))
+
+	h := sha256.New()
+	if _, err := h.Write(configData.Bytes()); err != nil {
+		return nil, err
+	}
+	return h.Sum(nil), nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go
index bb7732250f5d9a929b43c6cf202caddee8cb45ea..a03e46e221c89366b282dc8c4d1bac1aa1b69fd0 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go
@@ -8,76 +8,91 @@ import (
 	"runtime/debug"
 	"strings"
 
+	"github.com/fatih/color"
 	"github.com/spf13/cobra"
-	"github.com/spf13/pflag"
-
-	"github.com/golangci/golangci-lint/pkg/config"
 )
 
+type BuildInfo struct {
+	GoVersion string `json:"goVersion"`
+	Version   string `json:"version"`
+	Commit    string `json:"commit"`
+	Date      string `json:"date"`
+}
+
 type versionInfo struct {
 	Info      BuildInfo
 	BuildInfo *debug.BuildInfo
 }
 
-func (e *Executor) initVersionConfiguration(cmd *cobra.Command) {
-	fs := cmd.Flags()
-	fs.SortFlags = false // sort them as they are defined here
-	initVersionFlagSet(fs, e.cfg)
+type versionOptions struct {
+	Format string
+	Debug  bool
 }
 
-func initVersionFlagSet(fs *pflag.FlagSet, cfg *config.Config) {
-	// Version config
-	vc := &cfg.Version
-	fs.StringVar(&vc.Format, "format", "", wh("The version's format can be: 'short', 'json'"))
-	fs.BoolVar(&vc.Debug, "debug", false, wh("Add build information"))
+type versionCommand struct {
+	cmd  *cobra.Command
+	opts versionOptions
+
+	info BuildInfo
 }
 
-func (e *Executor) initVersion() {
+func newVersionCommand(info BuildInfo) *versionCommand {
+	c := &versionCommand{info: info}
+
 	versionCmd := &cobra.Command{
 		Use:               "version",
 		Short:             "Version",
 		Args:              cobra.NoArgs,
 		ValidArgsFunction: cobra.NoFileCompletions,
-		RunE: func(cmd *cobra.Command, _ []string) error {
-			if e.cfg.Version.Debug {
-				info, ok := debug.ReadBuildInfo()
-				if !ok {
-					return nil
-				}
-
-				switch strings.ToLower(e.cfg.Version.Format) {
-				case "json":
-					return json.NewEncoder(os.Stdout).Encode(versionInfo{
-						Info:      e.buildInfo,
-						BuildInfo: info,
-					})
-
-				default:
-					fmt.Println(info.String())
-					return printVersion(os.Stdout, e.buildInfo)
-				}
-			}
-
-			switch strings.ToLower(e.cfg.Version.Format) {
-			case "short":
-				fmt.Println(e.buildInfo.Version)
-				return nil
-
-			case "json":
-				return json.NewEncoder(os.Stdout).Encode(e.buildInfo)
-
-			default:
-				return printVersion(os.Stdout, e.buildInfo)
-			}
-		},
+		RunE:              c.execute,
 	}
 
-	e.rootCmd.AddCommand(versionCmd)
-	e.initVersionConfiguration(versionCmd)
+	fs := versionCmd.Flags()
+	fs.SortFlags = false // keep the flags in the order they are defined here
+
+	fs.StringVar(&c.opts.Format, "format", "", color.GreenString("The version's format can be: 'short', 'json'"))
+	fs.BoolVar(&c.opts.Debug, "debug", false, color.GreenString("Add build information"))
+
+	c.cmd = versionCmd
+
+	return c
+}
+
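+// execute prints the version in the requested format ('short', 'json', or the default text
+// form) and includes Go build information when --debug is set.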
+func (c *versionCommand) execute(_ *cobra.Command, _ []string) error {
+	if c.opts.Debug {
+		info, ok := debug.ReadBuildInfo()
+		if !ok {
+			return nil
+		}
+
+		switch strings.ToLower(c.opts.Format) {
+		case "json":
+			return json.NewEncoder(os.Stdout).Encode(versionInfo{
+				Info:      c.info,
+				BuildInfo: info,
+			})
+
+		default:
+			fmt.Println(info.String())
+			return printVersion(os.Stdout, c.info)
+		}
+	}
+
+	switch strings.ToLower(c.opts.Format) {
+	case "short":
+		fmt.Println(c.info.Version)
+		return nil
+
+	case "json":
+		return json.NewEncoder(os.Stdout).Encode(c.info)
+
+	default:
+		return printVersion(os.Stdout, c.info)
+	}
 }
 
-func printVersion(w io.Writer, buildInfo BuildInfo) error {
+func printVersion(w io.Writer, info BuildInfo) error {
 	_, err := fmt.Fprintf(w, "golangci-lint has version %s built with %s from %s on %s\n",
-		buildInfo.Version, buildInfo.GoVersion, buildInfo.Commit, buildInfo.Date)
+		info.Version, info.GoVersion, info.Commit, info.Date)
 	return err
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go
index 7941f428f4f5fb53a67d7f5e5658060832105627..59f6eef0da91a4923311b7e18c7d85da66462682 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go
@@ -2,26 +2,27 @@ package config
 
 import (
 	"os"
+	"regexp"
 	"strings"
 
 	hcversion "github.com/hashicorp/go-version"
 	"github.com/ldez/gomoddirectives"
 )
 
-// Config encapsulates the config data specified in the golangci yaml config file.
+// Config encapsulates the config data specified in the golangci-lint yaml config file.
 type Config struct {
-	cfgDir string // The directory containing the golangci config file.
-	Run    Run
+	cfgDir string // The directory containing the golangci-lint config file.
 
-	Output Output
+	Run Run `mapstructure:"run"`
+
+	Output Output `mapstructure:"output"`
 
 	LintersSettings LintersSettings `mapstructure:"linters-settings"`
-	Linters         Linters
-	Issues          Issues
-	Severity        Severity
-	Version         Version
+	Linters         Linters         `mapstructure:"linters"`
+	Issues          Issues          `mapstructure:"issues"`
+	Severity        Severity        `mapstructure:"severity"`
 
-	InternalCmdTest bool `mapstructure:"internal-cmd-test"` // Option is used only for testing golangci-lint command, don't use it
+	InternalCmdTest bool // Option is used only for testing golangci-lint command, don't use it
 	InternalTest    bool // Option is used only for testing golangci-lint code, don't use it
 }
 
@@ -30,6 +31,25 @@ func (c *Config) GetConfigDir() string {
 	return c.cfgDir
 }
 
+func (c *Config) Validate() error {
+	validators := []func() error{
+		c.Run.Validate,
+		c.Output.Validate,
+		c.LintersSettings.Validate,
+		c.Linters.Validate,
+		c.Issues.Validate,
+		c.Severity.Validate,
+	}
+
+	for _, v := range validators {
+		if err := v(); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
 func NewDefault() *Config {
 	return &Config{
 		LintersSettings: defaultLintersSettings,
@@ -41,21 +61,21 @@ type Version struct {
 	Debug  bool   `mapstructure:"debug"`
 }
 
-func IsGreaterThanOrEqualGo121(v string) bool {
-	v1, err := hcversion.NewVersion(strings.TrimPrefix(v, "go"))
+func IsGoGreaterThanOrEqual(current, limit string) bool {
+	v1, err := hcversion.NewVersion(strings.TrimPrefix(current, "go"))
 	if err != nil {
 		return false
 	}
 
-	limit, err := hcversion.NewVersion("1.21")
+	l, err := hcversion.NewVersion(limit)
 	if err != nil {
 		return false
 	}
 
-	return v1.GreaterThanOrEqual(limit)
+	return v1.GreaterThanOrEqual(l)
 }
 
-func DetectGoVersion() string {
+func detectGoVersion() string {
 	file, _ := gomoddirectives.GetModuleFile()
 
 	if file != nil && file.Go != nil && file.Go.Version != "" {
@@ -69,3 +89,22 @@ func DetectGoVersion() string {
 
 	return "1.17"
 }
+
+// trimGoVersion trims the Go version to keep only the major and minor parts (M.m).
+// Since Go 1.21 the version inside go.mod can be a patched version (e.g. 1.21.0).
+// The version can also include extra information that we want to remove (e.g. 1.21alpha1).
+// https://go.dev/doc/toolchain#versions
+// This is a problem for staticcheck and gocritic.
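+// For example, both "1.21.0" and "1.21rc2" are trimmed to "1.21".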
+func trimGoVersion(v string) string {
+	if v == "" {
+		return ""
+	}
+
+	exp := regexp.MustCompile(`(\d\.\d+)(?:\.\d+|[a-z]+\d)`)
+
+	if exp.MatchString(v) {
+		return exp.FindStringSubmatch(v)[1]
+	}
+
+	return v
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go b/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go
index 417b28bdbf058789010571e488e1266bc3c36cba..6d48694948d3d522d4fcf0573e015fdf7593f861 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go
@@ -1,6 +1,7 @@
 package config
 
 import (
+	"errors"
 	"fmt"
 	"regexp"
 )
@@ -13,93 +14,92 @@ var DefaultExcludePatterns = []ExcludePattern{
 		Pattern: "Error return value of .((os\\.)?std(out|err)\\..*|.*Close" +
 			"|.*Flush|os\\.Remove(All)?|.*print(f|ln)?|os\\.(Un)?Setenv). is not checked",
 		Linter: "errcheck",
-		Why:    "Almost all programs ignore errors on these functions and in most cases it's ok",
+		Why:    "Almost all programs ignore errors on these functions and in most cases it's ok.",
 	},
 	{
-		ID: "EXC0002",
+		ID: "EXC0002", // TODO(ldez): should be removed in v2
 		Pattern: "(comment on exported (method|function|type|const)|" +
 			"should have( a package)? comment|comment should be of the form)",
 		Linter: "golint",
-		Why:    "Annoying issue about not having a comment. The rare codebase has such comments",
+		Why:    "Annoying issue about not having a comment. The rare codebase has such comments.",
 	},
 	{
-		ID:      "EXC0003",
+		ID:      "EXC0003", // TODO(ldez): should be removed in v2
 		Pattern: "func name will be used as test\\.Test.* by other packages, and that stutters; consider calling this",
 		Linter:  "golint",
-		Why:     "False positive when tests are defined in package 'test'",
+		Why:     "False positive when tests are defined in package 'test'.",
 	},
 	{
 		ID:      "EXC0004",
 		Pattern: "(possible misuse of unsafe.Pointer|should have signature)",
 		Linter:  "govet",
-		Why:     "Common false positives",
+		Why:     "Common false positives.",
 	},
 	{
 		ID:      "EXC0005",
-		Pattern: "ineffective break statement. Did you mean to break out of the outer loop",
+		Pattern: "SA4011", // CheckScopedBreak
 		Linter:  "staticcheck",
-		Why:     "Developers tend to write in C-style with an explicit 'break' in a 'switch', so it's ok to ignore",
+		Why:     "Developers tend to write in C-style with an explicit 'break' in a 'switch', so it's ok to ignore.",
 	},
 	{
 		ID:      "EXC0006",
-		Pattern: "Use of unsafe calls should be audited",
+		Pattern: "G103: Use of unsafe calls should be audited",
 		Linter:  "gosec",
-		Why:     "Too many false-positives on 'unsafe' usage",
+		Why:     "Too many false-positives on 'unsafe' usage.",
 	},
 	{
 		ID:      "EXC0007",
-		Pattern: "Subprocess launch(ed with variable|ing should be audited)",
+		Pattern: "G204: Subprocess launched with variable",
 		Linter:  "gosec",
-		Why:     "Too many false-positives for parametrized shell calls",
+		Why:     "Too many false-positives for parametrized shell calls.",
 	},
 	{
 		ID:      "EXC0008",
-		Pattern: "(G104|G307)",
+		Pattern: "G104", // Errors unhandled.
 		Linter:  "gosec",
-		Why:     "Duplicated errcheck checks",
+		Why:     "Duplicated errcheck checks.",
 	},
 	{
 		ID:      "EXC0009",
-		Pattern: "(Expect directory permissions to be 0750 or less|Expect file permissions to be 0600 or less)",
+		Pattern: "(G301|G302|G307): Expect (directory permissions to be 0750|file permissions to be 0600) or less",
 		Linter:  "gosec",
-		Why:     "Too many issues in popular repos",
+		Why:     "Too many issues in popular repos.",
 	},
 	{
 		ID:      "EXC0010",
-		Pattern: "Potential file inclusion via variable",
+		Pattern: "G304: Potential file inclusion via variable",
 		Linter:  "gosec",
-		Why:     "False positive is triggered by 'src, err := ioutil.ReadFile(filename)'",
+		Why:     "False positive is triggered by 'src, err := ioutil.ReadFile(filename)'.",
 	},
 	{
-		ID: "EXC0011",
-		Pattern: "(comment on exported (method|function|type|const)|" +
-			"should have( a package)? comment|comment should be of the form)",
-		Linter: "stylecheck",
-		Why:    "Annoying issue about not having a comment. The rare codebase has such comments",
+		ID:      "EXC0011",
+		Pattern: "(ST1000|ST1020|ST1021|ST1022)", // CheckPackageComment, CheckExportedFunctionDocs, CheckExportedTypeDocs, CheckExportedVarDocs
+		Linter:  "stylecheck",
+		Why:     "Annoying issue about not having a comment. The rare codebase has such comments.",
 	},
 	{
 		ID:      "EXC0012",
-		Pattern: `exported (.+) should have comment( \(or a comment on this block\))? or be unexported`,
+		Pattern: `exported (.+) should have comment( \(or a comment on this block\))? or be unexported`, // rule: exported
 		Linter:  "revive",
-		Why:     "Annoying issue about not having a comment. The rare codebase has such comments",
+		Why:     "Annoying issue about not having a comment. The rare codebase has such comments.",
 	},
 	{
 		ID:      "EXC0013",
-		Pattern: `package comment should be of the form "(.+)...`,
+		Pattern: `package comment should be of the form "(.+)..."`, // rule: package-comments
 		Linter:  "revive",
-		Why:     "Annoying issue about not having a comment. The rare codebase has such comments",
+		Why:     "Annoying issue about not having a comment. The rare codebase has such comments.",
 	},
 	{
 		ID:      "EXC0014",
-		Pattern: `comment on exported (.+) should be of the form "(.+)..."`,
+		Pattern: `comment on exported (.+) should be of the form "(.+)..."`, // rule: exported
 		Linter:  "revive",
-		Why:     "Annoying issue about not having a comment. The rare codebase has such comments",
+		Why:     "Annoying issue about not having a comment. The rare codebase has such comments.",
 	},
 	{
 		ID:      "EXC0015",
-		Pattern: `should have a package comment`,
+		Pattern: `should have a package comment`, // rule: package-comments
 		Linter:  "revive",
-		Why:     "Annoying issue about not having a comment. The rare codebase has such comments",
+		Why:     "Annoying issue about not having a comment. The rare codebase has such comments.",
 	},
 }
 
@@ -108,8 +108,13 @@ type Issues struct {
 	ExcludeCaseSensitive   bool          `mapstructure:"exclude-case-sensitive"`
 	ExcludePatterns        []string      `mapstructure:"exclude"`
 	ExcludeRules           []ExcludeRule `mapstructure:"exclude-rules"`
+	ExcludeGeneratedStrict bool          `mapstructure:"exclude-generated-strict"`
 	UseDefaultExcludes     bool          `mapstructure:"exclude-use-default"`
 
+	ExcludeFiles          []string `mapstructure:"exclude-files"`
+	ExcludeDirs           []string `mapstructure:"exclude-dirs"`
+	UseDefaultExcludeDirs bool     `mapstructure:"exclude-dirs-use-default"`
+
 	MaxIssuesPerLinter int `mapstructure:"max-issues-per-linter"`
 	MaxSameIssues      int `mapstructure:"max-same-issues"`
 
@@ -121,6 +126,16 @@ type Issues struct {
 	NeedFix bool `mapstructure:"fix"`
 }
 
+func (i *Issues) Validate() error {
+	for i, rule := range i.ExcludeRules {
+		if err := rule.Validate(); err != nil {
+			return fmt.Errorf("error in exclude rule #%d: %w", i, err)
+		}
+	}
+
+	return nil
+}
+
 type ExcludeRule struct {
 	BaseRule `mapstructure:",squash"`
 }
@@ -139,36 +154,49 @@ type BaseRule struct {
 
 func (b *BaseRule) Validate(minConditionsCount int) error {
 	if err := validateOptionalRegex(b.Path); err != nil {
-		return fmt.Errorf("invalid path regex: %v", err)
+		return fmt.Errorf("invalid path regex: %w", err)
 	}
+
 	if err := validateOptionalRegex(b.PathExcept); err != nil {
-		return fmt.Errorf("invalid path-except regex: %v", err)
+		return fmt.Errorf("invalid path-except regex: %w", err)
 	}
+
 	if err := validateOptionalRegex(b.Text); err != nil {
-		return fmt.Errorf("invalid text regex: %v", err)
+		return fmt.Errorf("invalid text regex: %w", err)
 	}
+
 	if err := validateOptionalRegex(b.Source); err != nil {
-		return fmt.Errorf("invalid source regex: %v", err)
+		return fmt.Errorf("invalid source regex: %w", err)
+	}
+
+	if b.Path != "" && b.PathExcept != "" {
+		return errors.New("path and path-except should not be set at the same time")
 	}
+
 	nonBlank := 0
 	if len(b.Linters) > 0 {
 		nonBlank++
 	}
+
 	// Filtering by path counts as one condition, regardless of how it is done (one or both).
 	// Otherwise, a rule with Path and PathExcept set would pass validation
 	// whereas before the introduction of path-except that wouldn't have been precise enough.
 	if b.Path != "" || b.PathExcept != "" {
 		nonBlank++
 	}
+
 	if b.Text != "" {
 		nonBlank++
 	}
+
 	if b.Source != "" {
 		nonBlank++
 	}
+
 	if nonBlank < minConditionsCount {
 		return fmt.Errorf("at least %d of (text, source, path[-except], linters) should be set", minConditionsCount)
 	}
+
 	return nil
 }
 
@@ -176,6 +204,7 @@ func validateOptionalRegex(value string) error {
 	if value == "" {
 		return nil
 	}
+
 	_, err := regexp.Compile(value)
 	return err
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go
index ccbdc123a1b5cee133dc2e406dc9da1132c1ce3f..5c2628272c1fb6449f093dbbbc3ffff84a3ac5e9 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go
@@ -1,5 +1,10 @@
 package config
 
+import (
+	"errors"
+	"fmt"
+)
+
 type Linters struct {
 	Enable     []string
 	Disable    []string
@@ -9,3 +14,52 @@ type Linters struct {
 
 	Presets []string
 }
+
+func (l *Linters) Validate() error {
+	if err := l.validateAllDisableEnableOptions(); err != nil {
+		return err
+	}
+
+	if err := l.validateDisabledAndEnabledAtOneMoment(); err != nil {
+		return err
+	}
+
+	return nil
+}
+
+func (l *Linters) validateAllDisableEnableOptions() error {
+	if l.EnableAll && l.DisableAll {
+		return errors.New("--enable-all and --disable-all options must not be combined")
+	}
+
+	if l.DisableAll {
+		if len(l.Enable) == 0 && len(l.Presets) == 0 {
+			return errors.New("all linters were disabled, but no linter was enabled: must enable at least one")
+		}
+
+		if len(l.Disable) != 0 {
+			return errors.New("can't combine options --disable-all and --disable")
+		}
+	}
+
+	if l.EnableAll && len(l.Enable) != 0 && !l.Fast {
+		return errors.New("can't combine options --enable-all and --enable")
+	}
+
+	return nil
+}
+
+func (l *Linters) validateDisabledAndEnabledAtOneMoment() error {
+	enabledLintersSet := map[string]bool{}
+	for _, name := range l.Enable {
+		enabledLintersSet[name] = true
+	}
+
+	for _, name := range l.Disable {
+		if enabledLintersSet[name] {
+			return fmt.Errorf("linter %q can't be disabled and enabled at the same time", name)
+		}
+	}
+
+	return nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go
index 805d0ff4736baa6d923bf511f9a58cd35142414f..e204328421ce3e6d609dca96ff08420fce36f1d3 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go
@@ -3,6 +3,7 @@ package config
 import (
 	"encoding"
 	"errors"
+	"fmt"
 	"runtime"
 
 	"gopkg.in/yaml.v3"
@@ -21,6 +22,12 @@ var defaultLintersSettings = LintersSettings{
 	Dogsled: DogsledSettings{
 		MaxBlankIdentifiers: 2,
 	},
+	Dupl: DuplSettings{
+		Threshold: 150,
+	},
+	Errcheck: ErrcheckSettings{
+		Ignore: "fmt:.*",
+	},
 	ErrorLint: ErrorLintSettings{
 		Errorf:      true,
 		ErrorfMulti: true,
@@ -46,9 +53,20 @@ var defaultLintersSettings = LintersSettings{
 	Gocognit: GocognitSettings{
 		MinComplexity: 30,
 	},
+	Goconst: GoConstSettings{
+		MatchWithConstants:  true,
+		MinStringLen:        3,
+		MinOccurrencesCount: 3,
+		NumberMin:           3,
+		NumberMax:           3,
+		IgnoreCalls:         true,
+	},
 	Gocritic: GoCriticSettings{
 		SettingsPerCheck: map[string]GoCriticCheckSettings{},
 	},
+	Gocyclo: GoCycloSettings{
+		MinComplexity: 30,
+	},
 	Godox: GodoxSettings{
 		Keywords: []string{},
 	},
@@ -56,6 +74,9 @@ var defaultLintersSettings = LintersSettings{
 		Scope:  "declarations",
 		Period: true,
 	},
+	Gofmt: GoFmtSettings{
+		Simplify: true,
+	},
 	Gofumpt: GofumptSettings{
 		LangVersion: "",
 		ModulePath:  "",
@@ -70,9 +91,8 @@ var defaultLintersSettings = LintersSettings{
 		IgnoreTests:     true,
 		WatchForScripts: []string{"Han"},
 	},
-	Ifshort: IfshortSettings{
-		MaxDeclLines: 1,
-		MaxDeclChars: 30,
+	Inamedparam: INamedParamSettings{
+		SkipSingleParam: false,
 	},
 	InterfaceBloat: InterfaceBloatSettings{
 		Max: 10,
@@ -104,6 +124,13 @@ var defaultLintersSettings = LintersSettings{
 		RequireSpecific:    false,
 		AllowUnused:        false,
 	},
+	PerfSprint: PerfSprintSettings{
+		IntConversion: true,
+		ErrError:      false,
+		ErrorF:        true,
+		SprintF1:      true,
+		StrConcat:     true,
+	},
 	Prealloc: PreallocSettings{
 		Simple:     true,
 		RangeLoops: true,
@@ -113,6 +140,18 @@ var defaultLintersSettings = LintersSettings{
 		Ignore:    "",
 		Qualified: false,
 	},
+	SlogLint: SlogLintSettings{
+		NoMixedArgs:    true,
+		KVOnly:         false,
+		AttrOnly:       false,
+		NoGlobal:       "",
+		Context:        "",
+		ContextOnly:    false,
+		StaticMsg:      false,
+		NoRawKeys:      false,
+		KeyNamingCase:  "",
+		ArgsOnSepLines: false,
+	},
 	TagAlign: TagAlignSettings{
 		Align:  true,
 		Sort:   true,
@@ -126,6 +165,15 @@ var defaultLintersSettings = LintersSettings{
 	Unparam: UnparamSettings{
 		Algo: "cha",
 	},
+	Unused: UnusedSettings{
+		FieldWritesAreUses:     true,
+		PostStatementsAreReads: false,
+		ExportedIsUsed:         true,
+		ExportedFieldsAreUsed:  true,
+		ParametersAreUsed:      true,
+		LocalVariablesAreUsed:  true,
+		GeneratedIsUsed:        true,
+	},
 	UseStdlibVars: UseStdlibVarsSettings{
 		HTTPMethod:     true,
 		HTTPStatusCode: true,
@@ -152,86 +200,104 @@ var defaultLintersSettings = LintersSettings{
 }
 
 type LintersSettings struct {
-	Asasalint        AsasalintSettings
-	BiDiChk          BiDiChkSettings
-	Cyclop           Cyclop
-	Decorder         DecorderSettings
-	Depguard         DepGuardSettings
-	Dogsled          DogsledSettings
-	Dupl             DuplSettings
-	DupWord          DupWordSettings
-	Errcheck         ErrcheckSettings
-	ErrChkJSON       ErrChkJSONSettings
-	ErrorLint        ErrorLintSettings
-	Exhaustive       ExhaustiveSettings
-	ExhaustiveStruct ExhaustiveStructSettings
-	Exhaustruct      ExhaustructSettings
-	Forbidigo        ForbidigoSettings
-	Funlen           FunlenSettings
-	Gci              GciSettings
-	GinkgoLinter     GinkgoLinterSettings
-	Gocognit         GocognitSettings
-	Goconst          GoConstSettings
-	Gocritic         GoCriticSettings
-	Gocyclo          GoCycloSettings
-	Godot            GodotSettings
-	Godox            GodoxSettings
-	Gofmt            GoFmtSettings
-	Gofumpt          GofumptSettings
-	Goheader         GoHeaderSettings
-	Goimports        GoImportsSettings
-	Golint           GoLintSettings
-	Gomnd            GoMndSettings
-	GoModDirectives  GoModDirectivesSettings
-	Gomodguard       GoModGuardSettings
-	Gosec            GoSecSettings
-	Gosimple         StaticCheckSettings
-	Gosmopolitan     GosmopolitanSettings
-	Govet            GovetSettings
-	Grouper          GrouperSettings
-	Ifshort          IfshortSettings
-	ImportAs         ImportAsSettings
-	InterfaceBloat   InterfaceBloatSettings
-	Ireturn          IreturnSettings
-	Lll              LllSettings
-	LoggerCheck      LoggerCheckSettings
-	MaintIdx         MaintIdxSettings
-	Makezero         MakezeroSettings
-	Maligned         MalignedSettings
-	Misspell         MisspellSettings
-	MustTag          MustTagSettings
-	Nakedret         NakedretSettings
-	Nestif           NestifSettings
-	NilNil           NilNilSettings
-	Nlreturn         NlreturnSettings
-	NoLintLint       NoLintLintSettings
-	NoNamedReturns   NoNamedReturnsSettings
-	ParallelTest     ParallelTestSettings
-	Prealloc         PreallocSettings
-	Predeclared      PredeclaredSettings
-	Promlinter       PromlinterSettings
-	Reassign         ReassignSettings
-	Revive           ReviveSettings
-	RowsErrCheck     RowsErrCheckSettings
-	Staticcheck      StaticCheckSettings
-	Structcheck      StructCheckSettings
-	Stylecheck       StaticCheckSettings
-	TagAlign         TagAlignSettings
-	Tagliatelle      TagliatelleSettings
-	Tenv             TenvSettings
-	Testpackage      TestpackageSettings
-	Thelper          ThelperSettings
-	Unparam          UnparamSettings
-	UseStdlibVars    UseStdlibVarsSettings
-	Varcheck         VarCheckSettings
-	Varnamelen       VarnamelenSettings
-	Whitespace       WhitespaceSettings
-	Wrapcheck        WrapcheckSettings
-	WSL              WSLSettings
+	Asasalint       AsasalintSettings
+	BiDiChk         BiDiChkSettings
+	CopyLoopVar     CopyLoopVarSettings
+	Cyclop          Cyclop
+	Decorder        DecorderSettings
+	Depguard        DepGuardSettings
+	Dogsled         DogsledSettings
+	Dupl            DuplSettings
+	DupWord         DupWordSettings
+	Errcheck        ErrcheckSettings
+	ErrChkJSON      ErrChkJSONSettings
+	ErrorLint       ErrorLintSettings
+	Exhaustive      ExhaustiveSettings
+	Exhaustruct     ExhaustructSettings
+	Forbidigo       ForbidigoSettings
+	Funlen          FunlenSettings
+	Gci             GciSettings
+	GinkgoLinter    GinkgoLinterSettings
+	Gocognit        GocognitSettings
+	Goconst         GoConstSettings
+	Gocritic        GoCriticSettings
+	Gocyclo         GoCycloSettings
+	Godot           GodotSettings
+	Godox           GodoxSettings
+	Gofmt           GoFmtSettings
+	Gofumpt         GofumptSettings
+	Goheader        GoHeaderSettings
+	Goimports       GoImportsSettings
+	Gomnd           GoMndSettings
+	GoModDirectives GoModDirectivesSettings
+	Gomodguard      GoModGuardSettings
+	Gosec           GoSecSettings
+	Gosimple        StaticCheckSettings
+	Gosmopolitan    GosmopolitanSettings
+	Govet           GovetSettings
+	Grouper         GrouperSettings
+	ImportAs        ImportAsSettings
+	Inamedparam     INamedParamSettings
+	InterfaceBloat  InterfaceBloatSettings
+	Ireturn         IreturnSettings
+	Lll             LllSettings
+	LoggerCheck     LoggerCheckSettings
+	MaintIdx        MaintIdxSettings
+	Makezero        MakezeroSettings
+	Misspell        MisspellSettings
+	Mnd             MndSettings
+	MustTag         MustTagSettings
+	Nakedret        NakedretSettings
+	Nestif          NestifSettings
+	NilNil          NilNilSettings
+	Nlreturn        NlreturnSettings
+	NoLintLint      NoLintLintSettings
+	NoNamedReturns  NoNamedReturnsSettings
+	ParallelTest    ParallelTestSettings
+	PerfSprint      PerfSprintSettings
+	Prealloc        PreallocSettings
+	Predeclared     PredeclaredSettings
+	Promlinter      PromlinterSettings
+	ProtoGetter     ProtoGetterSettings
+	Reassign        ReassignSettings
+	Revive          ReviveSettings
+	RowsErrCheck    RowsErrCheckSettings
+	SlogLint        SlogLintSettings
+	Spancheck       SpancheckSettings
+	Staticcheck     StaticCheckSettings
+	Stylecheck      StaticCheckSettings
+	TagAlign        TagAlignSettings
+	Tagliatelle     TagliatelleSettings
+	Tenv            TenvSettings
+	Testifylint     TestifylintSettings
+	Testpackage     TestpackageSettings
+	Thelper         ThelperSettings
+	Unconvert       UnconvertSettings
+	Unparam         UnparamSettings
+	Unused          UnusedSettings
+	UseStdlibVars   UseStdlibVarsSettings
+	Varnamelen      VarnamelenSettings
+	Whitespace      WhitespaceSettings
+	Wrapcheck       WrapcheckSettings
+	WSL             WSLSettings
 
 	Custom map[string]CustomLinterSettings
 }
 
+func (s *LintersSettings) Validate() error {
+	if err := s.Govet.Validate(); err != nil {
+		return err
+	}
+
+	for name, settings := range s.Custom {
+		if err := settings.Validate(); err != nil {
+			return fmt.Errorf("custom linter %q: %w", name, err)
+		}
+	}
+
+	return nil
+}
+
 type AsasalintSettings struct {
 	Exclude              []string `mapstructure:"exclude"`
 	UseBuiltinExclusions bool     `mapstructure:"use-builtin-exclusions"`
@@ -250,6 +316,11 @@ type BiDiChkSettings struct {
 	PopDirectionalIsolate    bool `mapstructure:"pop-directional-isolate"`
 }
 
+type CopyLoopVarSettings struct {
+	IgnoreAlias bool `mapstructure:"ignore-alias"` // Deprecated: use CheckAlias
+	CheckAlias  bool `mapstructure:"check-alias"`
+}
+
 type Cyclop struct {
 	MaxComplexity  int     `mapstructure:"max-complexity"`
 	PackageAverage float64 `mapstructure:"package-average"`
@@ -261,9 +332,10 @@ type DepGuardSettings struct {
 }
 
 type DepGuardList struct {
-	Files []string       `mapstructure:"files"`
-	Allow []string       `mapstructure:"allow"`
-	Deny  []DepGuardDeny `mapstructure:"deny"`
+	ListMode string         `mapstructure:"list-mode"`
+	Files    []string       `mapstructure:"files"`
+	Allow    []string       `mapstructure:"allow"`
+	Deny     []DepGuardDeny `mapstructure:"deny"`
 }
 
 type DepGuardDeny struct {
@@ -292,17 +364,20 @@ type DuplSettings struct {
 
 type DupWordSettings struct {
 	Keywords []string `mapstructure:"keywords"`
+	Ignore   []string `mapstructure:"ignore"`
 }
 
 type ErrcheckSettings struct {
 	DisableDefaultExclusions bool     `mapstructure:"disable-default-exclusions"`
 	CheckTypeAssertions      bool     `mapstructure:"check-type-assertions"`
 	CheckAssignToBlank       bool     `mapstructure:"check-blank"`
-	Ignore                   string   `mapstructure:"ignore"`
 	ExcludeFunctions         []string `mapstructure:"exclude-functions"`
 
 	// Deprecated: use ExcludeFunctions instead
 	Exclude string `mapstructure:"exclude"`
+
+	// Deprecated: use ExcludeFunctions instead
+	Ignore string `mapstructure:"ignore"`
 }
 
 type ErrChkJSONSettings struct {
@@ -311,10 +386,17 @@ type ErrChkJSONSettings struct {
 }
 
 type ErrorLintSettings struct {
-	Errorf      bool `mapstructure:"errorf"`
-	ErrorfMulti bool `mapstructure:"errorf-multi"`
-	Asserts     bool `mapstructure:"asserts"`
-	Comparison  bool `mapstructure:"comparison"`
+	Errorf                bool                 `mapstructure:"errorf"`
+	ErrorfMulti           bool                 `mapstructure:"errorf-multi"`
+	Asserts               bool                 `mapstructure:"asserts"`
+	Comparison            bool                 `mapstructure:"comparison"`
+	AllowedErrors         []ErrorLintAllowPair `mapstructure:"allowed-errors"`
+	AllowedErrorsWildcard []ErrorLintAllowPair `mapstructure:"allowed-errors-wildcard"`
+}
+
+type ErrorLintAllowPair struct {
+	Err string `mapstructure:"err"`
+	Fun string `mapstructure:"fun"`
 }
 
 type ExhaustiveSettings struct {
@@ -326,10 +408,7 @@ type ExhaustiveSettings struct {
 	PackageScopeOnly           bool     `mapstructure:"package-scope-only"`
 	ExplicitExhaustiveMap      bool     `mapstructure:"explicit-exhaustive-map"`
 	ExplicitExhaustiveSwitch   bool     `mapstructure:"explicit-exhaustive-switch"`
-}
-
-type ExhaustiveStructSettings struct {
-	StructPatterns []string `mapstructure:"struct-patterns"`
+	DefaultCaseRequired        bool     `mapstructure:"default-case-required"`
 }
 
 type ExhaustructSettings struct {
@@ -386,20 +465,26 @@ type FunlenSettings struct {
 }
 
 type GciSettings struct {
-	LocalPrefixes string   `mapstructure:"local-prefixes"` // Deprecated
 	Sections      []string `mapstructure:"sections"`
 	SkipGenerated bool     `mapstructure:"skip-generated"`
 	CustomOrder   bool     `mapstructure:"custom-order"`
+
+	// Deprecated: use Sections instead.
+	LocalPrefixes string `mapstructure:"local-prefixes"`
 }
 
 type GinkgoLinterSettings struct {
-	SuppressLenAssertion     bool `mapstructure:"suppress-len-assertion"`
-	SuppressNilAssertion     bool `mapstructure:"suppress-nil-assertion"`
-	SuppressErrAssertion     bool `mapstructure:"suppress-err-assertion"`
-	SuppressCompareAssertion bool `mapstructure:"suppress-compare-assertion"`
-	SuppressAsyncAssertion   bool `mapstructure:"suppress-async-assertion"`
-	ForbidFocusContainer     bool `mapstructure:"forbid-focus-container"`
-	AllowHaveLenZero         bool `mapstructure:"allow-havelen-zero"`
+	SuppressLenAssertion       bool `mapstructure:"suppress-len-assertion"`
+	SuppressNilAssertion       bool `mapstructure:"suppress-nil-assertion"`
+	SuppressErrAssertion       bool `mapstructure:"suppress-err-assertion"`
+	SuppressCompareAssertion   bool `mapstructure:"suppress-compare-assertion"`
+	SuppressAsyncAssertion     bool `mapstructure:"suppress-async-assertion"`
+	SuppressTypeCompareWarning bool `mapstructure:"suppress-type-compare-assertion"`
+	ForbidFocusContainer       bool `mapstructure:"forbid-focus-container"`
+	AllowHaveLenZero           bool `mapstructure:"allow-havelen-zero"`
+	ForceExpectTo              bool `mapstructure:"force-expect-to"`
+	ValidateAsyncIntervals     bool `mapstructure:"validate-async-intervals"`
+	ForbidSpecPollution        bool `mapstructure:"forbid-spec-pollution"`
 }
 
 type GocognitSettings struct {
@@ -407,19 +492,22 @@ type GocognitSettings struct {
 }
 
 type GoConstSettings struct {
-	IgnoreTests         bool `mapstructure:"ignore-tests"`
-	MatchWithConstants  bool `mapstructure:"match-constant"`
-	MinStringLen        int  `mapstructure:"min-len"`
-	MinOccurrencesCount int  `mapstructure:"min-occurrences"`
-	ParseNumbers        bool `mapstructure:"numbers"`
-	NumberMin           int  `mapstructure:"min"`
-	NumberMax           int  `mapstructure:"max"`
-	IgnoreCalls         bool `mapstructure:"ignore-calls"`
+	IgnoreStrings       string `mapstructure:"ignore-strings"`
+	IgnoreTests         bool   `mapstructure:"ignore-tests"`
+	MatchWithConstants  bool   `mapstructure:"match-constant"`
+	MinStringLen        int    `mapstructure:"min-len"`
+	MinOccurrencesCount int    `mapstructure:"min-occurrences"`
+	ParseNumbers        bool   `mapstructure:"numbers"`
+	NumberMin           int    `mapstructure:"min"`
+	NumberMax           int    `mapstructure:"max"`
+	IgnoreCalls         bool   `mapstructure:"ignore-calls"`
 }
 
 type GoCriticSettings struct {
 	Go               string                           `mapstructure:"-"`
+	DisableAll       bool                             `mapstructure:"disable-all"`
 	EnabledChecks    []string                         `mapstructure:"enabled-checks"`
+	EnableAll        bool                             `mapstructure:"enable-all"`
 	DisabledChecks   []string                         `mapstructure:"disabled-checks"`
 	EnabledTags      []string                         `mapstructure:"enabled-tags"`
 	DisabledTags     []string                         `mapstructure:"disabled-tags"`
@@ -438,7 +526,7 @@ type GodotSettings struct {
 	Capital bool     `mapstructure:"capital"`
 	Period  bool     `mapstructure:"period"`
 
-	// Deprecated: use `Scope` instead
+	// Deprecated: use Scope instead
 	CheckAll bool `mapstructure:"check-all"`
 }
 
@@ -474,16 +562,12 @@ type GoImportsSettings struct {
 	LocalPrefixes string `mapstructure:"local-prefixes"`
 }
 
-type GoLintSettings struct {
-	MinConfidence float64 `mapstructure:"min-confidence"`
-}
-
+// Deprecated: use MndSettings.
 type GoMndSettings struct {
-	Settings         map[string]map[string]any // Deprecated
-	Checks           []string                  `mapstructure:"checks"`
-	IgnoredNumbers   []string                  `mapstructure:"ignored-numbers"`
-	IgnoredFiles     []string                  `mapstructure:"ignored-files"`
-	IgnoredFunctions []string                  `mapstructure:"ignored-functions"`
+	MndSettings `mapstructure:",squash"`
+
+	// Deprecated: use root level settings instead.
+	Settings map[string]map[string]any
 }
 
 type GoModDirectivesSettings struct {
@@ -529,25 +613,28 @@ type GosmopolitanSettings struct {
 }
 
 type GovetSettings struct {
-	Go             string `mapstructure:"-"`
-	CheckShadowing bool   `mapstructure:"check-shadowing"`
-	Settings       map[string]map[string]any
+	Go string `mapstructure:"-"`
 
 	Enable     []string
 	Disable    []string
 	EnableAll  bool `mapstructure:"enable-all"`
 	DisableAll bool `mapstructure:"disable-all"`
+
+	Settings map[string]map[string]any
+
+	// Deprecated: the linter should be enabled inside Enable.
+	CheckShadowing bool `mapstructure:"check-shadowing"`
 }
 
 func (cfg *GovetSettings) Validate() error {
 	if cfg.EnableAll && cfg.DisableAll {
-		return errors.New("enable-all and disable-all can't be combined")
+		return errors.New("govet: enable-all and disable-all can't be combined")
 	}
 	if cfg.EnableAll && len(cfg.Enable) != 0 {
-		return errors.New("enable-all and enable can't be combined")
+		return errors.New("govet: enable-all and enable can't be combined")
 	}
 	if cfg.DisableAll && len(cfg.Disable) != 0 {
-		return errors.New("disable-all and disable can't be combined")
+		return errors.New("govet: disable-all and disable can't be combined")
 	}
 	return nil
 }
@@ -563,11 +650,6 @@ type GrouperSettings struct {
 	VarRequireGrouping        bool `mapstructure:"var-require-grouping"`
 }
 
-type IfshortSettings struct {
-	MaxDeclLines int `mapstructure:"max-decl-lines"`
-	MaxDeclChars int `mapstructure:"max-decl-chars"`
-}
-
 type ImportAsSettings struct {
 	Alias          []ImportAsAlias
 	NoUnaliased    bool `mapstructure:"no-unaliased"`
@@ -579,6 +661,10 @@ type ImportAsAlias struct {
 	Alias string
 }
 
+type INamedParamSettings struct {
+	SkipSingleParam bool `mapstructure:"skip-single-param"`
+}
+
 type InterfaceBloatSettings struct {
 	Max int `mapstructure:"max"`
 }
@@ -611,16 +697,19 @@ type MakezeroSettings struct {
 	Always bool
 }
 
-type MalignedSettings struct {
-	SuggestNewOrder bool `mapstructure:"suggest-new"`
-}
-
 type MisspellSettings struct {
-	Locale string
-	// TODO(ldez): v2 the options must be renamed to `IgnoredRules`.
+	Mode       string               `mapstructure:"mode"`
+	Locale     string               `mapstructure:"locale"`
+	ExtraWords []MisspellExtraWords `mapstructure:"extra-words"`
+	// TODO(ldez): v2 the option must be renamed to `IgnoredRules`.
 	IgnoreWords []string `mapstructure:"ignore-words"`
 }
 
+type MisspellExtraWords struct {
+	Typo       string `mapstructure:"typo"`
+	Correction string `mapstructure:"correction"`
+}
+
 type MustTagSettings struct {
 	Functions []struct {
 		Name   string `mapstructure:"name"`
@@ -645,6 +734,13 @@ type NlreturnSettings struct {
 	BlockSize int `mapstructure:"block-size"`
 }
 
+type MndSettings struct {
+	Checks           []string `mapstructure:"checks"`
+	IgnoredNumbers   []string `mapstructure:"ignored-numbers"`
+	IgnoredFiles     []string `mapstructure:"ignored-files"`
+	IgnoredFunctions []string `mapstructure:"ignored-functions"`
+}
+
 type NoLintLintSettings struct {
 	RequireExplanation bool     `mapstructure:"require-explanation"`
 	RequireSpecific    bool     `mapstructure:"require-specific"`
@@ -655,9 +751,19 @@ type NoLintLintSettings struct {
 type NoNamedReturnsSettings struct {
 	ReportErrorInDefer bool `mapstructure:"report-error-in-defer"`
 }
+
 type ParallelTestSettings struct {
-	IgnoreMissing         bool `mapstructure:"ignore-missing"`
-	IgnoreMissingSubtests bool `mapstructure:"ignore-missing-subtests"`
+	Go                    string `mapstructure:"-"`
+	IgnoreMissing         bool   `mapstructure:"ignore-missing"`
+	IgnoreMissingSubtests bool   `mapstructure:"ignore-missing-subtests"`
+}
+
+type PerfSprintSettings struct {
+	IntConversion bool `mapstructure:"int-conversion"`
+	ErrError      bool `mapstructure:"err-error"`
+	ErrorF        bool `mapstructure:"errorf"`
+	SprintF1      bool `mapstructure:"sprintf1"`
+	StrConcat     bool `mapstructure:"strconcat"`
 }
 
 type PreallocSettings struct {
@@ -676,6 +782,13 @@ type PromlinterSettings struct {
 	DisabledLinters []string `mapstructure:"disabled-linters"`
 }
 
+type ProtoGetterSettings struct {
+	SkipGeneratedBy         []string `mapstructure:"skip-generated-by"`
+	SkipFiles               []string `mapstructure:"skip-files"`
+	SkipAnyGenerated        bool     `mapstructure:"skip-any-generated"`
+	ReplaceFirstArgInAppend bool     `mapstructure:"replace-first-arg-in-append"`
+}
+
 type ReassignSettings struct {
 	Patterns []string `mapstructure:"patterns"`
 }
@@ -691,6 +804,7 @@ type ReviveSettings struct {
 		Arguments []any
 		Severity  string
 		Disabled  bool
+		Exclude   []string
 	}
 	ErrorCode   int `mapstructure:"error-code"`
 	WarningCode int `mapstructure:"warning-code"`
@@ -704,24 +818,39 @@ type RowsErrCheckSettings struct {
 	Packages []string
 }
 
-type StaticCheckSettings struct {
-	// Deprecated: use the global `run.go` instead.
-	GoVersion string `mapstructure:"go"`
+type SlogLintSettings struct {
+	NoMixedArgs    bool   `mapstructure:"no-mixed-args"`
+	KVOnly         bool   `mapstructure:"kv-only"`
+	AttrOnly       bool   `mapstructure:"attr-only"`
+	NoGlobal       string `mapstructure:"no-global"`
+	Context        string `mapstructure:"context"`
+	ContextOnly    bool   `mapstructure:"context-only"` // Deprecated: use Context instead.
+	StaticMsg      bool   `mapstructure:"static-msg"`
+	NoRawKeys      bool   `mapstructure:"no-raw-keys"`
+	KeyNamingCase  string `mapstructure:"key-naming-case"`
+	ArgsOnSepLines bool   `mapstructure:"args-on-sep-lines"`
+}
+
+type SpancheckSettings struct {
+	Checks                   []string `mapstructure:"checks"`
+	IgnoreCheckSignatures    []string `mapstructure:"ignore-check-signatures"`
+	ExtraStartSpanSignatures []string `mapstructure:"extra-start-span-signatures"`
+}
 
+type StaticCheckSettings struct {
 	Checks                  []string `mapstructure:"checks"`
 	Initialisms             []string `mapstructure:"initialisms"`                // only for stylecheck
 	DotImportWhitelist      []string `mapstructure:"dot-import-whitelist"`       // only for stylecheck
 	HTTPStatusCodeWhitelist []string `mapstructure:"http-status-code-whitelist"` // only for stylecheck
+
+	// Deprecated: use the global `run.go` instead.
+	GoVersion string `mapstructure:"go"`
 }
 
 func (s *StaticCheckSettings) HasConfiguration() bool {
 	return len(s.Initialisms) > 0 || len(s.HTTPStatusCodeWhitelist) > 0 || len(s.DotImportWhitelist) > 0 || len(s.Checks) > 0
 }
 
-type StructCheckSettings struct {
-	CheckExportedFields bool `mapstructure:"exported-fields"`
-}
-
 type TagAlignSettings struct {
 	Align  bool     `mapstructure:"align"`
 	Sort   bool     `mapstructure:"sort"`
@@ -736,6 +865,29 @@ type TagliatelleSettings struct {
 	}
 }
 
+type TestifylintSettings struct {
+	EnableAll        bool     `mapstructure:"enable-all"`
+	DisableAll       bool     `mapstructure:"disable-all"`
+	EnabledCheckers  []string `mapstructure:"enable"`
+	DisabledCheckers []string `mapstructure:"disable"`
+
+	BoolCompare struct {
+		IgnoreCustomTypes bool `mapstructure:"ignore-custom-types"`
+	} `mapstructure:"bool-compare"`
+
+	ExpectedActual struct {
+		ExpVarPattern string `mapstructure:"pattern"`
+	} `mapstructure:"expected-actual"`
+
+	RequireError struct {
+		FnPattern string `mapstructure:"fn-pattern"`
+	} `mapstructure:"require-error"`
+
+	SuiteExtraAssertCall struct {
+		Mode string `mapstructure:"mode"`
+	} `mapstructure:"suite-extra-assert-call"`
+}
+
 type TestpackageSettings struct {
 	SkipRegexp    string   `mapstructure:"skip-regexp"`
 	AllowPackages []string `mapstructure:"allow-packages"`
@@ -766,11 +918,16 @@ type UseStdlibVarsSettings struct {
 	TimeLayout         bool `mapstructure:"time-layout"`
 	CryptoHash         bool `mapstructure:"crypto-hash"`
 	DefaultRPCPath     bool `mapstructure:"default-rpc-path"`
-	OSDevNull          bool `mapstructure:"os-dev-null"`
+	OSDevNull          bool `mapstructure:"os-dev-null"` // Deprecated
 	SQLIsolationLevel  bool `mapstructure:"sql-isolation-level"`
 	TLSSignatureScheme bool `mapstructure:"tls-signature-scheme"`
 	ConstantKind       bool `mapstructure:"constant-kind"`
-	SyslogPriority     bool `mapstructure:"syslog-priority"`
+	SyslogPriority     bool `mapstructure:"syslog-priority"` // Deprecated
+}
+
+type UnconvertSettings struct {
+	FastMath bool `mapstructure:"fast-math"`
+	Safe     bool `mapstructure:"safe"`
 }
 
 type UnparamSettings struct {
@@ -778,8 +935,14 @@ type UnparamSettings struct {
 	Algo          string
 }
 
-type VarCheckSettings struct {
-	CheckExportedFields bool `mapstructure:"exported-fields"`
+type UnusedSettings struct {
+	FieldWritesAreUses     bool `mapstructure:"field-writes-are-uses"`
+	PostStatementsAreReads bool `mapstructure:"post-statements-are-reads"`
+	ExportedIsUsed         bool `mapstructure:"exported-is-used"`
+	ExportedFieldsAreUsed  bool `mapstructure:"exported-fields-are-used"`
+	ParametersAreUsed      bool `mapstructure:"parameters-are-used"`
+	LocalVariablesAreUsed  bool `mapstructure:"local-variables-are-used"`
+	GeneratedIsUsed        bool `mapstructure:"generated-is-used"`
 }
 
 type VarnamelenSettings struct {
@@ -825,17 +988,15 @@ type WSLSettings struct {
 }
 
 // CustomLinterSettings encapsulates the meta-data of a private linter.
-// For example, a private linter may be added to the golangci config file as shown below.
-//
-//	linters-settings:
-//	 custom:
-//	   example:
-//	     path: /example.so
-//	     description: The description of the linter
-//	     original-url: github.com/golangci/example-linter
 type CustomLinterSettings struct {
+	// Type is the plugin type.
+	// It can be `goplugin` or `module`.
+	Type string `mapstructure:"type"`
+
 	// Path to a plugin *.so file that implements the private linter.
+	// Only for Go plugin system.
 	Path string
+
 	// Description describes the purpose of the private linter.
 	Description string
 	// OriginalURL The URL containing the source code for the private linter.
@@ -844,3 +1005,19 @@ type CustomLinterSettings struct {
 	// Settings plugin settings only work with linterdb.PluginConstructor symbol.
 	Settings any
 }
+
+func (s *CustomLinterSettings) Validate() error {
+	if s.Type == "module" {
+		if s.Path != "" {
+			return errors.New("path not supported with module type")
+		}
+
+		return nil
+	}
+
+	if s.Path == "" {
+		return errors.New("path is required")
+	}
+
+	return nil
+}
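
Reviewer note (not part of the vendored diff): golangci-lint 1.58 adds a `type` field to custom linter entries, and the new `Validate` above rejects `path` for `module`-type plugins while still requiring it for Go plugins. Below is a minimal, hedged sketch of that rule using a hypothetical local stand-in struct, not the real config type:

```go
package main

import (
	"errors"
	"fmt"
)

// customLinter is a local stand-in for the vendored CustomLinterSettings.
type customLinter struct {
	Type string // "goplugin" (default) or "module"
	Path string // *.so path, only meaningful for the Go plugin system
}

// validate mirrors the Validate method shown in the diff above.
func (c customLinter) validate() error {
	if c.Type == "module" {
		if c.Path != "" {
			return errors.New("path not supported with module type")
		}
		return nil
	}
	if c.Path == "" {
		return errors.New("path is required")
	}
	return nil
}

func main() {
	fmt.Println(customLinter{Type: "module"}.validate())                // <nil>
	fmt.Println(customLinter{Type: "module", Path: "/x.so"}.validate()) // path not supported with module type
	fmt.Println(customLinter{Type: "goplugin"}.validate())              // path is required
}
```
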
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go b/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go
new file mode 100644
index 0000000000000000000000000000000000000000..a48381474c8795d1d30baf97e899de2a0189d53a
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go
@@ -0,0 +1,468 @@
+package config
+
+import (
+	"errors"
+	"fmt"
+	"os"
+	"path/filepath"
+	"slices"
+
+	"github.com/go-viper/mapstructure/v2"
+	"github.com/mitchellh/go-homedir"
+	"github.com/spf13/pflag"
+	"github.com/spf13/viper"
+
+	"github.com/golangci/golangci-lint/pkg/exitcodes"
+	"github.com/golangci/golangci-lint/pkg/fsutils"
+	"github.com/golangci/golangci-lint/pkg/logutils"
+)
+
+var errConfigDisabled = errors.New("config is disabled by --no-config")
+
+type LoaderOptions struct {
+	Config   string // Flag only. The path to the golangci config file, as specified with the --config argument.
+	NoConfig bool   // Flag only.
+}
+
+type LoadOptions struct {
+	CheckDeprecation bool
+	Validation       bool
+}
+
+type Loader struct {
+	opts LoaderOptions
+
+	viper *viper.Viper
+	fs    *pflag.FlagSet
+
+	log logutils.Log
+
+	cfg  *Config
+	args []string
+}
+
+func NewLoader(log logutils.Log, v *viper.Viper, fs *pflag.FlagSet, opts LoaderOptions, cfg *Config, args []string) *Loader {
+	return &Loader{
+		opts:  opts,
+		viper: v,
+		fs:    fs,
+		log:   log,
+		cfg:   cfg,
+		args:  args,
+	}
+}
+
+func (l *Loader) Load(opts LoadOptions) error {
+	err := l.setConfigFile()
+	if err != nil {
+		return err
+	}
+
+	err = l.parseConfig()
+	if err != nil {
+		return err
+	}
+
+	l.applyStringSliceHack()
+
+	if opts.CheckDeprecation {
+		err = l.handleDeprecation()
+		if err != nil {
+			return err
+		}
+	}
+
+	l.handleGoVersion()
+
+	err = l.handleEnableOnlyOption()
+	if err != nil {
+		return err
+	}
+
+	if opts.Validation {
+		err = l.cfg.Validate()
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (l *Loader) setConfigFile() error {
+	configFile, err := l.evaluateOptions()
+	if err != nil {
+		if errors.Is(err, errConfigDisabled) {
+			return nil
+		}
+
+		return fmt.Errorf("can't parse --config option: %w", err)
+	}
+
+	if configFile != "" {
+		l.viper.SetConfigFile(configFile)
+
+		// Assume YAML if the file has no extension.
+		if filepath.Ext(configFile) == "" {
+			l.viper.SetConfigType("yaml")
+		}
+	} else {
+		l.setupConfigFileSearch()
+	}
+
+	return nil
+}
+
+func (l *Loader) evaluateOptions() (string, error) {
+	if l.opts.NoConfig && l.opts.Config != "" {
+		return "", errors.New("can't combine option --config and --no-config")
+	}
+
+	if l.opts.NoConfig {
+		return "", errConfigDisabled
+	}
+
+	configFile, err := homedir.Expand(l.opts.Config)
+	if err != nil {
+		return "", errors.New("failed to expand configuration path")
+	}
+
+	return configFile, nil
+}
+
+func (l *Loader) setupConfigFileSearch() {
+	l.viper.SetConfigName(".golangci")
+
+	configSearchPaths := l.getConfigSearchPaths()
+
+	l.log.Infof("Config search paths: %s", configSearchPaths)
+
+	for _, p := range configSearchPaths {
+		l.viper.AddConfigPath(p)
+	}
+}
+
+func (l *Loader) getConfigSearchPaths() []string {
+	firstArg := "./..."
+	if len(l.args) > 0 {
+		firstArg = l.args[0]
+	}
+
+	absPath, err := filepath.Abs(firstArg)
+	if err != nil {
+		l.log.Warnf("Can't make abs path for %q: %s", firstArg, err)
+		absPath = filepath.Clean(firstArg)
+	}
+
+	// start from it
+	var currentDir string
+	if fsutils.IsDir(absPath) {
+		currentDir = absPath
+	} else {
+		currentDir = filepath.Dir(absPath)
+	}
+
+	// find all dirs from it up to the root
+	searchPaths := []string{"./"}
+
+	for {
+		searchPaths = append(searchPaths, currentDir)
+
+		parent := filepath.Dir(currentDir)
+		if currentDir == parent || parent == "" {
+			break
+		}
+
+		currentDir = parent
+	}
+
+	// find home directory for global config
+	if home, err := homedir.Dir(); err != nil {
+		l.log.Warnf("Can't get user's home directory: %v", err)
+	} else if !slices.Contains(searchPaths, home) {
+		searchPaths = append(searchPaths, home)
+	}
+
+	return searchPaths
+}
+
+func (l *Loader) parseConfig() error {
+	if err := l.viper.ReadInConfig(); err != nil {
+		var configFileNotFoundError viper.ConfigFileNotFoundError
+		if errors.As(err, &configFileNotFoundError) {
+			// Load configuration from flags only.
+			err = l.viper.Unmarshal(l.cfg, customDecoderHook())
+			if err != nil {
+				return fmt.Errorf("can't unmarshal config by viper (flags): %w", err)
+			}
+
+			return nil
+		}
+
+		return fmt.Errorf("can't read viper config: %w", err)
+	}
+
+	err := l.setConfigDir()
+	if err != nil {
+		return err
+	}
+
+	// Load configuration from all sources (flags, file).
+	if err := l.viper.Unmarshal(l.cfg, customDecoderHook()); err != nil {
+		return fmt.Errorf("can't unmarshal config by viper (flags, file): %w", err)
+	}
+
+	if l.cfg.InternalTest { // just for testing purposes: to detect config file usage
+		_, _ = fmt.Fprintln(logutils.StdOut, "test")
+		os.Exit(exitcodes.Success)
+	}
+
+	return nil
+}
+
+func (l *Loader) setConfigDir() error {
+	usedConfigFile := l.viper.ConfigFileUsed()
+	if usedConfigFile == "" {
+		return nil
+	}
+
+	if usedConfigFile == os.Stdin.Name() {
+		usedConfigFile = ""
+		l.log.Infof("Reading config file from stdin")
+	} else {
+		var err error
+		usedConfigFile, err = fsutils.ShortestRelPath(usedConfigFile, "")
+		if err != nil {
+			l.log.Warnf("Can't pretty print config file path: %v", err)
+		}
+
+		l.log.Infof("Used config file %s", usedConfigFile)
+	}
+
+	usedConfigDir, err := filepath.Abs(filepath.Dir(usedConfigFile))
+	if err != nil {
+		return errors.New("can't get config directory")
+	}
+
+	l.cfg.cfgDir = usedConfigDir
+
+	return nil
+}
+
+// Hack to append values from StringSlice flags.
+// Viper always overrides StringSlice values.
+// https://github.com/spf13/viper/issues/1448
+// So StringSlice flags are not bound to Viper; instead, their values are obtained via Cobra flags.
+func (l *Loader) applyStringSliceHack() {
+	if l.fs == nil {
+		return
+	}
+
+	l.appendStringSlice("enable", &l.cfg.Linters.Enable)
+	l.appendStringSlice("disable", &l.cfg.Linters.Disable)
+	l.appendStringSlice("presets", &l.cfg.Linters.Presets)
+	l.appendStringSlice("build-tags", &l.cfg.Run.BuildTags)
+	l.appendStringSlice("exclude", &l.cfg.Issues.ExcludePatterns)
+
+	l.appendStringSlice("skip-dirs", &l.cfg.Run.SkipDirs)
+	l.appendStringSlice("skip-files", &l.cfg.Run.SkipFiles)
+	l.appendStringSlice("exclude-dirs", &l.cfg.Issues.ExcludeDirs)
+	l.appendStringSlice("exclude-files", &l.cfg.Issues.ExcludeFiles)
+}
+
+func (l *Loader) appendStringSlice(name string, current *[]string) {
+	if l.fs.Changed(name) {
+		val, _ := l.fs.GetStringSlice(name)
+		*current = append(*current, val...)
+	}
+}
+
+func (l *Loader) handleGoVersion() {
+	if l.cfg.Run.Go == "" {
+		l.cfg.Run.Go = detectGoVersion()
+	}
+
+	l.cfg.LintersSettings.Govet.Go = l.cfg.Run.Go
+
+	l.cfg.LintersSettings.ParallelTest.Go = l.cfg.Run.Go
+
+	if l.cfg.LintersSettings.Gofumpt.LangVersion == "" {
+		l.cfg.LintersSettings.Gofumpt.LangVersion = l.cfg.Run.Go
+	}
+
+	trimmedGoVersion := trimGoVersion(l.cfg.Run.Go)
+
+	l.cfg.LintersSettings.Gocritic.Go = trimmedGoVersion
+
+	// staticcheck related linters.
+	if l.cfg.LintersSettings.Staticcheck.GoVersion == "" {
+		l.cfg.LintersSettings.Staticcheck.GoVersion = trimmedGoVersion
+	}
+	if l.cfg.LintersSettings.Gosimple.GoVersion == "" {
+		l.cfg.LintersSettings.Gosimple.GoVersion = trimmedGoVersion
+	}
+	if l.cfg.LintersSettings.Stylecheck.GoVersion == "" {
+		l.cfg.LintersSettings.Stylecheck.GoVersion = trimmedGoVersion
+	}
+}
+
+func (l *Loader) handleDeprecation() error {
+	if l.cfg.InternalTest || l.cfg.InternalCmdTest || os.Getenv(logutils.EnvTestRun) == "1" {
+		return nil
+	}
+
+	// Deprecated since v1.57.0
+	if len(l.cfg.Run.SkipFiles) > 0 {
+		l.log.Warnf("The configuration option `run.skip-files` is deprecated, please use `issues.exclude-files`.")
+		l.cfg.Issues.ExcludeFiles = l.cfg.Run.SkipFiles
+	}
+
+	// Deprecated since v1.57.0
+	if len(l.cfg.Run.SkipDirs) > 0 {
+		l.log.Warnf("The configuration option `run.skip-dirs` is deprecated, please use `issues.exclude-dirs`.")
+		l.cfg.Issues.ExcludeDirs = l.cfg.Run.SkipDirs
+	}
+
+	// The 2 options are true by default.
+	// Deprecated since v1.57.0
+	if !l.cfg.Run.UseDefaultSkipDirs {
+		l.log.Warnf("The configuration option `run.skip-dirs-use-default` is deprecated, please use `issues.exclude-dirs-use-default`.")
+	}
+	l.cfg.Issues.UseDefaultExcludeDirs = l.cfg.Run.UseDefaultSkipDirs && l.cfg.Issues.UseDefaultExcludeDirs
+
+	// The 2 options are false by default.
+	// Deprecated since v1.57.0
+	if l.cfg.Run.ShowStats {
+		l.log.Warnf("The configuration option `run.show-stats` is deprecated, please use `output.show-stats`")
+	}
+	l.cfg.Output.ShowStats = l.cfg.Run.ShowStats || l.cfg.Output.ShowStats
+
+	// Deprecated since v1.57.0
+	if l.cfg.Output.Format != "" {
+		l.log.Warnf("The configuration option `output.format` is deprecated, please use `output.formats`")
+
+		var f OutputFormats
+		err := f.UnmarshalText([]byte(l.cfg.Output.Format))
+		if err != nil {
+			return fmt.Errorf("unmarshal output format: %w", err)
+		}
+
+		l.cfg.Output.Formats = f
+	}
+
+	l.handleLinterOptionDeprecations()
+
+	return nil
+}
+
+//nolint:gocyclo // the complexity cannot be reduced.
+func (l *Loader) handleLinterOptionDeprecations() {
+	// Deprecated since v1.57.0,
+	// but it was unofficially deprecated since v1.19 (2019) (https://github.com/golangci/golangci-lint/pull/697).
+	if l.cfg.LintersSettings.Govet.CheckShadowing {
+		l.log.Warnf("The configuration option `linters.govet.check-shadowing` is deprecated. " +
+			"Please enable `shadow` instead, if you are not using `enable-all`.")
+	}
+
+	if l.cfg.LintersSettings.CopyLoopVar.IgnoreAlias {
+		l.log.Warnf("The configuration option `linters.copyloopvar.ignore-alias` is deprecated and ignored, " +
+			"please use `linters.copyloopvar.check-alias`.")
+	}
+
+	// Deprecated since v1.42.0.
+	if l.cfg.LintersSettings.Errcheck.Exclude != "" {
+		l.log.Warnf("The configuration option `linters.errcheck.exclude` is deprecated, please use `linters.errcheck.exclude-functions`.")
+	}
+
+	// Deprecated since v1.59.0,
+	// but it was unofficially deprecated since v1.13 (2018) (https://github.com/golangci/golangci-lint/pull/332).
+	if l.cfg.LintersSettings.Errcheck.Ignore != "" {
+		l.log.Warnf("The configuration option `linters.errcheck.ignore` is deprecated, please use `linters.errcheck.exclude-functions`.")
+	}
+
+	// Deprecated since v1.44.0.
+	if l.cfg.LintersSettings.Gci.LocalPrefixes != "" {
+		l.log.Warnf("The configuration option `linters.gci.local-prefixes` is deprecated, please use `prefix()` inside `linters.gci.sections`.")
+	}
+
+	// Deprecated since v1.33.0.
+	if l.cfg.LintersSettings.Godot.CheckAll {
+		l.log.Warnf("The configuration option `linters.godot.check-all` is deprecated, please use `linters.godot.scope: all`.")
+	}
+
+	// Deprecated since v1.44.0.
+	if len(l.cfg.LintersSettings.Gomnd.Settings) > 0 {
+		l.log.Warnf("The configuration option `linters.gomnd.settings` is deprecated. Please use the options " +
+			"`linters.gomnd.checks`, `linters.gomnd.ignored-numbers`, `linters.gomnd.ignored-files`, and `linters.gomnd.ignored-functions`.")
+	}
+
+	// Deprecated since v1.47.0
+	if l.cfg.LintersSettings.Gofumpt.LangVersion != "" {
+		l.log.Warnf("The configuration option `linters.gofumpt.lang-version` is deprecated, please use global `run.go`.")
+	}
+
+	// Deprecated since v1.47.0
+	if l.cfg.LintersSettings.Staticcheck.GoVersion != "" {
+		l.log.Warnf("The configuration option `linters.staticcheck.go` is deprecated, please use global `run.go`.")
+	}
+
+	// Deprecated since v1.47.0
+	if l.cfg.LintersSettings.Gosimple.GoVersion != "" {
+		l.log.Warnf("The configuration option `linters.gosimple.go` is deprecated, please use global `run.go`.")
+	}
+
+	// Deprecated since v1.47.0
+	if l.cfg.LintersSettings.Stylecheck.GoVersion != "" {
+		l.log.Warnf("The configuration option `linters.stylecheck.go` is deprecated, please use global `run.go`.")
+	}
+
+	// Deprecated since v1.58.0
+	if l.cfg.LintersSettings.SlogLint.ContextOnly {
+		l.log.Warnf("The configuration option `linters.sloglint.context-only` is deprecated, please use `linters.sloglint.context`.")
+		if l.cfg.LintersSettings.SlogLint.Context == "" {
+			l.cfg.LintersSettings.SlogLint.Context = "all"
+		}
+	}
+
+	// Deprecated since v1.51.0
+	if l.cfg.LintersSettings.UseStdlibVars.OSDevNull {
+		l.log.Warnf("The configuration option `linters.usestdlibvars.os-dev-null` is deprecated.")
+	}
+
+	// Deprecated since v1.51.0
+	if l.cfg.LintersSettings.UseStdlibVars.SyslogPriority {
+		l.log.Warnf("The configuration option `linters.usestdlibvars.syslog-priority` is deprecated.")
+	}
+}
+
+func (l *Loader) handleEnableOnlyOption() error {
+	lookup := l.fs.Lookup("enable-only")
+	if lookup == nil {
+		return nil
+	}
+
+	only, err := l.fs.GetStringSlice("enable-only")
+	if err != nil {
+		return err
+	}
+
+	if len(only) > 0 {
+		l.cfg.Linters = Linters{
+			Enable:     only,
+			DisableAll: true,
+		}
+	}
+
+	return nil
+}
+
+func customDecoderHook() viper.DecoderConfigOption {
+	return viper.DecodeHook(mapstructure.ComposeDecodeHookFunc(
+		// Default hooks (https://github.com/spf13/viper/blob/518241257478c557633ab36e474dfcaeb9a3c623/viper.go#L135-L138).
+		mapstructure.StringToTimeDurationHookFunc(),
+		mapstructure.StringToSliceHookFunc(","),
+
+		// Needed for forbidigo and output.formats.
+		mapstructure.TextUnmarshallerHookFunc(),
+	))
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/output.go b/vendor/github.com/golangci/golangci-lint/pkg/config/output.go
index e8726392055d786cf6313cc5b63f4b54c981dcb0..a005213cfdce019bcfabe9aeb7a8949d08583b60 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/output.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/output.go
@@ -1,5 +1,12 @@
 package config
 
+import (
+	"errors"
+	"fmt"
+	"slices"
+	"strings"
+)
+
 const (
 	OutFormatJSON              = "json"
 	OutFormatLineNumber        = "line-number"
@@ -14,11 +21,12 @@ const (
 	OutFormatTeamCity          = "teamcity"
 )
 
-var OutFormats = []string{
-	OutFormatColoredLineNumber,
-	OutFormatLineNumber,
+var AllOutputFormats = []string{
 	OutFormatJSON,
+	OutFormatLineNumber,
+	OutFormatColoredLineNumber,
 	OutFormatTab,
+	OutFormatColoredTab,
 	OutFormatCheckstyle,
 	OutFormatCodeClimate,
 	OutFormatHTML,
@@ -28,14 +36,78 @@ var OutFormats = []string{
 }
 
 type Output struct {
-	Format              string
-	PrintIssuedLine     bool   `mapstructure:"print-issued-lines"`
-	PrintLinterName     bool   `mapstructure:"print-linter-name"`
-	UniqByLine          bool   `mapstructure:"uniq-by-line"`
-	SortResults         bool   `mapstructure:"sort-results"`
-	PrintWelcomeMessage bool   `mapstructure:"print-welcome"`
-	PathPrefix          string `mapstructure:"path-prefix"`
-
-	// only work with CLI flags because the setup of logs is done before the config file parsing.
-	Color string
+	Formats         OutputFormats `mapstructure:"formats"`
+	PrintIssuedLine bool          `mapstructure:"print-issued-lines"`
+	PrintLinterName bool          `mapstructure:"print-linter-name"`
+	UniqByLine      bool          `mapstructure:"uniq-by-line"`
+	SortResults     bool          `mapstructure:"sort-results"`
+	SortOrder       []string      `mapstructure:"sort-order"`
+	PathPrefix      string        `mapstructure:"path-prefix"`
+	ShowStats       bool          `mapstructure:"show-stats"`
+
+	// Deprecated: use Formats instead.
+	Format string `mapstructure:"format"`
+}
+
+func (o *Output) Validate() error {
+	if !o.SortResults && len(o.SortOrder) > 0 {
+		return errors.New("sort-results should be 'true' to use sort-order")
+	}
+
+	validOrders := []string{"linter", "file", "severity"}
+
+	all := strings.Join(o.SortOrder, " ")
+
+	for _, order := range o.SortOrder {
+		if strings.Count(all, order) > 1 {
+			return fmt.Errorf("the sort-order name %q is repeated several times", order)
+		}
+
+		if !slices.Contains(validOrders, order) {
+			return fmt.Errorf("unsupported sort-order name %q", order)
+		}
+	}
+
+	for _, format := range o.Formats {
+		err := format.Validate()
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+type OutputFormat struct {
+	Format string `mapstructure:"format"`
+	Path   string `mapstructure:"path"`
+}
+
+func (o *OutputFormat) Validate() error {
+	if o.Format == "" {
+		return errors.New("the format is required")
+	}
+
+	if !slices.Contains(AllOutputFormats, o.Format) {
+		return fmt.Errorf("unsupported output format %q", o.Format)
+	}
+
+	return nil
+}
+
+type OutputFormats []OutputFormat
+
+func (p *OutputFormats) UnmarshalText(text []byte) error {
+	formats := strings.Split(string(text), ",")
+
+	for _, item := range formats {
+		format, path, _ := strings.Cut(item, ":")
+
+		*p = append(*p, OutputFormat{
+			Path:   path,
+			Format: format,
+		})
+	}
+
+	return nil
 }
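
Reviewer note (not part of the vendored diff): the deprecated single `output.format` string is now expanded into the `Formats` slice via the `UnmarshalText` method above, which parses a comma-separated list of `format[:path]` entries. A hedged standalone sketch of that parsing rule, with hypothetical local names standing in for the vendored types:

```go
package main

import (
	"fmt"
	"strings"
)

// outputFormat is a local stand-in for the vendored OutputFormat type.
type outputFormat struct {
	Format string
	Path   string
}

// parseFormats mirrors OutputFormats.UnmarshalText above:
// each comma-separated item is "format" or "format:path".
func parseFormats(text string) []outputFormat {
	var out []outputFormat
	for _, item := range strings.Split(text, ",") {
		format, path, _ := strings.Cut(item, ":")
		out = append(out, outputFormat{Format: format, Path: path})
	}
	return out
}

func main() {
	// A legacy value like "colored-line-number,json:report.json" becomes two
	// entries: one with an empty path (stdout) and one writing to report.json.
	fmt.Printf("%+v\n", parseFormats("colored-line-number,json:report.json"))
}
```
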
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go b/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go
deleted file mode 100644
index de203876e9699206c3eda2dd8085e36640925aa4..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go
+++ /dev/null
@@ -1,248 +0,0 @@
-package config
-
-import (
-	"errors"
-	"fmt"
-	"os"
-	"path/filepath"
-	"strings"
-
-	"github.com/mitchellh/go-homedir"
-	"github.com/mitchellh/mapstructure"
-	"github.com/spf13/viper"
-	"golang.org/x/exp/slices"
-
-	"github.com/golangci/golangci-lint/pkg/exitcodes"
-	"github.com/golangci/golangci-lint/pkg/fsutils"
-	"github.com/golangci/golangci-lint/pkg/logutils"
-)
-
-type FileReader struct {
-	log            logutils.Log
-	cfg            *Config
-	commandLineCfg *Config
-}
-
-func NewFileReader(toCfg, commandLineCfg *Config, log logutils.Log) *FileReader {
-	return &FileReader{
-		log:            log,
-		cfg:            toCfg,
-		commandLineCfg: commandLineCfg,
-	}
-}
-
-func (r *FileReader) Read() error {
-	// XXX: hack with double parsing for 2 purposes:
-	// 1. to access "config" option here.
-	// 2. to give config less priority than command line.
-
-	configFile, err := r.parseConfigOption()
-	if err != nil {
-		if err == errConfigDisabled {
-			return nil
-		}
-
-		return fmt.Errorf("can't parse --config option: %s", err)
-	}
-
-	if configFile != "" {
-		viper.SetConfigFile(configFile)
-
-		// Assume YAML if the file has no extension.
-		if filepath.Ext(configFile) == "" {
-			viper.SetConfigType("yaml")
-		}
-	} else {
-		r.setupConfigFileSearch()
-	}
-
-	return r.parseConfig()
-}
-
-func (r *FileReader) parseConfig() error {
-	if err := viper.ReadInConfig(); err != nil {
-		if _, ok := err.(viper.ConfigFileNotFoundError); ok {
-			return nil
-		}
-
-		return fmt.Errorf("can't read viper config: %s", err)
-	}
-
-	usedConfigFile := viper.ConfigFileUsed()
-	if usedConfigFile == "" {
-		return nil
-	}
-
-	if usedConfigFile == os.Stdin.Name() {
-		usedConfigFile = ""
-		r.log.Infof("Reading config file stdin")
-	} else {
-		var err error
-		usedConfigFile, err = fsutils.ShortestRelPath(usedConfigFile, "")
-		if err != nil {
-			r.log.Warnf("Can't pretty print config file path: %v", err)
-		}
-
-		r.log.Infof("Used config file %s", usedConfigFile)
-	}
-
-	usedConfigDir, err := filepath.Abs(filepath.Dir(usedConfigFile))
-	if err != nil {
-		return errors.New("can't get config directory")
-	}
-	r.cfg.cfgDir = usedConfigDir
-
-	if err := viper.Unmarshal(r.cfg, viper.DecodeHook(mapstructure.ComposeDecodeHookFunc(
-		// Default hooks (https://github.com/spf13/viper/blob/518241257478c557633ab36e474dfcaeb9a3c623/viper.go#L135-L138).
-		mapstructure.StringToTimeDurationHookFunc(),
-		mapstructure.StringToSliceHookFunc(","),
-
-		// Needed for forbidigo.
-		mapstructure.TextUnmarshallerHookFunc(),
-	))); err != nil {
-		return fmt.Errorf("can't unmarshal config by viper: %s", err)
-	}
-
-	if err := r.validateConfig(); err != nil {
-		return fmt.Errorf("can't validate config: %s", err)
-	}
-
-	if r.cfg.InternalTest { // just for testing purposes: to detect config file usage
-		fmt.Fprintln(logutils.StdOut, "test")
-		os.Exit(exitcodes.Success)
-	}
-
-	return nil
-}
-
-func (r *FileReader) validateConfig() error {
-	c := r.cfg
-	if len(c.Run.Args) != 0 {
-		return errors.New("option run.args in config isn't supported now")
-	}
-
-	if c.Run.CPUProfilePath != "" {
-		return errors.New("option run.cpuprofilepath in config isn't allowed")
-	}
-
-	if c.Run.MemProfilePath != "" {
-		return errors.New("option run.memprofilepath in config isn't allowed")
-	}
-
-	if c.Run.TracePath != "" {
-		return errors.New("option run.tracepath in config isn't allowed")
-	}
-
-	if c.Run.IsVerbose {
-		return errors.New("can't set run.verbose option with config: only on command-line")
-	}
-	for i, rule := range c.Issues.ExcludeRules {
-		if err := rule.Validate(); err != nil {
-			return fmt.Errorf("error in exclude rule #%d: %v", i, err)
-		}
-	}
-	if len(c.Severity.Rules) > 0 && c.Severity.Default == "" {
-		return errors.New("can't set severity rule option: no default severity defined")
-	}
-	for i, rule := range c.Severity.Rules {
-		if err := rule.Validate(); err != nil {
-			return fmt.Errorf("error in severity rule #%d: %v", i, err)
-		}
-	}
-	if err := c.LintersSettings.Govet.Validate(); err != nil {
-		return fmt.Errorf("error in govet config: %v", err)
-	}
-	return nil
-}
-
-func getFirstPathArg() string {
-	args := os.Args
-
-	// skip all args ([golangci-lint, run/linters]) before files/dirs list
-	for len(args) != 0 {
-		if args[0] == "run" {
-			args = args[1:]
-			break
-		}
-
-		args = args[1:]
-	}
-
-	// find first file/dir arg
-	firstArg := "./..."
-	for _, arg := range args {
-		if !strings.HasPrefix(arg, "-") {
-			firstArg = arg
-			break
-		}
-	}
-
-	return firstArg
-}
-
-func (r *FileReader) setupConfigFileSearch() {
-	firstArg := getFirstPathArg()
-	absStartPath, err := filepath.Abs(firstArg)
-	if err != nil {
-		r.log.Warnf("Can't make abs path for %q: %s", firstArg, err)
-		absStartPath = filepath.Clean(firstArg)
-	}
-
-	// start from it
-	var curDir string
-	if fsutils.IsDir(absStartPath) {
-		curDir = absStartPath
-	} else {
-		curDir = filepath.Dir(absStartPath)
-	}
-
-	// find all dirs from it up to the root
-	configSearchPaths := []string{"./"}
-
-	for {
-		configSearchPaths = append(configSearchPaths, curDir)
-		newCurDir := filepath.Dir(curDir)
-		if curDir == newCurDir || newCurDir == "" {
-			break
-		}
-		curDir = newCurDir
-	}
-
-	// find home directory for global config
-	if home, err := homedir.Dir(); err != nil {
-		r.log.Warnf("Can't get user's home directory: %s", err.Error())
-	} else if !slices.Contains(configSearchPaths, home) {
-		configSearchPaths = append(configSearchPaths, home)
-	}
-
-	r.log.Infof("Config search paths: %s", configSearchPaths)
-	viper.SetConfigName(".golangci")
-	for _, p := range configSearchPaths {
-		viper.AddConfigPath(p)
-	}
-}
-
-var errConfigDisabled = errors.New("config is disabled by --no-config")
-
-func (r *FileReader) parseConfigOption() (string, error) {
-	cfg := r.commandLineCfg
-	if cfg == nil {
-		return "", nil
-	}
-
-	configFile := cfg.Run.Config
-	if cfg.Run.NoConfig && configFile != "" {
-		return "", errors.New("can't combine option --config and --no-config")
-	}
-
-	if cfg.Run.NoConfig {
-		return "", errConfigDisabled
-	}
-
-	configFile, err := homedir.Expand(configFile)
-	if err != nil {
-		return "", errors.New("failed to expand configuration path")
-	}
-
-	return configFile, nil
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/run.go b/vendor/github.com/golangci/golangci-lint/pkg/config/run.go
index ff812d0a2583d7fe10224baf9f4aa4750828af7a..2f6523c0b9644ffdfe6544873b448b4a657e5e52 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/run.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/run.go
@@ -1,21 +1,17 @@
 package config
 
-import "time"
+import (
+	"fmt"
+	"slices"
+	"strings"
+	"time"
+)
 
 // Run encapsulates the config options for running the linter analysis.
 type Run struct {
-	IsVerbose           bool `mapstructure:"verbose"`
-	Silent              bool
-	CPUProfilePath      string
-	MemProfilePath      string
-	TracePath           string
-	Concurrency         int
-	PrintResourcesUsage bool `mapstructure:"print-resources-usage"`
+	Timeout time.Duration `mapstructure:"timeout"`
 
-	Config   string // The path to the golangci config file, as specified with the --config argument.
-	NoConfig bool
-
-	Args []string
+	Concurrency int `mapstructure:"concurrency"`
 
 	Go string `mapstructure:"go"`
 
@@ -25,16 +21,27 @@ type Run struct {
 	ExitCodeIfIssuesFound int  `mapstructure:"issues-exit-code"`
 	AnalyzeTests          bool `mapstructure:"tests"`
 
-	// Deprecated: Deadline exists for historical compatibility
-	// and should not be used. To set run timeout use Timeout instead.
-	Deadline time.Duration
-	Timeout  time.Duration
-
-	PrintVersion       bool
-	SkipFiles          []string `mapstructure:"skip-files"`
-	SkipDirs           []string `mapstructure:"skip-dirs"`
-	UseDefaultSkipDirs bool     `mapstructure:"skip-dirs-use-default"`
-
 	AllowParallelRunners bool `mapstructure:"allow-parallel-runners"`
 	AllowSerialRunners   bool `mapstructure:"allow-serial-runners"`
+
+	// Deprecated: use Issues.ExcludeFiles instead.
+	SkipFiles []string `mapstructure:"skip-files"`
+	// Deprecated: use Issues.ExcludeDirs instead.
+	SkipDirs []string `mapstructure:"skip-dirs"`
+	// Deprecated: use Issues.UseDefaultExcludeDirs instead.
+	UseDefaultSkipDirs bool `mapstructure:"skip-dirs-use-default"`
+
+	// Deprecated: use Output.ShowStats instead.
+	ShowStats bool `mapstructure:"show-stats"`
+}
+
+func (r *Run) Validate() error {
+	// go help modules
+	allowedMods := []string{"mod", "readonly", "vendor"}
+
+	if r.ModulesDownloadMode != "" && !slices.Contains(allowedMods, r.ModulesDownloadMode) {
+		return fmt.Errorf("invalid modules download mode %s, only (%s) allowed", r.ModulesDownloadMode, strings.Join(allowedMods, "|"))
+	}
+
+	return nil
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/severity.go b/vendor/github.com/golangci/golangci-lint/pkg/config/severity.go
index 3068a0ed69ce0182939b06bdddca9ae0b6238a5d..a6d2c9ec3fb39009e281178421e49d6303366cfe 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/severity.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/severity.go
@@ -1,5 +1,10 @@
 package config
 
+import (
+	"errors"
+	"fmt"
+)
+
 const severityRuleMinConditionsCount = 1
 
 type Severity struct {
@@ -8,11 +13,29 @@ type Severity struct {
 	Rules         []SeverityRule `mapstructure:"rules"`
 }
 
+func (s *Severity) Validate() error {
+	if len(s.Rules) > 0 && s.Default == "" {
+		return errors.New("can't set severity rule option: no default severity defined")
+	}
+
+	for i, rule := range s.Rules {
+		if err := rule.Validate(); err != nil {
+			return fmt.Errorf("error in severity rule #%d: %w", i, err)
+		}
+	}
+
+	return nil
+}
+
 type SeverityRule struct {
 	BaseRule `mapstructure:",squash"`
 	Severity string
 }
 
 func (s *SeverityRule) Validate() error {
+	if s.Severity == "" {
+		return errors.New("severity should be set")
+	}
+
 	return s.BaseRule.Validate(severityRuleMinConditionsCount)
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go
index a39c105e432a00fd5fe3e5769d3835727603bfbc..80bb9c5b44f4ed966f88812839f5f3d21aeb2d77 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go
@@ -12,10 +12,12 @@ func IsDir(filename string) bool {
 	return err == nil && fi.IsDir()
 }
 
-var cachedWd string
-var cachedWdError error
-var getWdOnce sync.Once
-var useCache = true
+var (
+	cachedWd      string
+	cachedWdError error
+	getWdOnce     sync.Once
+	useCache      = true
+)
 
 func UseWdCache(use bool) {
 	useCache = use
@@ -34,7 +36,7 @@ func Getwd() (string, error) {
 
 		evaledWd, err := EvalSymlinks(cachedWd)
 		if err != nil {
-			cachedWd, cachedWdError = "", fmt.Errorf("can't eval symlinks on wd %s: %s", cachedWd, err)
+			cachedWd, cachedWdError = "", fmt.Errorf("can't eval symlinks on wd %s: %w", cachedWd, err)
 			return
 		}
 
@@ -70,13 +72,13 @@ func ShortestRelPath(path, wd string) (string, error) {
 		var err error
 		wd, err = Getwd()
 		if err != nil {
-			return "", fmt.Errorf("can't get working directory: %s", err)
+			return "", fmt.Errorf("can't get working directory: %w", err)
 		}
 	}
 
 	evaledPath, err := EvalSymlinks(path)
 	if err != nil {
-		return "", fmt.Errorf("can't eval symlinks for path %s: %s", path, err)
+		return "", fmt.Errorf("can't eval symlinks for path %s: %w", path, err)
 	}
 	path = evaledPath
 
@@ -92,7 +94,7 @@ func ShortestRelPath(path, wd string) (string, error) {
 
 	relPath, err := filepath.Rel(wd, absPath)
 	if err != nil {
-		return "", fmt.Errorf("can't get relative path for path %s and root %s: %s",
+		return "", fmt.Errorf("can't get relative path for path %s and root %s: %w",
 			absPath, wd, err)
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/issue.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/issue.go
similarity index 81%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/issue.go
rename to vendor/github.com/golangci/golangci-lint/pkg/goanalysis/issue.go
index f331a3ab9f1af82cabb42f1be4934c75f6a4f7c7..15d8dd2b333fdbb45438ced9e34c44679a13ee42 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/issue.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/issue.go
@@ -13,9 +13,9 @@ type Issue struct {
 	Pass *analysis.Pass
 }
 
-func NewIssue(i *result.Issue, pass *analysis.Pass) Issue {
+func NewIssue(issue *result.Issue, pass *analysis.Pass) Issue {
 	return Issue{
-		Issue: *i,
+		Issue: *issue,
 		Pass:  pass,
 	}
 }
@@ -23,6 +23,7 @@ func NewIssue(i *result.Issue, pass *analysis.Pass) Issue {
 type EncodingIssue struct {
 	FromLinter           string
 	Text                 string
+	Severity             string
 	Pos                  token.Position
 	LineRange            *result.Range
 	Replacement          *result.Replacement
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/linter.go
similarity index 98%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go
rename to vendor/github.com/golangci/golangci-lint/pkg/goanalysis/linter.go
index f8ca2e7553c26e005f3b9d7939c7e55dcf754096..33f2254b02b1f0770b00d3fe8ca2d4645f3673da 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/linter.go
@@ -102,7 +102,7 @@ func (lnt *Linter) allAnalyzerNames() []string {
 	return ret
 }
 
-func (lnt *Linter) configureAnalyzer(a *analysis.Analyzer, cfg map[string]any) error {
+func (*Linter) configureAnalyzer(a *analysis.Analyzer, cfg map[string]any) error {
 	for k, v := range cfg {
 		f := a.Flags.Lookup(k)
 		if f == nil {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load/guard.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/load/guard.go
similarity index 100%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load/guard.go
rename to vendor/github.com/golangci/golangci-lint/pkg/goanalysis/load/guard.go
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/metalinter.go
similarity index 92%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go
rename to vendor/github.com/golangci/golangci-lint/pkg/goanalysis/metalinter.go
index 333ab20f1f272b5cab348cbb90f554c1022fd6be..c2a7949974ce076908133cc589960d81ed9bced4 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/metalinter.go
@@ -31,11 +31,11 @@ func (ml MetaLinter) Run(_ context.Context, lintCtx *linter.Context) ([]result.I
 	return runAnalyzers(ml, lintCtx)
 }
 
-func (ml MetaLinter) Name() string {
+func (MetaLinter) Name() string {
 	return "goanalysis_metalinter"
 }
 
-func (ml MetaLinter) Desc() string {
+func (MetaLinter) Desc() string {
 	return ""
 }
 
@@ -57,11 +57,11 @@ func (ml MetaLinter) getAnalyzers() []*analysis.Analyzer {
 	return allAnalyzers
 }
 
-func (ml MetaLinter) getName() string {
+func (MetaLinter) getName() string {
 	return "metalinter"
 }
 
-func (ml MetaLinter) useOriginalPackages() bool {
+func (MetaLinter) useOriginalPackages() bool {
 	return false // `unused` can't be run by this metalinter
 }
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors/errors.go
similarity index 60%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go
rename to vendor/github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors/errors.go
index f59e02cc6408f089fc5cee24c60c0fa2ab1c68db..91f6dd39d75555788909d529f1c8354abdd51819 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors/errors.go
@@ -1,4 +1,4 @@
-package goanalysis
+package pkgerrors
 
 import (
 	"errors"
@@ -7,7 +7,6 @@ import (
 	"golang.org/x/tools/go/packages"
 
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	libpackages "github.com/golangci/golangci-lint/pkg/packages"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
@@ -19,7 +18,7 @@ func (e *IllTypedError) Error() string {
 	return fmt.Sprintf("errors in package: %v", e.Pkg.Errors)
 }
 
-func buildIssuesFromIllTypedError(errs []error, lintCtx *linter.Context) ([]result.Issue, error) {
+func BuildIssuesFromIllTypedError(errs []error, lintCtx *linter.Context) ([]result.Issue, error) {
 	var issues []result.Issue
 	uniqReportedIssues := map[string]bool{}
 
@@ -36,8 +35,8 @@ func buildIssuesFromIllTypedError(errs []error, lintCtx *linter.Context) ([]resu
 			continue
 		}
 
-		for _, err := range libpackages.ExtractErrors(ill.Pkg) {
-			i, perr := parseError(err)
+		for _, err := range extractErrors(ill.Pkg) {
+			issue, perr := parseError(err)
 			if perr != nil { // failed to parse
 				if uniqReportedIssues[err.Msg] {
 					continue
@@ -45,8 +44,8 @@ func buildIssuesFromIllTypedError(errs []error, lintCtx *linter.Context) ([]resu
 				uniqReportedIssues[err.Msg] = true
 				lintCtx.Log.Errorf("typechecking error: %s", err.Msg)
 			} else {
-				i.Pkg = ill.Pkg // to save to cache later
-				issues = append(issues, *i)
+				issue.Pkg = ill.Pkg // to save to cache later
+				issues = append(issues, *issue)
 			}
 		}
 	}
@@ -57,16 +56,3 @@ func buildIssuesFromIllTypedError(errs []error, lintCtx *linter.Context) ([]resu
 
 	return issues, nil
 }
-
-func parseError(srcErr packages.Error) (*result.Issue, error) {
-	pos, err := libpackages.ParseErrorPosition(srcErr.Pos)
-	if err != nil {
-		return nil, err
-	}
-
-	return &result.Issue{
-		Pos:        *pos,
-		Text:       srcErr.Msg,
-		FromLinter: "typecheck",
-	}, nil
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/packages/util.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors/extract.go
similarity index 93%
rename from vendor/github.com/golangci/golangci-lint/pkg/packages/util.go
rename to vendor/github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors/extract.go
index 6a7789ebb730b9036aa2e18e5de856df872ec0f2..d1257e6638d682e0e0b39f630e54c66f262c94c1 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/packages/util.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors/extract.go
@@ -1,4 +1,4 @@
-package packages
+package pkgerrors
 
 import (
 	"fmt"
@@ -12,7 +12,7 @@ import (
 // ex: `/example/main.go:11:17: foobar`
 var reFile = regexp.MustCompile(`^.+\.go:\d+:\d+: .+`)
 
-func ExtractErrors(pkg *packages.Package) []packages.Error {
+func extractErrors(pkg *packages.Package) []packages.Error {
 	errors := extractErrorsImpl(pkg, map[*packages.Package]bool{})
 	if len(errors) == 0 {
 		return errors
@@ -38,7 +38,7 @@ func ExtractErrors(pkg *packages.Package) []packages.Error {
 	if len(pkg.GoFiles) != 0 {
 		// errors were extracted from deps and have at least one file in package
 		for i := range uniqErrors {
-			if _, parseErr := ParseErrorPosition(uniqErrors[i].Pos); parseErr == nil {
+			if _, parseErr := parseErrorPosition(uniqErrors[i].Pos); parseErr == nil {
 				continue
 			}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors/parse.go
similarity index 53%
rename from vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go
rename to vendor/github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors/parse.go
index 48983671294b1144888992e5dd335ac68cc75333..b25b50f713eda7a9dd9d447fb744734733ad23e3 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors/parse.go
@@ -1,4 +1,4 @@
-package packages
+package pkgerrors
 
 import (
 	"errors"
@@ -6,9 +6,26 @@ import (
 	"go/token"
 	"strconv"
 	"strings"
+
+	"golang.org/x/tools/go/packages"
+
+	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-func ParseErrorPosition(pos string) (*token.Position, error) {
+func parseError(srcErr packages.Error) (*result.Issue, error) {
+	pos, err := parseErrorPosition(srcErr.Pos)
+	if err != nil {
+		return nil, err
+	}
+
+	return &result.Issue{
+		Pos:        *pos,
+		Text:       srcErr.Msg,
+		FromLinter: "typecheck",
+	}, nil
+}
+
+func parseErrorPosition(pos string) (*token.Position, error) {
 	// file:line(<optional>:colon)
 	parts := strings.Split(pos, ":")
 	if len(parts) == 1 {
@@ -18,7 +35,7 @@ func ParseErrorPosition(pos string) (*token.Position, error) {
 	file := parts[0]
 	line, err := strconv.Atoi(parts[1])
 	if err != nil {
-		return nil, fmt.Errorf("can't parse line number %q: %s", parts[1], err)
+		return nil, fmt.Errorf("can't parse line number %q: %w", parts[1], err)
 	}
 
 	var column int
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go
similarity index 92%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go
rename to vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go
index 46871bc5b2879f985fa8fda72ed0eeb7b6edda2e..c1274ec09a7c2d3e069c0ff8729d4278fec426bc 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go
@@ -17,12 +17,13 @@ import (
 	"sort"
 	"sync"
 
+	"golang.org/x/exp/maps"
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/packages"
 
 	"github.com/golangci/golangci-lint/internal/errorutil"
 	"github.com/golangci/golangci-lint/internal/pkgcache"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load"
+	"github.com/golangci/golangci-lint/pkg/goanalysis/load"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 	"github.com/golangci/golangci-lint/pkg/timeutils"
 )
@@ -60,7 +61,8 @@ type runner struct {
 }
 
 func newRunner(prefix string, logger logutils.Log, pkgCache *pkgcache.Cache, loadGuard *load.Guard,
-	loadMode LoadMode, sw *timeutils.Stopwatch) *runner {
+	loadMode LoadMode, sw *timeutils.Stopwatch,
+) *runner {
 	return &runner{
 		prefix:    prefix,
 		log:       logger,
@@ -79,7 +81,8 @@ func newRunner(prefix string, logger logutils.Log, pkgCache *pkgcache.Cache, loa
 // singlechecker and the multi-analysis commands.
 // It returns the appropriate exit code.
 func (r *runner) run(analyzers []*analysis.Analyzer, initialPackages []*packages.Package) ([]Diagnostic,
-	[]error, map[*analysis.Pass]*packages.Package) {
+	[]error, map[*analysis.Pass]*packages.Package,
+) {
 	debugf("Analyzing %d packages on load mode %s", len(initialPackages), r.loadMode)
 	defer r.pkgCache.Trim()
 
@@ -115,7 +118,8 @@ func (r *runner) markAllActions(a *analysis.Analyzer, pkg *packages.Package, mar
 }
 
 func (r *runner) makeAction(a *analysis.Analyzer, pkg *packages.Package,
-	initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator) *action {
+	initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator,
+) *action {
 	k := actKey{a, pkg}
 	act, ok := actions[k]
 	if ok {
@@ -149,7 +153,8 @@ func (r *runner) makeAction(a *analysis.Analyzer, pkg *packages.Package,
 }
 
 func (r *runner) buildActionFactDeps(act *action, a *analysis.Analyzer, pkg *packages.Package,
-	initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator) {
+	initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator,
+) {
 	// An analysis that consumes/produces facts
 	// must run on the package's dependencies too.
 	if len(a.FactTypes) == 0 {
@@ -159,10 +164,7 @@ func (r *runner) buildActionFactDeps(act *action, a *analysis.Analyzer, pkg *pac
 	act.objectFacts = make(map[objectFactKey]analysis.Fact)
 	act.packageFacts = make(map[packageFactKey]analysis.Fact)
 
-	paths := make([]string, 0, len(pkg.Imports))
-	for path := range pkg.Imports {
-		paths = append(paths, path)
-	}
+	paths := maps.Keys(pkg.Imports)
 	sort.Strings(paths) // for determinism
 	for _, path := range paths {
 		dep := r.makeAction(a, pkg.Imports[path], initialPkgs, actions, actAlloc)
@@ -175,9 +177,9 @@ func (r *runner) buildActionFactDeps(act *action, a *analysis.Analyzer, pkg *pac
 	}
 }
 
-//nolint:gocritic
 func (r *runner) prepareAnalysis(pkgs []*packages.Package,
-	analyzers []*analysis.Analyzer) (map[*packages.Package]bool, []*action, []*action) {
+	analyzers []*analysis.Analyzer,
+) (initialPkgs map[*packages.Package]bool, allActions, roots []*action) {
 	// Construct the action graph.
 
 	// Each graph node (action) is one unit of analysis.
@@ -197,13 +199,13 @@ func (r *runner) prepareAnalysis(pkgs []*packages.Package,
 	actions := make(map[actKey]*action, totalActionsCount)
 	actAlloc := newActionAllocator(totalActionsCount)
 
-	initialPkgs := make(map[*packages.Package]bool, len(pkgs))
+	initialPkgs = make(map[*packages.Package]bool, len(pkgs))
 	for _, pkg := range pkgs {
 		initialPkgs[pkg] = true
 	}
 
 	// Build nodes for initial packages.
-	roots := make([]*action, 0, len(pkgs)*len(analyzers))
+	roots = make([]*action, 0, len(pkgs)*len(analyzers))
 	for _, a := range analyzers {
 		for _, pkg := range pkgs {
 			root := r.makeAction(a, pkg, initialPkgs, actions, actAlloc)
@@ -212,10 +214,7 @@ func (r *runner) prepareAnalysis(pkgs []*packages.Package,
 		}
 	}
 
-	allActions := make([]*action, 0, len(actions))
-	for _, act := range actions {
-		allActions = append(allActions, act)
-	}
+	allActions = maps.Values(actions)
 
 	debugf("Built %d actions", len(actions))
 
@@ -281,7 +280,6 @@ func (r *runner) analyze(pkgs []*packages.Package, analyzers []*analysis.Analyze
 	return rootActions
 }
 
-//nolint:nakedret
 func extractDiagnostics(roots []*action) (retDiags []Diagnostic, retErrors []error) {
 	extracted := make(map[*action]bool)
 	var extract func(*action)
@@ -338,5 +336,5 @@ func extractDiagnostics(roots []*action) (retDiags []Diagnostic, retErrors []err
 		}
 	}
 	visitAll(roots)
-	return
+	return retDiags, retErrors
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go
similarity index 97%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go
rename to vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go
index 5ded9fac9d372048ac7f051229535d575e9c85ad..58ea297ea92b68f8d49673467c02d37aa53941c4 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go
@@ -9,13 +9,13 @@ import (
 	"runtime/debug"
 	"time"
 
-	"github.com/hashicorp/go-multierror"
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/go/types/objectpath"
 
 	"github.com/golangci/golangci-lint/internal/errorutil"
 	"github.com/golangci/golangci-lint/internal/pkgcache"
+	"github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors"
 )
 
 type actionAllocator struct {
@@ -126,20 +126,16 @@ func (act *action) analyze() {
 	}(time.Now())
 
 	// Report an error if any dependency failures.
-	var depErrors *multierror.Error
+	var depErrors error
 	for _, dep := range act.deps {
 		if dep.err == nil {
 			continue
 		}
 
-		depErrors = multierror.Append(depErrors, errors.Unwrap(dep.err))
+		depErrors = errors.Join(depErrors, errors.Unwrap(dep.err))
 	}
 	if depErrors != nil {
-		depErrors.ErrorFormat = func(e []error) string {
-			return fmt.Sprintf("failed prerequisites: %v", e)
-		}
-
-		act.err = depErrors
+		act.err = fmt.Errorf("failed prerequisites: %w", depErrors)
 		return
 	}
 
@@ -189,7 +185,7 @@ func (act *action) analyze() {
 		// It looks like there should be !pass.Analyzer.RunDespiteErrors
 		// but govet's cgocall crashes on it. Govet itself contains !pass.Analyzer.RunDespiteErrors condition here,
 		// but it exits before it if packages.Load have failed.
-		act.err = fmt.Errorf("analysis skipped: %w", &IllTypedError{Pkg: act.pkg})
+		act.err = fmt.Errorf("analysis skipped: %w", &pkgerrors.IllTypedError{Pkg: act.pkg})
 	} else {
 		startedAt = time.Now()
 		act.result, act.err = pass.Analyzer.Run(pass)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_facts.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_facts.go
similarity index 100%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_facts.go
rename to vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_facts.go
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go
similarity index 99%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go
rename to vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go
index e39e2212c392bc91bcd9093fba9939cf9cb0a01d..c54357eb6794c228237416bf43129c14703d20b0 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go
@@ -15,7 +15,7 @@ import (
 	"golang.org/x/tools/go/gcexportdata"
 	"golang.org/x/tools/go/packages"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load"
+	"github.com/golangci/golangci-lint/pkg/goanalysis/load"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 )
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runners.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go
similarity index 88%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runners.go
rename to vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go
index 559fb1392b96c62c46a50397276bdadad6ac21ec..c02d33b79757707ef1d94adeffd2227564b32d5e 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runners.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go
@@ -13,6 +13,7 @@ import (
 	"golang.org/x/tools/go/packages"
 
 	"github.com/golangci/golangci-lint/internal/pkgcache"
+	"github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 	"github.com/golangci/golangci-lint/pkg/result"
@@ -74,7 +75,7 @@ func runAnalyzers(cfg runAnalyzersConfig, lintCtx *linter.Context) ([]result.Iss
 		return retIssues
 	}
 
-	errIssues, err := buildIssuesFromIllTypedError(errs, lintCtx)
+	errIssues, err := pkgerrors.BuildIssuesFromIllTypedError(errs, lintCtx)
 	if err != nil {
 		return nil, err
 	}
@@ -124,7 +125,8 @@ func getIssuesCacheKey(analyzers []*analysis.Analyzer) string {
 }
 
 func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.Package]bool,
-	issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer) {
+	issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer,
+) {
 	startedAt := time.Now()
 	perPkgIssues := map[*packages.Package][]result.Issue{}
 	for ind := range issues {
@@ -151,6 +153,7 @@ func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.
 					encodedIssues = append(encodedIssues, EncodingIssue{
 						FromLinter:           i.FromLinter,
 						Text:                 i.Text,
+						Severity:             i.Severity,
 						Pos:                  i.Pos,
 						LineRange:            i.LineRange,
 						Replacement:          i.Replacement,
@@ -182,9 +185,9 @@ func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.
 	issuesCacheDebugf("Saved %d issues from %d packages to cache in %s", savedIssuesCount, len(allPkgs), time.Since(startedAt))
 }
 
-//nolint:gocritic
 func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context,
-	analyzers []*analysis.Analyzer) ([]result.Issue, map[*packages.Package]bool) {
+	analyzers []*analysis.Analyzer,
+) (issuesFromCache []result.Issue, pkgsFromCache map[*packages.Package]bool) {
 	startedAt := time.Now()
 
 	lintResKey := getIssuesCacheKey(analyzers)
@@ -218,16 +221,18 @@ func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context,
 				}
 
 				issues := make([]result.Issue, 0, len(pkgIssues))
-				for _, i := range pkgIssues {
+				for i := range pkgIssues {
+					issue := &pkgIssues[i]
 					issues = append(issues, result.Issue{
-						FromLinter:           i.FromLinter,
-						Text:                 i.Text,
-						Pos:                  i.Pos,
-						LineRange:            i.LineRange,
-						Replacement:          i.Replacement,
+						FromLinter:           issue.FromLinter,
+						Text:                 issue.Text,
+						Severity:             issue.Severity,
+						Pos:                  issue.Pos,
+						LineRange:            issue.LineRange,
+						Replacement:          issue.Replacement,
 						Pkg:                  pkg,
-						ExpectNoLint:         i.ExpectNoLint,
-						ExpectedNoLintLinter: i.ExpectedNoLintLinter,
+						ExpectNoLint:         issue.ExpectNoLint,
+						ExpectedNoLintLinter: issue.ExpectedNoLintLinter,
 					})
 				}
 				cacheRes.issues = issues
@@ -242,13 +247,12 @@ func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context,
 	wg.Wait()
 
 	loadedIssuesCount := 0
-	var issues []result.Issue
-	pkgsFromCache := map[*packages.Package]bool{}
+	pkgsFromCache = map[*packages.Package]bool{}
 	for pkg, cacheRes := range pkgToCacheRes {
 		if cacheRes.loadErr == nil {
 			loadedIssuesCount += len(cacheRes.issues)
 			pkgsFromCache[pkg] = true
-			issues = append(issues, cacheRes.issues...)
+			issuesFromCache = append(issuesFromCache, cacheRes.issues...)
 			issuesCacheDebugf("Loaded package %s issues (%d) from cache", pkg, len(cacheRes.issues))
 		} else {
 			issuesCacheDebugf("Didn't load package %s issues from cache: %s", pkg, cacheRes.loadErr)
@@ -256,7 +260,7 @@ func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context,
 	}
 	issuesCacheDebugf("Loaded %d issues from cache in %s, analyzing %d/%d packages",
 		loadedIssuesCount, time.Since(startedAt), len(pkgs)-len(pkgsFromCache), len(pkgs))
-	return issues, pkgsFromCache
+	return issuesFromCache, pkgsFromCache
 }
 
 func analyzersHashID(analyzers []*analysis.Analyzer) string {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/asasalint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/asasalint/asasalint.go
similarity index 65%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/asasalint.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/asasalint/asasalint.go
index 67dde79918b2db15f6661b007d78999e93fb6cf8..653a2d514250a4fc62d1c50c40d0b0cf7bec1418 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/asasalint.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/asasalint/asasalint.go
@@ -1,14 +1,15 @@
-package golinters
+package asasalint
 
 import (
 	"github.com/alingse/asasalint"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 )
 
-func NewAsasalint(setting *config.AsasalintSettings) *goanalysis.Linter {
+func New(setting *config.AsasalintSettings) *goanalysis.Linter {
 	cfg := asasalint.LinterSetting{}
 	if setting != nil {
 		cfg.Exclude = setting.Exclude
@@ -18,7 +19,7 @@ func NewAsasalint(setting *config.AsasalintSettings) *goanalysis.Linter {
 
 	a, err := asasalint.NewAnalyzer(cfg)
 	if err != nil {
-		linterLogger.Fatalf("asasalint: create analyzer: %v", err)
+		internal.LinterLogger.Fatalf("asasalint: create analyzer: %v", err)
 	}
 
 	return goanalysis.NewLinter(
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/asciicheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/asciicheck.go
deleted file mode 100644
index df301b417bc57f172b8c83f00238327cacbd6dc2..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/asciicheck.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package golinters
-
-import (
-	"github.com/tdakkota/asciicheck"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewAsciicheck() *goanalysis.Linter {
-	return goanalysis.NewLinter(
-		"asciicheck",
-		"Simple linter to check that your code does not contain non-ASCII identifiers",
-		[]*analysis.Analyzer{asciicheck.NewAnalyzer()},
-		nil,
-	).WithLoadMode(goanalysis.LoadModeSyntax)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/asciicheck/asciicheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/asciicheck/asciicheck.go
new file mode 100644
index 0000000000000000000000000000000000000000..675dfc4780871306f1f71717536fa388c3d2d297
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/asciicheck/asciicheck.go
@@ -0,0 +1,19 @@
+package asciicheck
+
+import (
+	"github.com/tdakkota/asciicheck"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New() *goanalysis.Linter {
+	a := asciicheck.NewAnalyzer()
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeSyntax)
+}
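
Note: the new file above follows the convention used throughout this update: each linter lives in its own package, exposes a `New` constructor, and reuses the upstream analyzer's own Name and Doc. As a minimal sketch (not part of the vendored change), the same asciicheck analyzer can also be driven standalone with the x/tools singlechecker:

    package main

    import (
    	"github.com/tdakkota/asciicheck"
    	"golang.org/x/tools/go/analysis/singlechecker"
    )

    func main() {
    	// Run the asciicheck analyzer directly, outside golangci-lint.
    	singlechecker.Main(asciicheck.NewAnalyzer())
    }
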
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk/bidichk.go
similarity index 88%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk/bidichk.go
index e1b34717649a221b1f3228bd3cbacc963920c1b4..4ced901e8faeae4a002a7011fc20e4e7f8a78690 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk/bidichk.go
@@ -1,4 +1,4 @@
-package golinters
+package bidichk
 
 import (
 	"strings"
@@ -7,10 +7,10 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewBiDiChkFuncName(cfg *config.BiDiChkSettings) *goanalysis.Linter {
+func New(cfg *config.BiDiChkSettings) *goanalysis.Linter {
 	a := bidichk.NewAnalyzer()
 
 	cfgMap := map[string]map[string]any{}
@@ -51,7 +51,7 @@ func NewBiDiChkFuncName(cfg *config.BiDiChkSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		"bidichk",
+		a.Name,
 		"Checks for dangerous unicode character sequences",
 		[]*analysis.Analyzer{a},
 		cfgMap,
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose/bodyclose.go
similarity index 57%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose/bodyclose.go
index e56bd83f28cb65379c3e865490675c8c447e0414..f39814edc590055ab7149342750fe6dde7a3877f 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose/bodyclose.go
@@ -1,17 +1,19 @@
-package golinters
+package bodyclose
 
 import (
 	"github.com/timakin/bodyclose/passes/bodyclose"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewBodyclose() *goanalysis.Linter {
+func New() *goanalysis.Linter {
+	a := bodyclose.Analyzer
+
 	return goanalysis.NewLinter(
-		"bodyclose",
+		a.Name,
 		"checks whether HTTP response body is closed successfully",
-		[]*analysis.Analyzer{bodyclose.Analyzer},
+		[]*analysis.Analyzer{a},
 		nil,
 	).WithLoadMode(goanalysis.LoadModeTypesInfo)
 }
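
Note: bodyclose itself is unchanged by this rename; for reference, a minimal sketch of the pattern it enforces — the HTTP response body must be closed on every path:

    package example

    import (
    	"io"
    	"net/http"
    )

    func fetch(url string) ([]byte, error) {
    	resp, err := http.Get(url)
    	if err != nil {
    		return nil, err
    	}
    	// Without this deferred Close, bodyclose reports the http.Get call site.
    	defer resp.Body.Close()

    	return io.ReadAll(resp.Body)
    }
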
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/canonicalheader/canonicalheader.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/canonicalheader/canonicalheader.go
new file mode 100644
index 0000000000000000000000000000000000000000..d721916a49cd8f9a96fbd437e740eeb7945c34a7
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/canonicalheader/canonicalheader.go
@@ -0,0 +1,19 @@
+package canonicalheader
+
+import (
+	"github.com/lasiar/canonicalheader"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New() *goanalysis.Linter {
+	a := canonicalheader.Analyzer
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/commons.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/commons.go
deleted file mode 100644
index 3b40e59bfea5d93e499f440ca320c637b1bed198..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/commons.go
+++ /dev/null
@@ -1,6 +0,0 @@
-package golinters
-
-import "github.com/golangci/golangci-lint/pkg/logutils"
-
-// linterLogger must be use only when the context logger is not available.
-var linterLogger = logutils.NewStderrLog(logutils.DebugKeyLinter)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx/containedctx.go
similarity index 66%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx/containedctx.go
index 8f7859af7d6cd754a1c35b8447fe43429d5a4672..6fdb4ea6e1869b3d8ec198c03c12644408fb97b4 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx/containedctx.go
@@ -1,13 +1,13 @@
-package golinters
+package containedctx
 
 import (
 	"github.com/sivchari/containedctx"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewContainedCtx() *goanalysis.Linter {
+func New() *goanalysis.Linter {
 	a := containedctx.Analyzer
 
 	return goanalysis.NewLinter(
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/contextcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/contextcheck/contextcheck.go
similarity index 79%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/contextcheck.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/contextcheck/contextcheck.go
index f54192a189d1cad53183fea906eebe25fac2e958..a34c518b2c0f84961d3a5b1f880b402300508921 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/contextcheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/contextcheck/contextcheck.go
@@ -1,14 +1,14 @@
-package golinters
+package contextcheck
 
 import (
 	"github.com/kkHAIKE/contextcheck"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 )
 
-func NewContextCheck() *goanalysis.Linter {
+func New() *goanalysis.Linter {
 	analyzer := contextcheck.NewAnalyzer(contextcheck.Configuration{})
 
 	return goanalysis.NewLinter(
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/copyloopvar/copyloopvar.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/copyloopvar/copyloopvar.go
new file mode 100644
index 0000000000000000000000000000000000000000..adb4ee7284656f37d0d6ce147e80dca32ad44e1a
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/copyloopvar/copyloopvar.go
@@ -0,0 +1,29 @@
+package copyloopvar
+
+import (
+	"github.com/karamaru-alpha/copyloopvar"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.CopyLoopVarSettings) *goanalysis.Linter {
+	a := copyloopvar.NewAnalyzer()
+
+	var cfg map[string]map[string]any
+	if settings != nil {
+		cfg = map[string]map[string]any{
+			a.Name: {
+				"check-alias": settings.CheckAlias,
+			},
+		}
+	}
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		cfg,
+	).WithLoadMode(goanalysis.LoadModeSyntax)
+}
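
Note: the settings map built above is keyed by analyzer name and then flag name ("check-alias"); golangci-lint's goanalysis layer applies such values to the analyzer's flag set. A rough, illustrative sketch of that mechanism (not the vendored code):

    package example

    import (
    	"fmt"

    	"golang.org/x/tools/go/analysis"
    )

    // applyConfig copies a settings map shaped like the one above,
    // e.g. {"copyloopvar": {"check-alias": true}}, onto the analyzer's flags.
    func applyConfig(a *analysis.Analyzer, cfg map[string]map[string]any) error {
    	for name, value := range cfg[a.Name] {
    		if err := a.Flags.Set(name, fmt.Sprintf("%v", value)); err != nil {
    			return fmt.Errorf("set flag %q on %s: %w", name, a.Name, err)
    		}
    	}
    	return nil
    }
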
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go
similarity index 78%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go
index 5ad65f122c25045e5d3ab402da67ecd5ea9f83b6..eb8c0577a5a2571fc6d2fd10f3db3c6dbc94ae49 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go
@@ -1,16 +1,14 @@
-package golinters
+package cyclop
 
 import (
 	"github.com/bkielbasa/cyclop/pkg/analyzer"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-const cyclopName = "cyclop"
-
-func NewCyclop(settings *config.Cyclop) *goanalysis.Linter {
+func New(settings *config.Cyclop) *goanalysis.Linter {
 	a := analyzer.NewAnalyzer()
 
 	var cfg map[string]map[string]any
@@ -31,7 +29,7 @@ func NewCyclop(settings *config.Cyclop) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		cyclopName,
+		a.Name,
 		"checks function and package cyclomatic complexity",
 		[]*analysis.Analyzer{a},
 		cfg,
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go
deleted file mode 100644
index 4f563c381339c45d23ba7720a39c1874c2a67bb7..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go
+++ /dev/null
@@ -1,61 +0,0 @@
-package golinters
-
-import (
-	"fmt"
-	"sync"
-
-	deadcodeAPI "github.com/golangci/go-misc/deadcode"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-const deadcodeName = "deadcode"
-
-func NewDeadcode() *goanalysis.Linter {
-	var mu sync.Mutex
-	var resIssues []goanalysis.Issue
-
-	analyzer := &analysis.Analyzer{
-		Name: deadcodeName,
-		Doc:  goanalysis.TheOnlyanalyzerDoc,
-		Run: func(pass *analysis.Pass) (any, error) {
-			prog := goanalysis.MakeFakeLoaderProgram(pass)
-
-			issues, err := deadcodeAPI.Run(prog)
-			if err != nil {
-				return nil, err
-			}
-
-			res := make([]goanalysis.Issue, 0, len(issues))
-			for _, i := range issues {
-				res = append(res, goanalysis.NewIssue(&result.Issue{
-					Pos:        i.Pos,
-					Text:       fmt.Sprintf("%s is unused", formatCode(i.UnusedIdentName, nil)),
-					FromLinter: deadcodeName,
-				}, pass))
-			}
-
-			if len(issues) == 0 {
-				return nil, nil
-			}
-
-			mu.Lock()
-			resIssues = append(resIssues, res...)
-			mu.Unlock()
-
-			return nil, nil
-		},
-	}
-
-	return goanalysis.NewLinter(
-		deadcodeName,
-		"Finds unused code",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
-		return resIssues
-	}).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder/decorder.go
similarity index 89%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder/decorder.go
index 5202a03a4f95962a1b8045201c9f5a3e7b10adad..a05f6a32573fd6dba11c959fbaac7df5fdc64bb6 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder/decorder.go
@@ -1,4 +1,4 @@
-package golinters
+package decorder
 
 import (
 	"strings"
@@ -7,10 +7,10 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewDecorder(settings *config.DecorderSettings) *goanalysis.Linter {
+func New(settings *config.DecorderSettings) *goanalysis.Linter {
 	a := decorder.Analyzer
 
 	// disable all rules/checks by default
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard/depguard.go
similarity index 82%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard/depguard.go
index 23986708c9319bc53315e3728403d996a5d0b71d..d2aedf252446a39d4d54f05c78b1c7ebf6e2a6ec 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard/depguard.go
@@ -1,22 +1,23 @@
-package golinters
+package depguard
 
 import (
 	"github.com/OpenPeeDeeP/depguard/v2"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 )
 
-func NewDepguard(settings *config.DepGuardSettings) *goanalysis.Linter {
+func New(settings *config.DepGuardSettings) *goanalysis.Linter {
 	conf := depguard.LinterSettings{}
 
 	if settings != nil {
 		for s, rule := range settings.Rules {
 			list := &depguard.List{
-				Files: rule.Files,
-				Allow: rule.Allow,
+				ListMode: rule.ListMode,
+				Files:    rule.Files,
+				Allow:    rule.Allow,
 			}
 
 			// because of bug with Viper parsing (split on dot) we use a list of struct instead of a map.
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled/dogsled.go
similarity index 88%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled/dogsled.go
index 79502fe8be6cac7348eb47c37beceece1106484e..49108f4f1fccbee325fd7a2636ce5f009123a26b 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled/dogsled.go
@@ -1,4 +1,4 @@
-package golinters
+package dogsled
 
 import (
 	"fmt"
@@ -9,20 +9,19 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const dogsledName = "dogsled"
+const linterName = "dogsled"
 
-//nolint:dupl
-func NewDogsled(settings *config.DogsledSettings) *goanalysis.Linter {
+func New(settings *config.DogsledSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: dogsledName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			issues := runDogsled(pass, settings)
@@ -40,7 +39,7 @@ func NewDogsled(settings *config.DogsledSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		dogsledName,
+		linterName,
 		"Checks assignments with too many blank identifiers (e.g. x, _, _, _, := f())",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -101,7 +100,7 @@ func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor {
 
 		if numBlank > v.maxBlanks {
 			v.issues = append(v.issues, result.Issue{
-				FromLinter: dogsledName,
+				FromLinter: linterName,
 				Text:       fmt.Sprintf("declaration has %v blank identifiers", numBlank),
 				Pos:        v.f.Position(assgnStmt.Pos()),
 			})
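
Note: dogsled's behaviour is unchanged by this rename; for reference, a minimal example of code it reports (assuming the default max-blank-identifiers of 2):

    package example

    func values() (int, int, int, int) { return 1, 2, 3, 4 }

    func use() int {
    	// dogsled: declaration has 3 blank identifiers
    	a, _, _, _ := values()
    	return a
    }
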
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl/dupl.go
similarity index 84%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl/dupl.go
index 5d772a5f2faa83a0251c301519e001edc0a7b2eb..7abcb4c4f4def5b72b45cb6fd4f0e6dc48c153db 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl/dupl.go
@@ -1,4 +1,4 @@
-package golinters
+package dupl
 
 import (
 	"fmt"
@@ -10,20 +10,20 @@ import (
 
 	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/fsutils"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const duplName = "dupl"
+const linterName = "dupl"
 
-//nolint:dupl
-func NewDupl(settings *config.DuplSettings) *goanalysis.Linter {
+func New(settings *config.DuplSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: duplName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			issues, err := runDupl(pass, settings)
@@ -44,7 +44,7 @@ func NewDupl(settings *config.DuplSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		duplName,
+		linterName,
 		"Tool for code clone detection",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -54,7 +54,7 @@ func NewDupl(settings *config.DuplSettings) *goanalysis.Linter {
 }
 
 func runDupl(pass *analysis.Pass, settings *config.DuplSettings) ([]goanalysis.Issue, error) {
-	fileNames := getFileNames(pass)
+	fileNames := internal.GetFileNames(pass)
 
 	issues, err := duplAPI.Run(fileNames, settings.Threshold)
 	if err != nil {
@@ -76,7 +76,7 @@ func runDupl(pass *analysis.Pass, settings *config.DuplSettings) ([]goanalysis.I
 		dupl := fmt.Sprintf("%s:%d-%d", toFilename, i.To.LineStart(), i.To.LineEnd())
 		text := fmt.Sprintf("%d-%d lines are duplicate of %s",
 			i.From.LineStart(), i.From.LineEnd(),
-			formatCode(dupl, nil))
+			internal.FormatCode(dupl, nil))
 
 		res = append(res, goanalysis.NewIssue(&result.Issue{
 			Pos: token.Position{
@@ -88,7 +88,7 @@ func runDupl(pass *analysis.Pass, settings *config.DuplSettings) ([]goanalysis.I
 				To:   i.From.LineEnd(),
 			},
 			Text:       text,
-			FromLinter: duplName,
+			FromLinter: linterName,
 		}, pass))
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword/dupword.go
similarity index 73%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword/dupword.go
index f5a99bc0df3ca2fd394819b9b59b72be1caf9ef2..bba4fc9e1995061e2cd4c366113a825df6ee7f17 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword/dupword.go
@@ -1,4 +1,4 @@
-package golinters
+package dupword
 
 import (
 	"strings"
@@ -7,16 +7,17 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewDupWord(setting *config.DupWordSettings) *goanalysis.Linter {
+func New(setting *config.DupWordSettings) *goanalysis.Linter {
 	a := dupword.NewAnalyzer()
 
 	cfgMap := map[string]map[string]any{}
 	if setting != nil {
 		cfgMap[a.Name] = map[string]any{
 			"keyword": strings.Join(setting.Keywords, ","),
+			"ignore":  strings.Join(setting.Ignore, ","),
 		}
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/durationcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/durationcheck/durationcheck.go
similarity index 66%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/durationcheck.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/durationcheck/durationcheck.go
index 880de5d4200a2ca82723b12390e0ebdac708aba4..88f22c27c029cd8339e379445bcedb15fa4e6745 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/durationcheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/durationcheck/durationcheck.go
@@ -1,13 +1,13 @@
-package golinters
+package durationcheck
 
 import (
 	"github.com/charithe/durationcheck"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewDurationCheck() *goanalysis.Linter {
+func New() *goanalysis.Linter {
 	a := durationcheck.Analyzer
 
 	return goanalysis.NewLinter(
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/err113/err113.go
similarity index 55%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/err113/err113.go
index 10addc57c240eda0f1f23586ac655954894ae564..2600128be155320935363fd7f722a20ce998002b 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/err113/err113.go
@@ -1,17 +1,19 @@
-package golinters
+package err113
 
 import (
 	"github.com/Djarvur/go-err113"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewGoerr113() *goanalysis.Linter {
+func New() *goanalysis.Linter {
+	a := err113.NewAnalyzer()
+
 	return goanalysis.NewLinter(
-		"goerr113",
+		a.Name,
 		"Go linter to check the errors handling expressions",
-		[]*analysis.Analyzer{err113.NewAnalyzer()},
+		[]*analysis.Analyzer{a},
 		nil,
 	).WithLoadMode(goanalysis.LoadModeTypesInfo)
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck/errcheck.go
similarity index 94%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck/errcheck.go
index 89b18519c91beb563ea81b207e197f23bab6f312..9a8a2aa876f148ca1af5c54e0f7042a59fef2beb 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck/errcheck.go
@@ -1,4 +1,4 @@
-package golinters
+package errcheck
 
 import (
 	"bufio"
@@ -16,25 +16,26 @@ import (
 
 	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/fsutils"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const errcheckName = "errcheck"
+const linterName = "errcheck"
 
-func NewErrcheck(settings *config.ErrcheckSettings) *goanalysis.Linter {
+func New(settings *config.ErrcheckSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: errcheckName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run:  goanalysis.DummyRun,
 	}
 
 	return goanalysis.NewLinter(
-		errcheckName,
+		linterName,
 		"errcheck is a program for checking for unchecked errors in Go code. "+
 			"These unchecked errors can be critical bugs in some cases",
 		[]*analysis.Analyzer{analyzer},
@@ -94,12 +95,12 @@ func runErrCheck(lintCtx *linter.Context, pass *analysis.Pass, checker *errcheck
 				code = err.FuncName
 			}
 
-			text = fmt.Sprintf("Error return value of %s is not checked", formatCode(code, lintCtx.Cfg))
+			text = fmt.Sprintf("Error return value of %s is not checked", internal.FormatCode(code, lintCtx.Cfg))
 		}
 
 		issues[i] = goanalysis.NewIssue(
 			&result.Issue{
-				FromLinter: errcheckName,
+				FromLinter: linterName,
 				Text:       text,
 				Pos:        err.Pos,
 			},
@@ -253,6 +254,7 @@ func readExcludeFile(name string) ([]string, error) {
 	if fh == nil {
 		return nil, fmt.Errorf("failed reading exclude file: %s: %w", name, err)
 	}
+	defer func() { _ = fh.Close() }()
 
 	scanner := bufio.NewScanner(fh)
 
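
Note: the only functional change in this hunk is the added deferred Close, so the exclude-file handle is no longer leaked. A self-contained sketch of the same read-and-close pattern (readLines is an illustrative name, not the vendored helper):

    package example

    import (
    	"bufio"
    	"fmt"
    	"os"
    )

    func readLines(name string) ([]string, error) {
    	fh, err := os.Open(name)
    	if err != nil {
    		return nil, fmt.Errorf("failed reading exclude file: %s: %w", name, err)
    	}
    	// Close the handle once scanning is done.
    	defer func() { _ = fh.Close() }()

    	var lines []string
    	scanner := bufio.NewScanner(fh)
    	for scanner.Scan() {
    		lines = append(lines, scanner.Text())
    	}
    	return lines, scanner.Err()
    }
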
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson/errchkjson.go
similarity index 59%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson/errchkjson.go
index 171de00a4f285ac53633e15177e99a36afdba9a9..8389a750c6c8facc453f697943fce3085e8db3f3 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson/errchkjson.go
@@ -1,14 +1,14 @@
-package golinters
+package errchkjson
 
 import (
 	"github.com/breml/errchkjson"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewErrChkJSONFuncName(cfg *config.ErrChkJSONSettings) *goanalysis.Linter {
+func New(cfg *config.ErrChkJSONSettings) *goanalysis.Linter {
 	a := errchkjson.NewAnalyzer()
 
 	cfgMap := map[string]map[string]any{}
@@ -23,10 +23,8 @@ func NewErrChkJSONFuncName(cfg *config.ErrChkJSONSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		"errchkjson",
-		"Checks types passed to the json encoding functions. "+
-			"Reports unsupported types and optionally reports occasions, "+
-			"where the check for the returned error can be omitted.",
+		a.Name,
+		a.Doc,
 		[]*analysis.Analyzer{a},
 		cfgMap,
 	).WithLoadMode(goanalysis.LoadModeTypesInfo)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errname.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errname.go
deleted file mode 100644
index 96564cfa8cbece215aa2c0482b629c32bba6e681..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errname.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package golinters
-
-import (
-	"github.com/Antonboom/errname/pkg/analyzer"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewErrName() *goanalysis.Linter {
-	return goanalysis.NewLinter(
-		"errname",
-		"Checks that sentinel errors are prefixed with the `Err` and error types are suffixed with the `Error`.",
-		[]*analysis.Analyzer{analyzer.New()},
-		nil,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errname/errname.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errname/errname.go
new file mode 100644
index 0000000000000000000000000000000000000000..f868854c128617b4494d15c8eb3d938c82f8adc8
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errname/errname.go
@@ -0,0 +1,19 @@
+package errname
+
+import (
+	"github.com/Antonboom/errname/pkg/analyzer"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New() *goanalysis.Linter {
+	a := analyzer.New()
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go
deleted file mode 100644
index cac94159d6f5757fedd4c653b99d786a1d4af47d..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package golinters
-
-import (
-	"github.com/polyfloyd/go-errorlint/errorlint"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewErrorLint(cfg *config.ErrorLintSettings) *goanalysis.Linter {
-	a := errorlint.NewAnalyzer()
-
-	cfgMap := map[string]map[string]any{}
-
-	if cfg != nil {
-		cfgMap[a.Name] = map[string]any{
-			"errorf":       cfg.Errorf,
-			"errorf-multi": cfg.ErrorfMulti,
-			"asserts":      cfg.Asserts,
-			"comparison":   cfg.Comparison,
-		}
-	}
-
-	return goanalysis.NewLinter(
-		a.Name,
-		"errorlint is a linter for that can be used to find code "+
-			"that will cause problems with the error wrapping scheme introduced in Go 1.13.",
-		[]*analysis.Analyzer{a},
-		cfgMap,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint/errorlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint/errorlint.go
new file mode 100644
index 0000000000000000000000000000000000000000..86db8552d04ff7b5d52e6e0a3e72ca7b635e25cd
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint/errorlint.go
@@ -0,0 +1,54 @@
+package errorlint
+
+import (
+	"github.com/polyfloyd/go-errorlint/errorlint"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(cfg *config.ErrorLintSettings) *goanalysis.Linter {
+	var opts []errorlint.Option
+
+	if cfg != nil {
+		ae := toAllowPairs(cfg.AllowedErrors)
+		if len(ae) > 0 {
+			opts = append(opts, errorlint.WithAllowedErrors(ae))
+		}
+
+		aew := toAllowPairs(cfg.AllowedErrorsWildcard)
+		if len(aew) > 0 {
+			opts = append(opts, errorlint.WithAllowedWildcard(aew))
+		}
+	}
+
+	a := errorlint.NewAnalyzer(opts...)
+
+	cfgMap := map[string]map[string]any{}
+
+	if cfg != nil {
+		cfgMap[a.Name] = map[string]any{
+			"errorf":       cfg.Errorf,
+			"errorf-multi": cfg.ErrorfMulti,
+			"asserts":      cfg.Asserts,
+			"comparison":   cfg.Comparison,
+		}
+	}
+
+	return goanalysis.NewLinter(
+		a.Name,
+		"errorlint is a linter for that can be used to find code "+
+			"that will cause problems with the error wrapping scheme introduced in Go 1.13.",
+		[]*analysis.Analyzer{a},
+		cfgMap,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
+
+func toAllowPairs(data []config.ErrorLintAllowPair) []errorlint.AllowPair {
+	var pairs []errorlint.AllowPair
+	for _, allowedError := range data {
+		pairs = append(pairs, errorlint.AllowPair(allowedError))
+	}
+	return pairs
+}
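
Note: toAllowPairs relies on Go's rule that struct types with identical field names, types, and order are directly convertible, which is what makes `errorlint.AllowPair(allowedError)` legal. A generic sketch of that rule (the field names below are placeholders, not errorlint's actual fields):

    package main

    import "fmt"

    type configPair struct{ Err, Fun string }

    type allowPair struct{ Err, Fun string }

    func main() {
    	// The conversion is legal because both struct types have
    	// identical field names, types, and ordering.
    	p := allowPair(configPair{Err: "io.EOF", Fun: "(io.Reader).Read"})
    	fmt.Println(p)
    }
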
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery/execinquery.go
similarity index 66%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery/execinquery.go
index 9911d315e0b313d3ca423843e80eba6d95182456..3832873c6367bb223480ff729d87820826551693 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery/execinquery.go
@@ -1,13 +1,13 @@
-package golinters
+package execinquery
 
 import (
 	"github.com/lufeee/execinquery"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewExecInQuery() *goanalysis.Linter {
+func New() *goanalysis.Linter {
 	a := execinquery.Analyzer
 
 	return goanalysis.NewLinter(
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive/exhaustive.go
similarity index 82%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive/exhaustive.go
index 3824afa0b9d1fb9b57f9d7cfa352eb54a7c97140..9249efb69a38735dc52f724353bac33a0e99548a 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive/exhaustive.go
@@ -1,14 +1,14 @@
-package golinters
+package exhaustive
 
 import (
 	"github.com/nishanths/exhaustive"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewExhaustive(settings *config.ExhaustiveSettings) *goanalysis.Linter {
+func New(settings *config.ExhaustiveSettings) *goanalysis.Linter {
 	a := exhaustive.Analyzer
 
 	var cfg map[string]map[string]any
@@ -23,6 +23,7 @@ func NewExhaustive(settings *config.ExhaustiveSettings) *goanalysis.Linter {
 				exhaustive.PackageScopeOnlyFlag:           settings.PackageScopeOnly,
 				exhaustive.ExplicitExhaustiveMapFlag:      settings.ExplicitExhaustiveMap,
 				exhaustive.ExplicitExhaustiveSwitchFlag:   settings.ExplicitExhaustiveSwitch,
+				exhaustive.DefaultCaseRequiredFlag:        settings.DefaultCaseRequired,
 			},
 		}
 	}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go
deleted file mode 100644
index 9bc9bbfb0b0e692b10debc23b8d384f6c4aceda5..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package golinters
-
-import (
-	"strings"
-
-	"github.com/mbilski/exhaustivestruct/pkg/analyzer"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewExhaustiveStruct(settings *config.ExhaustiveStructSettings) *goanalysis.Linter {
-	a := analyzer.Analyzer
-
-	var cfg map[string]map[string]any
-	if settings != nil {
-		cfg = map[string]map[string]any{
-			a.Name: {
-				"struct_patterns": strings.Join(settings.StructPatterns, ","),
-			},
-		}
-	}
-
-	return goanalysis.NewLinter(
-		a.Name,
-		a.Doc,
-		[]*analysis.Analyzer{a},
-		cfg,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustruct.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustruct/exhaustruct.go
similarity index 63%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustruct.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustruct/exhaustruct.go
index 5272879e1db182845a21b5aa3c9a6c2eca3eafc4..53ad87154fef6d9c7b03403e47d56e1c99e648ff 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustruct.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustruct/exhaustruct.go
@@ -1,14 +1,15 @@
-package golinters
+package exhaustruct
 
 import (
 	"github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 )
 
-func NewExhaustruct(settings *config.ExhaustructSettings) *goanalysis.Linter {
+func New(settings *config.ExhaustructSettings) *goanalysis.Linter {
 	var include, exclude []string
 	if settings != nil {
 		include = settings.Include
@@ -17,7 +18,7 @@ func NewExhaustruct(settings *config.ExhaustructSettings) *goanalysis.Linter {
 
 	a, err := analyzer.NewAnalyzer(include, exclude)
 	if err != nil {
-		linterLogger.Fatalf("exhaustruct configuration: %v", err)
+		internal.LinterLogger.Fatalf("exhaustruct configuration: %v", err)
 	}
 
 	return goanalysis.NewLinter(
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref/exportloopref.go
similarity index 66%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref/exportloopref.go
index 1131c575b950d7471ebb05e4c22a85e3d98617fe..e232f8045d3f38a152cb0d17fca10a8f335f7621 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref/exportloopref.go
@@ -1,13 +1,13 @@
-package golinters
+package exportloopref
 
 import (
 	"github.com/kyoh86/exportloopref"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewExportLoopRef() *goanalysis.Linter {
+func New() *goanalysis.Linter {
 	a := exportloopref.Analyzer
 
 	return goanalysis.NewLinter(
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/fatcontext/fatcontext.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/fatcontext/fatcontext.go
new file mode 100644
index 0000000000000000000000000000000000000000..378025a8cc5edd6437fe474f8841e71624cb32c4
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/fatcontext/fatcontext.go
@@ -0,0 +1,19 @@
+package fatcontext
+
+import (
+	"github.com/Crocmagnon/fatcontext/pkg/analyzer"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New() *goanalysis.Linter {
+	a := analyzer.Analyzer
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
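
Note: fatcontext is newly wired in here; it reports contexts that are rebuilt from themselves inside loops. A minimal, illustrative example of the pattern it flags (not taken from this change):

    package example

    import "context"

    type ctxKey string

    func addValues(ctx context.Context, items []string) context.Context {
    	for i, item := range items {
    		// The context is rebuilt from itself on every iteration,
    		// which is the pattern fatcontext reports.
    		ctx = context.WithValue(ctx, ctxKey(item), i)
    	}
    	return ctx
    }
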
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo/forbidigo.go
similarity index 88%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo/forbidigo.go
index 6aced29226e11c84d813748a9adb2bb44ff724d6..3572b60c23cb8bc4ba6889dbb182347bebf9031a 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo/forbidigo.go
@@ -1,4 +1,4 @@
-package golinters
+package forbidigo
 
 import (
 	"fmt"
@@ -8,21 +8,20 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const forbidigoName = "forbidigo"
+const linterName = "forbidigo"
 
-//nolint:dupl
-func NewForbidigo(settings *config.ForbidigoSettings) *goanalysis.Linter {
+func New(settings *config.ForbidigoSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: forbidigoName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			issues, err := runForbidigo(pass, settings)
@@ -45,7 +44,7 @@ func NewForbidigo(settings *config.ForbidigoSettings) *goanalysis.Linter {
 	// But we cannot make this depend on the settings and have to mirror the mode chosen in GetAllSupportedLinterConfigs,
 	// therefore we have to use LoadModeTypesInfo in all cases.
 	return goanalysis.NewLinter(
-		forbidigoName,
+		linterName,
 		"Forbids identifiers",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -74,7 +73,7 @@ func runForbidigo(pass *analysis.Pass, settings *config.ForbidigoSettings) ([]go
 
 	forbid, err := forbidigo.NewLinter(patterns, options...)
 	if err != nil {
-		return nil, fmt.Errorf("failed to create linter %q: %w", forbidigoName, err)
+		return nil, fmt.Errorf("failed to create linter %q: %w", linterName, err)
 	}
 
 	var issues []goanalysis.Issue
@@ -95,7 +94,7 @@ func runForbidigo(pass *analysis.Pass, settings *config.ForbidigoSettings) ([]go
 			issues = append(issues, goanalysis.NewIssue(&result.Issue{
 				Pos:        hint.Position(),
 				Text:       hint.Details(),
-				FromLinter: forbidigoName,
+				FromLinter: linterName,
 			}, pass))
 		}
 	}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert/forcetypeassert.go
similarity index 68%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert/forcetypeassert.go
index 873c833b5666922ca660413a0be540167cd32923..741b57ceacbe3b23d93d999e44f8729f0de761be 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert/forcetypeassert.go
@@ -1,13 +1,13 @@
-package golinters
+package forcetypeassert
 
 import (
 	"github.com/gostaticanalysis/forcetypeassert"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewForceTypeAssert() *goanalysis.Linter {
+func New() *goanalysis.Linter {
 	a := forcetypeassert.Analyzer
 
 	return goanalysis.NewLinter(
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen/funlen.go
similarity index 85%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen/funlen.go
index 8def9c1f678aefc87912e5f3c3ba3a3eb5279784..e43339394d75c188c731edcf83381e210e363077 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen/funlen.go
@@ -1,4 +1,4 @@
-package golinters
+package funlen
 
 import (
 	"go/token"
@@ -9,20 +9,19 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const funlenName = "funlen"
+const linterName = "funlen"
 
-//nolint:dupl
-func NewFunlen(settings *config.FunlenSettings) *goanalysis.Linter {
+func New(settings *config.FunlenSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: funlenName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			issues := runFunlen(pass, settings)
@@ -40,7 +39,7 @@ func NewFunlen(settings *config.FunlenSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		funlenName,
+		linterName,
 		"Tool for detection of long functions",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -68,7 +67,7 @@ func runFunlen(pass *analysis.Pass, settings *config.FunlenSettings) []goanalysi
 				Line:     i.Pos.Line,
 			},
 			Text:       strings.TrimRight(i.Message, "\n"),
-			FromLinter: funlenName,
+			FromLinter: linterName,
 		}, pass)
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci/gci.go
similarity index 53%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/gci/gci.go
index 3862267692f4b10b5641d144a9076e54fe8e8436..08733ca2f103732b339a19ba5fb86be3b5b18a58 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci/gci.go
@@ -1,33 +1,41 @@
-package golinters
+package gci
 
 import (
 	"fmt"
+	"sort"
+	"strings"
 	"sync"
 
 	gcicfg "github.com/daixiang0/gci/pkg/config"
 	"github.com/daixiang0/gci/pkg/gci"
 	"github.com/daixiang0/gci/pkg/io"
 	"github.com/daixiang0/gci/pkg/log"
+	"github.com/daixiang0/gci/pkg/section"
+	"github.com/golangci/modinfo"
 	"github.com/hexops/gotextdiff"
 	"github.com/hexops/gotextdiff/myers"
 	"github.com/hexops/gotextdiff/span"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 )
 
-const gciName = "gci"
+const linterName = "gci"
 
-func NewGci(settings *config.GciSettings) *goanalysis.Linter {
+func New(settings *config.GciSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: gciName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run:  goanalysis.DummyRun,
+		Requires: []*analysis.Analyzer{
+			modinfo.Analyzer,
+		},
 	}
 
 	var cfg *gcicfg.Config
@@ -46,21 +54,27 @@ func NewGci(settings *config.GciSettings) *goanalysis.Linter {
 		}
 
 		var err error
-		cfg, err = rawCfg.Parse()
+		cfg, err = YamlConfig{origin: rawCfg}.Parse()
 		if err != nil {
-			linterLogger.Fatalf("gci: configuration parsing: %v", err)
+			internal.LinterLogger.Fatalf("gci: configuration parsing: %v", err)
 		}
 	}
 
 	var lock sync.Mutex
 
 	return goanalysis.NewLinter(
-		gciName,
+		linterName,
 		"Gci controls Go package import order and makes it always deterministic.",
 		[]*analysis.Analyzer{analyzer},
 		nil,
 	).WithContextSetter(func(lintCtx *linter.Context) {
 		analyzer.Run = func(pass *analysis.Pass) (any, error) {
+			var err error
+			cfg.Sections, err = hackSectionList(pass, cfg)
+			if err != nil {
+				return nil, err
+			}
+
 			issues, err := runGci(pass, lintCtx, cfg, &lock)
 			if err != nil {
 				return nil, err
@@ -82,7 +96,7 @@ func NewGci(settings *config.GciSettings) *goanalysis.Linter {
 }
 
 func runGci(pass *analysis.Pass, lintCtx *linter.Context, cfg *gcicfg.Config, lock *sync.Mutex) ([]goanalysis.Issue, error) {
-	fileNames := getFileNames(pass)
+	fileNames := internal.GetFileNames(pass)
 
 	var diffs []string
 	err := diffFormattedFilesToArray(fileNames, *cfg, &diffs, lock)
@@ -97,7 +111,7 @@ func runGci(pass *analysis.Pass, lintCtx *linter.Context, cfg *gcicfg.Config, lo
 			continue
 		}
 
-		is, err := extractIssuesFromPatch(diff, lintCtx, gciName)
+		is, err := internal.ExtractIssuesFromPatch(diff, lintCtx, linterName, getIssuedTextGci)
 		if err != nil {
 			return nil, fmt.Errorf("can't extract issues from gci diff output %s: %w", diff, err)
 		}
@@ -110,6 +124,57 @@ func runGci(pass *analysis.Pass, lintCtx *linter.Context, cfg *gcicfg.Config, lo
 	return issues, nil
 }
 
+func getIssuedTextGci(settings *config.LintersSettings) string {
+	text := "File is not `gci`-ed"
+
+	hasOptions := settings.Gci.SkipGenerated || len(settings.Gci.Sections) > 0
+	if !hasOptions {
+		return text
+	}
+
+	text += " with"
+
+	if settings.Gci.SkipGenerated {
+		text += " --skip-generated"
+	}
+
+	if len(settings.Gci.Sections) > 0 {
+		for _, sect := range settings.Gci.Sections {
+			text += " -s " + sect
+		}
+	}
+
+	if settings.Gci.CustomOrder {
+		text += " --custom-order"
+	}
+
+	return text
+}
+
+func hackSectionList(pass *analysis.Pass, cfg *gcicfg.Config) (section.SectionList, error) {
+	var sections section.SectionList
+
+	for _, sect := range cfg.Sections {
+		// local module hack
+		if v, ok := sect.(*section.LocalModule); ok {
+			info, err := modinfo.FindModuleFromPass(pass)
+			if err != nil {
+				return nil, err
+			}
+
+			if info.Path == "" {
+				continue
+			}
+
+			v.Path = info.Path
+		}
+
+		sections = append(sections, sect)
+	}
+
+	return sections, nil
+}
+
 // diffFormattedFilesToArray is a copy of gci.DiffFormattedFilesToArray without io.StdInGenerator.
 // gci.DiffFormattedFilesToArray uses gci.processStdInAndGoFilesInPaths that uses io.StdInGenerator but stdin is not active on CI.
 // https://github.com/daixiang0/gci/blob/6f5cb16718ba07f0342a58de9b830ec5a6d58790/pkg/gci/gci.go#L63-L75
@@ -129,29 +194,55 @@ func diffFormattedFilesToArray(paths []string, cfg gcicfg.Config, diffs *[]strin
 	})
 }
 
-func getErrorTextForGci(settings config.GciSettings) string {
-	text := "File is not `gci`-ed"
+// Code below this comment is borrowed and modified from gci.
+// https://github.com/daixiang0/gci/blob/4725b0c101801e7449530eee2ddb0c72592e3405/pkg/config/config.go
+
+var defaultOrder = map[string]int{
+	section.StandardType:    0,
+	section.DefaultType:     1,
+	section.CustomType:      2,
+	section.BlankType:       3,
+	section.DotType:         4,
+	section.AliasType:       5,
+	section.LocalModuleType: 6,
+}
 
-	hasOptions := settings.SkipGenerated || len(settings.Sections) > 0
-	if !hasOptions {
-		return text
-	}
+type YamlConfig struct {
+	origin gcicfg.YamlConfig
+}
 
-	text += " with"
+//nolint:gocritic // code borrowed from gci and modified to fix LocalModule section behavior.
+func (g YamlConfig) Parse() (*gcicfg.Config, error) {
+	var err error
 
-	if settings.SkipGenerated {
-		text += " --skip-generated"
+	sections, err := section.Parse(g.origin.SectionStrings)
+	if err != nil {
+		return nil, err
 	}
 
-	if len(settings.Sections) > 0 {
-		for _, section := range settings.Sections {
-			text += " -s " + section
-		}
+	if sections == nil {
+		sections = section.DefaultSections()
 	}
 
-	if settings.CustomOrder {
-		text += " --custom-order"
+	// if default order sorted sections
+	if !g.origin.Cfg.CustomOrder {
+		sort.Slice(sections, func(i, j int) bool {
+			sectionI, sectionJ := sections[i].Type(), sections[j].Type()
+
+			if strings.Compare(sectionI, sectionJ) == 0 {
+				return strings.Compare(sections[i].String(), sections[j].String()) < 0
+			}
+			return defaultOrder[sectionI] < defaultOrder[sectionJ]
+		})
 	}
 
-	return text
+	sectionSeparators, err := section.Parse(g.origin.SectionSeparatorStrings)
+	if err != nil {
+		return nil, err
+	}
+	if sectionSeparators == nil {
+		sectionSeparators = section.DefaultSectionSeparators()
+	}
+
+	return &gcicfg.Config{BoolConfig: g.origin.Cfg, Sections: sections, SectionSeparators: sectionSeparators}, nil
 }
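
Note: when custom-order is off, the borrowed Parse sorts the sections by the defaultOrder table and breaks ties by string comparison. A toy sketch of the same ordering logic, using plain strings instead of gci's section types:

    package example

    import "sort"

    var sectionOrder = map[string]int{"standard": 0, "default": 1, "custom": 2}

    // sortSections turns ["custom", "standard", "default"] into
    // ["standard", "default", "custom"], mirroring the comparator above.
    func sortSections(sections []string) {
    	sort.Slice(sections, func(i, j int) bool {
    		return sectionOrder[sections[i]] < sectionOrder[sections[j]]
    	})
    }
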
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter.go
deleted file mode 100644
index 7b8102b6314f6bd5ffd0f8b99cdf3b8000b11a94..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter.go
+++ /dev/null
@@ -1,33 +0,0 @@
-package golinters
-
-import (
-	"github.com/nunnatsa/ginkgolinter"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewGinkgoLinter(cfg *config.GinkgoLinterSettings) *goanalysis.Linter {
-	a := ginkgolinter.NewAnalyzer()
-
-	cfgMap := make(map[string]map[string]any)
-	if cfg != nil {
-		cfgMap[a.Name] = map[string]any{
-			"suppress-len-assertion":     cfg.SuppressLenAssertion,
-			"suppress-nil-assertion":     cfg.SuppressNilAssertion,
-			"suppress-err-assertion":     cfg.SuppressErrAssertion,
-			"suppress-compare-assertion": cfg.SuppressCompareAssertion,
-			"suppress-async-assertion":   cfg.SuppressAsyncAssertion,
-			"forbid-focus-container":     cfg.ForbidFocusContainer,
-			"allow-havelen-0":            cfg.AllowHaveLenZero,
-		}
-	}
-
-	return goanalysis.NewLinter(
-		a.Name,
-		"enforces standards of using ginkgo and gomega",
-		[]*analysis.Analyzer{a},
-		cfgMap,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go
new file mode 100644
index 0000000000000000000000000000000000000000..54d20725701f6eba026c1c66fb4fac8d7203c349
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go
@@ -0,0 +1,39 @@
+package ginkgolinter
+
+import (
+	"github.com/nunnatsa/ginkgolinter"
+	"github.com/nunnatsa/ginkgolinter/types"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.GinkgoLinterSettings) *goanalysis.Linter {
+	cfg := &types.Config{}
+
+	if settings != nil {
+		cfg = &types.Config{
+			SuppressLen:            types.Boolean(settings.SuppressLenAssertion),
+			SuppressNil:            types.Boolean(settings.SuppressNilAssertion),
+			SuppressErr:            types.Boolean(settings.SuppressErrAssertion),
+			SuppressCompare:        types.Boolean(settings.SuppressCompareAssertion),
+			SuppressAsync:          types.Boolean(settings.SuppressAsyncAssertion),
+			ForbidFocus:            types.Boolean(settings.ForbidFocusContainer),
+			SuppressTypeCompare:    types.Boolean(settings.SuppressTypeCompareWarning),
+			AllowHaveLen0:          types.Boolean(settings.AllowHaveLenZero),
+			ForceExpectTo:          types.Boolean(settings.ForceExpectTo),
+			ValidateAsyncIntervals: types.Boolean(settings.ValidateAsyncIntervals),
+			ForbidSpecPollution:    types.Boolean(settings.ForbidSpecPollution),
+		}
+	}
+
+	a := ginkgolinter.NewAnalyzerWithConfig(cfg)
+
+	return goanalysis.NewLinter(
+		a.Name,
+		"enforces standards of using ginkgo and gomega",
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go
deleted file mode 100644
index 284215dfc65dfac96572de258d8ce11896e50db8..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go
+++ /dev/null
@@ -1,41 +0,0 @@
-package goanalysis
-
-import (
-	"go/types"
-
-	"golang.org/x/tools/go/analysis"
-	"golang.org/x/tools/go/loader" //nolint:staticcheck // it's an adapter for golang.org/x/tools/go/packages
-)
-
-func MakeFakeLoaderProgram(pass *analysis.Pass) *loader.Program {
-	var info types.Info
-	if pass.TypesInfo != nil {
-		info = *pass.TypesInfo
-	}
-
-	prog := &loader.Program{
-		Fset: pass.Fset,
-		Created: []*loader.PackageInfo{
-			{
-				Pkg:                   pass.Pkg,
-				Importable:            true, // not used
-				TransitivelyErrorFree: true, // TODO ???
-
-				Files:  pass.Files,
-				Errors: nil,
-				Info:   info,
-			},
-		},
-		AllPackages: map[*types.Package]*loader.PackageInfo{
-			pass.Pkg: {
-				Pkg:                   pass.Pkg,
-				Importable:            true,
-				TransitivelyErrorFree: true,
-				Files:                 pass.Files,
-				Errors:                nil,
-				Info:                  info,
-			},
-		},
-	}
-	return prog
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocheckcompilerdirectives.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocheckcompilerdirectives/gocheckcompilerdirectives.go
similarity index 67%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/gocheckcompilerdirectives.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/gocheckcompilerdirectives/gocheckcompilerdirectives.go
index 2592c899463b99a747a65f16f0d803f07223cfe1..be604d805b4bca69cbf2ae05f4c73cdad77014ef 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocheckcompilerdirectives.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocheckcompilerdirectives/gocheckcompilerdirectives.go
@@ -1,13 +1,13 @@
-package golinters
+package gocheckcompilerdirectives
 
 import (
 	"4d63.com/gocheckcompilerdirectives/checkcompilerdirectives"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewGoCheckCompilerDirectives() *goanalysis.Linter {
+func New() *goanalysis.Linter {
 	a := checkcompilerdirectives.Analyzer()
 
 	return goanalysis.NewLinter(
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go
deleted file mode 100644
index 6e18aeb27d90b5a1bdcaab9b742c4740c8a2286a..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go
+++ /dev/null
@@ -1,29 +0,0 @@
-package golinters
-
-import (
-	"4d63.com/gochecknoglobals/checknoglobals"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewGochecknoglobals() *goanalysis.Linter {
-	gochecknoglobals := checknoglobals.Analyzer()
-
-	// gochecknoglobals only lints test files if the `-t` flag is passed, so we
-	// pass the `t` flag as true to the analyzer before running it. This can be
-	// turned off by using the regular golangci-lint flags such as `--tests` or
-	// `--skip-files`.
-	linterConfig := map[string]map[string]any{
-		gochecknoglobals.Name: {
-			"t": true,
-		},
-	}
-
-	return goanalysis.NewLinter(
-		gochecknoglobals.Name,
-		gochecknoglobals.Doc,
-		[]*analysis.Analyzer{gochecknoglobals},
-		linterConfig,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals/gochecknoglobals.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals/gochecknoglobals.go
new file mode 100644
index 0000000000000000000000000000000000000000..af22b2f8e964e25826a519f045166bc33404c417
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals/gochecknoglobals.go
@@ -0,0 +1,26 @@
+package gochecknoglobals
+
+import (
+	"4d63.com/gochecknoglobals/checknoglobals"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New() *goanalysis.Linter {
+	a := checknoglobals.Analyzer()
+
+	// gochecknoglobals only lints test files if the `-t` flag is passed,
+	// so we pass the `t` flag as true to the analyzer before running it.
+	// This can be turned off by using the regular golangci-lint flags such as `--tests` or `--exclude-files`.
+	linterConfig := map[string]map[string]any{
+		a.Name: {"t": true},
+	}
+
+	return goanalysis.NewLinter(
+		a.Name,
+		"Check that no global variables exist.",
+		[]*analysis.Analyzer{a},
+		linterConfig,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
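
A minimal sketch of what gochecknoglobals reports once the "t" flag above is forced on; the identifiers are made up for the example, and only package-level variables are flagged (constants are not):

    package example

    import "time"

    // Reported by gochecknoglobals: a mutable package-level variable.
    // Because the adapter sets the analyzer's "t" flag to true, the same
    // declaration in a _test.go file would be reported as well.
    var defaultTimeout = 30 * time.Second

    // Not reported: constants carry no mutable state.
    const maxRetries = 3
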
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits/gochecknoinits.go
similarity index 74%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits/gochecknoinits.go
index a51b531b94adbe1ae7bc51d5e4279fb905bdbe28..1345eb8c29f9b314e637a7d183657d4fbdb64156 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits/gochecknoinits.go
@@ -1,4 +1,4 @@
-package golinters
+package gochecknoinits
 
 import (
 	"fmt"
@@ -8,19 +8,20 @@ import (
 
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const gochecknoinitsName = "gochecknoinits"
+const linterName = "gochecknoinits"
 
-func NewGochecknoinits() *goanalysis.Linter {
+func New() *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: gochecknoinitsName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			var res []goanalysis.Issue
@@ -43,7 +44,7 @@ func NewGochecknoinits() *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		gochecknoinitsName,
+		linterName,
 		"Checks that no init functions are present in Go code",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -60,12 +61,12 @@ func checkFileForInits(f *ast.File, fset *token.FileSet) []result.Issue {
 			continue
 		}
 
-		name := funcDecl.Name.Name
-		if name == "init" && funcDecl.Recv.NumFields() == 0 {
+		fnName := funcDecl.Name.Name
+		if fnName == "init" && funcDecl.Recv.NumFields() == 0 {
 			res = append(res, result.Issue{
 				Pos:        fset.Position(funcDecl.Pos()),
-				Text:       fmt.Sprintf("don't use %s function", formatCode(name, nil)),
-				FromLinter: gochecknoinitsName,
+				Text:       fmt.Sprintf("don't use %s function", internal.FormatCode(fnName, nil)),
+				FromLinter: linterName,
 			})
 		}
 	}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go
new file mode 100644
index 0000000000000000000000000000000000000000..446f0e564f0728884b6784cfc144c7b6b606f594
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go
@@ -0,0 +1,80 @@
+package gochecksumtype
+
+import (
+	"strings"
+	"sync"
+
+	gochecksumtype "github.com/alecthomas/go-check-sumtype"
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/packages"
+
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/lint/linter"
+	"github.com/golangci/golangci-lint/pkg/result"
+)
+
+const linterName = "gochecksumtype"
+
+func New() *goanalysis.Linter {
+	var mu sync.Mutex
+	var resIssues []goanalysis.Issue
+
+	analyzer := &analysis.Analyzer{
+		Name: linterName,
+		Doc:  goanalysis.TheOnlyanalyzerDoc,
+		Run: func(pass *analysis.Pass) (any, error) {
+			issues, err := runGoCheckSumType(pass)
+			if err != nil {
+				return nil, err
+			}
+
+			if len(issues) == 0 {
+				return nil, nil
+			}
+
+			mu.Lock()
+			resIssues = append(resIssues, issues...)
+			mu.Unlock()
+
+			return nil, nil
+		},
+	}
+
+	return goanalysis.NewLinter(
+		linterName,
+		`Run exhaustiveness checks on Go "sum types"`,
+		[]*analysis.Analyzer{analyzer},
+		nil,
+	).WithIssuesReporter(func(_ *linter.Context) []goanalysis.Issue {
+		return resIssues
+	}).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
+
+func runGoCheckSumType(pass *analysis.Pass) ([]goanalysis.Issue, error) {
+	var resIssues []goanalysis.Issue
+
+	pkg := &packages.Package{
+		Fset:      pass.Fset,
+		Syntax:    pass.Files,
+		Types:     pass.Pkg,
+		TypesInfo: pass.TypesInfo,
+	}
+
+	var unknownError error
+	errors := gochecksumtype.Run([]*packages.Package{pkg})
+	for _, err := range errors {
+		err, ok := err.(gochecksumtype.Error)
+		if !ok {
+			unknownError = err
+			continue
+		}
+
+		resIssues = append(resIssues, goanalysis.NewIssue(&result.Issue{
+			FromLinter: linterName,
+			Text:       strings.TrimPrefix(err.Error(), err.Pos().String()+": "),
+			Pos:        err.Pos(),
+		}, pass))
+	}
+
+	return resIssues, unknownError
+}
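
As a rough sketch of the kind of code this adapter hands to go-check-sumtype: the upstream checker treats an interface annotated with a //sumtype:decl comment as a sealed sum type and reports type switches over it that omit a variant. The sketch below assumes that convention; identifiers are illustrative:

    package shapes

    //sumtype:decl
    type Shape interface{ isShape() }

    type Circle struct{ Radius float64 }
    type Square struct{ Side float64 }

    func (Circle) isShape() {}
    func (Square) isShape() {}

    // gochecksumtype reports the switch below as non-exhaustive:
    // the Square variant is not handled and there is no default case.
    func area(s Shape) float64 {
        switch v := s.(type) {
        case Circle:
            return 3.14159 * v.Radius * v.Radius
        }
        return 0
    }
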
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit/gocognit.go
similarity index 82%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit/gocognit.go
index 406d34ed6cb88f424b63270a23c03b1b63b7b5c4..5fe0f90f09f703893ef0ce853be1c8e2559e1eb7 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit/gocognit.go
@@ -1,4 +1,4 @@
-package golinters
+package gocognit
 
 import (
 	"fmt"
@@ -9,15 +9,15 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const gocognitName = "gocognit"
+const linterName = "gocognit"
 
-//nolint:dupl
-func NewGocognit(settings *config.GocognitSettings) *goanalysis.Linter {
+func New(settings *config.GocognitSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
@@ -40,7 +40,7 @@ func NewGocognit(settings *config.GocognitSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		gocognitName,
+		linterName,
 		"Computes and checks the cognitive complexity of functions",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -71,8 +71,8 @@ func runGocognit(pass *analysis.Pass, settings *config.GocognitSettings) []goana
 		issues = append(issues, goanalysis.NewIssue(&result.Issue{
 			Pos: s.Pos,
 			Text: fmt.Sprintf("cognitive complexity %d of func %s is high (> %d)",
-				s.Complexity, formatCode(s.FuncName, nil), settings.MinComplexity),
-			FromLinter: gocognitName,
+				s.Complexity, internal.FormatCode(s.FuncName, nil), settings.MinComplexity),
+			FromLinter: linterName,
 		}, pass))
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst/goconst.go
similarity index 80%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst/goconst.go
index e277509d2d82fe931abc2ccf8a3a1a85d80220e6..07bed301f328257b02e6edd9ffe580645b1b6a5a 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst/goconst.go
@@ -1,4 +1,4 @@
-package golinters
+package goconst
 
 import (
 	"fmt"
@@ -8,20 +8,20 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const goconstName = "goconst"
+const linterName = "goconst"
 
-//nolint:dupl
-func NewGoconst(settings *config.GoConstSettings) *goanalysis.Linter {
+func New(settings *config.GoConstSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: goconstName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			issues, err := runGoconst(pass, settings)
@@ -42,7 +42,7 @@ func NewGoconst(settings *config.GoConstSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		goconstName,
+		linterName,
 		"Finds repeated strings that could be replaced by a constant",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -53,6 +53,7 @@ func NewGoconst(settings *config.GoConstSettings) *goanalysis.Linter {
 
 func runGoconst(pass *analysis.Pass, settings *config.GoConstSettings) ([]goanalysis.Issue, error) {
 	cfg := goconstAPI.Config{
+		IgnoreStrings:      settings.IgnoreStrings,
 		IgnoreTests:        settings.IgnoreTests,
 		MatchWithConstants: settings.MatchWithConstants,
 		MinStringLength:    settings.MinStringLen,
@@ -78,18 +79,18 @@ func runGoconst(pass *analysis.Pass, settings *config.GoConstSettings) ([]goanal
 
 	res := make([]goanalysis.Issue, 0, len(lintIssues))
 	for _, i := range lintIssues {
-		text := fmt.Sprintf("string %s has %d occurrences", formatCode(i.Str, nil), i.OccurrencesCount)
+		text := fmt.Sprintf("string %s has %d occurrences", internal.FormatCode(i.Str, nil), i.OccurrencesCount)
 
 		if i.MatchingConst == "" {
 			text += ", make it a constant"
 		} else {
-			text += fmt.Sprintf(", but such constant %s already exists", formatCode(i.MatchingConst, nil))
+			text += fmt.Sprintf(", but such constant %s already exists", internal.FormatCode(i.MatchingConst, nil))
 		}
 
 		res = append(res, goanalysis.NewIssue(&result.Issue{
 			Pos:        i.Pos,
 			Text:       text,
-			FromLinter: goconstName,
+			FromLinter: linterName,
 		}, pass))
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go
deleted file mode 100644
index 1319c72d9d4231bfccabc033494c5d07753f6fb6..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go
+++ /dev/null
@@ -1,641 +0,0 @@
-package golinters
-
-import (
-	"errors"
-	"fmt"
-	"go/ast"
-	"go/types"
-	"path/filepath"
-	"reflect"
-	"runtime"
-	"sort"
-	"strings"
-	"sync"
-
-	"github.com/go-critic/go-critic/checkers"
-	gocriticlinter "github.com/go-critic/go-critic/linter"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/logutils"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-const goCriticName = "gocritic"
-
-var (
-	goCriticDebugf  = logutils.Debug(logutils.DebugKeyGoCritic)
-	isGoCriticDebug = logutils.HaveDebugTag(logutils.DebugKeyGoCritic)
-)
-
-func NewGoCritic(settings *config.GoCriticSettings, cfg *config.Config) *goanalysis.Linter {
-	var mu sync.Mutex
-	var resIssues []goanalysis.Issue
-
-	wrapper := &goCriticWrapper{
-		cfg:   cfg,
-		sizes: types.SizesFor("gc", runtime.GOARCH),
-	}
-
-	analyzer := &analysis.Analyzer{
-		Name: goCriticName,
-		Doc:  goanalysis.TheOnlyanalyzerDoc,
-		Run: func(pass *analysis.Pass) (any, error) {
-			issues, err := wrapper.run(pass)
-			if err != nil {
-				return nil, err
-			}
-
-			if len(issues) == 0 {
-				return nil, nil
-			}
-
-			mu.Lock()
-			resIssues = append(resIssues, issues...)
-			mu.Unlock()
-
-			return nil, nil
-		},
-	}
-
-	return goanalysis.NewLinter(
-		goCriticName,
-		`Provides diagnostics that check for bugs, performance and style issues.
-Extensible without recompilation through dynamic rules.
-Dynamic rules are written declaratively with AST patterns, filters, report message and optional suggestion.`,
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).
-		WithContextSetter(func(context *linter.Context) {
-			wrapper.init(settings, context.Log)
-		}).
-		WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
-			return resIssues
-		}).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
-
-type goCriticWrapper struct {
-	settingsWrapper *goCriticSettingsWrapper
-	cfg             *config.Config
-	sizes           types.Sizes
-	once            sync.Once
-}
-
-func (w *goCriticWrapper) init(settings *config.GoCriticSettings, logger logutils.Log) {
-	if settings == nil {
-		return
-	}
-
-	w.once.Do(func() {
-		err := checkers.InitEmbeddedRules()
-		if err != nil {
-			logger.Fatalf("%s: %v: setting an explicit GOROOT can fix this problem.", goCriticName, err)
-		}
-	})
-
-	settingsWrapper := newGoCriticSettingsWrapper(settings, logger)
-
-	settingsWrapper.inferEnabledChecks()
-
-	if err := settingsWrapper.validate(); err != nil {
-		logger.Fatalf("%s: invalid settings: %s", goCriticName, err)
-	}
-
-	w.settingsWrapper = settingsWrapper
-}
-
-func (w *goCriticWrapper) run(pass *analysis.Pass) ([]goanalysis.Issue, error) {
-	if w.settingsWrapper == nil {
-		return nil, fmt.Errorf("the settings wrapper is nil")
-	}
-
-	linterCtx := gocriticlinter.NewContext(pass.Fset, w.sizes)
-
-	linterCtx.SetGoVersion(w.settingsWrapper.Go)
-
-	enabledCheckers, err := w.buildEnabledCheckers(linterCtx)
-	if err != nil {
-		return nil, err
-	}
-
-	linterCtx.SetPackageInfo(pass.TypesInfo, pass.Pkg)
-
-	pkgIssues := runGocriticOnPackage(linterCtx, enabledCheckers, pass.Files)
-
-	issues := make([]goanalysis.Issue, 0, len(pkgIssues))
-	for i := range pkgIssues {
-		issues = append(issues, goanalysis.NewIssue(&pkgIssues[i], pass))
-	}
-
-	return issues, nil
-}
-
-func (w *goCriticWrapper) buildEnabledCheckers(linterCtx *gocriticlinter.Context) ([]*gocriticlinter.Checker, error) {
-	allParams := w.settingsWrapper.getLowerCasedParams()
-
-	var enabledCheckers []*gocriticlinter.Checker
-	for _, info := range gocriticlinter.GetCheckersInfo() {
-		if !w.settingsWrapper.isCheckEnabled(info.Name) {
-			continue
-		}
-
-		if err := w.configureCheckerInfo(info, allParams); err != nil {
-			return nil, err
-		}
-
-		c, err := gocriticlinter.NewChecker(linterCtx, info)
-		if err != nil {
-			return nil, err
-		}
-		enabledCheckers = append(enabledCheckers, c)
-	}
-
-	return enabledCheckers, nil
-}
-
-func runGocriticOnPackage(linterCtx *gocriticlinter.Context, checks []*gocriticlinter.Checker,
-	files []*ast.File) []result.Issue {
-	var res []result.Issue
-	for _, f := range files {
-		filename := filepath.Base(linterCtx.FileSet.Position(f.Pos()).Filename)
-		linterCtx.SetFileInfo(filename, f)
-
-		issues := runGocriticOnFile(linterCtx, f, checks)
-		res = append(res, issues...)
-	}
-	return res
-}
-
-func runGocriticOnFile(linterCtx *gocriticlinter.Context, f *ast.File, checks []*gocriticlinter.Checker) []result.Issue {
-	var res []result.Issue
-
-	for _, c := range checks {
-		// All checkers are expected to use *lint.Context
-		// as read-only structure, so no copying is required.
-		for _, warn := range c.Check(f) {
-			pos := linterCtx.FileSet.Position(warn.Pos)
-			issue := result.Issue{
-				Pos:        pos,
-				Text:       fmt.Sprintf("%s: %s", c.Info.Name, warn.Text),
-				FromLinter: goCriticName,
-			}
-
-			if warn.HasQuickFix() {
-				issue.Replacement = &result.Replacement{
-					Inline: &result.InlineFix{
-						StartCol:  pos.Column - 1,
-						Length:    int(warn.Suggestion.To - warn.Suggestion.From),
-						NewString: string(warn.Suggestion.Replacement),
-					},
-				}
-			}
-
-			res = append(res, issue)
-		}
-	}
-
-	return res
-}
-
-func (w *goCriticWrapper) configureCheckerInfo(info *gocriticlinter.CheckerInfo, allParams map[string]config.GoCriticCheckSettings) error {
-	params := allParams[strings.ToLower(info.Name)]
-	if params == nil { // no config for this checker
-		return nil
-	}
-
-	infoParams := normalizeCheckerInfoParams(info)
-	for k, p := range params {
-		v, ok := infoParams[k]
-		if ok {
-			v.Value = w.normalizeCheckerParamsValue(p)
-			continue
-		}
-
-		// param `k` isn't supported
-		if len(info.Params) == 0 {
-			return fmt.Errorf("checker %s config param %s doesn't exist: checker doesn't have params",
-				info.Name, k)
-		}
-
-		var supportedKeys []string
-		for sk := range info.Params {
-			supportedKeys = append(supportedKeys, sk)
-		}
-		sort.Strings(supportedKeys)
-
-		return fmt.Errorf("checker %s config param %s doesn't exist, all existing: %s",
-			info.Name, k, supportedKeys)
-	}
-
-	return nil
-}
-
-func normalizeCheckerInfoParams(info *gocriticlinter.CheckerInfo) gocriticlinter.CheckerParams {
-	// lowercase info param keys here because golangci-lint's config parser lowercases all strings
-	ret := gocriticlinter.CheckerParams{}
-	for k, v := range info.Params {
-		ret[strings.ToLower(k)] = v
-	}
-
-	return ret
-}
-
-// normalizeCheckerParamsValue normalizes value types.
-// go-critic asserts that CheckerParam.Value has some specific types,
-// but the file parsers (TOML, YAML, JSON) don't create the same representation for raw type.
-// then we have to convert value types into the expected value types.
-// Maybe in the future, this kind of conversion will be done in go-critic itself.
-func (w *goCriticWrapper) normalizeCheckerParamsValue(p any) any {
-	rv := reflect.ValueOf(p)
-	switch rv.Type().Kind() {
-	case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int:
-		return int(rv.Int())
-	case reflect.Bool:
-		return rv.Bool()
-	case reflect.String:
-		// Perform variable substitution.
-		return strings.ReplaceAll(rv.String(), "${configDir}", w.cfg.GetConfigDir())
-	default:
-		return p
-	}
-}
-
-// TODO(ldez): rewrite and simplify goCriticSettingsWrapper.
-
-type goCriticSettingsWrapper struct {
-	*config.GoCriticSettings
-
-	logger logutils.Log
-
-	allCheckers   []*gocriticlinter.CheckerInfo
-	allCheckerMap map[string]*gocriticlinter.CheckerInfo
-
-	inferredEnabledChecks map[string]bool
-}
-
-func newGoCriticSettingsWrapper(settings *config.GoCriticSettings, logger logutils.Log) *goCriticSettingsWrapper {
-	allCheckers := gocriticlinter.GetCheckersInfo()
-
-	allCheckerMap := make(map[string]*gocriticlinter.CheckerInfo)
-	for _, checkInfo := range allCheckers {
-		allCheckerMap[checkInfo.Name] = checkInfo
-	}
-
-	return &goCriticSettingsWrapper{
-		GoCriticSettings:      settings,
-		logger:                logger,
-		allCheckers:           allCheckers,
-		allCheckerMap:         allCheckerMap,
-		inferredEnabledChecks: map[string]bool{},
-	}
-}
-
-func (s *goCriticSettingsWrapper) buildTagToCheckersMap() map[string][]string {
-	tagToCheckers := map[string][]string{}
-
-	for _, checker := range s.allCheckers {
-		for _, tag := range checker.Tags {
-			tagToCheckers[tag] = append(tagToCheckers[tag], checker.Name)
-		}
-	}
-
-	return tagToCheckers
-}
-
-func (s *goCriticSettingsWrapper) checkerTagsDebugf() {
-	if !isGoCriticDebug {
-		return
-	}
-
-	tagToCheckers := s.buildTagToCheckersMap()
-
-	allTags := make([]string, 0, len(tagToCheckers))
-	for tag := range tagToCheckers {
-		allTags = append(allTags, tag)
-	}
-
-	sort.Strings(allTags)
-
-	goCriticDebugf("All gocritic existing tags and checks:")
-	for _, tag := range allTags {
-		debugChecksListf(tagToCheckers[tag], "  tag %q", tag)
-	}
-}
-
-func (s *goCriticSettingsWrapper) disabledCheckersDebugf() {
-	if !isGoCriticDebug {
-		return
-	}
-
-	var disabledCheckers []string
-	for _, checker := range s.allCheckers {
-		if s.inferredEnabledChecks[strings.ToLower(checker.Name)] {
-			continue
-		}
-
-		disabledCheckers = append(disabledCheckers, checker.Name)
-	}
-
-	if len(disabledCheckers) == 0 {
-		goCriticDebugf("All checks are enabled")
-	} else {
-		debugChecksListf(disabledCheckers, "Final not used")
-	}
-}
-
-func (s *goCriticSettingsWrapper) inferEnabledChecks() {
-	s.checkerTagsDebugf()
-
-	enabledByDefaultChecks := s.getDefaultEnabledCheckersNames()
-	debugChecksListf(enabledByDefaultChecks, "Enabled by default")
-
-	disabledByDefaultChecks := s.getDefaultDisabledCheckersNames()
-	debugChecksListf(disabledByDefaultChecks, "Disabled by default")
-
-	enabledChecks := make([]string, 0, len(s.EnabledTags)+len(enabledByDefaultChecks))
-
-	// EnabledTags
-	if len(s.EnabledTags) != 0 {
-		tagToCheckers := s.buildTagToCheckersMap()
-		for _, tag := range s.EnabledTags {
-			enabledChecks = append(enabledChecks, tagToCheckers[tag]...)
-		}
-
-		debugChecksListf(enabledChecks, "Enabled by config tags %s", sprintStrings(s.EnabledTags))
-	}
-
-	if !(len(s.EnabledTags) == 0 && len(s.EnabledChecks) != 0) {
-		// don't use default checks only if we have no enabled tags and enable some checks manually
-		enabledChecks = append(enabledChecks, enabledByDefaultChecks...)
-	}
-
-	// DisabledTags
-	if len(s.DisabledTags) != 0 {
-		enabledChecks = s.filterByDisableTags(enabledChecks, s.DisabledTags)
-	}
-
-	// EnabledChecks
-	if len(s.EnabledChecks) != 0 {
-		debugChecksListf(s.EnabledChecks, "Enabled by config")
-
-		alreadyEnabledChecksSet := stringsSliceToSet(enabledChecks)
-		for _, enabledCheck := range s.EnabledChecks {
-			if alreadyEnabledChecksSet[enabledCheck] {
-				s.logger.Warnf("%s: no need to enable check %q: it's already enabled", goCriticName, enabledCheck)
-				continue
-			}
-			enabledChecks = append(enabledChecks, enabledCheck)
-		}
-	}
-
-	// DisabledChecks
-	if len(s.DisabledChecks) != 0 {
-		debugChecksListf(s.DisabledChecks, "Disabled by config")
-
-		enabledChecksSet := stringsSliceToSet(enabledChecks)
-		for _, disabledCheck := range s.DisabledChecks {
-			if !enabledChecksSet[disabledCheck] {
-				s.logger.Warnf("%s: check %q was explicitly disabled via config. However, as this check "+
-					"is disabled by default, there is no need to explicitly disable it via config.", goCriticName, disabledCheck)
-				continue
-			}
-			delete(enabledChecksSet, disabledCheck)
-		}
-
-		enabledChecks = nil
-		for enabledCheck := range enabledChecksSet {
-			enabledChecks = append(enabledChecks, enabledCheck)
-		}
-	}
-
-	s.inferredEnabledChecks = map[string]bool{}
-	for _, check := range enabledChecks {
-		s.inferredEnabledChecks[strings.ToLower(check)] = true
-	}
-
-	debugChecksListf(enabledChecks, "Final used")
-
-	s.disabledCheckersDebugf()
-}
-
-func (s *goCriticSettingsWrapper) validate() error {
-	if len(s.EnabledTags) == 0 {
-		if len(s.EnabledChecks) != 0 && len(s.DisabledChecks) != 0 {
-			return errors.New("both enabled and disabled check aren't allowed for gocritic")
-		}
-	} else {
-		if err := validateStringsUniq(s.EnabledTags); err != nil {
-			return fmt.Errorf("validate enabled tags: %w", err)
-		}
-
-		tagToCheckers := s.buildTagToCheckersMap()
-
-		for _, tag := range s.EnabledTags {
-			if _, ok := tagToCheckers[tag]; !ok {
-				return fmt.Errorf("gocritic [enabled]tag %q doesn't exist", tag)
-			}
-		}
-	}
-
-	if len(s.DisabledTags) > 0 {
-		tagToCheckers := s.buildTagToCheckersMap()
-		for _, tag := range s.EnabledTags {
-			if _, ok := tagToCheckers[tag]; !ok {
-				return fmt.Errorf("gocritic [disabled]tag %q doesn't exist", tag)
-			}
-		}
-	}
-
-	if err := validateStringsUniq(s.EnabledChecks); err != nil {
-		return fmt.Errorf("validate enabled checks: %w", err)
-	}
-
-	if err := validateStringsUniq(s.DisabledChecks); err != nil {
-		return fmt.Errorf("validate disabled checks: %w", err)
-	}
-
-	if err := s.validateCheckerNames(); err != nil {
-		return fmt.Errorf("validation failed: %w", err)
-	}
-
-	return nil
-}
-
-func (s *goCriticSettingsWrapper) isCheckEnabled(name string) bool {
-	return s.inferredEnabledChecks[strings.ToLower(name)]
-}
-
-// getAllCheckerNames returns a map containing all checker names supported by gocritic.
-func (s *goCriticSettingsWrapper) getAllCheckerNames() map[string]bool {
-	allCheckerNames := make(map[string]bool, len(s.allCheckers))
-
-	for _, checker := range s.allCheckers {
-		allCheckerNames[strings.ToLower(checker.Name)] = true
-	}
-
-	return allCheckerNames
-}
-
-func (s *goCriticSettingsWrapper) getDefaultEnabledCheckersNames() []string {
-	var enabled []string
-
-	for _, info := range s.allCheckers {
-		enable := s.isEnabledByDefaultCheck(info)
-		if enable {
-			enabled = append(enabled, info.Name)
-		}
-	}
-
-	return enabled
-}
-
-func (s *goCriticSettingsWrapper) getDefaultDisabledCheckersNames() []string {
-	var disabled []string
-
-	for _, info := range s.allCheckers {
-		enable := s.isEnabledByDefaultCheck(info)
-		if !enable {
-			disabled = append(disabled, info.Name)
-		}
-	}
-
-	return disabled
-}
-
-func (s *goCriticSettingsWrapper) validateCheckerNames() error {
-	allowedNames := s.getAllCheckerNames()
-
-	for _, name := range s.EnabledChecks {
-		if !allowedNames[strings.ToLower(name)] {
-			return fmt.Errorf("enabled checker %s doesn't exist, all existing checkers: %s",
-				name, sprintAllowedCheckerNames(allowedNames))
-		}
-	}
-
-	for _, name := range s.DisabledChecks {
-		if !allowedNames[strings.ToLower(name)] {
-			return fmt.Errorf("disabled checker %s doesn't exist, all existing checkers: %s",
-				name, sprintAllowedCheckerNames(allowedNames))
-		}
-	}
-
-	for checkName := range s.SettingsPerCheck {
-		if _, ok := allowedNames[checkName]; !ok {
-			return fmt.Errorf("invalid setting, checker %s doesn't exist, all existing checkers: %s",
-				checkName, sprintAllowedCheckerNames(allowedNames))
-		}
-
-		if !s.isCheckEnabled(checkName) {
-			s.logger.Warnf("%s: settings were provided for not enabled check %q", goCriticName, checkName)
-		}
-	}
-
-	return nil
-}
-
-func (s *goCriticSettingsWrapper) getLowerCasedParams() map[string]config.GoCriticCheckSettings {
-	ret := make(map[string]config.GoCriticCheckSettings, len(s.SettingsPerCheck))
-
-	for checker, params := range s.SettingsPerCheck {
-		ret[strings.ToLower(checker)] = params
-	}
-
-	return ret
-}
-
-func (s *goCriticSettingsWrapper) filterByDisableTags(enabledChecks, disableTags []string) []string {
-	enabledChecksSet := stringsSliceToSet(enabledChecks)
-
-	for _, enabledCheck := range enabledChecks {
-		checkInfo, checkInfoExists := s.allCheckerMap[enabledCheck]
-		if !checkInfoExists {
-			s.logger.Warnf("%s: check %q was not exists via filtering disabled tags", goCriticName, enabledCheck)
-			continue
-		}
-
-		hitTags := intersectStringSlice(checkInfo.Tags, disableTags)
-		if len(hitTags) != 0 {
-			delete(enabledChecksSet, enabledCheck)
-		}
-	}
-
-	debugChecksListf(enabledChecks, "Disabled by config tags %s", sprintStrings(disableTags))
-
-	enabledChecks = nil
-	for enabledCheck := range enabledChecksSet {
-		enabledChecks = append(enabledChecks, enabledCheck)
-	}
-
-	return enabledChecks
-}
-
-func (s *goCriticSettingsWrapper) isEnabledByDefaultCheck(info *gocriticlinter.CheckerInfo) bool {
-	return !info.HasTag("experimental") &&
-		!info.HasTag("opinionated") &&
-		!info.HasTag("performance")
-}
-
-func validateStringsUniq(ss []string) error {
-	set := map[string]bool{}
-
-	for _, s := range ss {
-		_, ok := set[s]
-		if ok {
-			return fmt.Errorf("%q occurs multiple times in list", s)
-		}
-		set[s] = true
-	}
-
-	return nil
-}
-
-func intersectStringSlice(s1, s2 []string) []string {
-	s1Map := make(map[string]struct{}, len(s1))
-
-	for _, s := range s1 {
-		s1Map[s] = struct{}{}
-	}
-
-	results := make([]string, 0)
-	for _, s := range s2 {
-		if _, exists := s1Map[s]; exists {
-			results = append(results, s)
-		}
-	}
-
-	return results
-}
-
-func sprintAllowedCheckerNames(allowedNames map[string]bool) string {
-	namesSlice := make([]string, 0, len(allowedNames))
-
-	for name := range allowedNames {
-		namesSlice = append(namesSlice, name)
-	}
-
-	return sprintStrings(namesSlice)
-}
-
-func sprintStrings(ss []string) string {
-	sort.Strings(ss)
-	return fmt.Sprint(ss)
-}
-
-func debugChecksListf(checks []string, format string, args ...any) {
-	if !isGoCriticDebug {
-		return
-	}
-
-	goCriticDebugf("%s checks (%d): %s", fmt.Sprintf(format, args...), len(checks), sprintStrings(checks))
-}
-
-func stringsSliceToSet(ss []string) map[string]bool {
-	ret := make(map[string]bool, len(ss))
-	for _, s := range ss {
-		ret[s] = true
-	}
-
-	return ret
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go
new file mode 100644
index 0000000000000000000000000000000000000000..68cc338e43a612c9209eb78c1da3390c606d1121
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go
@@ -0,0 +1,591 @@
+package gocritic
+
+import (
+	"errors"
+	"fmt"
+	"go/ast"
+	"go/types"
+	"path/filepath"
+	"reflect"
+	"runtime"
+	"sort"
+	"strings"
+	"sync"
+
+	"github.com/go-critic/go-critic/checkers"
+	gocriticlinter "github.com/go-critic/go-critic/linter"
+	_ "github.com/quasilyte/go-ruleguard/dsl"
+	"golang.org/x/exp/maps"
+	"golang.org/x/exp/slices"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/lint/linter"
+	"github.com/golangci/golangci-lint/pkg/logutils"
+	"github.com/golangci/golangci-lint/pkg/result"
+)
+
+const linterName = "gocritic"
+
+var (
+	debugf  = logutils.Debug(logutils.DebugKeyGoCritic)
+	isDebug = logutils.HaveDebugTag(logutils.DebugKeyGoCritic)
+)
+
+func New(settings *config.GoCriticSettings) *goanalysis.Linter {
+	var mu sync.Mutex
+	var resIssues []goanalysis.Issue
+
+	wrapper := &goCriticWrapper{
+		sizes: types.SizesFor("gc", runtime.GOARCH),
+	}
+
+	analyzer := &analysis.Analyzer{
+		Name: linterName,
+		Doc:  goanalysis.TheOnlyanalyzerDoc,
+		Run: func(pass *analysis.Pass) (any, error) {
+			issues, err := wrapper.run(pass)
+			if err != nil {
+				return nil, err
+			}
+
+			if len(issues) == 0 {
+				return nil, nil
+			}
+
+			mu.Lock()
+			resIssues = append(resIssues, issues...)
+			mu.Unlock()
+
+			return nil, nil
+		},
+	}
+
+	return goanalysis.NewLinter(
+		linterName,
+		`Provides diagnostics that check for bugs, performance and style issues.
+Extensible without recompilation through dynamic rules.
+Dynamic rules are written declaratively with AST patterns, filters, report message and optional suggestion.`,
+		[]*analysis.Analyzer{analyzer},
+		nil,
+	).
+		WithContextSetter(func(context *linter.Context) {
+			wrapper.configDir = context.Cfg.GetConfigDir()
+
+			wrapper.init(context.Log, settings)
+		}).
+		WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
+			return resIssues
+		}).
+		WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
+
+type goCriticWrapper struct {
+	settingsWrapper *settingsWrapper
+	configDir       string
+	sizes           types.Sizes
+	once            sync.Once
+}
+
+func (w *goCriticWrapper) init(logger logutils.Log, settings *config.GoCriticSettings) {
+	if settings == nil {
+		return
+	}
+
+	w.once.Do(func() {
+		err := checkers.InitEmbeddedRules()
+		if err != nil {
+			logger.Fatalf("%s: %v: setting an explicit GOROOT can fix this problem", linterName, err)
+		}
+	})
+
+	settingsWrapper := newSettingsWrapper(settings, logger)
+	settingsWrapper.InferEnabledChecks()
+	// Validate must run after InferEnabledChecks, not before,
+	// because it relies on the gathered tag set and the final list of enabled checks.
+	if err := settingsWrapper.Validate(); err != nil {
+		logger.Fatalf("%s: invalid settings: %s", linterName, err)
+	}
+
+	w.settingsWrapper = settingsWrapper
+}
+
+func (w *goCriticWrapper) run(pass *analysis.Pass) ([]goanalysis.Issue, error) {
+	if w.settingsWrapper == nil {
+		return nil, errors.New("the settings wrapper is nil")
+	}
+
+	linterCtx := gocriticlinter.NewContext(pass.Fset, w.sizes)
+
+	linterCtx.SetGoVersion(w.settingsWrapper.Go)
+
+	enabledCheckers, err := w.buildEnabledCheckers(linterCtx)
+	if err != nil {
+		return nil, err
+	}
+
+	linterCtx.SetPackageInfo(pass.TypesInfo, pass.Pkg)
+
+	pkgIssues := runOnPackage(linterCtx, enabledCheckers, pass.Files)
+
+	issues := make([]goanalysis.Issue, 0, len(pkgIssues))
+	for i := range pkgIssues {
+		issues = append(issues, goanalysis.NewIssue(&pkgIssues[i], pass))
+	}
+
+	return issues, nil
+}
+
+func (w *goCriticWrapper) buildEnabledCheckers(linterCtx *gocriticlinter.Context) ([]*gocriticlinter.Checker, error) {
+	allLowerCasedParams := w.settingsWrapper.GetLowerCasedParams()
+
+	var enabledCheckers []*gocriticlinter.Checker
+	for _, info := range gocriticlinter.GetCheckersInfo() {
+		if !w.settingsWrapper.IsCheckEnabled(info.Name) {
+			continue
+		}
+
+		if err := w.configureCheckerInfo(info, allLowerCasedParams); err != nil {
+			return nil, err
+		}
+
+		c, err := gocriticlinter.NewChecker(linterCtx, info)
+		if err != nil {
+			return nil, err
+		}
+		enabledCheckers = append(enabledCheckers, c)
+	}
+
+	return enabledCheckers, nil
+}
+
+func (w *goCriticWrapper) configureCheckerInfo(
+	info *gocriticlinter.CheckerInfo,
+	allLowerCasedParams map[string]config.GoCriticCheckSettings,
+) error {
+	params := allLowerCasedParams[strings.ToLower(info.Name)]
+	if params == nil { // no config for this checker
+		return nil
+	}
+
+	// Lowercase the info param keys here because golangci-lint's config parser lowercases all strings.
+	infoParams := normalizeMap(info.Params)
+	for k, p := range params {
+		v, ok := infoParams[k]
+		if ok {
+			v.Value = w.normalizeCheckerParamsValue(p)
+			continue
+		}
+
+		// param `k` isn't supported
+		if len(info.Params) == 0 {
+			return fmt.Errorf("checker %s config param %s doesn't exist: checker doesn't have params",
+				info.Name, k)
+		}
+
+		supportedKeys := maps.Keys(info.Params)
+		sort.Strings(supportedKeys)
+
+		return fmt.Errorf("checker %s config param %s doesn't exist, all existing: %s",
+			info.Name, k, supportedKeys)
+	}
+
+	return nil
+}
+
+// normalizeCheckerParamsValue normalizes value types.
+// go-critic asserts that CheckerParam.Value has some specific types,
+// but the file parsers (TOML, YAML, JSON) don't produce the same representation for a raw type,
+// so we have to convert the parsed values into the expected types.
+// Maybe in the future, this kind of conversion will be done in go-critic itself.
+func (w *goCriticWrapper) normalizeCheckerParamsValue(p any) any {
+	rv := reflect.ValueOf(p)
+	switch rv.Type().Kind() {
+	case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int:
+		return int(rv.Int())
+	case reflect.Bool:
+		return rv.Bool()
+	case reflect.String:
+		// Perform variable substitution.
+		return strings.ReplaceAll(rv.String(), "${configDir}", w.configDir)
+	default:
+		return p
+	}
+}
+
+func runOnPackage(linterCtx *gocriticlinter.Context, checks []*gocriticlinter.Checker, files []*ast.File) []result.Issue {
+	var res []result.Issue
+	for _, f := range files {
+		filename := filepath.Base(linterCtx.FileSet.Position(f.Pos()).Filename)
+		linterCtx.SetFileInfo(filename, f)
+
+		issues := runOnFile(linterCtx, f, checks)
+		res = append(res, issues...)
+	}
+	return res
+}
+
+func runOnFile(linterCtx *gocriticlinter.Context, f *ast.File, checks []*gocriticlinter.Checker) []result.Issue {
+	var res []result.Issue
+
+	for _, c := range checks {
+		// All checkers are expected to use *lint.Context
+		// as read-only structure, so no copying is required.
+		for _, warn := range c.Check(f) {
+			pos := linterCtx.FileSet.Position(warn.Pos)
+			issue := result.Issue{
+				Pos:        pos,
+				Text:       fmt.Sprintf("%s: %s", c.Info.Name, warn.Text),
+				FromLinter: linterName,
+			}
+
+			if warn.HasQuickFix() {
+				issue.Replacement = &result.Replacement{
+					Inline: &result.InlineFix{
+						StartCol:  pos.Column - 1,
+						Length:    int(warn.Suggestion.To - warn.Suggestion.From),
+						NewString: string(warn.Suggestion.Replacement),
+					},
+				}
+			}
+
+			res = append(res, issue)
+		}
+	}
+
+	return res
+}
+
+type goCriticChecks[T any] map[string]T
+
+func (m goCriticChecks[T]) has(name string) bool {
+	_, ok := m[name]
+	return ok
+}
+
+type settingsWrapper struct {
+	*config.GoCriticSettings
+
+	logger logutils.Log
+
+	allCheckers []*gocriticlinter.CheckerInfo
+
+	allChecks             goCriticChecks[struct{}]
+	allChecksByTag        goCriticChecks[[]string]
+	allTagsSorted         []string
+	inferredEnabledChecks goCriticChecks[struct{}]
+
+	// *LowerCased fields are used for GoCriticSettings.SettingsPerCheck validation only.
+
+	allChecksLowerCased             goCriticChecks[struct{}]
+	inferredEnabledChecksLowerCased goCriticChecks[struct{}]
+}
+
+func newSettingsWrapper(settings *config.GoCriticSettings, logger logutils.Log) *settingsWrapper {
+	allCheckers := gocriticlinter.GetCheckersInfo()
+
+	allChecks := make(goCriticChecks[struct{}], len(allCheckers))
+	allChecksLowerCased := make(goCriticChecks[struct{}], len(allCheckers))
+	allChecksByTag := make(goCriticChecks[[]string])
+	for _, checker := range allCheckers {
+		allChecks[checker.Name] = struct{}{}
+		allChecksLowerCased[strings.ToLower(checker.Name)] = struct{}{}
+
+		for _, tag := range checker.Tags {
+			allChecksByTag[tag] = append(allChecksByTag[tag], checker.Name)
+		}
+	}
+
+	allTagsSorted := maps.Keys(allChecksByTag)
+	sort.Strings(allTagsSorted)
+
+	return &settingsWrapper{
+		GoCriticSettings:                settings,
+		logger:                          logger,
+		allCheckers:                     allCheckers,
+		allChecks:                       allChecks,
+		allChecksLowerCased:             allChecksLowerCased,
+		allChecksByTag:                  allChecksByTag,
+		allTagsSorted:                   allTagsSorted,
+		inferredEnabledChecks:           make(goCriticChecks[struct{}]),
+		inferredEnabledChecksLowerCased: make(goCriticChecks[struct{}]),
+	}
+}
+
+func (s *settingsWrapper) IsCheckEnabled(name string) bool {
+	return s.inferredEnabledChecks.has(name)
+}
+
+func (s *settingsWrapper) GetLowerCasedParams() map[string]config.GoCriticCheckSettings {
+	return normalizeMap(s.SettingsPerCheck)
+}
+
+// InferEnabledChecks tries to be consistent with (lintersdb.Manager).build.
+func (s *settingsWrapper) InferEnabledChecks() {
+	s.debugChecksInitialState()
+
+	enabledByDefaultChecks, disabledByDefaultChecks := s.buildEnabledAndDisabledByDefaultChecks()
+	debugChecksListf(enabledByDefaultChecks, "Enabled by default")
+	debugChecksListf(disabledByDefaultChecks, "Disabled by default")
+
+	enabledChecks := make(goCriticChecks[struct{}])
+
+	if s.EnableAll {
+		enabledChecks = make(goCriticChecks[struct{}], len(s.allCheckers))
+		for _, info := range s.allCheckers {
+			enabledChecks[info.Name] = struct{}{}
+		}
+	} else if !s.DisableAll {
+		// enable-all/disable-all revokes the default settings.
+		enabledChecks = make(goCriticChecks[struct{}], len(enabledByDefaultChecks))
+		for _, check := range enabledByDefaultChecks {
+			enabledChecks[check] = struct{}{}
+		}
+	}
+
+	if len(s.EnabledTags) != 0 {
+		enabledFromTags := s.expandTagsToChecks(s.EnabledTags)
+		debugChecksListf(enabledFromTags, "Enabled by config tags %s", sprintSortedStrings(s.EnabledTags))
+
+		for _, check := range enabledFromTags {
+			enabledChecks[check] = struct{}{}
+		}
+	}
+
+	if len(s.EnabledChecks) != 0 {
+		debugChecksListf(s.EnabledChecks, "Enabled by config")
+
+		for _, check := range s.EnabledChecks {
+			if enabledChecks.has(check) {
+				s.logger.Warnf("%s: no need to enable check %q: it's already enabled", linterName, check)
+				continue
+			}
+			enabledChecks[check] = struct{}{}
+		}
+	}
+
+	if len(s.DisabledTags) != 0 {
+		disabledFromTags := s.expandTagsToChecks(s.DisabledTags)
+		debugChecksListf(disabledFromTags, "Disabled by config tags %s", sprintSortedStrings(s.DisabledTags))
+
+		for _, check := range disabledFromTags {
+			delete(enabledChecks, check)
+		}
+	}
+
+	if len(s.DisabledChecks) != 0 {
+		debugChecksListf(s.DisabledChecks, "Disabled by config")
+
+		for _, check := range s.DisabledChecks {
+			if !enabledChecks.has(check) {
+				s.logger.Warnf("%s: no need to disable check %q: it's already disabled", linterName, check)
+				continue
+			}
+			delete(enabledChecks, check)
+		}
+	}
+
+	s.inferredEnabledChecks = enabledChecks
+	s.inferredEnabledChecksLowerCased = normalizeMap(s.inferredEnabledChecks)
+	s.debugChecksFinalState()
+}
+
+func (s *settingsWrapper) buildEnabledAndDisabledByDefaultChecks() (enabled, disabled []string) {
+	for _, info := range s.allCheckers {
+		if enabledByDef := isEnabledByDefaultGoCriticChecker(info); enabledByDef {
+			enabled = append(enabled, info.Name)
+		} else {
+			disabled = append(disabled, info.Name)
+		}
+	}
+	return enabled, disabled
+}
+
+func (s *settingsWrapper) expandTagsToChecks(tags []string) []string {
+	var checks []string
+	for _, tag := range tags {
+		checks = append(checks, s.allChecksByTag[tag]...)
+	}
+	return checks
+}
+
+func (s *settingsWrapper) debugChecksInitialState() {
+	if !isDebug {
+		return
+	}
+
+	debugf("All gocritic existing tags and checks:")
+	for _, tag := range s.allTagsSorted {
+		debugChecksListf(s.allChecksByTag[tag], "  tag %q", tag)
+	}
+}
+
+func (s *settingsWrapper) debugChecksFinalState() {
+	if !isDebug {
+		return
+	}
+
+	var enabledChecks []string
+	var disabledChecks []string
+
+	for _, checker := range s.allCheckers {
+		check := checker.Name
+		if s.inferredEnabledChecks.has(check) {
+			enabledChecks = append(enabledChecks, check)
+		} else {
+			disabledChecks = append(disabledChecks, check)
+		}
+	}
+
+	debugChecksListf(enabledChecks, "Final used")
+
+	if len(disabledChecks) == 0 {
+		debugf("All checks are enabled")
+	} else {
+		debugChecksListf(disabledChecks, "Final not used")
+	}
+}
+
+// Validate tries to be consistent with (lintersdb.Validator).validateEnabledDisabledLintersConfig.
+func (s *settingsWrapper) Validate() error {
+	for _, v := range []func() error{
+		s.validateOptionsCombinations,
+		s.validateCheckerTags,
+		s.validateCheckerNames,
+		s.validateDisabledAndEnabledAtOneMoment,
+		s.validateAtLeastOneCheckerEnabled,
+	} {
+		if err := v(); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func (s *settingsWrapper) validateOptionsCombinations() error {
+	if s.EnableAll {
+		if s.DisableAll {
+			return errors.New("enable-all and disable-all options must not be combined")
+		}
+
+		if len(s.EnabledTags) != 0 {
+			return errors.New("enable-all and enabled-tags options must not be combined")
+		}
+
+		if len(s.EnabledChecks) != 0 {
+			return errors.New("enable-all and enabled-checks options must not be combined")
+		}
+	}
+
+	if s.DisableAll {
+		if len(s.DisabledTags) != 0 {
+			return errors.New("disable-all and disabled-tags options must not be combined")
+		}
+
+		if len(s.DisabledChecks) != 0 {
+			return errors.New("disable-all and disabled-checks options must not be combined")
+		}
+
+		if len(s.EnabledTags) == 0 && len(s.EnabledChecks) == 0 {
+			return errors.New("all checks were disabled, but no one check was enabled: at least one must be enabled")
+		}
+	}
+
+	return nil
+}
+
+func (s *settingsWrapper) validateCheckerTags() error {
+	for _, tag := range s.EnabledTags {
+		if !s.allChecksByTag.has(tag) {
+			return fmt.Errorf("enabled tag %q doesn't exist, see %s's documentation", tag, linterName)
+		}
+	}
+
+	for _, tag := range s.DisabledTags {
+		if !s.allChecksByTag.has(tag) {
+			return fmt.Errorf("disabled tag %q doesn't exist, see %s's documentation", tag, linterName)
+		}
+	}
+
+	return nil
+}
+
+func (s *settingsWrapper) validateCheckerNames() error {
+	for _, check := range s.EnabledChecks {
+		if !s.allChecks.has(check) {
+			return fmt.Errorf("enabled check %q doesn't exist, see %s's documentation", check, linterName)
+		}
+	}
+
+	for _, check := range s.DisabledChecks {
+		if !s.allChecks.has(check) {
+			return fmt.Errorf("disabled check %q doesn't exist, see %s documentation", check, linterName)
+		}
+	}
+
+	for check := range s.SettingsPerCheck {
+		lcName := strings.ToLower(check)
+		if !s.allChecksLowerCased.has(lcName) {
+			return fmt.Errorf("invalid check settings: check %q doesn't exist, see %s documentation", check, linterName)
+		}
+		if !s.inferredEnabledChecksLowerCased.has(lcName) {
+			s.logger.Warnf("%s: settings were provided for disabled check %q", linterName, check)
+		}
+	}
+
+	return nil
+}
+
+func (s *settingsWrapper) validateDisabledAndEnabledAtOneMoment() error {
+	for _, tag := range s.DisabledTags {
+		if slices.Contains(s.EnabledTags, tag) {
+			return fmt.Errorf("tag %q disabled and enabled at one moment", tag)
+		}
+	}
+
+	for _, check := range s.DisabledChecks {
+		if slices.Contains(s.EnabledChecks, check) {
+			return fmt.Errorf("check %q disabled and enabled at one moment", check)
+		}
+	}
+
+	return nil
+}
+
+func (s *settingsWrapper) validateAtLeastOneCheckerEnabled() error {
+	if len(s.inferredEnabledChecks) == 0 {
+		return errors.New("eventually all checks were disabled: at least one must be enabled")
+	}
+	return nil
+}
+
+func normalizeMap[ValueT any](in map[string]ValueT) map[string]ValueT {
+	ret := make(map[string]ValueT, len(in))
+	for k, v := range in {
+		ret[strings.ToLower(k)] = v
+	}
+	return ret
+}
+
+func isEnabledByDefaultGoCriticChecker(info *gocriticlinter.CheckerInfo) bool {
+	// https://github.com/go-critic/go-critic/blob/5b67cfd487ae9fe058b4b19321901b3131810f65/cmd/gocritic/check.go#L342-L345
+	return !info.HasTag(gocriticlinter.ExperimentalTag) &&
+		!info.HasTag(gocriticlinter.OpinionatedTag) &&
+		!info.HasTag(gocriticlinter.PerformanceTag) &&
+		!info.HasTag(gocriticlinter.SecurityTag)
+}
+
+func debugChecksListf(checks []string, format string, args ...any) {
+	if !isDebug {
+		return
+	}
+
+	debugf("%s checks (%d): %s", fmt.Sprintf(format, args...), len(checks), sprintSortedStrings(checks))
+}
+
+func sprintSortedStrings(v []string) string {
+	v = slices.Clone(v) // sort a copy so the caller's slice keeps its original order
+	sort.Strings(v)
+	return fmt.Sprint(v)
+}
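
To make the precedence implemented by InferEnabledChecks above concrete, here is a hedged sketch of a settings value and how it resolves. The field names are the ones used in this file; the specific check and parameter names are only examples:

    package example

    import "github.com/golangci/golangci-lint/pkg/config"

    func exampleGoCriticSettings() *config.GoCriticSettings {
        return &config.GoCriticSettings{
            // Starting point: the default set (checks without the experimental,
            // opinionated, performance, or security tags), since neither
            // EnableAll nor DisableAll is set.
            // EnabledTags then adds every checker carrying the "performance" tag...
            EnabledTags: []string{"performance"},
            // ...and DisabledChecks removes one of them again, because disabled
            // tags/checks are applied after the enabled ones.
            DisabledChecks: []string{"hugeParam"},
            // Per-check parameters; keys are lowercased before lookup,
            // so the casing used here does not matter.
            SettingsPerCheck: map[string]config.GoCriticCheckSettings{
                "rangeValCopy": {"sizeThreshold": 512},
            },
        }
    }
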
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo/gocyclo.go
similarity index 80%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo/gocyclo.go
index b502623ba6208ec2230319ef2309b9c8c81d5cc7..51333dc154a4091d3195c8876f72d071a2f2298c 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo/gocyclo.go
@@ -1,4 +1,4 @@
-package golinters
+package gocyclo
 
 import (
 	"fmt"
@@ -8,20 +8,20 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const gocycloName = "gocyclo"
+const linterName = "gocyclo"
 
-//nolint:dupl
-func NewGocyclo(settings *config.GoCycloSettings) *goanalysis.Linter {
+func New(settings *config.GoCycloSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: gocycloName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			issues := runGoCyclo(pass, settings)
@@ -39,7 +39,7 @@ func NewGocyclo(settings *config.GoCycloSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		gocycloName,
+		linterName,
 		"Computes and checks the cyclomatic complexity of functions",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -63,12 +63,12 @@ func runGoCyclo(pass *analysis.Pass, settings *config.GoCycloSettings) []goanaly
 
 	for _, s := range stats {
 		text := fmt.Sprintf("cyclomatic complexity %d of func %s is high (> %d)",
-			s.Complexity, formatCode(s.FuncName, nil), settings.MinComplexity)
+			s.Complexity, internal.FormatCode(s.FuncName, nil), settings.MinComplexity)
 
 		issues = append(issues, goanalysis.NewIssue(&result.Issue{
 			Pos:        s.Pos,
 			Text:       text,
-			FromLinter: gocycloName,
+			FromLinter: linterName,
 		}, pass))
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot/godot.go
similarity index 84%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/godot/godot.go
index b0ee64434936d877d61cf96cf2b2f34f2fcad5fd..fc51b5bb8c9216897018904abe688327bcb1b224 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot/godot.go
@@ -1,4 +1,4 @@
-package golinters
+package godot
 
 import (
 	"sync"
@@ -7,14 +7,14 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const godotName = "godot"
+const linterName = "godot"
 
-func NewGodot(settings *config.GodotSettings) *goanalysis.Linter {
+func New(settings *config.GodotSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
@@ -29,8 +29,7 @@ func NewGodot(settings *config.GodotSettings) *goanalysis.Linter {
 		}
 
 		// Convert deprecated setting
-		// todo(butuzov): remove on v2 release
-		if settings.CheckAll { //nolint:staticcheck // Keep for retro-compatibility.
+		if settings.CheckAll {
 			dotSettings.Scope = godot.AllScope
 		}
 	}
@@ -40,7 +39,7 @@ func NewGodot(settings *config.GodotSettings) *goanalysis.Linter {
 	}
 
 	analyzer := &analysis.Analyzer{
-		Name: godotName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			issues, err := runGodot(pass, dotSettings)
@@ -61,7 +60,7 @@ func NewGodot(settings *config.GodotSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		godotName,
+		linterName,
 		"Check if comments end in a period",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -89,7 +88,7 @@ func runGodot(pass *analysis.Pass, settings godot.Settings) ([]goanalysis.Issue,
 		issue := result.Issue{
 			Pos:        i.Pos,
 			Text:       i.Message,
-			FromLinter: godotName,
+			FromLinter: linterName,
 			Replacement: &result.Replacement{
 				NewLines: []string{i.Replacement},
 			},
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox/godox.go
similarity index 85%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/godox/godox.go
index 955810417dcb383eb7a1ffa17ca18cd0a19c75f0..d8de026baf1006e07e64718ecb0ae8f8eac2fda8 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox/godox.go
@@ -1,4 +1,4 @@
-package golinters
+package godox
 
 import (
 	"go/token"
@@ -9,20 +9,19 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const godoxName = "godox"
+const linterName = "godox"
 
-//nolint:dupl
-func NewGodox(settings *config.GodoxSettings) *goanalysis.Linter {
+func New(settings *config.GodoxSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: godoxName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			issues := runGodox(pass, settings)
@@ -40,7 +39,7 @@ func NewGodox(settings *config.GodoxSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		godoxName,
+		linterName,
 		"Tool for detection of FIXME, TODO and other comment keywords",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -68,7 +67,7 @@ func runGodox(pass *analysis.Pass, settings *config.GodoxSettings) []goanalysis.
 				Line:     i.Pos.Line,
 			},
 			Text:       strings.TrimRight(i.Message, "\n"),
-			FromLinter: godoxName,
+			FromLinter: linterName,
 		}, pass)
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt/gofmt.go
similarity index 72%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt/gofmt.go
index d2d0d3ccc5a4d275286e980010478aa3cc740583..289ceab8aec680d77c4712ebc5b5a170a05c68f3 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt/gofmt.go
@@ -1,4 +1,4 @@
-package golinters
+package gofmt
 
 import (
 	"fmt"
@@ -8,24 +8,25 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 )
 
-const gofmtName = "gofmt"
+const linterName = "gofmt"
 
-func NewGofmt(settings *config.GoFmtSettings) *goanalysis.Linter {
+func New(settings *config.GoFmtSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: gofmtName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run:  goanalysis.DummyRun,
 	}
 
 	return goanalysis.NewLinter(
-		gofmtName,
+		linterName,
 		"Gofmt checks whether code was gofmt-ed. By default "+
 			"this tool runs with -s option to check for code simplification",
 		[]*analysis.Analyzer{analyzer},
@@ -53,7 +54,7 @@ func NewGofmt(settings *config.GoFmtSettings) *goanalysis.Linter {
 }
 
 func runGofmt(lintCtx *linter.Context, pass *analysis.Pass, settings *config.GoFmtSettings) ([]goanalysis.Issue, error) {
-	fileNames := getFileNames(pass)
+	fileNames := internal.GetFileNames(pass)
 
 	var rewriteRules []gofmtAPI.RewriteRule
 	for _, rule := range settings.RewriteRules {
@@ -71,7 +72,7 @@ func runGofmt(lintCtx *linter.Context, pass *analysis.Pass, settings *config.GoF
 			continue
 		}
 
-		is, err := extractIssuesFromPatch(string(diff), lintCtx, gofmtName)
+		is, err := internal.ExtractIssuesFromPatch(string(diff), lintCtx, linterName, getIssuedTextGoFmt)
 		if err != nil {
 			return nil, fmt.Errorf("can't extract issues from gofmt diff output %q: %w", string(diff), err)
 		}
@@ -83,3 +84,15 @@ func runGofmt(lintCtx *linter.Context, pass *analysis.Pass, settings *config.GoF
 
 	return issues, nil
 }
+
+func getIssuedTextGoFmt(settings *config.LintersSettings) string {
+	text := "File is not `gofmt`-ed"
+	if settings.Gofmt.Simplify {
+		text += " with `-s`"
+	}
+	for _, rule := range settings.Gofmt.RewriteRules {
+		text += fmt.Sprintf(" `-r '%s -> %s'`", rule.Pattern, rule.Replacement)
+	}
+
+	return text
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt/gofumpt.go
similarity index 81%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt/gofumpt.go
index c2aaf121de7e05cc39a0374a58b8e52271f6154f..9a0bef84aa15826be56c276b4f3c9deefca34a8d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt/gofumpt.go
@@ -1,4 +1,4 @@
-package golinters
+package gofumpt
 
 import (
 	"bytes"
@@ -12,17 +12,18 @@ import (
 	"mvdan.cc/gofumpt/format"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 )
 
-const gofumptName = "gofumpt"
+const linterName = "gofumpt"
 
 type differ interface {
 	Diff(out io.Writer, a io.ReadSeeker, b io.ReadSeeker) error
 }
 
-func NewGofumpt(settings *config.GofumptSettings) *goanalysis.Linter {
+func New(settings *config.GofumptSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
@@ -39,13 +40,13 @@ func NewGofumpt(settings *config.GofumptSettings) *goanalysis.Linter {
 	}
 
 	analyzer := &analysis.Analyzer{
-		Name: gofumptName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run:  goanalysis.DummyRun,
 	}
 
 	return goanalysis.NewLinter(
-		gofumptName,
+		linterName,
 		"Gofumpt checks whether code was gofumpt-ed.",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -72,7 +73,7 @@ func NewGofumpt(settings *config.GofumptSettings) *goanalysis.Linter {
 }
 
 func runGofumpt(lintCtx *linter.Context, pass *analysis.Pass, diff differ, options format.Options) ([]goanalysis.Issue, error) {
-	fileNames := getFileNames(pass)
+	fileNames := internal.GetFileNames(pass)
 
 	var issues []goanalysis.Issue
 
@@ -96,7 +97,7 @@ func runGofumpt(lintCtx *linter.Context, pass *analysis.Pass, diff differ, optio
 			}
 
 			diff := out.String()
-			is, err := extractIssuesFromPatch(diff, lintCtx, gofumptName)
+			is, err := internal.ExtractIssuesFromPatch(diff, lintCtx, linterName, getIssuedTextGoFumpt)
 			if err != nil {
 				return nil, fmt.Errorf("can't extract issues from gofumpt diff output %q: %w", diff, err)
 			}
@@ -117,3 +118,13 @@ func getLangVersion(settings *config.GofumptSettings) string {
 	}
 	return settings.LangVersion
 }
+
+func getIssuedTextGoFumpt(settings *config.LintersSettings) string {
+	text := "File is not `gofumpt`-ed"
+
+	if settings.Gofumpt.ExtraRules {
+		text += " with `-extra`"
+	}
+
+	return text
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go
similarity index 80%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go
index d3cfefa90b28551b965e6b1087335bb92053851e..14d517fb30935301d042931199f56fa2a3fb1a2d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go
@@ -1,4 +1,4 @@
-package golinters
+package goheader
 
 import (
 	"go/token"
@@ -8,14 +8,14 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const goHeaderName = "goheader"
+const linterName = "goheader"
 
-func NewGoHeader(settings *config.GoHeaderSettings) *goanalysis.Linter {
+func New(settings *config.GoHeaderSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
@@ -29,7 +29,7 @@ func NewGoHeader(settings *config.GoHeaderSettings) *goanalysis.Linter {
 	}
 
 	analyzer := &analysis.Analyzer{
-		Name: goHeaderName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			issues, err := runGoHeader(pass, conf)
@@ -50,7 +50,7 @@ func NewGoHeader(settings *config.GoHeaderSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		goHeaderName,
+		linterName,
 		"Checks is file header matches to pattern",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -94,7 +94,18 @@ func runGoHeader(pass *analysis.Pass, conf *goheader.Configuration) ([]goanalysi
 				Filename: path,
 			},
 			Text:       i.Message(),
-			FromLinter: goHeaderName,
+			FromLinter: linterName,
+		}
+
+		if fix := i.Fix(); fix != nil {
+			issue.LineRange = &result.Range{
+				From: issue.Line(),
+				To:   issue.Line() + len(fix.Actual) - 1,
+			}
+			issue.Replacement = &result.Replacement{
+				NeedOnlyDelete: len(fix.Expected) == 0,
+				NewLines:       fix.Expected,
+			}
 		}
 
 		issues = append(issues, goanalysis.NewIssue(&issue, pass))
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports/goimports.go
similarity index 71%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports/goimports.go
index aac27f38e55f7a56cc9f5e212b438bc589c779ae..de965d5c8543f667bda77f6aed4ef43d79210a81 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports/goimports.go
@@ -1,4 +1,4 @@
-package golinters
+package goimports
 
 import (
 	"fmt"
@@ -9,24 +9,25 @@ import (
 	"golang.org/x/tools/imports"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 )
 
-const goimportsName = "goimports"
+const linterName = "goimports"
 
-func NewGoimports(settings *config.GoImportsSettings) *goanalysis.Linter {
+func New(settings *config.GoImportsSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: goimportsName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run:  goanalysis.DummyRun,
 	}
 
 	return goanalysis.NewLinter(
-		goimportsName,
+		linterName,
 		"Check import statements are formatted according to the 'goimport' command. "+
 			"Reformat imports in autofix mode.",
 		[]*analysis.Analyzer{analyzer},
@@ -56,7 +57,7 @@ func NewGoimports(settings *config.GoImportsSettings) *goanalysis.Linter {
 }
 
 func runGoImports(lintCtx *linter.Context, pass *analysis.Pass) ([]goanalysis.Issue, error) {
-	fileNames := getFileNames(pass)
+	fileNames := internal.GetFileNames(pass)
 
 	var issues []goanalysis.Issue
 
@@ -69,7 +70,7 @@ func runGoImports(lintCtx *linter.Context, pass *analysis.Pass) ([]goanalysis.Is
 			continue
 		}
 
-		is, err := extractIssuesFromPatch(string(diff), lintCtx, goimportsName)
+		is, err := internal.ExtractIssuesFromPatch(string(diff), lintCtx, linterName, getIssuedTextGoImports)
 		if err != nil {
 			return nil, fmt.Errorf("can't extract issues from gofmt diff output %q: %w", string(diff), err)
 		}
@@ -81,3 +82,13 @@ func runGoImports(lintCtx *linter.Context, pass *analysis.Pass) ([]goanalysis.Is
 
 	return issues, nil
 }
+
+func getIssuedTextGoImports(settings *config.LintersSettings) string {
+	text := "File is not `goimports`-ed"
+
+	if settings.Goimports.LocalPrefixes != "" {
+		text += " with -local " + settings.Goimports.LocalPrefixes
+	}
+
+	return text
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go
deleted file mode 100644
index a6fc73c9ecb64fcfa8e4e39f4ef05a50f0c4d86f..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go
+++ /dev/null
@@ -1,84 +0,0 @@
-package golinters
-
-import (
-	"fmt"
-	"sync"
-
-	lintAPI "github.com/golangci/lint-1"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-const golintName = "golint"
-
-//nolint:dupl
-func NewGolint(settings *config.GoLintSettings) *goanalysis.Linter {
-	var mu sync.Mutex
-	var resIssues []goanalysis.Issue
-
-	analyzer := &analysis.Analyzer{
-		Name: golintName,
-		Doc:  goanalysis.TheOnlyanalyzerDoc,
-		Run: func(pass *analysis.Pass) (any, error) {
-			issues, err := runGoLint(pass, settings)
-			if err != nil {
-				return nil, err
-			}
-
-			if len(issues) == 0 {
-				return nil, nil
-			}
-
-			mu.Lock()
-			resIssues = append(resIssues, issues...)
-			mu.Unlock()
-
-			return nil, nil
-		},
-	}
-
-	return goanalysis.NewLinter(
-		golintName,
-		"Golint differs from gofmt. Gofmt reformats Go source code, whereas golint prints out style mistakes",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
-		return resIssues
-	}).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
-
-func runGoLint(pass *analysis.Pass, settings *config.GoLintSettings) ([]goanalysis.Issue, error) {
-	l := new(lintAPI.Linter)
-
-	ps, err := l.LintPkg(pass.Files, pass.Fset, pass.Pkg, pass.TypesInfo)
-	if err != nil {
-		return nil, fmt.Errorf("can't lint %d files: %s", len(pass.Files), err)
-	}
-
-	if len(ps) == 0 {
-		return nil, nil
-	}
-
-	lintIssues := make([]*result.Issue, 0, len(ps)) // This is worst case
-	for idx := range ps {
-		if ps[idx].Confidence >= settings.MinConfidence {
-			lintIssues = append(lintIssues, &result.Issue{
-				Pos:        ps[idx].Position,
-				Text:       ps[idx].Text,
-				FromLinter: golintName,
-			})
-			// TODO: use p.Link and p.Category
-		}
-	}
-
-	issues := make([]goanalysis.Issue, 0, len(lintIssues))
-	for _, issue := range lintIssues {
-		issues = append(issues, goanalysis.NewIssue(issue, pass))
-	}
-
-	return issues, nil
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go
deleted file mode 100644
index 2e6d77a8015e059a3a3a722914113b762db90f71..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go
+++ /dev/null
@@ -1,45 +0,0 @@
-package golinters
-
-import (
-	mnd "github.com/tommy-muehle/go-mnd/v2"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewGoMND(settings *config.GoMndSettings) *goanalysis.Linter {
-	var linterCfg map[string]map[string]any
-
-	if settings != nil {
-		// TODO(ldez) For compatibility only, must be drop in v2.
-		if len(settings.Settings) > 0 {
-			linterCfg = settings.Settings
-		} else {
-			cfg := make(map[string]any)
-			if len(settings.Checks) > 0 {
-				cfg["checks"] = settings.Checks
-			}
-			if len(settings.IgnoredNumbers) > 0 {
-				cfg["ignored-numbers"] = settings.IgnoredNumbers
-			}
-			if len(settings.IgnoredFiles) > 0 {
-				cfg["ignored-files"] = settings.IgnoredFiles
-			}
-			if len(settings.IgnoredFunctions) > 0 {
-				cfg["ignored-functions"] = settings.IgnoredFunctions
-			}
-
-			linterCfg = map[string]map[string]any{
-				"mnd": cfg,
-			}
-		}
-	}
-
-	return goanalysis.NewLinter(
-		"gomnd",
-		"An analyzer to detect magic numbers.",
-		[]*analysis.Analyzer{mnd.Analyzer},
-		linterCfg,
-	).WithLoadMode(goanalysis.LoadModeSyntax)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives/gomoddirectives.go
similarity index 80%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives/gomoddirectives.go
index 56afcd465f08df5704cbb601ef1bccf03d2add19..9cde7e26c69ed532ce1d8c1fe8931aabb7f09195 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives/gomoddirectives.go
@@ -1,4 +1,4 @@
-package golinters
+package gomoddirectives
 
 import (
 	"sync"
@@ -7,15 +7,14 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const goModDirectivesName = "gomoddirectives"
+const linterName = "gomoddirectives"
 
-// NewGoModDirectives returns a new gomoddirectives linter.
-func NewGoModDirectives(settings *config.GoModDirectivesSettings) *goanalysis.Linter {
+func New(settings *config.GoModDirectivesSettings) *goanalysis.Linter {
 	var issues []goanalysis.Issue
 	var once sync.Once
 
@@ -34,7 +33,7 @@ func NewGoModDirectives(settings *config.GoModDirectivesSettings) *goanalysis.Li
 	}
 
 	return goanalysis.NewLinter(
-		goModDirectivesName,
+		linterName,
 		"Manage the use of 'replace', 'retract', and 'excludes' directives in go.mod.",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -44,13 +43,13 @@ func NewGoModDirectives(settings *config.GoModDirectivesSettings) *goanalysis.Li
 				results, err := gomoddirectives.Analyze(opts)
 				if err != nil {
 					lintCtx.Log.Warnf("running %s failed: %s: "+
-						"if you are not using go modules it is suggested to disable this linter", goModDirectivesName, err)
+						"if you are not using go modules it is suggested to disable this linter", linterName, err)
 					return
 				}
 
 				for _, p := range results {
 					issues = append(issues, goanalysis.NewIssue(&result.Issue{
-						FromLinter: goModDirectivesName,
+						FromLinter: linterName,
 						Pos:        p.Start,
 						Text:       p.Reason,
 					}, pass))
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard/gomodguard.go
similarity index 83%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard/gomodguard.go
index 157bf56c35de787a4b486018f78ee26ce3ea56fb..8f1036b0f13c84a02eee2286674d0a2895375275 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard/gomodguard.go
@@ -1,4 +1,4 @@
-package golinters
+package gomodguard
 
 import (
 	"sync"
@@ -7,20 +7,20 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
 const (
-	gomodguardName = "gomodguard"
-	gomodguardDesc = "Allow and block list linter for direct Go module dependencies. " +
+	name = "gomodguard"
+	desc = "Allow and block list linter for direct Go module dependencies. " +
 		"This is different from depguard where there are different block " +
 		"types for example version constraints and module recommendations."
 )
 
-// NewGomodguard returns a new Gomodguard linter.
-func NewGomodguard(settings *config.GoModGuardSettings) *goanalysis.Linter {
+func New(settings *config.GoModGuardSettings) *goanalysis.Linter {
 	var issues []goanalysis.Issue
 	var mu sync.Mutex
 
@@ -60,8 +60,8 @@ func NewGomodguard(settings *config.GoModGuardSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		gomodguardName,
-		gomodguardDesc,
+		name,
+		desc,
 		[]*analysis.Analyzer{analyzer},
 		nil,
 	).WithContextSetter(func(lintCtx *linter.Context) {
@@ -73,14 +73,14 @@ func NewGomodguard(settings *config.GoModGuardSettings) *goanalysis.Linter {
 		}
 
 		analyzer.Run = func(pass *analysis.Pass) (any, error) {
-			gomodguardIssues := processor.ProcessFiles(getFileNames(pass))
+			gomodguardIssues := processor.ProcessFiles(internal.GetFileNames(pass))
 
 			mu.Lock()
 			defer mu.Unlock()
 
 			for _, gomodguardIssue := range gomodguardIssues {
 				issues = append(issues, goanalysis.NewIssue(&result.Issue{
-					FromLinter: gomodguardName,
+					FromLinter: name,
 					Pos:        gomodguardIssue.Position,
 					Text:       gomodguardIssue.Reason,
 				}, pass))
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname.go
deleted file mode 100644
index c5516dc7f9b47b0b519880fca1eb2edc79545bd1..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package golinters
-
-import (
-	"github.com/jirfag/go-printf-func-name/pkg/analyzer"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewGoPrintfFuncName() *goanalysis.Linter {
-	return goanalysis.NewLinter(
-		"goprintffuncname",
-		"Checks that printf-like functions are named with `f` at the end",
-		[]*analysis.Analyzer{analyzer.Analyzer},
-		nil,
-	).WithLoadMode(goanalysis.LoadModeSyntax)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go
new file mode 100644
index 0000000000000000000000000000000000000000..85154a9b38eab5b8d1560157d231b82093ceb9fa
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go
@@ -0,0 +1,19 @@
+package goprintffuncname
+
+import (
+	"github.com/jirfag/go-printf-func-name/pkg/analyzer"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New() *goanalysis.Linter {
+	a := analyzer.Analyzer
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeSyntax)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec/gosec.go
similarity index 90%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec/gosec.go
index 235f0e91419dbf10fe10d2f87ad7ad1fecec70e2..c333152e69c6230745fb5ff7512572b93054fdf8 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec/gosec.go
@@ -1,4 +1,4 @@
-package golinters
+package gosec
 
 import (
 	"fmt"
@@ -16,14 +16,14 @@ import (
 	"golang.org/x/tools/go/packages"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const gosecName = "gosec"
+const linterName = "gosec"
 
-func NewGosec(settings *config.GoSecSettings) *goanalysis.Linter {
+func New(settings *config.GoSecSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
@@ -39,13 +39,13 @@ func NewGosec(settings *config.GoSecSettings) *goanalysis.Linter {
 	ruleDefinitions := rules.Generate(false, filters...)
 
 	analyzer := &analysis.Analyzer{
-		Name: gosecName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run:  goanalysis.DummyRun,
 	}
 
 	return goanalysis.NewLinter(
-		gosecName,
+		linterName,
 		"Inspects source code for security problems",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -97,7 +97,7 @@ func runGoSec(lintCtx *linter.Context, pass *analysis.Pass, settings *config.GoS
 
 	issues := make([]goanalysis.Issue, 0, len(secIssues))
 	for _, i := range secIssues {
-		text := fmt.Sprintf("%s: %s", i.RuleID, i.What) // TODO: use severity and confidence
+		text := fmt.Sprintf("%s: %s", i.RuleID, i.What)
 
 		var r *result.Range
 
@@ -118,6 +118,7 @@ func runGoSec(lintCtx *linter.Context, pass *analysis.Pass, settings *config.GoS
 		}
 
 		issues = append(issues, goanalysis.NewIssue(&result.Issue{
+			Severity: convertScoreToString(i.Severity),
 			Pos: token.Position{
 				Filename: i.File,
 				Line:     line,
@@ -125,7 +126,7 @@ func runGoSec(lintCtx *linter.Context, pass *analysis.Pass, settings *config.GoS
 			},
 			Text:       text,
 			LineRange:  r,
-			FromLinter: gosecName,
+			FromLinter: linterName,
 		}, pass))
 	}
 
@@ -149,6 +150,19 @@ func toGosecConfig(settings *config.GoSecSettings) gosec.Config {
 	return conf
 }
 
+func convertScoreToString(score issue.Score) string {
+	switch score {
+	case issue.Low:
+		return "low"
+	case issue.Medium:
+		return "medium"
+	case issue.High:
+		return "high"
+	default:
+		return ""
+	}
+}
+
 // based on https://github.com/securego/gosec/blob/47bfd4eb6fc7395940933388550b547538b4c946/config.go#L52-L62
 func convertGosecGlobals(globalOptionFromConfig any, conf gosec.Config) {
 	globalOptionMap, ok := globalOptionFromConfig.(map[string]any)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple.go
deleted file mode 100644
index de60ded73e2dbb76d067a6b946f9e7de44ecf4ad..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple.go
+++ /dev/null
@@ -1,21 +0,0 @@
-package golinters
-
-import (
-	"honnef.co/go/tools/simple"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewGosimple(settings *config.StaticCheckSettings) *goanalysis.Linter {
-	cfg := staticCheckConfig(settings)
-
-	analyzers := setupStaticCheckAnalyzers(simple.Analyzers, getGoVersion(settings), cfg.Checks)
-
-	return goanalysis.NewLinter(
-		"gosimple",
-		"Linter for Go source code that specializes in simplifying code",
-		analyzers,
-		nil,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple/gosimple.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple/gosimple.go
new file mode 100644
index 0000000000000000000000000000000000000000..6a0d967232ab43747562945bf1d4f9698a613f21
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple/gosimple.go
@@ -0,0 +1,22 @@
+package gosimple
+
+import (
+	"honnef.co/go/tools/simple"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
+)
+
+func New(settings *config.StaticCheckSettings) *goanalysis.Linter {
+	cfg := internal.StaticCheckConfig(settings)
+
+	analyzers := internal.SetupStaticCheckAnalyzers(simple.Analyzers, internal.GetGoVersion(settings), cfg.Checks)
+
+	return goanalysis.NewLinter(
+		"gosimple",
+		"Linter for Go source code that specializes in simplifying code",
+		analyzers,
+		nil,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan/gosmopolitan.go
similarity index 79%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan/gosmopolitan.go
index 2e01fcc70d59e47f2f7d1399c9f9b88cfeabffc8..4f6fb80358abf3c92bccd71a707929213d83b9f2 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan/gosmopolitan.go
@@ -1,4 +1,4 @@
-package golinters
+package gosmopolitan
 
 import (
 	"strings"
@@ -7,10 +7,10 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewGosmopolitan(s *config.GosmopolitanSettings) *goanalysis.Linter {
+func New(s *config.GosmopolitanSettings) *goanalysis.Linter {
 	a := gosmopolitan.NewAnalyzer()
 
 	cfgMap := map[string]map[string]any{}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet/govet.go
similarity index 74%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/govet/govet.go
index 5a1309cd01fdb21ad3a669d2fd7d3d86e1c68b5d..1211a8833b5e5210cbb2e6fe6d2702541b4cf002 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet/govet.go
@@ -1,7 +1,11 @@
-package golinters
+package govet
 
 import (
+	"slices"
+	"sort"
+
 	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/appends"
 	"golang.org/x/tools/go/analysis/passes/asmdecl"
 	"golang.org/x/tools/go/analysis/passes/assign"
 	"golang.org/x/tools/go/analysis/passes/atomic"
@@ -48,11 +52,13 @@ import (
 	"golang.org/x/tools/go/analysis/passes/unusedwrite"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/logutils"
 )
 
 var (
 	allAnalyzers = []*analysis.Analyzer{
+		appends.Analyzer,
 		asmdecl.Analyzer,
 		assign.Analyzer,
 		atomic.Analyzer,
@@ -95,8 +101,9 @@ var (
 		unusedwrite.Analyzer,
 	}
 
-	// https://github.com/golang/go/blob/c19c4c566c63818dfd059b352e52c4710eecf14d/src/cmd/vet/main.go#L47-L78
+	// https://github.com/golang/go/blob/b56645a87b28840a180d64077877cb46570b4176/src/cmd/vet/main.go#L49-L81
 	defaultAnalyzers = []*analysis.Analyzer{
+		appends.Analyzer,
 		asmdecl.Analyzer,
 		assign.Analyzer,
 		atomic.Analyzer,
@@ -105,6 +112,7 @@ var (
 		cgocall.Analyzer,
 		composite.Analyzer,
 		copylock.Analyzer,
+		defers.Analyzer,
 		directive.Analyzer,
 		errorsas.Analyzer,
 		framepointer.Analyzer,
@@ -130,7 +138,12 @@ var (
 	}
 )
 
-func NewGovet(settings *config.GovetSettings) *goanalysis.Linter {
+var (
+	debugf  = logutils.Debug(logutils.DebugKeyGovet)
+	isDebug = logutils.HaveDebugTag(logutils.DebugKeyGovet)
+)
+
+func New(settings *config.GovetSettings) *goanalysis.Linter {
 	var conf map[string]map[string]any
 	if settings != nil {
 		conf = settings.Settings
@@ -138,23 +151,21 @@ func NewGovet(settings *config.GovetSettings) *goanalysis.Linter {
 
 	return goanalysis.NewLinter(
 		"govet",
-		"Vet examines Go source code and reports suspicious constructs, "+
-			"such as Printf calls whose arguments do not align with the format string",
+		"Vet examines Go source code and reports suspicious constructs. "+
+			"It is roughly the same as 'go vet' and uses its passes.",
 		analyzersFromConfig(settings),
 		conf,
 	).WithLoadMode(goanalysis.LoadModeTypesInfo)
 }
 
 func analyzersFromConfig(settings *config.GovetSettings) []*analysis.Analyzer {
+	debugAnalyzersListf(allAnalyzers, "All available analyzers")
+	debugAnalyzersListf(defaultAnalyzers, "Default analyzers")
+
 	if settings == nil {
 		return defaultAnalyzers
 	}
 
-	if settings.CheckShadowing {
-		// Keeping for backward compatibility.
-		settings.Enable = append(settings.Enable, shadow.Analyzer.Name)
-	}
-
 	var enabledAnalyzers []*analysis.Analyzer
 	for _, a := range allAnalyzers {
 		if isAnalyzerEnabled(a.Name, settings, defaultAnalyzers) {
@@ -162,40 +173,51 @@ func analyzersFromConfig(settings *config.GovetSettings) []*analysis.Analyzer {
 		}
 	}
 
+	debugAnalyzersListf(enabledAnalyzers, "Enabled by config analyzers")
+
 	return enabledAnalyzers
 }
 
 func isAnalyzerEnabled(name string, cfg *config.GovetSettings, defaultAnalyzers []*analysis.Analyzer) bool {
-	if cfg.EnableAll {
-		for _, n := range cfg.Disable {
-			if n == name {
-				return false
-			}
-		}
-		return true
+	// TODO(ldez) remove loopclosure when go1.23
+	if name == loopclosure.Analyzer.Name && config.IsGoGreaterThanOrEqual(cfg.Go, "1.22") {
+		return false
 	}
 
-	// Raw for loops should be OK on small slice lengths.
-	for _, n := range cfg.Enable {
-		if n == name {
-			return true
-		}
+	// Keeping for backward compatibility.
+	if cfg.CheckShadowing && name == shadow.Analyzer.Name {
+		return true
 	}
 
-	for _, n := range cfg.Disable {
-		if n == name {
-			return false
-		}
-	}
+	switch {
+	case cfg.EnableAll:
+		return !slices.Contains(cfg.Disable, name)
+
+	case slices.Contains(cfg.Enable, name):
+		return true
 
-	if cfg.DisableAll {
+	case slices.Contains(cfg.Disable, name):
 		return false
+
+	case cfg.DisableAll:
+		return false
+
+	default:
+		return slices.ContainsFunc(defaultAnalyzers, func(a *analysis.Analyzer) bool { return a.Name == name })
 	}
+}
 
-	for _, a := range defaultAnalyzers {
-		if a.Name == name {
-			return true
-		}
+func debugAnalyzersListf(analyzers []*analysis.Analyzer, message string) {
+	if !isDebug {
+		return
+	}
+
+	analyzerNames := make([]string, 0, len(analyzers))
+	for _, a := range analyzers {
+		analyzerNames = append(analyzerNames, a.Name)
 	}
-	return false
+
+	sort.Strings(analyzerNames)
+
+	debugf("%s (%d): %s", message, len(analyzerNames), analyzerNames)
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper/grouper.go
similarity index 74%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper/grouper.go
index 9feecf3baf8311891227cc2c5a1a6ca58609dd47..aa6ce1cebb15205b9e06c4cbaf4a64aa087a8832 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper/grouper.go
@@ -1,17 +1,19 @@
-package golinters
+package grouper
 
 import (
 	grouper "github.com/leonklingele/grouper/pkg/analyzer"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewGrouper(settings *config.GrouperSettings) *goanalysis.Linter {
+func New(settings *config.GrouperSettings) *goanalysis.Linter {
+	a := grouper.New()
+
 	linterCfg := map[string]map[string]any{}
 	if settings != nil {
-		linterCfg["grouper"] = map[string]any{
+		linterCfg[a.Name] = map[string]any{
 			"const-require-single-const":   settings.ConstRequireSingleConst,
 			"const-require-grouping":       settings.ConstRequireGrouping,
 			"import-require-single-import": settings.ImportRequireSingleImport,
@@ -24,9 +26,9 @@ func NewGrouper(settings *config.GrouperSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		"grouper",
-		"An analyzer to analyze expression groups.",
-		[]*analysis.Analyzer{grouper.New()},
+		a.Name,
+		"Analyze expression groups.",
+		[]*analysis.Analyzer{a},
 		linterCfg,
 	).WithLoadMode(goanalysis.LoadModeSyntax)
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go
deleted file mode 100644
index 1574eaf7091fdd82d7b313de0e4b2c3d4cc09b64..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package golinters
-
-import (
-	"github.com/esimonov/ifshort/pkg/analyzer"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewIfshort(settings *config.IfshortSettings) *goanalysis.Linter {
-	var cfg map[string]map[string]any
-	if settings != nil {
-		cfg = map[string]map[string]any{
-			analyzer.Analyzer.Name: {
-				"max-decl-lines": settings.MaxDeclLines,
-				"max-decl-chars": settings.MaxDeclChars,
-			},
-		}
-	}
-
-	return goanalysis.NewLinter(
-		"ifshort",
-		"Checks that your code uses short syntax for if-statements whenever possible",
-		[]*analysis.Analyzer{analyzer.Analyzer},
-		cfg,
-	).WithLoadMode(goanalysis.LoadModeSyntax)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas/importas.go
similarity index 88%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/importas/importas.go
index b06aec7a3b4b58bda6aa27c2be7d81eca2dab09a..45117c9a488b7c6f4de977e773f1be68dce356c8 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas/importas.go
@@ -1,19 +1,19 @@
-package golinters
+package importas
 
 import (
 	"fmt"
 	"strconv"
 	"strings"
 
-	"github.com/julz/importas" //nolint:misspell
+	"github.com/julz/importas"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 )
 
-func NewImportAs(settings *config.ImportAsSettings) *goanalysis.Linter {
+func New(settings *config.ImportAsSettings) *goanalysis.Linter {
 	analyzer := importas.Analyzer
 
 	return goanalysis.NewLinter(
@@ -26,7 +26,7 @@ func NewImportAs(settings *config.ImportAsSettings) *goanalysis.Linter {
 			return
 		}
 		if len(settings.Alias) == 0 {
-			lintCtx.Log.Infof("importas settings found, but no aliases listed. List aliases under alias: key.") //nolint:misspell
+			lintCtx.Log.Infof("importas settings found, but no aliases listed. List aliases under alias: key.")
 		}
 
 		if err := analyzer.Flags.Set("no-unaliased", strconv.FormatBool(settings.NoUnaliased)); err != nil {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/inamedparam/inamedparam.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/inamedparam/inamedparam.go
new file mode 100644
index 0000000000000000000000000000000000000000..5cf06a08cc8160795ed5b8cea2b31c6d32cfc20a
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/inamedparam/inamedparam.go
@@ -0,0 +1,30 @@
+package inamedparam
+
+import (
+	"github.com/macabu/inamedparam"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.INamedParamSettings) *goanalysis.Linter {
+	a := inamedparam.Analyzer
+
+	var cfg map[string]map[string]any
+
+	if settings != nil {
+		cfg = map[string]map[string]any{
+			a.Name: {
+				"skip-single-param": settings.SkipSingleParam,
+			},
+		}
+	}
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		cfg,
+	).WithLoadMode(goanalysis.LoadModeSyntax)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ineffassign.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ineffassign/ineffassign.go
similarity index 57%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/ineffassign.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/ineffassign/ineffassign.go
index c87bb2fa51c089fb24acc420357b6312690305c1..ba86fb90e374e7b4da70864d6a4ddc5444409c87 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ineffassign.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ineffassign/ineffassign.go
@@ -1,17 +1,19 @@
-package golinters
+package ineffassign
 
 import (
 	"github.com/gordonklaus/ineffassign/pkg/ineffassign"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewIneffassign() *goanalysis.Linter {
+func New() *goanalysis.Linter {
+	a := ineffassign.Analyzer
+
 	return goanalysis.NewLinter(
-		"ineffassign",
+		a.Name,
 		"Detects when assignments to existing variables are not used",
-		[]*analysis.Analyzer{ineffassign.Analyzer},
+		[]*analysis.Analyzer{a},
 		nil,
 	).WithLoadMode(goanalysis.LoadModeSyntax)
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacebloat.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacebloat/interfacebloat.go
similarity index 74%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacebloat.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacebloat/interfacebloat.go
index a6dbfe178fe10c94c8c831e6d3876a8e26e20f5e..88927a3d9bcf20e7e804c1178cdc658f144531c7 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacebloat.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacebloat/interfacebloat.go
@@ -1,14 +1,14 @@
-package golinters
+package interfacebloat
 
 import (
 	"github.com/sashamelentyev/interfacebloat/pkg/analyzer"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewInterfaceBloat(settings *config.InterfaceBloatSettings) *goanalysis.Linter {
+func New(settings *config.InterfaceBloatSettings) *goanalysis.Linter {
 	a := analyzer.New()
 
 	var cfg map[string]map[string]any
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go
deleted file mode 100644
index 71bdfddbe815de27b02867b8ba88adb23fb22f75..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go
+++ /dev/null
@@ -1,82 +0,0 @@
-package golinters
-
-import (
-	"sync"
-
-	"golang.org/x/tools/go/analysis"
-	"golang.org/x/tools/go/analysis/passes/buildssa"
-	"mvdan.cc/interfacer/check"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-const interfacerName = "interfacer"
-
-func NewInterfacer() *goanalysis.Linter {
-	var mu sync.Mutex
-	var resIssues []goanalysis.Issue
-
-	analyzer := &analysis.Analyzer{
-		Name:     interfacerName,
-		Doc:      goanalysis.TheOnlyanalyzerDoc,
-		Requires: []*analysis.Analyzer{buildssa.Analyzer},
-		Run: func(pass *analysis.Pass) (any, error) {
-			issues, err := runInterfacer(pass)
-			if err != nil {
-				return nil, err
-			}
-
-			if len(issues) == 0 {
-				return nil, nil
-			}
-
-			mu.Lock()
-			resIssues = append(resIssues, issues...)
-			mu.Unlock()
-
-			return nil, nil
-		},
-	}
-
-	return goanalysis.NewLinter(
-		interfacerName,
-		"Linter that suggests narrower interface types",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
-		return resIssues
-	}).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
-
-func runInterfacer(pass *analysis.Pass) ([]goanalysis.Issue, error) {
-	c := &check.Checker{}
-
-	prog := goanalysis.MakeFakeLoaderProgram(pass)
-	c.Program(prog)
-
-	ssa := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
-	ssaPkg := ssa.Pkg
-	c.ProgramSSA(ssaPkg.Prog)
-
-	lintIssues, err := c.Check()
-	if err != nil {
-		return nil, err
-	}
-	if len(lintIssues) == 0 {
-		return nil, nil
-	}
-
-	issues := make([]goanalysis.Issue, 0, len(lintIssues))
-	for _, i := range lintIssues {
-		pos := pass.Fset.Position(i.Pos())
-		issues = append(issues, goanalysis.NewIssue(&result.Issue{
-			Pos:        pos,
-			Text:       i.Message(),
-			FromLinter: interfacerName,
-		}, pass))
-	}
-
-	return issues, nil
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/commons.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/commons.go
new file mode 100644
index 0000000000000000000000000000000000000000..c21dd00927938c3390ce18283f4f62f229c3be72
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/commons.go
@@ -0,0 +1,6 @@
+package internal
+
+import "github.com/golangci/golangci-lint/pkg/logutils"
+
+// LinterLogger must be use only when the context logger is not available.
+var LinterLogger = logutils.NewStderrLog(logutils.DebugKeyLinter)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/diff.go
similarity index 85%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/diff.go
index 6b7184d65bc199c9fc9a7f1fd539e1ca6faf47bc..b20230dfa9cfdb749d9ca123949702a880e49207 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/diff.go
@@ -1,4 +1,4 @@
-package golinters
+package internal
 
 import (
 	"bytes"
@@ -27,6 +27,8 @@ const (
 	diffLineDeleted  diffLineType = "deleted"
 )
 
+type fmtTextFormatter func(settings *config.LintersSettings) string
+
 type diffLine struct {
 	originalNumber int // 1-based original line number
 	typ            diffLineType
@@ -133,8 +135,8 @@ func (p *hunkChangesParser) handleDeletedLines(deletedLines []diffLine, addedLin
 	}
 
 	if len(addedLines) != 0 {
-		//nolint:gocritic
-		change.Replacement.NewLines = append(p.replacementLinesToPrepend, addedLines...)
+		change.Replacement.NewLines = append([]string{}, p.replacementLinesToPrepend...)
+		change.Replacement.NewLines = append(change.Replacement.NewLines, addedLines...)
 		if len(p.replacementLinesToPrepend) != 0 {
 			p.replacementLinesToPrepend = nil
 		}
@@ -217,34 +219,7 @@ func (p *hunkChangesParser) parse(h *diffpkg.Hunk) []Change {
 	return p.ret
 }
 
-func getErrorTextForLinter(settings *config.LintersSettings, linterName string) string {
-	text := "File is not formatted"
-	switch linterName {
-	case gciName:
-		text = getErrorTextForGci(settings.Gci)
-	case gofumptName:
-		text = "File is not `gofumpt`-ed"
-		if settings.Gofumpt.ExtraRules {
-			text += " with `-extra`"
-		}
-	case gofmtName:
-		text = "File is not `gofmt`-ed"
-		if settings.Gofmt.Simplify {
-			text += " with `-s`"
-		}
-		for _, rule := range settings.Gofmt.RewriteRules {
-			text += fmt.Sprintf(" `-r '%s -> %s'`", rule.Pattern, rule.Replacement)
-		}
-	case goimportsName:
-		text = "File is not `goimports`-ed"
-		if settings.Goimports.LocalPrefixes != "" {
-			text += " with -local " + settings.Goimports.LocalPrefixes
-		}
-	}
-	return text
-}
-
-func extractIssuesFromPatch(patch string, lintCtx *linter.Context, linterName string) ([]result.Issue, error) {
+func ExtractIssuesFromPatch(patch string, lintCtx *linter.Context, linterName string, formatter fmtTextFormatter) ([]result.Issue, error) {
 	diffs, err := diffpkg.ParseMultiFileDiff([]byte(patch))
 	if err != nil {
 		return nil, fmt.Errorf("can't parse patch: %w", err)
@@ -274,7 +249,7 @@ func extractIssuesFromPatch(patch string, lintCtx *linter.Context, linterName st
 						Filename: d.NewName,
 						Line:     change.LineRange.From,
 					},
-					Text:        getErrorTextForLinter(lintCtx.Settings(), linterName),
+					Text:        formatter(lintCtx.Settings()),
 					Replacement: &change.Replacement,
 				}
 				if change.LineRange.From != change.LineRange.To {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck_common.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go
similarity index 92%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck_common.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go
index 0eb21ec9cfb096be7e3268da49643d72b275c181..5b5812c318a24dd2bb730eb918c7a1a7f49782d9 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck_common.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go
@@ -1,4 +1,4 @@
-package golinters
+package internal
 
 import (
 	"strings"
@@ -14,7 +14,7 @@ import (
 
 var debugf = logutils.Debug(logutils.DebugKeyMegacheck)
 
-func getGoVersion(settings *config.StaticCheckSettings) string {
+func GetGoVersion(settings *config.StaticCheckSettings) string {
 	var goVersion string
 	if settings != nil {
 		goVersion = settings.GoVersion
@@ -27,7 +27,7 @@ func getGoVersion(settings *config.StaticCheckSettings) string {
 	return "1.17"
 }
 
-func setupStaticCheckAnalyzers(src []*lint.Analyzer, goVersion string, checks []string) []*analysis.Analyzer {
+func SetupStaticCheckAnalyzers(src []*lint.Analyzer, goVersion string, checks []string) []*analysis.Analyzer {
 	var names []string
 	for _, a := range src {
 		names = append(names, a.Analyzer.Name)
@@ -38,7 +38,7 @@ func setupStaticCheckAnalyzers(src []*lint.Analyzer, goVersion string, checks []
 	var ret []*analysis.Analyzer
 	for _, a := range src {
 		if filter[a.Analyzer.Name] {
-			setAnalyzerGoVersion(a.Analyzer, goVersion)
+			SetAnalyzerGoVersion(a.Analyzer, goVersion)
 			ret = append(ret, a.Analyzer)
 		}
 	}
@@ -46,7 +46,7 @@ func setupStaticCheckAnalyzers(src []*lint.Analyzer, goVersion string, checks []
 	return ret
 }
 
-func setAnalyzerGoVersion(a *analysis.Analyzer, goVersion string) {
+func SetAnalyzerGoVersion(a *analysis.Analyzer, goVersion string) {
 	if v := a.Flags.Lookup("go"); v != nil {
 		if err := v.Value.Set(goVersion); err != nil {
 			debugf("Failed to set go version: %s", err)
@@ -54,7 +54,7 @@ func setAnalyzerGoVersion(a *analysis.Analyzer, goVersion string) {
 	}
 }
 
-func staticCheckConfig(settings *config.StaticCheckSettings) *scconfig.Config {
+func StaticCheckConfig(settings *config.StaticCheckSettings) *scconfig.Config {
 	var cfg *scconfig.Config
 
 	if settings == nil || !settings.HasConfiguration() {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/util.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/util.go
similarity index 66%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/util.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/util.go
index 1044567a951b5a5829b3e5f0d419b993b7d697f6..80b194dd26db73edf858d819906c696b1e3eb915 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/util.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/util.go
@@ -1,4 +1,4 @@
-package golinters
+package internal
 
 import (
 	"fmt"
@@ -10,7 +10,7 @@ import (
 	"github.com/golangci/golangci-lint/pkg/config"
 )
 
-func formatCode(code string, _ *config.Config) string {
+func FormatCode(code string, _ *config.Config) string {
 	if strings.Contains(code, "`") {
 		return code // TODO: properly escape or remove
 	}
@@ -18,15 +18,7 @@ func formatCode(code string, _ *config.Config) string {
 	return fmt.Sprintf("`%s`", code)
 }
 
-func formatCodeBlock(code string, _ *config.Config) string {
-	if strings.Contains(code, "`") {
-		return code // TODO: properly escape or remove
-	}
-
-	return fmt.Sprintf("```\n%s\n```", code)
-}
-
-func getFileNames(pass *analysis.Pass) []string {
+func GetFileNames(pass *analysis.Pass) []string {
 	var fileNames []string
 	for _, f := range pass.Files {
 		fileName := pass.Fset.PositionFor(f.Pos(), true).Filename
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/intrange/intrange.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/intrange/intrange.go
new file mode 100644
index 0000000000000000000000000000000000000000..a27569ebbcbedb0130b7f93bf4b066692a4b875a
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/intrange/intrange.go
@@ -0,0 +1,19 @@
+package intrange
+
+import (
+	"github.com/ckaznocha/intrange"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New() *goanalysis.Linter {
+	a := intrange.Analyzer
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeSyntax)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn/ireturn.go
similarity index 61%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn/ireturn.go
index 34dc09d2684d53b827d243e80d9cc49fedcc5192..57de57111e0d4d98a89dd77bda17b9fc1596f949 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn/ireturn.go
@@ -1,4 +1,4 @@
-package golinters
+package ireturn
 
 import (
 	"strings"
@@ -7,17 +7,18 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewIreturn(settings *config.IreturnSettings) *goanalysis.Linter {
+func New(settings *config.IreturnSettings) *goanalysis.Linter {
 	a := analyzer.NewAnalyzer()
 
 	cfg := map[string]map[string]any{}
 	if settings != nil {
 		cfg[a.Name] = map[string]any{
-			"allow":  strings.Join(settings.Allow, ","),
-			"reject": strings.Join(settings.Reject, ","),
+			"allow":    strings.Join(settings.Allow, ","),
+			"reject":   strings.Join(settings.Reject, ","),
+			"nonolint": true,
 		}
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll/lll.go
similarity index 85%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/lll/lll.go
index 9ed320120960dc6efc17ca8ee95a0ec713269e5c..15edcccad4a41f5d52c0ba1919b45d6ec1433b44 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll/lll.go
@@ -1,7 +1,8 @@
-package golinters
+package lll
 
 import (
 	"bufio"
+	"errors"
 	"fmt"
 	"go/token"
 	"os"
@@ -12,22 +13,22 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const lllName = "lll"
+const linterName = "lll"
 
 const goCommentDirectivePrefix = "//go:"
 
-//nolint:dupl
-func NewLLL(settings *config.LllSettings) *goanalysis.Linter {
+func New(settings *config.LllSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: lllName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			issues, err := runLll(pass, settings)
@@ -48,7 +49,7 @@ func NewLLL(settings *config.LllSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		lllName,
+		linterName,
 		"Reports long lines",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -58,7 +59,7 @@ func NewLLL(settings *config.LllSettings) *goanalysis.Linter {
 }
 
 func runLll(pass *analysis.Pass, settings *config.LllSettings) ([]goanalysis.Issue, error) {
-	fileNames := getFileNames(pass)
+	fileNames := internal.GetFileNames(pass)
 
 	spaces := strings.Repeat(" ", settings.TabWidth)
 
@@ -82,7 +83,7 @@ func getLLLIssuesForFile(filename string, maxLineLen int, tabSpaces string) ([]r
 
 	f, err := os.Open(filename)
 	if err != nil {
-		return nil, fmt.Errorf("can't open file %s: %s", filename, err)
+		return nil, fmt.Errorf("can't open file %s: %w", filename, err)
 	}
 	defer f.Close()
 
@@ -121,13 +122,13 @@ func getLLLIssuesForFile(filename string, maxLineLen int, tabSpaces string) ([]r
 					Line:     lineNumber,
 				},
 				Text:       fmt.Sprintf("line is %d characters", lineLen),
-				FromLinter: lllName,
+				FromLinter: linterName,
 			})
 		}
 	}
 
 	if err := scanner.Err(); err != nil {
-		if err == bufio.ErrTooLong && maxLineLen < bufio.MaxScanTokenSize {
+		if errors.Is(err, bufio.ErrTooLong) && maxLineLen < bufio.MaxScanTokenSize {
 			// scanner.Scan() might fail if the line is longer than bufio.MaxScanTokenSize
 			// In the case where the specified maxLineLen is smaller than bufio.MaxScanTokenSize
 			// we can return this line as a long line instead of returning an error.
@@ -145,10 +146,10 @@ func getLLLIssuesForFile(filename string, maxLineLen int, tabSpaces string) ([]r
 					Column:   1,
 				},
 				Text:       fmt.Sprintf("line is more than %d characters", bufio.MaxScanTokenSize),
-				FromLinter: lllName,
+				FromLinter: linterName,
 			})
 		} else {
-			return nil, fmt.Errorf("can't scan file %s: %s", filename, err)
+			return nil, fmt.Errorf("can't scan file %s: %w", filename, err)
 		}
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/loggercheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/loggercheck/loggercheck.go
similarity index 84%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/loggercheck.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/loggercheck/loggercheck.go
index fc29127c3bb7fb8689625a12df75b9b2bd5adf00..077e8a512f363fce27feea590830afda27a4a675 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/loggercheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/loggercheck/loggercheck.go
@@ -1,14 +1,14 @@
-package golinters
+package loggercheck
 
 import (
 	"github.com/timonwong/loggercheck"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewLoggerCheck(settings *config.LoggerCheckSettings) *goanalysis.Linter {
+func New(settings *config.LoggerCheckSettings) *goanalysis.Linter {
 	var opts []loggercheck.Option
 
 	if settings != nil {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx/maintidx.go
similarity index 76%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx/maintidx.go
index 55509d970c19a6817e2090be5fa3ea7013ac78d8..08f12369e64154eea4225b63bb6d1c45196759cd 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx/maintidx.go
@@ -1,14 +1,14 @@
-package golinters
+package maintidx
 
 import (
 	"github.com/yagipy/maintidx"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewMaintIdx(cfg *config.MaintIdxSettings) *goanalysis.Linter {
+func New(cfg *config.MaintIdxSettings) *goanalysis.Linter {
 	analyzer := maintidx.Analyzer
 
 	cfgMap := map[string]map[string]any{
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero/makezero.go
similarity index 85%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero/makezero.go
index a9828629a2e1f64e470ef36b0a25ad8fcc921312..ae4bf2184222b68a1c04b1651751da5594818bea 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero/makezero.go
@@ -1,4 +1,4 @@
-package golinters
+package makezero
 
 import (
 	"fmt"
@@ -8,20 +8,19 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const makezeroName = "makezero"
+const linterName = "makezero"
 
-//nolint:dupl
-func NewMakezero(settings *config.MakezeroSettings) *goanalysis.Linter {
+func New(settings *config.MakezeroSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: makezeroName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			issues, err := runMakeZero(pass, settings)
@@ -42,7 +41,7 @@ func NewMakezero(settings *config.MakezeroSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		makezeroName,
+		linterName,
 		"Finds slice declarations with non-zero initial length",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -66,7 +65,7 @@ func runMakeZero(pass *analysis.Pass, settings *config.MakezeroSettings) ([]goan
 			issues = append(issues, goanalysis.NewIssue(&result.Issue{
 				Pos:        hint.Position(),
 				Text:       hint.Details(),
-				FromLinter: makezeroName,
+				FromLinter: linterName,
 			}, pass))
 		}
 	}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go
deleted file mode 100644
index 0455be76aa999246852cbd3ee9e81b6d7ca11383..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go
+++ /dev/null
@@ -1,74 +0,0 @@
-package golinters
-
-import (
-	"fmt"
-	"sync"
-
-	malignedAPI "github.com/golangci/maligned"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-const malignedName = "maligned"
-
-//nolint:dupl
-func NewMaligned(settings *config.MalignedSettings) *goanalysis.Linter {
-	var mu sync.Mutex
-	var res []goanalysis.Issue
-
-	analyzer := &analysis.Analyzer{
-		Name: malignedName,
-		Doc:  goanalysis.TheOnlyanalyzerDoc,
-		Run: func(pass *analysis.Pass) (any, error) {
-			issues := runMaligned(pass, settings)
-
-			if len(issues) == 0 {
-				return nil, nil
-			}
-
-			mu.Lock()
-			res = append(res, issues...)
-			mu.Unlock()
-
-			return nil, nil
-		},
-	}
-
-	return goanalysis.NewLinter(
-		malignedName,
-		"Tool to detect Go structs that would take less memory if their fields were sorted",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
-		return res
-	}).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
-
-func runMaligned(pass *analysis.Pass, settings *config.MalignedSettings) []goanalysis.Issue {
-	prog := goanalysis.MakeFakeLoaderProgram(pass)
-
-	malignedIssues := malignedAPI.Run(prog)
-	if len(malignedIssues) == 0 {
-		return nil
-	}
-
-	issues := make([]goanalysis.Issue, 0, len(malignedIssues))
-	for _, i := range malignedIssues {
-		text := fmt.Sprintf("struct of size %d bytes could be of size %d bytes", i.OldSize, i.NewSize)
-		if settings.SuggestNewOrder {
-			text += fmt.Sprintf(":\n%s", formatCodeBlock(i.NewStructDef, nil))
-		}
-
-		issues = append(issues, goanalysis.NewIssue(&result.Issue{
-			Pos:        i.Pos,
-			Text:       text,
-			FromLinter: malignedName,
-		}, pass))
-	}
-
-	return issues
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror/mirror.go
similarity index 91%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror/mirror.go
index 4adc001a19e39119b3aaa80b7893cc92dec40419..34b880b5295f5fe21390a1b154a54c31b2c4a602 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror/mirror.go
@@ -1,4 +1,4 @@
-package golinters
+package mirror
 
 import (
 	"sync"
@@ -6,12 +6,12 @@ import (
 	"github.com/butuzov/mirror"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-func NewMirror() *goanalysis.Linter {
+func New() *goanalysis.Linter {
 	var (
 		mu     sync.Mutex
 		issues []goanalysis.Issue
@@ -39,7 +39,7 @@ func NewMirror() *goanalysis.Linter {
 				Pos:        i.Start,
 			}
 
-			if len(i.InlineFix) > 0 {
+			if i.InlineFix != "" {
 				issue.Replacement = &result.Replacement{
 					Inline: &result.InlineFix{
 						StartCol:  i.Start.Column - 1,
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell/misspell.go
similarity index 55%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell/misspell.go
index ce2b79a7c1f8dc207d02c4133ad9c9ab2f2f41ea..44409cec9deefbc72bbbe0519fed0230c1a8f49f 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell/misspell.go
@@ -1,35 +1,37 @@
-package golinters
+package misspell
 
 import (
 	"fmt"
 	"go/token"
 	"strings"
 	"sync"
+	"unicode"
 
 	"github.com/golangci/misspell"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const misspellName = "misspell"
+const linterName = "misspell"
 
-func NewMisspell(settings *config.MisspellSettings) *goanalysis.Linter {
+func New(settings *config.MisspellSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: misspellName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run:  goanalysis.DummyRun,
 	}
 
 	return goanalysis.NewLinter(
-		misspellName,
-		"Finds commonly misspelled English words in comments",
+		linterName,
+		"Finds commonly misspelled English words",
 		[]*analysis.Analyzer{analyzer},
 		nil,
 	).WithContextSetter(func(lintCtx *linter.Context) {
@@ -40,7 +42,7 @@ func NewMisspell(settings *config.MisspellSettings) *goanalysis.Linter {
 				return nil, ruleErr
 			}
 
-			issues, err := runMisspell(lintCtx, pass, replacer)
+			issues, err := runMisspell(lintCtx, pass, replacer, settings.Mode)
 			if err != nil {
 				return nil, err
 			}
@@ -60,15 +62,16 @@ func NewMisspell(settings *config.MisspellSettings) *goanalysis.Linter {
 	}).WithLoadMode(goanalysis.LoadModeSyntax)
 }
 
-func runMisspell(lintCtx *linter.Context, pass *analysis.Pass, replacer *misspell.Replacer) ([]goanalysis.Issue, error) {
-	fileNames := getFileNames(pass)
+func runMisspell(lintCtx *linter.Context, pass *analysis.Pass, replacer *misspell.Replacer, mode string) ([]goanalysis.Issue, error) {
+	fileNames := internal.GetFileNames(pass)
 
 	var issues []goanalysis.Issue
 	for _, filename := range fileNames {
-		lintIssues, err := runMisspellOnFile(lintCtx, filename, replacer)
+		lintIssues, err := runMisspellOnFile(lintCtx, filename, replacer, mode)
 		if err != nil {
 			return nil, err
 		}
+
 		for i := range lintIssues {
 			issues = append(issues, goanalysis.NewIssue(&lintIssues[i], pass))
 		}
@@ -94,6 +97,11 @@ func createMisspellReplacer(settings *config.MisspellSettings) (*misspell.Replac
 		return nil, fmt.Errorf("unknown locale: %q", settings.Locale)
 	}
 
+	err := appendExtraWords(replacer, settings.ExtraWords)
+	if err != nil {
+		return nil, fmt.Errorf("process extra words: %w", err)
+	}
+
 	if len(settings.IgnoreWords) != 0 {
 		replacer.RemoveRule(settings.IgnoreWords)
 	}
@@ -104,25 +112,36 @@ func createMisspellReplacer(settings *config.MisspellSettings) (*misspell.Replac
 	return replacer, nil
 }
 
-func runMisspellOnFile(lintCtx *linter.Context, filename string, replacer *misspell.Replacer) ([]result.Issue, error) {
-	var res []result.Issue
+func runMisspellOnFile(lintCtx *linter.Context, filename string, replacer *misspell.Replacer, mode string) ([]result.Issue, error) {
 	fileContent, err := lintCtx.FileCache.GetFileBytes(filename)
 	if err != nil {
-		return nil, fmt.Errorf("can't get file %s contents: %s", filename, err)
+		return nil, fmt.Errorf("can't get file %s contents: %w", filename, err)
+	}
+
+	// `r.ReplaceGo` doesn't find issues inside strings: it searches only inside comments.
+	// `r.Replace` searches all words: it treats input as a plain text.
+	// The standalone misspell tool uses `r.Replace` by default.
+	var replace func(input string) (string, []misspell.Diff)
+	switch strings.ToLower(mode) {
+	case "restricted":
+		replace = replacer.ReplaceGo
+	default:
+		replace = replacer.Replace
 	}
 
-	// use r.Replace, not r.ReplaceGo because r.ReplaceGo doesn't find
-	// issues inside strings: it searches only inside comments. r.Replace
-	// searches all words: it treats input as a plain text. A standalone misspell
-	// tool uses r.Replace by default.
-	_, diffs := replacer.Replace(string(fileContent))
+	_, diffs := replace(string(fileContent))
+
+	var res []result.Issue
+
 	for _, diff := range diffs {
 		text := fmt.Sprintf("`%s` is a misspelling of `%s`", diff.Original, diff.Corrected)
+
 		pos := token.Position{
 			Filename: filename,
 			Line:     diff.Line,
 			Column:   diff.Column + 1,
 		}
+
 		replacement := &result.Replacement{
 			Inline: &result.InlineFix{
 				StartCol:  diff.Column,
@@ -134,10 +153,37 @@ func runMisspellOnFile(lintCtx *linter.Context, filename string, replacer *missp
 		res = append(res, result.Issue{
 			Pos:         pos,
 			Text:        text,
-			FromLinter:  misspellName,
+			FromLinter:  linterName,
 			Replacement: replacement,
 		})
 	}
 
 	return res, nil
 }
+
+func appendExtraWords(replacer *misspell.Replacer, extraWords []config.MisspellExtraWords) error {
+	if len(extraWords) == 0 {
+		return nil
+	}
+
+	extra := make([]string, 0, len(extraWords)*2)
+
+	for _, word := range extraWords {
+		if word.Typo == "" || word.Correction == "" {
+			return fmt.Errorf("typo (%q) and correction (%q) fields should not be empty", word.Typo, word.Correction)
+		}
+
+		if strings.ContainsFunc(word.Typo, func(r rune) bool { return !unicode.IsLetter(r) }) {
+			return fmt.Errorf("the word %q in the 'typo' field should only contain letters", word.Typo)
+		}
+		if strings.ContainsFunc(word.Correction, func(r rune) bool { return !unicode.IsLetter(r) }) {
+			return fmt.Errorf("the word %q in the 'correction' field should only contain letters", word.Correction)
+		}
+
+		extra = append(extra, strings.ToLower(word.Typo), strings.ToLower(word.Correction))
+	}
+
+	replacer.AddRuleList(extra)
+
+	return nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go
new file mode 100644
index 0000000000000000000000000000000000000000..ee73c535b65ecb46ee4287d359a368f03051e462
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go
@@ -0,0 +1,61 @@
+package mnd
+
+import (
+	mnd "github.com/tommy-muehle/go-mnd/v2"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.MndSettings) *goanalysis.Linter {
+	return newMND(mnd.Analyzer, settings, nil)
+}
+
+func NewGoMND(settings *config.GoMndSettings) *goanalysis.Linter {
+	// shallow copy because mnd.Analyzer is a global variable.
+	a := new(analysis.Analyzer)
+	*a = *mnd.Analyzer
+
+	// Used to force the analyzer name to use the same name as the linter.
+	// This is required to avoid displaying the analyzer name inside the issue text.
+	a.Name = "gomnd"
+
+	var linterCfg map[string]map[string]any
+
+	if settings != nil && len(settings.Settings) > 0 {
+		// Convert deprecated setting.
+		linterCfg = settings.Settings
+	}
+
+	return newMND(a, &settings.MndSettings, linterCfg)
+}
+
+func newMND(a *analysis.Analyzer, settings *config.MndSettings, linterCfg map[string]map[string]any) *goanalysis.Linter {
+	if len(linterCfg) == 0 && settings != nil {
+		cfg := make(map[string]any)
+		if len(settings.Checks) > 0 {
+			cfg["checks"] = settings.Checks
+		}
+		if len(settings.IgnoredNumbers) > 0 {
+			cfg["ignored-numbers"] = settings.IgnoredNumbers
+		}
+		if len(settings.IgnoredFiles) > 0 {
+			cfg["ignored-files"] = settings.IgnoredFiles
+		}
+		if len(settings.IgnoredFunctions) > 0 {
+			cfg["ignored-functions"] = settings.IgnoredFunctions
+		}
+
+		linterCfg = map[string]map[string]any{
+			a.Name: cfg,
+		}
+	}
+
+	return goanalysis.NewLinter(
+		a.Name,
+		"An analyzer to detect magic numbers.",
+		[]*analysis.Analyzer{a},
+		linterCfg,
+	).WithLoadMode(goanalysis.LoadModeSyntax)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag/musttag.go
similarity index 72%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag/musttag.go
index d9ea7efc7e1e17e72cca48aa4e1a57c3a4b7f1b7..30047abfc2a9895dd33308a62c5e43532d95b2e2 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag/musttag.go
@@ -1,14 +1,14 @@
-package golinters
+package musttag
 
 import (
-	"go.tmz.dev/musttag"
+	"go-simpler.org/musttag"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewMustTag(setting *config.MustTagSettings) *goanalysis.Linter {
+func New(setting *config.MustTagSettings) *goanalysis.Linter {
 	var funcs []musttag.Func
 
 	if setting != nil {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go
deleted file mode 100644
index d276ac6a9a36b845f7c3f8d666680e1176369c75..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go
+++ /dev/null
@@ -1,27 +0,0 @@
-package golinters
-
-import (
-	"github.com/alexkohler/nakedret/v2"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-const nakedretName = "nakedret"
-
-func NewNakedret(settings *config.NakedretSettings) *goanalysis.Linter {
-	var maxLines int
-	if settings != nil {
-		maxLines = settings.MaxFuncLines
-	}
-
-	analyzer := nakedret.NakedReturnAnalyzer(uint(maxLines))
-
-	return goanalysis.NewLinter(
-		nakedretName,
-		"Finds naked returns in functions greater than a specified function length",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithLoadMode(goanalysis.LoadModeSyntax)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go
new file mode 100644
index 0000000000000000000000000000000000000000..4dd3fd4c3fd727e91b7d565f7bc0b5e323ca84ce
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go
@@ -0,0 +1,25 @@
+package nakedret
+
+import (
+	"github.com/alexkohler/nakedret/v2"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.NakedretSettings) *goanalysis.Linter {
+	var maxLines int
+	if settings != nil {
+		maxLines = settings.MaxFuncLines
+	}
+
+	a := nakedret.NakedReturnAnalyzer(uint(maxLines))
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeSyntax)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif/nestif.go
similarity index 87%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif/nestif.go
index 12ad69eceb39202c24cb42b6f6ef00b741f27f5a..43be973b0aeee5db0ca45033e39395e27b384012 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif/nestif.go
@@ -1,4 +1,4 @@
-package golinters
+package nestif
 
 import (
 	"sort"
@@ -8,15 +8,14 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const nestifName = "nestif"
+const linterName = "nestif"
 
-//nolint:dupl
-func NewNestif(settings *config.NestifSettings) *goanalysis.Linter {
+func New(settings *config.NestifSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
@@ -39,7 +38,7 @@ func NewNestif(settings *config.NestifSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		nestifName,
+		linterName,
 		"Reports deeply nested if statements",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -71,7 +70,7 @@ func runNestIf(pass *analysis.Pass, settings *config.NestifSettings) []goanalysi
 		issues = append(issues, goanalysis.NewIssue(&result.Issue{
 			Pos:        i.Pos,
 			Text:       i.Message,
-			FromLinter: nestifName,
+			FromLinter: linterName,
 		}, pass))
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilerr.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilerr/nilerr.go
similarity index 72%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/nilerr.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/nilerr/nilerr.go
index 2ea16f2f3958e312670989ce340f3da614dfcd9a..c9e466905e86f180510aa69572a371bf5b6aad62 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilerr.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilerr/nilerr.go
@@ -1,13 +1,13 @@
-package golinters
+package nilerr
 
 import (
 	"github.com/gostaticanalysis/nilerr"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewNilErr() *goanalysis.Linter {
+func New() *goanalysis.Linter {
 	a := nilerr.Analyzer
 
 	return goanalysis.NewLinter(
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go
similarity index 77%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go
index 804557b76dc27488a044413569dc21c37886a8f9..c9237035d3310aa797c520315f6735a982e58e70 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go
@@ -1,4 +1,4 @@
-package golinters
+package nilnil
 
 import (
 	"strings"
@@ -7,10 +7,10 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewNilNil(cfg *config.NilNilSettings) *goanalysis.Linter {
+func New(cfg *config.NilNilSettings) *goanalysis.Linter {
 	a := analyzer.New()
 
 	cfgMap := make(map[string]map[string]any)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn/nlreturn.go
similarity index 77%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn/nlreturn.go
index a359548f420fe8158e2921fec9f12b93322a9ae5..50921880850193c68e467a334ab0b735fb62ed4a 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn/nlreturn.go
@@ -1,14 +1,14 @@
-package golinters
+package nlreturn
 
 import (
 	"github.com/ssgreg/nlreturn/v2/pkg/nlreturn"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewNLReturn(settings *config.NlreturnSettings) *goanalysis.Linter {
+func New(settings *config.NlreturnSettings) *goanalysis.Linter {
 	a := nlreturn.NewAnalyzer()
 
 	cfg := map[string]map[string]any{}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx.go
deleted file mode 100644
index b7fcd2a737afa4b404d2fae023333b1e9499bb44..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package golinters
-
-import (
-	"github.com/sonatard/noctx"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewNoctx() *goanalysis.Linter {
-	return goanalysis.NewLinter(
-		"noctx",
-		"noctx finds sending http request without context.Context",
-		[]*analysis.Analyzer{noctx.Analyzer},
-		nil,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx/noctx.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx/noctx.go
new file mode 100644
index 0000000000000000000000000000000000000000..8a063c613c31d7d226d02c833d1b1cca4d2a0dac
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx/noctx.go
@@ -0,0 +1,19 @@
+package noctx
+
+import (
+	"github.com/sonatard/noctx"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New() *goanalysis.Linter {
+	a := noctx.Analyzer
+
+	return goanalysis.NewLinter(
+		a.Name,
+		"Finds sending http request without context.Context",
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go
deleted file mode 100644
index 00ef1f833f8f3c0656920c3a0b874f8ce471751f..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go
+++ /dev/null
@@ -1,105 +0,0 @@
-package golinters
-
-import (
-	"fmt"
-	"go/ast"
-	"sync"
-
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/golinters/nolintlint"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-const NoLintLintName = "nolintlint"
-
-//nolint:dupl
-func NewNoLintLint(settings *config.NoLintLintSettings) *goanalysis.Linter {
-	var mu sync.Mutex
-	var resIssues []goanalysis.Issue
-
-	analyzer := &analysis.Analyzer{
-		Name: NoLintLintName,
-		Doc:  goanalysis.TheOnlyanalyzerDoc,
-		Run: func(pass *analysis.Pass) (any, error) {
-			issues, err := runNoLintLint(pass, settings)
-			if err != nil {
-				return nil, err
-			}
-
-			if len(issues) == 0 {
-				return nil, nil
-			}
-
-			mu.Lock()
-			resIssues = append(resIssues, issues...)
-			mu.Unlock()
-
-			return nil, nil
-		},
-	}
-
-	return goanalysis.NewLinter(
-		NoLintLintName,
-		"Reports ill-formed or insufficient nolint directives",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
-		return resIssues
-	}).WithLoadMode(goanalysis.LoadModeSyntax)
-}
-
-func runNoLintLint(pass *analysis.Pass, settings *config.NoLintLintSettings) ([]goanalysis.Issue, error) {
-	var needs nolintlint.Needs
-	if settings.RequireExplanation {
-		needs |= nolintlint.NeedsExplanation
-	}
-	if settings.RequireSpecific {
-		needs |= nolintlint.NeedsSpecific
-	}
-	if !settings.AllowUnused {
-		needs |= nolintlint.NeedsUnused
-	}
-
-	lnt, err := nolintlint.NewLinter(needs, settings.AllowNoExplanation)
-	if err != nil {
-		return nil, err
-	}
-
-	nodes := make([]ast.Node, 0, len(pass.Files))
-	for _, n := range pass.Files {
-		nodes = append(nodes, n)
-	}
-
-	lintIssues, err := lnt.Run(pass.Fset, nodes...)
-	if err != nil {
-		return nil, fmt.Errorf("linter failed to run: %s", err)
-	}
-
-	var issues []goanalysis.Issue
-
-	for _, i := range lintIssues {
-		expectNoLint := false
-		var expectedNolintLinter string
-		if ii, ok := i.(nolintlint.UnusedCandidate); ok {
-			expectedNolintLinter = ii.ExpectedLinter
-			expectNoLint = true
-		}
-
-		issue := &result.Issue{
-			FromLinter:           NoLintLintName,
-			Text:                 i.Details(),
-			Pos:                  i.Position(),
-			ExpectNoLint:         expectNoLint,
-			ExpectedNoLintLinter: expectedNolintLinter,
-			Replacement:          i.Replacement(),
-		}
-
-		issues = append(issues, goanalysis.NewIssue(issue, pass))
-	}
-
-	return issues, nil
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/README.md b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal/README.md
similarity index 100%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/README.md
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal/README.md
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal/nolintlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal/nolintlint.go
new file mode 100644
index 0000000000000000000000000000000000000000..5fed41cfdfbb6cf788952cefde5259b67b4ae37b
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal/nolintlint.go
@@ -0,0 +1,304 @@
+// Package internal provides a linter to ensure that all //nolint directives are followed by explanations
+package internal
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"regexp"
+	"strings"
+	"unicode"
+
+	"github.com/golangci/golangci-lint/pkg/result"
+)
+
+type BaseIssue struct {
+	fullDirective                     string
+	directiveWithOptionalLeadingSpace string
+	position                          token.Position
+	replacement                       *result.Replacement
+}
+
+//nolint:gocritic // TODO(ldez) must be change in the future.
+func (b BaseIssue) Position() token.Position {
+	return b.position
+}
+
+//nolint:gocritic // TODO(ldez) must be change in the future.
+func (b BaseIssue) Replacement() *result.Replacement {
+	return b.replacement
+}
+
+type ExtraLeadingSpace struct {
+	BaseIssue
+}
+
+//nolint:gocritic // TODO(ldez) must be change in the future.
+func (i ExtraLeadingSpace) Details() string {
+	return fmt.Sprintf("directive `%s` should not have more than one leading space", i.fullDirective)
+}
+
+func (i ExtraLeadingSpace) String() string { return toString(i) }
+
+type NotMachine struct {
+	BaseIssue
+}
+
+//nolint:gocritic // TODO(ldez) must be change in the future.
+func (i NotMachine) Details() string {
+	expected := i.fullDirective[:2] + strings.TrimLeftFunc(i.fullDirective[2:], unicode.IsSpace)
+	return fmt.Sprintf("directive `%s` should be written without leading space as `%s`",
+		i.fullDirective, expected)
+}
+
+func (i NotMachine) String() string { return toString(i) }
+
+type NotSpecific struct {
+	BaseIssue
+}
+
+//nolint:gocritic // TODO(ldez) must be change in the future.
+func (i NotSpecific) Details() string {
+	return fmt.Sprintf("directive `%s` should mention specific linter such as `%s:my-linter`",
+		i.fullDirective, i.directiveWithOptionalLeadingSpace)
+}
+
+func (i NotSpecific) String() string { return toString(i) }
+
+type ParseError struct {
+	BaseIssue
+}
+
+//nolint:gocritic // TODO(ldez) must be change in the future.
+func (i ParseError) Details() string {
+	return fmt.Sprintf("directive `%s` should match `%s[:<comma-separated-linters>] [// <explanation>]`",
+		i.fullDirective,
+		i.directiveWithOptionalLeadingSpace)
+}
+
+func (i ParseError) String() string { return toString(i) }
+
+type NoExplanation struct {
+	BaseIssue
+	fullDirectiveWithoutExplanation string
+}
+
+//nolint:gocritic // TODO(ldez) must be change in the future.
+func (i NoExplanation) Details() string {
+	return fmt.Sprintf("directive `%s` should provide explanation such as `%s // this is why`",
+		i.fullDirective, i.fullDirectiveWithoutExplanation)
+}
+
+func (i NoExplanation) String() string { return toString(i) }
+
+type UnusedCandidate struct {
+	BaseIssue
+	ExpectedLinter string
+}
+
+//nolint:gocritic // TODO(ldez) must be change in the future.
+func (i UnusedCandidate) Details() string {
+	details := fmt.Sprintf("directive `%s` is unused", i.fullDirective)
+	if i.ExpectedLinter != "" {
+		details += fmt.Sprintf(" for linter %q", i.ExpectedLinter)
+	}
+	return details
+}
+
+func (i UnusedCandidate) String() string { return toString(i) }
+
+func toString(issue Issue) string {
+	return fmt.Sprintf("%s at %s", issue.Details(), issue.Position())
+}
+
+type Issue interface {
+	Details() string
+	Position() token.Position
+	String() string
+	Replacement() *result.Replacement
+}
+
+type Needs uint
+
+const (
+	NeedsMachineOnly Needs = 1 << iota
+	NeedsSpecific
+	NeedsExplanation
+	NeedsUnused
+	NeedsAll = NeedsMachineOnly | NeedsSpecific | NeedsExplanation
+)
+
+var commentPattern = regexp.MustCompile(`^//\s*(nolint)(:\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*)?\b`)
+
+// matches a complete nolint directive
+var fullDirectivePattern = regexp.MustCompile(`^//\s*nolint(?::(\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*))?\s*(//.*)?\s*\n?$`)
+
+type Linter struct {
+	needs           Needs // indicates which linter checks to perform
+	excludeByLinter map[string]bool
+}
+
+// NewLinter creates a linter that enforces that the provided directives fulfill the provided requirements
+func NewLinter(needs Needs, excludes []string) (*Linter, error) {
+	excludeByName := make(map[string]bool)
+	for _, e := range excludes {
+		excludeByName[e] = true
+	}
+
+	return &Linter{
+		needs:           needs | NeedsMachineOnly,
+		excludeByLinter: excludeByName,
+	}, nil
+}
+
+var (
+	leadingSpacePattern      = regexp.MustCompile(`^//(\s*)`)
+	trailingBlankExplanation = regexp.MustCompile(`\s*(//\s*)?$`)
+)
+
+//nolint:funlen,gocyclo // the function is going to be refactored in the future
+func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) {
+	var issues []Issue
+
+	for _, node := range nodes {
+		file, ok := node.(*ast.File)
+		if !ok {
+			continue
+		}
+
+		for _, c := range file.Comments {
+			for _, comment := range c.List {
+				if !commentPattern.MatchString(comment.Text) {
+					continue
+				}
+
+				// check for a space between the "//" and the directive
+				leadingSpaceMatches := leadingSpacePattern.FindStringSubmatch(comment.Text)
+
+				var leadingSpace string
+				if len(leadingSpaceMatches) > 0 {
+					leadingSpace = leadingSpaceMatches[1]
+				}
+
+				directiveWithOptionalLeadingSpace := "//"
+				if leadingSpace != "" {
+					directiveWithOptionalLeadingSpace += " "
+				}
+
+				split := strings.Split(strings.SplitN(comment.Text, ":", 2)[0], "//")
+				directiveWithOptionalLeadingSpace += strings.TrimSpace(split[1])
+
+				pos := fset.Position(comment.Pos())
+				end := fset.Position(comment.End())
+
+				base := BaseIssue{
+					fullDirective:                     comment.Text,
+					directiveWithOptionalLeadingSpace: directiveWithOptionalLeadingSpace,
+					position:                          pos,
+				}
+
+				// check for, report and eliminate leading spaces, so we can check for other issues
+				if leadingSpace != "" {
+					removeWhitespace := &result.Replacement{
+						Inline: &result.InlineFix{
+							StartCol:  pos.Column + 1,
+							Length:    len(leadingSpace),
+							NewString: "",
+						},
+					}
+					if (l.needs & NeedsMachineOnly) != 0 {
+						issue := NotMachine{BaseIssue: base}
+						issue.BaseIssue.replacement = removeWhitespace
+						issues = append(issues, issue)
+					} else if len(leadingSpace) > 1 {
+						issue := ExtraLeadingSpace{BaseIssue: base}
+						issue.BaseIssue.replacement = removeWhitespace
+						issue.BaseIssue.replacement.Inline.NewString = " " // assume a single space was intended
+						issues = append(issues, issue)
+					}
+				}
+
+				fullMatches := fullDirectivePattern.FindStringSubmatch(comment.Text)
+				if len(fullMatches) == 0 {
+					issues = append(issues, ParseError{BaseIssue: base})
+					continue
+				}
+
+				lintersText, explanation := fullMatches[1], fullMatches[2]
+
+				var linters []string
+				if lintersText != "" && !strings.HasPrefix(lintersText, "all") {
+					lls := strings.Split(lintersText, ",")
+					linters = make([]string, 0, len(lls))
+					rangeStart := (pos.Column - 1) + len("//") + len(leadingSpace) + len("nolint:")
+					for i, ll := range lls {
+						rangeEnd := rangeStart + len(ll)
+						if i < len(lls)-1 {
+							rangeEnd++ // include trailing comma
+						}
+						trimmedLinterName := strings.TrimSpace(ll)
+						if trimmedLinterName != "" {
+							linters = append(linters, trimmedLinterName)
+						}
+						rangeStart = rangeEnd
+					}
+				}
+
+				if (l.needs & NeedsSpecific) != 0 {
+					if len(linters) == 0 {
+						issues = append(issues, NotSpecific{BaseIssue: base})
+					}
+				}
+
+				// when detecting unused directives, we send all the directives through and filter them out in the nolint processor
+				if (l.needs & NeedsUnused) != 0 {
+					removeNolintCompletely := &result.Replacement{
+						Inline: &result.InlineFix{
+							StartCol:  pos.Column - 1,
+							Length:    end.Column - pos.Column,
+							NewString: "",
+						},
+					}
+
+					if len(linters) == 0 {
+						issue := UnusedCandidate{BaseIssue: base}
+						issue.replacement = removeNolintCompletely
+						issues = append(issues, issue)
+					} else {
+						for _, linter := range linters {
+							issue := UnusedCandidate{BaseIssue: base, ExpectedLinter: linter}
+							// only offer replacement if there is a single linter
+							// because of issues around commas and the possibility of all
+							// linters being removed
+							if len(linters) == 1 {
+								issue.replacement = removeNolintCompletely
+							}
+							issues = append(issues, issue)
+						}
+					}
+				}
+
+				if (l.needs&NeedsExplanation) != 0 && (explanation == "" || strings.TrimSpace(explanation) == "//") {
+					needsExplanation := len(linters) == 0 // if no linters are mentioned, we must have explanation
+					// otherwise, check if we are excluding all the mentioned linters
+					for _, ll := range linters {
+						if !l.excludeByLinter[ll] { // if a linter does require explanation
+							needsExplanation = true
+							break
+						}
+					}
+
+					if needsExplanation {
+						fullDirectiveWithoutExplanation := trailingBlankExplanation.ReplaceAllString(comment.Text, "")
+						issues = append(issues, NoExplanation{
+							BaseIssue:                       base,
+							fullDirectiveWithoutExplanation: fullDirectiveWithoutExplanation,
+						})
+					}
+				}
+			}
+		}
+	}
+
+	return issues, nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go
index 9c6b10f38c5015728fbccc02a9d18dbec58187cb..9f04454a5a2b7ea1ad340746c9ee90160ce98e6e 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go
@@ -1,307 +1,103 @@
-// Package nolintlint provides a linter to ensure that all //nolint directives are followed by explanations
 package nolintlint
 
 import (
 	"fmt"
 	"go/ast"
-	"go/token"
-	"regexp"
-	"strings"
-	"unicode"
+	"sync"
 
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal"
+	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-type BaseIssue struct {
-	fullDirective                     string
-	directiveWithOptionalLeadingSpace string
-	position                          token.Position
-	replacement                       *result.Replacement
-}
-
-//nolint:gocritic // TODO must be change in the future.
-func (b BaseIssue) Position() token.Position {
-	return b.position
-}
-
-//nolint:gocritic // TODO must be change in the future.
-func (b BaseIssue) Replacement() *result.Replacement {
-	return b.replacement
-}
-
-type ExtraLeadingSpace struct {
-	BaseIssue
-}
-
-//nolint:gocritic // TODO must be change in the future.
-func (i ExtraLeadingSpace) Details() string {
-	return fmt.Sprintf("directive `%s` should not have more than one leading space", i.fullDirective)
-}
-
-//nolint:gocritic // TODO must be change in the future.
-func (i ExtraLeadingSpace) String() string { return toString(i) }
-
-type NotMachine struct {
-	BaseIssue
-}
-
-//nolint:gocritic // TODO must be change in the future.
-func (i NotMachine) Details() string {
-	expected := i.fullDirective[:2] + strings.TrimLeftFunc(i.fullDirective[2:], unicode.IsSpace)
-	return fmt.Sprintf("directive `%s` should be written without leading space as `%s`",
-		i.fullDirective, expected)
-}
-
-//nolint:gocritic // TODO must be change in the future.
-func (i NotMachine) String() string { return toString(i) }
+const LinterName = "nolintlint"
 
-type NotSpecific struct {
-	BaseIssue
-}
-
-//nolint:gocritic // TODO must be change in the future.
-func (i NotSpecific) Details() string {
-	return fmt.Sprintf("directive `%s` should mention specific linter such as `%s:my-linter`",
-		i.fullDirective, i.directiveWithOptionalLeadingSpace)
-}
-
-//nolint:gocritic // TODO must be change in the future.
-func (i NotSpecific) String() string { return toString(i) }
-
-type ParseError struct {
-	BaseIssue
-}
-
-//nolint:gocritic // TODO must be change in the future.
-func (i ParseError) Details() string {
-	return fmt.Sprintf("directive `%s` should match `%s[:<comma-separated-linters>] [// <explanation>]`",
-		i.fullDirective,
-		i.directiveWithOptionalLeadingSpace)
-}
+func New(settings *config.NoLintLintSettings) *goanalysis.Linter {
+	var mu sync.Mutex
+	var resIssues []goanalysis.Issue
 
-//nolint:gocritic // TODO must be change in the future.
-func (i ParseError) String() string { return toString(i) }
-
-type NoExplanation struct {
-	BaseIssue
-	fullDirectiveWithoutExplanation string
-}
-
-//nolint:gocritic // TODO must be change in the future.
-func (i NoExplanation) Details() string {
-	return fmt.Sprintf("directive `%s` should provide explanation such as `%s // this is why`",
-		i.fullDirective, i.fullDirectiveWithoutExplanation)
-}
+	analyzer := &analysis.Analyzer{
+		Name: LinterName,
+		Doc:  goanalysis.TheOnlyanalyzerDoc,
+		Run: func(pass *analysis.Pass) (any, error) {
+			issues, err := runNoLintLint(pass, settings)
+			if err != nil {
+				return nil, err
+			}
 
-//nolint:gocritic // TODO must be change in the future.
-func (i NoExplanation) String() string { return toString(i) }
+			if len(issues) == 0 {
+				return nil, nil
+			}
 
-type UnusedCandidate struct {
-	BaseIssue
-	ExpectedLinter string
-}
+			mu.Lock()
+			resIssues = append(resIssues, issues...)
+			mu.Unlock()
 
-//nolint:gocritic // TODO must be change in the future.
-func (i UnusedCandidate) Details() string {
-	details := fmt.Sprintf("directive `%s` is unused", i.fullDirective)
-	if i.ExpectedLinter != "" {
-		details += fmt.Sprintf(" for linter %q", i.ExpectedLinter)
+			return nil, nil
+		},
 	}
-	return details
-}
 
-//nolint:gocritic // TODO must be change in the future.
-func (i UnusedCandidate) String() string { return toString(i) }
-
-func toString(i Issue) string {
-	return fmt.Sprintf("%s at %s", i.Details(), i.Position())
-}
-
-type Issue interface {
-	Details() string
-	Position() token.Position
-	String() string
-	Replacement() *result.Replacement
+	return goanalysis.NewLinter(
+		LinterName,
+		"Reports ill-formed or insufficient nolint directives",
+		[]*analysis.Analyzer{analyzer},
+		nil,
+	).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
+		return resIssues
+	}).WithLoadMode(goanalysis.LoadModeSyntax)
 }
 
-type Needs uint
-
-const (
-	NeedsMachineOnly Needs = 1 << iota
-	NeedsSpecific
-	NeedsExplanation
-	NeedsUnused
-	NeedsAll = NeedsMachineOnly | NeedsSpecific | NeedsExplanation
-)
-
-var commentPattern = regexp.MustCompile(`^//\s*(nolint)(:\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*)?\b`)
-
-// matches a complete nolint directive
-var fullDirectivePattern = regexp.MustCompile(`^//\s*nolint(?::(\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*))?\s*(//.*)?\s*\n?$`)
-
-type Linter struct {
-	needs           Needs // indicates which linter checks to perform
-	excludeByLinter map[string]bool
-}
+func runNoLintLint(pass *analysis.Pass, settings *config.NoLintLintSettings) ([]goanalysis.Issue, error) {
+	var needs internal.Needs
+	if settings.RequireExplanation {
+		needs |= internal.NeedsExplanation
+	}
+	if settings.RequireSpecific {
+		needs |= internal.NeedsSpecific
+	}
+	if !settings.AllowUnused {
+		needs |= internal.NeedsUnused
+	}
 
-// NewLinter creates a linter that enforces that the provided directives fulfill the provided requirements
-func NewLinter(needs Needs, excludes []string) (*Linter, error) {
-	excludeByName := make(map[string]bool)
-	for _, e := range excludes {
-		excludeByName[e] = true
+	lnt, err := internal.NewLinter(needs, settings.AllowNoExplanation)
+	if err != nil {
+		return nil, err
 	}
 
-	return &Linter{
-		needs:           needs | NeedsMachineOnly,
-		excludeByLinter: excludeByName,
-	}, nil
-}
+	nodes := make([]ast.Node, 0, len(pass.Files))
+	for _, n := range pass.Files {
+		nodes = append(nodes, n)
+	}
 
-var leadingSpacePattern = regexp.MustCompile(`^//(\s*)`)
-var trailingBlankExplanation = regexp.MustCompile(`\s*(//\s*)?$`)
+	lintIssues, err := lnt.Run(pass.Fset, nodes...)
+	if err != nil {
+		return nil, fmt.Errorf("linter failed to run: %w", err)
+	}
 
-//nolint:funlen,gocyclo
-func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) {
-	var issues []Issue
+	var issues []goanalysis.Issue
 
-	for _, node := range nodes {
-		file, ok := node.(*ast.File)
-		if !ok {
-			continue
+	for _, i := range lintIssues {
+		expectNoLint := false
+		var expectedNolintLinter string
+		if ii, ok := i.(internal.UnusedCandidate); ok {
+			expectedNolintLinter = ii.ExpectedLinter
+			expectNoLint = true
 		}
 
-		for _, c := range file.Comments {
-			for _, comment := range c.List {
-				if !commentPattern.MatchString(comment.Text) {
-					continue
-				}
-
-				// check for a space between the "//" and the directive
-				leadingSpaceMatches := leadingSpacePattern.FindStringSubmatch(comment.Text)
-
-				var leadingSpace string
-				if len(leadingSpaceMatches) > 0 {
-					leadingSpace = leadingSpaceMatches[1]
-				}
-
-				directiveWithOptionalLeadingSpace := "//"
-				if len(leadingSpace) > 0 {
-					directiveWithOptionalLeadingSpace += " "
-				}
-
-				split := strings.Split(strings.SplitN(comment.Text, ":", 2)[0], "//")
-				directiveWithOptionalLeadingSpace += strings.TrimSpace(split[1])
-
-				pos := fset.Position(comment.Pos())
-				end := fset.Position(comment.End())
-
-				base := BaseIssue{
-					fullDirective:                     comment.Text,
-					directiveWithOptionalLeadingSpace: directiveWithOptionalLeadingSpace,
-					position:                          pos,
-				}
-
-				// check for, report and eliminate leading spaces, so we can check for other issues
-				if len(leadingSpace) > 0 {
-					removeWhitespace := &result.Replacement{
-						Inline: &result.InlineFix{
-							StartCol:  pos.Column + 1,
-							Length:    len(leadingSpace),
-							NewString: "",
-						},
-					}
-					if (l.needs & NeedsMachineOnly) != 0 {
-						issue := NotMachine{BaseIssue: base}
-						issue.BaseIssue.replacement = removeWhitespace
-						issues = append(issues, issue)
-					} else if len(leadingSpace) > 1 {
-						issue := ExtraLeadingSpace{BaseIssue: base}
-						issue.BaseIssue.replacement = removeWhitespace
-						issue.BaseIssue.replacement.Inline.NewString = " " // assume a single space was intended
-						issues = append(issues, issue)
-					}
-				}
-
-				fullMatches := fullDirectivePattern.FindStringSubmatch(comment.Text)
-				if len(fullMatches) == 0 {
-					issues = append(issues, ParseError{BaseIssue: base})
-					continue
-				}
-
-				lintersText, explanation := fullMatches[1], fullMatches[2]
-
-				var linters []string
-				if len(lintersText) > 0 && !strings.HasPrefix(lintersText, "all") {
-					lls := strings.Split(lintersText, ",")
-					linters = make([]string, 0, len(lls))
-					rangeStart := (pos.Column - 1) + len("//") + len(leadingSpace) + len("nolint:")
-					for i, ll := range lls {
-						rangeEnd := rangeStart + len(ll)
-						if i < len(lls)-1 {
-							rangeEnd++ // include trailing comma
-						}
-						trimmedLinterName := strings.TrimSpace(ll)
-						if trimmedLinterName != "" {
-							linters = append(linters, trimmedLinterName)
-						}
-						rangeStart = rangeEnd
-					}
-				}
-
-				if (l.needs & NeedsSpecific) != 0 {
-					if len(linters) == 0 {
-						issues = append(issues, NotSpecific{BaseIssue: base})
-					}
-				}
-
-				// when detecting unused directives, we send all the directives through and filter them out in the nolint processor
-				if (l.needs & NeedsUnused) != 0 {
-					removeNolintCompletely := &result.Replacement{
-						Inline: &result.InlineFix{
-							StartCol:  pos.Column - 1,
-							Length:    end.Column - pos.Column,
-							NewString: "",
-						},
-					}
-
-					if len(linters) == 0 {
-						issue := UnusedCandidate{BaseIssue: base}
-						issue.replacement = removeNolintCompletely
-						issues = append(issues, issue)
-					} else {
-						for _, linter := range linters {
-							issue := UnusedCandidate{BaseIssue: base, ExpectedLinter: linter}
-							// only offer replacement if there is a single linter
-							// because of issues around commas and the possibility of all
-							// linters being removed
-							if len(linters) == 1 {
-								issue.replacement = removeNolintCompletely
-							}
-							issues = append(issues, issue)
-						}
-					}
-				}
-
-				if (l.needs&NeedsExplanation) != 0 && (explanation == "" || strings.TrimSpace(explanation) == "//") {
-					needsExplanation := len(linters) == 0 // if no linters are mentioned, we must have explanation
-					// otherwise, check if we are excluding all the mentioned linters
-					for _, ll := range linters {
-						if !l.excludeByLinter[ll] { // if a linter does require explanation
-							needsExplanation = true
-							break
-						}
-					}
-
-					if needsExplanation {
-						fullDirectiveWithoutExplanation := trailingBlankExplanation.ReplaceAllString(comment.Text, "")
-						issues = append(issues, NoExplanation{
-							BaseIssue:                       base,
-							fullDirectiveWithoutExplanation: fullDirectiveWithoutExplanation,
-						})
-					}
-				}
-			}
+		issue := &result.Issue{
+			FromLinter:           LinterName,
+			Text:                 i.Details(),
+			Pos:                  i.Position(),
+			ExpectNoLint:         expectNoLint,
+			ExpectedNoLintLinter: expectedNolintLinter,
+			Replacement:          i.Replacement(),
 		}
+
+		issues = append(issues, goanalysis.NewIssue(issue, pass))
 	}
 
 	return issues, nil
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns/nonamedreturns.go
similarity index 74%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns/nonamedreturns.go
index 7856f6d613ef26336afb7753cd2ce16cf991c33f..42a618e64154af9c76373b49cd698efd469ced1c 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns/nonamedreturns.go
@@ -1,14 +1,14 @@
-package golinters
+package nonamedreturns
 
 import (
 	"github.com/firefart/nonamedreturns/analyzer"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewNoNamedReturns(settings *config.NoNamedReturnsSettings) *goanalysis.Linter {
+func New(settings *config.NoNamedReturnsSettings) *goanalysis.Linter {
 	a := analyzer.Analyzer
 
 	var cfg map[string]map[string]any
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosnakecase.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosnakecase.go
deleted file mode 100644
index 26d5d6d4c84f17f91bb1e9f7f23b901e2dea40e8..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosnakecase.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package golinters
-
-import (
-	"github.com/sivchari/nosnakecase"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewNoSnakeCase() *goanalysis.Linter {
-	a := nosnakecase.Analyzer
-
-	return goanalysis.NewLinter(
-		a.Name,
-		a.Doc,
-		[]*analysis.Analyzer{a},
-		nil,
-	).WithLoadMode(goanalysis.LoadModeSyntax)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport/nosprintfhostport.go
similarity index 66%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport/nosprintfhostport.go
index a63b9bb5f567cb5baf02e37a26248a8f86b4dd29..8f06ae1f6d19b5b84851ec462a3928f57ccb92d0 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport/nosprintfhostport.go
@@ -1,13 +1,13 @@
-package golinters
+package nosprintfhostport
 
 import (
 	"github.com/stbenjam/no-sprintf-host-port/pkg/analyzer"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewNoSprintfHostPort() *goanalysis.Linter {
+func New() *goanalysis.Linter {
 	a := analyzer.Analyzer
 
 	return goanalysis.NewLinter(
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go
deleted file mode 100644
index 4c03952c10a2017ef50d99733fd8c483839c06d2..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package golinters
-
-import (
-	"github.com/kunwardeep/paralleltest/pkg/paralleltest"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewParallelTest(settings *config.ParallelTestSettings) *goanalysis.Linter {
-	a := paralleltest.NewAnalyzer()
-
-	var cfg map[string]map[string]any
-	if settings != nil {
-		cfg = map[string]map[string]any{
-			a.Name: {
-				"i":                     settings.IgnoreMissing,
-				"ignoremissingsubtests": settings.IgnoreMissingSubtests,
-			},
-		}
-	}
-
-	return goanalysis.NewLinter(
-		"paralleltest",
-		"paralleltest detects missing usage of t.Parallel() method in your Go test",
-		[]*analysis.Analyzer{a},
-		cfg,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest/paralleltest.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest/paralleltest.go
new file mode 100644
index 0000000000000000000000000000000000000000..0c908fa38ffbd168d0169c38051b6397905214a7
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest/paralleltest.go
@@ -0,0 +1,34 @@
+package paralleltest
+
+import (
+	"github.com/kunwardeep/paralleltest/pkg/paralleltest"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.ParallelTestSettings) *goanalysis.Linter {
+	a := paralleltest.NewAnalyzer()
+
+	var cfg map[string]map[string]any
+	if settings != nil {
+		d := map[string]any{
+			"i":                     settings.IgnoreMissing,
+			"ignoremissingsubtests": settings.IgnoreMissingSubtests,
+		}
+
+		if config.IsGoGreaterThanOrEqual(settings.Go, "1.22") {
+			d["ignoreloopVar"] = true
+		}
+
+		cfg = map[string]map[string]any{a.Name: d}
+	}
+
+	return goanalysis.NewLinter(
+		a.Name,
+		"Detects missing usage of t.Parallel() method in your Go test",
+		[]*analysis.Analyzer{a},
+		cfg,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/perfsprint/perfsprint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/perfsprint/perfsprint.go
new file mode 100644
index 0000000000000000000000000000000000000000..a4ead1914d2cdec7e574e0d46832bd2884b5a2c1
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/perfsprint/perfsprint.go
@@ -0,0 +1,32 @@
+package perfsprint
+
+import (
+	"github.com/catenacyber/perfsprint/analyzer"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.PerfSprintSettings) *goanalysis.Linter {
+	a := analyzer.New()
+
+	cfg := map[string]map[string]any{
+		a.Name: {"fiximports": false},
+	}
+
+	if settings != nil {
+		cfg[a.Name]["int-conversion"] = settings.IntConversion
+		cfg[a.Name]["err-error"] = settings.ErrError
+		cfg[a.Name]["errorf"] = settings.ErrorF
+		cfg[a.Name]["sprintf1"] = settings.SprintF1
+		cfg[a.Name]["strconcat"] = settings.StrConcat
+	}
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		cfg,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc/prealloc.go
similarity index 76%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc/prealloc.go
index f48d57562ef2a1773947b20b6436e8d51ce09283..ce7ff9d59cb5ea5aafc9ac4069382f83b1b8deb7 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc/prealloc.go
@@ -1,4 +1,4 @@
-package golinters
+package prealloc
 
 import (
 	"fmt"
@@ -8,20 +8,20 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const preallocName = "prealloc"
+const linterName = "prealloc"
 
-//nolint:dupl
-func NewPreAlloc(settings *config.PreallocSettings) *goanalysis.Linter {
+func New(settings *config.PreallocSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: preallocName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			issues := runPreAlloc(pass, settings)
@@ -39,7 +39,7 @@ func NewPreAlloc(settings *config.PreallocSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		preallocName,
+		linterName,
 		"Finds slice declarations that could potentially be pre-allocated",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -56,8 +56,8 @@ func runPreAlloc(pass *analysis.Pass, settings *config.PreallocSettings) []goana
 	for _, hint := range hints {
 		issues = append(issues, goanalysis.NewIssue(&result.Issue{
 			Pos:        pass.Fset.Position(hint.Pos),
-			Text:       fmt.Sprintf("Consider pre-allocating %s", formatCode(hint.DeclaredSliceName, nil)),
-			FromLinter: preallocName,
+			Text:       fmt.Sprintf("Consider pre-allocating %s", internal.FormatCode(hint.DeclaredSliceName, nil)),
+			FromLinter: linterName,
 		}, pass))
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared/predeclared.go
similarity index 76%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared/predeclared.go
index d3c25e274bb6fe1cea5cfe8778011a61c584b6e1..b8d189fd557e0c0961b4c29d0c5c543b70ca5d82 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared/predeclared.go
@@ -1,14 +1,14 @@
-package golinters
+package predeclared
 
 import (
 	"github.com/nishanths/predeclared/passes/predeclared"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewPredeclared(settings *config.PredeclaredSettings) *goanalysis.Linter {
+func New(settings *config.PredeclaredSettings) *goanalysis.Linter {
 	a := predeclared.Analyzer
 
 	var cfg map[string]map[string]any
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter/promlinter.go
similarity index 85%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter/promlinter.go
index 381c57489d0e84217cd97a2c7527c01146017d33..5decbbc7c3de4a59e0181581b11f29fe4c85f08e 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter/promlinter.go
@@ -1,4 +1,4 @@
-package golinters
+package promlinter
 
 import (
 	"fmt"
@@ -8,14 +8,14 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const promlinterName = "promlinter"
+const linterName = "promlinter"
 
-func NewPromlinter(settings *config.PromlinterSettings) *goanalysis.Linter {
+func New(settings *config.PromlinterSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
@@ -28,7 +28,7 @@ func NewPromlinter(settings *config.PromlinterSettings) *goanalysis.Linter {
 	}
 
 	analyzer := &analysis.Analyzer{
-		Name: promlinterName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
 			issues := runPromLinter(pass, promSettings)
@@ -46,7 +46,7 @@ func NewPromlinter(settings *config.PromlinterSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		promlinterName,
+		linterName,
 		"Check Prometheus metrics naming via promlint",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -67,7 +67,7 @@ func runPromLinter(pass *analysis.Pass, promSettings promlinter.Setting) []goana
 		issue := result.Issue{
 			Pos:        i.Pos,
 			Text:       fmt.Sprintf("Metric: %s Error: %s", i.Metric, i.Text),
-			FromLinter: promlinterName,
+			FromLinter: linterName,
 		}
 
 		issues[k] = goanalysis.NewIssue(&issue, pass)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/protogetter/protogetter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/protogetter/protogetter.go
new file mode 100644
index 0000000000000000000000000000000000000000..302ce67b88f480390112b70e59798d62b43f0eb3
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/protogetter/protogetter.go
@@ -0,0 +1,74 @@
+package protogetter
+
+import (
+	"sync"
+
+	"github.com/ghostiam/protogetter"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/lint/linter"
+	"github.com/golangci/golangci-lint/pkg/result"
+)
+
+func New(settings *config.ProtoGetterSettings) *goanalysis.Linter {
+	var mu sync.Mutex
+	var resIssues []goanalysis.Issue
+
+	var cfg protogetter.Config
+	if settings != nil {
+		cfg = protogetter.Config{
+			SkipGeneratedBy:         settings.SkipGeneratedBy,
+			SkipFiles:               settings.SkipFiles,
+			SkipAnyGenerated:        settings.SkipAnyGenerated,
+			ReplaceFirstArgInAppend: settings.ReplaceFirstArgInAppend,
+		}
+	}
+	cfg.Mode = protogetter.GolangciLintMode
+
+	a := protogetter.NewAnalyzer(&cfg)
+	a.Run = func(pass *analysis.Pass) (any, error) {
+		pgIssues, err := protogetter.Run(pass, &cfg)
+		if err != nil {
+			return nil, err
+		}
+
+		issues := make([]goanalysis.Issue, len(pgIssues))
+		for i, issue := range pgIssues {
+			report := &result.Issue{
+				FromLinter: a.Name,
+				Pos:        issue.Pos,
+				Text:       issue.Message,
+				Replacement: &result.Replacement{
+					Inline: &result.InlineFix{
+						StartCol:  issue.InlineFix.StartCol,
+						Length:    issue.InlineFix.Length,
+						NewString: issue.InlineFix.NewString,
+					},
+				},
+			}
+
+			issues[i] = goanalysis.NewIssue(report, pass)
+		}
+
+		if len(issues) == 0 {
+			return nil, nil
+		}
+
+		mu.Lock()
+		resIssues = append(resIssues, issues...)
+		mu.Unlock()
+
+		return nil, nil
+	}
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
+		return resIssues
+	}).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/reassign.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/reassign/reassign.go
similarity index 78%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/reassign.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/reassign/reassign.go
index a6dd6705308f837904028385e5572ef845603448..cfc85635e2be15161d65b6aff0e8a7b5b2458ad0 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/reassign.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/reassign/reassign.go
@@ -1,4 +1,4 @@
-package golinters
+package reassign
 
 import (
 	"fmt"
@@ -8,10 +8,10 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewReassign(settings *config.ReassignSettings) *goanalysis.Linter {
+func New(settings *config.ReassignSettings) *goanalysis.Linter {
 	a := reassign.NewAnalyzer()
 
 	var cfg map[string]map[string]any
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go
similarity index 88%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go
index b57566e7aff24b9c42a1e16cea0ab39bf8e56588..da44d92414681b213282673134125c396277be72 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go
@@ -1,4 +1,4 @@
-package golinters
+package revive
 
 import (
 	"bytes"
@@ -16,15 +16,16 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const reviveName = "revive"
+const linterName = "revive"
 
-var reviveDebugf = logutils.Debug(logutils.DebugKeyRevive)
+var debugf = logutils.Debug(logutils.DebugKeyRevive)
 
 // jsonObject defines a JSON object of a failure
 type jsonObject struct {
@@ -32,10 +33,7 @@ type jsonObject struct {
 	lint.Failure `json:",inline"`
 }
 
-// NewRevive returns a new Revive linter.
-//
-//nolint:dupl
-func NewRevive(settings *config.ReviveSettings) *goanalysis.Linter {
+func New(settings *config.ReviveSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
@@ -46,7 +44,7 @@ func NewRevive(settings *config.ReviveSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		reviveName,
+		linterName,
 		"Fast, configurable, extensible, flexible, and beautiful linter for Go. Drop-in replacement of golint.",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -73,7 +71,7 @@ func NewRevive(settings *config.ReviveSettings) *goanalysis.Linter {
 }
 
 func runRevive(lintCtx *linter.Context, pass *analysis.Pass, settings *config.ReviveSettings) ([]goanalysis.Issue, error) {
-	packages := [][]string{getFileNames(pass)}
+	packages := [][]string{internal.GetFileNames(pass)}
 
 	conf, err := getReviveConfig(settings)
 	if err != nil {
@@ -153,14 +151,15 @@ func reviveToIssue(pass *analysis.Pass, object *jsonObject) goanalysis.Issue {
 			From: object.Position.Start.Line,
 			To:   lineRangeTo,
 		},
-		FromLinter: reviveName,
+		FromLinter: linterName,
 	}, pass)
 }
 
 // This function mimics the GetConfig function of revive.
 // This allows to get default values and right types.
 // https://github.com/golangci/golangci-lint/issues/1745
-// https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L182
+// https://github.com/mgechev/revive/blob/v1.3.7/config/config.go#L217
+// https://github.com/mgechev/revive/blob/v1.3.7/config/config.go#L169-L174
 func getReviveConfig(cfg *config.ReviveSettings) (*lint.Config, error) {
 	conf := defaultConfig()
 
@@ -182,7 +181,15 @@ func getReviveConfig(cfg *config.ReviveSettings) (*lint.Config, error) {
 
 	normalizeConfig(conf)
 
-	reviveDebugf("revive configuration: %#v", conf)
+	for k, r := range conf.Rules {
+		err := r.Initialize()
+		if err != nil {
+			return nil, fmt.Errorf("error in config of rule %q: %w", k, err)
+		}
+		conf.Rules[k] = r
+	}
+
+	debugf("revive configuration: %#v", conf)
 
 	return conf, nil
 }
@@ -214,6 +221,7 @@ func createConfigMap(cfg *config.ReviveSettings) map[string]any {
 			"severity":  s.Severity,
 			"arguments": safeTomlSlice(s.Arguments),
 			"disabled":  s.Disabled,
+			"exclude":   s.Exclude,
 		}
 	}
 
@@ -247,7 +255,7 @@ func safeTomlSlice(r []any) []any {
 }
 
 // This element is not exported by revive, so we need copy the code.
-// Extracted from https://github.com/mgechev/revive/blob/v1.3.0/config/config.go#L15
+// Extracted from https://github.com/mgechev/revive/blob/v1.3.7/config/config.go#L15
 var defaultRules = []lint.Rule{
 	&rule.VarDeclarationsRule{},
 	&rule.PackageCommentsRule{},
@@ -267,7 +275,6 @@ var defaultRules = []lint.Rule{
 	&rule.TimeNamingRule{},
 	&rule.ContextKeysType{},
 	&rule.ContextAsArgumentRule{},
-	&rule.IfReturnRule{},
 	&rule.EmptyBlockRule{},
 	&rule.SuperfluousElseRule{},
 	&rule.UnusedParamRule{},
@@ -291,7 +298,7 @@ var allRules = append([]lint.Rule{
 	&rule.ModifiesValRecRule{},
 	&rule.ConstantLogicalExprRule{},
 	&rule.BoolLiteralRule{},
-	&rule.ImportsBlacklistRule{},
+	&rule.ImportsBlocklistRule{},
 	&rule.FunctionResultsLimitRule{},
 	&rule.MaxPublicStructsRule{},
 	&rule.RangeValInClosureRule{},
@@ -317,12 +324,20 @@ var allRules = append([]lint.Rule{
 	&rule.FunctionLength{},
 	&rule.NestedStructs{},
 	&rule.UselessBreak{},
+	&rule.UncheckedTypeAssertionRule{},
 	&rule.TimeEqualRule{},
 	&rule.BannedCharsRule{},
 	&rule.OptimizeOperandsOrderRule{},
 	&rule.UseAnyRule{},
 	&rule.DataRaceRule{},
 	&rule.CommentSpacingsRule{},
+	&rule.IfReturnRule{},
+	&rule.RedundantImportAlias{},
+	&rule.ImportAliasNamingRule{},
+	&rule.EnforceMapStyleRule{},
+	&rule.EnforceRepeatedArgTypeStyleRule{},
+	&rule.EnforceSliceStyleRule{},
+	&rule.MaxControlNestingRule{},
 }, defaultRules...)
 
 const defaultConfidence = 0.8
@@ -345,8 +360,8 @@ func normalizeConfig(cfg *lint.Config) {
 	}
 	if cfg.EnableAllRules {
 		// Add to the configuration all rules not yet present in it
-		for _, rule := range allRules {
-			ruleName := rule.Name()
+		for _, r := range allRules {
+			ruleName := r.Name()
 			_, alreadyInConf := cfg.Rules[ruleName]
 			if alreadyInConf {
 				continue
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck.go
deleted file mode 100644
index 5a66d62e7dd8eb0fce82c211b68bf4fab5f4dfbe..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck.go
+++ /dev/null
@@ -1,25 +0,0 @@
-package golinters
-
-import (
-	"github.com/jingyugao/rowserrcheck/passes/rowserr"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewRowsErrCheck(settings *config.RowsErrCheckSettings) *goanalysis.Linter {
-	var pkgs []string
-	if settings != nil {
-		pkgs = settings.Packages
-	}
-
-	analyzer := rowserr.NewAnalyzer(pkgs...)
-
-	return goanalysis.NewLinter(
-		"rowserrcheck",
-		"checks whether Err of rows is checked successfully",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck/rowserrcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck/rowserrcheck.go
new file mode 100644
index 0000000000000000000000000000000000000000..3fe824467341ab3fd69a01486ae851fc2efb3033
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck/rowserrcheck.go
@@ -0,0 +1,25 @@
+package rowserrcheck
+
+import (
+	"github.com/jingyugao/rowserrcheck/passes/rowserr"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.RowsErrCheckSettings) *goanalysis.Linter {
+	var pkgs []string
+	if settings != nil {
+		pkgs = settings.Packages
+	}
+
+	a := rowserr.NewAnalyzer(pkgs...)
+
+	return goanalysis.NewLinter(
+		a.Name,
+		"checks whether Rows.Err of rows is checked successfully",
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go
deleted file mode 100644
index e6ef15ede049c0c1454aa17c79b06c297884d9cd..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go
+++ /dev/null
@@ -1,190 +0,0 @@
-package golinters
-
-import (
-	"fmt"
-	"go/ast"
-	"go/token"
-	"sync"
-
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-const scopelintName = "scopelint"
-
-//nolint:dupl
-func NewScopelint() *goanalysis.Linter {
-	var mu sync.Mutex
-	var resIssues []goanalysis.Issue
-
-	analyzer := &analysis.Analyzer{
-		Name: scopelintName,
-		Doc:  goanalysis.TheOnlyanalyzerDoc,
-		Run: func(pass *analysis.Pass) (any, error) {
-			issues := runScopeLint(pass)
-
-			if len(issues) == 0 {
-				return nil, nil
-			}
-
-			mu.Lock()
-			resIssues = append(resIssues, issues...)
-			mu.Unlock()
-
-			return nil, nil
-		},
-	}
-
-	return goanalysis.NewLinter(
-		scopelintName,
-		"Scopelint checks for unpinned variables in go programs",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
-		return resIssues
-	}).WithLoadMode(goanalysis.LoadModeSyntax)
-}
-
-func runScopeLint(pass *analysis.Pass) []goanalysis.Issue {
-	var lintIssues []result.Issue
-
-	for _, file := range pass.Files {
-		n := Node{
-			fset:          pass.Fset,
-			DangerObjects: map[*ast.Object]int{},
-			UnsafeObjects: map[*ast.Object]int{},
-			SkipFuncs:     map[*ast.FuncLit]int{},
-			issues:        &lintIssues,
-		}
-		ast.Walk(&n, file)
-	}
-
-	var issues []goanalysis.Issue
-	for i := range lintIssues {
-		issues = append(issues, goanalysis.NewIssue(&lintIssues[i], pass))
-	}
-
-	return issues
-}
-
-// The code below is copy-pasted from https://github.com/kyoh86/scopelint 92cbe2cc9276abda0e309f52cc9e309d407f174e
-
-// Node represents a Node being linted.
-type Node struct {
-	fset          *token.FileSet
-	DangerObjects map[*ast.Object]int
-	UnsafeObjects map[*ast.Object]int
-	SkipFuncs     map[*ast.FuncLit]int
-	issues        *[]result.Issue
-}
-
-// Visit method is invoked for each node encountered by Walk.
-// If the result visitor w is not nil, Walk visits each of the children
-// of node with the visitor w, followed by a call of w.Visit(nil).
-//
-//nolint:gocyclo,gocritic
-func (f *Node) Visit(node ast.Node) ast.Visitor {
-	switch typedNode := node.(type) {
-	case *ast.ForStmt:
-		switch init := typedNode.Init.(type) {
-		case *ast.AssignStmt:
-			for _, lh := range init.Lhs {
-				switch tlh := lh.(type) {
-				case *ast.Ident:
-					f.UnsafeObjects[tlh.Obj] = 0
-				}
-			}
-		}
-
-	case *ast.RangeStmt:
-		// Memory variables declared in range statement
-		switch k := typedNode.Key.(type) {
-		case *ast.Ident:
-			f.UnsafeObjects[k.Obj] = 0
-		}
-		switch v := typedNode.Value.(type) {
-		case *ast.Ident:
-			f.UnsafeObjects[v.Obj] = 0
-		}
-
-	case *ast.UnaryExpr:
-		if typedNode.Op == token.AND {
-			switch ident := typedNode.X.(type) {
-			case *ast.Ident:
-				if _, unsafe := f.UnsafeObjects[ident.Obj]; unsafe {
-					f.errorf(ident, "Using a reference for the variable on range scope %s", formatCode(ident.Name, nil))
-				}
-			}
-		}
-
-	case *ast.Ident:
-		if _, obj := f.DangerObjects[typedNode.Obj]; obj {
-			// It is the naked variable in scope of range statement.
-			f.errorf(node, "Using the variable on range scope %s in function literal", formatCode(typedNode.Name, nil))
-			break
-		}
-
-	case *ast.CallExpr:
-		// Ignore func literals that'll be called immediately.
-		switch funcLit := typedNode.Fun.(type) {
-		case *ast.FuncLit:
-			f.SkipFuncs[funcLit] = 0
-		}
-
-	case *ast.FuncLit:
-		if _, skip := f.SkipFuncs[typedNode]; !skip {
-			dangers := map[*ast.Object]int{}
-			for d := range f.DangerObjects {
-				dangers[d] = 0
-			}
-			for u := range f.UnsafeObjects {
-				dangers[u] = 0
-				f.UnsafeObjects[u]++
-			}
-			return &Node{
-				fset:          f.fset,
-				DangerObjects: dangers,
-				UnsafeObjects: f.UnsafeObjects,
-				SkipFuncs:     f.SkipFuncs,
-				issues:        f.issues,
-			}
-		}
-
-	case *ast.ReturnStmt:
-		unsafe := map[*ast.Object]int{}
-		for u := range f.UnsafeObjects {
-			if f.UnsafeObjects[u] == 0 {
-				continue
-			}
-			unsafe[u] = f.UnsafeObjects[u]
-		}
-		return &Node{
-			fset:          f.fset,
-			DangerObjects: f.DangerObjects,
-			UnsafeObjects: unsafe,
-			SkipFuncs:     f.SkipFuncs,
-			issues:        f.issues,
-		}
-	}
-	return f
-}
-
-// The variadic arguments may start with link and category types,
-// and must end with a format string and any arguments.
-//
-//nolint:interfacer
-func (f *Node) errorf(n ast.Node, format string, args ...any) {
-	pos := f.fset.Position(n.Pos())
-	f.errorAtf(pos, format, args...)
-}
-
-func (f *Node) errorAtf(pos token.Position, format string, args ...any) {
-	*f.issues = append(*f.issues, result.Issue{
-		Pos:        pos,
-		Text:       fmt.Sprintf(format, args...),
-		FromLinter: scopelintName,
-	})
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/sloglint/sloglint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/sloglint/sloglint.go
new file mode 100644
index 0000000000000000000000000000000000000000..d3f567e9c7f73960798d57cb846de17f34d391e5
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/sloglint/sloglint.go
@@ -0,0 +1,32 @@
+package sloglint
+
+import (
+	"go-simpler.org/sloglint"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.SlogLintSettings) *goanalysis.Linter {
+	var opts *sloglint.Options
+	if settings != nil {
+		opts = &sloglint.Options{
+			NoMixedArgs:    settings.NoMixedArgs,
+			KVOnly:         settings.KVOnly,
+			AttrOnly:       settings.AttrOnly,
+			NoGlobal:       settings.NoGlobal,
+			ContextOnly:    settings.Context,
+			StaticMsg:      settings.StaticMsg,
+			NoRawKeys:      settings.NoRawKeys,
+			KeyNamingCase:  settings.KeyNamingCase,
+			ArgsOnSepLines: settings.ArgsOnSepLines,
+		}
+	}
+
+	a := sloglint.New(opts)
+
+	return goanalysis.
+		NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, nil).
+		WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/spancheck/spancheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/spancheck/spancheck.go
new file mode 100644
index 0000000000000000000000000000000000000000..a800a170582768ca8188f6e3a0c4eff5bae066e6
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/spancheck/spancheck.go
@@ -0,0 +1,33 @@
+package spancheck
+
+import (
+	"github.com/jjti/go-spancheck"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.SpancheckSettings) *goanalysis.Linter {
+	cfg := spancheck.NewDefaultConfig()
+
+	if settings != nil {
+		if settings.Checks != nil {
+			cfg.EnabledChecks = settings.Checks
+		}
+
+		if settings.IgnoreCheckSignatures != nil {
+			cfg.IgnoreChecksSignaturesSlice = settings.IgnoreCheckSignatures
+		}
+
+		if settings.ExtraStartSpanSignatures != nil {
+			cfg.StartSpanMatchersSlice = settings.ExtraStartSpanSignatures
+		}
+	}
+
+	a := spancheck.NewAnalyzerWithConfig(cfg)
+
+	return goanalysis.
+		NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, nil).
+		WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck.go
deleted file mode 100644
index ff2c0c08ff864cb90be17e18787f86834f4f9ba8..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package golinters
-
-import (
-	"github.com/ryanrolds/sqlclosecheck/pkg/analyzer"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewSQLCloseCheck() *goanalysis.Linter {
-	return goanalysis.NewLinter(
-		"sqlclosecheck",
-		"Checks that sql.Rows and sql.Stmt are closed.",
-		[]*analysis.Analyzer{
-			analyzer.NewAnalyzer(),
-		},
-		nil,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck/sqlclosecheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck/sqlclosecheck.go
new file mode 100644
index 0000000000000000000000000000000000000000..5eb32ff9dbe3351dc2485645cd3c7cd68289c6b2
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck/sqlclosecheck.go
@@ -0,0 +1,19 @@
+package sqlclosecheck
+
+import (
+	"github.com/ryanrolds/sqlclosecheck/pkg/analyzer"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New() *goanalysis.Linter {
+	a := analyzer.NewAnalyzer()
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck/staticcheck.go
similarity index 53%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck/staticcheck.go
index 673484630aba82a549528a5caf8f041e58b38417..0c0534539ea549114ed1254e6b0439f58b0c8552 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck/staticcheck.go
@@ -1,15 +1,16 @@
-package golinters
+package staticcheck
 
 import (
 	"honnef.co/go/tools/staticcheck"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 )
 
-func NewStaticcheck(settings *config.StaticCheckSettings) *goanalysis.Linter {
-	cfg := staticCheckConfig(settings)
-	analyzers := setupStaticCheckAnalyzers(staticcheck.Analyzers, getGoVersion(settings), cfg.Checks)
+func New(settings *config.StaticCheckSettings) *goanalysis.Linter {
+	cfg := internal.StaticCheckConfig(settings)
+	analyzers := internal.SetupStaticCheckAnalyzers(staticcheck.Analyzers, internal.GetGoVersion(settings), cfg.Checks)
 
 	return goanalysis.NewLinter(
 		"staticcheck",
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go
deleted file mode 100644
index f3df0c2f3552d360bbd82e8dd7497c6e5cf6285b..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go
+++ /dev/null
@@ -1,71 +0,0 @@
-package golinters
-
-import (
-	"fmt"
-	"sync"
-
-	structcheckAPI "github.com/golangci/check/cmd/structcheck"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-const structcheckName = "structcheck"
-
-//nolint:dupl
-func NewStructcheck(settings *config.StructCheckSettings) *goanalysis.Linter {
-	var mu sync.Mutex
-	var resIssues []goanalysis.Issue
-
-	analyzer := &analysis.Analyzer{
-		Name: structcheckName,
-		Doc:  goanalysis.TheOnlyanalyzerDoc,
-		Run: func(pass *analysis.Pass) (any, error) {
-			issues := runStructCheck(pass, settings)
-
-			if len(issues) == 0 {
-				return nil, nil
-			}
-
-			mu.Lock()
-			resIssues = append(resIssues, issues...)
-			mu.Unlock()
-
-			return nil, nil
-		},
-	}
-
-	return goanalysis.NewLinter(
-		structcheckName,
-		"Finds unused struct fields",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
-		return resIssues
-	}).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
-
-//nolint:dupl
-func runStructCheck(pass *analysis.Pass, settings *config.StructCheckSettings) []goanalysis.Issue {
-	prog := goanalysis.MakeFakeLoaderProgram(pass)
-
-	lintIssues := structcheckAPI.Run(prog, settings.CheckExportedFields)
-	if len(lintIssues) == 0 {
-		return nil
-	}
-
-	issues := make([]goanalysis.Issue, 0, len(lintIssues))
-
-	for _, i := range lintIssues {
-		issues = append(issues, goanalysis.NewIssue(&result.Issue{
-			Pos:        i.Pos,
-			Text:       fmt.Sprintf("%s is unused", formatCode(i.FieldName, nil)),
-			FromLinter: structcheckName,
-		}, pass))
-	}
-
-	return issues
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck/stylecheck.go
similarity index 61%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck/stylecheck.go
index 2e1e21c5bdbb09b9a52474d36fe2c43837aee1e8..b8fc8fe547d16e351e38a1cc68c273c78b9b8eb7 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck/stylecheck.go
@@ -1,4 +1,4 @@
-package golinters
+package stylecheck
 
 import (
 	"golang.org/x/tools/go/analysis"
@@ -6,20 +6,21 @@ import (
 	"honnef.co/go/tools/stylecheck"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 )
 
-func NewStylecheck(settings *config.StaticCheckSettings) *goanalysis.Linter {
-	cfg := staticCheckConfig(settings)
+func New(settings *config.StaticCheckSettings) *goanalysis.Linter {
+	cfg := internal.StaticCheckConfig(settings)
 
 	// `scconfig.Analyzer` is a singleton, then it's not possible to have more than one instance for all staticcheck "sub-linters".
 	// When we will merge the 4 "sub-linters", the problem will disappear: https://github.com/golangci/golangci-lint/issues/357
 	// Currently only stylecheck analyzer has a configuration in staticcheck.
-	scconfig.Analyzer.Run = func(pass *analysis.Pass) (any, error) {
+	scconfig.Analyzer.Run = func(_ *analysis.Pass) (any, error) {
 		return cfg, nil
 	}
 
-	analyzers := setupStaticCheckAnalyzers(stylecheck.Analyzers, getGoVersion(settings), cfg.Checks)
+	analyzers := internal.SetupStaticCheckAnalyzers(stylecheck.Analyzers, internal.GetGoVersion(settings), cfg.Checks)
 
 	return goanalysis.NewLinter(
 		"stylecheck",
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign/tagalign.go
similarity index 92%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign/tagalign.go
index c23838f702b50a8ed87a50b6b1ffcf2b8990ea3d..f438c51b5c7eeecea771232cc33b1cc53d0afa32 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign/tagalign.go
@@ -1,4 +1,4 @@
-package golinters
+package tagalign
 
 import (
 	"sync"
@@ -7,12 +7,12 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-func NewTagAlign(settings *config.TagAlignSettings) *goanalysis.Linter {
+func New(settings *config.TagAlignSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle/tagliatelle.go
similarity index 77%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle/tagliatelle.go
index 67c14cbd48e3af8cacc4dbba6497168bbd00065a..d1674c3e9e285a2495cf7b824ec3a469bb12dbe9 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle/tagliatelle.go
@@ -1,14 +1,14 @@
-package golinters
+package tagliatelle
 
 import (
 	"github.com/ldez/tagliatelle"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewTagliatelle(settings *config.TagliatelleSettings) *goanalysis.Linter {
+func New(settings *config.TagliatelleSettings) *goanalysis.Linter {
 	cfg := tagliatelle.Config{
 		Rules: map[string]string{
 			"json":   "camel",
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go
similarity index 74%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go
index 6c6bd3186f7867e07db6f3c2cdbd5ac0e41d4712..b80a783b6567853a3c4aea678682e05071cb4be9 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go
@@ -1,14 +1,14 @@
-package golinters
+package tenv
 
 import (
 	"github.com/sivchari/tenv"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewTenv(settings *config.TenvSettings) *goanalysis.Linter {
+func New(settings *config.TenvSettings) *goanalysis.Linter {
 	a := tenv.Analyzer
 
 	var cfg map[string]map[string]any
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testableexamples.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testableexamples/testableexamples.go
similarity index 68%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/testableexamples.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/testableexamples/testableexamples.go
index 3333593a62eb6c79f1d42b7bfddf3c43dff221a0..6b76271dbab511554d88c0a6cafca717b626d3d5 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testableexamples.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testableexamples/testableexamples.go
@@ -1,13 +1,13 @@
-package golinters
+package testableexamples
 
 import (
 	"github.com/maratori/testableexamples/pkg/testableexamples"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewTestableexamples() *goanalysis.Linter {
+func New() *goanalysis.Linter {
 	a := testableexamples.NewAnalyzer()
 
 	return goanalysis.NewLinter(
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testifylint/testifylint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testifylint/testifylint.go
new file mode 100644
index 0000000000000000000000000000000000000000..55c96881d83b9be035d1dd442e922408b15701e3
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testifylint/testifylint.go
@@ -0,0 +1,46 @@
+package testifylint
+
+import (
+	"github.com/Antonboom/testifylint/analyzer"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.TestifylintSettings) *goanalysis.Linter {
+	a := analyzer.New()
+
+	cfg := make(map[string]map[string]any)
+	if settings != nil {
+		cfg[a.Name] = map[string]any{
+			"enable-all":  settings.EnableAll,
+			"disable-all": settings.DisableAll,
+
+			"bool-compare.ignore-custom-types": settings.BoolCompare.IgnoreCustomTypes,
+		}
+		if len(settings.EnabledCheckers) > 0 {
+			cfg[a.Name]["enable"] = settings.EnabledCheckers
+		}
+		if len(settings.DisabledCheckers) > 0 {
+			cfg[a.Name]["disable"] = settings.DisabledCheckers
+		}
+
+		if p := settings.ExpectedActual.ExpVarPattern; p != "" {
+			cfg[a.Name]["expected-actual.pattern"] = p
+		}
+		if p := settings.RequireError.FnPattern; p != "" {
+			cfg[a.Name]["require-error.fn-pattern"] = p
+		}
+		if m := settings.SuiteExtraAssertCall.Mode; m != "" {
+			cfg[a.Name]["suite-extra-assert-call.mode"] = m
+		}
+	}
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		cfg,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage/testpackage.go
similarity index 74%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage/testpackage.go
index db1ead9661e4dc36ad2b549e4accf5372f37b55b..632152712bccb6a67e65474828dc494d50929a58 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage/testpackage.go
@@ -1,4 +1,4 @@
-package golinters
+package testpackage
 
 import (
 	"strings"
@@ -7,11 +7,11 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewTestpackage(cfg *config.TestpackageSettings) *goanalysis.Linter {
-	var a = testpackage.NewAnalyzer()
+func New(cfg *config.TestpackageSettings) *goanalysis.Linter {
+	a := testpackage.NewAnalyzer()
 
 	var settings map[string]map[string]any
 	if cfg != nil {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper/thelper.go
similarity index 74%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper/thelper.go
index 84a8e9e8bee93e04fc1e7bf559d69b9413d01246..cc6ea755c9edeb0a8c155da7469069c7dd9f8c7a 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper/thelper.go
@@ -1,16 +1,18 @@
-package golinters
+package thelper
 
 import (
 	"strings"
 
 	"github.com/kulti/thelper/pkg/analyzer"
+	"golang.org/x/exp/maps"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
 )
 
-func NewThelper(cfg *config.ThelperSettings) *goanalysis.Linter {
+func New(cfg *config.ThelperSettings) *goanalysis.Linter {
 	a := analyzer.NewAnalyzer()
 
 	opts := map[string]struct{}{
@@ -39,13 +41,10 @@ func NewThelper(cfg *config.ThelperSettings) *goanalysis.Linter {
 	}
 
 	if len(opts) == 0 {
-		linterLogger.Fatalf("thelper: at least one option must be enabled")
+		internal.LinterLogger.Fatalf("thelper: at least one option must be enabled")
 	}
 
-	var args []string
-	for k := range opts {
-		args = append(args, k)
-	}
+	args := maps.Keys(opts)
 
 	cfgMap := map[string]map[string]any{
 		a.Name: {
@@ -54,8 +53,8 @@ func NewThelper(cfg *config.ThelperSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		"thelper",
-		"thelper detects Go test helpers without t.Helper() call and checks the consistency of test helpers",
+		a.Name,
+		a.Doc,
 		[]*analysis.Analyzer{a},
 		cfgMap,
 	).WithLoadMode(goanalysis.LoadModeTypesInfo)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel.go
deleted file mode 100644
index cbe97516c49fc060e8ab13dd9f78303d193e8d50..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package golinters
-
-import (
-	"github.com/moricho/tparallel"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewTparallel() *goanalysis.Linter {
-	return goanalysis.NewLinter(
-		"tparallel",
-		"tparallel detects inappropriate usage of t.Parallel() method in your Go test codes",
-		[]*analysis.Analyzer{tparallel.Analyzer},
-		nil,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel/tparallel.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel/tparallel.go
new file mode 100644
index 0000000000000000000000000000000000000000..4f7c43a99d1bc2efa9be80d13153f19bb8e7377d
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel/tparallel.go
@@ -0,0 +1,18 @@
+package tparallel
+
+import (
+	"github.com/moricho/tparallel"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New() *goanalysis.Linter {
+	a := tparallel.Analyzer
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/typecheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/typecheck.go
index e9b26ef485e1c4007215ab51487d18f7a5ea5fd4..d0eaa00d0cea943fe6808489c3ec36deade15794 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/typecheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/typecheck.go
@@ -3,7 +3,7 @@ package golinters
 import (
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
 func NewTypecheck() *goanalysis.Linter {
@@ -20,5 +20,5 @@ func NewTypecheck() *goanalysis.Linter {
 		"Like the front-end of a Go compiler, parses and type-checks Go code",
 		[]*analysis.Analyzer{analyzer},
 		nil,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+	).WithLoadMode(goanalysis.LoadModeNone)
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert/unconvert.go
similarity index 58%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert/unconvert.go
index aad858dfd69646044af79ee28aa55ef05a4b9441..954cc9eb34ce887739c3d301c0bf171dbf9b8f43 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert/unconvert.go
@@ -1,28 +1,28 @@
-package golinters
+package unconvert
 
 import (
 	"sync"
 
-	unconvertAPI "github.com/golangci/unconvert"
+	"github.com/golangci/unconvert"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const unconvertName = "unconvert"
+const linterName = "unconvert"
 
-//nolint:dupl
-func NewUnconvert() *goanalysis.Linter {
+func New(settings *config.UnconvertSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name: unconvertName,
+		Name: linterName,
 		Doc:  goanalysis.TheOnlyanalyzerDoc,
 		Run: func(pass *analysis.Pass) (any, error) {
-			issues := runUnconvert(pass)
+			issues := runUnconvert(pass, settings)
 
 			if len(issues) == 0 {
 				return nil, nil
@@ -37,7 +37,7 @@ func NewUnconvert() *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		unconvertName,
+		linterName,
 		"Remove unnecessary type conversions",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -46,20 +46,15 @@ func NewUnconvert() *goanalysis.Linter {
 	}).WithLoadMode(goanalysis.LoadModeTypesInfo)
 }
 
-func runUnconvert(pass *analysis.Pass) []goanalysis.Issue {
-	prog := goanalysis.MakeFakeLoaderProgram(pass)
+func runUnconvert(pass *analysis.Pass, settings *config.UnconvertSettings) []goanalysis.Issue {
+	positions := unconvert.Run(pass, settings.FastMath, settings.Safe)
 
-	positions := unconvertAPI.Run(prog)
-	if len(positions) == 0 {
-		return nil
-	}
-
-	issues := make([]goanalysis.Issue, 0, len(positions))
-	for _, pos := range positions {
+	var issues []goanalysis.Issue
+	for _, position := range positions {
 		issues = append(issues, goanalysis.NewIssue(&result.Issue{
-			Pos:        pos,
+			Pos:        position,
 			Text:       "unnecessary conversion",
-			FromLinter: unconvertName,
+			FromLinter: linterName,
 		}, pass))
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam/unparam.go
similarity index 88%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam/unparam.go
index 4078d94988fbc6c4f0c21d9cec22c0206fb0cf74..0fe18473662905aab3d53770ab2193428d60883d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam/unparam.go
@@ -1,4 +1,4 @@
-package golinters
+package unparam
 
 import (
 	"sync"
@@ -9,19 +9,19 @@ import (
 	"mvdan.cc/unparam/check"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-const unparamName = "unparam"
+const linterName = "unparam"
 
-func NewUnparam(settings *config.UnparamSettings) *goanalysis.Linter {
+func New(settings *config.UnparamSettings) *goanalysis.Linter {
 	var mu sync.Mutex
 	var resIssues []goanalysis.Issue
 
 	analyzer := &analysis.Analyzer{
-		Name:     unparamName,
+		Name:     linterName,
 		Doc:      goanalysis.TheOnlyanalyzerDoc,
 		Requires: []*analysis.Analyzer{buildssa.Analyzer},
 		Run: func(pass *analysis.Pass) (any, error) {
@@ -43,7 +43,7 @@ func NewUnparam(settings *config.UnparamSettings) *goanalysis.Linter {
 	}
 
 	return goanalysis.NewLinter(
-		unparamName,
+		linterName,
 		"Reports unused function parameters",
 		[]*analysis.Analyzer{analyzer},
 		nil,
@@ -82,7 +82,7 @@ func runUnparam(pass *analysis.Pass, settings *config.UnparamSettings) ([]goanal
 		issues = append(issues, goanalysis.NewIssue(&result.Issue{
 			Pos:        pass.Fset.Position(i.Pos()),
 			Text:       i.Message(),
-			FromLinter: unparamName,
+			FromLinter: linterName,
 		}, pass))
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go
deleted file mode 100644
index aa9374d34357d9acbe01cf70d69086c0aed5e0d1..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go
+++ /dev/null
@@ -1,94 +0,0 @@
-package golinters
-
-import (
-	"fmt"
-	"sync"
-
-	"golang.org/x/tools/go/analysis"
-	"honnef.co/go/tools/unused"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-const unusedName = "unused"
-
-type UnusedSettings struct {
-	GoVersion string
-}
-
-func NewUnused(settings *config.StaticCheckSettings) *goanalysis.Linter {
-	var mu sync.Mutex
-	var resIssues []goanalysis.Issue
-
-	analyzer := &analysis.Analyzer{
-		Name:     unusedName,
-		Doc:      unused.Analyzer.Analyzer.Doc,
-		Requires: unused.Analyzer.Analyzer.Requires,
-		Run: func(pass *analysis.Pass) (any, error) {
-			issues, err := runUnused(pass)
-			if err != nil {
-				return nil, err
-			}
-
-			if len(issues) == 0 {
-				return nil, nil
-			}
-
-			mu.Lock()
-			resIssues = append(resIssues, issues...)
-			mu.Unlock()
-
-			return nil, nil
-		},
-	}
-
-	setAnalyzerGoVersion(analyzer, getGoVersion(settings))
-
-	return goanalysis.NewLinter(
-		unusedName,
-		"Checks Go code for unused constants, variables, functions and types",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithIssuesReporter(func(lintCtx *linter.Context) []goanalysis.Issue {
-		return resIssues
-	}).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
-
-func runUnused(pass *analysis.Pass) ([]goanalysis.Issue, error) {
-	res, err := unused.Analyzer.Analyzer.Run(pass)
-	if err != nil {
-		return nil, err
-	}
-
-	used := make(map[string]bool)
-	for _, obj := range res.(unused.Result).Used {
-		used[fmt.Sprintf("%s %d %s", obj.Position.Filename, obj.Position.Line, obj.Name)] = true
-	}
-
-	var issues []goanalysis.Issue
-
-	// Inspired by https://github.com/dominikh/go-tools/blob/d694aadcb1f50c2d8ac0a1dd06217ebb9f654764/lintcmd/lint.go#L177-L197
-	for _, object := range res.(unused.Result).Unused {
-		if object.Kind == "type param" {
-			continue
-		}
-
-		key := fmt.Sprintf("%s %d %s", object.Position.Filename, object.Position.Line, object.Name)
-		if used[key] {
-			continue
-		}
-
-		issue := goanalysis.NewIssue(&result.Issue{
-			FromLinter: unusedName,
-			Text:       fmt.Sprintf("%s %s is unused", object.Kind, object.Name),
-			Pos:        object.Position,
-		}, pass)
-
-		issues = append(issues, issue)
-	}
-
-	return issues, nil
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused/unused.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused/unused.go
new file mode 100644
index 0000000000000000000000000000000000000000..55712f08405a247c340a67b58ab41e3d8a29f306
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused/unused.go
@@ -0,0 +1,113 @@
+package unused
+
+import (
+	"fmt"
+	"sync"
+
+	"golang.org/x/tools/go/analysis"
+	"honnef.co/go/tools/analysis/facts/directives"
+	"honnef.co/go/tools/analysis/facts/generated"
+	"honnef.co/go/tools/analysis/lint"
+	"honnef.co/go/tools/unused"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/golinters/internal"
+	"github.com/golangci/golangci-lint/pkg/lint/linter"
+	"github.com/golangci/golangci-lint/pkg/result"
+)
+
+const linterName = "unused"
+
+func New(settings *config.UnusedSettings, scSettings *config.StaticCheckSettings) *goanalysis.Linter {
+	var mu sync.Mutex
+	var resIssues []goanalysis.Issue
+
+	analyzer := &analysis.Analyzer{
+		Name:     linterName,
+		Doc:      unused.Analyzer.Analyzer.Doc,
+		Requires: unused.Analyzer.Analyzer.Requires,
+		Run: func(pass *analysis.Pass) (any, error) {
+			issues := runUnused(pass, settings)
+			if len(issues) == 0 {
+				return nil, nil
+			}
+
+			mu.Lock()
+			resIssues = append(resIssues, issues...)
+			mu.Unlock()
+
+			return nil, nil
+		},
+	}
+
+	internal.SetAnalyzerGoVersion(analyzer, internal.GetGoVersion(scSettings))
+
+	return goanalysis.NewLinter(
+		linterName,
+		"Checks Go code for unused constants, variables, functions and types",
+		[]*analysis.Analyzer{analyzer},
+		nil,
+	).WithIssuesReporter(func(_ *linter.Context) []goanalysis.Issue {
+		return resIssues
+	}).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
+
+func runUnused(pass *analysis.Pass, cfg *config.UnusedSettings) []goanalysis.Issue {
+	res := getUnusedResults(pass, cfg)
+
+	used := make(map[string]bool)
+	for _, obj := range res.Used {
+		used[fmt.Sprintf("%s %d %s", obj.Position.Filename, obj.Position.Line, obj.Name)] = true
+	}
+
+	var issues []goanalysis.Issue
+
+	// Inspired by https://github.com/dominikh/go-tools/blob/d694aadcb1f50c2d8ac0a1dd06217ebb9f654764/lintcmd/lint.go#L177-L197
+	for _, object := range res.Unused {
+		if object.Kind == "type param" {
+			continue
+		}
+
+		key := fmt.Sprintf("%s %d %s", object.Position.Filename, object.Position.Line, object.Name)
+		if used[key] {
+			continue
+		}
+
+		issue := goanalysis.NewIssue(&result.Issue{
+			FromLinter: linterName,
+			Text:       fmt.Sprintf("%s %s is unused", object.Kind, object.Name),
+			Pos:        object.Position,
+		}, pass)
+
+		issues = append(issues, issue)
+	}
+
+	return issues
+}
+
+func getUnusedResults(pass *analysis.Pass, settings *config.UnusedSettings) unused.Result {
+	opts := unused.Options{
+		FieldWritesAreUses:     settings.FieldWritesAreUses,
+		PostStatementsAreReads: settings.PostStatementsAreReads,
+		ExportedIsUsed:         settings.ExportedIsUsed,
+		ExportedFieldsAreUsed:  settings.ExportedFieldsAreUsed,
+		ParametersAreUsed:      settings.ParametersAreUsed,
+		LocalVariablesAreUsed:  settings.LocalVariablesAreUsed,
+		GeneratedIsUsed:        settings.GeneratedIsUsed,
+	}
+
+	// ref: https://github.com/dominikh/go-tools/blob/4ec1f474ca6c0feb8e10a8fcca4ab95f5b5b9881/internal/cmd/unused/unused.go#L68
+	nodes := unused.Graph(pass.Fset,
+		pass.Files,
+		pass.Pkg,
+		pass.TypesInfo,
+		pass.ResultOf[directives.Analyzer].([]lint.Directive),
+		pass.ResultOf[generated.Analyzer].(map[string]generated.Generator),
+		opts,
+	)
+
+	sg := unused.SerializedGraph{}
+	sg.Merge(nodes)
+	return sg.Results()
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars/usestdlibvars.go
similarity index 87%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars/usestdlibvars.go
index 663a841ac70a10f523aef940394dddb41089c25a..050e47f24cce1c51ef06bf0832b34041aa7c7ce2 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars/usestdlibvars.go
@@ -1,14 +1,14 @@
-package golinters
+package usestdlibvars
 
 import (
 	"github.com/sashamelentyev/usestdlibvars/pkg/analyzer"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewUseStdlibVars(cfg *config.UseStdlibVarsSettings) *goanalysis.Linter {
+func New(cfg *config.UseStdlibVarsSettings) *goanalysis.Linter {
 	a := analyzer.New()
 
 	cfgMap := make(map[string]map[string]any)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go
deleted file mode 100644
index 495c5b59fb9ee1d5e51809341be2aec2d2f7bbd9..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go
+++ /dev/null
@@ -1,72 +0,0 @@
-package golinters
-
-import (
-	"fmt"
-	"sync"
-
-	varcheckAPI "github.com/golangci/check/cmd/varcheck"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-const varcheckName = "varcheck"
-
-func NewVarcheck(settings *config.VarCheckSettings) *goanalysis.Linter {
-	var mu sync.Mutex
-	var resIssues []goanalysis.Issue
-
-	analyzer := &analysis.Analyzer{
-		Name: varcheckName,
-		Doc:  goanalysis.TheOnlyanalyzerDoc,
-		Run:  goanalysis.DummyRun,
-	}
-
-	return goanalysis.NewLinter(
-		varcheckName,
-		"Finds unused global variables and constants",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithContextSetter(func(lintCtx *linter.Context) {
-		analyzer.Run = func(pass *analysis.Pass) (any, error) {
-			issues := runVarCheck(pass, settings)
-
-			if len(issues) == 0 {
-				return nil, nil
-			}
-
-			mu.Lock()
-			resIssues = append(resIssues, issues...)
-			mu.Unlock()
-
-			return nil, nil
-		}
-	}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
-		return resIssues
-	}).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
-
-//nolint:dupl
-func runVarCheck(pass *analysis.Pass, settings *config.VarCheckSettings) []goanalysis.Issue {
-	prog := goanalysis.MakeFakeLoaderProgram(pass)
-
-	lintIssues := varcheckAPI.Run(prog, settings.CheckExportedFields)
-	if len(lintIssues) == 0 {
-		return nil
-	}
-
-	issues := make([]goanalysis.Issue, 0, len(lintIssues))
-
-	for _, i := range lintIssues {
-		issues = append(issues, goanalysis.NewIssue(&result.Issue{
-			Pos:        i.Pos,
-			Text:       fmt.Sprintf("%s is unused", formatCode(i.VarName, nil)),
-			FromLinter: varcheckName,
-		}, pass))
-	}
-
-	return issues
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen/varnamelen.go
similarity index 89%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen/varnamelen.go
index 688dfa804680a4ced8652d6ae907e644687f497e..6cb57ffa57c453a72747ba81630278b014f2428b 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen/varnamelen.go
@@ -1,4 +1,4 @@
-package golinters
+package varnamelen
 
 import (
 	"strconv"
@@ -8,10 +8,10 @@ import (
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-func NewVarnamelen(settings *config.VarnamelenSettings) *goanalysis.Linter {
+func New(settings *config.VarnamelenSettings) *goanalysis.Linter {
 	analyzer := varnamelen.NewAnalyzer()
 	cfg := map[string]map[string]any{}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wastedassign.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wastedassign.go
deleted file mode 100644
index 92798d4f7353cbf753127342f37b71b070377b3c..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wastedassign.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package golinters
-
-import (
-	"github.com/sanposhiho/wastedassign/v2"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewWastedAssign() *goanalysis.Linter {
-	return goanalysis.NewLinter(
-		"wastedassign",
-		"wastedassign finds wasted assignment statements.",
-		[]*analysis.Analyzer{wastedassign.Analyzer},
-		nil,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wastedassign/wastedassign.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wastedassign/wastedassign.go
new file mode 100644
index 0000000000000000000000000000000000000000..094fa95c29433a1da7ead63d5f4b064a7b3cd8ba
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wastedassign/wastedassign.go
@@ -0,0 +1,19 @@
+package wastedassign
+
+import (
+	"github.com/sanposhiho/wastedassign/v2"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New() *goanalysis.Linter {
+	a := wastedassign.Analyzer
+
+	return goanalysis.NewLinter(
+		a.Name,
+		"Finds wasted assignment statements",
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go
deleted file mode 100644
index e5941fa5dc67802a03fafda7bdaea8a7df1c809d..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go
+++ /dev/null
@@ -1,108 +0,0 @@
-package golinters
-
-import (
-	"fmt"
-	"go/token"
-	"sync"
-
-	"github.com/ultraware/whitespace"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-const whitespaceName = "whitespace"
-
-//nolint:dupl
-func NewWhitespace(settings *config.WhitespaceSettings) *goanalysis.Linter {
-	var mu sync.Mutex
-	var resIssues []goanalysis.Issue
-
-	var wsSettings whitespace.Settings
-	if settings != nil {
-		wsSettings = whitespace.Settings{
-			MultiIf:   settings.MultiIf,
-			MultiFunc: settings.MultiFunc,
-		}
-	}
-
-	analyzer := &analysis.Analyzer{
-		Name: whitespaceName,
-		Doc:  goanalysis.TheOnlyanalyzerDoc,
-		Run:  goanalysis.DummyRun,
-	}
-
-	return goanalysis.NewLinter(
-		whitespaceName,
-		"Tool for detection of leading and trailing whitespace",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithContextSetter(func(lintCtx *linter.Context) {
-		analyzer.Run = func(pass *analysis.Pass) (any, error) {
-			issues, err := runWhitespace(lintCtx, pass, wsSettings)
-			if err != nil {
-				return nil, err
-			}
-
-			if len(issues) == 0 {
-				return nil, nil
-			}
-
-			mu.Lock()
-			resIssues = append(resIssues, issues...)
-			mu.Unlock()
-
-			return nil, nil
-		}
-	}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
-		return resIssues
-	}).WithLoadMode(goanalysis.LoadModeSyntax)
-}
-
-func runWhitespace(lintCtx *linter.Context, pass *analysis.Pass, wsSettings whitespace.Settings) ([]goanalysis.Issue, error) {
-	var messages []whitespace.Message
-	for _, file := range pass.Files {
-		messages = append(messages, whitespace.Run(file, pass.Fset, wsSettings)...)
-	}
-
-	if len(messages) == 0 {
-		return nil, nil
-	}
-
-	issues := make([]goanalysis.Issue, len(messages))
-	for k, i := range messages {
-		issue := result.Issue{
-			Pos: token.Position{
-				Filename: i.Pos.Filename,
-				Line:     i.Pos.Line,
-			},
-			LineRange:   &result.Range{From: i.Pos.Line, To: i.Pos.Line},
-			Text:        i.Message,
-			FromLinter:  whitespaceName,
-			Replacement: &result.Replacement{},
-		}
-
-		bracketLine, err := lintCtx.LineCache.GetLine(issue.Pos.Filename, issue.Pos.Line)
-		if err != nil {
-			return nil, fmt.Errorf("failed to get line %s:%d: %w", issue.Pos.Filename, issue.Pos.Line, err)
-		}
-
-		switch i.Type {
-		case whitespace.MessageTypeLeading:
-			issue.LineRange.To++ // cover two lines by the issue: opening bracket "{" (issue.Pos.Line) and following empty line
-		case whitespace.MessageTypeTrailing:
-			issue.LineRange.From-- // cover two lines by the issue: closing bracket "}" (issue.Pos.Line) and preceding empty line
-			issue.Pos.Line--       // set in sync with LineRange.From to not break fixer and other code features
-		case whitespace.MessageTypeAddAfter:
-			bracketLine += "\n"
-		}
-		issue.Replacement.NewLines = []string{bracketLine}
-
-		issues[k] = goanalysis.NewIssue(&issue, pass)
-	}
-
-	return issues, nil
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace/whitespace.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace/whitespace.go
new file mode 100644
index 0000000000000000000000000000000000000000..721bfada1c6f083ae71a6ae2fa19f0344587c5ad
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace/whitespace.go
@@ -0,0 +1,102 @@
+package whitespace
+
+import (
+	"fmt"
+	"sync"
+
+	"github.com/ultraware/whitespace"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/lint/linter"
+	"github.com/golangci/golangci-lint/pkg/result"
+)
+
+const linterName = "whitespace"
+
+func New(settings *config.WhitespaceSettings) *goanalysis.Linter {
+	var mu sync.Mutex
+	var resIssues []goanalysis.Issue
+
+	var wsSettings whitespace.Settings
+	if settings != nil {
+		wsSettings = whitespace.Settings{
+			Mode:      whitespace.RunningModeGolangCI,
+			MultiIf:   settings.MultiIf,
+			MultiFunc: settings.MultiFunc,
+		}
+	}
+
+	a := whitespace.NewAnalyzer(&wsSettings)
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithContextSetter(func(_ *linter.Context) {
+		a.Run = func(pass *analysis.Pass) (any, error) {
+			issues, err := runWhitespace(pass, wsSettings)
+			if err != nil {
+				return nil, err
+			}
+
+			if len(issues) == 0 {
+				return nil, nil
+			}
+
+			mu.Lock()
+			resIssues = append(resIssues, issues...)
+			mu.Unlock()
+
+			return nil, nil
+		}
+	}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
+		return resIssues
+	}).WithLoadMode(goanalysis.LoadModeSyntax)
+}
+
+func runWhitespace(pass *analysis.Pass, wsSettings whitespace.Settings) ([]goanalysis.Issue, error) {
+	lintIssues := whitespace.Run(pass, &wsSettings)
+
+	issues := make([]goanalysis.Issue, len(lintIssues))
+	for i, issue := range lintIssues {
+		report := &result.Issue{
+			FromLinter: linterName,
+			Pos:        pass.Fset.PositionFor(issue.Diagnostic, false),
+			Text:       issue.Message,
+		}
+
+		switch issue.MessageType {
+		case whitespace.MessageTypeRemove:
+			if len(issue.LineNumbers) == 0 {
+				continue
+			}
+
+			report.LineRange = &result.Range{
+				From: issue.LineNumbers[0],
+				To:   issue.LineNumbers[len(issue.LineNumbers)-1],
+			}
+
+			report.Replacement = &result.Replacement{NeedOnlyDelete: true}
+
+		case whitespace.MessageTypeAdd:
+			report.Pos = pass.Fset.PositionFor(issue.FixStart, false)
+			report.Replacement = &result.Replacement{
+				Inline: &result.InlineFix{
+					StartCol:  0,
+					Length:    1,
+					NewString: "\n\t",
+				},
+			}
+
+		default:
+			return nil, fmt.Errorf("unknown message type: %v", issue.MessageType)
+		}
+
+		issues[i] = goanalysis.NewIssue(report, pass)
+	}
+
+	return issues, nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck/wrapcheck.go
similarity index 78%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck/wrapcheck.go
index 098eb87bafc669bafcb49fd315e1047c252e0eed..96ec2eeae047bc8a231a47057b99ef49a60e68ca 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck/wrapcheck.go
@@ -1,16 +1,14 @@
-package golinters
+package wrapcheck
 
 import (
 	"github.com/tomarrell/wrapcheck/v2/wrapcheck"
 	"golang.org/x/tools/go/analysis"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 )
 
-const wrapcheckName = "wrapcheck"
-
-func NewWrapcheck(settings *config.WrapcheckSettings) *goanalysis.Linter {
+func New(settings *config.WrapcheckSettings) *goanalysis.Linter {
 	cfg := wrapcheck.NewDefaultConfig()
 	if settings != nil {
 		if len(settings.IgnoreSigs) != 0 {
@@ -30,7 +28,7 @@ func NewWrapcheck(settings *config.WrapcheckSettings) *goanalysis.Linter {
 	a := wrapcheck.NewAnalyzer(cfg)
 
 	return goanalysis.NewLinter(
-		wrapcheckName,
+		a.Name,
 		a.Doc,
 		[]*analysis.Analyzer{a},
 		nil,
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go
deleted file mode 100644
index 05697a629e40f9acd4044ce4b3c539069c6f4051..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go
+++ /dev/null
@@ -1,89 +0,0 @@
-package golinters
-
-import (
-	"sync"
-
-	"github.com/bombsimon/wsl/v3"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-const wslName = "wsl"
-
-// NewWSL returns a new WSL linter.
-func NewWSL(settings *config.WSLSettings) *goanalysis.Linter {
-	var mu sync.Mutex
-	var resIssues []goanalysis.Issue
-
-	conf := wsl.DefaultConfig()
-
-	if settings != nil {
-		conf.StrictAppend = settings.StrictAppend
-		conf.AllowAssignAndCallCuddle = settings.AllowAssignAndCallCuddle
-		conf.AllowAssignAndAnythingCuddle = settings.AllowAssignAndAnythingCuddle
-		conf.AllowMultiLineAssignCuddle = settings.AllowMultiLineAssignCuddle
-		conf.ForceCaseTrailingWhitespaceLimit = settings.ForceCaseTrailingWhitespaceLimit
-		conf.AllowTrailingComment = settings.AllowTrailingComment
-		conf.AllowSeparatedLeadingComment = settings.AllowSeparatedLeadingComment
-		conf.AllowCuddleDeclaration = settings.AllowCuddleDeclaration
-		conf.AllowCuddleWithCalls = settings.AllowCuddleWithCalls
-		conf.AllowCuddleWithRHS = settings.AllowCuddleWithRHS
-		conf.ForceCuddleErrCheckAndAssign = settings.ForceCuddleErrCheckAndAssign
-		conf.ErrorVariableNames = settings.ErrorVariableNames
-		conf.ForceExclusiveShortDeclarations = settings.ForceExclusiveShortDeclarations
-	}
-
-	analyzer := &analysis.Analyzer{
-		Name: goanalysis.TheOnlyAnalyzerName,
-		Doc:  goanalysis.TheOnlyanalyzerDoc,
-		Run: func(pass *analysis.Pass) (any, error) {
-			issues := runWSL(pass, &conf)
-
-			if len(issues) == 0 {
-				return nil, nil
-			}
-
-			mu.Lock()
-			resIssues = append(resIssues, issues...)
-			mu.Unlock()
-
-			return nil, nil
-		},
-	}
-
-	return goanalysis.NewLinter(
-		wslName,
-		"Whitespace Linter - Forces you to use empty lines!",
-		[]*analysis.Analyzer{analyzer},
-		nil,
-	).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
-		return resIssues
-	}).WithLoadMode(goanalysis.LoadModeSyntax)
-}
-
-func runWSL(pass *analysis.Pass, conf *wsl.Configuration) []goanalysis.Issue {
-	if conf == nil {
-		return nil
-	}
-
-	files := getFileNames(pass)
-	wslErrors, _ := wsl.NewProcessorWithConfig(*conf).ProcessFiles(files)
-	if len(wslErrors) == 0 {
-		return nil
-	}
-
-	var issues []goanalysis.Issue
-	for _, err := range wslErrors {
-		issues = append(issues, goanalysis.NewIssue(&result.Issue{
-			FromLinter: wslName,
-			Pos:        err.Position,
-			Text:       err.Reason,
-		}, pass))
-	}
-
-	return issues
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl/wsl.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl/wsl.go
new file mode 100644
index 0000000000000000000000000000000000000000..5a72035b506cc92c290b9bcbb291832923ec4fff
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl/wsl.go
@@ -0,0 +1,39 @@
+package wsl
+
+import (
+	"github.com/bombsimon/wsl/v4"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.WSLSettings) *goanalysis.Linter {
+	var conf *wsl.Configuration
+	if settings != nil {
+		conf = &wsl.Configuration{
+			StrictAppend:                     settings.StrictAppend,
+			AllowAssignAndCallCuddle:         settings.AllowAssignAndCallCuddle,
+			AllowAssignAndAnythingCuddle:     settings.AllowAssignAndAnythingCuddle,
+			AllowMultiLineAssignCuddle:       settings.AllowMultiLineAssignCuddle,
+			ForceCaseTrailingWhitespaceLimit: settings.ForceCaseTrailingWhitespaceLimit,
+			AllowTrailingComment:             settings.AllowTrailingComment,
+			AllowSeparatedLeadingComment:     settings.AllowSeparatedLeadingComment,
+			AllowCuddleDeclaration:           settings.AllowCuddleDeclaration,
+			AllowCuddleWithCalls:             settings.AllowCuddleWithCalls,
+			AllowCuddleWithRHS:               settings.AllowCuddleWithRHS,
+			ForceCuddleErrCheckAndAssign:     settings.ForceCuddleErrCheckAndAssign,
+			ErrorVariableNames:               settings.ErrorVariableNames,
+			ForceExclusiveShortDeclarations:  settings.ForceExclusiveShortDeclarations,
+		}
+	}
+
+	a := wsl.NewAnalyzer(conf)
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeSyntax)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/zerologlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/zerologlint.go
deleted file mode 100644
index a37bca12e272bc2db5d443482631fe732f5eb298..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/zerologlint.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package golinters
-
-import (
-	"github.com/ykadowak/zerologlint"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-)
-
-func NewZerologLint() *goanalysis.Linter {
-	return goanalysis.NewLinter(
-		"zerologlint",
-		"Detects the wrong usage of `zerolog` that a user forgets to dispatch with `Send` or `Msg`.",
-		[]*analysis.Analyzer{zerologlint.Analyzer},
-		nil,
-	).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/zerologlint/zerologlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/zerologlint/zerologlint.go
new file mode 100644
index 0000000000000000000000000000000000000000..6ca74020c82d873154da074f67ad035120c87569
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/zerologlint/zerologlint.go
@@ -0,0 +1,19 @@
+package zerologlint
+
+import (
+	"github.com/ykadowak/zerologlint"
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New() *goanalysis.Linter {
+	a := zerologlint.Analyzer
+
+	return goanalysis.NewLinter(
+		a.Name,
+		a.Doc,
+		[]*analysis.Analyzer{a},
+		nil,
+	).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go b/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go
index 93922f85a717c81b2d87717c3ed23fd00dea49de..7b748d8e9031790579042f2e085dceecd002e787 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go
@@ -33,20 +33,23 @@ func NewEnv(log logutils.Log) *Env {
 	}
 }
 
-func (e *Env) Discover(ctx context.Context) error {
+func (e Env) Discover(ctx context.Context) error {
 	startedAt := time.Now()
-	args := []string{"env", "-json"}
-	args = append(args, string(EnvGoCache), string(EnvGoRoot))
-	out, err := exec.CommandContext(ctx, "go", args...).Output()
+
+	//nolint:gosec // Everything is static here.
+	cmd := exec.CommandContext(ctx, "go", "env", "-json", string(EnvGoCache), string(EnvGoRoot))
+
+	out, err := cmd.Output()
 	if err != nil {
-		return fmt.Errorf("failed to run 'go env': %w", err)
+		return fmt.Errorf("failed to run '%s': %w", strings.Join(cmd.Args, " "), err)
 	}
 
 	if err = json.Unmarshal(out, &e.vars); err != nil {
-		return fmt.Errorf("failed to parse 'go %s' json: %w", strings.Join(args, " "), err)
+		return fmt.Errorf("failed to parse '%s' json: %w", strings.Join(cmd.Args, " "), err)
 	}
 
 	e.debugf("Read go env for %s: %#v", time.Since(startedAt), e.vars)
+
 	return nil
 }
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go
new file mode 100644
index 0000000000000000000000000000000000000000..160620338f8ccc8b508de52bfb19ba3dd13f961a
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go
@@ -0,0 +1,64 @@
+package lint
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/golangci/golangci-lint/internal/pkgcache"
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/exitcodes"
+	"github.com/golangci/golangci-lint/pkg/fsutils"
+	"github.com/golangci/golangci-lint/pkg/goanalysis/load"
+	"github.com/golangci/golangci-lint/pkg/lint/linter"
+	"github.com/golangci/golangci-lint/pkg/logutils"
+)
+
+type ContextBuilder struct {
+	cfg *config.Config
+
+	pkgLoader *PackageLoader
+
+	fileCache *fsutils.FileCache
+	pkgCache  *pkgcache.Cache
+
+	loadGuard *load.Guard
+}
+
+func NewContextBuilder(cfg *config.Config, pkgLoader *PackageLoader,
+	fileCache *fsutils.FileCache, pkgCache *pkgcache.Cache, loadGuard *load.Guard,
+) *ContextBuilder {
+	return &ContextBuilder{
+		cfg:       cfg,
+		pkgLoader: pkgLoader,
+		fileCache: fileCache,
+		pkgCache:  pkgCache,
+		loadGuard: loadGuard,
+	}
+}
+
+func (cl *ContextBuilder) Build(ctx context.Context, log logutils.Log, linters []*linter.Config) (*linter.Context, error) {
+	pkgs, deduplicatedPkgs, err := cl.pkgLoader.Load(ctx, linters)
+	if err != nil {
+		return nil, fmt.Errorf("failed to load packages: %w", err)
+	}
+
+	if len(deduplicatedPkgs) == 0 {
+		return nil, fmt.Errorf("%w: running `go mod tidy` may solve the problem", exitcodes.ErrNoGoFiles)
+	}
+
+	ret := &linter.Context{
+		Packages: deduplicatedPkgs,
+
+		// At least the `unused` linter works properly only on original (not deduplicated) packages,
+		// see https://github.com/golangci/golangci-lint/pull/585.
+		OriginalPackages: pkgs,
+
+		Cfg:       cl.cfg,
+		Log:       log,
+		FileCache: cl.fileCache,
+		PkgCache:  cl.pkgCache,
+		LoadGuard: cl.loadGuard,
+	}
+
+	return ret, nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go
index c911b5613dafe1be721f02407227bdd0fab861c0..57c51fa75e4cbb059f783bec97dd66b913393ea2 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go
@@ -1,7 +1,8 @@
 package linter
 
 import (
-	"golang.org/x/tools/go/analysis"
+	"fmt"
+
 	"golang.org/x/tools/go/packages"
 
 	"github.com/golangci/golangci-lint/pkg/config"
@@ -26,10 +27,19 @@ const (
 // LastLinter nolintlint must be last because it looks at the results of all the previous linters for unused nolint directives.
 const LastLinter = "nolintlint"
 
+type DeprecationLevel int
+
+const (
+	DeprecationNone DeprecationLevel = iota
+	DeprecationWarning
+	DeprecationError
+)
+
 type Deprecation struct {
 	Since       string
 	Message     string
 	Replacement string
+	Level       DeprecationLevel
 }
 
 type Config struct {
@@ -112,15 +122,24 @@ func (lc *Config) WithSince(version string) *Config {
 	return lc
 }
 
-func (lc *Config) Deprecated(message, version, replacement string) *Config {
+func (lc *Config) Deprecated(message, version, replacement string, level DeprecationLevel) *Config {
 	lc.Deprecation = &Deprecation{
 		Since:       version,
 		Message:     message,
 		Replacement: replacement,
+		Level:       level,
 	}
 	return lc
 }
 
+func (lc *Config) DeprecatedWarning(message, version, replacement string) *Config {
+	return lc.Deprecated(message, version, replacement, DeprecationWarning)
+}
+
+func (lc *Config) DeprecatedError(message, version, replacement string) *Config {
+	return lc.Deprecated(message, version, replacement, DeprecationError)
+}
+
 func (lc *Config) IsDeprecated() bool {
 	return lc.Deprecation != nil
 }
@@ -133,23 +152,27 @@ func (lc *Config) Name() string {
 	return lc.Linter.Name()
 }
 
-func (lc *Config) WithNoopFallback(cfg *config.Config) *Config {
-	if cfg != nil && config.IsGreaterThanOrEqualGo121(cfg.Run.Go) {
-		lc.Linter = &Noop{
-			name: lc.Linter.Name(),
-			desc: lc.Linter.Desc(),
-			run: func(pass *analysis.Pass) (any, error) {
-				return nil, nil
-			},
-		}
-
+func (lc *Config) WithNoopFallback(cfg *config.Config, cond func(cfg *config.Config) error) *Config {
+	if err := cond(cfg); err != nil {
+		lc.Linter = NewNoop(lc.Linter, err.Error())
 		lc.LoadMode = 0
+
 		return lc.WithLoadFiles()
 	}
 
 	return lc
 }
 
+func IsGoLowerThanGo122() func(cfg *config.Config) error {
+	return func(cfg *config.Config) error {
+		if cfg == nil || config.IsGoGreaterThanOrEqual(cfg.Run.Go, "1.22") {
+			return nil
+		}
+
+		return fmt.Errorf("this linter is disabled because the Go version (%s) of your project is lower than Go 1.22", cfg.Run.Go)
+	}
+}
+
 func NewConfig(linter Linter) *Config {
 	lc := &Config{
 		Linter: linter,
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go
index a9f9d7d7f2b3c7f91d5bd48199e0fcecaeb8c0f4..5c03630b26df7564513ab1ce11ba10178f3e258b 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go
@@ -8,7 +8,7 @@ import (
 	"github.com/golangci/golangci-lint/internal/pkgcache"
 	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/fsutils"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load"
+	"github.com/golangci/golangci-lint/pkg/goanalysis/load"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 )
 
@@ -22,7 +22,6 @@ type Context struct {
 
 	Cfg       *config.Config
 	FileCache *fsutils.FileCache
-	LineCache *fsutils.LineCache
 	Log       logutils.Log
 
 	PkgCache  *pkgcache.Cache
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go
index a65d6b927852c5b8537aa48981b362613b4921e8..088aa3d78b3f627786dfe08391aef2aa735d2542 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go
@@ -3,8 +3,7 @@ package linter
 import (
 	"context"
 
-	"golang.org/x/tools/go/analysis"
-
+	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
@@ -15,14 +14,47 @@ type Linter interface {
 }
 
 type Noop struct {
-	name string
-	desc string
-	run  func(pass *analysis.Pass) (any, error)
+	name   string
+	desc   string
+	reason string
+	level  DeprecationLevel
+}
+
+func NewNoop(l Linter, reason string) Noop {
+	return Noop{
+		name:   l.Name(),
+		desc:   l.Desc(),
+		reason: reason,
+	}
+}
+
+func NewNoopDeprecated(name string, cfg *config.Config, level DeprecationLevel) Noop {
+	noop := Noop{
+		name:   name,
+		desc:   "Deprecated",
+		reason: "This linter is fully inactivated: it will not produce any reports.",
+		level:  level,
+	}
+
+	if cfg.InternalCmdTest {
+		noop.reason = ""
+	}
+
+	return noop
 }
 
 func (n Noop) Run(_ context.Context, lintCtx *Context) ([]result.Issue, error) {
-	lintCtx.Log.Warnf("%s is disabled because of generics."+
-		" You can track the evolution of the generics support by following the https://github.com/golangci/golangci-lint/issues/2649.", n.name)
+	if n.reason == "" {
+		return nil, nil
+	}
+
+	switch n.level {
+	case DeprecationError:
+		lintCtx.Log.Errorf("%s: %s", n.name, n.reason)
+	default:
+		lintCtx.Log.Warnf("%s: %s", n.name, n.reason)
+	}
+
 	return nil, nil
 }
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go
new file mode 100644
index 0000000000000000000000000000000000000000..7b9d2a559babd6adae25d962cdcbf4617ddc0545
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go
@@ -0,0 +1,840 @@
+package lintersdb
+
+import (
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/golinters"
+	"github.com/golangci/golangci-lint/pkg/golinters/asasalint"
+	"github.com/golangci/golangci-lint/pkg/golinters/asciicheck"
+	"github.com/golangci/golangci-lint/pkg/golinters/bidichk"
+	"github.com/golangci/golangci-lint/pkg/golinters/bodyclose"
+	"github.com/golangci/golangci-lint/pkg/golinters/canonicalheader"
+	"github.com/golangci/golangci-lint/pkg/golinters/containedctx"
+	"github.com/golangci/golangci-lint/pkg/golinters/contextcheck"
+	"github.com/golangci/golangci-lint/pkg/golinters/copyloopvar"
+	"github.com/golangci/golangci-lint/pkg/golinters/cyclop"
+	"github.com/golangci/golangci-lint/pkg/golinters/decorder"
+	"github.com/golangci/golangci-lint/pkg/golinters/depguard"
+	"github.com/golangci/golangci-lint/pkg/golinters/dogsled"
+	"github.com/golangci/golangci-lint/pkg/golinters/dupl"
+	"github.com/golangci/golangci-lint/pkg/golinters/dupword"
+	"github.com/golangci/golangci-lint/pkg/golinters/durationcheck"
+	"github.com/golangci/golangci-lint/pkg/golinters/err113"
+	"github.com/golangci/golangci-lint/pkg/golinters/errcheck"
+	"github.com/golangci/golangci-lint/pkg/golinters/errchkjson"
+	"github.com/golangci/golangci-lint/pkg/golinters/errname"
+	"github.com/golangci/golangci-lint/pkg/golinters/errorlint"
+	"github.com/golangci/golangci-lint/pkg/golinters/execinquery"
+	"github.com/golangci/golangci-lint/pkg/golinters/exhaustive"
+	"github.com/golangci/golangci-lint/pkg/golinters/exhaustruct"
+	"github.com/golangci/golangci-lint/pkg/golinters/exportloopref"
+	"github.com/golangci/golangci-lint/pkg/golinters/fatcontext"
+	"github.com/golangci/golangci-lint/pkg/golinters/forbidigo"
+	"github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert"
+	"github.com/golangci/golangci-lint/pkg/golinters/funlen"
+	"github.com/golangci/golangci-lint/pkg/golinters/gci"
+	"github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter"
+	"github.com/golangci/golangci-lint/pkg/golinters/gocheckcompilerdirectives"
+	"github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals"
+	"github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits"
+	"github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype"
+	"github.com/golangci/golangci-lint/pkg/golinters/gocognit"
+	"github.com/golangci/golangci-lint/pkg/golinters/goconst"
+	"github.com/golangci/golangci-lint/pkg/golinters/gocritic"
+	"github.com/golangci/golangci-lint/pkg/golinters/gocyclo"
+	"github.com/golangci/golangci-lint/pkg/golinters/godot"
+	"github.com/golangci/golangci-lint/pkg/golinters/godox"
+	"github.com/golangci/golangci-lint/pkg/golinters/gofmt"
+	"github.com/golangci/golangci-lint/pkg/golinters/gofumpt"
+	"github.com/golangci/golangci-lint/pkg/golinters/goheader"
+	"github.com/golangci/golangci-lint/pkg/golinters/goimports"
+	"github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives"
+	"github.com/golangci/golangci-lint/pkg/golinters/gomodguard"
+	"github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname"
+	"github.com/golangci/golangci-lint/pkg/golinters/gosec"
+	"github.com/golangci/golangci-lint/pkg/golinters/gosimple"
+	"github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan"
+	"github.com/golangci/golangci-lint/pkg/golinters/govet"
+	"github.com/golangci/golangci-lint/pkg/golinters/grouper"
+	"github.com/golangci/golangci-lint/pkg/golinters/importas"
+	"github.com/golangci/golangci-lint/pkg/golinters/inamedparam"
+	"github.com/golangci/golangci-lint/pkg/golinters/ineffassign"
+	"github.com/golangci/golangci-lint/pkg/golinters/interfacebloat"
+	"github.com/golangci/golangci-lint/pkg/golinters/intrange"
+	"github.com/golangci/golangci-lint/pkg/golinters/ireturn"
+	"github.com/golangci/golangci-lint/pkg/golinters/lll"
+	"github.com/golangci/golangci-lint/pkg/golinters/loggercheck"
+	"github.com/golangci/golangci-lint/pkg/golinters/maintidx"
+	"github.com/golangci/golangci-lint/pkg/golinters/makezero"
+	"github.com/golangci/golangci-lint/pkg/golinters/mirror"
+	"github.com/golangci/golangci-lint/pkg/golinters/misspell"
+	"github.com/golangci/golangci-lint/pkg/golinters/mnd"
+	"github.com/golangci/golangci-lint/pkg/golinters/musttag"
+	"github.com/golangci/golangci-lint/pkg/golinters/nakedret"
+	"github.com/golangci/golangci-lint/pkg/golinters/nestif"
+	"github.com/golangci/golangci-lint/pkg/golinters/nilerr"
+	"github.com/golangci/golangci-lint/pkg/golinters/nilnil"
+	"github.com/golangci/golangci-lint/pkg/golinters/nlreturn"
+	"github.com/golangci/golangci-lint/pkg/golinters/noctx"
+	"github.com/golangci/golangci-lint/pkg/golinters/nolintlint"
+	"github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns"
+	"github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport"
+	"github.com/golangci/golangci-lint/pkg/golinters/paralleltest"
+	"github.com/golangci/golangci-lint/pkg/golinters/perfsprint"
+	"github.com/golangci/golangci-lint/pkg/golinters/prealloc"
+	"github.com/golangci/golangci-lint/pkg/golinters/predeclared"
+	"github.com/golangci/golangci-lint/pkg/golinters/promlinter"
+	"github.com/golangci/golangci-lint/pkg/golinters/protogetter"
+	"github.com/golangci/golangci-lint/pkg/golinters/reassign"
+	"github.com/golangci/golangci-lint/pkg/golinters/revive"
+	"github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck"
+	"github.com/golangci/golangci-lint/pkg/golinters/sloglint"
+	"github.com/golangci/golangci-lint/pkg/golinters/spancheck"
+	"github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck"
+	"github.com/golangci/golangci-lint/pkg/golinters/staticcheck"
+	"github.com/golangci/golangci-lint/pkg/golinters/stylecheck"
+	"github.com/golangci/golangci-lint/pkg/golinters/tagalign"
+	"github.com/golangci/golangci-lint/pkg/golinters/tagliatelle"
+	"github.com/golangci/golangci-lint/pkg/golinters/tenv"
+	"github.com/golangci/golangci-lint/pkg/golinters/testableexamples"
+	"github.com/golangci/golangci-lint/pkg/golinters/testifylint"
+	"github.com/golangci/golangci-lint/pkg/golinters/testpackage"
+	"github.com/golangci/golangci-lint/pkg/golinters/thelper"
+	"github.com/golangci/golangci-lint/pkg/golinters/tparallel"
+	"github.com/golangci/golangci-lint/pkg/golinters/unconvert"
+	"github.com/golangci/golangci-lint/pkg/golinters/unparam"
+	"github.com/golangci/golangci-lint/pkg/golinters/unused"
+	"github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars"
+	"github.com/golangci/golangci-lint/pkg/golinters/varnamelen"
+	"github.com/golangci/golangci-lint/pkg/golinters/wastedassign"
+	"github.com/golangci/golangci-lint/pkg/golinters/whitespace"
+	"github.com/golangci/golangci-lint/pkg/golinters/wrapcheck"
+	"github.com/golangci/golangci-lint/pkg/golinters/wsl"
+	"github.com/golangci/golangci-lint/pkg/golinters/zerologlint"
+	"github.com/golangci/golangci-lint/pkg/lint/linter"
+)
+
+// LinterBuilder builds the "internal" linters based on the configuration.
+type LinterBuilder struct{}
+
+// NewLinterBuilder creates a new LinterBuilder.
+func NewLinterBuilder() *LinterBuilder {
+	return &LinterBuilder{}
+}
+
+// Build loads all the "internal" linters.
+// The configuration is used for the linter settings.
+func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
+	if cfg == nil {
+		return nil, nil
+	}
+
+	const megacheckName = "megacheck"
+
+	// The linters are sorted in alphabetical order (case-insensitive).
+	// When a new linter is added, the version in `WithSince(...)` must be the next minor version of golangci-lint.
+	return []*linter.Config{
+		linter.NewConfig(asasalint.New(&cfg.LintersSettings.Asasalint)).
+			WithSince("1.47.0").
+			WithPresets(linter.PresetBugs).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/alingse/asasalint"),
+
+		linter.NewConfig(asciicheck.New()).
+			WithSince("v1.26.0").
+			WithPresets(linter.PresetBugs, linter.PresetStyle).
+			WithURL("https://github.com/tdakkota/asciicheck"),
+
+		linter.NewConfig(bidichk.New(&cfg.LintersSettings.BiDiChk)).
+			WithSince("1.43.0").
+			WithPresets(linter.PresetBugs).
+			WithURL("https://github.com/breml/bidichk"),
+
+		linter.NewConfig(bodyclose.New()).
+			WithSince("v1.18.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetPerformance, linter.PresetBugs).
+			WithURL("https://github.com/timakin/bodyclose"),
+
+		linter.NewConfig(canonicalheader.New()).
+			WithSince("v1.58.0").
+			WithPresets(linter.PresetStyle).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/lasiar/canonicalHeader"),
+
+		linter.NewConfig(containedctx.New()).
+			WithSince("1.44.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/sivchari/containedctx"),
+
+		linter.NewConfig(contextcheck.New()).
+			WithSince("v1.43.0").
+			WithPresets(linter.PresetBugs).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/kkHAIKE/contextcheck"),
+
+		linter.NewConfig(copyloopvar.New(&cfg.LintersSettings.CopyLoopVar)).
+			WithSince("v1.57.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/karamaru-alpha/copyloopvar").
+			WithNoopFallback(cfg, linter.IsGoLowerThanGo122()),
+
+		linter.NewConfig(cyclop.New(&cfg.LintersSettings.Cyclop)).
+			WithSince("v1.37.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetComplexity).
+			WithURL("https://github.com/bkielbasa/cyclop"),
+
+		linter.NewConfig(decorder.New(&cfg.LintersSettings.Decorder)).
+			WithSince("v1.44.0").
+			WithPresets(linter.PresetFormatting, linter.PresetStyle).
+			WithURL("https://gitlab.com/bosi/decorder"),
+
+		linter.NewConfig(linter.NewNoopDeprecated("deadcode", cfg, linter.DeprecationError)).
+			WithSince("v1.0.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetUnused).
+			WithURL("https://github.com/remyoudompheng/go-misc/tree/master/deadcode").
+			DeprecatedError("The owner seems to have abandoned the linter.", "v1.49.0", "unused"),
+
+		linter.NewConfig(depguard.New(&cfg.LintersSettings.Depguard)).
+			WithSince("v1.4.0").
+			WithPresets(linter.PresetStyle, linter.PresetImport, linter.PresetModule).
+			WithURL("https://github.com/OpenPeeDeeP/depguard"),
+
+		linter.NewConfig(dogsled.New(&cfg.LintersSettings.Dogsled)).
+			WithSince("v1.19.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/alexkohler/dogsled"),
+
+		linter.NewConfig(dupl.New(&cfg.LintersSettings.Dupl)).
+			WithSince("v1.0.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/mibk/dupl"),
+
+		linter.NewConfig(dupword.New(&cfg.LintersSettings.DupWord)).
+			WithSince("1.50.0").
+			WithPresets(linter.PresetComment).
+			WithURL("https://github.com/Abirdcfly/dupword"),
+
+		linter.NewConfig(durationcheck.New()).
+			WithSince("v1.37.0").
+			WithPresets(linter.PresetBugs).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/charithe/durationcheck"),
+
+		linter.NewConfig(errcheck.New(&cfg.LintersSettings.Errcheck)).
+			WithEnabledByDefault().
+			WithSince("v1.0.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetBugs, linter.PresetError).
+			WithURL("https://github.com/kisielk/errcheck"),
+
+		linter.NewConfig(errchkjson.New(&cfg.LintersSettings.ErrChkJSON)).
+			WithSince("1.44.0").
+			WithPresets(linter.PresetBugs).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/breml/errchkjson"),
+
+		linter.NewConfig(errname.New()).
+			WithSince("v1.42.0").
+			WithPresets(linter.PresetStyle).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/Antonboom/errname"),
+
+		linter.NewConfig(errorlint.New(&cfg.LintersSettings.ErrorLint)).
+			WithSince("v1.32.0").
+			WithPresets(linter.PresetBugs, linter.PresetError).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/polyfloyd/go-errorlint"),
+
+		linter.NewConfig(execinquery.New()).
+			WithSince("v1.46.0").
+			WithPresets(linter.PresetSQL).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/1uf3/execinquery").
+			DeprecatedWarning("The repository of the linter has been archived by the owner.", "v1.58.0", ""),
+
+		linter.NewConfig(exhaustive.New(&cfg.LintersSettings.Exhaustive)).
+			WithSince(" v1.28.0").
+			WithPresets(linter.PresetBugs).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/nishanths/exhaustive"),
+
+		linter.NewConfig(linter.NewNoopDeprecated("exhaustivestruct", cfg, linter.DeprecationError)).
+			WithSince("v1.32.0").
+			WithPresets(linter.PresetStyle, linter.PresetTest).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/mbilski/exhaustivestruct").
+			DeprecatedError("The repository of the linter has been deprecated by the owner.", "v1.46.0", "exhaustruct"),
+
+		linter.NewConfig(exhaustruct.New(&cfg.LintersSettings.Exhaustruct)).
+			WithSince("v1.46.0").
+			WithPresets(linter.PresetStyle, linter.PresetTest).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/GaijinEntertainment/go-exhaustruct"),
+
+		linter.NewConfig(exportloopref.New()).
+			WithSince("v1.28.0").
+			WithPresets(linter.PresetBugs).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/kyoh86/exportloopref"),
+
+		linter.NewConfig(forbidigo.New(&cfg.LintersSettings.Forbidigo)).
+			WithSince("v1.34.0").
+			WithPresets(linter.PresetStyle).
+			// Strictly speaking,
+			// the additional information is only needed when forbidigoCfg.AnalyzeTypes is chosen by the user.
+			// But we don't know that here in all cases (sometimes config is not loaded),
+			// so we have to assume that it is needed to be on the safe side.
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/ashanbrown/forbidigo"),
+
+		linter.NewConfig(forcetypeassert.New()).
+			WithSince("v1.38.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/gostaticanalysis/forcetypeassert"),
+
+		linter.NewConfig(fatcontext.New()).
+			WithSince("1.58.0").
+			WithPresets(linter.PresetPerformance).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/Crocmagnon/fatcontext"),
+
+		linter.NewConfig(funlen.New(&cfg.LintersSettings.Funlen)).
+			WithSince("v1.18.0").
+			WithPresets(linter.PresetComplexity).
+			WithURL("https://github.com/ultraware/funlen"),
+
+		linter.NewConfig(gci.New(&cfg.LintersSettings.Gci)).
+			WithSince("v1.30.0").
+			WithPresets(linter.PresetFormatting, linter.PresetImport).
+			WithAutoFix().
+			WithURL("https://github.com/daixiang0/gci"),
+
+		linter.NewConfig(ginkgolinter.New(&cfg.LintersSettings.GinkgoLinter)).
+			WithSince("v1.51.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/nunnatsa/ginkgolinter"),
+
+		linter.NewConfig(gocheckcompilerdirectives.New()).
+			WithSince("v1.51.0").
+			WithPresets(linter.PresetBugs).
+			WithURL("https://github.com/leighmcculloch/gocheckcompilerdirectives"),
+
+		linter.NewConfig(gochecknoglobals.New()).
+			WithSince("v1.12.0").
+			WithPresets(linter.PresetStyle).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/leighmcculloch/gochecknoglobals"),
+
+		linter.NewConfig(gochecknoinits.New()).
+			WithSince("v1.12.0").
+			WithPresets(linter.PresetStyle),
+
+		linter.NewConfig(gochecksumtype.New()).
+			WithSince("v1.55.0").
+			WithPresets(linter.PresetBugs).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/alecthomas/go-check-sumtype"),
+
+		linter.NewConfig(gocognit.New(&cfg.LintersSettings.Gocognit)).
+			WithSince("v1.20.0").
+			WithPresets(linter.PresetComplexity).
+			WithURL("https://github.com/uudashr/gocognit"),
+
+		linter.NewConfig(goconst.New(&cfg.LintersSettings.Goconst)).
+			WithSince("v1.0.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/jgautheron/goconst"),
+
+		linter.NewConfig(gocritic.New(&cfg.LintersSettings.Gocritic)).
+			WithSince("v1.12.0").
+			WithPresets(linter.PresetStyle, linter.PresetMetaLinter).
+			WithLoadForGoAnalysis().
+			WithAutoFix().
+			WithURL("https://github.com/go-critic/go-critic"),
+
+		linter.NewConfig(gocyclo.New(&cfg.LintersSettings.Gocyclo)).
+			WithSince("v1.0.0").
+			WithPresets(linter.PresetComplexity).
+			WithURL("https://github.com/fzipp/gocyclo"),
+
+		linter.NewConfig(godot.New(&cfg.LintersSettings.Godot)).
+			WithSince("v1.25.0").
+			WithPresets(linter.PresetStyle, linter.PresetComment).
+			WithAutoFix().
+			WithURL("https://github.com/tetafro/godot"),
+
+		linter.NewConfig(godox.New(&cfg.LintersSettings.Godox)).
+			WithSince("v1.19.0").
+			WithPresets(linter.PresetStyle, linter.PresetComment).
+			WithURL("https://github.com/matoous/godox"),
+
+		linter.NewConfig(err113.New()).
+			WithSince("v1.26.0").
+			WithPresets(linter.PresetStyle, linter.PresetError).
+			WithLoadForGoAnalysis().
+			WithAlternativeNames("goerr113").
+			WithURL("https://github.com/Djarvur/go-err113"),
+
+		linter.NewConfig(gofmt.New(&cfg.LintersSettings.Gofmt)).
+			WithSince("v1.0.0").
+			WithPresets(linter.PresetFormatting).
+			WithAutoFix().
+			WithURL("https://pkg.go.dev/cmd/gofmt"),
+
+		linter.NewConfig(gofumpt.New(&cfg.LintersSettings.Gofumpt)).
+			WithSince("v1.28.0").
+			WithPresets(linter.PresetFormatting).
+			WithAutoFix().
+			WithURL("https://github.com/mvdan/gofumpt"),
+
+		linter.NewConfig(goheader.New(&cfg.LintersSettings.Goheader)).
+			WithSince("v1.28.0").
+			WithPresets(linter.PresetStyle).
+			WithAutoFix().
+			WithURL("https://github.com/denis-tingaikin/go-header"),
+
+		linter.NewConfig(goimports.New(&cfg.LintersSettings.Goimports)).
+			WithSince("v1.20.0").
+			WithPresets(linter.PresetFormatting, linter.PresetImport).
+			WithAutoFix().
+			WithURL("https://pkg.go.dev/golang.org/x/tools/cmd/goimports"),
+
+		linter.NewConfig(linter.NewNoopDeprecated("golint", cfg, linter.DeprecationError)).
+			WithSince("v1.0.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/golang/lint").
+			DeprecatedError("The repository of the linter has been archived by the owner.", "v1.41.0", "revive"),
+
+		linter.NewConfig(mnd.New(&cfg.LintersSettings.Mnd)).
+			WithSince("v1.22.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/tommy-muehle/go-mnd"),
+
+		linter.NewConfig(mnd.NewGoMND(&cfg.LintersSettings.Gomnd)).
+			WithSince("v1.22.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/tommy-muehle/go-mnd").
+			DeprecatedWarning("The linter has been renamed.", "v1.58.0", "mnd"),
+
+		linter.NewConfig(gomoddirectives.New(&cfg.LintersSettings.GoModDirectives)).
+			WithSince("v1.39.0").
+			WithPresets(linter.PresetStyle, linter.PresetModule).
+			WithURL("https://github.com/ldez/gomoddirectives"),
+
+		linter.NewConfig(gomodguard.New(&cfg.LintersSettings.Gomodguard)).
+			WithSince("v1.25.0").
+			WithPresets(linter.PresetStyle, linter.PresetImport, linter.PresetModule).
+			WithURL("https://github.com/ryancurrah/gomodguard"),
+
+		linter.NewConfig(goprintffuncname.New()).
+			WithSince("v1.23.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/jirfag/go-printf-func-name"),
+
+		linter.NewConfig(gosec.New(&cfg.LintersSettings.Gosec)).
+			WithSince("v1.0.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetBugs).
+			WithURL("https://github.com/securego/gosec").
+			WithAlternativeNames("gas"),
+
+		linter.NewConfig(gosimple.New(&cfg.LintersSettings.Gosimple)).
+			WithEnabledByDefault().
+			WithSince("v1.20.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetStyle).
+			WithAlternativeNames(megacheckName).
+			WithURL("https://github.com/dominikh/go-tools/tree/master/simple"),
+
+		linter.NewConfig(gosmopolitan.New(&cfg.LintersSettings.Gosmopolitan)).
+			WithSince("v1.53.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetBugs).
+			WithURL("https://github.com/xen0n/gosmopolitan"),
+
+		linter.NewConfig(govet.New(&cfg.LintersSettings.Govet)).
+			WithEnabledByDefault().
+			WithSince("v1.0.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetBugs, linter.PresetMetaLinter).
+			WithAlternativeNames("vet", "vetshadow").
+			WithURL("https://pkg.go.dev/cmd/vet"),
+
+		linter.NewConfig(grouper.New(&cfg.LintersSettings.Grouper)).
+			WithSince("v1.44.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/leonklingele/grouper"),
+
+		linter.NewConfig(linter.NewNoopDeprecated("ifshort", cfg, linter.DeprecationError)).
+			WithSince("v1.36.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/esimonov/ifshort").
+			DeprecatedError("The repository of the linter has been deprecated by the owner.", "v1.48.0", ""),
+
+		linter.NewConfig(importas.New(&cfg.LintersSettings.ImportAs)).
+			WithSince("v1.38.0").
+			WithPresets(linter.PresetStyle).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/julz/importas"),
+
+		linter.NewConfig(inamedparam.New(&cfg.LintersSettings.Inamedparam)).
+			WithSince("v1.55.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/macabu/inamedparam"),
+
+		linter.NewConfig(ineffassign.New()).
+			WithEnabledByDefault().
+			WithSince("v1.0.0").
+			WithPresets(linter.PresetUnused).
+			WithURL("https://github.com/gordonklaus/ineffassign"),
+
+		linter.NewConfig(interfacebloat.New(&cfg.LintersSettings.InterfaceBloat)).
+			WithSince("v1.49.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/sashamelentyev/interfacebloat"),
+
+		linter.NewConfig(linter.NewNoopDeprecated("interfacer", cfg, linter.DeprecationError)).
+			WithSince("v1.0.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/mvdan/interfacer").
+			DeprecatedError("The repository of the linter has been archived by the owner.", "v1.38.0", ""),
+
+		linter.NewConfig(intrange.New()).
+			WithSince("v1.57.0").
+			WithURL("https://github.com/ckaznocha/intrange").
+			WithNoopFallback(cfg, linter.IsGoLowerThanGo122()),
+
+		linter.NewConfig(ireturn.New(&cfg.LintersSettings.Ireturn)).
+			WithSince("v1.43.0").
+			WithPresets(linter.PresetStyle).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/butuzov/ireturn"),
+
+		linter.NewConfig(lll.New(&cfg.LintersSettings.Lll)).
+			WithSince("v1.8.0").
+			WithPresets(linter.PresetStyle),
+
+		linter.NewConfig(loggercheck.New(&cfg.LintersSettings.LoggerCheck)).
+			WithSince("v1.49.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetStyle, linter.PresetBugs).
+			WithAlternativeNames("logrlint").
+			WithURL("https://github.com/timonwong/loggercheck"),
+
+		linter.NewConfig(maintidx.New(&cfg.LintersSettings.MaintIdx)).
+			WithSince("v1.44.0").
+			WithPresets(linter.PresetComplexity).
+			WithURL("https://github.com/yagipy/maintidx"),
+
+		linter.NewConfig(makezero.New(&cfg.LintersSettings.Makezero)).
+			WithSince("v1.34.0").
+			WithPresets(linter.PresetStyle, linter.PresetBugs).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/ashanbrown/makezero"),
+
+		linter.NewConfig(linter.NewNoopDeprecated("maligned", cfg, linter.DeprecationError)).
+			WithSince("v1.0.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetPerformance).
+			WithURL("https://github.com/mdempsky/maligned").
+			DeprecatedError("The repository of the linter has been archived by the owner.", "v1.38.0", "govet 'fieldalignment'"),
+
+		linter.NewConfig(mirror.New()).
+			WithSince("v1.53.0").
+			WithPresets(linter.PresetStyle).
+			WithLoadForGoAnalysis().
+			WithAutoFix().
+			WithURL("https://github.com/butuzov/mirror"),
+
+		linter.NewConfig(misspell.New(&cfg.LintersSettings.Misspell)).
+			WithSince("v1.8.0").
+			WithPresets(linter.PresetStyle, linter.PresetComment).
+			WithAutoFix().
+			WithURL("https://github.com/client9/misspell"),
+
+		linter.NewConfig(musttag.New(&cfg.LintersSettings.MustTag)).
+			WithSince("v1.51.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetStyle, linter.PresetBugs).
+			WithURL("https://github.com/go-simpler/musttag"),
+
+		linter.NewConfig(nakedret.New(&cfg.LintersSettings.Nakedret)).
+			WithSince("v1.19.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/alexkohler/nakedret"),
+
+		linter.NewConfig(nestif.New(&cfg.LintersSettings.Nestif)).
+			WithSince("v1.25.0").
+			WithPresets(linter.PresetComplexity).
+			WithURL("https://github.com/nakabonne/nestif"),
+
+		linter.NewConfig(nilerr.New()).
+			WithSince("v1.38.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetBugs).
+			WithURL("https://github.com/gostaticanalysis/nilerr"),
+
+		linter.NewConfig(nilnil.New(&cfg.LintersSettings.NilNil)).
+			WithSince("v1.43.0").
+			WithPresets(linter.PresetStyle).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/Antonboom/nilnil"),
+
+		linter.NewConfig(nlreturn.New(&cfg.LintersSettings.Nlreturn)).
+			WithSince("v1.30.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/ssgreg/nlreturn"),
+
+		linter.NewConfig(noctx.New()).
+			WithSince("v1.28.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetPerformance, linter.PresetBugs).
+			WithURL("https://github.com/sonatard/noctx"),
+
+		linter.NewConfig(nonamedreturns.New(&cfg.LintersSettings.NoNamedReturns)).
+			WithSince("v1.46.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/firefart/nonamedreturns"),
+
+		linter.NewConfig(linter.NewNoopDeprecated("nosnakecase", cfg, linter.DeprecationError)).
+			WithSince("v1.47.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/sivchari/nosnakecase").
+			DeprecatedError("The repository of the linter has been deprecated by the owner.", "v1.48.1", "revive 'var-naming'"),
+
+		linter.NewConfig(nosprintfhostport.New()).
+			WithSince("v1.46.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/stbenjam/no-sprintf-host-port"),
+
+		linter.NewConfig(paralleltest.New(&cfg.LintersSettings.ParallelTest)).
+			WithSince("v1.33.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetStyle, linter.PresetTest).
+			WithURL("https://github.com/kunwardeep/paralleltest"),
+
+		linter.NewConfig(perfsprint.New(&cfg.LintersSettings.PerfSprint)).
+			WithSince("v1.55.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetPerformance).
+			WithURL("https://github.com/catenacyber/perfsprint"),
+
+		linter.NewConfig(prealloc.New(&cfg.LintersSettings.Prealloc)).
+			WithSince("v1.19.0").
+			WithPresets(linter.PresetPerformance).
+			WithURL("https://github.com/alexkohler/prealloc"),
+
+		linter.NewConfig(predeclared.New(&cfg.LintersSettings.Predeclared)).
+			WithSince("v1.35.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/nishanths/predeclared"),
+
+		linter.NewConfig(promlinter.New(&cfg.LintersSettings.Promlinter)).
+			WithSince("v1.40.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/yeya24/promlinter"),
+
+		linter.NewConfig(protogetter.New(&cfg.LintersSettings.ProtoGetter)).
+			WithSince("v1.55.0").
+			WithPresets(linter.PresetBugs).
+			WithLoadForGoAnalysis().
+			WithAutoFix().
+			WithURL("https://github.com/ghostiam/protogetter"),
+
+		linter.NewConfig(reassign.New(&cfg.LintersSettings.Reassign)).
+			WithSince("1.49.0").
+			WithPresets(linter.PresetBugs).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/curioswitch/go-reassign"),
+
+		linter.NewConfig(revive.New(&cfg.LintersSettings.Revive)).
+			WithSince("v1.37.0").
+			WithPresets(linter.PresetStyle, linter.PresetMetaLinter).
+			ConsiderSlow().
+			WithURL("https://github.com/mgechev/revive"),
+
+		linter.NewConfig(rowserrcheck.New(&cfg.LintersSettings.RowsErrCheck)).
+			WithSince("v1.23.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetBugs, linter.PresetSQL).
+			WithURL("https://github.com/jingyugao/rowserrcheck"),
+
+		linter.NewConfig(sloglint.New(&cfg.LintersSettings.SlogLint)).
+			WithSince("v1.55.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetStyle, linter.PresetFormatting).
+			WithURL("https://github.com/go-simpler/sloglint"),
+
+		linter.NewConfig(linter.NewNoopDeprecated("scopelint", cfg, linter.DeprecationError)).
+			WithSince("v1.12.0").
+			WithPresets(linter.PresetBugs).
+			WithURL("https://github.com/kyoh86/scopelint").
+			DeprecatedError("The repository of the linter has been deprecated by the owner.", "v1.39.0", "exportloopref"),
+
+		linter.NewConfig(sqlclosecheck.New()).
+			WithSince("v1.28.0").
+			WithPresets(linter.PresetBugs, linter.PresetSQL).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/ryanrolds/sqlclosecheck"),
+
+		linter.NewConfig(spancheck.New(&cfg.LintersSettings.Spancheck)).
+			WithSince("v1.56.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetBugs).
+			WithURL("https://github.com/jjti/go-spancheck"),
+
+		linter.NewConfig(staticcheck.New(&cfg.LintersSettings.Staticcheck)).
+			WithEnabledByDefault().
+			WithSince("v1.0.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetBugs, linter.PresetMetaLinter).
+			WithAlternativeNames(megacheckName).
+			WithURL("https://staticcheck.io/"),
+
+		linter.NewConfig(linter.NewNoopDeprecated("structcheck", cfg, linter.DeprecationError)).
+			WithSince("v1.0.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetUnused).
+			WithURL("https://github.com/opennota/check").
+			DeprecatedError("The owner seems to have abandoned the linter.", "v1.49.0", "unused"),
+
+		linter.NewConfig(stylecheck.New(&cfg.LintersSettings.Stylecheck)).
+			WithSince("v1.20.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/dominikh/go-tools/tree/master/stylecheck"),
+
+		linter.NewConfig(tagalign.New(&cfg.LintersSettings.TagAlign)).
+			WithSince("v1.53.0").
+			WithPresets(linter.PresetStyle, linter.PresetFormatting).
+			WithAutoFix().
+			WithURL("https://github.com/4meepo/tagalign"),
+
+		linter.NewConfig(tagliatelle.New(&cfg.LintersSettings.Tagliatelle)).
+			WithSince("v1.40.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/ldez/tagliatelle"),
+
+		linter.NewConfig(tenv.New(&cfg.LintersSettings.Tenv)).
+			WithSince("v1.43.0").
+			WithPresets(linter.PresetStyle).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/sivchari/tenv"),
+
+		linter.NewConfig(testableexamples.New()).
+			WithSince("v1.50.0").
+			WithPresets(linter.PresetTest).
+			WithURL("https://github.com/maratori/testableexamples"),
+
+		linter.NewConfig(testifylint.New(&cfg.LintersSettings.Testifylint)).
+			WithSince("v1.55.0").
+			WithPresets(linter.PresetTest, linter.PresetBugs).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/Antonboom/testifylint"),
+
+		linter.NewConfig(testpackage.New(&cfg.LintersSettings.Testpackage)).
+			WithSince("v1.25.0").
+			WithPresets(linter.PresetStyle, linter.PresetTest).
+			WithURL("https://github.com/maratori/testpackage"),
+
+		linter.NewConfig(thelper.New(&cfg.LintersSettings.Thelper)).
+			WithSince("v1.34.0").
+			WithPresets(linter.PresetStyle).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/kulti/thelper"),
+
+		linter.NewConfig(tparallel.New()).
+			WithSince("v1.32.0").
+			WithPresets(linter.PresetStyle, linter.PresetTest).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/moricho/tparallel"),
+
+		linter.NewConfig(golinters.NewTypecheck()).
+			WithInternal().
+			WithEnabledByDefault().
+			WithSince("v1.3.0"),
+
+		linter.NewConfig(unconvert.New(&cfg.LintersSettings.Unconvert)).
+			WithSince("v1.0.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/mdempsky/unconvert"),
+
+		linter.NewConfig(unparam.New(&cfg.LintersSettings.Unparam)).
+			WithSince("v1.9.0").
+			WithPresets(linter.PresetUnused).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/mvdan/unparam"),
+
+		linter.NewConfig(unused.New(&cfg.LintersSettings.Unused, &cfg.LintersSettings.Staticcheck)).
+			WithEnabledByDefault().
+			WithSince("v1.20.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetUnused).
+			WithAlternativeNames(megacheckName).
+			ConsiderSlow().
+			WithChangeTypes().
+			WithURL("https://github.com/dominikh/go-tools/tree/master/unused"),
+
+		linter.NewConfig(usestdlibvars.New(&cfg.LintersSettings.UseStdlibVars)).
+			WithSince("v1.48.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/sashamelentyev/usestdlibvars"),
+
+		linter.NewConfig(linter.NewNoopDeprecated("varcheck", cfg, linter.DeprecationError)).
+			WithSince("v1.0.0").
+			WithLoadForGoAnalysis().
+			WithPresets(linter.PresetUnused).
+			WithURL("https://github.com/opennota/check").
+			DeprecatedError("The owner seems to have abandoned the linter.", "v1.49.0", "unused"),
+
+		linter.NewConfig(varnamelen.New(&cfg.LintersSettings.Varnamelen)).
+			WithSince("v1.43.0").
+			WithPresets(linter.PresetStyle).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/blizzy78/varnamelen"),
+
+		linter.NewConfig(wastedassign.New()).
+			WithSince("v1.38.0").
+			WithPresets(linter.PresetStyle).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/sanposhiho/wastedassign"),
+
+		linter.NewConfig(whitespace.New(&cfg.LintersSettings.Whitespace)).
+			WithSince("v1.19.0").
+			WithPresets(linter.PresetStyle).
+			WithAutoFix().
+			WithURL("https://github.com/ultraware/whitespace"),
+
+		linter.NewConfig(wrapcheck.New(&cfg.LintersSettings.Wrapcheck)).
+			WithSince("v1.32.0").
+			WithPresets(linter.PresetStyle, linter.PresetError).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/tomarrell/wrapcheck"),
+
+		linter.NewConfig(wsl.New(&cfg.LintersSettings.WSL)).
+			WithSince("v1.20.0").
+			WithPresets(linter.PresetStyle).
+			WithURL("https://github.com/bombsimon/wsl"),
+
+		linter.NewConfig(zerologlint.New()).
+			WithSince("v1.53.0").
+			WithPresets(linter.PresetBugs).
+			WithLoadForGoAnalysis().
+			WithURL("https://github.com/ykadowak/zerologlint"),
+
+		// nolintlint must be last because it looks at the results of all the previous linters for unused nolint directives
+		linter.NewConfig(nolintlint.New(&cfg.LintersSettings.NoLintLint)).
+			WithSince("v1.26.0").
+			WithPresets(linter.PresetStyle).
+			WithAutoFix().
+			WithURL("https://github.com/golangci/golangci-lint/blob/master/pkg/golinters/nolintlint/README.md"),
+	}, nil
+}
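
Each entry in the table above uses the same fluent linter.Config builder. As a distilled illustration only (the linter name, constructor, and settings field below are hypothetical; the builder methods and the WithSince convention are the ones that appear in this file), a registration has roughly this shape:

	// Hypothetical registration sketch; "examplelint" does not exist in this change.
	linter.NewConfig(examplelint.New(&cfg.LintersSettings.ExampleLint)).
		WithSince("v1.59.0").           // by convention, the next minor golangci-lint version
		WithPresets(linter.PresetBugs). // preset(s) the linter is grouped under
		WithLoadForGoAnalysis().        // the analyzer needs packages loaded with type information
		WithAutoFix().                  // the linter can rewrite the offending code
		WithURL("https://example.com/examplelint"),
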
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_plugin_go.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_plugin_go.go
new file mode 100644
index 0000000000000000000000000000000000000000..c6dbaf79307d086bf7f1b5e10f64c62042ed3e8c
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_plugin_go.go
@@ -0,0 +1,138 @@
+package lintersdb
+
+import (
+	"errors"
+	"fmt"
+	"path/filepath"
+	"plugin"
+
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/lint/linter"
+	"github.com/golangci/golangci-lint/pkg/logutils"
+)
+
+const goPluginType = "goplugin"
+
+type AnalyzerPlugin interface {
+	GetAnalyzers() []*analysis.Analyzer
+}
+
+// PluginGoBuilder builds the custom linters (Go plugin) based on the configuration.
+type PluginGoBuilder struct {
+	log logutils.Log
+}
+
+// NewPluginGoBuilder creates a new PluginGoBuilder.
+func NewPluginGoBuilder(log logutils.Log) *PluginGoBuilder {
+	return &PluginGoBuilder{log: log}
+}
+
+// Build loads custom linters that are specified in the golangci-lint config file.
+func (b *PluginGoBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
+	if cfg == nil || b.log == nil {
+		return nil, nil
+	}
+
+	var linters []*linter.Config
+
+	for name, settings := range cfg.LintersSettings.Custom {
+		if settings.Type != goPluginType && settings.Type != "" {
+			continue
+		}
+
+		settings := settings
+
+		lc, err := b.loadConfig(cfg, name, &settings)
+		if err != nil {
+			return nil, fmt.Errorf("unable to load custom analyzer %q: %s, %w", name, settings.Path, err)
+		}
+		linters = append(linters, lc)
+	}
+
+	return linters, nil
+}
+
+// loadConfig loads the configuration of private linters.
+// Private linters are dynamically loaded from .so plugin files.
+func (b *PluginGoBuilder) loadConfig(cfg *config.Config, name string, settings *config.CustomLinterSettings) (*linter.Config, error) {
+	analyzers, err := b.getAnalyzerPlugin(cfg, settings.Path, settings.Settings)
+	if err != nil {
+		return nil, err
+	}
+
+	b.log.Infof("Loaded %s: %s", settings.Path, name)
+
+	customLinter := goanalysis.NewLinter(name, settings.Description, analyzers, nil).
+		WithLoadMode(goanalysis.LoadModeTypesInfo)
+
+	linterConfig := linter.NewConfig(customLinter).
+		WithEnabledByDefault().
+		WithLoadForGoAnalysis().
+		WithURL(settings.OriginalURL)
+
+	return linterConfig, nil
+}
+
+// getAnalyzerPlugin loads a private linter as specified in the config file,
+// loads the plugin from a .so file,
+// and returns the 'AnalyzerPlugin' interface implemented by the private plugin.
+// An error is returned if the private linter cannot be loaded
+// or the linter does not implement the AnalyzerPlugin interface.
+func (b *PluginGoBuilder) getAnalyzerPlugin(cfg *config.Config, path string, settings any) ([]*analysis.Analyzer, error) {
+	if !filepath.IsAbs(path) {
+		// resolve non-absolute paths relative to config file's directory
+		path = filepath.Join(cfg.GetConfigDir(), path)
+	}
+
+	plug, err := plugin.Open(path)
+	if err != nil {
+		return nil, err
+	}
+
+	analyzers, err := b.lookupPlugin(plug, settings)
+	if err != nil {
+		return nil, fmt.Errorf("lookup plugin %s: %w", path, err)
+	}
+
+	return analyzers, nil
+}
+
+func (b *PluginGoBuilder) lookupPlugin(plug *plugin.Plugin, settings any) ([]*analysis.Analyzer, error) {
+	symbol, err := plug.Lookup("New")
+	if err != nil {
+		analyzers, errP := b.lookupAnalyzerPlugin(plug)
+		if errP != nil {
+			return nil, errors.Join(err, errP)
+		}
+
+		return analyzers, nil
+	}
+
+	// A named function type cannot be used here; the type assertion must use the explicit signature.
+	constructor, ok := symbol.(func(any) ([]*analysis.Analyzer, error))
+	if !ok {
+		return nil, fmt.Errorf("plugin does not abide by 'New' function: %T", symbol)
+	}
+
+	return constructor(settings)
+}
+
+func (b *PluginGoBuilder) lookupAnalyzerPlugin(plug *plugin.Plugin) ([]*analysis.Analyzer, error) {
+	symbol, err := plug.Lookup("AnalyzerPlugin")
+	if err != nil {
+		return nil, err
+	}
+
+	b.log.Warnf("plugin: 'AnalyzerPlugin' plugins are deprecated, please use the new plugin signature: " +
+		"https://golangci-lint.run/plugins/go-plugins#create-a-plugin")
+
+	analyzerPlugin, ok := symbol.(AnalyzerPlugin)
+	if !ok {
+		return nil, fmt.Errorf("plugin does not abide by 'AnalyzerPlugin' interface: %T", symbol)
+	}
+
+	return analyzerPlugin.GetAnalyzers(), nil
+}
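
For reference, the `New` symbol that lookupPlugin asserts above must have exactly the signature func(any) ([]*analysis.Analyzer, error). A minimal sketch of the plugin side, with a hypothetical placeholder analyzer, would look roughly like this (built with `go build -buildmode=plugin -o example.so .`):

	// Hypothetical Go-plugin linter; only the New signature is dictated by the loader above.
	package main

	import "golang.org/x/tools/go/analysis"

	// exampleAnalyzer is a placeholder; a real plugin would implement a useful Run function.
	var exampleAnalyzer = &analysis.Analyzer{
		Name: "examplelint",
		Doc:  "placeholder analyzer for this sketch",
		Run: func(pass *analysis.Pass) (any, error) {
			return nil, nil
		},
	}

	// New receives the `settings` value configured for the custom linter
	// and returns the analyzers to run.
	func New(settings any) ([]*analysis.Analyzer, error) {
		_ = settings // a real plugin would decode its settings here
		return []*analysis.Analyzer{exampleAnalyzer}, nil
	}
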
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_plugin_module.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_plugin_module.go
new file mode 100644
index 0000000000000000000000000000000000000000..60fb58d8ce3a462073b9c6d7e302e2d0d3c74aa9
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_plugin_module.go
@@ -0,0 +1,85 @@
+package lintersdb
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/golangci/plugin-module-register/register"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
+	"github.com/golangci/golangci-lint/pkg/lint/linter"
+	"github.com/golangci/golangci-lint/pkg/logutils"
+)
+
+const modulePluginType = "module"
+
+// PluginModuleBuilder builds the custom linters (module plugin) based on the configuration.
+type PluginModuleBuilder struct {
+	log logutils.Log
+}
+
+// NewPluginModuleBuilder creates a new PluginModuleBuilder.
+func NewPluginModuleBuilder(log logutils.Log) *PluginModuleBuilder {
+	return &PluginModuleBuilder{log: log}
+}
+
+// Build loads custom linters that are specified in the golangci-lint config file.
+func (b *PluginModuleBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
+	if cfg == nil || b.log == nil {
+		return nil, nil
+	}
+
+	var linters []*linter.Config
+
+	for name, settings := range cfg.LintersSettings.Custom {
+		if settings.Type != modulePluginType {
+			continue
+		}
+
+		b.log.Infof("Loaded %s: %s", settings.Path, name)
+
+		newPlugin, err := register.GetPlugin(name)
+		if err != nil {
+			return nil, fmt.Errorf("plugin(%s): %w", name, err)
+		}
+
+		p, err := newPlugin(settings.Settings)
+		if err != nil {
+			return nil, fmt.Errorf("plugin(%s): newPlugin %w", name, err)
+		}
+
+		analyzers, err := p.BuildAnalyzers()
+		if err != nil {
+			return nil, fmt.Errorf("plugin(%s): BuildAnalyzers %w", name, err)
+		}
+
+		customLinter := goanalysis.NewLinter(name, settings.Description, analyzers, nil)
+
+		switch strings.ToLower(p.GetLoadMode()) {
+		case register.LoadModeSyntax:
+			customLinter = customLinter.WithLoadMode(goanalysis.LoadModeSyntax)
+		case register.LoadModeTypesInfo:
+			customLinter = customLinter.WithLoadMode(goanalysis.LoadModeTypesInfo)
+		default:
+			customLinter = customLinter.WithLoadMode(goanalysis.LoadModeTypesInfo)
+		}
+
+		lc := linter.NewConfig(customLinter).
+			WithEnabledByDefault().
+			WithURL(settings.OriginalURL)
+
+		switch strings.ToLower(p.GetLoadMode()) {
+		case register.LoadModeSyntax:
+			// noop
+		case register.LoadModeTypesInfo:
+			lc = lc.WithLoadForGoAnalysis()
+		default:
+			lc = lc.WithLoadForGoAnalysis()
+		}
+
+		linters = append(linters, lc)
+	}
+
+	return linters, nil
+}
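
On the plugin side, a "module" custom linter is compiled into golangci-lint itself and exposes a constructor that the loop above retrieves via register.GetPlugin. The sketch below is an assumption about the plugin-author side of github.com/golangci/plugin-module-register: only GetPlugin, BuildAnalyzers, GetLoadMode, and the load-mode constants appear in the code above, while register.Plugin, the register.LinterPlugin interface, and the package/linter name here are assumed for illustration:

	// Hypothetical module plugin; the name passed to register.Plugin must match
	// the key under the custom linters settings with type "module".
	package examplelint

	import (
		"golang.org/x/tools/go/analysis"

		"github.com/golangci/plugin-module-register/register"
	)

	func init() {
		register.Plugin("examplelint", New)
	}

	type plugin struct{}

	func New(settings any) (register.LinterPlugin, error) {
		_ = settings // a real plugin would decode its settings here
		return &plugin{}, nil
	}

	func (*plugin) BuildAnalyzers() ([]*analysis.Analyzer, error) {
		return []*analysis.Analyzer{{
			Name: "examplelint",
			Doc:  "placeholder analyzer for this sketch",
			Run:  func(pass *analysis.Pass) (any, error) { return nil, nil },
		}}, nil
	}

	func (*plugin) GetLoadMode() string {
		// LoadModeSyntax skips type information; LoadModeTypesInfo (the fallback above) requests it.
		return register.LoadModeTypesInfo
	}
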
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/custom_linters.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/custom_linters.go
deleted file mode 100644
index d0eaa7905fb000f43330d88706e961749e61d532..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/custom_linters.go
+++ /dev/null
@@ -1,131 +0,0 @@
-package lintersdb
-
-import (
-	"fmt"
-	"os"
-	"path/filepath"
-	"plugin"
-
-	"github.com/hashicorp/go-multierror"
-	"github.com/spf13/viper"
-	"golang.org/x/tools/go/analysis"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-)
-
-type AnalyzerPlugin interface {
-	GetAnalyzers() []*analysis.Analyzer
-}
-
-// getCustomLinterConfigs loads private linters that are specified in the golangci config file.
-func (m *Manager) getCustomLinterConfigs() []*linter.Config {
-	if m.cfg == nil || m.log == nil {
-		return nil
-	}
-
-	var linters []*linter.Config
-
-	for name, settings := range m.cfg.LintersSettings.Custom {
-		lc, err := m.loadCustomLinterConfig(name, settings)
-		if err != nil {
-			m.log.Errorf("Unable to load custom analyzer %s:%s, %v", name, settings.Path, err)
-		} else {
-			linters = append(linters, lc)
-		}
-	}
-
-	return linters
-}
-
-// loadCustomLinterConfig loads the configuration of private linters.
-// Private linters are dynamically loaded from .so plugin files.
-func (m *Manager) loadCustomLinterConfig(name string, settings config.CustomLinterSettings) (*linter.Config, error) {
-	analyzers, err := m.getAnalyzerPlugin(settings.Path, settings.Settings)
-	if err != nil {
-		return nil, err
-	}
-
-	m.log.Infof("Loaded %s: %s", settings.Path, name)
-
-	customLinter := goanalysis.NewLinter(name, settings.Description, analyzers, nil).
-		WithLoadMode(goanalysis.LoadModeTypesInfo)
-
-	linterConfig := linter.NewConfig(customLinter).
-		WithEnabledByDefault().
-		WithLoadForGoAnalysis().
-		WithURL(settings.OriginalURL)
-
-	return linterConfig, nil
-}
-
-// getAnalyzerPlugin loads a private linter as specified in the config file,
-// loads the plugin from a .so file,
-// and returns the 'AnalyzerPlugin' interface implemented by the private plugin.
-// An error is returned if the private linter cannot be loaded
-// or the linter does not implement the AnalyzerPlugin interface.
-func (m *Manager) getAnalyzerPlugin(path string, settings any) ([]*analysis.Analyzer, error) {
-	if !filepath.IsAbs(path) {
-		// resolve non-absolute paths relative to config file's directory
-		configFilePath := viper.ConfigFileUsed()
-		absConfigFilePath, err := filepath.Abs(configFilePath)
-		if err != nil {
-			return nil, fmt.Errorf("could not get absolute representation of config file path %q: %v", configFilePath, err)
-		}
-		path = filepath.Join(filepath.Dir(absConfigFilePath), path)
-	}
-
-	plug, err := plugin.Open(path)
-	if err != nil {
-		return nil, err
-	}
-
-	analyzers, err := m.lookupPlugin(plug, settings)
-	if err != nil {
-		return nil, fmt.Errorf("lookup plugin %s: %w", path, err)
-	}
-
-	return analyzers, nil
-}
-
-func (m *Manager) lookupPlugin(plug *plugin.Plugin, settings any) ([]*analysis.Analyzer, error) {
-	symbol, err := plug.Lookup("New")
-	if err != nil {
-		analyzers, errP := m.lookupAnalyzerPlugin(plug)
-		if errP != nil {
-			// TODO(ldez): use `errors.Join` when we will upgrade to go1.20.
-			return nil, multierror.Append(err, errP)
-		}
-
-		return analyzers, nil
-	}
-
-	// The type func cannot be used here, must be the explicit signature.
-	constructor, ok := symbol.(func(any) ([]*analysis.Analyzer, error))
-	if !ok {
-		return nil, fmt.Errorf("plugin does not abide by 'New' function: %T", symbol)
-	}
-
-	return constructor(settings)
-}
-
-func (m *Manager) lookupAnalyzerPlugin(plug *plugin.Plugin) ([]*analysis.Analyzer, error) {
-	symbol, err := plug.Lookup("AnalyzerPlugin")
-	if err != nil {
-		return nil, err
-	}
-
-	// TODO(ldez): remove this env var (but keep the log) in the next minor version (v1.55.0)
-	if _, ok := os.LookupEnv("GOLANGCI_LINT_HIDE_WARNING_ABOUT_PLUGIN_API_DEPRECATION"); !ok {
-		m.log.Warnf("plugin: 'AnalyzerPlugin' plugins are deprecated, please use the new plugin signature: " +
-			"https://golangci-lint.run/contributing/new-linters/#create-a-plugin")
-	}
-
-	analyzerPlugin, ok := symbol.(AnalyzerPlugin)
-	if !ok {
-		return nil, fmt.Errorf("plugin does not abide by 'AnalyzerPlugin' interface: %T", symbol)
-	}
-
-	return analyzerPlugin.GetAnalyzers(), nil
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go
deleted file mode 100644
index c5c7874e4508b0dc5ee6681396c878efeda8b914..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go
+++ /dev/null
@@ -1,223 +0,0 @@
-package lintersdb
-
-import (
-	"os"
-	"sort"
-
-	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
-	"github.com/golangci/golangci-lint/pkg/lint/linter"
-	"github.com/golangci/golangci-lint/pkg/logutils"
-)
-
-// EnvTestRun value: "1"
-const EnvTestRun = "GL_TEST_RUN"
-
-type EnabledSet struct {
-	m      *Manager
-	v      *Validator
-	log    logutils.Log
-	cfg    *config.Config
-	debugf logutils.DebugFunc
-}
-
-func NewEnabledSet(m *Manager, v *Validator, log logutils.Log, cfg *config.Config) *EnabledSet {
-	return &EnabledSet{
-		m:      m,
-		v:      v,
-		log:    log,
-		cfg:    cfg,
-		debugf: logutils.Debug(logutils.DebugKeyEnabledLinters),
-	}
-}
-
-//nolint:gocyclo // the complexity cannot be reduced.
-func (es EnabledSet) build(lcfg *config.Linters, enabledByDefaultLinters []*linter.Config) map[string]*linter.Config {
-	es.debugf("Linters config: %#v", lcfg)
-
-	resultLintersSet := map[string]*linter.Config{}
-	switch {
-	case len(lcfg.Presets) != 0:
-		break // imply --disable-all
-	case lcfg.EnableAll:
-		resultLintersSet = linterConfigsToMap(es.m.GetAllSupportedLinterConfigs())
-	case lcfg.DisableAll:
-		break
-	default:
-		resultLintersSet = linterConfigsToMap(enabledByDefaultLinters)
-	}
-
-	// --presets can only add linters to default set
-	for _, p := range lcfg.Presets {
-		for _, lc := range es.m.GetAllLinterConfigsForPreset(p) {
-			lc := lc
-			resultLintersSet[lc.Name()] = lc
-		}
-	}
-
-	// --fast removes slow linters from current set.
-	// It should be after --presets to be able to run only fast linters in preset.
-	// It should be before --enable and --disable to be able to enable or disable specific linter.
-	if lcfg.Fast {
-		for name, lc := range resultLintersSet {
-			if lc.IsSlowLinter() {
-				delete(resultLintersSet, name)
-			}
-		}
-	}
-
-	for _, name := range lcfg.Enable {
-		for _, lc := range es.m.GetLinterConfigs(name) {
-			// it's important to use lc.Name() nor name because name can be alias
-			resultLintersSet[lc.Name()] = lc
-		}
-	}
-
-	for _, name := range lcfg.Disable {
-		for _, lc := range es.m.GetLinterConfigs(name) {
-			// it's important to use lc.Name() nor name because name can be alias
-			delete(resultLintersSet, lc.Name())
-		}
-	}
-
-	// typecheck is not a real linter and cannot be disabled.
-	if _, ok := resultLintersSet["typecheck"]; !ok && (es.cfg == nil || !es.cfg.InternalCmdTest) {
-		for _, lc := range es.m.GetLinterConfigs("typecheck") {
-			// it's important to use lc.Name() nor name because name can be alias
-			resultLintersSet[lc.Name()] = lc
-		}
-	}
-
-	return resultLintersSet
-}
-
-func (es EnabledSet) GetEnabledLintersMap() (map[string]*linter.Config, error) {
-	if err := es.v.validateEnabledDisabledLintersConfig(&es.cfg.Linters); err != nil {
-		return nil, err
-	}
-
-	enabledLinters := es.build(&es.cfg.Linters, es.m.GetAllEnabledByDefaultLinters())
-	if os.Getenv(EnvTestRun) == "1" {
-		es.verbosePrintLintersStatus(enabledLinters)
-	}
-	return enabledLinters, nil
-}
-
-// GetOptimizedLinters returns enabled linters after optimization (merging) of multiple linters
-// into a fewer number of linters. E.g. some go/analysis linters can be optimized into
-// one metalinter for data reuse and speed up.
-func (es EnabledSet) GetOptimizedLinters() ([]*linter.Config, error) {
-	if err := es.v.validateEnabledDisabledLintersConfig(&es.cfg.Linters); err != nil {
-		return nil, err
-	}
-
-	resultLintersSet := es.build(&es.cfg.Linters, es.m.GetAllEnabledByDefaultLinters())
-	es.verbosePrintLintersStatus(resultLintersSet)
-	es.combineGoAnalysisLinters(resultLintersSet)
-
-	var resultLinters []*linter.Config
-	for _, lc := range resultLintersSet {
-		resultLinters = append(resultLinters, lc)
-	}
-
-	// Make order of execution of linters (go/analysis metalinter and unused) stable.
-	sort.Slice(resultLinters, func(i, j int) bool {
-		a, b := resultLinters[i], resultLinters[j]
-
-		if b.Name() == linter.LastLinter {
-			return true
-		}
-
-		if a.Name() == linter.LastLinter {
-			return false
-		}
-
-		if a.DoesChangeTypes != b.DoesChangeTypes {
-			return b.DoesChangeTypes // move type-changing linters to the end to optimize speed
-		}
-		return a.Name() < b.Name()
-	})
-
-	return resultLinters, nil
-}
-
-func (es EnabledSet) combineGoAnalysisLinters(linters map[string]*linter.Config) {
-	var goanalysisLinters []*goanalysis.Linter
-	goanalysisPresets := map[string]bool{}
-	for _, lc := range linters {
-		lnt, ok := lc.Linter.(*goanalysis.Linter)
-		if !ok {
-			continue
-		}
-		if lnt.LoadMode() == goanalysis.LoadModeWholeProgram {
-			// It's ineffective by CPU and memory to run whole-program and incremental analyzers at once.
-			continue
-		}
-		goanalysisLinters = append(goanalysisLinters, lnt)
-		for _, p := range lc.InPresets {
-			goanalysisPresets[p] = true
-		}
-	}
-
-	if len(goanalysisLinters) <= 1 {
-		es.debugf("Didn't combine go/analysis linters: got only %d linters", len(goanalysisLinters))
-		return
-	}
-
-	for _, lnt := range goanalysisLinters {
-		delete(linters, lnt.Name())
-	}
-
-	// Make order of execution of go/analysis analyzers stable.
-	sort.Slice(goanalysisLinters, func(i, j int) bool {
-		a, b := goanalysisLinters[i], goanalysisLinters[j]
-
-		if b.Name() == linter.LastLinter {
-			return true
-		}
-
-		if a.Name() == linter.LastLinter {
-			return false
-		}
-
-		return a.Name() <= b.Name()
-	})
-
-	ml := goanalysis.NewMetaLinter(goanalysisLinters)
-
-	var presets []string
-	for p := range goanalysisPresets {
-		presets = append(presets, p)
-	}
-
-	mlConfig := &linter.Config{
-		Linter:           ml,
-		EnabledByDefault: false,
-		InPresets:        presets,
-		AlternativeNames: nil,
-		OriginalURL:      "",
-	}
-
-	mlConfig = mlConfig.WithLoadForGoAnalysis()
-
-	linters[ml.Name()] = mlConfig
-	es.debugf("Combined %d go/analysis linters into one metalinter", len(goanalysisLinters))
-}
-
-func (es EnabledSet) verbosePrintLintersStatus(lcs map[string]*linter.Config) {
-	var linterNames []string
-	for _, lc := range lcs {
-		if lc.Internal {
-			continue
-		}
-
-		linterNames = append(linterNames, lc.Name())
-	}
-	sort.StringSlice(linterNames).Sort()
-	es.log.Infof("Active %d linters: %s", len(linterNames), linterNames)
-
-	if len(es.cfg.Linters.Presets) != 0 {
-		sort.StringSlice(es.cfg.Linters.Presets).Sort()
-		es.log.Infof("Active presets: %s", es.cfg.Linters.Presets)
-	}
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go
index 4de3a1116f58616395f8fba7c583d41fa3bfc093..0a487be92e739f171415f797951eddec3699c4a8 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go
@@ -1,900 +1,140 @@
 package lintersdb
 
 import (
-	"regexp"
+	"fmt"
+	"os"
+	"slices"
+	"sort"
+
+	"golang.org/x/exp/maps"
 
 	"github.com/golangci/golangci-lint/pkg/config"
-	"github.com/golangci/golangci-lint/pkg/golinters"
+	"github.com/golangci/golangci-lint/pkg/goanalysis"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 )
 
+type Builder interface {
+	Build(cfg *config.Config) ([]*linter.Config, error)
+}
+
+// Manager is a database of all linters (internal and plugin-based).
+// It provides methods to access the linter sets.
 type Manager struct {
+	log    logutils.Log
+	debugf logutils.DebugFunc
+
 	cfg *config.Config
-	log logutils.Log
 
-	nameToLCs     map[string][]*linter.Config
-	customLinters []*linter.Config
+	linters []*linter.Config
+
+	nameToLCs map[string][]*linter.Config
 }
 
-func NewManager(cfg *config.Config, log logutils.Log) *Manager {
-	m := &Manager{cfg: cfg, log: log}
-	m.customLinters = m.getCustomLinterConfigs()
+// NewManager creates a new Manager.
+// This constructor will call the builders to build and store the linters.
+func NewManager(log logutils.Log, cfg *config.Config, builders ...Builder) (*Manager, error) {
+	m := &Manager{
+		log:       log,
+		debugf:    logutils.Debug(logutils.DebugKeyEnabledLinters),
+		nameToLCs: make(map[string][]*linter.Config),
+	}
+
+	m.cfg = cfg
+	if cfg == nil {
+		m.cfg = config.NewDefault()
+	}
+
+	for _, builder := range builders {
+		linters, err := builder.Build(m.cfg)
+		if err != nil {
+			return nil, fmt.Errorf("build linters: %w", err)
+		}
+
+		m.linters = append(m.linters, linters...)
+	}
 
-	nameToLCs := make(map[string][]*linter.Config)
-	for _, lc := range m.GetAllSupportedLinterConfigs() {
+	for _, lc := range m.linters {
 		for _, name := range lc.AllNames() {
-			nameToLCs[name] = append(nameToLCs[name], lc)
+			m.nameToLCs[name] = append(m.nameToLCs[name], lc)
 		}
 	}
 
-	m.nameToLCs = nameToLCs
+	err := NewValidator(m).Validate(m.cfg)
+	if err != nil {
+		return nil, err
+	}
 
-	return m
+	return m, nil
 }
 
-func (Manager) AllPresets() []string {
-	return []string{
-		linter.PresetBugs,
-		linter.PresetComment,
-		linter.PresetComplexity,
-		linter.PresetError,
-		linter.PresetFormatting,
-		linter.PresetImport,
-		linter.PresetMetaLinter,
-		linter.PresetModule,
-		linter.PresetPerformance,
-		linter.PresetSQL,
-		linter.PresetStyle,
-		linter.PresetTest,
-		linter.PresetUnused,
-	}
+func (m *Manager) GetLinterConfigs(name string) []*linter.Config {
+	return m.nameToLCs[name]
+}
+
+func (m *Manager) GetAllSupportedLinterConfigs() []*linter.Config {
+	return m.linters
 }
 
-func (m Manager) allPresetsSet() map[string]bool {
-	ret := map[string]bool{}
-	for _, p := range m.AllPresets() {
-		ret[p] = true
+func (m *Manager) GetAllLinterConfigsForPreset(p string) []*linter.Config {
+	var ret []*linter.Config
+	for _, lc := range m.linters {
+		if lc.IsDeprecated() {
+			continue
+		}
+
+		if slices.Contains(lc.InPresets, p) {
+			ret = append(ret, lc)
+		}
 	}
+
 	return ret
 }
 
-func (m Manager) GetLinterConfigs(name string) []*linter.Config {
-	return m.nameToLCs[name]
+func (m *Manager) GetEnabledLintersMap() (map[string]*linter.Config, error) {
+	enabledLinters := m.build(m.GetAllEnabledByDefaultLinters())
+
+	if os.Getenv(logutils.EnvTestRun) == "1" {
+		m.verbosePrintLintersStatus(enabledLinters)
+	}
+
+	return enabledLinters, nil
 }
 
-//nolint:funlen
-func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config {
-	var (
-		asasalintCfg        *config.AsasalintSettings
-		bidichkCfg          *config.BiDiChkSettings
-		cyclopCfg           *config.Cyclop
-		decorderCfg         *config.DecorderSettings
-		depGuardCfg         *config.DepGuardSettings
-		dogsledCfg          *config.DogsledSettings
-		duplCfg             *config.DuplSettings
-		dupwordCfg          *config.DupWordSettings
-		errcheckCfg         *config.ErrcheckSettings
-		errchkjsonCfg       *config.ErrChkJSONSettings
-		errorlintCfg        *config.ErrorLintSettings
-		exhaustiveCfg       *config.ExhaustiveSettings
-		exhaustiveStructCfg *config.ExhaustiveStructSettings
-		exhaustructCfg      *config.ExhaustructSettings
-		forbidigoCfg        *config.ForbidigoSettings
-		funlenCfg           *config.FunlenSettings
-		gciCfg              *config.GciSettings
-		ginkgolinterCfg     *config.GinkgoLinterSettings
-		gocognitCfg         *config.GocognitSettings
-		goconstCfg          *config.GoConstSettings
-		gocriticCfg         *config.GoCriticSettings
-		gocycloCfg          *config.GoCycloSettings
-		godotCfg            *config.GodotSettings
-		godoxCfg            *config.GodoxSettings
-		gofmtCfg            *config.GoFmtSettings
-		gofumptCfg          *config.GofumptSettings
-		goheaderCfg         *config.GoHeaderSettings
-		goimportsCfg        *config.GoImportsSettings
-		golintCfg           *config.GoLintSettings
-		goMndCfg            *config.GoMndSettings
-		goModDirectivesCfg  *config.GoModDirectivesSettings
-		gomodguardCfg       *config.GoModGuardSettings
-		gosecCfg            *config.GoSecSettings
-		gosimpleCfg         *config.StaticCheckSettings
-		gosmopolitanCfg     *config.GosmopolitanSettings
-		govetCfg            *config.GovetSettings
-		grouperCfg          *config.GrouperSettings
-		ifshortCfg          *config.IfshortSettings
-		importAsCfg         *config.ImportAsSettings
-		interfaceBloatCfg   *config.InterfaceBloatSettings
-		ireturnCfg          *config.IreturnSettings
-		lllCfg              *config.LllSettings
-		loggerCheckCfg      *config.LoggerCheckSettings
-		maintIdxCfg         *config.MaintIdxSettings
-		makezeroCfg         *config.MakezeroSettings
-		malignedCfg         *config.MalignedSettings
-		misspellCfg         *config.MisspellSettings
-		musttagCfg          *config.MustTagSettings
-		nakedretCfg         *config.NakedretSettings
-		nestifCfg           *config.NestifSettings
-		nilNilCfg           *config.NilNilSettings
-		nlreturnCfg         *config.NlreturnSettings
-		noLintLintCfg       *config.NoLintLintSettings
-		noNamedReturnsCfg   *config.NoNamedReturnsSettings
-		parallelTestCfg     *config.ParallelTestSettings
-		preallocCfg         *config.PreallocSettings
-		predeclaredCfg      *config.PredeclaredSettings
-		promlinterCfg       *config.PromlinterSettings
-		reassignCfg         *config.ReassignSettings
-		reviveCfg           *config.ReviveSettings
-		rowserrcheckCfg     *config.RowsErrCheckSettings
-		staticcheckCfg      *config.StaticCheckSettings
-		structcheckCfg      *config.StructCheckSettings
-		stylecheckCfg       *config.StaticCheckSettings
-		tagalignCfg         *config.TagAlignSettings
-		tagliatelleCfg      *config.TagliatelleSettings
-		tenvCfg             *config.TenvSettings
-		testpackageCfg      *config.TestpackageSettings
-		thelperCfg          *config.ThelperSettings
-		unparamCfg          *config.UnparamSettings
-		unusedCfg           *config.StaticCheckSettings
-		usestdlibvars       *config.UseStdlibVarsSettings
-		varcheckCfg         *config.VarCheckSettings
-		varnamelenCfg       *config.VarnamelenSettings
-		whitespaceCfg       *config.WhitespaceSettings
-		wrapcheckCfg        *config.WrapcheckSettings
-		wslCfg              *config.WSLSettings
-	)
-
-	if m.cfg != nil {
-		asasalintCfg = &m.cfg.LintersSettings.Asasalint
-		bidichkCfg = &m.cfg.LintersSettings.BiDiChk
-		cyclopCfg = &m.cfg.LintersSettings.Cyclop
-		decorderCfg = &m.cfg.LintersSettings.Decorder
-		depGuardCfg = &m.cfg.LintersSettings.Depguard
-		dogsledCfg = &m.cfg.LintersSettings.Dogsled
-		duplCfg = &m.cfg.LintersSettings.Dupl
-		dupwordCfg = &m.cfg.LintersSettings.DupWord
-		errcheckCfg = &m.cfg.LintersSettings.Errcheck
-		errchkjsonCfg = &m.cfg.LintersSettings.ErrChkJSON
-		errorlintCfg = &m.cfg.LintersSettings.ErrorLint
-		exhaustiveCfg = &m.cfg.LintersSettings.Exhaustive
-		exhaustiveStructCfg = &m.cfg.LintersSettings.ExhaustiveStruct
-		exhaustructCfg = &m.cfg.LintersSettings.Exhaustruct
-		forbidigoCfg = &m.cfg.LintersSettings.Forbidigo
-		funlenCfg = &m.cfg.LintersSettings.Funlen
-		gciCfg = &m.cfg.LintersSettings.Gci
-		ginkgolinterCfg = &m.cfg.LintersSettings.GinkgoLinter
-		gocognitCfg = &m.cfg.LintersSettings.Gocognit
-		goconstCfg = &m.cfg.LintersSettings.Goconst
-		gocriticCfg = &m.cfg.LintersSettings.Gocritic
-		gocycloCfg = &m.cfg.LintersSettings.Gocyclo
-		godotCfg = &m.cfg.LintersSettings.Godot
-		godoxCfg = &m.cfg.LintersSettings.Godox
-		gofmtCfg = &m.cfg.LintersSettings.Gofmt
-		gofumptCfg = &m.cfg.LintersSettings.Gofumpt
-		goheaderCfg = &m.cfg.LintersSettings.Goheader
-		goimportsCfg = &m.cfg.LintersSettings.Goimports
-		golintCfg = &m.cfg.LintersSettings.Golint
-		goMndCfg = &m.cfg.LintersSettings.Gomnd
-		goModDirectivesCfg = &m.cfg.LintersSettings.GoModDirectives
-		gomodguardCfg = &m.cfg.LintersSettings.Gomodguard
-		gosecCfg = &m.cfg.LintersSettings.Gosec
-		gosimpleCfg = &m.cfg.LintersSettings.Gosimple
-		gosmopolitanCfg = &m.cfg.LintersSettings.Gosmopolitan
-		govetCfg = &m.cfg.LintersSettings.Govet
-		grouperCfg = &m.cfg.LintersSettings.Grouper
-		ifshortCfg = &m.cfg.LintersSettings.Ifshort
-		importAsCfg = &m.cfg.LintersSettings.ImportAs
-		interfaceBloatCfg = &m.cfg.LintersSettings.InterfaceBloat
-		ireturnCfg = &m.cfg.LintersSettings.Ireturn
-		lllCfg = &m.cfg.LintersSettings.Lll
-		loggerCheckCfg = &m.cfg.LintersSettings.LoggerCheck
-		maintIdxCfg = &m.cfg.LintersSettings.MaintIdx
-		makezeroCfg = &m.cfg.LintersSettings.Makezero
-		malignedCfg = &m.cfg.LintersSettings.Maligned
-		misspellCfg = &m.cfg.LintersSettings.Misspell
-		musttagCfg = &m.cfg.LintersSettings.MustTag
-		nakedretCfg = &m.cfg.LintersSettings.Nakedret
-		nestifCfg = &m.cfg.LintersSettings.Nestif
-		nilNilCfg = &m.cfg.LintersSettings.NilNil
-		nlreturnCfg = &m.cfg.LintersSettings.Nlreturn
-		noLintLintCfg = &m.cfg.LintersSettings.NoLintLint
-		noNamedReturnsCfg = &m.cfg.LintersSettings.NoNamedReturns
-		preallocCfg = &m.cfg.LintersSettings.Prealloc
-		parallelTestCfg = &m.cfg.LintersSettings.ParallelTest
-		predeclaredCfg = &m.cfg.LintersSettings.Predeclared
-		promlinterCfg = &m.cfg.LintersSettings.Promlinter
-		reassignCfg = &m.cfg.LintersSettings.Reassign
-		reviveCfg = &m.cfg.LintersSettings.Revive
-		rowserrcheckCfg = &m.cfg.LintersSettings.RowsErrCheck
-		staticcheckCfg = &m.cfg.LintersSettings.Staticcheck
-		structcheckCfg = &m.cfg.LintersSettings.Structcheck
-		stylecheckCfg = &m.cfg.LintersSettings.Stylecheck
-		tagalignCfg = &m.cfg.LintersSettings.TagAlign
-		tagliatelleCfg = &m.cfg.LintersSettings.Tagliatelle
-		tenvCfg = &m.cfg.LintersSettings.Tenv
-		testpackageCfg = &m.cfg.LintersSettings.Testpackage
-		thelperCfg = &m.cfg.LintersSettings.Thelper
-		unparamCfg = &m.cfg.LintersSettings.Unparam
-		unusedCfg = new(config.StaticCheckSettings)
-		usestdlibvars = &m.cfg.LintersSettings.UseStdlibVars
-		varcheckCfg = &m.cfg.LintersSettings.Varcheck
-		varnamelenCfg = &m.cfg.LintersSettings.Varnamelen
-		whitespaceCfg = &m.cfg.LintersSettings.Whitespace
-		wrapcheckCfg = &m.cfg.LintersSettings.Wrapcheck
-		wslCfg = &m.cfg.LintersSettings.WSL
-
-		if govetCfg != nil {
-			govetCfg.Go = m.cfg.Run.Go
-		}
+// GetOptimizedLinters returns the enabled linters after optimization, i.e. merging multiple linters into fewer ones.
+// E.g. some go/analysis linters can be combined into one metalinter to reuse data and speed up analysis.
+func (m *Manager) GetOptimizedLinters() ([]*linter.Config, error) {
+	resultLintersSet := m.build(m.GetAllEnabledByDefaultLinters())
+	m.verbosePrintLintersStatus(resultLintersSet)
 
-		if gocriticCfg != nil {
-			gocriticCfg.Go = trimGoVersion(m.cfg.Run.Go)
-		}
+	m.combineGoAnalysisLinters(resultLintersSet)
 
-		if gofumptCfg != nil && gofumptCfg.LangVersion == "" {
-			gofumptCfg.LangVersion = m.cfg.Run.Go
-		}
+	resultLinters := maps.Values(resultLintersSet)
 
-		if staticcheckCfg != nil && staticcheckCfg.GoVersion == "" {
-			staticcheckCfg.GoVersion = trimGoVersion(m.cfg.Run.Go)
-		}
-		if gosimpleCfg != nil && gosimpleCfg.GoVersion == "" {
-			gosimpleCfg.GoVersion = trimGoVersion(m.cfg.Run.Go)
+	// Make order of execution of linters (go/analysis metalinter and unused) stable.
+	sort.Slice(resultLinters, func(i, j int) bool {
+		a, b := resultLinters[i], resultLinters[j]
+
+		if b.Name() == linter.LastLinter {
+			return true
 		}
-		if stylecheckCfg != nil && stylecheckCfg.GoVersion != "" {
-			stylecheckCfg.GoVersion = trimGoVersion(m.cfg.Run.Go)
+
+		if a.Name() == linter.LastLinter {
+			return false
 		}
-		if unusedCfg != nil && unusedCfg.GoVersion == "" {
-			unusedCfg.GoVersion = trimGoVersion(m.cfg.Run.Go)
+
+		if a.DoesChangeTypes != b.DoesChangeTypes {
+			return b.DoesChangeTypes // move type-changing linters to the end to optimize speed
 		}
-	}
+		return a.Name() < b.Name()
+	})
 
-	const megacheckName = "megacheck"
-
-	var linters []*linter.Config
-	linters = append(linters, m.customLinters...)
-
-	// The linters are sorted in the alphabetical order (case-insensitive).
-	// When a new linter is added the version in `WithSince(...)` must be the next minor version of golangci-lint.
-	linters = append(linters,
-		linter.NewConfig(golinters.NewAsasalint(asasalintCfg)).
-			WithSince("1.47.0").
-			WithPresets(linter.PresetBugs).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/alingse/asasalint"),
-
-		linter.NewConfig(golinters.NewAsciicheck()).
-			WithSince("v1.26.0").
-			WithPresets(linter.PresetBugs, linter.PresetStyle).
-			WithURL("https://github.com/tdakkota/asciicheck"),
-
-		linter.NewConfig(golinters.NewBiDiChkFuncName(bidichkCfg)).
-			WithSince("1.43.0").
-			WithPresets(linter.PresetBugs).
-			WithURL("https://github.com/breml/bidichk"),
-
-		linter.NewConfig(golinters.NewBodyclose()).
-			WithSince("v1.18.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetPerformance, linter.PresetBugs).
-			WithURL("https://github.com/timakin/bodyclose"),
-
-		linter.NewConfig(golinters.NewContainedCtx()).
-			WithSince("1.44.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/sivchari/containedctx"),
-
-		linter.NewConfig(golinters.NewContextCheck()).
-			WithSince("v1.43.0").
-			WithPresets(linter.PresetBugs).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/kkHAIKE/contextcheck"),
-
-		linter.NewConfig(golinters.NewCyclop(cyclopCfg)).
-			WithSince("v1.37.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetComplexity).
-			WithURL("https://github.com/bkielbasa/cyclop"),
-
-		linter.NewConfig(golinters.NewDecorder(decorderCfg)).
-			WithSince("v1.44.0").
-			WithPresets(linter.PresetFormatting, linter.PresetStyle).
-			WithURL("https://gitlab.com/bosi/decorder"),
-
-		linter.NewConfig(golinters.NewDeadcode()).
-			WithSince("v1.0.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetUnused).
-			WithURL("https://github.com/remyoudompheng/go-misc/tree/master/deadcode").
-			Deprecated("The owner seems to have abandoned the linter.", "v1.49.0", "unused"),
-
-		linter.NewConfig(golinters.NewDepguard(depGuardCfg)).
-			WithSince("v1.4.0").
-			WithPresets(linter.PresetStyle, linter.PresetImport, linter.PresetModule).
-			WithURL("https://github.com/OpenPeeDeeP/depguard"),
-
-		linter.NewConfig(golinters.NewDogsled(dogsledCfg)).
-			WithSince("v1.19.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/alexkohler/dogsled"),
-
-		linter.NewConfig(golinters.NewDupl(duplCfg)).
-			WithSince("v1.0.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/mibk/dupl"),
-
-		linter.NewConfig(golinters.NewDupWord(dupwordCfg)).
-			WithSince("1.50.0").
-			WithPresets(linter.PresetComment).
-			WithAutoFix().
-			WithURL("https://github.com/Abirdcfly/dupword"),
-
-		linter.NewConfig(golinters.NewDurationCheck()).
-			WithSince("v1.37.0").
-			WithPresets(linter.PresetBugs).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/charithe/durationcheck"),
-
-		linter.NewConfig(golinters.NewErrcheck(errcheckCfg)).
-			WithEnabledByDefault().
-			WithSince("v1.0.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetBugs, linter.PresetError).
-			WithURL("https://github.com/kisielk/errcheck"),
-
-		linter.NewConfig(golinters.NewErrChkJSONFuncName(errchkjsonCfg)).
-			WithSince("1.44.0").
-			WithPresets(linter.PresetBugs).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/breml/errchkjson"),
-
-		linter.NewConfig(golinters.NewErrName()).
-			WithSince("v1.42.0").
-			WithPresets(linter.PresetStyle).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/Antonboom/errname"),
-
-		linter.NewConfig(golinters.NewErrorLint(errorlintCfg)).
-			WithSince("v1.32.0").
-			WithPresets(linter.PresetBugs, linter.PresetError).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/polyfloyd/go-errorlint"),
-
-		linter.NewConfig(golinters.NewExecInQuery()).
-			WithSince("v1.46.0").
-			WithPresets(linter.PresetSQL).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/lufeee/execinquery"),
-
-		linter.NewConfig(golinters.NewExhaustive(exhaustiveCfg)).
-			WithSince(" v1.28.0").
-			WithPresets(linter.PresetBugs).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/nishanths/exhaustive"),
-
-		linter.NewConfig(golinters.NewExhaustiveStruct(exhaustiveStructCfg)).
-			WithSince("v1.32.0").
-			WithPresets(linter.PresetStyle, linter.PresetTest).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/mbilski/exhaustivestruct").
-			Deprecated("The owner seems to have abandoned the linter.", "v1.46.0", "exhaustruct"),
-
-		linter.NewConfig(golinters.NewExhaustruct(exhaustructCfg)).
-			WithSince("v1.46.0").
-			WithPresets(linter.PresetStyle, linter.PresetTest).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/GaijinEntertainment/go-exhaustruct"),
-
-		linter.NewConfig(golinters.NewExportLoopRef()).
-			WithSince("v1.28.0").
-			WithPresets(linter.PresetBugs).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/kyoh86/exportloopref"),
-
-		linter.NewConfig(golinters.NewForbidigo(forbidigoCfg)).
-			WithSince("v1.34.0").
-			WithPresets(linter.PresetStyle).
-			// Strictly speaking,
-			// the additional information is only needed when forbidigoCfg.AnalyzeTypes is chosen by the user.
-			// But we don't know that here in all cases (sometimes config is not loaded),
-			// so we have to assume that it is needed to be on the safe side.
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/ashanbrown/forbidigo"),
-
-		linter.NewConfig(golinters.NewForceTypeAssert()).
-			WithSince("v1.38.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/gostaticanalysis/forcetypeassert"),
-
-		linter.NewConfig(golinters.NewFunlen(funlenCfg)).
-			WithSince("v1.18.0").
-			WithPresets(linter.PresetComplexity).
-			WithURL("https://github.com/ultraware/funlen"),
-
-		linter.NewConfig(golinters.NewGci(gciCfg)).
-			WithSince("v1.30.0").
-			WithPresets(linter.PresetFormatting, linter.PresetImport).
-			WithURL("https://github.com/daixiang0/gci"),
-
-		linter.NewConfig(golinters.NewGinkgoLinter(ginkgolinterCfg)).
-			WithSince("v1.51.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/nunnatsa/ginkgolinter"),
-
-		linter.NewConfig(golinters.NewGoCheckCompilerDirectives()).
-			WithSince("v1.51.0").
-			WithPresets(linter.PresetBugs).
-			WithURL("https://github.com/leighmcculloch/gocheckcompilerdirectives"),
-
-		linter.NewConfig(golinters.NewGochecknoglobals()).
-			WithSince("v1.12.0").
-			WithPresets(linter.PresetStyle).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/leighmcculloch/gochecknoglobals"),
-
-		linter.NewConfig(golinters.NewGochecknoinits()).
-			WithSince("v1.12.0").
-			WithPresets(linter.PresetStyle),
-
-		linter.NewConfig(golinters.NewGocognit(gocognitCfg)).
-			WithSince("v1.20.0").
-			WithPresets(linter.PresetComplexity).
-			WithURL("https://github.com/uudashr/gocognit"),
-
-		linter.NewConfig(golinters.NewGoconst(goconstCfg)).
-			WithSince("v1.0.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/jgautheron/goconst"),
-
-		linter.NewConfig(golinters.NewGoCritic(gocriticCfg, m.cfg)).
-			WithSince("v1.12.0").
-			WithPresets(linter.PresetStyle, linter.PresetMetaLinter).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/go-critic/go-critic"),
-
-		linter.NewConfig(golinters.NewGocyclo(gocycloCfg)).
-			WithSince("v1.0.0").
-			WithPresets(linter.PresetComplexity).
-			WithURL("https://github.com/fzipp/gocyclo"),
-
-		linter.NewConfig(golinters.NewGodot(godotCfg)).
-			WithSince("v1.25.0").
-			WithPresets(linter.PresetStyle, linter.PresetComment).
-			WithAutoFix().
-			WithURL("https://github.com/tetafro/godot"),
-
-		linter.NewConfig(golinters.NewGodox(godoxCfg)).
-			WithSince("v1.19.0").
-			WithPresets(linter.PresetStyle, linter.PresetComment).
-			WithURL("https://github.com/matoous/godox"),
-
-		linter.NewConfig(golinters.NewGoerr113()).
-			WithSince("v1.26.0").
-			WithPresets(linter.PresetStyle, linter.PresetError).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/Djarvur/go-err113"),
-
-		linter.NewConfig(golinters.NewGofmt(gofmtCfg)).
-			WithSince("v1.0.0").
-			WithPresets(linter.PresetFormatting).
-			WithAutoFix().
-			WithURL("https://pkg.go.dev/cmd/gofmt"),
-
-		linter.NewConfig(golinters.NewGofumpt(gofumptCfg)).
-			WithSince("v1.28.0").
-			WithPresets(linter.PresetFormatting).
-			WithAutoFix().
-			WithURL("https://github.com/mvdan/gofumpt"),
-
-		linter.NewConfig(golinters.NewGoHeader(goheaderCfg)).
-			WithSince("v1.28.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/denis-tingaikin/go-header"),
-
-		linter.NewConfig(golinters.NewGoimports(goimportsCfg)).
-			WithSince("v1.20.0").
-			WithPresets(linter.PresetFormatting, linter.PresetImport).
-			WithAutoFix().
-			WithURL("https://pkg.go.dev/golang.org/x/tools/cmd/goimports"),
-
-		linter.NewConfig(golinters.NewGolint(golintCfg)).
-			WithSince("v1.0.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/golang/lint").
-			Deprecated("The repository of the linter has been archived by the owner.", "v1.41.0", "revive"),
-
-		linter.NewConfig(golinters.NewGoMND(goMndCfg)).
-			WithSince("v1.22.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/tommy-muehle/go-mnd"),
-
-		linter.NewConfig(golinters.NewGoModDirectives(goModDirectivesCfg)).
-			WithSince("v1.39.0").
-			WithPresets(linter.PresetStyle, linter.PresetModule).
-			WithURL("https://github.com/ldez/gomoddirectives"),
-
-		linter.NewConfig(golinters.NewGomodguard(gomodguardCfg)).
-			WithSince("v1.25.0").
-			WithPresets(linter.PresetStyle, linter.PresetImport, linter.PresetModule).
-			WithURL("https://github.com/ryancurrah/gomodguard"),
-
-		linter.NewConfig(golinters.NewGoPrintfFuncName()).
-			WithSince("v1.23.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/jirfag/go-printf-func-name"),
-
-		linter.NewConfig(golinters.NewGosec(gosecCfg)).
-			WithSince("v1.0.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetBugs).
-			WithURL("https://github.com/securego/gosec").
-			WithAlternativeNames("gas"),
-
-		linter.NewConfig(golinters.NewGosimple(gosimpleCfg)).
-			WithEnabledByDefault().
-			WithSince("v1.20.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetStyle).
-			WithAlternativeNames(megacheckName).
-			WithURL("https://github.com/dominikh/go-tools/tree/master/simple"),
-
-		linter.NewConfig(golinters.NewGosmopolitan(gosmopolitanCfg)).
-			WithSince("v1.53.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetBugs).
-			WithURL("https://github.com/xen0n/gosmopolitan"),
-
-		linter.NewConfig(golinters.NewGovet(govetCfg)).
-			WithEnabledByDefault().
-			WithSince("v1.0.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetBugs, linter.PresetMetaLinter).
-			WithAlternativeNames("vet", "vetshadow").
-			WithURL("https://pkg.go.dev/cmd/vet"),
-
-		linter.NewConfig(golinters.NewGrouper(grouperCfg)).
-			WithSince("v1.44.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/leonklingele/grouper"),
-
-		linter.NewConfig(golinters.NewIfshort(ifshortCfg)).
-			WithSince("v1.36.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/esimonov/ifshort").
-			Deprecated("The repository of the linter has been deprecated by the owner.", "v1.48.0", ""),
-
-		linter.NewConfig(golinters.NewImportAs(importAsCfg)).
-			WithSince("v1.38.0").
-			WithPresets(linter.PresetStyle).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/julz/importas"),
-
-		linter.NewConfig(golinters.NewIneffassign()).
-			WithEnabledByDefault().
-			WithSince("v1.0.0").
-			WithPresets(linter.PresetUnused).
-			WithURL("https://github.com/gordonklaus/ineffassign"),
-
-		linter.NewConfig(golinters.NewInterfaceBloat(interfaceBloatCfg)).
-			WithSince("v1.49.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/sashamelentyev/interfacebloat"),
-
-		linter.NewConfig(golinters.NewInterfacer()).
-			WithSince("v1.0.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/mvdan/interfacer").
-			Deprecated("The repository of the linter has been archived by the owner.", "v1.38.0", ""),
-
-		linter.NewConfig(golinters.NewIreturn(ireturnCfg)).
-			WithSince("v1.43.0").
-			WithPresets(linter.PresetStyle).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/butuzov/ireturn"),
-
-		linter.NewConfig(golinters.NewLLL(lllCfg)).
-			WithSince("v1.8.0").
-			WithPresets(linter.PresetStyle),
-
-		linter.NewConfig(golinters.NewLoggerCheck(loggerCheckCfg)).
-			WithSince("v1.49.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetStyle, linter.PresetBugs).
-			WithAlternativeNames("logrlint").
-			WithURL("https://github.com/timonwong/loggercheck"),
-
-		linter.NewConfig(golinters.NewMaintIdx(maintIdxCfg)).
-			WithSince("v1.44.0").
-			WithPresets(linter.PresetComplexity).
-			WithURL("https://github.com/yagipy/maintidx"),
-
-		linter.NewConfig(golinters.NewMakezero(makezeroCfg)).
-			WithSince("v1.34.0").
-			WithPresets(linter.PresetStyle, linter.PresetBugs).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/ashanbrown/makezero"),
-
-		linter.NewConfig(golinters.NewMaligned(malignedCfg)).
-			WithSince("v1.0.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetPerformance).
-			WithURL("https://github.com/mdempsky/maligned").
-			Deprecated("The repository of the linter has been archived by the owner.", "v1.38.0", "govet 'fieldalignment'"),
-
-		linter.NewConfig(golinters.NewMirror()).
-			WithSince("v1.53.0").
-			WithPresets(linter.PresetStyle).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/butuzov/mirror"),
-
-		linter.NewConfig(golinters.NewMisspell(misspellCfg)).
-			WithSince("v1.8.0").
-			WithPresets(linter.PresetStyle, linter.PresetComment).
-			WithAutoFix().
-			WithURL("https://github.com/client9/misspell"),
-
-		linter.NewConfig(golinters.NewMustTag(musttagCfg)).
-			WithSince("v1.51.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetStyle, linter.PresetBugs).
-			WithURL("https://github.com/tmzane/musttag"),
-
-		linter.NewConfig(golinters.NewNakedret(nakedretCfg)).
-			WithSince("v1.19.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/alexkohler/nakedret"),
-
-		linter.NewConfig(golinters.NewNestif(nestifCfg)).
-			WithSince("v1.25.0").
-			WithPresets(linter.PresetComplexity).
-			WithURL("https://github.com/nakabonne/nestif"),
-
-		linter.NewConfig(golinters.NewNilErr()).
-			WithSince("v1.38.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetBugs).
-			WithURL("https://github.com/gostaticanalysis/nilerr"),
-
-		linter.NewConfig(golinters.NewNilNil(nilNilCfg)).
-			WithSince("v1.43.0").
-			WithPresets(linter.PresetStyle).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/Antonboom/nilnil"),
-
-		linter.NewConfig(golinters.NewNLReturn(nlreturnCfg)).
-			WithSince("v1.30.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/ssgreg/nlreturn"),
-
-		linter.NewConfig(golinters.NewNoctx()).
-			WithSince("v1.28.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetPerformance, linter.PresetBugs).
-			WithURL("https://github.com/sonatard/noctx"),
-
-		linter.NewConfig(golinters.NewNoNamedReturns(noNamedReturnsCfg)).
-			WithSince("v1.46.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/firefart/nonamedreturns"),
-
-		linter.NewConfig(golinters.NewNoSnakeCase()).
-			WithSince("v1.47.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/sivchari/nosnakecase").
-			Deprecated("The repository of the linter has been deprecated by the owner.", "v1.48.1", "revive(var-naming)"),
-
-		linter.NewConfig(golinters.NewNoSprintfHostPort()).
-			WithSince("v1.46.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/stbenjam/no-sprintf-host-port"),
-
-		linter.NewConfig(golinters.NewParallelTest(parallelTestCfg)).
-			WithSince("v1.33.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetStyle, linter.PresetTest).
-			WithURL("https://github.com/kunwardeep/paralleltest"),
-
-		linter.NewConfig(golinters.NewPreAlloc(preallocCfg)).
-			WithSince("v1.19.0").
-			WithPresets(linter.PresetPerformance).
-			WithURL("https://github.com/alexkohler/prealloc"),
-
-		linter.NewConfig(golinters.NewPredeclared(predeclaredCfg)).
-			WithSince("v1.35.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/nishanths/predeclared"),
-
-		linter.NewConfig(golinters.NewPromlinter(promlinterCfg)).
-			WithSince("v1.40.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/yeya24/promlinter"),
-
-		linter.NewConfig(golinters.NewReassign(reassignCfg)).
-			WithSince("1.49.0").
-			WithPresets(linter.PresetBugs).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/curioswitch/go-reassign"),
-
-		linter.NewConfig(golinters.NewRevive(reviveCfg)).
-			WithSince("v1.37.0").
-			WithPresets(linter.PresetStyle, linter.PresetMetaLinter).
-			ConsiderSlow().
-			WithURL("https://github.com/mgechev/revive"),
-
-		linter.NewConfig(golinters.NewRowsErrCheck(rowserrcheckCfg)).
-			WithSince("v1.23.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetBugs, linter.PresetSQL).
-			WithURL("https://github.com/jingyugao/rowserrcheck"),
-
-		linter.NewConfig(golinters.NewScopelint()).
-			WithSince("v1.12.0").
-			WithPresets(linter.PresetBugs).
-			WithURL("https://github.com/kyoh86/scopelint").
-			Deprecated("The repository of the linter has been deprecated by the owner.", "v1.39.0", "exportloopref"),
-
-		linter.NewConfig(golinters.NewSQLCloseCheck()).
-			WithSince("v1.28.0").
-			WithPresets(linter.PresetBugs, linter.PresetSQL).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/ryanrolds/sqlclosecheck"),
-
-		linter.NewConfig(golinters.NewStaticcheck(staticcheckCfg)).
-			WithEnabledByDefault().
-			WithSince("v1.0.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetBugs, linter.PresetMetaLinter).
-			WithAlternativeNames(megacheckName).
-			WithURL("https://staticcheck.io/"),
-
-		linter.NewConfig(golinters.NewStructcheck(structcheckCfg)).
-			WithSince("v1.0.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetUnused).
-			WithURL("https://github.com/opennota/check").
-			Deprecated("The owner seems to have abandoned the linter.", "v1.49.0", "unused"),
-
-		linter.NewConfig(golinters.NewStylecheck(stylecheckCfg)).
-			WithSince("v1.20.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/dominikh/go-tools/tree/master/stylecheck"),
-
-		linter.NewConfig(golinters.NewTagAlign(tagalignCfg)).
-			WithSince("v1.53.0").
-			WithPresets(linter.PresetStyle, linter.PresetFormatting).
-			WithAutoFix().
-			WithURL("https://github.com/4meepo/tagalign"),
-
-		linter.NewConfig(golinters.NewTagliatelle(tagliatelleCfg)).
-			WithSince("v1.40.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/ldez/tagliatelle"),
-
-		linter.NewConfig(golinters.NewTenv(tenvCfg)).
-			WithSince("v1.43.0").
-			WithPresets(linter.PresetStyle).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/sivchari/tenv"),
-
-		linter.NewConfig(golinters.NewTestableexamples()).
-			WithSince("v1.50.0").
-			WithPresets(linter.PresetTest).
-			WithURL("https://github.com/maratori/testableexamples"),
-
-		linter.NewConfig(golinters.NewTestpackage(testpackageCfg)).
-			WithSince("v1.25.0").
-			WithPresets(linter.PresetStyle, linter.PresetTest).
-			WithURL("https://github.com/maratori/testpackage"),
-
-		linter.NewConfig(golinters.NewThelper(thelperCfg)).
-			WithSince("v1.34.0").
-			WithPresets(linter.PresetStyle).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/kulti/thelper"),
-
-		linter.NewConfig(golinters.NewTparallel()).
-			WithSince("v1.32.0").
-			WithPresets(linter.PresetStyle, linter.PresetTest).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/moricho/tparallel"),
-
-		linter.NewConfig(golinters.NewTypecheck()).
-			WithInternal().
-			WithEnabledByDefault().
-			WithSince("v1.3.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetBugs).
-			WithURL(""),
-
-		linter.NewConfig(golinters.NewUnconvert()).
-			WithSince("v1.0.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/mdempsky/unconvert"),
-
-		linter.NewConfig(golinters.NewUnparam(unparamCfg)).
-			WithSince("v1.9.0").
-			WithPresets(linter.PresetUnused).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/mvdan/unparam"),
-
-		linter.NewConfig(golinters.NewUnused(unusedCfg)).
-			WithEnabledByDefault().
-			WithSince("v1.20.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetUnused).
-			WithAlternativeNames(megacheckName).
-			ConsiderSlow().
-			WithChangeTypes().
-			WithURL("https://github.com/dominikh/go-tools/tree/master/unused"),
-
-		linter.NewConfig(golinters.NewUseStdlibVars(usestdlibvars)).
-			WithSince("v1.48.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/sashamelentyev/usestdlibvars"),
-
-		linter.NewConfig(golinters.NewVarcheck(varcheckCfg)).
-			WithSince("v1.0.0").
-			WithLoadForGoAnalysis().
-			WithPresets(linter.PresetUnused).
-			WithURL("https://github.com/opennota/check").
-			Deprecated("The owner seems to have abandoned the linter.", "v1.49.0", "unused"),
-
-		linter.NewConfig(golinters.NewVarnamelen(varnamelenCfg)).
-			WithSince("v1.43.0").
-			WithPresets(linter.PresetStyle).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/blizzy78/varnamelen"),
-
-		linter.NewConfig(golinters.NewWastedAssign()).
-			WithSince("v1.38.0").
-			WithPresets(linter.PresetStyle).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/sanposhiho/wastedassign"),
-
-		linter.NewConfig(golinters.NewWhitespace(whitespaceCfg)).
-			WithSince("v1.19.0").
-			WithPresets(linter.PresetStyle).
-			WithAutoFix().
-			WithURL("https://github.com/ultraware/whitespace"),
-
-		linter.NewConfig(golinters.NewWrapcheck(wrapcheckCfg)).
-			WithSince("v1.32.0").
-			WithPresets(linter.PresetStyle, linter.PresetError).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/tomarrell/wrapcheck"),
-
-		linter.NewConfig(golinters.NewWSL(wslCfg)).
-			WithSince("v1.20.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/bombsimon/wsl"),
-
-		linter.NewConfig(golinters.NewZerologLint()).
-			WithSince("v1.53.0").
-			WithPresets(linter.PresetBugs).
-			WithLoadForGoAnalysis().
-			WithURL("https://github.com/ykadowak/zerologlint"),
-
-		// nolintlint must be last because it looks at the results of all the previous linters for unused nolint directives
-		linter.NewConfig(golinters.NewNoLintLint(noLintLintCfg)).
-			WithSince("v1.26.0").
-			WithPresets(linter.PresetStyle).
-			WithURL("https://github.com/golangci/golangci-lint/blob/master/pkg/golinters/nolintlint/README.md"),
-	)
-
-	return linters
+	return resultLinters, nil
 }
 
-func (m Manager) GetAllEnabledByDefaultLinters() []*linter.Config {
+func (m *Manager) GetAllEnabledByDefaultLinters() []*linter.Config {
 	var ret []*linter.Config
-	for _, lc := range m.GetAllSupportedLinterConfigs() {
+	for _, lc := range m.linters {
 		if lc.EnabledByDefault {
 			ret = append(ret, lc)
 		}
@@ -903,48 +143,172 @@ func (m Manager) GetAllEnabledByDefaultLinters() []*linter.Config {
 	return ret
 }
 
-func linterConfigsToMap(lcs []*linter.Config) map[string]*linter.Config {
-	ret := map[string]*linter.Config{}
-	for _, lc := range lcs {
-		lc := lc // local copy
-		ret[lc.Name()] = lc
+//nolint:gocyclo // the complexity cannot be reduced.
+func (m *Manager) build(enabledByDefaultLinters []*linter.Config) map[string]*linter.Config {
+	m.debugf("Linters config: %#v", m.cfg.Linters)
+
+	resultLintersSet := map[string]*linter.Config{}
+	switch {
+	case m.cfg.Linters.DisableAll:
+		// no default linters
+	case len(m.cfg.Linters.Presets) != 0:
+		// imply --disable-all
+	case m.cfg.Linters.EnableAll:
+		resultLintersSet = linterConfigsToMap(m.linters)
+	default:
+		resultLintersSet = linterConfigsToMap(enabledByDefaultLinters)
 	}
 
-	return ret
+	// --presets can only add linters to default set
+	for _, p := range m.cfg.Linters.Presets {
+		for _, lc := range m.GetAllLinterConfigsForPreset(p) {
+			lc := lc
+			resultLintersSet[lc.Name()] = lc
+		}
+	}
+
+	// --fast removes slow linters from current set.
+	// It should be after --presets to be able to run only fast linters in preset.
+	// It should be before --enable and --disable to be able to enable or disable specific linter.
+	if m.cfg.Linters.Fast {
+		for name, lc := range resultLintersSet {
+			if lc.IsSlowLinter() {
+				delete(resultLintersSet, name)
+			}
+		}
+	}
+
+	for _, name := range m.cfg.Linters.Enable {
+		for _, lc := range m.GetLinterConfigs(name) {
+			// it's important to use lc.Name() rather than name because name can be an alias
+			resultLintersSet[lc.Name()] = lc
+		}
+	}
+
+	for _, name := range m.cfg.Linters.Disable {
+		for _, lc := range m.GetLinterConfigs(name) {
+			// it's important to use lc.Name() rather than name because name can be an alias
+			delete(resultLintersSet, lc.Name())
+		}
+	}
+
+	// typecheck is not a real linter and cannot be disabled.
+	if _, ok := resultLintersSet["typecheck"]; !ok && (m.cfg == nil || !m.cfg.InternalCmdTest) {
+		for _, lc := range m.GetLinterConfigs("typecheck") {
+			// it's important to use lc.Name() rather than name because name can be an alias
+			resultLintersSet[lc.Name()] = lc
+		}
+	}
+
+	return resultLintersSet
 }
 
-func (m Manager) GetAllLinterConfigsForPreset(p string) []*linter.Config {
-	var ret []*linter.Config
-	for _, lc := range m.GetAllSupportedLinterConfigs() {
-		if lc.IsDeprecated() {
+func (m *Manager) combineGoAnalysisLinters(linters map[string]*linter.Config) {
+	mlConfig := &linter.Config{}
+
+	var goanalysisLinters []*goanalysis.Linter
+
+	for _, lc := range linters {
+		lnt, ok := lc.Linter.(*goanalysis.Linter)
+		if !ok {
 			continue
 		}
 
-		for _, ip := range lc.InPresets {
-			if p == ip {
-				ret = append(ret, lc)
-				break
-			}
+		if lnt.LoadMode() == goanalysis.LoadModeWholeProgram {
+			// It's inefficient in CPU and memory to run whole-program and incremental analyzers at once.
+			continue
+		}
+
+		mlConfig.LoadMode |= lc.LoadMode
+
+		if lc.IsSlowLinter() {
+			mlConfig.ConsiderSlow()
 		}
+
+		mlConfig.InPresets = append(mlConfig.InPresets, lc.InPresets...)
+
+		goanalysisLinters = append(goanalysisLinters, lnt)
 	}
 
-	return ret
+	if len(goanalysisLinters) <= 1 {
+		m.debugf("Didn't combine go/analysis linters: got only %d linters", len(goanalysisLinters))
+		return
+	}
+
+	for _, lnt := range goanalysisLinters {
+		delete(linters, lnt.Name())
+	}
+
+	// Make order of execution of go/analysis analyzers stable.
+	sort.Slice(goanalysisLinters, func(i, j int) bool {
+		a, b := goanalysisLinters[i], goanalysisLinters[j]
+
+		if b.Name() == linter.LastLinter {
+			return true
+		}
+
+		if a.Name() == linter.LastLinter {
+			return false
+		}
+
+		return a.Name() <= b.Name()
+	})
+
+	mlConfig.Linter = goanalysis.NewMetaLinter(goanalysisLinters)
+
+	sort.Strings(mlConfig.InPresets)
+	mlConfig.InPresets = slices.Compact(mlConfig.InPresets)
+
+	linters[mlConfig.Linter.Name()] = mlConfig
+
+	m.debugf("Combined %d go/analysis linters into one metalinter", len(goanalysisLinters))
+}
+
+func (m *Manager) verbosePrintLintersStatus(lcs map[string]*linter.Config) {
+	var linterNames []string
+	for _, lc := range lcs {
+		if lc.Internal {
+			continue
+		}
+
+		linterNames = append(linterNames, lc.Name())
+	}
+	sort.Strings(linterNames)
+	m.log.Infof("Active %d linters: %s", len(linterNames), linterNames)
+
+	if len(m.cfg.Linters.Presets) != 0 {
+		sort.Strings(m.cfg.Linters.Presets)
+		m.log.Infof("Active presets: %s", m.cfg.Linters.Presets)
+	}
 }
 
-// Trims the Go version to keep only M.m.
-// Since Go 1.21 the version inside the go.mod can be a patched version (ex: 1.21.0).
-// https://go.dev/doc/toolchain#versions
-// This a problem with staticcheck and gocritic.
-func trimGoVersion(v string) string {
-	if v == "" {
-		return ""
+func AllPresets() []string {
+	return []string{
+		linter.PresetBugs,
+		linter.PresetComment,
+		linter.PresetComplexity,
+		linter.PresetError,
+		linter.PresetFormatting,
+		linter.PresetImport,
+		linter.PresetMetaLinter,
+		linter.PresetModule,
+		linter.PresetPerformance,
+		linter.PresetSQL,
+		linter.PresetStyle,
+		linter.PresetTest,
+		linter.PresetUnused,
 	}
+}
 
-	exp := regexp.MustCompile(`(\d\.\d+)\.\d+`)
+func linterConfigsToMap(lcs []*linter.Config) map[string]*linter.Config {
+	ret := map[string]*linter.Config{}
+	for _, lc := range lcs {
+		if lc.IsDeprecated() && lc.Deprecation.Level > linter.DeprecationWarning {
+			continue
+		}
 
-	if exp.MatchString(v) {
-		return exp.FindStringSubmatch(v)[1]
+		ret[lc.Name()] = lc
 	}
 
-	return v
+	return ret
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go
index 52a70d85900d8e84c040841a7e91e7aa785e1bd4..079d8198fa3c606009b7d779ff40c86390a021c5 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go
@@ -3,9 +3,12 @@ package lintersdb
 import (
 	"errors"
 	"fmt"
+	"os"
+	"slices"
 	"strings"
 
 	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/logutils"
 )
 
 type Validator struct {
@@ -13,13 +16,29 @@ type Validator struct {
 }
 
 func NewValidator(m *Manager) *Validator {
-	return &Validator{
-		m: m,
+	return &Validator{m: m}
+}
+
+// Validate validates the configuration by calling all other validators for different
+// sections in the configuration and then some additional linter validation functions.
+func (v Validator) Validate(cfg *config.Config) error {
+	validators := []func(cfg *config.Linters) error{
+		v.validateLintersNames,
+		v.validatePresets,
+		v.alternativeNamesDeprecation,
+	}
+
+	for _, v := range validators {
+		if err := v(&cfg.Linters); err != nil {
+			return err
+		}
 	}
+
+	return nil
 }
 
 func (v Validator) validateLintersNames(cfg *config.Linters) error {
-	allNames := append([]string{}, cfg.Enable...)
+	allNames := cfg.Enable
 	allNames = append(allNames, cfg.Disable...)
 
 	var unknownNames []string
@@ -38,12 +57,13 @@ func (v Validator) validateLintersNames(cfg *config.Linters) error {
 	return nil
 }
 
-func (v Validator) validatePresets(cfg *config.Linters) error {
-	allPresets := v.m.allPresetsSet()
+func (Validator) validatePresets(cfg *config.Linters) error {
+	presets := AllPresets()
+
 	for _, p := range cfg.Presets {
-		if !allPresets[p] {
+		if !slices.Contains(presets, p) {
 			return fmt.Errorf("no such preset %q: only next presets exist: (%s)",
-				p, strings.Join(v.m.AllPresets(), "|"))
+				p, strings.Join(presets, "|"))
 		}
 	}
 
@@ -54,53 +74,31 @@ func (v Validator) validatePresets(cfg *config.Linters) error {
 	return nil
 }
 
-func (v Validator) validateAllDisableEnableOptions(cfg *config.Linters) error {
-	if cfg.EnableAll && cfg.DisableAll {
-		return errors.New("--enable-all and --disable-all options must not be combined")
+func (v Validator) alternativeNamesDeprecation(cfg *config.Linters) error {
+	if v.m.cfg.InternalTest || v.m.cfg.InternalCmdTest || os.Getenv(logutils.EnvTestRun) == "1" {
+		return nil
 	}
 
-	if cfg.DisableAll {
-		if len(cfg.Enable) == 0 && len(cfg.Presets) == 0 {
-			return errors.New("all linters were disabled, but no one linter was enabled: must enable at least one")
-		}
-
-		if len(cfg.Disable) != 0 {
-			return fmt.Errorf("can't combine options --disable-all and --disable %s", cfg.Disable[0])
+	altNames := map[string][]string{}
+	for _, lc := range v.m.GetAllSupportedLinterConfigs() {
+		for _, alt := range lc.AlternativeNames {
+			altNames[alt] = append(altNames[alt], lc.Name())
 		}
 	}
 
-	if cfg.EnableAll && len(cfg.Enable) != 0 && !cfg.Fast {
-		return fmt.Errorf("can't combine options --enable-all and --enable %s", cfg.Enable[0])
-	}
+	names := cfg.Enable
+	names = append(names, cfg.Disable...)
 
-	return nil
-}
-
-func (v Validator) validateDisabledAndEnabledAtOneMoment(cfg *config.Linters) error {
-	enabledLintersSet := map[string]bool{}
-	for _, name := range cfg.Enable {
-		enabledLintersSet[name] = true
-	}
-
-	for _, name := range cfg.Disable {
-		if enabledLintersSet[name] {
-			return fmt.Errorf("linter %q can't be disabled and enabled at one moment", name)
+	for _, name := range names {
+		lc, ok := altNames[name]
+		if !ok {
+			continue
 		}
-	}
 
-	return nil
-}
-
-func (v Validator) validateEnabledDisabledLintersConfig(cfg *config.Linters) error {
-	validators := []func(cfg *config.Linters) error{
-		v.validateLintersNames,
-		v.validatePresets,
-		v.validateAllDisableEnableOptions,
-		v.validateDisabledAndEnabledAtOneMoment,
-	}
-	for _, v := range validators {
-		if err := v(cfg); err != nil {
-			return err
+		if len(lc) > 1 {
+			v.m.log.Warnf("The linter named %q is deprecated. It has been split into: %s.", name, strings.Join(lc, ", "))
+		} else {
+			v.m.log.Warnf("The name %q is deprecated. The linter has been renamed to: %s.", name, lc[0])
 		}
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/package.go
similarity index 52%
rename from vendor/github.com/golangci/golangci-lint/pkg/lint/load.go
rename to vendor/github.com/golangci/golangci-lint/pkg/lint/package.go
index babad5ba6074609fea9816c2e6acdc55198cc7f2..c314166cae372bc53ea6c1114808e84b46edb026 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/package.go
@@ -13,151 +13,99 @@ import (
 
 	"golang.org/x/tools/go/packages"
 
-	"github.com/golangci/golangci-lint/internal/pkgcache"
 	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/exitcodes"
-	"github.com/golangci/golangci-lint/pkg/fsutils"
-	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load"
+	"github.com/golangci/golangci-lint/pkg/goanalysis/load"
 	"github.com/golangci/golangci-lint/pkg/goutil"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 )
 
-type ContextLoader struct {
-	cfg         *config.Config
-	log         logutils.Log
-	debugf      logutils.DebugFunc
-	goenv       *goutil.Env
+// PackageLoader loads packages based on [golang.org/x/tools/go/packages.Load].
+type PackageLoader struct {
+	log    logutils.Log
+	debugf logutils.DebugFunc
+
+	cfg *config.Config
+
+	args []string
+
 	pkgTestIDRe *regexp.Regexp
-	lineCache   *fsutils.LineCache
-	fileCache   *fsutils.FileCache
-	pkgCache    *pkgcache.Cache
-	loadGuard   *load.Guard
+
+	goenv *goutil.Env
+
+	loadGuard *load.Guard
 }
 
-func NewContextLoader(cfg *config.Config, log logutils.Log, goenv *goutil.Env,
-	lineCache *fsutils.LineCache, fileCache *fsutils.FileCache, pkgCache *pkgcache.Cache, loadGuard *load.Guard) *ContextLoader {
-	return &ContextLoader{
+// NewPackageLoader creates a new PackageLoader.
+func NewPackageLoader(log logutils.Log, cfg *config.Config, args []string, goenv *goutil.Env, loadGuard *load.Guard) *PackageLoader {
+	return &PackageLoader{
 		cfg:         cfg,
+		args:        args,
 		log:         log,
 		debugf:      logutils.Debug(logutils.DebugKeyLoader),
 		goenv:       goenv,
 		pkgTestIDRe: regexp.MustCompile(`^(.*) \[(.*)\.test\]`),
-		lineCache:   lineCache,
-		fileCache:   fileCache,
-		pkgCache:    pkgCache,
 		loadGuard:   loadGuard,
 	}
 }
 
-func (cl *ContextLoader) prepareBuildContext() {
-	// Set GOROOT to have working cross-compilation: cross-compiled binaries
-	// have invalid GOROOT. XXX: can't use runtime.GOROOT().
-	goroot := cl.goenv.Get(goutil.EnvGoRoot)
-	if goroot == "" {
-		return
-	}
-
-	os.Setenv(string(goutil.EnvGoRoot), goroot)
-	build.Default.GOROOT = goroot
-	build.Default.BuildTags = cl.cfg.Run.BuildTags
-}
+// Load loads packages.
+func (l *PackageLoader) Load(ctx context.Context, linters []*linter.Config) (pkgs, deduplicatedPkgs []*packages.Package, err error) {
+	loadMode := findLoadMode(linters)
 
-func (cl *ContextLoader) findLoadMode(linters []*linter.Config) packages.LoadMode {
-	loadMode := packages.LoadMode(0)
-	for _, lc := range linters {
-		loadMode |= lc.LoadMode
+	pkgs, err = l.loadPackages(ctx, loadMode)
+	if err != nil {
+		return nil, nil, fmt.Errorf("failed to load packages: %w", err)
 	}
 
-	return loadMode
+	return pkgs, l.filterDuplicatePackages(pkgs), nil
 }
 
-func (cl *ContextLoader) buildArgs() []string {
-	args := cl.cfg.Run.Args
-	if len(args) == 0 {
-		return []string{"./..."}
-	}
-
-	var retArgs []string
-	for _, arg := range args {
-		if strings.HasPrefix(arg, ".") || filepath.IsAbs(arg) {
-			retArgs = append(retArgs, arg)
-		} else {
-			// go/packages doesn't work well if we don't have the prefix ./ for local packages
-			retArgs = append(retArgs, fmt.Sprintf(".%c%s", filepath.Separator, arg))
-		}
-	}
-
-	return retArgs
-}
+func (l *PackageLoader) loadPackages(ctx context.Context, loadMode packages.LoadMode) ([]*packages.Package, error) {
+	defer func(startedAt time.Time) {
+		l.log.Infof("Go packages loading at mode %s took %s", stringifyLoadMode(loadMode), time.Since(startedAt))
+	}(time.Now())
 
-func (cl *ContextLoader) makeBuildFlags() ([]string, error) {
-	var buildFlags []string
+	l.prepareBuildContext()
 
-	if len(cl.cfg.Run.BuildTags) != 0 {
-		// go help build
-		buildFlags = append(buildFlags, "-tags", strings.Join(cl.cfg.Run.BuildTags, " "))
-		cl.log.Infof("Using build tags: %v", cl.cfg.Run.BuildTags)
+	conf := &packages.Config{
+		Mode:       loadMode,
+		Tests:      l.cfg.Run.AnalyzeTests,
+		Context:    ctx,
+		BuildFlags: l.makeBuildFlags(),
+		Logf:       l.debugf,
+		// TODO: use fset, parsefile, overlay
 	}
 
-	mod := cl.cfg.Run.ModulesDownloadMode
-	if mod != "" {
-		// go help modules
-		allowedMods := []string{"mod", "readonly", "vendor"}
-		var ok bool
-		for _, am := range allowedMods {
-			if am == mod {
-				ok = true
-				break
-			}
-		}
-		if !ok {
-			return nil, fmt.Errorf("invalid modules download path %s, only (%s) allowed", mod, strings.Join(allowedMods, "|"))
-		}
+	args := buildArgs(l.args)
 
-		buildFlags = append(buildFlags, fmt.Sprintf("-mod=%s", cl.cfg.Run.ModulesDownloadMode))
-	}
+	l.debugf("Built loader args are %s", args)
 
-	return buildFlags, nil
-}
-
-func stringifyLoadMode(mode packages.LoadMode) string {
-	m := map[packages.LoadMode]string{
-		packages.NeedCompiledGoFiles: "compiled_files",
-		packages.NeedDeps:            "deps",
-		packages.NeedExportFile:      "exports_file",
-		packages.NeedFiles:           "files",
-		packages.NeedImports:         "imports",
-		packages.NeedName:            "name",
-		packages.NeedSyntax:          "syntax",
-		packages.NeedTypes:           "types",
-		packages.NeedTypesInfo:       "types_info",
-		packages.NeedTypesSizes:      "types_sizes",
+	pkgs, err := packages.Load(conf, args...)
+	if err != nil {
+		return nil, fmt.Errorf("failed to load with go/packages: %w", err)
 	}
 
-	var flags []string
-	for flag, flagStr := range m {
-		if mode&flag != 0 {
-			flags = append(flags, flagStr)
-		}
+	if loadMode&packages.NeedSyntax == 0 {
+		// Needed e.g. for go/analysis loading.
+		fset := token.NewFileSet()
+		packages.Visit(pkgs, nil, func(pkg *packages.Package) {
+			pkg.Fset = fset
+			l.loadGuard.AddMutexForPkg(pkg)
+		})
 	}
 
-	return fmt.Sprintf("%d (%s)", mode, strings.Join(flags, "|"))
-}
+	l.debugPrintLoadedPackages(pkgs)
 
-func (cl *ContextLoader) debugPrintLoadedPackages(pkgs []*packages.Package) {
-	cl.debugf("loaded %d pkgs", len(pkgs))
-	for i, pkg := range pkgs {
-		var syntaxFiles []string
-		for _, sf := range pkg.Syntax {
-			syntaxFiles = append(syntaxFiles, pkg.Fset.Position(sf.Pos()).Filename)
-		}
-		cl.debugf("Loaded pkg #%d: ID=%s GoFiles=%s CompiledGoFiles=%s Syntax=%s",
-			i, pkg.ID, pkg.GoFiles, pkg.CompiledGoFiles, syntaxFiles)
+	if err := l.parseLoadedPackagesErrors(pkgs); err != nil {
+		return nil, err
 	}
+
+	return l.filterTestMainPackages(pkgs), nil
 }
 
-func (cl *ContextLoader) parseLoadedPackagesErrors(pkgs []*packages.Package) error {
+func (*PackageLoader) parseLoadedPackagesErrors(pkgs []*packages.Package) error {
 	for _, pkg := range pkgs {
 		var errs []packages.Error
 		for _, err := range pkg.Errors {
@@ -185,61 +133,8 @@ func (cl *ContextLoader) parseLoadedPackagesErrors(pkgs []*packages.Package) err
 	return nil
 }
 
-func (cl *ContextLoader) loadPackages(ctx context.Context, loadMode packages.LoadMode) ([]*packages.Package, error) {
-	defer func(startedAt time.Time) {
-		cl.log.Infof("Go packages loading at mode %s took %s", stringifyLoadMode(loadMode), time.Since(startedAt))
-	}(time.Now())
-
-	cl.prepareBuildContext()
-
-	buildFlags, err := cl.makeBuildFlags()
-	if err != nil {
-		return nil, fmt.Errorf("failed to make build flags for go list: %w", err)
-	}
-
-	conf := &packages.Config{
-		Mode:       loadMode,
-		Tests:      cl.cfg.Run.AnalyzeTests,
-		Context:    ctx,
-		BuildFlags: buildFlags,
-		Logf:       cl.debugf,
-		// TODO: use fset, parsefile, overlay
-	}
-
-	args := cl.buildArgs()
-	cl.debugf("Built loader args are %s", args)
-	pkgs, err := packages.Load(conf, args...)
-	if err != nil {
-		return nil, fmt.Errorf("failed to load with go/packages: %w", err)
-	}
-
-	// Currently, go/packages doesn't guarantee that error will be returned
-	// if context was canceled. See
-	// https://github.com/golang/tools/commit/c5cec6710e927457c3c29d6c156415e8539a5111#r39261855
-	if ctx.Err() != nil {
-		return nil, fmt.Errorf("timed out to load packages: %w", ctx.Err())
-	}
-
-	if loadMode&packages.NeedSyntax == 0 {
-		// Needed e.g. for go/analysis loading.
-		fset := token.NewFileSet()
-		packages.Visit(pkgs, nil, func(pkg *packages.Package) {
-			pkg.Fset = fset
-			cl.loadGuard.AddMutexForPkg(pkg)
-		})
-	}
-
-	cl.debugPrintLoadedPackages(pkgs)
-
-	if err := cl.parseLoadedPackagesErrors(pkgs); err != nil {
-		return nil, err
-	}
-
-	return cl.filterTestMainPackages(pkgs), nil
-}
-
-func (cl *ContextLoader) tryParseTestPackage(pkg *packages.Package) (name string, isTest bool) {
-	matches := cl.pkgTestIDRe.FindStringSubmatch(pkg.ID)
+func (l *PackageLoader) tryParseTestPackage(pkg *packages.Package) (name string, isTest bool) {
+	matches := l.pkgTestIDRe.FindStringSubmatch(pkg.ID)
 	if matches == nil {
 		return "", false
 	}
@@ -247,36 +142,21 @@ func (cl *ContextLoader) tryParseTestPackage(pkg *packages.Package) (name string
 	return matches[1], true
 }
 
-func (cl *ContextLoader) filterTestMainPackages(pkgs []*packages.Package) []*packages.Package {
-	var retPkgs []*packages.Package
-	for _, pkg := range pkgs {
-		if pkg.Name == "main" && strings.HasSuffix(pkg.PkgPath, ".test") {
-			// it's an implicit testmain package
-			cl.debugf("skip pkg ID=%s", pkg.ID)
-			continue
-		}
-
-		retPkgs = append(retPkgs, pkg)
-	}
-
-	return retPkgs
-}
-
-func (cl *ContextLoader) filterDuplicatePackages(pkgs []*packages.Package) []*packages.Package {
+func (l *PackageLoader) filterDuplicatePackages(pkgs []*packages.Package) []*packages.Package {
 	packagesWithTests := map[string]bool{}
 	for _, pkg := range pkgs {
-		name, isTest := cl.tryParseTestPackage(pkg)
+		name, isTest := l.tryParseTestPackage(pkg)
 		if !isTest {
 			continue
 		}
 		packagesWithTests[name] = true
 	}
 
-	cl.debugf("package with tests: %#v", packagesWithTests)
+	l.debugf("package with tests: %#v", packagesWithTests)
 
 	var retPkgs []*packages.Package
 	for _, pkg := range pkgs {
-		_, isTest := cl.tryParseTestPackage(pkg)
+		_, isTest := l.tryParseTestPackage(pkg)
 		if !isTest && packagesWithTests[pkg.PkgPath] {
 			// If tests loading is enabled,
 			// for package with files a.go and a_test.go go/packages loads two packages:
@@ -284,7 +164,7 @@ func (cl *ContextLoader) filterDuplicatePackages(pkgs []*packages.Package) []*pa
 			// 2. ID=".../a [.../a.test]" GoFiles=[a.go a_test.go]
 			// We need only the second package, otherwise we can get warnings about unused variables/fields/functions
 			// in a.go if they are used only in a_test.go.
-			cl.debugf("skip pkg ID=%s because we load it with test package", pkg.ID)
+			l.debugf("skip pkg ID=%s because we load it with test package", pkg.ID)
 			continue
 		}
 
@@ -294,33 +174,110 @@ func (cl *ContextLoader) filterDuplicatePackages(pkgs []*packages.Package) []*pa
 	return retPkgs
 }
 
-func (cl *ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*linter.Context, error) {
-	loadMode := cl.findLoadMode(linters)
-	pkgs, err := cl.loadPackages(ctx, loadMode)
-	if err != nil {
-		return nil, fmt.Errorf("failed to load packages: %w", err)
+func (l *PackageLoader) filterTestMainPackages(pkgs []*packages.Package) []*packages.Package {
+	var retPkgs []*packages.Package
+	for _, pkg := range pkgs {
+		if pkg.Name == "main" && strings.HasSuffix(pkg.PkgPath, ".test") {
+			// it's an implicit testmain package
+			l.debugf("skip pkg ID=%s", pkg.ID)
+			continue
+		}
+
+		retPkgs = append(retPkgs, pkg)
 	}
 
-	deduplicatedPkgs := cl.filterDuplicatePackages(pkgs)
+	return retPkgs
+}
 
-	if len(deduplicatedPkgs) == 0 {
-		return nil, exitcodes.ErrNoGoFiles
+func (l *PackageLoader) debugPrintLoadedPackages(pkgs []*packages.Package) {
+	l.debugf("loaded %d pkgs", len(pkgs))
+	for i, pkg := range pkgs {
+		var syntaxFiles []string
+		for _, sf := range pkg.Syntax {
+			syntaxFiles = append(syntaxFiles, pkg.Fset.Position(sf.Pos()).Filename)
+		}
+		l.debugf("Loaded pkg #%d: ID=%s GoFiles=%s CompiledGoFiles=%s Syntax=%s",
+			i, pkg.ID, pkg.GoFiles, pkg.CompiledGoFiles, syntaxFiles)
 	}
+}
 
-	ret := &linter.Context{
-		Packages: deduplicatedPkgs,
+func (l *PackageLoader) prepareBuildContext() {
+	// Set GOROOT to have working cross-compilation: cross-compiled binaries
+	// have invalid GOROOT. XXX: can't use runtime.GOROOT().
+	goroot := l.goenv.Get(goutil.EnvGoRoot)
+	if goroot == "" {
+		return
+	}
+
+	os.Setenv(string(goutil.EnvGoRoot), goroot)
+	build.Default.GOROOT = goroot
+	build.Default.BuildTags = l.cfg.Run.BuildTags
+}
 
-		// At least `unused` linters works properly only on original (not deduplicated) packages,
-		// see https://github.com/golangci/golangci-lint/pull/585.
-		OriginalPackages: pkgs,
+func (l *PackageLoader) makeBuildFlags() []string {
+	var buildFlags []string
 
-		Cfg:       cl.cfg,
-		Log:       cl.log,
-		FileCache: cl.fileCache,
-		LineCache: cl.lineCache,
-		PkgCache:  cl.pkgCache,
-		LoadGuard: cl.loadGuard,
+	if len(l.cfg.Run.BuildTags) != 0 {
+		// go help build
+		buildFlags = append(buildFlags, "-tags", strings.Join(l.cfg.Run.BuildTags, " "))
+		l.log.Infof("Using build tags: %v", l.cfg.Run.BuildTags)
+	}
+
+	if l.cfg.Run.ModulesDownloadMode != "" {
+		// go help modules
+		buildFlags = append(buildFlags, fmt.Sprintf("-mod=%s", l.cfg.Run.ModulesDownloadMode))
+	}
+
+	return buildFlags
+}
+
+func buildArgs(args []string) []string {
+	if len(args) == 0 {
+		return []string{"./..."}
 	}
 
-	return ret, nil
+	var retArgs []string
+	for _, arg := range args {
+		if strings.HasPrefix(arg, ".") || filepath.IsAbs(arg) {
+			retArgs = append(retArgs, arg)
+		} else {
+			// go/packages doesn't work well if we don't have the prefix ./ for local packages
+			retArgs = append(retArgs, fmt.Sprintf(".%c%s", filepath.Separator, arg))
+		}
+	}
+
+	return retArgs
+}
+
+func findLoadMode(linters []*linter.Config) packages.LoadMode {
+	loadMode := packages.LoadMode(0)
+	for _, lc := range linters {
+		loadMode |= lc.LoadMode
+	}
+
+	return loadMode
+}
+
+func stringifyLoadMode(mode packages.LoadMode) string {
+	m := map[packages.LoadMode]string{
+		packages.NeedCompiledGoFiles: "compiled_files",
+		packages.NeedDeps:            "deps",
+		packages.NeedExportFile:      "exports_file",
+		packages.NeedFiles:           "files",
+		packages.NeedImports:         "imports",
+		packages.NeedName:            "name",
+		packages.NeedSyntax:          "syntax",
+		packages.NeedTypes:           "types",
+		packages.NeedTypesInfo:       "types_info",
+		packages.NeedTypesSizes:      "types_sizes",
+	}
+
+	var flags []string
+	for flag, flagStr := range m {
+		if mode&flag != 0 {
+			flags = append(flags, flagStr)
+		}
+	}
+
+	return fmt.Sprintf("%d (%s)", mode, strings.Join(flags, "|"))
 }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go
index d270892d53bef61eb20ff0680af1661af83897b6..f4877e1e89c6622839190aa0eac9e24b9981e072 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go
@@ -2,13 +2,11 @@ package lint
 
 import (
 	"context"
+	"errors"
 	"fmt"
 	"runtime/debug"
 	"strings"
 
-	"github.com/hashicorp/go-multierror"
-	gopackages "golang.org/x/tools/go/packages"
-
 	"github.com/golangci/golangci-lint/internal/errorutil"
 	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/fsutils"
@@ -16,90 +14,84 @@ import (
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
 	"github.com/golangci/golangci-lint/pkg/logutils"
-	"github.com/golangci/golangci-lint/pkg/packages"
 	"github.com/golangci/golangci-lint/pkg/result"
 	"github.com/golangci/golangci-lint/pkg/result/processors"
 	"github.com/golangci/golangci-lint/pkg/timeutils"
 )
 
+type processorStat struct {
+	inCount  int
+	outCount int
+}
+
 type Runner struct {
+	Log logutils.Log
+
+	lintCtx    *linter.Context
 	Processors []processors.Processor
-	Log        logutils.Log
 }
 
-func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env,
-	es *lintersdb.EnabledSet,
+func NewRunner(log logutils.Log, cfg *config.Config, args []string, goenv *goutil.Env,
 	lineCache *fsutils.LineCache, fileCache *fsutils.FileCache,
-	dbManager *lintersdb.Manager, pkgs []*gopackages.Package) (*Runner, error) {
+	dbManager *lintersdb.Manager, lintCtx *linter.Context,
+) (*Runner, error) {
 	// Beware that some processors need to add the path prefix when working with paths
 	// because they get invoked before the path prefixer (exclude and severity rules)
 	// or process other paths (skip files).
 	files := fsutils.NewFiles(lineCache, cfg.Output.PathPrefix)
 
-	skipFilesProcessor, err := processors.NewSkipFiles(cfg.Run.SkipFiles, cfg.Output.PathPrefix)
+	skipFilesProcessor, err := processors.NewSkipFiles(cfg.Issues.ExcludeFiles, cfg.Output.PathPrefix)
 	if err != nil {
 		return nil, err
 	}
 
-	skipDirs := cfg.Run.SkipDirs
-	if cfg.Run.UseDefaultSkipDirs {
-		skipDirs = append(skipDirs, packages.StdExcludeDirRegexps...)
+	skipDirs := cfg.Issues.ExcludeDirs
+	if cfg.Issues.UseDefaultExcludeDirs {
+		skipDirs = append(skipDirs, processors.StdExcludeDirRegexps...)
 	}
-	skipDirsProcessor, err := processors.NewSkipDirs(skipDirs, log.Child(logutils.DebugKeySkipDirs), cfg.Run.Args, cfg.Output.PathPrefix)
+
+	skipDirsProcessor, err := processors.NewSkipDirs(log.Child(logutils.DebugKeySkipDirs), skipDirs, args, cfg.Output.PathPrefix)
 	if err != nil {
 		return nil, err
 	}
 
-	enabledLinters, err := es.GetEnabledLintersMap()
+	enabledLinters, err := dbManager.GetEnabledLintersMap()
 	if err != nil {
 		return nil, fmt.Errorf("failed to get enabled linters: %w", err)
 	}
 
-	// print deprecated messages
-	if !cfg.InternalCmdTest {
-		for name, lc := range enabledLinters {
-			if !lc.IsDeprecated() {
-				continue
-			}
-
-			var extra string
-			if lc.Deprecation.Replacement != "" {
-				extra = fmt.Sprintf("Replaced by %s.", lc.Deprecation.Replacement)
-			}
-
-			log.Warnf("The linter '%s' is deprecated (since %s) due to: %s %s", name, lc.Deprecation.Since, lc.Deprecation.Message, extra)
-		}
-	}
-
 	return &Runner{
 		Processors: []processors.Processor{
 			processors.NewCgo(goenv),
 
 			// Must go after Cgo.
-			processors.NewFilenameUnadjuster(pkgs, log.Child(logutils.DebugKeyFilenameUnadjuster)),
+			processors.NewFilenameUnadjuster(lintCtx.Packages, log.Child(logutils.DebugKeyFilenameUnadjuster)),
+
+			// Must go after FilenameUnadjuster.
+			processors.NewInvalidIssue(log.Child(logutils.DebugKeyInvalidIssue)),
 
 			// Must be before diff, nolint and exclude autogenerated processor at least.
 			processors.NewPathPrettifier(),
 			skipFilesProcessor,
 			skipDirsProcessor, // must be after path prettifier
 
-			processors.NewAutogeneratedExclude(),
+			processors.NewAutogeneratedExclude(cfg.Issues.ExcludeGeneratedStrict),
 
 			// Must be before exclude because users see already marked output and configure excluding by it.
 			processors.NewIdentifierMarker(),
 
-			getExcludeProcessor(&cfg.Issues),
-			getExcludeRulesProcessor(&cfg.Issues, log, files),
+			processors.NewExclude(&cfg.Issues),
+			processors.NewExcludeRules(log.Child(logutils.DebugKeyExcludeRules), files, &cfg.Issues),
 			processors.NewNolint(log.Child(logutils.DebugKeyNolint), dbManager, enabledLinters),
 
 			processors.NewUniqByLine(cfg),
-			processors.NewDiff(cfg.Issues.Diff, cfg.Issues.DiffFromRevision, cfg.Issues.DiffPatchFilePath, cfg.Issues.WholeFiles),
+			processors.NewDiff(&cfg.Issues),
 			processors.NewMaxPerFileFromLinter(cfg),
 			processors.NewMaxSameIssues(cfg.Issues.MaxSameIssues, log.Child(logutils.DebugKeyMaxSameIssues), cfg),
 			processors.NewMaxFromLinter(cfg.Issues.MaxIssuesPerLinter, log.Child(logutils.DebugKeyMaxFromLinter), cfg),
 			processors.NewSourceCode(lineCache, log.Child(logutils.DebugKeySourceCode)),
 			processors.NewPathShortener(),
-			getSeverityRulesProcessor(&cfg.Severity, log, files),
+			processors.NewSeverity(log.Child(logutils.DebugKeySeverityRules), files, &cfg.Severity),
 
 			// The fixer still needs to see paths for the issues that are relative to the current directory.
 			processors.NewFixer(cfg, log, fileCache),
@@ -108,12 +100,41 @@ func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env,
 			processors.NewPathPrefixer(cfg.Output.PathPrefix),
 			processors.NewSortResults(cfg),
 		},
-		Log: log,
+		lintCtx: lintCtx,
+		Log:     log,
 	}, nil
 }
 
+func (r *Runner) Run(ctx context.Context, linters []*linter.Config) ([]result.Issue, error) {
+	sw := timeutils.NewStopwatch("linters", r.Log)
+	defer sw.Print()
+
+	var (
+		lintErrors error
+		issues     []result.Issue
+	)
+
+	for _, lc := range linters {
+		lc := lc
+		sw.TrackStage(lc.Name(), func() {
+			linterIssues, err := r.runLinterSafe(ctx, r.lintCtx, lc)
+			if err != nil {
+				lintErrors = errors.Join(lintErrors, fmt.Errorf("can't run linter %s", lc.Linter.Name()), err)
+				r.Log.Warnf("Can't run linter %s: %v", lc.Linter.Name(), err)
+
+				return
+			}
+
+			issues = append(issues, linterIssues...)
+		})
+	}
+
+	return r.processLintResults(issues), lintErrors
+}
+
 func (r *Runner) runLinterSafe(ctx context.Context, lintCtx *linter.Context,
-	lc *linter.Config) (ret []result.Issue, err error) {
+	lc *linter.Config,
+) (ret []result.Issue, err error) {
 	defer func() {
 		if panicData := recover(); panicData != nil {
 			if pe, ok := panicData.(*errorutil.PanicError); ok {
@@ -152,12 +173,7 @@ func (r *Runner) runLinterSafe(ctx context.Context, lintCtx *linter.Context,
 	return issues, nil
 }
 
-type processorStat struct {
-	inCount  int
-	outCount int
-}
-
-func (r Runner) processLintResults(inIssues []result.Issue) []result.Issue {
+func (r *Runner) processLintResults(inIssues []result.Issue) []result.Issue {
 	sw := timeutils.NewStopwatch("processing", r.Log)
 
 	var issuesBefore, issuesAfter int
@@ -188,7 +204,7 @@ func (r Runner) processLintResults(inIssues []result.Issue) []result.Issue {
 	return outIssues
 }
 
-func (r Runner) printPerProcessorStat(stat map[string]processorStat) {
+func (r *Runner) printPerProcessorStat(stat map[string]processorStat) {
 	parts := make([]string, 0, len(stat))
 	for name, ps := range stat {
 		if ps.inCount != 0 {
@@ -200,33 +216,6 @@ func (r Runner) printPerProcessorStat(stat map[string]processorStat) {
 	}
 }
 
-func (r Runner) Run(ctx context.Context, linters []*linter.Config, lintCtx *linter.Context) ([]result.Issue, error) {
-	sw := timeutils.NewStopwatch("linters", r.Log)
-	defer sw.Print()
-
-	var (
-		lintErrors *multierror.Error
-		issues     []result.Issue
-	)
-
-	for _, lc := range linters {
-		lc := lc
-		sw.TrackStage(lc.Name(), func() {
-			linterIssues, err := r.runLinterSafe(ctx, lintCtx, lc)
-			if err != nil {
-				lintErrors = multierror.Append(lintErrors, fmt.Errorf("can't run linter %s: %w", lc.Linter.Name(), err))
-				r.Log.Warnf("Can't run linter %s: %v", lc.Linter.Name(), err)
-
-				return
-			}
-
-			issues = append(issues, linterIssues...)
-		})
-	}
-
-	return r.processLintResults(issues), lintErrors.ErrorOrNil()
-}
-
 func (r *Runner) processIssues(issues []result.Issue, sw *timeutils.Stopwatch, statPerProcessor map[string]processorStat) []result.Issue {
 	for _, p := range r.Processors {
 		var newIssues []result.Issue
@@ -253,98 +242,3 @@ func (r *Runner) processIssues(issues []result.Issue, sw *timeutils.Stopwatch, s
 
 	return issues
 }
-
-func getExcludeProcessor(cfg *config.Issues) processors.Processor {
-	var excludeTotalPattern string
-
-	if len(cfg.ExcludePatterns) != 0 {
-		excludeTotalPattern = fmt.Sprintf("(%s)", strings.Join(cfg.ExcludePatterns, "|"))
-	}
-
-	var excludeProcessor processors.Processor
-	if cfg.ExcludeCaseSensitive {
-		excludeProcessor = processors.NewExcludeCaseSensitive(excludeTotalPattern)
-	} else {
-		excludeProcessor = processors.NewExclude(excludeTotalPattern)
-	}
-
-	return excludeProcessor
-}
-
-func getExcludeRulesProcessor(cfg *config.Issues, log logutils.Log, files *fsutils.Files) processors.Processor {
-	var excludeRules []processors.ExcludeRule
-	for _, r := range cfg.ExcludeRules {
-		excludeRules = append(excludeRules, processors.ExcludeRule{
-			BaseRule: processors.BaseRule{
-				Text:       r.Text,
-				Source:     r.Source,
-				Path:       r.Path,
-				PathExcept: r.PathExcept,
-				Linters:    r.Linters,
-			},
-		})
-	}
-
-	if cfg.UseDefaultExcludes {
-		for _, r := range config.GetExcludePatterns(cfg.IncludeDefaultExcludes) {
-			excludeRules = append(excludeRules, processors.ExcludeRule{
-				BaseRule: processors.BaseRule{
-					Text:    r.Pattern,
-					Linters: []string{r.Linter},
-				},
-			})
-		}
-	}
-
-	var excludeRulesProcessor processors.Processor
-	if cfg.ExcludeCaseSensitive {
-		excludeRulesProcessor = processors.NewExcludeRulesCaseSensitive(
-			excludeRules,
-			files,
-			log.Child(logutils.DebugKeyExcludeRules),
-		)
-	} else {
-		excludeRulesProcessor = processors.NewExcludeRules(
-			excludeRules,
-			files,
-			log.Child(logutils.DebugKeyExcludeRules),
-		)
-	}
-
-	return excludeRulesProcessor
-}
-
-func getSeverityRulesProcessor(cfg *config.Severity, log logutils.Log, files *fsutils.Files) processors.Processor {
-	var severityRules []processors.SeverityRule
-	for _, r := range cfg.Rules {
-		severityRules = append(severityRules, processors.SeverityRule{
-			Severity: r.Severity,
-			BaseRule: processors.BaseRule{
-				Text:       r.Text,
-				Source:     r.Source,
-				Path:       r.Path,
-				PathExcept: r.PathExcept,
-				Linters:    r.Linters,
-			},
-		})
-	}
-
-	var severityRulesProcessor processors.Processor
-	if cfg.CaseSensitive {
-		severityRulesProcessor = processors.NewSeverityRulesCaseSensitive(
-			cfg.Default,
-			severityRules,
-			files,
-			log.Child(logutils.DebugKeySeverityRules),
-		)
-	} else {
-		severityRulesProcessor = processors.NewSeverityRules(
-			cfg.Default,
-			severityRules,
-			files,
-			log.Child(logutils.DebugKeySeverityRules),
-		)
-	}
-
-	return severityRulesProcessor
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go
index 80c9fed7a959e065d87d0d3f350ab7f8ff3ea354..e4bb98109db1f8eb560c6154b8b0e9eb0b7e21a8 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go
@@ -5,6 +5,9 @@ import (
 	"strings"
 )
 
+// EnvTestRun value: "1"
+const EnvTestRun = "GL_TEST_RUN"
+
 // envDebug value: one or several debug keys.
 // examples:
 // - Remove output to `/dev/null`: `GL_DEBUG=linters_output ./golangci-lint run`
@@ -22,6 +25,7 @@ const (
 	DebugKeyExcludeRules       = "exclude_rules"
 	DebugKeyExec               = "exec"
 	DebugKeyFilenameUnadjuster = "filename_unadjuster"
+	DebugKeyInvalidIssue       = "invalid_issue"
 	DebugKeyForbidigo          = "forbidigo"
 	DebugKeyGoEnv              = "goenv"
 	DebugKeyLinter             = "linter"
@@ -57,9 +61,10 @@ const (
 
 const (
 	DebugKeyGoCritic  = "gocritic"  // Debugs `go-critic` linter.
+	DebugKeyGovet     = "govet"     // Debugs `govet` linter.
 	DebugKeyMegacheck = "megacheck" // Debugs `staticcheck` related linters.
 	DebugKeyNolint    = "nolint"    // Debugs a filter excluding issues by `//nolint` comments.
-	DebugKeyRevive    = "revive"    // Debugs `revice` linter.
+	DebugKeyRevive    = "revive"    // Debugs `revive` linter.
 )
 
 func getEnabledDebugs() map[string]bool {
@@ -99,8 +104,15 @@ func HaveDebugTag(tag string) bool {
 	return enabledDebugs[tag]
 }
 
+var verbose bool
+
 func SetupVerboseLog(log Log, isVerbose bool) {
 	if isVerbose {
+		verbose = isVerbose
 		log.SetLevel(LogLevelInfo)
 	}
 }
+
+func IsVerbose() bool {
+	return verbose
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go
index efda8cc20f6edca27c0829f232f4baa0361643f3..bddcf85523a751614e98ed95352ed1d58d095c41 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go
@@ -13,28 +13,23 @@ func NewMockLog() *MockLog {
 }
 
 func (m *MockLog) Fatalf(format string, args ...any) {
-	mArgs := []any{format}
-	m.Called(append(mArgs, args...)...)
+	m.Called(append([]any{format}, args...)...)
 }
 
 func (m *MockLog) Panicf(format string, args ...any) {
-	mArgs := []any{format}
-	m.Called(append(mArgs, args...)...)
+	m.Called(append([]any{format}, args...)...)
 }
 
 func (m *MockLog) Errorf(format string, args ...any) {
-	mArgs := []any{format}
-	m.Called(append(mArgs, args...)...)
+	m.Called(append([]any{format}, args...)...)
 }
 
 func (m *MockLog) Warnf(format string, args ...any) {
-	mArgs := []any{format}
-	m.Called(append(mArgs, args...)...)
+	m.Called(append([]any{format}, args...)...)
 }
 
 func (m *MockLog) Infof(format string, args ...any) {
-	mArgs := []any{format}
-	m.Called(append(mArgs, args...)...)
+	m.Called(append([]any{format}, args...)...)
 }
 
 func (m *MockLog) Child(name string) Log {
@@ -45,3 +40,43 @@ func (m *MockLog) Child(name string) Log {
 func (m *MockLog) SetLevel(level LogLevel) {
 	m.Called(level)
 }
+
+func (m *MockLog) OnFatalf(format string, args ...any) *MockLog {
+	arguments := append([]any{format}, args...)
+
+	m.On("Fatalf", arguments...)
+
+	return m
+}
+
+func (m *MockLog) OnPanicf(format string, args ...any) *MockLog {
+	arguments := append([]any{format}, args...)
+
+	m.On("Panicf", arguments...)
+
+	return m
+}
+
+func (m *MockLog) OnErrorf(format string, args ...any) *MockLog {
+	arguments := append([]any{format}, args...)
+
+	m.On("Errorf", arguments...)
+
+	return m
+}
+
+func (m *MockLog) OnWarnf(format string, args ...any) *MockLog {
+	arguments := append([]any{format}, args...)
+
+	m.On("Warnf", arguments...)
+
+	return m
+}
+
+func (m *MockLog) OnInfof(format string, args ...any) *MockLog {
+	arguments := append([]any{format}, args...)
+
+	m.On("Infof", arguments...)
+
+	return m
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/out.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/out.go
index 67c70dc8f283fbbf38eb4b875ab92b2f76035f24..ef137548671ff133ec1e59520127a86e4b483db8 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/out.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/out.go
@@ -5,5 +5,7 @@ import (
 	colorable "github.com/mattn/go-colorable"
 )
 
-var StdOut = color.Output // https://github.com/golangci/golangci-lint/issues/14
-var StdErr = colorable.NewColorableStderr()
+var (
+	StdOut = color.Output // https://github.com/golangci/golangci-lint/issues/14
+	StdErr = colorable.NewColorableStderr()
+)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go
index 367c94f385296be7627cee5b5e17755c5e7a5fe1..569a177a7c5c6a841b0c386868874b3a767c54c6 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go
@@ -5,7 +5,7 @@ import (
 	"os"
 	"time"
 
-	"github.com/sirupsen/logrus" //nolint:depguard
+	"github.com/sirupsen/logrus"
 
 	"github.com/golangci/golangci-lint/pkg/exitcodes"
 )
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/packages/skip.go b/vendor/github.com/golangci/golangci-lint/pkg/packages/skip.go
deleted file mode 100644
index cdd327f5d880a75514657954b0a46ab9bdc8a3df..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/packages/skip.go
+++ /dev/null
@@ -1,25 +0,0 @@
-package packages
-
-import (
-	"fmt"
-	"path/filepath"
-	"regexp"
-)
-
-func pathElemReImpl(e string, sep rune) string {
-	escapedSep := regexp.QuoteMeta(string(sep)) // needed for windows sep '\\'
-	return fmt.Sprintf(`(^|%s)%s($|%s)`, escapedSep, e, escapedSep)
-}
-
-func pathElemRe(e string) string {
-	return pathElemReImpl(e, filepath.Separator)
-}
-
-var StdExcludeDirRegexps = []string{
-	pathElemRe("vendor"),
-	pathElemRe("third_party"),
-	pathElemRe("testdata"),
-	pathElemRe("examples"),
-	pathElemRe("Godeps"),
-	pathElemRe("builtin"),
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go
index 3762ca0569f3ddbbcfc632077b97176b776f8592..e32eef7f51f55afaab5a7df7ce524dab5a6131f1 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go
@@ -7,6 +7,7 @@ import (
 	"sort"
 
 	"github.com/go-xmlfmt/xmlfmt"
+	"golang.org/x/exp/maps"
 
 	"github.com/golangci/golangci-lint/pkg/result"
 )
@@ -74,10 +75,7 @@ func (p Checkstyle) Print(issues []result.Issue) error {
 		file.Errors = append(file.Errors, newError)
 	}
 
-	out.Files = make([]*checkstyleFile, 0, len(files))
-	for _, file := range files {
-		out.Files = append(out.Files, file)
-	}
+	out.Files = maps.Values(files)
 
 	sort.Slice(out.Files, func(i, j int) bool {
 		return out.Files[i].Name < out.Files[j].Name
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go
deleted file mode 100644
index 7f148097ab8c9295cd06c03d2995af6e18634814..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go
+++ /dev/null
@@ -1,46 +0,0 @@
-package printers
-
-import (
-	"fmt"
-	"io"
-
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-type github struct {
-	w io.Writer
-}
-
-const defaultGithubSeverity = "error"
-
-// NewGithub output format outputs issues according to GitHub actions format:
-// https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-error-message
-func NewGithub(w io.Writer) Printer {
-	return &github{w: w}
-}
-
-// print each line as: ::error file=app.js,line=10,col=15::Something went wrong
-func formatIssueAsGithub(issue *result.Issue) string {
-	severity := defaultGithubSeverity
-	if issue.Severity != "" {
-		severity = issue.Severity
-	}
-
-	ret := fmt.Sprintf("::%s file=%s,line=%d", severity, issue.FilePath(), issue.Line())
-	if issue.Pos.Column != 0 {
-		ret += fmt.Sprintf(",col=%d", issue.Pos.Column)
-	}
-
-	ret += fmt.Sprintf("::%s (%s)", issue.Text, issue.FromLinter)
-	return ret
-}
-
-func (p *github) Print(issues []result.Issue) error {
-	for ind := range issues {
-		_, err := fmt.Fprintln(p.w, formatIssueAsGithub(&issues[ind]))
-		if err != nil {
-			return err
-		}
-	}
-	return nil
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/githubaction.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/githubaction.go
new file mode 100644
index 0000000000000000000000000000000000000000..0d71b1c9b338f135ded13db6d2dd0b369aeb5d06
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/githubaction.go
@@ -0,0 +1,51 @@
+package printers
+
+import (
+	"fmt"
+	"io"
+	"path/filepath"
+
+	"github.com/golangci/golangci-lint/pkg/result"
+)
+
+const defaultGithubSeverity = "error"
+
+type GitHubAction struct {
+	w io.Writer
+}
+
+// NewGitHubAction returns a printer that outputs issues in the GitHub Actions annotation format.
+func NewGitHubAction(w io.Writer) *GitHubAction {
+	return &GitHubAction{w: w}
+}
+
+func (p *GitHubAction) Print(issues []result.Issue) error {
+	for ind := range issues {
+		_, err := fmt.Fprintln(p.w, formatIssueAsGitHub(&issues[ind]))
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// print each line as: ::error file=app.js,line=10,col=15::Something went wrong
+func formatIssueAsGitHub(issue *result.Issue) string {
+	severity := defaultGithubSeverity
+	if issue.Severity != "" {
+		severity = issue.Severity
+	}
+
+	// Convert backslashes to forward slashes.
+	// This is needed when running on windows.
+	// Otherwise, GitHub won't be able to show the annotations pointing to the file path with backslashes.
+	file := filepath.ToSlash(issue.FilePath())
+
+	ret := fmt.Sprintf("::%s file=%s,line=%d", severity, file, issue.Line())
+	if issue.Pos.Column != 0 {
+		ret += fmt.Sprintf(",col=%d", issue.Pos.Column)
+	}
+
+	ret += fmt.Sprintf("::%s (%s)", issue.Text, issue.FromLinter)
+	return ret
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go
index 4bae526b879bb4f6a24d8e0dea9586223824b8c7..28509cac459b43ba1368c5151ce034c50084cec6 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go
@@ -9,7 +9,7 @@ import (
 )
 
 type JSON struct {
-	rd *report.Data
+	rd *report.Data // TODO(ldez) should be dropped in v2. Only used by the JSON reporter.
 	w  io.Writer
 }
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go
index 86a3811e4782999e840c72a71995084564833eb8..3e3f82f5805c7380ac7593f8e120952a2c8f4892 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go
@@ -7,6 +7,8 @@ import (
 	"sort"
 	"strings"
 
+	"golang.org/x/exp/maps"
+
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
@@ -71,9 +73,7 @@ func (p JunitXML) Print(issues []result.Issue) error {
 	}
 
 	var res testSuitesXML
-	for _, val := range suites {
-		res.TestSuites = append(res.TestSuites, val)
-	}
+	res.TestSuites = maps.Values(suites)
 
 	sort.Slice(res.TestSuites, func(i, j int) bool {
 		return res.TestSuites[i].Suite < res.TestSuites[j].Suite
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go
index ce3116fa4e2e89168cb4eb5332d0887cde73de72..d2944340874a31be45c1d864e62f76769b7e9cc5 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go
@@ -1,9 +1,143 @@
 package printers
 
 import (
+	"errors"
+	"fmt"
+	"io"
+	"os"
+	"path/filepath"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/logutils"
+	"github.com/golangci/golangci-lint/pkg/report"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-type Printer interface {
+const defaultFileMode = 0o644
+
+type issuePrinter interface {
 	Print(issues []result.Issue) error
 }
+
+// Printer prints issues
+type Printer struct {
+	cfg        *config.Output
+	reportData *report.Data
+
+	log logutils.Log
+
+	stdOut io.Writer
+	stdErr io.Writer
+}
+
+// NewPrinter creates a new Printer.
+func NewPrinter(log logutils.Log, cfg *config.Output, reportData *report.Data) (*Printer, error) {
+	if log == nil {
+		return nil, errors.New("missing log argument in constructor")
+	}
+	if cfg == nil {
+		return nil, errors.New("missing config argument in constructor")
+	}
+	if reportData == nil {
+		return nil, errors.New("missing reportData argument in constructor")
+	}
+
+	return &Printer{
+		cfg:        cfg,
+		reportData: reportData,
+		log:        log,
+		stdOut:     logutils.StdOut,
+		stdErr:     logutils.StdErr,
+	}, nil
+}
+
+// Print prints issues based on the formats defined
+func (c *Printer) Print(issues []result.Issue) error {
+	for _, format := range c.cfg.Formats {
+		err := c.printReports(issues, format)
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (c *Printer) printReports(issues []result.Issue, format config.OutputFormat) error {
+	w, shouldClose, err := c.createWriter(format.Path)
+	if err != nil {
+		return fmt.Errorf("can't create output for %s: %w", format.Path, err)
+	}
+
+	defer func() {
+		if file, ok := w.(io.Closer); shouldClose && ok {
+			_ = file.Close()
+		}
+	}()
+
+	p, err := c.createPrinter(format.Format, w)
+	if err != nil {
+		return err
+	}
+
+	if err = p.Print(issues); err != nil {
+		return fmt.Errorf("can't print %d issues: %w", len(issues), err)
+	}
+
+	return nil
+}
+
+func (c *Printer) createWriter(path string) (io.Writer, bool, error) {
+	if path == "" || path == "stdout" {
+		return c.stdOut, false, nil
+	}
+
+	if path == "stderr" {
+		return c.stdErr, false, nil
+	}
+
+	err := os.MkdirAll(filepath.Dir(path), os.ModePerm)
+	if err != nil {
+		return nil, false, err
+	}
+
+	f, err := os.OpenFile(path, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, defaultFileMode)
+	if err != nil {
+		return nil, false, err
+	}
+
+	return f, true, nil
+}
+
+func (c *Printer) createPrinter(format string, w io.Writer) (issuePrinter, error) {
+	var p issuePrinter
+
+	switch format {
+	case config.OutFormatJSON:
+		p = NewJSON(c.reportData, w)
+	case config.OutFormatColoredLineNumber, config.OutFormatLineNumber:
+		p = NewText(c.cfg.PrintIssuedLine,
+			format == config.OutFormatColoredLineNumber, c.cfg.PrintLinterName,
+			c.log.Child(logutils.DebugKeyTextPrinter), w)
+	case config.OutFormatTab, config.OutFormatColoredTab:
+		p = NewTab(c.cfg.PrintLinterName,
+			format == config.OutFormatColoredTab,
+			c.log.Child(logutils.DebugKeyTabPrinter), w)
+	case config.OutFormatCheckstyle:
+		p = NewCheckstyle(w)
+	case config.OutFormatCodeClimate:
+		p = NewCodeClimate(w)
+	case config.OutFormatHTML:
+		p = NewHTML(w)
+	case config.OutFormatJunitXML:
+		p = NewJunitXML(w)
+	case config.OutFormatGithubActions:
+		p = NewGitHubAction(w)
+	case config.OutFormatTeamCity:
+		p = NewTeamCity(w)
+	default:
+		return nil, fmt.Errorf("unknown output format %q", format)
+	}
+
+	return p, nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go
index 8ede897402bfa53d33af16dedbb2249225446abd..c6d390d188cfe2fd6394fc0d54688ae5109d95d4 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go
@@ -52,15 +52,15 @@ func (p *Tab) Print(issues []result.Issue) error {
 	return nil
 }
 
-func (p *Tab) printIssue(i *result.Issue, w io.Writer) {
-	text := p.SprintfColored(color.FgRed, "%s", i.Text)
+func (p *Tab) printIssue(issue *result.Issue, w io.Writer) {
+	text := p.SprintfColored(color.FgRed, "%s", issue.Text)
 	if p.printLinterName {
-		text = fmt.Sprintf("%s\t%s", i.FromLinter, text)
+		text = fmt.Sprintf("%s\t%s", issue.FromLinter, text)
 	}
 
-	pos := p.SprintfColored(color.Bold, "%s:%d", i.FilePath(), i.Line())
-	if i.Pos.Column != 0 {
-		pos += fmt.Sprintf(":%d", i.Pos.Column)
+	pos := p.SprintfColored(color.Bold, "%s:%d", issue.FilePath(), issue.Line())
+	if issue.Pos.Column != 0 {
+		pos += fmt.Sprintf(":%d", issue.Pos.Column)
 	}
 
 	fmt.Fprintf(w, "%s\t%s\n", pos, text)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go
index d3693e99716bfd2a5021e5b20c58117863fc1b98..1d1c9f7d32e1038bc49225033b0f41e7c062b08d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go
@@ -88,8 +88,8 @@ type InspectionType struct {
 }
 
 func (i InspectionType) Print(w io.Writer, escaper *strings.Replacer) (int, error) {
-	return fmt.Fprintf(w, "##teamcity[InspectionType id='%s' name='%s' description='%s' category='%s']\n",
-		limit(i.id, smallLimit), limit(i.name, smallLimit), limit(escaper.Replace(i.description), largeLimit), limit(i.category, smallLimit))
+	return fmt.Fprintf(w, "##teamcity[inspectionType id='%s' name='%s' description='%s' category='%s']\n",
+		cutVal(i.id, smallLimit), cutVal(i.name, smallLimit), cutVal(escaper.Replace(i.description), largeLimit), cutVal(i.category, smallLimit))
 }
 
 // InspectionInstance reports a specific defect, warning, error message.
@@ -105,15 +105,15 @@ type InspectionInstance struct {
 
 func (i InspectionInstance) Print(w io.Writer, replacer *strings.Replacer) (int, error) {
 	return fmt.Fprintf(w, "##teamcity[inspection typeId='%s' message='%s' file='%s' line='%d' SEVERITY='%s']\n",
-		limit(i.typeID, smallLimit),
-		limit(replacer.Replace(i.message), largeLimit),
-		limit(i.file, largeLimit),
+		cutVal(i.typeID, smallLimit),
+		cutVal(replacer.Replace(i.message), largeLimit),
+		cutVal(i.file, largeLimit),
 		i.line, strings.ToUpper(i.severity))
 }
 
-func limit(s string, max int) string {
+func cutVal(s string, limit int) string {
 	var size, count int
-	for i := 0; i < max && count < len(s); i++ {
+	for i := 0; i < limit && count < len(s); i++ {
 		_, size = utf8.DecodeRuneInString(s[count:])
 		count += size
 	}
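The renamed cutVal helper trims TeamCity service-message values rune by rune, so a multi-byte UTF-8 character is never split in half the way a plain byte slice s[:limit] could be. A small self-contained sketch of the same idea (cutRunes is an illustrative name, not the vendored function):

package main

import (
	"fmt"
	"unicode/utf8"
)

// cutRunes keeps at most limit runes (not bytes) of s. Decoding rune by rune
// guarantees the cut never lands inside a multi-byte UTF-8 sequence.
func cutRunes(s string, limit int) string {
	var size, count int
	for i := 0; i < limit && count < len(s); i++ {
		_, size = utf8.DecodeRuneInString(s[count:])
		count += size
	}
	return s[:count]
}

func main() {
	fmt.Println(cutRunes("héllo wörld", 4)) // "héll": 4 runes, 5 bytes
}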
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go
index 6e29c4b50f1022180c80c4ef0f5b7e335eacccb0..56cced7696956a016a1e93c7b751f29285b23012 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go
@@ -55,32 +55,32 @@ func (p *Text) Print(issues []result.Issue) error {
 	return nil
 }
 
-func (p *Text) printIssue(i *result.Issue) {
-	text := p.SprintfColored(color.FgRed, "%s", strings.TrimSpace(i.Text))
+func (p *Text) printIssue(issue *result.Issue) {
+	text := p.SprintfColored(color.FgRed, "%s", strings.TrimSpace(issue.Text))
 	if p.printLinterName {
-		text += fmt.Sprintf(" (%s)", i.FromLinter)
+		text += fmt.Sprintf(" (%s)", issue.FromLinter)
 	}
-	pos := p.SprintfColored(color.Bold, "%s:%d", i.FilePath(), i.Line())
-	if i.Pos.Column != 0 {
-		pos += fmt.Sprintf(":%d", i.Pos.Column)
+	pos := p.SprintfColored(color.Bold, "%s:%d", issue.FilePath(), issue.Line())
+	if issue.Pos.Column != 0 {
+		pos += fmt.Sprintf(":%d", issue.Pos.Column)
 	}
 	fmt.Fprintf(p.w, "%s: %s\n", pos, text)
 }
 
-func (p *Text) printSourceCode(i *result.Issue) {
-	for _, line := range i.SourceLines {
+func (p *Text) printSourceCode(issue *result.Issue) {
+	for _, line := range issue.SourceLines {
 		fmt.Fprintln(p.w, line)
 	}
 }
 
-func (p *Text) printUnderLinePointer(i *result.Issue) {
+func (p *Text) printUnderLinePointer(issue *result.Issue) {
 	// if column == 0 it means column is unknown (e.g. for gosec)
-	if len(i.SourceLines) != 1 || i.Pos.Column == 0 {
+	if len(issue.SourceLines) != 1 || issue.Pos.Column == 0 {
 		return
 	}
 
-	col0 := i.Pos.Column - 1
-	line := i.SourceLines[0]
+	col0 := issue.Pos.Column - 1
+	line := issue.SourceLines[0]
 	prefixRunes := make([]rune, 0, len(line))
 	for j := 0; j < len(line) && j < col0; j++ {
 		if line[j] == '\t' {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go b/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go
index 1e8cd30524b0e24b207398b03dd396c1a8f2d0c4..32246a6df43a4a9be3f29571372432ed1f1ef898 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go
@@ -1,7 +1,7 @@
 package result
 
 import (
-	"crypto/md5" //nolint:gosec
+	"crypto/md5" //nolint:gosec // for md5 hash
 	"fmt"
 	"go/token"
 
@@ -91,7 +91,7 @@ func (i *Issue) Fingerprint() string {
 		firstLine = i.SourceLines[0]
 	}
 
-	hash := md5.New() //nolint:gosec
+	hash := md5.New() //nolint:gosec // we don't need a strong hash here
 	_, _ = fmt.Fprintf(hash, "%s%s%s", i.Pos.Filename, i.Text, firstLine)
 
 	return fmt.Sprintf("%X", hash.Sum(nil))
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go
index c7675fce8f8a26ee5d800cc9ecd80c7d5730192e..2fca5117f2b6268e6aede759b1d22c51894534b8 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go
@@ -1,38 +1,48 @@
 package processors
 
 import (
-	"errors"
 	"fmt"
 	"go/parser"
 	"go/token"
 	"path/filepath"
+	"regexp"
 	"strings"
 
 	"github.com/golangci/golangci-lint/pkg/logutils"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-var autogenDebugf = logutils.Debug(logutils.DebugKeyAutogenExclude)
+const (
+	genCodeGenerated = "code generated"
+	genDoNotEdit     = "do not edit"
+	genAutoFile      = "autogenerated file" // easyjson
+)
 
-type ageFileSummary struct {
-	isGenerated bool
-}
+var _ Processor = (*AutogeneratedExclude)(nil)
 
-type ageFileSummaryCache map[string]*ageFileSummary
+type fileSummary struct {
+	generated bool
+}
 
 type AutogeneratedExclude struct {
-	fileSummaryCache ageFileSummaryCache
+	debugf logutils.DebugFunc
+
+	strict        bool
+	strictPattern *regexp.Regexp
+
+	fileSummaryCache map[string]*fileSummary
 }
 
-func NewAutogeneratedExclude() *AutogeneratedExclude {
+func NewAutogeneratedExclude(strict bool) *AutogeneratedExclude {
 	return &AutogeneratedExclude{
-		fileSummaryCache: ageFileSummaryCache{},
+		debugf:           logutils.Debug(logutils.DebugKeyAutogenExclude),
+		strict:           strict,
+		strictPattern:    regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`),
+		fileSummaryCache: map[string]*fileSummary{},
 	}
 }
 
-var _ Processor = &AutogeneratedExclude{}
-
-func (p *AutogeneratedExclude) Name() string {
+func (*AutogeneratedExclude) Name() string {
 	return "autogenerated_exclude"
 }
 
@@ -40,82 +50,104 @@ func (p *AutogeneratedExclude) Process(issues []result.Issue) ([]result.Issue, e
 	return filterIssuesErr(issues, p.shouldPassIssue)
 }
 
-func isSpecialAutogeneratedFile(filePath string) bool {
-	fileName := filepath.Base(filePath)
-	// fake files or generation definitions to which //line points to for generated files
-	return filepath.Ext(fileName) != ".go"
-}
+func (*AutogeneratedExclude) Finish() {}
 
-func (p *AutogeneratedExclude) shouldPassIssue(i *result.Issue) (bool, error) {
-	if i.FromLinter == "typecheck" {
+func (p *AutogeneratedExclude) shouldPassIssue(issue *result.Issue) (bool, error) {
+	if issue.FromLinter == typeCheckName {
 		// don't hide typechecking errors in generated files: users expect to see why the project isn't compiling
 		return true, nil
 	}
 
-	if filepath.Base(i.FilePath()) == "go.mod" {
+	if filepath.Base(issue.FilePath()) == "go.mod" {
 		return true, nil
 	}
 
-	if isSpecialAutogeneratedFile(i.FilePath()) {
-		return false, nil
+	// The file is already known.
+	fs := p.fileSummaryCache[issue.FilePath()]
+	if fs != nil {
+		return !fs.generated, nil
 	}
 
-	fs, err := p.getOrCreateFileSummary(i)
-	if err != nil {
-		return false, err
+	fs = &fileSummary{}
+	p.fileSummaryCache[issue.FilePath()] = fs
+
+	if p.strict {
+		var err error
+		fs.generated, err = p.isGeneratedFileStrict(issue.FilePath())
+		if err != nil {
+			return false, fmt.Errorf("failed to get doc (strict) of file %s: %w", issue.FilePath(), err)
+		}
+	} else {
+		doc, err := getComments(issue.FilePath())
+		if err != nil {
+			return false, fmt.Errorf("failed to get doc (lax) of file %s: %w", issue.FilePath(), err)
+		}
+
+		fs.generated = p.isGeneratedFileLax(doc)
 	}
 
+	p.debugf("file %q is generated: %t", issue.FilePath(), fs.generated)
+
 	// don't report issues for autogenerated files
-	return !fs.isGenerated, nil
+	return !fs.generated, nil
 }
 
-// isGenerated reports whether the source file is generated code.
-// Using a bit laxer rules than https://go.dev/s/generatedcode to
-// match more generated code. See #48 and #72.
-func isGeneratedFileByComment(doc string) bool {
-	const (
-		genCodeGenerated = "code generated"
-		genDoNotEdit     = "do not edit"
-		genAutoFile      = "autogenerated file" // easyjson
-	)
-
+// isGeneratedFileLax reports whether the source file is generated code.
+// The function uses a bit laxer rules than isGeneratedFileStrict to match more generated code.
+// See https://github.com/golangci/golangci-lint/issues/48 and https://github.com/golangci/golangci-lint/issues/72.
+func (p *AutogeneratedExclude) isGeneratedFileLax(doc string) bool {
 	markers := []string{genCodeGenerated, genDoNotEdit, genAutoFile}
+
 	doc = strings.ToLower(doc)
+
 	for _, marker := range markers {
 		if strings.Contains(doc, marker) {
-			autogenDebugf("doc contains marker %q: file is generated", marker)
+			p.debugf("doc contains marker %q: file is generated", marker)
+
 			return true
 		}
 	}
 
-	autogenDebugf("doc of len %d doesn't contain any of markers: %s", len(doc), markers)
+	p.debugf("doc of len %d doesn't contain any of markers: %s", len(doc), markers)
+
 	return false
 }
 
-func (p *AutogeneratedExclude) getOrCreateFileSummary(i *result.Issue) (*ageFileSummary, error) {
-	fs := p.fileSummaryCache[i.FilePath()]
-	if fs != nil {
-		return fs, nil
+// isGeneratedFileStrict returns true if the source file has a line that matches the regular expression:
+//
+//	^// Code generated .* DO NOT EDIT\.$
+//
+// This line must appear before the first non-comment, non-blank text in the file.
+// Based on https://go.dev/s/generatedcode.
+func (p *AutogeneratedExclude) isGeneratedFileStrict(filePath string) (bool, error) {
+	file, err := parser.ParseFile(token.NewFileSet(), filePath, nil, parser.PackageClauseOnly|parser.ParseComments)
+	if err != nil {
+		return false, fmt.Errorf("failed to parse file: %w", err)
 	}
 
-	fs = &ageFileSummary{}
-	p.fileSummaryCache[i.FilePath()] = fs
-
-	if i.FilePath() == "" {
-		return nil, errors.New("no file path for issue")
+	if file == nil || len(file.Comments) == 0 {
+		return false, nil
 	}
 
-	doc, err := getDoc(i.FilePath())
-	if err != nil {
-		return nil, fmt.Errorf("failed to get doc of file %s: %w", i.FilePath(), err)
+	for _, comment := range file.Comments {
+		if comment.Pos() > file.Package {
+			return false, nil
+		}
+
+		for _, line := range comment.List {
+			generated := p.strictPattern.MatchString(line.Text)
+			if generated {
+				p.debugf("doc contains ignore expression: file is generated")
+
+				return true, nil
+			}
+		}
 	}
 
-	fs.isGenerated = isGeneratedFileByComment(doc)
-	autogenDebugf("file %q is generated: %t", i.FilePath(), fs.isGenerated)
-	return fs, nil
+	return false, nil
 }
 
-func getDoc(filePath string) (string, error) {
+func getComments(filePath string) (string, error) {
 	fset := token.NewFileSet()
 	syntax, err := parser.ParseFile(fset, filePath, nil, parser.PackageClauseOnly|parser.ParseComments)
 	if err != nil {
@@ -129,5 +161,3 @@ func getDoc(filePath string) (string, error) {
 
 	return strings.Join(docLines, "\n"), nil
 }
-
-func (p *AutogeneratedExclude) Finish() {}
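The new strict mode follows the convention documented at https://go.dev/s/generatedcode: a `// Code generated ... DO NOT EDIT.` line that appears before the package clause marks the whole file as generated, while the lax mode keeps the older substring markers for compatibility. A standalone sketch of the strict check, parsing from a string instead of a file path for brevity (isGeneratedSrc is an illustrative name):

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"regexp"
)

// strictPattern is the marker line required by https://go.dev/s/generatedcode.
var strictPattern = regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`)

// isGeneratedSrc parses only the package clause plus comments, then checks
// every comment that appears before the package keyword against the pattern,
// mirroring isGeneratedFileStrict above.
func isGeneratedSrc(src string) (bool, error) {
	file, err := parser.ParseFile(token.NewFileSet(), "example.go", src,
		parser.PackageClauseOnly|parser.ParseComments)
	if err != nil {
		return false, err
	}

	for _, comment := range file.Comments {
		if comment.Pos() > file.Package {
			return false, nil
		}
		for _, line := range comment.List {
			if strictPattern.MatchString(line.Text) {
				return true, nil
			}
		}
	}

	return false, nil
}

func main() {
	src := "// Code generated by protoc-gen-go. DO NOT EDIT.\n\npackage example\n"
	ok, err := isGeneratedSrc(src)
	fmt.Println(ok, err) // true <nil>
}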
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go
index b5e138806b172500512ff02b56fc57635e4c02d7..d7a4f0ec4bc74b6bbc8d16f75a74d2a3dc071cbf 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go
@@ -8,13 +8,7 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-type BaseRule struct {
-	Text       string
-	Source     string
-	Path       string
-	PathExcept string
-	Linters    []string
-}
+const caseInsensitivePrefix = "(?i)"
 
 type baseRule struct {
 	text       *regexp.Regexp
@@ -63,7 +57,7 @@ func (r *baseRule) matchLinter(issue *result.Issue) bool {
 	return false
 }
 
-func (r *baseRule) matchSource(issue *result.Issue, lineCache *fsutils.LineCache, log logutils.Log) bool { //nolint:interfacer
+func (r *baseRule) matchSource(issue *result.Issue, lineCache *fsutils.LineCache, log logutils.Log) bool {
 	sourceLine, errSourceLine := lineCache.GetLine(issue.FilePath(), issue.Line())
 	if errSourceLine != nil {
 		log.Warnf("Failed to get line %s:%d from line cache: %s", issue.FilePath(), issue.Line(), errSourceLine)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go
index 8e77237518aedc78fe87622a1f9581b7fa39f5b0..0e659f0f3e59d8f50b7258db9c4d196cf8aa674f 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go
@@ -9,49 +9,51 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+var _ Processor = (*Cgo)(nil)
+
 type Cgo struct {
 	goCacheDir string
 }
 
-var _ Processor = Cgo{}
-
 func NewCgo(goenv *goutil.Env) *Cgo {
 	return &Cgo{
 		goCacheDir: goenv.Get(goutil.EnvGoCache),
 	}
 }
 
-func (p Cgo) Name() string {
+func (Cgo) Name() string {
 	return "cgo"
 }
 
 func (p Cgo) Process(issues []result.Issue) ([]result.Issue, error) {
-	return filterIssuesErr(issues, func(i *result.Issue) (bool, error) {
-		// some linters (.e.g gosec, deadcode) return incorrect filepaths for cgo issues,
-		// also cgo files have strange issues looking like false positives.
-
-		// cache dir contains all preprocessed files including cgo files
-
-		issueFilePath := i.FilePath()
-		if !filepath.IsAbs(i.FilePath()) {
-			absPath, err := filepath.Abs(i.FilePath())
-			if err != nil {
-				return false, fmt.Errorf("failed to build abs path for %q: %w", i.FilePath(), err)
-			}
-			issueFilePath = absPath
-		}
+	return filterIssuesErr(issues, p.shouldPassIssue)
+}
 
-		if p.goCacheDir != "" && strings.HasPrefix(issueFilePath, p.goCacheDir) {
-			return false, nil
-		}
+func (Cgo) Finish() {}
+
+func (p Cgo) shouldPassIssue(issue *result.Issue) (bool, error) {
+	// some linters (e.g. gosec, deadcode) return incorrect filepaths for cgo issues,
+	// also cgo files have strange issues looking like false positives.
+
+	// cache dir contains all preprocessed files including cgo files
 
-		if filepath.Base(i.FilePath()) == "_cgo_gotypes.go" {
-			// skip cgo warning for go1.10
-			return false, nil
+	issueFilePath := issue.FilePath()
+	if !filepath.IsAbs(issue.FilePath()) {
+		absPath, err := filepath.Abs(issue.FilePath())
+		if err != nil {
+			return false, fmt.Errorf("failed to build abs path for %q: %w", issue.FilePath(), err)
 		}
+		issueFilePath = absPath
+	}
 
-		return true, nil
-	})
-}
+	if p.goCacheDir != "" && strings.HasPrefix(issueFilePath, p.goCacheDir) {
+		return false, nil
+	}
 
-func (Cgo) Finish() {}
+	if filepath.Base(issue.FilePath()) == "_cgo_gotypes.go" {
+		// skip cgo warning for go1.10
+		return false, nil
+	}
+
+	return true, nil
+}
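The `var _ Processor = (*Cgo)(nil)` form used throughout these files is a compile-time assertion: it fails the build if the type stops satisfying the interface, constructs no value, and also covers types whose methods use pointer receivers. A minimal sketch of the pattern with stand-in types:

package main

// Processor stands in for the interface defined in this package.
type Processor interface {
	Name() string
}

type Cgo struct{}

func (Cgo) Name() string { return "cgo" }

// The blank assignment exists only to make the build fail if Cgo stops
// implementing Processor; (*Cgo)(nil) is a typed nil, so nothing is allocated.
var _ Processor = (*Cgo)(nil)

func main() {}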
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go
index 67104bab0a765ce992a4e8a799437d376e726bda..c602cdc65a1e09958bcdf9b3f8da6bc7f38ad9dd 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go
@@ -9,11 +9,14 @@ import (
 
 	"github.com/golangci/revgrep"
 
+	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
 const envGolangciDiffProcessorPatch = "GOLANGCI_DIFF_PROCESSOR_PATCH"
 
+var _ Processor = (*Diff)(nil)
+
 type Diff struct {
 	onlyNew       bool
 	fromRev       string
@@ -22,19 +25,17 @@ type Diff struct {
 	patch         string
 }
 
-var _ Processor = Diff{}
-
-func NewDiff(onlyNew bool, fromRev, patchFilePath string, wholeFiles bool) *Diff {
+func NewDiff(cfg *config.Issues) *Diff {
 	return &Diff{
-		onlyNew:       onlyNew,
-		fromRev:       fromRev,
-		patchFilePath: patchFilePath,
-		wholeFiles:    wholeFiles,
+		onlyNew:       cfg.Diff,
+		fromRev:       cfg.DiffFromRevision,
+		patchFilePath: cfg.DiffPatchFilePath,
+		wholeFiles:    cfg.WholeFiles,
 		patch:         os.Getenv(envGolangciDiffProcessorPatch),
 	}
 }
 
-func (p Diff) Name() string {
+func (Diff) Name() string {
 	return "diff"
 }
 
@@ -47,7 +48,7 @@ func (p Diff) Process(issues []result.Issue) ([]result.Issue, error) {
 	if p.patchFilePath != "" {
 		patch, err := os.ReadFile(p.patchFilePath)
 		if err != nil {
-			return nil, fmt.Errorf("can't read from patch file %s: %s", p.patchFilePath, err)
+			return nil, fmt.Errorf("can't read from patch file %s: %w", p.patchFilePath, err)
 		}
 		patchReader = bytes.NewReader(patch)
 	} else if p.patch != "" {
@@ -60,18 +61,23 @@ func (p Diff) Process(issues []result.Issue) ([]result.Issue, error) {
 		WholeFiles:   p.wholeFiles,
 	}
 	if err := c.Prepare(); err != nil {
-		return nil, fmt.Errorf("can't prepare diff by revgrep: %s", err)
+		return nil, fmt.Errorf("can't prepare diff by revgrep: %w", err)
 	}
 
-	return transformIssues(issues, func(i *result.Issue) *result.Issue {
-		hunkPos, isNew := c.IsNewIssue(i)
+	return transformIssues(issues, func(issue *result.Issue) *result.Issue {
+		if issue.FromLinter == typeCheckName {
+			// Never hide typechecking errors.
+			return issue
+		}
+
+		hunkPos, isNew := c.IsNewIssue(issue)
 		if !isNew {
 			return nil
 		}
 
-		newI := *i
-		newI.HunkPos = hunkPos
-		return &newI
+		newIssue := *issue
+		newIssue.HunkPos = hunkPos
+		return &newIssue
 	}), nil
 }
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go
index 92959a328ca2798681b45177e11b1e983b330476..5431204502a71277d3d00f772728eacca4d86148 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go
@@ -1,29 +1,45 @@
 package processors
 
 import (
+	"fmt"
 	"regexp"
+	"strings"
 
+	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+var _ Processor = (*Exclude)(nil)
+
 type Exclude struct {
+	name string
+
 	pattern *regexp.Regexp
 }
 
-var _ Processor = Exclude{}
+func NewExclude(cfg *config.Issues) *Exclude {
+	p := &Exclude{name: "exclude"}
 
-func NewExclude(pattern string) *Exclude {
-	var patternRe *regexp.Regexp
-	if pattern != "" {
-		patternRe = regexp.MustCompile("(?i)" + pattern)
+	var pattern string
+	if len(cfg.ExcludePatterns) != 0 {
+		pattern = fmt.Sprintf("(%s)", strings.Join(cfg.ExcludePatterns, "|"))
 	}
-	return &Exclude{
-		pattern: patternRe,
+
+	prefix := caseInsensitivePrefix
+	if cfg.ExcludeCaseSensitive {
+		p.name = "exclude-case-sensitive"
+		prefix = ""
 	}
+
+	if pattern != "" {
+		p.pattern = regexp.MustCompile(prefix + pattern)
+	}
+
+	return p
 }
 
 func (p Exclude) Name() string {
-	return "exclude"
+	return p.name
 }
 
 func (p Exclude) Process(issues []result.Issue) ([]result.Issue, error) {
@@ -31,29 +47,9 @@ func (p Exclude) Process(issues []result.Issue) ([]result.Issue, error) {
 		return issues, nil
 	}
 
-	return filterIssues(issues, func(i *result.Issue) bool {
-		return !p.pattern.MatchString(i.Text)
+	return filterIssues(issues, func(issue *result.Issue) bool {
+		return !p.pattern.MatchString(issue.Text)
 	}), nil
 }
 
-func (p Exclude) Finish() {}
-
-type ExcludeCaseSensitive struct {
-	*Exclude
-}
-
-var _ Processor = ExcludeCaseSensitive{}
-
-func NewExcludeCaseSensitive(pattern string) *ExcludeCaseSensitive {
-	var patternRe *regexp.Regexp
-	if pattern != "" {
-		patternRe = regexp.MustCompile(pattern)
-	}
-	return &ExcludeCaseSensitive{
-		&Exclude{pattern: patternRe},
-	}
-}
-
-func (p ExcludeCaseSensitive) Name() string {
-	return "exclude-case-sensitive"
-}
+func (Exclude) Finish() {}
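NewExclude now folds all configured patterns into a single alternation and prepends the shared caseInsensitivePrefix unless case-sensitive matching is requested, which is what lets the separate ExcludeCaseSensitive processor be deleted. A sketch of that construction with a hypothetical buildExcludeRe helper:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// buildExcludeRe joins several user patterns into one alternation and makes
// matching case-insensitive unless the caller asks otherwise, mirroring the
// NewExclude constructor above.
func buildExcludeRe(patterns []string, caseSensitive bool) *regexp.Regexp {
	if len(patterns) == 0 {
		return nil
	}

	pattern := fmt.Sprintf("(%s)", strings.Join(patterns, "|"))

	prefix := "(?i)"
	if caseSensitive {
		prefix = ""
	}

	return regexp.MustCompile(prefix + pattern)
}

func main() {
	re := buildExcludeRe([]string{"should have comment", "error return value not checked"}, false)
	fmt.Println(re.MatchString("exported function Foo SHOULD HAVE COMMENT")) // true: (?i) ignores case
}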
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go
index 2f7e30b430f2261f28af14ae9ebc99537b708a91..b468c510137d269b8a28e17c024ed757c6e4e24c 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go
@@ -3,93 +3,104 @@ package processors
 import (
 	"regexp"
 
+	"github.com/golangci/golangci-lint/pkg/config"
 	"github.com/golangci/golangci-lint/pkg/fsutils"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+var _ Processor = (*ExcludeRules)(nil)
+
 type excludeRule struct {
 	baseRule
 }
 
-type ExcludeRule struct {
-	BaseRule
-}
-
 type ExcludeRules struct {
-	rules []excludeRule
-	files *fsutils.Files
+	name string
+
 	log   logutils.Log
+	files *fsutils.Files
+
+	rules []excludeRule
 }
 
-func NewExcludeRules(rules []ExcludeRule, files *fsutils.Files, log logutils.Log) *ExcludeRules {
-	r := &ExcludeRules{
+func NewExcludeRules(log logutils.Log, files *fsutils.Files, cfg *config.Issues) *ExcludeRules {
+	p := &ExcludeRules{
+		name:  "exclude-rules",
 		files: files,
 		log:   log,
 	}
-	r.rules = createRules(rules, "(?i)")
 
-	return r
-}
+	prefix := caseInsensitivePrefix
+	if cfg.ExcludeCaseSensitive {
+		prefix = ""
+		p.name = "exclude-rules-case-sensitive"
+	}
 
-func createRules(rules []ExcludeRule, prefix string) []excludeRule {
-	parsedRules := make([]excludeRule, 0, len(rules))
-	for _, rule := range rules {
-		parsedRule := excludeRule{}
-		parsedRule.linters = rule.Linters
-		if rule.Text != "" {
-			parsedRule.text = regexp.MustCompile(prefix + rule.Text)
-		}
-		if rule.Source != "" {
-			parsedRule.source = regexp.MustCompile(prefix + rule.Source)
-		}
-		if rule.Path != "" {
-			path := fsutils.NormalizePathInRegex(rule.Path)
-			parsedRule.path = regexp.MustCompile(path)
-		}
-		if rule.PathExcept != "" {
-			pathExcept := fsutils.NormalizePathInRegex(rule.PathExcept)
-			parsedRule.pathExcept = regexp.MustCompile(pathExcept)
+	excludeRules := cfg.ExcludeRules
+
+	if cfg.UseDefaultExcludes {
+		for _, r := range config.GetExcludePatterns(cfg.IncludeDefaultExcludes) {
+			excludeRules = append(excludeRules, config.ExcludeRule{
+				BaseRule: config.BaseRule{
+					Text:    r.Pattern,
+					Linters: []string{r.Linter},
+				},
+			})
 		}
-		parsedRules = append(parsedRules, parsedRule)
 	}
-	return parsedRules
+
+	p.rules = createRules(excludeRules, prefix)
+
+	return p
 }
 
+func (p ExcludeRules) Name() string { return p.name }
+
 func (p ExcludeRules) Process(issues []result.Issue) ([]result.Issue, error) {
 	if len(p.rules) == 0 {
 		return issues, nil
 	}
-	return filterIssues(issues, func(i *result.Issue) bool {
+
+	return filterIssues(issues, func(issue *result.Issue) bool {
 		for _, rule := range p.rules {
 			rule := rule
-			if rule.match(i, p.files, p.log) {
+			if rule.match(issue, p.files, p.log) {
 				return false
 			}
 		}
+
 		return true
 	}), nil
 }
 
-func (ExcludeRules) Name() string { return "exclude-rules" }
-func (ExcludeRules) Finish()      {}
+func (ExcludeRules) Finish() {}
 
-var _ Processor = ExcludeRules{}
+func createRules(rules []config.ExcludeRule, prefix string) []excludeRule {
+	parsedRules := make([]excludeRule, 0, len(rules))
 
-type ExcludeRulesCaseSensitive struct {
-	*ExcludeRules
-}
+	for _, rule := range rules {
+		parsedRule := excludeRule{}
+		parsedRule.linters = rule.Linters
 
-func NewExcludeRulesCaseSensitive(rules []ExcludeRule, files *fsutils.Files, log logutils.Log) *ExcludeRulesCaseSensitive {
-	r := &ExcludeRules{
-		files: files,
-		log:   log,
-	}
-	r.rules = createRules(rules, "")
+		if rule.Text != "" {
+			parsedRule.text = regexp.MustCompile(prefix + rule.Text)
+		}
 
-	return &ExcludeRulesCaseSensitive{r}
-}
+		if rule.Source != "" {
+			parsedRule.source = regexp.MustCompile(prefix + rule.Source)
+		}
+
+		if rule.Path != "" {
+			parsedRule.path = regexp.MustCompile(fsutils.NormalizePathInRegex(rule.Path))
+		}
 
-func (ExcludeRulesCaseSensitive) Name() string { return "exclude-rules-case-sensitive" }
+		if rule.PathExcept != "" {
+			parsedRule.pathExcept = regexp.MustCompile(fsutils.NormalizePathInRegex(rule.PathExcept))
+		}
 
-var _ Processor = ExcludeCaseSensitive{}
+		parsedRules = append(parsedRules, parsedRule)
+	}
+
+	return parsedRules
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go
index 2aaafbf58b37ce4bf3a84ee943f90f4c22d99f5a..6a1387c872e3fc8d0b9714d9d955e93d34649ad7 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go
@@ -14,6 +14,8 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+var _ Processor = (*FilenameUnadjuster)(nil)
+
 type posMapper func(pos token.Position) token.Position
 
 type adjustMap struct {
@@ -30,7 +32,61 @@ type FilenameUnadjuster struct {
 	loggedUnadjustments map[string]bool
 }
 
-var _ Processor = &FilenameUnadjuster{}
+func NewFilenameUnadjuster(pkgs []*packages.Package, log logutils.Log) *FilenameUnadjuster {
+	m := adjustMap{m: map[string]posMapper{}}
+
+	startedAt := time.Now()
+	var wg sync.WaitGroup
+	wg.Add(len(pkgs))
+	for _, pkg := range pkgs {
+		go func(pkg *packages.Package) {
+			// It's important to call func here to run GC
+			processUnadjusterPkg(&m, pkg, log)
+			wg.Done()
+		}(pkg)
+	}
+	wg.Wait()
+	log.Infof("Pre-built %d adjustments in %s", len(m.m), time.Since(startedAt))
+
+	return &FilenameUnadjuster{
+		m:                   m.m,
+		log:                 log,
+		loggedUnadjustments: map[string]bool{},
+	}
+}
+
+func (*FilenameUnadjuster) Name() string {
+	return "filename_unadjuster"
+}
+
+func (p *FilenameUnadjuster) Process(issues []result.Issue) ([]result.Issue, error) {
+	return transformIssues(issues, func(issue *result.Issue) *result.Issue {
+		issueFilePath := issue.FilePath()
+		if !filepath.IsAbs(issue.FilePath()) {
+			absPath, err := filepath.Abs(issue.FilePath())
+			if err != nil {
+				p.log.Warnf("failed to build abs path for %q: %s", issue.FilePath(), err)
+				return issue
+			}
+			issueFilePath = absPath
+		}
+
+		mapper := p.m[issueFilePath]
+		if mapper == nil {
+			return issue
+		}
+
+		newIssue := *issue
+		newIssue.Pos = mapper(issue.Pos)
+		if !p.loggedUnadjustments[issue.Pos.Filename] {
+			p.log.Infof("Unadjusted from %v to %v", issue.Pos, newIssue.Pos)
+			p.loggedUnadjustments[issue.Pos.Filename] = true
+		}
+		return &newIssue
+	}), nil
+}
+
+func (*FilenameUnadjuster) Finish() {}
 
 func processUnadjusterPkg(m *adjustMap, pkg *packages.Package, log logutils.Log) {
 	fset := token.NewFileSet() // it's more memory efficient to not store all in one fset
@@ -64,68 +120,14 @@ func processUnadjusterFile(filename string, m *adjustMap, log logutils.Log, fset
 
 	m.Lock()
 	defer m.Unlock()
+
 	m.m[adjustedFilename] = func(adjustedPos token.Position) token.Position {
 		tokenFile := fset.File(syntax.Pos())
 		if tokenFile == nil {
 			log.Warnf("Failed to get token file for %s", adjustedFilename)
 			return adjustedPos
 		}
-		return fset.PositionFor(tokenFile.Pos(adjustedPos.Offset), false)
-	}
-}
-
-func NewFilenameUnadjuster(pkgs []*packages.Package, log logutils.Log) *FilenameUnadjuster {
-	m := adjustMap{m: map[string]posMapper{}}
 
-	startedAt := time.Now()
-	var wg sync.WaitGroup
-	wg.Add(len(pkgs))
-	for _, pkg := range pkgs {
-		go func(pkg *packages.Package) {
-			// It's important to call func here to run GC
-			processUnadjusterPkg(&m, pkg, log)
-			wg.Done()
-		}(pkg)
-	}
-	wg.Wait()
-	log.Infof("Pre-built %d adjustments in %s", len(m.m), time.Since(startedAt))
-
-	return &FilenameUnadjuster{
-		m:                   m.m,
-		log:                 log,
-		loggedUnadjustments: map[string]bool{},
+		return fset.PositionFor(tokenFile.Pos(adjustedPos.Offset), false)
 	}
 }
-
-func (p *FilenameUnadjuster) Name() string {
-	return "filename_unadjuster"
-}
-
-func (p *FilenameUnadjuster) Process(issues []result.Issue) ([]result.Issue, error) {
-	return transformIssues(issues, func(i *result.Issue) *result.Issue {
-		issueFilePath := i.FilePath()
-		if !filepath.IsAbs(i.FilePath()) {
-			absPath, err := filepath.Abs(i.FilePath())
-			if err != nil {
-				p.log.Warnf("failed to build abs path for %q: %s", i.FilePath(), err)
-				return i
-			}
-			issueFilePath = absPath
-		}
-
-		mapper := p.m[issueFilePath]
-		if mapper == nil {
-			return i
-		}
-
-		newI := *i
-		newI.Pos = mapper(i.Pos)
-		if !p.loggedUnadjustments[i.Pos.Filename] {
-			p.log.Infof("Unadjusted from %v to %v", i.Pos, newI.Pos)
-			p.loggedUnadjustments[i.Pos.Filename] = true
-		}
-		return &newI
-	}), nil
-}
-
-func (p *FilenameUnadjuster) Finish() {}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go
index a79a846288eee98a089db3623644a17c1340f36f..4915dc479a45ef5eae2535bd3d8eff3f2be334e9 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go
@@ -16,7 +16,7 @@ import (
 	"github.com/golangci/golangci-lint/pkg/timeutils"
 )
 
-var _ Processor = Fixer{}
+var _ Processor = (*Fixer)(nil)
 
 type Fixer struct {
 	cfg       *config.Config
@@ -34,12 +34,12 @@ func NewFixer(cfg *config.Config, log logutils.Log, fileCache *fsutils.FileCache
 	}
 }
 
-func (f Fixer) printStat() {
-	f.sw.PrintStages()
+func (Fixer) Name() string {
+	return "fixer"
 }
 
-func (f Fixer) Process(issues []result.Issue) ([]result.Issue, error) {
-	if !f.cfg.Issues.NeedFix {
+func (p Fixer) Process(issues []result.Issue) ([]result.Issue, error) {
+	if !p.cfg.Issues.NeedFix {
 		return issues, nil
 	}
 
@@ -57,37 +57,36 @@ func (f Fixer) Process(issues []result.Issue) ([]result.Issue, error) {
 
 	for file, issuesToFix := range issuesToFixPerFile {
 		var err error
-		f.sw.TrackStage("all", func() {
-			err = f.fixIssuesInFile(file, issuesToFix)
+		p.sw.TrackStage("all", func() {
+			err = p.fixIssuesInFile(file, issuesToFix)
 		})
 		if err != nil {
-			f.log.Errorf("Failed to fix issues in file %s: %s", file, err)
+			p.log.Errorf("Failed to fix issues in file %s: %s", file, err)
 
 			// show issues only if can't fix them
 			outIssues = append(outIssues, issuesToFix...)
 		}
 	}
 
-	f.printStat()
-	return outIssues, nil
-}
+	p.printStat()
 
-func (f Fixer) Name() string {
-	return "fixer"
+	return outIssues, nil
 }
 
-func (f Fixer) Finish() {}
+func (Fixer) Finish() {}
 
-func (f Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error {
+func (p Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error {
 	// TODO: don't read the whole file into memory: read line by line;
 	// can't just use bufio.scanner: it has a line length limit
-	origFileData, err := f.fileCache.GetFileBytes(filePath)
+	origFileData, err := p.fileCache.GetFileBytes(filePath)
 	if err != nil {
 		return fmt.Errorf("failed to get file bytes for %s: %w", filePath, err)
 	}
+
 	origFileLines := bytes.Split(origFileData, []byte("\n"))
 
 	tmpFileName := filepath.Join(filepath.Dir(filePath), fmt.Sprintf(".%s.golangci_fix", filepath.Base(filePath)))
+
 	tmpOutFile, err := os.Create(tmpFileName)
 	if err != nil {
 		return fmt.Errorf("failed to make file %s: %w", tmpFileName, err)
@@ -102,20 +101,21 @@ func (f Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error {
 
 	issues = issues[:0] // reuse the same memory
 	for line, lineIssues := range issuesPerLine {
-		if mergedIssue := f.mergeLineIssues(line, lineIssues, origFileLines); mergedIssue != nil {
+		if mergedIssue := p.mergeLineIssues(line, lineIssues, origFileLines); mergedIssue != nil {
 			issues = append(issues, *mergedIssue)
 		}
 	}
 
-	issues = f.findNotIntersectingIssues(issues)
+	issues = p.findNotIntersectingIssues(issues)
 
-	if err = f.writeFixedFile(origFileLines, issues, tmpOutFile); err != nil {
+	if err = p.writeFixedFile(origFileLines, issues, tmpOutFile); err != nil {
 		tmpOutFile.Close()
 		_ = robustio.RemoveAll(tmpOutFile.Name())
 		return err
 	}
 
 	tmpOutFile.Close()
+
 	if err = robustio.Rename(tmpOutFile.Name(), filePath); err != nil {
 		_ = robustio.RemoveAll(tmpOutFile.Name())
 		return fmt.Errorf("failed to rename %s -> %s: %w", tmpOutFile.Name(), filePath, err)
@@ -124,7 +124,7 @@ func (f Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error {
 	return nil
 }
 
-func (f Fixer) mergeLineIssues(lineNum int, lineIssues []result.Issue, origFileLines [][]byte) *result.Issue {
+func (p Fixer) mergeLineIssues(lineNum int, lineIssues []result.Issue, origFileLines [][]byte) *result.Issue {
 	origLine := origFileLines[lineNum-1] // lineNum is 1-based
 
 	if len(lineIssues) == 1 && lineIssues[0].Replacement.Inline == nil {
@@ -133,28 +133,30 @@ func (f Fixer) mergeLineIssues(lineNum int, lineIssues []result.Issue, origFileL
 
 	// check issues first
 	for ind := range lineIssues {
-		i := &lineIssues[ind]
-		if i.LineRange != nil {
-			f.log.Infof("Line %d has multiple issues but at least one of them is ranged: %#v", lineNum, lineIssues)
+		li := &lineIssues[ind]
+
+		if li.LineRange != nil {
+			p.log.Infof("Line %d has multiple issues but at least one of them is ranged: %#v", lineNum, lineIssues)
 			return &lineIssues[0]
 		}
 
-		r := i.Replacement
-		if r.Inline == nil || len(r.NewLines) != 0 || r.NeedOnlyDelete {
-			f.log.Infof("Line %d has multiple issues but at least one of them isn't inline: %#v", lineNum, lineIssues)
-			return &lineIssues[0]
+		inline := li.Replacement.Inline
+
+		if inline == nil || len(li.Replacement.NewLines) != 0 || li.Replacement.NeedOnlyDelete {
+			p.log.Infof("Line %d has multiple issues but at least one of them isn't inline: %#v", lineNum, lineIssues)
+			return li
 		}
 
-		if r.Inline.StartCol < 0 || r.Inline.Length <= 0 || r.Inline.StartCol+r.Inline.Length > len(origLine) {
-			f.log.Warnf("Line %d (%q) has invalid inline fix: %#v, %#v", lineNum, origLine, i, r.Inline)
+		if inline.StartCol < 0 || inline.Length <= 0 || inline.StartCol+inline.Length > len(origLine) {
+			p.log.Warnf("Line %d (%q) has invalid inline fix: %#v, %#v", lineNum, origLine, li, inline)
 			return nil
 		}
 	}
 
-	return f.applyInlineFixes(lineIssues, origLine, lineNum)
+	return p.applyInlineFixes(lineIssues, origLine, lineNum)
 }
 
-func (f Fixer) applyInlineFixes(lineIssues []result.Issue, origLine []byte, lineNum int) *result.Issue {
+func (p Fixer) applyInlineFixes(lineIssues []result.Issue, origLine []byte, lineNum int) *result.Issue {
 	sort.Slice(lineIssues, func(i, j int) bool {
 		return lineIssues[i].Replacement.Inline.StartCol < lineIssues[j].Replacement.Inline.StartCol
 	})
@@ -162,14 +164,14 @@ func (f Fixer) applyInlineFixes(lineIssues []result.Issue, origLine []byte, line
 	var newLineBuf bytes.Buffer
 	newLineBuf.Grow(len(origLine))
 
-	//nolint:misspell
+	//nolint:misspell // misspelling is intentional
 	// example: origLine="it's becouse of them", StartCol=5, Length=7, NewString="because"
 
 	curOrigLinePos := 0
 	for i := range lineIssues {
 		fix := lineIssues[i].Replacement.Inline
 		if fix.StartCol < curOrigLinePos {
-			f.log.Warnf("Line %d has multiple intersecting issues: %#v", lineNum, lineIssues)
+			p.log.Warnf("Line %d has multiple intersecting issues: %#v", lineNum, lineIssues)
 			return nil
 		}
 
@@ -190,7 +192,7 @@ func (f Fixer) applyInlineFixes(lineIssues []result.Issue, origLine []byte, line
 	return &mergedIssue
 }
 
-func (f Fixer) findNotIntersectingIssues(issues []result.Issue) []result.Issue {
+func (p Fixer) findNotIntersectingIssues(issues []result.Issue) []result.Issue {
 	sort.SliceStable(issues, func(i, j int) bool {
 		a, b := issues[i], issues[j]
 		return a.Line() < b.Line()
@@ -202,10 +204,10 @@ func (f Fixer) findNotIntersectingIssues(issues []result.Issue) []result.Issue {
 		issue := &issues[i]
 		rng := issue.GetLineRange()
 		if rng.From <= currentEnd {
-			f.log.Infof("Skip issue %#v: intersects with end %d", issue, currentEnd)
+			p.log.Infof("Skip issue %#v: intersects with end %d", issue, currentEnd)
 			continue // skip intersecting issue
 		}
-		f.log.Infof("Fix issue %#v with range %v", issue, issue.GetLineRange())
+		p.log.Infof("Fix issue %#v with range %v", issue, issue.GetLineRange())
 		ret = append(ret, *issue)
 		currentEnd = rng.To
 	}
@@ -213,7 +215,7 @@ func (f Fixer) findNotIntersectingIssues(issues []result.Issue) []result.Issue {
 	return ret
 }
 
-func (f Fixer) writeFixedFile(origFileLines [][]byte, issues []result.Issue, tmpOutFile *os.File) error {
+func (p Fixer) writeFixedFile(origFileLines [][]byte, issues []result.Issue, tmpOutFile *os.File) error {
 	// issues aren't intersecting
 
 	nextIssueIndex := 0
@@ -232,7 +234,7 @@ func (f Fixer) writeFixedFile(origFileLines [][]byte, issues []result.Issue, tmp
 			rng := nextIssue.GetLineRange()
 			if rng.From > rng.To {
 				// Maybe better decision is to skip such issues, re-evaluate if regressed.
-				f.log.Warnf("[fixer]: issue line range is probably invalid, fix can be incorrect (from=%d, to=%d, linter=%s)",
+				p.log.Warnf("[fixer]: issue line range is probably invalid, fix can be incorrect (from=%d, to=%d, linter=%s)",
 					rng.From, rng.To, nextIssue.FromLinter,
 				)
 			}
@@ -253,3 +255,7 @@ func (f Fixer) writeFixedFile(origFileLines [][]byte, issues []result.Issue, tmp
 
 	return nil
 }
+
+func (p Fixer) printStat() {
+	p.sw.PrintStages()
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go
index 5cc4e56ba22954ec6ea794e0eec49e06b1946782..876fd3bd3e30838940361682d813c3cfbe047a69 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go
@@ -6,6 +6,8 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+var _ Processor = (*IdentifierMarker)(nil)
+
 type replacePattern struct {
 	re   string
 	repl string
@@ -31,27 +33,39 @@ var replacePatterns = []replacePattern{
 	{`^composites: (\S+) composite literal uses unkeyed fields$`, "composites: `${1}` composite literal uses unkeyed fields"},
 
 	// gosec
-	{`^(\S+): Blacklisted import (\S+): weak cryptographic primitive$`,
-		"${1}: Blacklisted import `${2}`: weak cryptographic primitive"},
+	{
+		`^(\S+): Blacklisted import (\S+): weak cryptographic primitive$`,
+		"${1}: Blacklisted import `${2}`: weak cryptographic primitive",
+	},
 	{`^TLS InsecureSkipVerify set true.$`, "TLS `InsecureSkipVerify` set true."},
 
 	// gosimple
 	{`should replace loop with (.*)$`, "should replace loop with `${1}`"},
-	{`should use a simple channel send/receive instead of select with a single case`,
-		"should use a simple channel send/receive instead of `select` with a single case"},
-	{`should omit comparison to bool constant, can be simplified to (.+)$`,
-		"should omit comparison to bool constant, can be simplified to `${1}`"},
+	{
+		`should use a simple channel send/receive instead of select with a single case`,
+		"should use a simple channel send/receive instead of `select` with a single case",
+	},
+	{
+		`should omit comparison to bool constant, can be simplified to (.+)$`,
+		"should omit comparison to bool constant, can be simplified to `${1}`",
+	},
 	{`should write (.+) instead of (.+)$`, "should write `${1}` instead of `${2}`"},
 	{`redundant return statement$`, "redundant `return` statement"},
-	{`should replace this if statement with an unconditional strings.TrimPrefix`,
-		"should replace this `if` statement with an unconditional `strings.TrimPrefix`"},
+	{
+		`should replace this if statement with an unconditional strings.TrimPrefix`,
+		"should replace this `if` statement with an unconditional `strings.TrimPrefix`",
+	},
 
 	// staticcheck
 	{`this value of (\S+) is never used$`, "this value of `${1}` is never used"},
-	{`should use time.Since instead of time.Now\(\).Sub$`,
-		"should use `time.Since` instead of `time.Now().Sub`"},
-	{`should check returned error before deferring response.Close\(\)$`,
-		"should check returned error before deferring `response.Close()`"},
+	{
+		`should use time.Since instead of time.Now\(\).Sub$`,
+		"should use `time.Since` instead of `time.Now().Sub`",
+	},
+	{
+		`should check returned error before deferring response.Close\(\)$`,
+		"should check returned error before deferring `response.Close()`",
+	},
 	{`no value of type uint is less than 0$`, "no value of type `uint` is less than `0`"},
 
 	// unused
@@ -59,26 +73,40 @@ var replacePatterns = []replacePattern{
 
 	// typecheck
 	{`^unknown field (\S+) in struct literal$`, "unknown field `${1}` in struct literal"},
-	{`^invalid operation: (\S+) \(variable of type (\S+)\) has no field or method (\S+)$`,
-		"invalid operation: `${1}` (variable of type `${2}`) has no field or method `${3}`"},
+	{
+		`^invalid operation: (\S+) \(variable of type (\S+)\) has no field or method (\S+)$`,
+		"invalid operation: `${1}` (variable of type `${2}`) has no field or method `${3}`",
+	},
 	{`^undeclared name: (\S+)$`, "undeclared name: `${1}`"},
-	{`^cannot use addr \(variable of type (\S+)\) as (\S+) value in argument to (\S+)$`,
-		"cannot use addr (variable of type `${1}`) as `${2}` value in argument to `${3}`"},
+	{
+		`^cannot use addr \(variable of type (\S+)\) as (\S+) value in argument to (\S+)$`,
+		"cannot use addr (variable of type `${1}`) as `${2}` value in argument to `${3}`",
+	},
 	{`^other declaration of (\S+)$`, "other declaration of `${1}`"},
 	{`^(\S+) redeclared in this block$`, "`${1}` redeclared in this block"},
 
 	// golint
-	{`^exported (type|method|function|var|const) (\S+) should have comment or be unexported$`,
-		"exported ${1} `${2}` should have comment or be unexported"},
-	{`^comment on exported (type|method|function|var|const) (\S+) should be of the form "(\S+) ..."$`,
-		"comment on exported ${1} `${2}` should be of the form `${3} ...`"},
+	{
+		`^exported (type|method|function|var|const) (\S+) should have comment or be unexported$`,
+		"exported ${1} `${2}` should have comment or be unexported",
+	},
+	{
+		`^comment on exported (type|method|function|var|const) (\S+) should be of the form "(\S+) ..."$`,
+		"comment on exported ${1} `${2}` should be of the form `${3} ...`",
+	},
 	{`^should replace (.+) with (.+)$`, "should replace `${1}` with `${2}`"},
-	{`^if block ends with a return statement, so drop this else and outdent its block$`,
-		"`if` block ends with a `return` statement, so drop this `else` and outdent its block"},
-	{`^(struct field|var|range var|const|type|(?:func|method|interface method) (?:parameter|result)) (\S+) should be (\S+)$`,
-		"${1} `${2}` should be `${3}`"},
-	{`^don't use underscores in Go names; var (\S+) should be (\S+)$`,
-		"don't use underscores in Go names; var `${1}` should be `${2}`"},
+	{
+		`^if block ends with a return statement, so drop this else and outdent its block$`,
+		"`if` block ends with a `return` statement, so drop this `else` and outdent its block",
+	},
+	{
+		`^(struct field|var|range var|const|type|(?:func|method|interface method) (?:parameter|result)) (\S+) should be (\S+)$`,
+		"${1} `${2}` should be `${3}`",
+	},
+	{
+		`^don't use underscores in Go names; var (\S+) should be (\S+)$`,
+		"don't use underscores in Go names; var `${1}` should be `${2}`",
+	},
 }
 
 type IdentifierMarker struct {
@@ -100,16 +128,22 @@ func NewIdentifierMarker() *IdentifierMarker {
 	}
 }
 
-func (im IdentifierMarker) Process(issues []result.Issue) ([]result.Issue, error) {
-	return transformIssues(issues, func(i *result.Issue) *result.Issue {
-		iCopy := *i
-		iCopy.Text = im.markIdentifiers(iCopy.Text)
-		return &iCopy
+func (IdentifierMarker) Name() string {
+	return "identifier_marker"
+}
+
+func (p IdentifierMarker) Process(issues []result.Issue) ([]result.Issue, error) {
+	return transformIssues(issues, func(issue *result.Issue) *result.Issue {
+		newIssue := *issue
+		newIssue.Text = p.markIdentifiers(newIssue.Text)
+		return &newIssue
 	}), nil
 }
 
-func (im IdentifierMarker) markIdentifiers(s string) string {
-	for _, rr := range im.replaceRegexps {
+func (IdentifierMarker) Finish() {}
+
+func (p IdentifierMarker) markIdentifiers(s string) string {
+	for _, rr := range p.replaceRegexps {
 		rs := rr.re.ReplaceAllString(s, rr.repl)
 		if rs != s {
 			return rs
@@ -118,8 +152,3 @@ func (im IdentifierMarker) markIdentifiers(s string) string {
 
 	return s
 }
-
-func (im IdentifierMarker) Name() string {
-	return "identifier_marker"
-}
-func (im IdentifierMarker) Finish() {}
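Each replacePattern above pairs a regular expression that captures identifiers in a linter message with a replacement that re-inserts the captures wrapped in backticks. A minimal sketch of one such rewrite:

package main

import (
	"fmt"
	"regexp"
)

// The pattern captures the identifier; the replacement re-inserts it wrapped
// in backticks, turning a plain message into one with a highlighted name.
var undeclared = regexp.MustCompile(`^undeclared name: (\S+)$`)

func main() {
	msg := "undeclared name: fooBar"
	fmt.Println(undeclared.ReplaceAllString(msg, "undeclared name: `${1}`"))
	// undeclared name: `fooBar`
}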
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/invalid_issue.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/invalid_issue.go
new file mode 100644
index 0000000000000000000000000000000000000000..c1389e97074d27421ab020d31dcc4003571855f8
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/invalid_issue.go
@@ -0,0 +1,60 @@
+package processors
+
+import (
+	"path/filepath"
+
+	"github.com/golangci/golangci-lint/pkg/logutils"
+	"github.com/golangci/golangci-lint/pkg/result"
+)
+
+var _ Processor = (*InvalidIssue)(nil)
+
+type InvalidIssue struct {
+	log logutils.Log
+}
+
+func NewInvalidIssue(log logutils.Log) *InvalidIssue {
+	return &InvalidIssue{log: log}
+}
+
+func (InvalidIssue) Name() string {
+	return "invalid_issue"
+}
+
+func (p InvalidIssue) Process(issues []result.Issue) ([]result.Issue, error) {
+	tcIssues := filterIssues(issues, func(issue *result.Issue) bool {
+		return issue.FromLinter == typeCheckName
+	})
+
+	if len(tcIssues) > 0 {
+		return tcIssues, nil
+	}
+
+	return filterIssuesErr(issues, p.shouldPassIssue)
+}
+
+func (InvalidIssue) Finish() {}
+
+func (p InvalidIssue) shouldPassIssue(issue *result.Issue) (bool, error) {
+	if issue.FilePath() == "" {
+		p.log.Warnf("no file path for the issue: probably a bug inside the linter %q: %#v", issue.FromLinter, issue)
+
+		return false, nil
+	}
+
+	if filepath.Base(issue.FilePath()) == "go.mod" {
+		return true, nil
+	}
+
+	if !isGoFile(issue.FilePath()) {
+		p.log.Infof("issue related to file %s is skipped", issue.FilePath())
+
+		return false, nil
+	}
+
+	return true, nil
+}
+
+func isGoFile(name string) bool {
+	return filepath.Ext(name) == ".go"
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go
index 4691be38a4bb763c303e9f149b1ba82a2fd4daf1..a65b0c2b0cdf04f8475a67052ac1f5c76c687f0e 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go
@@ -6,7 +6,7 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-func filterIssues(issues []result.Issue, filter func(i *result.Issue) bool) []result.Issue {
+func filterIssues(issues []result.Issue, filter func(issue *result.Issue) bool) []result.Issue {
 	retIssues := make([]result.Issue, 0, len(issues))
 	for i := range issues {
 		if filter(&issues[i]) {
@@ -17,7 +17,7 @@ func filterIssues(issues []result.Issue, filter func(i *result.Issue) bool) []re
 	return retIssues
 }
 
-func filterIssuesErr(issues []result.Issue, filter func(i *result.Issue) (bool, error)) ([]result.Issue, error) {
+func filterIssuesErr(issues []result.Issue, filter func(issue *result.Issue) (bool, error)) ([]result.Issue, error) {
 	retIssues := make([]result.Issue, 0, len(issues))
 	for i := range issues {
 		ok, err := filter(&issues[i])
@@ -33,12 +33,12 @@ func filterIssuesErr(issues []result.Issue, filter func(i *result.Issue) (bool,
 	return retIssues, nil
 }
 
-func transformIssues(issues []result.Issue, transform func(i *result.Issue) *result.Issue) []result.Issue {
+func transformIssues(issues []result.Issue, transform func(issue *result.Issue) *result.Issue) []result.Issue {
 	retIssues := make([]result.Issue, 0, len(issues))
 	for i := range issues {
-		newI := transform(&issues[i])
-		if newI != nil {
-			retIssues = append(retIssues, *newI)
+		newIssue := transform(&issues[i])
+		if newIssue != nil {
+			retIssues = append(retIssues, *newIssue)
 		}
 	}
 
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go
index 649ed86ae9aaab80690bea4e722900e9c7970007..e6200eec4c7dac6e4e2f954a711e1937ec628eda 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go
@@ -6,25 +6,25 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+var _ Processor = (*MaxFromLinter)(nil)
+
 type MaxFromLinter struct {
-	lc    linterToCountMap
-	limit int
-	log   logutils.Log
-	cfg   *config.Config
+	linterCounter map[string]int
+	limit         int
+	log           logutils.Log
+	cfg           *config.Config
 }
 
-var _ Processor = &MaxFromLinter{}
-
 func NewMaxFromLinter(limit int, log logutils.Log, cfg *config.Config) *MaxFromLinter {
 	return &MaxFromLinter{
-		lc:    linterToCountMap{},
-		limit: limit,
-		log:   log,
-		cfg:   cfg,
+		linterCounter: map[string]int{},
+		limit:         limit,
+		log:           log,
+		cfg:           cfg,
 	}
 }
 
-func (p *MaxFromLinter) Name() string {
+func (*MaxFromLinter) Name() string {
 	return "max_from_linter"
 }
 
@@ -33,19 +33,20 @@ func (p *MaxFromLinter) Process(issues []result.Issue) ([]result.Issue, error) {
 		return issues, nil
 	}
 
-	return filterIssues(issues, func(i *result.Issue) bool {
-		if i.Replacement != nil && p.cfg.Issues.NeedFix {
+	return filterIssues(issues, func(issue *result.Issue) bool {
+		if issue.Replacement != nil && p.cfg.Issues.NeedFix {
 			// we need to fix all issues at once => we need to return all of them
 			return true
 		}
 
-		p.lc[i.FromLinter]++ // always inc for stat
-		return p.lc[i.FromLinter] <= p.limit
+		p.linterCounter[issue.FromLinter]++ // always inc for stat
+
+		return p.linterCounter[issue.FromLinter] <= p.limit
 	}), nil
 }
 
 func (p *MaxFromLinter) Finish() {
-	walkStringToIntMapSortedByValue(p.lc, func(linter string, count int) {
+	walkStringToIntMapSortedByValue(p.linterCounter, func(linter string, count int) {
 		if count > p.limit {
 			p.log.Infof("%d/%d issues from linter %s were hidden, use --max-issues-per-linter",
 				count-p.limit, count, linter)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go
index 64182e3e22be959b97686130a7422e3e4f113850..da9fe4b7df7fc832d2f2428119da402f07156421 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go
@@ -5,16 +5,13 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-type linterToCountMap map[string]int
-type fileToLinterToCountMap map[string]linterToCountMap
+var _ Processor = (*MaxPerFileFromLinter)(nil)
 
 type MaxPerFileFromLinter struct {
-	flc                        fileToLinterToCountMap
+	fileLinterCounter          fileLinterCounter
 	maxPerFileFromLinterConfig map[string]int
 }
 
-var _ Processor = &MaxPerFileFromLinter{}
-
 func NewMaxPerFileFromLinter(cfg *config.Config) *MaxPerFileFromLinter {
 	maxPerFileFromLinterConfig := map[string]int{}
 
@@ -26,34 +23,51 @@ func NewMaxPerFileFromLinter(cfg *config.Config) *MaxPerFileFromLinter {
 	}
 
 	return &MaxPerFileFromLinter{
-		flc:                        fileToLinterToCountMap{},
+		fileLinterCounter:          fileLinterCounter{},
 		maxPerFileFromLinterConfig: maxPerFileFromLinterConfig,
 	}
 }
 
-func (p *MaxPerFileFromLinter) Name() string {
+func (*MaxPerFileFromLinter) Name() string {
 	return "max_per_file_from_linter"
 }
 
 func (p *MaxPerFileFromLinter) Process(issues []result.Issue) ([]result.Issue, error) {
-	return filterIssues(issues, func(i *result.Issue) bool {
-		limit := p.maxPerFileFromLinterConfig[i.FromLinter]
+	return filterIssues(issues, func(issue *result.Issue) bool {
+		limit := p.maxPerFileFromLinterConfig[issue.FromLinter]
 		if limit == 0 {
 			return true
 		}
 
-		lm := p.flc[i.FilePath()]
-		if lm == nil {
-			p.flc[i.FilePath()] = linterToCountMap{}
-		}
-		count := p.flc[i.FilePath()][i.FromLinter]
-		if count >= limit {
+		if p.fileLinterCounter.GetCount(issue) >= limit {
 			return false
 		}
 
-		p.flc[i.FilePath()][i.FromLinter]++
+		p.fileLinterCounter.Increment(issue)
+
 		return true
 	}), nil
 }
 
-func (p *MaxPerFileFromLinter) Finish() {}
+func (*MaxPerFileFromLinter) Finish() {}
+
+type fileLinterCounter map[string]map[string]int
+
+func (f fileLinterCounter) GetCount(issue *result.Issue) int {
+	return f.getCounter(issue)[issue.FromLinter]
+}
+
+func (f fileLinterCounter) Increment(issue *result.Issue) {
+	f.getCounter(issue)[issue.FromLinter]++
+}
+
+func (f fileLinterCounter) getCounter(issue *result.Issue) map[string]int {
+	lc := f[issue.FilePath()]
+
+	if lc == nil {
+		lc = map[string]int{}
+		f[issue.FilePath()] = lc
+	}
+
+	return lc
+}
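fileLinterCounter replaces the old fileToLinterToCountMap alias with a small two-level counter whose inner map is created lazily, so Process can increment per file and per linter without nil checks. A self-contained sketch of the same shape (method names shortened for the example):

package main

import "fmt"

// fileLinterCounter keys the outer map by file path and the inner map by
// linter name; counter lazily creates the inner map on first use.
type fileLinterCounter map[string]map[string]int

func (f fileLinterCounter) increment(file, linter string) {
	f.counter(file)[linter]++
}

func (f fileLinterCounter) count(file, linter string) int {
	return f.counter(file)[linter]
}

func (f fileLinterCounter) counter(file string) map[string]int {
	lc := f[file]
	if lc == nil {
		lc = map[string]int{}
		f[file] = lc
	}
	return lc
}

func main() {
	c := fileLinterCounter{}
	c.increment("main.go", "govet")
	c.increment("main.go", "govet")
	fmt.Println(c.count("main.go", "govet")) // 2
}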
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go
index 391ae5fa7f494198178d571c73ba50d3265f5ca9..8948fa79db0ff7941dbc34c4ac4ed962d6397d6a 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go
@@ -8,27 +8,25 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-type textToCountMap map[string]int
+var _ Processor = (*MaxSameIssues)(nil)
 
 type MaxSameIssues struct {
-	tc    textToCountMap
-	limit int
-	log   logutils.Log
-	cfg   *config.Config
+	textCounter map[string]int
+	limit       int
+	log         logutils.Log
+	cfg         *config.Config
 }
 
-var _ Processor = &MaxSameIssues{}
-
 func NewMaxSameIssues(limit int, log logutils.Log, cfg *config.Config) *MaxSameIssues {
 	return &MaxSameIssues{
-		tc:    textToCountMap{},
-		limit: limit,
-		log:   log,
-		cfg:   cfg,
+		textCounter: map[string]int{},
+		limit:       limit,
+		log:         log,
+		cfg:         cfg,
 	}
 }
 
-func (p *MaxSameIssues) Name() string {
+func (*MaxSameIssues) Name() string {
 	return "max_same_issues"
 }
 
@@ -37,19 +35,19 @@ func (p *MaxSameIssues) Process(issues []result.Issue) ([]result.Issue, error) {
 		return issues, nil
 	}
 
-	return filterIssues(issues, func(i *result.Issue) bool {
-		if i.Replacement != nil && p.cfg.Issues.NeedFix {
+	return filterIssues(issues, func(issue *result.Issue) bool {
+		if issue.Replacement != nil && p.cfg.Issues.NeedFix {
 			// we need to fix all issues at once => we need to return all of them
 			return true
 		}
 
-		p.tc[i.Text]++ // always inc for stat
-		return p.tc[i.Text] <= p.limit
+		p.textCounter[issue.Text]++ // always inc for stat
+		return p.textCounter[issue.Text] <= p.limit
 	}), nil
 }
 
 func (p *MaxSameIssues) Finish() {
-	walkStringToIntMapSortedByValue(p.tc, func(text string, count int) {
+	walkStringToIntMapSortedByValue(p.textCounter, func(text string, count int) {
 		if count > p.limit {
 			p.log.Infof("%d/%d issues with text %q were hidden, use --max-same-issues",
 				count-p.limit, count, text)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go
index 181d3bf1fea1bbfddf29534f77089dd4471ec1a3..7794bd3ecb6ee5b65d848fe9aa62155dc5d1ce99 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go
@@ -1,7 +1,6 @@
 package processors
 
 import (
-	"errors"
 	"go/ast"
 	"go/parser"
 	"go/token"
@@ -9,15 +8,18 @@ import (
 	"sort"
 	"strings"
 
-	"github.com/golangci/golangci-lint/pkg/golinters"
+	"golang.org/x/exp/maps"
+
+	"github.com/golangci/golangci-lint/pkg/golinters/nolintlint"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+var _ Processor = (*Nolint)(nil)
+
 var nolintDebugf = logutils.Debug(logutils.DebugKeyNolint)
-var nolintRe = regexp.MustCompile(`^nolint( |:|$)`)
 
 type ignoredRange struct {
 	linters                []string
@@ -33,7 +35,7 @@ func (i *ignoredRange) doesMatch(issue *result.Issue) bool {
 	}
 
 	// only allow selective nolinting of nolintlint
-	nolintFoundForLinter := len(i.linters) == 0 && issue.FromLinter != golinters.NoLintLintName
+	nolintFoundForLinter := len(i.linters) == 0 && issue.FromLinter != nolintlint.LinterName
 
 	for _, linterName := range i.linters {
 		if linterName == issue.FromLinter {
@@ -48,7 +50,7 @@ func (i *ignoredRange) doesMatch(issue *result.Issue) bool {
 
 	// handle possible unused nolint directives
 	// nolintlint generates potential issues for every nolint directive, and they are filtered out here
-	if issue.FromLinter == golinters.NoLintLintName && issue.ExpectNoLint {
+	if issue.FromLinter == nolintlint.LinterName && issue.ExpectNoLint {
 		if issue.ExpectedNoLintLinter != "" {
 			return i.matchedIssueFromLinter[issue.ExpectedNoLintLinter]
 		}
@@ -62,30 +64,29 @@ type fileData struct {
 	ignoredRanges []ignoredRange
 }
 
-type filesCache map[string]*fileData
-
 type Nolint struct {
-	cache          filesCache
+	fileCache      map[string]*fileData
 	dbManager      *lintersdb.Manager
 	enabledLinters map[string]*linter.Config
 	log            logutils.Log
 
 	unknownLintersSet map[string]bool
+
+	pattern *regexp.Regexp
 }
 
 func NewNolint(log logutils.Log, dbManager *lintersdb.Manager, enabledLinters map[string]*linter.Config) *Nolint {
 	return &Nolint{
-		cache:             filesCache{},
+		fileCache:         map[string]*fileData{},
 		dbManager:         dbManager,
 		enabledLinters:    enabledLinters,
 		log:               log,
 		unknownLintersSet: map[string]bool{},
+		pattern:           regexp.MustCompile(`^nolint( |:|$)`),
 	}
 }
 
-var _ Processor = &Nolint{}
-
-func (p *Nolint) Name() string {
+func (*Nolint) Name() string {
 	return "nolint"
 }
 
@@ -95,33 +96,77 @@ func (p *Nolint) Process(issues []result.Issue) ([]result.Issue, error) {
 	return filterIssuesErr(issues, p.shouldPassIssue)
 }
 
-func (p *Nolint) getOrCreateFileData(i *result.Issue) (*fileData, error) {
-	fd := p.cache[i.FilePath()]
-	if fd != nil {
-		return fd, nil
+func (p *Nolint) Finish() {
+	if len(p.unknownLintersSet) == 0 {
+		return
 	}
 
-	fd = &fileData{}
-	p.cache[i.FilePath()] = fd
+	unknownLinters := maps.Keys(p.unknownLintersSet)
+	sort.Strings(unknownLinters)
+
+	p.log.Warnf("Found unknown linters in //nolint directives: %s", strings.Join(unknownLinters, ", "))
+}
+
+func (p *Nolint) shouldPassIssue(issue *result.Issue) (bool, error) {
+	nolintDebugf("got issue: %v", *issue)
+
+	// don't expect disabled linters to cover their nolint statements
+	if issue.FromLinter == nolintlint.LinterName && issue.ExpectNoLint && issue.ExpectedNoLintLinter != "" {
+		nolintDebugf("enabled linters: %v", p.enabledLinters)
+
+		if p.enabledLinters[issue.ExpectedNoLintLinter] == nil {
+			return false, nil
+		}
+
+		nolintDebugf("checking that lint issue was used for %s: %v", issue.ExpectedNoLintLinter, issue)
+	}
+
+	fd := p.getOrCreateFileData(issue)
+
+	for _, ir := range fd.ignoredRanges {
+		if !ir.doesMatch(issue) {
+			continue
+		}
+
+		nolintDebugf("found ignored range for issue %v: %v", issue, ir)
+
+		ir.matchedIssueFromLinter[issue.FromLinter] = true
+
+		if ir.originalRange != nil {
+			ir.originalRange.matchedIssueFromLinter[issue.FromLinter] = true
+		}
+
+		return false, nil
+	}
+
+	return true, nil
+}
 
-	if i.FilePath() == "" {
-		return nil, errors.New("no file path for issue")
+func (p *Nolint) getOrCreateFileData(issue *result.Issue) *fileData {
+	fd := p.fileCache[issue.FilePath()]
+	if fd != nil {
+		return fd
 	}
 
+	fd = &fileData{}
+	p.fileCache[issue.FilePath()] = fd
+
 	// TODO: migrate this parsing to go/analysis facts
 	// or cache them somehow per file.
 
 	// Don't use cached ASTs because they consume a lot of memory on large projects.
 	fset := token.NewFileSet()
-	f, err := parser.ParseFile(fset, i.FilePath(), nil, parser.ParseComments)
+	f, err := parser.ParseFile(fset, issue.FilePath(), nil, parser.ParseComments)
 	if err != nil {
 		// Don't report the error because it must already be reported by typecheck or go/analysis.
-		return fd, nil
+		return fd
 	}
 
-	fd.ignoredRanges = p.buildIgnoredRangesForFile(f, fset, i.FilePath())
-	nolintDebugf("file %s: built nolint ranges are %+v", i.FilePath(), fd.ignoredRanges)
-	return fd, nil
+	fd.ignoredRanges = p.buildIgnoredRangesForFile(f, fset, issue.FilePath())
+
+	nolintDebugf("file %s: built nolint ranges are %+v", issue.FilePath(), fd.ignoredRanges)
+
+	return fd
 }
 
 func (p *Nolint) buildIgnoredRangesForFile(f *ast.File, fset *token.FileSet, filePath string) []ignoredRange {
@@ -146,79 +191,6 @@ func (p *Nolint) buildIgnoredRangesForFile(f *ast.File, fset *token.FileSet, fil
 	return allRanges
 }
 
-func (p *Nolint) shouldPassIssue(i *result.Issue) (bool, error) {
-	nolintDebugf("got issue: %v", *i)
-	if i.FromLinter == golinters.NoLintLintName && i.ExpectNoLint && i.ExpectedNoLintLinter != "" {
-		// don't expect disabled linters to cover their nolint statements
-		nolintDebugf("enabled linters: %v", p.enabledLinters)
-		if p.enabledLinters[i.ExpectedNoLintLinter] == nil {
-			return false, nil
-		}
-		nolintDebugf("checking that lint issue was used for %s: %v", i.ExpectedNoLintLinter, i)
-	}
-
-	fd, err := p.getOrCreateFileData(i)
-	if err != nil {
-		return false, err
-	}
-
-	for _, ir := range fd.ignoredRanges {
-		if ir.doesMatch(i) {
-			nolintDebugf("found ignored range for issue %v: %v", i, ir)
-			ir.matchedIssueFromLinter[i.FromLinter] = true
-			if ir.originalRange != nil {
-				ir.originalRange.matchedIssueFromLinter[i.FromLinter] = true
-			}
-			return false, nil
-		}
-	}
-
-	return true, nil
-}
-
-type rangeExpander struct {
-	fset           *token.FileSet
-	inlineRanges   []ignoredRange
-	expandedRanges []ignoredRange
-}
-
-func (e *rangeExpander) Visit(node ast.Node) ast.Visitor {
-	if node == nil {
-		return e
-	}
-
-	nodeStartPos := e.fset.Position(node.Pos())
-	nodeStartLine := nodeStartPos.Line
-	nodeEndLine := e.fset.Position(node.End()).Line
-
-	var foundRange *ignoredRange
-	for _, r := range e.inlineRanges {
-		if r.To == nodeStartLine-1 && nodeStartPos.Column == r.col {
-			r := r
-			foundRange = &r
-			break
-		}
-	}
-	if foundRange == nil {
-		return e
-	}
-
-	expandedRange := *foundRange
-	// store the original unexpanded range for matching nolintlint issues
-	if expandedRange.originalRange == nil {
-		expandedRange.originalRange = foundRange
-	}
-	if expandedRange.To < nodeEndLine {
-		expandedRange.To = nodeEndLine
-	}
-
-	nolintDebugf("found range is %v for node %#v [%d;%d], expanded range is %v",
-		*foundRange, node, nodeStartLine, nodeEndLine, expandedRange)
-	e.expandedRanges = append(e.expandedRanges, expandedRange)
-
-	return e
-}
-
 func (p *Nolint) extractFileCommentsInlineRanges(fset *token.FileSet, comments ...*ast.CommentGroup) []ignoredRange {
 	var ret []ignoredRange
 	for _, g := range comments {
@@ -235,7 +207,7 @@ func (p *Nolint) extractFileCommentsInlineRanges(fset *token.FileSet, comments .
 
 func (p *Nolint) extractInlineRangeFromComment(text string, g ast.Node, fset *token.FileSet) *ignoredRange {
 	text = strings.TrimLeft(text, "/ ")
-	if !nolintRe.MatchString(text) {
+	if !p.pattern.MatchString(text) {
 		return nil
 	}
 
@@ -284,18 +256,47 @@ func (p *Nolint) extractInlineRangeFromComment(text string, g ast.Node, fset *to
 	return buildRange(linters)
 }
 
-func (p *Nolint) Finish() {
-	if len(p.unknownLintersSet) == 0 {
-		return
+type rangeExpander struct {
+	fset           *token.FileSet
+	inlineRanges   []ignoredRange
+	expandedRanges []ignoredRange
+}
+
+func (e *rangeExpander) Visit(node ast.Node) ast.Visitor {
+	if node == nil {
+		return e
 	}
 
-	unknownLinters := make([]string, 0, len(p.unknownLintersSet))
-	for name := range p.unknownLintersSet {
-		unknownLinters = append(unknownLinters, name)
+	nodeStartPos := e.fset.Position(node.Pos())
+	nodeStartLine := nodeStartPos.Line
+	nodeEndLine := e.fset.Position(node.End()).Line
+
+	var foundRange *ignoredRange
+	for _, r := range e.inlineRanges {
+		if r.To == nodeStartLine-1 && nodeStartPos.Column == r.col {
+			r := r
+			foundRange = &r
+			break
+		}
+	}
+	if foundRange == nil {
+		return e
 	}
-	sort.Strings(unknownLinters)
 
-	p.log.Warnf("Found unknown linters in //nolint directives: %s", strings.Join(unknownLinters, ", "))
+	expandedRange := *foundRange
+	// store the original unexpanded range for matching nolintlint issues
+	if expandedRange.originalRange == nil {
+		expandedRange.originalRange = foundRange
+	}
+	if expandedRange.To < nodeEndLine {
+		expandedRange.To = nodeEndLine
+	}
+
+	nolintDebugf("found range is %v for node %#v [%d;%d], expanded range is %v",
+		*foundRange, node, nodeStartLine, nodeEndLine, expandedRange)
+	e.expandedRanges = append(e.expandedRanges, expandedRange)
+
+	return e
 }
 
 // put nolintlint last
@@ -306,7 +307,7 @@ func (issues sortWithNolintlintLast) Len() int {
 }
 
 func (issues sortWithNolintlintLast) Less(i, j int) bool {
-	return issues[i].FromLinter != golinters.NoLintLintName && issues[j].FromLinter == golinters.NoLintLintName
+	return issues[i].FromLinter != nolintlint.LinterName && issues[j].FromLinter == nolintlint.LinterName
 }
 
 func (issues sortWithNolintlintLast) Swap(i, j int) {
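
The nolint processor now compiles its `^nolint( |:|$)` pattern per instance instead of relying on a package-level variable. The sketch below shows, in simplified form, how such a pattern can be used to recognise a //nolint comment and pull out the linter names after the colon; it illustrates the directive format only and is not the vendored parser, which handles more edge cases (trailing explanations, unknown-linter tracking):

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// nolint directives look like "//nolint", "//nolint:linter1,linter2", or "//nolint // reason".
// The processor strips the leading slashes and matches an anchored pattern
// of this shape before extracting linter names.
var nolintPattern = regexp.MustCompile(`^nolint( |:|$)`)

// parseNolint reports whether text is a nolint directive and which linters it names.
// An empty slice means "all linters".
func parseNolint(text string) (bool, []string) {
	text = strings.TrimLeft(text, "/ ")
	if !nolintPattern.MatchString(text) {
		return false, nil
	}
	// Cut off any trailing explanation after a space, then look for ":linter,...".
	if i := strings.Index(text, " "); i >= 0 {
		text = text[:i]
	}
	if !strings.HasPrefix(text, "nolint:") {
		return true, nil
	}
	var linters []string
	for _, name := range strings.Split(strings.TrimPrefix(text, "nolint:"), ",") {
		if name = strings.TrimSpace(name); name != "" {
			linters = append(linters, name)
		}
	}
	return true, linters
}

func main() {
	ok, linters := parseNolint("//nolint:errcheck,gosec // checked manually")
	fmt.Println(ok, linters) // true [errcheck gosec]
	ok, linters = parseNolint("// normal comment")
	fmt.Println(ok, linters) // false []
}
```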
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go
index f6b885011bc04f15e8a5efd1009a8fed7fe9ff7c..8036e3fd6d7f4dd601dd7e249e53c4bd9347faa2 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go
@@ -5,13 +5,13 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+var _ Processor = (*PathPrefixer)(nil)
+
 // PathPrefixer adds a customizable prefix to every output path
 type PathPrefixer struct {
 	prefix string
 }
 
-var _ Processor = new(PathPrefixer)
-
 // NewPathPrefixer returns a new path prefixer for the provided string
 func NewPathPrefixer(prefix string) *PathPrefixer {
 	return &PathPrefixer{prefix: prefix}
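
Several files in this diff move the compile-time interface assertion above the type and switch from the `&T{}` / `new(T)` / value forms to a typed nil pointer. A tiny sketch of why that pattern is used; the Greeter interface and English type are invented for the example:

```go
package main

import "fmt"

// Greeter is a small interface used to demonstrate the
// compile-time assertion style used throughout the diff.
type Greeter interface {
	Greet() string
}

// The typed nil conversion costs nothing at runtime and fails the build
// if *English stops implementing Greeter.
var _ Greeter = (*English)(nil)

type English struct{}

func (*English) Greet() string { return "hello" }

func main() {
	var g Greeter = &English{}
	fmt.Println(g.Greet())
}
```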
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go
index 3a140999c02077cc0ac907060ff3b3878af889d4..3c97f06a65e1f74be44c7af6e71a29a42da92041 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go
@@ -8,41 +8,40 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+var _ Processor = (*PathPrettifier)(nil)
+
 type PathPrettifier struct {
 	root string
 }
 
-var _ Processor = PathPrettifier{}
-
 func NewPathPrettifier() *PathPrettifier {
 	root, err := fsutils.Getwd()
 	if err != nil {
 		panic(fmt.Sprintf("Can't get working dir: %s", err))
 	}
-	return &PathPrettifier{
-		root: root,
-	}
+
+	return &PathPrettifier{root: root}
 }
 
-func (p PathPrettifier) Name() string {
+func (PathPrettifier) Name() string {
 	return "path_prettifier"
 }
 
-func (p PathPrettifier) Process(issues []result.Issue) ([]result.Issue, error) {
-	return transformIssues(issues, func(i *result.Issue) *result.Issue {
-		if !filepath.IsAbs(i.FilePath()) {
-			return i
+func (PathPrettifier) Process(issues []result.Issue) ([]result.Issue, error) {
+	return transformIssues(issues, func(issue *result.Issue) *result.Issue {
+		if !filepath.IsAbs(issue.FilePath()) {
+			return issue
 		}
 
-		rel, err := fsutils.ShortestRelPath(i.FilePath(), "")
+		rel, err := fsutils.ShortestRelPath(issue.FilePath(), "")
 		if err != nil {
-			return i
+			return issue
 		}
 
-		newI := i
-		newI.Pos.Filename = rel
-		return newI
+		newIssue := issue
+		newIssue.Pos.Filename = rel
+		return newIssue
 	}), nil
 }
 
-func (p PathPrettifier) Finish() {}
+func (PathPrettifier) Finish() {}
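
path_prettifier keeps its behaviour: absolute paths are rewritten relative to the working directory, and any failure falls back to the original path. A standalone sketch of that best-effort rewrite using only the standard library (the vendored code goes through fsutils.ShortestRelPath rather than filepath.Rel):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// prettifyPath rewrites an absolute path as a path relative to the
// current working directory, falling back to the input on any error,
// which is the same "best effort, never fail" behaviour the processor keeps.
func prettifyPath(p string) string {
	if !filepath.IsAbs(p) {
		return p
	}
	wd, err := os.Getwd()
	if err != nil {
		return p
	}
	rel, err := filepath.Rel(wd, p)
	if err != nil {
		return p
	}
	return rel
}

func main() {
	wd, _ := os.Getwd()
	fmt.Println(prettifyPath(filepath.Join(wd, "pkg", "result", "issue.go"))) // pkg/result/issue.go
	fmt.Println(prettifyPath("pkg/result/issue.go"))                          // unchanged, already relative
}
```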
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go
index 6b66bea8b0208ad704c1012e2d74a61a7d165f0d..b161e86c2f46f9b047cd820b15c925eafa4550fc 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go
@@ -8,33 +8,32 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+var _ Processor = (*PathShortener)(nil)
+
 type PathShortener struct {
 	wd string
 }
 
-var _ Processor = PathShortener{}
-
 func NewPathShortener() *PathShortener {
 	wd, err := fsutils.Getwd()
 	if err != nil {
 		panic(fmt.Sprintf("Can't get working dir: %s", err))
 	}
-	return &PathShortener{
-		wd: wd,
-	}
+
+	return &PathShortener{wd: wd}
 }
 
-func (p PathShortener) Name() string {
+func (PathShortener) Name() string {
 	return "path_shortener"
 }
 
 func (p PathShortener) Process(issues []result.Issue) ([]result.Issue, error) {
-	return transformIssues(issues, func(i *result.Issue) *result.Issue {
-		newI := i
-		newI.Text = strings.ReplaceAll(newI.Text, p.wd+"/", "")
-		newI.Text = strings.ReplaceAll(newI.Text, p.wd, "")
-		return newI
+	return transformIssues(issues, func(issue *result.Issue) *result.Issue {
+		newIssue := issue
+		newIssue.Text = strings.ReplaceAll(newIssue.Text, p.wd+"/", "")
+		newIssue.Text = strings.ReplaceAll(newIssue.Text, p.wd, "")
+		return newIssue
 	}), nil
 }
 
-func (p PathShortener) Finish() {}
+func (PathShortener) Finish() {}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/processor.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/processor.go
index 1a7a40434c65e0ba404ebe69fe7da1b2b9392ef0..13e63d604689f6ea8b1f815f9bcfe6106facc3c9 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/processor.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/processor.go
@@ -4,6 +4,8 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+const typeCheckName = "typecheck"
+
 type Processor interface {
 	Process(issues []result.Issue) ([]result.Issue, error)
 	Name() string
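
processor.go only gains a typecheck constant here, but it anchors the interface every file in this directory implements: Process transforms or filters issues, Name identifies the stage, and Finish flushes statistics. A toy pipeline over plain strings, purely to illustrate the shape; the names upper and runPipeline are invented for this sketch:

```go
package main

import (
	"fmt"
	"strings"
)

// processor mirrors the shape of the pipeline interface: each stage can
// rewrite or drop issues, report its name, and flush statistics at the end.
// The string slice stands in for []result.Issue.
type processor interface {
	Name() string
	Process(issues []string) ([]string, error)
	Finish()
}

// upper is a toy stage that upper-cases every issue text.
type upper struct{}

func (upper) Name() string { return "upper" }

func (upper) Process(issues []string) ([]string, error) {
	out := make([]string, 0, len(issues))
	for _, is := range issues {
		out = append(out, strings.ToUpper(is))
	}
	return out, nil
}

func (upper) Finish() {}

// runPipeline applies each stage in order, stopping at the first error.
func runPipeline(issues []string, stages ...processor) ([]string, error) {
	for _, s := range stages {
		var err error
		issues, err = s.Process(issues)
		if err != nil {
			return nil, fmt.Errorf("processor %s failed: %w", s.Name(), err)
		}
		s.Finish()
	}
	return issues, nil
}

func main() {
	out, err := runPipeline([]string{"unused variable"}, upper{})
	fmt.Println(out, err) // [UNUSED VARIABLE] <nil>
}
```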
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity.go
new file mode 100644
index 0000000000000000000000000000000000000000..93a26586d6e356f18e01d7305d5b7bfae0e8b825
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity.go
@@ -0,0 +1,116 @@
+package processors
+
+import (
+	"regexp"
+
+	"github.com/golangci/golangci-lint/pkg/config"
+	"github.com/golangci/golangci-lint/pkg/fsutils"
+	"github.com/golangci/golangci-lint/pkg/logutils"
+	"github.com/golangci/golangci-lint/pkg/result"
+)
+
+const severityFromLinter = "@linter"
+
+var _ Processor = (*Severity)(nil)
+
+type severityRule struct {
+	baseRule
+	severity string
+}
+
+type Severity struct {
+	name string
+
+	log logutils.Log
+
+	files *fsutils.Files
+
+	defaultSeverity string
+	rules           []severityRule
+}
+
+func NewSeverity(log logutils.Log, files *fsutils.Files, cfg *config.Severity) *Severity {
+	p := &Severity{
+		name:            "severity-rules",
+		files:           files,
+		log:             log,
+		defaultSeverity: cfg.Default,
+	}
+
+	prefix := caseInsensitivePrefix
+	if cfg.CaseSensitive {
+		prefix = ""
+		p.name = "severity-rules-case-sensitive"
+	}
+
+	p.rules = createSeverityRules(cfg.Rules, prefix)
+
+	return p
+}
+
+func (p *Severity) Name() string { return p.name }
+
+func (p *Severity) Process(issues []result.Issue) ([]result.Issue, error) {
+	if len(p.rules) == 0 && p.defaultSeverity == "" {
+		return issues, nil
+	}
+
+	return transformIssues(issues, p.transform), nil
+}
+
+func (*Severity) Finish() {}
+
+func (p *Severity) transform(issue *result.Issue) *result.Issue {
+	for _, rule := range p.rules {
+		if rule.match(issue, p.files, p.log) {
+			if rule.severity == severityFromLinter || (rule.severity == "" && p.defaultSeverity == severityFromLinter) {
+				return issue
+			}
+
+			issue.Severity = rule.severity
+			if issue.Severity == "" {
+				issue.Severity = p.defaultSeverity
+			}
+
+			return issue
+		}
+	}
+
+	if p.defaultSeverity != severityFromLinter {
+		issue.Severity = p.defaultSeverity
+	}
+
+	return issue
+}
+
+func createSeverityRules(rules []config.SeverityRule, prefix string) []severityRule {
+	parsedRules := make([]severityRule, 0, len(rules))
+
+	for _, rule := range rules {
+		parsedRule := severityRule{}
+		parsedRule.linters = rule.Linters
+		parsedRule.severity = rule.Severity
+
+		if rule.Text != "" {
+			parsedRule.text = regexp.MustCompile(prefix + rule.Text)
+		}
+
+		if rule.Source != "" {
+			parsedRule.source = regexp.MustCompile(prefix + rule.Source)
+		}
+
+		if rule.Path != "" {
+			path := fsutils.NormalizePathInRegex(rule.Path)
+			parsedRule.path = regexp.MustCompile(path)
+		}
+
+		if rule.PathExcept != "" {
+			pathExcept := fsutils.NormalizePathInRegex(rule.PathExcept)
+			parsedRule.pathExcept = regexp.MustCompile(pathExcept)
+		}
+
+		parsedRules = append(parsedRules, parsedRule)
+	}
+
+	return parsedRules
+}
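
The new severity.go merges the former case-sensitive and case-insensitive processors into one type, choosing the rule-pattern prefix up front. A reduced sketch of that prefix trick and of falling back to a default severity; rule matching here is by issue text only, whereas the real processor also matches linters, paths, and source lines and honours the @linter passthrough:

```go
package main

import (
	"fmt"
	"regexp"
)

// severityRule assigns a severity to issues whose text matches a pattern.
type severityRule struct {
	pattern  *regexp.Regexp
	severity string
}

// newRules compiles rule patterns, prepending "(?i)" when matching should be
// case-insensitive, which is the same prefix trick used by the vendored processor.
func newRules(specs map[string]string, caseSensitive bool) []severityRule {
	prefix := "(?i)"
	if caseSensitive {
		prefix = ""
	}
	var rules []severityRule
	for pattern, sev := range specs {
		rules = append(rules, severityRule{regexp.MustCompile(prefix + pattern), sev})
	}
	return rules
}

// apply returns the first matching rule's severity, or the default.
func apply(rules []severityRule, text, defaultSeverity string) string {
	for _, r := range rules {
		if r.pattern.MatchString(text) {
			return r.severity
		}
	}
	return defaultSeverity
}

func main() {
	rules := newRules(map[string]string{"deprecated": "warning"}, false)
	fmt.Println(apply(rules, "use of Deprecated API", "error")) // warning
	fmt.Println(apply(rules, "unused variable", "error"))       // error
}
```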
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go
deleted file mode 100644
index 0a4a643b71208596150dcaa2daf7e31dd3ff0303..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go
+++ /dev/null
@@ -1,108 +0,0 @@
-package processors
-
-import (
-	"regexp"
-
-	"github.com/golangci/golangci-lint/pkg/fsutils"
-	"github.com/golangci/golangci-lint/pkg/logutils"
-	"github.com/golangci/golangci-lint/pkg/result"
-)
-
-type severityRule struct {
-	baseRule
-	severity string
-}
-
-type SeverityRule struct {
-	BaseRule
-	Severity string
-}
-
-type SeverityRules struct {
-	defaultSeverity string
-	rules           []severityRule
-	files           *fsutils.Files
-	log             logutils.Log
-}
-
-func NewSeverityRules(defaultSeverity string, rules []SeverityRule, files *fsutils.Files, log logutils.Log) *SeverityRules {
-	r := &SeverityRules{
-		files:           files,
-		log:             log,
-		defaultSeverity: defaultSeverity,
-	}
-	r.rules = createSeverityRules(rules, "(?i)")
-
-	return r
-}
-
-func createSeverityRules(rules []SeverityRule, prefix string) []severityRule {
-	parsedRules := make([]severityRule, 0, len(rules))
-	for _, rule := range rules {
-		parsedRule := severityRule{}
-		parsedRule.linters = rule.Linters
-		parsedRule.severity = rule.Severity
-		if rule.Text != "" {
-			parsedRule.text = regexp.MustCompile(prefix + rule.Text)
-		}
-		if rule.Source != "" {
-			parsedRule.source = regexp.MustCompile(prefix + rule.Source)
-		}
-		if rule.Path != "" {
-			path := fsutils.NormalizePathInRegex(rule.Path)
-			parsedRule.path = regexp.MustCompile(path)
-		}
-		if rule.PathExcept != "" {
-			pathExcept := fsutils.NormalizePathInRegex(rule.PathExcept)
-			parsedRule.pathExcept = regexp.MustCompile(pathExcept)
-		}
-		parsedRules = append(parsedRules, parsedRule)
-	}
-	return parsedRules
-}
-
-func (p SeverityRules) Process(issues []result.Issue) ([]result.Issue, error) {
-	if len(p.rules) == 0 && p.defaultSeverity == "" {
-		return issues, nil
-	}
-	return transformIssues(issues, func(i *result.Issue) *result.Issue {
-		for _, rule := range p.rules {
-			rule := rule
-
-			ruleSeverity := p.defaultSeverity
-			if rule.severity != "" {
-				ruleSeverity = rule.severity
-			}
-
-			if rule.match(i, p.files, p.log) {
-				i.Severity = ruleSeverity
-				return i
-			}
-		}
-		i.Severity = p.defaultSeverity
-		return i
-	}), nil
-}
-
-func (SeverityRules) Name() string { return "severity-rules" }
-func (SeverityRules) Finish()      {}
-
-var _ Processor = SeverityRules{}
-
-type SeverityRulesCaseSensitive struct {
-	*SeverityRules
-}
-
-func NewSeverityRulesCaseSensitive(defaultSeverity string, rules []SeverityRule,
-	files *fsutils.Files, log logutils.Log) *SeverityRulesCaseSensitive {
-	r := &SeverityRules{
-		files:           files,
-		log:             log,
-		defaultSeverity: defaultSeverity,
-	}
-	r.rules = createSeverityRules(rules, "")
-
-	return &SeverityRulesCaseSensitive{r}
-}
-
-func (SeverityRulesCaseSensitive) Name() string { return "severity-rules-case-sensitive" }
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go
index e71495fd0b1d8cc9fc53df644bf49068db3d7b36..39dbfd1d388caa1c81f84f9830401820bca9c18d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go
@@ -4,13 +4,23 @@ import (
 	"fmt"
 	"path/filepath"
 	"regexp"
-	"strings"
 
 	"github.com/golangci/golangci-lint/pkg/fsutils"
 	"github.com/golangci/golangci-lint/pkg/logutils"
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+var _ Processor = (*SkipDirs)(nil)
+
+var StdExcludeDirRegexps = []string{
+	normalizePathRegex("vendor"),
+	normalizePathRegex("third_party"),
+	normalizePathRegex("testdata"),
+	normalizePathRegex("examples"),
+	normalizePathRegex("Godeps"),
+	normalizePathRegex("builtin"),
+}
+
 type skipStat struct {
 	pattern string
 	count   int
@@ -25,11 +35,7 @@ type SkipDirs struct {
 	pathPrefix       string
 }
 
-var _ Processor = (*SkipDirs)(nil)
-
-const goFileSuffix = ".go"
-
-func NewSkipDirs(patterns []string, log logutils.Log, runArgs []string, pathPrefix string) (*SkipDirs, error) {
+func NewSkipDirs(log logutils.Log, patterns, args []string, pathPrefix string) (*SkipDirs, error) {
 	var patternsRe []*regexp.Regexp
 	for _, p := range patterns {
 		p = fsutils.NormalizePathInRegex(p)
@@ -40,21 +46,9 @@ func NewSkipDirs(patterns []string, log logutils.Log, runArgs []string, pathPref
 		patternsRe = append(patternsRe, patternRe)
 	}
 
-	if len(runArgs) == 0 {
-		runArgs = append(runArgs, "./...")
-	}
-	var absArgsDirs []string
-	for _, arg := range runArgs {
-		base := filepath.Base(arg)
-		if base == "..." || strings.HasSuffix(base, goFileSuffix) {
-			arg = filepath.Dir(arg)
-		}
-
-		absArg, err := filepath.Abs(arg)
-		if err != nil {
-			return nil, fmt.Errorf("failed to abs-ify arg %q: %w", arg, err)
-		}
-		absArgsDirs = append(absArgsDirs, absArg)
+	absArgsDirs, err := absDirs(args)
+	if err != nil {
+		return nil, err
 	}
 
 	return &SkipDirs{
@@ -67,7 +61,7 @@ func NewSkipDirs(patterns []string, log logutils.Log, runArgs []string, pathPref
 	}, nil
 }
 
-func (p *SkipDirs) Name() string {
+func (*SkipDirs) Name() string {
 	return "skip_dirs"
 }
 
@@ -79,15 +73,21 @@ func (p *SkipDirs) Process(issues []result.Issue) ([]result.Issue, error) {
 	return filterIssues(issues, p.shouldPassIssue), nil
 }
 
-func (p *SkipDirs) shouldPassIssue(i *result.Issue) bool {
-	if filepath.IsAbs(i.FilePath()) {
-		if !isSpecialAutogeneratedFile(i.FilePath()) {
-			p.log.Warnf("Got abs path %s in skip dirs processor, it should be relative", i.FilePath())
+func (p *SkipDirs) Finish() {
+	for dir, stat := range p.skippedDirs {
+		p.log.Infof("Skipped %d issues from dir %s by pattern %s", stat.count, dir, stat.pattern)
+	}
+}
+
+func (p *SkipDirs) shouldPassIssue(issue *result.Issue) bool {
+	if filepath.IsAbs(issue.FilePath()) {
+		if isGoFile(issue.FilePath()) {
+			p.log.Warnf("Got abs path %s in skip dirs processor, it should be relative", issue.FilePath())
 		}
 		return true
 	}
 
-	issueRelDir := filepath.Dir(i.FilePath())
+	issueRelDir := filepath.Dir(issue.FilePath())
 
 	if toPass, ok := p.skippedDirsCache[issueRelDir]; ok {
 		if !toPass {
@@ -139,8 +139,34 @@ func (p *SkipDirs) shouldPassIssueDirs(issueRelDir, issueAbsDir string) bool {
 	return true
 }
 
-func (p *SkipDirs) Finish() {
-	for dir, stat := range p.skippedDirs {
-		p.log.Infof("Skipped %d issues from dir %s by pattern %s", stat.count, dir, stat.pattern)
+func absDirs(args []string) ([]string, error) {
+	if len(args) == 0 {
+		args = append(args, "./...")
 	}
+
+	var absArgsDirs []string
+	for _, arg := range args {
+		base := filepath.Base(arg)
+		if base == "..." || isGoFile(base) {
+			arg = filepath.Dir(arg)
+		}
+
+		absArg, err := filepath.Abs(arg)
+		if err != nil {
+			return nil, fmt.Errorf("failed to abs-ify arg %q: %w", arg, err)
+		}
+
+		absArgsDirs = append(absArgsDirs, absArg)
+	}
+
+	return absArgsDirs, nil
+}
+
+func normalizePathRegex(e string) string {
+	return createPathRegex(e, filepath.Separator)
+}
+
+func createPathRegex(e string, sep rune) string {
+	escapedSep := regexp.QuoteMeta(string(sep)) // needed for windows sep '\\'
+	return fmt.Sprintf(`(^|%[1]s)%[2]s($|%[1]s)`, escapedSep, e)
 }
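
skip_dirs now exposes StdExcludeDirRegexps built from normalizePathRegex/createPathRegex. The helper below reproduces that pattern construction so the intent is visible: a directory name must appear as a whole path element to be excluded. It is a standalone copy for illustration, not an import of the vendored function:

```go
package main

import (
	"fmt"
	"regexp"
)

// createPathRegex builds a pattern that matches a directory name only as a
// complete path element: "vendor" should match "vendor/x.go" and "a/vendor/x.go",
// but not "myvendor/x.go". The separator is regexp-quoted so the same helper
// also works with the Windows '\' separator.
func createPathRegex(elem string, sep rune) string {
	escapedSep := regexp.QuoteMeta(string(sep))
	return fmt.Sprintf(`(^|%[1]s)%[2]s($|%[1]s)`, escapedSep, elem)
}

func main() {
	// The real processor passes filepath.Separator; '/' keeps this demo portable.
	re := regexp.MustCompile(createPathRegex("vendor", '/'))
	fmt.Println(re.MatchString("vendor/pkg/a.go"))   // true
	fmt.Println(re.MatchString("a/vendor/pkg/a.go")) // true
	fmt.Println(re.MatchString("myvendor/pkg/a.go")) // false
}
```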
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go
index 9579bee84408fe6149f4d911b2fcab4d600d734d..3b17a9f327cfc806c617de2a85c3437995161b9d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go
@@ -8,21 +8,23 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+var _ Processor = (*SkipFiles)(nil)
+
 type SkipFiles struct {
 	patterns   []*regexp.Regexp
 	pathPrefix string
 }
 
-var _ Processor = (*SkipFiles)(nil)
-
 func NewSkipFiles(patterns []string, pathPrefix string) (*SkipFiles, error) {
 	var patternsRe []*regexp.Regexp
 	for _, p := range patterns {
 		p = fsutils.NormalizePathInRegex(p)
+
 		patternRe, err := regexp.Compile(p)
 		if err != nil {
-			return nil, fmt.Errorf("can't compile regexp %q: %s", p, err)
+			return nil, fmt.Errorf("can't compile regexp %q: %w", p, err)
 		}
+
 		patternsRe = append(patternsRe, patternRe)
 	}
 
@@ -32,7 +34,7 @@ func NewSkipFiles(patterns []string, pathPrefix string) (*SkipFiles, error) {
 	}, nil
 }
 
-func (p SkipFiles) Name() string {
+func (SkipFiles) Name() string {
 	return "skip_files"
 }
 
@@ -41,8 +43,9 @@ func (p SkipFiles) Process(issues []result.Issue) ([]result.Issue, error) {
 		return issues, nil
 	}
 
-	return filterIssues(issues, func(i *result.Issue) bool {
-		path := fsutils.WithPathPrefix(p.pathPrefix, i.FilePath())
+	return filterIssues(issues, func(issue *result.Issue) bool {
+		path := fsutils.WithPathPrefix(p.pathPrefix, issue.FilePath())
+
 		for _, pattern := range p.patterns {
 			if pattern.MatchString(path) {
 				return false
@@ -53,4 +56,4 @@ func (p SkipFiles) Process(issues []result.Issue) ([]result.Issue, error) {
 	}), nil
 }
 
-func (p SkipFiles) Finish() {}
+func (SkipFiles) Finish() {}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go
index 740c4fa8c37a0b662dc6968bdd9836ac0c4c71b8..77f58c03e57c739769b46d128815889a6881c60e 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go
@@ -1,6 +1,9 @@
 package processors
 
 import (
+	"errors"
+	"fmt"
+	"slices"
 	"sort"
 	"strings"
 
@@ -13,89 +16,124 @@ import (
 // by sorting results.Issues using processor step, and chain based
 // rules that can compare different properties of the Issues struct.
 
+const (
+	orderNameFile     = "file"
+	orderNameLinter   = "linter"
+	orderNameSeverity = "severity"
+)
+
 var _ Processor = (*SortResults)(nil)
 
 type SortResults struct {
-	cmp comparator
-	cfg *config.Config
+	cmps map[string]*comparator
+
+	cfg *config.Output
 }
 
 func NewSortResults(cfg *config.Config) *SortResults {
-	// For sorting we are comparing (in next order): file names, line numbers,
-	// position, and finally - giving up.
 	return &SortResults{
-		cmp: ByName{
-			next: ByLine{
-				next: ByColumn{},
-			},
+		cmps: map[string]*comparator{
+			// For sorting we are comparing (in the following order):
+			// file names, line numbers, position, and finally - giving up.
+			orderNameFile: byFileName().SetNext(byLine().SetNext(byColumn())),
+			// For sorting we are comparing: linter name
+			orderNameLinter: byLinter(),
+			// For sorting we are comparing: severity
+			orderNameSeverity: bySeverity(),
 		},
-		cfg: cfg,
+		cfg: &cfg.Output,
 	}
 }
 
+func (SortResults) Name() string { return "sort_results" }
+
 // Process is performing sorting of the result issues.
-func (sr SortResults) Process(issues []result.Issue) ([]result.Issue, error) {
-	if !sr.cfg.Output.SortResults {
+func (p SortResults) Process(issues []result.Issue) ([]result.Issue, error) {
+	if !p.cfg.SortResults {
 		return issues, nil
 	}
 
+	if len(p.cfg.SortOrder) == 0 {
+		p.cfg.SortOrder = []string{orderNameFile}
+	}
+
+	var cmps []*comparator
+	for _, name := range p.cfg.SortOrder {
+		c, ok := p.cmps[name]
+		if !ok {
+			return nil, fmt.Errorf("unsupported sort-order name %q", name)
+		}
+
+		cmps = append(cmps, c)
+	}
+
+	cmp, err := mergeComparators(cmps)
+	if err != nil {
+		return nil, err
+	}
+
 	sort.Slice(issues, func(i, j int) bool {
-		return sr.cmp.Compare(&issues[i], &issues[j]) == Less
+		return cmp.Compare(&issues[i], &issues[j]) == less
 	})
 
 	return issues, nil
 }
 
-func (sr SortResults) Name() string { return "sort_results" }
-func (sr SortResults) Finish()      {}
+func (SortResults) Finish() {}
 
 type compareResult int
 
 const (
-	Less compareResult = iota - 1
-	Equal
-	Greater
-	None
+	less compareResult = iota - 1
+	equal
+	greater
+	none
 )
 
 func (c compareResult) isNeutral() bool {
 	// return true if compare result is incomparable or equal.
-	return c == None || c == Equal
+	return c == none || c == equal
 }
 
 func (c compareResult) String() string {
 	switch c {
-	case Less:
-		return "Less"
-	case Equal:
-		return "Equal"
-	case Greater:
-		return "Greater"
+	case less:
+		return "less"
+	case equal:
+		return "equal"
+	case greater:
+		return "greater"
+	default:
+		return "none"
 	}
-
-	return "None"
 }
 
-// comparator describe how to implement compare for two "issues" lexicographically
-type comparator interface {
-	Compare(a, b *result.Issue) compareResult
-	Next() comparator
+// comparator describes how to implement compare for two "issues".
+type comparator struct {
+	name    string
+	compare func(a, b *result.Issue) compareResult
+	next    *comparator
 }
 
-var (
-	_ comparator = (*ByName)(nil)
-	_ comparator = (*ByLine)(nil)
-	_ comparator = (*ByColumn)(nil)
-)
+func (cmp *comparator) Next() *comparator { return cmp.next }
 
-type ByName struct{ next comparator }
+func (cmp *comparator) SetNext(c *comparator) *comparator {
+	cmp.next = c
+	return cmp
+}
 
-func (cmp ByName) Next() comparator { return cmp.next }
+func (cmp *comparator) String() string {
+	s := cmp.name
+	if cmp.Next() != nil {
+		s += " > " + cmp.Next().String()
+	}
 
-func (cmp ByName) Compare(a, b *result.Issue) compareResult {
-	var res compareResult
+	return s
+}
 
-	if res = compareResult(strings.Compare(a.FilePath(), b.FilePath())); !res.isNeutral() {
+func (cmp *comparator) Compare(a, b *result.Issue) compareResult {
+	res := cmp.compare(a, b)
+	if !res.isNeutral() {
 		return res
 	}
 
@@ -106,40 +144,95 @@ func (cmp ByName) Compare(a, b *result.Issue) compareResult {
 	return res
 }
 
-type ByLine struct{ next comparator }
+func byFileName() *comparator {
+	return &comparator{
+		name: "byFileName",
+		compare: func(a, b *result.Issue) compareResult {
+			return compareResult(strings.Compare(a.FilePath(), b.FilePath()))
+		},
+	}
+}
 
-func (cmp ByLine) Next() comparator { return cmp.next }
+func byLine() *comparator {
+	return &comparator{
+		name: "byLine",
+		compare: func(a, b *result.Issue) compareResult {
+			return numericCompare(a.Line(), b.Line())
+		},
+	}
+}
 
-func (cmp ByLine) Compare(a, b *result.Issue) compareResult {
-	var res compareResult
+func byColumn() *comparator {
+	return &comparator{
+		name: "byColumn",
+		compare: func(a, b *result.Issue) compareResult {
+			return numericCompare(a.Column(), b.Column())
+		},
+	}
+}
 
-	if res = numericCompare(a.Line(), b.Line()); !res.isNeutral() {
-		return res
+func byLinter() *comparator {
+	return &comparator{
+		name: "byLinter",
+		compare: func(a, b *result.Issue) compareResult {
+			return compareResult(strings.Compare(a.FromLinter, b.FromLinter))
+		},
 	}
+}
 
-	if next := cmp.Next(); next != nil {
-		return next.Compare(a, b)
+func bySeverity() *comparator {
+	return &comparator{
+		name: "bySeverity",
+		compare: func(a, b *result.Issue) compareResult {
+			return severityCompare(a.Severity, b.Severity)
+		},
 	}
+}
 
-	return res
+func mergeComparators(cmps []*comparator) (*comparator, error) {
+	if len(cmps) == 0 {
+		return nil, errors.New("no comparator")
+	}
+
+	for i := 0; i < len(cmps)-1; i++ {
+		findComparatorTip(cmps[i]).SetNext(cmps[i+1])
+	}
+
+	return cmps[0], nil
 }
 
-type ByColumn struct{ next comparator }
+func findComparatorTip(cmp *comparator) *comparator {
+	if cmp.Next() != nil {
+		return findComparatorTip(cmp.Next())
+	}
 
-func (cmp ByColumn) Next() comparator { return cmp.next }
+	return cmp
+}
 
-func (cmp ByColumn) Compare(a, b *result.Issue) compareResult {
-	var res compareResult
+func severityCompare(a, b string) compareResult {
+	// The position inside the slice defines the importance (lower to higher).
+	classic := []string{"low", "medium", "high", "warning", "error"}
+
+	if slices.Contains(classic, a) && slices.Contains(classic, b) {
+		switch {
+		case slices.Index(classic, a) > slices.Index(classic, b):
+			return greater
+		case slices.Index(classic, a) < slices.Index(classic, b):
+			return less
+		default:
+			return equal
+		}
+	}
 
-	if res = numericCompare(a.Column(), b.Column()); !res.isNeutral() {
-		return res
+	if slices.Contains(classic, a) {
+		return greater
 	}
 
-	if next := cmp.Next(); next != nil {
-		return next.Compare(a, b)
+	if slices.Contains(classic, b) {
+		return less
 	}
 
-	return res
+	return compareResult(strings.Compare(a, b))
 }
 
 func numericCompare(a, b int) compareResult {
@@ -153,14 +246,14 @@ func numericCompare(a, b int) compareResult {
 
 	switch {
 	case isZeroValuesBoth || isEqual:
-		return Equal
+		return equal
 	case isValuesInvalid || isZeroValueInA || isZeroValueInB:
-		return None
+		return none
 	case a > b:
-		return Greater
+		return greater
 	case a < b:
-		return Less
+		return less
 	}
 
-	return Equal
+	return equal
 }
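
sort_results replaces the ByName/ByLine/ByColumn types with a single comparator struct whose instances are chained via SetNext and merged according to --sort-order. The sketch below shows the chaining idea on a simplified issue type; it collapses the less/equal/greater/none enum into a plain int and leaves out the zero-value handling of numericCompare:

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

// issue is a simplified stand-in for result.Issue.
type issue struct {
	file string
	line int
}

// comparator chains comparisons: when one stage reports a tie, the next stage decides.
// This mirrors the byFileName().SetNext(byLine()) construction in the diff.
type comparator struct {
	compare func(a, b issue) int
	next    *comparator
}

func (c *comparator) SetNext(n *comparator) *comparator { c.next = n; return c }

func (c *comparator) Compare(a, b issue) int {
	if res := c.compare(a, b); res != 0 {
		return res
	}
	if c.next != nil {
		return c.next.Compare(a, b)
	}
	return 0
}

func byFile() *comparator {
	return &comparator{compare: func(a, b issue) int { return strings.Compare(a.file, b.file) }}
}

func byLine() *comparator {
	return &comparator{compare: func(a, b issue) int { return a.line - b.line }}
}

func main() {
	issues := []issue{{"b.go", 3}, {"a.go", 10}, {"a.go", 2}}
	cmp := byFile().SetNext(byLine())
	sort.Slice(issues, func(i, j int) bool { return cmp.Compare(issues[i], issues[j]) < 0 })
	fmt.Println(issues) // [{a.go 2} {a.go 10} {b.go 3}]
}
```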
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go
index cfd73cb98e04f37518cd97fbe16df48bc8e149f2..4a89fc73ed7edb9594b6154da07216c695dd9e48 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go
@@ -6,13 +6,13 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
+var _ Processor = (*SourceCode)(nil)
+
 type SourceCode struct {
 	lineCache *fsutils.LineCache
 	log       logutils.Log
 }
 
-var _ Processor = SourceCode{}
-
 func NewSourceCode(lc *fsutils.LineCache, log logutils.Log) *SourceCode {
 	return &SourceCode{
 		lineCache: lc,
@@ -20,28 +20,31 @@ func NewSourceCode(lc *fsutils.LineCache, log logutils.Log) *SourceCode {
 	}
 }
 
-func (p SourceCode) Name() string {
+func (SourceCode) Name() string {
 	return "source_code"
 }
 
 func (p SourceCode) Process(issues []result.Issue) ([]result.Issue, error) {
-	return transformIssues(issues, func(i *result.Issue) *result.Issue {
-		newI := *i
-
-		lineRange := i.GetLineRange()
-		for lineNumber := lineRange.From; lineNumber <= lineRange.To; lineNumber++ {
-			line, err := p.lineCache.GetLine(i.FilePath(), lineNumber)
-			if err != nil {
-				p.log.Warnf("Failed to get line %d for file %s: %s",
-					lineNumber, i.FilePath(), err)
-				return i
-			}
-
-			newI.SourceLines = append(newI.SourceLines, line)
+	return transformIssues(issues, p.transform), nil
+}
+
+func (SourceCode) Finish() {}
+
+func (p SourceCode) transform(issue *result.Issue) *result.Issue {
+	newIssue := *issue
+
+	lineRange := issue.GetLineRange()
+	for lineNumber := lineRange.From; lineNumber <= lineRange.To; lineNumber++ {
+		line, err := p.lineCache.GetLine(issue.FilePath(), lineNumber)
+		if err != nil {
+			p.log.Warnf("Failed to get line %d for file %s: %s",
+				lineNumber, issue.FilePath(), err)
+
+			return issue
 		}
 
-		return &newI
-	}), nil
-}
+		newIssue.SourceLines = append(newIssue.SourceLines, line)
+	}
 
-func (p SourceCode) Finish() {}
+	return &newIssue
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go
index dc0e1e8cf667b8b7ef6cb5ccaf3ab30be2466c45..8e384e390b17ae3ae5ee91fba10b08dd3df46659 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go
@@ -5,24 +5,23 @@ import (
 	"github.com/golangci/golangci-lint/pkg/result"
 )
 
-type lineToCount map[int]int
-type fileToLineToCount map[string]lineToCount
+const uniqByLineLimit = 1
+
+var _ Processor = (*UniqByLine)(nil)
 
 type UniqByLine struct {
-	flc fileToLineToCount
-	cfg *config.Config
+	fileLineCounter fileLineCounter
+	cfg             *config.Config
 }
 
 func NewUniqByLine(cfg *config.Config) *UniqByLine {
 	return &UniqByLine{
-		flc: fileToLineToCount{},
-		cfg: cfg,
+		fileLineCounter: fileLineCounter{},
+		cfg:             cfg,
 	}
 }
 
-var _ Processor = &UniqByLine{}
-
-func (p *UniqByLine) Name() string {
+func (*UniqByLine) Name() string {
 	return "uniq_by_line"
 }
 
@@ -31,28 +30,44 @@ func (p *UniqByLine) Process(issues []result.Issue) ([]result.Issue, error) {
 		return issues, nil
 	}
 
-	return filterIssues(issues, func(i *result.Issue) bool {
-		if i.Replacement != nil && p.cfg.Issues.NeedFix {
-			// if issue will be auto-fixed we shouldn't collapse issues:
-			// e.g. one line can contain 2 misspellings, they will be in 2 issues and misspell should fix both of them.
-			return true
-		}
-
-		lc := p.flc[i.FilePath()]
-		if lc == nil {
-			lc = lineToCount{}
-			p.flc[i.FilePath()] = lc
-		}
-
-		const limit = 1
-		count := lc[i.Line()]
-		if count == limit {
-			return false
-		}
-
-		lc[i.Line()]++
+	return filterIssues(issues, p.shouldPassIssue), nil
+}
+
+func (*UniqByLine) Finish() {}
+
+func (p *UniqByLine) shouldPassIssue(issue *result.Issue) bool {
+	if issue.Replacement != nil && p.cfg.Issues.NeedFix {
+		// if issue will be auto-fixed we shouldn't collapse issues:
+		// e.g. one line can contain 2 misspellings, they will be in 2 issues and misspell should fix both of them.
 		return true
-	}), nil
+	}
+
+	if p.fileLineCounter.GetCount(issue) == uniqByLineLimit {
+		return false
+	}
+
+	p.fileLineCounter.Increment(issue)
+
+	return true
 }
 
-func (p *UniqByLine) Finish() {}
+type fileLineCounter map[string]map[int]int
+
+func (f fileLineCounter) GetCount(issue *result.Issue) int {
+	return f.getCounter(issue)[issue.Line()]
+}
+
+func (f fileLineCounter) Increment(issue *result.Issue) {
+	f.getCounter(issue)[issue.Line()]++
+}
+
+func (f fileLineCounter) getCounter(issue *result.Issue) map[int]int {
+	lc := f[issue.FilePath()]
+
+	if lc == nil {
+		lc = map[int]int{}
+		f[issue.FilePath()] = lc
+	}
+
+	return lc
+}
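
uniq_by_line extracts the per-file, per-line counting into a fileLineCounter type with a limit of one issue per line. A compact sketch of the same dedup idea; the auto-fix exception from the vendored code is left out:

```go
package main

import "fmt"

// issue is a simplified stand-in for result.Issue.
type issue struct {
	file string
	line int
	text string
}

// uniqByLine keeps only the first issue reported for each (file, line) pair,
// the same per-file/per-line counter idea the refactor extracts into fileLineCounter.
func uniqByLine(issues []issue) []issue {
	seen := map[string]map[int]int{}
	var kept []issue
	for _, is := range issues {
		lines := seen[is.file]
		if lines == nil {
			lines = map[int]int{}
			seen[is.file] = lines
		}
		if lines[is.line] >= 1 {
			continue // something was already reported on this line
		}
		lines[is.line]++
		kept = append(kept, is)
	}
	return kept
}

func main() {
	in := []issue{
		{"a.go", 10, "unused variable"},
		{"a.go", 10, "shadowed err"},
		{"a.go", 11, "unchecked error"},
	}
	fmt.Println(len(uniqByLine(in))) // 2
}
```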
diff --git a/vendor/github.com/golangci/lint-1/.travis.yml b/vendor/github.com/golangci/lint-1/.travis.yml
deleted file mode 100644
index bc2f4b311e45ec1354c9c20317574b75a9ad0bf0..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/lint-1/.travis.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-sudo: false
-language: go
-go:
-  - 1.10.x
-  - 1.11.x
-  - master
-
-go_import_path: github.com/golangci/lint-1
-
-install:
-  - go get -t -v ./...
-
-script:
-  - go test -v -race ./...
-
-matrix:
-  allow_failures:
-    - go: master
-  fast_finish: true
diff --git a/vendor/github.com/golangci/lint-1/CONTRIBUTING.md b/vendor/github.com/golangci/lint-1/CONTRIBUTING.md
deleted file mode 100644
index 2e39a1c6770238e973c6c673df8f29f5e46bde09..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/lint-1/CONTRIBUTING.md
+++ /dev/null
@@ -1,15 +0,0 @@
-# Contributing to Golint
-
-## Before filing an issue:
-
-### Are you having trouble building golint?
-
-Check you have the latest version of its dependencies. Run
-```
-go get -u github.com/golangci/lint-1/golint
-```
-If you still have problems, consider searching for existing issues before filing a new issue.
-
-## Before sending a pull request:
-
-Have you understood the purpose of golint? Make sure to carefully read `README`.
diff --git a/vendor/github.com/golangci/lint-1/LICENSE b/vendor/github.com/golangci/lint-1/LICENSE
deleted file mode 100644
index 65d761bc9f28c7de26b4f39c495d5ebd365b114d..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/lint-1/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright (c) 2013 The Go Authors. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-   * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-   * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-   * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/golangci/lint-1/README.md b/vendor/github.com/golangci/lint-1/README.md
deleted file mode 100644
index 2de6ee835c58e6ea0d9a8bcb3e01b30865496f2f..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/lint-1/README.md
+++ /dev/null
@@ -1,88 +0,0 @@
-Golint is a linter for Go source code.
-
-[![Build Status](https://travis-ci.org/golang/lint.svg?branch=master)](https://travis-ci.org/golang/lint)
-
-## Installation
-
-Golint requires a
-[supported release of Go](https://golang.org/doc/devel/release.html#policy).
-
-    go get -u github.com/golangci/lint-1/golint
-
-To find out where `golint` was installed you can run `go list -f {{.Target}} github.com/golangci/lint-1/golint`. For `golint` to be used globally add that directory to the `$PATH` environment setting.
-
-## Usage
-
-Invoke `golint` with one or more filenames, directories, or packages named
-by its import path. Golint uses the same
-[import path syntax](https://golang.org/cmd/go/#hdr-Import_path_syntax) as
-the `go` command and therefore
-also supports relative import paths like `./...`. Additionally the `...`
-wildcard can be used as suffix on relative and absolute file paths to recurse
-into them.
-
-The output of this tool is a list of suggestions in Vim quickfix format,
-which is accepted by lots of different editors.
-
-## Purpose
-
-Golint differs from gofmt. Gofmt reformats Go source code, whereas
-golint prints out style mistakes.
-
-Golint differs from govet. Govet is concerned with correctness, whereas
-golint is concerned with coding style. Golint is in use at Google, and it
-seeks to match the accepted style of the open source Go project.
-
-The suggestions made by golint are exactly that: suggestions.
-Golint is not perfect, and has both false positives and false negatives.
-Do not treat its output as a gold standard. We will not be adding pragmas
-or other knobs to suppress specific warnings, so do not expect or require
-code to be completely "lint-free".
-In short, this tool is not, and will never be, trustworthy enough for its
-suggestions to be enforced automatically, for example as part of a build process.
-Golint makes suggestions for many of the mechanically checkable items listed in
-[Effective Go](https://golang.org/doc/effective_go.html) and the
-[CodeReviewComments wiki page](https://golang.org/wiki/CodeReviewComments).
-
-## Scope
-
-Golint is meant to carry out the stylistic conventions put forth in
-[Effective Go](https://golang.org/doc/effective_go.html) and
-[CodeReviewComments](https://golang.org/wiki/CodeReviewComments).
-Changes that are not aligned with those documents will not be considered.
-
-## Contributions
-
-Contributions to this project are welcome provided they are [in scope](#scope),
-though please send mail before starting work on anything major.
-Contributors retain their copyright, so we need you to fill out
-[a short form](https://developers.google.com/open-source/cla/individual)
-before we can accept your contribution.
-
-## Vim
-
-Add this to your ~/.vimrc:
-
-    set rtp+=$GOPATH/src/github.com/golangci/lint-1/misc/vim
-
-If you have multiple entries in your GOPATH, replace `$GOPATH` with the right value.
-
-Running `:Lint` will run golint on the current file and populate the quickfix list.
-
-Optionally, add this to your `~/.vimrc` to automatically run `golint` on `:w`
-
-    autocmd BufWritePost,FileWritePost *.go execute 'Lint' | cwindow
-
-
-## Emacs
-
-Add this to your `.emacs` file:
-
-    (add-to-list 'load-path (concat (getenv "GOPATH")  "/src/github.com/golang/lint/misc/emacs"))
-    (require 'golint)
-
-If you have multiple entries in your GOPATH, replace `$GOPATH` with the right value.
-
-Running M-x golint will run golint on the current file.
-
-For more usage, see [Compilation-Mode](http://www.gnu.org/software/emacs/manual/html_node/emacs/Compilation-Mode.html).
diff --git a/vendor/github.com/golangci/lint-1/lint.go b/vendor/github.com/golangci/lint-1/lint.go
deleted file mode 100644
index 886c85bf099b877cdf6a6131152a99d08906d3bf..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/lint-1/lint.go
+++ /dev/null
@@ -1,1655 +0,0 @@
-// Copyright (c) 2013 The Go Authors. All rights reserved.
-//
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file or at
-// https://developers.google.com/open-source/licenses/bsd.
-
-// Package lint contains a linter for Go source code.
-package lint // import "github.com/golangci/lint-1"
-
-import (
-	"bufio"
-	"bytes"
-	"fmt"
-	"go/ast"
-	"go/parser"
-	"go/printer"
-	"go/token"
-	"go/types"
-	"io/ioutil"
-	"regexp"
-	"sort"
-	"strconv"
-	"strings"
-	"unicode"
-	"unicode/utf8"
-
-	"golang.org/x/tools/go/ast/astutil"
-	"golang.org/x/tools/go/gcexportdata"
-)
-
-const styleGuideBase = "https://golang.org/wiki/CodeReviewComments"
-
-// A Linter lints Go source code.
-type Linter struct {
-}
-
-// Problem represents a problem in some source code.
-type Problem struct {
-	Position   token.Position // position in source file
-	Text       string         // the prose that describes the problem
-	Link       string         // (optional) the link to the style guide for the problem
-	Confidence float64        // a value in (0,1] estimating the confidence in this problem's correctness
-	LineText   string         // the source line
-	Category   string         // a short name for the general category of the problem
-
-	// If the problem has a suggested fix (the minority case),
-	// ReplacementLine is a full replacement for the relevant line of the source file.
-	ReplacementLine string
-}
-
-func (p *Problem) String() string {
-	if p.Link != "" {
-		return p.Text + "\n\n" + p.Link
-	}
-	return p.Text
-}
-
-type byPosition []Problem
-
-func (p byPosition) Len() int      { return len(p) }
-func (p byPosition) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
-
-func (p byPosition) Less(i, j int) bool {
-	pi, pj := p[i].Position, p[j].Position
-
-	if pi.Filename != pj.Filename {
-		return pi.Filename < pj.Filename
-	}
-	if pi.Line != pj.Line {
-		return pi.Line < pj.Line
-	}
-	if pi.Column != pj.Column {
-		return pi.Column < pj.Column
-	}
-
-	return p[i].Text < p[j].Text
-}
-
-// Lint lints src.
-func (l *Linter) Lint(filename string, src []byte) ([]Problem, error) {
-	return l.LintFiles(map[string][]byte{filename: src})
-}
-
-// LintFiles lints a set of files of a single package.
-// The argument is a map of filename to source.
-func (l *Linter) LintFiles(files map[string][]byte) ([]Problem, error) {
-	pkg := &pkg{
-		fset:  token.NewFileSet(),
-		files: make(map[string]*file),
-	}
-	var pkgName string
-	for filename, src := range files {
-		if isGenerated(src) {
-			continue // See issue #239
-		}
-		f, err := parser.ParseFile(pkg.fset, filename, src, parser.ParseComments)
-		if err != nil {
-			return nil, err
-		}
-		if pkgName == "" {
-			pkgName = f.Name.Name
-		} else if f.Name.Name != pkgName {
-			return nil, fmt.Errorf("%s is in package %s, not %s", filename, f.Name.Name, pkgName)
-		}
-		pkg.files[filename] = &file{
-			pkg:      pkg,
-			f:        f,
-			fset:     pkg.fset,
-			src:      src,
-			filename: filename,
-		}
-	}
-	if len(pkg.files) == 0 {
-		return nil, nil
-	}
-	return pkg.lint(), nil
-}
-
-// LintFiles lints a set of files of a single package.
-// The argument is a map of filename to source.
-func (l *Linter) LintPkg(files []*ast.File, fset *token.FileSet, typesPkg *types.Package, typesInfo *types.Info) ([]Problem, error) {
-	pkg := &pkg{
-		fset:      fset,
-		files:     make(map[string]*file),
-		typesPkg:  typesPkg,
-		typesInfo: typesInfo,
-	}
-	var pkgName string
-	for _, f := range files {
-		// use PositionFor, not Position because of //line directives:
-		// this filename will be used for source lines extraction.
-		filename := fset.PositionFor(f.Pos(), false).Filename
-		if filename == "" {
-			return nil, fmt.Errorf("no file name for file %+v", f)
-		}
-
-		if pkgName == "" {
-			pkgName = f.Name.Name
-		} else if f.Name.Name != pkgName {
-			return nil, fmt.Errorf("%s is in package %s, not %s", filename, f.Name.Name, pkgName)
-		}
-
-		// TODO: reuse golangci-lint lines cache
-		src, err := ioutil.ReadFile(filename)
-		if err != nil {
-			return nil, fmt.Errorf("can't read file %s: %s", filename, err)
-		}
-
-		pkg.files[filename] = &file{
-			pkg:      pkg,
-			f:        f,
-			fset:     pkg.fset,
-			src:      src,
-			filename: filename,
-		}
-	}
-	if len(pkg.files) == 0 {
-		return nil, nil
-	}
-	return pkg.lint(), nil
-}
-
-var (
-	genHdr = []byte("// Code generated ")
-	genFtr = []byte(" DO NOT EDIT.")
-)
-
-// isGenerated reports whether the source file is generated code
-// according the rules from https://golang.org/s/generatedcode.
-func isGenerated(src []byte) bool {
-	sc := bufio.NewScanner(bytes.NewReader(src))
-	for sc.Scan() {
-		b := sc.Bytes()
-		if bytes.HasPrefix(b, genHdr) && bytes.HasSuffix(b, genFtr) && len(b) >= len(genHdr)+len(genFtr) {
-			return true
-		}
-	}
-	return false
-}
-
-// pkg represents a package being linted.
-type pkg struct {
-	fset  *token.FileSet
-	files map[string]*file
-
-	typesPkg  *types.Package
-	typesInfo *types.Info
-
-	// sortable is the set of types in the package that implement sort.Interface.
-	sortable map[string]bool
-	// main is whether this is a "main" package.
-	main bool
-
-	problems []Problem
-}
-
-func (p *pkg) lint() []Problem {
-	p.scanSortable()
-	p.main = p.isMain()
-
-	for _, f := range p.files {
-		f.lint()
-	}
-
-	sort.Sort(byPosition(p.problems))
-
-	return p.problems
-}
-
-// file represents a file being linted.
-type file struct {
-	pkg      *pkg
-	f        *ast.File
-	fset     *token.FileSet
-	src      []byte
-	filename string
-}
-
-func (f *file) isTest() bool { return strings.HasSuffix(f.filename, "_test.go") }
-
-func (f *file) lint() {
-	f.lintPackageComment()
-	f.lintImports()
-	f.lintBlankImports()
-	f.lintExported()
-	f.lintNames()
-	f.lintElses()
-	f.lintRanges()
-	f.lintErrorf()
-	f.lintErrors()
-	f.lintErrorStrings()
-	f.lintReceiverNames()
-	f.lintIncDec()
-	f.lintErrorReturn()
-	f.lintUnexportedReturn()
-	f.lintTimeNames()
-	f.lintContextKeyTypes()
-	f.lintContextArgs()
-}
-
-type link string
-type category string
-
-// The variadic arguments may start with link and category types,
-// and must end with a format string and any arguments.
-// It returns the new Problem.
-func (f *file) errorf(n ast.Node, confidence float64, args ...interface{}) *Problem {
-	pos := f.fset.Position(n.Pos())
-	if pos.Filename == "" {
-		pos.Filename = f.filename
-	}
-	return f.pkg.errorfAt(pos, confidence, args...)
-}
-
-func (p *pkg) errorfAt(pos token.Position, confidence float64, args ...interface{}) *Problem {
-	problem := Problem{
-		Position:   pos,
-		Confidence: confidence,
-	}
-	if pos.Filename != "" {
-		// The file might not exist in our mapping if a //line directive was encountered.
-		if f, ok := p.files[pos.Filename]; ok {
-			problem.LineText = srcLine(f.src, pos)
-		}
-	}
-
-argLoop:
-	for len(args) > 1 { // always leave at least the format string in args
-		switch v := args[0].(type) {
-		case link:
-			problem.Link = string(v)
-		case category:
-			problem.Category = string(v)
-		default:
-			break argLoop
-		}
-		args = args[1:]
-	}
-
-	problem.Text = fmt.Sprintf(args[0].(string), args[1:]...)
-
-	p.problems = append(p.problems, problem)
-	return &p.problems[len(p.problems)-1]
-}
-
-var newImporter = func(fset *token.FileSet) types.ImporterFrom {
-	return gcexportdata.NewImporter(fset, make(map[string]*types.Package))
-}
-
-func (p *pkg) typeCheck() error {
-	config := &types.Config{
-		// By setting a no-op error reporter, the type checker does as much work as possible.
-		Error:    func(error) {},
-		Importer: newImporter(p.fset),
-	}
-	info := &types.Info{
-		Types:  make(map[ast.Expr]types.TypeAndValue),
-		Defs:   make(map[*ast.Ident]types.Object),
-		Uses:   make(map[*ast.Ident]types.Object),
-		Scopes: make(map[ast.Node]*types.Scope),
-	}
-	var anyFile *file
-	var astFiles []*ast.File
-	for _, f := range p.files {
-		anyFile = f
-		astFiles = append(astFiles, f.f)
-	}
-	pkg, err := config.Check(anyFile.f.Name.Name, p.fset, astFiles, info)
-	// Remember the typechecking info, even if config.Check failed,
-	// since we will get partial information.
-	p.typesPkg = pkg
-	p.typesInfo = info
-	return err
-}
-
-func (p *pkg) typeOf(expr ast.Expr) types.Type {
-	if p.typesInfo == nil {
-		return nil
-	}
-	return p.typesInfo.TypeOf(expr)
-}
-
-func (p *pkg) isNamedType(typ types.Type, importPath, name string) bool {
-	n, ok := typ.(*types.Named)
-	if !ok {
-		return false
-	}
-	tn := n.Obj()
-	return tn != nil && tn.Pkg() != nil && tn.Pkg().Path() == importPath && tn.Name() == name
-}
-
-// scopeOf returns the tightest scope encompassing id.
-func (p *pkg) scopeOf(id *ast.Ident) *types.Scope {
-	var scope *types.Scope
-	if obj := p.typesInfo.ObjectOf(id); obj != nil {
-		scope = obj.Parent()
-	}
-	if scope == p.typesPkg.Scope() {
-		// We were given a top-level identifier.
-		// Use the file-level scope instead of the package-level scope.
-		pos := id.Pos()
-		for _, f := range p.files {
-			if f.f.Pos() <= pos && pos < f.f.End() {
-				scope = p.typesInfo.Scopes[f.f]
-				break
-			}
-		}
-	}
-	return scope
-}
-
-func (p *pkg) scanSortable() {
-	p.sortable = make(map[string]bool)
-
-	// bitfield for which methods exist on each type.
-	const (
-		Len = 1 << iota
-		Less
-		Swap
-	)
-	nmap := map[string]int{"Len": Len, "Less": Less, "Swap": Swap}
-	has := make(map[string]int)
-	for _, f := range p.files {
-		f.walk(func(n ast.Node) bool {
-			fn, ok := n.(*ast.FuncDecl)
-			if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 {
-				return true
-			}
-			// TODO(dsymonds): We could check the signature to be more precise.
-			recv := receiverType(fn)
-			if i, ok := nmap[fn.Name.Name]; ok {
-				has[recv] |= i
-			}
-			return false
-		})
-	}
-	for typ, ms := range has {
-		if ms == Len|Less|Swap {
-			p.sortable[typ] = true
-		}
-	}
-}
-
-func (p *pkg) isMain() bool {
-	for _, f := range p.files {
-		if f.isMain() {
-			return true
-		}
-	}
-	return false
-}
-
-func (f *file) isMain() bool {
-	if f.f.Name.Name == "main" {
-		return true
-	}
-	return false
-}
-
-// lintPackageComment checks package comments. It complains if
-// there is no package comment, or if it is not of the right form.
-// This has a notable false positive in that a package comment
-// could rightfully appear in a different file of the same package,
-// but that's not easy to fix since this linter is file-oriented.
-func (f *file) lintPackageComment() {
-	if f.isTest() {
-		return
-	}
-
-	const ref = styleGuideBase + "#package-comments"
-	prefix := "Package " + f.f.Name.Name + " "
-
-	// Look for a detached package comment.
-	// First, scan for the last comment that occurs before the "package" keyword.
-	var lastCG *ast.CommentGroup
-	for _, cg := range f.f.Comments {
-		if cg.Pos() > f.f.Package {
-			// Gone past "package" keyword.
-			break
-		}
-		lastCG = cg
-	}
-	if lastCG != nil && strings.HasPrefix(lastCG.Text(), prefix) {
-		endPos := f.fset.Position(lastCG.End())
-		pkgPos := f.fset.Position(f.f.Package)
-		if endPos.Line+1 < pkgPos.Line {
-			// There isn't a great place to anchor this error;
-			// the start of the blank lines between the doc and the package statement
-			// is at least pointing at the location of the problem.
-			pos := token.Position{
-				Filename: endPos.Filename,
-				// Offset not set; it is non-trivial, and doesn't appear to be needed.
-				Line:   endPos.Line + 1,
-				Column: 1,
-			}
-			f.pkg.errorfAt(pos, 0.9, link(ref), category("comments"), "package comment is detached; there should be no blank lines between it and the package statement")
-			return
-		}
-	}
-
-	if f.f.Doc == nil {
-		f.errorf(f.f, 0.2, link(ref), category("comments"), "should have a package comment, unless it's in another file for this package")
-		return
-	}
-	s := f.f.Doc.Text()
-	if ts := strings.TrimLeft(s, " \t"); ts != s {
-		f.errorf(f.f.Doc, 1, link(ref), category("comments"), "package comment should not have leading space")
-		s = ts
-	}
-	// Only non-main packages need to keep to this form.
-	if !f.pkg.main && !strings.HasPrefix(s, prefix) {
-		f.errorf(f.f.Doc, 1, link(ref), category("comments"), `package comment should be of the form "%s..."`, prefix)
-	}
-}
-
-func (f *file) isCgo() bool {
-	if f.src == nil {
-		return false
-	}
-	newLinePos := bytes.Index(f.src, []byte("\n"))
-	if newLinePos < 0 {
-		return false
-	}
-	firstLine := string(f.src[:newLinePos])
-
-	// files using cgo have implicitly added comment "Created by cgo - DO NOT EDIT" for go <= 1.10
-	// and "Code generated by cmd/cgo" for go >= 1.11
-	return strings.Contains(firstLine, "Created by cgo") || strings.Contains(firstLine, "Code generated by cmd/cgo")
-}
-
-// lintBlankImports complains if a non-main package has blank imports that are
-// not documented.
-func (f *file) lintBlankImports() {
-	// In package main and in tests, we don't complain about blank imports.
-	if f.pkg.main || f.isTest() || f.isCgo() {
-		return
-	}
-
-	// The first element of each contiguous group of blank imports should have
-	// an explanatory comment of some kind.
-	for i, imp := range f.f.Imports {
-		pos := f.fset.Position(imp.Pos())
-
-		if !isBlank(imp.Name) {
-			continue // Ignore non-blank imports.
-		}
-		if i > 0 {
-			prev := f.f.Imports[i-1]
-			prevPos := f.fset.Position(prev.Pos())
-			if isBlank(prev.Name) && prevPos.Line+1 == pos.Line {
-				continue // A subsequent blank in a group.
-			}
-		}
-
-		// This is the first blank import of a group.
-		if imp.Doc == nil && imp.Comment == nil {
-			ref := ""
-			f.errorf(imp, 1, link(ref), category("imports"), "a blank import should be only in a main or test package, or have a comment justifying it")
-		}
-	}
-}
-
-// lintImports examines import blocks.
-func (f *file) lintImports() {
-	for i, is := range f.f.Imports {
-		_ = i
-		if is.Name != nil && is.Name.Name == "." && !f.isTest() {
-			f.errorf(is, 1, link(styleGuideBase+"#import-dot"), category("imports"), "should not use dot imports")
-		}
-
-	}
-}
-
-const docCommentsLink = styleGuideBase + "#doc-comments"
-
-// lintExported examines the exported names.
-// It complains if any required doc comments are missing,
-// or if they are not of the right form. The exact rules are in
-// lintFuncDoc, lintTypeDoc and lintValueSpecDoc; this function
-// also tracks the GenDecl structure being traversed to permit
-// doc comments for constants to be on top of the const block.
-// It also complains if the names stutter when combined with
-// the package name.
-func (f *file) lintExported() {
-	if f.isTest() {
-		return
-	}
-
-	var lastGen *ast.GenDecl // last GenDecl entered.
-
-	// Set of GenDecls that have already had missing comments flagged.
-	genDeclMissingComments := make(map[*ast.GenDecl]bool)
-
-	f.walk(func(node ast.Node) bool {
-		switch v := node.(type) {
-		case *ast.GenDecl:
-			if v.Tok == token.IMPORT {
-				return false
-			}
-			// token.CONST, token.TYPE or token.VAR
-			lastGen = v
-			return true
-		case *ast.FuncDecl:
-			f.lintFuncDoc(v)
-			if v.Recv == nil {
-				// Only check for stutter on functions, not methods.
-				// Method names are not used package-qualified.
-				f.checkStutter(v.Name, "func")
-			}
-			// Don't proceed inside funcs.
-			return false
-		case *ast.TypeSpec:
-			// inside a GenDecl, which usually has the doc
-			doc := v.Doc
-			if doc == nil {
-				doc = lastGen.Doc
-			}
-			f.lintTypeDoc(v, doc)
-			f.checkStutter(v.Name, "type")
-			// Don't proceed inside types.
-			return false
-		case *ast.ValueSpec:
-			f.lintValueSpecDoc(v, lastGen, genDeclMissingComments)
-			return false
-		}
-		return true
-	})
-}
-
-var (
-	allCapsRE = regexp.MustCompile(`^[A-Z0-9_]+$`)
-	anyCapsRE = regexp.MustCompile(`[A-Z]`)
-)
-
-// knownNameExceptions is a set of names that are known to be exempt from naming checks.
-// This is usually because they are constrained by having to match names in the
-// standard library.
-var knownNameExceptions = map[string]bool{
-	"LastInsertId": true, // must match database/sql
-	"kWh":          true,
-}
-
-func isInTopLevel(f *ast.File, ident *ast.Ident) bool {
-	path, _ := astutil.PathEnclosingInterval(f, ident.Pos(), ident.End())
-	for _, f := range path {
-		switch f.(type) {
-		case *ast.File, *ast.GenDecl, *ast.ValueSpec, *ast.Ident:
-			continue
-		}
-		return false
-	}
-	return true
-}
-
-// lintNames examines all names in the file.
-// It complains if any use underscores or incorrect known initialisms.
-func (f *file) lintNames() {
-	// Package names need slightly different handling than other names.
-	if strings.Contains(f.f.Name.Name, "_") && !strings.HasSuffix(f.f.Name.Name, "_test") {
-		f.errorf(f.f, 1, link("http://golang.org/doc/effective_go.html#package-names"), category("naming"), "don't use an underscore in package name")
-	}
-	if anyCapsRE.MatchString(f.f.Name.Name) {
-		f.errorf(f.f, 1, link("http://golang.org/doc/effective_go.html#package-names"), category("mixed-caps"), "don't use MixedCaps in package name; %s should be %s", f.f.Name.Name, strings.ToLower(f.f.Name.Name))
-	}
-
-	check := func(id *ast.Ident, thing string) {
-		if id.Name == "_" {
-			return
-		}
-		if knownNameExceptions[id.Name] {
-			return
-		}
-
-		// Handle two common styles from other languages that don't belong in Go.
-		if len(id.Name) >= 5 && allCapsRE.MatchString(id.Name) && strings.Contains(id.Name, "_") {
-			capCount := 0
-			for _, c := range id.Name {
-				if 'A' <= c && c <= 'Z' {
-					capCount++
-				}
-			}
-			if capCount >= 2 {
-				f.errorf(id, 0.8, link(styleGuideBase+"#mixed-caps"), category("naming"), "don't use ALL_CAPS in Go names; use CamelCase")
-				return
-			}
-		}
-		if thing == "const" || (thing == "var" && isInTopLevel(f.f, id)) {
-			if len(id.Name) > 2 && id.Name[0] == 'k' && id.Name[1] >= 'A' && id.Name[1] <= 'Z' {
-				should := string(id.Name[1]+'a'-'A') + id.Name[2:]
-				f.errorf(id, 0.8, link(styleGuideBase+"#mixed-caps"), category("naming"), "don't use leading k in Go names; %s %s should be %s", thing, id.Name, should)
-			}
-		}
-
-		should := lintName(id.Name)
-		if id.Name == should {
-			return
-		}
-
-		if len(id.Name) > 2 && strings.Contains(id.Name[1:], "_") {
-			f.errorf(id, 0.9, link("http://golang.org/doc/effective_go.html#mixed-caps"), category("naming"), "don't use underscores in Go names; %s %s should be %s", thing, id.Name, should)
-			return
-		}
-		f.errorf(id, 0.8, link(styleGuideBase+"#initialisms"), category("naming"), "%s %s should be %s", thing, id.Name, should)
-	}
-	checkList := func(fl *ast.FieldList, thing string) {
-		if fl == nil {
-			return
-		}
-		for _, f := range fl.List {
-			for _, id := range f.Names {
-				check(id, thing)
-			}
-		}
-	}
-	f.walk(func(node ast.Node) bool {
-		switch v := node.(type) {
-		case *ast.AssignStmt:
-			if v.Tok == token.ASSIGN {
-				return true
-			}
-			for _, exp := range v.Lhs {
-				if id, ok := exp.(*ast.Ident); ok {
-					check(id, "var")
-				}
-			}
-		case *ast.FuncDecl:
-			if f.isTest() && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) {
-				return true
-			}
-
-			thing := "func"
-			if v.Recv != nil {
-				thing = "method"
-			}
-
-			// Exclude naming warnings for functions that are exported to C but
-			// not exported in the Go API.
-			// See https://github.com/golang/lint/issues/144.
-			if ast.IsExported(v.Name.Name) || !isCgoExported(v) {
-				check(v.Name, thing)
-			}
-
-			checkList(v.Type.Params, thing+" parameter")
-			checkList(v.Type.Results, thing+" result")
-		case *ast.GenDecl:
-			if v.Tok == token.IMPORT {
-				return true
-			}
-			var thing string
-			switch v.Tok {
-			case token.CONST:
-				thing = "const"
-			case token.TYPE:
-				thing = "type"
-			case token.VAR:
-				thing = "var"
-			}
-			for _, spec := range v.Specs {
-				switch s := spec.(type) {
-				case *ast.TypeSpec:
-					check(s.Name, thing)
-				case *ast.ValueSpec:
-					for _, id := range s.Names {
-						check(id, thing)
-					}
-				}
-			}
-		case *ast.InterfaceType:
-			// Do not check interface method names.
-			// They are often constrainted by the method names of concrete types.
-			for _, x := range v.Methods.List {
-				ft, ok := x.Type.(*ast.FuncType)
-				if !ok { // might be an embedded interface name
-					continue
-				}
-				checkList(ft.Params, "interface method parameter")
-				checkList(ft.Results, "interface method result")
-			}
-		case *ast.RangeStmt:
-			if v.Tok == token.ASSIGN {
-				return true
-			}
-			if id, ok := v.Key.(*ast.Ident); ok {
-				check(id, "range var")
-			}
-			if id, ok := v.Value.(*ast.Ident); ok {
-				check(id, "range var")
-			}
-		case *ast.StructType:
-			for _, f := range v.Fields.List {
-				for _, id := range f.Names {
-					check(id, "struct field")
-				}
-			}
-		}
-		return true
-	})
-}
-
-// lintName returns a different name if it should be different.
-func lintName(name string) (should string) {
-	// Fast path for simple cases: "_" and all lowercase.
-	if name == "_" {
-		return name
-	}
-	allLower := true
-	for _, r := range name {
-		if !unicode.IsLower(r) {
-			allLower = false
-			break
-		}
-	}
-	if allLower {
-		return name
-	}
-
-	// Split camelCase at any lower->upper transition, and split on underscores.
-	// Check each word for common initialisms.
-	runes := []rune(name)
-	w, i := 0, 0 // index of start of word, scan
-	for i+1 <= len(runes) {
-		eow := false // whether we hit the end of a word
-		if i+1 == len(runes) {
-			eow = true
-		} else if runes[i+1] == '_' {
-			// underscore; shift the remainder forward over any run of underscores
-			eow = true
-			n := 1
-			for i+n+1 < len(runes) && runes[i+n+1] == '_' {
-				n++
-			}
-
-			// Leave at most one underscore if the underscore is between two digits
-			if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) {
-				n--
-			}
-
-			copy(runes[i+1:], runes[i+n+1:])
-			runes = runes[:len(runes)-n]
-		} else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) {
-			// lower->non-lower
-			eow = true
-		}
-		i++
-		if !eow {
-			continue
-		}
-
-		// [w,i) is a word.
-		word := string(runes[w:i])
-		if u := strings.ToUpper(word); commonInitialisms[u] {
-			// Keep consistent case, which is lowercase only at the start.
-			if w == 0 && unicode.IsLower(runes[w]) {
-				u = strings.ToLower(u)
-			}
-			// All the common initialisms are ASCII,
-			// so we can replace the bytes exactly.
-			copy(runes[w:], []rune(u))
-		} else if w > 0 && strings.ToLower(word) == word {
-			// already all lowercase, and not the first word, so uppercase the first character.
-			runes[w] = unicode.ToUpper(runes[w])
-		}
-		w = i
-	}
-	return string(runes)
-}
-
-// commonInitialisms is a set of common initialisms.
-// Only add entries that are highly unlikely to be non-initialisms.
-// For instance, "ID" is fine (Freudian code is rare), but "AND" is not.
-var commonInitialisms = map[string]bool{
-	"ACL":   true,
-	"API":   true,
-	"ASCII": true,
-	"CPU":   true,
-	"CSS":   true,
-	"DNS":   true,
-	"EOF":   true,
-	"GUID":  true,
-	"HTML":  true,
-	"HTTP":  true,
-	"HTTPS": true,
-	"ID":    true,
-	"IP":    true,
-	"JSON":  true,
-	"LHS":   true,
-	"QPS":   true,
-	"RAM":   true,
-	"RHS":   true,
-	"RPC":   true,
-	"SLA":   true,
-	"SMTP":  true,
-	"SQL":   true,
-	"SSH":   true,
-	"TCP":   true,
-	"TLS":   true,
-	"TTL":   true,
-	"UDP":   true,
-	"UI":    true,
-	"UID":   true,
-	"UUID":  true,
-	"URI":   true,
-	"URL":   true,
-	"UTF8":  true,
-	"VM":    true,
-	"XML":   true,
-	"XMPP":  true,
-	"XSRF":  true,
-	"XSS":   true,
-}
-
-// lintTypeDoc examines the doc comment on a type.
-// It complains if they are missing from an exported type,
-// or if they are not of the standard form.
-func (f *file) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) {
-	if !ast.IsExported(t.Name.Name) {
-		return
-	}
-	if doc == nil {
-		f.errorf(t, 1, link(docCommentsLink), category("comments"), "exported type %v should have comment or be unexported", t.Name)
-		return
-	}
-
-	s := doc.Text()
-	articles := [...]string{"A", "An", "The"}
-	for _, a := range articles {
-		if strings.HasPrefix(s, a+" ") {
-			s = s[len(a)+1:]
-			break
-		}
-	}
-	if !strings.HasPrefix(s, t.Name.Name+" ") {
-		f.errorf(doc, 1, link(docCommentsLink), category("comments"), `comment on exported type %v should be of the form "%v ..." (with optional leading article)`, t.Name, t.Name)
-	}
-}
-
-var commonMethods = map[string]bool{
-	"Error":     true,
-	"Read":      true,
-	"ServeHTTP": true,
-	"String":    true,
-	"Write":     true,
-}
-
-// lintFuncDoc examines doc comments on functions and methods.
-// It complains if they are missing, or not of the right form.
-// It has specific exclusions for well-known methods (see commonMethods above).
-func (f *file) lintFuncDoc(fn *ast.FuncDecl) {
-	if !ast.IsExported(fn.Name.Name) {
-		// func is unexported
-		return
-	}
-	kind := "function"
-	name := fn.Name.Name
-	if fn.Recv != nil && len(fn.Recv.List) > 0 {
-		// method
-		kind = "method"
-		recv := receiverType(fn)
-		if !ast.IsExported(recv) {
-			// receiver is unexported
-			return
-		}
-		if commonMethods[name] {
-			return
-		}
-		switch name {
-		case "Len", "Less", "Swap":
-			if f.pkg.sortable[recv] {
-				return
-			}
-		}
-		name = recv + "." + name
-	}
-	if fn.Doc == nil {
-		f.errorf(fn, 1, link(docCommentsLink), category("comments"), "exported %s %s should have comment or be unexported", kind, name)
-		return
-	}
-	s := fn.Doc.Text()
-	prefix := fn.Name.Name + " "
-	if !strings.HasPrefix(s, prefix) {
-		f.errorf(fn.Doc, 1, link(docCommentsLink), category("comments"), `comment on exported %s %s should be of the form "%s..."`, kind, name, prefix)
-	}
-}
-
-// lintValueSpecDoc examines package-global variables and constants.
-// It complains if they are not individually declared,
-// or if they are not suitably documented in the right form (unless they are in a block that is commented).
-func (f *file) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genDeclMissingComments map[*ast.GenDecl]bool) {
-	kind := "var"
-	if gd.Tok == token.CONST {
-		kind = "const"
-	}
-
-	if len(vs.Names) > 1 {
-		// Check that none are exported except for the first.
-		for _, n := range vs.Names[1:] {
-			if ast.IsExported(n.Name) {
-				f.errorf(vs, 1, category("comments"), "exported %s %s should have its own declaration", kind, n.Name)
-				return
-			}
-		}
-	}
-
-	// Only one name.
-	name := vs.Names[0].Name
-	if !ast.IsExported(name) {
-		return
-	}
-
-	if vs.Doc == nil && gd.Doc == nil {
-		if genDeclMissingComments[gd] {
-			return
-		}
-		block := ""
-		if kind == "const" && gd.Lparen.IsValid() {
-			block = " (or a comment on this block)"
-		}
-		f.errorf(vs, 1, link(docCommentsLink), category("comments"), "exported %s %s should have comment%s or be unexported", kind, name, block)
-		genDeclMissingComments[gd] = true
-		return
-	}
-	// If this GenDecl has parens and a comment, we don't check its comment form.
-	if gd.Lparen.IsValid() && gd.Doc != nil {
-		return
-	}
-	// The relevant text to check will be on either vs.Doc or gd.Doc.
-	// Use vs.Doc preferentially.
-	doc := vs.Doc
-	if doc == nil {
-		doc = gd.Doc
-	}
-	prefix := name + " "
-	if !strings.HasPrefix(doc.Text(), prefix) {
-		f.errorf(doc, 1, link(docCommentsLink), category("comments"), `comment on exported %s %s should be of the form "%s..."`, kind, name, prefix)
-	}
-}
-
-func (f *file) checkStutter(id *ast.Ident, thing string) {
-	pkg, name := f.f.Name.Name, id.Name
-	if !ast.IsExported(name) {
-		// unexported name
-		return
-	}
-	// A name stutters if the package name is a strict prefix
-	// and the next character of the name starts a new word.
-	if len(name) <= len(pkg) {
-		// name is too short to stutter.
-		// This permits the name to be the same as the package name.
-		return
-	}
-	if !strings.EqualFold(pkg, name[:len(pkg)]) {
-		return
-	}
-	// We can assume the name is well-formed UTF-8.
-	// If the next rune after the package name is uppercase or an underscore
-	// the it's starting a new word and thus this name stutters.
-	rem := name[len(pkg):]
-	if next, _ := utf8.DecodeRuneInString(rem); next == '_' || unicode.IsUpper(next) {
-		f.errorf(id, 0.8, link(styleGuideBase+"#package-names"), category("naming"), "%s name will be used as %s.%s by other packages, and that stutters; consider calling this %s", thing, pkg, name, rem)
-	}
-}
-
-// zeroLiteral is a set of ast.BasicLit values that are zero values.
-// It is not exhaustive.
-var zeroLiteral = map[string]bool{
-	"false": true, // bool
-	// runes
-	`'\x00'`: true,
-	`'\000'`: true,
-	// strings
-	`""`: true,
-	"``": true,
-	// numerics
-	"0":   true,
-	"0.":  true,
-	"0.0": true,
-	"0i":  true,
-}
-
-// lintElses examines else blocks. It complains about any else block whose if block ends in a return.
-func (f *file) lintElses() {
-	// We don't want to flag if { } else if { } else { } constructions.
-	// They will appear as an IfStmt whose Else field is also an IfStmt.
-	// Record such a node so we ignore it when we visit it.
-	ignore := make(map[*ast.IfStmt]bool)
-
-	f.walk(func(node ast.Node) bool {
-		ifStmt, ok := node.(*ast.IfStmt)
-		if !ok || ifStmt.Else == nil {
-			return true
-		}
-		if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok {
-			ignore[elseif] = true
-			return true
-		}
-		if ignore[ifStmt] {
-			return true
-		}
-		if _, ok := ifStmt.Else.(*ast.BlockStmt); !ok {
-			// only care about elses without conditions
-			return true
-		}
-		if len(ifStmt.Body.List) == 0 {
-			return true
-		}
-		shortDecl := false // does the if statement have a ":=" initialization statement?
-		if ifStmt.Init != nil {
-			if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE {
-				shortDecl = true
-			}
-		}
-		lastStmt := ifStmt.Body.List[len(ifStmt.Body.List)-1]
-		if _, ok := lastStmt.(*ast.ReturnStmt); ok {
-			extra := ""
-			if shortDecl {
-				extra = " (move short variable declaration to its own line if necessary)"
-			}
-			f.errorf(ifStmt.Else, 1, link(styleGuideBase+"#indent-error-flow"), category("indent"), "if block ends with a return statement, so drop this else and outdent its block"+extra)
-		}
-		return true
-	})
-}
-
-// lintRanges examines range clauses. It complains about redundant constructions.
-func (f *file) lintRanges() {
-	f.walk(func(node ast.Node) bool {
-		rs, ok := node.(*ast.RangeStmt)
-		if !ok {
-			return true
-		}
-
-		if isIdent(rs.Key, "_") && (rs.Value == nil || isIdent(rs.Value, "_")) {
-			p := f.errorf(rs.Key, 1, category("range-loop"), "should omit values from range; this loop is equivalent to `for range ...`")
-
-			newRS := *rs // shallow copy
-			newRS.Value = nil
-			newRS.Key = nil
-			p.ReplacementLine = f.firstLineOf(&newRS, rs)
-
-			return true
-		}
-
-		if isIdent(rs.Value, "_") {
-			p := f.errorf(rs.Value, 1, category("range-loop"), "should omit 2nd value from range; this loop is equivalent to `for %s %s range ...`", f.render(rs.Key), rs.Tok)
-
-			newRS := *rs // shallow copy
-			newRS.Value = nil
-			p.ReplacementLine = f.firstLineOf(&newRS, rs)
-		}
-
-		return true
-	})
-}
-
-// lintErrorf examines errors.New and testing.Error calls. It complains if its only argument is an fmt.Sprintf invocation.
-func (f *file) lintErrorf() {
-	f.walk(func(node ast.Node) bool {
-		ce, ok := node.(*ast.CallExpr)
-		if !ok || len(ce.Args) != 1 {
-			return true
-		}
-		isErrorsNew := isPkgDot(ce.Fun, "errors", "New")
-		var isTestingError bool
-		se, ok := ce.Fun.(*ast.SelectorExpr)
-		if ok && se.Sel.Name == "Error" {
-			if typ := f.pkg.typeOf(se.X); typ != nil {
-				isTestingError = typ.String() == "*testing.T"
-			}
-		}
-		if !isErrorsNew && !isTestingError {
-			return true
-		}
-		if !f.imports("errors") {
-			return true
-		}
-		arg := ce.Args[0]
-		ce, ok = arg.(*ast.CallExpr)
-		if !ok || !isPkgDot(ce.Fun, "fmt", "Sprintf") {
-			return true
-		}
-		errorfPrefix := "fmt"
-		if isTestingError {
-			errorfPrefix = f.render(se.X)
-		}
-		p := f.errorf(node, 1, category("errors"), "should replace %s(fmt.Sprintf(...)) with %s.Errorf(...)", f.render(se), errorfPrefix)
-
-		m := f.srcLineWithMatch(ce, `^(.*)`+f.render(se)+`\(fmt\.Sprintf\((.*)\)\)(.*)$`)
-		if m != nil {
-			p.ReplacementLine = m[1] + errorfPrefix + ".Errorf(" + m[2] + ")" + m[3]
-		}
-
-		return true
-	})
-}
-
-// lintErrors examines global error vars. It complains if they aren't named in the standard way.
-func (f *file) lintErrors() {
-	for _, decl := range f.f.Decls {
-		gd, ok := decl.(*ast.GenDecl)
-		if !ok || gd.Tok != token.VAR {
-			continue
-		}
-		for _, spec := range gd.Specs {
-			spec := spec.(*ast.ValueSpec)
-			if len(spec.Names) != 1 || len(spec.Values) != 1 {
-				continue
-			}
-			ce, ok := spec.Values[0].(*ast.CallExpr)
-			if !ok {
-				continue
-			}
-			if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") {
-				continue
-			}
-
-			id := spec.Names[0]
-			prefix := "err"
-			if id.IsExported() {
-				prefix = "Err"
-			}
-			if !strings.HasPrefix(id.Name, prefix) {
-				f.errorf(id, 0.9, category("naming"), "error var %s should have name of the form %sFoo", id.Name, prefix)
-			}
-		}
-	}
-}
-
-func lintErrorString(s string) (isClean bool, conf float64) {
-	const basicConfidence = 0.8
-	const capConfidence = basicConfidence - 0.2
-	first, firstN := utf8.DecodeRuneInString(s)
-	last, _ := utf8.DecodeLastRuneInString(s)
-	if last == '.' || last == ':' || last == '!' || last == '\n' {
-		return false, basicConfidence
-	}
-	if unicode.IsUpper(first) {
-		// People use proper nouns and exported Go identifiers in error strings,
-		// so decrease the confidence of warnings for capitalization.
-		if len(s) <= firstN {
-			return false, capConfidence
-		}
-		// Flag strings starting with something that doesn't look like an initialism.
-		if second, _ := utf8.DecodeRuneInString(s[firstN:]); !unicode.IsUpper(second) {
-			return false, capConfidence
-		}
-	}
-	return true, 0
-}
-
-// lintErrorStrings examines error strings.
-// It complains if they are capitalized or end in punctuation or a newline.
-func (f *file) lintErrorStrings() {
-	f.walk(func(node ast.Node) bool {
-		ce, ok := node.(*ast.CallExpr)
-		if !ok {
-			return true
-		}
-		if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") {
-			return true
-		}
-		if len(ce.Args) < 1 {
-			return true
-		}
-		str, ok := ce.Args[0].(*ast.BasicLit)
-		if !ok || str.Kind != token.STRING {
-			return true
-		}
-		s, _ := strconv.Unquote(str.Value) // can assume well-formed Go
-		if s == "" {
-			return true
-		}
-		clean, conf := lintErrorString(s)
-		if clean {
-			return true
-		}
-
-		f.errorf(str, conf, link(styleGuideBase+"#error-strings"), category("errors"),
-			"error strings should not be capitalized or end with punctuation or a newline")
-		return true
-	})
-}
-
-// lintReceiverNames examines receiver names. It complains about inconsistent
-// names used for the same type and names such as "this".
-func (f *file) lintReceiverNames() {
-	typeReceiver := map[string]string{}
-	f.walk(func(n ast.Node) bool {
-		fn, ok := n.(*ast.FuncDecl)
-		if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 {
-			return true
-		}
-		names := fn.Recv.List[0].Names
-		if len(names) < 1 {
-			return true
-		}
-		name := names[0].Name
-		const ref = styleGuideBase + "#receiver-names"
-		if name == "_" {
-			f.errorf(n, 1, link(ref), category("naming"), `receiver name should not be an underscore, omit the name if it is unused`)
-			return true
-		}
-		if name == "this" || name == "self" {
-			f.errorf(n, 1, link(ref), category("naming"), `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`)
-			return true
-		}
-		recv := receiverType(fn)
-		if prev, ok := typeReceiver[recv]; ok && prev != name {
-			f.errorf(n, 1, link(ref), category("naming"), "receiver name %s should be consistent with previous receiver name %s for %s", name, prev, recv)
-			return true
-		}
-		typeReceiver[recv] = name
-		return true
-	})
-}
-
-// lintIncDec examines statements that increment or decrement a variable.
-// It complains if they don't use x++ or x--.
-func (f *file) lintIncDec() {
-	f.walk(func(n ast.Node) bool {
-		as, ok := n.(*ast.AssignStmt)
-		if !ok {
-			return true
-		}
-		if len(as.Lhs) != 1 {
-			return true
-		}
-		if !isOne(as.Rhs[0]) {
-			return true
-		}
-		var suffix string
-		switch as.Tok {
-		case token.ADD_ASSIGN:
-			suffix = "++"
-		case token.SUB_ASSIGN:
-			suffix = "--"
-		default:
-			return true
-		}
-		f.errorf(as, 0.8, category("unary-op"), "should replace %s with %s%s", f.render(as), f.render(as.Lhs[0]), suffix)
-		return true
-	})
-}
-
-// lintErrorReturn examines function declarations that return an error.
-// It complains if the error isn't the last parameter.
-func (f *file) lintErrorReturn() {
-	f.walk(func(n ast.Node) bool {
-		fn, ok := n.(*ast.FuncDecl)
-		if !ok || fn.Type.Results == nil {
-			return true
-		}
-		ret := fn.Type.Results.List
-		if len(ret) <= 1 {
-			return true
-		}
-		if isIdent(ret[len(ret)-1].Type, "error") {
-			return true
-		}
-		// An error return parameter should be the last parameter.
-		// Flag any error parameters found before the last.
-		for _, r := range ret[:len(ret)-1] {
-			if isIdent(r.Type, "error") {
-				f.errorf(fn, 0.9, category("arg-order"), "error should be the last type when returning multiple items")
-				break // only flag one
-			}
-		}
-		return true
-	})
-}
-
-// lintUnexportedReturn examines exported function declarations.
-// It complains if any return an unexported type.
-func (f *file) lintUnexportedReturn() {
-	f.walk(func(n ast.Node) bool {
-		fn, ok := n.(*ast.FuncDecl)
-		if !ok {
-			return true
-		}
-		if fn.Type.Results == nil {
-			return false
-		}
-		if !fn.Name.IsExported() {
-			return false
-		}
-		thing := "func"
-		if fn.Recv != nil && len(fn.Recv.List) > 0 {
-			thing = "method"
-			if !ast.IsExported(receiverType(fn)) {
-				// Don't report exported methods of unexported types,
-				// such as private implementations of sort.Interface.
-				return false
-			}
-		}
-		for _, ret := range fn.Type.Results.List {
-			typ := f.pkg.typeOf(ret.Type)
-			if exportedType(typ) {
-				continue
-			}
-			f.errorf(ret.Type, 0.8, category("unexported-type-in-api"),
-				"exported %s %s returns unexported type %s, which can be annoying to use",
-				thing, fn.Name.Name, typ)
-			break // only flag one
-		}
-		return false
-	})
-}
-
-// exportedType reports whether typ is an exported type.
-// It is imprecise, and will err on the side of returning true,
-// such as for composite types.
-func exportedType(typ types.Type) bool {
-	switch T := typ.(type) {
-	case *types.Named:
-		// Builtin types have no package.
-		return T.Obj().Pkg() == nil || T.Obj().Exported()
-	case *types.Map:
-		return exportedType(T.Key()) && exportedType(T.Elem())
-	case interface {
-		Elem() types.Type
-	}: // array, slice, pointer, chan
-		return exportedType(T.Elem())
-	}
-	// Be conservative about other types, such as struct, interface, etc.
-	return true
-}
-
-// timeSuffixes is a list of name suffixes that imply a time unit.
-// This is not an exhaustive list.
-var timeSuffixes = []string{
-	"Sec", "Secs", "Seconds",
-	"Msec", "Msecs",
-	"Milli", "Millis", "Milliseconds",
-	"Usec", "Usecs", "Microseconds",
-	"MS", "Ms",
-}
-
-func (f *file) lintTimeNames() {
-	f.walk(func(node ast.Node) bool {
-		v, ok := node.(*ast.ValueSpec)
-		if !ok {
-			return true
-		}
-		for _, name := range v.Names {
-			origTyp := f.pkg.typeOf(name)
-			// Look for time.Duration or *time.Duration;
-			// the latter is common when using flag.Duration.
-			typ := origTyp
-			if pt, ok := typ.(*types.Pointer); ok {
-				typ = pt.Elem()
-			}
-			if !f.pkg.isNamedType(typ, "time", "Duration") {
-				continue
-			}
-			suffix := ""
-			for _, suf := range timeSuffixes {
-				if strings.HasSuffix(name.Name, suf) {
-					suffix = suf
-					break
-				}
-			}
-			if suffix == "" {
-				continue
-			}
-			f.errorf(v, 0.9, category("time"), "var %s is of type %v; don't use unit-specific suffix %q", name.Name, origTyp, suffix)
-		}
-		return true
-	})
-}
-
-// lintContextKeyTypes checks for call expressions to context.WithValue with
-// basic types used for the key argument.
-// See: https://golang.org/issue/17293
-func (f *file) lintContextKeyTypes() {
-	f.walk(func(node ast.Node) bool {
-		switch node := node.(type) {
-		case *ast.CallExpr:
-			f.checkContextKeyType(node)
-		}
-
-		return true
-	})
-}
-
-// checkContextKeyType reports an error if the call expression calls
-// context.WithValue with a key argument of basic type.
-func (f *file) checkContextKeyType(x *ast.CallExpr) {
-	sel, ok := x.Fun.(*ast.SelectorExpr)
-	if !ok {
-		return
-	}
-	pkg, ok := sel.X.(*ast.Ident)
-	if !ok || pkg.Name != "context" {
-		return
-	}
-	if sel.Sel.Name != "WithValue" {
-		return
-	}
-
-	// key is second argument to context.WithValue
-	if len(x.Args) != 3 {
-		return
-	}
-	key := f.pkg.typesInfo.Types[x.Args[1]]
-
-	if ktyp, ok := key.Type.(*types.Basic); ok && ktyp.Kind() != types.Invalid {
-		f.errorf(x, 1.0, category("context"), fmt.Sprintf("should not use basic type %s as key in context.WithValue", key.Type))
-	}
-}
-
-// lintContextArgs examines function declarations that contain an
-// argument with a type of context.Context
-// It complains if that argument isn't the first parameter.
-func (f *file) lintContextArgs() {
-	f.walk(func(n ast.Node) bool {
-		fn, ok := n.(*ast.FuncDecl)
-		if !ok || len(fn.Type.Params.List) <= 1 {
-			return true
-		}
-		// A context.Context should be the first parameter of a function.
-		// Flag any that show up after the first.
-		for _, arg := range fn.Type.Params.List[1:] {
-			if isPkgDot(arg.Type, "context", "Context") {
-				f.errorf(fn, 0.9, link("https://golang.org/pkg/context/"), category("arg-order"), "context.Context should be the first parameter of a function")
-				break // only flag one
-			}
-		}
-		return true
-	})
-}
-
-// containsComments returns whether the interval [start, end) contains any
-// comments without "// MATCH " prefix.
-func (f *file) containsComments(start, end token.Pos) bool {
-	for _, cgroup := range f.f.Comments {
-		comments := cgroup.List
-		if comments[0].Slash >= end {
-			// All comments starting with this group are after end pos.
-			return false
-		}
-		if comments[len(comments)-1].Slash < start {
-			// Comments group ends before start pos.
-			continue
-		}
-		for _, c := range comments {
-			if start <= c.Slash && c.Slash < end && !strings.HasPrefix(c.Text, "// MATCH ") {
-				return true
-			}
-		}
-	}
-	return false
-}
-
-// receiverType returns the named type of the method receiver, sans "*",
-// or "invalid-type" if fn.Recv is ill formed.
-func receiverType(fn *ast.FuncDecl) string {
-	switch e := fn.Recv.List[0].Type.(type) {
-	case *ast.Ident:
-		return e.Name
-	case *ast.StarExpr:
-		if id, ok := e.X.(*ast.Ident); ok {
-			return id.Name
-		}
-	}
-	// The parser accepts much more than just the legal forms.
-	return "invalid-type"
-}
-
-func (f *file) walk(fn func(ast.Node) bool) {
-	ast.Walk(walker(fn), f.f)
-}
-
-func (f *file) render(x interface{}) string {
-	var buf bytes.Buffer
-	if err := printer.Fprint(&buf, f.fset, x); err != nil {
-		panic(err)
-	}
-	return buf.String()
-}
-
-func (f *file) debugRender(x interface{}) string {
-	var buf bytes.Buffer
-	if err := ast.Fprint(&buf, f.fset, x, nil); err != nil {
-		panic(err)
-	}
-	return buf.String()
-}
-
-// walker adapts a function to satisfy the ast.Visitor interface.
-// The function return whether the walk should proceed into the node's children.
-type walker func(ast.Node) bool
-
-func (w walker) Visit(node ast.Node) ast.Visitor {
-	if w(node) {
-		return w
-	}
-	return nil
-}
-
-func isIdent(expr ast.Expr, ident string) bool {
-	id, ok := expr.(*ast.Ident)
-	return ok && id.Name == ident
-}
-
-// isBlank returns whether id is the blank identifier "_".
-// If id == nil, the answer is false.
-func isBlank(id *ast.Ident) bool { return id != nil && id.Name == "_" }
-
-func isPkgDot(expr ast.Expr, pkg, name string) bool {
-	sel, ok := expr.(*ast.SelectorExpr)
-	return ok && isIdent(sel.X, pkg) && isIdent(sel.Sel, name)
-}
-
-func isOne(expr ast.Expr) bool {
-	lit, ok := expr.(*ast.BasicLit)
-	return ok && lit.Kind == token.INT && lit.Value == "1"
-}
-
-func isCgoExported(f *ast.FuncDecl) bool {
-	if f.Recv != nil || f.Doc == nil {
-		return false
-	}
-
-	cgoExport := regexp.MustCompile(fmt.Sprintf("(?m)^//export %s$", regexp.QuoteMeta(f.Name.Name)))
-	for _, c := range f.Doc.List {
-		if cgoExport.MatchString(c.Text) {
-			return true
-		}
-	}
-	return false
-}
-
-var basicTypeKinds = map[types.BasicKind]string{
-	types.UntypedBool:    "bool",
-	types.UntypedInt:     "int",
-	types.UntypedRune:    "rune",
-	types.UntypedFloat:   "float64",
-	types.UntypedComplex: "complex128",
-	types.UntypedString:  "string",
-}
-
-// isUntypedConst reports whether expr is an untyped constant,
-// and indicates what its default type is.
-// scope may be nil.
-func (f *file) isUntypedConst(expr ast.Expr) (defType string, ok bool) {
-	// Re-evaluate expr outside of its context to see if it's untyped.
-	// (An expr evaluated within, for example, an assignment context will get the type of the LHS.)
-	exprStr := f.render(expr)
-	tv, err := types.Eval(f.fset, f.pkg.typesPkg, expr.Pos(), exprStr)
-	if err != nil {
-		return "", false
-	}
-	if b, ok := tv.Type.(*types.Basic); ok {
-		if dt, ok := basicTypeKinds[b.Kind()]; ok {
-			return dt, true
-		}
-	}
-
-	return "", false
-}
-
-// firstLineOf renders the given node and returns its first line.
-// It will also match the indentation of another node.
-func (f *file) firstLineOf(node, match ast.Node) string {
-	line := f.render(node)
-	if i := strings.Index(line, "\n"); i >= 0 {
-		line = line[:i]
-	}
-	return f.indentOf(match) + line
-}
-
-func (f *file) indentOf(node ast.Node) string {
-	line := srcLine(f.src, f.fset.Position(node.Pos()))
-	for i, r := range line {
-		switch r {
-		case ' ', '\t':
-		default:
-			return line[:i]
-		}
-	}
-	return line // unusual or empty line
-}
-
-func (f *file) srcLineWithMatch(node ast.Node, pattern string) (m []string) {
-	line := srcLine(f.src, f.fset.Position(node.Pos()))
-	line = strings.TrimSuffix(line, "\n")
-	rx := regexp.MustCompile(pattern)
-	return rx.FindStringSubmatch(line)
-}
-
-// imports returns true if the current file imports the specified package path.
-func (f *file) imports(importPath string) bool {
-	all := astutil.Imports(f.fset, f.f)
-	for _, p := range all {
-		for _, i := range p {
-			uq, err := strconv.Unquote(i.Path.Value)
-			if err == nil && importPath == uq {
-				return true
-			}
-		}
-	}
-	return false
-}
-
-// srcLine returns the complete line at p, including the terminating newline.
-func srcLine(src []byte, p token.Position) string {
-	// Run to end of line in both directions if not at line start/end.
-	lo, hi := p.Offset, p.Offset+1
-	for lo > 0 && src[lo-1] != '\n' {
-		lo--
-	}
-	for hi < len(src) && src[hi-1] != '\n' {
-		hi++
-	}
-	return string(src[lo:hi])
-}
diff --git a/vendor/github.com/golangci/maligned/LICENSE b/vendor/github.com/golangci/maligned/LICENSE
deleted file mode 100644
index 74487567632c8f137ef3971b0f5912ca50bebcda..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/maligned/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright (c) 2012 The Go Authors. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-   * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-   * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-   * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/golangci/maligned/README b/vendor/github.com/golangci/maligned/README
deleted file mode 100644
index 4e57f6eab2424eca850dfd9aaef9d53d4ba6f8e0..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/maligned/README
+++ /dev/null
@@ -1,7 +0,0 @@
-Install:
-
-    go get github.com/mdempsky/maligned
-
-Usage:
-
-    maligned cmd/compile/internal/gc cmd/link/internal/ld
diff --git a/vendor/github.com/golangci/maligned/maligned.go b/vendor/github.com/golangci/maligned/maligned.go
deleted file mode 100644
index c2492b2ffac0a3ee65e98acfadff21efdfdd83ff..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/maligned/maligned.go
+++ /dev/null
@@ -1,253 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package maligned
-
-import (
-	"fmt"
-	"go/ast"
-	"go/build"
-	"go/token"
-	"go/types"
-	"sort"
-	"strings"
-
-	"golang.org/x/tools/go/loader"
-)
-
-var fset = token.NewFileSet()
-
-type Issue struct {
-	OldSize, NewSize int
-	NewStructDef     string
-	Pos              token.Position
-}
-
-func Run(prog *loader.Program) []Issue {
-	flagVerbose := true
-	fset = prog.Fset
-
-	var issues []Issue
-
-	for _, pkg := range prog.InitialPackages() {
-		for _, file := range pkg.Files {
-			ast.Inspect(file, func(node ast.Node) bool {
-				if s, ok := node.(*ast.StructType); ok {
-					i := malign(node.Pos(), pkg.Types[s].Type.(*types.Struct), flagVerbose)
-					if i != nil {
-						issues = append(issues, *i)
-					}
-				}
-				return true
-			})
-		}
-	}
-
-	return issues
-}
-
-func malign(pos token.Pos, str *types.Struct, verbose bool) *Issue {
-	wordSize := int64(8)
-	maxAlign := int64(8)
-	switch build.Default.GOARCH {
-	case "386", "arm":
-		wordSize, maxAlign = 4, 4
-	case "amd64p32":
-		wordSize = 4
-	}
-
-	s := gcSizes{wordSize, maxAlign}
-	sz := s.Sizeof(str)
-	opt, fields := optimalSize(str, &s, verbose)
-	if sz == opt {
-		return nil
-	}
-
-	newStructDefParts := []string{"struct{"}
-
-	var w int
-	for _, f := range fields {
-		if n := len(f.Name()); n > w {
-			w = n
-		}
-	}
-	spaces := strings.Repeat(" ", w)
-	for _, f := range fields {
-		line := fmt.Sprintf("\t%s%s\t%s,", f.Name(), spaces[len(f.Name()):], f.Type().String())
-		newStructDefParts = append(newStructDefParts, line)
-	}
-	newStructDefParts = append(newStructDefParts, "}")
-
-	return &Issue{
-		OldSize:      int(sz),
-		NewSize:      int(opt),
-		NewStructDef: strings.Join(newStructDefParts, "\n"),
-		Pos:          fset.Position(pos),
-	}
-}
-
-func optimalSize(str *types.Struct, sizes *gcSizes, stable bool) (int64, []*types.Var) {
-	nf := str.NumFields()
-	fields := make([]*types.Var, nf)
-	alignofs := make([]int64, nf)
-	sizeofs := make([]int64, nf)
-	for i := 0; i < nf; i++ {
-		fields[i] = str.Field(i)
-		ft := fields[i].Type()
-		alignofs[i] = sizes.Alignof(ft)
-		sizeofs[i] = sizes.Sizeof(ft)
-	}
-	if stable { // Stable keeps as much of the order as possible, but slower
-		sort.Stable(&byAlignAndSize{fields, alignofs, sizeofs})
-	} else {
-		sort.Sort(&byAlignAndSize{fields, alignofs, sizeofs})
-	}
-	return sizes.Sizeof(types.NewStruct(fields, nil)), fields
-}
-
-type byAlignAndSize struct {
-	fields   []*types.Var
-	alignofs []int64
-	sizeofs  []int64
-}
-
-func (s *byAlignAndSize) Len() int { return len(s.fields) }
-func (s *byAlignAndSize) Swap(i, j int) {
-	s.fields[i], s.fields[j] = s.fields[j], s.fields[i]
-	s.alignofs[i], s.alignofs[j] = s.alignofs[j], s.alignofs[i]
-	s.sizeofs[i], s.sizeofs[j] = s.sizeofs[j], s.sizeofs[i]
-}
-
-func (s *byAlignAndSize) Less(i, j int) bool {
-	// Place zero sized objects before non-zero sized objects.
-	if s.sizeofs[i] == 0 && s.sizeofs[j] != 0 {
-		return true
-	}
-	if s.sizeofs[j] == 0 && s.sizeofs[i] != 0 {
-		return false
-	}
-
-	// Next, place more tightly aligned objects before less tightly aligned objects.
-	if s.alignofs[i] != s.alignofs[j] {
-		return s.alignofs[i] > s.alignofs[j]
-	}
-
-	// Lastly, order by size.
-	if s.sizeofs[i] != s.sizeofs[j] {
-		return s.sizeofs[i] > s.sizeofs[j]
-	}
-
-	return false
-}
-
-// Code below based on go/types.StdSizes.
-
-type gcSizes struct {
-	WordSize int64
-	MaxAlign int64
-}
-
-func (s *gcSizes) Alignof(T types.Type) int64 {
-	// NOTE: On amd64, complex64 is 8 byte aligned,
-	// even though float32 is only 4 byte aligned.
-
-	// For arrays and structs, alignment is defined in terms
-	// of alignment of the elements and fields, respectively.
-	switch t := T.Underlying().(type) {
-	case *types.Array:
-		// spec: "For a variable x of array type: unsafe.Alignof(x)
-		// is the same as unsafe.Alignof(x[0]), but at least 1."
-		return s.Alignof(t.Elem())
-	case *types.Struct:
-		// spec: "For a variable x of struct type: unsafe.Alignof(x)
-		// is the largest of the values unsafe.Alignof(x.f) for each
-		// field f of x, but at least 1."
-		max := int64(1)
-		for i, nf := 0, t.NumFields(); i < nf; i++ {
-			if a := s.Alignof(t.Field(i).Type()); a > max {
-				max = a
-			}
-		}
-		return max
-	}
-	a := s.Sizeof(T) // may be 0
-	// spec: "For a variable x of any type: unsafe.Alignof(x) is at least 1."
-	if a < 1 {
-		return 1
-	}
-	if a > s.MaxAlign {
-		return s.MaxAlign
-	}
-	return a
-}
-
-var basicSizes = [...]byte{
-	types.Bool:       1,
-	types.Int8:       1,
-	types.Int16:      2,
-	types.Int32:      4,
-	types.Int64:      8,
-	types.Uint8:      1,
-	types.Uint16:     2,
-	types.Uint32:     4,
-	types.Uint64:     8,
-	types.Float32:    4,
-	types.Float64:    8,
-	types.Complex64:  8,
-	types.Complex128: 16,
-}
-
-func (s *gcSizes) Sizeof(T types.Type) int64 {
-	switch t := T.Underlying().(type) {
-	case *types.Basic:
-		k := t.Kind()
-		if int(k) < len(basicSizes) {
-			if s := basicSizes[k]; s > 0 {
-				return int64(s)
-			}
-		}
-		if k == types.String {
-			return s.WordSize * 2
-		}
-	case *types.Array:
-		n := t.Len()
-		if n == 0 {
-			return 0
-		}
-		a := s.Alignof(t.Elem())
-		z := s.Sizeof(t.Elem())
-		return align(z, a)*(n-1) + z
-	case *types.Slice:
-		return s.WordSize * 3
-	case *types.Struct:
-		nf := t.NumFields()
-		if nf == 0 {
-			return 0
-		}
-
-		var o int64
-		max := int64(1)
-		for i := 0; i < nf; i++ {
-			ft := t.Field(i).Type()
-			a, sz := s.Alignof(ft), s.Sizeof(ft)
-			if a > max {
-				max = a
-			}
-			if i == nf-1 && sz == 0 && o != 0 {
-				sz = 1
-			}
-			o = align(o, a) + sz
-		}
-		return align(o, max)
-	case *types.Interface:
-		return s.WordSize * 2
-	}
-	return s.WordSize // catch-all
-}
-
-// align returns the smallest y >= x such that y % a == 0.
-func align(x, a int64) int64 {
-	y := x + a - 1
-	return y - y%a
-}
diff --git a/vendor/github.com/golangci/misspell/.golangci.yml b/vendor/github.com/golangci/misspell/.golangci.yml
index 31c566eab3e4215b08f79bc0a1bbdd6391c4d87a..2cfed442f59b75c6cc66f5d0caa927cde709fe54 100644
--- a/vendor/github.com/golangci/misspell/.golangci.yml
+++ b/vendor/github.com/golangci/misspell/.golangci.yml
@@ -1,13 +1,11 @@
 run:
   timeout: 2m
-  skip-files: []
 
 linters-settings:
   govet:
     enable-all: true
     disable:
       - fieldalignment
-      - shadow # FIXME(ldez) must be fixed
   gocyclo:
     min-complexity: 16
   goconst:
@@ -97,7 +95,7 @@ linters:
 
 issues:
   exclude-use-default: false
-  max-per-linter: 0
+  max-issues-per-linter: 0
   max-same-issues: 0
   exclude:
     - 'ST1000: at least one file in a package should have a package comment'
diff --git a/vendor/github.com/golangci/misspell/Dockerfile b/vendor/github.com/golangci/misspell/Dockerfile
index 788ce3a77568f7d17a7dcccc1411b9effa459fe1..c85cd6875edcb78fca13fc176c004a431df61f25 100644
--- a/vendor/github.com/golangci/misspell/Dockerfile
+++ b/vendor/github.com/golangci/misspell/Dockerfile
@@ -1,4 +1,4 @@
-FROM golang:1.19-alpine
+FROM golang:1.22-alpine
 
 # cache buster
 RUN echo 4
diff --git a/vendor/github.com/golangci/misspell/Makefile b/vendor/github.com/golangci/misspell/Makefile
index 783f977cb423a4666396357eec67d2990a0f0c95..fcda870ce097abd80e42485ef3a1b3afd28a1249 100644
--- a/vendor/github.com/golangci/misspell/Makefile
+++ b/vendor/github.com/golangci/misspell/Makefile
@@ -9,7 +9,7 @@ build:  ## build misspell
 	go build ./cmd/misspell
 
 test:  ## run all tests
-	go test -v .
+	CGO_ENABLED=1 go test -v -race .
 
 lint:  ## run linter
 	golangci-lint run
diff --git a/vendor/github.com/golangci/misspell/README.md b/vendor/github.com/golangci/misspell/README.md
index cccd04996f6f29201758a75ad88b9f227b3cd6a7..514727fad0e3ec6568a6d266fb10ea6de6a904d2 100644
--- a/vendor/github.com/golangci/misspell/README.md
+++ b/vendor/github.com/golangci/misspell/README.md
@@ -1,39 +1,37 @@
-[![Build Status](https://travis-ci.org/client9/misspell.svg?branch=master)](https://travis-ci.org/client9/misspell) [![Go Report Card](https://goreportcard.com/badge/github.com/client9/misspell)](https://goreportcard.com/report/github.com/client9/misspell) [![GoDoc](https://godoc.org/github.com/client9/misspell?status.svg)](https://godoc.org/github.com/client9/misspell) [![Coverage](http://gocover.io/_badge/github.com/client9/misspell)](http://gocover.io/github.com/client9/misspell) [![license](https://img.shields.io/badge/license-MIT-blue.svg?style=flat)](https://raw.githubusercontent.com/client9/misspell/master/LICENSE)
+[![Main](https://github.com/golangci/misspell/actions/workflows/ci.yml/badge.svg)](https://github.com/golangci/misspell/actions/workflows/ci.yml)
+[![Go Report Card](https://goreportcard.com/badge/github.com/golangci/misspell)](https://goreportcard.com/report/github.com/golangci/misspell)
+[![Go Reference](https://pkg.go.dev/badge/github.com/golangci/misspell.svg)](https://pkg.go.dev/github.com/golangci/misspell)
+[![license](https://img.shields.io/badge/license-MIT-blue.svg?style=flat)](https://raw.githubusercontent.com/golangci/misspell/master/LICENSE)
 
 Correct commonly misspelled English words... quickly.
 
 ### Install
 
-
 If you just want a binary and to start using `misspell`:
 
+```bash
+curl -sfL https://raw.githubusercontent.com/golangci/misspell/master/install-misspell.sh | sh -s -- -b ./bin ${MISSPELL_VERSION}
 ```
-curl -L -o ./install-misspell.sh https://git.io/misspell
-sh ./install-misspell.sh
-```
-
-
-Both will install as `./bin/misspell`.  You can adjust the download location using the `-b` flag.   File a ticket if you want another platform supported.
 
+This will install as `./bin/misspell`.  
+You can adjust the download location using the `-b` flag.  
+File a ticket if you want another platform supported.
 
-If you use [Go](https://golang.org/), the best way to run `misspell` is by using [gometalinter](#gometalinter).  Otherwise, install `misspell` the old-fashioned way:
+If you use [Go](https://golang.org/), the best way to run `misspell` is by using [golangci-lint](https://github.com/golangci/golangci-lint).  
+Otherwise, install `misspell` the old-fashioned way:
 
+```bash
+go install github.com/golangci/misspell/cmd/misspell@latest
 ```
-go install github.com/client9/misspell/cmd/misspell@latest
-```
-
-and misspell will be in your `GOPATH`
-
 
-Also if you like to live dangerously, one could do
+Also, if you like to live dangerously, you could do
 
 ```bash
-curl -L https://git.io/misspell | bash
+curl -sfL https://raw.githubusercontent.com/golangci/misspell/master/install-misspell.sh | sh -s -- -b $(go env GOPATH)/bin ${MISSPELL_VERSION}
 ```
 
 ### Usage
 
-
 ```bash
 $ misspell all.html your.txt important.md files.go
 your.txt:42:10 found "langauge" a misspelling of "language"
@@ -41,29 +39,32 @@ your.txt:42:10 found "langauge" a misspelling of "language"
 # ^ file, line, column
 ```
 
-```
+```console
 $ misspell -help
 Usage of misspell:
   -debug
-    	Debug matching, very slow
+        Debug matching, very slow
+  -dict string
+        User defined corrections file path (.csv). CSV format: typo,fix
   -error
-    	Exit with 2 if misspelling found
+        Exit with 2 if misspelling found
   -f string
-    	'csv', 'sqlite3' or custom Golang template for output
+        'csv', 'sqlite3' or custom Golang template for output
   -i string
-    	ignore the following corrections, comma separated
+        ignore the following corrections, comma-separated
   -j int
-    	Number of workers, 0 = number of CPUs
+        Number of workers, 0 = number of CPUs
   -legal
-    	Show legal information and exit
+        Show legal information and exit
   -locale string
-    	Correct spellings using locale perferances for US or UK.  Default is to use a neutral variety of English.  Setting locale to US will correct the British spelling of 'colour' to 'color'
+        Correct spellings using locale preferences for US or UK.  Default is to use a neutral variety of English.  Setting locale to US will correct the British spelling of 'colour' to 'color'
   -o string
-    	output file or [stderr|stdout|] (default "stdout")
-  -q	Do not emit misspelling output
+        output file or [stderr|stdout|] (default "stdout")
+  -q    Do not emit misspelling output
   -source string
-    	Source mode: auto=guess, go=golang source, text=plain or markdown-like text (default "auto")
-  -w	Overwrite file with corrections (default is just to display)
+        Source mode: text (default), go (comments only) (default "text")
+  -v    Show version and exit
+  -w    Overwrite file with corrections (default is just to display)
 ```
 
 ## FAQ
@@ -72,7 +73,6 @@ Usage of misspell:
 * [Converting UK spellings to US](#locale)
 * [Using pipes and stdin](#stdin)
 * [Golang special support](#golang)
-* [gometalinter support](#gometalinter)
 * [CSV Output](#csv)
 * [Using SQLite3](#sqlite)
 * [Changing output format](#output)
@@ -92,7 +92,7 @@ Usage of misspell:
 
 Just add the `-w` flag!
 
-```
+```console
 $ misspell -w all.html your.txt important.md files.go
 your.txt:9:21:corrected "langauge" to "language"
 
@@ -104,20 +104,19 @@ your.txt:9:21:corrected "langauge" to "language"
 
 Add the `-locale US` flag!
 
-```bash
+```console
 $ misspell -locale US important.txt
 important.txt:10:20 found "colour" a misspelling of "color"
 ```
 
 Add the `-locale UK` flag!
 
-```bash
+```console
 $ echo "My favorite color is blue" | misspell -locale UK
 stdin:1:3:found "favorite color" a misspelling of "favourite colour"
 ```
 
-Help is appreciated as I'm neither British nor an
-expert in the English language.
+Help is appreciated as I'm neither British nor an expert in the English language.
 
 <a name="recursive"></a>
 ### How do you check an entire folder recursively?
@@ -141,7 +140,8 @@ or
 find . -type f | xargs misspell
 ```
 
-You can select a type of file as well.  The following examples selects all `.txt` files that are *not* in the `vendor` directory:
+You can select a type of file as well.  
+The following example selects all `.txt` files that are *not* in the `vendor` directory:
 
 ```bash
 find . -type f -name '*.txt' | grep -v vendor/ | xargs misspell -error
@@ -154,14 +154,14 @@ Yes!
 
 Print messages to `stderr` only:
 
-```bash
+```console
 $ echo "zeebra" | misspell
 stdin:1:0:found "zeebra" a misspelling of "zebra"
 ```
 
 Print messages to `stderr`, and corrected text to `stdout`:
 
-```bash
+```console
 $ echo "zeebra" | misspell -w
 stdin:1:0:corrected "zeebra" to "zebra"
 zebra
@@ -169,7 +169,7 @@ zebra
 
 Only print the corrected text to `stdout`:
 
-```bash
+```console
 $ echo "zeebra" | misspell -w -q
 zebra
 ```
@@ -177,55 +177,23 @@ zebra
 <a name="golang"></a>
 ### Are there special rules for golang source files?
 
-Yes!  If the file ends in `.go`, then misspell will only check spelling in
-comments.
-
-If you want to force a file to be checked as a golang source, use `-source=go`
-on the command line.  Conversely, you can check a golang source as if it were
-pure text by using `-source=text`.  You might want to do this since many
-variable names have misspellings in them!
+Yes. If you want to force a file to be checked as a golang source, use `-source=go` on the command line.  
+Conversely, you can check a golang source as if it were pure text by using `-source=text`.  
+You might want to do this since many variable names have misspellings in them!
 
-### Can I check only-comments in other other programming languages?
+### Can I check only-comments in other programming languages?
 
-I'm told the using `-source=go` works well for ruby, javascript, java, c and
-c++.
-
-It doesn't work well for python and bash.
-
-<a name="gometalinter"></a>
-### Does this work with gometalinter?
-
-[gometalinter](https://github.com/alecthomas/gometalinter) runs
-multiple golang linters.  Starting on [2016-06-12](https://github.com/alecthomas/gometalinter/pull/134)
-gometalinter supports `misspell` natively but it is disabled by default.
-
-```bash
-# update your copy of gometalinter
-go get -u github.com/alecthomas/gometalinter
-
-# install updates and misspell
-gometalinter --install --update
-```
-
-To use, just enable `misspell`
-
-```
-gometalinter --enable misspell ./...
-```
-
-Note that gometalinter only checks golang files, and uses the default options
-of `misspell`
-
-You may wish to run this on your plaintext (.txt) and/or markdown files too.
+I'm told that using `-source=go` works well for Ruby, JavaScript, Java, C, and C++.
 
+It doesn't work well for Python and Bash.
 
 <a name="csv"></a>
 ### How Can I Get CSV Output?
 
 Using `-f csv`, the output is standard comma-separated values with headers in the first row.
 
-```
-misspell -f csv *
+```console
+$ misspell -f csv *
 file,line,column,typo,corrected
 "README.md",9,22,langauge,language
 "README.md",47,25,langauge,language
@@ -236,7 +204,7 @@ file,line,column,typo,corrected
 
 Using `-f sqlite`, the output is a [sqlite3](https://www.sqlite.org/index.html) dump-file.
 
-```bash
+```console
 $ misspell -f sqlite * > /tmp/misspell.sql
 $ cat /tmp/misspell.sql
 
@@ -254,7 +222,7 @@ INSERT INTO misspell VALUES("install.txt",202,31,"immediatly","immediately");
 COMMIT;
 ```
 
-```bash
+```console
 $ sqlite3 -init /tmp/misspell.sql :memory: 'select count(*) from misspell'
 1
 ```
@@ -271,20 +239,22 @@ misspell -f sqlite * | sqlite3 -init /dev/stdin -column -cmd '.width 60 15' ':me
 
 Using the `-i "comma,separated,rules"` flag you can specify corrections to ignore.
 
-For example, if you were to run `misspell -w -error -source=text` against document that contains the string `Guy Finkelshteyn Braswell`, misspell would change the text to `Guy Finkelstheyn Bras well`.  You can then
-determine the rules to ignore by reverting the change and running the with the `-debug` flag.  You can then see
-that the corrections were `htey -> they` and `aswell -> as well`. To ignore these two rules, you add `-i "htey,aswell"` to
-your command. With debug mode on, you can see it print the corrections, but it will no longer make them.
+For example, if you were to run `misspell -w -error -source=text` against a document that contains the string `Guy Finkelshteyn Braswell`, 
+misspell would change the text to `Guy Finkelstheyn Bras well`.  
+You can then determine the rules to ignore by reverting the change and running again with the `-debug` flag.  
+You can then see that the corrections were `htey -> they` and `aswell -> as well`. 
+To ignore these two rules, you add `-i "htey,aswell"` to your command.
+With debug mode on, you can see it print the corrections, but it will no longer make them.
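+
+A hypothetical session tying these steps together (the document name is made up; only the flags described above are used):
+
+```console
+$ misspell -error -source=text -debug guests.txt
+$ misspell -w -error -source=text -i "htey,aswell" guests.txt
+```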
 
 <a name="output"></a>
 ### How can I change the output format?
 
-Using the `-f template` flag you can pass in a
-[golang text template](https://golang.org/pkg/text/template/) to format the output.
+Using the `-f template` flag you can pass in a [golang text template](https://golang.org/pkg/text/template/) to format the output.
 
 One can use `printf "%q" VALUE` to safely quote a value.
 
-The default template is compatible with [gometalinter](https://github.com/alecthomas/gometalinter)
+The default template:
+
 ```
 {{ .Filename }}:{{ .Line }}:{{ .Column }}:corrected {{ printf "%q" .Original }} to "{{ printf "%q" .Corrected }}"
 ```
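+
+For example, assuming `-f` accepts the template string directly, one could print just the location and the quoted original word:
+
+```console
+$ misspell -f '{{ .Filename }}:{{ .Line }}:found {{ printf "%q" .Original }}' *.md
+```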
@@ -298,14 +268,12 @@ To just print probable misspellings:
 <a name="problem"></a>
 ### What problem does this solve?
 
-This corrects commonly misspelled English words in computer source
-code, and other text-based formats (`.txt`, `.md`, etc).
+This corrects commonly misspelled English words in computer source code and other text-based formats (`.txt`, `.md`, etc.).
 
-It is designed to run quickly so it can be
-used as a [pre-commit hook](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks)
-with minimal burden on the developer.
+It is designed to run quickly,
+so it can be used as a [pre-commit hook](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) with minimal burden on the developer.
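+
+For instance, a minimal (untested) sketch of such a hook; adjust the file selection to your repository:
+
+```console
+$ cat > .git/hooks/pre-commit <<'EOF'
+#!/bin/sh
+git diff --cached --name-only --diff-filter=ACM | xargs misspell -error
+EOF
+$ chmod +x .git/hooks/pre-commit
+```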
 
-It does not work with binary formats (e.g. Word, etc).
+It does not work with binary formats (e.g. Word, etc.).
 
 It is not a complete spell-checking program nor a grammar checker.
 
@@ -322,78 +290,71 @@ They all work but had problems that prevented me from using them at scale:
 
 * slow, all of the above check one misspelling at a time (i.e. linear) using regexps
 * not MIT/Apache2 licensed (or equivalent)
-* have dependencies that don't work for me (python3, bash, linux sed, etc)
+* have dependencies that don't work for me (python3, bash, linux sed, etc.)
 * don't understand American vs. British English and sometimes make unwelcome "corrections"
 
-That said, they might be perfect for you and many have more features
-than this project!
+That said, they might be perfect for you and many have more features than this project!
 
 <a name="performance"></a>
 ### How fast is it?
 
-Misspell is easily 100x to 1000x faster than other spelling correctors.  You
-should be able to check and correct 1000 files in under 250ms.
+Misspell is easily 100x to 1000x faster than other spelling correctors.  
+You should be able to check and correct 1000 files in under 250ms.
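+
+A rough way to check that claim on your own tree (timings will vary with machine and file count):
+
+```console
+$ time find . -type f -name '*.md' | xargs misspell
+```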
 
-This uses the mighty power of golang's
-[strings.Replacer](https://golang.org/pkg/strings/#Replacer) which is
-a implementation or variation of the
-[Aho–Corasick algorithm](https://en.wikipedia.org/wiki/Aho–Corasick_algorithm).
+This uses the mighty power of golang's [strings.Replacer](https://golang.org/pkg/strings/#Replacer)
+which is an implementation or variation of the [Aho–Corasick algorithm](https://en.wikipedia.org/wiki/Aho–Corasick_algorithm).
 This makes multiple substring matches *simultaneously*.
 
-In addition this uses multiple CPU cores to work on multiple files.
+It also uses multiple CPU cores to work on multiple files concurrently.
 
 <a name="issues"></a>
 ### What problems does it have?
 
-Unlike the other projects, this doesn't know what a "word" is.  There may be
-more false positives and false negatives due to this.  On the other hand, it
-sometimes catches things others don't.
+Unlike the other projects, this doesn't know what a "word" is.  
+There may be more false positives and false negatives due to this.  
+On the other hand, it sometimes catches things others don't.
 
 Either way, please file bugs and we'll fix them!
 
-Since it operates in parallel to make corrections, it can be non-obvious to
-determine exactly what word was corrected.
+Since it operates in parallel to make corrections,
+it can be non-obvious to determine exactly what word was corrected.
 
 <a name="debug"></a>
 ### It's making mistakes.  How can I debug?
 
-Run using `-debug` flag on the file you want.  It should then print what word
-it is trying to correct.  Then [file a
-bug](https://github.com/client9/misspell/issues) describing the problem.
+Run using the `-debug` flag on the file you want.  
+It should then print what word it is trying to correct.  
+Then [file a bug](https://github.com/golangci/misspell/issues) describing the problem.
 Thanks!
 
 <a name="missing"></a>
 ### Why is it making mistakes or missing items in golang files?
 
-The matching function is *case-sensitive*, so variable names that are multiple
-worlds either in all-upper or all-lower case sometimes can cause false
-positives.  For instance a variable named `bodyreader` could trigger a false
-positive since `yrea` is in the middle that could be corrected to `year`.
-Other problems happen if the variable name uses a English contraction that
-should use an apostrophe.  The best way of fixing this is to use the
-[Effective Go naming
-conventions](https://golang.org/doc/effective_go.html#mixed-caps) and use
-[camelCase](https://en.wikipedia.org/wiki/CamelCase) for variable names.  You
-can check your code using [golint](https://github.com/golang/lint)
+The matching function is *case-sensitive*,
+so variable names that are multiple words in either all-uppercase or all-lowercase can sometimes cause false positives.  
+For instance, a variable named `bodyreader` could trigger a false positive, since it contains `yrea` in the middle, which could be corrected to `year`.
+Other problems happen if the variable name uses an English contraction that should use an apostrophe.  
+The best way of fixing this is to use the [Effective Go naming conventions](https://golang.org/doc/effective_go.html#mixed-caps)
+and use [camelCase](https://en.wikipedia.org/wiki/CamelCase) for variable names.  
+You can check your code using [golint](https://github.com/golang/lint).
 
 <a name="license"></a>
 ### What license is this?
 
-The main code is [MIT](https://github.com/client9/misspell/blob/master/LICENSE).
+The main code is [MIT](https://github.com/golangci/misspell/blob/master/LICENSE).
 
 Misspell also makes use of the Golang standard library and contains a modified version of Golang's [strings.Replacer](https://golang.org/pkg/strings/#Replacer)
-which are covered under a [BSD License](https://github.com/golang/go/blob/master/LICENSE).  Type `misspell -legal` for more details or see [legal.go](https://github.com/client9/misspell/blob/master/legal.go)
+which is covered under a [BSD License](https://github.com/golang/go/blob/master/LICENSE).  
+Type `misspell -legal` for more details or see [legal.go](https://github.com/golangci/misspell/blob/master/legal.go).
 
 <a name="words"></a>
 ### Where do the word lists come from?
 
 It started with a word list from
 [Wikipedia](https://en.wikipedia.org/wiki/Wikipedia:Lists_of_common_misspellings/For_machines).
-Unfortunately, this list had to be highly edited as many of the words are
-obsolete or based from mistakes on mechanical typewriters (I'm guessing).
+Unfortunately, this list had to be highly edited as many of the words are obsolete or based on mistakes on mechanical typewriters (I'm guessing).
 
-Additional words were added based on actually mistakes seen in
-the wild (meaning self-generated).
+Additional words were added based on actual mistakes seen in the wild (meaning self-generated).
 
 Variations of UK and US spellings are based on many sources including:
 
@@ -401,24 +362,23 @@ Variations of UK and US spellings are based on many sources including:
 * http://www.oxforddictionaries.com/us/words/american-and-british-spelling-american (excellent site but incomplete)
 * Diffing US and UK [scowl dictionaries](http://wordlist.aspell.net)
 
-American English is more accepting of spelling variations than is British
-English, so "what is American or not" is subject to opinion.  Corrections and help welcome.
+American English is more accepting of spelling variations than is British English,
+so "what is American or not" is subject to opinion.
+Corrections and help welcome.
 
 <a name="otherideas"></a>
 ### What are some other enhancements that could be done?
 
-Here's some ideas for enhancements:
+Here are some ideas for enhancements:
 
 *Capitalization of proper nouns* could be done (e.g. weekday and month names, country names, language names)
 
-*Opinionated US spellings*   US English has a number of words with alternate
-spellings.  Think [adviser vs.
-advisor](http://grammarist.com/spelling/adviser-advisor/).  While "advisor" is not wrong, the opinionated US
-locale would correct "advisor" to "adviser".
+*Opinionated US spellings*   US English has a number of words with alternate spellings.  
+Think [adviser vs. advisor](http://grammarist.com/spelling/adviser-advisor/).  
+While "advisor" is not wrong, the opinionated US  locale would correct "advisor" to "adviser".
 
 *Versioning*  Some type of versioning is needed so reporting mistakes and errors is easier.
 
-*Feedback*  Mistakes would be sent to some server for agregation and feedback review.
+*Feedback*  Mistakes would be sent to some server for aggregation and feedback review.
 
-*Contractions and Apostrophes* This would optionally correct "isnt" to
-"isn't", etc.
+*Contractions and Apostrophes* This would optionally correct "isnt" to "isn't", etc.
diff --git a/vendor/github.com/golangci/misspell/goreleaser.yml b/vendor/github.com/golangci/misspell/goreleaser.yml
index 97aa83e5ac5e98aae04257df43f4e8f460acdd63..99781e27a31f3c3fa5abf2ac4a4aae2ca7eb2f17 100644
--- a/vendor/github.com/golangci/misspell/goreleaser.yml
+++ b/vendor/github.com/golangci/misspell/goreleaser.yml
@@ -16,10 +16,6 @@ builds:
 
 archives:
    - name_template: "{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}"
-     replacements:
-       amd64: 64bit
-       386: 32bit
-       darwin: mac
      files:
        - LICENSE
 
diff --git a/vendor/github.com/golangci/misspell/mime.go b/vendor/github.com/golangci/misspell/mime.go
index 76a96cfd1b69beb783bdeeb70ff14951a365f958..19d49e08551f16274a6a87db39d106978b727580 100644
--- a/vendor/github.com/golangci/misspell/mime.go
+++ b/vendor/github.com/golangci/misspell/mime.go
@@ -7,6 +7,7 @@ import (
 	"net/http"
 	"os"
 	"path/filepath"
+	"slices"
 	"strings"
 )
 
@@ -77,13 +78,12 @@ func isSCMPath(s string) bool {
 	if strings.Contains(filepath.Base(s), "EDITMSG") {
 		return false
 	}
+
 	parts := strings.Split(filepath.Clean(s), string(filepath.Separator))
-	for _, dir := range parts {
-		if scm[dir] {
-			return true
-		}
-	}
-	return false
+
+	return slices.ContainsFunc(parts, func(dir string) bool {
+		return scm[dir]
+	})
 }
 
 var magicHeaders = [][]byte{
@@ -174,7 +174,8 @@ func ReadTextFile(filename string) (string, error) {
 	// if not-text, then exit
 	isText := false
 	if fstat.Size() > 50000 {
-		fin, err := os.Open(filename)
+		var fin *os.File
+		fin, err = os.Open(filename)
 		if err != nil {
 			return "", fmt.Errorf("unable to open large file %q: %w", filename, err)
 		}
diff --git a/vendor/github.com/golangci/misspell/notwords.go b/vendor/github.com/golangci/misspell/notwords.go
index a250cf7f6e812d72f61dcf2effec974bbca00623..f694f46dc05ce7200e550ab1e8a4a79ee696480b 100644
--- a/vendor/github.com/golangci/misspell/notwords.go
+++ b/vendor/github.com/golangci/misspell/notwords.go
@@ -4,12 +4,17 @@ import (
 	"bytes"
 	"regexp"
 	"strings"
+	"unicode"
 )
 
 var (
-	reEmail     = regexp.MustCompile(`[a-zA-Z0-9_.%+-]+@[a-zA-Z0-9-.]+\.[a-zA-Z]{2,6}[^a-zA-Z]`)
-	reHost      = regexp.MustCompile(`[a-zA-Z0-9-.]+\.[a-zA-Z]+`)
-	reBackslash = regexp.MustCompile(`\\[a-z]`)
+	reEmail     = regexp.MustCompile(`[[:alnum:]_.%+-]+@[[:alnum:]-.]+\.[[:alpha:]]{2,6}[^[:alpha:]]`)
+	reBackslash = regexp.MustCompile(`\\[[:lower:]]`)
+
+	// reHost Host name regular expression.
+	// The length of any one label is limited between 1 and 63 octets. (https://www.ietf.org/rfc/rfc2181.txt)
+	// A TLD has at least 2 letters.
+	reHost = regexp.MustCompile(`([[:alnum:]-]+\.)+[[:alpha:]]{2,63}`)
 )
 
 // RemovePath attempts to strip away embedded file system paths, e.g.
@@ -20,7 +25,7 @@ var (
 func RemovePath(s string) string {
 	out := bytes.Buffer{}
 	var idx int
-	for len(s) > 0 {
+	for s != "" {
 		if idx = strings.IndexByte(s, '/'); idx == -1 {
 			out.WriteString(s)
 			break
@@ -62,6 +67,18 @@ func replaceWithBlanks(s string) string {
 	return strings.Repeat(" ", len(s))
 }
 
+// replaceHost is the same as replaceWithBlanks, but returns the string unchanged if it contains at least one uppercase letter.
+// Domain names are case-insensitive but browsers and DNS convert uppercase to lower case. (https://www.ietf.org/rfc/rfc4343.txt)
+func replaceHost(s string) string {
+	for _, r := range s {
+		if unicode.IsUpper(r) {
+			return s
+		}
+	}
+
+	return replaceWithBlanks(s)
+}
+
 // RemoveEmail remove email-like strings, e.g. "nickg+junk@xfoobar.com", "nickg@xyz.abc123.biz".
 func RemoveEmail(s string) string {
 	return reEmail.ReplaceAllStringFunc(s, replaceWithBlanks)
@@ -69,7 +86,7 @@ func RemoveEmail(s string) string {
 
 // RemoveHost removes host-like strings "foobar.com" "abc123.fo1231.biz".
 func RemoveHost(s string) string {
-	return reHost.ReplaceAllStringFunc(s, replaceWithBlanks)
+	return reHost.ReplaceAllStringFunc(s, replaceHost)
 }
 
 // RemoveBackslashEscapes removes characters that are preceded by a backslash.
diff --git a/vendor/github.com/golangci/misspell/replace.go b/vendor/github.com/golangci/misspell/replace.go
index bcfcf8deb5e7ac92a341232501d2c98d6d7a2d2c..b51dfa83bfe2bece1afcbc0c64e821ad474271ab 100644
--- a/vendor/github.com/golangci/misspell/replace.go
+++ b/vendor/github.com/golangci/misspell/replace.go
@@ -5,6 +5,7 @@ import (
 	"bytes"
 	"io"
 	"regexp"
+	"slices"
 	"strings"
 	"text/scanner"
 )
@@ -17,12 +18,9 @@ func max(x, y int) int {
 }
 
 func inArray(haystack []string, needle string) bool {
-	for _, word := range haystack {
-		if strings.EqualFold(needle, word) {
-			return true
-		}
-	}
-	return false
+	return slices.ContainsFunc(haystack, func(word string) bool {
+		return strings.EqualFold(needle, word)
+	})
 }
 
 var wordRegexp = regexp.MustCompile(`[a-zA-Z0-9']+`)
@@ -192,7 +190,7 @@ Loop:
 	return buf.String(), diffs
 }
 
-// Replace is corrects misspellings in input, returning corrected version along with a list of diffs.
+// Replace corrects misspellings in input, returning the corrected version along with a list of diffs.
 func (r *Replacer) Replace(input string) (string, []Diff) {
 	output := r.engine.Replace(input)
 	if input == output {
diff --git a/vendor/github.com/golangci/misspell/stringreplacer.go b/vendor/github.com/golangci/misspell/stringreplacer.go
index 73ca9a56acdfbd65865f2391237f1e5a78a3b903..46cb6c4b66ccab032110870c5fb221ab6ae6462c 100644
--- a/vendor/github.com/golangci/misspell/stringreplacer.go
+++ b/vendor/github.com/golangci/misspell/stringreplacer.go
@@ -102,7 +102,6 @@ func (t *trieNode) add(key, val string, priority int, r *genericReplacer) {
 		return
 	}
 
-	//nolint:nestif // TODO(ldez) must be fixed.
 	if t.prefix != "" {
 		// Need to split the prefix among multiple nodes.
 		var n int // length of the longest common prefix
@@ -111,9 +110,10 @@ func (t *trieNode) add(key, val string, priority int, r *genericReplacer) {
 				break
 			}
 		}
-		if n == len(t.prefix) {
+		switch n {
+		case len(t.prefix):
 			t.next.add(key[n:], val, priority, r)
-		} else if n == 0 {
+		case 0:
 			// First byte differs, start a new lookup table here. Looking up
 			// what is currently t.prefix[0] will lead to prefixNode, and
 			// looking up key[0] will lead to keyNode.
@@ -133,7 +133,7 @@ func (t *trieNode) add(key, val string, priority int, r *genericReplacer) {
 			t.prefix = ""
 			t.next = nil
 			keyNode.add(key[1:], val, priority, r)
-		} else {
+		default:
 			// Insert new node after the common section of the prefix.
 			next := &trieNode{
 				prefix: t.prefix[n:],
@@ -143,18 +143,22 @@ func (t *trieNode) add(key, val string, priority int, r *genericReplacer) {
 			t.next = next
 			next.add(key[n:], val, priority, r)
 		}
-	} else if t.table != nil {
+		return
+	}
+
+	if t.table != nil {
 		// Insert into existing table.
 		m := r.mapping[key[0]]
 		if t.table[m] == nil {
 			t.table[m] = new(trieNode)
 		}
 		t.table[m].add(key[1:], val, priority, r)
-	} else {
-		t.prefix = key
-		t.next = new(trieNode)
-		t.next.add("", val, priority, r)
+		return
 	}
+
+	t.prefix = key
+	t.next = new(trieNode)
+	t.next.add("", val, priority, r)
 }
 
 // genericReplacer is the fully generic algorithm.
@@ -242,7 +246,6 @@ func (r *genericReplacer) Replace(s string) string {
 	return string(buf)
 }
 
-//nolint:gocognit // TODO(ldez) must be fixed.
 func (r *genericReplacer) WriteString(w io.Writer, s string) (n int, err error) {
 	sw := getStringWriter(w)
 	var last, wn int
diff --git a/vendor/github.com/golangci/misspell/url.go b/vendor/github.com/golangci/misspell/url.go
index 203b91a79ecf35aae791e5b018dcc18e50157afd..a91d1d967db0d357f56b6c8e4a73faaa10f822ac 100644
--- a/vendor/github.com/golangci/misspell/url.go
+++ b/vendor/github.com/golangci/misspell/url.go
@@ -10,7 +10,7 @@ import (
 // @(https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?$@iS.
 var reURL = regexp.MustCompile(`(?i)(https?|ftp)://(-\.)?([^\s/?.#]+\.?)+(/\S*)?`)
 
-// StripURL attemps to replace URLs with blank spaces, e.g.
+// StripURL attempts to replace URLs with blank spaces, e.g.
 //
 //	"xxx http://foo.com/ yyy -> "xxx          yyyy".
 func StripURL(s string) string {
diff --git a/vendor/github.com/golangci/modinfo/.gitignore b/vendor/github.com/golangci/modinfo/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..9f11b755a17d8192c60f61cb17b8902dffbd9f23
--- /dev/null
+++ b/vendor/github.com/golangci/modinfo/.gitignore
@@ -0,0 +1 @@
+.idea/
diff --git a/vendor/github.com/golangci/modinfo/.golangci.yml b/vendor/github.com/golangci/modinfo/.golangci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..9698182f2a5b67fb7128c7ec3e8d27259066dfff
--- /dev/null
+++ b/vendor/github.com/golangci/modinfo/.golangci.yml
@@ -0,0 +1,157 @@
+run:
+  timeout: 7m
+
+linters-settings:
+  govet:
+    enable:
+      - shadow
+  gocyclo:
+    min-complexity: 12
+  goconst:
+    min-len: 3
+    min-occurrences: 3
+  funlen:
+    lines: -1
+    statements: 50
+  misspell:
+    locale: US
+  depguard:
+    rules:
+      main:
+        deny:
+          - pkg: "github.com/instana/testify"
+            desc: not allowed
+          - pkg: "github.com/pkg/errors"
+            desc: Should be replaced by standard lib errors package
+  tagalign:
+    align: false
+    order:
+      - xml
+      - json
+      - yaml
+      - yml
+      - toml
+      - mapstructure
+      - url
+  godox:
+    keywords:
+      - FIXME
+  gocritic:
+    enabled-tags:
+      - diagnostic
+      - style
+      - performance
+    disabled-checks:
+      - paramTypeCombine # already handled by gofumpt.extra-rules
+      - whyNoLint # already handled by nonolint
+      - unnamedResult
+      - hugeParam
+      - sloppyReassign
+      - rangeValCopy
+      - octalLiteral
+      - ptrToRefParam
+      - appendAssign
+      - ruleguard
+      - httpNoBody
+      - exposedSyncMutex
+  revive:
+    rules:
+      - name: struct-tag
+      - name: blank-imports
+      - name: context-as-argument
+      - name: context-keys-type
+      - name: dot-imports
+      - name: error-return
+      - name: error-strings
+      - name: error-naming
+      - name: exported
+        disabled: true
+      - name: if-return
+      - name: increment-decrement
+      - name: var-naming
+      - name: var-declaration
+      - name: package-comments
+        disabled: true
+      - name: range
+      - name: receiver-naming
+      - name: time-naming
+      - name: unexported-return
+      - name: indent-error-flow
+      - name: errorf
+      - name: empty-block
+      - name: superfluous-else
+      - name: unused-parameter
+        disabled: true
+      - name: unreachable-code
+      - name: redefines-builtin-id
+
+  tagliatelle:
+    case:
+      rules:
+        json: pascal
+        yaml: camel
+        xml: camel
+        header: header
+        mapstructure: camel
+        env: upperSnake
+        envconfig: upperSnake
+
+linters:
+  enable-all: true
+  disable:
+    - deadcode # deprecated
+    - exhaustivestruct # deprecated
+    - golint # deprecated
+    - ifshort # deprecated
+    - interfacer # deprecated
+    - maligned # deprecated
+    - nosnakecase # deprecated
+    - scopelint # deprecated
+    - structcheck # deprecated
+    - varcheck # deprecated
+    - cyclop # duplicate of gocyclo
+    - sqlclosecheck # not relevant (SQL)
+    - rowserrcheck # not relevant (SQL)
+    - execinquery # not relevant (SQL)
+    - lll
+    - gosec
+    - dupl # not relevant
+    - prealloc # too many false-positive
+    - bodyclose # too many false-positive
+    - gomnd
+    - testpackage # not relevant
+    - tparallel # not relevant
+    - paralleltest # not relevant
+    - nestif # too many false-positive
+    - wrapcheck
+    - goerr113 # not relevant
+    - nlreturn # not relevant
+    - wsl # not relevant
+    - exhaustive # not relevant
+    - exhaustruct # not relevant
+    - makezero # not relevant
+    - forbidigo
+    - varnamelen # not relevant
+    - nilnil # not relevant
+    - ireturn # not relevant
+    - contextcheck # too many false-positive
+    - tenv # we already have a test "framework" to handle env vars
+    - noctx
+    - errchkjson
+    - nonamedreturns
+    - gosmopolitan  # not relevant
+    - gochecknoglobals
+
+issues:
+  exclude-use-default: false
+  max-issues-per-linter: 0
+  max-same-issues: 0
+  exclude:
+    - 'Error return value of .((os\.)?std(out|err)\..*|.*Close|.*Flush|os\.Remove(All)?|.*printf?|os\.(Un)?Setenv). is not checked'
+    - 'ST1000: at least one file in a package should have a package comment'
+  exclude-rules:
+    - path: (.+)_test.go
+      linters:
+        - funlen
+        - goconst
+        - maintidx
diff --git a/vendor/github.com/golangci/modinfo/LICENSE b/vendor/github.com/golangci/modinfo/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7
--- /dev/null
+++ b/vendor/github.com/golangci/modinfo/LICENSE
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.  Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received.  You must make sure that they, too, receive
+or can get the source code.  And you must show them these terms so they
+know their rights.
+
+  Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+  For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software.  For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+  Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so.  This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software.  The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable.  Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products.  If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+  Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary.  To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                       TERMS AND CONDITIONS
+
+  0. Definitions.
+
+  "This License" refers to version 3 of the GNU General Public License.
+
+  "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+  "The Program" refers to any copyrightable work licensed under this
+License.  Each licensee is addressed as "you".  "Licensees" and
+"recipients" may be individuals or organizations.
+
+  To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy.  The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+  A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+  To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy.  Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+  To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies.  Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+  An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License.  If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+  1. Source Code.
+
+  The "source code" for a work means the preferred form of the work
+for making modifications to it.  "Object code" means any non-source
+form of a work.
+
+  A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+  The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form.  A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+  The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities.  However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work.  For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+  The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+  The Corresponding Source for a work in source code form is that
+same work.
+
+  2. Basic Permissions.
+
+  All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met.  This License explicitly affirms your unlimited
+permission to run the unmodified Program.  The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work.  This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+  You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force.  You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright.  Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+  Conveying under any other circumstances is permitted solely under
+the conditions stated below.  Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+  No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+  When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+  4. Conveying Verbatim Copies.
+
+  You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+  You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+  You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+    a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+
+    b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under section
+    7.  This requirement modifies the requirement in section 4 to
+    "keep intact all notices".
+
+    c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy.  This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged.  This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+
+    d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+  A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit.  Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+  You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+    a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+
+    b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the
+    Corresponding Source from a network server at no charge.
+
+    c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source.  This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+
+    d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge.  You need not require recipients to copy the
+    Corresponding Source along with the object code.  If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source.  Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+
+    e) Convey the object code using peer-to-peer transmission, provided
+    you inform other peers where the object code and Corresponding
+    Source of the work are being offered to the general public at no
+    charge under subsection 6d.
+
+  A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+  A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling.  In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage.  For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product.  A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+  "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source.  The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+  If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information.  But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+  The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed.  Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+  Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+  7. Additional Terms.
+
+  "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law.  If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+  When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it.  (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.)  You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+  Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+    a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+
+    b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+
+    c) Prohibiting misrepresentation of the origin of that material, or
+    requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+
+    d) Limiting the use for publicity purposes of names of licensors or
+    authors of the material; or
+
+    e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+
+    f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions of
+    it) with contractual assumptions of liability to the recipient, for
+    any liability that these contractual assumptions directly impose on
+    those licensors and authors.
+
+  All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10.  If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term.  If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+  If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+  Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+  8. Termination.
+
+  You may not propagate or modify a covered work except as expressly
+provided under this License.  Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+  However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+  Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+  Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+  You are not required to accept this License in order to receive or
+run a copy of the Program.  Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance.  However,
+nothing other than this License grants you permission to propagate or
+modify any covered work.  These actions infringe copyright if you do
+not accept this License.  Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+  Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License.  You are not responsible
+for enforcing compliance by third parties with this License.
+
+  An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations.  If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+  You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License.  For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+  11. Patents.
+
+  A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based.  The
+work thus licensed is called the contributor's "contributor version".
+
+  A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version.  For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+  Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+  In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement).  To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+  If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients.  "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+  If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+  A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License.  You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+  Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+  If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all.  For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+  13. Use with the GNU Affero General Public License.
+
+  Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work.  The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+  14. Revised Versions of this License.
+
+  The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+  Each version is given a distinguishing version number.  If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation.  If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/vendor/github.com/golangci/modinfo/Makefile b/vendor/github.com/golangci/modinfo/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..df91018f1189baaa285dc8620669581691225ad7
--- /dev/null
+++ b/vendor/github.com/golangci/modinfo/Makefile
@@ -0,0 +1,12 @@
+.PHONY: clean check test
+
+default: clean check test
+
+clean:
+	rm -rf dist/ cover.out
+
+test: clean
+	go test -v -cover ./...
+
+check:
+	golangci-lint run
diff --git a/vendor/github.com/golangci/modinfo/module.go b/vendor/github.com/golangci/modinfo/module.go
new file mode 100644
index 0000000000000000000000000000000000000000..ff0b21b9b80bf7f108c31cd00c1a523ecd97d670
--- /dev/null
+++ b/vendor/github.com/golangci/modinfo/module.go
@@ -0,0 +1,157 @@
+package modinfo
+
+import (
+	"bytes"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"reflect"
+	"sort"
+	"strings"
+	"sync"
+
+	"golang.org/x/mod/modfile"
+	"golang.org/x/tools/go/analysis"
+)
+
+type ModInfo struct {
+	Path      string `json:"Path"`
+	Dir       string `json:"Dir"`
+	GoMod     string `json:"GoMod"`
+	GoVersion string `json:"GoVersion"`
+	Main      bool   `json:"Main"`
+}
+
+var (
+	once        sync.Once
+	information []ModInfo
+	errInfo     error
+)
+
+var Analyzer = &analysis.Analyzer{
+	Name:       "modinfo",
+	Doc:        "Module information",
+	URL:        "https://github.com/golangci/modinfo",
+	Run:        runOnce,
+	ResultType: reflect.TypeOf([]ModInfo(nil)),
+}
+
+func runOnce(pass *analysis.Pass) (any, error) {
+	_, ok := os.LookupEnv("MODINFO_DEBUG_DISABLE_ONCE")
+	if ok {
+		return GetModuleInfo(pass)
+	}
+
+	once.Do(func() {
+		information, errInfo = GetModuleInfo(pass)
+	})
+
+	return information, errInfo
+}
+
+// GetModuleInfo gets module information.
+// It always returns one element, except for a workspace (it returns all the modules of the workspace).
+// Based on `go list -m -json` behavior.
+func GetModuleInfo(pass *analysis.Pass) ([]ModInfo, error) {
+	// https://github.com/golang/go/issues/44753#issuecomment-790089020
+	cmd := exec.Command("go", "list", "-m", "-json")
+	for _, file := range pass.Files {
+		name := pass.Fset.File(file.Pos()).Name()
+		if filepath.Ext(name) != ".go" {
+			continue
+		}
+
+		cmd.Dir = filepath.Dir(name)
+		break
+	}
+
+	out, err := cmd.Output()
+	if err != nil {
+		return nil, fmt.Errorf("command go list: %w: %s", err, string(out))
+	}
+
+	var infos []ModInfo
+
+	for dec := json.NewDecoder(bytes.NewBuffer(out)); dec.More(); {
+		var v ModInfo
+		if err := dec.Decode(&v); err != nil {
+			return nil, fmt.Errorf("unmarshaling error: %w: %s", err, string(out))
+		}
+
+		if v.GoMod == "" {
+			return nil, errors.New("working directory is not part of a module")
+		}
+
+		if !v.Main || v.Dir == "" {
+			continue
+		}
+
+		infos = append(infos, v)
+	}
+
+	if len(infos) == 0 {
+		return nil, errors.New("go.mod file not found")
+	}
+
+	sort.Slice(infos, func(i, j int) bool {
+		return len(infos[i].Path) > len(infos[j].Path)
+	})
+
+	return infos, nil
+}
+
+// FindModuleFromPass finds the module related to the files of the pass.
+func FindModuleFromPass(pass *analysis.Pass) (ModInfo, error) {
+	infos, ok := pass.ResultOf[Analyzer].([]ModInfo)
+	if !ok {
+		return ModInfo{}, errors.New("no modinfo analyzer result")
+	}
+
+	var name string
+	for _, file := range pass.Files {
+		f := pass.Fset.File(file.Pos()).Name()
+		if filepath.Ext(f) != ".go" {
+			continue
+		}
+
+		name = f
+		break
+	}
+
+	// no Go file found in analysis pass
+	if name == "" {
+		name, _ = os.Getwd()
+	}
+
+	for _, info := range infos {
+		if !strings.HasPrefix(name, info.Dir) {
+			continue
+		}
+		return info, nil
+	}
+
+	return ModInfo{}, errors.New("module information not found")
+}
+
+// ReadModuleFileFromPass reads the `go.mod` file from the pass result.
+func ReadModuleFileFromPass(pass *analysis.Pass) (*modfile.File, error) {
+	info, err := FindModuleFromPass(pass)
+	if err != nil {
+		return nil, err
+	}
+
+	return ReadModuleFile(info)
+}
+
+// ReadModuleFile reads the `go.mod` file.
+func ReadModuleFile(info ModInfo) (*modfile.File, error) {
+	raw, err := os.ReadFile(info.GoMod)
+	if err != nil {
+		return nil, fmt.Errorf("reading go.mod file: %w", err)
+	}
+
+	return modfile.Parse("go.mod", raw, nil)
+}
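+
+// For reference (illustrative output only; the paths are made up), `go list -m -json`
+// emits a stream of JSON objects like the one below, which GetModuleInfo decodes
+// into ModInfo values:
+//
+//	{
+//		"Path": "example.com/mymodule",
+//		"Main": true,
+//		"Dir": "/home/user/mymodule",
+//		"GoMod": "/home/user/mymodule/go.mod",
+//		"GoVersion": "1.22"
+//	}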
diff --git a/vendor/github.com/golangci/modinfo/readme.md b/vendor/github.com/golangci/modinfo/readme.md
new file mode 100644
index 0000000000000000000000000000000000000000..2175de8eb4110baace632784f30f2b4750fba7ad
--- /dev/null
+++ b/vendor/github.com/golangci/modinfo/readme.md
@@ -0,0 +1,73 @@
+# modinfo
+
+This module contains:
+- an analyzer that returns module information.
+- methods to find and read the `go.mod` file.
+
+## Examples
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/golangci/modinfo"
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+)
+
+var Analyzer = &analysis.Analyzer{
+    Name: "example",
+    Doc:  "Example",
+    Run: func(pass *analysis.Pass) (interface{}, error) {
+        file, err := modinfo.ReadModuleFileFromPass(pass)
+        if err != nil {
+          return nil, err
+        }
+
+        fmt.Println("go.mod", file)
+
+        // TODO
+
+        return nil, nil
+    },
+    Requires: []*analysis.Analyzer{
+        inspect.Analyzer,
+        modinfo.Analyzer,
+    },
+}
+```
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/golangci/modinfo"
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+)
+
+var Analyzer = &analysis.Analyzer{
+    Name: "example",
+    Doc:  "Example",
+    Run: func(pass *analysis.Pass) (interface{}, error) {
+        info, err := modinfo.FindModuleFromPass(pass)
+        if err != nil {
+          return nil, err
+        }
+
+        fmt.Println("Module", info.Dir)
+
+        // TODO
+
+        return nil, nil
+    },
+    Requires: []*analysis.Analyzer{
+        inspect.Analyzer,
+        modinfo.Analyzer,
+    },
+}
+```
diff --git a/vendor/github.com/golangci/check/LICENSE b/vendor/github.com/golangci/plugin-module-register/LICENSE
similarity index 98%
rename from vendor/github.com/golangci/check/LICENSE
rename to vendor/github.com/golangci/plugin-module-register/LICENSE
index 5a1774b8e682e862b9ac5eba1552ce1b9169fe6f..e72bfddabc15be5718a7cc061ac10e47741d8219 100644
--- a/vendor/github.com/golangci/check/LICENSE
+++ b/vendor/github.com/golangci/plugin-module-register/LICENSE
@@ -1,7 +1,7 @@
-GNU GENERAL PUBLIC LICENSE
+                    GNU GENERAL PUBLIC LICENSE
                        Version 3, 29 June 2007
 
- Copyright (C) 2007 Free Software Foundation, Inc. {http://fsf.org/}
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
  Everyone is permitted to copy and distribute verbatim copies
  of this license document, but changing it is not allowed.
 
@@ -631,8 +631,8 @@ to attach them to the start of each source file to most effectively
 state the exclusion of warranty; and each file should have at least
 the "copyright" line and a pointer to where the full notice is found.
 
-    {one line to give the program's name and a brief idea of what it does.}
-    Copyright (C) {year}  {name of author}
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
 
     This program is free software: you can redistribute it and/or modify
     it under the terms of the GNU General Public License as published by
@@ -645,14 +645,14 @@ the "copyright" line and a pointer to where the full notice is found.
     GNU General Public License for more details.
 
     You should have received a copy of the GNU General Public License
-    along with this program.  If not, see {http://www.gnu.org/licenses/}.
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 Also add information on how to contact you by electronic and paper mail.
 
   If the program does terminal interaction, make it output a short
 notice like this when it starts in an interactive mode:
 
-    opennota  Copyright (C) 2013  opennota
+    <program>  Copyright (C) <year>  <name of author>
     This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
     This is free software, and you are welcome to redistribute it
     under certain conditions; type `show c' for details.
@@ -664,11 +664,11 @@ might be different; for a GUI interface, you would use an "about box".
   You should also get your employer (if you work as a programmer) or school,
 if any, to sign a "copyright disclaimer" for the program, if necessary.
 For more information on this, and how to apply and follow the GNU GPL, see
-{http://www.gnu.org/licenses/}.
+<https://www.gnu.org/licenses/>.
 
   The GNU General Public License does not permit incorporating your program
 into proprietary programs.  If your program is a subroutine library, you
 may consider it more useful to permit linking proprietary applications with
 the library.  If this is what you want to do, use the GNU Lesser General
 Public License instead of this License.  But first, please read
-{http://www.gnu.org/philosophy/why-not-lgpl.html}.
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
\ No newline at end of file
diff --git a/vendor/github.com/golangci/plugin-module-register/register/register.go b/vendor/github.com/golangci/plugin-module-register/register/register.go
new file mode 100644
index 0000000000000000000000000000000000000000..72ad7f46f209732c94a25967108055570b2f4c32
--- /dev/null
+++ b/vendor/github.com/golangci/plugin-module-register/register/register.go
@@ -0,0 +1,73 @@
+package register
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"sync"
+
+	"golang.org/x/tools/go/analysis"
+)
+
+// Plugin load modes.
+const (
+	LoadModeSyntax    = "syntax"
+	LoadModeTypesInfo = "typesinfo"
+)
+
+var (
+	pluginsMu sync.RWMutex
+	plugins   = make(map[string]NewPlugin)
+)
+
+// LinterPlugin is the interface of the plugin structure.
+type LinterPlugin interface {
+	BuildAnalyzers() ([]*analysis.Analyzer, error)
+	GetLoadMode() string
+}
+
+// NewPlugin is the contract of the constructor of a plugin.
+type NewPlugin func(conf any) (LinterPlugin, error)
+
+// Plugin registers a plugin.
+func Plugin(name string, p NewPlugin) {
+	pluginsMu.Lock()
+
+	plugins[name] = p
+
+	pluginsMu.Unlock()
+}
+
+// GetPlugin gets a plugin by name.
+func GetPlugin(name string) (NewPlugin, error) {
+	pluginsMu.Lock()
+	defer pluginsMu.Unlock()
+
+	p, ok := plugins[name]
+	if !ok {
+		return nil, fmt.Errorf("plugin %q not found", name)
+	}
+
+	return p, nil
+}
+
+// DecodeSettings decodes settings from golangci-lint into the structure of the plugin configuration.
+func DecodeSettings[T any](rawSettings any) (T, error) {
+	var buffer bytes.Buffer
+
+	if err := json.NewEncoder(&buffer).Encode(rawSettings); err != nil {
+		var zero T
+		return zero, fmt.Errorf("encoding settings: %w", err)
+	}
+
+	decoder := json.NewDecoder(&buffer)
+	decoder.DisallowUnknownFields()
+
+	s := new(T)
+	if err := decoder.Decode(s); err != nil {
+		var zero T
+		return zero, fmt.Errorf("decoding settings: %w", err)
+	}
+
+	return *s, nil
+}
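+
+// Illustrative sketch (not part of the upstream package): a plugin module that
+// imports this package would typically register its constructor in an init
+// function and decode its settings with DecodeSettings. The names below are
+// placeholders, and golang.org/x/tools/go/analysis is assumed to be imported
+// as analysis.
+//
+//	func init() {
+//		register.Plugin("example", New)
+//	}
+//
+//	type Settings struct {
+//		Threshold int `json:"threshold"`
+//	}
+//
+//	type ExamplePlugin struct {
+//		settings Settings
+//	}
+//
+//	func New(conf any) (register.LinterPlugin, error) {
+//		settings, err := register.DecodeSettings[Settings](conf)
+//		if err != nil {
+//			return nil, err
+//		}
+//		return &ExamplePlugin{settings: settings}, nil
+//	}
+//
+//	func (p *ExamplePlugin) BuildAnalyzers() ([]*analysis.Analyzer, error) {
+//		return nil, nil
+//	}
+//
+//	func (p *ExamplePlugin) GetLoadMode() string {
+//		return register.LoadModeSyntax
+//	}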
diff --git a/vendor/github.com/golangci/revgrep/.golangci.yml b/vendor/github.com/golangci/revgrep/.golangci.yml
index b8ed6204feb6148b36fe4096e1752248937eadb8..5239720ac672428d9b811e2f5b7db840752e6f33 100644
--- a/vendor/github.com/golangci/revgrep/.golangci.yml
+++ b/vendor/github.com/golangci/revgrep/.golangci.yml
@@ -3,7 +3,6 @@ run:
 
 linters-settings:
   govet:
-    check-shadowing: true
     enable-all: true
     disable:
       - fieldalignment
@@ -28,18 +27,25 @@ linters-settings:
 linters:
   enable-all: true
   disable:
-    - maligned # Deprecated
-    - scopelint # Deprecated
-    - golint # Deprecated
-    - interfacer # Deprecated
-    - exhaustivestruct # Deprecated
+    - deadcode # deprecated
+    - exhaustivestruct # deprecated
+    - golint # deprecated
+    - ifshort # deprecated
+    - interfacer # deprecated
+    - maligned # deprecated
+    - nosnakecase # deprecated
+    - scopelint # deprecated
+    - structcheck # deprecated
+    - varcheck # deprecated
     - cyclop # duplicate of gocyclo
+    - sqlclosecheck # not relevant (SQL)
+    - rowserrcheck # not relevant (SQL)
+    - execinquery # not relevant (SQL)
     - dupl
     - lll
     - nestif
     - gomnd
     - goerr113
-#    - wrapcheck
     - nlreturn
     - wsl
     - exhaustive
@@ -54,10 +60,11 @@ linters:
     - nosnakecase
     - nonamedreturns
     - nilerr
+    - depguard
 
 issues:
   exclude-use-default: false
-  max-per-linter: 0
+  max-issues-per-linter: 0
   max-same-issues: 0
   exclude:
     - 'ST1000: at least one file in a package should have a package comment'
diff --git a/vendor/github.com/golangci/revgrep/revgrep.go b/vendor/github.com/golangci/revgrep/revgrep.go
index 4b990fa048b17385eb13e0f87b10f015d659a1a6..1ef81b203a1e1fd123dae00692f7ca9fac6113ad 100644
--- a/vendor/github.com/golangci/revgrep/revgrep.go
+++ b/vendor/github.com/golangci/revgrep/revgrep.go
@@ -1,3 +1,4 @@
+// Package revgrep filters the output of static analysis tools to only the lines changed based on a commit reference.
 package revgrep
 
 import (
@@ -17,31 +18,26 @@ import (
 // Checker provides APIs to filter static analysis tools to specific commits,
 // such as showing only issues since last commit.
 type Checker struct {
-	// Patch file (unified) to read to detect lines being changed, if nil revgrep
-	// will attempt to detect the VCS and generate an appropriate patch. Auto
-	// detection will search for uncommitted changes first, if none found, will
-	// generate a patch from last committed change. File paths within patches
-	// must be relative to current working directory.
+	// Patch file (unified) to read to detect lines being changed,
+	// if nil revgrep will attempt to detect the VCS and generate an appropriate patch.
+	// Auto-detection will search for uncommitted changes first,
+	// if none found, will generate a patch from last committed change.
+	// File paths within patches must be relative to current working directory.
 	Patch io.Reader
-	// NewFiles is a list of file names (with absolute paths) where the entire
-	// contents of the file is new.
+	// NewFiles is a list of file names (with absolute paths) where the entire contents of the file is new.
 	NewFiles []string
 	// Debug sets the debug writer for additional output.
 	Debug io.Writer
-	// RevisionFrom check revision starting at, leave blank for auto detection
-	// ignored if patch is set.
+	// RevisionFrom is the revision to start checking from; leave blank for auto-detection. Ignored if Patch is set.
 	RevisionFrom string
-	// WholeFiles indicates that the user wishes to see all issues that comes up
-	// anywhere in any file that has been changed in this revision or patch.
+	// WholeFiles indicates that the user wishes to see all issues that come up anywhere in any file that has been changed in this revision or patch.
 	WholeFiles bool
-	// RevisionTo checks revision finishing at, leave blank for auto detection
-	// ignored if patch is set.
+	// RevisionTo is the revision to finish checking at; leave blank for auto-detection. Ignored if Patch is set.
 	RevisionTo string
 	// Regexp to match path, line number, optional column number, and message.
 	Regexp string
-	// AbsPath is used to make an absolute path of an issue's filename to be
-	// relative in order to match patch file. If not set, current working
-	// directory is used.
+	// AbsPath is used to make an issue's absolute filename relative in order to match the patch file.
+	// If not set, the current working directory is used.
 	AbsPath string
 
 	// Calculated changes for next calls to IsNewIssue
@@ -56,9 +52,7 @@ type Issue struct {
 	LineNo int
 	// ColNo is the column number or 0 if none could be parsed.
 	ColNo int
-	// HunkPos is position from file's first @@, for new files this will be the
-	// line number.
-	//
+	// HunkPos is the position from the file's first @@; for new files this will be the line number.
 	// See also: https://developer.github.com/v3/pulls/comments/#create-a-comment
 	HunkPos int
 	// Issue text as it appeared from the tool.
@@ -135,16 +129,14 @@ func (c *Checker) IsNewIssue(i InputIssue) (hunkPos int, isNew bool) {
 	return 0, false
 }
 
-// Check scans reader and writes any lines to writer that have been added in
-// Checker.Patch.
+// Check scans reader and writes any lines to writer that have been added in Checker.Patch.
 //
 // Returns the issues written to writer when no error occurs.
 //
-// If no VCS could be found or other VCS errors occur, all issues are written
-// to writer and an error is returned.
+// If no VCS could be found or other VCS errors occur,
+// all issues are written to writer and an error is returned.
 //
-// File paths in reader must be relative to current working directory or
-// absolute.
+// File paths in reader must be relative to current working directory or absolute.
 func (c *Checker) Check(reader io.Reader, writer io.Writer) (issues []Issue, err error) {
 	returnErr := c.Prepare()
 	writeAll := returnErr != nil
@@ -265,8 +257,7 @@ func (c *Checker) preparePatch() error {
 }
 
 // linesChanges returns a map of file names to line numbers being changed.
-// If key is nil, the file has been recently added, else it contains a slice
-// of positions that have been added.
+// If key is nil, the file has been recently added, else it contains a slice of positions that have been added.
 func (c *Checker) linesChanged() map[string][]pos {
 	type state struct {
 		file    string
@@ -343,17 +334,15 @@ func (c *Checker) linesChanged() map[string][]pos {
 	return changes
 }
 
-// GitPatch returns a patch from a git repository, if no git repository was
-// was found and no errors occurred, nil is returned, else an error is returned
-// revisionFrom and revisionTo defines the git diff parameters, if left blank
-// and there are unstaged changes or untracked files, only those will be returned
-// else only check changes since HEAD~. If revisionFrom is set but revisionTo
-// is not, untracked files will be included, to exclude untracked files set
-// revisionTo to HEAD~. It's incorrect to specify revisionTo without a
-// revisionFrom.
+// GitPatch returns a patch from a git repository.
+// If no git repository was found and no errors occurred, nil is returned;
+// otherwise an error is returned.
+// revisionFrom and revisionTo define the git diff parameters:
+// if left blank and there are unstaged changes or untracked files,
+// only those will be returned; otherwise only changes since HEAD~ are checked.
+// If revisionFrom is set but revisionTo is not,
+// untracked files will be included; to exclude untracked files set revisionTo to HEAD~.
+// It's incorrect to specify revisionTo without a revisionFrom.
 func GitPatch(revisionFrom, revisionTo string) (io.Reader, []string, error) {
-	var patch bytes.Buffer
-
 	// check if git repo exists
 	if err := exec.Command("git", "status", "--porcelain").Run(); err != nil {
 		// don't return an error, we assume the error is not repo exists
@@ -370,53 +359,125 @@ func GitPatch(revisionFrom, revisionTo string) (io.Reader, []string, error) {
 	for _, file := range bytes.Split(ls, []byte{'\n'}) {
 		if len(file) == 0 || bytes.HasSuffix(file, []byte{'/'}) {
 			// ls-files was sometimes showing directories when they were ignored
-			// I couldn't create a test case for this as I couldn't reproduce correctly
-			// for the moment, just exclude files with trailing /
+			// I couldn't create a test case for this as I couldn't reproduce it correctly;
+			// for the moment, just exclude files with a trailing /
 			continue
 		}
+
 		newFiles = append(newFiles, string(file))
 	}
 
 	if revisionFrom != "" {
-		cmd := exec.Command("git", "diff", "--color=never", "--relative", revisionFrom)
+		args := []string{revisionFrom}
+
 		if revisionTo != "" {
-			cmd.Args = append(cmd.Args, revisionTo)
+			args = append(args, revisionTo)
 		}
-		cmd.Args = append(cmd.Args, "--")
 
-		cmd.Stdout = &patch
-		if err := cmd.Run(); err != nil {
-			return nil, nil, fmt.Errorf("error executing git diff %q %q: %w", revisionFrom, revisionTo, err)
+		args = append(args, "--")
+
+		patch, errDiff := gitDiff(args...)
+		if errDiff != nil {
+			return nil, nil, errDiff
 		}
 
 		if revisionTo == "" {
-			return &patch, newFiles, nil
+			return patch, newFiles, nil
 		}
-		return &patch, nil, nil
+
+		return patch, nil, nil
 	}
 
 	// make a patch for unstaged changes
-	// use --no-prefix to remove b/ given: +++ b/main.go
-	cmd := exec.Command("git", "diff", "--color=never", "--relative", "--")
-	cmd.Stdout = &patch
-	if err := cmd.Run(); err != nil {
-		return nil, nil, fmt.Errorf("error executing git diff: %w", err)
+	patch, err := gitDiff("--")
+	if err != nil {
+		return nil, nil, err
 	}
+
 	unstaged := patch.Len() > 0
 
-	// If there's unstaged changes OR untracked changes (or both), then this is
-	// a suitable patch
+	// If there are unstaged changes OR untracked changes (or both),
+	// then this is a suitable patch.
 	if unstaged || newFiles != nil {
-		return &patch, newFiles, nil
+		return patch, newFiles, nil
 	}
 
 	// check for changes in recent commit
+	patch, err = gitDiff("HEAD~", "--")
+	if err != nil {
+		return nil, nil, err
+	}
+
+	return patch, nil, nil
+}
+
+func gitDiff(extraArgs ...string) (*bytes.Buffer, error) {
+	cmd := exec.Command("git", "diff", "--color=never", "--no-ext-diff")
+
+	if isSupportedByGit(2, 41, 0) {
+		cmd.Args = append(cmd.Args, "--default-prefix")
+	}
+
+	cmd.Args = append(cmd.Args, "--relative")
+	cmd.Args = append(cmd.Args, extraArgs...)
+
+	patch := new(bytes.Buffer)
+	errBuff := new(bytes.Buffer)
+
+	cmd.Stdout = patch
+	cmd.Stderr = errBuff
 
-	cmd = exec.Command("git", "diff", "--color=never", "--relative", "HEAD~", "--")
-	cmd.Stdout = &patch
 	if err := cmd.Run(); err != nil {
-		return nil, nil, fmt.Errorf("error executing git diff HEAD~: %w", err)
+		return nil, fmt.Errorf("error executing %q: %w: %w", strings.Join(cmd.Args, " "), err, readAsError(errBuff))
+	}
+
+	return patch, nil
+}
+
+func readAsError(buff io.Reader) error {
+	output, err := io.ReadAll(buff)
+	if err != nil {
+		return fmt.Errorf("read stderr: %w", err)
+	}
+
+	return errors.New(string(output))
+}
+
+func isSupportedByGit(major, minor, patch int) bool {
+	output, err := exec.Command("git", "version").CombinedOutput()
+	if err != nil {
+		return false
+	}
+
+	parts := bytes.Split(bytes.TrimSpace(output), []byte(" "))
+	if len(parts) < 3 {
+		return false
+	}
+
+	v := string(parts[2])
+	if v == "" {
+		return false
+	}
+
+	vp := regexp.MustCompile(`^(\d+)\.(\d+)(?:\.(\d+))?.*$`).FindStringSubmatch(v)
+	if len(vp) < 4 {
+		return false
+	}
+
+	currentMajor, err := strconv.Atoi(vp[1])
+	if err != nil {
+		return false
+	}
+
+	currentMinor, err := strconv.Atoi(vp[2])
+	if err != nil {
+		return false
+	}
+
+	currentPatch, err := strconv.Atoi(vp[3])
+	if err != nil {
+		return false
 	}
 
-	return &patch, nil, nil
+	return currentMajor*1_000_000_000+currentMinor*1_000_000+currentPatch*1_000 >= major*1_000_000_000+minor*1_000_000+patch*1_000
 }
diff --git a/vendor/github.com/golangci/unconvert/README b/vendor/github.com/golangci/unconvert/README
deleted file mode 100644
index dbaea4f57212c2696e2fc12b54e78210c21fc31f..0000000000000000000000000000000000000000
--- a/vendor/github.com/golangci/unconvert/README
+++ /dev/null
@@ -1,36 +0,0 @@
-About:
-
-The unconvert program analyzes Go packages to identify unnecessary
-type conversions; i.e., expressions T(x) where x already has type T.
-
-Install:
-
-    $ go get github.com/mdempsky/unconvert
-
-Usage:
-
-    $ unconvert -v bytes fmt
-    GOROOT/src/bytes/reader.go:117:14: unnecessary conversion
-                    abs = int64(r.i) + offset
-                               ^
-    GOROOT/src/fmt/print.go:411:21: unnecessary conversion
-            p.fmt.integer(int64(v), 16, unsigned, udigits)
-                               ^
-
-Flags:
-
-Using the -v flag, unconvert will also print the source line and a
-caret to indicate the unnecessary conversion's position therein.
-
-Using the -apply flag, unconvert will rewrite the Go source files
-without the unnecessary type conversions.
-
-Using the -all flag, unconvert will analyze the Go packages under all
-possible GOOS/GOARCH combinations, and only identify conversions that
-are unnecessary in all cases.
-
-E.g., syscall.Timespec's Sec and Nsec fields are int64 under
-linux/amd64 but int32 under linux/386.  An int64(ts.Sec) conversion
-that appears in a linux/amd64-only file will be identified as
-unnecessary, but it will be preserved if it occurs in a file that's
-compiled for both linux/amd64 and linux/386.
diff --git a/vendor/github.com/golangci/unconvert/README.md b/vendor/github.com/golangci/unconvert/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..e9230c2183e69c6a324550f062c8543a0af5ee98
--- /dev/null
+++ b/vendor/github.com/golangci/unconvert/README.md
@@ -0,0 +1,6 @@
+A fork of [unconvert](https://github.com/mdempsky/unconvert) made usable as a library.
+
+The fork-specific elements are inside the file `golangci.go`.
+
+The only modification to the file `unconvert.go` is the removal of the global variables for the flags.
+The tests will never work because of that, so the CI is disabled.
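+
+As a rough sketch (not from the upstream documentation), the exported `Run`
+helper defined in `golangci.go` can be called from an `analysis.Analyzer`; the
+analyzer name and the reporting below are placeholders:
+
+```go
+package example
+
+import (
+	"fmt"
+
+	"github.com/golangci/unconvert"
+	"golang.org/x/tools/go/analysis"
+)
+
+var Analyzer = &analysis.Analyzer{
+	Name: "unconvertexample",
+	Doc:  "reports unnecessary type conversions",
+	Run: func(pass *analysis.Pass) (interface{}, error) {
+		// false, false correspond to the package's default fast-math and safe settings.
+		for _, position := range unconvert.Run(pass, false, false) {
+			fmt.Printf("%s: unnecessary conversion\n", position)
+		}
+		return nil, nil
+	},
+}
+```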
diff --git a/vendor/github.com/golangci/unconvert/golangci.go b/vendor/github.com/golangci/unconvert/golangci.go
new file mode 100644
index 0000000000000000000000000000000000000000..306c44e5ec1dc3c72ae1eceaaeaff1909ebdbdfd
--- /dev/null
+++ b/vendor/github.com/golangci/unconvert/golangci.go
@@ -0,0 +1,78 @@
+package unconvert
+
+import (
+	"go/ast"
+	"go/token"
+	"strings"
+	"sync"
+
+	"golang.org/x/tools/go/analysis"
+)
+
+// Transformed version of the original unconvert flags section.
+// The section has been removed from `unconvert.go`.
+var (
+	flagAll        = pointer(false)
+	flagApply      = pointer(false)
+	flagCPUProfile = pointer("")
+	flagSafe       = pointer(false)
+	flagV          = pointer(false)
+	flagTests      = pointer(true)
+	flagFastMath   = pointer(false)
+	flagTags       = pointer("")
+	flagConfigs    = pointer("")
+)
+
+func pointer[T string | int | int32 | int64 | bool](v T) *T { return &v }
+
+func Run(pass *analysis.Pass, fastMath, safe bool) []token.Position {
+	type res struct {
+		file  string
+		edits editSet
+	}
+
+	flagFastMath = pointer(fastMath)
+	flagSafe = pointer(safe)
+
+	ch := make(chan res)
+	var wg sync.WaitGroup
+	for _, file := range pass.Files {
+		file := file
+
+		tokenFile := pass.Fset.File(file.Package)
+		filename := tokenFile.Position(file.Package).Filename
+
+		// Hack to recognize _cgo_gotypes.go.
+		if strings.HasSuffix(filename, "-d") || strings.HasSuffix(filename, "/_cgo_gotypes.go") {
+			continue
+		}
+
+		wg.Add(1)
+		go func() {
+			defer wg.Done()
+
+			v := visitor{info: pass.TypesInfo, file: tokenFile, edits: make(editSet)}
+			ast.Walk(&v, file)
+
+			ch <- res{filename, v.edits}
+		}()
+	}
+	go func() {
+		wg.Wait()
+		close(ch)
+	}()
+
+	m := make(fileToEditSet)
+	for r := range ch {
+		m[r.file] = r.edits
+	}
+
+	var positions []token.Position
+	for _, edit := range m {
+		for position := range edit {
+			positions = append(positions, position)
+		}
+	}
+
+	return positions
+}
diff --git a/vendor/github.com/golangci/unconvert/unconvert.go b/vendor/github.com/golangci/unconvert/unconvert.go
index 38737d39f72505264b74f0281c323f73a80ebcbf..222aeadf880bfaa78d13640f12d35db9a329d074 100644
--- a/vendor/github.com/golangci/unconvert/unconvert.go
+++ b/vendor/github.com/golangci/unconvert/unconvert.go
@@ -2,15 +2,15 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-// Unconvert removes redundant type conversions from Go packages.
+// Package unconvert removes redundant type conversions from Go packages.
 package unconvert
 
 import (
 	"bytes"
+	"encoding/json"
 	"flag"
 	"fmt"
 	"go/ast"
-	"go/build"
 	"go/format"
 	"go/parser"
 	"go/token"
@@ -18,15 +18,16 @@ import (
 	"io/ioutil"
 	"log"
 	"os"
+	"os/exec"
 	"reflect"
 	"runtime/pprof"
 	"sort"
+	"strings"
 	"sync"
 	"unicode"
 
-	"github.com/kisielk/gotool"
 	"golang.org/x/text/width"
-	"golang.org/x/tools/go/loader"
+	"golang.org/x/tools/go/packages"
 )
 
 // Unnecessary conversions are identified by the position
@@ -34,6 +35,31 @@ import (
 
 type editSet map[token.Position]struct{}
 
+func (e editSet) add(pos token.Position) {
+	pos.Offset = 0
+	e[pos] = struct{}{}
+}
+
+func (e editSet) has(pos token.Position) bool {
+	pos.Offset = 0
+	_, ok := e[pos]
+	return ok
+}
+
+func (e editSet) remove(pos token.Position) {
+	pos.Offset = 0
+	delete(e, pos)
+}
+
+// intersect removes positions from e that are not present in x.
+func (e editSet) intersect(x editSet) {
+	for pos := range e {
+		if _, ok := x[pos]; !ok {
+			delete(e, pos)
+		}
+	}
+}
+
 type fileToEditSet map[string]editSet
 
 func apply(file string, edits editSet) {
@@ -97,11 +123,11 @@ func (e *editor) rewrite(f *ast.Expr) {
 	}
 
 	pos := e.file.Position(call.Lparen)
-	if _, ok := e.edits[pos]; !ok {
+	if !e.edits.has(pos) {
 		return
 	}
 	*f = call.Args[0]
-	delete(e.edits, pos)
+	e.edits.remove(pos)
 }
 
 var (
@@ -161,21 +187,12 @@ func rub(buf []byte) []byte {
 	return res.Bytes()
 }
 
-var (
-	flagAll        = flag.Bool("unconvert.all", false, "type check all GOOS and GOARCH combinations")
-	flagApply      = flag.Bool("unconvert.apply", false, "apply edits to source files")
-	flagCPUProfile = flag.String("unconvert.cpuprofile", "", "write CPU profile to file")
-	// TODO(mdempsky): Better description and maybe flag name.
-	flagSafe = flag.Bool("unconvert.safe", false, "be more conservative (experimental)")
-	flagV    = flag.Bool("unconvert.v", false, "verbose output")
-)
-
 func usage() {
 	fmt.Fprintf(os.Stderr, "usage: unconvert [flags] [package ...]\n")
 	flag.PrintDefaults()
 }
 
-func nomain() {
+func main() {
 	flag.Usage = usage
 	flag.Parse()
 
@@ -188,18 +205,29 @@ func nomain() {
 		defer pprof.StopCPUProfile()
 	}
 
-	importPaths := gotool.ImportPaths(flag.Args())
-	if len(importPaths) == 0 {
-		return
-	}
+	patterns := flag.Args() // 0 or more import path patterns.
+
+	var configs [][]string
+	if *flagConfigs != "" {
+		if os.Getenv("UNCONVERT_CONFIGS_EXPERIMENT") != "1" {
+			fmt.Println("WARNING: -configs is experimental and subject to change without notice.")
+			fmt.Println("Please comment at https://github.com/mdempsky/unconvert/issues/26")
+			fmt.Println("if you'd like to rely on this interface.")
+			fmt.Println("(Set UNCONVERT_CONFIGS_EXPERIMENT=1 to silence this warning.)")
+			fmt.Println()
+		}
 
-	var m fileToEditSet
-	if *flagAll {
-		m = mergeEdits(importPaths)
+		if err := json.Unmarshal([]byte(*flagConfigs), &configs); err != nil {
+			log.Fatal(err)
+		}
+	} else if *flagAll {
+		configs = allConfigs()
 	} else {
-		m = computeEdits(importPaths, build.Default.GOOS, build.Default.GOARCH, build.Default.CgoEnabled)
+		configs = [][]string{nil}
 	}
 
+	m := mergeEdits(patterns, configs)
+
 	if *flagApply {
 		var wg sync.WaitGroup
 		for f, e := range m {
@@ -226,69 +254,36 @@ func nomain() {
 	}
 }
 
-func Run(prog *loader.Program) []token.Position {
-	m := computeEditsFromProg(prog)
-	var conversions []token.Position
-	for _, positions := range m {
-		for pos := range positions {
-			conversions = append(conversions, pos)
-		}
+func allConfigs() [][]string {
+	out, err := exec.Command("go", "tool", "dist", "list", "-json").Output()
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	var platforms []struct {
+		GOOS, GOARCH string
+	}
+	err = json.Unmarshal(out, &platforms)
+	if err != nil {
+		log.Fatal(err)
 	}
-	return conversions
-}
 
-var plats = [...]struct {
-	goos, goarch string
-}{
-	// TODO(mdempsky): buildall.bash also builds linux-386-387 and linux-arm-arm5.
-	{"android", "386"},
-	{"android", "amd64"},
-	{"android", "arm"},
-	{"android", "arm64"},
-	{"darwin", "386"},
-	{"darwin", "amd64"},
-	{"darwin", "arm"},
-	{"darwin", "arm64"},
-	{"dragonfly", "amd64"},
-	{"freebsd", "386"},
-	{"freebsd", "amd64"},
-	{"freebsd", "arm"},
-	{"linux", "386"},
-	{"linux", "amd64"},
-	{"linux", "arm"},
-	{"linux", "arm64"},
-	{"linux", "mips64"},
-	{"linux", "mips64le"},
-	{"linux", "ppc64"},
-	{"linux", "ppc64le"},
-	{"linux", "s390x"},
-	{"nacl", "386"},
-	{"nacl", "amd64p32"},
-	{"nacl", "arm"},
-	{"netbsd", "386"},
-	{"netbsd", "amd64"},
-	{"netbsd", "arm"},
-	{"openbsd", "386"},
-	{"openbsd", "amd64"},
-	{"openbsd", "arm"},
-	{"plan9", "386"},
-	{"plan9", "amd64"},
-	{"plan9", "arm"},
-	{"solaris", "amd64"},
-	{"windows", "386"},
-	{"windows", "amd64"},
+	var res [][]string
+	for _, platform := range platforms {
+		res = append(res, []string{
+			"GOOS=" + platform.GOOS,
+			"GOARCH=" + platform.GOARCH,
+		})
+	}
+	return res
 }
 
-func mergeEdits(importPaths []string) fileToEditSet {
+func mergeEdits(patterns []string, configs [][]string) fileToEditSet {
 	m := make(fileToEditSet)
-	for _, plat := range plats {
-		for f, e := range computeEdits(importPaths, plat.goos, plat.goarch, false) {
+	for _, config := range configs {
+		for f, e := range computeEdits(patterns, config) {
 			if e0, ok := m[f]; ok {
-				for k := range e0 {
-					if _, ok := e[k]; !ok {
-						delete(e0, k)
-					}
-				}
+				e0.intersect(e)
 			} else {
 				m[f] = e
 			}
@@ -297,48 +292,48 @@ func mergeEdits(importPaths []string) fileToEditSet {
 	return m
 }
 
-type noImporter struct{}
-
-func (noImporter) Import(path string) (*types.Package, error) {
-	panic("golang.org/x/tools/go/loader said this wouldn't be called")
-}
-
-func computeEdits(importPaths []string, os, arch string, cgoEnabled bool) fileToEditSet {
-	ctxt := build.Default
-	ctxt.GOOS = os
-	ctxt.GOARCH = arch
-	ctxt.CgoEnabled = cgoEnabled
-
-	var conf loader.Config
-	conf.Build = &ctxt
-	conf.TypeChecker.Importer = noImporter{}
-	for _, importPath := range importPaths {
-		conf.Import(importPath)
+func computeEdits(patterns []string, config []string) fileToEditSet {
+	// TODO(mdempsky): Move into config?
+	var buildFlags []string
+	if *flagTags != "" {
+		buildFlags = []string{"-tags", *flagTags}
 	}
-	prog, err := conf.Load()
+
+	pkgs, err := packages.Load(&packages.Config{
+		Mode:       packages.NeedSyntax | packages.NeedTypes | packages.NeedTypesInfo,
+		Env:        append(os.Environ(), config...),
+		BuildFlags: buildFlags,
+		Tests:      *flagTests,
+	}, patterns...)
 	if err != nil {
 		log.Fatal(err)
 	}
+	packages.PrintErrors(pkgs)
 
-	return computeEditsFromProg(prog)
-}
-
-func computeEditsFromProg(prog *loader.Program) fileToEditSet {
 	type res struct {
 		file  string
 		edits editSet
 	}
+
 	ch := make(chan res)
 	var wg sync.WaitGroup
-	for _, pkg := range prog.InitialPackages() {
-		for _, file := range pkg.Files {
+	for _, pkg := range pkgs {
+		for _, file := range pkg.Syntax {
 			pkg, file := pkg, file
+			tokenFile := pkg.Fset.File(file.Package)
+			filename := tokenFile.Position(file.Package).Filename
+
+			// Hack to recognize _cgo_gotypes.go.
+			if strings.HasSuffix(filename, "-d") || strings.HasSuffix(filename, "/_cgo_gotypes.go") {
+				continue
+			}
+
 			wg.Add(1)
 			go func() {
 				defer wg.Done()
-				v := visitor{pkg: pkg, file: prog.Fset.File(file.Package), edits: make(editSet)}
+				v := visitor{info: pkg.TypesInfo, file: tokenFile, edits: make(editSet)}
 				ast.Walk(&v, file)
-				ch <- res{v.file.Name(), v.edits}
+				ch <- res{filename, v.edits}
 			}()
 		}
 	}
@@ -360,7 +355,7 @@ type step struct {
 }
 
 type visitor struct {
-	pkg   *loader.PackageInfo
+	info  *types.Info
 	file  *token.File
 	edits editSet
 	path  []step
@@ -390,7 +385,7 @@ func (v *visitor) unconvert(call *ast.CallExpr) {
 	if len(call.Args) != 1 || call.Ellipsis != token.NoPos {
 		return
 	}
-	ft, ok := v.pkg.Types[call.Fun]
+	ft, ok := v.info.Types[call.Fun]
 	if !ok {
 		fmt.Println("Missing type for function")
 		return
@@ -399,7 +394,7 @@ func (v *visitor) unconvert(call *ast.CallExpr) {
 		// Function call; not a conversion.
 		return
 	}
-	at, ok := v.pkg.Types[call.Args[0]]
+	at, ok := v.info.Types[call.Args[0]]
 	if !ok {
 		fmt.Println("Missing type for argument")
 		return
@@ -408,7 +403,13 @@ func (v *visitor) unconvert(call *ast.CallExpr) {
 		// A real conversion.
 		return
 	}
-	if isUntypedValue(call.Args[0], &v.pkg.Info) {
+	if !*flagFastMath && isFloatingPoint(ft.Type) {
+		// As of Go 1.9, explicit floating-point type
+		// conversions are always significant because they
+		// force rounding and prevent operation fusing.
+		return
+	}
+	if isUntypedValue(call.Args[0], v.info) {
 		// Workaround golang.org/issue/13061.
 		return
 	}
@@ -417,31 +418,15 @@ func (v *visitor) unconvert(call *ast.CallExpr) {
 		fmt.Println("Skipped a possible type conversion because of -safe at", v.file.Position(call.Pos()))
 		return
 	}
-	if v.isCgoCheckPointerContext() {
-		// cmd/cgo generates explicit type conversions that
-		// are often redundant when introducing
-		// _cgoCheckPointer calls (issue #16).  Users can't do
-		// anything about these, so skip over them.
-		return
-	}
 
-	v.edits[v.file.Position(call.Lparen)] = struct{}{}
+	v.edits.add(v.file.Position(call.Lparen))
 }
 
-func (v *visitor) isCgoCheckPointerContext() bool {
-	ctxt := &v.path[len(v.path)-2]
-	if ctxt.i != 1 {
-		return false
-	}
-	call, ok := ctxt.n.(*ast.CallExpr)
-	if !ok {
-		return false
-	}
-	ident, ok := call.Fun.(*ast.Ident)
-	if !ok {
-		return false
-	}
-	return ident.Name == "_cgoCheckPointer"
+// isFloatingPoint reports whether t's underlying type is a floating
+// point type.
+func isFloatingPoint(t types.Type) bool {
+	ut, ok := t.Underlying().(*types.Basic)
+	return ok && ut.Info()&(types.IsFloat|types.IsComplex) != 0
 }
 
 // isSafeContext reports whether the current context requires
@@ -463,7 +448,7 @@ func (v *visitor) isSafeContext(t types.Type) bool {
 		}
 		// We're a conversion in the pos'th element of n.Rhs.
 		// Check that the corresponding element of n.Lhs is of type t.
-		lt, ok := v.pkg.Types[n.Lhs[pos]]
+		lt, ok := v.info.Types[n.Lhs[pos]]
 		if !ok {
 			fmt.Println("Missing type for LHS expression")
 			return false
@@ -485,7 +470,7 @@ func (v *visitor) isSafeContext(t types.Type) bool {
 		} else {
 			other = n.X
 		}
-		ot, ok := v.pkg.Types[other]
+		ot, ok := v.info.Types[other]
 		if !ok {
 			fmt.Println("Missing type for other binop subexpr")
 			return false
@@ -497,7 +482,7 @@ func (v *visitor) isSafeContext(t types.Type) bool {
 			// Type conversion in the function subexpr is okay.
 			return true
 		}
-		ft, ok := v.pkg.Types[n.Fun]
+		ft, ok := v.info.Types[n.Fun]
 		if !ok {
 			fmt.Println("Missing type for function expression")
 			return false
@@ -550,7 +535,7 @@ func (v *visitor) isSafeContext(t types.Type) bool {
 		if typeExpr == nil {
 			fmt.Println(ctxt)
 		}
-		pt, ok := v.pkg.Types[typeExpr]
+		pt, ok := v.info.Types[typeExpr]
 		if !ok {
 			fmt.Println("Missing type for return parameter at", v.file.Position(n.Pos()))
 			return false
diff --git a/vendor/github.com/google/pprof/profile/encode.go b/vendor/github.com/google/pprof/profile/encode.go
index ab7f03ae2677b4a5673def3d47e349821629f606..860bb304c349adc32a474a3ee3fb3da568676b27 100644
--- a/vendor/github.com/google/pprof/profile/encode.go
+++ b/vendor/github.com/google/pprof/profile/encode.go
@@ -17,6 +17,7 @@ package profile
 import (
 	"errors"
 	"sort"
+	"strings"
 )
 
 func (p *Profile) decoder() []decoder {
@@ -183,12 +184,13 @@ var profileDecoder = []decoder{
 	// repeated Location location = 4
 	func(b *buffer, m message) error {
 		x := new(Location)
-		x.Line = make([]Line, 0, 8) // Pre-allocate Line buffer
+		x.Line = b.tmpLines[:0] // Use shared space temporarily
 		pp := m.(*Profile)
 		pp.Location = append(pp.Location, x)
 		err := decodeMessage(b, x)
-		var tmp []Line
-		x.Line = append(tmp, x.Line...) // Shrink to allocated size
+		b.tmpLines = x.Line[:0]
+		// Copy to shrink size and detach from shared space.
+		x.Line = append([]Line(nil), x.Line...)
 		return err
 	},
 	// repeated Function function = 5
@@ -252,6 +254,14 @@ func (p *Profile) postDecode() error {
 		} else {
 			mappings[m.ID] = m
 		}
+
+		// If this is a main linux kernel mapping with a relocation symbol suffix
+		// ("[kernel.kallsyms]_text"), extract said suffix.
+		// It is fairly hacky to handle at this level, but the alternatives appear even worse.
+		const prefix = "[kernel.kallsyms]"
+		if strings.HasPrefix(m.File, prefix) {
+			m.KernelRelocationSymbol = m.File[len(prefix):]
+		}
 	}
 
 	functions := make(map[uint64]*Function, len(p.Function))
@@ -298,41 +308,52 @@ func (p *Profile) postDecode() error {
 		st.Unit, err = getString(p.stringTable, &st.unitX, err)
 	}
 
+	// Pre-allocate space for all locations.
+	numLocations := 0
+	for _, s := range p.Sample {
+		numLocations += len(s.locationIDX)
+	}
+	locBuffer := make([]*Location, numLocations)
+
 	for _, s := range p.Sample {
-		labels := make(map[string][]string, len(s.labelX))
-		numLabels := make(map[string][]int64, len(s.labelX))
-		numUnits := make(map[string][]string, len(s.labelX))
-		for _, l := range s.labelX {
-			var key, value string
-			key, err = getString(p.stringTable, &l.keyX, err)
-			if l.strX != 0 {
-				value, err = getString(p.stringTable, &l.strX, err)
-				labels[key] = append(labels[key], value)
-			} else if l.numX != 0 || l.unitX != 0 {
-				numValues := numLabels[key]
-				units := numUnits[key]
-				if l.unitX != 0 {
-					var unit string
-					unit, err = getString(p.stringTable, &l.unitX, err)
-					units = padStringArray(units, len(numValues))
-					numUnits[key] = append(units, unit)
+		if len(s.labelX) > 0 {
+			labels := make(map[string][]string, len(s.labelX))
+			numLabels := make(map[string][]int64, len(s.labelX))
+			numUnits := make(map[string][]string, len(s.labelX))
+			for _, l := range s.labelX {
+				var key, value string
+				key, err = getString(p.stringTable, &l.keyX, err)
+				if l.strX != 0 {
+					value, err = getString(p.stringTable, &l.strX, err)
+					labels[key] = append(labels[key], value)
+				} else if l.numX != 0 || l.unitX != 0 {
+					numValues := numLabels[key]
+					units := numUnits[key]
+					if l.unitX != 0 {
+						var unit string
+						unit, err = getString(p.stringTable, &l.unitX, err)
+						units = padStringArray(units, len(numValues))
+						numUnits[key] = append(units, unit)
+					}
+					numLabels[key] = append(numLabels[key], l.numX)
 				}
-				numLabels[key] = append(numLabels[key], l.numX)
 			}
-		}
-		if len(labels) > 0 {
-			s.Label = labels
-		}
-		if len(numLabels) > 0 {
-			s.NumLabel = numLabels
-			for key, units := range numUnits {
-				if len(units) > 0 {
-					numUnits[key] = padStringArray(units, len(numLabels[key]))
+			if len(labels) > 0 {
+				s.Label = labels
+			}
+			if len(numLabels) > 0 {
+				s.NumLabel = numLabels
+				for key, units := range numUnits {
+					if len(units) > 0 {
+						numUnits[key] = padStringArray(units, len(numLabels[key]))
+					}
 				}
+				s.NumUnit = numUnits
 			}
-			s.NumUnit = numUnits
 		}
-		s.Location = make([]*Location, len(s.locationIDX))
+
+		s.Location = locBuffer[:len(s.locationIDX)]
+		locBuffer = locBuffer[len(s.locationIDX):]
 		for i, lid := range s.locationIDX {
 			if lid < uint64(len(locationIds)) {
 				s.Location[i] = locationIds[lid]
@@ -509,6 +530,7 @@ func (p *Line) decoder() []decoder {
 func (p *Line) encode(b *buffer) {
 	encodeUint64Opt(b, 1, p.functionIDX)
 	encodeInt64Opt(b, 2, p.Line)
+	encodeInt64Opt(b, 3, p.Column)
 }
 
 var lineDecoder = []decoder{
@@ -517,6 +539,8 @@ var lineDecoder = []decoder{
 	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Line).functionIDX) },
 	// optional int64 line = 2
 	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Line).Line) },
+	// optional int64 column = 3
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Line).Column) },
 }
 
 func (p *Function) decoder() []decoder {
diff --git a/vendor/github.com/google/pprof/profile/filter.go b/vendor/github.com/google/pprof/profile/filter.go
index ea8e66c68d25546dd2f5cd1a882c5261f526a3c4..c794b939067cfd119e0427a5ea7af5ff5ba2cdb7 100644
--- a/vendor/github.com/google/pprof/profile/filter.go
+++ b/vendor/github.com/google/pprof/profile/filter.go
@@ -22,6 +22,10 @@ import "regexp"
 // samples where at least one frame matches focus but none match ignore.
 // Returns true is the corresponding regexp matched at least one sample.
 func (p *Profile) FilterSamplesByName(focus, ignore, hide, show *regexp.Regexp) (fm, im, hm, hnm bool) {
+	if focus == nil && ignore == nil && hide == nil && show == nil {
+		fm = true // Missing focus implies a match
+		return
+	}
 	focusOrIgnore := make(map[uint64]bool)
 	hidden := make(map[uint64]bool)
 	for _, l := range p.Location {
diff --git a/vendor/github.com/google/pprof/profile/legacy_java_profile.go b/vendor/github.com/google/pprof/profile/legacy_java_profile.go
index 91f45e53c6c269d30a9426ca7dc66c3f57060d95..4580bab18396f7bc64f7249ea8493d9e457bcbc3 100644
--- a/vendor/github.com/google/pprof/profile/legacy_java_profile.go
+++ b/vendor/github.com/google/pprof/profile/legacy_java_profile.go
@@ -56,7 +56,7 @@ func javaCPUProfile(b []byte, period int64, parse func(b []byte) (uint64, []byte
 	}
 
 	// Strip out addresses for better merge.
-	if err = p.Aggregate(true, true, true, true, false); err != nil {
+	if err = p.Aggregate(true, true, true, true, false, false); err != nil {
 		return nil, err
 	}
 
@@ -99,7 +99,7 @@ func parseJavaProfile(b []byte) (*Profile, error) {
 	}
 
 	// Strip out addresses for better merge.
-	if err = p.Aggregate(true, true, true, true, false); err != nil {
+	if err = p.Aggregate(true, true, true, true, false, false); err != nil {
 		return nil, err
 	}
 
diff --git a/vendor/github.com/google/pprof/profile/legacy_profile.go b/vendor/github.com/google/pprof/profile/legacy_profile.go
index 0c8f3bb5b71f45f6c9edc402c4a9fada3690b9f8..8d07fd6c27c2af3052c3fd36562307560a058623 100644
--- a/vendor/github.com/google/pprof/profile/legacy_profile.go
+++ b/vendor/github.com/google/pprof/profile/legacy_profile.go
@@ -295,11 +295,12 @@ func get64b(b []byte) (uint64, []byte) {
 //
 // The general format for profilez samples is a sequence of words in
 // binary format. The first words are a header with the following data:
-//   1st word -- 0
-//   2nd word -- 3
-//   3rd word -- 0 if a c++ application, 1 if a java application.
-//   4th word -- Sampling period (in microseconds).
-//   5th word -- Padding.
+//
+//	1st word -- 0
+//	2nd word -- 3
+//	3rd word -- 0 if a c++ application, 1 if a java application.
+//	4th word -- Sampling period (in microseconds).
+//	5th word -- Padding.
 func parseCPU(b []byte) (*Profile, error) {
 	var parse func([]byte) (uint64, []byte)
 	var n1, n2, n3, n4, n5 uint64
@@ -403,15 +404,18 @@ func cleanupDuplicateLocations(p *Profile) {
 //
 // profilez samples are a repeated sequence of stack frames of the
 // form:
-//    1st word -- The number of times this stack was encountered.
-//    2nd word -- The size of the stack (StackSize).
-//    3rd word -- The first address on the stack.
-//    ...
-//    StackSize + 2 -- The last address on the stack
+//
+//	1st word -- The number of times this stack was encountered.
+//	2nd word -- The size of the stack (StackSize).
+//	3rd word -- The first address on the stack.
+//	...
+//	StackSize + 2 -- The last address on the stack
+//
 // The last stack trace is of the form:
-//   1st word -- 0
-//   2nd word -- 1
-//   3rd word -- 0
+//
+//	1st word -- 0
+//	2nd word -- 1
+//	3rd word -- 0
 //
 // Addresses from stack traces may point to the next instruction after
 // each call. Optionally adjust by -1 to land somewhere on the actual
@@ -861,7 +865,6 @@ func parseThread(b []byte) (*Profile, error) {
 	// Recognize each thread and populate profile samples.
 	for !isMemoryMapSentinel(line) {
 		if strings.HasPrefix(line, "---- no stack trace for") {
-			line = ""
 			break
 		}
 		if t := threadStartRE.FindStringSubmatch(line); len(t) != 4 {
diff --git a/vendor/github.com/google/pprof/profile/merge.go b/vendor/github.com/google/pprof/profile/merge.go
index 9978e7330e6e6fe670b78561c2eb45d166b217bb..eee0132e74064a091bb1fbc8ed5c137446d566bf 100644
--- a/vendor/github.com/google/pprof/profile/merge.go
+++ b/vendor/github.com/google/pprof/profile/merge.go
@@ -15,6 +15,7 @@
 package profile
 
 import (
+	"encoding/binary"
 	"fmt"
 	"sort"
 	"strconv"
@@ -58,7 +59,7 @@ func Merge(srcs []*Profile) (*Profile, error) {
 
 	for _, src := range srcs {
 		// Clear the profile-specific hash tables
-		pm.locationsByID = make(map[uint64]*Location, len(src.Location))
+		pm.locationsByID = makeLocationIDMap(len(src.Location))
 		pm.functionsByID = make(map[uint64]*Function, len(src.Function))
 		pm.mappingsByID = make(map[uint64]mapInfo, len(src.Mapping))
 
@@ -136,7 +137,7 @@ type profileMerger struct {
 	p *Profile
 
 	// Memoization tables within a profile.
-	locationsByID map[uint64]*Location
+	locationsByID locationIDMap
 	functionsByID map[uint64]*Function
 	mappingsByID  map[uint64]mapInfo
 
@@ -153,6 +154,16 @@ type mapInfo struct {
 }
 
 func (pm *profileMerger) mapSample(src *Sample) *Sample {
+	// Check memoization table
+	k := pm.sampleKey(src)
+	if ss, ok := pm.samples[k]; ok {
+		for i, v := range src.Value {
+			ss.Value[i] += v
+		}
+		return ss
+	}
+
+	// Make new sample.
 	s := &Sample{
 		Location: make([]*Location, len(src.Location)),
 		Value:    make([]int64, len(src.Value)),
@@ -177,52 +188,98 @@ func (pm *profileMerger) mapSample(src *Sample) *Sample {
 		s.NumLabel[k] = vv
 		s.NumUnit[k] = uu
 	}
-	// Check memoization table. Must be done on the remapped location to
-	// account for the remapped mapping. Add current values to the
-	// existing sample.
-	k := s.key()
-	if ss, ok := pm.samples[k]; ok {
-		for i, v := range src.Value {
-			ss.Value[i] += v
-		}
-		return ss
-	}
 	copy(s.Value, src.Value)
 	pm.samples[k] = s
 	pm.p.Sample = append(pm.p.Sample, s)
 	return s
 }
 
-// key generates sampleKey to be used as a key for maps.
-func (sample *Sample) key() sampleKey {
-	ids := make([]string, len(sample.Location))
-	for i, l := range sample.Location {
-		ids[i] = strconv.FormatUint(l.ID, 16)
+func (pm *profileMerger) sampleKey(sample *Sample) sampleKey {
+	// Accumulate contents into a string.
+	var buf strings.Builder
+	buf.Grow(64) // Heuristic to avoid extra allocs
+
+	// encode a number
+	putNumber := func(v uint64) {
+		var num [binary.MaxVarintLen64]byte
+		n := binary.PutUvarint(num[:], v)
+		buf.Write(num[:n])
+	}
+
+	// encode a string prefixed with its length.
+	putDelimitedString := func(s string) {
+		putNumber(uint64(len(s)))
+		buf.WriteString(s)
+	}
+
+	for _, l := range sample.Location {
+		// Get the location in the merged profile, which may have a different ID.
+		if loc := pm.mapLocation(l); loc != nil {
+			putNumber(loc.ID)
+		}
 	}
+	putNumber(0) // Delimiter
 
-	labels := make([]string, 0, len(sample.Label))
-	for k, v := range sample.Label {
-		labels = append(labels, fmt.Sprintf("%q%q", k, v))
+	for _, l := range sortedKeys1(sample.Label) {
+		putDelimitedString(l)
+		values := sample.Label[l]
+		putNumber(uint64(len(values)))
+		for _, v := range values {
+			putDelimitedString(v)
+		}
 	}
-	sort.Strings(labels)
 
-	numlabels := make([]string, 0, len(sample.NumLabel))
-	for k, v := range sample.NumLabel {
-		numlabels = append(numlabels, fmt.Sprintf("%q%x%x", k, v, sample.NumUnit[k]))
+	for _, l := range sortedKeys2(sample.NumLabel) {
+		putDelimitedString(l)
+		values := sample.NumLabel[l]
+		putNumber(uint64(len(values)))
+		for _, v := range values {
+			putNumber(uint64(v))
+		}
+		units := sample.NumUnit[l]
+		putNumber(uint64(len(units)))
+		for _, v := range units {
+			putDelimitedString(v)
+		}
 	}
-	sort.Strings(numlabels)
 
-	return sampleKey{
-		strings.Join(ids, "|"),
-		strings.Join(labels, ""),
-		strings.Join(numlabels, ""),
+	return sampleKey(buf.String())
+}
+
+type sampleKey string
+
+// sortedKeys1 returns the sorted keys found in a string->[]string map.
+//
+// Note: this is currently non-generic since github pprof runs golint,
+// which does not support generics. When that issue is fixed, it can
+// be merged with sortedKeys2 and made into a generic function.
+func sortedKeys1(m map[string][]string) []string {
+	if len(m) == 0 {
+		return nil
 	}
+	keys := make([]string, 0, len(m))
+	for k := range m {
+		keys = append(keys, k)
+	}
+	sort.Strings(keys)
+	return keys
 }
 
-type sampleKey struct {
-	locations string
-	labels    string
-	numlabels string
+// sortedKeys2 returns the sorted keys found in a string->[]int64 map.
+//
+// Note: this is currently non-generic since github pprof runs golint,
+// which does not support generics. When that issue is fixed, it can
+// be merged with sortedKeys1 and made into a generic function.
+func sortedKeys2(m map[string][]int64) []string {
+	if len(m) == 0 {
+		return nil
+	}
+	keys := make([]string, 0, len(m))
+	for k := range m {
+		keys = append(keys, k)
+	}
+	sort.Strings(keys)
+	return keys
 }
 
 func (pm *profileMerger) mapLocation(src *Location) *Location {
@@ -230,7 +287,7 @@ func (pm *profileMerger) mapLocation(src *Location) *Location {
 		return nil
 	}
 
-	if l, ok := pm.locationsByID[src.ID]; ok {
+	if l := pm.locationsByID.get(src.ID); l != nil {
 		return l
 	}
 
@@ -249,10 +306,10 @@ func (pm *profileMerger) mapLocation(src *Location) *Location {
 	// account for the remapped mapping ID.
 	k := l.key()
 	if ll, ok := pm.locations[k]; ok {
-		pm.locationsByID[src.ID] = ll
+		pm.locationsByID.set(src.ID, ll)
 		return ll
 	}
-	pm.locationsByID[src.ID] = l
+	pm.locationsByID.set(src.ID, l)
 	pm.locations[k] = l
 	pm.p.Location = append(pm.p.Location, l)
 	return l
@@ -269,12 +326,13 @@ func (l *Location) key() locationKey {
 		key.addr -= l.Mapping.Start
 		key.mappingID = l.Mapping.ID
 	}
-	lines := make([]string, len(l.Line)*2)
+	lines := make([]string, len(l.Line)*3)
 	for i, line := range l.Line {
 		if line.Function != nil {
 			lines[i*2] = strconv.FormatUint(line.Function.ID, 16)
 		}
 		lines[i*2+1] = strconv.FormatInt(line.Line, 16)
+		lines[i*2+2] = strconv.FormatInt(line.Column, 16)
 	}
 	key.lines = strings.Join(lines, "|")
 	return key
@@ -303,16 +361,17 @@ func (pm *profileMerger) mapMapping(src *Mapping) mapInfo {
 		return mi
 	}
 	m := &Mapping{
-		ID:              uint64(len(pm.p.Mapping) + 1),
-		Start:           src.Start,
-		Limit:           src.Limit,
-		Offset:          src.Offset,
-		File:            src.File,
-		BuildID:         src.BuildID,
-		HasFunctions:    src.HasFunctions,
-		HasFilenames:    src.HasFilenames,
-		HasLineNumbers:  src.HasLineNumbers,
-		HasInlineFrames: src.HasInlineFrames,
+		ID:                     uint64(len(pm.p.Mapping) + 1),
+		Start:                  src.Start,
+		Limit:                  src.Limit,
+		Offset:                 src.Offset,
+		File:                   src.File,
+		KernelRelocationSymbol: src.KernelRelocationSymbol,
+		BuildID:                src.BuildID,
+		HasFunctions:           src.HasFunctions,
+		HasFilenames:           src.HasFilenames,
+		HasLineNumbers:         src.HasLineNumbers,
+		HasInlineFrames:        src.HasInlineFrames,
 	}
 	pm.p.Mapping = append(pm.p.Mapping, m)
 
@@ -360,6 +419,7 @@ func (pm *profileMerger) mapLine(src Line) Line {
 	ln := Line{
 		Function: pm.mapFunction(src.Function),
 		Line:     src.Line,
+		Column:   src.Column,
 	}
 	return ln
 }
@@ -479,3 +539,131 @@ func (p *Profile) compatible(pb *Profile) error {
 func equalValueType(st1, st2 *ValueType) bool {
 	return st1.Type == st2.Type && st1.Unit == st2.Unit
 }
+
+// locationIDMap is like a map[uint64]*Location, but provides efficiency for
+// ids that are densely numbered, which is often the case.
+type locationIDMap struct {
+	dense  []*Location          // indexed by id for id < len(dense)
+	sparse map[uint64]*Location // indexed by id for id >= len(dense)
+}
+
+func makeLocationIDMap(n int) locationIDMap {
+	return locationIDMap{
+		dense:  make([]*Location, n),
+		sparse: map[uint64]*Location{},
+	}
+}
+
+func (lm locationIDMap) get(id uint64) *Location {
+	if id < uint64(len(lm.dense)) {
+		return lm.dense[int(id)]
+	}
+	return lm.sparse[id]
+}
+
+func (lm locationIDMap) set(id uint64, loc *Location) {
+	if id < uint64(len(lm.dense)) {
+		lm.dense[id] = loc
+		return
+	}
+	lm.sparse[id] = loc
+}
+
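The `locationIDMap` above stores small IDs in a slice and spills larger ones into a map, which avoids map hashing for the common case of densely numbered location IDs. A self-contained sketch of the same dense/sparse pattern, specialized to string values purely for illustration:

```go
package main

import "fmt"

// denseSparse mirrors the locationIDMap layout: a slice for ids below a
// threshold, a map for everything else.
type denseSparse struct {
	dense  []string          // indexed by id for id < len(dense)
	sparse map[uint64]string // indexed by id for id >= len(dense)
}

func makeDenseSparse(n int) denseSparse {
	return denseSparse{dense: make([]string, n), sparse: map[uint64]string{}}
}

func (m denseSparse) set(id uint64, v string) {
	if id < uint64(len(m.dense)) {
		m.dense[id] = v
		return
	}
	m.sparse[id] = v
}

func (m denseSparse) get(id uint64) string {
	if id < uint64(len(m.dense)) {
		return m.dense[id] // plain slice index, no hashing
	}
	return m.sparse[id]
}

func main() {
	m := makeDenseSparse(4)
	m.set(2, "dense")
	m.set(1000, "sparse")
	fmt.Println(m.get(2), m.get(1000), m.get(3) == "") // dense sparse true
}
```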
+// CompatibilizeSampleTypes makes profiles compatible to be compared/merged. It
+// keeps only the sample types that appear in all profiles and drops/reorders
+// sample types as necessary.
+//
+// If the sample type order is not the same across the given profiles, the
+// order from the first profile is used.
+//
+// Profiles are modified in-place.
+//
+// It returns an error if the sample type's intersection is empty.
+func CompatibilizeSampleTypes(ps []*Profile) error {
+	sTypes := commonSampleTypes(ps)
+	if len(sTypes) == 0 {
+		return fmt.Errorf("profiles have empty common sample type list")
+	}
+	for _, p := range ps {
+		if err := compatibilizeSampleTypes(p, sTypes); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// commonSampleTypes returns the sample types that appear in all profiles, in
+// the order in which they appear in the first profile.
+func commonSampleTypes(ps []*Profile) []string {
+	if len(ps) == 0 {
+		return nil
+	}
+	sTypes := map[string]int{}
+	for _, p := range ps {
+		for _, st := range p.SampleType {
+			sTypes[st.Type]++
+		}
+	}
+	var res []string
+	for _, st := range ps[0].SampleType {
+		if sTypes[st.Type] == len(ps) {
+			res = append(res, st.Type)
+		}
+	}
+	return res
+}
+
+// compatibilizeSampleTypes drops sample types that are not present in the
+// sTypes list and reorders the remaining ones if needed.
+//
+// It sets DefaultSampleType to sType[0] if it is not in sType list.
+//
+// It assumes that all sample types from the sTypes list are present in the
+// given profile; otherwise it returns an error.
+func compatibilizeSampleTypes(p *Profile, sTypes []string) error {
+	if len(sTypes) == 0 {
+		return fmt.Errorf("sample type list is empty")
+	}
+	defaultSampleType := sTypes[0]
+	reMap, needToModify := make([]int, len(sTypes)), false
+	for i, st := range sTypes {
+		if st == p.DefaultSampleType {
+			defaultSampleType = p.DefaultSampleType
+		}
+		idx := searchValueType(p.SampleType, st)
+		if idx < 0 {
+			return fmt.Errorf("%q sample type is not found in profile", st)
+		}
+		reMap[i] = idx
+		if idx != i {
+			needToModify = true
+		}
+	}
+	if !needToModify && len(sTypes) == len(p.SampleType) {
+		return nil
+	}
+	p.DefaultSampleType = defaultSampleType
+	oldSampleTypes := p.SampleType
+	p.SampleType = make([]*ValueType, len(sTypes))
+	for i, idx := range reMap {
+		p.SampleType[i] = oldSampleTypes[idx]
+	}
+	values := make([]int64, len(sTypes))
+	for _, s := range p.Sample {
+		for i, idx := range reMap {
+			values[i] = s.Value[idx]
+		}
+		s.Value = s.Value[:len(values)]
+		copy(s.Value, values)
+	}
+	return nil
+}
+
+func searchValueType(vts []*ValueType, s string) int {
+	for i, vt := range vts {
+		if vt.Type == s {
+			return i
+		}
+	}
+	return -1
+}
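A usage sketch for the new `CompatibilizeSampleTypes` helper; the import path is the package's upstream one and the profiles are deliberately minimal, so treat this as illustrative rather than as part of the change:

```go
package main

import (
	"fmt"
	"log"

	"github.com/google/pprof/profile"
)

func main() {
	p1 := &profile.Profile{SampleType: []*profile.ValueType{
		{Type: "alloc_objects", Unit: "count"},
		{Type: "alloc_space", Unit: "bytes"},
	}}
	p2 := &profile.Profile{SampleType: []*profile.ValueType{
		{Type: "alloc_space", Unit: "bytes"},
		{Type: "inuse_space", Unit: "bytes"},
	}}
	if err := profile.CompatibilizeSampleTypes([]*profile.Profile{p1, p2}); err != nil {
		log.Fatal(err) // the intersection of sample types was empty
	}
	// Both profiles now expose only "alloc_space" (the order comes from p1),
	// and each Sample.Value slice has been reindexed to match.
	fmt.Println(len(p1.SampleType), len(p2.SampleType)) // 1 1
}
```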
diff --git a/vendor/github.com/google/pprof/profile/profile.go b/vendor/github.com/google/pprof/profile/profile.go
index 2590c8ddb42e25ede9dd54e54a80f0286cc7105e..62df80a55636296b53df24fdee6413a77eebcd0b 100644
--- a/vendor/github.com/google/pprof/profile/profile.go
+++ b/vendor/github.com/google/pprof/profile/profile.go
@@ -21,7 +21,6 @@ import (
 	"compress/gzip"
 	"fmt"
 	"io"
-	"io/ioutil"
 	"math"
 	"path/filepath"
 	"regexp"
@@ -73,9 +72,23 @@ type ValueType struct {
 type Sample struct {
 	Location []*Location
 	Value    []int64
-	Label    map[string][]string
+	// Label is a per-label-key map to values for string labels.
+	//
+	// In general, having multiple values for the given label key is strongly
+	// discouraged - see docs for the sample label field in profile.proto.  The
+	// main reason this unlikely state is tracked here is to make the
+	// decoding->encoding roundtrip not lossy. But we expect that the value
+	// slices present in this map are always of length 1.
+	Label map[string][]string
+	// NumLabel is a per-label-key map to values for numeric labels. See a note
+	// above on handling multiple values for a label.
 	NumLabel map[string][]int64
-	NumUnit  map[string][]string
+	// NumUnit is a per-label-key map to the unit names of corresponding numeric
+	// label values. The unit info may be missing even if the label is in
+	// NumLabel, see the docs in profile.proto for details. When the value
+	// slice is present and not nil, its length must be equal to the length of
+	// the corresponding value slice in NumLabel.
+	NumUnit map[string][]string
 
 	locationIDX []uint64
 	labelX      []label
@@ -106,6 +119,15 @@ type Mapping struct {
 
 	fileX    int64
 	buildIDX int64
+
+	// Name of the kernel relocation symbol ("_text" or "_stext"), extracted from File.
+	// For linux kernel mappings generated by some tools, correct symbolization depends
+	// on knowing which of the two possible relocation symbols was used for `Start`.
+	// This is given to us as a suffix in `File` (e.g. "[kernel.kallsyms]_stext").
+	//
+	// Note, this public field is not persisted in the proto. For the purposes of
+	// copying / merging / hashing profiles, it is considered subsumed by `File`.
+	KernelRelocationSymbol string
 }
 
 // Location corresponds to Profile.Location
@@ -123,6 +145,7 @@ type Location struct {
 type Line struct {
 	Function *Function
 	Line     int64
+	Column   int64
 
 	functionIDX uint64
 }
@@ -144,7 +167,7 @@ type Function struct {
 // may be a gzip-compressed encoded protobuf or one of many legacy
 // profile formats which may be unsupported in the future.
 func Parse(r io.Reader) (*Profile, error) {
-	data, err := ioutil.ReadAll(r)
+	data, err := io.ReadAll(r)
 	if err != nil {
 		return nil, err
 	}
@@ -159,7 +182,7 @@ func ParseData(data []byte) (*Profile, error) {
 	if len(data) >= 2 && data[0] == 0x1f && data[1] == 0x8b {
 		gz, err := gzip.NewReader(bytes.NewBuffer(data))
 		if err == nil {
-			data, err = ioutil.ReadAll(gz)
+			data, err = io.ReadAll(gz)
 		}
 		if err != nil {
 			return nil, fmt.Errorf("decompressing profile: %v", err)
@@ -414,7 +437,7 @@ func (p *Profile) CheckValid() error {
 // Aggregate merges the locations in the profile into equivalence
 // classes preserving the request attributes. It also updates the
 // samples to point to the merged locations.
-func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, address bool) error {
+func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, columnnumber, address bool) error {
 	for _, m := range p.Mapping {
 		m.HasInlineFrames = m.HasInlineFrames && inlineFrame
 		m.HasFunctions = m.HasFunctions && function
@@ -436,7 +459,7 @@ func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, address
 	}
 
 	// Aggregate locations
-	if !inlineFrame || !address || !linenumber {
+	if !inlineFrame || !address || !linenumber || !columnnumber {
 		for _, l := range p.Location {
 			if !inlineFrame && len(l.Line) > 1 {
 				l.Line = l.Line[len(l.Line)-1:]
@@ -444,6 +467,12 @@ func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, address
 			if !linenumber {
 				for i := range l.Line {
 					l.Line[i].Line = 0
+					l.Line[i].Column = 0
+				}
+			}
+			if !columnnumber {
+				for i := range l.Line {
+					l.Line[i].Column = 0
 				}
 			}
 			if !address {
@@ -605,10 +634,11 @@ func (l *Location) string() string {
 	for li := range l.Line {
 		lnStr := "??"
 		if fn := l.Line[li].Function; fn != nil {
-			lnStr = fmt.Sprintf("%s %s:%d s=%d",
+			lnStr = fmt.Sprintf("%s %s:%d:%d s=%d",
 				fn.Name,
 				fn.Filename,
 				l.Line[li].Line,
+				l.Line[li].Column,
 				fn.StartLine)
 			if fn.Name != fn.SystemName {
 				lnStr = lnStr + "(" + fn.SystemName + ")"
@@ -707,6 +737,35 @@ func (s *Sample) HasLabel(key, value string) bool {
 	return false
 }
 
+// SetNumLabel sets the specified key to the specified value for all samples in the
+// profile. "unit" is a slice that describes the units that each corresponding member
+// of "values" is measured in (e.g. bytes or seconds).  If there is no relevant
+// unit for a given value, that member of "unit" should be the empty string.
+// "unit" must either have the same length as "value", or be nil.
+func (p *Profile) SetNumLabel(key string, value []int64, unit []string) {
+	for _, sample := range p.Sample {
+		if sample.NumLabel == nil {
+			sample.NumLabel = map[string][]int64{key: value}
+		} else {
+			sample.NumLabel[key] = value
+		}
+		if sample.NumUnit == nil {
+			sample.NumUnit = map[string][]string{key: unit}
+		} else {
+			sample.NumUnit[key] = unit
+		}
+	}
+}
+
+// RemoveNumLabel removes all numerical labels associated with the specified key for all
+// samples in the profile.
+func (p *Profile) RemoveNumLabel(key string) {
+	for _, sample := range p.Sample {
+		delete(sample.NumLabel, key)
+		delete(sample.NumUnit, key)
+	}
+}
+
 // DiffBaseSample returns true if a sample belongs to the diff base and false
 // otherwise.
 func (s *Sample) DiffBaseSample() bool {
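The new `SetNumLabel`/`RemoveNumLabel` helpers introduced above apply to every sample in a profile. A small usage sketch with illustrative values; per the doc comment, `unit` must be nil or have the same length as `value`:

```go
package main

import (
	"fmt"

	"github.com/google/pprof/profile"
)

func main() {
	p := &profile.Profile{
		Sample: []*profile.Sample{{Value: []int64{1}}},
	}

	// Tag every sample with a numeric label plus its unit.
	p.SetNumLabel("bytes", []int64{4096}, []string{"bytes"})
	fmt.Println(p.Sample[0].NumLabel["bytes"], p.Sample[0].NumUnit["bytes"]) // [4096] [bytes]

	// Drop the label (and its units) from all samples again.
	p.RemoveNumLabel("bytes")
	fmt.Println(len(p.Sample[0].NumLabel)) // 0
}
```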
diff --git a/vendor/github.com/google/pprof/profile/proto.go b/vendor/github.com/google/pprof/profile/proto.go
index 539ad3ab33f97b11ceb9cf12abda43c0c17388c8..a15696ba16f24448c3e81163c6d41b277eec104e 100644
--- a/vendor/github.com/google/pprof/profile/proto.go
+++ b/vendor/github.com/google/pprof/profile/proto.go
@@ -39,11 +39,12 @@ import (
 )
 
 type buffer struct {
-	field int // field tag
-	typ   int // proto wire type code for field
-	u64   uint64
-	data  []byte
-	tmp   [16]byte
+	field    int // field tag
+	typ      int // proto wire type code for field
+	u64      uint64
+	data     []byte
+	tmp      [16]byte
+	tmpLines []Line // temporary storage used while decoding "repeated Line".
 }
 
 type decoder func(*buffer, message) error
@@ -286,7 +287,6 @@ func decodeInt64s(b *buffer, x *[]int64) error {
 	if b.typ == 2 {
 		// Packed encoding
 		data := b.data
-		tmp := make([]int64, 0, len(data)) // Maximally sized
 		for len(data) > 0 {
 			var u uint64
 			var err error
@@ -294,9 +294,8 @@ func decodeInt64s(b *buffer, x *[]int64) error {
 			if u, data, err = decodeVarint(data); err != nil {
 				return err
 			}
-			tmp = append(tmp, int64(u))
+			*x = append(*x, int64(u))
 		}
-		*x = append(*x, tmp...)
 		return nil
 	}
 	var i int64
@@ -319,7 +318,6 @@ func decodeUint64s(b *buffer, x *[]uint64) error {
 	if b.typ == 2 {
 		data := b.data
 		// Packed encoding
-		tmp := make([]uint64, 0, len(data)) // Maximally sized
 		for len(data) > 0 {
 			var u uint64
 			var err error
@@ -327,9 +325,8 @@ func decodeUint64s(b *buffer, x *[]uint64) error {
 			if u, data, err = decodeVarint(data); err != nil {
 				return err
 			}
-			tmp = append(tmp, u)
+			*x = append(*x, u)
 		}
-		*x = append(*x, tmp...)
 		return nil
 	}
 	var u uint64
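The `decodeInt64s`/`decodeUint64s` changes above drop the temporary slice and append decoded values directly into the destination. A standalone sketch of decoding a packed varint field in the same single-pass style, using the standard library's `binary.Uvarint` rather than the package's internal `decodeVarint`:

```go
package main

import (
	"encoding/binary"
	"fmt"
)

// decodePacked appends each varint in data straight onto dst, with no
// intermediate buffer, mirroring the change above.
func decodePacked(dst []int64, data []byte) ([]int64, error) {
	for len(data) > 0 {
		u, n := binary.Uvarint(data)
		if n <= 0 {
			return nil, fmt.Errorf("invalid varint")
		}
		dst = append(dst, int64(u))
		data = data[n:]
	}
	return dst, nil
}

func main() {
	// Encode 1, 2, 300 as a packed field, then decode it back.
	var buf []byte
	for _, v := range []uint64{1, 2, 300} {
		var tmp [binary.MaxVarintLen64]byte
		buf = append(buf, tmp[:binary.PutUvarint(tmp[:], v)]...)
	}
	out, err := decodePacked(nil, buf)
	fmt.Println(out, err) // [1 2 300] <nil>
}
```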
diff --git a/vendor/github.com/google/pprof/profile/prune.go b/vendor/github.com/google/pprof/profile/prune.go
index 02d21a8184639e07f7de8b6735f0a2d1a1b598eb..b2f9fd54660d9a55758dc2cd8d05fb27a36fcbc4 100644
--- a/vendor/github.com/google/pprof/profile/prune.go
+++ b/vendor/github.com/google/pprof/profile/prune.go
@@ -62,15 +62,31 @@ func (p *Profile) Prune(dropRx, keepRx *regexp.Regexp) {
 	prune := make(map[uint64]bool)
 	pruneBeneath := make(map[uint64]bool)
 
+	// simplifyFunc can be expensive, so cache results.
+	// Note that the same function name can be encountered many times due to
+	// different lines and addresses in the same function.
+	pruneCache := map[string]bool{} // Map from function to whether or not to prune
+	pruneFromHere := func(s string) bool {
+		if r, ok := pruneCache[s]; ok {
+			return r
+		}
+		funcName := simplifyFunc(s)
+		if dropRx.MatchString(funcName) {
+			if keepRx == nil || !keepRx.MatchString(funcName) {
+				pruneCache[s] = true
+				return true
+			}
+		}
+		pruneCache[s] = false
+		return false
+	}
+
 	for _, loc := range p.Location {
 		var i int
 		for i = len(loc.Line) - 1; i >= 0; i-- {
 			if fn := loc.Line[i].Function; fn != nil && fn.Name != "" {
-				funcName := simplifyFunc(fn.Name)
-				if dropRx.MatchString(funcName) {
-					if keepRx == nil || !keepRx.MatchString(funcName) {
-						break
-					}
+				if pruneFromHere(fn.Name) {
+					break
 				}
 			}
 		}
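The pruning change above memoizes the drop/keep decision per raw function name so that `simplifyFunc` and the regexp matches run once per distinct name. A self-contained sketch of the same memoization pattern; the `simplify` helper here is a stand-in, not the vendored `simplifyFunc`:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	dropRx := regexp.MustCompile(`^runtime\.`)
	var keepRx *regexp.Regexp // nil: nothing is explicitly kept

	// simplify stands in for simplifyFunc: strip a trailing "(...)" suffix.
	simplify := func(name string) string {
		if i := strings.Index(name, "("); i > 0 {
			return name[:i]
		}
		return name
	}

	// Memoize the simplify+match decision per raw name, mirroring pruneFromHere.
	cache := map[string]bool{}
	pruneFromHere := func(name string) bool {
		if r, ok := cache[name]; ok {
			return r
		}
		fn := simplify(name)
		r := dropRx.MatchString(fn) && (keepRx == nil || !keepRx.MatchString(fn))
		cache[name] = r
		return r
	}

	fmt.Println(pruneFromHere("runtime.mallocgc"), pruneFromHere("main.work"))
	// true false; repeated calls with the same name now hit the cache.
}
```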
diff --git a/vendor/github.com/hashicorp/errwrap/LICENSE b/vendor/github.com/hashicorp/errwrap/LICENSE
deleted file mode 100644
index c33dcc7c928c646b497b74de395fb53916a7be25..0000000000000000000000000000000000000000
--- a/vendor/github.com/hashicorp/errwrap/LICENSE
+++ /dev/null
@@ -1,354 +0,0 @@
-Mozilla Public License, version 2.0
-
-1. Definitions
-
-1.1. “Contributor”
-
-     means each individual or legal entity that creates, contributes to the
-     creation of, or owns Covered Software.
-
-1.2. “Contributor Version”
-
-     means the combination of the Contributions of others (if any) used by a
-     Contributor and that particular Contributor’s Contribution.
-
-1.3. “Contribution”
-
-     means Covered Software of a particular Contributor.
-
-1.4. “Covered Software”
-
-     means Source Code Form to which the initial Contributor has attached the
-     notice in Exhibit A, the Executable Form of such Source Code Form, and
-     Modifications of such Source Code Form, in each case including portions
-     thereof.
-
-1.5. “Incompatible With Secondary Licenses”
-     means
-
-     a. that the initial Contributor has attached the notice described in
-        Exhibit B to the Covered Software; or
-
-     b. that the Covered Software was made available under the terms of version
-        1.1 or earlier of the License, but not also under the terms of a
-        Secondary License.
-
-1.6. “Executable Form”
-
-     means any form of the work other than Source Code Form.
-
-1.7. “Larger Work”
-
-     means a work that combines Covered Software with other material, in a separate
-     file or files, that is not Covered Software.
-
-1.8. “License”
-
-     means this document.
-
-1.9. “Licensable”
-
-     means having the right to grant, to the maximum extent possible, whether at the
-     time of the initial grant or subsequently, any and all of the rights conveyed by
-     this License.
-
-1.10. “Modifications”
-
-     means any of the following:
-
-     a. any file in Source Code Form that results from an addition to, deletion
-        from, or modification of the contents of Covered Software; or
-
-     b. any new file in Source Code Form that contains any Covered Software.
-
-1.11. “Patent Claims” of a Contributor
-
-      means any patent claim(s), including without limitation, method, process,
-      and apparatus claims, in any patent Licensable by such Contributor that
-      would be infringed, but for the grant of the License, by the making,
-      using, selling, offering for sale, having made, import, or transfer of
-      either its Contributions or its Contributor Version.
-
-1.12. “Secondary License”
-
-      means either the GNU General Public License, Version 2.0, the GNU Lesser
-      General Public License, Version 2.1, the GNU Affero General Public
-      License, Version 3.0, or any later versions of those licenses.
-
-1.13. “Source Code Form”
-
-      means the form of the work preferred for making modifications.
-
-1.14. “You” (or “Your”)
-
-      means an individual or a legal entity exercising rights under this
-      License. For legal entities, “You” includes any entity that controls, is
-      controlled by, or is under common control with You. For purposes of this
-      definition, “control” means (a) the power, direct or indirect, to cause
-      the direction or management of such entity, whether by contract or
-      otherwise, or (b) ownership of more than fifty percent (50%) of the
-      outstanding shares or beneficial ownership of such entity.
-
-
-2. License Grants and Conditions
-
-2.1. Grants
-
-     Each Contributor hereby grants You a world-wide, royalty-free,
-     non-exclusive license:
-
-     a. under intellectual property rights (other than patent or trademark)
-        Licensable by such Contributor to use, reproduce, make available,
-        modify, display, perform, distribute, and otherwise exploit its
-        Contributions, either on an unmodified basis, with Modifications, or as
-        part of a Larger Work; and
-
-     b. under Patent Claims of such Contributor to make, use, sell, offer for
-        sale, have made, import, and otherwise transfer either its Contributions
-        or its Contributor Version.
-
-2.2. Effective Date
-
-     The licenses granted in Section 2.1 with respect to any Contribution become
-     effective for each Contribution on the date the Contributor first distributes
-     such Contribution.
-
-2.3. Limitations on Grant Scope
-
-     The licenses granted in this Section 2 are the only rights granted under this
-     License. No additional rights or licenses will be implied from the distribution
-     or licensing of Covered Software under this License. Notwithstanding Section
-     2.1(b) above, no patent license is granted by a Contributor:
-
-     a. for any code that a Contributor has removed from Covered Software; or
-
-     b. for infringements caused by: (i) Your and any other third party’s
-        modifications of Covered Software, or (ii) the combination of its
-        Contributions with other software (except as part of its Contributor
-        Version); or
-
-     c. under Patent Claims infringed by Covered Software in the absence of its
-        Contributions.
-
-     This License does not grant any rights in the trademarks, service marks, or
-     logos of any Contributor (except as may be necessary to comply with the
-     notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
-     No Contributor makes additional grants as a result of Your choice to
-     distribute the Covered Software under a subsequent version of this License
-     (see Section 10.2) or under the terms of a Secondary License (if permitted
-     under the terms of Section 3.3).
-
-2.5. Representation
-
-     Each Contributor represents that the Contributor believes its Contributions
-     are its original creation(s) or it has sufficient rights to grant the
-     rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
-     This License is not intended to limit any rights You have under applicable
-     copyright doctrines of fair use, fair dealing, or other equivalents.
-
-2.7. Conditions
-
-     Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
-     Section 2.1.
-
-
-3. Responsibilities
-
-3.1. Distribution of Source Form
-
-     All distribution of Covered Software in Source Code Form, including any
-     Modifications that You create or to which You contribute, must be under the
-     terms of this License. You must inform recipients that the Source Code Form
-     of the Covered Software is governed by the terms of this License, and how
-     they can obtain a copy of this License. You may not attempt to alter or
-     restrict the recipients’ rights in the Source Code Form.
-
-3.2. Distribution of Executable Form
-
-     If You distribute Covered Software in Executable Form then:
-
-     a. such Covered Software must also be made available in Source Code Form,
-        as described in Section 3.1, and You must inform recipients of the
-        Executable Form how they can obtain a copy of such Source Code Form by
-        reasonable means in a timely manner, at a charge no more than the cost
-        of distribution to the recipient; and
-
-     b. You may distribute such Executable Form under the terms of this License,
-        or sublicense it under different terms, provided that the license for
-        the Executable Form does not attempt to limit or alter the recipients’
-        rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
-     You may create and distribute a Larger Work under terms of Your choice,
-     provided that You also comply with the requirements of this License for the
-     Covered Software. If the Larger Work is a combination of Covered Software
-     with a work governed by one or more Secondary Licenses, and the Covered
-     Software is not Incompatible With Secondary Licenses, this License permits
-     You to additionally distribute such Covered Software under the terms of
-     such Secondary License(s), so that the recipient of the Larger Work may, at
-     their option, further distribute the Covered Software under the terms of
-     either this License or such Secondary License(s).
-
-3.4. Notices
-
-     You may not remove or alter the substance of any license notices (including
-     copyright notices, patent notices, disclaimers of warranty, or limitations
-     of liability) contained within the Source Code Form of the Covered
-     Software, except that You may alter any license notices to the extent
-     required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
-     You may choose to offer, and to charge a fee for, warranty, support,
-     indemnity or liability obligations to one or more recipients of Covered
-     Software. However, You may do so only on Your own behalf, and not on behalf
-     of any Contributor. You must make it absolutely clear that any such
-     warranty, support, indemnity, or liability obligation is offered by You
-     alone, and You hereby agree to indemnify every Contributor for any
-     liability incurred by such Contributor as a result of warranty, support,
-     indemnity or liability terms You offer. You may include additional
-     disclaimers of warranty and limitations of liability specific to any
-     jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
-
-   If it is impossible for You to comply with any of the terms of this License
-   with respect to some or all of the Covered Software due to statute, judicial
-   order, or regulation then You must: (a) comply with the terms of this License
-   to the maximum extent possible; and (b) describe the limitations and the code
-   they affect. Such description must be placed in a text file included with all
-   distributions of the Covered Software under this License. Except to the
-   extent prohibited by statute or regulation, such description must be
-   sufficiently detailed for a recipient of ordinary skill to be able to
-   understand it.
-
-5. Termination
-
-5.1. The rights granted under this License will terminate automatically if You
-     fail to comply with any of its terms. However, if You become compliant,
-     then the rights granted under this License from a particular Contributor
-     are reinstated (a) provisionally, unless and until such Contributor
-     explicitly and finally terminates Your grants, and (b) on an ongoing basis,
-     if such Contributor fails to notify You of the non-compliance by some
-     reasonable means prior to 60 days after You have come back into compliance.
-     Moreover, Your grants from a particular Contributor are reinstated on an
-     ongoing basis if such Contributor notifies You of the non-compliance by
-     some reasonable means, this is the first time You have received notice of
-     non-compliance with this License from such Contributor, and You become
-     compliant prior to 30 days after Your receipt of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
-     infringement claim (excluding declaratory judgment actions, counter-claims,
-     and cross-claims) alleging that a Contributor Version directly or
-     indirectly infringes any patent, then the rights granted to You by any and
-     all Contributors for the Covered Software under Section 2.1 of this License
-     shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
-     license agreements (excluding distributors and resellers) which have been
-     validly granted by You or Your distributors under this License prior to
-     termination shall survive termination.
-
-6. Disclaimer of Warranty
-
-   Covered Software is provided under this License on an “as is” basis, without
-   warranty of any kind, either expressed, implied, or statutory, including,
-   without limitation, warranties that the Covered Software is free of defects,
-   merchantable, fit for a particular purpose or non-infringing. The entire
-   risk as to the quality and performance of the Covered Software is with You.
-   Should any Covered Software prove defective in any respect, You (not any
-   Contributor) assume the cost of any necessary servicing, repair, or
-   correction. This disclaimer of warranty constitutes an essential part of this
-   License. No use of  any Covered Software is authorized under this License
-   except under this disclaimer.
-
-7. Limitation of Liability
-
-   Under no circumstances and under no legal theory, whether tort (including
-   negligence), contract, or otherwise, shall any Contributor, or anyone who
-   distributes Covered Software as permitted above, be liable to You for any
-   direct, indirect, special, incidental, or consequential damages of any
-   character including, without limitation, damages for lost profits, loss of
-   goodwill, work stoppage, computer failure or malfunction, or any and all
-   other commercial damages or losses, even if such party shall have been
-   informed of the possibility of such damages. This limitation of liability
-   shall not apply to liability for death or personal injury resulting from such
-   party’s negligence to the extent applicable law prohibits such limitation.
-   Some jurisdictions do not allow the exclusion or limitation of incidental or
-   consequential damages, so this exclusion and limitation may not apply to You.
-
-8. Litigation
-
-   Any litigation relating to this License may be brought only in the courts of
-   a jurisdiction where the defendant maintains its principal place of business
-   and such litigation shall be governed by laws of that jurisdiction, without
-   reference to its conflict-of-law provisions. Nothing in this Section shall
-   prevent a party’s ability to bring cross-claims or counter-claims.
-
-9. Miscellaneous
-
-   This License represents the complete agreement concerning the subject matter
-   hereof. If any provision of this License is held to be unenforceable, such
-   provision shall be reformed only to the extent necessary to make it
-   enforceable. Any law or regulation which provides that the language of a
-   contract shall be construed against the drafter shall not be used to construe
-   this License against a Contributor.
-
-
-10. Versions of the License
-
-10.1. New Versions
-
-      Mozilla Foundation is the license steward. Except as provided in Section
-      10.3, no one other than the license steward has the right to modify or
-      publish new versions of this License. Each version will be given a
-      distinguishing version number.
-
-10.2. Effect of New Versions
-
-      You may distribute the Covered Software under the terms of the version of
-      the License under which You originally received the Covered Software, or
-      under the terms of any subsequent version published by the license
-      steward.
-
-10.3. Modified Versions
-
-      If you create software not governed by this License, and you want to
-      create a new license for such software, you may create and use a modified
-      version of this License if you rename the license and remove any
-      references to the name of the license steward (except to note that such
-      modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
-      If You choose to distribute Source Code Form that is Incompatible With
-      Secondary Licenses under the terms of this version of the License, the
-      notice described in Exhibit B of this License must be attached.
-
-Exhibit A - Source Code Form License Notice
-
-      This Source Code Form is subject to the
-      terms of the Mozilla Public License, v.
-      2.0. If a copy of the MPL was not
-      distributed with this file, You can
-      obtain one at
-      http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular file, then
-You may include the notice in a location (such as a LICENSE file in a relevant
-directory) where a recipient would be likely to look for such a notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - “Incompatible With Secondary Licenses” Notice
-
-      This Source Code Form is “Incompatible
-      With Secondary Licenses”, as defined by
-      the Mozilla Public License, v. 2.0.
-
diff --git a/vendor/github.com/hashicorp/errwrap/README.md b/vendor/github.com/hashicorp/errwrap/README.md
deleted file mode 100644
index 444df08f8e775967f248994a47d2e070126b12a5..0000000000000000000000000000000000000000
--- a/vendor/github.com/hashicorp/errwrap/README.md
+++ /dev/null
@@ -1,89 +0,0 @@
-# errwrap
-
-`errwrap` is a package for Go that formalizes the pattern of wrapping errors
-and checking if an error contains another error.
-
-There is a common pattern in Go of taking a returned `error` value and
-then wrapping it (such as with `fmt.Errorf`) before returning it. The problem
-with this pattern is that you completely lose the original `error` structure.
-
-Arguably the _correct_ approach is that you should make a custom structure
-implementing the `error` interface, and have the original error as a field
-on that structure, such [as this example](http://golang.org/pkg/os/#PathError).
-This is a good approach, but you have to know the entire chain of possible
-rewrapping that happens, when you might just care about one.
-
-`errwrap` formalizes this pattern (it doesn't matter what approach you use
-above) by giving a single interface for wrapping errors, checking if a specific
-error is wrapped, and extracting that error.
-
-## Installation and Docs
-
-Install using `go get github.com/hashicorp/errwrap`.
-
-Full documentation is available at
-http://godoc.org/github.com/hashicorp/errwrap
-
-## Usage
-
-#### Basic Usage
-
-Below is a very basic example of its usage:
-
-```go
-// A function that always returns an error, but wraps it, like a real
-// function might.
-func tryOpen() error {
-	_, err := os.Open("/i/dont/exist")
-	if err != nil {
-		return errwrap.Wrapf("Doesn't exist: {{err}}", err)
-	}
-
-	return nil
-}
-
-func main() {
-	err := tryOpen()
-
-	// We can use the Contains helpers to check if an error contains
-	// another error. It is safe to do this with a nil error, or with
-	// an error that doesn't even use the errwrap package.
-	if errwrap.Contains(err, "does not exist") {
-		// Do something
-	}
-	if errwrap.ContainsType(err, new(os.PathError)) {
-		// Do something
-	}
-
-	// Or we can use the associated `Get` functions to just extract
-	// a specific error. This would return nil if that specific error doesn't
-	// exist.
-	perr := errwrap.GetType(err, new(os.PathError))
-}
-```
-
-#### Custom Types
-
-If you're already making custom types that properly wrap errors, then
-you can get all the functionality of `errwraps.Contains` and such by
-implementing the `Wrapper` interface with just one function. Example:
-
-```go
-type AppError {
-  Code ErrorCode
-  Err  error
-}
-
-func (e *AppError) WrappedErrors() []error {
-  return []error{e.Err}
-}
-```
-
-Now this works:
-
-```go
-err := &AppError{Err: fmt.Errorf("an error")}
-if errwrap.ContainsType(err, fmt.Errorf("")) {
-	// This will work!
-}
-```
diff --git a/vendor/github.com/hashicorp/errwrap/errwrap.go b/vendor/github.com/hashicorp/errwrap/errwrap.go
deleted file mode 100644
index a733bef18c0590ec04a0e0aff46acf6f6610cf76..0000000000000000000000000000000000000000
--- a/vendor/github.com/hashicorp/errwrap/errwrap.go
+++ /dev/null
@@ -1,169 +0,0 @@
-// Package errwrap implements methods to formalize error wrapping in Go.
-//
-// All of the top-level functions that take an `error` are built to be able
-// to take any error, not just wrapped errors. This allows you to use errwrap
-// without having to type-check and type-cast everywhere.
-package errwrap
-
-import (
-	"errors"
-	"reflect"
-	"strings"
-)
-
-// WalkFunc is the callback called for Walk.
-type WalkFunc func(error)
-
-// Wrapper is an interface that can be implemented by custom types to
-// have all the Contains, Get, etc. functions in errwrap work.
-//
-// When Walk reaches a Wrapper, it will call the callback for every
-// wrapped error in addition to the wrapper itself. Since all the top-level
-// functions in errwrap use Walk, this means that all those functions work
-// with your custom type.
-type Wrapper interface {
-	WrappedErrors() []error
-}
-
-// Wrap defines that outer wraps inner, returning an error type that
-// can be cleanly used with the other methods in this package, such as
-// Contains, GetAll, etc.
-//
-// This function won't modify the error message at all (the outer message
-// will be used).
-func Wrap(outer, inner error) error {
-	return &wrappedError{
-		Outer: outer,
-		Inner: inner,
-	}
-}
-
-// Wrapf wraps an error with a formatting message. This is similar to using
-// `fmt.Errorf` to wrap an error. If you're using `fmt.Errorf` to wrap
-// errors, you should replace it with this.
-//
-// format is the format of the error message. The string '{{err}}' will
-// be replaced with the original error message.
-func Wrapf(format string, err error) error {
-	outerMsg := "<nil>"
-	if err != nil {
-		outerMsg = err.Error()
-	}
-
-	outer := errors.New(strings.Replace(
-		format, "{{err}}", outerMsg, -1))
-
-	return Wrap(outer, err)
-}
-
-// Contains checks if the given error contains an error with the
-// message msg. If err is not a wrapped error, this will always return
-// false unless the error itself happens to match this msg.
-func Contains(err error, msg string) bool {
-	return len(GetAll(err, msg)) > 0
-}
-
-// ContainsType checks if the given error contains an error with
-// the same concrete type as v. If err is not a wrapped error, this will
-// check the err itself.
-func ContainsType(err error, v interface{}) bool {
-	return len(GetAllType(err, v)) > 0
-}
-
-// Get is the same as GetAll but returns the deepest matching error.
-func Get(err error, msg string) error {
-	es := GetAll(err, msg)
-	if len(es) > 0 {
-		return es[len(es)-1]
-	}
-
-	return nil
-}
-
-// GetType is the same as GetAllType but returns the deepest matching error.
-func GetType(err error, v interface{}) error {
-	es := GetAllType(err, v)
-	if len(es) > 0 {
-		return es[len(es)-1]
-	}
-
-	return nil
-}
-
-// GetAll gets all the errors that might be wrapped in err with the
-// given message. The order of the errors is such that the outermost
-// matching error (the most recent wrap) is index zero, and so on.
-func GetAll(err error, msg string) []error {
-	var result []error
-
-	Walk(err, func(err error) {
-		if err.Error() == msg {
-			result = append(result, err)
-		}
-	})
-
-	return result
-}
-
-// GetAllType gets all the errors that are the same type as v.
-//
-// The order of the return value is the same as described in GetAll.
-func GetAllType(err error, v interface{}) []error {
-	var result []error
-
-	var search string
-	if v != nil {
-		search = reflect.TypeOf(v).String()
-	}
-	Walk(err, func(err error) {
-		var needle string
-		if err != nil {
-			needle = reflect.TypeOf(err).String()
-		}
-
-		if needle == search {
-			result = append(result, err)
-		}
-	})
-
-	return result
-}
-
-// Walk walks all the wrapped errors in err and calls the callback. If
-// err isn't a wrapped error, this will be called once for err. If err
-// is a wrapped error, the callback will be called for both the wrapper
-// that implements error as well as the wrapped error itself.
-func Walk(err error, cb WalkFunc) {
-	if err == nil {
-		return
-	}
-
-	switch e := err.(type) {
-	case *wrappedError:
-		cb(e.Outer)
-		Walk(e.Inner, cb)
-	case Wrapper:
-		cb(err)
-
-		for _, err := range e.WrappedErrors() {
-			Walk(err, cb)
-		}
-	default:
-		cb(err)
-	}
-}
-
-// wrappedError is an implementation of error that has both the
-// outer and inner errors.
-type wrappedError struct {
-	Outer error
-	Inner error
-}
-
-func (w *wrappedError) Error() string {
-	return w.Outer.Error()
-}
-
-func (w *wrappedError) WrappedErrors() []error {
-	return []error{w.Outer, w.Inner}
-}
diff --git a/vendor/github.com/hashicorp/go-multierror/LICENSE b/vendor/github.com/hashicorp/go-multierror/LICENSE
deleted file mode 100644
index 82b4de97c7e3246775ac5836680284ea8a628dd9..0000000000000000000000000000000000000000
--- a/vendor/github.com/hashicorp/go-multierror/LICENSE
+++ /dev/null
@@ -1,353 +0,0 @@
-Mozilla Public License, version 2.0
-
-1. Definitions
-
-1.1. “Contributor”
-
-     means each individual or legal entity that creates, contributes to the
-     creation of, or owns Covered Software.
-
-1.2. “Contributor Version”
-
-     means the combination of the Contributions of others (if any) used by a
-     Contributor and that particular Contributor’s Contribution.
-
-1.3. “Contribution”
-
-     means Covered Software of a particular Contributor.
-
-1.4. “Covered Software”
-
-     means Source Code Form to which the initial Contributor has attached the
-     notice in Exhibit A, the Executable Form of such Source Code Form, and
-     Modifications of such Source Code Form, in each case including portions
-     thereof.
-
-1.5. “Incompatible With Secondary Licenses”
-     means
-
-     a. that the initial Contributor has attached the notice described in
-        Exhibit B to the Covered Software; or
-
-     b. that the Covered Software was made available under the terms of version
-        1.1 or earlier of the License, but not also under the terms of a
-        Secondary License.
-
-1.6. “Executable Form”
-
-     means any form of the work other than Source Code Form.
-
-1.7. “Larger Work”
-
-     means a work that combines Covered Software with other material, in a separate
-     file or files, that is not Covered Software.
-
-1.8. “License”
-
-     means this document.
-
-1.9. “Licensable”
-
-     means having the right to grant, to the maximum extent possible, whether at the
-     time of the initial grant or subsequently, any and all of the rights conveyed by
-     this License.
-
-1.10. “Modifications”
-
-     means any of the following:
-
-     a. any file in Source Code Form that results from an addition to, deletion
-        from, or modification of the contents of Covered Software; or
-
-     b. any new file in Source Code Form that contains any Covered Software.
-
-1.11. “Patent Claims” of a Contributor
-
-      means any patent claim(s), including without limitation, method, process,
-      and apparatus claims, in any patent Licensable by such Contributor that
-      would be infringed, but for the grant of the License, by the making,
-      using, selling, offering for sale, having made, import, or transfer of
-      either its Contributions or its Contributor Version.
-
-1.12. “Secondary License”
-
-      means either the GNU General Public License, Version 2.0, the GNU Lesser
-      General Public License, Version 2.1, the GNU Affero General Public
-      License, Version 3.0, or any later versions of those licenses.
-
-1.13. “Source Code Form”
-
-      means the form of the work preferred for making modifications.
-
-1.14. “You” (or “Your”)
-
-      means an individual or a legal entity exercising rights under this
-      License. For legal entities, “You” includes any entity that controls, is
-      controlled by, or is under common control with You. For purposes of this
-      definition, “control” means (a) the power, direct or indirect, to cause
-      the direction or management of such entity, whether by contract or
-      otherwise, or (b) ownership of more than fifty percent (50%) of the
-      outstanding shares or beneficial ownership of such entity.
-
-
-2. License Grants and Conditions
-
-2.1. Grants
-
-     Each Contributor hereby grants You a world-wide, royalty-free,
-     non-exclusive license:
-
-     a. under intellectual property rights (other than patent or trademark)
-        Licensable by such Contributor to use, reproduce, make available,
-        modify, display, perform, distribute, and otherwise exploit its
-        Contributions, either on an unmodified basis, with Modifications, or as
-        part of a Larger Work; and
-
-     b. under Patent Claims of such Contributor to make, use, sell, offer for
-        sale, have made, import, and otherwise transfer either its Contributions
-        or its Contributor Version.
-
-2.2. Effective Date
-
-     The licenses granted in Section 2.1 with respect to any Contribution become
-     effective for each Contribution on the date the Contributor first distributes
-     such Contribution.
-
-2.3. Limitations on Grant Scope
-
-     The licenses granted in this Section 2 are the only rights granted under this
-     License. No additional rights or licenses will be implied from the distribution
-     or licensing of Covered Software under this License. Notwithstanding Section
-     2.1(b) above, no patent license is granted by a Contributor:
-
-     a. for any code that a Contributor has removed from Covered Software; or
-
-     b. for infringements caused by: (i) Your and any other third party’s
-        modifications of Covered Software, or (ii) the combination of its
-        Contributions with other software (except as part of its Contributor
-        Version); or
-
-     c. under Patent Claims infringed by Covered Software in the absence of its
-        Contributions.
-
-     This License does not grant any rights in the trademarks, service marks, or
-     logos of any Contributor (except as may be necessary to comply with the
-     notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
-     No Contributor makes additional grants as a result of Your choice to
-     distribute the Covered Software under a subsequent version of this License
-     (see Section 10.2) or under the terms of a Secondary License (if permitted
-     under the terms of Section 3.3).
-
-2.5. Representation
-
-     Each Contributor represents that the Contributor believes its Contributions
-     are its original creation(s) or it has sufficient rights to grant the
-     rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
-     This License is not intended to limit any rights You have under applicable
-     copyright doctrines of fair use, fair dealing, or other equivalents.
-
-2.7. Conditions
-
-     Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
-     Section 2.1.
-
-
-3. Responsibilities
-
-3.1. Distribution of Source Form
-
-     All distribution of Covered Software in Source Code Form, including any
-     Modifications that You create or to which You contribute, must be under the
-     terms of this License. You must inform recipients that the Source Code Form
-     of the Covered Software is governed by the terms of this License, and how
-     they can obtain a copy of this License. You may not attempt to alter or
-     restrict the recipients’ rights in the Source Code Form.
-
-3.2. Distribution of Executable Form
-
-     If You distribute Covered Software in Executable Form then:
-
-     a. such Covered Software must also be made available in Source Code Form,
-        as described in Section 3.1, and You must inform recipients of the
-        Executable Form how they can obtain a copy of such Source Code Form by
-        reasonable means in a timely manner, at a charge no more than the cost
-        of distribution to the recipient; and
-
-     b. You may distribute such Executable Form under the terms of this License,
-        or sublicense it under different terms, provided that the license for
-        the Executable Form does not attempt to limit or alter the recipients’
-        rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
-     You may create and distribute a Larger Work under terms of Your choice,
-     provided that You also comply with the requirements of this License for the
-     Covered Software. If the Larger Work is a combination of Covered Software
-     with a work governed by one or more Secondary Licenses, and the Covered
-     Software is not Incompatible With Secondary Licenses, this License permits
-     You to additionally distribute such Covered Software under the terms of
-     such Secondary License(s), so that the recipient of the Larger Work may, at
-     their option, further distribute the Covered Software under the terms of
-     either this License or such Secondary License(s).
-
-3.4. Notices
-
-     You may not remove or alter the substance of any license notices (including
-     copyright notices, patent notices, disclaimers of warranty, or limitations
-     of liability) contained within the Source Code Form of the Covered
-     Software, except that You may alter any license notices to the extent
-     required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
-     You may choose to offer, and to charge a fee for, warranty, support,
-     indemnity or liability obligations to one or more recipients of Covered
-     Software. However, You may do so only on Your own behalf, and not on behalf
-     of any Contributor. You must make it absolutely clear that any such
-     warranty, support, indemnity, or liability obligation is offered by You
-     alone, and You hereby agree to indemnify every Contributor for any
-     liability incurred by such Contributor as a result of warranty, support,
-     indemnity or liability terms You offer. You may include additional
-     disclaimers of warranty and limitations of liability specific to any
-     jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
-
-   If it is impossible for You to comply with any of the terms of this License
-   with respect to some or all of the Covered Software due to statute, judicial
-   order, or regulation then You must: (a) comply with the terms of this License
-   to the maximum extent possible; and (b) describe the limitations and the code
-   they affect. Such description must be placed in a text file included with all
-   distributions of the Covered Software under this License. Except to the
-   extent prohibited by statute or regulation, such description must be
-   sufficiently detailed for a recipient of ordinary skill to be able to
-   understand it.
-
-5. Termination
-
-5.1. The rights granted under this License will terminate automatically if You
-     fail to comply with any of its terms. However, if You become compliant,
-     then the rights granted under this License from a particular Contributor
-     are reinstated (a) provisionally, unless and until such Contributor
-     explicitly and finally terminates Your grants, and (b) on an ongoing basis,
-     if such Contributor fails to notify You of the non-compliance by some
-     reasonable means prior to 60 days after You have come back into compliance.
-     Moreover, Your grants from a particular Contributor are reinstated on an
-     ongoing basis if such Contributor notifies You of the non-compliance by
-     some reasonable means, this is the first time You have received notice of
-     non-compliance with this License from such Contributor, and You become
-     compliant prior to 30 days after Your receipt of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
-     infringement claim (excluding declaratory judgment actions, counter-claims,
-     and cross-claims) alleging that a Contributor Version directly or
-     indirectly infringes any patent, then the rights granted to You by any and
-     all Contributors for the Covered Software under Section 2.1 of this License
-     shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
-     license agreements (excluding distributors and resellers) which have been
-     validly granted by You or Your distributors under this License prior to
-     termination shall survive termination.
-
-6. Disclaimer of Warranty
-
-   Covered Software is provided under this License on an “as is” basis, without
-   warranty of any kind, either expressed, implied, or statutory, including,
-   without limitation, warranties that the Covered Software is free of defects,
-   merchantable, fit for a particular purpose or non-infringing. The entire
-   risk as to the quality and performance of the Covered Software is with You.
-   Should any Covered Software prove defective in any respect, You (not any
-   Contributor) assume the cost of any necessary servicing, repair, or
-   correction. This disclaimer of warranty constitutes an essential part of this
-   License. No use of  any Covered Software is authorized under this License
-   except under this disclaimer.
-
-7. Limitation of Liability
-
-   Under no circumstances and under no legal theory, whether tort (including
-   negligence), contract, or otherwise, shall any Contributor, or anyone who
-   distributes Covered Software as permitted above, be liable to You for any
-   direct, indirect, special, incidental, or consequential damages of any
-   character including, without limitation, damages for lost profits, loss of
-   goodwill, work stoppage, computer failure or malfunction, or any and all
-   other commercial damages or losses, even if such party shall have been
-   informed of the possibility of such damages. This limitation of liability
-   shall not apply to liability for death or personal injury resulting from such
-   party’s negligence to the extent applicable law prohibits such limitation.
-   Some jurisdictions do not allow the exclusion or limitation of incidental or
-   consequential damages, so this exclusion and limitation may not apply to You.
-
-8. Litigation
-
-   Any litigation relating to this License may be brought only in the courts of
-   a jurisdiction where the defendant maintains its principal place of business
-   and such litigation shall be governed by laws of that jurisdiction, without
-   reference to its conflict-of-law provisions. Nothing in this Section shall
-   prevent a party’s ability to bring cross-claims or counter-claims.
-
-9. Miscellaneous
-
-   This License represents the complete agreement concerning the subject matter
-   hereof. If any provision of this License is held to be unenforceable, such
-   provision shall be reformed only to the extent necessary to make it
-   enforceable. Any law or regulation which provides that the language of a
-   contract shall be construed against the drafter shall not be used to construe
-   this License against a Contributor.
-
-
-10. Versions of the License
-
-10.1. New Versions
-
-      Mozilla Foundation is the license steward. Except as provided in Section
-      10.3, no one other than the license steward has the right to modify or
-      publish new versions of this License. Each version will be given a
-      distinguishing version number.
-
-10.2. Effect of New Versions
-
-      You may distribute the Covered Software under the terms of the version of
-      the License under which You originally received the Covered Software, or
-      under the terms of any subsequent version published by the license
-      steward.
-
-10.3. Modified Versions
-
-      If you create software not governed by this License, and you want to
-      create a new license for such software, you may create and use a modified
-      version of this License if you rename the license and remove any
-      references to the name of the license steward (except to note that such
-      modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
-      If You choose to distribute Source Code Form that is Incompatible With
-      Secondary Licenses under the terms of this version of the License, the
-      notice described in Exhibit B of this License must be attached.
-
-Exhibit A - Source Code Form License Notice
-
-      This Source Code Form is subject to the
-      terms of the Mozilla Public License, v.
-      2.0. If a copy of the MPL was not
-      distributed with this file, You can
-      obtain one at
-      http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular file, then
-You may include the notice in a location (such as a LICENSE file in a relevant
-directory) where a recipient would be likely to look for such a notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - “Incompatible With Secondary Licenses” Notice
-
-      This Source Code Form is “Incompatible
-      With Secondary Licenses”, as defined by
-      the Mozilla Public License, v. 2.0.
diff --git a/vendor/github.com/hashicorp/go-multierror/Makefile b/vendor/github.com/hashicorp/go-multierror/Makefile
deleted file mode 100644
index b97cd6ed02b5b67f40119c94a14d7dc2fd44d1e7..0000000000000000000000000000000000000000
--- a/vendor/github.com/hashicorp/go-multierror/Makefile
+++ /dev/null
@@ -1,31 +0,0 @@
-TEST?=./...
-
-default: test
-
-# test runs the test suite and vets the code.
-test: generate
-	@echo "==> Running tests..."
-	@go list $(TEST) \
-		| grep -v "/vendor/" \
-		| xargs -n1 go test -timeout=60s -parallel=10 ${TESTARGS}
-
-# testrace runs the race checker
-testrace: generate
-	@echo "==> Running tests (race)..."
-	@go list $(TEST) \
-		| grep -v "/vendor/" \
-		| xargs -n1 go test -timeout=60s -race ${TESTARGS}
-
-# updatedeps installs all the dependencies needed to run and build.
-updatedeps:
-	@sh -c "'${CURDIR}/scripts/deps.sh' '${NAME}'"
-
-# generate runs `go generate` to build the dynamically generated source files.
-generate:
-	@echo "==> Generating..."
-	@find . -type f -name '.DS_Store' -delete
-	@go list ./... \
-		| grep -v "/vendor/" \
-		| xargs -n1 go generate
-
-.PHONY: default test testrace updatedeps generate
diff --git a/vendor/github.com/hashicorp/go-multierror/README.md b/vendor/github.com/hashicorp/go-multierror/README.md
deleted file mode 100644
index 71dd308ed8117bab855eae370146a729afcc4237..0000000000000000000000000000000000000000
--- a/vendor/github.com/hashicorp/go-multierror/README.md
+++ /dev/null
@@ -1,150 +0,0 @@
-# go-multierror
-
-[![CircleCI](https://img.shields.io/circleci/build/github/hashicorp/go-multierror/master)](https://circleci.com/gh/hashicorp/go-multierror)
-[![Go Reference](https://pkg.go.dev/badge/github.com/hashicorp/go-multierror.svg)](https://pkg.go.dev/github.com/hashicorp/go-multierror)
-![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/hashicorp/go-multierror)
-
-[circleci]: https://app.circleci.com/pipelines/github/hashicorp/go-multierror
-[godocs]: https://pkg.go.dev/github.com/hashicorp/go-multierror
-
-`go-multierror` is a package for Go that provides a mechanism for
-representing a list of `error` values as a single `error`.
-
-This allows a function in Go to return an `error` that might actually
-be a list of errors. If the caller knows this, they can unwrap the
-list and access the errors. If the caller doesn't know, the error
-formats to a nice human-readable format.
-
-`go-multierror` is fully compatible with the Go standard library
-[errors](https://golang.org/pkg/errors/) package, including the
-functions `As`, `Is`, and `Unwrap`. This provides a standardized approach
-for introspecting on error values.
-
-## Installation and Docs
-
-Install using `go get github.com/hashicorp/go-multierror`.
-
-Full documentation is available at
-https://pkg.go.dev/github.com/hashicorp/go-multierror
-
-### Requires go version 1.13 or newer
-
-`go-multierror` requires go version 1.13 or newer. Go 1.13 introduced
-[error wrapping](https://golang.org/doc/go1.13#error_wrapping), which
-this library takes advantage of.
-
-If you need to use an earlier version of go, you can use the
-[v1.0.0](https://github.com/hashicorp/go-multierror/tree/v1.0.0)
-tag, which doesn't rely on features in go 1.13.
-
-If you see compile errors that look like the below, it's likely that
-you're on an older version of go:
-
-```
-/go/src/github.com/hashicorp/go-multierror/multierror.go:112:9: undefined: errors.As
-/go/src/github.com/hashicorp/go-multierror/multierror.go:117:9: undefined: errors.Is
-```
-
-## Usage
-
-go-multierror is easy to use and purposely built to be unobtrusive in
-existing Go applications/libraries that may not be aware of it.
-
-**Building a list of errors**
-
-The `Append` function is used to create a list of errors. This function
-behaves a lot like the Go built-in `append` function: it doesn't matter
-if the first argument is nil, a `multierror.Error`, or any other `error`,
-the function behaves as you would expect.
-
-```go
-var result error
-
-if err := step1(); err != nil {
-	result = multierror.Append(result, err)
-}
-if err := step2(); err != nil {
-	result = multierror.Append(result, err)
-}
-
-return result
-```
-
-**Customizing the formatting of the errors**
-
-By specifying a custom `ErrorFormat`, you can customize the format
-of the `Error() string` function:
-
-```go
-var result *multierror.Error
-
-// ... accumulate errors here, maybe using Append
-
-if result != nil {
-	result.ErrorFormat = func([]error) string {
-		return "errors!"
-	}
-}
-```
-
-**Accessing the list of errors**
-
-`multierror.Error` implements `error` so if the caller doesn't know about
-multierror, it will work just fine. But if you're aware a multierror might
-be returned, you can use type switches to access the list of errors:
-
-```go
-if err := something(); err != nil {
-	if merr, ok := err.(*multierror.Error); ok {
-		// Use merr.Errors
-	}
-}
-```
-
-You can also use the standard [`errors.Unwrap`](https://golang.org/pkg/errors/#Unwrap)
-function. This will continue to unwrap into subsequent errors until none exist.
-
-**Extracting an error**
-
-The standard library [`errors.As`](https://golang.org/pkg/errors/#As)
-function can be used directly with a multierror to extract a specific error:
-
-```go
-// Assume err is a multierror value
-err := somefunc()
-
-// We want to know if "err" has a "RichErrorType" in it and extract it.
-var errRich RichErrorType
-if errors.As(err, &errRich) {
-	// It has it, and now errRich is populated.
-}
-```
-
-**Checking for an exact error value**
-
-Some errors are returned as exact errors such as the [`ErrNotExist`](https://golang.org/pkg/os/#pkg-variables)
-error in the `os` package. You can check if this error is present by using
-the standard [`errors.Is`](https://golang.org/pkg/errors/#Is) function.
-
-```go
-// Assume err is a multierror value
-err := somefunc()
-if errors.Is(err, os.ErrNotExist) {
-	// err contains os.ErrNotExist
-}
-```
-
-**Returning a multierror only if there are errors**
-
-If you build a `multierror.Error`, you can use the `ErrorOrNil` function
-to return an `error` implementation only if there are errors to return:
-
-```go
-var result *multierror.Error
-
-// ... accumulate errors here
-
-// Return the `error` only if errors were added to the multierror, otherwise
-// return nil since there are no errors.
-return result.ErrorOrNil()
-```
diff --git a/vendor/github.com/hashicorp/go-multierror/append.go b/vendor/github.com/hashicorp/go-multierror/append.go
deleted file mode 100644
index 3e2589bfde0c882b491697b60d98ce699c7d1499..0000000000000000000000000000000000000000
--- a/vendor/github.com/hashicorp/go-multierror/append.go
+++ /dev/null
@@ -1,43 +0,0 @@
-package multierror
-
-// Append is a helper function that will append more errors
-// onto an Error in order to create a larger multi-error.
-//
-// If err is not a multierror.Error, then it will be turned into
-// one. If any of the errs are multierr.Error, they will be flattened
-// one level into err.
-// Any nil errors within errs will be ignored. If err is nil, a new
-// *Error will be returned.
-func Append(err error, errs ...error) *Error {
-	switch err := err.(type) {
-	case *Error:
-		// Typed nils can reach here, so initialize if we are nil
-		if err == nil {
-			err = new(Error)
-		}
-
-		// Go through each error and flatten
-		for _, e := range errs {
-			switch e := e.(type) {
-			case *Error:
-				if e != nil {
-					err.Errors = append(err.Errors, e.Errors...)
-				}
-			default:
-				if e != nil {
-					err.Errors = append(err.Errors, e)
-				}
-			}
-		}
-
-		return err
-	default:
-		newErrs := make([]error, 0, len(errs)+1)
-		if err != nil {
-			newErrs = append(newErrs, err)
-		}
-		newErrs = append(newErrs, errs...)
-
-		return Append(&Error{}, newErrs...)
-	}
-}
diff --git a/vendor/github.com/hashicorp/go-multierror/flatten.go b/vendor/github.com/hashicorp/go-multierror/flatten.go
deleted file mode 100644
index aab8e9abec9d86f6ac11394b311e00dfd1dec7ec..0000000000000000000000000000000000000000
--- a/vendor/github.com/hashicorp/go-multierror/flatten.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package multierror
-
-// Flatten flattens the given error, merging any *Errors together into
-// a single *Error.
-func Flatten(err error) error {
-	// If it isn't an *Error, just return the error as-is
-	if _, ok := err.(*Error); !ok {
-		return err
-	}
-
-	// Otherwise, make the result and flatten away!
-	flatErr := new(Error)
-	flatten(err, flatErr)
-	return flatErr
-}
-
-func flatten(err error, flatErr *Error) {
-	switch err := err.(type) {
-	case *Error:
-		for _, e := range err.Errors {
-			flatten(e, flatErr)
-		}
-	default:
-		flatErr.Errors = append(flatErr.Errors, err)
-	}
-}
diff --git a/vendor/github.com/hashicorp/go-multierror/format.go b/vendor/github.com/hashicorp/go-multierror/format.go
deleted file mode 100644
index 47f13c49a673e2c7e3515b19765d1405b936ed8a..0000000000000000000000000000000000000000
--- a/vendor/github.com/hashicorp/go-multierror/format.go
+++ /dev/null
@@ -1,27 +0,0 @@
-package multierror
-
-import (
-	"fmt"
-	"strings"
-)
-
-// ErrorFormatFunc is a function callback that is called by Error to
-// turn the list of errors into a string.
-type ErrorFormatFunc func([]error) string
-
-// ListFormatFunc is a basic formatter that outputs the number of errors
-// that occurred along with a bullet point list of the errors.
-func ListFormatFunc(es []error) string {
-	if len(es) == 1 {
-		return fmt.Sprintf("1 error occurred:\n\t* %s\n\n", es[0])
-	}
-
-	points := make([]string, len(es))
-	for i, err := range es {
-		points[i] = fmt.Sprintf("* %s", err)
-	}
-
-	return fmt.Sprintf(
-		"%d errors occurred:\n\t%s\n\n",
-		len(es), strings.Join(points, "\n\t"))
-}
diff --git a/vendor/github.com/hashicorp/go-multierror/group.go b/vendor/github.com/hashicorp/go-multierror/group.go
deleted file mode 100644
index 9c29efb7f87e911e2787146940ffbf74a447dba9..0000000000000000000000000000000000000000
--- a/vendor/github.com/hashicorp/go-multierror/group.go
+++ /dev/null
@@ -1,38 +0,0 @@
-package multierror
-
-import "sync"
-
-// Group is a collection of goroutines which return errors that need to be
-// coalesced.
-type Group struct {
-	mutex sync.Mutex
-	err   *Error
-	wg    sync.WaitGroup
-}
-
-// Go calls the given function in a new goroutine.
-//
-// If the function returns an error it is added to the group multierror which
-// is returned by Wait.
-func (g *Group) Go(f func() error) {
-	g.wg.Add(1)
-
-	go func() {
-		defer g.wg.Done()
-
-		if err := f(); err != nil {
-			g.mutex.Lock()
-			g.err = Append(g.err, err)
-			g.mutex.Unlock()
-		}
-	}()
-}
-
-// Wait blocks until all function calls from the Go method have returned, then
-// returns the multierror.
-func (g *Group) Wait() *Error {
-	g.wg.Wait()
-	g.mutex.Lock()
-	defer g.mutex.Unlock()
-	return g.err
-}
diff --git a/vendor/github.com/hashicorp/go-multierror/multierror.go b/vendor/github.com/hashicorp/go-multierror/multierror.go
deleted file mode 100644
index f54574326461616e1e344fb46888c858f9ed1af3..0000000000000000000000000000000000000000
--- a/vendor/github.com/hashicorp/go-multierror/multierror.go
+++ /dev/null
@@ -1,121 +0,0 @@
-package multierror
-
-import (
-	"errors"
-	"fmt"
-)
-
-// Error is an error type to track multiple errors. This is used to
-// accumulate errors in cases and return them as a single "error".
-type Error struct {
-	Errors      []error
-	ErrorFormat ErrorFormatFunc
-}
-
-func (e *Error) Error() string {
-	fn := e.ErrorFormat
-	if fn == nil {
-		fn = ListFormatFunc
-	}
-
-	return fn(e.Errors)
-}
-
-// ErrorOrNil returns an error interface if this Error represents
-// a list of errors, or returns nil if the list of errors is empty. This
-// function is useful at the end of accumulation to make sure that the value
-// returned represents the existence of errors.
-func (e *Error) ErrorOrNil() error {
-	if e == nil {
-		return nil
-	}
-	if len(e.Errors) == 0 {
-		return nil
-	}
-
-	return e
-}
-
-func (e *Error) GoString() string {
-	return fmt.Sprintf("*%#v", *e)
-}
-
-// WrappedErrors returns the list of errors that this Error is wrapping. It is
-// an implementation of the errwrap.Wrapper interface so that multierror.Error
-// can be used with that library.
-//
-// This method is not safe to be called concurrently. Unlike accessing the
-// Errors field directly, this function also checks if the multierror is nil to
-// prevent a null-pointer panic. It satisfies the errwrap.Wrapper interface.
-func (e *Error) WrappedErrors() []error {
-	if e == nil {
-		return nil
-	}
-	return e.Errors
-}
-
-// Unwrap returns an error from Error (or nil if there are no errors).
-// This error returned will further support Unwrap to get the next error,
-// etc. The order will match the order of Errors in the multierror.Error
-// at the time of calling.
-//
-// The resulting error supports errors.As/Is/Unwrap so you can continue
-// to use the stdlib errors package to introspect further.
-//
-// This will perform a shallow copy of the errors slice. Any errors appended
-// to this error after calling Unwrap will not be available until a new
-// Unwrap is called on the multierror.Error.
-func (e *Error) Unwrap() error {
-	// If we have no errors then we do nothing
-	if e == nil || len(e.Errors) == 0 {
-		return nil
-	}
-
-	// If we have exactly one error, we can just return that directly.
-	if len(e.Errors) == 1 {
-		return e.Errors[0]
-	}
-
-	// Shallow copy the slice
-	errs := make([]error, len(e.Errors))
-	copy(errs, e.Errors)
-	return chain(errs)
-}
-
-// chain implements the interfaces necessary for errors.Is/As/Unwrap to
-// work in a deterministic way with multierror. A chain tracks a list of
-// errors while accounting for the current represented error. This lets
-// Is/As be meaningful.
-//
-// Unwrap returns the next error. In the cleanest form, Unwrap would return
-// the wrapped error here but we can't do that if we want to properly
-// get access to all the errors. Instead, users are recommended to use
-// Is/As to get the correct error type out.
-//
-// Precondition: []error is non-empty (len > 0)
-type chain []error
-
-// Error implements the error interface
-func (e chain) Error() string {
-	return e[0].Error()
-}
-
-// Unwrap implements errors.Unwrap by returning the next error in the
-// chain or nil if there are no more errors.
-func (e chain) Unwrap() error {
-	if len(e) == 1 {
-		return nil
-	}
-
-	return e[1:]
-}
-
-// As implements errors.As by attempting to map to the current value.
-func (e chain) As(target interface{}) bool {
-	return errors.As(e[0], target)
-}
-
-// Is implements errors.Is by comparing the current value directly.
-func (e chain) Is(target error) bool {
-	return errors.Is(e[0], target)
-}
diff --git a/vendor/github.com/hashicorp/go-multierror/prefix.go b/vendor/github.com/hashicorp/go-multierror/prefix.go
deleted file mode 100644
index 5c477abe44f80ed57b73796c77bce65b3dbfb15d..0000000000000000000000000000000000000000
--- a/vendor/github.com/hashicorp/go-multierror/prefix.go
+++ /dev/null
@@ -1,37 +0,0 @@
-package multierror
-
-import (
-	"fmt"
-
-	"github.com/hashicorp/errwrap"
-)
-
-// Prefix is a helper function that will prefix some text
-// to the given error. If the error is a multierror.Error, then
-// it will be prefixed to each wrapped error.
-//
-// This is useful to use when appending multiple multierrors
-// together in order to give better scoping.
-func Prefix(err error, prefix string) error {
-	if err == nil {
-		return nil
-	}
-
-	format := fmt.Sprintf("%s {{err}}", prefix)
-	switch err := err.(type) {
-	case *Error:
-		// Typed nils can reach here, so initialize if we are nil
-		if err == nil {
-			err = new(Error)
-		}
-
-		// Wrap each of the errors
-		for i, e := range err.Errors {
-			err.Errors[i] = errwrap.Wrapf(format, e)
-		}
-
-		return err
-	default:
-		return errwrap.Wrapf(format, err)
-	}
-}
diff --git a/vendor/github.com/hashicorp/go-multierror/sort.go b/vendor/github.com/hashicorp/go-multierror/sort.go
deleted file mode 100644
index fecb14e81c5428e4fb7fe5458c34f641a0e24eb3..0000000000000000000000000000000000000000
--- a/vendor/github.com/hashicorp/go-multierror/sort.go
+++ /dev/null
@@ -1,16 +0,0 @@
-package multierror
-
-// Len implements sort.Interface function for length
-func (err Error) Len() int {
-	return len(err.Errors)
-}
-
-// Swap implements sort.Interface function for swapping elements
-func (err Error) Swap(i, j int) {
-	err.Errors[i], err.Errors[j] = err.Errors[j], err.Errors[i]
-}
-
-// Less implements sort.Interface function for determining order
-func (err Error) Less(i, j int) bool {
-	return err.Errors[i].Error() < err.Errors[j].Error()
-}
diff --git a/vendor/github.com/jgautheron/goconst/README.md b/vendor/github.com/jgautheron/goconst/README.md
index 8dd093baf0ce7bc4e5754bc2101a9f0a6adf3bd9..c671eb5412302c21c3f0beaf63691cdb4cf985cd 100644
--- a/vendor/github.com/jgautheron/goconst/README.md
+++ b/vendor/github.com/jgautheron/goconst/README.md
@@ -23,6 +23,7 @@ Usage:
 Flags:
 
   -ignore            exclude files matching the given regular expression
+  -ignore-strings    exclude strings matching the given regular expression  
   -ignore-tests      exclude tests from the search (default: true)
   -min-occurrences   report from how many occurrences (default: 2)
   -min-length        only report strings with the minimum given length (default: 3)
diff --git a/vendor/github.com/jgautheron/goconst/api.go b/vendor/github.com/jgautheron/goconst/api.go
index d56fcd6c2544aba38491bdf49985b9dd5f814863..b838e035f698e84423224bcd2d4b4edf649976f1 100644
--- a/vendor/github.com/jgautheron/goconst/api.go
+++ b/vendor/github.com/jgautheron/goconst/api.go
@@ -14,6 +14,7 @@ type Issue struct {
 }
 
 type Config struct {
+	IgnoreStrings      string
 	IgnoreTests        bool
 	MatchWithConstants bool
 	MinStringLength    int
@@ -28,6 +29,7 @@ func Run(files []*ast.File, fset *token.FileSet, cfg *Config) ([]Issue, error) {
 	p := New(
 		"",
 		"",
+		cfg.IgnoreStrings,
 		cfg.IgnoreTests,
 		cfg.MatchWithConstants,
 		cfg.ParseNumbers,
diff --git a/vendor/github.com/jgautheron/goconst/parser.go b/vendor/github.com/jgautheron/goconst/parser.go
index 2ed7a9a909d9d2bee0eaf88cc931de9c239a414c..2f32740b969031f4ac5eec308240eec9156c690c 100644
--- a/vendor/github.com/jgautheron/goconst/parser.go
+++ b/vendor/github.com/jgautheron/goconst/parser.go
@@ -24,11 +24,11 @@ const (
 
 type Parser struct {
 	// Meant to be passed via New()
-	path, ignore               string
-	ignoreTests, matchConstant bool
-	minLength, minOccurrences  int
-	numberMin, numberMax       int
-	excludeTypes               map[Type]bool
+	path, ignore, ignoreStrings string
+	ignoreTests, matchConstant  bool
+	minLength, minOccurrences   int
+	numberMin, numberMax        int
+	excludeTypes                map[Type]bool
 
 	supportedTokens []token.Token
 
@@ -39,7 +39,7 @@ type Parser struct {
 
 // New creates a new instance of the parser.
 // This is your entry point if you'd like to use goconst as an API.
-func New(path, ignore string, ignoreTests, matchConstant, numbers bool, numberMin, numberMax, minLength, minOccurrences int, excludeTypes map[Type]bool) *Parser {
+func New(path, ignore, ignoreStrings string, ignoreTests, matchConstant, numbers bool, numberMin, numberMax, minLength, minOccurrences int, excludeTypes map[Type]bool) *Parser {
 	supportedTokens := []token.Token{token.STRING}
 	if numbers {
 		supportedTokens = append(supportedTokens, token.INT, token.FLOAT)
@@ -48,6 +48,7 @@ func New(path, ignore string, ignoreTests, matchConstant, numbers bool, numberMi
 	return &Parser{
 		path:            path,
 		ignore:          ignore,
+		ignoreStrings:   ignoreStrings,
 		ignoreTests:     ignoreTests,
 		matchConstant:   matchConstant,
 		minLength:       minLength,
@@ -98,12 +99,22 @@ func (p *Parser) ProcessResults() {
 			delete(p.strs, str)
 		}
 
+		if p.ignoreStrings != "" {
+			match, err := regexp.MatchString(p.ignoreStrings, str)
+			if err != nil {
+				log.Println(err)
+			}
+			if match {
+				delete(p.strs, str)
+			}
+		}
+
 		// If the value is a number
-		if i, err := strconv.Atoi(str); err == nil {
-			if p.numberMin != 0 && i < p.numberMin {
+		if i, err := strconv.ParseInt(str, 0, 0); err == nil {
+			if p.numberMin != 0 && i < int64(p.numberMin) {
 				delete(p.strs, str)
 			}
-			if p.numberMax != 0 && i > p.numberMax {
+			if p.numberMax != 0 && i > int64(p.numberMax) {
 				delete(p.strs, str)
 			}
 		}
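
On the API side, the new `IgnoreStrings` field flows from `Config` through `New` into the parser, where matching strings are dropped before they are reported. A minimal sketch of driving the updated API follows; the regex value and the single hand-parsed file are illustrative assumptions, not part of this change:

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"log"

	"github.com/jgautheron/goconst"
)

func main() {
	fset := token.NewFileSet()
	// Parse one file; a real driver would collect every file in the package.
	file, err := parser.ParseFile(fset, "main.go", nil, 0)
	if err != nil {
		log.Fatal(err)
	}

	issues, err := goconst.Run([]*ast.File{file}, fset, &goconst.Config{
		IgnoreStrings:   `^(GET|POST|PUT)$`, // new option: skip strings matching this regex
		IgnoreTests:     true,
		MinStringLength: 3,
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, issue := range issues {
		// Issue fields are not shown in this hunk, so print the whole struct.
		fmt.Printf("%+v\n", issue)
	}
}
```
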
diff --git a/vendor/github.com/sivchari/nosnakecase/.gitignore b/vendor/github.com/jjti/go-spancheck/.gitignore
similarity index 59%
rename from vendor/github.com/sivchari/nosnakecase/.gitignore
rename to vendor/github.com/jjti/go-spancheck/.gitignore
index 66fd13c903cac02eb9657cd53fb227823484401d..1f83be414ca27be1f01c2b3778336fb2cabbdfce 100644
--- a/vendor/github.com/sivchari/nosnakecase/.gitignore
+++ b/vendor/github.com/jjti/go-spancheck/.gitignore
@@ -1,3 +1,6 @@
+# If you prefer the allow list template instead of the deny list, see community template:
+# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
+#
 # Binaries for programs and plugins
 *.exe
 *.exe~
@@ -13,3 +16,4 @@
 
 # Dependency directories (remove the comment below to include it)
 # vendor/
+src/
diff --git a/vendor/github.com/jjti/go-spancheck/.golangci.yml b/vendor/github.com/jjti/go-spancheck/.golangci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..15d8513d6804c5d6dcab775eb289570c884c55fc
--- /dev/null
+++ b/vendor/github.com/jjti/go-spancheck/.golangci.yml
@@ -0,0 +1,103 @@
+## A good ref for this: https://gist.github.com/maratori/47a4d00457a92aa426dbd48a18776322
+
+run:
+  timeout: 5m
+  tests: true
+linters:
+  enable:
+    - asasalint # checks for pass []any as any in variadic func(...any)
+    - asciicheck # checks that your code does not contain non-ASCII identifiers
+    - bidichk # checks for dangerous unicode character sequences
+    - bodyclose
+    - containedctx
+    - decorder # checks declaration order and count of types, constants, variables and functions
+    - dogsled
+    - dupword # checks for duplicate words in the source code
+    - durationcheck # checks for two durations multiplied together
+    - errcheck
+    - errname
+    - errorlint
+    - exhaustive # checks exhaustiveness of enum switch statements
+    - exportloopref # checks for pointers to enclosing loop variables
+    - gci
+    - gochecknoinits # checks that no init functions are present in Go code
+    - gocritic
+    - gomnd
+    - gosimple
+    - govet
+    - importas # enforces consistent import aliases
+    - ineffassign
+    - loggercheck
+    - makezero # finds slice declarations with non-zero initial length
+    - mirror
+    - misspell
+    - musttag # enforces field tags in (un)marshaled structs
+    - nakedret
+    - nestif # reports deeply nested if statements
+    - nilerr # finds the code that returns nil even if it checks that the error is not nil
+    - noctx # finds sending http request without context.Context
+    - nolintlint # reports ill-formed or insufficient nolint directives
+    - predeclared # finds code that shadows one of Go's predeclared identifiers
+    - promlinter
+    - reassign # checks that package variables are not reassigned
+    - revive # fast, configurable, extensible, flexible, and beautiful linter for Go, drop-in replacement of golint
+    - staticcheck
+    - stylecheck
+    - tenv
+    - thelper # detects golang test helpers without t.Helper() call and checks the consistency of test helpers
+    - unconvert # removes unnecessary type conversions
+    - unparam # reports unused function parameters
+    - unused
+    - usestdlibvars # detects the possibility to use variables/constants from the Go standard library
+    - wastedassign # finds wasted assignment statements
+    - whitespace # detects leading and trailing whitespace
+linters-settings:
+  gci:
+    skip-generated: true
+    custom-order: true
+    sections:
+      - standard # Standard section: captures all standard packages.
+      - default # Default section: contains all imports that could not be matched to another section type.
+      - prefix(github.com/jjti)
+  exhaustive:
+    # Program elements to check for exhaustiveness.
+    # Default: [ switch ]
+    check:
+      - switch
+      - map
+  gocritic:
+    settings:
+      captLocal:
+        # Whether to restrict checker to params only.
+        # Default: true
+        paramsOnly: false
+      underef:
+        # Whether to skip (*x).method() calls where x is a pointer receiver.
+        # Default: true
+        skipRecvDeref: false
+  govet:
+    enable-all: true
+    disable:
+      - fieldalignment # too strict
+      - shadow # bunch of false positive, doesn't realize when we return from a func
+  misspell:
+    locale: US
+  nakedret:
+    max-func-lines: 0
+  nestif:
+    # Minimal complexity of if statements to report.
+    # Default: 5
+    min-complexity: 4
+  nolintlint:
+    # Enable to require an explanation of nonzero length after each nolint directive.
+    # Default: false
+    require-explanation: true
+  stylecheck:
+    checks: ["all"]
+issues:
+  include:
+    - EXC0001 # Error return value of x is not checked
+    - EXC0013 # package comment should be of the form "(.+)...
+    - EXC0014 # comment on exported (.+) should be of the form "(.+)..."
+  exclude:
+    - ifElseChain
diff --git a/vendor/github.com/jjti/go-spancheck/CONTRIBUTING.md b/vendor/github.com/jjti/go-spancheck/CONTRIBUTING.md
new file mode 100644
index 0000000000000000000000000000000000000000..32932fae102b3eca5d16e7d765afd46f3e104b95
--- /dev/null
+++ b/vendor/github.com/jjti/go-spancheck/CONTRIBUTING.md
@@ -0,0 +1,51 @@
+# Contributing guideline
+
+Contributions are welcome + appreciated.
+
+## Open Requests
+
+These are a couple contributions I would especially appreciate:
+
+1. Add check for SetAttributes: https://github.com/jjti/go-spancheck/issues/1
+1. Add SuggestedFix(es): https://github.com/jjti/go-spancheck/issues/2
+
+## Steps
+
+### 1. Create an Issue
+
+If one does not exist already, open a bug report or feature request in [https://github.com/jjti/go-spancheck/issues](https://github.com/jjti/go-spancheck/issues).
+
+### 2. Add a test case
+
+Test cases are in `/testdata`.
+
+If fixing a bug, you can add it to `testdata/enableall/enable_all.go` (for example):
+
+```go
+func _() {
+	ctx, span := otel.Tracer("foo").Start(context.Background(), "bar") // want "span.End is not called on all paths, possible memory leak"
+	print(ctx.Done(), span.IsRecording())
+} // want "return can be reached without calling span.End"
+```
+
+If adding a new feature with a new combination of flags, create a new module within `testdata`:
+
+1. Create a new module, eg `testdata/setattributes`
+1. Copy/paste go.mod/sum into the new module directory and update the module definition, eg `module github.com/jjti/go-spancheck/testdata/setattributes`
+1. Add the module to the workspace in [go.work](./go.work)
+1. Add the module's directory to the `testvendor` Make target in [Makefile](./Makefile)
+
+### 3. Run tests
+
+```bash
+make test
+```
+
+### 4. Open a PR
+
+Eg of a GitHub snippet for PRs:
+
+```bash
+alias gpr='gh pr view --web 2>/dev/null || gh pr create --web --fill'
+gpr
+```
diff --git a/vendor/github.com/esimonov/ifshort/LICENSE b/vendor/github.com/jjti/go-spancheck/LICENSE
similarity index 96%
rename from vendor/github.com/esimonov/ifshort/LICENSE
rename to vendor/github.com/jjti/go-spancheck/LICENSE
index a04e339c019c9fbea4f73695a8dd096a02564c98..552ddf2dc54cb0832774a7b6c9ffbe45f65e2411 100644
--- a/vendor/github.com/esimonov/ifshort/LICENSE
+++ b/vendor/github.com/jjti/go-spancheck/LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2020 Eugene Simonov
+Copyright (c) 2023 Joshua Timmons
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
diff --git a/vendor/github.com/jjti/go-spancheck/Makefile b/vendor/github.com/jjti/go-spancheck/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..39d80f7c61d2a0daeaf4f82ddab495f84df24197
--- /dev/null
+++ b/vendor/github.com/jjti/go-spancheck/Makefile
@@ -0,0 +1,27 @@
+.PHONY: fmt
+fmt:
+	golangci-lint run --fix --config ./.golangci.yml
+
+.PHONY: test
+test: testvendor
+	go test -v ./...
+
+# note: I'm copying https://github.com/ghostiam/protogetter/blob/main/testdata/Makefile
+#
+# x/tools/go/analysis/analysistest does not support go modules. To work around this issue
+# we need to vendor any external modules to `./src`.
+#
+# Follow https://github.com/golang/go/issues/37054 for more details.
+.PHONY: testvendor
+testvendor:
+	@rm -rf base/src
+	@cd testdata/base && go mod vendor
+	@cp -r testdata/base/vendor testdata/base/src
+	@cp -r testdata/base/vendor testdata/disableerrorchecks/src
+	@cp -r testdata/base/vendor testdata/enableall/src
+	@rm -rf testdata/base/vendor
+
+.PHONY: install
+install:
+	go install ./cmd/spancheck
+	@echo "Installed in $(shell which spancheck)"
\ No newline at end of file
diff --git a/vendor/github.com/jjti/go-spancheck/README.md b/vendor/github.com/jjti/go-spancheck/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..393663ba722ade49757baa27f7ab12a29e271779
--- /dev/null
+++ b/vendor/github.com/jjti/go-spancheck/README.md
@@ -0,0 +1,268 @@
+# go-spancheck
+
+![Latest release](https://img.shields.io/github/v/release/jjti/go-spancheck)
+[![ci](https://github.com/jjti/go-spancheck/actions/workflows/ci.yaml/badge.svg)](https://github.com/jjti/go-spancheck/actions/workflows/ci.yaml)
+[![Go Report Card](https://goreportcard.com/badge/github.com/jjti/go-spancheck)](https://goreportcard.com/report/github.com/jjti/go-spancheck)
+[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg?style=flat)](LICENSE)
+
+Checks usage of:
+
+- [OpenTelemetry spans](https://opentelemetry.io/docs/instrumentation/go/manual/) from [go.opentelemetry.io/otel/trace](https://pkg.go.dev/go.opentelemetry.io/otel/trace)
+- [OpenCensus spans](https://opencensus.io/quickstart/go/tracing/) from [go.opencensus.io/trace](https://pkg.go.dev/go.opencensus.io/trace#Span)
+
+## Example
+
+```bash
+spancheck -checks 'end,set-status,record-error' ./...
+```
+
+```go
+func _() error {
+    // span.End is not called on all paths, possible memory leak
+    // span.SetStatus is not called on all paths
+    // span.RecordError is not called on all paths
+    _, span := otel.Tracer("foo").Start(context.Background(), "bar")
+
+    if true {
+        // return can be reached without calling span.End
+        // return can be reached without calling span.SetStatus
+        // return can be reached without calling span.RecordError
+        return errors.New("err")
+    }
+
+    return nil // return can be reached without calling span.End
+}
+```
+
+## Configuration
+
+### golangci-lint
+
+Docs on configuring the linter are also available at [https://golangci-lint.run/usage/linters/#spancheck](https://golangci-lint.run/usage/linters/#spancheck):
+
+```yaml
+linters:
+  enable:
+    - spancheck
+
+linters-settings:
+  spancheck:
+    # Checks to enable.
+    # Options include:
+    # - `end`: check that `span.End()` is called
+    # - `record-error`: check that `span.RecordError(err)` is called when an error is returned
+    # - `set-status`: check that `span.SetStatus(codes.Error, msg)` is called when an error is returned
+    # Default: ["end"]
+    checks:
+      - end
+      - record-error
+      - set-status
+    # A list of regexes for function signatures that silence `record-error` and `set-status` reports
+    # if found in the call path to a returned error.
+    # https://github.com/jjti/go-spancheck#ignore-check-signatures
+    # Default: []
+    ignore-check-signatures:
+      - "telemetry.RecordError"
+    # A list of regexes for additional function signatures that create spans. This is useful if you have a utility
+    # method to create spans. Each entry should be of the form <regex>:<telemetry-type>, where `telemetry-type`
+    # can be `opentelemetry` or `opencensus`.
+    # https://github.com/jjti/go-spancheck#extra-start-span-signatures
+    # Default: []
+    extra-start-span-signatures:
+      - "github.com/user/repo/telemetry/trace.Start:opentelemetry"
+```
+
+### CLI
+
+To install the linter as a CLI:
+
+```bash
+go install github.com/jjti/go-spancheck/cmd/spancheck@latest
+spancheck ./...
+```
+
+Only the `span.End()` check is enabled by default. The others can be enabled with `-checks 'end,set-status,record-error'`.
+
+```txt
+$ spancheck -h
+...
+Flags:
+  -checks string
+        comma-separated list of checks to enable (options: end, set-status, record-error) (default "end")
+  -extra-start-span-signatures string
+        comma-separated list of regex:telemetry-type for function signatures that indicate the start of a span
+  -ignore-check-signatures string
+        comma-separated list of regex for function signatures that disable checks on errors
+```
+
+### Ignore Check Signatures
+
+The `span.SetStatus()` and `span.RecordError()` checks warn when there is:
+
+1. a path to a return statement
+1. that returns an error
+1. without a call (to `SetStatus` or `RecordError`, respectively)
+
+But it's convenient to call `SetStatus` and `RecordError` from utility methods [[1](https://andydote.co.uk/2023/09/19/tracing-is-better/#step-2-wrap-the-errors)]. To support that, the `ignore-*-check-signatures` settings will suppress warnings if the configured function is present in the path.
+
+For example, by default, the code below would have warnings as shown:
+
+```go
+func task(ctx context.Context) error {
+    ctx, span := otel.Tracer("foo").Start(ctx, "bar") // span.SetStatus is not called on all paths
+    defer span.End()
+
+    if err := subTask(ctx); err != nil {
+        return recordErr(span, err) // return can be reached without calling span.SetStatus
+    }
+
+    return nil
+}
+
+func recordErr(span trace.Span, err error) error {
+    span.SetStatus(codes.Error, err.Error())
+    span.RecordError(err)
+    return err
+}
+```
+
+The warnings can be ignored by setting the `-ignore-check-signatures` flag to `recordErr`:
+
+```bash
+spancheck -checks 'end,set-status,record-error' -ignore-check-signatures 'recordErr' ./...
+```
+
+### Extra Start Span Signatures
+
+By default, Span creation will be tracked from calls to [(go.opentelemetry.io/otel/trace.Tracer).Start](https://github.com/open-telemetry/opentelemetry-go/blob/98b32a6c3a87fbee5d34c063b9096f416b250897/trace/trace.go#L523), [go.opencensus.io/trace.StartSpan](https://pkg.go.dev/go.opencensus.io/trace#StartSpan), or [go.opencensus.io/trace.StartSpanWithRemoteParent](https://github.com/census-instrumentation/opencensus-go/blob/v0.24.0/trace/trace_api.go#L66).
+
+You can use the `-extra-start-span-signatures` flag to list additional Span creation functions. For all such functions:
+
+1. their Spans will be linted (for all enabled checks)
+1. checks will be disabled (i.e. there is no linting of Spans within the creation functions)
+
+You must pass a comma-separated list of regex patterns and the telemetry library corresponding to the returned Span. Each entry should be of the form `<regex>:<telemetry-type>`, where `telemetry-type` can be `opentelemetry` or `opencensus`. For example, if you have created a function named `StartTrace` in a `telemetry` package, using the `go.opentelemetry.io/otel` library, you can include this function for analysis like so:
+
+```bash
+spancheck -extra-start-span-signatures 'github.com/user/repo/telemetry/StartTrace:opentelemetry' ./...
+```
+
+## Problem Statement
+
+Tracing is a celebrated [[1](https://andydote.co.uk/2023/09/19/tracing-is-better/),[2](https://charity.wtf/2022/08/15/live-your-best-life-with-structured-events/)] and well marketed [[3](https://docs.datadoghq.com/tracing/),[4](https://www.honeycomb.io/distributed-tracing)] pillar of observability. But self-instrumented tracing requires a lot of easy-to-forget boilerplate:
+
+```go
+import (
+	"go.opentelemetry.io/otel"
+	"go.opentelemetry.io/otel/codes"
+)
+
+func task(ctx context.Context) error {
+    ctx, span := otel.Tracer("foo").Start(ctx, "bar")
+    defer span.End() // call `.End()`
+
+    if err := subTask(ctx); err != nil {
+        span.SetStatus(codes.Error, err.Error()) // call SetStatus(codes.Error, msg) to set status:error
+        span.RecordError(err) // call RecordError(err) to record an error event
+        return err
+    }
+
+    return nil
+}
+```
+
+For spans to be _really_ useful, developers need to:
+
+1. call `span.End()` always
+1. call `span.SetStatus(codes.Error, msg)` on error
+1. call `span.RecordError(err)` on error
+1. call `span.SetAttributes()` liberally
+
+- OpenTelemetry: [Creating spans](https://opentelemetry.io/docs/instrumentation/go/manual/#creating-spans)
+- Uptrace: [OpenTelemetry Go Tracing API](https://uptrace.dev/opentelemetry/go-tracing.html#quickstart)
+
+This linter helps developers with steps 1-3.
+
+## Checks
+
+This linter supports three checks, each documented below. Only the check for `span.End()` is enabled by default. See [Configuration](#configuration) for instructions on enabling the others.
+
+### `span.End()`
+
+Enabled by default.
+
+Not calling `End` can cause memory leaks and prevents spans from being closed.
+
+> Any Span that is created MUST also be ended. This is the responsibility of the user. Implementations of this API may leak memory or other resources if Spans are not ended.
+
+[source: trace.go](https://github.com/open-telemetry/opentelemetry-go/blob/98b32a6c3a87fbee5d34c063b9096f416b250897/trace/trace.go#L523)
+
+```go
+func task(ctx context.Context) error {
+    otel.Tracer("app").Start(ctx, "foo") // span is unassigned, probable memory leak
+    _, span := otel.Tracer("app").Start(ctx, "foo") // span.End is not called on all paths, possible memory leak
+    return nil // return can be reached without calling span.End
+}
+```
+
+### `span.SetStatus(codes.Error, "msg")`
+
+Disabled by default. Enable with `-checks 'set-status'`.
+
+Developers should call `SetStatus` on spans. The status attribute is an important, first-class attribute:
+
+1. observability platforms and APMs differentiate "success" vs "failure" using [span's status codes](https://docs.datadoghq.com/tracing/metrics/).
+1. telemetry collector agents, like the [Open Telemetry Collector's Tail Sampling Processor](https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/tailsamplingprocessor/README.md#:~:text=Sampling%20Processor.-,status_code,-%3A%20Sample%20based%20upon), are configurable to sample `Error` spans at a higher rate than `OK` spans.
+1. observability platforms, like [DataDog, have trace retention filters that use spans' status](https://docs.datadoghq.com/tracing/trace_pipeline/trace_retention/). In other words, `status:error` spans often receive special treatment with the assumption they are more useful for debugging. And forgetting to set the status can lead to spans, with useful debugging information, being dropped.
+
+```go
+func _() error {
+    _, span := otel.Tracer("foo").Start(context.Background(), "bar") // span.SetStatus is not called on all paths
+    defer span.End()
+
+    if err := subTask(); err != nil {
+        span.RecordError(err)
+        return err // return can be reached without calling span.SetStatus
+    }
+
+    return nil
+}
+```
+
+OpenTelemetry docs: [Set span status](https://opentelemetry.io/docs/instrumentation/go/manual/#set-span-status).
+
+### `span.RecordError(err)`
+
+Disabled by default. Enable with `-checks 'record-error'`.
+
+Calling `RecordError` creates a new exception-type [event (structured log message)](https://opentelemetry.io/docs/concepts/signals/traces/#span-events) on the span. This is recommended to capture the error's stack trace.
+
+```go
+func _() error {
+    _, span := otel.Tracer("foo").Start(context.Background(), "bar") // span.RecordError is not called on all paths
+    defer span.End()
+
+    if err := subTask(); err != nil {
+        span.SetStatus(codes.Error, err.Error())
+        return err // return can be reached without calling span.RecordError
+    }
+
+    return nil
+}
+```
+
+OpenTelemetry docs: [Record errors](https://opentelemetry.io/docs/instrumentation/go/manual/#record-errors).
+
+Note: this check is not applied to [OpenCensus spans](https://pkg.go.dev/go.opencensus.io/trace#SpanInterface) because they have no `RecordError` method.
+
+## Attribution
+
+This linter is the product of liberal copying of:
+
+- [github.com/golang/tools/go/analysis/passes/lostcancel](https://github.com/golang/tools/tree/master/go/analysis/passes/lostcancel) (half the linter)
+- [github.com/tomarrell/wrapcheck](https://github.com/tomarrell/wrapcheck) (error type checking and config)
+- [github.com/Antonboom/testifylint](https://github.com/Antonboom/testifylint) (README)
+- [github.com/ghostiam/protogetter](https://github.com/ghostiam/protogetter/blob/main/testdata/Makefile) (test setup)
+
+And the contributions of:
+- [@trixnz](https://github.com/trixnz) who [added support for custom span start functions](https://github.com/jjti/go-spancheck/pull/16)
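
To make the README's `-extra-start-span-signatures` and `-ignore-check-signatures` sections concrete, here is a sketch of the kind of utility package those flags point at. The module path and function names are hypothetical; only the OpenTelemetry calls are real API:

```go
// Package telemetry is a hypothetical helper package. StartTrace would be
// registered with spancheck via
//   -extra-start-span-signatures 'github.com/user/repo/telemetry.StartTrace:opentelemetry'
// and RecordErr via
//   -ignore-check-signatures 'telemetry.RecordErr'
package telemetry

import (
	"context"

	"go.opentelemetry.io/otel"
	"go.opentelemetry.io/otel/codes"
	"go.opentelemetry.io/otel/trace"
)

// StartTrace starts a span with the repo-wide tracer name so callers don't repeat it.
func StartTrace(ctx context.Context, name string) (context.Context, trace.Span) {
	return otel.Tracer("github.com/user/repo").Start(ctx, name)
}

// RecordErr sets the error status and records the error event in one call, so a
// pass through this helper can satisfy both the set-status and record-error checks.
func RecordErr(span trace.Span, err error) error {
	span.SetStatus(codes.Error, err.Error())
	span.RecordError(err)
	return err
}
```
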
diff --git a/vendor/github.com/jjti/go-spancheck/config.go b/vendor/github.com/jjti/go-spancheck/config.go
new file mode 100644
index 0000000000000000000000000000000000000000..ed02a1ad910c9b26beffbe2a9bcdfa74e751d214
--- /dev/null
+++ b/vendor/github.com/jjti/go-spancheck/config.go
@@ -0,0 +1,223 @@
+package spancheck
+
+import (
+	"flag"
+	"fmt"
+	"log"
+	"regexp"
+	"strings"
+)
+
+// Check is a type of check that can be enabled or disabled.
+type Check int
+
+const (
+	// EndCheck if enabled, checks that span.End() is called after span creation and before the function returns.
+	EndCheck Check = iota
+
+	// SetStatusCheck if enabled, checks that `span.SetStatus(codes.Error, msg)` is called when returning an error.
+	SetStatusCheck
+
+	// RecordErrorCheck if enabled, checks that span.RecordError(err) is called when returning an error.
+	RecordErrorCheck
+)
+
+var (
+	startSpanSignatureCols     = 2
+	defaultStartSpanSignatures = []string{
+		// https://github.com/open-telemetry/opentelemetry-go/blob/98b32a6c3a87fbee5d34c063b9096f416b250897/trace/trace.go#L523
+		`\(go.opentelemetry.io/otel/trace.Tracer\).Start:opentelemetry`,
+		// https://pkg.go.dev/go.opencensus.io/trace#StartSpan
+		`go.opencensus.io/trace.StartSpan:opencensus`,
+		// https://github.com/census-instrumentation/opencensus-go/blob/v0.24.0/trace/trace_api.go#L66
+		`go.opencensus.io/trace.StartSpanWithRemoteParent:opencensus`,
+	}
+)
+
+func (c Check) String() string {
+	switch c {
+	case EndCheck:
+		return "end"
+	case SetStatusCheck:
+		return "set-status"
+	case RecordErrorCheck:
+		return "record-error"
+	default:
+		return ""
+	}
+}
+
+// Checks is a list of all checks by name.
+var Checks = map[string]Check{
+	EndCheck.String():         EndCheck,
+	SetStatusCheck.String():   SetStatusCheck,
+	RecordErrorCheck.String(): RecordErrorCheck,
+}
+
+type spanStartMatcher struct {
+	signature *regexp.Regexp
+	spanType  spanType
+}
+
+// Config is a configuration for the spancheck analyzer.
+type Config struct {
+	fs flag.FlagSet
+
+	// EnabledChecks is a list of checks to enable by name.
+	EnabledChecks []string
+
+	// IgnoreChecksSignaturesSlice is a slice of strings that are turned into
+	// the IgnoreSetStatusCheckSignatures regex.
+	IgnoreChecksSignaturesSlice []string
+
+	StartSpanMatchersSlice []string
+
+	endCheckEnabled    bool
+	setStatusEnabled   bool
+	recordErrorEnabled bool
+
+	// ignoreChecksSignatures is a regex that, if matched, disables the
+	// SetStatus and RecordError checks on error.
+	ignoreChecksSignatures *regexp.Regexp
+
+	startSpanMatchers            []spanStartMatcher
+	startSpanMatchersCustomRegex *regexp.Regexp
+}
+
+// NewDefaultConfig returns a new Config with default values.
+func NewDefaultConfig() *Config {
+	return &Config{
+		EnabledChecks:          []string{EndCheck.String()},
+		StartSpanMatchersSlice: defaultStartSpanSignatures,
+	}
+}
+
+// finalize parses checks and signatures from the public string slices of Config.
+func (c *Config) finalize() {
+	c.parseSignatures()
+
+	checks := parseChecks(c.EnabledChecks)
+	c.endCheckEnabled = contains(checks, EndCheck)
+	c.setStatusEnabled = contains(checks, SetStatusCheck)
+	c.recordErrorEnabled = contains(checks, RecordErrorCheck)
+}
+
+// parseSignatures sets the Ignore*CheckSignatures regex from the string slices.
+func (c *Config) parseSignatures() {
+	c.parseIgnoreSignatures()
+	c.parseStartSpanSignatures()
+}
+
+func (c *Config) parseIgnoreSignatures() {
+	if c.ignoreChecksSignatures == nil && len(c.IgnoreChecksSignaturesSlice) > 0 {
+		if len(c.IgnoreChecksSignaturesSlice) == 1 && c.IgnoreChecksSignaturesSlice[0] == "" {
+			return
+		}
+
+		c.ignoreChecksSignatures = createRegex(c.IgnoreChecksSignaturesSlice)
+	}
+}
+
+func (c *Config) parseStartSpanSignatures() {
+	if c.startSpanMatchers != nil {
+		return
+	}
+
+	customMatchers := []string{}
+	for i, sig := range c.StartSpanMatchersSlice {
+		parts := strings.Split(sig, ":")
+
+		// Make sure we have both a signature and a telemetry type
+		if len(parts) != startSpanSignatureCols {
+			log.Default().Printf("[WARN] invalid start span signature \"%s\". expected regex:telemetry-type\n", sig)
+
+			continue
+		}
+
+		sig, sigType := parts[0], parts[1]
+		if len(sig) < 1 {
+			log.Default().Print("[WARN] invalid start span signature, empty pattern")
+
+			continue
+		}
+
+		spanType, ok := SpanTypes[sigType]
+		if !ok {
+			validSpanTypes := make([]string, 0, len(SpanTypes))
+			for k := range SpanTypes {
+				validSpanTypes = append(validSpanTypes, k)
+			}
+
+			log.Default().
+				Printf("[WARN] invalid start span type \"%s\". expected one of %s\n", sigType, strings.Join(validSpanTypes, ", "))
+
+			continue
+		}
+
+		regex, err := regexp.Compile(sig)
+		if err != nil {
+			log.Default().Printf("[WARN] failed to compile regex from signature %s: %v\n", sig, err)
+
+			continue
+		}
+
+		c.startSpanMatchers = append(c.startSpanMatchers, spanStartMatcher{
+			signature: regex,
+			spanType:  spanType,
+		})
+
+		if i >= len(defaultStartSpanSignatures) {
+			customMatchers = append(customMatchers, sig)
+		}
+	}
+
+	c.startSpanMatchersCustomRegex = createRegex(customMatchers)
+}
+
+func parseChecks(checksSlice []string) []Check {
+	if len(checksSlice) == 0 {
+		return nil
+	}
+
+	checks := []Check{}
+	for _, check := range checksSlice {
+		checkName := strings.TrimSpace(check)
+		if checkName == "" {
+			continue
+		}
+
+		check, ok := Checks[checkName]
+		if !ok {
+			continue
+		}
+
+		checks = append(checks, check)
+	}
+
+	return checks
+}
+
+func createRegex(sigs []string) *regexp.Regexp {
+	if len(sigs) == 0 {
+		return nil
+	}
+
+	regex := fmt.Sprintf("(%s)", strings.Join(sigs, "|"))
+	regexCompiled, err := regexp.Compile(regex)
+	if err != nil {
+		log.Default().Print("[WARN] failed to compile regex from signature flag", "regex", regex, "err", err)
+		return nil
+	}
+
+	return regexCompiled
+}
+
+func contains(s []Check, e Check) bool {
+	for _, a := range s {
+		if a == e {
+			return true
+		}
+	}
+
+	return false
+}
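
config.go exposes the exported `EnabledChecks`, `IgnoreChecksSignaturesSlice`, and `StartSpanMatchersSlice` fields used above. A minimal sketch of wiring them into a standalone checker via x/tools' singlechecker driver (an assumption; the repo ships its own cmd/spancheck), reusing the hypothetical telemetry helper from the earlier sketch:

```go
package main

import (
	"golang.org/x/tools/go/analysis/singlechecker"

	spancheck "github.com/jjti/go-spancheck"
)

func main() {
	cfg := spancheck.NewDefaultConfig()
	// Enable all three checks instead of the default ("end" only).
	cfg.EnabledChecks = []string{"end", "set-status", "record-error"}
	// Treat the hypothetical telemetry.StartTrace helper as a span creator.
	cfg.StartSpanMatchersSlice = append(cfg.StartSpanMatchersSlice,
		`github.com/user/repo/telemetry.StartTrace:opentelemetry`)
	// Don't warn when errors pass through the hypothetical telemetry.RecordErr helper.
	cfg.IgnoreChecksSignaturesSlice = []string{`telemetry.RecordErr`}

	singlechecker.Main(spancheck.NewAnalyzerWithConfig(cfg))
}
```
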
diff --git a/vendor/github.com/jjti/go-spancheck/doc.go b/vendor/github.com/jjti/go-spancheck/doc.go
new file mode 100644
index 0000000000000000000000000000000000000000..f9dec043f6aa89d487342501cd3a81ab93292aa8
--- /dev/null
+++ b/vendor/github.com/jjti/go-spancheck/doc.go
@@ -0,0 +1,37 @@
+// Package spancheck defines a linter that checks for mistakes with OTEL trace spans.
+//
+// # Analyzer spancheck
+//
+// spancheck: check for mistakes with OpenTelemetry trace spans.
+//
+// Common mistakes with OTEL trace spans include forgetting to call End:
+//
+//	func(ctx context.Context) {
+//		ctx, span := otel.Tracer("app").Start(ctx, "span")
+//		// defer span.End() should be here
+//
+//		// do stuff
+//	}
+//
+// Forgetting to set an Error status:
+//
+//	ctx, span := otel.Tracer("app").Start(ctx, "span")
+//	defer span.End()
+//
+//	if err := task(); err != nil {
+//		// span.SetStatus(codes.Error, err.Error()) should be here
+//		span.RecordError(err)
+//		return fmt.Errorf("failed to run task: %w", err)
+//	}
+//
+// Forgetting to record the Error:
+//
+//	ctx, span := otel.Tracer("app").Start(ctx, "span")
+//	defer span.End()
+//
+//	if err := task(); err != nil {
+//		span.SetStatus(codes.Error, err.Error())
+//		// span.RecordError(err) should be here
+//		return fmt.Errorf("failed to run task: %w", err)
+//	}
+package spancheck
diff --git a/vendor/github.com/jjti/go-spancheck/go.work b/vendor/github.com/jjti/go-spancheck/go.work
new file mode 100644
index 0000000000000000000000000000000000000000..7d0a87b9e17ca1913bdd96c2eb5d271a8b9ed7f3
--- /dev/null
+++ b/vendor/github.com/jjti/go-spancheck/go.work
@@ -0,0 +1,8 @@
+go 1.20
+
+use (
+	.
+	./testdata/base
+	./testdata/disableerrorchecks
+	./testdata/enableall
+)
diff --git a/vendor/github.com/jjti/go-spancheck/go.work.sum b/vendor/github.com/jjti/go-spancheck/go.work.sum
new file mode 100644
index 0000000000000000000000000000000000000000..85e99bad5af60662e9bdbbb4de6f24e8ea7a771a
--- /dev/null
+++ b/vendor/github.com/jjti/go-spancheck/go.work.sum
@@ -0,0 +1,4 @@
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
+golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
diff --git a/vendor/github.com/jjti/go-spancheck/spancheck.go b/vendor/github.com/jjti/go-spancheck/spancheck.go
new file mode 100644
index 0000000000000000000000000000000000000000..d5d35a5b11e0837964d980eb455a8341a417d8e3
--- /dev/null
+++ b/vendor/github.com/jjti/go-spancheck/spancheck.go
@@ -0,0 +1,466 @@
+package spancheck
+
+import (
+	"go/ast"
+	"go/types"
+	"regexp"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/ctrlflow"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/go/cfg"
+)
+
+const stackLen = 32
+
+// spanType differentiates span types.
+type spanType int
+
+const (
+	spanUnset         spanType = iota // not a span
+	spanOpenTelemetry                 // from go.opentelemetry.io/otel
+	spanOpenCensus                    // from go.opencensus.io/trace
+)
+
+// SpanTypes is a list of all span types by name.
+var SpanTypes = map[string]spanType{
+	"opentelemetry": spanOpenTelemetry,
+	"opencensus":    spanOpenCensus,
+}
+
+// this approach stolen from errcheck
+// https://github.com/kisielk/errcheck/blob/7f94c385d0116ccc421fbb4709e4a484d98325ee/errcheck/errcheck.go#L22
+var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
+
+// NewAnalyzerWithConfig returns a new analyzer configured with the Config passed in.
+// Its config can be set for testing.
+func NewAnalyzerWithConfig(config *Config) *analysis.Analyzer {
+	return newAnalyzer(config)
+}
+
+func newAnalyzer(config *Config) *analysis.Analyzer {
+	config.finalize()
+
+	return &analysis.Analyzer{
+		Name:  "spancheck",
+		Doc:   "Checks for mistakes with OpenTelemetry/Census spans.",
+		Flags: config.fs,
+		Run:   run(config),
+		Requires: []*analysis.Analyzer{
+			ctrlflow.Analyzer,
+			inspect.Analyzer,
+		},
+	}
+}
+
+func run(config *Config) func(*analysis.Pass) (interface{}, error) {
+	return func(pass *analysis.Pass) (interface{}, error) {
+		inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+		nodeFilter := []ast.Node{
+			(*ast.FuncLit)(nil),  // f := func() {}
+			(*ast.FuncDecl)(nil), // func foo() {}
+		}
+		inspect.Preorder(nodeFilter, func(n ast.Node) {
+			runFunc(pass, n, config)
+		})
+
+		return nil, nil
+	}
+}
+
+type spanVar struct {
+	stmt     ast.Node
+	id       *ast.Ident
+	vr       *types.Var
+	spanType spanType
+}
+
+// runFunc checks if the node is a function, has a span, and the span never has SetStatus set.
+func runFunc(pass *analysis.Pass, node ast.Node, config *Config) {
+	// copying https://cs.opensource.google/go/x/tools/+/master:go/analysis/passes/lostcancel/lostcancel.go
+
+	// Find scope of function node
+	var funcScope *types.Scope
+	switch v := node.(type) {
+	case *ast.FuncLit:
+		funcScope = pass.TypesInfo.Scopes[v.Type]
+	case *ast.FuncDecl:
+		funcScope = pass.TypesInfo.Scopes[v.Type]
+		fnSig := pass.TypesInfo.ObjectOf(v.Name).String()
+
+		// Skip checking spans in this function if it's a custom starter/creator.
+		if config.startSpanMatchersCustomRegex != nil && config.startSpanMatchersCustomRegex.MatchString(fnSig) {
+			return
+		}
+	}
+
+	// Maps each span variable to its defining ValueSpec/AssignStmt.
+	spanVars := make(map[*ast.Ident]spanVar)
+
+	// Find the set of span vars to analyze.
+	stack := make([]ast.Node, 0, stackLen)
+	ast.Inspect(node, func(n ast.Node) bool {
+		switch n.(type) {
+		case *ast.FuncLit:
+			if len(stack) > 0 {
+				return false // don't stray into nested functions
+			}
+		case nil:
+			stack = stack[:len(stack)-1] // pop
+			return true
+		}
+		stack = append(stack, n) // push
+
+		// Look for [{AssignStmt,ValueSpec} CallExpr SelectorExpr]:
+		//
+		//   ctx, span     := otel.Tracer("app").Start(...)
+		//   ctx, span     = otel.Tracer("app").Start(...)
+		//   var ctx, span = otel.Tracer("app").Start(...)
+		sType, isStart := isSpanStart(pass.TypesInfo, n, config.startSpanMatchers)
+		if !isStart {
+			return true
+		}
+
+		if !isCall(stack[len(stack)-2]) {
+			return true
+		}
+
+		stmt := stack[len(stack)-3]
+		id := getID(stmt)
+		if id == nil {
+			pass.ReportRangef(n, "span is unassigned, probable memory leak")
+			return true
+		}
+
+		if id.Name == "_" {
+			pass.ReportRangef(id, "span is unassigned, probable memory leak")
+		} else if v, ok := pass.TypesInfo.Uses[id].(*types.Var); ok {
+			// If the span variable is defined outside function scope,
+			// do not analyze it.
+			if funcScope.Contains(v.Pos()) {
+				spanVars[id] = spanVar{
+					vr:       v,
+					stmt:     stmt,
+					id:       id,
+					spanType: sType,
+				}
+			}
+		} else if v, ok := pass.TypesInfo.Defs[id].(*types.Var); ok {
+			spanVars[id] = spanVar{
+				vr:       v,
+				stmt:     stmt,
+				id:       id,
+				spanType: sType,
+			}
+		}
+
+		return true
+	})
+
+	if len(spanVars) == 0 {
+		return // no need to inspect CFG
+	}
+
+	// Obtain the CFG.
+	cfgs := pass.ResultOf[ctrlflow.Analyzer].(*ctrlflow.CFGs)
+	var g *cfg.CFG
+	var sig *types.Signature
+	switch node := node.(type) {
+	case *ast.FuncDecl:
+		sig, _ = pass.TypesInfo.Defs[node.Name].Type().(*types.Signature)
+		g = cfgs.FuncDecl(node)
+	case *ast.FuncLit:
+		sig, _ = pass.TypesInfo.Types[node.Type].Type.(*types.Signature)
+		g = cfgs.FuncLit(node)
+	}
+	if sig == nil {
+		return // missing type information
+	}
+
+	// Check for missing calls.
+	for _, sv := range spanVars {
+		if config.endCheckEnabled {
+			// Check if there's no End to the span.
+			if ret := getMissingSpanCalls(pass, g, sv, "End", func(_ *analysis.Pass, ret *ast.ReturnStmt) *ast.ReturnStmt { return ret }, nil, config.startSpanMatchers); ret != nil {
+				pass.ReportRangef(sv.stmt, "%s.End is not called on all paths, possible memory leak", sv.vr.Name())
+				pass.ReportRangef(ret, "return can be reached without calling %s.End", sv.vr.Name())
+			}
+		}
+
+		if config.setStatusEnabled {
+			// Check if there's no SetStatus to the span setting an error.
+			if ret := getMissingSpanCalls(pass, g, sv, "SetStatus", getErrorReturn, config.ignoreChecksSignatures, config.startSpanMatchers); ret != nil {
+				pass.ReportRangef(sv.stmt, "%s.SetStatus is not called on all paths", sv.vr.Name())
+				pass.ReportRangef(ret, "return can be reached without calling %s.SetStatus", sv.vr.Name())
+			}
+		}
+
+		if config.recordErrorEnabled && sv.spanType == spanOpenTelemetry { // RecordError only exists in OpenTelemetry
+			// Check if there's no RecordError to the span setting an error.
+			if ret := getMissingSpanCalls(pass, g, sv, "RecordError", getErrorReturn, config.ignoreChecksSignatures, config.startSpanMatchers); ret != nil {
+				pass.ReportRangef(sv.stmt, "%s.RecordError is not called on all paths", sv.vr.Name())
+				pass.ReportRangef(ret, "return can be reached without calling %s.RecordError", sv.vr.Name())
+			}
+		}
+	}
+}
+
+// isSpanStart reports whether n is tracer.Start()
+func isSpanStart(info *types.Info, n ast.Node, startSpanMatchers []spanStartMatcher) (spanType, bool) {
+	sel, ok := n.(*ast.SelectorExpr)
+	if !ok {
+		return spanUnset, false
+	}
+
+	fnSig := info.ObjectOf(sel.Sel).String()
+
+	// Check if the function is a span start function
+	for _, matcher := range startSpanMatchers {
+		if matcher.signature.MatchString(fnSig) {
+			return matcher.spanType, true
+		}
+	}
+
+	return 0, false
+}
+
+func isCall(n ast.Node) bool {
+	_, ok := n.(*ast.CallExpr)
+	return ok
+}
+
+func getID(node ast.Node) *ast.Ident {
+	switch stmt := node.(type) {
+	case *ast.ValueSpec:
+		if len(stmt.Names) > 1 {
+			return stmt.Names[1]
+		} else if len(stmt.Names) == 1 {
+			return stmt.Names[0]
+		}
+	case *ast.AssignStmt:
+		if len(stmt.Lhs) > 1 {
+			id, _ := stmt.Lhs[1].(*ast.Ident)
+			return id
+		} else if len(stmt.Lhs) == 1 {
+			id, _ := stmt.Lhs[0].(*ast.Ident)
+			return id
+		}
+	}
+	return nil
+}
+
+// getMissingSpanCalls finds a path through the CFG, from stmt (which defines
+// the 'span' variable v) to a return statement, that doesn't call the passed selector on the span.
+func getMissingSpanCalls(
+	pass *analysis.Pass,
+	g *cfg.CFG,
+	sv spanVar,
+	selName string,
+	checkErr func(pass *analysis.Pass, ret *ast.ReturnStmt) *ast.ReturnStmt,
+	ignoreCheckSig *regexp.Regexp,
+	spanStartMatchers []spanStartMatcher,
+) *ast.ReturnStmt {
+	// blockUses computes "uses" for each block, caching the result.
+	memo := make(map[*cfg.Block]bool)
+	blockUses := func(pass *analysis.Pass, b *cfg.Block) bool {
+		res, ok := memo[b]
+		if !ok {
+			res = usesCall(pass, b.Nodes, sv, selName, ignoreCheckSig, spanStartMatchers, 0)
+			memo[b] = res
+		}
+		return res
+	}
+
+	// Find the var's defining block in the CFG,
+	// plus the rest of the statements of that block.
+	var defBlock *cfg.Block
+	var rest []ast.Node
+outer:
+	for _, b := range g.Blocks {
+		for i, n := range b.Nodes {
+			if n == sv.stmt {
+				defBlock = b
+				rest = b.Nodes[i+1:]
+				break outer
+			}
+		}
+	}
+
+	// Is the call "used" in the remainder of its defining block?
+	if usesCall(pass, rest, sv, selName, ignoreCheckSig, spanStartMatchers, 0) {
+		return nil
+	}
+
+	// Does the defining block return without making the call?
+	if ret := defBlock.Return(); ret != nil {
+		return checkErr(pass, ret)
+	}
+
+	// Search the CFG depth-first for a path, from defblock to a
+	// return block, in which v is never "used".
+	seen := make(map[*cfg.Block]bool)
+	var search func(blocks []*cfg.Block) *ast.ReturnStmt
+	search = func(blocks []*cfg.Block) *ast.ReturnStmt {
+		for _, b := range blocks {
+			if seen[b] {
+				continue
+			}
+			seen[b] = true
+
+			// Prune the search if the block uses v.
+			if blockUses(pass, b) {
+				continue
+			}
+
+			// Found path to return statement?
+			if ret := getErrorReturn(pass, b.Return()); ret != nil {
+				return ret // found
+			}
+
+			// Recur
+			if ret := getErrorReturn(pass, search(b.Succs)); ret != nil {
+				return ret
+			}
+		}
+		return nil
+	}
+
+	return search(defBlock.Succs)
+}
+
+// usesCall reports whether stmts contain a use of the selName call on variable v.
+func usesCall(
+	pass *analysis.Pass,
+	stmts []ast.Node,
+	sv spanVar,
+	selName string,
+	ignoreCheckSig *regexp.Regexp,
+	startSpanMatchers []spanStartMatcher,
+	depth int,
+) bool {
+	if depth > 1 { // For performance reasons, do not dive too deeply through func literals; only check one level deep.
+		return false
+	}
+
+	found, reAssigned := false, false
+	for _, subStmt := range stmts {
+		stack := []ast.Node{}
+		ast.Inspect(subStmt, func(n ast.Node) bool {
+			switch n := n.(type) {
+			case *ast.FuncLit:
+				if len(stack) > 0 {
+					cfgs := pass.ResultOf[ctrlflow.Analyzer].(*ctrlflow.CFGs)
+					g := cfgs.FuncLit(n)
+					if g != nil && len(g.Blocks) > 0 {
+						return usesCall(pass, g.Blocks[0].Nodes, sv, selName, ignoreCheckSig, startSpanMatchers, depth+1)
+					}
+
+					return false
+				}
+			case *ast.CallExpr:
+				if ident, ok := n.Fun.(*ast.Ident); ok {
+					fnSig := pass.TypesInfo.ObjectOf(ident).String()
+					if ignoreCheckSig != nil && ignoreCheckSig.MatchString(fnSig) {
+						found = true
+						return false
+					}
+				}
+			case nil:
+				if len(stack) > 0 {
+					stack = stack[:len(stack)-1] // pop
+					return true
+				}
+				return false
+			}
+			stack = append(stack, n) // push
+
+			// Check whether the span variable was reassigned, overwriting its old value.
+			_, isStart := isSpanStart(pass.TypesInfo, n, startSpanMatchers)
+			if isStart {
+				if id := getID(stack[len(stack)-3]); id != nil && id.Obj.Decl == sv.id.Obj.Decl {
+					reAssigned = true
+					return false
+				}
+			}
+
+			if n, ok := n.(*ast.SelectorExpr); ok {
+				// Selector (End, SetStatus, RecordError) hit.
+				if n.Sel.Name == selName {
+					id, ok := n.X.(*ast.Ident)
+					found = ok && id.Obj.Decl == sv.id.Obj.Decl
+				}
+
+				// Check if an ignore signature matches.
+				fnSig := pass.TypesInfo.ObjectOf(n.Sel).String()
+				if ignoreCheckSig != nil && ignoreCheckSig.MatchString(fnSig) {
+					found = true
+				}
+			}
+
+			return !found
+		})
+	}
+
+	return found && !reAssigned
+}
+
+func getErrorReturn(pass *analysis.Pass, ret *ast.ReturnStmt) *ast.ReturnStmt {
+	if ret == nil {
+		return nil
+	}
+
+	for _, r := range ret.Results {
+		if isErrorType(pass.TypesInfo.TypeOf(r)) {
+			return ret
+		}
+
+		if r, ok := r.(*ast.CallExpr); ok {
+			for _, err := range errorsByArg(pass, r) {
+				if err {
+					return ret
+				}
+			}
+		}
+	}
+
+	return nil
+}
+
+// errorsByArg returns a slice s such that
+// len(s) == number of return types of call
+// s[i] == true iff return type at position i from left is an error type
+//
+// copied from https://github.com/kisielk/errcheck/blob/master/errcheck/errcheck.go
+func errorsByArg(pass *analysis.Pass, call *ast.CallExpr) []bool {
+	switch t := pass.TypesInfo.Types[call].Type.(type) {
+	case *types.Named:
+		// Single return
+		return []bool{isErrorType(t)}
+	case *types.Pointer:
+		// Single return via pointer
+		return []bool{isErrorType(t)}
+	case *types.Tuple:
+		// Multiple returns
+		s := make([]bool, t.Len())
+		for i := 0; i < t.Len(); i++ {
+			switch et := t.At(i).Type().(type) {
+			case *types.Named:
+				// Single return
+				s[i] = isErrorType(et)
+			case *types.Pointer:
+				// Single return via pointer
+				s[i] = isErrorType(et)
+			default:
+				s[i] = false
+			}
+		}
+		return s
+	}
+	return []bool{false}
+}
+
+func isErrorType(t types.Type) bool {
+	return types.Implements(t, errorType)
+}
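For reference, a minimal sketch of the pattern the end-check above reports, assuming the configured startSpanMatchers recognize the OpenTelemetry `Tracer.Start` call (`work` is a hypothetical helper, not part of this diff):

```go
package example

import (
	"context"

	"go.opentelemetry.io/otel"
)

// work is a hypothetical helper that may fail.
func work(ctx context.Context) error { return nil }

func do(ctx context.Context) error {
	// Recognized as a span start, so "span" becomes a tracked spanVar.
	ctx, span := otel.Tracer("example").Start(ctx, "do")

	if err := work(ctx); err != nil {
		// Reachable without span.End: getMissingSpanCalls reports
		// "span.End is not called on all paths, possible memory leak".
		return err
	}

	span.End()
	return nil
}
```

The usual fix is `defer span.End()` immediately after the span is started, which makes every return path call End.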
diff --git a/vendor/github.com/karamaru-alpha/copyloopvar/.gitignore b/vendor/github.com/karamaru-alpha/copyloopvar/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..816abbd923d68f05ee3d8a38401f248bc59e5348
--- /dev/null
+++ b/vendor/github.com/karamaru-alpha/copyloopvar/.gitignore
@@ -0,0 +1,2 @@
+.idea/
+copyloopvar
diff --git a/vendor/github.com/karamaru-alpha/copyloopvar/LICENSE b/vendor/github.com/karamaru-alpha/copyloopvar/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..e2567fd0c54b71cd8faa444d2e3f03312373d3af
--- /dev/null
+++ b/vendor/github.com/karamaru-alpha/copyloopvar/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 Ryosei Karaki
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/karamaru-alpha/copyloopvar/README.md b/vendor/github.com/karamaru-alpha/copyloopvar/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..d31d1abd979c2454974492f8bd5b576bddfa04b7
--- /dev/null
+++ b/vendor/github.com/karamaru-alpha/copyloopvar/README.md
@@ -0,0 +1,27 @@
+# copyloopvar
+
+copyloopvar is a linter that detects places where loop variables are copied.
+
+cf. [Fixing For Loops in Go 1.22](https://go.dev/blog/loopvar-preview)
+
+## Example
+
+```go
+for i, v := range []int{1, 2, 3} {
+    i := i // The copy of the 'for' variable "i" can be deleted (Go 1.22+)
+    v := v // The copy of the 'for' variable "v" can be deleted (Go 1.22+)
+    _, _ = i, v
+}
+
+for i := 1; i <= 3; i++ {
+    i := i // The copy of the 'for' variable "i" can be deleted (Go 1.22+)
+    _ = i
+}
+```
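+
+With the `check-alias` flag (see `copyloopvar.go` below), copies of the loop variable into a differently named variable are reported as well. An illustrative sketch:
+
+```go
+for _, v := range []int{1, 2, 3} {
+    val := v // reported only when check-alias is enabled
+    _ = val
+}
+```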
+
+## Install
+
+```bash
+go install github.com/karamaru-alpha/copyloopvar/cmd/copyloopvar@latest
+go vet -vettool=`which copyloopvar` ./...
+```
diff --git a/vendor/github.com/karamaru-alpha/copyloopvar/copyloopvar.go b/vendor/github.com/karamaru-alpha/copyloopvar/copyloopvar.go
new file mode 100644
index 0000000000000000000000000000000000000000..79dc6afcc49f19a7d2e5349d1589296b7d598afb
--- /dev/null
+++ b/vendor/github.com/karamaru-alpha/copyloopvar/copyloopvar.go
@@ -0,0 +1,133 @@
+package copyloopvar
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/inspector"
+)
+
+var checkAlias bool
+
+func NewAnalyzer() *analysis.Analyzer {
+	analyzer := &analysis.Analyzer{
+		Name: "copyloopvar",
+		Doc:  "copyloopvar is a linter that detects places where loop variables are copied",
+		Run:  run,
+		Requires: []*analysis.Analyzer{
+			inspect.Analyzer,
+		},
+	}
+	analyzer.Flags.BoolVar(&checkAlias, "check-alias", false, "check all assignments of the loop variable to another variable")
+	return analyzer
+}
+
+func run(pass *analysis.Pass) (any, error) {
+	pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{
+		(*ast.RangeStmt)(nil),
+		(*ast.ForStmt)(nil),
+	}, func(n ast.Node) {
+		switch node := n.(type) {
+		case *ast.RangeStmt:
+			checkRangeStmt(pass, node)
+		case *ast.ForStmt:
+			checkForStmt(pass, node)
+		}
+	})
+
+	return nil, nil
+}
+
+func checkRangeStmt(pass *analysis.Pass, rangeStmt *ast.RangeStmt) {
+	key, ok := rangeStmt.Key.(*ast.Ident)
+	if !ok {
+		return
+	}
+	var value *ast.Ident
+	if rangeStmt.Value != nil {
+		if value, ok = rangeStmt.Value.(*ast.Ident); !ok {
+			return
+		}
+	}
+	for _, stmt := range rangeStmt.Body.List {
+		assignStmt, ok := stmt.(*ast.AssignStmt)
+		if !ok {
+			continue
+		}
+		if assignStmt.Tok != token.DEFINE {
+			continue
+		}
+		for i, rh := range assignStmt.Rhs {
+			right, ok := rh.(*ast.Ident)
+			if !ok {
+				continue
+			}
+			if right.Name != key.Name && (value == nil || right.Name != value.Name) {
+				continue
+			}
+			if !checkAlias {
+				left, ok := assignStmt.Lhs[i].(*ast.Ident)
+				if !ok {
+					continue
+				}
+				if left.Name != right.Name {
+					continue
+				}
+			}
+			pass.Report(analysis.Diagnostic{
+				Pos:     assignStmt.Pos(),
+				Message: fmt.Sprintf(`The copy of the 'for' variable "%s" can be deleted (Go 1.22+)`, right.Name),
+			})
+		}
+	}
+}
+
+func checkForStmt(pass *analysis.Pass, forStmt *ast.ForStmt) {
+	if forStmt.Init == nil {
+		return
+	}
+	initAssignStmt, ok := forStmt.Init.(*ast.AssignStmt)
+	if !ok {
+		return
+	}
+	initVarNameMap := make(map[string]interface{}, len(initAssignStmt.Lhs))
+	for _, lh := range initAssignStmt.Lhs {
+		if initVar, ok := lh.(*ast.Ident); ok {
+			initVarNameMap[initVar.Name] = struct{}{}
+		}
+	}
+	for _, stmt := range forStmt.Body.List {
+		assignStmt, ok := stmt.(*ast.AssignStmt)
+		if !ok {
+			continue
+		}
+		if assignStmt.Tok != token.DEFINE {
+			continue
+		}
+		for i, rh := range assignStmt.Rhs {
+			right, ok := rh.(*ast.Ident)
+			if !ok {
+				continue
+			}
+			if _, ok := initVarNameMap[right.Name]; !ok {
+				continue
+			}
+			if !checkAlias {
+				left, ok := assignStmt.Lhs[i].(*ast.Ident)
+				if !ok {
+					continue
+				}
+				if left.Name != right.Name {
+					continue
+				}
+			}
+			pass.Report(analysis.Diagnostic{
+				Pos:     assignStmt.Pos(),
+				Message: fmt.Sprintf(`The copy of the 'for' variable "%s" can be deleted (Go 1.22+)`, right.Name),
+			})
+		}
+	}
+}
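As a usage sketch (the actual `cmd/copyloopvar` entry point is not part of this diff, so this wiring is an assumption), the analyzer returned by `NewAnalyzer` can be run standalone via the x/tools `singlechecker` helper:

```go
package main

import (
	"golang.org/x/tools/go/analysis/singlechecker"

	"github.com/karamaru-alpha/copyloopvar"
)

func main() {
	// Build a standalone vet-style binary around the copyloopvar analyzer,
	// matching the `go vet -vettool` invocation shown in the README above.
	singlechecker.Main(copyloopvar.NewAnalyzer())
}
```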
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/analyzer.go b/vendor/github.com/kisielk/errcheck/errcheck/analyzer.go
index 68593cc9adfe3d548ed9c799b343f63c76f58d63..82ab6298a9d3c26a49b1a8e70aaa8154b006e81c 100644
--- a/vendor/github.com/kisielk/errcheck/errcheck/analyzer.go
+++ b/vendor/github.com/kisielk/errcheck/errcheck/analyzer.go
@@ -31,7 +31,6 @@ func init() {
 }
 
 func runAnalyzer(pass *analysis.Pass) (interface{}, error) {
-
 	exclude := map[string]bool{}
 	if !argExcludeOnly {
 		for _, name := range DefaultExcludedSymbols {
@@ -65,8 +64,9 @@ func runAnalyzer(pass *analysis.Pass) (interface{}, error) {
 
 		for _, err := range v.errors {
 			pass.Report(analysis.Diagnostic{
-				Pos:     pass.Fset.File(f.Pos()).Pos(err.Pos.Offset),
-				Message: "unchecked error",
+				Pos:      pass.Fset.File(f.Pos()).Pos(err.Pos.Offset),
+				Message:  "unchecked error",
+				Category: "errcheck",
 			})
 		}
 
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go b/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go
index a5ee3711c9e3543ae5642a93d90da3886bc80958..d61d348f77d22008c3f7ded5ca445686f12dacd5 100644
--- a/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go
+++ b/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go
@@ -167,7 +167,7 @@ func (c *Checker) LoadPackages(paths ...string) ([]*packages.Package, error) {
 		buildFlags = append(buildFlags, fmt.Sprintf("-mod=%s", c.Mod))
 	}
 	cfg := &packages.Config{
-		Mode:       packages.LoadAllSyntax,
+		Mode:       packages.NeedSyntax | packages.NeedTypes | packages.NeedTypesInfo,
 		Tests:      !c.Exclusions.TestFiles,
 		BuildFlags: buildFlags,
 	}
@@ -205,7 +205,7 @@ func (c *Checker) CheckPackage(pkg *packages.Package) Result {
 
 	ignore := map[string]*regexp.Regexp{}
 	// Apply SymbolRegexpsByPackage first so that if the same path appears in
-	// Packages, a more narrow regexp will be superceded by dotStar below.
+	// Packages, a more narrow regexp will be superseded by dotStar below.
 	if regexps := c.Exclusions.SymbolRegexpsByPackage; regexps != nil {
 		for pkg, re := range regexps {
 			// TODO warn if previous entry overwritten?
@@ -337,7 +337,7 @@ func (v *visitor) selectorName(call *ast.CallExpr) string {
 // names are returned. If the function is package-qualified (like "fmt.Printf()")
 // then just that function's fullName is returned.
 //
-// Otherwise, we walk through all the potentially embeddded interfaces of the receiver
+// Otherwise, we walk through all the potentially embedded interfaces of the receiver
 // to collect a list of type-qualified function names that we will check.
 func (v *visitor) namesForExcludeCheck(call *ast.CallExpr) []string {
 	sel, fn, ok := v.selectorAndFunc(call)
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/excludes.go b/vendor/github.com/kisielk/errcheck/errcheck/excludes.go
index 22db9fe11d646316db990b14a7403e0175d1cefe..a783b5a763f30d82f59d0678a63c7e7f26f7b6a6 100644
--- a/vendor/github.com/kisielk/errcheck/errcheck/excludes.go
+++ b/vendor/github.com/kisielk/errcheck/errcheck/excludes.go
@@ -3,64 +3,60 @@ package errcheck
 import (
 	"bufio"
 	"bytes"
-	"io/ioutil"
+	"os"
 	"strings"
 )
 
-var (
-	// DefaultExcludedSymbols is a list of symbol names that are usually excluded from checks by default.
-	//
-	// Note, that they still need to be explicitly copied to Checker.Exclusions.Symbols
-	DefaultExcludedSymbols = []string{
-		// bytes
-		"(*bytes.Buffer).Write",
-		"(*bytes.Buffer).WriteByte",
-		"(*bytes.Buffer).WriteRune",
-		"(*bytes.Buffer).WriteString",
+// DefaultExcludedSymbols is a list of symbol names that are usually excluded from checks by default.
+//
+// Note that they still need to be explicitly copied to Checker.Exclusions.Symbols
+var DefaultExcludedSymbols = []string{
+	// bytes
+	"(*bytes.Buffer).Write",
+	"(*bytes.Buffer).WriteByte",
+	"(*bytes.Buffer).WriteRune",
+	"(*bytes.Buffer).WriteString",
 
-		// fmt
-		"fmt.Errorf",
-		"fmt.Print",
-		"fmt.Printf",
-		"fmt.Println",
-		"fmt.Fprint(*bytes.Buffer)",
-		"fmt.Fprintf(*bytes.Buffer)",
-		"fmt.Fprintln(*bytes.Buffer)",
-		"fmt.Fprint(*strings.Builder)",
-		"fmt.Fprintf(*strings.Builder)",
-		"fmt.Fprintln(*strings.Builder)",
-		"fmt.Fprint(os.Stderr)",
-		"fmt.Fprintf(os.Stderr)",
-		"fmt.Fprintln(os.Stderr)",
+	// fmt
+	"fmt.Print",
+	"fmt.Printf",
+	"fmt.Println",
+	"fmt.Fprint(*bytes.Buffer)",
+	"fmt.Fprintf(*bytes.Buffer)",
+	"fmt.Fprintln(*bytes.Buffer)",
+	"fmt.Fprint(*strings.Builder)",
+	"fmt.Fprintf(*strings.Builder)",
+	"fmt.Fprintln(*strings.Builder)",
+	"fmt.Fprint(os.Stderr)",
+	"fmt.Fprintf(os.Stderr)",
+	"fmt.Fprintln(os.Stderr)",
 
-		// io
-		"(*io.PipeReader).CloseWithError",
-		"(*io.PipeWriter).CloseWithError",
+	// io
+	"(*io.PipeReader).CloseWithError",
+	"(*io.PipeWriter).CloseWithError",
 
-		// math/rand
-		"math/rand.Read",
-		"(*math/rand.Rand).Read",
+	// math/rand
+	"math/rand.Read",
+	"(*math/rand.Rand).Read",
 
-		// strings
-		"(*strings.Builder).Write",
-		"(*strings.Builder).WriteByte",
-		"(*strings.Builder).WriteRune",
-		"(*strings.Builder).WriteString",
+	// strings
+	"(*strings.Builder).Write",
+	"(*strings.Builder).WriteByte",
+	"(*strings.Builder).WriteRune",
+	"(*strings.Builder).WriteString",
 
-		// hash
-		"(hash.Hash).Write",
-	}
-)
+	// hash
+	"(hash.Hash).Write",
+}
 
 // ReadExcludes reads an excludes file, a newline delimited file that lists
 // patterns for which to allow unchecked errors.
 //
 // Lines that start with two forward slashes are considered comments and are ignored.
-//
 func ReadExcludes(path string) ([]string, error) {
 	var excludes []string
 
-	buf, err := ioutil.ReadFile(path)
+	buf, err := os.ReadFile(path)
 	if err != nil {
 		return nil, err
 	}
diff --git a/vendor/github.com/kisielk/gotool/.travis.yml b/vendor/github.com/kisielk/gotool/.travis.yml
deleted file mode 100644
index d1784e1e23b74f90773304cb3cb658f7b3eb59c7..0000000000000000000000000000000000000000
--- a/vendor/github.com/kisielk/gotool/.travis.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-sudo: false
-language: go
-go:
-  - 1.2
-  - 1.3
-  - 1.4
-  - 1.5
-  - 1.6
-  - 1.7
-  - 1.8
-  - 1.9
-  - master
-matrix:
-  allow_failures:
-    - go: master
-  fast_finish: true
-install:
-  - # Skip.
-script:
-  - go get -t -v ./...
-  - diff -u <(echo -n) <(gofmt -d .)
-  - go tool vet .
-  - go test -v -race ./...
diff --git a/vendor/github.com/kisielk/gotool/LEGAL b/vendor/github.com/kisielk/gotool/LEGAL
deleted file mode 100644
index 72b859cd6219d796adc0fbbb211c0e390dbf0c8b..0000000000000000000000000000000000000000
--- a/vendor/github.com/kisielk/gotool/LEGAL
+++ /dev/null
@@ -1,32 +0,0 @@
-All the files in this distribution are covered under either the MIT
-license (see the file LICENSE) except some files mentioned below.
-
-match.go, match_test.go:
-
-    Copyright (c) 2009 The Go Authors. All rights reserved.
-
-    Redistribution and use in source and binary forms, with or without
-    modification, are permitted provided that the following conditions are
-    met:
-
-       * Redistributions of source code must retain the above copyright
-    notice, this list of conditions and the following disclaimer.
-       * Redistributions in binary form must reproduce the above
-    copyright notice, this list of conditions and the following disclaimer
-    in the documentation and/or other materials provided with the
-    distribution.
-       * Neither the name of Google Inc. nor the names of its
-    contributors may be used to endorse or promote products derived from
-    this software without specific prior written permission.
-
-    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/kisielk/gotool/LICENSE b/vendor/github.com/kisielk/gotool/LICENSE
deleted file mode 100644
index 1cbf651e2fcbf329af70abd1813b75e6d1819aa3..0000000000000000000000000000000000000000
--- a/vendor/github.com/kisielk/gotool/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-Copyright (c) 2013 Kamil Kisiel <kamil@kamilkisiel.net>
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/kisielk/gotool/README.md b/vendor/github.com/kisielk/gotool/README.md
deleted file mode 100644
index 6e4e92b2f607842bfb04063ae30195a07c3b7715..0000000000000000000000000000000000000000
--- a/vendor/github.com/kisielk/gotool/README.md
+++ /dev/null
@@ -1,6 +0,0 @@
-gotool
-======
-[![GoDoc](https://godoc.org/github.com/kisielk/gotool?status.svg)](https://godoc.org/github.com/kisielk/gotool)
-[![Build Status](https://travis-ci.org/kisielk/gotool.svg?branch=master)](https://travis-ci.org/kisielk/gotool)
-
-Package gotool contains utility functions used to implement the standard "cmd/go" tool, provided as a convenience to developers who want to write tools with similar semantics.
diff --git a/vendor/github.com/kisielk/gotool/go13.go b/vendor/github.com/kisielk/gotool/go13.go
deleted file mode 100644
index 2dd9b3fdf0a905f89829482dafeff2cd93eb3c15..0000000000000000000000000000000000000000
--- a/vendor/github.com/kisielk/gotool/go13.go
+++ /dev/null
@@ -1,15 +0,0 @@
-// +build !go1.4
-
-package gotool
-
-import (
-	"go/build"
-	"path/filepath"
-	"runtime"
-)
-
-var gorootSrc = filepath.Join(runtime.GOROOT(), "src", "pkg")
-
-func shouldIgnoreImport(p *build.Package) bool {
-	return true
-}
diff --git a/vendor/github.com/kisielk/gotool/go14-15.go b/vendor/github.com/kisielk/gotool/go14-15.go
deleted file mode 100644
index aa99a32270bac09afbe4f45e4ec148e9b12628c6..0000000000000000000000000000000000000000
--- a/vendor/github.com/kisielk/gotool/go14-15.go
+++ /dev/null
@@ -1,15 +0,0 @@
-// +build go1.4,!go1.6
-
-package gotool
-
-import (
-	"go/build"
-	"path/filepath"
-	"runtime"
-)
-
-var gorootSrc = filepath.Join(runtime.GOROOT(), "src")
-
-func shouldIgnoreImport(p *build.Package) bool {
-	return true
-}
diff --git a/vendor/github.com/kisielk/gotool/go16-18.go b/vendor/github.com/kisielk/gotool/go16-18.go
deleted file mode 100644
index f25cec14a83c34db81417454ac14c76b0065d139..0000000000000000000000000000000000000000
--- a/vendor/github.com/kisielk/gotool/go16-18.go
+++ /dev/null
@@ -1,15 +0,0 @@
-// +build go1.6,!go1.9
-
-package gotool
-
-import (
-	"go/build"
-	"path/filepath"
-	"runtime"
-)
-
-var gorootSrc = filepath.Join(runtime.GOROOT(), "src")
-
-func shouldIgnoreImport(p *build.Package) bool {
-	return p == nil || len(p.InvalidGoFiles) == 0
-}
diff --git a/vendor/github.com/kisielk/gotool/internal/load/path.go b/vendor/github.com/kisielk/gotool/internal/load/path.go
deleted file mode 100644
index 74e15b9d32451fb384813887318d03e882c24dc4..0000000000000000000000000000000000000000
--- a/vendor/github.com/kisielk/gotool/internal/load/path.go
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.9
-
-package load
-
-import (
-	"strings"
-)
-
-// hasPathPrefix reports whether the path s begins with the
-// elements in prefix.
-func hasPathPrefix(s, prefix string) bool {
-	switch {
-	default:
-		return false
-	case len(s) == len(prefix):
-		return s == prefix
-	case len(s) > len(prefix):
-		if prefix != "" && prefix[len(prefix)-1] == '/' {
-			return strings.HasPrefix(s, prefix)
-		}
-		return s[len(prefix)] == '/' && s[:len(prefix)] == prefix
-	}
-}
diff --git a/vendor/github.com/kisielk/gotool/internal/load/pkg.go b/vendor/github.com/kisielk/gotool/internal/load/pkg.go
deleted file mode 100644
index b937ede759d600d7267bc0122a65412bc4b75d21..0000000000000000000000000000000000000000
--- a/vendor/github.com/kisielk/gotool/internal/load/pkg.go
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.9
-
-// Package load loads packages.
-package load
-
-import (
-	"strings"
-)
-
-// isStandardImportPath reports whether $GOROOT/src/path should be considered
-// part of the standard distribution. For historical reasons we allow people to add
-// their own code to $GOROOT instead of using $GOPATH, but we assume that
-// code will start with a domain name (dot in the first element).
-func isStandardImportPath(path string) bool {
-	i := strings.Index(path, "/")
-	if i < 0 {
-		i = len(path)
-	}
-	elem := path[:i]
-	return !strings.Contains(elem, ".")
-}
diff --git a/vendor/github.com/kisielk/gotool/internal/load/search.go b/vendor/github.com/kisielk/gotool/internal/load/search.go
deleted file mode 100644
index 17ed62ddae76f424aec0079eef60c18d29ea438c..0000000000000000000000000000000000000000
--- a/vendor/github.com/kisielk/gotool/internal/load/search.go
+++ /dev/null
@@ -1,354 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.9
-
-package load
-
-import (
-	"fmt"
-	"go/build"
-	"log"
-	"os"
-	"path"
-	"path/filepath"
-	"regexp"
-	"strings"
-)
-
-// Context specifies values for operation of ImportPaths that would
-// otherwise come from cmd/go/internal/cfg package.
-//
-// This is a construct added for gotool purposes and doesn't have
-// an equivalent upstream in cmd/go.
-type Context struct {
-	// BuildContext is the build context to use.
-	BuildContext build.Context
-
-	// GOROOTsrc is the location of the src directory in GOROOT.
-	// At this time, it's used only in MatchPackages to skip
-	// GOOROOT/src entry from BuildContext.SrcDirs output.
-	GOROOTsrc string
-}
-
-// allPackages returns all the packages that can be found
-// under the $GOPATH directories and $GOROOT matching pattern.
-// The pattern is either "all" (all packages), "std" (standard packages),
-// "cmd" (standard commands), or a path including "...".
-func (c *Context) allPackages(pattern string) []string {
-	pkgs := c.MatchPackages(pattern)
-	if len(pkgs) == 0 {
-		fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
-	}
-	return pkgs
-}
-
-// allPackagesInFS is like allPackages but is passed a pattern
-// beginning ./ or ../, meaning it should scan the tree rooted
-// at the given directory. There are ... in the pattern too.
-func (c *Context) allPackagesInFS(pattern string) []string {
-	pkgs := c.MatchPackagesInFS(pattern)
-	if len(pkgs) == 0 {
-		fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
-	}
-	return pkgs
-}
-
-// MatchPackages returns a list of package paths matching pattern
-// (see go help packages for pattern syntax).
-func (c *Context) MatchPackages(pattern string) []string {
-	match := func(string) bool { return true }
-	treeCanMatch := func(string) bool { return true }
-	if !IsMetaPackage(pattern) {
-		match = matchPattern(pattern)
-		treeCanMatch = treeCanMatchPattern(pattern)
-	}
-
-	have := map[string]bool{
-		"builtin": true, // ignore pseudo-package that exists only for documentation
-	}
-	if !c.BuildContext.CgoEnabled {
-		have["runtime/cgo"] = true // ignore during walk
-	}
-	var pkgs []string
-
-	for _, src := range c.BuildContext.SrcDirs() {
-		if (pattern == "std" || pattern == "cmd") && src != c.GOROOTsrc {
-			continue
-		}
-		src = filepath.Clean(src) + string(filepath.Separator)
-		root := src
-		if pattern == "cmd" {
-			root += "cmd" + string(filepath.Separator)
-		}
-		filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
-			if err != nil || path == src {
-				return nil
-			}
-
-			want := true
-			// Avoid .foo, _foo, and testdata directory trees.
-			_, elem := filepath.Split(path)
-			if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" {
-				want = false
-			}
-
-			name := filepath.ToSlash(path[len(src):])
-			if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") {
-				// The name "std" is only the standard library.
-				// If the name is cmd, it's the root of the command tree.
-				want = false
-			}
-			if !treeCanMatch(name) {
-				want = false
-			}
-
-			if !fi.IsDir() {
-				if fi.Mode()&os.ModeSymlink != 0 && want {
-					if target, err := os.Stat(path); err == nil && target.IsDir() {
-						fmt.Fprintf(os.Stderr, "warning: ignoring symlink %s\n", path)
-					}
-				}
-				return nil
-			}
-			if !want {
-				return filepath.SkipDir
-			}
-
-			if have[name] {
-				return nil
-			}
-			have[name] = true
-			if !match(name) {
-				return nil
-			}
-			pkg, err := c.BuildContext.ImportDir(path, 0)
-			if err != nil {
-				if _, noGo := err.(*build.NoGoError); noGo {
-					return nil
-				}
-			}
-
-			// If we are expanding "cmd", skip main
-			// packages under cmd/vendor. At least as of
-			// March, 2017, there is one there for the
-			// vendored pprof tool.
-			if pattern == "cmd" && strings.HasPrefix(pkg.ImportPath, "cmd/vendor") && pkg.Name == "main" {
-				return nil
-			}
-
-			pkgs = append(pkgs, name)
-			return nil
-		})
-	}
-	return pkgs
-}
-
-// MatchPackagesInFS returns a list of package paths matching pattern,
-// which must begin with ./ or ../
-// (see go help packages for pattern syntax).
-func (c *Context) MatchPackagesInFS(pattern string) []string {
-	// Find directory to begin the scan.
-	// Could be smarter but this one optimization
-	// is enough for now, since ... is usually at the
-	// end of a path.
-	i := strings.Index(pattern, "...")
-	dir, _ := path.Split(pattern[:i])
-
-	// pattern begins with ./ or ../.
-	// path.Clean will discard the ./ but not the ../.
-	// We need to preserve the ./ for pattern matching
-	// and in the returned import paths.
-	prefix := ""
-	if strings.HasPrefix(pattern, "./") {
-		prefix = "./"
-	}
-	match := matchPattern(pattern)
-
-	var pkgs []string
-	filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
-		if err != nil || !fi.IsDir() {
-			return nil
-		}
-		if path == dir {
-			// filepath.Walk starts at dir and recurses. For the recursive case,
-			// the path is the result of filepath.Join, which calls filepath.Clean.
-			// The initial case is not Cleaned, though, so we do this explicitly.
-			//
-			// This converts a path like "./io/" to "io". Without this step, running
-			// "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io
-			// package, because prepending the prefix "./" to the unclean path would
-			// result in "././io", and match("././io") returns false.
-			path = filepath.Clean(path)
-		}
-
-		// Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..".
-		_, elem := filepath.Split(path)
-		dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".."
-		if dot || strings.HasPrefix(elem, "_") || elem == "testdata" {
-			return filepath.SkipDir
-		}
-
-		name := prefix + filepath.ToSlash(path)
-		if !match(name) {
-			return nil
-		}
-
-		// We keep the directory if we can import it, or if we can't import it
-		// due to invalid Go source files. This means that directories containing
-		// parse errors will be built (and fail) instead of being silently skipped
-		// as not matching the pattern. Go 1.5 and earlier skipped, but that
-		// behavior means people miss serious mistakes.
-		// See golang.org/issue/11407.
-		if p, err := c.BuildContext.ImportDir(path, 0); err != nil && (p == nil || len(p.InvalidGoFiles) == 0) {
-			if _, noGo := err.(*build.NoGoError); !noGo {
-				log.Print(err)
-			}
-			return nil
-		}
-		pkgs = append(pkgs, name)
-		return nil
-	})
-	return pkgs
-}
-
-// treeCanMatchPattern(pattern)(name) reports whether
-// name or children of name can possibly match pattern.
-// Pattern is the same limited glob accepted by matchPattern.
-func treeCanMatchPattern(pattern string) func(name string) bool {
-	wildCard := false
-	if i := strings.Index(pattern, "..."); i >= 0 {
-		wildCard = true
-		pattern = pattern[:i]
-	}
-	return func(name string) bool {
-		return len(name) <= len(pattern) && hasPathPrefix(pattern, name) ||
-			wildCard && strings.HasPrefix(name, pattern)
-	}
-}
-
-// matchPattern(pattern)(name) reports whether
-// name matches pattern. Pattern is a limited glob
-// pattern in which '...' means 'any string' and there
-// is no other special syntax.
-// Unfortunately, there are two special cases. Quoting "go help packages":
-//
-// First, /... at the end of the pattern can match an empty string,
-// so that net/... matches both net and packages in its subdirectories, like net/http.
-// Second, any slash-separted pattern element containing a wildcard never
-// participates in a match of the "vendor" element in the path of a vendored
-// package, so that ./... does not match packages in subdirectories of
-// ./vendor or ./mycode/vendor, but ./vendor/... and ./mycode/vendor/... do.
-// Note, however, that a directory named vendor that itself contains code
-// is not a vendored package: cmd/vendor would be a command named vendor,
-// and the pattern cmd/... matches it.
-func matchPattern(pattern string) func(name string) bool {
-	// Convert pattern to regular expression.
-	// The strategy for the trailing /... is to nest it in an explicit ? expression.
-	// The strategy for the vendor exclusion is to change the unmatchable
-	// vendor strings to a disallowed code point (vendorChar) and to use
-	// "(anything but that codepoint)*" as the implementation of the ... wildcard.
-	// This is a bit complicated but the obvious alternative,
-	// namely a hand-written search like in most shell glob matchers,
-	// is too easy to make accidentally exponential.
-	// Using package regexp guarantees linear-time matching.
-
-	const vendorChar = "\x00"
-
-	if strings.Contains(pattern, vendorChar) {
-		return func(name string) bool { return false }
-	}
-
-	re := regexp.QuoteMeta(pattern)
-	re = replaceVendor(re, vendorChar)
-	switch {
-	case strings.HasSuffix(re, `/`+vendorChar+`/\.\.\.`):
-		re = strings.TrimSuffix(re, `/`+vendorChar+`/\.\.\.`) + `(/vendor|/` + vendorChar + `/\.\.\.)`
-	case re == vendorChar+`/\.\.\.`:
-		re = `(/vendor|/` + vendorChar + `/\.\.\.)`
-	case strings.HasSuffix(re, `/\.\.\.`):
-		re = strings.TrimSuffix(re, `/\.\.\.`) + `(/\.\.\.)?`
-	}
-	re = strings.Replace(re, `\.\.\.`, `[^`+vendorChar+`]*`, -1)
-
-	reg := regexp.MustCompile(`^` + re + `$`)
-
-	return func(name string) bool {
-		if strings.Contains(name, vendorChar) {
-			return false
-		}
-		return reg.MatchString(replaceVendor(name, vendorChar))
-	}
-}
-
-// replaceVendor returns the result of replacing
-// non-trailing vendor path elements in x with repl.
-func replaceVendor(x, repl string) string {
-	if !strings.Contains(x, "vendor") {
-		return x
-	}
-	elem := strings.Split(x, "/")
-	for i := 0; i < len(elem)-1; i++ {
-		if elem[i] == "vendor" {
-			elem[i] = repl
-		}
-	}
-	return strings.Join(elem, "/")
-}
-
-// ImportPaths returns the import paths to use for the given command line.
-func (c *Context) ImportPaths(args []string) []string {
-	args = c.ImportPathsNoDotExpansion(args)
-	var out []string
-	for _, a := range args {
-		if strings.Contains(a, "...") {
-			if build.IsLocalImport(a) {
-				out = append(out, c.allPackagesInFS(a)...)
-			} else {
-				out = append(out, c.allPackages(a)...)
-			}
-			continue
-		}
-		out = append(out, a)
-	}
-	return out
-}
-
-// ImportPathsNoDotExpansion returns the import paths to use for the given
-// command line, but it does no ... expansion.
-func (c *Context) ImportPathsNoDotExpansion(args []string) []string {
-	if len(args) == 0 {
-		return []string{"."}
-	}
-	var out []string
-	for _, a := range args {
-		// Arguments are supposed to be import paths, but
-		// as a courtesy to Windows developers, rewrite \ to /
-		// in command-line arguments. Handles .\... and so on.
-		if filepath.Separator == '\\' {
-			a = strings.Replace(a, `\`, `/`, -1)
-		}
-
-		// Put argument in canonical form, but preserve leading ./.
-		if strings.HasPrefix(a, "./") {
-			a = "./" + path.Clean(a)
-			if a == "./." {
-				a = "."
-			}
-		} else {
-			a = path.Clean(a)
-		}
-		if IsMetaPackage(a) {
-			out = append(out, c.allPackages(a)...)
-			continue
-		}
-		out = append(out, a)
-	}
-	return out
-}
-
-// IsMetaPackage checks if name is a reserved package name that expands to multiple packages.
-func IsMetaPackage(name string) bool {
-	return name == "std" || name == "cmd" || name == "all"
-}
diff --git a/vendor/github.com/kisielk/gotool/match.go b/vendor/github.com/kisielk/gotool/match.go
deleted file mode 100644
index 4dbdbff47f9db4791e5c134bd0c540c08fc1d422..0000000000000000000000000000000000000000
--- a/vendor/github.com/kisielk/gotool/match.go
+++ /dev/null
@@ -1,56 +0,0 @@
-// Copyright (c) 2009 The Go Authors. All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//    * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-//    * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-//    * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// +build go1.9
-
-package gotool
-
-import (
-	"path/filepath"
-
-	"github.com/kisielk/gotool/internal/load"
-)
-
-// importPaths returns the import paths to use for the given command line.
-func (c *Context) importPaths(args []string) []string {
-	lctx := load.Context{
-		BuildContext: c.BuildContext,
-		GOROOTsrc:    c.joinPath(c.BuildContext.GOROOT, "src"),
-	}
-	return lctx.ImportPaths(args)
-}
-
-// joinPath calls c.BuildContext.JoinPath (if not nil) or else filepath.Join.
-//
-// It's a copy of the unexported build.Context.joinPath helper.
-func (c *Context) joinPath(elem ...string) string {
-	if f := c.BuildContext.JoinPath; f != nil {
-		return f(elem...)
-	}
-	return filepath.Join(elem...)
-}
diff --git a/vendor/github.com/kisielk/gotool/match18.go b/vendor/github.com/kisielk/gotool/match18.go
deleted file mode 100644
index 6d6b1368c8de20d7bab12adab230f41c3809cd6a..0000000000000000000000000000000000000000
--- a/vendor/github.com/kisielk/gotool/match18.go
+++ /dev/null
@@ -1,317 +0,0 @@
-// Copyright (c) 2009 The Go Authors. All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//    * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-//    * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-//    * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// +build !go1.9
-
-package gotool
-
-import (
-	"fmt"
-	"go/build"
-	"log"
-	"os"
-	"path"
-	"path/filepath"
-	"regexp"
-	"strings"
-)
-
-// This file contains code from the Go distribution.
-
-// matchPattern(pattern)(name) reports whether
-// name matches pattern. Pattern is a limited glob
-// pattern in which '...' means 'any string' and there
-// is no other special syntax.
-func matchPattern(pattern string) func(name string) bool {
-	re := regexp.QuoteMeta(pattern)
-	re = strings.Replace(re, `\.\.\.`, `.*`, -1)
-	// Special case: foo/... matches foo too.
-	if strings.HasSuffix(re, `/.*`) {
-		re = re[:len(re)-len(`/.*`)] + `(/.*)?`
-	}
-	reg := regexp.MustCompile(`^` + re + `$`)
-	return reg.MatchString
-}
-
-// matchPackages returns a list of package paths matching pattern
-// (see go help packages for pattern syntax).
-func (c *Context) matchPackages(pattern string) []string {
-	match := func(string) bool { return true }
-	treeCanMatch := func(string) bool { return true }
-	if !isMetaPackage(pattern) {
-		match = matchPattern(pattern)
-		treeCanMatch = treeCanMatchPattern(pattern)
-	}
-
-	have := map[string]bool{
-		"builtin": true, // ignore pseudo-package that exists only for documentation
-	}
-	if !c.BuildContext.CgoEnabled {
-		have["runtime/cgo"] = true // ignore during walk
-	}
-	var pkgs []string
-
-	for _, src := range c.BuildContext.SrcDirs() {
-		if (pattern == "std" || pattern == "cmd") && src != gorootSrc {
-			continue
-		}
-		src = filepath.Clean(src) + string(filepath.Separator)
-		root := src
-		if pattern == "cmd" {
-			root += "cmd" + string(filepath.Separator)
-		}
-		filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
-			if err != nil || !fi.IsDir() || path == src {
-				return nil
-			}
-
-			// Avoid .foo, _foo, and testdata directory trees.
-			_, elem := filepath.Split(path)
-			if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" {
-				return filepath.SkipDir
-			}
-
-			name := filepath.ToSlash(path[len(src):])
-			if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") {
-				// The name "std" is only the standard library.
-				// If the name is cmd, it's the root of the command tree.
-				return filepath.SkipDir
-			}
-			if !treeCanMatch(name) {
-				return filepath.SkipDir
-			}
-			if have[name] {
-				return nil
-			}
-			have[name] = true
-			if !match(name) {
-				return nil
-			}
-			_, err = c.BuildContext.ImportDir(path, 0)
-			if err != nil {
-				if _, noGo := err.(*build.NoGoError); noGo {
-					return nil
-				}
-			}
-			pkgs = append(pkgs, name)
-			return nil
-		})
-	}
-	return pkgs
-}
-
-// importPathsNoDotExpansion returns the import paths to use for the given
-// command line, but it does no ... expansion.
-func (c *Context) importPathsNoDotExpansion(args []string) []string {
-	if len(args) == 0 {
-		return []string{"."}
-	}
-	var out []string
-	for _, a := range args {
-		// Arguments are supposed to be import paths, but
-		// as a courtesy to Windows developers, rewrite \ to /
-		// in command-line arguments. Handles .\... and so on.
-		if filepath.Separator == '\\' {
-			a = strings.Replace(a, `\`, `/`, -1)
-		}
-
-		// Put argument in canonical form, but preserve leading ./.
-		if strings.HasPrefix(a, "./") {
-			a = "./" + path.Clean(a)
-			if a == "./." {
-				a = "."
-			}
-		} else {
-			a = path.Clean(a)
-		}
-		if isMetaPackage(a) {
-			out = append(out, c.allPackages(a)...)
-			continue
-		}
-		out = append(out, a)
-	}
-	return out
-}
-
-// importPaths returns the import paths to use for the given command line.
-func (c *Context) importPaths(args []string) []string {
-	args = c.importPathsNoDotExpansion(args)
-	var out []string
-	for _, a := range args {
-		if strings.Contains(a, "...") {
-			if build.IsLocalImport(a) {
-				out = append(out, c.allPackagesInFS(a)...)
-			} else {
-				out = append(out, c.allPackages(a)...)
-			}
-			continue
-		}
-		out = append(out, a)
-	}
-	return out
-}
-
-// allPackages returns all the packages that can be found
-// under the $GOPATH directories and $GOROOT matching pattern.
-// The pattern is either "all" (all packages), "std" (standard packages),
-// "cmd" (standard commands), or a path including "...".
-func (c *Context) allPackages(pattern string) []string {
-	pkgs := c.matchPackages(pattern)
-	if len(pkgs) == 0 {
-		fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
-	}
-	return pkgs
-}
-
-// allPackagesInFS is like allPackages but is passed a pattern
-// beginning ./ or ../, meaning it should scan the tree rooted
-// at the given directory. There are ... in the pattern too.
-func (c *Context) allPackagesInFS(pattern string) []string {
-	pkgs := c.matchPackagesInFS(pattern)
-	if len(pkgs) == 0 {
-		fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
-	}
-	return pkgs
-}
-
-// matchPackagesInFS returns a list of package paths matching pattern,
-// which must begin with ./ or ../
-// (see go help packages for pattern syntax).
-func (c *Context) matchPackagesInFS(pattern string) []string {
-	// Find directory to begin the scan.
-	// Could be smarter but this one optimization
-	// is enough for now, since ... is usually at the
-	// end of a path.
-	i := strings.Index(pattern, "...")
-	dir, _ := path.Split(pattern[:i])
-
-	// pattern begins with ./ or ../.
-	// path.Clean will discard the ./ but not the ../.
-	// We need to preserve the ./ for pattern matching
-	// and in the returned import paths.
-	prefix := ""
-	if strings.HasPrefix(pattern, "./") {
-		prefix = "./"
-	}
-	match := matchPattern(pattern)
-
-	var pkgs []string
-	filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
-		if err != nil || !fi.IsDir() {
-			return nil
-		}
-		if path == dir {
-			// filepath.Walk starts at dir and recurses. For the recursive case,
-			// the path is the result of filepath.Join, which calls filepath.Clean.
-			// The initial case is not Cleaned, though, so we do this explicitly.
-			//
-			// This converts a path like "./io/" to "io". Without this step, running
-			// "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io
-			// package, because prepending the prefix "./" to the unclean path would
-			// result in "././io", and match("././io") returns false.
-			path = filepath.Clean(path)
-		}
-
-		// Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..".
-		_, elem := filepath.Split(path)
-		dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".."
-		if dot || strings.HasPrefix(elem, "_") || elem == "testdata" {
-			return filepath.SkipDir
-		}
-
-		name := prefix + filepath.ToSlash(path)
-		if !match(name) {
-			return nil
-		}
-
-		// We keep the directory if we can import it, or if we can't import it
-		// due to invalid Go source files. This means that directories containing
-		// parse errors will be built (and fail) instead of being silently skipped
-		// as not matching the pattern. Go 1.5 and earlier skipped, but that
-		// behavior means people miss serious mistakes.
-		// See golang.org/issue/11407.
-		if p, err := c.BuildContext.ImportDir(path, 0); err != nil && shouldIgnoreImport(p) {
-			if _, noGo := err.(*build.NoGoError); !noGo {
-				log.Print(err)
-			}
-			return nil
-		}
-		pkgs = append(pkgs, name)
-		return nil
-	})
-	return pkgs
-}
-
-// isMetaPackage checks if name is a reserved package name that expands to multiple packages.
-func isMetaPackage(name string) bool {
-	return name == "std" || name == "cmd" || name == "all"
-}
-
-// isStandardImportPath reports whether $GOROOT/src/path should be considered
-// part of the standard distribution. For historical reasons we allow people to add
-// their own code to $GOROOT instead of using $GOPATH, but we assume that
-// code will start with a domain name (dot in the first element).
-func isStandardImportPath(path string) bool {
-	i := strings.Index(path, "/")
-	if i < 0 {
-		i = len(path)
-	}
-	elem := path[:i]
-	return !strings.Contains(elem, ".")
-}
-
-// hasPathPrefix reports whether the path s begins with the
-// elements in prefix.
-func hasPathPrefix(s, prefix string) bool {
-	switch {
-	default:
-		return false
-	case len(s) == len(prefix):
-		return s == prefix
-	case len(s) > len(prefix):
-		if prefix != "" && prefix[len(prefix)-1] == '/' {
-			return strings.HasPrefix(s, prefix)
-		}
-		return s[len(prefix)] == '/' && s[:len(prefix)] == prefix
-	}
-}
-
-// treeCanMatchPattern(pattern)(name) reports whether
-// name or children of name can possibly match pattern.
-// Pattern is the same limited glob accepted by matchPattern.
-func treeCanMatchPattern(pattern string) func(name string) bool {
-	wildCard := false
-	if i := strings.Index(pattern, "..."); i >= 0 {
-		wildCard = true
-		pattern = pattern[:i]
-	}
-	return func(name string) bool {
-		return len(name) <= len(pattern) && hasPathPrefix(pattern, name) ||
-			wildCard && strings.HasPrefix(name, pattern)
-	}
-}
diff --git a/vendor/github.com/kisielk/gotool/tool.go b/vendor/github.com/kisielk/gotool/tool.go
deleted file mode 100644
index c7409e11e6a80d3e494942d68652ef1bff06b65b..0000000000000000000000000000000000000000
--- a/vendor/github.com/kisielk/gotool/tool.go
+++ /dev/null
@@ -1,48 +0,0 @@
-// Package gotool contains utility functions used to implement the standard
-// "cmd/go" tool, provided as a convenience to developers who want to write
-// tools with similar semantics.
-package gotool
-
-import "go/build"
-
-// Export functions here to make it easier to keep the implementations up to date with upstream.
-
-// DefaultContext is the default context that uses build.Default.
-var DefaultContext = Context{
-	BuildContext: build.Default,
-}
-
-// A Context specifies the supporting context.
-type Context struct {
-	// BuildContext is the build.Context that is used when computing import paths.
-	BuildContext build.Context
-}
-
-// ImportPaths returns the import paths to use for the given command line.
-//
-// The path "all" is expanded to all packages in $GOPATH and $GOROOT.
-// The path "std" is expanded to all packages in the Go standard library.
-// The path "cmd" is expanded to all Go standard commands.
-// The string "..." is treated as a wildcard within a path.
-// When matching recursively, directories are ignored if they are prefixed with
-// a dot or an underscore (such as ".foo" or "_foo"), or are named "testdata".
-// Relative import paths are not converted to full import paths.
-// If args is empty, a single element "." is returned.
-func (c *Context) ImportPaths(args []string) []string {
-	return c.importPaths(args)
-}
-
-// ImportPaths returns the import paths to use for the given command line
-// using default context.
-//
-// The path "all" is expanded to all packages in $GOPATH and $GOROOT.
-// The path "std" is expanded to all packages in the Go standard library.
-// The path "cmd" is expanded to all Go standard commands.
-// The string "..." is treated as a wildcard within a path.
-// When matching recursively, directories are ignored if they are prefixed with
-// a dot or an underscore (such as ".foo" or "_foo"), or are named "testdata".
-// Relative import paths are not converted to full import paths.
-// If args is empty, a single element "." is returned.
-func ImportPaths(args []string) []string {
-	return DefaultContext.importPaths(args)
-}
diff --git a/vendor/github.com/kkHAIKE/contextcheck/.gitignore b/vendor/github.com/kkHAIKE/contextcheck/.gitignore
index fc1b400c8a3207ec704ea4c3c06763c767e8494f..1c2ffa5f477b8dc3a442e865474624c0dd5689ad 100644
--- a/vendor/github.com/kkHAIKE/contextcheck/.gitignore
+++ b/vendor/github.com/kkHAIKE/contextcheck/.gitignore
@@ -16,3 +16,5 @@
 
 .idea
 .DS_Store
+
+/contextcheck
diff --git a/vendor/github.com/kkHAIKE/contextcheck/Makefile b/vendor/github.com/kkHAIKE/contextcheck/Makefile
index 9321e9de39d68cd607855f2c610e847b5c0534dc..613d35e9394c2cb6918a207db03f150cbc811999 100644
--- a/vendor/github.com/kkHAIKE/contextcheck/Makefile
+++ b/vendor/github.com/kkHAIKE/contextcheck/Makefile
@@ -1,5 +1,15 @@
+.PHONY: clean test build
+
+default: test build
+
+clean:
+	rm -rf dist/ cover.out
+
+test: clean
+	go test -v -cover ./...
+
 build:
-	@GO111MODULE=on go build -ldflags '-s -w' -o contextcheck ./cmd/contextcheck/main.go
+	go build -ldflags '-s -w' -o contextcheck ./cmd/contextcheck/main.go
 
 install:
-	@GO111MODULE=on go install -ldflags '-s -w' ./cmd/contextcheck
+	go install -ldflags '-s -w' ./cmd/contextcheck
diff --git a/vendor/github.com/kkHAIKE/contextcheck/README.md b/vendor/github.com/kkHAIKE/contextcheck/README.md
index 2cc7b2e489bd60b220d183c54d43ec332f4150fd..105b2de5a1d5096ce900cfd16bc9aa6d0dd3b757 100644
--- a/vendor/github.com/kkHAIKE/contextcheck/README.md
+++ b/vendor/github.com/kkHAIKE/contextcheck/README.md
@@ -3,7 +3,7 @@
 
 # contextcheck
 
-`contextcheck` is a static analysis tool, it is used to check whether the function uses a non-inherited context, which will result in a broken call link.
+`contextcheck` is a static analysis tool used to check whether a function uses a non-inherited context that could result in a broken call link.
 
 For example:
 
@@ -94,8 +94,8 @@ func NoInheritCancel(_ context.Context) (context.Context,context.CancelFunc) {
 }
 ```
 
-### skip check specify function
-You can add `// nolint: contextcheck` in function decl doc comment, to skip this linter in some false-positive case.
+### skip the check for the specified function
+To skip this linter in some false-positive cases, you can add `// nolint: contextcheck` to the function declaration's doc comment.
 
 ```go
 // nolint: contextcheck
@@ -112,8 +112,8 @@ func call3() {
 }
 ```
 
-### force mark specify function have server-side http.Request parameter
-default behavior is mark http.HandlerFunc or a function use r.Context().
+### force the marking of a specified function as having a server-side `http.Request` parameter
+The default behavior is to mark `http.HandlerFunc` or any function that uses `r.Context()`.
 
 ```go
 // @contextcheck(req_has_ctx)
diff --git a/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go b/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go
index c9ad0101fe99a266ec93b3a9f319fd4999c7b938..62696351adc7c60b6479a04eecb6435eb57ffc75 100644
--- a/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go
+++ b/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go
@@ -2,9 +2,9 @@ package contextcheck
 
 import (
 	"go/ast"
+	"go/token"
 	"go/types"
 	"regexp"
-	"strconv"
 	"strings"
 	"sync"
 
@@ -68,6 +68,11 @@ var (
 	pkgFactMu  sync.RWMutex
 )
 
+// element is the minimal behaviour Reportf needs from an SSA value: a source
+// position and access to its enclosing function.
+type element interface {
+	Pos() token.Pos
+	Parent() *ssa.Function
+}
+
 type resInfo struct {
 	Valid bool
 	Funcs []string
@@ -216,37 +221,6 @@ func (r *runner) collectHttpTyps(pssa *buildssa.SSA) {
 	}
 }
 
-func (r *runner) noImportedContextAndHttp(f *ssa.Function) (ret bool) {
-	if !f.Pos().IsValid() {
-		return false
-	}
-
-	file := analysisutil.File(r.pass, f.Pos())
-	if file == nil {
-		return false
-	}
-
-	if skip, has := r.skipFile[file]; has {
-		return skip
-	}
-	defer func() {
-		r.skipFile[file] = ret
-	}()
-
-	for _, impt := range file.Imports {
-		path, err := strconv.Unquote(impt.Path.Value)
-		if err != nil {
-			continue
-		}
-		path = analysisutil.RemoveVendor(path)
-		if path == ctxPkg || path == httpPkg {
-			return false
-		}
-	}
-
-	return true
-}
-
 func (r *runner) checkIsEntry(f *ssa.Function) (ret entryType) {
 	// if r.noImportedContextAndHttp(f) {
 	// 	return EntryNormal
@@ -456,7 +430,7 @@ func (r *runner) collectCtxRef(f *ssa.Function, isHttpHandler bool) (refMap map[
 
 	for instr := range storeInstrs {
 		if !checkedRefMap[instr.Val] {
-			r.pass.Reportf(instr.Pos(), "Non-inherited new context, use function like `context.WithXXX` instead")
+			r.Reportf(instr, "Non-inherited new context, use function like `context.WithXXX` instead")
 			ok = false
 		}
 	}
@@ -464,7 +438,7 @@ func (r *runner) collectCtxRef(f *ssa.Function, isHttpHandler bool) (refMap map[
 	for instr := range phiInstrs {
 		for _, v := range instr.Edges {
 			if !checkedRefMap[v] {
-				r.pass.Reportf(instr.Pos(), "Non-inherited new context, use function like `context.WithXXX` instead")
+				r.Reportf(instr, "Non-inherited new context, use function like `context.WithXXX` instead")
 				ok = false
 			}
 		}
@@ -564,9 +538,9 @@ func (r *runner) checkFuncWithCtx(f *ssa.Function, tp entryType) {
 			if tp&CtxIn != 0 {
 				if !refMap[instr] {
 					if isHttpHandler {
-						r.pass.Reportf(instr.Pos(), "Non-inherited new context, use function like `context.WithXXX` or `r.Context` instead")
+						r.Reportf(instr, "Non-inherited new context, use function like `context.WithXXX` or `r.Context` instead")
 					} else {
-						r.pass.Reportf(instr.Pos(), "Non-inherited new context, use function like `context.WithXXX` instead")
+						r.Reportf(instr, "Non-inherited new context, use function like `context.WithXXX` instead")
 					}
 				}
 			}
@@ -578,9 +552,11 @@ func (r *runner) checkFuncWithCtx(f *ssa.Function, tp entryType) {
 
 			key := ff.RelString(nil)
 			res, ok := r.getValue(key, ff)
-			if ok {
-				if !res.Valid {
-					r.pass.Reportf(instr.Pos(), "Function `%s` should pass the context parameter", strings.Join(reverse(res.Funcs), "->"))
+			if ok && !res.Valid {
+				if instr.Pos().IsValid() {
+					r.Reportf(instr, "Function `%s` should pass the context parameter", strings.Join(reverse(res.Funcs), "->"))
+				} else {
+					r.Reportf(ff, "Function `%s` should pass the context parameter", strings.Join(reverse(res.Funcs), "->"))
 				}
 			}
 		}
@@ -806,6 +782,20 @@ func (r *runner) setFact(key string, valid bool, funcs ...string) {
 	}
 }
 
+func (r *runner) Reportf(instr element, format string, args ...interface{}) {
+	pos := instr.Pos()
+
+	if !pos.IsValid() && instr.Parent() != nil {
+		pos = instr.Parent().Pos()
+	}
+
+	if !pos.IsValid() {
+		return
+	}
+
+	r.pass.Reportf(pos, format, args...)
+}
+
 // setPkgFact save fact to mem
 func setPkgFact(pkg *types.Package, fact ctxFact) {
 	pkgFactMu.Lock()
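The new `element` interface and `Reportf` wrapper in the hunk above rely on the fact that both SSA instructions and SSA functions expose `Pos()` and `Parent()`. A compile-time sketch of that relationship (not part of the upstream change, shown only to illustrate why falling back to the parent function's position is possible):

```go
package contextcheck

import (
	"go/token"

	"golang.org/x/tools/go/ssa"
)

// element is the abstraction Reportf accepts: anything with a position
// and an enclosing function to fall back to when that position is invalid.
type element interface {
	Pos() token.Pos
	Parent() *ssa.Function
}

// Both SSA instructions and SSA functions satisfy it, so a report can be
// attached either to the offending instruction or to its parent function.
var (
	_ element = ssa.Instruction(nil)
	_ element = (*ssa.Function)(nil)
)
```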
diff --git a/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go b/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go
index e21f278cf399afabb131ad795177d3888105d8f4..e9187d6fdb1c2d3bcfe4fdf6b1d6647fe27c13cd 100644
--- a/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go
+++ b/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go
@@ -26,6 +26,7 @@ type parallelAnalyzer struct {
 	analyzer              *analysis.Analyzer
 	ignoreMissing         bool
 	ignoreMissingSubtests bool
+	ignoreLoopVar         bool
 }
 
 func newParallelAnalyzer() *parallelAnalyzer {
@@ -34,6 +35,7 @@ func newParallelAnalyzer() *parallelAnalyzer {
 	var flags flag.FlagSet
 	flags.BoolVar(&a.ignoreMissing, "i", false, "ignore missing calls to t.Parallel")
 	flags.BoolVar(&a.ignoreMissingSubtests, "ignoremissingsubtests", false, "ignore missing calls to t.Parallel in subtests")
+	flags.BoolVar(&a.ignoreLoopVar, "ignoreloopVar", false, "ignore loop variable detection")
 
 	a.analyzer = &analysis.Analyzer{
 		Name:  "paralleltest",
@@ -54,8 +56,10 @@ func (a *parallelAnalyzer) run(pass *analysis.Pass) (interface{}, error) {
 	inspector.Preorder(nodeFilter, func(node ast.Node) {
 		funcDecl := node.(*ast.FuncDecl)
 		var funcHasParallelMethod,
+			funcCantParallelMethod,
 			rangeStatementOverTestCasesExists,
-			rangeStatementHasParallelMethod bool
+			rangeStatementHasParallelMethod,
+			rangeStatementCantParallelMethod bool
 		var loopVariableUsedInRun *string
 		var numberOfTestRun int
 		var positionOfTestRunNode []ast.Node
@@ -77,20 +81,29 @@ func (a *parallelAnalyzer) run(pass *analysis.Pass) (interface{}, error) {
 						funcHasParallelMethod = methodParallelIsCalledInTestFunction(n, testVar)
 					}
 
+					// Check if the test calls t.Setenv, which cannot be used in parallel tests or in tests with parallel ancestors
+					if !funcCantParallelMethod {
+						funcCantParallelMethod = methodSetenvIsCalledInTestFunction(n, testVar)
+					}
+
 					// Check if the t.Run within the test function is calling t.Parallel
 					if methodRunIsCalledInTestFunction(n, testVar) {
 						// n is a call to t.Run; find out the name of the subtest's *testing.T parameter.
 						innerTestVar := getRunCallbackParameterName(n)
 
 						hasParallel := false
+						cantParallel := false
 						numberOfTestRun++
 						ast.Inspect(v, func(p ast.Node) bool {
 							if !hasParallel {
 								hasParallel = methodParallelIsCalledInTestFunction(p, innerTestVar)
 							}
+							if !cantParallel {
+								cantParallel = methodSetenvIsCalledInTestFunction(p, innerTestVar)
+							}
 							return true
 						})
-						if !hasParallel {
+						if !hasParallel && !cantParallel {
 							positionOfTestRunNode = append(positionOfTestRunNode, n)
 						}
 					}
@@ -122,7 +135,11 @@ func (a *parallelAnalyzer) run(pass *analysis.Pass) (interface{}, error) {
 								rangeStatementHasParallelMethod = methodParallelIsCalledInMethodRun(r.X, innerTestVar)
 							}
 
-							if loopVariableUsedInRun == nil {
+							if !rangeStatementCantParallelMethod {
+								rangeStatementCantParallelMethod = methodSetenvIsCalledInMethodRun(r.X, innerTestVar)
+							}
+
+							if !a.ignoreLoopVar && loopVariableUsedInRun == nil {
 								if run, ok := r.X.(*ast.CallExpr); ok {
 									loopVariableUsedInRun = loopVarReferencedInRun(run, loopVars, pass.TypesInfo)
 								}
@@ -134,12 +151,17 @@ func (a *parallelAnalyzer) run(pass *analysis.Pass) (interface{}, error) {
 			}
 		}
 
-		if !a.ignoreMissing && !funcHasParallelMethod {
+		// Descendants that call Setenv also prevent tests from calling Parallel
+		if rangeStatementCantParallelMethod {
+			funcCantParallelMethod = true
+		}
+
+		if !a.ignoreMissing && !funcHasParallelMethod && !funcCantParallelMethod {
 			pass.Reportf(node.Pos(), "Function %s missing the call to method parallel\n", funcDecl.Name.Name)
 		}
 
 		if rangeStatementOverTestCasesExists && rangeNode != nil {
-			if !rangeStatementHasParallelMethod {
+			if !rangeStatementHasParallelMethod && !rangeStatementCantParallelMethod {
 				if !a.ignoreMissing && !a.ignoreMissingSubtests {
 					pass.Reportf(rangeNode.Pos(), "Range statement for test %s missing the call to method parallel in test Run\n", funcDecl.Name.Name)
 				}
@@ -162,15 +184,23 @@ func (a *parallelAnalyzer) run(pass *analysis.Pass) (interface{}, error) {
 }
 
 func methodParallelIsCalledInMethodRun(node ast.Node, testVar string) bool {
-	var methodParallelCalled bool
+	return targetMethodIsCalledInMethodRun(node, testVar, "Parallel")
+}
+
+func methodSetenvIsCalledInMethodRun(node ast.Node, testVar string) bool {
+	return targetMethodIsCalledInMethodRun(node, testVar, "Setenv")
+}
+
+func targetMethodIsCalledInMethodRun(node ast.Node, testVar, targetMethod string) bool {
+	var called bool
 	// nolint: gocritic
 	switch callExp := node.(type) {
 	case *ast.CallExpr:
 		for _, arg := range callExp.Args {
-			if !methodParallelCalled {
+			if !called {
 				ast.Inspect(arg, func(n ast.Node) bool {
-					if !methodParallelCalled {
-						methodParallelCalled = methodParallelIsCalledInRunMethod(n, testVar)
+					if !called {
+						called = exprCallHasMethod(n, testVar, targetMethod)
 						return true
 					}
 					return false
@@ -178,11 +208,7 @@ func methodParallelIsCalledInMethodRun(node ast.Node, testVar string) bool {
 			}
 		}
 	}
-	return methodParallelCalled
-}
-
-func methodParallelIsCalledInRunMethod(node ast.Node, testVar string) bool {
-	return exprCallHasMethod(node, testVar, "Parallel")
+	return called
 }
 
 func methodParallelIsCalledInTestFunction(node ast.Node, testVar string) bool {
@@ -196,6 +222,11 @@ func methodRunIsCalledInRangeStatement(node ast.Node, testVar string) bool {
 func methodRunIsCalledInTestFunction(node ast.Node, testVar string) bool {
 	return exprCallHasMethod(node, testVar, "Run")
 }
+
+func methodSetenvIsCalledInTestFunction(node ast.Node, testVar string) bool {
+	return exprCallHasMethod(node, testVar, "Setenv")
+}
+
 func exprCallHasMethod(node ast.Node, receiverName, methodName string) bool {
 	// nolint: gocritic
 	switch n := node.(type) {
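The paralleltest hunks above teach the analyzer that `t.Setenv` cannot be combined with `t.Parallel`, so such tests and subtests are no longer reported as missing the parallel call. A hedged sketch of a test the updated linter should now leave alone (names are illustrative):

```go
package demo_test

import "testing"

// TestWithSetenv is no longer reported as missing t.Parallel():
// t.Setenv may not be used in parallel tests, so the analyzer treats
// the test (and its subtest) as ones that cannot be parallelized.
func TestWithSetenv(t *testing.T) {
	t.Setenv("APP_MODE", "test")

	t.Run("sub", func(t *testing.T) {
		t.Setenv("APP_SUBMODE", "test")
	})
}
```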
diff --git a/vendor/github.com/lasiar/canonicalheader/.gitignore b/vendor/github.com/lasiar/canonicalheader/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..723ef36f4e4f32c4560383aa5987c575a30c6535
--- /dev/null
+++ b/vendor/github.com/lasiar/canonicalheader/.gitignore
@@ -0,0 +1 @@
+.idea
\ No newline at end of file
diff --git a/vendor/github.com/lasiar/canonicalheader/.golangci.yaml b/vendor/github.com/lasiar/canonicalheader/.golangci.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5652c8d6ccf60d280a532498932c125b2289a740
--- /dev/null
+++ b/vendor/github.com/lasiar/canonicalheader/.golangci.yaml
@@ -0,0 +1,802 @@
+# See: https://olegk.dev/go-linters-configuration-the-right-version
+
+run:
+  # Automatically adjust the maximum concurrency to the container CPU quota.
+  concurrency: 0
+
+  # I really care about the result, so I'm fine to wait for it.
+  timeout: 30m
+
+  # Fail if the error was met.
+  issues-exit-code: 1
+
+  # This is very important, bugs in tests are not acceptable either.
+  tests: true
+
+  # In most cases this can be empty but there is a popular pattern
+  # to keep integration tests under this tag. Such tests often require
+  # additional setups like Postgres, Redis etc and are run separately.
+  # (to be honest I don't find this useful but I have such tags)
+  build-tags:
+    - integration
+
+  # Autogenerated files can be skipped (I'm looking at you gRPC).
+  # AFAIK autogen files are skipped but skipping the whole directory should be somewhat faster.
+  #skip-files:
+  #  - "protobuf/.*.go"
+
+  # With the read-only mode linter will fail if go.mod file is outdated.
+  modules-download-mode: readonly
+
+  # Till today I didn't know this param exists, never ran 2 golangci-lint at once.
+  allow-parallel-runners: false
+
+  # Keep this empty to use the Go version from the go.mod file.
+  go: ""
+
+linters:
+  # Set to true runs only fast linters.
+  # Good option for 'lint on save', pre-commit hook or CI.
+  fast: false
+
+  enable:
+    # Check for pass []any as any in variadic func(...any).
+    # Rare case but saved me from debugging a few times.
+    - asasalint
+
+    # I prefer plain ASCII identifiers.
+    # Symbol `∆` instead of `delta` looks cool but no thanks.
+    - asciicheck
+
+    # Checks for dangerous unicode character sequences.
+    # Super rare but why not to be a bit paranoid?
+    - bidichk
+
+    # Checks whether HTTP response body is closed successfully.
+    - bodyclose
+
+    # Check whether the function uses a non-inherited context.
+    - contextcheck
+
+    # Check for two durations multiplied together.
+    - durationcheck
+
+    # Forces to not skip error check.
+    - errcheck
+
+    # Checks `Err-` prefix for var and `-Error` suffix for error type.
+    - errname
+
+    # Suggests to use `%w` for error-wrapping.
+    - errorlint
+
+    # Checks for pointers to enclosing loop variables.
+    - exportloopref
+
+    # As you already know I'm a co-author. It would be strange to not use
+    # one of my warmly loved projects.
+    - gocritic
+
+    # Forces to put `.` at the end of the comment. Code is poetry.
+    - godot
+
+    # Might not be that important but I prefer to keep all of them.
+    # `gofumpt` is amazing, kudos to Daniel Marti https://github.com/mvdan/gofumpt
+    - gofmt
+    - gofumpt
+    - goimports
+
+    # Allow or ban replace directives in go.mod
+    # or force explanation for retract directives.
+    - gomoddirectives
+
+    # Powerful security-oriented linter. But requires some time to
+    # configure it properly, see https://github.com/securego/gosec#available-rules
+    - gosec
+
+    # Linter that specializes in simplifying code.
+    - gosimple
+
+    # Official Go tool. Must have.
+    - govet
+
+    # Detects when assignments to existing variables are not used
+    # Last week I caught a bug with it.
+    - ineffassign
+
+    # Fix all the misspells, amazing thing.
+    - misspell
+
+    # Finds naked/bare returns and requires changing them.
+    - nakedret
+
+    # Both require a bit more explicit returns.
+    - nilerr
+    - nilnil
+
+    # Finds sending HTTP request without context.Context.
+    - noctx
+
+    # Forces comment why another check is disabled.
+    # Better not to have //nolint: at all ;)
+    - nolintlint
+
+    # Finds slices that could potentially be pre-allocated.
+    # Small performance win + cleaner code.
+    - prealloc
+
+    # Finds shadowing of Go's predeclared identifiers.
+    # I hear a lot of complaints from junior developers.
+    # But after some time they find it very useful.
+    - predeclared
+
+    # Lint your Prometheus metrics name.
+    - promlinter
+
+    # Checks that package variables are not reassigned.
+    # Super rare case but can catch bad things (like `io.EOF = nil`)
+    - reassign
+
+    # Drop-in replacement of `golint`.
+    - revive
+
+    # Somewhat similar to `bodyclose` but for `database/sql` package.
+    - rowserrcheck
+    - sqlclosecheck
+
+    # I have found that it's not the same as staticcheck binary :\
+    - staticcheck
+
+    # Is a replacement for `golint`, similar to `revive`.
+    - stylecheck
+
+    # Check struct tags.
+    - tagliatelle
+
+    # Test-related checks. All of them are good.
+    - tenv
+    - testableexamples
+    - thelper
+    - tparallel
+
+    # Remove unnecessary type conversions, make code cleaner
+    - unconvert
+
+    # Might be noisy but better to know what is unused
+    - unparam
+
+    # Must have. Finds unused declarations.
+    - unused
+
+    # Detect the possibility to use variables/constants from stdlib.
+    - usestdlibvars
+
+    # Finds wasted assignment statements.
+    - wastedassign
+
+  disable:
+    # Detects struct contained context.Context field. Not a problem.
+    - containedctx
+
+    # Checks function and package cyclomatic complexity.
+    # I can have a long but trivial switch-case.
+    #
+    # Cyclomatic complexity is a measurement, not a goal.
+    # (c) Bryan C. Mills / https://github.com/bcmills
+    - cyclop
+
+    # Abandoned, replaced by `unused`.
+    - deadcode
+
+    # Check declaration order of types, consts, vars and funcs.
+    # I like it but I don't use it.
+    - decorder
+
+    # Checks if package imports are in a list of acceptable packages.
+    # I'm very picky about what I import, so no automation.
+    - depguard
+
+    # Checks assignments with too many blank identifiers. Very rare.
+    - dogsled
+
+    # Tool for code clone detection.
+    - dupl
+
+    # Find duplicate words, rare.
+    - dupword
+
+    # I'm fine to check the error from json.Marshal ¯\_(ツ)_/¯
+    - errchkjson
+
+    # All SQL queries MUST BE covered with tests.
+    - execinquery
+
+    # Forces to handle more cases. Cool but noisy.
+    - exhaustive
+    - exhaustivestruct # Deprecated, replaced by check below.
+    - exhaustruct
+
+    # Forbids some identifiers. I don't have a case for it.
+    - forbidigo
+
+    # Finds forced type assertions, very good for juniors.
+    - forcetypeassert
+
+    # I might have long but a simple function.
+    - funlen
+
+    # Imports order. I do this manually ¯\_(ツ)_/¯
+    - gci
+
+    # I'm not a fan of ginkgo and gomega packages.
+    - ginkgolinter
+
+    # Checks that compiler directive comments (//go:) are valid. Rare.
+    - gocheckcompilerdirectives
+
+    # Globals and init() are ok.
+    - gochecknoglobals
+    - gochecknoinits
+
+    # Same as `cyclop` linter (see above)
+    - gocognit
+    - goconst
+    - gocyclo
+
+    # TODO and friends are ok.
+    - godox
+
+    # Check the error handling expressions. Too noisy.
+    - goerr113
+
+    # I don't use file headers.
+    - goheader
+
+    # 1st Go linter, deprecated :( use `revive`.
+    - golint
+
+    # Reports magic consts. Might be noisy but still good.
+    - gomnd
+
+    # Allowed/blocked packages to import. I prefer to do it manually.
+    - gomodguard
+
+    # Printf-like functions must have -f.
+    - goprintffuncname
+
+    # Groups declarations; I prefer to do it manually.
+    - grouper
+
+    # Deprecated.
+    - ifshort
+
+    # Checks imports aliases, rare.
+    - importas
+
+    # Forces tiny interfaces, very subjective.
+    - interfacebloat
+
+    # Accept interfaces, return types. Not always.
+    - ireturn
+
+    # I don't set line length. 120 is fine by the way ;)
+    - lll
+
+    # Some log checkers, might be useful.
+    - loggercheck
+
+    # Maintainability index of each function, subjective.
+    - maintidx
+
+    # Slice declarations with non-zero initial length. Not my case.
+    - makezero
+
+    # Deprecated. Use govet `fieldalignment`.
+    - maligned
+
+    # Enforce tags in un/marshaled structs. Cool but not my case.
+    - musttag
+
+    # Deeply nested if statements, subjective.
+    - nestif
+
+    # Forces newlines in some places.
+    - nlreturn
+
+    # Reports all named returns, not that bad.
+    - nonamedreturns
+
+    # Deprecated. Replaced by `revive`.
+    - nosnakecase
+
+    # Finds misuse of Sprintf with host:port in a URL. Cool but rare.
+    - nosprintfhostport
+
+    # I don't use t.Parallel() that much.
+    - paralleltest
+
+    # Often non-`_test` package is ok.
+    - testpackage
+
+    # Compiler can do it too :)
+    - typecheck
+
+    # I'm fine with long variable names with a small scope.
+    - varnamelen
+
+    # gofmt,gofumpt covers that (from what I know).
+    - whitespace
+
+    # Don't find it useful to wrap all errors from external packages.
+    - wrapcheck
+
+    # Forces you to use empty lines. Great if configured correctly.
+    # I mean there is an agreement in a team.
+    - wsl
+
+linters-settings:
+  revive:
+    # Maximum number of open files at the same time.
+    # See https://github.com/mgechev/revive#command-line-flags
+    # Defaults to unlimited.
+    max-open-files: 2048
+    # When set to false, ignores files with "GENERATED" header, similar to golint.
+    # See https://github.com/mgechev/revive#available-rules for details.
+    # Default: false
+    ignore-generated-header: true
+    # Sets the default severity.
+    # See https://github.com/mgechev/revive#configuration
+    # Default: warning
+    severity: error
+    # Enable all available rules.
+    # Default: false
+    enable-all-rules: true
+    # Sets the default failure confidence.
+    # This means that linting errors with less than 0.8 confidence will be ignored.
+    # Default: 0.8
+    confidence: 0.1
+    rules:
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#add-constant
+      - name: add-constant
+        severity: warning
+        disabled: false
+        arguments:
+          - maxLitCount: "3"
+            allowStrs: '""'
+            allowInts: "0,1,2"
+            allowFloats: "0.0,0.,1.0,1.,2.0,2."
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#argument-limit
+      - name: argument-limit
+        severity: warning
+        disabled: false
+        arguments: [4]
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#atomic
+      - name: atomic
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#banned-characters
+      - name: banned-characters
+        severity: warning
+        disabled: false
+        arguments: ["Ω", "Σ", "σ", "7"]
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#bare-return
+      - name: bare-return
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#blank-imports
+      - name: blank-imports
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#bool-literal-in-expr
+      - name: bool-literal-in-expr
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#call-to-gc
+      - name: call-to-gc
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#cognitive-complexity
+      - name: cognitive-complexity
+        severity: warning
+        disabled: true
+        arguments: [7]
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#comment-spacings
+      - name: comment-spacings
+        severity: warning
+        disabled: false
+        arguments:
+          - mypragma
+          - otherpragma
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#confusing-naming
+      - name: confusing-naming
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#confusing-results
+      - name: confusing-results
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#constant-logical-expr
+      - name: constant-logical-expr
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#context-as-argument
+      - name: context-as-argument
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#context-keys-type
+      - name: context-keys-type
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#cyclomatic
+      - name: cyclomatic
+        severity: warning
+        disabled: true
+        arguments: [3]
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#datarace
+      - name: datarace
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#deep-exit
+      - name: deep-exit
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#defer
+      - name: defer
+        severity: warning
+        disabled: false
+        arguments:
+          - ["call-chain", "loop"]
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#dot-imports
+      - name: dot-imports
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#duplicated-imports
+      - name: duplicated-imports
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#early-return
+      - name: early-return
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#empty-block
+      - name: empty-block
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#empty-lines
+      - name: empty-lines
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#enforce-map-style
+      - name: enforce-map-style
+        severity: warning
+        disabled: false
+        arguments:
+          - "make"
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#error-naming
+      - name: error-naming
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#error-return
+      - name: error-return
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#function-length
+      - name: function-length
+        severity: warning
+        disabled: true
+        arguments: [10, 0]
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#error-strings
+      - name: error-strings
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#errorf
+      - name: errorf
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#exported
+      - name: exported
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#file-header
+      - name: file-header
+        severity: warning
+        disabled: true
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#flag-parameter
+      - name: flag-parameter
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#function-result-limit
+      - name: function-result-limit
+        severity: warning
+        disabled: false
+        arguments: [2]
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#get-return
+      - name: get-return
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#identical-branches
+      - name: identical-branches
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#if-return
+      - name: if-return
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#increment-decrement
+      - name: increment-decrement
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#indent-error-flow
+      - name: indent-error-flow
+        severity: warning
+        disabled: false
+        arguments:
+          - "preserveScope"
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#import-alias-naming
+      - name: import-alias-naming
+        severity: warning
+        disabled: false
+        arguments:
+          - "^[a-z][a-z0-9]{0,}$"
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#imports-blacklist
+      - name: imports-blacklist
+        severity: warning
+        disabled: false
+        arguments:
+          - "crypto/md5"
+          - "crypto/sha1"
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#import-shadowing
+      - name: import-shadowing
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#line-length-limit
+      - name: line-length-limit
+        severity: warning
+        disabled: true
+        arguments: [80]
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#max-public-structs
+      - name: max-public-structs
+        severity: warning
+        disabled: false
+        arguments: [3]
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#modifies-parameter
+      - name: modifies-parameter
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#modifies-value-receiver
+      - name: modifies-value-receiver
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#nested-structs
+      - name: nested-structs
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#optimize-operands-order
+      - name: optimize-operands-order
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#package-comments
+      - name: package-comments
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#range
+      - name: range
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#range-val-in-closure
+      - name: range-val-in-closure
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#range-val-address
+      - name: range-val-address
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#receiver-naming
+      - name: receiver-naming
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#redundant-import-alias
+      - name: redundant-import-alias
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#redefines-builtin-id
+      - name: redefines-builtin-id
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#string-of-int
+      - name: string-of-int
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#string-format
+      - name: string-format
+        severity: warning
+        disabled: false
+        arguments:
+          - - 'core.WriteError[1].Message'
+            - '/^([^A-Z]|$)/'
+            - must not start with a capital letter
+          - - 'fmt.Errorf[0]'
+            - '/(^|[^\.!?])$/'
+            - must not end in punctuation
+          - - panic
+            - '/^[^\n]*$/'
+            - must not contain line breaks
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#struct-tag
+      - name: struct-tag
+        arguments:
+          - "json,inline"
+          - "bson,outline,gnu"
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#superfluous-else
+      - name: superfluous-else
+        severity: warning
+        disabled: false
+        arguments:
+          - "preserveScope"
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#time-equal
+      - name: time-equal
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#time-naming
+      - name: time-naming
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#var-naming
+      - name: var-naming
+        severity: warning
+        disabled: false
+        arguments:
+          - ["ID"] # AllowList
+          - ["VM"] # DenyList
+          - - upperCaseConst: true
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#var-declaration
+      - name: var-declaration
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unconditional-recursion
+      - name: unconditional-recursion
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unexported-naming
+      - name: unexported-naming
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unexported-return
+      - name: unexported-return
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unhandled-error
+      - name: unhandled-error
+        severity: warning
+        disabled: false
+        arguments:
+          - "fmt.Printf"
+          - "myFunction"
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unnecessary-stmt
+      - name: unnecessary-stmt
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unreachable-code
+      - name: unreachable-code
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unused-parameter
+      - name: unused-parameter
+        severity: warning
+        disabled: false
+        arguments:
+          - allowRegex: "^_"
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unused-receiver
+      - name: unused-receiver
+        severity: warning
+        disabled: false
+        arguments:
+          - allowRegex: "^_"
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#useless-break
+      - name: useless-break
+        severity: warning
+        disabled: false
+      # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#waitgroup-by-value
+      - name: waitgroup-by-value
+        severity: warning
+        disabled: false
+  # I'm biased and I'm enabling more than 100 checks
+  # Might be too much for you. See https://go-critic.com/overview.html
+  gocritic:
+    enabled-tags:
+      - diagnostic
+      - experimental
+      - opinionated
+      - performance
+      - style
+    disabled-checks:
+      # These 3 will detect many cases, but they only make sense
+      # in performance-oriented code.
+      - hugeParam
+      - rangeExprCopy
+      - rangeValCopy
+
+  godot:
+    scope: all
+
+  errcheck:
+    # Report `a := b.(MyStruct)` when `a, ok := ...` should be.
+    check-type-assertions: true # Default: false
+
+    # Report skipped checks:`num, _ := strconv.Atoi(numStr)`.
+    check-blank: true # Default: false
+
+    # Functions to skip.
+    exclude-functions:
+      - io/ioutil.ReadFile
+      - io.Copy(*bytes.Buffer)
+      - io.Copy(os.Stdout)
+
+  govet:
+    disable:
+      - fieldalignment # I'm ok to waste some bytes
+
+  nakedret:
+    # No naked returns, ever.
+    max-func-lines: 1 # Default: 30
+
+  tagliatelle:
+    case:
+      rules:
+        json: snake # why it's not a `snake` by default?!
+        yaml: snake # why it's not a `snake` by default?!
+        xml: camel
+        bson: camel
+        avro: snake
+        mapstructure: kebab
+
+# See also https://gist.github.com/cristaloleg/dc29ca0ef2fb554de28d94c3c6f6dc88
+
+output:
+  # I prefer the simplest one: `line-number` and saving to `lint.txt`
+  #
+  # The `tab` also looks good and with the next release I will switch to it
+  # (ref: https://github.com/golangci/golangci-lint/issues/3728)
+  #
+  # There are more formats which can be used on CI or by your IDE.
+  formats:
+    - format: line-number
+
+  # I do not find this useful, parameter above already enables filepath
+  # with a line and column. For me, it's easier to follow the path and
+  # see the line in an IDE where I see more code and understand it better.
+  print-issued-lines: false
+
+  # Must have. Easier to understand the output.
+  print-linter-name: true
+
+  # No, no skips, everything should be reported.
+  uniq-by-line: false
+
+  # To be honest no idea when this can be needed, maybe a multi-module setup?
+  path-prefix: ""
+
+  # Slightly easier to follow the results + getting deterministic output.
+  sort-results: true
+
+issues:
+  exclude-dirs-use-default: false
+  # I found it strange to skip the errors, setting 0 to have all the results.
+  max-issues-per-linter: 0
+
+  # Same here, nothing should be skipped to not miss errors.
+  max-same-issues: 0
+
+  # When set to `true`, the linter will analyze only new code that is
+  # not committed or that comes after some specific revision. This is a cool
+  # feature when you're going to introduce a linter into a big project.
+  # But I prefer going gradually package by package.
+  # So, it's set to `false` to scan all code.
+  new: false
+
+  # 2 other params regarding git integration
+
+  # Even with a recent GPT-4 release I still believe that
+  # I know better how to do my job and fix the suggestions.
+  fix: false
\ No newline at end of file
diff --git a/vendor/github.com/lasiar/canonicalheader/.goreleaser.yaml b/vendor/github.com/lasiar/canonicalheader/.goreleaser.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..ada67063eb5e06a77abda0970b2e79c121307ec3
--- /dev/null
+++ b/vendor/github.com/lasiar/canonicalheader/.goreleaser.yaml
@@ -0,0 +1,18 @@
+builds:
+  - main: ./cmd/canonicalheader
+    env:
+      - CGO_ENABLED=0
+    flags:
+      - -trimpath
+    ldflags:
+      - -s -w
+    targets:
+      - darwin_amd64
+      - darwin_arm64
+      - linux_amd64
+      - windows_amd64
+
+archives:
+  - format_overrides:
+      - goos: windows
+        format: zip
\ No newline at end of file
diff --git a/vendor/github.com/lasiar/canonicalheader/LICENCE b/vendor/github.com/lasiar/canonicalheader/LICENCE
new file mode 100644
index 0000000000000000000000000000000000000000..5b93b736c756d134fa1834c27065c29ef0e9148b
--- /dev/null
+++ b/vendor/github.com/lasiar/canonicalheader/LICENCE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 Roman Chaliy
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/vendor/github.com/lasiar/canonicalheader/README.md b/vendor/github.com/lasiar/canonicalheader/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..9971452197e04632d89fe2a2aa62cf588707b39f
--- /dev/null
+++ b/vendor/github.com/lasiar/canonicalheader/README.md
@@ -0,0 +1,77 @@
+# canonicalheader
+
+[![CI](https://github.com/lasiar/canonicalheader/actions/workflows/test.yml/badge.svg)](https://github.com/lasiar/canonicalheader/actions/workflows/test.yml)
+[![tag](https://img.shields.io/github/tag/lasiar/canonicalheader.svg)](https://github.com/lasiar/canonicalheader/releases)
+[![Go Report Card](https://goreportcard.com/badge/github.com/lasiar/canonicalheader)](https://goreportcard.com/report/github.com/lasiar/canonicalheader)
+[![License](https://img.shields.io/github/license/lasiar/canonicalheader)](./LICENCE)
+
+A Go linter that checks whether net/http.Header uses canonical header keys.
+
+### Install
+
+```shell
+go install -v github.com/lasiar/canonicalheader/cmd/canonicalheader@latest 
+```
+
+Or download the binary file from the [release](https://github.com/lasiar/canonicalheader/releases/latest).
+
+
+### Example
+
+before
+
+```go
+package main
+
+import (
+	"net/http"
+)
+
+const testHeader = "testHeader"
+
+func main() {
+	v := http.Header{}
+	v.Get(testHeader)
+
+	v.Get("Test-HEader")
+	v.Set("Test-HEader", "value")
+	v.Add("Test-HEader", "value")
+	v.Del("Test-HEader")
+	v.Values("Test-HEader")
+
+	v.Set("Test-Header", "value")
+	v.Add("Test-Header", "value")
+	v.Del("Test-Header")
+	v.Values("Test-Header")
+}
+
+```
+
+after
+
+```go
+package main
+
+import (
+	"net/http"
+)
+
+const testHeader = "testHeader"
+
+func main() {
+	v := http.Header{}
+	v.Get(testHeader)
+
+	v.Get("Test-Header")
+	v.Set("Test-Header", "value")
+	v.Add("Test-Header", "value")
+	v.Del("Test-Header")
+	v.Values("Test-Header")
+
+	v.Set("Test-Header", "value")
+	v.Add("Test-Header", "value")
+	v.Del("Test-Header")
+	v.Values("Test-Header")
+}
+
+```
diff --git a/vendor/github.com/lasiar/canonicalheader/analyzer.go b/vendor/github.com/lasiar/canonicalheader/analyzer.go
new file mode 100644
index 0000000000000000000000000000000000000000..d3fb529ebdddf237d153d4c1dc8dccd527d33ca2
--- /dev/null
+++ b/vendor/github.com/lasiar/canonicalheader/analyzer.go
@@ -0,0 +1,264 @@
+package canonicalheader
+
+import (
+	"fmt"
+	"go/ast"
+	"go/types"
+	"net/http"
+
+	"github.com/go-toolsmith/astcast"
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/go/types/typeutil"
+)
+
+const (
+	pkgPath = "net/http"
+	name    = "Header"
+)
+
+var Analyzer = &analysis.Analyzer{
+	Name:     "canonicalheader",
+	Doc:      "canonicalheader checks whether net/http.Header uses canonical header",
+	Run:      run,
+	Requires: []*analysis.Analyzer{inspect.Analyzer},
+}
+
+type argumenter interface {
+	diagnostic(canonicalHeader string) analysis.Diagnostic
+	value() string
+}
+
+func run(pass *analysis.Pass) (any, error) {
+	var headerObject types.Object
+	for _, object := range pass.TypesInfo.Uses {
+		if object.Pkg() != nil &&
+			object.Pkg().Path() == pkgPath &&
+			object.Name() == name {
+			headerObject = object
+			break
+		}
+	}
+
+	if headerObject == nil {
+		//nolint:nilnil // nothing to do here, because http.Header is not used.
+		return nil, nil
+	}
+
+	spctor, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+	if !ok {
+		return nil, fmt.Errorf("want %T, got %T", spctor, pass.ResultOf[inspect.Analyzer])
+	}
+
+	wellKnownHeaders := initialism()
+
+	nodeFilter := []ast.Node{
+		(*ast.CallExpr)(nil),
+	}
+	var outerErr error
+
+	spctor.Preorder(nodeFilter, func(n ast.Node) {
+		if outerErr != nil {
+			return
+		}
+
+		callExp, ok := n.(*ast.CallExpr)
+		if !ok {
+			return
+		}
+
+		var (
+			// gotType is the receiver's type.
+			gotType       types.Type
+			gotMethodName string
+		)
+
+		switch t := typeutil.Callee(pass.TypesInfo, callExp).(type) {
+		// Direct call method.
+		case *types.Func:
+			fn := t
+			// Find net/http.Header{} by function call.
+			signature, ok := fn.Type().(*types.Signature)
+			if !ok {
+				return
+			}
+
+			recv := signature.Recv()
+
+			// It's a func, not a method.
+			if recv == nil {
+				return
+			}
+			gotType = recv.Type()
+			gotMethodName = astcast.ToSelectorExpr(callExp.Fun).Sel.Name
+
+		// h := http.Header{}
+		// f := h.Get
+		// v("Test-Value").
+		case *types.Var:
+			ident, ok := callExp.Fun.(*ast.Ident)
+			if !ok {
+				return
+			}
+
+			if ident.Obj == nil {
+				return
+			}
+
+			// f := h.Get.
+			assign, ok := ident.Obj.Decl.(*ast.AssignStmt)
+			if !ok {
+				return
+			}
+
+			// For case `i, v := 0, h.Get`.
+			// indexAssign--^.
+			indexAssign := -1
+			for i, lh := range assign.Lhs {
+				// Find by name of variable.
+				if astcast.ToIdent(lh).Name == ident.Name {
+					indexAssign = i
+				}
+			}
+
+			// Not found.
+			if indexAssign == -1 {
+				return
+			}
+
+			if len(assign.Rhs) <= indexAssign {
+				return
+			}
+
+			sel, ok := assign.Rhs[indexAssign].(*ast.SelectorExpr)
+			if !ok {
+				return
+			}
+
+			gotMethodName = sel.Sel.Name
+			ident, ok = sel.X.(*ast.Ident)
+			if !ok {
+				return
+			}
+
+			obj := pass.TypesInfo.ObjectOf(ident)
+			gotType = obj.Type()
+
+		default:
+			return
+		}
+
+		// It is not net/http.Header{}.
+		if !types.Identical(gotType, headerObject.Type()) {
+			return
+		}
+
+		// Search for known methods where the key is the first arg.
+		if !isValidMethod(gotMethodName) {
+			return
+		}
+
+		// There must be at least one argument, because the key is
+		// read by index below.
+		if len(callExp.Args) == 0 {
+			return
+		}
+
+		callArg := callExp.Args[0]
+
+		// Check for type casting from myString to string.
+		// it could be: Get(string(string(string(myString)))).
+		// need this node------------------------^^^^^^^^.
+		for {
+			// If it is not *ast.CallExpr, this is a value.
+			c, ok := callArg.(*ast.CallExpr)
+			if !ok {
+				break
+			}
+
+			// Some function is called, skip this case.
+			if len(c.Args) == 0 {
+				return
+			}
+
+			f, ok := c.Fun.(*ast.Ident)
+			if !ok {
+				break
+			}
+
+			obj := pass.TypesInfo.ObjectOf(f)
+			// obj may be nil by the code's signature, but not logically:
+			// pass.TypesInfo should contain the type.
+			if obj == nil {
+				break
+			}
+
+			// This is a function being called, not a type conversion.
+			// Skip this case.
+			_, ok = obj.Type().(*types.Signature)
+			if ok {
+				return
+			}
+
+			callArg = c.Args[0]
+		}
+
+		var arg argumenter
+		switch t := callArg.(type) {
+		case *ast.BasicLit:
+			lString, err := newLiteralString(t)
+			if err != nil {
+				return
+			}
+
+			arg = lString
+
+		case *ast.Ident:
+			constString, err := newConstantKey(pass.TypesInfo, t)
+			if err != nil {
+				return
+			}
+
+			arg = constString
+
+		default:
+			return
+		}
+
+		argValue := arg.value()
+		headerKeyCanonical := http.CanonicalHeaderKey(argValue)
+		if argValue == headerKeyCanonical {
+			return
+		}
+
+		headerKeyCanonical, isWellKnown := canonicalHeaderKey(argValue, wellKnownHeaders)
+		if argValue == headerKeyCanonical || isWellKnown {
+			return
+		}
+
+		pass.Report(arg.diagnostic(headerKeyCanonical))
+	})
+
+	return nil, outerErr
+}
+
+func canonicalHeaderKey(s string, m map[string]string) (string, bool) {
+	canonical := http.CanonicalHeaderKey(s)
+
+	wellKnown, ok := m[canonical]
+	if !ok {
+		return canonical, ok
+	}
+
+	return wellKnown, ok
+}
+
+func isValidMethod(name string) bool {
+	switch name {
+	case "Get", "Set", "Add", "Del", "Values":
+		return true
+	default:
+		return false
+	}
+}
diff --git a/vendor/github.com/lasiar/canonicalheader/constant_string.go b/vendor/github.com/lasiar/canonicalheader/constant_string.go
new file mode 100644
index 0000000000000000000000000000000000000000..27988f0d53f61db40cd3f9dd8bba5d8573b7f66c
--- /dev/null
+++ b/vendor/github.com/lasiar/canonicalheader/constant_string.go
@@ -0,0 +1,50 @@
+package canonicalheader
+
+import (
+	"fmt"
+	"go/ast"
+	"go/constant"
+	"go/token"
+	"go/types"
+
+	"golang.org/x/tools/go/analysis"
+)
+
+type constantString struct {
+	originalValue,
+	nameOfConst string
+
+	pos token.Pos
+	end token.Pos
+}
+
+func newConstantKey(info *types.Info, ident *ast.Ident) (constantString, error) {
+	c, ok := info.ObjectOf(ident).(*types.Const)
+	if !ok {
+		return constantString{}, fmt.Errorf("type %T is not supported", c)
+	}
+
+	return constantString{
+		nameOfConst:   c.Name(),
+		originalValue: constant.StringVal(c.Val()),
+		pos:           ident.Pos(),
+		end:           ident.End(),
+	}, nil
+}
+
+func (c constantString) diagnostic(canonicalHeader string) analysis.Diagnostic {
+	return analysis.Diagnostic{
+		Pos: c.pos,
+		End: c.end,
+		Message: fmt.Sprintf(
+			"const %q used as a key at http.Header, but %q is not canonical, want %q",
+			c.nameOfConst,
+			c.originalValue,
+			canonicalHeader,
+		),
+	}
+}
+
+func (c constantString) value() string {
+	return c.originalValue
+}
diff --git a/vendor/github.com/lasiar/canonicalheader/initialism.go b/vendor/github.com/lasiar/canonicalheader/initialism.go
new file mode 100644
index 0000000000000000000000000000000000000000..c3d91c23e9813ab537c7dac5dbf26a098cc7c8b9
--- /dev/null
+++ b/vendor/github.com/lasiar/canonicalheader/initialism.go
@@ -0,0 +1,75 @@
+// Code generated by initialismer; DO NOT EDIT.
+package canonicalheader
+
+// initialism mapping of not canonical headers from
+// https://en.wikipedia.org/wiki/List_of_HTTP_header_fields
+// https://www.iana.org/assignments/http-fields/http-fields.xhtml.
+func initialism() map[string]string {
+	return map[string]string{
+		"A-Im":                              "A-IM",
+		"Accept-Ch":                         "Accept-CH",
+		"Alpn":                              "ALPN",
+		"Amp-Cache-Transform":               "AMP-Cache-Transform",
+		"C-Pep":                             "C-PEP",
+		"C-Pep-Info":                        "C-PEP-Info",
+		"Cal-Managed-Id":                    "Cal-Managed-ID",
+		"Caldav-Timezones":                  "CalDAV-Timezones",
+		"Cdn-Cache-Control":                 "CDN-Cache-Control",
+		"Cdn-Loop":                          "CDN-Loop",
+		"Content-Id":                        "Content-ID",
+		"Content-Md5":                       "Content-MD5",
+		"Dasl":                              "DASL",
+		"Dav":                               "DAV",
+		"Differential-Id":                   "Differential-ID",
+		"Dnt":                               "DNT",
+		"Dpop":                              "DPoP",
+		"Dpop-Nonce":                        "DPoP-Nonce",
+		"Ediint-Features":                   "EDIINT-Features",
+		"Etag":                              "ETag",
+		"Expect-Ct":                         "Expect-CT",
+		"Getprofile":                        "GetProfile",
+		"Http2-Settings":                    "HTTP2-Settings",
+		"Im":                                "IM",
+		"Include-Referred-Token-Binding-Id": "Include-Referred-Token-Binding-ID",
+		"Last-Event-Id":                     "Last-Event-ID",
+		"Mime-Version":                      "MIME-Version",
+		"Nel":                               "NEL",
+		"Odata-Entityid":                    "OData-EntityId",
+		"Odata-Isolation":                   "OData-Isolation",
+		"Odata-Maxversion":                  "OData-MaxVersion",
+		"Odata-Version":                     "OData-Version",
+		"Optional-Www-Authenticate":         "Optional-WWW-Authenticate",
+		"Oscore":                            "OSCORE",
+		"Oslc-Core-Version":                 "OSLC-Core-Version",
+		"P3p":                               "P3P",
+		"Pep":                               "PEP",
+		"Pep-Info":                          "PEP-Info",
+		"Pics-Label":                        "PICS-Label",
+		"Profileobject":                     "ProfileObject",
+		"Repeatability-Client-Id":           "Repeatability-Client-ID",
+		"Repeatability-Request-Id":          "Repeatability-Request-ID",
+		"Sec-Gpc":                           "Sec-GPC",
+		"Sec-Websocket-Accept":              "Sec-WebSocket-Accept",
+		"Sec-Websocket-Extensions":          "Sec-WebSocket-Extensions",
+		"Sec-Websocket-Key":                 "Sec-WebSocket-Key",
+		"Sec-Websocket-Protocol":            "Sec-WebSocket-Protocol",
+		"Sec-Websocket-Version":             "Sec-WebSocket-Version",
+		"Setprofile":                        "SetProfile",
+		"Slug":                              "SLUG",
+		"Soapaction":                        "SoapAction",
+		"Status-Uri":                        "Status-URI",
+		"Tcn":                               "TCN",
+		"Te":                                "TE",
+		"Ttl":                               "TTL",
+		"Uri":                               "URI",
+		"Www-Authenticate":                  "WWW-Authenticate",
+		"X-Correlation-Id":                  "X-Correlation-ID",
+		"X-Dns-Prefetch-Control":            "X-DNS-Prefetch-Control",
+		"X-Real-Ip":                         "X-Real-IP",
+		"X-Request-Id":                      "X-Request-ID",
+		"X-Ua-Compatible":                   "X-UA-Compatible",
+		"X-Webkit-Csp":                      "X-WebKit-CSP",
+		"X-Xss":                             "X-XSS",
+		"X-Xss-Protection":                  "X-XSS-Protection",
+	}
+}
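The generated mapping above exists because `http.CanonicalHeaderKey` applies only the mechanical capitalize-after-dash rule, while many registered header names use initialisms that deviate from it. A small sketch of that mismatch:

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	// The mechanical rule uppercases only the first letter of each
	// dash-separated word, so the accepted spellings of these headers
	// are not what CanonicalHeaderKey produces.
	fmt.Println(http.CanonicalHeaderKey("ETag"))             // Etag
	fmt.Println(http.CanonicalHeaderKey("WWW-Authenticate")) // Www-Authenticate
	fmt.Println(http.CanonicalHeaderKey("x-request-id"))     // X-Request-Id
}
```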
diff --git a/vendor/github.com/lasiar/canonicalheader/literal_string.go b/vendor/github.com/lasiar/canonicalheader/literal_string.go
new file mode 100644
index 0000000000000000000000000000000000000000..71cd5f3974757e73f8e42d254a546c6ba172cc1f
--- /dev/null
+++ b/vendor/github.com/lasiar/canonicalheader/literal_string.go
@@ -0,0 +1,80 @@
+package canonicalheader
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"strconv"
+	"unicode/utf8"
+	"unsafe"
+
+	"golang.org/x/tools/go/analysis"
+)
+
+type literalString struct {
+	originalValue string
+	quote         byte
+	pos, end      token.Pos
+}
+
+func newLiteralString(basicList *ast.BasicLit) (literalString, error) {
+	if basicList.Kind != token.STRING {
+		return literalString{}, fmt.Errorf("%#v is not a string", basicList)
+	}
+
+	if len(basicList.Value) < 2 {
+		return literalString{}, fmt.Errorf("%#v has a strange value length %q", basicList, basicList.Value)
+	}
+
+	quote := basicList.Value[0]
+	switch quote {
+	case '`', '"':
+	default:
+		return literalString{}, fmt.Errorf("%q is a strange quote", quote)
+	}
+
+	originalValue, err := strconv.Unquote(basicList.Value)
+	if err != nil {
+		return literalString{}, fmt.Errorf("unquote %q: %w", basicList.Value, err)
+	}
+
+	if !utf8.ValidString(originalValue) {
+		return literalString{}, fmt.Errorf("%#v is not a valid utf8 string", basicList.Value)
+	}
+
+	return literalString{
+		originalValue: originalValue,
+		quote:         quote,
+		pos:           basicList.Pos(),
+		end:           basicList.End(),
+	}, nil
+}
+
+func (l literalString) diagnostic(canonicalHeader string) analysis.Diagnostic {
+	newText := make([]byte, 0, len(canonicalHeader)+2)
+	newText = append(newText, l.quote)
+	newText = append(newText, unsafe.Slice(unsafe.StringData(canonicalHeader), len(canonicalHeader))...)
+	newText = append(newText, l.quote)
+
+	return analysis.Diagnostic{
+		Pos:     l.pos,
+		End:     l.end,
+		Message: fmt.Sprintf("non-canonical header %q, instead use: %q", l.originalValue, canonicalHeader),
+		SuggestedFixes: []analysis.SuggestedFix{
+			{
+				Message: fmt.Sprintf("should replace %q with %q", l.originalValue, canonicalHeader),
+				TextEdits: []analysis.TextEdit{
+					{
+						Pos:     l.pos,
+						End:     l.end,
+						NewText: newText,
+					},
+				},
+			},
+		},
+	}
+}
+
+func (l literalString) value() string {
+	return l.originalValue
+}
diff --git a/vendor/github.com/lasiar/canonicalheader/makefile b/vendor/github.com/lasiar/canonicalheader/makefile
new file mode 100644
index 0000000000000000000000000000000000000000..a96cb628e571015f9e2a9d610b1cb98c158a9339
--- /dev/null
+++ b/vendor/github.com/lasiar/canonicalheader/makefile
@@ -0,0 +1,12 @@
+.PHONY:
+
+test:
+	go test -v -race ./...
+
+linter:
+	golangci-lint -v run ./...
+
+generate:
+	go run ./cmd/initialismer/*.go -target="mapping" > ./initialism.go
+	go run ./cmd/initialismer/*.go -target="test" > ./testdata/src/initialism/initialism.go
+	gofmt -w ./initialism.go ./testdata/src/initialism/initialism.go
diff --git a/vendor/github.com/ldez/gomoddirectives/.golangci.yml b/vendor/github.com/ldez/gomoddirectives/.golangci.yml
index a2483e95f7e7ba2c50cf8eda1c81923b4d83df9d..034745570a06b2a25033ace89bf4dd85526b99ca 100644
--- a/vendor/github.com/ldez/gomoddirectives/.golangci.yml
+++ b/vendor/github.com/ldez/gomoddirectives/.golangci.yml
@@ -1,7 +1,5 @@
 run:
-  deadline: 2m
-  skip-files: []
-  skip-dirs: []
+  timeout: 2m
 
 linters-settings:
   govet:
@@ -16,10 +14,13 @@ linters-settings:
   gofumpt:
     extra-rules: true
   depguard:
-    list-type: blacklist
-    include-go-root: false
-    packages:
-      - github.com/pkg/errors
+    rules:
+      main:
+        deny:
+          - pkg: "github.com/instana/testify"
+            desc: not allowed
+          - pkg: "github.com/pkg/errors"
+            desc: Should be replaced by standard lib errors package
   godox:
     keywords:
       - FIXME
@@ -51,12 +52,19 @@ linters-settings:
 linters:
   enable-all: true
   disable:
-    - maligned # deprecated
-    - interfacer # deprecated
+    - deadcode # deprecated
+    - exhaustivestruct # deprecated
     - golint # deprecated
+    - ifshort # deprecated
+    - interfacer # deprecated
+    - maligned # deprecated
+    - nosnakecase # deprecated
     - scopelint # deprecated
+    - structcheck # deprecated
+    - varcheck # deprecated
     - sqlclosecheck # not relevant (SQL)
     - rowserrcheck # not relevant (SQL)
+    - execinquery # not relevant (SQL)
     - cyclop # duplicate of gocyclo
     - lll
     - dupl
@@ -71,14 +79,16 @@ linters:
     - goerr113
     - wrapcheck
     - exhaustive
-    - exhaustivestruct
+    - exhaustruct
     - varnamelen
 
 issues:
   exclude-use-default: false
-  max-per-linter: 0
+  max-issues-per-linter: 0
   max-same-issues: 0
-  exclude: []
+  exclude: [
+    'package-comments: should have a package comment'
+  ]
   exclude-rules:
     - path: "(.+)_test.go"
       linters:
@@ -86,3 +96,7 @@ issues:
       - goconst
     - path: cmd/gomoddirectives/gomoddirectives.go
       text: 'use of `fmt.Println` forbidden'
+
+output:
+  show-stats: true
+  sort-results: true
diff --git a/vendor/github.com/ldez/gomoddirectives/Makefile b/vendor/github.com/ldez/gomoddirectives/Makefile
index dd3b335c7a3713a9b359f294c92808968e07fd4f..5a0a852c8d33b5f6ffd050b7318f23e41423187a 100644
--- a/vendor/github.com/ldez/gomoddirectives/Makefile
+++ b/vendor/github.com/ldez/gomoddirectives/Makefile
@@ -12,4 +12,4 @@ check:
 	golangci-lint run
 
 build:
-	go build -v -ldflags "-s -w" -trimpath ./cmd/gomoddirectives/
+	go build -ldflags "-s -w" -trimpath ./cmd/gomoddirectives/
diff --git a/vendor/github.com/ldez/gomoddirectives/module.go b/vendor/github.com/ldez/gomoddirectives/module.go
index 907be244fcc3b0434562c63efecf5909fee0164c..4cb365379427a293b2de90079916782614dc9c97 100644
--- a/vendor/github.com/ldez/gomoddirectives/module.go
+++ b/vendor/github.com/ldez/gomoddirectives/module.go
@@ -24,7 +24,7 @@ func GetModuleFile() (*modfile.File, error) {
 	// https://github.com/golang/go/issues/44753#issuecomment-790089020
 	cmd := exec.Command("go", "list", "-m", "-json")
 
-	raw, err := cmd.CombinedOutput()
+	raw, err := cmd.Output()
 	if err != nil {
 		return nil, fmt.Errorf("command go list: %w: %s", err, string(raw))
 	}
diff --git a/vendor/github.com/leonklingele/grouper/pkg/analyzer/analyzer.go b/vendor/github.com/leonklingele/grouper/pkg/analyzer/analyzer.go
index 9852c7838006188f79a7257c362a5a300df14d97..7d8c0c4f0d946c1ad41fd6c9336a7769cac44abb 100644
--- a/vendor/github.com/leonklingele/grouper/pkg/analyzer/analyzer.go
+++ b/vendor/github.com/leonklingele/grouper/pkg/analyzer/analyzer.go
@@ -15,7 +15,9 @@ import (
 
 const (
 	Name = "grouper"
-	Doc  = `expression group analyzer: require 'import', 'const', 'var' and/or 'type' declaration groups`
+	Doc  = `analyze expression groups
+
+Require 'import', 'const', 'var' and/or 'type' declaration groups.`
 )
 
 func New() *analysis.Analyzer {
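As a quick illustration of what the grouper analyzer asks for (a hypothetical snippet, not part of the vendored sources): with the corresponding check enabled, ungrouped top-level declarations are reported, while a single grouped block passes.

```go
package example

// With grouper's var check enabled, these two separate declarations
// would be reported and a single grouped block suggested instead.
var name = "gopher"
var age = 3

// The grouped form satisfies the rule.
var (
	city = "Berlin"
	zip  = "10117"
)
```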
diff --git a/vendor/github.com/macabu/inamedparam/.gitignore b/vendor/github.com/macabu/inamedparam/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..f8d51e94cbbf6e3730ae20062f832f10509c7a3d
--- /dev/null
+++ b/vendor/github.com/macabu/inamedparam/.gitignore
@@ -0,0 +1,22 @@
+# If you prefer the allow list template instead of the deny list, see community template:
+# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
+#
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+inamedparam
+
+# Test binary, built with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+# Dependency directories (remove the comment below to include it)
+# vendor/
+
+# Go workspace file
+go.work
diff --git a/vendor/github.com/macabu/inamedparam/.golangci.yml b/vendor/github.com/macabu/inamedparam/.golangci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..f0efa1cb6cd599943c8087fae0b12f3c6a3588e1
--- /dev/null
+++ b/vendor/github.com/macabu/inamedparam/.golangci.yml
@@ -0,0 +1,33 @@
+run:
+  deadline: 30s 
+
+linters:
+  enable-all: true
+  disable:
+    - cyclop
+    - deadcode
+    - depguard
+    - exhaustivestruct
+    - exhaustruct
+    - forcetypeassert
+    - gochecknoglobals
+    - gocognit
+    - golint
+    - ifshort
+    - interfacer
+    - maligned
+    - nilnil
+    - nosnakecase
+    - paralleltest
+    - scopelint
+    - structcheck
+    - varcheck
+
+linters-settings:
+  gci:
+    sections:
+      - standard
+      - default
+      - prefix(github.com/macabu/inamedparam)
+    section-separators:
+      - newLine
diff --git a/vendor/github.com/macabu/inamedparam/LICENSE-MIT b/vendor/github.com/macabu/inamedparam/LICENSE-MIT
new file mode 100644
index 0000000000000000000000000000000000000000..b95f480ee5db988935f322f989fadca11d2e0b95
--- /dev/null
+++ b/vendor/github.com/macabu/inamedparam/LICENSE-MIT
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 Matheus Macabu
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/macabu/inamedparam/README.md b/vendor/github.com/macabu/inamedparam/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..3336cb9504da429f2f11263d24225ced6f2415a1
--- /dev/null
+++ b/vendor/github.com/macabu/inamedparam/README.md
@@ -0,0 +1,38 @@
+# inamedparam
+
+A linter that reports interfaces with unnamed method parameters.
+
+## Flags/Config
+```sh
+-skip-single-param
+    skip interfaces with a single unnamed parameter
+```
+
+## Usage 
+
+### Standalone
+You can run it standalone through `go vet`.  
+
+You must install the binary to your `$GOBIN` folder like so:
+```sh
+$ go install github.com/macabu/inamedparam/cmd/inamedparam
+```
+
+And then navigate to your Go project's root folder, where you can run `go vet` in the following way:
+```sh
+$ go vet -vettool=$(which inamedparam) ./...
+```
+
+### golangci-lint
+`inamedparam` was added as a linter to `golangci-lint` on version `v1.55.0`. It is disabled by default.
+
+To enable it, you can add it to your `.golangci.yml` file, as such:
+```yaml
+run:
+  deadline: 30s 
+
+linters:
+  disable-all: true
+  enable:
+    - inamedparam
+```
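
For illustration (hypothetical interface, not part of the vendored sources), the analyzer walks interface methods and reports unnamed parameters with a message such as "interface method Get must have named param for type context.Context"; naming every parameter silences it:

```go
package example

import "context"

// Reported by inamedparam: the parameters of Get are unnamed.
type Store interface {
	Get(context.Context, string) (string, error)
}

// Passes: every parameter is named.
type NamedStore interface {
	Get(ctx context.Context, key string) (string, error)
}
```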
diff --git a/vendor/github.com/macabu/inamedparam/inamedparam.go b/vendor/github.com/macabu/inamedparam/inamedparam.go
new file mode 100644
index 0000000000000000000000000000000000000000..8ba7fe1882b35030b847ade083d6b1eb5195672c
--- /dev/null
+++ b/vendor/github.com/macabu/inamedparam/inamedparam.go
@@ -0,0 +1,94 @@
+package inamedparam
+
+import (
+	"flag"
+	"go/ast"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/inspector"
+)
+
+const (
+	analyzerName = "inamedparam"
+
+	flagSkipSingleParam = "skip-single-param"
+)
+
+var Analyzer = &analysis.Analyzer{
+	Name:  analyzerName,
+	Doc:   "reports interfaces with unnamed method parameters",
+	Run:   run,
+	Flags: flags(),
+	Requires: []*analysis.Analyzer{
+		inspect.Analyzer,
+	},
+}
+
+func flags() flag.FlagSet {
+	flags := flag.NewFlagSet(analyzerName, flag.ExitOnError)
+
+	flags.Bool(flagSkipSingleParam, false, "skip interface methods with a single unnamed parameter")
+
+	return *flags
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+	types := []ast.Node{
+		&ast.InterfaceType{},
+	}
+
+	skipSingleParam := pass.Analyzer.Flags.Lookup(flagSkipSingleParam).Value.(flag.Getter).Get().(bool)
+
+	inspect.Preorder(types, func(n ast.Node) {
+		interfaceType, ok := n.(*ast.InterfaceType)
+		if !ok || interfaceType == nil || interfaceType.Methods == nil {
+			return
+		}
+
+		for _, method := range interfaceType.Methods.List {
+			interfaceFunc, ok := method.Type.(*ast.FuncType)
+			if !ok || interfaceFunc == nil || interfaceFunc.Params == nil {
+				continue
+			}
+
+			// Improvement: add test case to reproduce this. Help wanted.
+			if len(method.Names) == 0 {
+				continue
+			}
+
+			methodName := method.Names[0].Name
+
+			if skipSingleParam && len(interfaceFunc.Params.List) == 1 {
+				continue
+			}
+
+			for _, param := range interfaceFunc.Params.List {
+				if param.Names == nil {
+					var builtParamType string
+
+					switch paramType := param.Type.(type) {
+					case *ast.SelectorExpr:
+						if ident := paramType.X.(*ast.Ident); ident != nil {
+							builtParamType += ident.Name + "."
+						}
+
+						builtParamType += paramType.Sel.Name
+					case *ast.Ident:
+						builtParamType = paramType.Name
+					}
+
+					if builtParamType != "" {
+						pass.Reportf(param.Pos(), "interface method %v must have named param for type %v", methodName, builtParamType)
+					} else {
+						pass.Reportf(param.Pos(), "interface method %v must have all named params", methodName)
+					}
+				}
+			}
+		}
+	})
+
+	return nil, nil
+}
diff --git a/vendor/github.com/mbilski/exhaustivestruct/pkg/analyzer/analyzer.go b/vendor/github.com/mbilski/exhaustivestruct/pkg/analyzer/analyzer.go
deleted file mode 100644
index 0dfb713c5ace414787ca8af6fe19d03aecd240d9..0000000000000000000000000000000000000000
--- a/vendor/github.com/mbilski/exhaustivestruct/pkg/analyzer/analyzer.go
+++ /dev/null
@@ -1,187 +0,0 @@
-package analyzer
-
-import (
-	"flag"
-	"fmt"
-	"go/ast"
-	"go/types"
-	"path"
-	"strings"
-
-	"golang.org/x/tools/go/analysis/passes/inspect"
-	"golang.org/x/tools/go/ast/inspector"
-
-	"golang.org/x/tools/go/analysis"
-)
-
-// Analyzer that checks if all struct's fields are initialized
-var Analyzer = &analysis.Analyzer{
-	Name:     "exhaustivestruct",
-	Doc:      "Checks if all struct's fields are initialized",
-	Run:      run,
-	Requires: []*analysis.Analyzer{inspect.Analyzer},
-	Flags:    newFlagSet(),
-}
-
-// StructPatternList is a comma separated list of expressions to match struct packages and names
-// The struct packages have the form example.com/package.ExampleStruct
-// The matching patterns can use matching syntax from https://pkg.go.dev/path#Match
-// If this list is empty, all structs are tested.
-var StructPatternList string
-
-func newFlagSet() flag.FlagSet {
-	fs := flag.NewFlagSet("", flag.PanicOnError)
-	fs.StringVar(&StructPatternList, "struct_patterns", "", "This is a comma separated list of expressions to match struct packages and names")
-	return *fs
-}
-
-func run(pass *analysis.Pass) (interface{}, error) {
-	splitFn := func(c rune) bool { return c == ',' }
-	inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
-	structPatterns := strings.FieldsFunc(StructPatternList, splitFn)
-	// validate the pattern syntax
-	for _, pattern := range structPatterns {
-		_, err := path.Match(pattern, "")
-		if err != nil {
-			return nil, fmt.Errorf("invalid struct pattern %s: %w", pattern, err)
-		}
-	}
-
-	nodeFilter := []ast.Node{
-		(*ast.CompositeLit)(nil),
-		(*ast.ReturnStmt)(nil),
-	}
-
-	var returnStmt *ast.ReturnStmt
-
-	inspector.Preorder(nodeFilter, func(node ast.Node) {
-		var name string
-
-		compositeLit, ok := node.(*ast.CompositeLit)
-		if !ok {
-			// Keep track of the last return statement whilte iterating
-			retLit, ok := node.(*ast.ReturnStmt)
-			if ok {
-				returnStmt = retLit
-			}
-			return
-		}
-
-		i, ok := compositeLit.Type.(*ast.Ident)
-
-		if ok {
-			name = i.Name
-		} else {
-			s, ok := compositeLit.Type.(*ast.SelectorExpr)
-
-			if !ok {
-				return
-			}
-
-			name = s.Sel.Name
-		}
-
-		if compositeLit.Type == nil {
-			return
-		}
-
-		t := pass.TypesInfo.TypeOf(compositeLit.Type)
-
-		if t == nil {
-			return
-		}
-
-		if len(structPatterns) > 0 {
-			shouldLint := false
-			for _, pattern := range structPatterns {
-				// We check the patterns for vailidy ahead of time, so we don't need to check the error here
-				if match, _ := path.Match(pattern, t.String()); match {
-					shouldLint = true
-					break
-				}
-			}
-			if !shouldLint {
-				return
-			}
-		}
-
-		str, ok := t.Underlying().(*types.Struct)
-
-		if !ok {
-			return
-		}
-
-		// Don't report an error if:
-		// 1. This composite literal contains no fields and
-		// 2. It's in a return statement and
-		// 3. The return statement contains a non-nil error
-		if len(compositeLit.Elts) == 0 {
-			// Check if this composite is one of the results the last return statement
-			isInResults := false
-			if returnStmt != nil {
-				for _, result := range returnStmt.Results {
-					compareComposite, ok := result.(*ast.CompositeLit)
-					if ok {
-						if compareComposite == compositeLit {
-							isInResults = true
-						}
-					}
-				}
-			}
-			nonNilError := false
-			if isInResults {
-				// Check if any of the results has an error type and if that error is set to non-nil (if it's set to nil, the type would be "untyped nil")
-				for _, result := range returnStmt.Results {
-					if pass.TypesInfo.TypeOf(result).String() == "error" {
-						nonNilError = true
-					}
-				}
-			}
-
-			if nonNilError {
-				return
-			}
-		}
-
-		samePackage := strings.HasPrefix(t.String(), pass.Pkg.Path()+".")
-
-		missing := []string{}
-
-		for i := 0; i < str.NumFields(); i++ {
-			fieldName := str.Field(i).Name()
-			exists := false
-
-			if !samePackage && !str.Field(i).Exported() {
-				continue
-			}
-
-			for eIndex, e := range compositeLit.Elts {
-				if k, ok := e.(*ast.KeyValueExpr); ok {
-					if i, ok := k.Key.(*ast.Ident); ok {
-						if i.Name == fieldName {
-							exists = true
-							break
-						}
-					}
-				} else {
-					if eIndex == i {
-						exists = true
-						break
-					}
-				}
-			}
-
-			if !exists {
-				missing = append(missing, fieldName)
-			}
-		}
-
-		if len(missing) == 1 {
-			pass.Reportf(node.Pos(), "%s is missing in %s", missing[0], name)
-		} else if len(missing) > 1 {
-			pass.Reportf(node.Pos(), "%s are missing in %s", strings.Join(missing, ", "), name)
-		}
-	})
-
-	return nil, nil
-}
diff --git a/vendor/github.com/mgechev/revive/config/config.go b/vendor/github.com/mgechev/revive/config/config.go
index 04cd214042e78ead65669262fc32532a6bead3f0..50a2b8966f0211403704e857d25af0f73ad2d317 100644
--- a/vendor/github.com/mgechev/revive/config/config.go
+++ b/vendor/github.com/mgechev/revive/config/config.go
@@ -1,3 +1,4 @@
+// Package config implements revive's configuration data structures and related methods
 package config
 
 import (
@@ -5,9 +6,9 @@ import (
 	"fmt"
 	"os"
 
-	"github.com/mgechev/revive/formatter"
-
 	"github.com/BurntSushi/toml"
+
+	"github.com/mgechev/revive/formatter"
 	"github.com/mgechev/revive/lint"
 	"github.com/mgechev/revive/rule"
 )
@@ -31,7 +32,6 @@ var defaultRules = []lint.Rule{
 	&rule.TimeNamingRule{},
 	&rule.ContextKeysType{},
 	&rule.ContextAsArgumentRule{},
-	&rule.IfReturnRule{},
 	&rule.EmptyBlockRule{},
 	&rule.SuperfluousElseRule{},
 	&rule.UnusedParamRule{},
@@ -55,7 +55,7 @@ var allRules = append([]lint.Rule{
 	&rule.ModifiesValRecRule{},
 	&rule.ConstantLogicalExprRule{},
 	&rule.BoolLiteralRule{},
-	&rule.ImportsBlacklistRule{},
+	&rule.ImportsBlocklistRule{},
 	&rule.FunctionResultsLimitRule{},
 	&rule.MaxPublicStructsRule{},
 	&rule.RangeValInClosureRule{},
@@ -81,12 +81,20 @@ var allRules = append([]lint.Rule{
 	&rule.FunctionLength{},
 	&rule.NestedStructs{},
 	&rule.UselessBreak{},
+	&rule.UncheckedTypeAssertionRule{},
 	&rule.TimeEqualRule{},
 	&rule.BannedCharsRule{},
 	&rule.OptimizeOperandsOrderRule{},
 	&rule.UseAnyRule{},
 	&rule.DataRaceRule{},
 	&rule.CommentSpacingsRule{},
+	&rule.IfReturnRule{},
+	&rule.RedundantImportAlias{},
+	&rule.ImportAliasNamingRule{},
+	&rule.EnforceMapStyleRule{},
+	&rule.EnforceRepeatedArgTypeStyleRule{},
+	&rule.EnforceSliceStyleRule{},
+	&rule.MaxControlNestingRule{},
 }, defaultRules...)
 
 var allFormatters = []lint.Formatter{
@@ -124,7 +132,8 @@ func GetLintingRules(config *lint.Config, extraRules []lint.Rule) ([]lint.Rule,
 
 	var lintingRules []lint.Rule
 	for name, ruleConfig := range config.Rules {
-		r, ok := rulesMap[name]
+		actualName := actualRuleName(name)
+		r, ok := rulesMap[actualName]
 		if !ok {
 			return nil, fmt.Errorf("cannot find rule: %s", name)
 		}
@@ -139,6 +148,15 @@ func GetLintingRules(config *lint.Config, extraRules []lint.Rule) ([]lint.Rule,
 	return lintingRules, nil
 }
 
+func actualRuleName(name string) string {
+	switch name {
+	case "imports-blacklist":
+		return "imports-blocklist"
+	default:
+		return name
+	}
+}
+
 func parseConfig(path string, config *lint.Config) error {
 	file, err := os.ReadFile(path)
 	if err != nil {
@@ -148,6 +166,14 @@ func parseConfig(path string, config *lint.Config) error {
 	if err != nil {
 		return fmt.Errorf("cannot parse the config file: %v", err)
 	}
+	for k, r := range config.Rules {
+		err := r.Initialize()
+		if err != nil {
+			return fmt.Errorf("error in config of rule [%s] : [%v]", k, err)
+		}
+		config.Rules[k] = r
+	}
+
 	return nil
 }
 
diff --git a/vendor/github.com/mgechev/revive/formatter/checkstyle.go b/vendor/github.com/mgechev/revive/formatter/checkstyle.go
index 33a3b2ca17b42cb1e02ea510485661d7204e10ff..f45b63c9256841bdc3817fa3f917886ac17a48c3 100644
--- a/vendor/github.com/mgechev/revive/formatter/checkstyle.go
+++ b/vendor/github.com/mgechev/revive/formatter/checkstyle.go
@@ -3,7 +3,7 @@ package formatter
 import (
 	"bytes"
 	"encoding/xml"
-	plainTemplate "text/template"
+	plain "text/template"
 
 	"github.com/mgechev/revive/lint"
 )
@@ -50,7 +50,7 @@ func (*Checkstyle) Format(failures <-chan lint.Failure, config lint.Config) (str
 		issues[fn] = append(issues[fn], iss)
 	}
 
-	t, err := plainTemplate.New("revive").Parse(checkstyleTemplate)
+	t, err := plain.New("revive").Parse(checkstyleTemplate)
 	if err != nil {
 		return "", err
 	}
diff --git a/vendor/github.com/mgechev/revive/formatter/default.go b/vendor/github.com/mgechev/revive/formatter/default.go
index f76a7b29ab85bbb389578b917269508008b3d80d..2d5a04434fe92e5fc4da333e0532f1df8775bac4 100644
--- a/vendor/github.com/mgechev/revive/formatter/default.go
+++ b/vendor/github.com/mgechev/revive/formatter/default.go
@@ -1,6 +1,7 @@
 package formatter
 
 import (
+	"bytes"
 	"fmt"
 
 	"github.com/mgechev/revive/lint"
@@ -19,8 +20,9 @@ func (*Default) Name() string {
 
 // Format formats the failures gotten from the lint.
 func (*Default) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) {
+	var buf bytes.Buffer
 	for failure := range failures {
-		fmt.Printf("%v: %s\n", failure.Position.Start, failure.Failure)
+		fmt.Fprintf(&buf, "%v: %s\n", failure.Position.Start, failure.Failure)
 	}
-	return "", nil
+	return buf.String(), nil
 }
diff --git a/vendor/github.com/mgechev/revive/formatter/doc.go b/vendor/github.com/mgechev/revive/formatter/doc.go
new file mode 100644
index 0000000000000000000000000000000000000000..bb89f20ea608097365093d8100c960b2779fa3b0
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/doc.go
@@ -0,0 +1,2 @@
+// Package formatter implements the linter output formatters.
+package formatter
diff --git a/vendor/github.com/mgechev/revive/formatter/friendly.go b/vendor/github.com/mgechev/revive/formatter/friendly.go
index ced8fa46c9b39472a179fb2eef6a46a8cd95fa62..5ff329a23cd062090139ba413859551130162237 100644
--- a/vendor/github.com/mgechev/revive/formatter/friendly.go
+++ b/vendor/github.com/mgechev/revive/formatter/friendly.go
@@ -3,6 +3,7 @@ package formatter
 import (
 	"bytes"
 	"fmt"
+	"io"
 	"sort"
 
 	"github.com/fatih/color"
@@ -31,13 +32,14 @@ func (*Friendly) Name() string {
 
 // Format formats the failures gotten from the lint.
 func (f *Friendly) Format(failures <-chan lint.Failure, config lint.Config) (string, error) {
+	var buf bytes.Buffer
 	errorMap := map[string]int{}
 	warningMap := map[string]int{}
 	totalErrors := 0
 	totalWarnings := 0
 	for failure := range failures {
 		sev := severity(config, failure)
-		f.printFriendlyFailure(failure, sev)
+		f.printFriendlyFailure(&buf, failure, sev)
 		if sev == lint.SeverityWarning {
 			warningMap[failure.RuleName]++
 			totalWarnings++
@@ -47,29 +49,29 @@ func (f *Friendly) Format(failures <-chan lint.Failure, config lint.Config) (str
 			totalErrors++
 		}
 	}
-	f.printSummary(totalErrors, totalWarnings)
-	f.printStatistics(color.RedString("Errors:"), errorMap)
-	f.printStatistics(color.YellowString("Warnings:"), warningMap)
-	return "", nil
+	f.printSummary(&buf, totalErrors, totalWarnings)
+	f.printStatistics(&buf, color.RedString("Errors:"), errorMap)
+	f.printStatistics(&buf, color.YellowString("Warnings:"), warningMap)
+	return buf.String(), nil
 }
 
-func (f *Friendly) printFriendlyFailure(failure lint.Failure, severity lint.Severity) {
-	f.printHeaderRow(failure, severity)
-	f.printFilePosition(failure)
-	fmt.Println()
-	fmt.Println()
+func (f *Friendly) printFriendlyFailure(w io.Writer, failure lint.Failure, severity lint.Severity) {
+	f.printHeaderRow(w, failure, severity)
+	f.printFilePosition(w, failure)
+	fmt.Fprintln(w)
+	fmt.Fprintln(w)
 }
 
-func (f *Friendly) printHeaderRow(failure lint.Failure, severity lint.Severity) {
+func (f *Friendly) printHeaderRow(w io.Writer, failure lint.Failure, severity lint.Severity) {
 	emoji := getWarningEmoji()
 	if severity == lint.SeverityError {
 		emoji = getErrorEmoji()
 	}
-	fmt.Print(f.table([][]string{{emoji, "https://revive.run/r#" + failure.RuleName, color.GreenString(failure.Failure)}}))
+	fmt.Fprint(w, f.table([][]string{{emoji, "https://revive.run/r#" + failure.RuleName, color.GreenString(failure.Failure)}}))
 }
 
-func (*Friendly) printFilePosition(failure lint.Failure) {
-	fmt.Printf("  %s:%d:%d", failure.GetFilename(), failure.Position.Start.Line, failure.Position.Start.Column)
+func (*Friendly) printFilePosition(w io.Writer, failure lint.Failure) {
+	fmt.Fprintf(w, "  %s:%d:%d", failure.GetFilename(), failure.Position.Start.Line, failure.Position.Start.Column)
 }
 
 type statEntry struct {
@@ -77,7 +79,7 @@ type statEntry struct {
 	failures int
 }
 
-func (*Friendly) printSummary(errors, warnings int) {
+func (*Friendly) printSummary(w io.Writer, errors, warnings int) {
 	emoji := getWarningEmoji()
 	if errors > 0 {
 		emoji = getErrorEmoji()
@@ -96,18 +98,18 @@ func (*Friendly) printSummary(errors, warnings int) {
 	}
 	str := fmt.Sprintf("%d %s (%d %s, %d %s)", errors+warnings, problemsLabel, errors, errorsLabel, warnings, warningsLabel)
 	if errors > 0 {
-		fmt.Printf("%s %s\n", emoji, color.RedString(str))
-		fmt.Println()
+		fmt.Fprintf(w, "%s %s\n", emoji, color.RedString(str))
+		fmt.Fprintln(w)
 		return
 	}
 	if warnings > 0 {
-		fmt.Printf("%s %s\n", emoji, color.YellowString(str))
-		fmt.Println()
+		fmt.Fprintf(w, "%s %s\n", emoji, color.YellowString(str))
+		fmt.Fprintln(w)
 		return
 	}
 }
 
-func (f *Friendly) printStatistics(header string, stats map[string]int) {
+func (f *Friendly) printStatistics(w io.Writer, header string, stats map[string]int) {
 	if len(stats) == 0 {
 		return
 	}
@@ -122,8 +124,8 @@ func (f *Friendly) printStatistics(header string, stats map[string]int) {
 	for _, entry := range data {
 		formatted = append(formatted, []string{color.GreenString(fmt.Sprintf("%d", entry.failures)), entry.name})
 	}
-	fmt.Println(header)
-	fmt.Println(f.table(formatted))
+	fmt.Fprintln(w, header)
+	fmt.Fprintln(w, f.table(formatted))
 }
 
 func (*Friendly) table(rows [][]string) string {
diff --git a/vendor/github.com/mgechev/revive/formatter/ndjson.go b/vendor/github.com/mgechev/revive/formatter/ndjson.go
index a02d9c80fa14320a91d189ce7516a4ed0de73bcd..58b35dc44d94692c898ab22220d2cac5469fe970 100644
--- a/vendor/github.com/mgechev/revive/formatter/ndjson.go
+++ b/vendor/github.com/mgechev/revive/formatter/ndjson.go
@@ -1,8 +1,8 @@
 package formatter
 
 import (
+	"bytes"
 	"encoding/json"
-	"os"
 
 	"github.com/mgechev/revive/lint"
 )
@@ -20,7 +20,8 @@ func (*NDJSON) Name() string {
 
 // Format formats the failures gotten from the lint.
 func (*NDJSON) Format(failures <-chan lint.Failure, config lint.Config) (string, error) {
-	enc := json.NewEncoder(os.Stdout)
+	var buf bytes.Buffer
+	enc := json.NewEncoder(&buf)
 	for failure := range failures {
 		obj := jsonObject{}
 		obj.Severity = severity(config, failure)
@@ -30,5 +31,5 @@ func (*NDJSON) Format(failures <-chan lint.Failure, config lint.Config) (string,
 			return "", err
 		}
 	}
-	return "", nil
+	return buf.String(), nil
 }
diff --git a/vendor/github.com/mgechev/revive/formatter/plain.go b/vendor/github.com/mgechev/revive/formatter/plain.go
index 6e083bcfd0749fd4e983b80d08bb13b9d7ad454a..09ebf6cdc83ca248db28467e0fe3f68a8cf4a45a 100644
--- a/vendor/github.com/mgechev/revive/formatter/plain.go
+++ b/vendor/github.com/mgechev/revive/formatter/plain.go
@@ -1,6 +1,7 @@
 package formatter
 
 import (
+	"bytes"
 	"fmt"
 
 	"github.com/mgechev/revive/lint"
@@ -19,8 +20,9 @@ func (*Plain) Name() string {
 
 // Format formats the failures gotten from the lint.
 func (*Plain) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) {
+	var buf bytes.Buffer
 	for failure := range failures {
-		fmt.Printf("%v: %s %s\n", failure.Position.Start, failure.Failure, "https://revive.run/r#"+failure.RuleName)
+		fmt.Fprintf(&buf, "%v: %s %s\n", failure.Position.Start, failure.Failure, "https://revive.run/r#"+failure.RuleName)
 	}
-	return "", nil
+	return buf.String(), nil
 }
diff --git a/vendor/github.com/mgechev/revive/formatter/sarif.go b/vendor/github.com/mgechev/revive/formatter/sarif.go
index c6288db76f69f932cad92f1c7f1063ff1b1faa1b..c42da73eb0ce7c7aab0012afd935aa9de1473371 100644
--- a/vendor/github.com/mgechev/revive/formatter/sarif.go
+++ b/vendor/github.com/mgechev/revive/formatter/sarif.go
@@ -88,7 +88,7 @@ func (l *reviveRunLog) AddResult(failure lint.Failure) {
 	location := garif.NewLocation().WithURI(filename).WithLineColumn(line, column)
 	result.Locations = append(result.Locations, location)
 	result.RuleId = failure.RuleName
-	result.Level = l.rules[failure.RuleName].Severity
+	result.Level = garif.ResultLevel(l.rules[failure.RuleName].Severity)
 
 	l.run.Results = append(l.run.Results, result)
 }
diff --git a/vendor/github.com/mgechev/revive/formatter/unix.go b/vendor/github.com/mgechev/revive/formatter/unix.go
index ef2f1613acc39e287ff75b538facec3199703c2f..e46f3c275fdf4c6a4f88aba322f3011882fd216e 100644
--- a/vendor/github.com/mgechev/revive/formatter/unix.go
+++ b/vendor/github.com/mgechev/revive/formatter/unix.go
@@ -1,6 +1,7 @@
 package formatter
 
 import (
+	"bytes"
 	"fmt"
 
 	"github.com/mgechev/revive/lint"
@@ -8,7 +9,8 @@ import (
 
 // Unix is an implementation of the Formatter interface
 // which formats the errors to a simple line based error format
-//  main.go:24:9: [errorf] should replace errors.New(fmt.Sprintf(...)) with fmt.Errorf(...)
+//
+//	main.go:24:9: [errorf] should replace errors.New(fmt.Sprintf(...)) with fmt.Errorf(...)
 type Unix struct {
 	Metadata lint.FormatterMetadata
 }
@@ -20,8 +22,9 @@ func (*Unix) Name() string {
 
 // Format formats the failures gotten from the lint.
 func (*Unix) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) {
+	var buf bytes.Buffer
 	for failure := range failures {
-		fmt.Printf("%v: [%s] %s\n", failure.Position.Start, failure.RuleName, failure.Failure)
+		fmt.Fprintf(&buf, "%v: [%s] %s\n", failure.Position.Start, failure.RuleName, failure.Failure)
 	}
-	return "", nil
+	return buf.String(), nil
 }
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/args.go b/vendor/github.com/mgechev/revive/internal/ifelse/args.go
new file mode 100644
index 0000000000000000000000000000000000000000..c6e647e697b6ac80ff5fe6c050505f8b33bfd502
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/args.go
@@ -0,0 +1,11 @@
+package ifelse
+
+// PreserveScope is a configuration argument that prevents suggestions
+// that would enlarge variable scope
+const PreserveScope = "preserveScope"
+
+// Args contains arguments common to the early-return, indent-error-flow
+// and superfluous-else rules (currently just preserveScope)
+type Args struct {
+	PreserveScope bool
+}
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/branch.go b/vendor/github.com/mgechev/revive/internal/ifelse/branch.go
new file mode 100644
index 0000000000000000000000000000000000000000..6e6036b8996c5ad981181db60deee3a8e6b0e186
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/branch.go
@@ -0,0 +1,93 @@
+package ifelse
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+)
+
+// Branch contains information about a branch within an if-else chain.
+type Branch struct {
+	BranchKind
+	Call          // The function called at the end for kind Panic or Exit.
+	HasDecls bool // The branch has one or more declarations (at the top level block)
+}
+
+// BlockBranch gets the Branch of an ast.BlockStmt.
+func BlockBranch(block *ast.BlockStmt) Branch {
+	blockLen := len(block.List)
+	if blockLen == 0 {
+		return Empty.Branch()
+	}
+
+	branch := StmtBranch(block.List[blockLen-1])
+	branch.HasDecls = hasDecls(block)
+	return branch
+}
+
+// StmtBranch gets the Branch of an ast.Stmt.
+func StmtBranch(stmt ast.Stmt) Branch {
+	switch stmt := stmt.(type) {
+	case *ast.ReturnStmt:
+		return Return.Branch()
+	case *ast.BlockStmt:
+		return BlockBranch(stmt)
+	case *ast.BranchStmt:
+		switch stmt.Tok {
+		case token.BREAK:
+			return Break.Branch()
+		case token.CONTINUE:
+			return Continue.Branch()
+		case token.GOTO:
+			return Goto.Branch()
+		}
+	case *ast.ExprStmt:
+		fn, ok := ExprCall(stmt)
+		if !ok {
+			break
+		}
+		kind, ok := DeviatingFuncs[fn]
+		if ok {
+			return Branch{BranchKind: kind, Call: fn}
+		}
+	case *ast.EmptyStmt:
+		return Empty.Branch()
+	case *ast.LabeledStmt:
+		return StmtBranch(stmt.Stmt)
+	}
+	return Regular.Branch()
+}
+
+// String returns a brief string representation
+func (b Branch) String() string {
+	switch b.BranchKind {
+	case Panic, Exit:
+		return fmt.Sprintf("... %v()", b.Call)
+	default:
+		return b.BranchKind.String()
+	}
+}
+
+// LongString returns a longer form string representation
+func (b Branch) LongString() string {
+	switch b.BranchKind {
+	case Panic, Exit:
+		return fmt.Sprintf("call to %v function", b.Call)
+	default:
+		return b.BranchKind.LongString()
+	}
+}
+
+func hasDecls(block *ast.BlockStmt) bool {
+	for _, stmt := range block.List {
+		switch stmt := stmt.(type) {
+		case *ast.DeclStmt:
+			return true
+		case *ast.AssignStmt:
+			if stmt.Tok == token.DEFINE {
+				return true
+			}
+		}
+	}
+	return false
+}
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/branch_kind.go b/vendor/github.com/mgechev/revive/internal/ifelse/branch_kind.go
new file mode 100644
index 0000000000000000000000000000000000000000..41601d1e1d112a2680dbcc9b1fde5cb12dfe47d6
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/branch_kind.go
@@ -0,0 +1,101 @@
+package ifelse
+
+// BranchKind is a classifier for if-else branches. It says whether the branch is empty,
+// and whether the branch ends with a statement that deviates control flow.
+type BranchKind int
+
+const (
+	// Empty branches do nothing
+	Empty BranchKind = iota
+
+	// Return branches return from the current function
+	Return
+
+	// Continue branches continue a surrounding "for" loop
+	Continue
+
+	// Break branches break a surrounding "for" loop
+	Break
+
+	// Goto branches conclude with a "goto" statement
+	Goto
+
+	// Panic branches panic the current function
+	Panic
+
+	// Exit branches end the program
+	Exit
+
+	// Regular branches do not fit any category above
+	Regular
+)
+
+// IsEmpty tests if the branch is empty
+func (k BranchKind) IsEmpty() bool { return k == Empty }
+
+// Returns tests if the branch returns from the current function
+func (k BranchKind) Returns() bool { return k == Return }
+
+// Deviates tests if the control does not flow to the first
+// statement following the if-else chain.
+func (k BranchKind) Deviates() bool {
+	switch k {
+	case Empty, Regular:
+		return false
+	case Return, Continue, Break, Goto, Panic, Exit:
+		return true
+	default:
+		panic("invalid kind")
+	}
+}
+
+// Branch returns a Branch with the given kind
+func (k BranchKind) Branch() Branch { return Branch{BranchKind: k} }
+
+// String returns a brief string representation
+func (k BranchKind) String() string {
+	switch k {
+	case Empty:
+		return ""
+	case Regular:
+		return "..."
+	case Return:
+		return "... return"
+	case Continue:
+		return "... continue"
+	case Break:
+		return "... break"
+	case Goto:
+		return "... goto"
+	case Panic:
+		return "... panic()"
+	case Exit:
+		return "... os.Exit()"
+	default:
+		panic("invalid kind")
+	}
+}
+
+// LongString returns a longer form string representation
+func (k BranchKind) LongString() string {
+	switch k {
+	case Empty:
+		return "an empty block"
+	case Regular:
+		return "a regular statement"
+	case Return:
+		return "a return statement"
+	case Continue:
+		return "a continue statement"
+	case Break:
+		return "a break statement"
+	case Goto:
+		return "a goto statement"
+	case Panic:
+		return "a function call that panics"
+	case Exit:
+		return "a function call that exits the program"
+	default:
+		panic("invalid kind")
+	}
+}
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/chain.go b/vendor/github.com/mgechev/revive/internal/ifelse/chain.go
new file mode 100644
index 0000000000000000000000000000000000000000..9891635ee1eb2d00ebffe2aceb6743be21b0fb7c
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/chain.go
@@ -0,0 +1,10 @@
+package ifelse
+
+// Chain contains information about an if-else chain.
+type Chain struct {
+	If                   Branch // what happens at the end of the "if" block
+	Else                 Branch // what happens at the end of the "else" block
+	HasInitializer       bool   // is there an "if"-initializer somewhere in the chain?
+	HasPriorNonDeviating bool   // is there a prior "if" block that does NOT deviate control flow?
+	AtBlockEnd           bool   // whether the chain is placed at the end of the surrounding block
+}
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/doc.go b/vendor/github.com/mgechev/revive/internal/ifelse/doc.go
new file mode 100644
index 0000000000000000000000000000000000000000..0aa2c98175c35b57fa4be4af62cc57d9633c83b5
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/doc.go
@@ -0,0 +1,6 @@
+// Package ifelse provides helpers for analysing the control flow in if-else chains,
+// presently used by the following rules:
+// - early-return
+// - indent-error-flow
+// - superfluous-else
+package ifelse
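
A rough sketch (assumed example) of the if-else chains this package classifies: BlockBranch reduces each block to its final statement's BranchKind, so the rules above can tell that the "if" branch below deviates control flow (Return) and the "else" block only adds indentation.

```go
package example

import "errors"

// Typical early-return / indent-error-flow finding: the "if" branch ends
// in a return, so the "else" block could be flattened into the main flow.
func parse(input string) (string, error) {
	if input == "" {
		return "", errors.New("empty input")
	} else {
		return input + "!", nil
	}
}
```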
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/func.go b/vendor/github.com/mgechev/revive/internal/ifelse/func.go
new file mode 100644
index 0000000000000000000000000000000000000000..7ba3519184e183d5f38bff49459643495712d756
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/func.go
@@ -0,0 +1,51 @@
+package ifelse
+
+import (
+	"fmt"
+	"go/ast"
+)
+
+// Call contains the name of a function that deviates control flow.
+type Call struct {
+	Pkg  string // The package qualifier of the function, if not built-in.
+	Name string // The function name.
+}
+
+// DeviatingFuncs lists known control flow deviating function calls.
+var DeviatingFuncs = map[Call]BranchKind{
+	{"os", "Exit"}:     Exit,
+	{"log", "Fatal"}:   Exit,
+	{"log", "Fatalf"}:  Exit,
+	{"log", "Fatalln"}: Exit,
+	{"", "panic"}:      Panic,
+	{"log", "Panic"}:   Panic,
+	{"log", "Panicf"}:  Panic,
+	{"log", "Panicln"}: Panic,
+}
+
+// ExprCall gets the Call of an ExprStmt, if any.
+func ExprCall(expr *ast.ExprStmt) (Call, bool) {
+	call, ok := expr.X.(*ast.CallExpr)
+	if !ok {
+		return Call{}, false
+	}
+	switch v := call.Fun.(type) {
+	case *ast.Ident:
+		return Call{Name: v.Name}, true
+	case *ast.SelectorExpr:
+		if ident, ok := v.X.(*ast.Ident); ok {
+			return Call{Name: v.Sel.Name, Pkg: ident.Name}, true
+		}
+	}
+	return Call{}, false
+}
+
+// String returns the function name with package qualifier (if any)
+func (f Call) String() string {
+	switch {
+	case f.Pkg != "":
+		return fmt.Sprintf("%s.%s", f.Pkg, f.Name)
+	default:
+		return f.Name
+	}
+}
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/rule.go b/vendor/github.com/mgechev/revive/internal/ifelse/rule.go
new file mode 100644
index 0000000000000000000000000000000000000000..07ad456b6506f6bd008e9d46190ce1f820d22e56
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/rule.go
@@ -0,0 +1,105 @@
+package ifelse
+
+import (
+	"go/ast"
+	"go/token"
+
+	"github.com/mgechev/revive/lint"
+)
+
+// Rule is an interface for linters operating on if-else chains
+type Rule interface {
+	CheckIfElse(chain Chain, args Args) (failMsg string)
+}
+
+// Apply evaluates the given Rule on if-else chains found within the given AST,
+// and returns the failures.
+//
+// Note that in if-else chain with multiple "if" blocks, only the *last* one is checked,
+// that is to say, given:
+//
+//	if foo {
+//	    ...
+//	} else if bar {
+//		...
+//	} else {
+//		...
+//	}
+//
+// Only the block following "bar" is linted. This is because the rules that use this function
+// do not presently have anything to say about earlier blocks in the chain.
+func Apply(rule Rule, node ast.Node, target Target, args lint.Arguments) []lint.Failure {
+	v := &visitor{rule: rule, target: target}
+	for _, arg := range args {
+		if arg == PreserveScope {
+			v.args.PreserveScope = true
+		}
+	}
+	ast.Walk(v, node)
+	return v.failures
+}
+
+type visitor struct {
+	failures []lint.Failure
+	target   Target
+	rule     Rule
+	args     Args
+}
+
+func (v *visitor) Visit(node ast.Node) ast.Visitor {
+	block, ok := node.(*ast.BlockStmt)
+	if !ok {
+		return v
+	}
+
+	for i, stmt := range block.List {
+		if ifStmt, ok := stmt.(*ast.IfStmt); ok {
+			v.visitChain(ifStmt, Chain{AtBlockEnd: i == len(block.List)-1})
+			continue
+		}
+		ast.Walk(v, stmt)
+	}
+	return nil
+}
+
+func (v *visitor) visitChain(ifStmt *ast.IfStmt, chain Chain) {
+	// look for other if-else chains nested inside this if { } block
+	ast.Walk(v, ifStmt.Body)
+
+	if ifStmt.Else == nil {
+		// no else branch
+		return
+	}
+
+	if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE {
+		chain.HasInitializer = true
+	}
+	chain.If = BlockBranch(ifStmt.Body)
+
+	switch elseBlock := ifStmt.Else.(type) {
+	case *ast.IfStmt:
+		if !chain.If.Deviates() {
+			chain.HasPriorNonDeviating = true
+		}
+		v.visitChain(elseBlock, chain)
+	case *ast.BlockStmt:
+		// look for other if-else chains nested inside this else { } block
+		ast.Walk(v, elseBlock)
+
+		chain.Else = BlockBranch(elseBlock)
+		if failMsg := v.rule.CheckIfElse(chain, v.args); failMsg != "" {
+			if chain.HasInitializer {
+				// if statement has a := initializer, so we might need to move the assignment
+				// onto its own line in case the body references it
+				failMsg += " (move short variable declaration to its own line if necessary)"
+			}
+			v.failures = append(v.failures, lint.Failure{
+				Confidence: 1,
+				Node:       v.target.node(ifStmt),
+				Failure:    failMsg,
+			})
+		}
+	default:
+		panic("invalid node type for else")
+	}
+}
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/target.go b/vendor/github.com/mgechev/revive/internal/ifelse/target.go
new file mode 100644
index 0000000000000000000000000000000000000000..81ff1c3037d6986e2a161797fa2a644d40e94fc8
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/target.go
@@ -0,0 +1,25 @@
+package ifelse
+
+import "go/ast"
+
+// Target decides what line/column should be indicated by the rule in question.
+type Target int
+
+const (
+	// TargetIf means the text refers to the "if"
+	TargetIf Target = iota
+
+	// TargetElse means the text refers to the "else"
+	TargetElse
+)
+
+func (t Target) node(ifStmt *ast.IfStmt) ast.Node {
+	switch t {
+	case TargetIf:
+		return ifStmt
+	case TargetElse:
+		return ifStmt.Else
+	default:
+		panic("bad target")
+	}
+}
diff --git a/vendor/github.com/mgechev/revive/lint/config.go b/vendor/github.com/mgechev/revive/lint/config.go
index 27630580462aca070b09e6788561e00d38966942..7e51a93c28cd610da905add1daf66e9da5449dd4 100644
--- a/vendor/github.com/mgechev/revive/lint/config.go
+++ b/vendor/github.com/mgechev/revive/lint/config.go
@@ -3,16 +3,45 @@ package lint
 // Arguments is type used for the arguments of a rule.
 type Arguments = []interface{}
 
+// FileFilters is type used for modeling file filters to apply to rules.
+type FileFilters = []*FileFilter
+
 // RuleConfig is type used for the rule configuration.
 type RuleConfig struct {
 	Arguments Arguments
 	Severity  Severity
 	Disabled  bool
+	// Exclude - rule-level file excludes, TOML related (strings)
+	Exclude []string
+	// excludeFilters - regex-based file filters, initialized from Exclude
+	excludeFilters []*FileFilter
+}
+
+// Initialize - should be called after reading from TOML file
+func (rc *RuleConfig) Initialize() error {
+	for _, f := range rc.Exclude {
+		ff, err := ParseFileFilter(f)
+		if err != nil {
+			return err
+		}
+		rc.excludeFilters = append(rc.excludeFilters, ff)
+	}
+	return nil
 }
 
 // RulesConfig defines the config for all rules.
 type RulesConfig = map[string]RuleConfig
 
+// MustExclude - checks if given filename `name` must be excluded
+func (rc *RuleConfig) MustExclude(name string) bool {
+	for _, exclude := range rc.excludeFilters {
+		if exclude.MatchFileName(name) {
+			return true
+		}
+	}
+	return false
+}
+
 // DirectiveConfig is type used for the linter directive configuration.
 type DirectiveConfig struct {
 	Severity Severity
diff --git a/vendor/github.com/mgechev/revive/lint/doc.go b/vendor/github.com/mgechev/revive/lint/doc.go
new file mode 100644
index 0000000000000000000000000000000000000000..7048adf4b692f20568b67c90436185aae28af89c
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/lint/doc.go
@@ -0,0 +1,2 @@
+// Package lint implements the linting machinery.
+package lint
diff --git a/vendor/github.com/mgechev/revive/lint/file.go b/vendor/github.com/mgechev/revive/lint/file.go
index dcf0e608f646cf53908df9e919e4a8faf590a980..23255304c5c7168424b6a81c5aab9410a2bf2e26 100644
--- a/vendor/github.com/mgechev/revive/lint/file.go
+++ b/vendor/github.com/mgechev/revive/lint/file.go
@@ -102,6 +102,9 @@ func (f *File) lint(rules []Rule, config Config, failures chan Failure) {
 	disabledIntervals := f.disabledIntervals(rules, mustSpecifyDisableReason, failures)
 	for _, currentRule := range rules {
 		ruleConfig := rulesConfig[currentRule.Name()]
+		if ruleConfig.MustExclude(f.Name) {
+			continue
+		}
 		currentFailures := currentRule.Apply(f, ruleConfig.Arguments)
 		for idx, failure := range currentFailures {
 			if failure.RuleName == "" {
diff --git a/vendor/github.com/mgechev/revive/lint/filefilter.go b/vendor/github.com/mgechev/revive/lint/filefilter.go
new file mode 100644
index 0000000000000000000000000000000000000000..8da090b9cc46b9984237ac549d1e7fbf3d733bf7
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/lint/filefilter.go
@@ -0,0 +1,128 @@
+package lint
+
+import (
+	"fmt"
+	"regexp"
+	"strings"
+)
+
+// FileFilter - file filter to exclude some files for rule
+// supports whole
+// 1. file/dir names : pkg/mypkg/my.go,
+// 2. globs: **/*.pb.go,
+// 3. regexes (~ prefix) ~-tmp\.\d+\.go
+// 4. special test marker `TEST` - treats as `~_test\.go`
+type FileFilter struct {
+	// raw definition of filter inside config
+	raw string
+	// don't care what was at start, will use regexes inside
+	rx *regexp.Regexp
+	// marks filter as matching everything
+	matchesAll bool
+	// marks filter as matching nothing
+	matchesNothing bool
+}
+
+// ParseFileFilter - creates [FileFilter] for given raw filter
+// if empty string, it matches nothing
+// if `*`, or `~`, it matches everything
+// since the regexp could be invalid, it may return its compilation error
+func ParseFileFilter(rawFilter string) (*FileFilter, error) {
+	rawFilter = strings.TrimSpace(rawFilter)
+	result := new(FileFilter)
+	result.raw = rawFilter
+	result.matchesNothing = len(result.raw) == 0
+	result.matchesAll = result.raw == "*" || result.raw == "~"
+	if !result.matchesAll && !result.matchesNothing {
+		if err := result.prepareRegexp(); err != nil {
+			return nil, err
+		}
+	}
+	return result, nil
+}
+
+func (ff *FileFilter) String() string { return ff.raw }
+
+// MatchFileName - checks if file name matches filter
+func (ff *FileFilter) MatchFileName(name string) bool {
+	if ff.matchesAll {
+		return true
+	}
+	if ff.matchesNothing {
+		return false
+	}
+	name = strings.ReplaceAll(name, "\\", "/")
+	return ff.rx.MatchString(name)
+}
+
+var fileFilterInvalidGlobRegexp = regexp.MustCompile(`[^/]\*\*[^/]`)
+var escapeRegexSymbols = ".+{}()[]^$"
+
+func (ff *FileFilter) prepareRegexp() error {
+	var err error
+	var src = ff.raw
+	if src == "TEST" {
+		src = "~_test\\.go"
+	}
+	if strings.HasPrefix(src, "~") {
+		ff.rx, err = regexp.Compile(src[1:])
+		if err != nil {
+			return fmt.Errorf("invalid file filter [%s], regexp compile error: [%v]", ff.raw, err)
+		}
+		return nil
+	}
+	/* globs */
+	if strings.Contains(src, "*") {
+		if fileFilterInvalidGlobRegexp.MatchString(src) {
+			return fmt.Errorf("invalid file filter [%s], invalid glob pattern", ff.raw)
+		}
+		var rxBuild strings.Builder
+		rxBuild.WriteByte('^')
+		wasStar := false
+		justDirGlob := false
+		for _, c := range src {
+			if c == '*' {
+				if wasStar {
+					rxBuild.WriteString(`[\s\S]*`)
+					wasStar = false
+					justDirGlob = true
+					continue
+				}
+				wasStar = true
+				continue
+			}
+			if wasStar {
+				rxBuild.WriteString("[^/]*")
+				wasStar = false
+			}
+			if strings.ContainsRune(escapeRegexSymbols, c) {
+				rxBuild.WriteByte('\\')
+			}
+			rxBuild.WriteRune(c)
+			if c == '/' && justDirGlob {
+				rxBuild.WriteRune('?')
+			}
+			justDirGlob = false
+		}
+		if wasStar {
+			rxBuild.WriteString("[^/]*")
+		}
+		rxBuild.WriteByte('$')
+		ff.rx, err = regexp.Compile(rxBuild.String())
+		if err != nil {
+			return fmt.Errorf("invalid file filter [%s], regexp compile error after glob expand: [%v]", ff.raw, err)
+		}
+		return nil
+	}
+
+	// it's a whole-file mask, just escape dots and normalize separators
+	fillRx := src
+	fillRx = strings.ReplaceAll(fillRx, "\\", "/")
+	fillRx = strings.ReplaceAll(fillRx, ".", `\.`)
+	fillRx = "^" + fillRx + "$"
+	ff.rx, err = regexp.Compile(fillRx)
+	if err != nil {
+		return fmt.Errorf("invalid file filter [%s], regexp compile full path: [%v]", ff.raw, err)
+	}
+	return nil
+}
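
To make the four accepted filter forms concrete, a small sketch (illustrative file names) using the exported API:

```go
package main

import (
	"fmt"

	"github.com/mgechev/revive/lint"
)

func main() {
	filters := []string{
		"pkg/mypkg/my.go", // 1. whole file/dir name
		"**/*.pb.go",      // 2. glob
		`~-tmp\.\d+\.go`,  // 3. regex (~ prefix)
		"TEST",            // 4. shorthand for ~_test\.go
	}
	for _, raw := range filters {
		ff, err := lint.ParseFileFilter(raw)
		if err != nil {
			panic(err)
		}
		fmt.Printf("%-18s matches pkg/mypkg/my.go: %v\n", raw, ff.MatchFileName("pkg/mypkg/my.go"))
	}
}
```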
diff --git a/vendor/github.com/mgechev/revive/lint/utils.go b/vendor/github.com/mgechev/revive/lint/utils.go
index 28657c6df0668ac58592fe9effeb0aaaaa6f3206..6ccfb0ef29aff0c6dc31049084e7339ddad165b1 100644
--- a/vendor/github.com/mgechev/revive/lint/utils.go
+++ b/vendor/github.com/mgechev/revive/lint/utils.go
@@ -6,7 +6,7 @@ import (
 )
 
 // Name returns a different name if it should be different.
-func Name(name string, whitelist, blacklist []string) (should string) {
+func Name(name string, allowlist, blocklist []string) (should string) {
 	// Fast path for simple cases: "_" and all lowercase.
 	if name == "_" {
 		return name
@@ -57,12 +57,12 @@ func Name(name string, whitelist, blacklist []string) (should string) {
 		// [w,i) is a word.
 		word := string(runes[w:i])
 		ignoreInitWarnings := map[string]bool{}
-		for _, i := range whitelist {
+		for _, i := range allowlist {
 			ignoreInitWarnings[i] = true
 		}
 
 		extraInits := map[string]bool{}
-		for _, i := range blacklist {
+		for _, i := range blocklist {
 			extraInits[i] = true
 		}
 
@@ -71,6 +71,10 @@ func Name(name string, whitelist, blacklist []string) (should string) {
 			if w == 0 && unicode.IsLower(runes[w]) {
 				u = strings.ToLower(u)
 			}
+			// Keep lowercase s for IDs
+			if u == "IDS" {
+				u = "IDs"
+			}
 			// All the common initialisms are ASCII,
 			// so we can replace the bytes exactly.
 			copy(runes[w:], []rune(u))
@@ -99,6 +103,7 @@ var commonInitialisms = map[string]bool{
 	"HTTP":  true,
 	"HTTPS": true,
 	"ID":    true,
+	"IDS":   true,
 	"IP":    true,
 	"JSON":  true,
 	"LHS":   true,
diff --git a/vendor/github.com/mgechev/revive/rule/add-constant.go b/vendor/github.com/mgechev/revive/rule/add-constant.go
index 36a7003daf49837b7fed2a81295d5fb29f2ab689..86182623a99e26050add9417606597d421780177 100644
--- a/vendor/github.com/mgechev/revive/rule/add-constant.go
+++ b/vendor/github.com/mgechev/revive/rule/add-constant.go
@@ -18,13 +18,13 @@ const (
 	kindSTRING         = "STRING"
 )
 
-type whiteList map[string]map[string]bool
+type allowList map[string]map[string]bool
 
-func newWhiteList() whiteList {
+func newAllowList() allowList {
 	return map[string]map[string]bool{kindINT: {}, kindFLOAT: {}, kindSTRING: {}}
 }
 
-func (wl whiteList) add(kind, list string) {
+func (wl allowList) add(kind, list string) {
 	elems := strings.Split(list, ",")
 	for _, e := range elems {
 		wl[kind][e] = true
@@ -33,7 +33,7 @@ func (wl whiteList) add(kind, list string) {
 
 // AddConstantRule lints unused params in functions.
 type AddConstantRule struct {
-	whiteList       whiteList
+	allowList       allowList
 	ignoreFunctions []*regexp.Regexp
 	strLitLimit     int
 	sync.Mutex
@@ -49,12 +49,13 @@ func (r *AddConstantRule) Apply(file *lint.File, arguments lint.Arguments) []lin
 		failures = append(failures, failure)
 	}
 
-	w := lintAddConstantRule{
+	w := &lintAddConstantRule{
 		onFailure:       onFailure,
 		strLits:         make(map[string]int),
 		strLitLimit:     r.strLitLimit,
-		whiteLst:        r.whiteList,
+		allowList:        r.allowList,
 		ignoreFunctions: r.ignoreFunctions,
+		structTags:      make(map[*ast.BasicLit]struct{}),
 	}
 
 	ast.Walk(w, file.AST)
@@ -71,11 +72,16 @@ type lintAddConstantRule struct {
 	onFailure       func(lint.Failure)
 	strLits         map[string]int
 	strLitLimit     int
-	whiteLst        whiteList
+	allowList        allowList
 	ignoreFunctions []*regexp.Regexp
+	structTags      map[*ast.BasicLit]struct{}
 }
 
-func (w lintAddConstantRule) Visit(node ast.Node) ast.Visitor {
+func (w *lintAddConstantRule) Visit(node ast.Node) ast.Visitor {
+	if node == nil {
+		return nil
+	}
+
 	switch n := node.(type) {
 	case *ast.CallExpr:
 		w.checkFunc(n)
@@ -83,13 +89,23 @@ func (w lintAddConstantRule) Visit(node ast.Node) ast.Visitor {
 	case *ast.GenDecl:
 		return nil // skip declarations
 	case *ast.BasicLit:
-		w.checkLit(n)
+		if !w.isStructTag(n) {
+			w.checkLit(n)
+		}
+	case *ast.StructType:
+		if n.Fields != nil {
+			for _, field := range n.Fields.List {
+				if field.Tag != nil {
+					w.structTags[field.Tag] = struct{}{}
+				}
+			}
+		}
 	}
 
 	return w
 }
 
-func (w lintAddConstantRule) checkFunc(expr *ast.CallExpr) {
+func (w *lintAddConstantRule) checkFunc(expr *ast.CallExpr) {
 	fName := w.getFuncName(expr)
 
 	for _, arg := range expr.Args {
@@ -105,7 +121,7 @@ func (w lintAddConstantRule) checkFunc(expr *ast.CallExpr) {
 	}
 }
 
-func (w lintAddConstantRule) getFuncName(expr *ast.CallExpr) string {
+func (*lintAddConstantRule) getFuncName(expr *ast.CallExpr) string {
 	switch f := expr.Fun.(type) {
 	case *ast.SelectorExpr:
 		switch prefix := f.X.(type) {
@@ -119,7 +135,7 @@ func (w lintAddConstantRule) getFuncName(expr *ast.CallExpr) string {
 	return ""
 }
 
-func (w lintAddConstantRule) checkLit(n *ast.BasicLit) {
+func (w *lintAddConstantRule) checkLit(n *ast.BasicLit) {
 	switch kind := n.Kind.String(); kind {
 	case kindFLOAT, kindINT:
 		w.checkNumLit(kind, n)
@@ -128,7 +144,7 @@ func (w lintAddConstantRule) checkLit(n *ast.BasicLit) {
 	}
 }
 
-func (w lintAddConstantRule) isIgnoredFunc(fName string) bool {
+func (w *lintAddConstantRule) isIgnoredFunc(fName string) bool {
 	for _, pattern := range w.ignoreFunctions {
 		if pattern.MatchString(fName) {
 			return true
@@ -138,8 +154,8 @@ func (w lintAddConstantRule) isIgnoredFunc(fName string) bool {
 	return false
 }
 
-func (w lintAddConstantRule) checkStrLit(n *ast.BasicLit) {
-	if w.whiteLst[kindSTRING][n.Value] {
+func (w *lintAddConstantRule) checkStrLit(n *ast.BasicLit) {
+	if w.allowList[kindSTRING][n.Value] {
 		return
 	}
 
@@ -158,8 +174,8 @@ func (w lintAddConstantRule) checkStrLit(n *ast.BasicLit) {
 	}
 }
 
-func (w lintAddConstantRule) checkNumLit(kind string, n *ast.BasicLit) {
-	if w.whiteLst[kind][n.Value] {
+func (w *lintAddConstantRule) checkNumLit(kind string, n *ast.BasicLit) {
+	if w.allowList[kind][n.Value] {
 		return
 	}
 
@@ -171,15 +187,20 @@ func (w lintAddConstantRule) checkNumLit(kind string, n *ast.BasicLit) {
 	})
 }
 
+func (w *lintAddConstantRule) isStructTag(n *ast.BasicLit) bool {
+	_, ok := w.structTags[n]
+	return ok
+}
+
 func (r *AddConstantRule) configure(arguments lint.Arguments) {
 	r.Lock()
 	defer r.Unlock()
 
-	if r.whiteList == nil {
+	if r.allowList == nil {
 		r.strLitLimit = defaultStrLitLimit
-		r.whiteList = newWhiteList()
+		r.allowList = newAllowList()
 		if len(arguments) > 0 {
-			args, ok := arguments[0].(map[string]interface{})
+			args, ok := arguments[0].(map[string]any)
 			if !ok {
 				panic(fmt.Sprintf("Invalid argument to the add-constant rule. Expecting a k,v map, got %T", arguments[0]))
 			}
@@ -202,7 +223,7 @@ func (r *AddConstantRule) configure(arguments lint.Arguments) {
 					if !ok {
 						panic(fmt.Sprintf("Invalid argument to the add-constant rule, string expected. Got '%v' (%T)", v, v))
 					}
-					r.whiteList.add(kind, list)
+					r.allowList.add(kind, list)
 				case "maxLitCount":
 					sl, ok := v.(string)
 					if !ok {
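A hedged example of the behavioural change above: struct tag literals are recorded in structTags during the walk and skipped by checkLit, so repeated tags such as the hypothetical ones below no longer count toward the add-constant string literal limit, while ordinary literals still do.

```go
package example

// No longer reported: both `json:"name"` occurrences are struct tags.
type User struct {
	Name  string `json:"name"`
	Alias string `json:"name"`
}

// Still subject to the rule when repeated often enough in the file.
var greeting = "hello"
```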
diff --git a/vendor/github.com/mgechev/revive/rule/argument-limit.go b/vendor/github.com/mgechev/revive/rule/argument-limit.go
index 8042da15e3b431549fd0e9ef3f19a230223c3f2f..8120288fd51f726a155a526e82a39ca9d25c1183 100644
--- a/vendor/github.com/mgechev/revive/rule/argument-limit.go
+++ b/vendor/github.com/mgechev/revive/rule/argument-limit.go
@@ -14,10 +14,16 @@ type ArgumentsLimitRule struct {
 	sync.Mutex
 }
 
+const defaultArgumentsLimit = 8
+
 func (r *ArgumentsLimitRule) configure(arguments lint.Arguments) {
 	r.Lock()
+	defer r.Unlock()
 	if r.total == 0 {
-		checkNumberOfArguments(1, arguments, r.Name())
+		if len(arguments) < 1 {
+			r.total = defaultArgumentsLimit
+			return
+		}
 
 		total, ok := arguments[0].(int64) // Alt. non panicking version
 		if !ok {
@@ -25,7 +31,6 @@ func (r *ArgumentsLimitRule) configure(arguments lint.Arguments) {
 		}
 		r.total = int(total)
 	}
-	r.Unlock()
 }
 
 // Apply applies the rule to given file.
diff --git a/vendor/github.com/mgechev/revive/rule/banned-characters.go b/vendor/github.com/mgechev/revive/rule/banned-characters.go
index 76fa2235a93e065a5b3470e6064db7a154d0c89b..12997bae11b05ad76cc18af2d48ab35f1f52c7b6 100644
--- a/vendor/github.com/mgechev/revive/rule/banned-characters.go
+++ b/vendor/github.com/mgechev/revive/rule/banned-characters.go
@@ -19,11 +19,11 @@ const bannedCharsRuleName = "banned-characters"
 
 func (r *BannedCharsRule) configure(arguments lint.Arguments) {
 	r.Lock()
-	if r.bannedCharList == nil {
+	defer r.Unlock()
+	if r.bannedCharList == nil && len(arguments) > 0 {
 		checkNumberOfArguments(1, arguments, bannedCharsRuleName)
 		r.bannedCharList = r.getBannedCharsList(arguments)
 	}
-	r.Unlock()
 }
 
 // Apply applied the rule to the given file.
diff --git a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go b/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go
index a9c11a7d0b3bf951fea8e602ff9317cfa25f00c2..1973faef878241f5a74157e1a584fc69420c95ea 100644
--- a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go
+++ b/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go
@@ -16,10 +16,17 @@ type CognitiveComplexityRule struct {
 	sync.Mutex
 }
 
+const defaultMaxCognitiveComplexity = 7
+
 func (r *CognitiveComplexityRule) configure(arguments lint.Arguments) {
 	r.Lock()
+	defer r.Unlock()
 	if r.maxComplexity == 0 {
-		checkNumberOfArguments(1, arguments, r.Name())
+
+		if len(arguments) < 1 {
+			r.maxComplexity = defaultMaxCognitiveComplexity
+			return
+		}
 
 		complexity, ok := arguments[0].(int64)
 		if !ok {
@@ -27,7 +34,6 @@ func (r *CognitiveComplexityRule) configure(arguments lint.Arguments) {
 		}
 		r.maxComplexity = int(complexity)
 	}
-	r.Unlock()
 }
 
 // Apply applies the rule to given file.
diff --git a/vendor/github.com/mgechev/revive/rule/comment-spacings.go b/vendor/github.com/mgechev/revive/rule/comment-spacings.go
index 0d75c55f3000462324e4f09c0ac4da0ef05353dc..2b8240ca5839fe898392c45095bff6502f98eb64 100644
--- a/vendor/github.com/mgechev/revive/rule/comment-spacings.go
+++ b/vendor/github.com/mgechev/revive/rule/comment-spacings.go
@@ -8,7 +8,7 @@ import (
 	"github.com/mgechev/revive/lint"
 )
 
-// CommentSpacings Rule check the whether there is a space between
+// CommentSpacingsRule checks whether there is a space between
 // the comment symbol( // ) and the start of the comment text
 type CommentSpacingsRule struct {
 	allowList []string
@@ -36,6 +36,7 @@ func (r *CommentSpacingsRule) configure(arguments lint.Arguments) {
 	}
 }
 
+// Apply the rule.
 func (r *CommentSpacingsRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
 	r.configure(args)
 
@@ -74,6 +75,7 @@ func (r *CommentSpacingsRule) Apply(file *lint.File, args lint.Arguments) []lint
 	return failures
 }
 
+// Name yields this rule name.
 func (*CommentSpacingsRule) Name() string {
 	return "comment-spacings"
 }
diff --git a/vendor/github.com/mgechev/revive/rule/confusing-naming.go b/vendor/github.com/mgechev/revive/rule/confusing-naming.go
index 34cdb907a8e73ba529a15c04de1c210aa6c81a96..febfd88245a383394072fd70c6ffdc9f98d1a249 100644
--- a/vendor/github.com/mgechev/revive/rule/confusing-naming.go
+++ b/vendor/github.com/mgechev/revive/rule/confusing-naming.go
@@ -27,10 +27,10 @@ type packages struct {
 
 func (ps *packages) methodNames(lp *lint.Package) pkgMethods {
 	ps.mu.Lock()
+	defer ps.mu.Unlock()
 
 	for _, pkg := range ps.pkgs {
 		if pkg.pkg == lp {
-			ps.mu.Unlock()
 			return pkg
 		}
 	}
@@ -38,7 +38,6 @@ func (ps *packages) methodNames(lp *lint.Package) pkgMethods {
 	pkgm := pkgMethods{pkg: lp, methods: make(map[string]map[string]*referenceMethod), mu: &sync.Mutex{}}
 	ps.pkgs = append(ps.pkgs, pkgm)
 
-	ps.mu.Unlock()
 	return pkgm
 }
 
@@ -112,7 +111,7 @@ func checkMethodName(holder string, id *ast.Ident, w *lintConfusingNames) {
 		pkgm.methods[holder] = make(map[string]*referenceMethod, 1)
 	}
 
-	// update the black list
+	// update the block list
 	if pkgm.methods[holder] == nil {
 		println("no entry for '", holder, "'")
 	}
@@ -137,8 +136,11 @@ func getStructName(r *ast.FieldList) string {
 
 	t := r.List[0].Type
 
-	if p, _ := t.(*ast.StarExpr); p != nil { // if a pointer receiver => dereference pointer receiver types
-		t = p.X
+	switch v := t.(type) {
+	case *ast.StarExpr:
+		t = v.X
+	case *ast.IndexExpr:
+		t = v.X
 	}
 
 	if p, _ := t.(*ast.Ident); p != nil {
diff --git a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go b/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go
index 9abc95d67c9da8c4e45966a290385a8e45bdbc44..36cd641f74e7af41de961b316e0f3001c9fcc99f 100644
--- a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go
+++ b/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go
@@ -11,7 +11,7 @@ import (
 type ConstantLogicalExprRule struct{}
 
 // Apply applies the rule to given file.
-func (r *ConstantLogicalExprRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+func (*ConstantLogicalExprRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
 	var failures []lint.Failure
 
 	onFailure := func(failure lint.Failure) {
@@ -63,7 +63,7 @@ func (w *lintConstantLogicalExpr) Visit(node ast.Node) ast.Visitor {
 	return w
 }
 
-func (w *lintConstantLogicalExpr) isOperatorWithLogicalResult(t token.Token) bool {
+func (*lintConstantLogicalExpr) isOperatorWithLogicalResult(t token.Token) bool {
 	switch t {
 	case token.LAND, token.LOR, token.EQL, token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ:
 		return true
@@ -72,7 +72,7 @@ func (w *lintConstantLogicalExpr) isOperatorWithLogicalResult(t token.Token) boo
 	return false
 }
 
-func (w *lintConstantLogicalExpr) isEqualityOperator(t token.Token) bool {
+func (*lintConstantLogicalExpr) isEqualityOperator(t token.Token) bool {
 	switch t {
 	case token.EQL, token.LEQ, token.GEQ:
 		return true
@@ -81,7 +81,7 @@ func (w *lintConstantLogicalExpr) isEqualityOperator(t token.Token) bool {
 	return false
 }
 
-func (w *lintConstantLogicalExpr) isInequalityOperator(t token.Token) bool {
+func (*lintConstantLogicalExpr) isInequalityOperator(t token.Token) bool {
 	switch t {
 	case token.LSS, token.GTR, token.NEQ:
 		return true
diff --git a/vendor/github.com/mgechev/revive/rule/context-as-argument.go b/vendor/github.com/mgechev/revive/rule/context-as-argument.go
index 3c400065e129532368b8569c1180e99e068d26e5..e0c8cfa5e976a1cdbac50c3a707dbb129060dfed 100644
--- a/vendor/github.com/mgechev/revive/rule/context-as-argument.go
+++ b/vendor/github.com/mgechev/revive/rule/context-as-argument.go
@@ -82,7 +82,7 @@ func (w lintContextArguments) Visit(n ast.Node) ast.Visitor {
 func getAllowTypesFromArguments(args lint.Arguments) map[string]struct{} {
 	allowTypesBefore := []string{}
 	if len(args) >= 1 {
-		argKV, ok := args[0].(map[string]interface{})
+		argKV, ok := args[0].(map[string]any)
 		if !ok {
 			panic(fmt.Sprintf("Invalid argument to the context-as-argument rule. Expecting a k,v map, got %T", args[0]))
 		}
diff --git a/vendor/github.com/mgechev/revive/rule/cyclomatic.go b/vendor/github.com/mgechev/revive/rule/cyclomatic.go
index afd41818b8f9650323a43f44dd8bec729fa8e870..9f6d50043d2bf79fb09ca5cf5ac5b344cf1a3481 100644
--- a/vendor/github.com/mgechev/revive/rule/cyclomatic.go
+++ b/vendor/github.com/mgechev/revive/rule/cyclomatic.go
@@ -17,10 +17,16 @@ type CyclomaticRule struct {
 	sync.Mutex
 }
 
+const defaultMaxCyclomaticComplexity = 10
+
 func (r *CyclomaticRule) configure(arguments lint.Arguments) {
 	r.Lock()
+	defer r.Unlock()
 	if r.maxComplexity == 0 {
-		checkNumberOfArguments(1, arguments, r.Name())
+		if len(arguments) < 1 {
+			r.maxComplexity = defaultMaxCyclomaticComplexity
+			return
+		}
 
 		complexity, ok := arguments[0].(int64) // Alt. non panicking version
 		if !ok {
@@ -28,7 +34,6 @@ func (r *CyclomaticRule) configure(arguments lint.Arguments) {
 		}
 		r.maxComplexity = int(complexity)
 	}
-	r.Unlock()
 }
 
 // Apply applies the rule to given file.
diff --git a/vendor/github.com/mgechev/revive/rule/datarace.go b/vendor/github.com/mgechev/revive/rule/datarace.go
index 26fcadcdc978438ca775e3ff819026f4a82d59a6..39e96696adf2b8c653a1eb2eea77c4c017a7691b 100644
--- a/vendor/github.com/mgechev/revive/rule/datarace.go
+++ b/vendor/github.com/mgechev/revive/rule/datarace.go
@@ -53,7 +53,7 @@ func (w lintDataRaces) Visit(n ast.Node) ast.Visitor {
 	return nil
 }
 
-func (w lintDataRaces) ExtractReturnIDs(fields []*ast.Field) map[*ast.Object]struct{} {
+func (lintDataRaces) ExtractReturnIDs(fields []*ast.Field) map[*ast.Object]struct{} {
 	r := map[*ast.Object]struct{}{}
 	for _, f := range fields {
 		for _, id := range f.Names {
@@ -111,7 +111,7 @@ func (w lintFunctionForDataRaces) Visit(node ast.Node) ast.Visitor {
 			return ok
 		}
 
-		ids := pick(funcLit.Body, selectIDs, nil)
+		ids := pick(funcLit.Body, selectIDs)
 		for _, id := range ids {
 			id := id.(*ast.Ident)
 			_, isRangeID := w.rangeIDs[id.Obj]
diff --git a/vendor/github.com/mgechev/revive/rule/defer.go b/vendor/github.com/mgechev/revive/rule/defer.go
index f8224fd4d19822614815ef0ae8001c7746d6f40e..adc6478aeee1ea44df113db25c32fbaf8587550e 100644
--- a/vendor/github.com/mgechev/revive/rule/defer.go
+++ b/vendor/github.com/mgechev/revive/rule/defer.go
@@ -56,7 +56,7 @@ func (*DeferRule) allowFromArgs(args lint.Arguments) map[string]bool {
 		return allow
 	}
 
-	aa, ok := args[0].([]interface{})
+	aa, ok := args[0].([]any)
 	if !ok {
 		panic(fmt.Sprintf("Invalid argument '%v' for 'defer' rule. Expecting []string, got %T", args[0], args[0]))
 	}
@@ -97,18 +97,21 @@ func (w lintDeferRule) Visit(node ast.Node) ast.Visitor {
 			w.newFailure("return in a defer function has no effect", n, 1.0, "logic", "return")
 		}
 	case *ast.CallExpr:
-		if !w.inADefer && isIdent(n.Fun, "recover") {
+		isCallToRecover := isIdent(n.Fun, "recover")
+		switch {
+		case !w.inADefer && isCallToRecover:
 			// func fn() { recover() }
 			//
 			// confidence is not 1 because recover can be in a function that is deferred elsewhere
 			w.newFailure("recover must be called inside a deferred function", n, 0.8, "logic", "recover")
-		} else if w.inADefer && !w.inAFuncLit && isIdent(n.Fun, "recover") {
+		case w.inADefer && !w.inAFuncLit && isCallToRecover:
 			// defer helper(recover())
 			//
 			// confidence is not truly 1 because this could be in a correctly-deferred func,
 			// but it is very likely to be a misunderstanding of defer's behavior around arguments.
 			w.newFailure("recover must be called inside a deferred function, this is executing recover immediately", n, 1, "logic", "immediate-recover")
 		}
+
 	case *ast.DeferStmt:
 		if isIdent(n.Call.Fun, "recover") {
 			// defer recover()
@@ -119,7 +122,12 @@ func (w lintDeferRule) Visit(node ast.Node) ast.Visitor {
 		}
 		w.visitSubtree(n.Call.Fun, true, false, false)
 		for _, a := range n.Call.Args {
-			w.visitSubtree(a, true, false, false) // check arguments, they should not contain recover()
+			switch a.(type) {
+			case *ast.FuncLit:
+				continue // too hard to analyze deferred calls with func literal args
+			default:
+				w.visitSubtree(a, true, false, false) // check arguments, they should not contain recover()
+			}
 		}
 
 		if w.inALoop {
@@ -137,6 +145,7 @@ func (w lintDeferRule) Visit(node ast.Node) ast.Visitor {
 				}
 			}
 		}
+
 		return nil
 	}
 
diff --git a/vendor/github.com/mgechev/revive/rule/doc.go b/vendor/github.com/mgechev/revive/rule/doc.go
new file mode 100644
index 0000000000000000000000000000000000000000..55bf6caa6ff041c00123e90808cb1a9868fcb892
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/doc.go
@@ -0,0 +1,2 @@
+// Package rule implements revive's linting rules.
+package rule
diff --git a/vendor/github.com/mgechev/revive/rule/dot-imports.go b/vendor/github.com/mgechev/revive/rule/dot-imports.go
index 25ff526cb5d6236e173f8fe5b8507e883490f765..6b877677db4c63ca2d49a81fc922f83d291d967d 100644
--- a/vendor/github.com/mgechev/revive/rule/dot-imports.go
+++ b/vendor/github.com/mgechev/revive/rule/dot-imports.go
@@ -1,16 +1,23 @@
 package rule
 
 import (
+	"fmt"
 	"go/ast"
+	"sync"
 
 	"github.com/mgechev/revive/lint"
 )
 
 // DotImportsRule lints given else constructs.
-type DotImportsRule struct{}
+type DotImportsRule struct {
+	sync.Mutex
+	allowedPackages allowPackages
+}
 
 // Apply applies the rule to given file.
-func (*DotImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+func (r *DotImportsRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+	r.configure(arguments)
+
 	var failures []lint.Failure
 
 	fileAst := file.AST
@@ -20,6 +27,7 @@ func (*DotImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
 		onFailure: func(failure lint.Failure) {
 			failures = append(failures, failure)
 		},
+		allowPackages: r.allowedPackages,
 	}
 
 	ast.Walk(walker, fileAst)
@@ -32,16 +40,49 @@ func (*DotImportsRule) Name() string {
 	return "dot-imports"
 }
 
+func (r *DotImportsRule) configure(arguments lint.Arguments) {
+	r.Lock()
+	defer r.Unlock()
+
+	if r.allowedPackages != nil {
+		return
+	}
+
+	r.allowedPackages = make(allowPackages)
+	if len(arguments) == 0 {
+		return
+	}
+
+	args, ok := arguments[0].(map[string]any)
+	if !ok {
+		panic(fmt.Sprintf("Invalid argument to the dot-imports rule. Expecting a k,v map, got %T", arguments[0]))
+	}
+
+	if allowedPkgArg, ok := args["allowedPackages"]; ok {
+		if pkgs, ok := allowedPkgArg.([]any); ok {
+			for _, p := range pkgs {
+				if pkg, ok := p.(string); ok {
+					r.allowedPackages.add(pkg)
+				} else {
+					panic(fmt.Sprintf("Invalid argument to the dot-imports rule, string expected. Got '%v' (%T)", p, p))
+				}
+			}
+		} else {
+			panic(fmt.Sprintf("Invalid argument to the dot-imports rule, []string expected. Got '%v' (%T)", allowedPkgArg, allowedPkgArg))
+		}
+	}
+}
+
 type lintImports struct {
-	file      *lint.File
-	fileAst   *ast.File
-	onFailure func(lint.Failure)
+	file          *lint.File
+	fileAst       *ast.File
+	onFailure     func(lint.Failure)
+	allowPackages allowPackages
 }
 
 func (w lintImports) Visit(_ ast.Node) ast.Visitor {
-	for i, is := range w.fileAst.Imports {
-		_ = i
-		if is.Name != nil && is.Name.Name == "." && !w.file.IsTest() {
+	for _, is := range w.fileAst.Imports {
+		if is.Name != nil && is.Name.Name == "." && !w.allowPackages.isAllowedPackage(is.Path.Value) {
 			w.onFailure(lint.Failure{
 				Confidence: 1,
 				Failure:    "should not use dot imports",
@@ -52,3 +93,14 @@ func (w lintImports) Visit(_ ast.Node) ast.Visitor {
 	}
 	return nil
 }
+
+type allowPackages map[string]struct{}
+
+func (ap allowPackages) add(pkg string) {
+	ap[fmt.Sprintf(`"%s"`, pkg)] = struct{}{} // import path strings include the surrounding double quotes
+}
+
+func (ap allowPackages) isAllowedPackage(pkg string) bool {
+	_, allowed := ap[pkg]
+	return allowed
+}
diff --git a/vendor/github.com/mgechev/revive/rule/early-return.go b/vendor/github.com/mgechev/revive/rule/early-return.go
index ed0fcfae4307da8a682c3110d9660446623dab72..9c04a1dbe93fd7a67be8deacc256d7d6d5537c68 100644
--- a/vendor/github.com/mgechev/revive/rule/early-return.go
+++ b/vendor/github.com/mgechev/revive/rule/early-return.go
@@ -2,9 +2,8 @@ package rule
 
 import (
 	"fmt"
-	"go/ast"
-	"go/token"
 
+	"github.com/mgechev/revive/internal/ifelse"
 	"github.com/mgechev/revive/lint"
 )
 
@@ -13,16 +12,8 @@ import (
 type EarlyReturnRule struct{}
 
 // Apply applies the rule to given file.
-func (*EarlyReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
-	var failures []lint.Failure
-
-	onFailure := func(failure lint.Failure) {
-		failures = append(failures, failure)
-	}
-
-	w := lintEarlyReturnRule{onFailure: onFailure}
-	ast.Walk(w, file.AST)
-	return failures
+func (e *EarlyReturnRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
+	return ifelse.Apply(e, file.AST, ifelse.TargetIf, args)
 }
 
 // Name returns the rule name.
@@ -30,147 +21,31 @@ func (*EarlyReturnRule) Name() string {
 	return "early-return"
 }
 
-type lintEarlyReturnRule struct {
-	onFailure func(lint.Failure)
-}
-
-func (w lintEarlyReturnRule) Visit(node ast.Node) ast.Visitor {
-	ifStmt, ok := node.(*ast.IfStmt)
-	if !ok {
-		return w
-	}
-
-	w.visitIf(ifStmt, false, false)
-	return nil
-}
-
-func (w lintEarlyReturnRule) visitIf(ifStmt *ast.IfStmt, hasNonReturnBranch, hasIfInitializer bool) {
-	// look for other if-else chains nested inside this if { } block
-	ast.Walk(w, ifStmt.Body)
-
-	if ifStmt.Else == nil {
-		// no else branch
+// CheckIfElse evaluates the rule against an ifelse.Chain.
+func (*EarlyReturnRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) {
+	if !chain.Else.Deviates() {
+		// this rule only applies if the else-block deviates control flow
 		return
 	}
 
-	if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE {
-		hasIfInitializer = true
-	}
-	bodyFlow := w.branchFlow(ifStmt.Body)
-
-	switch elseBlock := ifStmt.Else.(type) {
-	case *ast.IfStmt:
-		if bodyFlow.canFlowIntoNext() {
-			hasNonReturnBranch = true
-		}
-		w.visitIf(elseBlock, hasNonReturnBranch, hasIfInitializer)
-
-	case *ast.BlockStmt:
-		// look for other if-else chains nested inside this else { } block
-		ast.Walk(w, elseBlock)
-
-		if hasNonReturnBranch && bodyFlow != branchFlowEmpty {
-			// if we de-indent this block then a previous branch
-			// might flow into it, affecting program behaviour
-			return
-		}
-
-		if !bodyFlow.canFlowIntoNext() {
-			// avoid overlapping with superfluous-else
-			return
-		}
-
-		elseFlow := w.branchFlow(elseBlock)
-		if !elseFlow.canFlowIntoNext() {
-			failMsg := fmt.Sprintf("if c {%[1]s } else {%[2]s } can be simplified to if !c {%[2]s }%[1]s",
-				bodyFlow, elseFlow)
-
-			if hasIfInitializer {
-				// if statement has a := initializer, so we might need to move the assignment
-				// onto its own line in case the body references it
-				failMsg += " (move short variable declaration to its own line if necessary)"
-			}
-
-			w.onFailure(lint.Failure{
-				Confidence: 1,
-				Node:       ifStmt,
-				Failure:    failMsg,
-			})
-		}
-
-	default:
-		panic("invalid node type for else")
-	}
-}
-
-type branchFlowKind int
-
-const (
-	branchFlowEmpty branchFlowKind = iota
-	branchFlowReturn
-	branchFlowPanic
-	branchFlowContinue
-	branchFlowBreak
-	branchFlowGoto
-	branchFlowRegular
-)
-
-func (w lintEarlyReturnRule) branchFlow(block *ast.BlockStmt) branchFlowKind {
-	blockLen := len(block.List)
-	if blockLen == 0 {
-		return branchFlowEmpty
+	if chain.HasPriorNonDeviating && !chain.If.IsEmpty() {
+		// if we de-indent this block then a previous branch
+		// might flow into it, affecting program behaviour
+		return
 	}
 
-	switch stmt := block.List[blockLen-1].(type) {
-	case *ast.ReturnStmt:
-		return branchFlowReturn
-	case *ast.BlockStmt:
-		return w.branchFlow(stmt)
-	case *ast.BranchStmt:
-		switch stmt.Tok {
-		case token.BREAK:
-			return branchFlowBreak
-		case token.CONTINUE:
-			return branchFlowContinue
-		case token.GOTO:
-			return branchFlowGoto
-		}
-	case *ast.ExprStmt:
-		if call, ok := stmt.X.(*ast.CallExpr); ok && isIdent(call.Fun, "panic") {
-			return branchFlowPanic
-		}
+	if chain.If.Deviates() {
+		// avoid overlapping with superfluous-else
+		return
 	}
 
-	return branchFlowRegular
-}
-
-// Whether this branch's control can flow into the next statement following the if-else chain
-func (k branchFlowKind) canFlowIntoNext() bool {
-	switch k {
-	case branchFlowReturn, branchFlowPanic, branchFlowContinue, branchFlowBreak, branchFlowGoto:
-		return false
-	default:
-		return true
+	if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.If.HasDecls) {
+		// avoid increasing variable scope
+		return
 	}
-}
 
-func (k branchFlowKind) String() string {
-	switch k {
-	case branchFlowEmpty:
-		return ""
-	case branchFlowReturn:
-		return " ... return"
-	case branchFlowPanic:
-		return " ... panic()"
-	case branchFlowContinue:
-		return " ... continue"
-	case branchFlowBreak:
-		return " ... break"
-	case branchFlowGoto:
-		return " ... goto"
-	case branchFlowRegular:
-		return " ..."
-	default:
-		panic("invalid kind")
+	if chain.If.IsEmpty() {
+		return fmt.Sprintf("if c { } else { %[1]v } can be simplified to if !c { %[1]v }", chain.Else)
 	}
+	return fmt.Sprintf("if c { ... } else { %[1]v } can be simplified to if !c { %[1]v } ...", chain.Else)
 }
diff --git a/vendor/github.com/mgechev/revive/rule/enforce-map-style.go b/vendor/github.com/mgechev/revive/rule/enforce-map-style.go
new file mode 100644
index 0000000000000000000000000000000000000000..36ac2374c2bb2059a76e63b15a662e4f42bdc0ab
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/enforce-map-style.go
@@ -0,0 +1,164 @@
+package rule
+
+import (
+	"fmt"
+	"go/ast"
+	"sync"
+
+	"github.com/mgechev/revive/lint"
+)
+
+type enforceMapStyleType string
+
+const (
+	enforceMapStyleTypeAny     enforceMapStyleType = "any"
+	enforceMapStyleTypeMake    enforceMapStyleType = "make"
+	enforceMapStyleTypeLiteral enforceMapStyleType = "literal"
+)
+
+func mapStyleFromString(s string) (enforceMapStyleType, error) {
+	switch s {
+	case string(enforceMapStyleTypeAny), "":
+		return enforceMapStyleTypeAny, nil
+	case string(enforceMapStyleTypeMake):
+		return enforceMapStyleTypeMake, nil
+	case string(enforceMapStyleTypeLiteral):
+		return enforceMapStyleTypeLiteral, nil
+	default:
+		return enforceMapStyleTypeAny, fmt.Errorf(
+			"invalid map style: %s (expecting one of %v)",
+			s,
+			[]enforceMapStyleType{
+				enforceMapStyleTypeAny,
+				enforceMapStyleTypeMake,
+				enforceMapStyleTypeLiteral,
+			},
+		)
+	}
+}
+
+// EnforceMapStyleRule implements a rule to enforce `make(map[type]type)` over `map[type]type{}`.
+type EnforceMapStyleRule struct {
+	configured      bool
+	enforceMapStyle enforceMapStyleType
+	sync.Mutex
+}
+
+func (r *EnforceMapStyleRule) configure(arguments lint.Arguments) {
+	r.Lock()
+	defer r.Unlock()
+
+	if r.configured {
+		return
+	}
+	r.configured = true
+
+	if len(arguments) < 1 {
+		r.enforceMapStyle = enforceMapStyleTypeAny
+		return
+	}
+
+	enforceMapStyle, ok := arguments[0].(string)
+	if !ok {
+		panic(fmt.Sprintf("Invalid argument '%v' for 'enforce-map-style' rule. Expecting string, got %T", arguments[0], arguments[0]))
+	}
+
+	var err error
+	r.enforceMapStyle, err = mapStyleFromString(enforceMapStyle)
+
+	if err != nil {
+		panic(fmt.Sprintf("Invalid argument to the enforce-map-style rule: %v", err))
+	}
+}
+
+// Apply applies the rule to given file.
+func (r *EnforceMapStyleRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+	r.configure(arguments)
+
+	if r.enforceMapStyle == enforceMapStyleTypeAny {
+		// this linter is not configured
+		return nil
+	}
+
+	var failures []lint.Failure
+
+	astFile := file.AST
+	ast.Inspect(astFile, func(n ast.Node) bool {
+		switch v := n.(type) {
+		case *ast.CompositeLit:
+			if r.enforceMapStyle != enforceMapStyleTypeMake {
+				return true
+			}
+
+			if !r.isMapType(v.Type) {
+				return true
+			}
+
+			if len(v.Elts) > 0 {
+				// not an empty map
+				return true
+			}
+
+			failures = append(failures, lint.Failure{
+				Confidence: 1,
+				Node:       v,
+				Category:   "style",
+				Failure:    "use make(map[type]type) instead of map[type]type{}",
+			})
+		case *ast.CallExpr:
+			if r.enforceMapStyle != enforceMapStyleTypeLiteral {
+				// skip any function calls, even if it's make(map[type]type)
+				// we don't want to report it if literals are not enforced
+				return true
+			}
+
+			ident, ok := v.Fun.(*ast.Ident)
+			if !ok || ident.Name != "make" {
+				return true
+			}
+
+			if len(v.Args) != 1 {
+				// skip make(map[type]type, size) and invalid empty declarations
+				return true
+			}
+
+			if !r.isMapType(v.Args[0]) {
+				// not a map type
+				return true
+			}
+
+			failures = append(failures, lint.Failure{
+				Confidence: 1,
+				Node:       v.Args[0],
+				Category:   "style",
+				Failure:    "use map[type]type{} instead of make(map[type]type)",
+			})
+		}
+		return true
+	})
+
+	return failures
+}
+
+// Name returns the rule name.
+func (*EnforceMapStyleRule) Name() string {
+	return "enforce-map-style"
+}
+
+func (r *EnforceMapStyleRule) isMapType(v ast.Expr) bool {
+	switch t := v.(type) {
+	case *ast.MapType:
+		return true
+	case *ast.Ident:
+		if t.Obj == nil {
+			return false
+		}
+		typeSpec, ok := t.Obj.Decl.(*ast.TypeSpec)
+		if !ok {
+			return false
+		}
+		return r.isMapType(typeSpec.Type)
+	default:
+		return false
+	}
+}
diff --git a/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go b/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go
new file mode 100644
index 0000000000000000000000000000000000000000..067082b1b0ce1cfdd92b18e7fd0a463133b0aca4
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go
@@ -0,0 +1,191 @@
+package rule
+
+import (
+	"fmt"
+	"go/ast"
+	"go/types"
+	"sync"
+
+	"github.com/mgechev/revive/lint"
+)
+
+type enforceRepeatedArgTypeStyleType string
+
+const (
+	enforceRepeatedArgTypeStyleTypeAny   enforceRepeatedArgTypeStyleType = "any"
+	enforceRepeatedArgTypeStyleTypeShort enforceRepeatedArgTypeStyleType = "short"
+	enforceRepeatedArgTypeStyleTypeFull  enforceRepeatedArgTypeStyleType = "full"
+)
+
+func repeatedArgTypeStyleFromString(s string) enforceRepeatedArgTypeStyleType {
+	switch s {
+	case string(enforceRepeatedArgTypeStyleTypeAny), "":
+		return enforceRepeatedArgTypeStyleTypeAny
+	case string(enforceRepeatedArgTypeStyleTypeShort):
+		return enforceRepeatedArgTypeStyleTypeShort
+	case string(enforceRepeatedArgTypeStyleTypeFull):
+		return enforceRepeatedArgTypeStyleTypeFull
+	default:
+		err := fmt.Errorf(
+			"invalid repeated arg type style: %s (expecting one of %v)",
+			s,
+			[]enforceRepeatedArgTypeStyleType{
+				enforceRepeatedArgTypeStyleTypeAny,
+				enforceRepeatedArgTypeStyleTypeShort,
+				enforceRepeatedArgTypeStyleTypeFull,
+			},
+		)
+
+		panic(fmt.Sprintf("Invalid argument to the enforce-repeated-arg-type-style rule: %v", err))
+	}
+}
+
+// EnforceRepeatedArgTypeStyleRule implements a rule to enforce repeated argument type style.
+type EnforceRepeatedArgTypeStyleRule struct {
+	configured      bool
+	funcArgStyle    enforceRepeatedArgTypeStyleType
+	funcRetValStyle enforceRepeatedArgTypeStyleType
+
+	sync.Mutex
+}
+
+func (r *EnforceRepeatedArgTypeStyleRule) configure(arguments lint.Arguments) {
+	r.Lock()
+	defer r.Unlock()
+
+	if r.configured {
+		return
+	}
+	r.configured = true
+
+	r.funcArgStyle = enforceRepeatedArgTypeStyleTypeAny
+	r.funcRetValStyle = enforceRepeatedArgTypeStyleTypeAny
+
+	if len(arguments) == 0 {
+		return
+	}
+
+	switch funcArgStyle := arguments[0].(type) {
+	case string:
+		r.funcArgStyle = repeatedArgTypeStyleFromString(funcArgStyle)
+		r.funcRetValStyle = repeatedArgTypeStyleFromString(funcArgStyle)
+	case map[string]any: // expecting map[string]string
+		for k, v := range funcArgStyle {
+			switch k {
+			case "funcArgStyle":
+				val, ok := v.(string)
+				if !ok {
+					panic(fmt.Sprintf("Invalid map value type for 'enforce-repeated-arg-type-style' rule. Expecting string, got %T", v))
+				}
+				r.funcArgStyle = repeatedArgTypeStyleFromString(val)
+			case "funcRetValStyle":
+				val, ok := v.(string)
+				if !ok {
+					panic(fmt.Sprintf("Invalid map value '%v' for 'enforce-repeated-arg-type-style' rule. Expecting string, got %T", v, v))
+				}
+				r.funcRetValStyle = repeatedArgTypeStyleFromString(val)
+			default:
+				panic(fmt.Sprintf("Invalid map key for 'enforce-repeated-arg-type-style' rule. Expecting 'funcArgStyle' or 'funcRetValStyle', got %v", k))
+			}
+		}
+	default:
+		panic(fmt.Sprintf("Invalid argument '%v' for 'enforce-repeated-arg-type-style' rule. Expecting string or map[string]string, got %T", arguments[0], arguments[0]))
+	}
+}
+
+// Apply applies the rule to a given file.
+func (r *EnforceRepeatedArgTypeStyleRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+	r.configure(arguments)
+
+	if r.funcArgStyle == enforceRepeatedArgTypeStyleTypeAny && r.funcRetValStyle == enforceRepeatedArgTypeStyleTypeAny {
+		// This linter is not configured, return no failures.
+		return nil
+	}
+
+	var failures []lint.Failure
+
+	err := file.Pkg.TypeCheck()
+	if err != nil {
+		// the file has other issues
+		return nil
+	}
+	typesInfo := file.Pkg.TypesInfo()
+
+	astFile := file.AST
+	ast.Inspect(astFile, func(n ast.Node) bool {
+		switch fn := n.(type) {
+		case *ast.FuncDecl:
+			if r.funcArgStyle == enforceRepeatedArgTypeStyleTypeFull {
+				if fn.Type.Params != nil {
+					for _, field := range fn.Type.Params.List {
+						if len(field.Names) > 1 {
+							failures = append(failures, lint.Failure{
+								Confidence: 1,
+								Node:       field,
+								Category:   "style",
+								Failure:    "argument types should not be omitted",
+							})
+						}
+					}
+				}
+			}
+
+			if r.funcArgStyle == enforceRepeatedArgTypeStyleTypeShort {
+				var prevType ast.Expr
+				if fn.Type.Params != nil {
+					for _, field := range fn.Type.Params.List {
+						if types.Identical(typesInfo.Types[field.Type].Type, typesInfo.Types[prevType].Type) {
+							failures = append(failures, lint.Failure{
+								Confidence: 1,
+								Node:       field,
+								Category:   "style",
+								Failure:    "repeated argument type can be omitted",
+							})
+						}
+						prevType = field.Type
+					}
+				}
+			}
+
+			if r.funcRetValStyle == enforceRepeatedArgTypeStyleTypeFull {
+				if fn.Type.Results != nil {
+					for _, field := range fn.Type.Results.List {
+						if len(field.Names) > 1 {
+							failures = append(failures, lint.Failure{
+								Confidence: 1,
+								Node:       field,
+								Category:   "style",
+								Failure:    "return types should not be omitted",
+							})
+						}
+					}
+				}
+			}
+
+			if r.funcRetValStyle == enforceRepeatedArgTypeStyleTypeShort {
+				var prevType ast.Expr
+				if fn.Type.Results != nil {
+					for _, field := range fn.Type.Results.List {
+						if field.Names != nil && types.Identical(typesInfo.Types[field.Type].Type, typesInfo.Types[prevType].Type) {
+							failures = append(failures, lint.Failure{
+								Confidence: 1,
+								Node:       field,
+								Category:   "style",
+								Failure:    "repeated return type can be omitted",
+							})
+						}
+						prevType = field.Type
+					}
+				}
+			}
+		}
+		return true
+	})
+
+	return failures
+}
+
+// Name returns the name of the linter rule.
+func (*EnforceRepeatedArgTypeStyleRule) Name() string {
+	return "enforce-repeated-arg-type-style"
+}
diff --git a/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go b/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go
new file mode 100644
index 0000000000000000000000000000000000000000..abaf20be0e68b5135d54430f68ddef0f006637b9
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go
@@ -0,0 +1,193 @@
+package rule
+
+import (
+	"fmt"
+	"go/ast"
+	"sync"
+
+	"github.com/mgechev/revive/lint"
+)
+
+type enforceSliceStyleType string
+
+const (
+	enforceSliceStyleTypeAny     enforceSliceStyleType = "any"
+	enforceSliceStyleTypeMake    enforceSliceStyleType = "make"
+	enforceSliceStyleTypeLiteral enforceSliceStyleType = "literal"
+)
+
+func sliceStyleFromString(s string) (enforceSliceStyleType, error) {
+	switch s {
+	case string(enforceSliceStyleTypeAny), "":
+		return enforceSliceStyleTypeAny, nil
+	case string(enforceSliceStyleTypeMake):
+		return enforceSliceStyleTypeMake, nil
+	case string(enforceSliceStyleTypeLiteral):
+		return enforceSliceStyleTypeLiteral, nil
+	default:
+		return enforceSliceStyleTypeAny, fmt.Errorf(
+			"invalid slice style: %s (expecting one of %v)",
+			s,
+			[]enforceSliceStyleType{
+				enforceSliceStyleTypeAny,
+				enforceSliceStyleTypeMake,
+				enforceSliceStyleTypeLiteral,
+			},
+		)
+	}
+}
+
+// EnforceSliceStyleRule implements a rule to enforce `make([]type)` over `[]type{}`.
+type EnforceSliceStyleRule struct {
+	configured        bool
+	enforceSliceStyle enforceSliceStyleType
+	sync.Mutex
+}
+
+func (r *EnforceSliceStyleRule) configure(arguments lint.Arguments) {
+	r.Lock()
+	defer r.Unlock()
+
+	if r.configured {
+		return
+	}
+	r.configured = true
+
+	if len(arguments) < 1 {
+		r.enforceSliceStyle = enforceSliceStyleTypeAny
+		return
+	}
+
+	enforceSliceStyle, ok := arguments[0].(string)
+	if !ok {
+		panic(fmt.Sprintf("Invalid argument '%v' for 'enforce-slice-style' rule. Expecting string, got %T", arguments[0], arguments[0]))
+	}
+
+	var err error
+	r.enforceSliceStyle, err = sliceStyleFromString(enforceSliceStyle)
+
+	if err != nil {
+		panic(fmt.Sprintf("Invalid argument to the enforce-slice-style rule: %v", err))
+	}
+}
+
+// Apply applies the rule to given file.
+func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+	r.configure(arguments)
+
+	if r.enforceSliceStyle == enforceSliceStyleTypeAny {
+		// this linter is not configured
+		return nil
+	}
+
+	var failures []lint.Failure
+
+	astFile := file.AST
+	ast.Inspect(astFile, func(n ast.Node) bool {
+		switch v := n.(type) {
+		case *ast.CompositeLit:
+			if r.enforceSliceStyle != enforceSliceStyleTypeMake {
+				return true
+			}
+
+			if !r.isSliceType(v.Type) {
+				return true
+			}
+
+			if len(v.Elts) > 0 {
+				// not an empty slice
+				return true
+			}
+
+			failures = append(failures, lint.Failure{
+				Confidence: 1,
+				Node:       v,
+				Category:   "style",
+				Failure:    "use make([]type) instead of []type{} (or declare nil slice)",
+			})
+		case *ast.CallExpr:
+			if r.enforceSliceStyle != enforceSliceStyleTypeLiteral {
+				// skip any function calls, even if it's make([]type)
+				// we don't want to report it if literals are not enforced
+				return true
+			}
+
+			ident, ok := v.Fun.(*ast.Ident)
+			if !ok || ident.Name != "make" {
+				return true
+			}
+
+			if len(v.Args) < 2 {
+				// skip invalid make declarations
+				return true
+			}
+
+			if !r.isSliceType(v.Args[0]) {
+				// not a slice type
+				return true
+			}
+
+			arg, ok := v.Args[1].(*ast.BasicLit)
+			if !ok {
+				// skip invalid make declarations
+				return true
+			}
+
+			if arg.Value != "0" {
+				// skip slice with non-zero size
+				return true
+			}
+
+			if len(v.Args) > 2 {
+				arg, ok := v.Args[2].(*ast.BasicLit)
+				if !ok {
+					// skip invalid make declarations
+					return true
+				}
+
+				if arg.Value != "0" {
+					// skip non-zero capacity slice
+					return true
+				}
+			}
+
+			failures = append(failures, lint.Failure{
+				Confidence: 1,
+				Node:       v.Args[0],
+				Category:   "style",
+				Failure:    "use []type{} instead of make([]type, 0) (or declare nil slice)",
+			})
+		}
+		return true
+	})
+
+	return failures
+}
+
+// Name returns the rule name.
+func (*EnforceSliceStyleRule) Name() string {
+	return "enforce-slice-style"
+}
+
+func (r *EnforceSliceStyleRule) isSliceType(v ast.Expr) bool {
+	switch t := v.(type) {
+	case *ast.ArrayType:
+		if t.Len != nil {
+			// array
+			return false
+		}
+		// slice
+		return true
+	case *ast.Ident:
+		if t.Obj == nil {
+			return false
+		}
+		typeSpec, ok := t.Obj.Decl.(*ast.TypeSpec)
+		if !ok {
+			return false
+		}
+		return r.isSliceType(typeSpec.Type)
+	default:
+		return false
+	}
+}
diff --git a/vendor/github.com/mgechev/revive/rule/file-header.go b/vendor/github.com/mgechev/revive/rule/file-header.go
index 76f548f51fb2b292ec2ec46a6262316094ee1324..a7d69ff2b11dcdbbbe589b2c28e12be3ed53bdf3 100644
--- a/vendor/github.com/mgechev/revive/rule/file-header.go
+++ b/vendor/github.com/mgechev/revive/rule/file-header.go
@@ -21,21 +21,28 @@ var (
 
 func (r *FileHeaderRule) configure(arguments lint.Arguments) {
 	r.Lock()
+	defer r.Unlock()
 	if r.header == "" {
-		checkNumberOfArguments(1, arguments, r.Name())
+		if len(arguments) < 1 {
+			return
+		}
+
 		var ok bool
 		r.header, ok = arguments[0].(string)
 		if !ok {
-			panic(fmt.Sprintf("invalid argument for \"file-header\" rule: first argument should be a string, got %T", arguments[0]))
+			panic(fmt.Sprintf("invalid argument for \"file-header\" rule: argument should be a string, got %T", arguments[0]))
 		}
 	}
-	r.Unlock()
 }
 
 // Apply applies the rule to given file.
 func (r *FileHeaderRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
 	r.configure(arguments)
 
+	if r.header == "" {
+		return nil
+	}
+
 	failure := []lint.Failure{
 		{
 			Node:       file.AST,
diff --git a/vendor/github.com/mgechev/revive/rule/flag-param.go b/vendor/github.com/mgechev/revive/rule/flag-param.go
index 19a05f9feaf908543f0cf8ef39d2e69514f48e7d..f9bfb712c46f2ed20a05ca1c6ed0ea74ba2fc7f2 100644
--- a/vendor/github.com/mgechev/revive/rule/flag-param.go
+++ b/vendor/github.com/mgechev/revive/rule/flag-param.go
@@ -88,7 +88,7 @@ func (w conditionVisitor) Visit(node ast.Node) ast.Visitor {
 		return false
 	}
 
-	uses := pick(ifStmt.Cond, fselect, nil)
+	uses := pick(ifStmt.Cond, fselect)
 
 	if len(uses) < 1 {
 		return w
diff --git a/vendor/github.com/mgechev/revive/rule/function-length.go b/vendor/github.com/mgechev/revive/rule/function-length.go
index d600d7a2a12a783d7dfa64285565144a0c727e75..fd65884e97c57a03e1615e7d6edd4c8ef7cbe0a1 100644
--- a/vendor/github.com/mgechev/revive/rule/function-length.go
+++ b/vendor/github.com/mgechev/revive/rule/function-length.go
@@ -19,13 +19,13 @@ type FunctionLength struct {
 
 func (r *FunctionLength) configure(arguments lint.Arguments) {
 	r.Lock()
+	defer r.Unlock()
 	if !r.configured {
 		maxStmt, maxLines := r.parseArguments(arguments)
 		r.maxStmt = int(maxStmt)
 		r.maxLines = int(maxLines)
 		r.configured = true
 	}
-	r.Unlock()
 }
 
 // Apply applies the rule to given file.
@@ -53,7 +53,14 @@ func (*FunctionLength) Name() string {
 	return "function-length"
 }
 
+const defaultFuncStmtsLimit = 50
+const defaultFuncLinesLimit = 75
+
 func (*FunctionLength) parseArguments(arguments lint.Arguments) (maxStmt, maxLines int64) {
+	if len(arguments) == 0 {
+		return defaultFuncStmtsLimit, defaultFuncLinesLimit
+	}
+
 	if len(arguments) != 2 {
 		panic(fmt.Sprintf(`invalid configuration for "function-length" rule, expected 2 arguments but got %d`, len(arguments)))
 	}
@@ -164,7 +171,7 @@ func (w lintFuncLength) countFuncLitStmts(stmt ast.Expr) int {
 	return 0
 }
 
-func (w lintFuncLength) countBodyListStmts(t interface{}) int {
+func (w lintFuncLength) countBodyListStmts(t any) int {
 	i := reflect.ValueOf(t).Elem().FieldByName(`Body`).Elem().FieldByName(`List`).Interface()
 	return w.countStmts(i.([]ast.Stmt))
 }
diff --git a/vendor/github.com/mgechev/revive/rule/function-result-limit.go b/vendor/github.com/mgechev/revive/rule/function-result-limit.go
index 5d2b87316a4b9114dd042dee77f0d0f6b782ce69..6a0748011d084de83e7c1049005abd5331523113 100644
--- a/vendor/github.com/mgechev/revive/rule/function-result-limit.go
+++ b/vendor/github.com/mgechev/revive/rule/function-result-limit.go
@@ -14,11 +14,16 @@ type FunctionResultsLimitRule struct {
 	sync.Mutex
 }
 
+const defaultResultsLimit = 3
+
 func (r *FunctionResultsLimitRule) configure(arguments lint.Arguments) {
 	r.Lock()
+	defer r.Unlock()
 	if r.max == 0 {
-		checkNumberOfArguments(1, arguments, r.Name())
-
+		if len(arguments) < 1 {
+			r.max = defaultResultsLimit
+			return
+		}
 		max, ok := arguments[0].(int64) // Alt. non panicking version
 		if !ok {
 			panic(fmt.Sprintf(`invalid value passed as return results number to the "function-result-limit" rule; need int64 but got %T`, arguments[0]))
@@ -28,7 +33,6 @@ func (r *FunctionResultsLimitRule) configure(arguments lint.Arguments) {
 		}
 		r.max = int(max)
 	}
-	r.Unlock()
 }
 
 // Apply applies the rule to given file.
diff --git a/vendor/github.com/mgechev/revive/rule/import-alias-naming.go b/vendor/github.com/mgechev/revive/rule/import-alias-naming.go
new file mode 100644
index 0000000000000000000000000000000000000000..a6d096c8b27c7f449a0a4f0ff13789b50d19bc99
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/import-alias-naming.go
@@ -0,0 +1,126 @@
+package rule
+
+import (
+	"fmt"
+	"regexp"
+	"sync"
+
+	"github.com/mgechev/revive/lint"
+)
+
+// ImportAliasNamingRule lints import alias naming.
+type ImportAliasNamingRule struct {
+	configured  bool
+	allowRegexp *regexp.Regexp
+	denyRegexp  *regexp.Regexp
+	sync.Mutex
+}
+
+const defaultImportAliasNamingAllowRule = "^[a-z][a-z0-9]{0,}$"
+
+var defaultImportAliasNamingAllowRegexp = regexp.MustCompile(defaultImportAliasNamingAllowRule)
+
+func (r *ImportAliasNamingRule) configure(arguments lint.Arguments) {
+	r.Lock()
+	defer r.Unlock()
+	if r.configured {
+		return
+	}
+
+	if len(arguments) == 0 {
+		r.allowRegexp = defaultImportAliasNamingAllowRegexp
+		return
+	}
+
+	switch namingRule := arguments[0].(type) {
+	case string:
+		r.setAllowRule(namingRule)
+	case map[string]any: // expecting map[string]string
+		for k, v := range namingRule {
+			switch k {
+			case "allowRegex":
+				r.setAllowRule(v)
+			case "denyRegex":
+				r.setDenyRule(v)
+			default:
+				panic(fmt.Sprintf("Invalid map key for 'import-alias-naming' rule. Expecting 'allowRegex' or 'denyRegex', got %v", k))
+			}
+		}
+	default:
+		panic(fmt.Sprintf("Invalid argument '%v' for 'import-alias-naming' rule. Expecting string or map[string]string, got %T", arguments[0], arguments[0]))
+	}
+
+	if r.allowRegexp == nil && r.denyRegexp == nil {
+		r.allowRegexp = defaultImportAliasNamingAllowRegexp
+	}
+}
+
+// Apply applies the rule to given file.
+func (r *ImportAliasNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+	r.configure(arguments)
+
+	var failures []lint.Failure
+
+	for _, is := range file.AST.Imports {
+		path := is.Path
+		if path == nil {
+			continue
+		}
+
+		alias := is.Name
+		if alias == nil || alias.Name == "_" || alias.Name == "." { // "_" and "." are special types of import aliases and should be processed by another linter rule
+			continue
+		}
+
+		if r.allowRegexp != nil && !r.allowRegexp.MatchString(alias.Name) {
+			failures = append(failures, lint.Failure{
+				Confidence: 1,
+				Failure:    fmt.Sprintf("import name (%s) must match the regular expression: %s", alias.Name, r.allowRegexp.String()),
+				Node:       alias,
+				Category:   "imports",
+			})
+		}
+
+		if r.denyRegexp != nil && r.denyRegexp.MatchString(alias.Name) {
+			failures = append(failures, lint.Failure{
+				Confidence: 1,
+				Failure:    fmt.Sprintf("import name (%s) must NOT match the regular expression: %s", alias.Name, r.denyRegexp.String()),
+				Node:       alias,
+				Category:   "imports",
+			})
+		}
+	}
+
+	return failures
+}
+
+// Name returns the rule name.
+func (*ImportAliasNamingRule) Name() string {
+	return "import-alias-naming"
+}
+
+func (r *ImportAliasNamingRule) setAllowRule(value any) {
+	namingRule, ok := value.(string)
+	if !ok {
+		panic(fmt.Sprintf("Invalid argument '%v' for import-alias-naming allowRegexp rule. Expecting string, got %T", value, value))
+	}
+
+	namingRuleRegexp, err := regexp.Compile(namingRule)
+	if err != nil {
+		panic(fmt.Sprintf("Invalid argument to the import-alias-naming allowRegexp rule. Expecting %q to be a valid regular expression, got: %v", namingRule, err))
+	}
+	r.allowRegexp = namingRuleRegexp
+}
+
+func (r *ImportAliasNamingRule) setDenyRule(value any) {
+	namingRule, ok := value.(string)
+	if !ok {
+		panic(fmt.Sprintf("Invalid argument '%v' for import-alias-naming denyRegexp rule. Expecting string, got %T", value, value))
+	}
+
+	namingRuleRegexp, err := regexp.Compile(namingRule)
+	if err != nil {
+		panic(fmt.Sprintf("Invalid argument to the import-alias-naming denyRegexp rule. Expecting %q to be a valid regular expression, got: %v", namingRule, err))
+	}
+	r.denyRegexp = namingRuleRegexp
+}
diff --git a/vendor/github.com/mgechev/revive/rule/import-shadowing.go b/vendor/github.com/mgechev/revive/rule/import-shadowing.go
index 2bab704d02f0bbe4383a27aa5d91cd14cf219108..046aeb688e8778c91b69e82d656044e00a6aa9a4 100644
--- a/vendor/github.com/mgechev/revive/rule/import-shadowing.go
+++ b/vendor/github.com/mgechev/revive/rule/import-shadowing.go
@@ -29,6 +29,7 @@ func (*ImportShadowingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Fail
 			failures = append(failures, failure)
 		},
 		alreadySeen: map[*ast.Object]struct{}{},
+		skipIdents:  map[*ast.Ident]struct{}{},
 	}
 
 	ast.Walk(walker, fileAst)
@@ -62,6 +63,7 @@ type importShadowing struct {
 	importNames      map[string]struct{}
 	onFailure        func(lint.Failure)
 	alreadySeen      map[*ast.Object]struct{}
+	skipIdents       map[*ast.Ident]struct{}
 }
 
 // Visit visits AST nodes and checks if id nodes (ast.Ident) shadow an import name
@@ -80,6 +82,10 @@ func (w importShadowing) Visit(n ast.Node) ast.Visitor {
 		*ast.SelectorExpr, // skip analysis of selector expressions (anId.otherId): because if anId shadows an import name, it was already detected, and otherId does not shadows the import name
 		*ast.StructType:   // skip analysis of struct type because struct fields can not shadow an import name
 		return nil
+	case *ast.FuncDecl:
+		if n.Recv != nil {
+			w.skipIdents[n.Name] = struct{}{}
+		}
 	case *ast.Ident:
 		if n == w.packageNameIdent {
 			return nil // skip the ident corresponding to the package name of this file
@@ -92,11 +98,12 @@ func (w importShadowing) Visit(n ast.Node) ast.Visitor {
 
 		_, isImportName := w.importNames[id]
 		_, alreadySeen := w.alreadySeen[n.Obj]
-		if isImportName && !alreadySeen {
+		_, skipIdent := w.skipIdents[n]
+		if isImportName && !alreadySeen && !skipIdent {
 			w.onFailure(lint.Failure{
 				Confidence: 1,
 				Node:       n,
-				Category:   "namming",
+				Category:   "naming",
 				Failure:    fmt.Sprintf("The name '%s' shadows an import name", id),
 			})
 
diff --git a/vendor/github.com/mgechev/revive/rule/imports-blacklist.go b/vendor/github.com/mgechev/revive/rule/imports-blacklist.go
deleted file mode 100644
index 7106628155ad4c66adf85d3846ae025b629eaebb..0000000000000000000000000000000000000000
--- a/vendor/github.com/mgechev/revive/rule/imports-blacklist.go
+++ /dev/null
@@ -1,77 +0,0 @@
-package rule
-
-import (
-	"fmt"
-	"regexp"
-	"sync"
-
-	"github.com/mgechev/revive/lint"
-)
-
-// ImportsBlacklistRule lints given else constructs.
-type ImportsBlacklistRule struct {
-	blacklist []*regexp.Regexp
-	sync.Mutex
-}
-
-var replaceRegexp = regexp.MustCompile(`/?\*\*/?`)
-
-func (r *ImportsBlacklistRule) configure(arguments lint.Arguments) {
-	r.Lock()
-	defer r.Unlock()
-
-	if r.blacklist == nil {
-		r.blacklist = make([]*regexp.Regexp, 0)
-
-		for _, arg := range arguments {
-			argStr, ok := arg.(string)
-			if !ok {
-				panic(fmt.Sprintf("Invalid argument to the imports-blacklist rule. Expecting a string, got %T", arg))
-			}
-			regStr, err := regexp.Compile(fmt.Sprintf(`(?m)"%s"$`, replaceRegexp.ReplaceAllString(argStr, `(\W|\w)*`)))
-			if err != nil {
-				panic(fmt.Sprintf("Invalid argument to the imports-blacklist rule. Expecting %q to be a valid regular expression, got: %v", argStr, err))
-			}
-			r.blacklist = append(r.blacklist, regStr)
-		}
-	}
-}
-
-func (r *ImportsBlacklistRule) isBlacklisted(path string) bool {
-	for _, regex := range r.blacklist {
-		if regex.MatchString(path) {
-			return true
-		}
-	}
-	return false
-}
-
-// Apply applies the rule to given file.
-func (r *ImportsBlacklistRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
-	r.configure(arguments)
-
-	var failures []lint.Failure
-
-	if file.IsTest() {
-		return failures // skip, test file
-	}
-
-	for _, is := range file.AST.Imports {
-		path := is.Path
-		if path != nil && r.isBlacklisted(path.Value) {
-			failures = append(failures, lint.Failure{
-				Confidence: 1,
-				Failure:    "should not use the following blacklisted import: " + path.Value,
-				Node:       is,
-				Category:   "imports",
-			})
-		}
-	}
-
-	return failures
-}
-
-// Name returns the rule name.
-func (*ImportsBlacklistRule) Name() string {
-	return "imports-blacklist"
-}
diff --git a/vendor/github.com/mgechev/revive/rule/imports-blocklist.go b/vendor/github.com/mgechev/revive/rule/imports-blocklist.go
new file mode 100644
index 0000000000000000000000000000000000000000..431066403a1ad97e3c2662389c62a5049651601a
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/imports-blocklist.go
@@ -0,0 +1,73 @@
+package rule
+
+import (
+	"fmt"
+	"regexp"
+	"sync"
+
+	"github.com/mgechev/revive/lint"
+)
+
+// ImportsBlocklistRule disallows importing blocklisted packages.
+type ImportsBlocklistRule struct {
+	blocklist []*regexp.Regexp
+	sync.Mutex
+}
+
+var replaceImportRegexp = regexp.MustCompile(`/?\*\*/?`)
+
+func (r *ImportsBlocklistRule) configure(arguments lint.Arguments) {
+	r.Lock()
+	defer r.Unlock()
+
+	if r.blocklist == nil {
+		r.blocklist = make([]*regexp.Regexp, 0)
+
+		for _, arg := range arguments {
+			argStr, ok := arg.(string)
+			if !ok {
+				panic(fmt.Sprintf("Invalid argument to the imports-blocklist rule. Expecting a string, got %T", arg))
+			}
+			regStr, err := regexp.Compile(fmt.Sprintf(`(?m)"%s"$`, replaceImportRegexp.ReplaceAllString(argStr, `(\W|\w)*`)))
+			if err != nil {
+				panic(fmt.Sprintf("Invalid argument to the imports-blocklist rule. Expecting %q to be a valid regular expression, got: %v", argStr, err))
+			}
+			r.blocklist = append(r.blocklist, regStr)
+		}
+	}
+}
+
+func (r *ImportsBlocklistRule) isBlocklisted(path string) bool {
+	for _, regex := range r.blocklist {
+		if regex.MatchString(path) {
+			return true
+		}
+	}
+	return false
+}
+
+// Apply applies the rule to given file.
+func (r *ImportsBlocklistRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+	r.configure(arguments)
+
+	var failures []lint.Failure
+
+	for _, is := range file.AST.Imports {
+		path := is.Path
+		if path != nil && r.isBlocklisted(path.Value) {
+			failures = append(failures, lint.Failure{
+				Confidence: 1,
+				Failure:    "should not use the following blocklisted import: " + path.Value,
+				Node:       is,
+				Category:   "imports",
+			})
+		}
+	}
+
+	return failures
+}
+
+// Name returns the rule name.
+func (*ImportsBlocklistRule) Name() string {
+	return "imports-blocklist"
+}
diff --git a/vendor/github.com/mgechev/revive/rule/indent-error-flow.go b/vendor/github.com/mgechev/revive/rule/indent-error-flow.go
index e455801c47e502b1de88f19442ba10b84614390b..294ceef84210b96db316af90c2e137485ec9556e 100644
--- a/vendor/github.com/mgechev/revive/rule/indent-error-flow.go
+++ b/vendor/github.com/mgechev/revive/rule/indent-error-flow.go
@@ -1,9 +1,7 @@
 package rule
 
 import (
-	"go/ast"
-	"go/token"
-
+	"github.com/mgechev/revive/internal/ifelse"
 	"github.com/mgechev/revive/lint"
 )
 
@@ -11,16 +9,8 @@ import (
 type IndentErrorFlowRule struct{}
 
 // Apply applies the rule to given file.
-func (*IndentErrorFlowRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
-	var failures []lint.Failure
-
-	onFailure := func(failure lint.Failure) {
-		failures = append(failures, failure)
-	}
-
-	w := lintElse{make(map[*ast.IfStmt]bool), onFailure}
-	ast.Walk(w, file.AST)
-	return failures
+func (e *IndentErrorFlowRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
+	return ifelse.Apply(e, file.AST, ifelse.TargetElse, args)
 }
 
 // Name returns the rule name.
@@ -28,51 +18,28 @@ func (*IndentErrorFlowRule) Name() string {
 	return "indent-error-flow"
 }
 
-type lintElse struct {
-	ignore    map[*ast.IfStmt]bool
-	onFailure func(lint.Failure)
-}
-
-func (w lintElse) Visit(node ast.Node) ast.Visitor {
-	ifStmt, ok := node.(*ast.IfStmt)
-	if !ok || ifStmt.Else == nil {
-		return w
-	}
-	if w.ignore[ifStmt] {
-		if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok {
-			w.ignore[elseif] = true
-		}
-		return w
+// CheckIfElse evaluates the rule against an ifelse.Chain.
+func (*IndentErrorFlowRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) {
+	if !chain.If.Deviates() {
+		// this rule only applies if the if-block deviates control flow
+		return
 	}
-	if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok {
-		w.ignore[elseif] = true
-		return w
-	}
-	if _, ok := ifStmt.Else.(*ast.BlockStmt); !ok {
-		// only care about elses without conditions
-		return w
-	}
-	if len(ifStmt.Body.List) == 0 {
-		return w
+
+	if chain.HasPriorNonDeviating {
+		// if we de-indent the "else" block then a previous branch
+		// might flow into it, affecting program behaviour
+		return
 	}
-	shortDecl := false // does the if statement have a ":=" initialization statement?
-	if ifStmt.Init != nil {
-		if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE {
-			shortDecl = true
-		}
+
+	if !chain.If.Returns() {
+		// avoid overlapping with superfluous-else
+		return
 	}
-	lastStmt := ifStmt.Body.List[len(ifStmt.Body.List)-1]
-	if _, ok := lastStmt.(*ast.ReturnStmt); ok {
-		extra := ""
-		if shortDecl {
-			extra = " (move short variable declaration to its own line if necessary)"
-		}
-		w.onFailure(lint.Failure{
-			Confidence: 1,
-			Node:       ifStmt.Else,
-			Category:   "indent",
-			Failure:    "if block ends with a return statement, so drop this else and outdent its block" + extra,
-		})
+
+	if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.Else.HasDecls) {
+		// avoid increasing variable scope
+		return
 	}
-	return w
+
+	return "if block ends with a return statement, so drop this else and outdent its block"
 }
diff --git a/vendor/github.com/mgechev/revive/rule/line-length-limit.go b/vendor/github.com/mgechev/revive/rule/line-length-limit.go
index 9e512c1c2c6cb3f68b0d17aee5340461dabd41db..1a414f691479c002773517c6a8505f44f07a8ed1 100644
--- a/vendor/github.com/mgechev/revive/rule/line-length-limit.go
+++ b/vendor/github.com/mgechev/revive/rule/line-length-limit.go
@@ -18,10 +18,16 @@ type LineLengthLimitRule struct {
 	sync.Mutex
 }
 
+const defaultLineLengthLimit = 80
+
 func (r *LineLengthLimitRule) configure(arguments lint.Arguments) {
 	r.Lock()
+	defer r.Unlock()
 	if r.max == 0 {
-		checkNumberOfArguments(1, arguments, r.Name())
+		if len(arguments) < 1 {
+			r.max = defaultLineLengthLimit
+			return
+		}
 
 		max, ok := arguments[0].(int64) // Alt. non panicking version
 		if !ok || max < 0 {
@@ -30,7 +36,6 @@ func (r *LineLengthLimitRule) configure(arguments lint.Arguments) {
 
 		r.max = int(max)
 	}
-	r.Unlock()
 }
 
 // Apply applies the rule to given file.
diff --git a/vendor/github.com/mgechev/revive/rule/max-control-nesting.go b/vendor/github.com/mgechev/revive/rule/max-control-nesting.go
new file mode 100644
index 0000000000000000000000000000000000000000..c4eb361937115a01f6088b3b287ae7616da6f701
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/max-control-nesting.go
@@ -0,0 +1,128 @@
+package rule
+
+import (
+	"fmt"
+	"go/ast"
+	"sync"
+
+	"github.com/mgechev/revive/lint"
+)
+
+// MaxControlNestingRule limits the nesting of control structures.
+type MaxControlNestingRule struct {
+	max int64
+	sync.Mutex
+}
+
+const defaultMaxControlNesting = 5
+
+// Apply applies the rule to given file.
+func (r *MaxControlNestingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+	r.configure(arguments)
+
+	var failures []lint.Failure
+
+	fileAst := file.AST
+
+	walker := &lintMaxControlNesting{
+		onFailure: func(failure lint.Failure) {
+			failures = append(failures, failure)
+		},
+		max: int(r.max),
+	}
+
+	ast.Walk(walker, fileAst)
+
+	return failures
+}
+
+// Name returns the rule name.
+func (*MaxControlNestingRule) Name() string {
+	return "max-control-nesting"
+}
+
+type lintMaxControlNesting struct {
+	max             int
+	onFailure       func(lint.Failure)
+	nestingLevelAcc int
+	lastCtrlStmt    ast.Node
+}
+
+func (w *lintMaxControlNesting) Visit(n ast.Node) ast.Visitor {
+	if w.nestingLevelAcc > w.max { // we are visiting a node beyond the max nesting level
+		w.onFailure(lint.Failure{
+			Failure:    fmt.Sprintf("control flow nesting exceeds %d", w.max),
+			Confidence: 1,
+			Node:       w.lastCtrlStmt,
+			Category:   "complexity",
+		})
+		return nil // stop visiting deeper
+	}
+
+	switch v := n.(type) {
+	case *ast.IfStmt:
+		w.lastCtrlStmt = v
+		w.walkControlledBlock(v.Body) // "then" branch block
+		if v.Else != nil {
+			w.walkControlledBlock(v.Else) // "else" branch block
+		}
+		return nil // stop re-visiting nesting blocks (already visited by w.walkControlledBlock)
+
+	case *ast.ForStmt:
+		w.lastCtrlStmt = v
+		w.walkControlledBlock(v.Body)
+		return nil // stop re-visiting nesting blocks (already visited by w.walkControlledBlock)
+
+	case *ast.CaseClause: // switch case
+		w.lastCtrlStmt = v
+		for _, s := range v.Body { // visit each statement in the case clause
+			w.walkControlledBlock(s)
+		}
+		return nil // stop re-visiting nesting blocks (already visited by w.walkControlledBlock)
+
+	case *ast.CommClause: // select case
+		w.lastCtrlStmt = v
+		for _, s := range v.Body { // visit each statement in the select case clause
+			w.walkControlledBlock(s)
+		}
+		return nil // stop re-visiting nesting blocks (already visited by w.walkControlledBlock)
+
+	case *ast.FuncLit:
+		walker := &lintMaxControlNesting{
+			onFailure: w.onFailure,
+			max:       w.max,
+		}
+		ast.Walk(walker, v.Body)
+		return nil
+	}
+
+	return w
+}
+
+func (w *lintMaxControlNesting) walkControlledBlock(b ast.Node) {
+	oldNestingLevel := w.nestingLevelAcc
+	w.nestingLevelAcc++
+	ast.Walk(w, b)
+	w.nestingLevelAcc = oldNestingLevel
+}
+
+func (r *MaxControlNestingRule) configure(arguments lint.Arguments) {
+	r.Lock()
+	defer r.Unlock()
+	if r.max >= 1 {
+		return // max already set
+	}
+
+	if len(arguments) < 1 {
+		r.max = defaultMaxControlNesting
+		return
+	}
+
+	checkNumberOfArguments(1, arguments, r.Name())
+
+	max, ok := arguments[0].(int64) // Alt. non panicking version
+	if !ok {
+		panic(`invalid value passed as argument number to the "max-control-nesting" rule`)
+	}
+	r.max = max
+}
diff --git a/vendor/github.com/mgechev/revive/rule/max-public-structs.go b/vendor/github.com/mgechev/revive/rule/max-public-structs.go
index e39f49c698ab4f026280ae182aac48c969ff4833..25be3e676fb16526e084f189fb1a872aea09f479 100644
--- a/vendor/github.com/mgechev/revive/rule/max-public-structs.go
+++ b/vendor/github.com/mgechev/revive/rule/max-public-structs.go
@@ -14,9 +14,17 @@ type MaxPublicStructsRule struct {
 	sync.Mutex
 }
 
+const defaultMaxPublicStructs = 5
+
 func (r *MaxPublicStructsRule) configure(arguments lint.Arguments) {
 	r.Lock()
+	defer r.Unlock()
 	if r.max < 1 {
+		if len(arguments) < 1 {
+			r.max = defaultMaxPublicStructs
+			return
+		}
+
 		checkNumberOfArguments(1, arguments, r.Name())
 
 		max, ok := arguments[0].(int64) // Alt. non panicking version
@@ -25,7 +33,6 @@ func (r *MaxPublicStructsRule) configure(arguments lint.Arguments) {
 		}
 		r.max = max
 	}
-	r.Unlock()
 }
 
 // Apply applies the rule to given file.
diff --git a/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go b/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go
index 34e65155706d5f56a973ebe7ed6839967d5eeb8e..e9e64b9a6a1dbaabca8f7260f867f6575f703f73 100644
--- a/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go
+++ b/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go
@@ -78,11 +78,6 @@ func (w lintModifiesValRecRule) Visit(node ast.Node) ast.Visitor {
 					if name == "" || name != receiverName {
 						continue
 					}
-
-					if w.skipType(ast.Expr(e.Sel)) {
-						continue
-					}
-
 				case *ast.Ident: // receiver := ...
 					if e.Name != receiverName {
 						continue
@@ -97,7 +92,7 @@ func (w lintModifiesValRecRule) Visit(node ast.Node) ast.Visitor {
 			return false
 		}
 
-		assignmentsToReceiver := pick(n.Body, fselect, nil)
+		assignmentsToReceiver := pick(n.Body, fselect)
 
 		for _, assignment := range assignmentsToReceiver {
 			w.onFailure(lint.Failure{
diff --git a/vendor/github.com/mgechev/revive/rule/optimize-operands-order.go b/vendor/github.com/mgechev/revive/rule/optimize-operands-order.go
index 88928bb98cb0e280c1a932af35f07f1dc2cc72af..841bde56c065fccc4fb3013188e887fa7fc3b40b 100644
--- a/vendor/github.com/mgechev/revive/rule/optimize-operands-order.go
+++ b/vendor/github.com/mgechev/revive/rule/optimize-operands-order.go
@@ -54,13 +54,13 @@ func (w lintOptimizeOperandsOrderlExpr) Visit(node ast.Node) ast.Visitor {
 	}
 
 	// check if the left sub-expression contains a function call
-	nodes := pick(binExpr.X, isCaller, nil)
+	nodes := pick(binExpr.X, isCaller)
 	if len(nodes) < 1 {
 		return w
 	}
 
 	// check if the right sub-expression does not contain a function call
-	nodes = pick(binExpr.Y, isCaller, nil)
+	nodes = pick(binExpr.Y, isCaller)
 	if len(nodes) > 0 {
 		return w
 	}
diff --git a/vendor/github.com/mgechev/revive/rule/redundant-import-alias.go b/vendor/github.com/mgechev/revive/rule/redundant-import-alias.go
new file mode 100644
index 0000000000000000000000000000000000000000..fa5281f24baa2a7f271d1185ec8e80f28d5cc50e
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/redundant-import-alias.go
@@ -0,0 +1,52 @@
+package rule
+
+import (
+	"fmt"
+	"go/ast"
+	"strings"
+
+	"github.com/mgechev/revive/lint"
+)
+
+// RedundantImportAlias lints import aliases that are identical to the last element of the import path.
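+// For example, an import like `bar "example.com/pkg/bar"` (a hypothetical path) is flagged because the alias "bar" is redundant.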
+type RedundantImportAlias struct{}
+
+// Apply applies the rule to given file.
+func (*RedundantImportAlias) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+	var failures []lint.Failure
+
+	for _, imp := range file.AST.Imports {
+		if imp.Name == nil {
+			continue
+		}
+
+		if getImportPackageName(imp) == imp.Name.Name {
+			failures = append(failures, lint.Failure{
+				Confidence: 1,
+				Failure:    fmt.Sprintf("Import alias \"%s\" is redundant", imp.Name.Name),
+				Node:       imp,
+				Category:   "imports",
+			})
+		}
+	}
+
+	return failures
+}
+
+// Name returns the rule name.
+func (*RedundantImportAlias) Name() string {
+	return "redundant-import-alias"
+}
+
+func getImportPackageName(imp *ast.ImportSpec) string {
+	const pathSep = "/"
+	const strDelim = `"`
+
+	path := imp.Path.Value
+	i := strings.LastIndex(path, pathSep)
+	if i == -1 {
+		return strings.Trim(path, strDelim)
+	}
+
+	return strings.Trim(path[i+1:], strDelim)
+}
diff --git a/vendor/github.com/mgechev/revive/rule/string-format.go b/vendor/github.com/mgechev/revive/rule/string-format.go
index 0e30ebf8b3d3861c894a19ee71b5843c00a380e5..70edf7387c9c91d657d49b65ae984c855a8948c2 100644
--- a/vendor/github.com/mgechev/revive/rule/string-format.go
+++ b/vendor/github.com/mgechev/revive/rule/string-format.go
@@ -38,7 +38,7 @@ func (*StringFormatRule) Name() string {
 // ParseArgumentsTest is a public wrapper around w.parseArguments used for testing. Returns the error message provided to panic, or nil if no error was encountered
 func (StringFormatRule) ParseArgumentsTest(arguments lint.Arguments) *string {
 	w := lintStringFormatRule{}
-	c := make(chan interface{})
+	c := make(chan any)
 	// Parse the arguments in a goroutine, defer a recover() call, return the error encountered (or nil if there was no error)
 	go func() {
 		defer func() {
@@ -101,8 +101,8 @@ func (w *lintStringFormatRule) parseArguments(arguments lint.Arguments) {
 	}
 }
 
-func (w lintStringFormatRule) parseArgument(argument interface{}, ruleNum int) (scope stringFormatSubruleScope, regex *regexp.Regexp, negated bool, errorMessage string) {
-	g, ok := argument.([]interface{}) // Cast to generic slice first
+func (w lintStringFormatRule) parseArgument(argument any, ruleNum int) (scope stringFormatSubruleScope, regex *regexp.Regexp, negated bool, errorMessage string) {
+	g, ok := argument.([]any) // Cast to generic slice first
 	if !ok {
 		w.configError("argument is not a slice", ruleNum, 0)
 	}
@@ -211,10 +211,14 @@ func (lintStringFormatRule) getCallName(call *ast.CallExpr) (callName string, ok
 	if selector, ok := call.Fun.(*ast.SelectorExpr); ok {
 		// Scoped function call
 		scope, ok := selector.X.(*ast.Ident)
-		if !ok {
-			return "", false
+		if ok {
+			return scope.Name + "." + selector.Sel.Name, true
+		}
+		// Scoped function call inside structure
+		recv, ok := selector.X.(*ast.SelectorExpr)
+		if ok {
+			return recv.Sel.Name + "." + selector.Sel.Name, true
 		}
-		return scope.Name + "." + selector.Sel.Name, true
 	}
 
 	return "", false
diff --git a/vendor/github.com/mgechev/revive/rule/struct-tag.go b/vendor/github.com/mgechev/revive/rule/struct-tag.go
index d1c8056aa0564f524287e9feb99095ce93b61158..f6ee47a731d8aff4afffa7b0221c0d522c91ec1b 100644
--- a/vendor/github.com/mgechev/revive/rule/struct-tag.go
+++ b/vendor/github.com/mgechev/revive/rule/struct-tag.go
@@ -140,7 +140,7 @@ func (lintStructTagRule) getTagName(tag *structtag.Tag) string {
 				return strings.TrimPrefix(option, "name=")
 			}
 		}
-		return "" //protobuf tag lacks 'name' option
+		return "" // protobuf tag lacks 'name' option
 	default:
 		return tag.Name
 	}
diff --git a/vendor/github.com/mgechev/revive/rule/superfluous-else.go b/vendor/github.com/mgechev/revive/rule/superfluous-else.go
index a9e4380c90c1936f7fb09e9106890f5e969cb702..2aa1b6b2ca7df773e693a5fcb671d22040a89df9 100644
--- a/vendor/github.com/mgechev/revive/rule/superfluous-else.go
+++ b/vendor/github.com/mgechev/revive/rule/superfluous-else.go
@@ -2,9 +2,7 @@ package rule
 
 import (
 	"fmt"
-	"go/ast"
-	"go/token"
-
+	"github.com/mgechev/revive/internal/ifelse"
 	"github.com/mgechev/revive/lint"
 )
 
@@ -12,27 +10,8 @@ import (
 type SuperfluousElseRule struct{}
 
 // Apply applies the rule to given file.
-func (*SuperfluousElseRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
-	var failures []lint.Failure
-	onFailure := func(failure lint.Failure) {
-		failures = append(failures, failure)
-	}
-
-	branchingFunctions := map[string]map[string]bool{
-		"os": {"Exit": true},
-		"log": {
-			"Fatal":   true,
-			"Fatalf":  true,
-			"Fatalln": true,
-			"Panic":   true,
-			"Panicf":  true,
-			"Panicln": true,
-		},
-	}
-
-	w := lintSuperfluousElse{make(map[*ast.IfStmt]bool), onFailure, branchingFunctions}
-	ast.Walk(w, file.AST)
-	return failures
+func (e *SuperfluousElseRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
+	return ifelse.Apply(e, file.AST, ifelse.TargetElse, args)
 }
 
 // Name returns the rule name.
@@ -40,75 +19,28 @@ func (*SuperfluousElseRule) Name() string {
 	return "superfluous-else"
 }
 
-type lintSuperfluousElse struct {
-	ignore             map[*ast.IfStmt]bool
-	onFailure          func(lint.Failure)
-	branchingFunctions map[string]map[string]bool
-}
-
-func (w lintSuperfluousElse) Visit(node ast.Node) ast.Visitor {
-	ifStmt, ok := node.(*ast.IfStmt)
-	if !ok || ifStmt.Else == nil {
-		return w
-	}
-	if w.ignore[ifStmt] {
-		if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok {
-			w.ignore[elseif] = true
-		}
-		return w
-	}
-	if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok {
-		w.ignore[elseif] = true
-		return w
-	}
-	if _, ok := ifStmt.Else.(*ast.BlockStmt); !ok {
-		// only care about elses without conditions
-		return w
-	}
-	if len(ifStmt.Body.List) == 0 {
-		return w
-	}
-	shortDecl := false // does the if statement have a ":=" initialization statement?
-	if ifStmt.Init != nil {
-		if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE {
-			shortDecl = true
-		}
-	}
-	extra := ""
-	if shortDecl {
-		extra = " (move short variable declaration to its own line if necessary)"
+// CheckIfElse evaluates the rule against an ifelse.Chain.
+func (*SuperfluousElseRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) {
+	if !chain.If.Deviates() {
+		// this rule only applies if the if-block deviates control flow
+		return
 	}
 
-	lastStmt := ifStmt.Body.List[len(ifStmt.Body.List)-1]
-	switch stmt := lastStmt.(type) {
-	case *ast.BranchStmt:
-		tok := stmt.Tok.String()
-		if tok != "fallthrough" {
-			w.onFailure(newFailure(ifStmt.Else, "if block ends with a "+tok+" statement, so drop this else and outdent its block"+extra))
-		}
-	case *ast.ExprStmt:
-		if ce, ok := stmt.X.(*ast.CallExpr); ok { // it's a function call
-			if fc, ok := ce.Fun.(*ast.SelectorExpr); ok {
-				if id, ok := fc.X.(*ast.Ident); ok {
-					fn := fc.Sel.Name
-					pkg := id.Name
-					if w.branchingFunctions[pkg][fn] { // it's a call to a branching function
-						w.onFailure(
-							newFailure(ifStmt.Else, fmt.Sprintf("if block ends with call to %s.%s function, so drop this else and outdent its block%s", pkg, fn, extra)))
-					}
-				}
-			}
-		}
+	if chain.HasPriorNonDeviating {
+		// if we de-indent the "else" block then a previous branch
+		// might flow into it, affecting program behaviour
+		return
 	}
 
-	return w
-}
+	if chain.If.Returns() {
+		// avoid overlapping with indent-error-flow
+		return
+	}
 
-func newFailure(node ast.Node, msg string) lint.Failure {
-	return lint.Failure{
-		Confidence: 1,
-		Node:       node,
-		Category:   "indent",
-		Failure:    msg,
+	if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.Else.HasDecls) {
+		// avoid increasing variable scope
+		return
 	}
+
+	return fmt.Sprintf("if block ends with %v, so drop this else and outdent its block", chain.If.LongString())
 }
diff --git a/vendor/github.com/mgechev/revive/rule/time-equal.go b/vendor/github.com/mgechev/revive/rule/time-equal.go
index 72ecf26fe438c277e8388fefe7e2df3c9c3373d6..3b85e18a8e5efb82872c00eca4c5803e22d940d9 100644
--- a/vendor/github.com/mgechev/revive/rule/time-equal.go
+++ b/vendor/github.com/mgechev/revive/rule/time-equal.go
@@ -60,9 +60,9 @@ func (l *lintTimeEqual) Visit(node ast.Node) ast.Visitor {
 	var failure string
 	switch expr.Op {
 	case token.EQL:
-		failure = fmt.Sprintf("use %s.Equal(%s) instead of %q operator", expr.X, expr.Y, expr.Op)
+		failure = fmt.Sprintf("use %s.Equal(%s) instead of %q operator", gofmt(expr.X), gofmt(expr.Y), expr.Op)
 	case token.NEQ:
-		failure = fmt.Sprintf("use !%s.Equal(%s) instead of %q operator", expr.X, expr.Y, expr.Op)
+		failure = fmt.Sprintf("use !%s.Equal(%s) instead of %q operator", gofmt(expr.X), gofmt(expr.Y), expr.Op)
 	}
 
 	l.onFailure(lint.Failure{
diff --git a/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go b/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go
new file mode 100644
index 0000000000000000000000000000000000000000..df27743cbdeec1b655aa1af5d49dbf5a27d6c0d8
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go
@@ -0,0 +1,194 @@
+package rule
+
+import (
+	"fmt"
+	"go/ast"
+	"sync"
+
+	"github.com/mgechev/revive/lint"
+)
+
+const (
+	ruleUTAMessagePanic   = "type assertion will panic if not matched"
+	ruleUTAMessageIgnored = "type assertion result ignored"
+)
+
+// UncheckedTypeAssertionRule lints missing or ignored `ok`-value in dynamic type casts.
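+// For example, `v := x.(string)` may panic and is reported; `v, _ := x.(string)` is reported
+// for ignoring the result (unless the acceptIgnoredAssertionResult option is set).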
+type UncheckedTypeAssertionRule struct {
+	sync.Mutex
+	acceptIgnoredAssertionResult bool
+	configured                   bool
+}
+
+func (u *UncheckedTypeAssertionRule) configure(arguments lint.Arguments) {
+	u.Lock()
+	defer u.Unlock()
+
+	if len(arguments) == 0 || u.configured {
+		return
+	}
+
+	u.configured = true
+
+	args, ok := arguments[0].(map[string]any)
+	if !ok {
+		panic("Unable to get arguments. Expected object of key-value-pairs.")
+	}
+
+	for k, v := range args {
+		switch k {
+		case "acceptIgnoredAssertionResult":
+			u.acceptIgnoredAssertionResult, ok = v.(bool)
+			if !ok {
+				panic(fmt.Sprintf("Unable to parse argument '%s'. Expected boolean.", k))
+			}
+		default:
+			panic(fmt.Sprintf("Unknown argument: %s", k))
+		}
+	}
+}
+
+// Apply applies the rule to given file.
+func (u *UncheckedTypeAssertionRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
+	u.configure(args)
+
+	var failures []lint.Failure
+
+	walker := &lintUnchekedTypeAssertion{
+		onFailure: func(failure lint.Failure) {
+			failures = append(failures, failure)
+		},
+		acceptIgnoredTypeAssertionResult: u.acceptIgnoredAssertionResult,
+	}
+
+	ast.Walk(walker, file.AST)
+
+	return failures
+}
+
+// Name returns the rule name.
+func (*UncheckedTypeAssertionRule) Name() string {
+	return "unchecked-type-assertion"
+}
+
+type lintUnchekedTypeAssertion struct {
+	onFailure                        func(lint.Failure)
+	acceptIgnoredTypeAssertionResult bool
+}
+
+func isIgnored(e ast.Expr) bool {
+	ident, ok := e.(*ast.Ident)
+	if !ok {
+		return false
+	}
+
+	return ident.Name == "_"
+}
+
+func isTypeSwitch(e *ast.TypeAssertExpr) bool {
+	return e.Type == nil
+}
+
+func (w *lintUnchekedTypeAssertion) requireNoTypeAssert(expr ast.Expr) {
+	e, ok := expr.(*ast.TypeAssertExpr)
+	if ok && !isTypeSwitch(e) {
+		w.addFailure(e, ruleUTAMessagePanic)
+	}
+}
+
+func (w *lintUnchekedTypeAssertion) handleIfStmt(n *ast.IfStmt) {
+	ifCondition, ok := n.Cond.(*ast.BinaryExpr)
+	if ok {
+		w.requireNoTypeAssert(ifCondition.X)
+		w.requireNoTypeAssert(ifCondition.Y)
+	}
+}
+
+func (w *lintUnchekedTypeAssertion) requireBinaryExpressionWithoutTypeAssertion(expr ast.Expr) {
+	binaryExpr, ok := expr.(*ast.BinaryExpr)
+	if ok {
+		w.requireNoTypeAssert(binaryExpr.X)
+		w.requireNoTypeAssert(binaryExpr.Y)
+	}
+}
+
+func (w *lintUnchekedTypeAssertion) handleCaseClause(n *ast.CaseClause) {
+	for _, expr := range n.List {
+		w.requireNoTypeAssert(expr)
+		w.requireBinaryExpressionWithoutTypeAssertion(expr)
+	}
+}
+
+func (w *lintUnchekedTypeAssertion) handleSwitch(n *ast.SwitchStmt) {
+	w.requireNoTypeAssert(n.Tag)
+	w.requireBinaryExpressionWithoutTypeAssertion(n.Tag)
+}
+
+func (w *lintUnchekedTypeAssertion) handleAssignment(n *ast.AssignStmt) {
+	if len(n.Rhs) == 0 {
+		return
+	}
+
+	e, ok := n.Rhs[0].(*ast.TypeAssertExpr)
+	if !ok || e == nil {
+		return
+	}
+
+	if isTypeSwitch(e) {
+		return
+	}
+
+	if len(n.Lhs) == 1 {
+		w.addFailure(e, ruleUTAMessagePanic)
+	}
+
+	if !w.acceptIgnoredTypeAssertionResult && len(n.Lhs) == 2 && isIgnored(n.Lhs[1]) {
+		w.addFailure(e, ruleUTAMessageIgnored)
+	}
+}
+
+// handles "return foo(.*bar)" - one of them is enough to fail as golang does not forward the type cast tuples in return statements
+func (w *lintUnchekedTypeAssertion) handleReturn(n *ast.ReturnStmt) {
+	for _, r := range n.Results {
+		w.requireNoTypeAssert(r)
+	}
+}
+
+func (w *lintUnchekedTypeAssertion) handleRange(n *ast.RangeStmt) {
+	w.requireNoTypeAssert(n.X)
+}
+
+func (w *lintUnchekedTypeAssertion) handleChannelSend(n *ast.SendStmt) {
+	w.requireNoTypeAssert(n.Value)
+}
+
+func (w *lintUnchekedTypeAssertion) Visit(node ast.Node) ast.Visitor {
+	switch n := node.(type) {
+	case *ast.RangeStmt:
+		w.handleRange(n)
+	case *ast.SwitchStmt:
+		w.handleSwitch(n)
+	case *ast.ReturnStmt:
+		w.handleReturn(n)
+	case *ast.AssignStmt:
+		w.handleAssignment(n)
+	case *ast.IfStmt:
+		w.handleIfStmt(n)
+	case *ast.CaseClause:
+		w.handleCaseClause(n)
+	case *ast.SendStmt:
+		w.handleChannelSend(n)
+	}
+
+	return w
+}
+
+func (w *lintUnchekedTypeAssertion) addFailure(n *ast.TypeAssertExpr, why string) {
+	s := fmt.Sprintf("type cast result is unchecked in %v - %s", gofmt(n), why)
+	w.onFailure(lint.Failure{
+		Category:   "bad practice",
+		Confidence: 1,
+		Node:       n,
+		Failure:    s,
+	})
+}
diff --git a/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go b/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go
index bad9075338abc5a558a7dbb8d7a9aacd69a5179b..9ac2648cddd77b9fb2da233473fb24e6b82e56f9 100644
--- a/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go
+++ b/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go
@@ -45,8 +45,9 @@ type funcStatus struct {
 }
 
 type lintUnconditionalRecursionRule struct {
-	onFailure   func(lint.Failure)
-	currentFunc *funcStatus
+	onFailure     func(lint.Failure)
+	currentFunc   *funcStatus
+	inGoStatement bool
 }
 
 // Visit will traverse the file AST.
@@ -68,9 +69,13 @@ func (w lintUnconditionalRecursionRule) Visit(node ast.Node) ast.Visitor {
 		default:
 			rec = n.Recv.List[0].Names[0]
 		}
-
 		w.currentFunc = &funcStatus{&funcDesc{rec, n.Name}, false}
 	case *ast.CallExpr:
+		// check if the call arguments contain a recursive call
+		for _, arg := range n.Args {
+			ast.Walk(w, arg)
+		}
+
 		var funcID *ast.Ident
 		var selector *ast.Ident
 		switch c := n.Fun.(type) {
@@ -84,6 +89,9 @@ func (w lintUnconditionalRecursionRule) Visit(node ast.Node) ast.Visitor {
 				return nil
 			}
 			funcID = c.Sel
+		case *ast.FuncLit:
+			ast.Walk(w, c.Body) // analyze the body of the function literal
+			return nil
 		default:
 			return w
 		}
@@ -93,11 +101,12 @@ func (w lintUnconditionalRecursionRule) Visit(node ast.Node) ast.Visitor {
 			w.currentFunc.funcDesc.equal(&funcDesc{selector, funcID}) {
 			w.onFailure(lint.Failure{
 				Category:   "logic",
-				Confidence: 1,
+				Confidence: 0.8,
 				Node:       n,
 				Failure:    "unconditional recursive call",
 			})
 		}
+		return nil
 	case *ast.IfStmt:
 		w.updateFuncStatus(n.Body)
 		w.updateFuncStatus(n.Else)
@@ -115,16 +124,21 @@ func (w lintUnconditionalRecursionRule) Visit(node ast.Node) ast.Visitor {
 		w.updateFuncStatus(n.Body)
 		return nil
 	case *ast.GoStmt:
-		for _, a := range n.Call.Args {
-			ast.Walk(w, a) // check if arguments have a recursive call
-		}
-		return nil // recursive async call is not an issue
+		w.inGoStatement = true
+		ast.Walk(w, n.Call)
+		w.inGoStatement = false
+		return nil
 	case *ast.ForStmt:
 		if n.Cond != nil {
 			return nil
 		}
 		// unconditional loop
 		return w
+	case *ast.FuncLit:
+		if w.inGoStatement {
+			return w
+		}
+		return nil // literal call (closure) is not necessarily an issue
 	}
 
 	return w
@@ -181,5 +195,5 @@ func (lintUnconditionalRecursionRule) hasControlExit(node ast.Node) bool {
 		return false
 	}
 
-	return len(pick(node, isExit, nil)) != 0
+	return len(pick(node, isExit)) != 0
 }
diff --git a/vendor/github.com/mgechev/revive/rule/unhandled-error.go b/vendor/github.com/mgechev/revive/rule/unhandled-error.go
index 32a5fe48bc28af144d4cd9f42792bbdb710fc997..ce6fa38641663e088663b999bef90e9ec7cc91a6 100644
--- a/vendor/github.com/mgechev/revive/rule/unhandled-error.go
+++ b/vendor/github.com/mgechev/revive/rule/unhandled-error.go
@@ -119,7 +119,7 @@ func (w *lintUnhandledErrors) addFailure(n *ast.CallExpr) {
 		Category:   "bad practice",
 		Confidence: 1,
 		Node:       n,
-		Failure:    fmt.Sprintf("Unhandled error in call to function %v", gofmt(n.Fun)),
+		Failure:    fmt.Sprintf("Unhandled error in call to function %v", name),
 	})
 }
 
diff --git a/vendor/github.com/mgechev/revive/rule/unused-param.go b/vendor/github.com/mgechev/revive/rule/unused-param.go
index ab3da453eed951de0b49090c21aaa08166816a9e..4b04ee916b2e0f09194c5982ccf9faf9c615c924 100644
--- a/vendor/github.com/mgechev/revive/rule/unused-param.go
+++ b/vendor/github.com/mgechev/revive/rule/unused-param.go
@@ -3,22 +3,72 @@ package rule
 import (
 	"fmt"
 	"go/ast"
+	"regexp"
+	"sync"
 
 	"github.com/mgechev/revive/lint"
 )
 
 // UnusedParamRule lints unused params in functions.
-type UnusedParamRule struct{}
+type UnusedParamRule struct {
+	configured bool
+	// regex to check if some name is valid for unused parameter, "^_$" by default
+	allowRegex *regexp.Regexp
+	failureMsg string
+	sync.Mutex
+}
+
+func (r *UnusedParamRule) configure(args lint.Arguments) {
+	r.Lock()
+	defer r.Unlock()
+
+	if r.configured {
+		return
+	}
+	r.configured = true
+
+	// args is an array by default, but we expect a single structure (map) inside it rather than arrays or primitives;
+	// this is more compatible with the JSON nature of configurations
+	var allowedRegexStr string
+	if len(args) == 0 {
+		allowedRegexStr = "^_$"
+		r.failureMsg = "parameter '%s' seems to be unused, consider removing or renaming it as _"
+	} else {
+		// Arguments = [{}]
+		options := args[0].(map[string]any)
+		// Arguments = [{allowedRegex="^_"}]
+
+		if allowedRegexParam, ok := options["allowRegex"]; ok {
+			allowedRegexStr, ok = allowedRegexParam.(string)
+			if !ok {
+				panic(fmt.Errorf("error configuring %s rule: allowedRegex is not string but [%T]", r.Name(), allowedRegexParam))
+			}
+		}
+	}
+	var err error
+	r.allowRegex, err = regexp.Compile(allowedRegexStr)
+	if err != nil {
+		panic(fmt.Errorf("error configuring %s rule: allowedRegex is not valid regex [%s]: %v", r.Name(), allowedRegexStr, err))
+	}
+
+	if r.failureMsg == "" {
+		r.failureMsg = "parameter '%s' seems to be unused, consider removing or renaming it to match " + r.allowRegex.String()
+	}
+}
 
 // Apply applies the rule to given file.
-func (*UnusedParamRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+func (r *UnusedParamRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
+	r.configure(args)
 	var failures []lint.Failure
 
 	onFailure := func(failure lint.Failure) {
 		failures = append(failures, failure)
 	}
-
-	w := lintUnusedParamRule{onFailure: onFailure}
+	w := lintUnusedParamRule{
+		onFailure:  onFailure,
+		allowRegex: r.allowRegex,
+		failureMsg: r.failureMsg,
+	}
 
 	ast.Walk(w, file.AST)
 
@@ -31,55 +81,70 @@ func (*UnusedParamRule) Name() string {
 }
 
 type lintUnusedParamRule struct {
-	onFailure func(lint.Failure)
+	onFailure  func(lint.Failure)
+	allowRegex *regexp.Regexp
+	failureMsg string
 }
 
 func (w lintUnusedParamRule) Visit(node ast.Node) ast.Visitor {
+	var (
+		funcType *ast.FuncType
+		funcBody *ast.BlockStmt
+	)
 	switch n := node.(type) {
+	case *ast.FuncLit:
+		funcType = n.Type
+		funcBody = n.Body
 	case *ast.FuncDecl:
-		params := retrieveNamedParams(n.Type.Params)
-		if len(params) < 1 {
-			return nil // skip, func without parameters
-		}
-
 		if n.Body == nil {
 			return nil // skip, is a function prototype
 		}
 
-		// inspect the func body looking for references to parameters
-		fselect := func(n ast.Node) bool {
-			ident, isAnID := n.(*ast.Ident)
+		funcType = n.Type
+		funcBody = n.Body
+	default:
+		return w // skip, not a function
+	}
 
-			if !isAnID {
-				return false
-			}
+	params := retrieveNamedParams(funcType.Params)
+	if len(params) < 1 {
+		return w // skip, func without parameters
+	}
 
-			_, isAParam := params[ident.Obj]
-			if isAParam {
-				params[ident.Obj] = false // mark as used
-			}
+	// inspect the func body looking for references to parameters
+	fselect := func(n ast.Node) bool {
+		ident, isAnID := n.(*ast.Ident)
 
+		if !isAnID {
 			return false
 		}
-		_ = pick(n.Body, fselect, nil)
-
-		for _, p := range n.Type.Params.List {
-			for _, n := range p.Names {
-				if params[n.Obj] {
-					w.onFailure(lint.Failure{
-						Confidence: 1,
-						Node:       n,
-						Category:   "bad practice",
-						Failure:    fmt.Sprintf("parameter '%s' seems to be unused, consider removing or renaming it as _", n.Name),
-					})
-				}
-			}
+
+		_, isAParam := params[ident.Obj]
+		if isAParam {
+			params[ident.Obj] = false // mark as used
 		}
 
-		return nil // full method body already inspected
+		return false
+	}
+	_ = pick(funcBody, fselect)
+
+	for _, p := range funcType.Params.List {
+		for _, n := range p.Names {
+			if w.allowRegex.FindStringIndex(n.Name) != nil {
+				continue
+			}
+			if params[n.Obj] {
+				w.onFailure(lint.Failure{
+					Confidence: 1,
+					Node:       n,
+					Category:   "bad practice",
+					Failure:    fmt.Sprintf(w.failureMsg, n.Name),
+				})
+			}
+		}
 	}
 
-	return w
+	return w // full method body was inspected
 }
 
 func retrieveNamedParams(params *ast.FieldList) map[*ast.Object]bool {
diff --git a/vendor/github.com/mgechev/revive/rule/unused-receiver.go b/vendor/github.com/mgechev/revive/rule/unused-receiver.go
index 2289a517e5f2dc20570318ffc3b397c1d3d7a8ac..715dba33836b9944f5fa8f098ab06f54060c2677 100644
--- a/vendor/github.com/mgechev/revive/rule/unused-receiver.go
+++ b/vendor/github.com/mgechev/revive/rule/unused-receiver.go
@@ -3,22 +3,72 @@ package rule
 import (
 	"fmt"
 	"go/ast"
+	"regexp"
+	"sync"
 
 	"github.com/mgechev/revive/lint"
 )
 
 // UnusedReceiverRule lints unused params in functions.
-type UnusedReceiverRule struct{}
+type UnusedReceiverRule struct {
+	configured bool
+	// regex to check if some name is valid for unused receiver, "^_$" by default
+	allowRegex *regexp.Regexp
+	failureMsg string
+	sync.Mutex
+}
+
+func (r *UnusedReceiverRule) configure(args lint.Arguments) {
+	r.Lock()
+	defer r.Unlock()
+
+	if r.configured {
+		return
+	}
+	r.configured = true
+
+	// args is an array by default, but we expect a single structure (map) inside it rather than arrays or primitives;
+	// this is more compatible with the JSON nature of configurations
+	var allowedRegexStr string
+	if len(args) == 0 {
+		allowedRegexStr = "^_$"
+		r.failureMsg = "method receiver '%s' is not referenced in method's body, consider removing or renaming it as _"
+	} else {
+		// Arguments = [{}]
+		options := args[0].(map[string]any)
+		// Arguments = [{allowedRegex="^_"}]
+
+		if allowedRegexParam, ok := options["allowRegex"]; ok {
+			allowedRegexStr, ok = allowedRegexParam.(string)
+			if !ok {
+				panic(fmt.Errorf("error configuring [unused-receiver] rule: allowedRegex is not string but [%T]", allowedRegexParam))
+			}
+		}
+	}
+	var err error
+	r.allowRegex, err = regexp.Compile(allowedRegexStr)
+	if err != nil {
+		panic(fmt.Errorf("error configuring [unused-receiver] rule: allowedRegex is not valid regex [%s]: %v", allowedRegexStr, err))
+	}
+	if r.failureMsg == "" {
+		r.failureMsg = "method receiver '%s' is not referenced in method's body, consider removing or renaming it to match " + r.allowRegex.String()
+	}
+}
 
 // Apply applies the rule to given file.
-func (*UnusedReceiverRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+func (r *UnusedReceiverRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
+	r.configure(args)
 	var failures []lint.Failure
 
 	onFailure := func(failure lint.Failure) {
 		failures = append(failures, failure)
 	}
 
-	w := lintUnusedReceiverRule{onFailure: onFailure}
+	w := lintUnusedReceiverRule{
+		onFailure:  onFailure,
+		allowRegex: r.allowRegex,
+		failureMsg: r.failureMsg,
+	}
 
 	ast.Walk(w, file.AST)
 
@@ -31,7 +81,9 @@ func (*UnusedReceiverRule) Name() string {
 }
 
 type lintUnusedReceiverRule struct {
-	onFailure func(lint.Failure)
+	onFailure  func(lint.Failure)
+	allowRegex *regexp.Regexp
+	failureMsg string
 }
 
 func (w lintUnusedReceiverRule) Visit(node ast.Node) ast.Visitor {
@@ -51,13 +103,17 @@ func (w lintUnusedReceiverRule) Visit(node ast.Node) ast.Visitor {
 			return nil // the receiver is already named _
 		}
 
+		if w.allowRegex != nil && w.allowRegex.FindStringIndex(recID.Name) != nil {
+			return nil
+		}
+
 		// inspect the func body looking for references to the receiver id
 		fselect := func(n ast.Node) bool {
 			ident, isAnID := n.(*ast.Ident)
 
 			return isAnID && ident.Obj == recID.Obj
 		}
-		refs2recID := pick(n.Body, fselect, nil)
+		refs2recID := pick(n.Body, fselect)
 
 		if len(refs2recID) > 0 {
 			return nil // the receiver is referenced in the func body
@@ -67,7 +123,7 @@ func (w lintUnusedReceiverRule) Visit(node ast.Node) ast.Visitor {
 			Confidence: 1,
 			Node:       recID,
 			Category:   "bad practice",
-			Failure:    fmt.Sprintf("method receiver '%s' is not referenced in method's body, consider removing or renaming it as _", recID.Name),
+			Failure:    fmt.Sprintf(w.failureMsg, recID.Name),
 		})
 
 		return nil // full method body already inspected
diff --git a/vendor/github.com/mgechev/revive/rule/utils.go b/vendor/github.com/mgechev/revive/rule/utils.go
index dca1674ca58a097630832eef4f4e2d7a4aaebc89..5778e7696358fde16adee4061c7f77328eb5027b 100644
--- a/vendor/github.com/mgechev/revive/rule/utils.go
+++ b/vendor/github.com/mgechev/revive/rule/utils.go
@@ -93,21 +93,15 @@ func srcLine(src []byte, p token.Position) string {
 
 // pick yields a list of nodes by picking them from a sub-ast with root node n.
 // Nodes are selected by applying the fselect function
-// f function is applied to each selected node before inserting it in the final result.
-// If f==nil then it defaults to the identity function (ie it returns the node itself)
-func pick(n ast.Node, fselect func(n ast.Node) bool, f func(n ast.Node) []ast.Node) []ast.Node {
+func pick(n ast.Node, fselect func(n ast.Node) bool) []ast.Node {
 	var result []ast.Node
 
 	if n == nil {
 		return result
 	}
 
-	if f == nil {
-		f = func(n ast.Node) []ast.Node { return []ast.Node{n} }
-	}
-
 	onSelect := func(n ast.Node) {
-		result = append(result, f(n)...)
+		result = append(result, n)
 	}
 	p := picker{fselect: fselect, onSelect: onSelect}
 	ast.Walk(p, n)
@@ -158,7 +152,7 @@ func isExprABooleanLit(n ast.Node) (lexeme string, ok bool) {
 }
 
 // gofmt returns a string representation of an AST subtree.
-func gofmt(x interface{}) string {
+func gofmt(x any) string {
 	buf := bytes.Buffer{}
 	fs := token.NewFileSet()
 	printer.Fprint(&buf, fs, x)
diff --git a/vendor/github.com/mgechev/revive/rule/var-naming.go b/vendor/github.com/mgechev/revive/rule/var-naming.go
index fa4a1886424d214a89962e66707d458412d721a5..e91c22dc21edf6ac775da82ac82a337757e68043 100644
--- a/vendor/github.com/mgechev/revive/rule/var-naming.go
+++ b/vendor/github.com/mgechev/revive/rule/var-naming.go
@@ -13,47 +13,55 @@ import (
 
 var anyCapsRE = regexp.MustCompile(`[A-Z]`)
 
+// regexp for constant names like `SOME_CONST`, `SOME_CONST_2`, `X123_3`, `_SOME_PRIVATE_CONST` (#851, #865)
+var upperCaseConstRE = regexp.MustCompile(`^_?[A-Z][A-Z\d]*(_[A-Z\d]+)*$`)
+
 // VarNamingRule lints given else constructs.
 type VarNamingRule struct {
-	configured bool
-	whitelist  []string
-	blacklist  []string
+	configured            bool
+	allowlist             []string
+	blocklist             []string
+	upperCaseConst        bool // if true, allows UPPER_SOME_NAMES for constants
+	skipPackageNameChecks bool
 	sync.Mutex
 }
 
 func (r *VarNamingRule) configure(arguments lint.Arguments) {
 	r.Lock()
-	if !r.configured {
-		if len(arguments) >= 1 {
-			r.whitelist = getList(arguments[0], "whitelist")
-		}
-
-		if len(arguments) >= 2 {
-			r.blacklist = getList(arguments[1], "blacklist")
-		}
-		r.configured = true
+	defer r.Unlock()
+	if r.configured {
+		return
 	}
-	r.Unlock()
-}
-
-// Apply applies the rule to given file.
-func (r *VarNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
-	r.configure(arguments)
 
-	var failures []lint.Failure
+	r.configured = true
+	if len(arguments) >= 1 {
+		r.allowlist = getList(arguments[0], "allowlist")
+	}
 
-	fileAst := file.AST
+	if len(arguments) >= 2 {
+		r.blocklist = getList(arguments[1], "blocklist")
+	}
 
-	walker := lintNames{
-		file:      file,
-		fileAst:   fileAst,
-		whitelist: r.whitelist,
-		blacklist: r.blacklist,
-		onFailure: func(failure lint.Failure) {
-			failures = append(failures, failure)
-		},
+	if len(arguments) >= 3 {
+		// not pretty code: we need to keep compatibility with TOML (which has no mixed array types) and with the new map parameters
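+		// expected shape (derived from the parsing below): Arguments = [allowlist, blocklist, [{upperCaseConst=true, skipPackageNameChecks=true}]]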
+		thirdArgument := arguments[2]
+		asSlice, ok := thirdArgument.([]any)
+		if !ok {
+			panic(fmt.Sprintf("Invalid third argument to the var-naming rule. Expecting a %s of type slice, got %T", "options", arguments[2]))
+		}
+		if len(asSlice) != 1 {
+			panic(fmt.Sprintf("Invalid third argument to the var-naming rule. Expecting a %s of type slice, of len==1, but %d", "options", len(asSlice)))
+		}
+		args, ok := asSlice[0].(map[string]any)
+		if !ok {
+			panic(fmt.Sprintf("Invalid third argument to the var-naming rule. Expecting a %s of type slice, of len==1, with map, but %T", "options", asSlice[0]))
+		}
+		r.upperCaseConst = fmt.Sprint(args["upperCaseConst"]) == "true"
+		r.skipPackageNameChecks = fmt.Sprint(args["skipPackageNameChecks"]) == "true"
 	}
+}
 
+func (r *VarNamingRule) applyPackageCheckRules(walker *lintNames) {
 	// Package names need slightly different handling than other names.
 	if strings.Contains(walker.fileAst.Name.Name, "_") && !strings.HasSuffix(walker.fileAst.Name.Name, "_test") {
 		walker.onFailure(lint.Failure{
@@ -72,6 +80,31 @@ func (r *VarNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.
 		})
 	}
 
+}
+
+// Apply applies the rule to given file.
+func (r *VarNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+	r.configure(arguments)
+
+	var failures []lint.Failure
+
+	fileAst := file.AST
+
+	walker := lintNames{
+		file:      file,
+		fileAst:   fileAst,
+		allowlist: r.allowlist,
+		blocklist: r.blocklist,
+		onFailure: func(failure lint.Failure) {
+			failures = append(failures, failure)
+		},
+		upperCaseConst: r.upperCaseConst,
+	}
+
+	if !r.skipPackageNameChecks {
+		r.applyPackageCheckRules(&walker)
+	}
+
 	ast.Walk(&walker, fileAst)
 
 	return failures
@@ -82,18 +115,18 @@ func (*VarNamingRule) Name() string {
 	return "var-naming"
 }
 
-func checkList(fl *ast.FieldList, thing string, w *lintNames) {
+func (w *lintNames) checkList(fl *ast.FieldList, thing string) {
 	if fl == nil {
 		return
 	}
 	for _, f := range fl.List {
 		for _, id := range f.Names {
-			check(id, thing, w)
+			w.check(id, thing)
 		}
 	}
 }
 
-func check(id *ast.Ident, thing string, w *lintNames) {
+func (w *lintNames) check(id *ast.Ident, thing string) {
 	if id.Name == "_" {
 		return
 	}
@@ -101,6 +134,12 @@ func check(id *ast.Ident, thing string, w *lintNames) {
 		return
 	}
 
+	// #851 upperCaseConst support
+	// if it's const
+	if thing == token.CONST.String() && w.upperCaseConst && upperCaseConstRE.MatchString(id.Name) {
+		return
+	}
+
 	// Handle two common styles from other languages that don't belong in Go.
 	if len(id.Name) >= 5 && allCapsRE.MatchString(id.Name) && strings.Contains(id.Name, "_") {
 		w.onFailure(lint.Failure{
@@ -111,17 +150,8 @@ func check(id *ast.Ident, thing string, w *lintNames) {
 		})
 		return
 	}
-	if len(id.Name) > 2 && id.Name[0] == 'k' && id.Name[1] >= 'A' && id.Name[1] <= 'Z' {
-		should := string(id.Name[1]+'a'-'A') + id.Name[2:]
-		w.onFailure(lint.Failure{
-			Failure:    fmt.Sprintf("don't use leading k in Go names; %s %s should be %s", thing, id.Name, should),
-			Confidence: 0.8,
-			Node:       id,
-			Category:   "naming",
-		})
-	}
 
-	should := lint.Name(id.Name, w.whitelist, w.blacklist)
+	should := lint.Name(id.Name, w.allowlist, w.blocklist)
 	if id.Name == should {
 		return
 	}
@@ -144,11 +174,12 @@ func check(id *ast.Ident, thing string, w *lintNames) {
 }
 
 type lintNames struct {
-	file      *lint.File
-	fileAst   *ast.File
-	onFailure func(lint.Failure)
-	whitelist []string
-	blacklist []string
+	file           *lint.File
+	fileAst        *ast.File
+	onFailure      func(lint.Failure)
+	allowlist      []string
+	blocklist      []string
+	upperCaseConst bool
 }
 
 func (w *lintNames) Visit(n ast.Node) ast.Visitor {
@@ -159,7 +190,7 @@ func (w *lintNames) Visit(n ast.Node) ast.Visitor {
 		}
 		for _, exp := range v.Lhs {
 			if id, ok := exp.(*ast.Ident); ok {
-				check(id, "var", w)
+				w.check(id, "var")
 			}
 		}
 	case *ast.FuncDecl:
@@ -181,31 +212,24 @@ func (w *lintNames) Visit(n ast.Node) ast.Visitor {
 		// not exported in the Go API.
 		// See https://github.com/golang/lint/issues/144.
 		if ast.IsExported(v.Name.Name) || !isCgoExported(v) {
-			check(v.Name, thing, w)
+			w.check(v.Name, thing)
 		}
 
-		checkList(v.Type.Params, thing+" parameter", w)
-		checkList(v.Type.Results, thing+" result", w)
+		w.checkList(v.Type.Params, thing+" parameter")
+		w.checkList(v.Type.Results, thing+" result")
 	case *ast.GenDecl:
 		if v.Tok == token.IMPORT {
 			return w
 		}
-		var thing string
-		switch v.Tok {
-		case token.CONST:
-			thing = "const"
-		case token.TYPE:
-			thing = "type"
-		case token.VAR:
-			thing = "var"
-		}
+
+		thing := v.Tok.String()
 		for _, spec := range v.Specs {
 			switch s := spec.(type) {
 			case *ast.TypeSpec:
-				check(s.Name, thing, w)
+				w.check(s.Name, thing)
 			case *ast.ValueSpec:
 				for _, id := range s.Names {
-					check(id, thing, w)
+					w.check(id, thing)
 				}
 			}
 		}
@@ -217,31 +241,31 @@ func (w *lintNames) Visit(n ast.Node) ast.Visitor {
 			if !ok { // might be an embedded interface name
 				continue
 			}
-			checkList(ft.Params, "interface method parameter", w)
-			checkList(ft.Results, "interface method result", w)
+			w.checkList(ft.Params, "interface method parameter")
+			w.checkList(ft.Results, "interface method result")
 		}
 	case *ast.RangeStmt:
 		if v.Tok == token.ASSIGN {
 			return w
 		}
 		if id, ok := v.Key.(*ast.Ident); ok {
-			check(id, "range var", w)
+			w.check(id, "range var")
 		}
 		if id, ok := v.Value.(*ast.Ident); ok {
-			check(id, "range var", w)
+			w.check(id, "range var")
 		}
 	case *ast.StructType:
 		for _, f := range v.Fields.List {
 			for _, id := range f.Names {
-				check(id, "struct field", w)
+				w.check(id, "struct field")
 			}
 		}
 	}
 	return w
 }
 
-func getList(arg interface{}, argName string) []string {
-	temp, ok := arg.([]interface{})
+func getList(arg any, argName string) []string {
+	temp, ok := arg.([]any)
 	if !ok {
 		panic(fmt.Sprintf("Invalid argument to the var-naming rule. Expecting a %s of type slice with initialisms, got %T", argName, arg))
 	}
diff --git a/vendor/github.com/nishanths/exhaustive/comment.go b/vendor/github.com/nishanths/exhaustive/comment.go
index cc84beaf7bd822f812e8869f66c78e1ff12ce910..123e0181ba837593c76d0aba74a7570ceba26316 100644
--- a/vendor/github.com/nishanths/exhaustive/comment.go
+++ b/vendor/github.com/nishanths/exhaustive/comment.go
@@ -3,45 +3,14 @@ package exhaustive
 import (
 	"go/ast"
 	"go/token"
-	"regexp"
 	"strings"
 )
 
-// For definition of generated file see:
-// http://golang.org/s/generatedcode
-
-var generatedCodeRe = regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`)
-
-func isGeneratedFile(file *ast.File) bool {
-	// NOTE: file.Comments includes file.Doc as well, so no need
-	// to separately check file.Doc.
-	for _, c := range file.Comments {
-		for _, cc := range c.List {
-			// This check handles the "must appear before the first
-			// non-comment, non-blank text in the file" requirement.
-			//
-			// According to https://golang.org/ref/spec#Source_file_organization
-			// the package clause is the first element in a file, which
-			// should make it the first non-comment, non-blank text.
-			if c.Pos() >= file.Package {
-				return false
-			}
-			// According to the docs:
-			//   '\r' has been removed.
-			//   '\n' has been removed for //-style comments
-			// This has also been manually verified.
-			if generatedCodeRe.MatchString(cc.Text) {
-				return true
-			}
-		}
-	}
-
-	return false
-}
-
 const (
-	ignoreComment  = "//exhaustive:ignore"
-	enforceComment = "//exhaustive:enforce"
+	ignoreComment                     = "//exhaustive:ignore"
+	enforceComment                    = "//exhaustive:enforce"
+	ignoreDefaultCaseRequiredComment  = "//exhaustive:ignore-default-case-required"
+	enforceDefaultCaseRequiredComment = "//exhaustive:enforce-default-case-required"
 )
 
 func hasCommentPrefix(comments []*ast.CommentGroup, comment string) bool {
diff --git a/vendor/github.com/nishanths/exhaustive/comment_go121.go b/vendor/github.com/nishanths/exhaustive/comment_go121.go
new file mode 100644
index 0000000000000000000000000000000000000000..a7bbc8881cda30d652e1f672a3ac302f059630aa
--- /dev/null
+++ b/vendor/github.com/nishanths/exhaustive/comment_go121.go
@@ -0,0 +1,11 @@
+//go:build go1.21
+
+package exhaustive
+
+import (
+	"go/ast"
+)
+
+func isGeneratedFile(file *ast.File) bool {
+	return ast.IsGenerated(file)
+}
diff --git a/vendor/github.com/nishanths/exhaustive/comment_pre_go121.go b/vendor/github.com/nishanths/exhaustive/comment_pre_go121.go
new file mode 100644
index 0000000000000000000000000000000000000000..28d2ed493ee5e4e234ba88aeeb6aef7699d5aefd
--- /dev/null
+++ b/vendor/github.com/nishanths/exhaustive/comment_pre_go121.go
@@ -0,0 +1,27 @@
+//go:build !go1.21
+
+package exhaustive
+
+import (
+	"go/ast"
+	"regexp"
+)
+
+// For definition of generated file see:
+// http://golang.org/s/generatedcode
+
+var generatedCodeRe = regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`)
+
+func isGeneratedFile(file *ast.File) bool {
+	for _, c := range file.Comments {
+		for _, cc := range c.List {
+			if cc.Pos() > file.Package {
+				break
+			}
+			if generatedCodeRe.MatchString(cc.Text) {
+				return true
+			}
+		}
+	}
+	return false
+}
diff --git a/vendor/github.com/nishanths/exhaustive/doc.go b/vendor/github.com/nishanths/exhaustive/doc.go
index 8435e5d24552362a5b1e558619371699bb364ce4..a745247db356bf8cb07dda91935aedf7f1db361b 100644
--- a/vendor/github.com/nishanths/exhaustive/doc.go
+++ b/vendor/github.com/nishanths/exhaustive/doc.go
@@ -10,8 +10,8 @@ The Go [language spec] does not have an explicit definition for enums. For
 the purpose of this analyzer, and by convention, an enum type is any named
 type that:
 
-  - has underlying type float, string, or integer (includes byte and rune);
-    and
+  - has an [underlying type] of float, string, or integer (includes byte
+    and rune); and
   - has at least one constant of its type defined in the same [block].
 
 In the example below, Biome is an enum type. The three constants are its
@@ -209,6 +209,7 @@ To ignore specific types, specify the -ignore-enum-types flag:
 	exhaustive -ignore-enum-types '^time\.Duration$|^example\.org/measure\.Unit$'
 
 [language spec]: https://golang.org/ref/spec
+[underlying type]: https://golang.org/ref/spec#Underlying_types
 [block]: https://golang.org/ref/spec#Blocks
 [BasicKind]: https://pkg.go.dev/go/types#BasicKind
 */
diff --git a/vendor/github.com/nishanths/exhaustive/exhaustive.go b/vendor/github.com/nishanths/exhaustive/exhaustive.go
index d67a60c329b15a7de63eee92a108c2e7be8f55ce..013ac47bb762b5500ba958df1530f95a6554c0c6 100644
--- a/vendor/github.com/nishanths/exhaustive/exhaustive.go
+++ b/vendor/github.com/nishanths/exhaustive/exhaustive.go
@@ -19,6 +19,7 @@ func registerFlags() {
 	Analyzer.Flags.BoolVar(&fExplicitExhaustiveMap, ExplicitExhaustiveMapFlag, false, `check map literal only if associated with "//exhaustive:enforce" comment`)
 	Analyzer.Flags.BoolVar(&fCheckGenerated, CheckGeneratedFlag, false, "check generated files")
 	Analyzer.Flags.BoolVar(&fDefaultSignifiesExhaustive, DefaultSignifiesExhaustiveFlag, false, "switch statement is unconditionally exhaustive if it has a default case")
+	Analyzer.Flags.BoolVar(&fDefaultCaseRequired, DefaultCaseRequiredFlag, false, "switch statement requires default case even if exhaustive")
 	Analyzer.Flags.Var(&fIgnoreEnumMembers, IgnoreEnumMembersFlag, "ignore constants matching `regexp`")
 	Analyzer.Flags.Var(&fIgnoreEnumTypes, IgnoreEnumTypesFlag, "ignore types matching `regexp`")
 	Analyzer.Flags.BoolVar(&fPackageScopeOnly, PackageScopeOnlyFlag, false, "only discover enums declared in file-level blocks")
@@ -36,6 +37,7 @@ const (
 	ExplicitExhaustiveMapFlag      = "explicit-exhaustive-map"
 	CheckGeneratedFlag             = "check-generated"
 	DefaultSignifiesExhaustiveFlag = "default-signifies-exhaustive"
+	DefaultCaseRequiredFlag        = "default-case-required"
 	IgnoreEnumMembersFlag          = "ignore-enum-members"
 	IgnoreEnumTypesFlag            = "ignore-enum-types"
 	PackageScopeOnlyFlag           = "package-scope-only"
@@ -52,6 +54,7 @@ var (
 	fExplicitExhaustiveMap      bool
 	fCheckGenerated             bool
 	fDefaultSignifiesExhaustive bool
+	fDefaultCaseRequired        bool
 	fIgnoreEnumMembers          regexpFlag
 	fIgnoreEnumTypes            regexpFlag
 	fPackageScopeOnly           bool
@@ -65,6 +68,7 @@ func resetFlags() {
 	fExplicitExhaustiveMap = false
 	fCheckGenerated = false
 	fDefaultSignifiesExhaustive = false
+	fDefaultCaseRequired = false
 	fIgnoreEnumMembers = regexpFlag{}
 	fIgnoreEnumTypes = regexpFlag{}
 	fPackageScopeOnly = false
@@ -121,6 +125,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
 			conf := switchConfig{
 				explicit:                   fExplicitExhaustiveSwitch,
 				defaultSignifiesExhaustive: fDefaultSignifiesExhaustive,
+				defaultCaseRequired:        fDefaultCaseRequired,
 				checkGenerated:             fCheckGenerated,
 				ignoreConstant:             fIgnoreEnumMembers.re,
 				ignoreType:                 fIgnoreEnumTypes.re,
diff --git a/vendor/github.com/nishanths/exhaustive/switch.go b/vendor/github.com/nishanths/exhaustive/switch.go
index 000ef9886e4de02addd14ca06010dc2a15e7c3aa..d235fbec47c30a31b6a6ca418a11d86bdbb34a3d 100644
--- a/vendor/github.com/nishanths/exhaustive/switch.go
+++ b/vendor/github.com/nishanths/exhaustive/switch.go
@@ -5,6 +5,7 @@ import (
 	"go/ast"
 	"go/types"
 	"regexp"
+	"strings"
 
 	"golang.org/x/tools/go/analysis"
 )
@@ -44,6 +45,7 @@ const (
 	resultNoEnforceComment     = "has no enforce comment"
 	resultEnumMembersAccounted = "required enum members accounted for"
 	resultDefaultCaseSuffices  = "default case satisfies exhaustiveness"
+	resultMissingDefaultCase   = "missing required default case"
 	resultReportedDiagnostic   = "reported diagnostic"
 	resultEnumTypes            = "invalid or empty composing enum types"
 )
@@ -52,11 +54,47 @@ const (
 type switchConfig struct {
 	explicit                   bool
 	defaultSignifiesExhaustive bool
+	defaultCaseRequired        bool
 	checkGenerated             bool
 	ignoreConstant             *regexp.Regexp // can be nil
 	ignoreType                 *regexp.Regexp // can be nil
 }
 
+// There are few possibilities, and often none, so we use a possibly-nil slice
+func userDirectives(comments []*ast.CommentGroup) []string {
+	var directives []string
+	for _, c := range comments {
+		for _, cc := range c.List {
+			// The order matters here: we always want to check the longest first.
+			for _, d := range []string{
+				enforceDefaultCaseRequiredComment,
+				ignoreDefaultCaseRequiredComment,
+				enforceComment,
+				ignoreComment,
+			} {
+				if strings.HasPrefix(cc.Text, d) {
+					directives = append(directives, d)
+					// The break here is important: once we associate a comment
+					// with a particular (longest-possible) directive, we don't want
+					// to map to another!
+					break
+				}
+			}
+		}
+	}
+	return directives
+}
+
+// Can be replaced with slices.Contains with go1.21
+func directivesIncludes(directives []string, d string) bool {
+	for _, ud := range directives {
+		if ud == d {
+			return true
+		}
+	}
+	return false
+}
+
 // switchChecker returns a node visitor that checks exhaustiveness of
 // enum switch statements for the supplied pass, and reports
 // diagnostics. The node visitor expects only *ast.SwitchStmt nodes.
@@ -80,17 +118,27 @@ func switchChecker(pass *analysis.Pass, cfg switchConfig, generated boolCache, c
 		sw := n.(*ast.SwitchStmt)
 
 		switchComments := comments.get(pass.Fset, file)[sw]
-		if !cfg.explicit && hasCommentPrefix(switchComments, ignoreComment) {
+		uDirectives := userDirectives(switchComments)
+		if !cfg.explicit && directivesIncludes(uDirectives, ignoreComment) {
 			// Skip checking of this switch statement due to ignore
 			// comment. Still return true because there may be nested
 			// switch statements that are not to be ignored.
 			return true, resultIgnoreComment
 		}
-		if cfg.explicit && !hasCommentPrefix(switchComments, enforceComment) {
+		if cfg.explicit && !directivesIncludes(uDirectives, enforceComment) {
 			// Skip checking of this switch statement due to missing
 			// enforce comment.
 			return true, resultNoEnforceComment
 		}
+		requireDefaultCase := cfg.defaultCaseRequired
+		if directivesIncludes(uDirectives, ignoreDefaultCaseRequiredComment) {
+			requireDefaultCase = false
+		}
+		if directivesIncludes(uDirectives, enforceDefaultCaseRequiredComment) {
+			// We have "if" instead of "else if" here in case of conflicting ignore/enforce directives.
+			// In that case, because this is second, we will default to enforcing.
+			requireDefaultCase = true
+		}
 
 		if sw.Tag == nil {
 			return true, resultNoSwitchTag // never possible for valid Go program?
@@ -114,13 +162,21 @@ func switchChecker(pass *analysis.Pass, cfg switchConfig, generated boolCache, c
 			checkl.add(e.typ, e.members, pass.Pkg == e.typ.Pkg())
 		}
 
-		def := analyzeSwitchClauses(sw, pass.TypesInfo, checkl.found)
+		defaultCaseExists := analyzeSwitchClauses(sw, pass.TypesInfo, checkl.found)
+		if !defaultCaseExists && requireDefaultCase {
+			// Even if the switch explicitly enumerates all the
+			// enum values, the user has still required all switches
+			// to have a default case. We check this first to avoid
+			// early-outs
+			pass.Report(makeMissingDefaultDiagnostic(sw, dedupEnumTypes(toEnumTypes(es))))
+			return true, resultMissingDefaultCase
+		}
 		if len(checkl.remaining()) == 0 {
 			// All enum members accounted for.
 			// Nothing to report.
 			return true, resultEnumMembersAccounted
 		}
-		if def && cfg.defaultSignifiesExhaustive {
+		if defaultCaseExists && cfg.defaultSignifiesExhaustive {
 			// Though enum members are not accounted for, the
 			// existence of the default case signifies
 			// exhaustiveness.  So don't report.
@@ -167,3 +223,14 @@ func makeSwitchDiagnostic(sw *ast.SwitchStmt, enumTypes []enumType, missing map[
 		),
 	}
 }
+
+func makeMissingDefaultDiagnostic(sw *ast.SwitchStmt, enumTypes []enumType) analysis.Diagnostic {
+	return analysis.Diagnostic{
+		Pos: sw.Pos(),
+		End: sw.End(),
+		Message: fmt.Sprintf(
+			"missing default case in switch of type %s",
+			diagnosticEnumTypes(enumTypes),
+		),
+	}
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/Makefile b/vendor/github.com/nunnatsa/ginkgolinter/Makefile
index e8efae583efae95b07e65219e6c24e015d245e18..586633006a22310313d34e83deda9895e6fbb8b6 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/Makefile
+++ b/vendor/github.com/nunnatsa/ginkgolinter/Makefile
@@ -5,9 +5,12 @@ HASH_FLAG := -X github.com/nunnatsa/ginkgolinter/version.gitHash=$(COMMIT_HASH)
 
 BUILD_ARGS := -ldflags "$(VERSION_FLAG) $(HASH_FLAG)"
 
-build:
+build: unit-test
 	go build $(BUILD_ARGS) -o ginkgolinter ./cmd/ginkgolinter
 
+unit-test:
+	go test ./...
+
 build-for-windows:
 	GOOS=windows GOARCH=amd64 go build $(BUILD_ARGS) -o bin/ginkgolinter-amd64.exe ./cmd/ginkgolinter
 
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/README.md b/vendor/github.com/nunnatsa/ginkgolinter/README.md
index 4193be63dcb108e65787f3509490641498d263bf..977cec903e051ca7c513c04f09e7543cc47e5a38 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/README.md
+++ b/vendor/github.com/nunnatsa/ginkgolinter/README.md
@@ -44,7 +44,7 @@ It is not enabled by default, though. There are two ways to run ginkgolinter wit
      enable:
        - ginkgolinter
    ```
-## Linter Checks
+## Linter Rules
 The linter checks the ginkgo and gomega assertions in golang test code. Gomega may be used together with ginkgo tests, 
 For example:
 ```go
@@ -177,14 +177,107 @@ var _ = Describe("checking something", Focus, func() {
 })
 ```
 
-These container, or the `Focus` spec, must not be part of the final source code, and should only be used locally by the developer.
+These containers, or the `Focus` spec, must not be part of the final source code, and should only be used locally by the 
+developer.
 
 ***This rule is disabled by default***. Use the `--forbid-focus-container=true` command line flag to enable it.  
 
+### Comparing values from different types [BUG]
 
+The `Equal` and the `BeIdentical` matchers also check the type, not only the value.
+    
+The following code will fail at runtime:
+```go
+x := 5 // x is int
+Expect(x).Should(Equal(uint(5))) // x and uint(5) have different types
+```
+When using negative checks, it's even worse, because we get a false positive:
+```go
+x := 5
+Expect(x).ShouldNot(Equal(uint(5)))
+```
+
+The linter suggests two options to solve this warning: either compare with the same type, e.g. 
+using casting, or use the `BeEquivalentTo` matcher.
+
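+For example, both of these rewrites satisfy the linter (a minimal sketch):
+```go
+x := 5
+Expect(uint(x)).Should(Equal(uint(5)))    // compare values of the same type (cast)
+Expect(x).Should(BeEquivalentTo(uint(5))) // or use the BeEquivalentTo matcher
+```
+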
+The linter can't guess which solution is best in each case, so it won't auto-fix this warning.
+
+To suppress this warning entirely, use the `--suppress-type-compare-assertion=true` command line parameter. 
+
+To suppress a specific file or line, use the `// ginkgo-linter:ignore-type-compare-warning` comment (see [below](#suppress-warning-from-the-code))
+
+### Wrong Usage of the `MatchError` gomega Matcher [BUG]
+The `MatchError` gomega matcher asserts an error value (and that it's not nil).
+There are four valid formats for using this Matcher:
+* error value; e.g. `Expect(err).To(MatchError(anotherErr))`
+* string, to be equal to the output of the `Error()` method; e.g. `Expect(err).To(MatchError("Not Found"))`
+* A gomega matcher that asserts strings; e.g. `Expect(err).To(MatchError(ContainSubstring("Found")))`
+* [from v0.29.0] a function that receives a single error parameter and returns a single boolean value. 
+  In this format, an additional single string parameter, with the function description, is also required; e.g.
+  `Expect(err).To(MatchError(isNotFound, "is the error a not-found error"))`
+
+These four formats are checked at runtime, but sometimes it's too late. ginkgolinter performs a static analysis and so it
+will find these issues at build time.
+
+ginkgolinter checks the following:
+* That the first parameter is one of the four options above.
+* That there are no additional parameters passed to the matcher; e.g.
+  `MatchError(isNotFoundFunc, "a valid description", "not used string")`. In this case, the matcher won't fail at run 
+  time, but the additional parameters are not used and are silently ignored.
+* If the first parameter is a function with the format of `func(error)bool`, ginkgolinter makes sure that the second 
+  parameter exists and its type is string.
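+
+For instance, the linter flags the following call (reusing the `isNotFound` helper from the example above), because the
+required description string is missing:
+```go
+Expect(err).To(MatchError(isNotFound)) // second (description) argument is missing
+```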
+
+### Async timing interval: timeout is shorter than polling interval [BUG]
+***Note***: Only applied when the `suppress-async-assertion` flag is **not set** *and* the `validate-async-intervals` 
+flag **is** set.
+
+***Note***: This rule works on a best-effort basis. It can't find many cases, like constants defined in another
+package, or when variables are used.
+
+The timeout and polling intervals may be passed as optional arguments to the `Eventually` or `Consistently` functions, or
+using the `WithTimeout` or `Within` methods (timeout), and the `WithPolling` or `ProbeEvery` methods (polling).
+
+This rule checks that the async (`Eventually` or `Consistently`) timeout duration is not shorter than the polling interval.
+
+For example:
+   ```go
+   Eventually(aFunc).WithTimeout(500 * time.Millisecond).WithPolling(10 * time.Second).Should(Succeed())
+   ```
+
+This mistake is more likely to happen when using the old format:
+   ```go
+   Eventually(aFunc, 500 * time.Millisecond /*timeout*/, 10 * time.Second /*polling*/).Should(Succeed())
+   ```
+
+### Avoid Spec Pollution: Don't Initialize Variables in Container Nodes [BUG/STYLE]:
+***Note***: Only applied when the `--forbid-spec-pollution=true` flag is set (disabled by default).
+
+According to [ginkgo documentation](https://onsi.github.io/ginkgo/#avoid-spec-pollution-dont-initialize-variables-in-container-nodes), 
+no variable should be assigned within a container node (`Describe`, `Context`, `When` or their `F`, `P` or `X` forms)
+  
+For example:
+```go
+var _ = Describe("description", func(){
+    var x = 10
+    ...
+})
+```
+
+Instead, use `BeforeEach()`; e.g.
+```go
+var _ = Describe("description", func (){
+    var x int
+	
+    BeforeEach(func (){
+        x = 10
+    })
+    ...
+})
+```
 
 ### Wrong Length Assertion [STYLE]
-The linter finds assertion of the golang built-in `len` function, with all kind of matchers, while there are already gomega matchers for these usecases; We want to assert the item, rather than its length.
+The linter finds assertions of the golang built-in `len` function, with all kinds of matchers, while there are already
+gomega matchers for these use cases; we want to assert the item, rather than its length.
 
 There are several wrong patterns:
 ```go
@@ -214,11 +307,26 @@ The output of the linter,when finding issues, looks like this:
 ./testdata/src/a/a.go:18:5: ginkgo-linter: wrong length assertion; consider using `Expect("").Should(BeEmpty())` instead
 ./testdata/src/a/a.go:22:5: ginkgo-linter: wrong length assertion; consider using `Expect("").Should(BeEmpty())` instead
 ```
+
+### Wrong Cap Assertion [STYLE]
+The linter finds assertions of the golang built-in `cap` function, with all kinds of matchers, while there are already
+gomega matchers for these use cases; we want to assert the item, rather than its capacity.
+
+There are several wrong patterns:
+```go
+Expect(cap(x)).To(Equal(0)) // should be: Expect(x).To(HaveCap(0))
+Expect(cap(x)).To(BeZero()) // should be: Expect(x).To(HaveCap(0))
+Expect(cap(x)).To(BeNumerically(">", 0)) // should be: Expect(x).ToNot(HaveCap(0))
+Expect(cap(x)).To(BeNumerically("==", 2)) // should be: Expect(x).To(HaveCap(2))
+Expect(cap(x)).To(BeNumerically("!=", 3)) // should be: Expect(x).ToNot(HaveCap(3))
+```
+
 #### Use the `HaveLen(0)` matcher [STYLE]
 The linter will also warn about the `HaveLen(0)` matcher, and will suggest replacing it with `BeEmpty()`.
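+
+For example, an assertion like this triggers the warning:
+```go
+Expect(x).To(HaveLen(0)) // should be: Expect(x).To(BeEmpty())
+```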
 
 ### Wrong `nil` Assertion [STYLE]
-The linter finds assertion of the comparison to nil, with all kind of matchers, instead of using the existing `BeNil()` matcher; We want to assert the item, rather than a comparison result.
+The linter finds assertions of the comparison to nil, with all kinds of matchers, instead of using the existing `BeNil()`
+matcher; we want to assert the item, rather than a comparison result.
 
 There are several wrong patterns:
 
@@ -310,18 +418,78 @@ Expect(x1 == c1).Should(BeTrue()) // ==> Expect(x1).Should(Equal(c1))
 Expect(c1 == x1).Should(BeTrue()) // ==> Expect(x1).Should(Equal(c1))
 ```
 
+### Don't Allow Using `Expect` with `Should` or `ShouldNot` [STYLE]
+This optional rule forces the usage of the `Expect` method only with the `To`, `ToNot` or `NotTo` 
+assertion methods; e.g.
+```go
+Expect("abc").Should(HaveLen(3)) // => Expect("abc").To(HaveLen(3))
+Expect("abc").ShouldNot(BeEmpty()) // => Expect("abc").ToNot(BeEmpty())
+```
+This rule supports auto-fixing.
+
+***This rule is disabled by default***. Use the `--force-expect-to=true` command line flag to enable it.
+
+### Async timing interval: multiple timeout or polling intervals [STYLE]
+***Note***: Only applied when the `suppress-async-assertion` flag is **not set** *and* the `validate-async-intervals`
+flag **is** set.
+
+The timeout and polling intervals may be passed as optional arguments to the `Eventually` or `Consistently` functions, or
+using the `WithTimeout` or `Within` methods (timeout), and the `WithPolling` or `ProbeEvery` methods (polling).
+
+The linter checks that there is at most one polling argument and at most one timeout argument.
+
+For example:
+
+```go
+// both WithTimeout() and Within()
+Eventually(aFunc).WithTimeout(time.Second * 10).Within(time.Second * 10).WithPolling(time.Millisecond * 500).Should(BeTrue())
+// both a polling argument and the WithPolling() method
+Eventually(aFunc, time.Second*10, time.Millisecond * 500).WithPolling(time.Millisecond * 500).Should(BeTrue())
+```
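+
+In both cases, keeping a single timeout and a single polling definition resolves the warning; for example:
+```go
+Eventually(aFunc).WithTimeout(time.Second * 10).WithPolling(time.Millisecond * 500).Should(BeTrue())
+```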
+
+### Async timing interval: non-time.Duration intervals [STYLE]
+***Note***: Only applied when the `suppress-async-assertion` flag is **not set** *and* the `validate-async-intervals`
+flag **is** set.
+
+gomega supports a few formats for timeout and polling intervals, when using the old format (the last two parameters of Eventually and Consistently):
+* a `time.Duration` value
+* any kind of numeric value (int(8/16/32/64), uint(8/16/32/64) or float(32/64)), interpreted as the number of seconds
+* a duration string like `"12s"`
+
+The linter triggers a warning for any duration value that is not of the `time.Duration` type, assuming that this is
+the desired type, given the type of the argument of the newer `WithTimeout`, `WithPolling`, `Within` and `ProbeEvery`
+methods.
+
+For example:
+   ```go
+   Eventually(func() bool { return true }, "1s").Should(BeTrue())
+   Eventually(context.Background(), func() bool { return true }, time.Second*60, float64(2)).Should(BeTrue())
+   ```
+
+This rule offers a limited auto fix: for integer values or integer constants, the linter will suggest multiplying the
+value by `time.Second`; e.g.
+```go
+const polling = 1
+Eventually(aFunc, 5, polling)
+```
+will be changed to:
+```go
+Eventually(aFunc, time.Second*5, time.Second*polling)
+```
 ## Suppress the linter
 ### Suppress warning from command line
-* Use the `--suppress-len-assertion=true` flag to suppress the wrong length assertion warning
+* Use the `--suppress-len-assertion=true` flag to suppress the wrong length and cap assertions warning
 * Use the `--suppress-nil-assertion=true` flag to suppress the wrong nil assertion warning
 * Use the `--suppress-err-assertion=true` flag to suppress the wrong error assertion warning
 * Use the `--suppress-compare-assertion=true` flag to suppress the wrong comparison assertion warning
 * Use the `--suppress-async-assertion=true` flag to suppress the function call in async assertion warning
+* Use the `--forbid-focus-container=true` flag to activate the focus container check (deactivated by default)
+* Use the `--suppress-type-compare-assertion=true` flag to suppress the type compare assertion warning
 * Use the `--allow-havelen-0=true` flag to avoid warnings about `HaveLen(0)`; Note: this parameter is only supported from
   command line, and not from a comment.
 
 ### Suppress warning from the code
-To suppress the wrong length assertion warning, add a comment with (only)
+To suppress the wrong length and cap assertions warning, add a comment with (only)
 
 `ginkgo-linter:ignore-len-assert-warning`. 
 
@@ -345,6 +513,10 @@ To supress the focus container warning, add a comment with (only)
 
 `ginkgo-linter:ignore-focus-container-warning`
 
+To suppress the different type comparison, add a comment with (only)
+
+`ginkgo-linter:ignore-type-compare-warning`
+
 Notice that this comment will not work for an anonymous variable container like
 ```go
 // ginkgo-linter:ignore-focus-container-warning (not working!!)
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go b/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go
new file mode 100644
index 0000000000000000000000000000000000000000..edff57acd18921e414cbabccf4098d8af1edcabb
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go
@@ -0,0 +1,58 @@
+package ginkgolinter
+
+import (
+	"flag"
+	"fmt"
+
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/nunnatsa/ginkgolinter/linter"
+	"github.com/nunnatsa/ginkgolinter/types"
+	"github.com/nunnatsa/ginkgolinter/version"
+)
+
+// NewAnalyzerWithConfig returns an Analyzer.
+func NewAnalyzerWithConfig(config *types.Config) *analysis.Analyzer {
+	theLinter := linter.NewGinkgoLinter(config)
+
+	return &analysis.Analyzer{
+		Name: "ginkgolinter",
+		Doc:  fmt.Sprintf(doc, version.Version()),
+		Run:  theLinter.Run,
+	}
+}
+
+// NewAnalyzer returns an Analyzer - the package interface with nogo
+func NewAnalyzer() *analysis.Analyzer {
+	config := &types.Config{
+		SuppressLen:     false,
+		SuppressNil:     false,
+		SuppressErr:     false,
+		SuppressCompare: false,
+		ForbidFocus:     false,
+		AllowHaveLen0:   false,
+		ForceExpectTo:   false,
+	}
+
+	a := NewAnalyzerWithConfig(config)
+
+	var ignored bool
+	a.Flags.Init("ginkgolinter", flag.ExitOnError)
+	a.Flags.Var(&config.SuppressLen, "suppress-len-assertion", "Suppress warning for wrong length assertions")
+	a.Flags.Var(&config.SuppressNil, "suppress-nil-assertion", "Suppress warning for wrong nil assertions")
+	a.Flags.Var(&config.SuppressErr, "suppress-err-assertion", "Suppress warning for wrong error assertions")
+	a.Flags.Var(&config.SuppressCompare, "suppress-compare-assertion", "Suppress warning for wrong comparison assertions")
+	a.Flags.Var(&config.SuppressAsync, "suppress-async-assertion", "Suppress warning for function call in async assertion, like Eventually")
+	a.Flags.Var(&config.ValidateAsyncIntervals, "validate-async-intervals", "best effort validation of async intervals (timeout and polling); ignored if the suppress-async-assertion flag is true")
+	a.Flags.Var(&config.SuppressTypeCompare, "suppress-type-compare-assertion", "Suppress warning for comparing values from different types, like int32 and uint32")
+	a.Flags.Var(&config.AllowHaveLen0, "allow-havelen-0", "Do not warn for HaveLen(0); default = false")
+	a.Flags.Var(&config.ForceExpectTo, "force-expect-to", "force using `Expect` with `To`, `ToNot` or `NotTo`. reject using `Expect` with `Should` or `ShouldNot`; default = false (not forced)")
+	a.Flags.BoolVar(&ignored, "suppress-focus-container", true, "Suppress warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt. Deprecated and ignored: use --forbid-focus-container instead")
+	a.Flags.Var(&config.ForbidFocus, "forbid-focus-container", "trigger a warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt; default = false.")
+	a.Flags.Var(&config.ForbidSpecPollution, "forbid-spec-pollution", "trigger a warning for variable assignments in ginkgo containers like Describe, Context and When, instead of in BeforeEach(); default = false.")
+
+	return a
+}
+
+// Analyzer is the interface to go_vet
+var Analyzer = NewAnalyzer()
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/doc.go b/vendor/github.com/nunnatsa/ginkgolinter/doc.go
new file mode 100644
index 0000000000000000000000000000000000000000..dd9ecf58a810e8a4e96f76aa0d6ef08ee8c72301
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/doc.go
@@ -0,0 +1,99 @@
+package ginkgolinter
+
+const doc = `enforces standards of using ginkgo and gomega
+
+or
+       ginkgolinter version
+
+version: %s
+
+currently, the linter searches for following:
+* trigger a warning when using Eventually or Consistently with a function call. This is in order to prevent the case when 
+  using a function call instead of a function. Function call returns a value only once, and so the original value
+  is tested again and again and is never changed. [Bug]
+
+* trigger a warning when comparing a pointer to a value. [Bug]
+
+* trigger a warning for missing assertion method: [Bug]
+	Eventually(checkSomething)
+
+* trigger a warning when a ginkgo focus container (FDescribe, FContext, FWhen or FIt) is found. [Bug]
+
+* validate the MatchError gomega matcher [Bug]
+
+* trigger a warning when using the Equal or the BeIdentical matcher with two different types, as these matchers will
+  fail in runtime.
+
+* async timing interval: timeout is shorter than polling interval [Bug]
+For example:
+	Eventually(aFunc).WithTimeout(500 * time.Millisecond).WithPolling(10 * time.Second).Should(Succeed())
+This will probably happen when using the old format:
+	Eventually(aFunc, 500 * time.Millisecond, 10 * time.Second).Should(Succeed())
+
+* reject variable assignments in ginkgo containers [Bug/Style]:
+For example:
+	var _ = Describe("description", func(){
+		var x = 10
+	})
+
+Should use BeforeEach instead; e.g.
+	var _ = Describe("description", func(){
+		var x int
+		BeforeEach(func(){
+			x = 10
+		})
+	})
+
+* wrong length assertions. We want to assert the item rather than its length. [Style]
+For example:
+	Expect(len(x)).Should(Equal(1))
+This should be replaced with:
+	Expect(x).Should(HaveLen(1))
+	
+* wrong cap assertions. We want to assert the item rather than its cap. [Style]
+For example:
+	Expect(cap(x)).Should(Equal(1))
+This should be replaced with:
+	Expect(x).Should(HaveCap(1))
+	
+* wrong nil assertions. We want to assert the item rather than a comparison result. [Style]
+For example:
+	Expect(x == nil).Should(BeTrue())
+This should be replaced with:
+	Expect(x).Should(BeNil())
+
+* wrong error assertions. For example: [Style]
+	Expect(err == nil).Should(BeTrue())
+This should be replaced with:
+	Expect(err).ShouldNot(HaveOccurred())
+
+* wrong boolean comparison, for example: [Style]
+	Expect(x == 8).Should(BeTrue())
+This should be replaced with:
+	Expect(x).Should(Equal(8))
+
+* replaces Equal(true/false) with BeTrue()/BeFalse() [Style]
+
+* replaces HaveLen(0) with BeEmpty() [Style]
+
+* replaces Expect(...).Should(...) with Expect(...).To() [Style]
+
+* async timing interval: multiple timeout or polling interval [Style]
+For example:
+	Eventually(context.Background(), func() bool { return true }, time.Second*10).WithTimeout(time.Second * 10).WithPolling(time.Millisecond * 500).Should(BeTrue())
+	Eventually(context.Background(), func() bool { return true }, time.Second*10).Within(time.Second * 10).WithPolling(time.Millisecond * 500).Should(BeTrue())
+	Eventually(func() bool { return true }, time.Second*10, 500*time.Millisecond).WithPolling(time.Millisecond * 500).Should(BeTrue())
+	Eventually(func() bool { return true }, time.Second*10, 500*time.Millisecond).ProbeEvery(time.Millisecond * 500).Should(BeTrue())
+
+* async timing interval: non-time.Duration intervals [Style]
+gomega supports a few formats for timeout and polling intervals, when using the old format (the last two parameters of Eventually and Consistently):
+  * time.Duration
+  * any kind of numeric value, as number of seconds
+  * duration string like "12s"
+The linter triggers a warning for any duration value that is not of the time.Duration type, assuming that this is
+the desired type, given the type of the argument of the newer "WithTimeout", "WithPolling", "Within" and "ProbeEvery" 
+methods. 
+For example:
+	Eventually(context.Background(), func() bool { return true }, "1s").Should(BeTrue())
+	Eventually(context.Background(), func() bool { return true }, time.Second*60, 15).Should(BeTrue())
+`
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/ginkgohandler/handler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go
similarity index 72%
rename from vendor/github.com/nunnatsa/ginkgolinter/ginkgohandler/handler.go
rename to vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go
index c0829c46955fc970283e780e0ea345c868fe8f78..f10d8318402e2c18f39e400258b09f1377137e01 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/ginkgohandler/handler.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go
@@ -15,6 +15,7 @@ const (
 // in imported with "." name, custom name or without any name.
 type Handler interface {
 	GetFocusContainerName(*ast.CallExpr) (bool, *ast.Ident)
+	IsWrapContainer(*ast.CallExpr) bool
 	IsFocusSpec(ident ast.Expr) bool
 }
 
@@ -49,6 +50,13 @@ func (h dotHandler) GetFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident)
 	return false, nil
 }
 
+func (h dotHandler) IsWrapContainer(exp *ast.CallExpr) bool {
+	if fun, ok := exp.Fun.(*ast.Ident); ok {
+		return IsWrapContainer(fun.Name)
+	}
+	return false
+}
+
 func (h dotHandler) IsFocusSpec(exp ast.Expr) bool {
 	id, ok := exp.(*ast.Ident)
 	return ok && id.Name == focusSpec
@@ -70,6 +78,16 @@ func (h nameHandler) GetFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident)
 	return false, nil
 }
 
+func (h nameHandler) IsWrapContainer(exp *ast.CallExpr) bool {
+	if sel, ok := exp.Fun.(*ast.SelectorExpr); ok {
+		if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) {
+			return IsWrapContainer(sel.Sel.Name)
+		}
+	}
+	return false
+
+}
+
 func (h nameHandler) IsFocusSpec(exp ast.Expr) bool {
 	if selExp, ok := exp.(*ast.SelectorExpr); ok {
 		if x, ok := selExp.X.(*ast.Ident); ok && x.Name == string(h) {
@@ -88,10 +106,24 @@ func isFocusContainer(name string) bool {
 	return false
 }
 
-func IsContainer(id *ast.Ident) bool {
-	switch id.Name {
-	case "It", "When", "Context", "Describe", "DescribeTable", "Entry":
+func IsContainer(name string) bool {
+	switch name {
+	case "It", "When", "Context", "Describe", "DescribeTable", "Entry",
+		"PIt", "PWhen", "PContext", "PDescribe", "PDescribeTable", "PEntry",
+		"XIt", "XWhen", "XContext", "XDescribe", "XDescribeTable", "XEntry":
+		return true
+	}
+	return isFocusContainer(name)
+}
+
+func IsWrapContainer(name string) bool {
+	switch name {
+	case "When", "Context", "Describe",
+		"FWhen", "FContext", "FDescribe",
+		"PWhen", "PContext", "PDescribe",
+		"XWhen", "XContext", "XDescribe":
 		return true
 	}
-	return isFocusContainer(id.Name)
+
+	return false
 }
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/gomegahandler/handler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go
similarity index 96%
rename from vendor/github.com/nunnatsa/ginkgolinter/gomegahandler/handler.go
rename to vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go
index 0c34cb7c1c2b698639f2eb68473c0a5de4a0b7da..4290e737368d23dea0084f61b917d76fd4c46e8d 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/gomegahandler/handler.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go
@@ -5,6 +5,10 @@ import (
 	"go/token"
 )
 
+const (
+	importPath = `"github.com/onsi/gomega"`
+)
+
 // Handler provide different handling, depend on the way gomega was imported, whether
 // in imported with "." name, custom name or without any name.
 type Handler interface {
@@ -23,7 +27,7 @@ type Handler interface {
 // GetGomegaHandler returns a gomegar handler according to the way gomega was imported in the specific file
 func GetGomegaHandler(file *ast.File) Handler {
 	for _, imp := range file.Imports {
-		if imp.Path.Value != `"github.com/onsi/gomega"` {
+		if imp.Path.Value != importPath {
 			continue
 		}
 
@@ -66,7 +70,12 @@ func (h dotHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) {
 
 // ReplaceFunction replaces the function with another one, for fix suggestions
 func (dotHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) {
-	caller.Fun = newExpr
+	switch f := caller.Fun.(type) {
+	case *ast.Ident:
+		caller.Fun = newExpr
+	case *ast.SelectorExpr:
+		f.Sel = newExpr
+	}
 }
 
 func (dotHandler) getDefFuncName(expr *ast.CallExpr) string {
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go
new file mode 100644
index 0000000000000000000000000000000000000000..dafeacd4ff4401daa6a15c140910125d330a4ecd
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go
@@ -0,0 +1,76 @@
+package interfaces
+
+import (
+	"go/token"
+	gotypes "go/types"
+)
+
+var (
+	errorType         *gotypes.Interface
+	gomegaMatcherType *gotypes.Interface
+)
+
+func init() {
+	errorType = gotypes.Universe.Lookup("error").Type().Underlying().(*gotypes.Interface)
+	gomegaMatcherType = generateTheGomegaMatcherInfType()
+}
+
+// generateTheGomegaMatcherInfType generates a types.Interface instance that represents the
+// GomegaMatcher interface.
+// The original code is (copied from https://github.com/nunnatsa/ginkgolinter/blob/8fdd05eee922578d4699f49d267001c01e0b9f1e/testdata/src/a/vendor/github.com/onsi/gomega/types/types.go)
+//
+//	type GomegaMatcher interface {
+//		Match(actual interface{}) (success bool, err error)
+//		FailureMessage(actual interface{}) (message string)
+//		NegatedFailureMessage(actual interface{}) (message string)
+//	}
+func generateTheGomegaMatcherInfType() *gotypes.Interface {
+	err := gotypes.Universe.Lookup("error").Type()
+	bl := gotypes.Typ[gotypes.Bool]
+	str := gotypes.Typ[gotypes.String]
+	anyType := gotypes.Universe.Lookup("any").Type()
+
+	return gotypes.NewInterfaceType([]*gotypes.Func{
+		// Match(actual interface{}) (success bool, err error)
+		gotypes.NewFunc(token.NoPos, nil, "Match", gotypes.NewSignatureType(
+			nil, nil, nil,
+			gotypes.NewTuple(
+				gotypes.NewVar(token.NoPos, nil, "actual", anyType),
+			),
+			gotypes.NewTuple(
+				gotypes.NewVar(token.NoPos, nil, "", bl),
+				gotypes.NewVar(token.NoPos, nil, "", err),
+			), false),
+		),
+		// FailureMessage(actual interface{}) (message string)
+		gotypes.NewFunc(token.NoPos, nil, "FailureMessage", gotypes.NewSignatureType(
+			nil, nil, nil,
+			gotypes.NewTuple(
+				gotypes.NewVar(token.NoPos, nil, "", anyType),
+			),
+			gotypes.NewTuple(
+				gotypes.NewVar(token.NoPos, nil, "", str),
+			),
+			false),
+		),
+		//NegatedFailureMessage(actual interface{}) (message string)
+		gotypes.NewFunc(token.NoPos, nil, "NegatedFailureMessage", gotypes.NewSignatureType(
+			nil, nil, nil,
+			gotypes.NewTuple(
+				gotypes.NewVar(token.NoPos, nil, "", anyType),
+			),
+			gotypes.NewTuple(
+				gotypes.NewVar(token.NoPos, nil, "", str),
+			),
+			false),
+		),
+	}, nil)
+}
+
+func ImplementsError(t gotypes.Type) bool {
+	return gotypes.Implements(t, errorType)
+}
+
+func ImplementsGomegaMatcher(t gotypes.Type) bool {
+	return gotypes.Implements(t, gomegaMatcherType)
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go
new file mode 100644
index 0000000000000000000000000000000000000000..b8166bdb21594c235bca36ad19197ab52c9653f6
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go
@@ -0,0 +1,285 @@
+package intervals
+
+import (
+	"errors"
+	"go/ast"
+	"go/constant"
+	"go/token"
+	gotypes "go/types"
+	"strconv"
+	"time"
+
+	"golang.org/x/tools/go/analysis"
+
+	"github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+	"github.com/nunnatsa/ginkgolinter/internal/reports"
+)
+
+type noDurationIntervalErr struct {
+	value string
+}
+
+func (err noDurationIntervalErr) Error() string {
+	return "only use time.Duration for timeout and polling in Eventually() or Consistently()"
+}
+
+func CheckIntervals(pass *analysis.Pass, expr *ast.CallExpr, actualExpr *ast.CallExpr, reportBuilder *reports.Builder, handler gomegahandler.Handler, timePkg string, funcIndex int) {
+	var (
+		timeout time.Duration
+		polling time.Duration
+		err     error
+	)
+
+	timeoutOffset := funcIndex + 1
+	if len(actualExpr.Args) > timeoutOffset {
+		timeout, err = getDuration(pass, actualExpr.Args[timeoutOffset], timePkg)
+		if err != nil {
+			suggestFix := false
+			if tryFixIntDuration(expr, err, handler, timePkg, timeoutOffset) {
+				suggestFix = true
+			}
+			reportBuilder.AddIssue(suggestFix, err.Error())
+		}
+		pollingOffset := funcIndex + 2
+		if len(actualExpr.Args) > pollingOffset {
+			polling, err = getDuration(pass, actualExpr.Args[pollingOffset], timePkg)
+			if err != nil {
+				suggestFix := false
+				if tryFixIntDuration(expr, err, handler, timePkg, pollingOffset) {
+					suggestFix = true
+				}
+				reportBuilder.AddIssue(suggestFix, err.Error())
+			}
+		}
+	}
+
+	selExp := expr.Fun.(*ast.SelectorExpr)
+	for {
+		call, ok := selExp.X.(*ast.CallExpr)
+		if !ok {
+			break
+		}
+
+		fun, ok := call.Fun.(*ast.SelectorExpr)
+		if !ok {
+			break
+		}
+
+		switch fun.Sel.Name {
+		case "WithTimeout", "Within":
+			if timeout != 0 {
+				reportBuilder.AddIssue(false, "timeout defined more than once")
+			} else if len(call.Args) == 1 {
+				timeout, err = getDurationFromValue(pass, call.Args[0], timePkg)
+				if err != nil {
+					reportBuilder.AddIssue(false, err.Error())
+				}
+			}
+
+		case "WithPolling", "ProbeEvery":
+			if polling != 0 {
+				reportBuilder.AddIssue(false, "polling defined more than once")
+			} else if len(call.Args) == 1 {
+				polling, err = getDurationFromValue(pass, call.Args[0], timePkg)
+				if err != nil {
+					reportBuilder.AddIssue(false, err.Error())
+				}
+			}
+		}
+
+		selExp = fun
+	}
+
+	if timeout != 0 && polling != 0 && timeout < polling {
+		reportBuilder.AddIssue(false, "timeout must not be shorter than the polling interval")
+	}
+}
+
+func tryFixIntDuration(expr *ast.CallExpr, err error, handler gomegahandler.Handler, timePkg string, offset int) bool {
+	suggestFix := false
+	var durErr noDurationIntervalErr
+	if errors.As(err, &durErr) {
+		if len(durErr.value) > 0 {
+			actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
+			var newArg ast.Expr
+			second := &ast.SelectorExpr{
+				Sel: ast.NewIdent("Second"),
+				X:   ast.NewIdent(timePkg),
+			}
+			if durErr.value == "1" {
+				newArg = second
+			} else {
+				newArg = &ast.BinaryExpr{
+					X:  second,
+					Op: token.MUL,
+					Y:  actualExpr.Args[offset],
+				}
+			}
+			actualExpr.Args[offset] = newArg
+			suggestFix = true
+		}
+	}
+
+	return suggestFix
+}
+
+func getDuration(pass *analysis.Pass, interval ast.Expr, timePkg string) (time.Duration, error) {
+	argType := pass.TypesInfo.TypeOf(interval)
+	if durType, ok := argType.(*gotypes.Named); ok {
+		if durType.Obj().Name() == "Duration" && durType.Obj().Pkg().Name() == "time" {
+			return getDurationFromValue(pass, interval, timePkg)
+		}
+	}
+
+	value := ""
+	switch val := interval.(type) {
+	case *ast.BasicLit:
+		if val.Kind == token.INT {
+			value = val.Value
+		}
+	case *ast.Ident:
+		i, err := getConstDuration(pass, val, timePkg)
+		if err != nil || i == 0 {
+			return 0, nil
+		}
+		value = val.Name
+	}
+
+	return 0, noDurationIntervalErr{value: value}
+}
+
+func getDurationFromValue(pass *analysis.Pass, interval ast.Expr, timePkg string) (time.Duration, error) {
+	switch dur := interval.(type) {
+	case *ast.SelectorExpr:
+		ident, ok := dur.X.(*ast.Ident)
+		if ok {
+			if ident.Name == timePkg {
+				return getTimeDurationValue(dur)
+			}
+			return getDurationFromValue(pass, dur.Sel, timePkg)
+		}
+	case *ast.BinaryExpr:
+		return getBinaryExprDuration(pass, dur, timePkg)
+
+	case *ast.Ident:
+		return getConstDuration(pass, dur, timePkg)
+	}
+
+	return 0, nil
+}
+
+func getConstDuration(pass *analysis.Pass, ident *ast.Ident, timePkg string) (time.Duration, error) {
+	o := pass.TypesInfo.ObjectOf(ident)
+	if o != nil {
+		if c, ok := o.(*gotypes.Const); ok {
+			if c.Val().Kind() == constant.Int {
+				i, err := strconv.Atoi(c.Val().String())
+				if err != nil {
+					return 0, nil
+				}
+				return time.Duration(i), nil
+			}
+		}
+	}
+
+	if ident.Obj != nil && ident.Obj.Kind == ast.Con && ident.Obj.Decl != nil {
+		if vals, ok := ident.Obj.Decl.(*ast.ValueSpec); ok {
+			if len(vals.Values) == 1 {
+				switch val := vals.Values[0].(type) {
+				case *ast.BasicLit:
+					if val.Kind == token.INT {
+						i, err := strconv.Atoi(val.Value)
+						if err != nil {
+							return 0, nil
+						}
+						return time.Duration(i), nil
+					}
+					return 0, nil
+				case *ast.BinaryExpr:
+					return getBinaryExprDuration(pass, val, timePkg)
+				}
+			}
+		}
+	}
+
+	return 0, nil
+}
+
+func getTimeDurationValue(dur *ast.SelectorExpr) (time.Duration, error) {
+	switch dur.Sel.Name {
+	case "Nanosecond":
+		return time.Nanosecond, nil
+	case "Microsecond":
+		return time.Microsecond, nil
+	case "Millisecond":
+		return time.Millisecond, nil
+	case "Second":
+		return time.Second, nil
+	case "Minute":
+		return time.Minute, nil
+	case "Hour":
+		return time.Hour, nil
+	default:
+		return 0, errors.New("unknown duration value") // should never happen
+	}
+}
+
+func getBinaryExprDuration(pass *analysis.Pass, expr *ast.BinaryExpr, timePkg string) (time.Duration, error) {
+	x, err := getBinaryDurValue(pass, expr.X, timePkg)
+	if err != nil || x == 0 {
+		return 0, nil
+	}
+	y, err := getBinaryDurValue(pass, expr.Y, timePkg)
+	if err != nil || y == 0 {
+		return 0, nil
+	}
+
+	switch expr.Op {
+	case token.ADD:
+		return x + y, nil
+	case token.SUB:
+		val := x - y
+		if val > 0 {
+			return val, nil
+		}
+		return 0, nil
+	case token.MUL:
+		return x * y, nil
+	case token.QUO:
+		if y == 0 {
+			return 0, nil
+		}
+		return x / y, nil
+	case token.REM:
+		if y == 0 {
+			return 0, nil
+		}
+		return x % y, nil
+	default:
+		return 0, nil
+	}
+}
+
+func getBinaryDurValue(pass *analysis.Pass, expr ast.Expr, timePkg string) (time.Duration, error) {
+	switch x := expr.(type) {
+	case *ast.SelectorExpr:
+		return getDurationFromValue(pass, x, timePkg)
+	case *ast.BinaryExpr:
+		return getBinaryExprDuration(pass, x, timePkg)
+	case *ast.BasicLit:
+		if x.Kind == token.INT {
+			val, err := strconv.Atoi(x.Value)
+			if err != nil {
+				return 0, err
+			}
+			return time.Duration(val), nil
+		}
+	case *ast.ParenExpr:
+		return getBinaryDurValue(pass, x.X, timePkg)
+
+	case *ast.Ident:
+		return getConstDuration(pass, x, timePkg)
+	}
+
+	return 0, nil
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go
new file mode 100644
index 0000000000000000000000000000000000000000..c7f931ca75d80598bd73b80a096fd8b68a9d7d85
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go
@@ -0,0 +1,98 @@
+package reports
+
+import (
+	"bytes"
+	"fmt"
+	"go/ast"
+	"go/printer"
+	"go/token"
+	"strings"
+
+	"golang.org/x/tools/go/analysis"
+)
+
+type Builder struct {
+	pos        token.Pos
+	end        token.Pos
+	oldExpr    string
+	issues     []string
+	fixOffer   string
+	suggestFix bool
+}
+
+func NewBuilder(fset *token.FileSet, oldExpr ast.Expr) *Builder {
+	b := &Builder{
+		pos:        oldExpr.Pos(),
+		end:        oldExpr.End(),
+		oldExpr:    goFmt(fset, oldExpr),
+		suggestFix: false,
+	}
+
+	return b
+}
+
+func (b *Builder) AddIssue(suggestFix bool, issue string, args ...any) {
+	if len(args) > 0 {
+		issue = fmt.Sprintf(issue, args...)
+	}
+	b.issues = append(b.issues, issue)
+
+	if suggestFix {
+		b.suggestFix = true
+	}
+}
+
+func (b *Builder) SetFixOffer(fset *token.FileSet, fixOffer ast.Expr) {
+	if offer := goFmt(fset, fixOffer); offer != b.oldExpr {
+		b.fixOffer = offer
+	}
+}
+
+func (b *Builder) HasReport() bool {
+	return len(b.issues) > 0
+}
+
+func (b *Builder) Build() analysis.Diagnostic {
+	diagnostic := analysis.Diagnostic{
+		Pos:     b.pos,
+		Message: b.getMessage(),
+	}
+
+	if b.suggestFix && len(b.fixOffer) > 0 {
+		diagnostic.SuggestedFixes = []analysis.SuggestedFix{
+			{
+				Message: fmt.Sprintf("should replace %s with %s", b.oldExpr, b.fixOffer),
+				TextEdits: []analysis.TextEdit{
+					{
+						Pos:     b.pos,
+						End:     b.end,
+						NewText: []byte(b.fixOffer),
+					},
+				},
+			},
+		}
+	}
+
+	return diagnostic
+}
+
+func goFmt(fset *token.FileSet, x ast.Expr) string {
+	var b bytes.Buffer
+	_ = printer.Fprint(&b, fset, x)
+	return b.String()
+}
+
+func (b *Builder) getMessage() string {
+	sb := strings.Builder{}
+	sb.WriteString("ginkgo-linter: ")
+	if len(b.issues) > 1 {
+		sb.WriteString("multiple issues: ")
+	}
+	sb.WriteString(strings.Join(b.issues, "; "))
+
+	if b.suggestFix && len(b.fixOffer) != 0 {
+		sb.WriteString(fmt.Sprintf(". Consider using `%s` instead", b.fixOffer))
+	}
+
+	return sb.String()
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/reverseassertion/reverse_assertion.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/reverseassertion/reverse_assertion.go
similarity index 100%
rename from vendor/github.com/nunnatsa/ginkgolinter/reverseassertion/reverse_assertion.go
rename to vendor/github.com/nunnatsa/ginkgolinter/internal/reverseassertion/reverse_assertion.go
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/ginkgo_linter.go b/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go
similarity index 52%
rename from vendor/github.com/nunnatsa/ginkgolinter/ginkgo_linter.go
rename to vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go
index 11cffaca520de8f6a9ff99f6b93ee9a099129b11..574fdfadf339432fdd90f4c279f17cbb971d9b2b 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/ginkgo_linter.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go
@@ -1,23 +1,25 @@
-package ginkgolinter
+package linter
 
 import (
 	"bytes"
-	"flag"
 	"fmt"
 	"go/ast"
 	"go/constant"
 	"go/printer"
 	"go/token"
 	gotypes "go/types"
+	"reflect"
 
 	"github.com/go-toolsmith/astcopy"
 	"golang.org/x/tools/go/analysis"
 
-	"github.com/nunnatsa/ginkgolinter/ginkgohandler"
-	"github.com/nunnatsa/ginkgolinter/gomegahandler"
-	"github.com/nunnatsa/ginkgolinter/reverseassertion"
+	"github.com/nunnatsa/ginkgolinter/internal/ginkgohandler"
+	"github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+	"github.com/nunnatsa/ginkgolinter/internal/interfaces"
+	"github.com/nunnatsa/ginkgolinter/internal/intervals"
+	"github.com/nunnatsa/ginkgolinter/internal/reports"
+	"github.com/nunnatsa/ginkgolinter/internal/reverseassertion"
 	"github.com/nunnatsa/ginkgolinter/types"
-	"github.com/nunnatsa/ginkgolinter/version"
 )
 
 // The ginkgolinter enforces standards of using ginkgo and gomega.
@@ -26,19 +28,28 @@ import (
 
 const (
 	linterName                    = "ginkgo-linter"
-	wrongLengthWarningTemplate    = linterName + ": wrong length assertion; consider using `%s` instead"
-	wrongNilWarningTemplate       = linterName + ": wrong nil assertion; consider using `%s` instead"
-	wrongBoolWarningTemplate      = linterName + ": wrong boolean assertion; consider using `%s` instead"
-	wrongErrWarningTemplate       = linterName + ": wrong error assertion; consider using `%s` instead"
-	wrongCompareWarningTemplate   = linterName + ": wrong comparison assertion; consider using `%s` instead"
-	doubleNegativeWarningTemplate = linterName + ": avoid double negative assertion; consider using `%s` instead"
-	valueInEventually             = linterName + ": use a function call in %s. This actually checks nothing, because %s receives the function returned value, instead of function itself, and this value is never changed"
-	comparePointerToValue         = linterName + ": comparing a pointer to a value will always fail. consider using `%s` instead"
-	missingAssertionMessage       = linterName + `: %q: missing assertion method. Expected "Should()", "To()", "ShouldNot()", "ToNot()" or "NotTo()"`
-	missingAsyncAssertionMessage  = linterName + `: %q: missing assertion method. Expected "Should()" or "ShouldNot()"`
-	focusContainerFound           = linterName + ": Focus container found. This is used only for local debug and should not be part of the actual source code, consider to replace with %q"
-	focusSpecFound                = linterName + ": Focus spec found. This is used only for local debug and should not be part of the actual source code, consider to remove it"
+	wrongLengthWarningTemplate    = "wrong length assertion"
+	wrongCapWarningTemplate       = "wrong cap assertion"
+	wrongNilWarningTemplate       = "wrong nil assertion"
+	wrongBoolWarningTemplate      = "wrong boolean assertion"
+	wrongErrWarningTemplate       = "wrong error assertion"
+	wrongCompareWarningTemplate   = "wrong comparison assertion"
+	doubleNegativeWarningTemplate = "avoid double negative assertion"
+	valueInEventually             = "use a function call in %s. This actually checks nothing, because %s receives the function returned value, instead of function itself, and this value is never changed"
+	comparePointerToValue         = "comparing a pointer to a value will always fail"
+	missingAssertionMessage       = linterName + `: %q: missing assertion method. Expected %s`
+	focusContainerFound           = linterName + ": Focus container found. This is used only for local debug and should not be part of the actual source code. Consider to replace with %q"
+	focusSpecFound                = linterName + ": Focus spec found. This is used only for local debug and should not be part of the actual source code. Consider to remove it"
+	compareDifferentTypes         = "use %[1]s with different types: Comparing %[2]s with %[3]s; either change the expected value type if possible, or use the BeEquivalentTo() matcher, instead of %[1]s()"
+	matchErrorArgWrongType        = "the MatchError matcher used to assert a non error type (%s)"
+	matchErrorWrongTypeAssertion  = "MatchError first parameter (%s) must be error, string, GomegaMatcher or func(error)bool are allowed"
+	matchErrorMissingDescription  = "missing function description as second parameter of MatchError"
+	matchErrorRedundantArg        = "redundant MatchError arguments; consider removing them"
+	matchErrorNoFuncDescription   = "The second parameter of MatchError must be the function description (string)"
+	forceExpectToTemplate         = "must not use Expect with %s"
+	useBeforeEachTemplate         = "use BeforeEach() to assign variable %s"
 )
+
 const ( // gomega matchers
 	beEmpty        = "BeEmpty"
 	beEquivalentTo = "BeEquivalentTo"
@@ -50,11 +61,16 @@ const ( // gomega matchers
 	beZero         = "BeZero"
 	equal          = "Equal"
 	haveLen        = "HaveLen"
+	haveCap        = "HaveCap"
 	haveOccurred   = "HaveOccurred"
 	haveValue      = "HaveValue"
 	not            = "Not"
 	omega          = "Ω"
 	succeed        = "Succeed"
+	and            = "And"
+	or             = "Or"
+	withTransform  = "WithTransform"
+	matchError     = "MatchError"
 )
 
 const ( // gomega actuals
@@ -66,95 +82,19 @@ const ( // gomega actuals
 	consistentlyWithOffset = "ConsistentlyWithOffset"
 )
 
-// Analyzer is the interface to go_vet
-var Analyzer = NewAnalyzer()
-
-type ginkgoLinter struct {
+type GinkgoLinter struct {
 	config *types.Config
 }
 
-// NewAnalyzer returns an Analyzer - the package interface with nogo
-func NewAnalyzer() *analysis.Analyzer {
-	linter := ginkgoLinter{
-		config: &types.Config{
-			SuppressLen:     false,
-			SuppressNil:     false,
-			SuppressErr:     false,
-			SuppressCompare: false,
-			ForbidFocus:     false,
-			AllowHaveLen0:   false,
-		},
-	}
-
-	a := &analysis.Analyzer{
-		Name: "ginkgolinter",
-		Doc:  fmt.Sprintf(doc, version.Version()),
-		Run:  linter.run,
+// NewGinkgoLinter returns a new GinkgoLinter object
+func NewGinkgoLinter(config *types.Config) *GinkgoLinter {
+	return &GinkgoLinter{
+		config: config,
 	}
-
-	var ignored bool
-	a.Flags.Init("ginkgolinter", flag.ExitOnError)
-	a.Flags.Var(&linter.config.SuppressLen, "suppress-len-assertion", "Suppress warning for wrong length assertions")
-	a.Flags.Var(&linter.config.SuppressNil, "suppress-nil-assertion", "Suppress warning for wrong nil assertions")
-	a.Flags.Var(&linter.config.SuppressErr, "suppress-err-assertion", "Suppress warning for wrong error assertions")
-	a.Flags.Var(&linter.config.SuppressCompare, "suppress-compare-assertion", "Suppress warning for wrong comparison assertions")
-	a.Flags.Var(&linter.config.SuppressAsync, "suppress-async-assertion", "Suppress warning for function call in async assertion, like Eventually")
-	a.Flags.Var(&linter.config.AllowHaveLen0, "allow-havelen-0", "Do not warn for HaveLen(0); default = false")
-
-	a.Flags.BoolVar(&ignored, "suppress-focus-container", true, "Suppress warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt. Deprecated and ignored: use --forbid-focus-container instead")
-	a.Flags.Var(&linter.config.ForbidFocus, "forbid-focus-container", "trigger a warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt; default = false.")
-
-	return a
 }
 
-const doc = `enforces standards of using ginkgo and gomega
-
-or
-       ginkgolinter version
-
-version: %s
-
-currently, the linter searches for following:
-* trigger a warning when using Eventually or Constantly with a function call. This is in order to prevent the case when 
-  using a function call instead of a function. Function call returns a value only once, and so the original value
-  is tested again and again and is never changed. [Bug]
-
-* trigger a warning when comparing a pointer to a value. [Bug]
-
-* trigger a warning for missing assertion method: [Bug]
-	Eventually(checkSomething)
-
-* trigger a warning when a ginkgo focus container (FDescribe, FContext, FWhen or FIt) is found. [Bug]
-
-* wrong length assertions. We want to assert the item rather than its length. [Style]
-For example:
-	Expect(len(x)).Should(Equal(1))
-This should be replaced with:
-	Expect(x)).Should(HavelLen(1))
-	
-* wrong nil assertions. We want to assert the item rather than a comparison result. [Style]
-For example:
-	Expect(x == nil).Should(BeTrue())
-This should be replaced with:
-	Expect(x).Should(BeNil())
-
-* wrong error assertions. For example: [Style]
-	Expect(err == nil).Should(BeTrue())
-This should be replaced with:
-	Expect(err).ShouldNot(HaveOccurred())
-
-* wrong boolean comparison, for example: [Style]
-	Expect(x == 8).Should(BeTrue())
-This should be replaced with:
-	Expect(x).Should(BeEqual(8))
-
-* replaces Equal(true/false) with BeTrue()/BeFalse() [Style]
-
-* replaces HaveLen(0) with BeEmpty() [Style]
-`
-
-// main assertion function
-func (l *ginkgoLinter) run(pass *analysis.Pass) (interface{}, error) {
+// Run is the main assertion function
+func (l *GinkgoLinter) Run(pass *analysis.Pass) (interface{}, error) {
 	for _, file := range pass.Files {
 		fileConfig := l.config.Clone()
 
@@ -169,18 +109,37 @@ func (l *ginkgoLinter) run(pass *analysis.Pass) (interface{}, error) {
 			continue
 		}
 
+		timePks := ""
+		for _, imp := range file.Imports {
+			if imp.Path.Value == `"time"` {
+				if imp.Name == nil {
+					timePks = "time"
+				} else {
+					timePks = imp.Name.Name
+				}
+			}
+		}
+
 		ast.Inspect(file, func(n ast.Node) bool {
-			if ginkgoHndlr != nil && fileConfig.ForbidFocus {
+			if ginkgoHndlr != nil {
+				goDeeper := false
 				spec, ok := n.(*ast.ValueSpec)
 				if ok {
 					for _, val := range spec.Values {
 						if exp, ok := val.(*ast.CallExpr); ok {
-							if checkFocusContainer(pass, ginkgoHndlr, exp) {
-								return true
+							if bool(fileConfig.ForbidFocus) && checkFocusContainer(pass, ginkgoHndlr, exp) {
+								goDeeper = true
+							}
+
+							if bool(fileConfig.ForbidSpecPollution) && checkAssignmentsInContainer(pass, ginkgoHndlr, exp) {
+								goDeeper = true
 							}
 						}
 					}
 				}
+				if goDeeper {
+					return true
+				}
 			}
 
 			stmt, ok := n.(*ast.ExprStmt)
@@ -200,8 +159,17 @@ func (l *ginkgoLinter) run(pass *analysis.Pass) (interface{}, error) {
 				return true
 			}
 
-			if ginkgoHndlr != nil && bool(config.ForbidFocus) && checkFocusContainer(pass, ginkgoHndlr, assertionExp) {
-				return true
+			if ginkgoHndlr != nil {
+				goDeeper := false
+				if bool(config.ForbidFocus) && checkFocusContainer(pass, ginkgoHndlr, assertionExp) {
+					goDeeper = true
+				}
+				if bool(config.ForbidSpecPollution) && checkAssignmentsInContainer(pass, ginkgoHndlr, assertionExp) {
+					goDeeper = true
+				}
+				if goDeeper {
+					return true
+				}
 			}
 
 			// no more ginkgo checks. From here it's only gomega. So if there is no gomega handler, exit here. This is
@@ -226,12 +194,89 @@ func (l *ginkgoLinter) run(pass *analysis.Pass) (interface{}, error) {
 				return true
 			}
 
-			return checkExpression(pass, config, assertionExp, actualExpr, gomegaHndlr)
+			return checkExpression(pass, config, assertionExp, actualExpr, gomegaHndlr, timePks)
 		})
 	}
 	return nil, nil
 }
 
+func checkAssignmentsInContainer(pass *analysis.Pass, ginkgoHndlr ginkgohandler.Handler, exp *ast.CallExpr) bool {
+	foundSomething := false
+	if ginkgoHndlr.IsWrapContainer(exp) {
+		for _, arg := range exp.Args {
+			if fn, ok := arg.(*ast.FuncLit); ok {
+				if fn.Body != nil {
+					if checkAssignments(pass, fn.Body.List) {
+						foundSomething = true
+					}
+					break
+				}
+			}
+		}
+	}
+
+	return foundSomething
+}
+
+func checkAssignments(pass *analysis.Pass, list []ast.Stmt) bool {
+	foundSomething := false
+	for _, stmt := range list {
+		switch st := stmt.(type) {
+		case *ast.DeclStmt:
+			if gen, ok := st.Decl.(*ast.GenDecl); ok {
+				if gen.Tok != token.VAR {
+					continue
+				}
+				for _, spec := range gen.Specs {
+					if valSpec, ok := spec.(*ast.ValueSpec); ok {
+						if checkAssignmentsValues(pass, valSpec.Names, valSpec.Values) {
+							foundSomething = true
+						}
+					}
+				}
+			}
+
+		case *ast.AssignStmt:
+			for i, val := range st.Rhs {
+				if !is[*ast.FuncLit](val) {
+					if id, isIdent := st.Lhs[i].(*ast.Ident); isIdent && id.Name != "_" {
+						reportNoFix(pass, id.Pos(), useBeforeEachTemplate, id.Name)
+						foundSomething = true
+					}
+				}
+			}
+
+		case *ast.IfStmt:
+			if st.Body != nil {
+				if checkAssignments(pass, st.Body.List) {
+					foundSomething = true
+				}
+			}
+			if st.Else != nil {
+				if block, isBlock := st.Else.(*ast.BlockStmt); isBlock {
+					if checkAssignments(pass, block.List) {
+						foundSomething = true
+					}
+				}
+			}
+		}
+	}
+
+	return foundSomething
+}
+
+func checkAssignmentsValues(pass *analysis.Pass, names []*ast.Ident, values []ast.Expr) bool {
+	foundSomething := false
+	for i, val := range values {
+		if !is[*ast.FuncLit](val) {
+			reportNoFix(pass, names[i].Pos(), useBeforeEachTemplate, names[i].Name)
+			foundSomething = true
+		}
+	}
+
+	return foundSomething
+}
+
 func checkFocusContainer(pass *analysis.Pass, ginkgoHndlr ginkgohandler.Handler, exp *ast.CallExpr) bool {
 	foundFocus := false
 	isFocus, id := ginkgoHndlr.GetFocusContainerName(exp)
@@ -240,7 +285,7 @@ func checkFocusContainer(pass *analysis.Pass, ginkgoHndlr ginkgohandler.Handler,
 		foundFocus = true
 	}
 
-	if id != nil && ginkgohandler.IsContainer(id) {
+	if id != nil && ginkgohandler.IsContainer(id.Name) {
 		for _, arg := range exp.Args {
 			if ginkgoHndlr.IsFocusSpec(arg) {
 				reportNoFix(pass, arg.Pos(), focusSpecFound)
@@ -256,21 +301,69 @@ func checkFocusContainer(pass *analysis.Pass, ginkgoHndlr ginkgohandler.Handler,
 	return foundFocus
 }
 
-func checkExpression(pass *analysis.Pass, config types.Config, assertionExp *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler) bool {
+func checkExpression(pass *analysis.Pass, config types.Config, assertionExp *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler, timePkg string) bool {
 	expr := astcopy.CallExpr(assertionExp)
-	oldExpr := goFmt(pass.Fset, expr)
 
-	if checkAsyncAssertion(pass, config, expr, actualExpr, handler, oldExpr) {
-		return true
+	reportBuilder := reports.NewBuilder(pass.Fset, expr)
+
+	goNested := false
+	if checkAsyncAssertion(pass, config, expr, actualExpr, handler, reportBuilder, timePkg) {
+		goNested = true
+	} else {
+
+		actualArg := getActualArg(actualExpr, handler)
+		if actualArg == nil {
+			return true
+		}
+
+		if config.ForceExpectTo {
+			goNested = forceExpectTo(expr, handler, reportBuilder) || goNested
+		}
+
+		goNested = doCheckExpression(pass, config, assertionExp, actualArg, expr, handler, reportBuilder) || goNested
 	}
 
-	actualArg := getActualArg(actualExpr, handler)
-	if actualArg == nil {
-		return true
+	if reportBuilder.HasReport() {
+		reportBuilder.SetFixOffer(pass.Fset, expr)
+		pass.Report(reportBuilder.Build())
+	}
+
+	return goNested
+}
+
+func forceExpectTo(expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
+	if asrtFun, ok := expr.Fun.(*ast.SelectorExpr); ok {
+		if actualFuncName, ok := handler.GetActualFuncName(expr); ok && actualFuncName == expect {
+			var (
+				name     string
+				newIdent *ast.Ident
+			)
+
+			switch name = asrtFun.Sel.Name; name {
+			case "Should":
+				newIdent = ast.NewIdent("To")
+			case "ShouldNot":
+				newIdent = ast.NewIdent("ToNot")
+			default:
+				return false
+			}
+
+			handler.ReplaceFunction(expr, newIdent)
+			reportBuilder.AddIssue(true, fmt.Sprintf(forceExpectToTemplate, name))
+			return true
+		}
 	}
 
+	return false
+}
+
+func doCheckExpression(pass *analysis.Pass, config types.Config, assertionExp *ast.CallExpr, actualArg ast.Expr, expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
 	if !bool(config.SuppressLen) && isActualIsLenFunc(actualArg) {
-		return checkLengthMatcher(expr, pass, handler, oldExpr)
+		return checkLengthMatcher(expr, pass, handler, reportBuilder)
+
+	} else if !bool(config.SuppressLen) && isActualIsCapFunc(actualArg) {
+		return checkCapMatcher(expr, handler, reportBuilder)
+
 	} else if nilable, compOp := getNilableFromComparison(actualArg); nilable != nil {
 		if isExprError(pass, nilable) {
 			if config.SuppressErr {
@@ -280,32 +373,309 @@ func checkExpression(pass *analysis.Pass, config types.Config, assertionExp *ast
 			return true
 		}
 
-		return checkNilMatcher(expr, pass, nilable, handler, compOp == token.NEQ, oldExpr)
+		return checkNilMatcher(expr, pass, nilable, handler, compOp == token.NEQ, reportBuilder)
 
 	} else if first, second, op, ok := isComparison(pass, actualArg); ok {
 		matcher, shouldContinue := startCheckComparison(expr, handler)
 		if !shouldContinue {
 			return false
 		}
-		if !bool(config.SuppressLen) && isActualIsLenFunc(first) {
-			if handleLenComparison(pass, expr, matcher, first, second, op, handler, oldExpr) {
-				return false
+		if !config.SuppressLen {
+			if isActualIsLenFunc(first) {
+				if handleLenComparison(pass, expr, matcher, first, second, op, handler, reportBuilder) {
+					return false
+				}
+			}
+			if isActualIsCapFunc(first) {
+				if handleCapComparison(expr, matcher, first, second, op, handler, reportBuilder) {
+					return false
+				}
 			}
 		}
-		return bool(config.SuppressCompare) || checkComparison(expr, pass, matcher, handler, first, second, op, oldExpr)
+		return bool(config.SuppressCompare) || checkComparison(expr, pass, matcher, handler, first, second, op, reportBuilder)
 
+	} else if checkMatchError(pass, assertionExp, actualArg, handler, reportBuilder) {
+		return false
 	} else if isExprError(pass, actualArg) {
-		return bool(config.SuppressErr) || checkNilError(pass, expr, handler, actualArg, oldExpr)
+		return bool(config.SuppressErr) || checkNilError(pass, expr, handler, actualArg, reportBuilder)
 
-	} else if checkPointerComparison(pass, config, assertionExp, expr, actualArg, handler, oldExpr) {
+	} else if checkPointerComparison(pass, config, assertionExp, expr, actualArg, handler, reportBuilder) {
 		return false
-	} else {
-		return handleAssertionOnly(pass, config, expr, handler, actualArg, oldExpr, true)
+	} else if !handleAssertionOnly(pass, config, expr, handler, actualArg, reportBuilder) {
+		return false
+	} else if !config.SuppressTypeCompare {
+		return !checkEqualWrongType(pass, assertionExp, actualArg, handler, reportBuilder)
+	}
+
+	return true
+}
+
+func checkMatchError(pass *analysis.Pass, origExp *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
+	matcher, ok := origExp.Args[0].(*ast.CallExpr)
+	if !ok {
+		return false
+	}
+
+	return doCheckMatchError(pass, origExp, matcher, actualArg, handler, reportBuilder)
+}
+
+func doCheckMatchError(pass *analysis.Pass, origExp *ast.CallExpr, matcher *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
+	name, ok := handler.GetActualFuncName(matcher)
+	if !ok {
+		return false
+	}
+	switch name {
+	case matchError:
+	case not:
+		nested, ok := matcher.Args[0].(*ast.CallExpr)
+		if !ok {
+			return false
+		}
+
+		return doCheckMatchError(pass, origExp, nested, actualArg, handler, reportBuilder)
+	case and, or:
+		res := false
+		for _, arg := range matcher.Args {
+			if nested, ok := arg.(*ast.CallExpr); ok {
+				if valid := doCheckMatchError(pass, origExp, nested, actualArg, handler, reportBuilder); valid {
+					res = true
+				}
+			}
+		}
+		return res
+	default:
+		return false
+	}
+
+	if !isExprError(pass, actualArg) {
+		reportBuilder.AddIssue(false, matchErrorArgWrongType, goFmt(pass.Fset, actualArg))
+	}
+
+	expr := astcopy.CallExpr(matcher)
+
+	validAssertion, requiredParams := checkMatchErrorAssertion(pass, matcher)
+	if !validAssertion {
+		reportBuilder.AddIssue(false, matchErrorWrongTypeAssertion, goFmt(pass.Fset, matcher.Args[0]))
+	}
+
+	numParams := len(matcher.Args)
+	if numParams == requiredParams {
+		if numParams == 2 {
+			t := pass.TypesInfo.TypeOf(matcher.Args[1])
+			if !gotypes.Identical(t, gotypes.Typ[gotypes.String]) {
+				reportBuilder.AddIssue(false, matchErrorNoFuncDescription)
+				return true
+			}
+		}
+		return true
+	}
+
+	if requiredParams == 2 && numParams == 1 {
+		reportBuilder.AddIssue(false, matchErrorMissingDescription)
+		return true
 	}
+
+	var newArgsSuggestion = []ast.Expr{expr.Args[0]}
+	if requiredParams == 2 {
+		newArgsSuggestion = append(newArgsSuggestion, expr.Args[1])
+	}
+	expr.Args = newArgsSuggestion
+
+	reportBuilder.AddIssue(true, matchErrorRedundantArg)
+	return true
+}
+
+func checkMatchErrorAssertion(pass *analysis.Pass, matcher *ast.CallExpr) (bool, int) {
+	if isErrorMatcherValidArg(pass, matcher.Args[0]) {
+		return true, 1
+	}
+
+	t1 := pass.TypesInfo.TypeOf(matcher.Args[0])
+	if isFuncErrBool(t1) {
+		return true, 2
+	}
+
+	return false, 0
+}
+
+// isFuncErrBool checks if a function is with the signature `func(error) bool`
+func isFuncErrBool(t gotypes.Type) bool {
+	sig, ok := t.(*gotypes.Signature)
+	if !ok {
+		return false
+	}
+	if sig.Params().Len() != 1 || sig.Results().Len() != 1 {
+		return false
+	}
+
+	if !interfaces.ImplementsError(sig.Params().At(0).Type()) {
+		return false
+	}
+
+	b, ok := sig.Results().At(0).Type().(*gotypes.Basic)
+	if ok && b.Name() == "bool" && b.Info() == gotypes.IsBoolean && b.Kind() == gotypes.Bool {
+		return true
+	}
+
+	return false
+}
+
+func isErrorMatcherValidArg(pass *analysis.Pass, arg ast.Expr) bool {
+	if isExprError(pass, arg) {
+		return true
+	}
+
+	if t, ok := pass.TypesInfo.TypeOf(arg).(*gotypes.Basic); ok && t.Kind() == gotypes.String {
+		return true
+	}
+
+	t := pass.TypesInfo.TypeOf(arg)
+
+	return interfaces.ImplementsGomegaMatcher(t)
+}
+
+func checkEqualWrongType(pass *analysis.Pass, origExp *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
+	matcher, ok := origExp.Args[0].(*ast.CallExpr)
+	if !ok {
+		return false
+	}
+
+	return checkEqualDifferentTypes(pass, matcher, actualArg, handler, false, reportBuilder)
+}
+
+func checkEqualDifferentTypes(pass *analysis.Pass, matcher *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, parentPointer bool, reportBuilder *reports.Builder) bool {
+	matcherFuncName, ok := handler.GetActualFuncName(matcher)
+	if !ok {
+		return false
+	}
+
+	actualType := pass.TypesInfo.TypeOf(actualArg)
+
+	switch matcherFuncName {
+	case equal, beIdenticalTo: // continue
+	case and, or:
+		foundIssue := false
+		for _, nestedExp := range matcher.Args {
+			nested, ok := nestedExp.(*ast.CallExpr)
+			if !ok {
+				continue
+			}
+			if checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder) {
+				foundIssue = true
+			}
+		}
+
+		return foundIssue
+	case withTransform:
+		nested, ok := matcher.Args[1].(*ast.CallExpr)
+		if !ok {
+			return false
+		}
+
+		matcherFuncName, ok = handler.GetActualFuncName(nested)
+		switch matcherFuncName {
+		case equal, beIdenticalTo:
+		case not:
+			return checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder)
+		default:
+			return false
+		}
+
+		if t := getFuncType(pass, matcher.Args[0]); t != nil {
+			actualType = t
+			matcher = nested
+
+			if !ok {
+				return false
+			}
+		} else {
+			return checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder)
+		}
+
+	case not:
+		nested, ok := matcher.Args[0].(*ast.CallExpr)
+		if !ok {
+			return false
+		}
+
+		return checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder)
+
+	case haveValue:
+		nested, ok := matcher.Args[0].(*ast.CallExpr)
+		if !ok {
+			return false
+		}
+
+		return checkEqualDifferentTypes(pass, nested, actualArg, handler, true, reportBuilder)
+	default:
+		return false
+	}
+
+	matcherValue := matcher.Args[0]
+
+	switch act := actualType.(type) {
+	case *gotypes.Tuple:
+		actualType = act.At(0).Type()
+	case *gotypes.Pointer:
+		if parentPointer {
+			actualType = act.Elem()
+		}
+	}
+
+	matcherType := pass.TypesInfo.TypeOf(matcherValue)
+
+	if !reflect.DeepEqual(matcherType, actualType) {
+		// Equal can handle comparison of interface and a value that implements it
+		if isImplementing(matcherType, actualType) || isImplementing(actualType, matcherType) {
+			return false
+		}
+
+		reportBuilder.AddIssue(false, compareDifferentTypes, matcherFuncName, actualType, matcherType)
+		return true
+	}
+
+	return false
+}
+
+func getFuncType(pass *analysis.Pass, expr ast.Expr) gotypes.Type {
+	switch f := expr.(type) {
+	case *ast.FuncLit:
+		if f.Type != nil && f.Type.Results != nil && len(f.Type.Results.List) > 0 {
+			return pass.TypesInfo.TypeOf(f.Type.Results.List[0].Type)
+		}
+	case *ast.Ident:
+		a := pass.TypesInfo.TypeOf(f)
+		if sig, ok := a.(*gotypes.Signature); ok && sig.Results().Len() > 0 {
+			return sig.Results().At(0).Type()
+		}
+	}
+
+	return nil
+}
+
+func isImplementing(ifs, impl gotypes.Type) bool {
+	if gotypes.IsInterface(ifs) {
+
+		var (
+			theIfs *gotypes.Interface
+			ok     bool
+		)
+
+		for {
+			theIfs, ok = ifs.(*gotypes.Interface)
+			if ok {
+				break
+			}
+			ifs = ifs.Underlying()
+		}
+
+		return gotypes.Implements(impl, theIfs)
+	}
+	return false
 }
 
 // be careful - never change origExp!!! only modify its clone, expr!!!
-func checkPointerComparison(pass *analysis.Pass, config types.Config, origExp *ast.CallExpr, expr *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, oldExpr string) bool {
+func checkPointerComparison(pass *analysis.Pass, config types.Config, origExp *ast.CallExpr, expr *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
 	if !isPointer(pass, actualArg) {
 		return false
 	}
@@ -352,20 +722,19 @@ func checkPointerComparison(pass *analysis.Pass, config types.Config, origExp *a
 		return false
 	}
 
-	handleAssertionOnly(pass, config, expr, handler, actualArg, oldExpr, false)
+	handleAssertionOnly(pass, config, expr, handler, actualArg, reportBuilder)
 
 	args := []ast.Expr{astcopy.CallExpr(expr.Args[0].(*ast.CallExpr))}
 	handler.ReplaceFunction(expr.Args[0].(*ast.CallExpr), ast.NewIdent(haveValue))
 	expr.Args[0].(*ast.CallExpr).Args = args
-	report(pass, expr, comparePointerToValue, oldExpr)
 
+	reportBuilder.AddIssue(true, comparePointerToValue)
 	return true
-
 }
 
 // check async assertion does not assert function call. This is a real bug in the test. In this case, the assertion is
 // done on the returned value, instead of polling the result of a function, for instance.
-func checkAsyncAssertion(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler, oldExpr string) bool {
+func checkAsyncAssertion(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder, timePkg string) bool {
 	funcName, ok := handler.GetActualFuncName(actualExpr)
 	if !ok {
 		return false
@@ -411,19 +780,22 @@ func checkAsyncAssertion(pass *analysis.Pass, config types.Config, expr *ast.Cal
 
 						actualExpr.Fun = call
 						actualExpr.Args = fun.Args
+						actualExpr = actualExpr.Fun.(*ast.SelectorExpr).X.(*ast.CallExpr)
 					} else {
 						actualExpr.Args[funcIndex] = fun.Fun
 					}
 
-					handleAssertionOnly(pass, config, expr, handler, actualExpr, oldExpr, false)
-					report(pass, expr, fmt.Sprintf(valueInEventually, funcName, funcName)+"; consider using `%s` instead", oldExpr)
-					return true
+					reportBuilder.AddIssue(true, valueInEventually, funcName, funcName)
 				}
 			}
 		}
+
+		if config.ValidateAsyncIntervals {
+			intervals.CheckIntervals(pass, expr, actualExpr, reportBuilder, handler, timePkg, funcIndex)
+		}
 	}
 
-	handleAssertionOnly(pass, config, expr, handler, actualExpr, oldExpr, true)
+	handleAssertionOnly(pass, config, expr, handler, actualExpr, reportBuilder)
 	return true
 }
 
@@ -478,7 +850,7 @@ func startCheckComparison(exp *ast.CallExpr, handler gomegahandler.Handler) (*as
 	return matcher, true
 }
 
-func checkComparison(exp *ast.CallExpr, pass *analysis.Pass, matcher *ast.CallExpr, handler gomegahandler.Handler, first ast.Expr, second ast.Expr, op token.Token, oldExp string) bool {
+func checkComparison(exp *ast.CallExpr, pass *analysis.Pass, matcher *ast.CallExpr, handler gomegahandler.Handler, first ast.Expr, second ast.Expr, op token.Token, reportBuilder *reports.Builder) bool {
 	fun, ok := exp.Fun.(*ast.SelectorExpr)
 	if !ok {
 		return true
@@ -510,7 +882,7 @@ func checkComparison(exp *ast.CallExpr, pass *analysis.Pass, matcher *ast.CallEx
 	}
 
 	call.Args = []ast.Expr{first}
-	report(pass, exp, wrongCompareWarningTemplate, oldExp)
+	reportBuilder.AddIssue(true, wrongCompareWarningTemplate)
 	return false
 }
 
@@ -522,7 +894,7 @@ func handleEqualComparison(pass *analysis.Pass, matcher *ast.CallExpr, first ast
 		t := pass.TypesInfo.TypeOf(first)
 		if gotypes.IsInterface(t) {
 			handler.ReplaceFunction(matcher, ast.NewIdent(beIdenticalTo))
-		} else if _, ok := t.(*gotypes.Pointer); ok {
+		} else if is[*gotypes.Pointer](t) {
 			handler.ReplaceFunction(matcher, ast.NewIdent(beIdenticalTo))
 		} else {
 			handler.ReplaceFunction(matcher, ast.NewIdent(equal))
@@ -532,7 +904,7 @@ func handleEqualComparison(pass *analysis.Pass, matcher *ast.CallExpr, first ast
 	}
 }
 
-func handleLenComparison(pass *analysis.Pass, exp *ast.CallExpr, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, op token.Token, handler gomegahandler.Handler, oldExpr string) bool {
+func handleLenComparison(pass *analysis.Pass, exp *ast.CallExpr, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, op token.Token, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
 	switch op {
 	case token.EQL:
 	case token.NEQ:
@@ -559,23 +931,92 @@ func handleLenComparison(pass *analysis.Pass, exp *ast.CallExpr, matcher *ast.Ca
 	fun := handler.GetActualExpr(exp.Fun.(*ast.SelectorExpr))
 	fun.Args = []ast.Expr{val}
 
-	report(pass, exp, wrongLengthWarningTemplate, oldExpr)
+	reportBuilder.AddIssue(true, wrongLengthWarningTemplate)
+	return true
+}
+
+func handleCapComparison(exp *ast.CallExpr, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, op token.Token, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
+	switch op {
+	case token.EQL:
+	case token.NEQ:
+		reverseAssertionFuncLogic(exp)
+	default:
+		return false
+	}
+
+	eql := ast.NewIdent(haveCap)
+	matcher.Args = []ast.Expr{second}
+
+	handler.ReplaceFunction(matcher, eql)
+	firstLen, ok := first.(*ast.CallExpr) // assuming it's cap()
+	if !ok {
+		return false // should never happen
+	}
+
+	val := firstLen.Args[0]
+	fun := handler.GetActualExpr(exp.Fun.(*ast.SelectorExpr))
+	fun.Args = []ast.Expr{val}
+
+	reportBuilder.AddIssue(true, wrongCapWarningTemplate)
 	return true
 }
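
A hedged sketch (example mine) of the rewrite the new cap() handling above suggests: a boolean comparison on cap() becomes a HaveCap assertion.

	buf := make([]byte, 0, 5)
	Expect(cap(buf) == 5).To(BeTrue()) // before: asserts on a bool comparison
	Expect(buf).To(HaveCap(5))         // after: the suggested form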
 
 // Check if the "actual" argument is a call to the golang built-in len() function
 func isActualIsLenFunc(actualArg ast.Expr) bool {
+	return checkActualFuncName(actualArg, "len")
+}
+
+// Check if the "actual" argument is a call to the golang built-in cap() function
+func isActualIsCapFunc(actualArg ast.Expr) bool {
+	return checkActualFuncName(actualArg, "cap")
+}
+
+func checkActualFuncName(actualArg ast.Expr, name string) bool {
 	lenArgExp, ok := actualArg.(*ast.CallExpr)
 	if !ok {
 		return false
 	}
 
 	lenFunc, ok := lenArgExp.Fun.(*ast.Ident)
-	return ok && lenFunc.Name == "len"
+	return ok && lenFunc.Name == name
+}
+
+// Check if matcher function is in one of the patterns we want to avoid
+func checkLengthMatcher(exp *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
+	matcher, ok := exp.Args[0].(*ast.CallExpr)
+	if !ok {
+		return true
+	}
+
+	matcherFuncName, ok := handler.GetActualFuncName(matcher)
+	if !ok {
+		return true
+	}
+
+	switch matcherFuncName {
+	case equal:
+		handleEqualLenMatcher(matcher, pass, exp, handler, reportBuilder)
+		return false
+
+	case beZero:
+		handleBeZero(exp, handler, reportBuilder)
+		return false
+
+	case beNumerically:
+		return handleBeNumerically(matcher, pass, exp, handler, reportBuilder)
+
+	case not:
+		reverseAssertionFuncLogic(exp)
+		exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0]
+		return checkLengthMatcher(exp, pass, handler, reportBuilder)
+
+	default:
+		return true
+	}
 }
 
 // Check if matcher function is in one of the patterns we want to avoid
-func checkLengthMatcher(exp *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler, oldExp string) bool {
+func checkCapMatcher(exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
 	matcher, ok := exp.Args[0].(*ast.CallExpr)
 	if !ok {
 		return true
@@ -588,20 +1029,20 @@ func checkLengthMatcher(exp *ast.CallExpr, pass *analysis.Pass, handler gomegaha
 
 	switch matcherFuncName {
 	case equal:
-		handleEqualMatcher(matcher, pass, exp, handler, oldExp)
+		handleEqualCapMatcher(matcher, exp, handler, reportBuilder)
 		return false
 
 	case beZero:
-		handleBeZero(pass, exp, handler, oldExp)
+		handleCapBeZero(exp, handler, reportBuilder)
 		return false
 
 	case beNumerically:
-		return handleBeNumerically(matcher, pass, exp, handler, oldExp)
+		return handleCapBeNumerically(matcher, exp, handler, reportBuilder)
 
 	case not:
 		reverseAssertionFuncLogic(exp)
 		exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0]
-		return checkLengthMatcher(exp, pass, handler, oldExp)
+		return checkCapMatcher(exp, handler, reportBuilder)
 
 	default:
 		return true
@@ -609,7 +1050,7 @@ func checkLengthMatcher(exp *ast.CallExpr, pass *analysis.Pass, handler gomegaha
 }
 
 // Check if matcher function is in one of the patterns we want to avoid
-func checkNilMatcher(exp *ast.CallExpr, pass *analysis.Pass, nilable ast.Expr, handler gomegahandler.Handler, notEqual bool, oldExp string) bool {
+func checkNilMatcher(exp *ast.CallExpr, pass *analysis.Pass, nilable ast.Expr, handler gomegahandler.Handler, notEqual bool, reportBuilder *reports.Builder) bool {
 	matcher, ok := exp.Args[0].(*ast.CallExpr)
 	if !ok {
 		return true
@@ -622,19 +1063,19 @@ func checkNilMatcher(exp *ast.CallExpr, pass *analysis.Pass, nilable ast.Expr, h
 
 	switch matcherFuncName {
 	case equal:
-		handleEqualNilMatcher(matcher, pass, exp, handler, nilable, notEqual, oldExp)
+		handleEqualNilMatcher(matcher, pass, exp, handler, nilable, notEqual, reportBuilder)
 
 	case beTrue:
-		handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, oldExp)
+		handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, reportBuilder)
 
 	case beFalse:
 		reverseAssertionFuncLogic(exp)
-		handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, oldExp)
+		handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, reportBuilder)
 
 	case not:
 		reverseAssertionFuncLogic(exp)
 		exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0]
-		return checkNilMatcher(exp, pass, nilable, handler, notEqual, oldExp)
+		return checkNilMatcher(exp, pass, nilable, handler, notEqual, reportBuilder)
 
 	default:
 		return true
@@ -642,7 +1083,7 @@ func checkNilMatcher(exp *ast.CallExpr, pass *analysis.Pass, nilable ast.Expr, h
 	return false
 }
 
-func checkNilError(pass *analysis.Pass, assertionExp *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, oldExpr string) bool {
+func checkNilError(pass *analysis.Pass, assertionExp *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, reportBuilder *reports.Builder) bool {
 	if len(assertionExp.Args) == 0 {
 		return true
 	}
@@ -673,13 +1114,13 @@ func checkNilError(pass *analysis.Pass, assertionExp *ast.CallExpr, handler gome
 	case not:
 		reverseAssertionFuncLogic(assertionExp)
 		assertionExp.Args[0] = assertionExp.Args[0].(*ast.CallExpr).Args[0]
-		return checkNilError(pass, assertionExp, handler, actualArg, oldExpr)
+		return checkNilError(pass, assertionExp, handler, actualArg, reportBuilder)
 	default:
 		return true
 	}
 
 	var newFuncName string
-	if _, ok := actualArg.(*ast.CallExpr); ok {
+	if is[*ast.CallExpr](actualArg) {
 		newFuncName = succeed
 	} else {
 		reverseAssertionFuncLogic(assertionExp)
@@ -689,7 +1130,7 @@ func checkNilError(pass *analysis.Pass, assertionExp *ast.CallExpr, handler gome
 	handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(newFuncName))
 	equalFuncExpr.Args = nil
 
-	report(pass, assertionExp, wrongErrWarningTemplate, oldExpr)
+	reportBuilder.AddIssue(true, wrongErrWarningTemplate)
 	return false
 }
 
@@ -700,7 +1141,7 @@ func checkNilError(pass *analysis.Pass, assertionExp *ast.CallExpr, handler gome
 //	Equal(true) => BeTrue()
 //	Equal(false) => BeFalse()
 //	HaveLen(0) => BeEmpty()
-func handleAssertionOnly(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, oldExpr string, shouldReport bool) bool {
+func handleAssertionOnly(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, reportBuilder *reports.Builder) bool {
 	if len(expr.Args) == 0 {
 		return true
 	}
@@ -753,19 +1194,15 @@ func handleAssertionOnly(pass *analysis.Pass, config types.Config, expr *ast.Cal
 		handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(replacement))
 		equalFuncExpr.Args = nil
 
-		if shouldReport {
-			report(pass, expr, template, oldExpr)
-		}
-
+		reportBuilder.AddIssue(true, template)
 		return false
 
 	case beFalse:
 		if isNegativeAssertion(expr) {
 			reverseAssertionFuncLogic(expr)
 			handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(beTrue))
-			if shouldReport {
-				report(pass, expr, doubleNegativeWarningTemplate, oldExpr)
-			}
+			reportBuilder.AddIssue(true, doubleNegativeWarningTemplate)
+			return false
 		}
 		return false
 
@@ -778,9 +1215,7 @@ func handleAssertionOnly(pass *analysis.Pass, config types.Config, expr *ast.Cal
 			if isZero(pass, equalFuncExpr.Args[0]) {
 				handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(beEmpty))
 				equalFuncExpr.Args = nil
-				if shouldReport {
-					report(pass, expr, wrongLengthWarningTemplate, oldExpr)
-				}
+				reportBuilder.AddIssue(true, wrongLengthWarningTemplate)
 				return false
 			}
 		}
@@ -790,7 +1225,7 @@ func handleAssertionOnly(pass *analysis.Pass, config types.Config, expr *ast.Cal
 	case not:
 		reverseAssertionFuncLogic(expr)
 		expr.Args[0] = expr.Args[0].(*ast.CallExpr).Args[0]
-		return handleAssertionOnly(pass, config, expr, handler, actualArg, oldExpr, shouldReport)
+		return handleAssertionOnly(pass, config, expr, handler, actualArg, reportBuilder)
 	default:
 		return true
 	}
@@ -848,13 +1283,13 @@ func replaceLenActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler
 	switch name {
 	case expect, omega:
 		arg := actualExpr.Args[0]
-		if isActualIsLenFunc(arg) {
+		if isActualIsLenFunc(arg) || isActualIsCapFunc(arg) {
 			// replace the len function call by its parameter, to create a fix suggestion
 			actualExpr.Args[0] = arg.(*ast.CallExpr).Args[0]
 		}
 	case expectWithOffset:
 		arg := actualExpr.Args[1]
-		if isActualIsLenFunc(arg) {
+		if isActualIsLenFunc(arg) || isActualIsCapFunc(arg) {
 			// replace the len function call by its parameter, to create a fix suggestion
 			actualExpr.Args[1] = arg.(*ast.CallExpr).Args[0]
 		}
@@ -883,7 +1318,7 @@ func replaceNilActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler
 }
 
 // For the BeNumerically matcher, we want to avoid the assertion of length to be > 0 or >= 1, or just == number
-func handleBeNumerically(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, oldExp string) bool {
+func handleBeNumerically(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
 	opExp, ok1 := matcher.Args[0].(*ast.BasicLit)
 	valExp, ok2 := matcher.Args[1].(*ast.BasicLit)
 
@@ -895,20 +1330,45 @@ func handleBeNumerically(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.Ca
 			reverseAssertionFuncLogic(exp)
 			handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(beEmpty))
 			exp.Args[0].(*ast.CallExpr).Args = nil
-			reportLengthAssertion(pass, exp, handler, oldExp)
-			return false
 		} else if op == `"=="` {
 			chooseNumericMatcher(pass, exp, handler, valExp)
-			reportLengthAssertion(pass, exp, handler, oldExp)
-
-			return false
 		} else if op == `"!="` {
 			reverseAssertionFuncLogic(exp)
 			chooseNumericMatcher(pass, exp, handler, valExp)
-			reportLengthAssertion(pass, exp, handler, oldExp)
+		} else {
+			return true
+		}
 
-			return false
+		reportLengthAssertion(exp, handler, reportBuilder)
+		return false
+	}
+	return true
+}
+
+// For the BeNumerically matcher, we want to avoid the assertion of capacity to be > 0 or >= 1, or just == number
+func handleCapBeNumerically(matcher *ast.CallExpr, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
+	opExp, ok1 := matcher.Args[0].(*ast.BasicLit)
+	valExp, ok2 := matcher.Args[1].(*ast.BasicLit)
+
+	if ok1 && ok2 {
+		op := opExp.Value
+		val := valExp.Value
+
+		if (op == `">"` && val == "0") || (op == `">="` && val == "1") {
+			reverseAssertionFuncLogic(exp)
+			handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveCap))
+			exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}}
+		} else if op == `"=="` {
+			replaceNumericCapMatcher(exp, handler, valExp)
+		} else if op == `"!="` {
+			reverseAssertionFuncLogic(exp)
+			replaceNumericCapMatcher(exp, handler, valExp)
+		} else {
+			return true
 		}
+
+		reportCapAssertion(exp, handler, reportBuilder)
+		return false
 	}
 	return true
 }
@@ -924,6 +1384,12 @@ func chooseNumericMatcher(pass *analysis.Pass, exp *ast.CallExpr, handler gomega
 	}
 }
 
+func replaceNumericCapMatcher(exp *ast.CallExpr, handler gomegahandler.Handler, valExp ast.Expr) {
+	caller := exp.Args[0].(*ast.CallExpr)
+	handler.ReplaceFunction(caller, ast.NewIdent(haveCap))
+	exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{valExp}
+}
+
 func reverseAssertionFuncLogic(exp *ast.CallExpr) {
 	assertionFunc := exp.Fun.(*ast.SelectorExpr).Sel
 	assertionFunc.Name = reverseassertion.ChangeAssertionLogic(assertionFunc.Name)
@@ -934,7 +1400,7 @@ func isNegativeAssertion(exp *ast.CallExpr) bool {
 	return reverseassertion.IsNegativeLogic(assertionFunc.Name)
 }
 
-func handleEqualMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, oldExp string) {
+func handleEqualLenMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
 	equalTo, ok := matcher.Args[0].(*ast.BasicLit)
 	if ok {
 		chooseNumericMatcher(pass, exp, handler, equalTo)
@@ -942,16 +1408,29 @@ func handleEqualMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.Cal
 		handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveLen))
 		exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{matcher.Args[0]}
 	}
-	reportLengthAssertion(pass, exp, handler, oldExp)
+	reportLengthAssertion(exp, handler, reportBuilder)
+}
+
+func handleEqualCapMatcher(matcher *ast.CallExpr, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
+	handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveCap))
+	exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{matcher.Args[0]}
+	reportCapAssertion(exp, handler, reportBuilder)
 }
 
-func handleBeZero(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, oldExp string) {
+func handleBeZero(exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
 	exp.Args[0].(*ast.CallExpr).Args = nil
 	handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(beEmpty))
-	reportLengthAssertion(pass, exp, handler, oldExp)
+	reportLengthAssertion(exp, handler, reportBuilder)
+}
+
+func handleCapBeZero(exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
+	exp.Args[0].(*ast.CallExpr).Args = nil
+	handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveCap))
+	exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}}
+	reportCapAssertion(exp, handler, reportBuilder)
 }
 
-func handleEqualNilMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, oldExp string) {
+func handleEqualNilMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, reportBuilder *reports.Builder) {
 	equalTo, ok := matcher.Args[0].(*ast.Ident)
 	if !ok {
 		return
@@ -968,22 +1447,22 @@ func handleEqualNilMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.
 	handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(newFuncName))
 	exp.Args[0].(*ast.CallExpr).Args = nil
 
-	reportNilAssertion(pass, exp, handler, nilable, notEqual, oldExp, isItError)
+	reportNilAssertion(exp, handler, nilable, notEqual, isItError, reportBuilder)
 }
 
-func handleNilBeBoolMatcher(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, oldExp string) {
+func handleNilBeBoolMatcher(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, reportBuilder *reports.Builder) {
 	newFuncName, isItError := handleNilComparisonErr(pass, exp, nilable)
 	handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(newFuncName))
 	exp.Args[0].(*ast.CallExpr).Args = nil
 
-	reportNilAssertion(pass, exp, handler, nilable, notEqual, oldExp, isItError)
+	reportNilAssertion(exp, handler, nilable, notEqual, isItError, reportBuilder)
 }
 
 func handleNilComparisonErr(pass *analysis.Pass, exp *ast.CallExpr, nilable ast.Expr) (string, bool) {
 	newFuncName := beNil
 	isItError := isExprError(pass, nilable)
 	if isItError {
-		if _, ok := nilable.(*ast.CallExpr); ok {
+		if is[*ast.CallExpr](nilable) {
 			newFuncName = succeed
 		} else {
 			reverseAssertionFuncLogic(exp)
@@ -1002,14 +1481,21 @@ func isAssertionFunc(name string) bool {
 	return false
 }
 
-func reportLengthAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegahandler.Handler, oldExpr string) {
+func reportLengthAssertion(expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
 	actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
 	replaceLenActualArg(actualExpr, handler)
 
-	report(pass, expr, wrongLengthWarningTemplate, oldExpr)
+	reportBuilder.AddIssue(true, wrongLengthWarningTemplate)
 }
 
-func reportNilAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, oldExpr string, isItError bool) {
+func reportCapAssertion(expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
+	actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
+	replaceLenActualArg(actualExpr, handler)
+
+	reportBuilder.AddIssue(true, wrongCapWarningTemplate)
+}
+
+func reportNilAssertion(expr *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, isItError bool, reportBuilder *reports.Builder) {
 	actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
 	changed := replaceNilActualArg(actualExpr, handler, nilable)
 	if !changed {
@@ -1024,27 +1510,7 @@ func reportNilAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegah
 		template = wrongErrWarningTemplate
 	}
 
-	report(pass, expr, template, oldExpr)
-}
-
-func report(pass *analysis.Pass, expr ast.Expr, messageTemplate, oldExpr string) {
-	newExp := goFmt(pass.Fset, expr)
-	pass.Report(analysis.Diagnostic{
-		Pos:     expr.Pos(),
-		Message: fmt.Sprintf(messageTemplate, newExp),
-		SuggestedFixes: []analysis.SuggestedFix{
-			{
-				Message: fmt.Sprintf("should replace %s with %s", oldExpr, newExp),
-				TextEdits: []analysis.TextEdit{
-					{
-						Pos:     expr.Pos(),
-						End:     expr.End(),
-						NewText: []byte(newExp),
-					},
-				},
-			},
-		},
-	})
+	reportBuilder.AddIssue(true, template)
 }
 
 func reportNewName(pass *analysis.Pass, id *ast.Ident, newName string, messageTemplate, oldExpr string) {
@@ -1111,7 +1577,7 @@ func isComparison(pass *analysis.Pass, actualArg ast.Expr) (ast.Expr, ast.Expr,
 	case *ast.Ident: // check if const
 		info, ok := pass.TypesInfo.Types[realFirst]
 		if ok {
-			if _, ok := info.Type.(*gotypes.Basic); ok && info.Value != nil {
+			if is[*gotypes.Basic](info.Type) && info.Value != nil {
 				replace = true
 			}
 		}
@@ -1143,28 +1609,18 @@ func goFmt(fset *token.FileSet, x ast.Expr) string {
 	return b.String()
 }
 
-var errorType *gotypes.Interface
-
-func init() {
-	errorType = gotypes.Universe.Lookup("error").Type().Underlying().(*gotypes.Interface)
-}
-
-func isError(t gotypes.Type) bool {
-	return gotypes.Implements(t, errorType)
-}
-
 func isExprError(pass *analysis.Pass, expr ast.Expr) bool {
 	actualArgType := pass.TypesInfo.TypeOf(expr)
 	switch t := actualArgType.(type) {
 	case *gotypes.Named:
-		if isError(actualArgType) {
+		if interfaces.ImplementsError(actualArgType) {
 			return true
 		}
 	case *gotypes.Tuple:
 		if t.Len() > 0 {
 			switch t0 := t.At(0).Type().(type) {
 			case *gotypes.Named, *gotypes.Pointer:
-				if isError(t0) {
+				if interfaces.ImplementsError(t0) {
 					return true
 				}
 			}
@@ -1175,14 +1631,12 @@ func isExprError(pass *analysis.Pass, expr ast.Expr) bool {
 
 func isPointer(pass *analysis.Pass, expr ast.Expr) bool {
 	t := pass.TypesInfo.TypeOf(expr)
-	_, ok := t.(*gotypes.Pointer)
-	return ok
+	return is[*gotypes.Pointer](t)
 }
 
 func isInterface(pass *analysis.Pass, expr ast.Expr) bool {
 	t := pass.TypesInfo.TypeOf(expr)
-	_, ok := t.(*gotypes.Named)
-	return ok
+	return gotypes.IsInterface(t)
 }
 
 func isNumeric(pass *analysis.Pass, node ast.Expr) bool {
@@ -1198,22 +1652,22 @@ func isNumeric(pass *analysis.Pass, node ast.Expr) bool {
 func checkNoAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegahandler.Handler) {
 	funcName, ok := handler.GetActualFuncName(expr)
 	if ok {
-		if isActualFunc(funcName) {
-			reportNoFix(pass, expr.Pos(), missingAssertionMessage, funcName)
-		} else if isActualAsyncFunc(funcName) {
-			reportNoFix(pass, expr.Pos(), missingAsyncAssertionMessage, funcName)
+		var allowedFunction string
+		switch funcName {
+		case expect, expectWithOffset:
+			allowedFunction = `"To()", "ToNot()" or "NotTo()"`
+		case eventually, eventuallyWithOffset, consistently, consistentlyWithOffset:
+			allowedFunction = `"Should()" or "ShouldNot()"`
+		case omega:
+			allowedFunction = `"Should()", "To()", "ShouldNot()", "ToNot()" or "NotTo()"`
+		default:
+			return
 		}
+		reportNoFix(pass, expr.Pos(), missingAssertionMessage, funcName, allowedFunction)
 	}
 }
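
For orientation, a minimal example (mine) of the pattern checkNoAssertion reports: a Gomega actual with no assertion method chained onto it.

	Expect(err)                       // flagged: missing "To()", "ToNot()" or "NotTo()"
	Expect(err).ToNot(HaveOccurred()) // complete assertion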
 
-func isActualFunc(name string) bool {
-	return name == expect || name == expectWithOffset
-}
-
-func isActualAsyncFunc(name string) bool {
-	switch name {
-	case eventually, eventuallyWithOffset, consistently, consistentlyWithOffset:
-		return true
-	}
-	return false
+func is[T any](x any) bool {
+	_, matchType := x.(T)
+	return matchType
 }
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/types/config.go b/vendor/github.com/nunnatsa/ginkgolinter/types/config.go
index 6d7a0991425aa92cacced3db70f5ff4c146055ad..b6838e524437f6e6358b44596eab4c5db0c90425 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/types/config.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/types/config.go
@@ -1,9 +1,8 @@
 package types
 
 import (
-	"strings"
-
 	"go/ast"
+	"strings"
 )
 
 const (
@@ -14,16 +13,21 @@ const (
 	suppressCompareAssertionWarning = suppressPrefix + "ignore-compare-assert-warning"
 	suppressAsyncAsertWarning       = suppressPrefix + "ignore-async-assert-warning"
 	suppressFocusContainerWarning   = suppressPrefix + "ignore-focus-container-warning"
+	suppressTypeCompareWarning      = suppressPrefix + "ignore-type-compare-warning"
 )
 
 type Config struct {
-	SuppressLen     Boolean
-	SuppressNil     Boolean
-	SuppressErr     Boolean
-	SuppressCompare Boolean
-	SuppressAsync   Boolean
-	ForbidFocus     Boolean
-	AllowHaveLen0   Boolean
+	SuppressLen            Boolean
+	SuppressNil            Boolean
+	SuppressErr            Boolean
+	SuppressCompare        Boolean
+	SuppressAsync          Boolean
+	ForbidFocus            Boolean
+	SuppressTypeCompare    Boolean
+	AllowHaveLen0          Boolean
+	ForceExpectTo          Boolean
+	ValidateAsyncIntervals Boolean
+	ForbidSpecPollution    Boolean
 }
 
 func (s *Config) AllTrue() bool {
@@ -32,13 +36,17 @@ func (s *Config) AllTrue() bool {
 
 func (s *Config) Clone() Config {
 	return Config{
-		SuppressLen:     s.SuppressLen,
-		SuppressNil:     s.SuppressNil,
-		SuppressErr:     s.SuppressErr,
-		SuppressCompare: s.SuppressCompare,
-		SuppressAsync:   s.SuppressAsync,
-		ForbidFocus:     s.ForbidFocus,
-		AllowHaveLen0:   s.AllowHaveLen0,
+		SuppressLen:            s.SuppressLen,
+		SuppressNil:            s.SuppressNil,
+		SuppressErr:            s.SuppressErr,
+		SuppressCompare:        s.SuppressCompare,
+		SuppressAsync:          s.SuppressAsync,
+		ForbidFocus:            s.ForbidFocus,
+		SuppressTypeCompare:    s.SuppressTypeCompare,
+		AllowHaveLen0:          s.AllowHaveLen0,
+		ForceExpectTo:          s.ForceExpectTo,
+		ValidateAsyncIntervals: s.ValidateAsyncIntervals,
+		ForbidSpecPollution:    s.ForbidSpecPollution,
 	}
 }
 
@@ -69,6 +77,8 @@ func (s *Config) UpdateFromComment(commentGroup []*ast.CommentGroup) {
 					s.SuppressAsync = true
 				case suppressFocusContainerWarning:
 					s.ForbidFocus = false
+				case suppressTypeCompareWarning:
+					s.SuppressTypeCompare = true
 				}
 			}
 		}
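
The new suppressTypeCompareWarning constant builds on suppressPrefix, which is defined elsewhere in this file and not shown in the hunk; assuming it is the linter's usual "ginkgo-linter:" marker, the new suppression comment in a test file would look like:

	// ginkgo-linter:ignore-type-compare-warning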
diff --git a/vendor/github.com/onsi/ginkgo/v2/CHANGELOG.md b/vendor/github.com/onsi/ginkgo/v2/CHANGELOG.md
index 44222220a383b7e0835222f0ce9157598811846f..0a894979998f59bfbf0f53e938ee6db8dd2261c9 100644
--- a/vendor/github.com/onsi/ginkgo/v2/CHANGELOG.md
+++ b/vendor/github.com/onsi/ginkgo/v2/CHANGELOG.md
@@ -1,3 +1,23 @@
+## 2.17.2
+
+### Fixes
+- fix: close files [32259c8]
+- fix github output log level for skipped specs [780e7a3]
+
+### Maintenance
+- Bump github.com/google/pprof [d91fe4e]
+- Bump github.com/go-task/slim-sprig to v3 [8cb662e]
+- Bump golang.org/x/net in /integration/_fixtures/version_mismatch_fixture (#1391) [3134422]
+- Bump github-pages from 230 to 231 in /docs (#1384) [eca81b4]
+- Bump golang.org/x/tools from 0.19.0 to 0.20.0 (#1383) [760def8]
+- Bump golang.org/x/net from 0.23.0 to 0.24.0 (#1381) [4ce33f4]
+- Fix test for gomega version bump [f2fcd97]
+- Bump github.com/onsi/gomega from 1.30.0 to 1.33.0 (#1390) [fd622d2]
+- Bump golang.org/x/tools from 0.17.0 to 0.19.0 (#1368) [5474a26]
+- Bump github-pages from 229 to 230 in /docs (#1359) [e6d1170]
+- Bump google.golang.org/protobuf from 1.28.0 to 1.33.0 (#1374) [7f447b2]
+- Bump golang.org/x/net from 0.20.0 to 0.23.0 (#1380) [f15239a]
+
 ## 2.17.1
 
 ### Fixes
diff --git a/vendor/github.com/onsi/ginkgo/v2/ginkgo/generators/bootstrap_command.go b/vendor/github.com/onsi/ginkgo/v2/ginkgo/generators/bootstrap_command.go
index 73aff0b7a18503cda63ebdb2974d2e31f03ae353..b2dc59be66fd6ee0208ecf97c8abf3ea2c4b0ad5 100644
--- a/vendor/github.com/onsi/ginkgo/v2/ginkgo/generators/bootstrap_command.go
+++ b/vendor/github.com/onsi/ginkgo/v2/ginkgo/generators/bootstrap_command.go
@@ -7,7 +7,7 @@ import (
 	"os"
 	"text/template"
 
-	sprig "github.com/go-task/slim-sprig"
+	sprig "github.com/go-task/slim-sprig/v3"
 	"github.com/onsi/ginkgo/v2/ginkgo/command"
 	"github.com/onsi/ginkgo/v2/ginkgo/internal"
 	"github.com/onsi/ginkgo/v2/types"
diff --git a/vendor/github.com/onsi/ginkgo/v2/ginkgo/generators/generate_command.go b/vendor/github.com/onsi/ginkgo/v2/ginkgo/generators/generate_command.go
index be01dec979dc2c3ac5c89460aa3d7ebf92fe9dbf..cf3b7cb6d6d5e060482dfd1c175d156090d77213 100644
--- a/vendor/github.com/onsi/ginkgo/v2/ginkgo/generators/generate_command.go
+++ b/vendor/github.com/onsi/ginkgo/v2/ginkgo/generators/generate_command.go
@@ -10,7 +10,7 @@ import (
 	"strings"
 	"text/template"
 
-	sprig "github.com/go-task/slim-sprig"
+	sprig "github.com/go-task/slim-sprig/v3"
 	"github.com/onsi/ginkgo/v2/ginkgo/command"
 	"github.com/onsi/ginkgo/v2/ginkgo/internal"
 	"github.com/onsi/ginkgo/v2/types"
@@ -174,6 +174,7 @@ func moduleName(modRoot string) string {
 	if err != nil {
 		return ""
 	}
+	defer modFile.Close()
 
 	mod := make([]byte, 128)
 	_, err = modFile.Read(mod)
diff --git a/vendor/github.com/onsi/ginkgo/v2/ginkgo/internal/profiles_and_reports.go b/vendor/github.com/onsi/ginkgo/v2/ginkgo/internal/profiles_and_reports.go
index 5f35864ddba7335c73eb0cef153464d6efb5d85f..8e16d2bb034bdf6b3b189cf20b0711b808a382af 100644
--- a/vendor/github.com/onsi/ginkgo/v2/ginkgo/internal/profiles_and_reports.go
+++ b/vendor/github.com/onsi/ginkgo/v2/ginkgo/internal/profiles_and_reports.go
@@ -161,6 +161,7 @@ func MergeAndCleanupCoverProfiles(profiles []string, destination string) error {
 	if err != nil {
 		return err
 	}
+	defer dst.Close()
 	err = DumpCoverProfiles(merged, dst)
 	if err != nil {
 		return err
@@ -196,6 +197,7 @@ func MergeProfiles(profilePaths []string, destination string) error {
 			return fmt.Errorf("Could not open profile: %s\n%s", profilePath, err.Error())
 		}
 		prof, err := profile.Parse(proFile)
+		_ = proFile.Close()
 		if err != nil {
 			return fmt.Errorf("Could not parse profile: %s\n%s", profilePath, err.Error())
 		}
diff --git a/vendor/github.com/onsi/ginkgo/v2/reporters/default_reporter.go b/vendor/github.com/onsi/ginkgo/v2/reporters/default_reporter.go
index 4026859ec397078b758fd4a551f10fd5e007de7b..980973370e06ebe654d688ef8d60c2dd0c5dccf4 100644
--- a/vendor/github.com/onsi/ginkgo/v2/reporters/default_reporter.go
+++ b/vendor/github.com/onsi/ginkgo/v2/reporters/default_reporter.go
@@ -419,7 +419,11 @@ func (r *DefaultReporter) emitFailure(indent uint, state types.SpecState, failur
 	highlightColor := r.highlightColorForState(state)
 	r.emitBlock(r.fi(indent, highlightColor+"[%s] %s{{/}}", r.humanReadableState(state), failure.Message))
 	if r.conf.GithubOutput {
-		r.emitBlock(r.fi(indent, "::error file=%s,line=%d::%s %s", failure.Location.FileName, failure.Location.LineNumber, failure.FailureNodeType, failure.TimelineLocation.Time.Format(types.GINKGO_TIME_FORMAT)))
+		level := "error"
+		if state.Is(types.SpecStateSkipped) {
+			level = "notice"
+		}
+		r.emitBlock(r.fi(indent, "::%s file=%s,line=%d::%s %s", level, failure.Location.FileName, failure.Location.LineNumber, failure.FailureNodeType, failure.TimelineLocation.Time.Format(types.GINKGO_TIME_FORMAT)))
 	} else {
 		r.emitBlock(r.fi(indent, highlightColor+"In {{bold}}[%s]{{/}}"+highlightColor+" at: {{bold}}%s{{/}} {{gray}}@ %s{{/}}\n", failure.FailureNodeType, failure.Location, failure.TimelineLocation.Time.Format(types.GINKGO_TIME_FORMAT)))
 	}
diff --git a/vendor/github.com/onsi/ginkgo/v2/reporters/junit_report.go b/vendor/github.com/onsi/ginkgo/v2/reporters/junit_report.go
index 43244a9bd5191bd6311d506b386a675dd2effaaa..2a3215b5138fded8ef36466b42bb47e446f523f4 100644
--- a/vendor/github.com/onsi/ginkgo/v2/reporters/junit_report.go
+++ b/vendor/github.com/onsi/ginkgo/v2/reporters/junit_report.go
@@ -324,6 +324,7 @@ func MergeAndCleanupJUnitReports(sources []string, dst string) ([]string, error)
 			continue
 		}
 		err = xml.NewDecoder(f).Decode(&report)
+		_ = f.Close()
 		if err != nil {
 			messages = append(messages, fmt.Sprintf("Could not decode %s:\n%s", source, err.Error()))
 			continue
diff --git a/vendor/github.com/onsi/ginkgo/v2/types/version.go b/vendor/github.com/onsi/ginkgo/v2/types/version.go
index 851d42b456b837ad6b97dc492063e04b265d632b..5dd0140cd34c2c11fba0d28f7c586b85716e3b38 100644
--- a/vendor/github.com/onsi/ginkgo/v2/types/version.go
+++ b/vendor/github.com/onsi/ginkgo/v2/types/version.go
@@ -1,3 +1,3 @@
 package types
 
-const VERSION = "2.17.1"
+const VERSION = "2.17.2"
diff --git a/vendor/github.com/onsi/gomega/CHANGELOG.md b/vendor/github.com/onsi/gomega/CHANGELOG.md
index 01ec5245cdc6efa9b045174f0aed9111feb84089..62af14ad2f2a797b13424075b8108fe76a670117 100644
--- a/vendor/github.com/onsi/gomega/CHANGELOG.md
+++ b/vendor/github.com/onsi/gomega/CHANGELOG.md
@@ -1,3 +1,22 @@
+## 1.33.1
+
+### Fixes
+- fix confusing eventually docs [3a66379]
+
+### Maintenance
+- Bump github.com/onsi/ginkgo/v2 from 2.17.1 to 2.17.2 [e9bc35a]
+
+## 1.33.0
+
+### Features
+
+`Receive` now accepts `Receive(<POINTER>, <MATCHER>)`, allowing you to pick out a specific value on the channel that satisfies the provided matcher and have it stored in the provided pointer.
+
+### Maintenance
+- Bump github.com/onsi/ginkgo/v2 from 2.15.0 to 2.17.1 (#745) [9999deb]
+- Bump github-pages from 229 to 230 in /docs (#735) [cb5ff21]
+- Bump golang.org/x/net from 0.20.0 to 0.23.0 (#746) [bac6596]
+
 ## 1.32.0
 
 ### Maintenance
diff --git a/vendor/github.com/onsi/gomega/gomega_dsl.go b/vendor/github.com/onsi/gomega/gomega_dsl.go
index ffb81b1feb39ab80a21a95a1d4f15c351686017e..9697d5134ff4845dd538568f4ab8697f4e80d69a 100644
--- a/vendor/github.com/onsi/gomega/gomega_dsl.go
+++ b/vendor/github.com/onsi/gomega/gomega_dsl.go
@@ -22,7 +22,7 @@ import (
 	"github.com/onsi/gomega/types"
 )
 
-const GOMEGA_VERSION = "1.32.0"
+const GOMEGA_VERSION = "1.33.1"
 
 const nilGomegaPanic = `You are trying to make an assertion, but haven't registered Gomega's fail handler.
 If you're using Ginkgo then you probably forgot to put your assertion in an It().
@@ -372,11 +372,11 @@ You can ensure that you get a number of consecutive successful tries before succ
 
 Finally, in addition to passing timeouts and a context to Eventually you can be more explicit with Eventually's chaining configuration methods:
 
-	Eventually(..., "1s", "2s", ctx).Should(...)
+	Eventually(..., "10s", "2s", ctx).Should(...)
 
 is equivalent to
 
-	Eventually(...).WithTimeout(time.Second).WithPolling(2*time.Second).WithContext(ctx).Should(...)
+	Eventually(...).WithTimeout(10*time.Second).WithPolling(2*time.Second).WithContext(ctx).Should(...)
 */
 func Eventually(actualOrCtx interface{}, args ...interface{}) AsyncAssertion {
 	ensureDefaultGomegaIsConfigured()
diff --git a/vendor/github.com/onsi/gomega/matchers.go b/vendor/github.com/onsi/gomega/matchers.go
index 8860d677fc8f8076c9533467677b46fd195a0a6f..7ef27dc9c9556c55dbb06324fd1e22a888e9a203 100644
--- a/vendor/github.com/onsi/gomega/matchers.go
+++ b/vendor/github.com/onsi/gomega/matchers.go
@@ -194,20 +194,21 @@ func BeClosed() types.GomegaMatcher {
 //
 // will repeatedly attempt to pull values out of `c` until a value matching "bar" is received.
 //
-// Finally, if you want to have a reference to the value *sent* to the channel you can pass the `Receive` matcher a pointer to a variable of the appropriate type:
+// Furthermore, if you want to have a reference to the value *sent* to the channel you can pass the `Receive` matcher a pointer to a variable of the appropriate type:
 //
 //	var myThing thing
 //	Eventually(thingChan).Should(Receive(&myThing))
 //	Expect(myThing.Sprocket).Should(Equal("foo"))
 //	Expect(myThing.IsValid()).Should(BeTrue())
+//
+// Finally, if you want to match the received value and also capture it in a variable for further
+// assertions, pass a pointer to a variable of the appropriate type first, followed by a matcher:
+//
+//	var myThing thing
+//	Eventually(thingChan).Should(Receive(&myThing, ContainSubstring("bar")))
 func Receive(args ...interface{}) types.GomegaMatcher {
-	var arg interface{}
-	if len(args) > 0 {
-		arg = args[0]
-	}
-
 	return &matchers.ReceiveMatcher{
-		Arg: arg,
+		Args: args,
 	}
 }
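
A minimal usage sketch of the new pointer-plus-matcher form documented above (variable names are illustrative):

	resultChan := make(chan string, 1)
	resultChan <- "foobar"
	var got string
	Eventually(resultChan).Should(Receive(&got, ContainSubstring("bar")))
	Expect(got).To(Equal("foobar"))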
 
diff --git a/vendor/github.com/onsi/gomega/matchers/receive_matcher.go b/vendor/github.com/onsi/gomega/matchers/receive_matcher.go
index 1936a2ba52f28551de9b735b6f597290787c100c..948164eaf88be9037a95e46c2ac068be05321de8 100644
--- a/vendor/github.com/onsi/gomega/matchers/receive_matcher.go
+++ b/vendor/github.com/onsi/gomega/matchers/receive_matcher.go
@@ -3,6 +3,7 @@
 package matchers
 
 import (
+	"errors"
 	"fmt"
 	"reflect"
 
@@ -10,7 +11,7 @@ import (
 )
 
 type ReceiveMatcher struct {
-	Arg           interface{}
+	Args          []interface{}
 	receivedValue reflect.Value
 	channelClosed bool
 }
@@ -29,15 +30,38 @@ func (matcher *ReceiveMatcher) Match(actual interface{}) (success bool, err erro
 
 	var subMatcher omegaMatcher
 	var hasSubMatcher bool
-
-	if matcher.Arg != nil {
-		subMatcher, hasSubMatcher = (matcher.Arg).(omegaMatcher)
+	var resultReference interface{}
+
+	// Valid arg formats are as follows, always with optional POINTER before
+	// optional MATCHER:
+	//   - Receive()
+	//   - Receive(POINTER)
+	//   - Receive(MATCHER)
+	//   - Receive(POINTER, MATCHER)
+	args := matcher.Args
+	if len(args) > 0 {
+		arg := args[0]
+		_, isSubMatcher := arg.(omegaMatcher)
+		if !isSubMatcher && reflect.ValueOf(arg).Kind() == reflect.Ptr {
+			// Consume optional POINTER arg first, if it ain't no matcher ;)
+			resultReference = arg
+			args = args[1:]
+		}
+	}
+	if len(args) > 0 {
+		arg := args[0]
+		subMatcher, hasSubMatcher = arg.(omegaMatcher)
 		if !hasSubMatcher {
-			argType := reflect.TypeOf(matcher.Arg)
-			if argType.Kind() != reflect.Ptr {
-				return false, fmt.Errorf("Cannot assign a value from the channel:\n%s\nTo:\n%s\nYou need to pass a pointer!", format.Object(actual, 1), format.Object(matcher.Arg, 1))
-			}
+			// At this point we assume the dev user wanted to assign a received
+			// value, so [POINTER,]MATCHER.
+			return false, fmt.Errorf("Cannot assign a value from the channel:\n%s\nTo:\n%s\nYou need to pass a pointer!", format.Object(actual, 1), format.Object(arg, 1))
 		}
+		// Consume optional MATCHER arg.
+		args = args[1:]
+	}
+	if len(args) > 0 {
+		// If there are still args present, reject all.
+		return false, errors.New("Receive matcher expects at most an optional pointer and/or an optional matcher")
 	}
 
 	winnerIndex, value, open := reflect.Select([]reflect.SelectCase{
@@ -58,16 +82,20 @@ func (matcher *ReceiveMatcher) Match(actual interface{}) (success bool, err erro
 	}
 
 	if hasSubMatcher {
-		if didReceive {
-			matcher.receivedValue = value
-			return subMatcher.Match(matcher.receivedValue.Interface())
+		if !didReceive {
+			return false, nil
 		}
-		return false, nil
+		matcher.receivedValue = value
+		if match, err := subMatcher.Match(matcher.receivedValue.Interface()); err != nil || !match {
+			return match, err
+		}
+		// if we received a match, then fall through in order to handle an
+		// optional assignment of the received value to the specified reference.
 	}
 
 	if didReceive {
-		if matcher.Arg != nil {
-			outValue := reflect.ValueOf(matcher.Arg)
+		if resultReference != nil {
+			outValue := reflect.ValueOf(resultReference)
 
 			if value.Type().AssignableTo(outValue.Elem().Type()) {
 				outValue.Elem().Set(value)
@@ -77,7 +105,7 @@ func (matcher *ReceiveMatcher) Match(actual interface{}) (success bool, err erro
 				outValue.Elem().Set(value.Elem())
 				return true, nil
 			} else {
-				return false, fmt.Errorf("Cannot assign a value from the channel:\n%s\nType:\n%s\nTo:\n%s", format.Object(actual, 1), format.Object(value.Interface(), 1), format.Object(matcher.Arg, 1))
+				return false, fmt.Errorf("Cannot assign a value from the channel:\n%s\nType:\n%s\nTo:\n%s", format.Object(actual, 1), format.Object(value.Interface(), 1), format.Object(resultReference, 1))
 			}
 
 		}
@@ -88,7 +116,11 @@ func (matcher *ReceiveMatcher) Match(actual interface{}) (success bool, err erro
 }
 
 func (matcher *ReceiveMatcher) FailureMessage(actual interface{}) (message string) {
-	subMatcher, hasSubMatcher := (matcher.Arg).(omegaMatcher)
+	var matcherArg interface{}
+	if len(matcher.Args) > 0 {
+		matcherArg = matcher.Args[len(matcher.Args)-1]
+	}
+	subMatcher, hasSubMatcher := (matcherArg).(omegaMatcher)
 
 	closedAddendum := ""
 	if matcher.channelClosed {
@@ -105,7 +137,11 @@ func (matcher *ReceiveMatcher) FailureMessage(actual interface{}) (message strin
 }
 
 func (matcher *ReceiveMatcher) NegatedFailureMessage(actual interface{}) (message string) {
-	subMatcher, hasSubMatcher := (matcher.Arg).(omegaMatcher)
+	var matcherArg interface{}
+	if len(matcher.Args) > 0 {
+		matcherArg = matcher.Args[len(matcher.Args)-1]
+	}
+	subMatcher, hasSubMatcher := (matcherArg).(omegaMatcher)
 
 	closedAddendum := ""
 	if matcher.channelClosed {
diff --git a/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/LICENSE b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64
--- /dev/null
+++ b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/failuredomain/failuredomain.go b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/failuredomain/failuredomain.go
new file mode 100644
index 0000000000000000000000000000000000000000..937d600be0ef19e4c441585fb06056c69c463d3b
--- /dev/null
+++ b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/failuredomain/failuredomain.go
@@ -0,0 +1,588 @@
+/*
+Copyright 2022 Red Hat, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package failuredomain
+
+import (
+	"errors"
+	"fmt"
+	"reflect"
+	"strings"
+
+	configv1 "github.com/openshift/api/config/v1"
+	machinev1 "github.com/openshift/api/machine/v1"
+)
+
+const (
+	// unknownFailureDomain is used as the string representation of a failure
+	// domain when the platform type is unrecognised.
+	unknownFailureDomain = "<unknown>"
+)
+
+var (
+	// errUnsupportedPlatformType is an error used when an unknown platform
+	// type is configured within the failure domain config.
+	errUnsupportedPlatformType = errors.New("unsupported platform type")
+
+	// errMissingFailureDomain is an error used when failure domain platform is set
+	// but the failure domain list is nil.
+	errMissingFailureDomain = errors.New("missing failure domain configuration")
+
+	// errMissingTemplateFailureDomain is an error used when you attempt to combine a failure domain with a nil template failure domain.
+	errMissingTemplateFailureDomain = errors.New("failure domain extracted from machine template is nil")
+
+	// errMismatchedPlatformType is an error used when attempting to compare failure domains of different platform types.
+	errMismatchedPlatformType = errors.New("platform types do not match")
+)
+
+// FailureDomain is an interface that allows external code to interact with
+// failure domains across different platform types.
+type FailureDomain interface {
+	// String returns a string representation of the failure domain.
+	String() string
+
+	// Type returns the platform type of the failure domain.
+	Type() configv1.PlatformType
+
+	// AWS returns the AWSFailureDomain if the platform type is AWS.
+	AWS() machinev1.AWSFailureDomain
+
+	// Azure returns the AzureFailureDomain if the platform type is Azure.
+	Azure() machinev1.AzureFailureDomain
+
+	// GCP returns the GCPFailureDomain if the platform type is GCP.
+	GCP() machinev1.GCPFailureDomain
+
+	// OpenStack returns the OpenStackFailureDomain if the platform type is OpenStack.
+	OpenStack() machinev1.OpenStackFailureDomain
+
+	// VSphere returns the VSphereFailureDomain if the platform type is VSphere.
+	VSphere() machinev1.VSphereFailureDomain
+
+	// Nutanix returns the NutanixFailureDomainReference if the platform type is Nutanix.
+	Nutanix() machinev1.NutanixFailureDomainReference
+
+	// Equal compares the underlying failure domain.
+	Equal(other FailureDomain) bool
+
+	// Complete ensures that any empty value in the failure domain is populated based on the template failure domain, to create a fully specified failure domain.
+	Complete(templateFailureDomain FailureDomain) (FailureDomain, error)
+}
+
+// failureDomain holds an implementation of the FailureDomain interface.
+type failureDomain struct {
+	platformType configv1.PlatformType
+
+	aws       machinev1.AWSFailureDomain
+	azure     machinev1.AzureFailureDomain
+	gcp       machinev1.GCPFailureDomain
+	openstack machinev1.OpenStackFailureDomain
+	vsphere   machinev1.VSphereFailureDomain
+	nutanix   machinev1.NutanixFailureDomainReference
+}
+
+// String returns a string representation of the failure domain.
+func (f failureDomain) String() string {
+	switch f.platformType {
+	case configv1.AWSPlatformType:
+		return awsFailureDomainToString(f.aws)
+	case configv1.AzurePlatformType:
+		return azureFailureDomainToString(f.azure)
+	case configv1.GCPPlatformType:
+		return gcpFailureDomainToString(f.gcp)
+	case configv1.OpenStackPlatformType:
+		return openstackFailureDomainToString(f.openstack)
+	case configv1.VSpherePlatformType:
+		return vsphereFailureDomainToString(f.vsphere)
+	case configv1.NutanixPlatformType:
+		return nutanixFailureDomainToString(f.nutanix)
+	default:
+		return fmt.Sprintf("%sFailureDomain{}", f.platformType)
+	}
+}
+
+// Type returns the platform type of the failure domain.
+func (f failureDomain) Type() configv1.PlatformType {
+	return f.platformType
+}
+
+// AWS returns the AWSFailureDomain if the platform type is AWS.
+func (f failureDomain) AWS() machinev1.AWSFailureDomain {
+	return f.aws
+}
+
+// Azure returns the AzureFailureDomain if the platform type is Azure.
+func (f failureDomain) Azure() machinev1.AzureFailureDomain {
+	return f.azure
+}
+
+// GCP returns the GCPFailureDomain if the platform type is GCP.
+func (f failureDomain) GCP() machinev1.GCPFailureDomain {
+	return f.gcp
+}
+
+// OpenStack returns the OpenStackFailureDomain if the platform type is OpenStack.
+func (f failureDomain) OpenStack() machinev1.OpenStackFailureDomain {
+	return f.openstack
+}
+
+// VSphere returns the VSphereFailureDomain if the platform type is VSphere.
+func (f failureDomain) VSphere() machinev1.VSphereFailureDomain {
+	return f.vsphere
+}
+
+// Nutanix returns the NutanixFailureDomainReference if the platform type is Nutanix.
+func (f failureDomain) Nutanix() machinev1.NutanixFailureDomainReference {
+	return f.nutanix
+}
+
+// Equal compares the underlying failure domain.
+func (f failureDomain) Equal(other FailureDomain) bool {
+	if other == nil {
+		return false
+	}
+
+	if f.platformType != other.Type() {
+		return false
+	}
+
+	switch f.platformType {
+	case configv1.AWSPlatformType:
+		return reflect.DeepEqual(f.AWS(), other.AWS())
+	case configv1.AzurePlatformType:
+		return f.azure == other.Azure()
+	case configv1.GCPPlatformType:
+		return f.gcp == other.GCP()
+	case configv1.OpenStackPlatformType:
+		return reflect.DeepEqual(f.openstack, other.OpenStack())
+	case configv1.VSpherePlatformType:
+		return reflect.DeepEqual(f.vsphere, other.VSphere())
+	case configv1.NutanixPlatformType:
+		return reflect.DeepEqual(f.nutanix, other.Nutanix())
+	}
+
+	return true
+}
+
+// CompleteFailureDomains calls Complete on each failure domain in the list.
+func CompleteFailureDomains(failureDomains []FailureDomain, templateFailureDomain FailureDomain) ([]FailureDomain, error) {
+	comparableFailureDomains := []FailureDomain{}
+
+	for _, failureDomain := range failureDomains {
+		failureDomain, err := failureDomain.Complete(templateFailureDomain)
+		if err != nil {
+			return nil, fmt.Errorf("cannot combine failure domain with template failure domain: %w", err)
+		}
+
+		comparableFailureDomains = append(comparableFailureDomains, failureDomain)
+	}
+
+	return comparableFailureDomains, nil
+}
+
+// Complete creates a copy of templateFailureDomain and overrides any set values with the values from the current failure domain.
+func (f failureDomain) Complete(templateFailureDomain FailureDomain) (FailureDomain, error) {
+	if templateFailureDomain == nil {
+		return nil, errMissingTemplateFailureDomain
+	}
+
+	if f.platformType != templateFailureDomain.Type() {
+		return nil, errMismatchedPlatformType
+	}
+
+	switch f.platformType {
+	case configv1.AWSPlatformType:
+		return f.completeAWS(templateFailureDomain.AWS()), nil
+	case configv1.AzurePlatformType:
+		return f.completeAzure(templateFailureDomain.Azure()), nil
+	case configv1.GCPPlatformType:
+		return f.completeGCP(templateFailureDomain.GCP()), nil
+	case configv1.OpenStackPlatformType:
+		return f.completeOpenStack(templateFailureDomain.OpenStack()), nil
+	case configv1.VSpherePlatformType:
+		return f.completeVSphere(templateFailureDomain.VSphere()), nil
+	case configv1.NutanixPlatformType:
+		return f.completeNutanix(templateFailureDomain.Nutanix()), nil
+	default:
+		return NewGenericFailureDomain(), nil
+	}
+}
+
+func (f failureDomain) completeAWS(templateFailureDomain machinev1.AWSFailureDomain) FailureDomain {
+	fd := templateFailureDomain.DeepCopy()
+
+	if f.aws.Placement.AvailabilityZone != "" {
+		fd.Placement = f.aws.Placement
+	}
+
+	if f.aws.Subnet != nil && !reflect.DeepEqual(f.aws.Subnet, machinev1.AWSResourceReference{}) {
+		fd.Subnet = f.aws.Subnet
+	}
+
+	return NewAWSFailureDomain(*fd)
+}
+
+func (f failureDomain) completeAzure(templateFailureDomain machinev1.AzureFailureDomain) FailureDomain {
+	fd := templateFailureDomain.DeepCopy()
+
+	if f.azure.Zone != "" {
+		fd.Zone = f.azure.Zone
+	}
+
+	if f.azure.Subnet != "" {
+		fd.Subnet = f.azure.Subnet
+	}
+
+	return NewAzureFailureDomain(*fd)
+}
+
+func (f failureDomain) completeGCP(templateFailureDomain machinev1.GCPFailureDomain) FailureDomain {
+	fd := templateFailureDomain.DeepCopy()
+
+	if f.gcp.Zone != "" {
+		fd.Zone = f.gcp.Zone
+	}
+
+	return NewGCPFailureDomain(*fd)
+}
+
+func (f failureDomain) completeOpenStack(templateFailureDomain machinev1.OpenStackFailureDomain) FailureDomain {
+	fd := templateFailureDomain.DeepCopy()
+
+	if f.openstack.AvailabilityZone != "" {
+		fd.AvailabilityZone = f.openstack.AvailabilityZone
+	}
+
+	if fd.RootVolume == nil {
+		fd.RootVolume = f.openstack.RootVolume
+	} else if f.openstack.RootVolume != nil {
+		if f.openstack.RootVolume.AvailabilityZone != "" {
+			fd.RootVolume.AvailabilityZone = f.openstack.RootVolume.AvailabilityZone
+		}
+
+		if f.openstack.RootVolume.VolumeType != "" {
+			fd.RootVolume.VolumeType = f.openstack.RootVolume.VolumeType
+		}
+	}
+
+	return NewOpenStackFailureDomain(*fd)
+}
+
+func (f failureDomain) completeVSphere(templateFailureDomain machinev1.VSphereFailureDomain) FailureDomain {
+	fd := templateFailureDomain.DeepCopy()
+
+	if f.vsphere.Name != "" {
+		fd.Name = f.vsphere.Name
+	}
+
+	return NewVSphereFailureDomain(*fd)
+}
+
+func (f failureDomain) completeNutanix(templateFailureDomain machinev1.NutanixFailureDomainReference) FailureDomain {
+	fd := templateFailureDomain.DeepCopy()
+
+	if f.nutanix.Name != "" {
+		fd.Name = f.nutanix.Name
+	}
+
+	return NewNutanixFailureDomain(*fd)
+}
+
+// NewFailureDomains creates a set of FailureDomains representing the input failure
+// domains held within the ControlPlaneMachineSet.
+func NewFailureDomains(failureDomains *machinev1.FailureDomains) ([]FailureDomain, error) {
+	if failureDomains == nil {
+		// Without failure domains all machines will be equal.
+		return nil, nil
+	}
+
+	switch failureDomains.Platform {
+	case configv1.AWSPlatformType:
+		return newAWSFailureDomains(*failureDomains)
+	case configv1.AzurePlatformType:
+		return newAzureFailureDomains(*failureDomains)
+	case configv1.GCPPlatformType:
+		return newGCPFailureDomains(*failureDomains)
+	case configv1.OpenStackPlatformType:
+		return newOpenStackFailureDomains(*failureDomains)
+	case configv1.VSpherePlatformType:
+		return newVSphereFailureDomains(*failureDomains)
+	case configv1.NutanixPlatformType:
+		return newNutanixFailureDomains(*failureDomains)
+	case configv1.PlatformType(""):
+		// An empty failure domains definition is allowed.
+		return nil, nil
+	default:
+		return nil, fmt.Errorf("%w: %s", errUnsupportedPlatformType, failureDomains.Platform)
+	}
+}
+
+// newAWSFailureDomains constructs a slice of AWS FailureDomain from machinev1.FailureDomains.
+func newAWSFailureDomains(failureDomains machinev1.FailureDomains) ([]FailureDomain, error) {
+	foundFailureDomains := []FailureDomain{}
+	if failureDomains.AWS == nil {
+		return foundFailureDomains, errMissingFailureDomain
+	}
+
+	for _, failureDomain := range *failureDomains.AWS {
+		foundFailureDomains = append(foundFailureDomains, NewAWSFailureDomain(failureDomain))
+	}
+
+	return foundFailureDomains, nil
+}
+
+// newAzureFailureDomains constructs a slice of Azure FailureDomain from machinev1.FailureDomains.
+func newAzureFailureDomains(failureDomains machinev1.FailureDomains) ([]FailureDomain, error) {
+	foundFailureDomains := []FailureDomain{}
+	if failureDomains.Azure == nil {
+		return foundFailureDomains, errMissingFailureDomain
+	}
+
+	for _, failureDomain := range *failureDomains.Azure {
+		foundFailureDomains = append(foundFailureDomains, NewAzureFailureDomain(failureDomain))
+	}
+
+	return foundFailureDomains, nil
+}
+
+// newGCPFailureDomains constructs a slice of GCP FailureDomain from machinev1.FailureDomains.
+func newGCPFailureDomains(failureDomains machinev1.FailureDomains) ([]FailureDomain, error) {
+	foundFailureDomains := []FailureDomain{}
+	if failureDomains.GCP == nil {
+		return foundFailureDomains, errMissingFailureDomain
+	}
+
+	for _, failureDomain := range *failureDomains.GCP {
+		foundFailureDomains = append(foundFailureDomains, NewGCPFailureDomain(failureDomain))
+	}
+
+	return foundFailureDomains, nil
+}
+
+// newOpenStackFailureDomains constructs a slice of OpenStack FailureDomain from machinev1.FailureDomains.
+func newOpenStackFailureDomains(failureDomains machinev1.FailureDomains) ([]FailureDomain, error) {
+	foundFailureDomains := []FailureDomain{}
+
+	if len(failureDomains.OpenStack) == 0 {
+		return foundFailureDomains, errMissingFailureDomain
+	}
+
+	for _, failureDomain := range failureDomains.OpenStack {
+		foundFailureDomains = append(foundFailureDomains, NewOpenStackFailureDomain(failureDomain))
+	}
+
+	return foundFailureDomains, nil
+}
+
+// newVSphereFailureDomains constructs a slice of VSphere FailureDomain from machinev1.FailureDomains.
+func newVSphereFailureDomains(failureDomains machinev1.FailureDomains) ([]FailureDomain, error) {
+	foundFailureDomains := []FailureDomain{}
+
+	if len(failureDomains.VSphere) == 0 {
+		return foundFailureDomains, errMissingFailureDomain
+	}
+
+	for _, failureDomain := range failureDomains.VSphere {
+		foundFailureDomains = append(foundFailureDomains, NewVSphereFailureDomain(failureDomain))
+	}
+
+	return foundFailureDomains, nil
+}
+
+// newNutanixFailureDomains constructs a slice of Nutanix FailureDomain from machinev1.FailureDomains.
+func newNutanixFailureDomains(failureDomains machinev1.FailureDomains) ([]FailureDomain, error) {
+	foundFailureDomains := []FailureDomain{}
+
+	if len(failureDomains.Nutanix) == 0 {
+		return foundFailureDomains, errMissingFailureDomain
+	}
+
+	for _, fdRef := range failureDomains.Nutanix {
+		foundFailureDomains = append(foundFailureDomains, NewNutanixFailureDomain(fdRef))
+	}
+
+	return foundFailureDomains, nil
+}
+
+// NewAWSFailureDomain creates an AWS failure domain from the machinev1.AWSFailureDomain.
+// Note this is exported to allow other packages to construct individual failure domains
+// in tests.
+func NewAWSFailureDomain(fd machinev1.AWSFailureDomain) FailureDomain {
+	return &failureDomain{
+		platformType: configv1.AWSPlatformType,
+		aws:          fd,
+	}
+}
+
+// NewAzureFailureDomain creates an Azure failure domain from the machinev1.AzureFailureDomain.
+func NewAzureFailureDomain(fd machinev1.AzureFailureDomain) FailureDomain {
+	return &failureDomain{
+		platformType: configv1.AzurePlatformType,
+		azure:        fd,
+	}
+}
+
+// NewGCPFailureDomain creates a GCP failure domain from the machinev1.GCPFailureDomain.
+func NewGCPFailureDomain(fd machinev1.GCPFailureDomain) FailureDomain {
+	return &failureDomain{
+		platformType: configv1.GCPPlatformType,
+		gcp:          fd,
+	}
+}
+
+// NewOpenStackFailureDomain creates an OpenStack failure domain from the machinev1.OpenStackFailureDomain.
+func NewOpenStackFailureDomain(fd machinev1.OpenStackFailureDomain) FailureDomain {
+	return &failureDomain{
+		platformType: configv1.OpenStackPlatformType,
+		openstack:    fd,
+	}
+}
+
+// NewVSphereFailureDomain creates a VSphere failure domain from the machinev1.VSphereFailureDomain.
+func NewVSphereFailureDomain(fd machinev1.VSphereFailureDomain) FailureDomain {
+	return &failureDomain{
+		platformType: configv1.VSpherePlatformType,
+		vsphere:      fd,
+	}
+}
+
+// NewNutanixFailureDomain creates a Nutanix failure domain from the machinev1.NutanixFailureDomainReference.
+func NewNutanixFailureDomain(fdRef machinev1.NutanixFailureDomainReference) FailureDomain {
+	return &failureDomain{
+		platformType: configv1.NutanixPlatformType,
+		nutanix:      fdRef,
+	}
+}
+
+// NewGenericFailureDomain creates a dummy failure domain for generic platforms that don't support failure domains.
+func NewGenericFailureDomain() FailureDomain {
+	return failureDomain{}
+}
+
+// azString formats AvailabilityZone for awsFailureDomainToString function.
+func azString(az string) string {
+	if az == "" {
+		return ""
+	}
+
+	return fmt.Sprintf("AvailabilityZone:%s, ", az)
+}
+
+// awsFailureDomainToString converts the AWSFailureDomain into a string.
+// The types are slightly adjusted to be more human-readable and nil values are omitted.
+func awsFailureDomainToString(fd machinev1.AWSFailureDomain) string {
+	// Availability zone only
+	if fd.Placement.AvailabilityZone != "" && fd.Subnet == nil {
+		return fmt.Sprintf("AWSFailureDomain{AvailabilityZone:%s}", fd.Placement.AvailabilityZone)
+	}
+
+	// Only subnet or both
+	if fd.Subnet != nil {
+		switch fd.Subnet.Type {
+		case machinev1.AWSARNReferenceType:
+			if fd.Subnet.ARN != nil {
+				return fmt.Sprintf("AWSFailureDomain{%sSubnet:{Type:%s, Value:%s}}", azString(fd.Placement.AvailabilityZone), fd.Subnet.Type, *fd.Subnet.ARN)
+			}
+		case machinev1.AWSFiltersReferenceType:
+			if fd.Subnet.Filters != nil {
+				return fmt.Sprintf("AWSFailureDomain{%sSubnet:{Type:%s, Value:%+v}}", azString(fd.Placement.AvailabilityZone), fd.Subnet.Type, fd.Subnet.Filters)
+			}
+		case machinev1.AWSIDReferenceType:
+			if fd.Subnet.ID != nil {
+				return fmt.Sprintf("AWSFailureDomain{%sSubnet:{Type:%s, Value:%s}}", azString(fd.Placement.AvailabilityZone), fd.Subnet.Type, *fd.Subnet.ID)
+			}
+		}
+	}
+
+	// If none of the attempts above produced a suitable string,
+	// fall back to the unknown failure domain representation.
+	return unknownFailureDomain
+}
+
+// azureFailureDomainToString converts the AzureFailureDomain into a string.
+func azureFailureDomainToString(fd machinev1.AzureFailureDomain) string {
+	var failureDomain []string
+
+	if fd.Zone != "" {
+		failureDomain = append(failureDomain, fmt.Sprintf("Zone:%s", fd.Zone))
+	}
+
+	if fd.Subnet != "" {
+		failureDomain = append(failureDomain, fmt.Sprintf("Subnet:%s", fd.Subnet))
+	}
+
+	if len(failureDomain) == 0 {
+		return unknownFailureDomain
+	}
+
+	return "AzureFailureDomain{" + strings.Join(failureDomain, ", ") + "}"
+}
+
+// gcpFailureDomainToString converts the GCPFailureDomain into a string.
+func gcpFailureDomainToString(fd machinev1.GCPFailureDomain) string {
+	if fd.Zone != "" {
+		return fmt.Sprintf("GCPFailureDomain{Zone:%s}", fd.Zone)
+	}
+
+	return unknownFailureDomain
+}
+
+// openstackFailureDomainToString converts the OpenStackFailureDomain into a string.
+func openstackFailureDomainToString(fd machinev1.OpenStackFailureDomain) string {
+	if fd.AvailabilityZone == "" && fd.RootVolume == nil {
+		return unknownFailureDomain
+	}
+
+	var failureDomain []string
+
+	if fd.AvailabilityZone != "" {
+		failureDomain = append(failureDomain, "AvailabilityZone:"+fd.AvailabilityZone)
+	}
+
+	if fd.RootVolume != nil {
+		var rootVolume []string
+
+		if fd.RootVolume.AvailabilityZone != "" {
+			rootVolume = append(rootVolume, "AvailabilityZone:"+fd.RootVolume.AvailabilityZone)
+		}
+
+		if fd.RootVolume.VolumeType != "" {
+			rootVolume = append(rootVolume, "VolumeType:"+fd.RootVolume.VolumeType)
+		}
+
+		failureDomain = append(failureDomain, "RootVolume:{"+strings.Join(rootVolume, ", ")+"}")
+	}
+
+	return "OpenStackFailureDomain{" + strings.Join(failureDomain, ", ") + "}"
+}
+
+// vsphereFailureDomainToString converts the VSphereFailureDomain into a string.
+func vsphereFailureDomainToString(fd machinev1.VSphereFailureDomain) string {
+	if fd.Name != "" {
+		return fmt.Sprintf("VSphereFailureDomain{Name:%s}", fd.Name)
+	}
+
+	return unknownFailureDomain
+}
+
+// nutanixFailureDomainToString converts the NutanixFailureDomainReference into a string.
+func nutanixFailureDomainToString(fdRef machinev1.NutanixFailureDomainReference) string {
+	if fdRef.Name != "" {
+		return fmt.Sprintf("NutanixFailureDomainReference{Name:%s}", fdRef.Name)
+	}
+
+	return unknownFailureDomain
+}
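A brief usage sketch (editorial, not part of the vendored file): the exported constructors above can be combined to expand a ControlPlaneMachineSet's failure domain list and complete it against a template failure domain. The zone and subnet values below are illustrative only.

```go
package main

import (
	"fmt"

	configv1 "github.com/openshift/api/config/v1"
	machinev1 "github.com/openshift/api/machine/v1"
	"github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/failuredomain"
)

func main() {
	// Failure domains as they might appear on a ControlPlaneMachineSet (zones only).
	cpmsFailureDomains := &machinev1.FailureDomains{
		Platform: configv1.AWSPlatformType,
		AWS: &[]machinev1.AWSFailureDomain{
			{Placement: machinev1.AWSFailureDomainPlacement{AvailabilityZone: "us-east-1a"}},
			{Placement: machinev1.AWSFailureDomainPlacement{AvailabilityZone: "us-east-1b"}},
		},
	}

	fds, err := failuredomain.NewFailureDomains(cpmsFailureDomains)
	if err != nil {
		panic(err)
	}

	// A template failure domain, e.g. extracted from the machine template's provider
	// spec, carrying a subnet that the zone-only failure domains should inherit.
	subnetID := "subnet-12345"
	template := failuredomain.NewAWSFailureDomain(machinev1.AWSFailureDomain{
		Subnet: &machinev1.AWSResourceReference{
			Type: machinev1.AWSIDReferenceType,
			ID:   &subnetID,
		},
	})

	// Complete fills any unset fields from the template, so each entry ends up
	// with both its own availability zone and the shared subnet.
	completed, err := failuredomain.CompleteFailureDomains(fds, template)
	if err != nil {
		panic(err)
	}

	for _, fd := range completed {
		fmt.Println(fd.String())
	}
}
```

Each printed line would take the `AWSFailureDomain{AvailabilityZone:..., Subnet:{Type:ID, Value:subnet-12345}}` form produced by awsFailureDomainToString above.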
diff --git a/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/failuredomain/set.go b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/failuredomain/set.go
new file mode 100644
index 0000000000000000000000000000000000000000..f577421c3a8f32efbcfd5eaa2f705bddc8508ec4
--- /dev/null
+++ b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/failuredomain/set.go
@@ -0,0 +1,66 @@
+/*
+Copyright 2022 Red Hat, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package failuredomain
+
+import "sort"
+
+// Set implements set semantics for FailureDomain values.
+// Because FailureDomain is an interface it cannot be used as a map key,
+// so membership is determined by comparing failure domains directly.
+type Set struct {
+	items []FailureDomain
+}
+
+// NewSet creates a new set from the given items.
+func NewSet(items ...FailureDomain) *Set {
+	s := Set{}
+	s.Insert(items...)
+
+	return &s
+}
+
+// Has returns true if the item is in the set.
+func (s *Set) Has(item FailureDomain) bool {
+	for _, fd := range s.items {
+		if fd.Equal(item) {
+			return true
+		}
+	}
+
+	return false
+}
+
+// Insert adds the item to the set.
+func (s *Set) Insert(items ...FailureDomain) {
+	for _, item := range items {
+		if !s.Has(item) {
+			s.items = append(s.items, item)
+		}
+	}
+}
+
+// List returns the items in the set as a sorted slice.
+func (s *Set) List() []FailureDomain {
+	out := []FailureDomain{}
+	out = append(out, s.items...)
+
+	sort.Slice(out, func(i, j int) bool {
+		return out[i].String() < out[j].String()
+	})
+
+	return out
+}
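Another small editorial sketch, again not vendored: because Set deduplicates via FailureDomain.Equal, inserting the same zone twice keeps a single entry, and List returns the members ordered by their string form. The GCP zone names are illustrative only.

```go
package main

import (
	"fmt"

	machinev1 "github.com/openshift/api/machine/v1"
	"github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/failuredomain"
)

func main() {
	zoneA := failuredomain.NewGCPFailureDomain(machinev1.GCPFailureDomain{Zone: "us-central1-a"})
	zoneB := failuredomain.NewGCPFailureDomain(machinev1.GCPFailureDomain{Zone: "us-central1-b"})

	// Inserting zoneA twice keeps a single entry because Insert checks Has,
	// which compares members with FailureDomain.Equal.
	set := failuredomain.NewSet(zoneB, zoneA, zoneA)

	fmt.Println(set.Has(zoneA)) // true

	// List sorts by the string representation, so zoneA prints before zoneB.
	for _, fd := range set.List() {
		fmt.Println(fd.String())
	}
}
```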
diff --git a/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/aws.go b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/aws.go
new file mode 100644
index 0000000000000000000000000000000000000000..2955bf0c1fe19fe9d006f3ba9ed45d6f5fa2cb8f
--- /dev/null
+++ b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/aws.go
@@ -0,0 +1,159 @@
+/*
+Copyright 2022 Red Hat, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package providerconfig
+
+import (
+	"fmt"
+
+	"github.com/go-logr/logr"
+	configv1 "github.com/openshift/api/config/v1"
+	machinev1 "github.com/openshift/api/machine/v1"
+	machinev1beta1 "github.com/openshift/api/machine/v1beta1"
+	"k8s.io/apimachinery/pkg/runtime"
+)
+
+// AWSProviderConfig holds the provider spec of an AWS Machine.
+// It allows external code to extract and inject failure domain information,
+// as well as gathering the stored config.
+type AWSProviderConfig struct {
+	providerConfig machinev1beta1.AWSMachineProviderConfig
+}
+
+// InjectFailureDomain returns a new AWSProviderConfig configured with the failure domain
+// information provided.
+func (a AWSProviderConfig) InjectFailureDomain(fd machinev1.AWSFailureDomain) AWSProviderConfig {
+	newAWSProviderConfig := a
+
+	if fd.Placement.AvailabilityZone != "" {
+		newAWSProviderConfig.providerConfig.Placement.AvailabilityZone = fd.Placement.AvailabilityZone
+	}
+
+	if fd.Subnet != nil {
+		newAWSProviderConfig.providerConfig.Subnet = convertAWSResourceReferenceV1ToV1Beta1(fd.Subnet)
+	}
+
+	return newAWSProviderConfig
+}
+
+// ExtractFailureDomain returns an AWSFailureDomain based on the failure domain
+// information stored within the AWSProviderConfig.
+func (a AWSProviderConfig) ExtractFailureDomain() machinev1.AWSFailureDomain {
+	return machinev1.AWSFailureDomain{
+		Placement: machinev1.AWSFailureDomainPlacement{
+			AvailabilityZone: a.providerConfig.Placement.AvailabilityZone,
+		},
+		Subnet: convertAWSResourceReferenceV1Beta1ToV1(a.providerConfig.Subnet),
+	}
+}
+
+// Config returns the stored AWSMachineProviderConfig.
+func (a AWSProviderConfig) Config() machinev1beta1.AWSMachineProviderConfig {
+	return a.providerConfig
+}
+
+// newAWSProviderConfig creates an AWS type ProviderConfig from the raw extension.
+// It should return an error if the provided RawExtension does not represent
+// an AWSMachineProviderConfig.
+func newAWSProviderConfig(logger logr.Logger, raw *runtime.RawExtension) (ProviderConfig, error) {
+	if raw == nil {
+		return nil, errNilProviderSpec
+	}
+
+	awsMachineProviderConfig := machinev1beta1.AWSMachineProviderConfig{}
+
+	if err := checkForUnknownFieldsInProviderSpecAndUnmarshal(logger, raw, &awsMachineProviderConfig); err != nil {
+		return nil, fmt.Errorf("failed to check for unknown fields in the provider spec: %w", err)
+	}
+
+	awsProviderConfig := AWSProviderConfig{
+		providerConfig: awsMachineProviderConfig,
+	}
+
+	config := providerConfig{
+		platformType: configv1.AWSPlatformType,
+		aws:          awsProviderConfig,
+	}
+
+	return config, nil
+}
+
+// convertAWSResourceReferenceV1Beta1ToV1 creates a machinev1.AWSResourceReference from a machinev1beta1.AWSResourceReference.
+func convertAWSResourceReferenceV1Beta1ToV1(referenceV1Beta1 machinev1beta1.AWSResourceReference) *machinev1.AWSResourceReference {
+	referenceV1 := &machinev1.AWSResourceReference{}
+
+	if referenceV1Beta1.ID != nil {
+		referenceV1.Type = machinev1.AWSIDReferenceType
+		referenceV1.ID = referenceV1Beta1.ID
+
+		return referenceV1
+	}
+
+	if referenceV1Beta1.Filters != nil {
+		referenceV1.Type = machinev1.AWSFiltersReferenceType
+
+		referenceV1.Filters = &[]machinev1.AWSResourceFilter{}
+		for _, filter := range referenceV1Beta1.Filters {
+			*referenceV1.Filters = append(*referenceV1.Filters, machinev1.AWSResourceFilter{
+				Name:   filter.Name,
+				Values: filter.Values,
+			})
+		}
+
+		return referenceV1
+	}
+
+	if referenceV1Beta1.ARN != nil {
+		referenceV1.Type = machinev1.AWSARNReferenceType
+		referenceV1.ARN = referenceV1Beta1.ARN
+
+		return referenceV1
+	}
+
+	return nil
+}
+
+// convertAWSResourceReferenceV1ToV1Beta1 creates a machinev1beta1.AWSResourceReference from a machinev1.AWSResourceReference.
+func convertAWSResourceReferenceV1ToV1Beta1(referenceV1 *machinev1.AWSResourceReference) machinev1beta1.AWSResourceReference {
+	referenceV1Beta1 := machinev1beta1.AWSResourceReference{}
+
+	if referenceV1 == nil {
+		return machinev1beta1.AWSResourceReference{}
+	}
+
+	switch referenceV1.Type {
+	case machinev1.AWSIDReferenceType:
+		referenceV1Beta1.ID = referenceV1.ID
+
+		return referenceV1Beta1
+	case machinev1.AWSFiltersReferenceType:
+		referenceV1Beta1.Filters = []machinev1beta1.Filter{}
+		for _, filter := range *referenceV1.Filters {
+			referenceV1Beta1.Filters = append(referenceV1Beta1.Filters, machinev1beta1.Filter{
+				Name:   filter.Name,
+				Values: filter.Values,
+			})
+		}
+
+		return referenceV1Beta1
+	case machinev1.AWSARNReferenceType:
+		referenceV1Beta1.ARN = referenceV1.ARN
+
+		return referenceV1Beta1
+	}
+
+	return referenceV1Beta1
+}
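The conversion helpers above are unexported, so they can only be exercised from inside the package. A hypothetical in-package test (the file and test names are illustrative, not upstream) could pin down the precedence implied by the code, where an ID reference wins over Filters and ARN, and an empty v1beta1 reference converts to nil:

```go
package providerconfig

import (
	"testing"

	machinev1 "github.com/openshift/api/machine/v1"
	machinev1beta1 "github.com/openshift/api/machine/v1beta1"
)

func TestConvertAWSResourceReferencePrecedence(t *testing.T) {
	id := "subnet-12345"
	arn := "arn:aws:ec2:us-east-1:123456789012:subnet/subnet-12345"

	// When both an ID and an ARN are set, the ID wins because it is checked first.
	converted := convertAWSResourceReferenceV1Beta1ToV1(machinev1beta1.AWSResourceReference{ID: &id, ARN: &arn})
	if converted == nil || converted.Type != machinev1.AWSIDReferenceType {
		t.Fatalf("expected an ID reference, got %+v", converted)
	}

	// An empty v1beta1 reference converts to nil rather than to an empty v1 reference.
	if convertAWSResourceReferenceV1Beta1ToV1(machinev1beta1.AWSResourceReference{}) != nil {
		t.Fatal("expected nil for an empty reference")
	}
}
```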
diff --git a/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/azure.go b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/azure.go
new file mode 100644
index 0000000000000000000000000000000000000000..31e151078de320ab7e814c509a75d3fb576c5eb4
--- /dev/null
+++ b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/azure.go
@@ -0,0 +1,86 @@
+/*
+Copyright 2022 Red Hat, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package providerconfig
+
+import (
+	"fmt"
+
+	"github.com/go-logr/logr"
+	v1 "github.com/openshift/api/config/v1"
+	machinev1 "github.com/openshift/api/machine/v1"
+	machinev1beta1 "github.com/openshift/api/machine/v1beta1"
+	"k8s.io/apimachinery/pkg/runtime"
+)
+
+// AzureProviderConfig holds the provider spec of an Azure Machine.
+// It allows external code to extract and inject failure domain information,
+// as well as gathering the stored config.
+type AzureProviderConfig struct {
+	providerConfig machinev1beta1.AzureMachineProviderSpec
+}
+
+// InjectFailureDomain returns a new AzureProviderConfig configured with the failure domain
+// information provided.
+func (a AzureProviderConfig) InjectFailureDomain(fd machinev1.AzureFailureDomain) AzureProviderConfig {
+	newAzureProviderConfig := a
+
+	if fd.Zone != "" {
+		newAzureProviderConfig.providerConfig.Zone = fd.Zone
+	}
+
+	if fd.Subnet != "" {
+		newAzureProviderConfig.providerConfig.Subnet = fd.Subnet
+	}
+
+	return newAzureProviderConfig
+}
+
+// ExtractFailureDomain returns an AzureFailureDomain based on the failure domain
+// information stored within the AzureProviderConfig.
+func (a AzureProviderConfig) ExtractFailureDomain() machinev1.AzureFailureDomain {
+	return machinev1.AzureFailureDomain{
+		Zone:   a.providerConfig.Zone,
+		Subnet: a.providerConfig.Subnet,
+	}
+}
+
+// Config returns the stored AzureMachineProviderSpec.
+func (a AzureProviderConfig) Config() machinev1beta1.AzureMachineProviderSpec {
+	return a.providerConfig
+}
+
+// newAzureProviderConfig creates an Azure type ProviderConfig from the raw extension.
+// It should return an error if the provided RawExtension does not represent
+// an AzureMachineProviderSpec.
+func newAzureProviderConfig(logger logr.Logger, raw *runtime.RawExtension) (ProviderConfig, error) {
+	azureMachineProviderSpec := machinev1beta1.AzureMachineProviderSpec{}
+
+	if err := checkForUnknownFieldsInProviderSpecAndUnmarshal(logger, raw, &azureMachineProviderSpec); err != nil {
+		return nil, fmt.Errorf("failed to check for unknown fields in the provider spec: %w", err)
+	}
+
+	azureProviderConfig := AzureProviderConfig{
+		providerConfig: azureMachineProviderSpec,
+	}
+
+	config := providerConfig{
+		platformType: v1.AzurePlatformType,
+		azure:        azureProviderConfig,
+	}
+
+	return config, nil
+}
diff --git a/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/gcp.go b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/gcp.go
new file mode 100644
index 0000000000000000000000000000000000000000..a2dc3dc60a9bb854a4524e4bd14e246a6182b4d3
--- /dev/null
+++ b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/gcp.go
@@ -0,0 +1,79 @@
+/*
+Copyright 2022 Red Hat, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package providerconfig
+
+import (
+	"fmt"
+
+	"github.com/go-logr/logr"
+	v1 "github.com/openshift/api/config/v1"
+	machinev1 "github.com/openshift/api/machine/v1"
+	machinev1beta1 "github.com/openshift/api/machine/v1beta1"
+	"k8s.io/apimachinery/pkg/runtime"
+)
+
+// GCPProviderConfig holds the provider spec of a GCP Machine.
+// It allows external code to extract and inject failure domain information,
+// as well as gathering the stored config.
+type GCPProviderConfig struct {
+	providerConfig machinev1beta1.GCPMachineProviderSpec
+}
+
+// InjectFailureDomain returns a new GCPProviderConfig configured with the failure domain.
+func (g GCPProviderConfig) InjectFailureDomain(fd machinev1.GCPFailureDomain) GCPProviderConfig {
+	newGCPProviderConfig := g
+
+	if fd.Zone != "" {
+		newGCPProviderConfig.providerConfig.Zone = fd.Zone
+	}
+
+	return newGCPProviderConfig
+}
+
+// ExtractFailureDomain returns a GCPFailureDomain based on the failure domain
+// information stored within the GCPProviderConfig.
+func (g GCPProviderConfig) ExtractFailureDomain() machinev1.GCPFailureDomain {
+	return machinev1.GCPFailureDomain{
+		Zone: g.providerConfig.Zone,
+	}
+}
+
+// Config returns the stored GCPMachineProviderSpec.
+func (g GCPProviderConfig) Config() machinev1beta1.GCPMachineProviderSpec {
+	return g.providerConfig
+}
+
+// newGCPProviderConfig creates a GCP type ProviderConfig from the raw extension.
+// It should return an error if the provided RawExtension does not represent a GCPMachineProviderSpec.
+func newGCPProviderConfig(logger logr.Logger, raw *runtime.RawExtension) (ProviderConfig, error) {
+	var gcpMachineProviderSpec machinev1beta1.GCPMachineProviderSpec
+
+	if err := checkForUnknownFieldsInProviderSpecAndUnmarshal(logger, raw, &gcpMachineProviderSpec); err != nil {
+		return nil, fmt.Errorf("failed to check for unknown fields in the provider spec: %w", err)
+	}
+
+	gcpProviderConfig := GCPProviderConfig{
+		providerConfig: gcpMachineProviderSpec,
+	}
+
+	config := providerConfig{
+		platformType: v1.GCPPlatformType,
+		gcp:          gcpProviderConfig,
+	}
+
+	return config, nil
+}
diff --git a/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/generic_platform.go b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/generic_platform.go
new file mode 100644
index 0000000000000000000000000000000000000000..ebfc0a84c949b334eee9037ebd3eba122488276c
--- /dev/null
+++ b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/generic_platform.go
@@ -0,0 +1,47 @@
+/*
+Copyright 2022 Red Hat, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package providerconfig
+
+import (
+	configv1 "github.com/openshift/api/config/v1"
+	"github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/failuredomain"
+	"k8s.io/apimachinery/pkg/runtime"
+)
+
+// GenericProviderConfig holds the provider spec for machines on platforms that
+// don't support failure domains and can be handled generically.
+type GenericProviderConfig struct {
+	// We don't know the exact type of the providerSpec, but we need to store it to be able to tell if two specs are identical.
+	providerSpec *runtime.RawExtension
+}
+
+// ExtractFailureDomain extracts a generic failure domain that contains just the platform type.
+func (g GenericProviderConfig) ExtractFailureDomain() failuredomain.FailureDomain {
+	return failuredomain.NewGenericFailureDomain()
+}
+
+// newGenericProviderConfig creates a generic ProviderConfig that can contain providerSpec for any platform.
+func newGenericProviderConfig(providerSpec *runtime.RawExtension, platform configv1.PlatformType) (ProviderConfig, error) {
+	config := providerConfig{
+		platformType: platform,
+		generic: GenericProviderConfig{
+			providerSpec: providerSpec,
+		},
+	}
+
+	return config, nil
+}
diff --git a/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/nutanix.go b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/nutanix.go
new file mode 100644
index 0000000000000000000000000000000000000000..69532573b21d3d40638d8d285f08010ffe315e45
--- /dev/null
+++ b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/nutanix.go
@@ -0,0 +1,157 @@
+/*
+Copyright 2023 Red Hat, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package providerconfig
+
+import (
+	"errors"
+	"fmt"
+
+	"github.com/go-logr/logr"
+	configv1 "github.com/openshift/api/config/v1"
+	machinev1 "github.com/openshift/api/machine/v1"
+	"k8s.io/apimachinery/pkg/runtime"
+)
+
+var errInvalidFailureDomainName = errors.New("invalid failure domain name")
+
+// NutanixProviderConfig is a wrapper around machinev1.NutanixMachineProviderConfig.
+type NutanixProviderConfig struct {
+	providerConfig machinev1.NutanixMachineProviderConfig
+	infrastructure *configv1.Infrastructure
+}
+
+// Config returns the stored NutanixMachineProviderConfig.
+func (n NutanixProviderConfig) Config() machinev1.NutanixMachineProviderConfig {
+	return n.providerConfig
+}
+
+// Infrastructure returns the stored *configv1.Infrastructure.
+func (n NutanixProviderConfig) Infrastructure() *configv1.Infrastructure {
+	return n.infrastructure
+}
+
+func newNutanixProviderConfig(logger logr.Logger, raw *runtime.RawExtension, infrastructure *configv1.Infrastructure) (ProviderConfig, error) {
+	nutanixMachineProviderconfig := machinev1.NutanixMachineProviderConfig{}
+
+	if err := checkForUnknownFieldsInProviderSpecAndUnmarshal(logger, raw, &nutanixMachineProviderconfig); err != nil {
+		return nil, fmt.Errorf("failed to check for unknown fields in the provider spec: %w", err)
+	}
+
+	npc := NutanixProviderConfig{
+		providerConfig: nutanixMachineProviderconfig,
+		infrastructure: infrastructure,
+	}
+
+	config := providerConfig{
+		platformType: configv1.NutanixPlatformType,
+		nutanix:      npc,
+	}
+
+	return config, nil
+}
+
+// GetFailureDomainByName returns the NutanixFailureDomain with the given name
+// if it is configured in the Infrastructure resource.
+func (n NutanixProviderConfig) GetFailureDomainByName(failureDomainName string) (*configv1.NutanixFailureDomain, error) {
+	if failureDomainName == "" {
+		return nil, fmt.Errorf("empty failure domain name. %w", errInvalidFailureDomainName)
+	}
+
+	for _, fd := range n.infrastructure.Spec.PlatformSpec.Nutanix.FailureDomains {
+		if fd.Name == failureDomainName {
+			return &fd, nil
+		}
+	}
+
+	return nil, fmt.Errorf("the failure domain with name %q is not defined in the infrastructure resource. %w", failureDomainName, errInvalidFailureDomainName)
+}
+
+// InjectFailureDomain returns a new NutanixProviderConfig configured with the failure domain information provided.
+func (n NutanixProviderConfig) InjectFailureDomain(fdRef machinev1.NutanixFailureDomainReference) (NutanixProviderConfig, error) {
+	newConfig := n
+
+	if fdRef.Name == "" {
+		return newConfig, nil
+	}
+
+	fd, err := newConfig.GetFailureDomainByName(fdRef.Name)
+	if err != nil {
+		return newConfig, fmt.Errorf("unknown failure domain: %w", err)
+	}
+
+	// update the providerConfig fields that are defined in the referenced failure domain
+	newConfig.providerConfig.FailureDomain = &machinev1.NutanixFailureDomainReference{
+		Name: fd.Name,
+	}
+	// update Cluster
+	newConfig.providerConfig.Cluster = machinev1.NutanixResourceIdentifier{
+		Name: fd.Cluster.Name,
+		UUID: fd.Cluster.UUID,
+	}
+	if fd.Cluster.Type == configv1.NutanixIdentifierName {
+		newConfig.providerConfig.Cluster.Type = machinev1.NutanixIdentifierName
+	} else if fd.Cluster.Type == configv1.NutanixIdentifierUUID {
+		newConfig.providerConfig.Cluster.Type = machinev1.NutanixIdentifierUUID
+	}
+
+	// update Subnets
+	newConfig.providerConfig.Subnets = []machinev1.NutanixResourceIdentifier{}
+
+	for _, fdSubnet := range fd.Subnets {
+		pcSubnet := machinev1.NutanixResourceIdentifier{
+			Name: fdSubnet.Name,
+			UUID: fdSubnet.UUID,
+		}
+		if fdSubnet.Type == configv1.NutanixIdentifierName {
+			pcSubnet.Type = machinev1.NutanixIdentifierName
+		} else if fdSubnet.Type == configv1.NutanixIdentifierUUID {
+			pcSubnet.Type = machinev1.NutanixIdentifierUUID
+		}
+
+		newConfig.providerConfig.Subnets = append(newConfig.providerConfig.Subnets, pcSubnet)
+	}
+
+	return newConfig, nil
+}
+
+// ExtractFailureDomain is used to extract a failure domain from the ProviderConfig.
+func (n NutanixProviderConfig) ExtractFailureDomain() machinev1.NutanixFailureDomainReference {
+	if n.providerConfig.FailureDomain == nil {
+		return machinev1.NutanixFailureDomainReference{}
+	}
+
+	if fd, _ := n.GetFailureDomainByName(n.providerConfig.FailureDomain.Name); fd != nil {
+		return *n.providerConfig.FailureDomain
+	}
+
+	return machinev1.NutanixFailureDomainReference{}
+}
+
+// ResetFailureDomainRelatedFields resets fields related to failure domain.
+func (n NutanixProviderConfig) ResetFailureDomainRelatedFields() ProviderConfig {
+	n.providerConfig.Cluster = machinev1.NutanixResourceIdentifier{}
+	n.providerConfig.Subnets = []machinev1.NutanixResourceIdentifier{}
+	n.providerConfig.FailureDomain = nil
+
+	return providerConfig{
+		platformType: configv1.NutanixPlatformType,
+		nutanix: NutanixProviderConfig{
+			providerConfig: n.providerConfig,
+			infrastructure: n.infrastructure,
+		},
+	}
+}
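GetFailureDomainByName resolves names against the Infrastructure resource's Nutanix platform spec. A hypothetical in-package test sketch, assuming the standard configv1 Infrastructure/PlatformSpec/NutanixPlatformSpec wrappers around the fields referenced above, might look like this:

```go
package providerconfig

import (
	"testing"

	configv1 "github.com/openshift/api/config/v1"
)

func TestGetFailureDomainByName(t *testing.T) {
	pc := NutanixProviderConfig{
		infrastructure: &configv1.Infrastructure{
			Spec: configv1.InfrastructureSpec{
				PlatformSpec: configv1.PlatformSpec{
					Nutanix: &configv1.NutanixPlatformSpec{
						FailureDomains: []configv1.NutanixFailureDomain{
							{Name: "fd-one"},
						},
					},
				},
			},
		},
	}

	// A name defined in the infrastructure resource is found.
	if _, err := pc.GetFailureDomainByName("fd-one"); err != nil {
		t.Fatalf("expected fd-one to be found: %v", err)
	}

	// Unknown (and empty) names are rejected with errInvalidFailureDomainName wrapped in the error.
	if _, err := pc.GetFailureDomainByName("missing"); err == nil {
		t.Fatal("expected an error for an unknown failure domain name")
	}
}
```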
diff --git a/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/openstack.go b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/openstack.go
new file mode 100644
index 0000000000000000000000000000000000000000..0b910e9cdc1efd33968f7aff98827b2cf94cd200
--- /dev/null
+++ b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/openstack.go
@@ -0,0 +1,104 @@
+/*
+Copyright 2022 Red Hat, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package providerconfig
+
+import (
+	"fmt"
+
+	"github.com/go-logr/logr"
+	v1 "github.com/openshift/api/config/v1"
+	machinev1 "github.com/openshift/api/machine/v1"
+	machinev1alpha1 "github.com/openshift/api/machine/v1alpha1"
+	"k8s.io/apimachinery/pkg/runtime"
+)
+
+// OpenStackProviderConfig holds the provider spec of an OpenStack Machine.
+// It allows external code to extract and inject failure domain information,
+// as well as gathering the stored config.
+type OpenStackProviderConfig struct {
+	providerConfig machinev1alpha1.OpenstackProviderSpec
+}
+
+// InjectFailureDomain returns a new OpenStackProviderConfig configured with the failure domain
+// information provided.
+func (a OpenStackProviderConfig) InjectFailureDomain(fd machinev1.OpenStackFailureDomain) OpenStackProviderConfig {
+	newOpenStackProviderConfig := a
+
+	if fd.AvailabilityZone != "" {
+		newOpenStackProviderConfig.providerConfig.AvailabilityZone = fd.AvailabilityZone
+	}
+
+	if fd.RootVolume != nil && newOpenStackProviderConfig.providerConfig.RootVolume != nil {
+		if fd.RootVolume.AvailabilityZone != "" {
+			newOpenStackProviderConfig.providerConfig.RootVolume.Zone = fd.RootVolume.AvailabilityZone
+		}
+
+		if fd.RootVolume.VolumeType != "" {
+			newOpenStackProviderConfig.providerConfig.RootVolume.VolumeType = fd.RootVolume.VolumeType
+		}
+	}
+
+	return newOpenStackProviderConfig
+}
+
+// ExtractFailureDomain returns an OpenStackFailureDomain based on the failure domain
+// information stored within the OpenStackProviderConfig.
+func (a OpenStackProviderConfig) ExtractFailureDomain() machinev1.OpenStackFailureDomain {
+	var failureDomainRootVolume *machinev1.RootVolume
+
+	if a.providerConfig.RootVolume != nil {
+		// Be liberal in accepting an empty rootVolume in the
+		// OpenStackFailureDomain. It should count as nil.
+		if az, vt := a.providerConfig.RootVolume.Zone, a.providerConfig.RootVolume.VolumeType; az != "" || vt != "" {
+			failureDomainRootVolume = &machinev1.RootVolume{
+				AvailabilityZone: az,
+				VolumeType:       vt,
+			}
+		}
+	}
+
+	return machinev1.OpenStackFailureDomain{
+		AvailabilityZone: a.providerConfig.AvailabilityZone,
+		RootVolume:       failureDomainRootVolume,
+	}
+}
+
+// Config returns the stored OpenStackMachineProviderSpec.
+func (a OpenStackProviderConfig) Config() machinev1alpha1.OpenstackProviderSpec {
+	return a.providerConfig
+}
+
+// newOpenStackProviderConfig creates an OpenStack type ProviderConfig from the raw extension.
+// It should return an error if the provided RawExtension does not represent
+// an OpenstackProviderSpec.
+func newOpenStackProviderConfig(logger logr.Logger, raw *runtime.RawExtension) (ProviderConfig, error) {
+	openstackProviderSpec := machinev1alpha1.OpenstackProviderSpec{}
+	if err := checkForUnknownFieldsInProviderSpecAndUnmarshal(logger, raw, &openstackProviderSpec); err != nil {
+		return nil, fmt.Errorf("failed to check for unknown fields in the provider spec: %w", err)
+	}
+
+	openstackProviderConfig := OpenStackProviderConfig{
+		providerConfig: openstackProviderSpec,
+	}
+
+	config := providerConfig{
+		platformType: v1.OpenStackPlatformType,
+		openstack:    openstackProviderConfig,
+	}
+
+	return config, nil
+}
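ExtractFailureDomain above deliberately treats a root volume with no availability zone and no volume type as carrying no failure domain information. A hedged in-package sketch of that behaviour (the machinev1alpha1.RootVolume type name is assumed; only its Zone and VolumeType fields are referenced by the vendored code):

```go
package providerconfig

import (
	"testing"

	machinev1alpha1 "github.com/openshift/api/machine/v1alpha1"
)

func TestOpenStackEmptyRootVolumeCountsAsNil(t *testing.T) {
	pc := OpenStackProviderConfig{
		providerConfig: machinev1alpha1.OpenstackProviderSpec{
			AvailabilityZone: "nova",
			// A root volume with no availability zone and no volume type carries
			// no failure domain information, so it should be extracted as nil.
			RootVolume: &machinev1alpha1.RootVolume{},
		},
	}

	fd := pc.ExtractFailureDomain()

	if fd.AvailabilityZone != "nova" {
		t.Fatalf("expected availability zone nova, got %q", fd.AvailabilityZone)
	}

	if fd.RootVolume != nil {
		t.Fatalf("expected nil root volume, got %+v", fd.RootVolume)
	}
}
```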
diff --git a/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/providerconfig.go b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/providerconfig.go
new file mode 100644
index 0000000000000000000000000000000000000000..f5306af9fd60885570f444e2295cd4078aa764ee
--- /dev/null
+++ b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/providerconfig.go
@@ -0,0 +1,477 @@
+/*
+Copyright 2022 Red Hat, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package providerconfig
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+	"reflect"
+
+	"github.com/go-logr/logr"
+	"github.com/go-test/deep"
+	configv1 "github.com/openshift/api/config/v1"
+	machinev1 "github.com/openshift/api/machine/v1"
+	machinev1beta1 "github.com/openshift/api/machine/v1beta1"
+	"github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/failuredomain"
+	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+	"k8s.io/apimachinery/pkg/runtime"
+	"sigs.k8s.io/yaml"
+)
+
+var (
+	// errMismatchedPlatformTypes is an error used when two provider configs
+	// are being compared but are from different platform types.
+	errMismatchedPlatformTypes = errors.New("mismatched platform types")
+
+	// errUnsupportedPlatformType is an error used when an unknown platform
+	// type is configured within the failure domain config.
+	errUnsupportedPlatformType = errors.New("unsupported platform type")
+
+	// errNilProviderSpec is an error used when provider spec is nil.
+	errNilProviderSpec = errors.New("provider spec is nil")
+
+	// errNilFailureDomain is an error used when a nil value is present and a failure domain is expected.
+	errNilFailureDomain = errors.New("failure domain is nil")
+
+	// errFailureDomainNotDefinedInInfrastructure is an error used when a failure domain is not defined in the infrastructure resource.
+	errFailureDomainNotDefinedInInfrastructure = errors.New("failure domain is not defined in the infrastructure resource")
+)
+
+// ProviderConfig is an interface that allows external code to interact
+// with provider configuration across different platform types.
+type ProviderConfig interface {
+	// InjectFailureDomain is used to inject a failure domain into the ProviderConfig.
+	// The returned ProviderConfig will be a copy of the current ProviderConfig with
+	// the new failure domain injected.
+	InjectFailureDomain(failuredomain.FailureDomain) (ProviderConfig, error)
+
+	// ExtractFailureDomain is used to extract a failure domain from the ProviderConfig.
+	ExtractFailureDomain() failuredomain.FailureDomain
+
+	// Equal compares two ProviderConfigs to determine whether or not they are equal.
+	Equal(ProviderConfig) (bool, error)
+
+	// Diff compares two ProviderConfigs and returns a list of differences,
+	// or nil if there are none.
+	Diff(ProviderConfig) ([]string, error)
+
+	// RawConfig marshals the configuration into a JSON byte slice.
+	RawConfig() ([]byte, error)
+
+	// Type returns the platform type of the provider config.
+	Type() configv1.PlatformType
+
+	// AWS returns the AWSProviderConfig if the platform type is AWS.
+	AWS() AWSProviderConfig
+
+	// Azure returns the AzureProviderConfig if the platform type is Azure.
+	Azure() AzureProviderConfig
+
+	// GCP returns the GCPProviderConfig if the platform type is GCP.
+	GCP() GCPProviderConfig
+
+	// Nutanix returns the NutanixProviderConfig if the platform type is Nutanix.
+	Nutanix() NutanixProviderConfig
+
+	// OpenStack returns the OpenStackProviderConfig if the platform type is OpenStack.
+	OpenStack() OpenStackProviderConfig
+
+	// VSphere returns the VSphereProviderConfig if the platform type is VSphere.
+	VSphere() VSphereProviderConfig
+
+	// Generic returns the GenericProviderConfig if we are on a platform that is using generic provider abstraction.
+	Generic() GenericProviderConfig
+}
+
+// NewProviderConfigFromMachineTemplate creates a new ProviderConfig from the provided machine template.
+func NewProviderConfigFromMachineTemplate(logger logr.Logger, tmpl machinev1.OpenShiftMachineV1Beta1MachineTemplate, infrastructure *configv1.Infrastructure) (ProviderConfig, error) {
+	platformType, err := getPlatformTypeFromMachineTemplate(tmpl)
+	if err != nil {
+		return nil, fmt.Errorf("could not determine platform type: %w", err)
+	}
+
+	return newProviderConfigFromProviderSpec(logger, tmpl.Spec.ProviderSpec, platformType, infrastructure)
+}
+
+// NewProviderConfigFromMachineSpec creates a new ProviderConfig from the provided machineSpec object.
+func NewProviderConfigFromMachineSpec(logger logr.Logger, machineSpec machinev1beta1.MachineSpec, infrastructure *configv1.Infrastructure) (ProviderConfig, error) {
+	platformType, err := getPlatformTypeFromProviderSpec(machineSpec.ProviderSpec)
+	if err != nil {
+		return nil, fmt.Errorf("could not determine platform type: %w", err)
+	}
+
+	return newProviderConfigFromProviderSpec(logger, machineSpec.ProviderSpec, platformType, infrastructure)
+}
+
+func newProviderConfigFromProviderSpec(logger logr.Logger, providerSpec machinev1beta1.ProviderSpec, platformType configv1.PlatformType, infrastructure *configv1.Infrastructure) (ProviderConfig, error) {
+	if providerSpec.Value == nil {
+		return nil, errNilProviderSpec
+	}
+
+	switch platformType {
+	case configv1.AWSPlatformType:
+		return newAWSProviderConfig(logger, providerSpec.Value)
+	case configv1.AzurePlatformType:
+		return newAzureProviderConfig(logger, providerSpec.Value)
+	case configv1.GCPPlatformType:
+		return newGCPProviderConfig(logger, providerSpec.Value)
+	case configv1.NutanixPlatformType:
+		return newNutanixProviderConfig(logger, providerSpec.Value, infrastructure)
+	case configv1.OpenStackPlatformType:
+		return newOpenStackProviderConfig(logger, providerSpec.Value)
+	case configv1.VSpherePlatformType:
+		return newVSphereProviderConfig(logger, providerSpec.Value, infrastructure)
+	case configv1.NonePlatformType:
+		return nil, fmt.Errorf("%w: %s", errUnsupportedPlatformType, platformType)
+	default:
+		return newGenericProviderConfig(providerSpec.Value, platformType)
+	}
+}
+
+// providerConfig is an implementation of the ProviderConfig interface.
+type providerConfig struct {
+	platformType configv1.PlatformType
+	aws          AWSProviderConfig
+	azure        AzureProviderConfig
+	gcp          GCPProviderConfig
+	nutanix      NutanixProviderConfig
+	generic      GenericProviderConfig
+	openstack    OpenStackProviderConfig
+	vsphere      VSphereProviderConfig
+}
+
+// InjectFailureDomain is used to inject a failure domain into the ProviderConfig.
+// The returned ProviderConfig will be a copy of the current ProviderConfig with
+// the new failure domain injected.
+//
+//nolint:cyclop
+func (p providerConfig) InjectFailureDomain(fd failuredomain.FailureDomain) (ProviderConfig, error) {
+	if fd == nil {
+		return nil, errNilFailureDomain
+	}
+
+	newConfig := p
+
+	switch p.platformType {
+	case configv1.AWSPlatformType:
+		newConfig.aws = p.AWS().InjectFailureDomain(fd.AWS())
+	case configv1.AzurePlatformType:
+		newConfig.azure = p.Azure().InjectFailureDomain(fd.Azure())
+	case configv1.GCPPlatformType:
+		newConfig.gcp = p.GCP().InjectFailureDomain(fd.GCP())
+	case configv1.OpenStackPlatformType:
+		newConfig.openstack = p.OpenStack().InjectFailureDomain(fd.OpenStack())
+	case configv1.VSpherePlatformType:
+		config, err := p.VSphere().InjectFailureDomain(fd.VSphere())
+		if err != nil {
+			return newConfig, fmt.Errorf("failed to inject failure domain: %w", err)
+		}
+
+		newConfig.vsphere = config
+	case configv1.NutanixPlatformType:
+		config, err := p.Nutanix().InjectFailureDomain(fd.Nutanix())
+		if err != nil {
+			return newConfig, fmt.Errorf("failed to inject failure domain: %w", err)
+		}
+
+		newConfig.nutanix = config
+	case configv1.NonePlatformType:
+		return nil, fmt.Errorf("%w: %s", errUnsupportedPlatformType, p.platformType)
+	}
+
+	return newConfig, nil
+}
+
+// ExtractFailureDomain is used to extract a failure domain from the ProviderConfig.
+func (p providerConfig) ExtractFailureDomain() failuredomain.FailureDomain {
+	switch p.platformType {
+	case configv1.AWSPlatformType:
+		return failuredomain.NewAWSFailureDomain(p.AWS().ExtractFailureDomain())
+	case configv1.AzurePlatformType:
+		return failuredomain.NewAzureFailureDomain(p.Azure().ExtractFailureDomain())
+	case configv1.GCPPlatformType:
+		return failuredomain.NewGCPFailureDomain(p.GCP().ExtractFailureDomain())
+	case configv1.OpenStackPlatformType:
+		return failuredomain.NewOpenStackFailureDomain(p.OpenStack().ExtractFailureDomain())
+	case configv1.VSpherePlatformType:
+		return failuredomain.NewVSphereFailureDomain(p.VSphere().ExtractFailureDomain())
+	case configv1.NutanixPlatformType:
+		return failuredomain.NewNutanixFailureDomain(p.Nutanix().ExtractFailureDomain())
+	case configv1.NonePlatformType:
+		return nil
+	default:
+		return p.Generic().ExtractFailureDomain()
+	}
+}
+
+// Diff compares two ProviderConfigs and returns a list of differences,
+// or nil if there are none.
+//
+//nolint:cyclop
+func (p providerConfig) Diff(other ProviderConfig) ([]string, error) {
+	if other == nil {
+		return nil, nil
+	}
+
+	if p.platformType != other.Type() {
+		return nil, errMismatchedPlatformTypes
+	}
+
+	switch p.platformType {
+	case configv1.AWSPlatformType:
+		return deep.Equal(p.aws.providerConfig, other.AWS().providerConfig), nil
+	case configv1.AzurePlatformType:
+		return deep.Equal(p.azure.providerConfig, other.Azure().providerConfig), nil
+	case configv1.GCPPlatformType:
+		return deep.Equal(p.gcp.providerConfig, other.GCP().providerConfig), nil
+	case configv1.NutanixPlatformType:
+		return deep.Equal(p.nutanix.providerConfig, other.Nutanix().providerConfig), nil
+	case configv1.OpenStackPlatformType:
+		return deep.Equal(p.openstack.providerConfig, other.OpenStack().providerConfig), nil
+	case configv1.VSpherePlatformType:
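+		// VSphere uses its own Diff implementation so that templates referenced by
+		// equivalent absolute and relative paths are not reported as differences.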
+		return p.VSphere().Diff(other.VSphere().providerConfig)
+	case configv1.NonePlatformType:
+		return nil, errUnsupportedPlatformType
+	default:
+		return deep.Equal(p.generic.providerSpec, other.Generic().providerSpec), nil
+	}
+}
+
+// Equal compares two ProviderConfigs to determine whether or not they are equal.
+//
+//nolint:cyclop
+func (p providerConfig) Equal(other ProviderConfig) (bool, error) {
+	if other == nil {
+		return false, nil
+	}
+
+	if p.platformType != other.Type() {
+		return false, errMismatchedPlatformTypes
+	}
+
+	switch p.platformType {
+	case configv1.AWSPlatformType:
+		return reflect.DeepEqual(p.aws.providerConfig, other.AWS().providerConfig), nil
+	case configv1.AzurePlatformType:
+		return reflect.DeepEqual(p.azure.providerConfig, other.Azure().providerConfig), nil
+	case configv1.GCPPlatformType:
+		return reflect.DeepEqual(p.gcp.providerConfig, other.GCP().providerConfig), nil
+	case configv1.NutanixPlatformType:
+		return reflect.DeepEqual(p.nutanix.providerConfig, other.Nutanix().providerConfig), nil
+	case configv1.OpenStackPlatformType:
+		return reflect.DeepEqual(p.openstack.providerConfig, other.OpenStack().providerConfig), nil
+	case configv1.VSpherePlatformType:
+		return reflect.DeepEqual(p.vsphere.providerConfig, other.VSphere().providerConfig), nil
+	case configv1.NonePlatformType:
+		return false, errUnsupportedPlatformType
+	default:
+		return reflect.DeepEqual(p.generic.providerSpec, other.Generic().providerSpec), nil
+	}
+}
+
+// RawConfig marshals the configuration into a JSON byte slice.
+func (p providerConfig) RawConfig() ([]byte, error) {
+	var (
+		rawConfig []byte
+		err       error
+	)
+
+	switch p.platformType {
+	case configv1.AWSPlatformType:
+		rawConfig, err = json.Marshal(p.aws.providerConfig)
+	case configv1.AzurePlatformType:
+		rawConfig, err = json.Marshal(p.azure.providerConfig)
+	case configv1.GCPPlatformType:
+		rawConfig, err = json.Marshal(p.gcp.providerConfig)
+	case configv1.NutanixPlatformType:
+		rawConfig, err = json.Marshal(p.nutanix.providerConfig)
+	case configv1.OpenStackPlatformType:
+		rawConfig, err = json.Marshal(p.openstack.providerConfig)
+	case configv1.VSpherePlatformType:
+		rawConfig, err = json.Marshal(p.vsphere.providerConfig)
+	case configv1.NonePlatformType:
+		return nil, errUnsupportedPlatformType
+	default:
+		rawConfig, err = p.generic.providerSpec.Raw, nil
+	}
+
+	if err != nil {
+		return nil, fmt.Errorf("could not marshal provider config: %w", err)
+	}
+
+	return rawConfig, nil
+}
+
+// Type returns the platform type of the provider config.
+func (p providerConfig) Type() configv1.PlatformType {
+	return p.platformType
+}
+
+// AWS returns the AWSProviderConfig if the platform type is AWS.
+func (p providerConfig) AWS() AWSProviderConfig {
+	return p.aws
+}
+
+// Azure returns the AzureProviderConfig if the platform type is Azure.
+func (p providerConfig) Azure() AzureProviderConfig {
+	return p.azure
+}
+
+// GCP returns the GCPProviderConfig if the platform type is GCP.
+func (p providerConfig) GCP() GCPProviderConfig {
+	return p.gcp
+}
+
+// Nutanix returns the NutanixProviderConfig if the platform type is Nutanix.
+func (p providerConfig) Nutanix() NutanixProviderConfig {
+	return p.nutanix
+}
+
+// OpenStack returns the OpenStackProviderConfig if the platform type is OpenStack.
+func (p providerConfig) OpenStack() OpenStackProviderConfig {
+	return p.openstack
+}
+
+// VSphere returns the VSphereProviderConfig if the platform type is VSphere.
+func (p providerConfig) VSphere() VSphereProviderConfig {
+	return p.vsphere
+}
+
+// Generic returns the GenericProviderConfig if the platform type is generic.
+func (p providerConfig) Generic() GenericProviderConfig {
+	return p.generic
+}
+
+// getPlatformTypeFromProviderSpecKind determines the machine platform from the providerSpec kind.
+// When the platform is unknown, it returns "UnknownPlatform".
+func getPlatformTypeFromProviderSpecKind(kind string) configv1.PlatformType {
+	var providerSpecKindToPlatformType = map[string]configv1.PlatformType{
+		"AWSMachineProviderConfig":     configv1.AWSPlatformType,
+		"AzureMachineProviderSpec":     configv1.AzurePlatformType,
+		"GCPMachineProviderSpec":       configv1.GCPPlatformType,
+		"NutanixMachineProviderConfig": configv1.NutanixPlatformType,
+		"OpenstackProviderSpec":        configv1.OpenStackPlatformType,
+		"VSphereMachineProviderSpec":   configv1.VSpherePlatformType,
+	}
+
+	platformType, ok := providerSpecKindToPlatformType[kind]
+
+	// Attempt to operate on unknown platforms. This should work if the platform does not require failure domain support.
+	if !ok {
+		return "UnknownPlatform"
+	}
+
+	return platformType
+}
+
+// getPlatformTypeFromMachineTemplate extracts the platform type from the Machine template.
+// This can either be gathered from the platform type within the template failure domains,
+// or if that isn't present, by inspecting the providerSpec kind and inferring from there
+// what the configured platform type is.
+func getPlatformTypeFromMachineTemplate(tmpl machinev1.OpenShiftMachineV1Beta1MachineTemplate) (configv1.PlatformType, error) {
+	if tmpl.FailureDomains != nil {
+		platformType := tmpl.FailureDomains.Platform
+		if platformType != "" {
+			return platformType, nil
+		}
+	}
+
+	return getPlatformTypeFromProviderSpec(tmpl.Spec.ProviderSpec)
+}
+
+// getPlatformTypeFromProviderSpec determines machine platform from the providerSpec.
+// The providerSpec object's kind field is unmarshalled and the platform type is inferred from it.
+func getPlatformTypeFromProviderSpec(providerSpec machinev1beta1.ProviderSpec) (configv1.PlatformType, error) {
+	// Simple type for unmarshalling providerSpec kind.
+	type providerSpecKind struct {
+		metav1.TypeMeta `json:",inline"`
+	}
+
+	providerKind := providerSpecKind{}
+
+	if providerSpec.Value == nil {
+		return "", errNilProviderSpec
+	}
+
+	if err := json.Unmarshal(providerSpec.Value.Raw, &providerKind); err != nil {
+		return "", fmt.Errorf("could not unmarshal provider spec: %w", err)
+	}
+
+	return getPlatformTypeFromProviderSpecKind(providerKind.Kind), nil
+}
+
+// ExtractFailureDomainsFromMachines creates a list of FailureDomains extracted from the provided list of machines.
+func ExtractFailureDomainsFromMachines(logger logr.Logger, machines []machinev1beta1.Machine, infrastructure *configv1.Infrastructure) ([]failuredomain.FailureDomain, error) {
+	machineFailureDomains := failuredomain.NewSet()
+
+	for _, machine := range machines {
+		providerconfig, err := NewProviderConfigFromMachineSpec(logger, machine.Spec, infrastructure)
+		if err != nil {
+			return nil, fmt.Errorf("error getting failure domain from machine %s: %w", machine.Name, err)
+		}
+
+		machineFailureDomains.Insert(providerconfig.ExtractFailureDomain())
+	}
+
+	return machineFailureDomains.List(), nil
+}
+
+// ExtractFailureDomainFromMachine returns the FailureDomain extracted from the provided machine.
+func ExtractFailureDomainFromMachine(logger logr.Logger, machine machinev1beta1.Machine, infrastructure *configv1.Infrastructure) (failuredomain.FailureDomain, error) {
+	providerConfig, err := NewProviderConfigFromMachineSpec(logger, machine.Spec, infrastructure)
+	if err != nil {
+		return nil, fmt.Errorf("error getting failure domain from machine %s: %w", machine.Name, err)
+	}
+
+	return providerConfig.ExtractFailureDomain(), nil
+}
+
+// ExtractFailureDomainsFromMachineSets creates a list of FailureDomains extracted from the provided list of machineSets.
+func ExtractFailureDomainsFromMachineSets(logger logr.Logger, machineSets []machinev1beta1.MachineSet, infrastructure *configv1.Infrastructure) ([]failuredomain.FailureDomain, error) {
+	machineSetFailureDomains := failuredomain.NewSet()
+
+	for _, machineSet := range machineSets {
+		providerconfig, err := NewProviderConfigFromMachineSpec(logger, machineSet.Spec.Template.Spec, infrastructure)
+		if err != nil {
+			return nil, fmt.Errorf("error getting failure domain from machineSet %s: %w", machineSet.Name, err)
+		}
+
+		machineSetFailureDomains.Insert(providerconfig.ExtractFailureDomain())
+	}
+
+	return machineSetFailureDomains.List(), nil
+}
+
+// checkForUnknownFieldsInProviderSpecAndUnmarshal tries to unmarshal content into a platform specific provider spec
+// and detect invalid fields.
+//
+// If the provider spec contains an unknown field, we log a warning to the user
+// rather than silently dropping the field. Only the first unknown field in the
+// provider spec is reported. To avoid breaking any live clusters, we fall back to
+// the original, non-strict JSON unmarshalling if the strict version fails.
+func checkForUnknownFieldsInProviderSpecAndUnmarshal(logger logr.Logger, raw *runtime.RawExtension, platformProviderSpec interface{}) error {
+	if err := yaml.UnmarshalStrict(raw.Raw, platformProviderSpec); err != nil {
+		logger.Error(err, "failed to strictly unmarshal provider config due to unknown field")
+
+		if err := json.Unmarshal(raw.Raw, platformProviderSpec); err != nil {
+			return fmt.Errorf("failed to unmarshal provider config: %w", err)
+		}
+	}
+
+	return nil
+}
diff --git a/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/vsphere.go b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/vsphere.go
new file mode 100644
index 0000000000000000000000000000000000000000..5cb77e8d268f6909461016e2b5af1f20c6087525
--- /dev/null
+++ b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig/vsphere.go
@@ -0,0 +1,271 @@
+/*
+Copyright 2023 Red Hat, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package providerconfig
+
+import (
+	"encoding/json"
+	"fmt"
+	"path"
+	"strings"
+
+	"github.com/go-logr/logr"
+	"github.com/go-test/deep"
+	configv1 "github.com/openshift/api/config/v1"
+	machinev1 "github.com/openshift/api/machine/v1"
+	machinev1beta1 "github.com/openshift/api/machine/v1beta1"
+	"k8s.io/apimachinery/pkg/runtime"
+)
+
+// VSphereProviderConfig holds the provider spec of a VSphere Machine.
+// It allows external code to extract and inject failure domain information,
+// as well as gathering the stored config.
+type VSphereProviderConfig struct {
+	providerConfig machinev1beta1.VSphereMachineProviderSpec
+	infrastructure *configv1.Infrastructure
+	logger         logr.Logger
+}
+
+func (v VSphereProviderConfig) getFailureDomainFromInfrastructure(fd machinev1.VSphereFailureDomain) (*configv1.VSpherePlatformFailureDomainSpec, error) {
+	if v.infrastructure.Spec.PlatformSpec.VSphere != nil {
+		for _, failureDomain := range v.infrastructure.Spec.PlatformSpec.VSphere.FailureDomains {
+			if failureDomain.Name == fd.Name {
+				return &failureDomain, nil
+			}
+		}
+	}
+
+	return nil, errFailureDomainNotDefinedInInfrastructure
+}
+
+func (v VSphereProviderConfig) getWorkspaceFromFailureDomain(failureDomain *configv1.VSpherePlatformFailureDomainSpec) *machinev1beta1.Workspace {
+	topology := failureDomain.Topology
+	workspace := &machinev1beta1.Workspace{}
+
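+	// Derive the resource pool from the compute cluster by default; an explicit
+	// resource pool in the topology, handled below, takes precedence.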
+	if len(topology.ComputeCluster) > 0 {
+		workspace.ResourcePool = path.Clean(fmt.Sprintf("/%s/Resources", topology.ComputeCluster))
+	}
+
+	if len(topology.ResourcePool) > 0 {
+		workspace.ResourcePool = topology.ResourcePool
+	}
+
+	if len(topology.Datacenter) > 0 {
+		workspace.Datacenter = topology.Datacenter
+	}
+
+	if len(topology.Datastore) > 0 {
+		workspace.Datastore = topology.Datastore
+	}
+
+	if len(failureDomain.Server) > 0 {
+		workspace.Server = failureDomain.Server
+	}
+
+	if len(topology.Folder) > 0 {
+		workspace.Folder = topology.Folder
+	} else {
+		workspace.Folder = fmt.Sprintf("/%s/vm/%s", workspace.Datacenter, v.infrastructure.Status.InfrastructureName)
+	}
+
+	return workspace
+}
+
+// getTemplateName returns the name of the template.
+func getTemplateName(template string) string {
+	if strings.Contains(template, "/") {
+		return template[strings.LastIndex(template, "/")+1:]
+	}
+
+	return template
+}
+
+// Diff compares two ProviderConfigs and returns a list of differences,
+// or nil if there are none.
+func (v VSphereProviderConfig) Diff(other machinev1beta1.VSphereMachineProviderSpec) ([]string, error) {
+	// Templates can be provided either as an absolute path or as a relative one.
+	// This can result in the control plane nodes rolling out when they don't need to.
+	// As long as the OVA name matches, the templates are considered a match.
+	otherTemplate := getTemplateName(other.Template)
+	currentTemplate := getTemplateName(v.providerConfig.Template)
+
+	if otherTemplate == currentTemplate {
+		other.Template = v.providerConfig.Template
+	}
+
+	return deep.Equal(v.providerConfig, other), nil
+}
+
+// InjectFailureDomain returns a new VSphereProviderConfig configured with the failure domain.
+func (v VSphereProviderConfig) InjectFailureDomain(fd machinev1.VSphereFailureDomain) (VSphereProviderConfig, error) {
+	newVSphereProviderConfig := v
+
+	failureDomain, err := newVSphereProviderConfig.getFailureDomainFromInfrastructure(fd)
+	if err != nil {
+		return newVSphereProviderConfig, fmt.Errorf("unknown failure domain: %w", err)
+	}
+
+	newVSphereProviderConfig.providerConfig.Workspace = newVSphereProviderConfig.getWorkspaceFromFailureDomain(failureDomain)
+	topology := failureDomain.Topology
+	network := newVSphereProviderConfig.providerConfig.Network
+
+	logNetworkInfo(newVSphereProviderConfig.providerConfig.Network, "control plane machine set network before failure domain: %v", v.logger)
+
+	if len(topology.Networks) > 0 {
+		networkSpec := machinev1beta1.NetworkSpec{}
+		// If the original has AddressesFromPools, static IP addressing is desired for the CPMS. Keep that and just add the FD info.
+		// Note: the CPMS may have no network devices defined at all, relying on the FD to provide them.
+		if len(network.Devices) > 0 && len(network.Devices[0].AddressesFromPools) > 0 {
+			networkSpec.Devices = newVSphereProviderConfig.providerConfig.Network.Devices
+		}
+
+		// Set the network name for the device from FD.
+		for index, network := range topology.Networks {
+			if len(networkSpec.Devices) <= index {
+				networkSpec.Devices = append(networkSpec.Devices, machinev1beta1.NetworkDeviceSpec{})
+			}
+
+			networkSpec.Devices[index].NetworkName = network
+		}
+
+		newVSphereProviderConfig.providerConfig.Network = networkSpec
+	}
+
+	logNetworkInfo(newVSphereProviderConfig.providerConfig.Network, "control plane machine set network after failure domain: %v", v.logger)
+
+	if len(topology.Template) > 0 {
+		newVSphereProviderConfig.providerConfig.Template = topology.Template[strings.LastIndex(topology.Template, "/")+1:]
+	} else if len(v.infrastructure.Spec.PlatformSpec.VSphere.FailureDomains) > 0 {
+		newVSphereProviderConfig.providerConfig.Template = fmt.Sprintf("%s-rhcos-%s-%s", v.infrastructure.Status.InfrastructureName, failureDomain.Region, failureDomain.Zone)
+	}
+
+	return newVSphereProviderConfig, nil
+}
+
+// ExtractFailureDomain is used to extract a failure domain from the ProviderConfig.
+func (v VSphereProviderConfig) ExtractFailureDomain() machinev1.VSphereFailureDomain {
+	workspace := v.providerConfig.Workspace
+
+	if v.infrastructure.Spec.PlatformSpec.Type != configv1.VSpherePlatformType {
+		return machinev1.VSphereFailureDomain{}
+	}
+
+	// Older OCP installs will not have PlatformSpec set for infrastructure.
+	if v.infrastructure.Spec.PlatformSpec.VSphere == nil {
+		return machinev1.VSphereFailureDomain{}
+	}
+
+	failureDomains := v.infrastructure.Spec.PlatformSpec.VSphere.FailureDomains
+
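+	// Match the machine's workspace against each failure domain's topology to
+	// determine which failure domain this provider config belongs to.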
+	for _, failureDomain := range failureDomains {
+		topology := failureDomain.Topology
+		if workspace.Datacenter == topology.Datacenter &&
+			workspace.Datastore == topology.Datastore &&
+			workspace.Server == failureDomain.Server &&
+			path.Clean(workspace.ResourcePool) == path.Clean(topology.ResourcePool) {
+			return machinev1.VSphereFailureDomain{
+				Name: failureDomain.Name,
+			}
+		}
+	}
+
+	return machinev1.VSphereFailureDomain{}
+}
+
+// ResetTopologyRelatedFields resets fields related to topology and VM placement such as the workspace, network, and template.
+func (v VSphereProviderConfig) ResetTopologyRelatedFields() ProviderConfig {
+	v.providerConfig.Workspace = &machinev1beta1.Workspace{}
+	v.providerConfig.Template = ""
+
+	networkSpec := machinev1beta1.NetworkSpec{}
+	devices := networkSpec.Devices
+
+	// Preserve IP pools if they are defined.
+	for _, network := range v.providerConfig.Network.Devices {
+		if len(network.AddressesFromPools) > 0 {
+			networkDeviceSpec := machinev1beta1.NetworkDeviceSpec{
+				AddressesFromPools: network.AddressesFromPools,
+			}
+			devices = append(devices, networkDeviceSpec)
+		}
+	}
+
+	networkSpec.Devices = devices
+	v.providerConfig.Network = networkSpec
+
+	return providerConfig{
+		platformType: configv1.VSpherePlatformType,
+		vsphere: VSphereProviderConfig{
+			providerConfig: v.providerConfig,
+			infrastructure: v.infrastructure,
+		},
+	}
+}
+
+// Config returns the stored VSphereMachineProviderSpec.
+func (v VSphereProviderConfig) Config() machinev1beta1.VSphereMachineProviderSpec {
+	return v.providerConfig
+}
+
+// newVSphereProviderConfig creates a VSphere type ProviderConfig from the raw extension.
+// It should return an error if the provided RawExtension does not represent a VSphereProviderConfig.
+func newVSphereProviderConfig(logger logr.Logger, raw *runtime.RawExtension, infrastructure *configv1.Infrastructure) (ProviderConfig, error) {
+	var vsphereMachineProviderSpec machinev1beta1.VSphereMachineProviderSpec
+
+	if err := checkForUnknownFieldsInProviderSpecAndUnmarshal(logger, raw, &vsphereMachineProviderSpec); err != nil {
+		return nil, fmt.Errorf("failed to check for unknown fields in the provider spec: %w", err)
+	}
+
+	VSphereProviderConfig := VSphereProviderConfig{
+		providerConfig: vsphereMachineProviderSpec,
+		infrastructure: infrastructure,
+		logger:         logger,
+	}
+
+	// For networking, we only need to compare the network name. For static IPs, we can ignore all IP configuration;
+	// however, we may need to verify that addressesFromPools is present.
+	for index, device := range vsphereMachineProviderSpec.Network.Devices {
+		vsphereMachineProviderSpec.Network.Devices[index] = machinev1beta1.NetworkDeviceSpec{}
+		if device.NetworkName != "" {
+			vsphereMachineProviderSpec.Network.Devices[index].NetworkName = device.NetworkName
+		}
+
+		if device.AddressesFromPools != nil {
+			vsphereMachineProviderSpec.Network.Devices[index].AddressesFromPools = device.AddressesFromPools
+			vsphereMachineProviderSpec.Network.Devices[index].Nameservers = device.Nameservers
+		}
+	}
+
+	config := providerConfig{
+		platformType: configv1.VSpherePlatformType,
+		vsphere:      VSphereProviderConfig,
+	}
+
+	return config, nil
+}
+
+// logNetworkInfo logs network info as JSON for easier debugging of the data being processed.
+func logNetworkInfo(network machinev1beta1.NetworkSpec, msg string, logger logr.Logger) {
+	// Limit marshalling to when the log verbosity is 4 or higher.
+	if logger.GetV() >= 4 {
+		jsonOutput, err := json.Marshal(network)
+		if err != nil {
+			logger.Error(err, "Got error Marshalling NetworkSpec")
+		}
+
+		logger.V(4).Info(msg, string(jsonOutput))
+	}
+}
diff --git a/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/test/e2e/framework/framework.go b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/test/e2e/framework/framework.go
new file mode 100644
index 0000000000000000000000000000000000000000..ebd344cc8454640ede272ff04b61b15a55a28067
--- /dev/null
+++ b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/test/e2e/framework/framework.go
@@ -0,0 +1,957 @@
+/*
+Copyright 2022 Red Hat, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package framework
+
+import (
+	"context"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"os"
+	"regexp"
+	"strconv"
+	"strings"
+
+	"github.com/go-logr/logr"
+	"github.com/google/uuid"
+	configv1 "github.com/openshift/api/config/v1"
+	machinev1 "github.com/openshift/api/machine/v1"
+	machinev1beta1 "github.com/openshift/api/machine/v1beta1"
+	"github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig"
+
+	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+	"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
+	"k8s.io/apimachinery/pkg/runtime"
+	kerrors "k8s.io/apimachinery/pkg/util/errors"
+	"k8s.io/client-go/kubernetes/scheme"
+	"k8s.io/klog/v2/textlogger"
+	ctrl "sigs.k8s.io/controller-runtime"
+
+	runtimeclient "sigs.k8s.io/controller-runtime/pkg/client"
+	"sigs.k8s.io/controller-runtime/pkg/client/config"
+)
+
+var (
+	// errUnsupportedPlatform is returned when the platform is not supported.
+	errUnsupportedPlatform = errors.New("unsupported platform")
+
+	// errInstanceTypeUnsupportedFormat is returned when the instance size did not match the expected format.
+	// Each platform has its own format for the instance size, and if we do not recognise the instance
+	// size we cannot increase it.
+	errInstanceTypeUnsupportedFormat = errors.New("instance type did not match expected format")
+
+	// errInstanceTypeNotSupported is returned when the instance size is not supported.
+	// This means that even though the format is correct, we haven't implemented the logic to increase
+	// this instance size.
+	errInstanceTypeNotSupported = errors.New("instance type is not supported")
+
+	// errMissingInstanceSize is returned when the instance size is missing.
+	errMissingInstanceSize = errors.New("instance size is missing")
+)
+
+// Framework is an interface for getting clients and information
+// about the environment within test cases.
+type Framework interface {
+	// ControlPlaneMachineSetKey returns the object key for fetching a control plane
+	// machine set.
+	ControlPlaneMachineSetKey() runtimeclient.ObjectKey
+
+	// GetClient returns a new controller-runtime client.
+	GetClient() runtimeclient.Client
+
+	// GetContext returns a context.
+	GetContext() context.Context
+
+	// GetPlatformType returns the platform type.
+	GetPlatformType() configv1.PlatformType
+
+	// GetPlatformSupportLevel returns the support level for the current platform.
+	GetPlatformSupportLevel() PlatformSupportLevel
+
+	// GetScheme returns the scheme.
+	GetScheme() *runtime.Scheme
+
+	// NewEmptyControlPlaneMachineSet returns a new control plane machine set with
+	// just the name and namespace set.
+	NewEmptyControlPlaneMachineSet() *machinev1.ControlPlaneMachineSet
+
+	// IncreaseProviderSpecInstanceSize increases the instance size of the
+	// providerSpec passed. This is used to trigger updates to the Machines
+	// managed by the control plane machine set.
+	IncreaseProviderSpecInstanceSize(providerSpec *runtime.RawExtension) error
+
+	// TagInstanceInProviderSpec tags the instance in the provider spec.
+	TagInstanceInProviderSpec(providerSpec *runtime.RawExtension) error
+
+	// ConvertToControlPlaneMachineSetProviderSpec converts a control plane machine provider spec
+	// to a provider spec suitable for a control plane machine set.
+	ConvertToControlPlaneMachineSetProviderSpec(providerSpec machinev1beta1.ProviderSpec) (*runtime.RawExtension, error)
+
+	// UpdateDefaultedValueFromCPMS updates a field that is defaulted by the defaulting webhook in the MAO with a desired value.
+	UpdateDefaultedValueFromCPMS(rawProviderSpec *runtime.RawExtension) (*runtime.RawExtension, error)
+}
+
+// PlatformSupportLevel is used to identify which tests should run
+// based on the platform.
+type PlatformSupportLevel int
+
+const (
+	// Unsupported means that the platform is not supported
+	// by CPMS.
+	Unsupported PlatformSupportLevel = iota
+	// Manual means that the platform is supported by CPMS,
+	// but the CPMS must be created manually.
+	Manual
+	// Full means that the platform is supported by CPMS,
+	// and the CPMS will be created automatically.
+	Full
+)
+
+// framework is an implementation of the Framework interface.
+// It is used to provide a common set of functionality to all of the
+// test cases.
+type framework struct {
+	client       runtimeclient.Client
+	logger       logr.Logger
+	platform     configv1.PlatformType
+	supportLevel PlatformSupportLevel
+	scheme       *runtime.Scheme
+	namespace    string
+}
+
+// NewFramework initialises a new test framework for the E2E suite.
+func NewFramework() (Framework, error) {
+	sch, err := loadScheme()
+	if err != nil {
+		return nil, err
+	}
+
+	client, err := loadClient(sch)
+	if err != nil {
+		return nil, err
+	}
+
+	supportLevel, platform, err := getPlatformSupportLevel(client)
+	if err != nil {
+		return nil, err
+	}
+
+	logger := textlogger.NewLogger(textlogger.NewConfig())
+	ctrl.SetLogger(logger)
+
+	return &framework{
+		client:       client,
+		logger:       logger,
+		platform:     platform,
+		supportLevel: supportLevel,
+		scheme:       sch,
+		namespace:    MachineAPINamespace,
+	}, nil
+}
+
+// NewFrameworkWith initialises a new test framework for the E2E suite
+// using the existing scheme, client, platform and support level provided.
+func NewFrameworkWith(sch *runtime.Scheme, client runtimeclient.Client, platform configv1.PlatformType, supportLevel PlatformSupportLevel, namespace string) Framework {
+	return &framework{
+		client:       client,
+		platform:     platform,
+		supportLevel: supportLevel,
+		scheme:       sch,
+		namespace:    namespace,
+	}
+}
+
+// ControlPlaneMachineSetKey is the object key for fetching a control plane
+// machine set.
+func (f *framework) ControlPlaneMachineSetKey() runtimeclient.ObjectKey {
+	return runtimeclient.ObjectKey{
+		Namespace: f.namespace,
+		Name:      ControlPlaneMachineSetName,
+	}
+}
+
+// GetClient returns a controller-runtime client.
+func (f *framework) GetClient() runtimeclient.Client {
+	return f.client
+}
+
+// GetContext returns a context.
+func (f *framework) GetContext() context.Context {
+	return context.Background()
+}
+
+// GetPlatformType returns the platform type.
+func (f *framework) GetPlatformType() configv1.PlatformType {
+	return f.platform
+}
+
+// GetPlatformSupportLevel returns the support level for the current platform.
+func (f *framework) GetPlatformSupportLevel() PlatformSupportLevel {
+	return f.supportLevel
+}
+
+// GetScheme returns the scheme.
+func (f *framework) GetScheme() *runtime.Scheme {
+	return f.scheme
+}
+
+// NewEmptyControlPlaneMachineSet returns a new control plane machine set with
+// just the name and namespace set.
+func (f *framework) NewEmptyControlPlaneMachineSet() *machinev1.ControlPlaneMachineSet {
+	return &machinev1.ControlPlaneMachineSet{
+		ObjectMeta: metav1.ObjectMeta{
+			Name:      ControlPlaneMachineSetName,
+			Namespace: f.namespace,
+		},
+	}
+}
+
+// IncreaseProviderSpecInstanceSize increases the instance size of the instance on the providerSpec
+// that is passed.
+func (f *framework) IncreaseProviderSpecInstanceSize(rawProviderSpec *runtime.RawExtension) error {
+	providerConfig, err := providerconfig.NewProviderConfigFromMachineSpec(f.logger, machinev1beta1.MachineSpec{
+		ProviderSpec: machinev1beta1.ProviderSpec{
+			Value: rawProviderSpec,
+		},
+	}, nil)
+	if err != nil {
+		return fmt.Errorf("failed to get provider config: %w", err)
+	}
+
+	switch f.platform {
+	case configv1.AWSPlatformType:
+		return increaseAWSInstanceSize(rawProviderSpec, providerConfig)
+	case configv1.AzurePlatformType:
+		return increaseAzureInstanceSize(rawProviderSpec, providerConfig)
+	case configv1.GCPPlatformType:
+		return increaseGCPInstanceSize(rawProviderSpec, providerConfig)
+	case configv1.NutanixPlatformType:
+		return increaseNutanixInstanceSize(rawProviderSpec, providerConfig)
+	case configv1.OpenStackPlatformType:
+		return increaseOpenStackInstanceSize(rawProviderSpec, providerConfig)
+	case configv1.VSpherePlatformType:
+		return increaseVSphereInstanceSize(rawProviderSpec, providerConfig)
+	default:
+		return fmt.Errorf("%w: %s", errUnsupportedPlatform, f.platform)
+	}
+}
+
+// TagInstanceInProviderSpec tags the instance in the providerSpec.
+func (f *framework) TagInstanceInProviderSpec(rawProviderSpec *runtime.RawExtension) error {
+	providerConfig, err := providerconfig.NewProviderConfigFromMachineSpec(f.logger, machinev1beta1.MachineSpec{
+		ProviderSpec: machinev1beta1.ProviderSpec{
+			Value: rawProviderSpec,
+		},
+	}, nil)
+	if err != nil {
+		return fmt.Errorf("failed to get provider config: %w", err)
+	}
+
+	switch f.platform {
+	case configv1.OpenStackPlatformType:
+		return tagOpenStackProviderSpec(rawProviderSpec, providerConfig)
+	default:
+		return fmt.Errorf("%w: %s", errUnsupportedPlatform, f.platform)
+	}
+}
+
+// UpdateDefaultedValueFromCPMS updates a defaulted value from the ControlPlaneMachineSet
+// for AWS, Azure, GCP, Nutanix or VSphere.
+func (f *framework) UpdateDefaultedValueFromCPMS(rawProviderSpec *runtime.RawExtension) (*runtime.RawExtension, error) {
+	providerConfig, err := providerconfig.NewProviderConfigFromMachineSpec(f.logger, machinev1beta1.MachineSpec{
+		ProviderSpec: machinev1beta1.ProviderSpec{
+			Value: rawProviderSpec,
+		},
+	}, nil)
+	if err != nil {
+		return nil, fmt.Errorf("failed to get provider config: %w", err)
+	}
+
+	switch f.platform {
+	case configv1.AzurePlatformType:
+		return updateCredentialsSecretNameAzure(providerConfig)
+	case configv1.AWSPlatformType:
+		return updateCredentialsSecretNameAWS(providerConfig)
+	case configv1.GCPPlatformType:
+		return updateCredentialsSecretNameGCP(providerConfig)
+	case configv1.NutanixPlatformType:
+		return updateCredentialsSecretNameNutanix(providerConfig)
+	case configv1.VSpherePlatformType:
+		return updateCredentialsSecretNameVSphere(providerConfig)
+	default:
+		return nil, fmt.Errorf("%w: %s", errUnsupportedPlatform, f.platform)
+	}
+}
+
+// updateCredentialsSecretNameAzure updates the credentialSecret field from the ControlPlaneMachineSet.
+func updateCredentialsSecretNameAzure(providerConfig providerconfig.ProviderConfig) (*runtime.RawExtension, error) {
+	cfg := providerConfig.Azure().Config()
+	cfg.CredentialsSecret = nil
+
+	rawBytes, err := json.Marshal(cfg)
+	if err != nil {
+		return nil, fmt.Errorf("error marshalling azure providerSpec: %w", err)
+	}
+
+	return &runtime.RawExtension{
+		Raw: rawBytes,
+	}, nil
+}
+
+// updateCredentialsSecretNameAWS updates the credentialSecret field from the ControlPlaneMachineSet.
+func updateCredentialsSecretNameAWS(providerConfig providerconfig.ProviderConfig) (*runtime.RawExtension, error) {
+	cfg := providerConfig.AWS().Config()
+	cfg.CredentialsSecret = nil
+
+	rawBytes, err := json.Marshal(cfg)
+	if err != nil {
+		return nil, fmt.Errorf("error marshalling aws providerSpec: %w", err)
+	}
+
+	return &runtime.RawExtension{
+		Raw: rawBytes,
+	}, nil
+}
+
+// updateCredentialsSecretNameGCP updates the credentialSecret field from the ControlPlaneMachineSet.
+func updateCredentialsSecretNameGCP(providerConfig providerconfig.ProviderConfig) (*runtime.RawExtension, error) {
+	cfg := providerConfig.GCP().Config()
+	cfg.CredentialsSecret = nil
+
+	rawBytes, err := json.Marshal(cfg)
+	if err != nil {
+		return nil, fmt.Errorf("error marshalling gcp providerSpec: %w", err)
+	}
+
+	return &runtime.RawExtension{
+		Raw: rawBytes,
+	}, nil
+}
+
+// updateCredentialsSecretNameNutanix updates the credentialSecret field from the ControlPlaneMachineSet.
+func updateCredentialsSecretNameNutanix(providerConfig providerconfig.ProviderConfig) (*runtime.RawExtension, error) {
+	cfg := providerConfig.Nutanix().Config()
+	cfg.CredentialsSecret = nil
+
+	rawBytes, err := json.Marshal(cfg)
+	if err != nil {
+		return nil, fmt.Errorf("error marshalling nutanix providerSpec: %w", err)
+	}
+
+	return &runtime.RawExtension{
+		Raw: rawBytes,
+	}, nil
+}
+
+// updateCredentialsSecretNameVSphere updates the credentialSecret field from the ControlPlaneMachineSet.
+func updateCredentialsSecretNameVSphere(providerConfig providerconfig.ProviderConfig) (*runtime.RawExtension, error) {
+	cfg := providerConfig.VSphere().Config()
+	cfg.CredentialsSecret = nil
+
+	rawBytes, err := json.Marshal(cfg)
+	if err != nil {
+		return nil, fmt.Errorf("error marshalling nutanix providerSpec: %w", err)
+	}
+
+	return &runtime.RawExtension{
+		Raw: rawBytes,
+	}, nil
+}
+
+// ConvertToControlPlaneMachineSetProviderSpec converts a control plane machine provider spec
+// to a raw provider spec suitable for a control plane machine set.
+func (f *framework) ConvertToControlPlaneMachineSetProviderSpec(providerSpec machinev1beta1.ProviderSpec) (*runtime.RawExtension, error) {
+	providerConfig, err := providerconfig.NewProviderConfigFromMachineSpec(f.logger, machinev1beta1.MachineSpec{
+		ProviderSpec: providerSpec,
+	}, nil)
+	if err != nil {
+		return nil, fmt.Errorf("failed to get provider config: %w", err)
+	}
+
+	switch f.platform {
+	case configv1.AWSPlatformType:
+		return convertAWSProviderConfigToControlPlaneMachineSetProviderSpec(providerConfig)
+	case configv1.AzurePlatformType:
+		return convertAzureProviderConfigToControlPlaneMachineSetProviderSpec(providerConfig)
+	case configv1.GCPPlatformType:
+		return convertGCPProviderConfigToControlPlaneMachineSetProviderSpec(providerConfig)
+	case configv1.NutanixPlatformType:
+		return convertNutanixProviderConfigToControlPlaneMachineSetProviderSpec(providerConfig)
+	case configv1.OpenStackPlatformType:
+		return convertOpenStackProviderConfigToControlPlaneMachineSetProviderSpec(providerConfig)
+	case configv1.VSpherePlatformType:
+		return convertVSphereProviderConfigToControlPlaneMachineSetProviderSpec(providerConfig)
+	default:
+		return nil, fmt.Errorf("%w: %s", errUnsupportedPlatform, f.platform)
+	}
+}
+
+// convertAWSProviderConfigToControlPlaneMachineSetProviderSpec converts an AWS providerConfig into a
+// raw control plane machine set provider spec.
+func convertAWSProviderConfigToControlPlaneMachineSetProviderSpec(providerConfig providerconfig.ProviderConfig) (*runtime.RawExtension, error) {
+	awsPs := providerConfig.AWS().Config()
+	awsPs.Subnet = machinev1beta1.AWSResourceReference{}
+	awsPs.Placement.AvailabilityZone = ""
+
+	rawBytes, err := json.Marshal(awsPs)
+	if err != nil {
+		return nil, fmt.Errorf("error marshalling aws providerSpec: %w", err)
+	}
+
+	return &runtime.RawExtension{
+		Raw: rawBytes,
+	}, nil
+}
+
+// convertGCPProviderConfigToControlPlaneMachineSetProviderSpec converts a GCP providerConfig into a
+// raw control plane machine set provider spec.
+func convertGCPProviderConfigToControlPlaneMachineSetProviderSpec(providerConfig providerconfig.ProviderConfig) (*runtime.RawExtension, error) {
+	gcpPs := providerConfig.GCP().Config()
+	gcpPs.Zone = ""
+
+	rawBytes, err := json.Marshal(gcpPs)
+	if err != nil {
+		return nil, fmt.Errorf("error marshalling gcp providerSpec: %w", err)
+	}
+
+	return &runtime.RawExtension{
+		Raw: rawBytes,
+	}, nil
+}
+
+// convertAzureProviderConfigToControlPlaneMachineSetProviderSpec converts an Azure providerConfig into a
+// raw control plane machine set provider spec.
+func convertAzureProviderConfigToControlPlaneMachineSetProviderSpec(providerConfig providerconfig.ProviderConfig) (*runtime.RawExtension, error) {
+	azurePs := providerConfig.Azure().Config()
+	azurePs.Zone = ""
+	azurePs.Subnet = ""
+
+	rawBytes, err := json.Marshal(azurePs)
+	if err != nil {
+		return nil, fmt.Errorf("error marshalling azure providerSpec: %w", err)
+	}
+
+	return &runtime.RawExtension{
+		Raw: rawBytes,
+	}, nil
+}
+
+// convertVSphereProviderConfigToControlPlaneMachineSetProviderSpec converts a VSphere providerConfig into a
+// raw control plane machine set provider spec.
+func convertVSphereProviderConfigToControlPlaneMachineSetProviderSpec(providerConfig providerconfig.ProviderConfig) (*runtime.RawExtension, error) {
+	vspherePs := providerConfig.VSphere().Config()
+	vspherePs.Name = ""
+
+	vspherePs.Workspace = &machinev1beta1.Workspace{}
+	vspherePs.Template = ""
+	vspherePs.Network = machinev1beta1.NetworkSpec{}
+
+	rawBytes, err := json.Marshal(vspherePs)
+	if err != nil {
+		return nil, fmt.Errorf("error marshalling vsphere providerSpec: %w", err)
+	}
+
+	return &runtime.RawExtension{
+		Raw: rawBytes,
+	}, nil
+}
+
+// convertNutanixProviderConfigToControlPlaneMachineSetProviderSpec converts a Nutanix providerConfig into a
+// raw control plane machine set provider spec.
+func convertNutanixProviderConfigToControlPlaneMachineSetProviderSpec(providerConfig providerconfig.ProviderConfig) (*runtime.RawExtension, error) {
+	nutanixProviderConfig := providerConfig.Nutanix().Config()
+
+	rawBytes, err := json.Marshal(nutanixProviderConfig)
+	if err != nil {
+		return nil, fmt.Errorf("error marshalling nutanix providerSpec: %w", err)
+	}
+
+	return &runtime.RawExtension{
+		Raw: rawBytes,
+	}, nil
+}
+
+// convertOpenStackProviderConfigToControlPlaneMachineSetProviderSpec converts an OpenStack providerConfig into a
+// raw control plane machine set provider spec.
+func convertOpenStackProviderConfigToControlPlaneMachineSetProviderSpec(providerConfig providerconfig.ProviderConfig) (*runtime.RawExtension, error) {
+	openStackPs := providerConfig.OpenStack().Config()
+
+	openStackPs.AvailabilityZone = ""
+
+	if openStackPs.RootVolume != nil {
+		openStackPs.RootVolume.VolumeType = ""
+		openStackPs.RootVolume.Zone = ""
+	}
+
+	rawBytes, err := json.Marshal(openStackPs)
+	if err != nil {
+		return nil, fmt.Errorf("error marshalling openstack providerSpec: %w", err)
+	}
+
+	return &runtime.RawExtension{
+		Raw: rawBytes,
+	}, nil
+}
+
+// loadClient returns a new controller-runtime client.
+func loadClient(sch *runtime.Scheme) (runtimeclient.Client, error) {
+	cfg, err := config.GetConfig()
+	if err != nil {
+		return nil, fmt.Errorf("failed to get Kubernetes config: %w", err)
+	}
+
+	client, err := runtimeclient.New(cfg, runtimeclient.Options{
+		Scheme: sch,
+	})
+	if err != nil {
+		return nil, fmt.Errorf("failed to create Kubernetes client: %w", err)
+	}
+
+	return client, nil
+}
+
+// addToSchemeFunc is an alias for a function that will add types to the scheme.
+// We use this to loop and handle the errors for each scheme.
+type addToSchemeFunc func(*runtime.Scheme) error
+
+// loadScheme creates a scheme with all of the required types for the
+// tests, pre-registered.
+func loadScheme() (*runtime.Scheme, error) {
+	sch := scheme.Scheme
+
+	var errs []error
+
+	for _, f := range []addToSchemeFunc{
+		configv1.AddToScheme,
+		machinev1.AddToScheme,
+		machinev1beta1.AddToScheme,
+	} {
+		if err := f(sch); err != nil {
+			errs = append(errs, fmt.Errorf("failed to add to scheme: %w", err))
+		}
+	}
+
+	if len(errs) > 0 {
+		return nil, kerrors.NewAggregate(errs)
+	}
+
+	return sch, nil
+}
+
+// getPlatformSupportLevel returns the support level for the current platform.
+func getPlatformSupportLevel(k8sClient runtimeclient.Client) (PlatformSupportLevel, configv1.PlatformType, error) {
+	infra := &configv1.Infrastructure{}
+
+	if err := k8sClient.Get(context.Background(), runtimeclient.ObjectKey{Name: "cluster"}, infra); err != nil {
+		return Unsupported, configv1.NonePlatformType, fmt.Errorf("failed to get infrastructure resource: %w", err)
+	}
+
+	platformType := infra.Status.PlatformStatus.Type
+
+	switch platformType {
+	case configv1.AWSPlatformType:
+		return Full, platformType, nil
+	case configv1.AzurePlatformType:
+		return Manual, platformType, nil
+	case configv1.GCPPlatformType:
+		return Manual, platformType, nil
+	case configv1.NutanixPlatformType:
+		return Manual, platformType, nil
+	case configv1.OpenStackPlatformType:
+		return Full, platformType, nil
+	case configv1.VSpherePlatformType:
+		return Full, platformType, nil
+	default:
+		return Unsupported, platformType, nil
+	}
+}
+
+// increaseAWSInstanceSize increases the instance size of the instance on the providerSpec for an AWS providerSpec.
+func increaseAWSInstanceSize(rawProviderSpec *runtime.RawExtension, providerConfig providerconfig.ProviderConfig) error {
+	cfg := providerConfig.AWS().Config()
+
+	var err error
+
+	cfg.InstanceType, err = nextAWSInstanceSize(cfg.InstanceType)
+	if err != nil {
+		return fmt.Errorf("failed to get next instance size: %w", err)
+	}
+
+	if err := setProviderSpecValue(rawProviderSpec, cfg); err != nil {
+		return fmt.Errorf("failed to set provider spec value: %w", err)
+	}
+
+	return nil
+}
+
+// nextAWSInstanceSize returns the next AWS instance size in the series.
+// In AWS terms this normally means doubling the size of the underlying instance.
+// For example:
+// - m6i.large -> m6i.xlarge
+// - m6i.xlarge -> m6i.2xlarge
+// - m6i.2xlarge -> m6i.4xlarge
+// This should mean we do not need to update this when the installer changes the default instance size.
+func nextAWSInstanceSize(current string) (string, error) {
+	// Regex to match the AWS instance type string.
+	re := regexp.MustCompile(`(?P<family>[a-z0-9]+)\.(?P<multiplier>\d)?(?P<size>[a-z]+)`)
+
+	values := re.FindStringSubmatch(current)
+	if len(values) != 4 {
+		return "", fmt.Errorf("%w: %s", errInstanceTypeUnsupportedFormat, current)
+	}
+
+	family := values[1]
+	size := values[3]
+
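+	// Sizes without a numeric multiplier (large, xlarge) are special-cased below;
+	// all other sizes simply double their multiplier, e.g. 2xlarge -> 4xlarge.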
+	if multiplier := values[2]; multiplier == "" {
+		switch size {
+		case "large":
+			return fmt.Sprintf("%s.xlarge", family), nil
+		case "xlarge":
+			return fmt.Sprintf("%s.2xlarge", family), nil
+		default:
+			return "", fmt.Errorf("%w: %s", errInstanceTypeNotSupported, current)
+		}
+	}
+
+	multiplierInt, err := strconv.Atoi(values[2])
+	if err != nil {
+		// This is a panic because the multiplier should always be a number.
+		panic("failed to convert multiplier to int")
+	}
+
+	return fmt.Sprintf("%s.%d%s", family, multiplierInt*2, size), nil
+}
+
+// increaseAzureInstanceSize increases the instance size of the instance on the providerSpec for an Azure providerSpec.
+func increaseAzureInstanceSize(rawProviderSpec *runtime.RawExtension, providerConfig providerconfig.ProviderConfig) error {
+	cfg := providerConfig.Azure().Config()
+
+	var err error
+
+	cfg.VMSize, err = nextAzureVMSize(cfg.VMSize)
+	if err != nil {
+		return fmt.Errorf("failed to get next instance size: %w", err)
+	}
+
+	if err := setProviderSpecValue(rawProviderSpec, cfg); err != nil {
+		return fmt.Errorf("failed to set provider spec value: %w", err)
+	}
+
+	return nil
+}
+
+// tagOpenStackProviderSpec adds a tag to the providerSpec for an OpenStack providerSpec.
+func tagOpenStackProviderSpec(rawProviderSpec *runtime.RawExtension, providerConfig providerconfig.ProviderConfig) error {
+	cfg := providerConfig.OpenStack().Config()
+
+	randomTag := uuid.New().String()
+	cfg.Tags = append(cfg.Tags, fmt.Sprintf("cpms-test-tag-%s", randomTag))
+
+	if err := setProviderSpecValue(rawProviderSpec, cfg); err != nil {
+		return fmt.Errorf("failed to set provider spec value: %w", err)
+	}
+
+	return nil
+}
+
+// nextAzureVMSize returns the next Azure VM size in the series.
+// In Azure terms this normally means doubling the size of the underlying instance.
+// This should mean we do not need to update this when the installer changes the default instance size.
+func nextAzureVMSize(current string) (string, error) {
+	// Regex to match the Azure VM size string.
+	re := regexp.MustCompile(`Standard_(?P<family>[a-zA-Z]+)(?P<multiplier>[0-9]+)(?P<subfamily>[a-z]*)(?P<version>_v[0-9]+)?`)
+
+	values := re.FindStringSubmatch(current)
+	if len(values) != 5 {
+		return "", fmt.Errorf("%w: %s", errInstanceTypeUnsupportedFormat, current)
+	}
+
+	family := values[1]
+	subfamily := values[3]
+	version := values[4]
+
+	multiplier, err := strconv.Atoi(values[2])
+	if err != nil {
+		// This is a panic because the multiplier should always be a number.
+		panic("failed to convert multiplier to int")
+	}
+
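+	// Azure VM sizes do not double uniformly at the top end: 32 vCPUs steps to 48,
+	// 48 steps to 64, and 64 or more has no larger size supported here.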
+	switch {
+	case multiplier == 32:
+		multiplier = 48
+	case multiplier == 48:
+		multiplier = 64
+	case multiplier >= 64:
+		return "", fmt.Errorf("%w: %s", errInstanceTypeNotSupported, current)
+	default:
+		multiplier *= 2
+	}
+
+	return fmt.Sprintf("Standard_%s%d%s%s", family, multiplier, subfamily, version), nil
+}
+
+// increaseGCPInstanceSize increases the instance size of the instance on the providerSpec for a GCP providerSpec.
+func increaseGCPInstanceSize(rawProviderSpec *runtime.RawExtension, providerConfig providerconfig.ProviderConfig) error {
+	cfg := providerConfig.GCP().Config()
+
+	var err error
+
+	cfg.MachineType, err = nextGCPMachineSize(cfg.MachineType)
+	if err != nil {
+		return fmt.Errorf("failed to get next instance size: %w", err)
+	}
+
+	if err := setProviderSpecValue(rawProviderSpec, cfg); err != nil {
+		return fmt.Errorf("failed to set provider spec value: %w", err)
+	}
+
+	return nil
+}
+
+// increaseNutanixInstanceSize increases the instance size of the instance on the providerSpec for a Nutanix providerSpec.
+func increaseNutanixInstanceSize(rawProviderSpec *runtime.RawExtension, providerConfig providerconfig.ProviderConfig) error {
+	cfg := providerConfig.Nutanix().Config()
+	cfg.VCPUSockets++
+
+	if err := setProviderSpecValue(rawProviderSpec, cfg); err != nil {
+		return fmt.Errorf("failed to set provider spec value: %w", err)
+	}
+
+	return nil
+}
+
+// increaseVSphereInstanceSize increases the instance size of the instance on the providerSpec for a VSphere providerSpec.
+func increaseVSphereInstanceSize(rawProviderSpec *runtime.RawExtension, providerConfig providerconfig.ProviderConfig) error {
+	cfg := providerConfig.VSphere().Config()
+	cfg.NumCPUs *= 2
+
+	if err := setProviderSpecValue(rawProviderSpec, cfg); err != nil {
+		return fmt.Errorf("failed to set provider spec value: %w", err)
+	}
+
+	return nil
+}
+
+// increaseOpenStackInstanceSize increases the instance size of the instance on the providerSpec for an OpenStack providerSpec.
+func increaseOpenStackInstanceSize(rawProviderSpec *runtime.RawExtension, providerConfig providerconfig.ProviderConfig) error {
+	cfg := providerConfig.OpenStack().Config()
+
+	if os.Getenv("OPENSTACK_CONTROLPLANE_FLAVOR_ALTERNATE") == "" {
+		return fmt.Errorf("OPENSTACK_CONTROLPLANE_FLAVOR_ALTERNATE environment variable not set: %w", errMissingInstanceSize)
+	} else {
+		cfg.Flavor = os.Getenv("OPENSTACK_CONTROLPLANE_FLAVOR_ALTERNATE")
+	}
+
+	if err := setProviderSpecValue(rawProviderSpec, cfg); err != nil {
+		return fmt.Errorf("failed to set provider spec value: %w", err)
+	}
+
+	return nil
+}
+
+// nextGCPMachineSize returns the next GCP machine size in the series.
+// The machine sizes being used are in the format <e2|n2|n1>-<subfamily(-subfamilyflavor(optional))>-<number>(-<number>(optional)).
+//
+//nolint:cyclop
+func nextGCPMachineSize(current string) (string, error) {
+	// Regex to match the GCP machine size string.
+	re := regexp.MustCompile(`^(?P<family>[0-9a-z]+)-(?P<subfamily>[0-9a-z]+(-(?P<subfamilyflavor>[a-z]+))?)-(?P<multiplier>[0-9\.]+)(-(?P<multiplier2>[0-9]+))?`)
+
+	subexpNames := re.SubexpNames()
+	values := re.FindStringSubmatch(current)
+	result := make(map[string]string)
+
+	// The number of named regex subexpressions must always match the number of submatches.
+	if len(values) != len(subexpNames) {
+		return "", fmt.Errorf("%w: %s", errInstanceTypeUnsupportedFormat, current)
+	}
+
+	// Store the submatches into a subexpression name -> value map.
+	for i, name := range subexpNames {
+		if i != 0 && name != "" {
+			result[name] = values[i]
+		}
+	}
+
+	family, okFamily := result["family"]
+	subfamily, okSubfamily := result["subfamily"]
+	_, okMultiplier := result["multiplier"]
+	subfamilyflavor, okSubfamilyflavor := result["subfamilyflavor"]
+
+	if !(okFamily && okSubfamily && okMultiplier && okSubfamilyflavor) {
+		return "", fmt.Errorf("%w: %s", errInstanceTypeUnsupportedFormat, current)
+	}
+
+	multiplier, err := strconv.ParseFloat(result["multiplier"], 64)
+	if err != nil {
+		// This is a panic because the multiplier should always be a number.
+		panic("failed to convert multiplier to float")
+	}
+
+	var multiplier2 int
+
+	if val, okMultiplier2 := result["multiplier2"]; okMultiplier2 && val != "" {
+		var err error
+
+		multiplier2, err = strconv.Atoi(val)
+		if err != nil {
+			// This is a panic because the multiplier2 should always be a number.
+			panic("failed to convert multiplier2 to int")
+		}
+	}
+
+	return setNextGCPMachineSize(current, family, subfamily, subfamilyflavor, multiplier, multiplier2)
+}
+
+// setNextGCPMachineSize returns the new GCP machine size in the series
+// according to the family supported (e2, n1, n2, n2d).
+//
+//nolint:cyclop,funlen,gocognit,gocyclo
+func setNextGCPMachineSize(current, family, subfamily, subfamilyflavor string, multiplier float64, multiplier2 int) (string, error) {
+	switch {
+	case strings.HasPrefix(subfamily, "custom"):
+		ivCPU := int(multiplier)
+		fvCPU := multiplier
+		mem := multiplier2
+
+		switch {
+		case multiplier == 0 || mem == 0:
+			return "", fmt.Errorf("%w: %s", errInstanceTypeNotSupported, current)
+
+		case family == "n1":
+			// You can create N1 custom machine types with 1 or more vCPUs.
+			// Above 1 vCPU, you must increment the number of vCPUs by 2, up to 96 vCPUs for Intel Skylake platform,
+			// or up to 64 vCPUs for Intel Broadwell, Haswell, or Ivy Bridge CPU platforms.
+			// Note: cap it to 64 as we don't detect CPU.
+			if ivCPU < 64 {
+				ivCPU += 2
+			}
+			// For N1 machine types, select between 1 GB and 6.5 GB per vCPU, inclusive.
+			// Note: use 3GB per vCPU, as that's a comfortable bump.
+			mem = ivCPU * 3 * 1024
+
+		case family == "n2":
+			// For N2 custom machine types, you can create a machine type with 2 to 80 vCPUs and memory between 1 and 864 GB.
+			// For machine types with up to 32 vCPUs, you can select a vCPU count that is a multiple of 2.
+			// For machine types with greater than 32 vCPUs,
+			// you must select a vCPU count that is a multiple of 4 (for example, 36, 40, 56, or 80).
+			if ivCPU < 32 {
+				ivCPU += 2
+			} else if ivCPU <= 76 {
+				ivCPU += 4
+			}
+			// For the N2 machine series, select between 0.5 GB and 8.0 GB per vCPU, inclusive.
+			// Note: the max is 864GB.
+			mem = ivCPU * 3 * 1024
+
+		case family == "n2d":
+			// You can create N2D custom machine types with 2, 4, 8, or 16 vCPUs.
+			// After 16, you can increment the number of vCPUs by 16, up to 96 vCPUs.
+			// The minimum acceptable number of vCPUs is 2.
+			switch {
+			case ivCPU == 2:
+				ivCPU = 4
+			case ivCPU == 4:
+				ivCPU = 8
+			case ivCPU == 8:
+				ivCPU = 16
+			case ivCPU == 96:
+				// Keep unchanged.
+			default:
+				ivCPU += 16
+			}
+			// For N2D machine types, select between 0.5 GB and 8.0 GB per vCPU in 0.256 GB increments.
+			mem = ivCPU * 3 * 1024
+
+		case family == "e2" && subfamilyflavor == "micro":
+			// 0.25 vCPU, 1 to 2 GB of memory.
+			if mem >= (2 * 1024) {
+				return "", fmt.Errorf("%w: %s", errInstanceTypeNotSupported, current)
+			}
+
+			mem += 1024
+
+			return fmt.Sprintf("%s-%s-%.2f-%d", family, subfamily, fvCPU, mem), nil
+
+		case family == "e2" && subfamilyflavor == "small":
+			// 0.50 vCPU, 2 to 4 GB of memory.
+			if mem >= (4 * 1024) {
+				return "", fmt.Errorf("%w: %s", errInstanceTypeNotSupported, current)
+			}
+
+			mem += 1024
+
+			return fmt.Sprintf("%s-%s-%.2f-%d", family, subfamily, fvCPU, mem), nil
+
+		case family == "e2" && subfamilyflavor == "medium":
+			// 1 vCPU, 4 to 8 GB of memory.
+			if mem >= (8 * 1024) {
+				return "", fmt.Errorf("%w: %s", errInstanceTypeNotSupported, current)
+			}
+
+			mem += 1024
+
+			return fmt.Sprintf("%s-%s-%d-%d", family, subfamily, ivCPU, mem), nil
+
+		case family == "e2" && subfamilyflavor == "":
+			// You can create E2 custom machine types with vCPUs in multiples of 2, up to 32 vCPUs.
+			// The minimum acceptable number of vCPUs for a VM is 2.
+			if ivCPU < 32 {
+				ivCPU += 2
+			}
+			// For E2, the ratio of memory per vCPU is 0.5 GB to 8 GB inclusive.
+			mem = ivCPU * 3 * 1024
+		}
+
+		return fmt.Sprintf("%s-%s-%d-%d", family, subfamily, ivCPU, mem), nil
+
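+	// For standard (non-custom) machine types, bump the vCPU multiplier to the next
+	// supported size for the family, doubling where a direct double exists.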
+	case multiplier >= 32 && family == "e2":
+		return "", fmt.Errorf("%w: %s", errInstanceTypeNotSupported, current)
+	case multiplier == 32 && family == "n2":
+		multiplier = 48
+	case multiplier == 64 && family == "n2":
+		multiplier = 80
+	case multiplier == 64 || multiplier == 80:
+		multiplier = 96
+	case multiplier >= 96 && family == "n1":
+		return "", fmt.Errorf("%w: %s", errInstanceTypeNotSupported, current)
+	case multiplier == 96:
+		multiplier = 128
+	case multiplier >= 128:
+		return "", fmt.Errorf("%w: %s", errInstanceTypeNotSupported, current)
+	default:
+		multiplier *= 2
+	}
+
+	return fmt.Sprintf("%s-standard-%d", family, int(multiplier)), nil
+}
+
+// setProviderSpecValue sets the value of the provider spec to the value that is passed.
+func setProviderSpecValue(rawProviderSpec *runtime.RawExtension, value interface{}) error {
+	providerSpecValue, err := runtime.DefaultUnstructuredConverter.ToUnstructured(&value)
+	if err != nil {
+		return fmt.Errorf("failed to convert provider spec to unstructured: %w", err)
+	}
+
+	rawProviderSpec.Object = &unstructured.Unstructured{
+		Object: providerSpecValue,
+	}
+	rawProviderSpec.Raw = nil
+
+	return nil
+}
diff --git a/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/test/e2e/framework/utils.go b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/test/e2e/framework/utils.go
new file mode 100644
index 0000000000000000000000000000000000000000..e79fef9af4cb654bba072175e3d7c8ac73875647
--- /dev/null
+++ b/vendor/github.com/openshift/cluster-control-plane-machine-set-operator/test/e2e/framework/utils.go
@@ -0,0 +1,137 @@
+/*
+Copyright 2022 Red Hat, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package framework
+
+import (
+	"context"
+	"errors"
+	"sync"
+	"time"
+
+	"github.com/onsi/ginkgo/v2"
+	"github.com/onsi/gomega"
+)
+
+const (
+	// DefaultTimeout is the default timeout for eventually and consistently assertions.
+	DefaultTimeout = 60 * time.Second
+
+	// DefaultInterval is the default interval for eventually and consistently assertions.
+	DefaultInterval = 5 * time.Second
+
+	// MachineAPINamespace is the name of the openshift-machine-api namespace.
+	MachineAPINamespace = "openshift-machine-api"
+
+	// ControlPlaneMachineSetName is the name of the control plane machine set in all clusters.
+	ControlPlaneMachineSetName = "cluster"
+)
+
+var (
+	errContextCancelled = errors.New("context cancelled")
+)
+
+// GomegaAssertions is a subset of the gomega.Gomega interface.
+// It is the set allowed for checks and conditions in the RunCheckUntil
+// helper function.
+type GomegaAssertions interface {
+	Ω(actual interface{}, extra ...interface{}) gomega.Assertion //nolint:asciicheck
+	Expect(actual interface{}, extra ...interface{}) gomega.Assertion
+	ExpectWithOffset(offset int, actual interface{}, extra ...interface{}) gomega.Assertion
+}
+
+// ControlPlaneMachineSetSelectorLabels are the set of labels used to select
+// control plane machines within the cluster.
+func ControlPlaneMachineSetSelectorLabels() map[string]string {
+	return map[string]string{
+		"machine.openshift.io/cluster-api-machine-role": "master",
+		"machine.openshift.io/cluster-api-machine-type": "master",
+	}
+}
+
+// Periodic is a periodic ginkgo label.
+func Periodic() ginkgo.Labels {
+	return ginkgo.Label("Periodic")
+}
+
+// PreSubmit is a presubmit ginkgo label.
+func PreSubmit() ginkgo.Labels {
+	return ginkgo.Label("PreSubmit")
+}
+
+// Async runs the test function as an asynchronous goroutine.
+// If the test function returns false, cancel is called.
+// This allows the context to be cancelled if the test function fails.
+func Async(wg *sync.WaitGroup, cancel context.CancelFunc, testFunc func() bool) {
+	wg.Add(1)
+
+	go func() {
+		defer ginkgo.GinkgoRecover()
+		defer wg.Done()
+
+		if !testFunc() {
+			cancel()
+		}
+	}()
+}
+
+// RunCheckUntil runs the check function until the condition succeeds or the context is cancelled.
+// If the check fails before the condition succeeds, the test will fail.
+// The check and condition functions must use the passed Gomega for any assertions so that we can handle failures
+// within the functions appropriately.
+func RunCheckUntil(ctx context.Context, check, condition func(context.Context, GomegaAssertions) bool) bool {
+	return gomega.EventuallyWithOffset(1, func() error {
+		checkErr := runAssertion(ctx, check)
+		conditionErr := runAssertion(ctx, condition)
+
+		switch {
+		case conditionErr == nil:
+			// The condition finally succeeded.
+			return nil
+		case errors.Is(conditionErr, errContextCancelled) || errors.Is(checkErr, errContextCancelled):
+			// The context was cancelled.
+			// Return the context cancelled error so that the Eventually will fail with a consistent error.
+			return errContextCancelled
+		case checkErr != nil:
+			// The check failed but the condition has not yet succeeded.
+			// Abort the check.
+			return gomega.StopTrying("Check failed before condition succeeded").Wrap(checkErr)
+		default:
+			return conditionErr
+		}
+	}).WithContext(ctx).Should(gomega.Succeed(), "check failed or condition did not succeed before the context was cancelled")
+}
+
+// runAssertion runs the assertion function and returns an error if the assertion failed.
+func runAssertion(ctx context.Context, assertion func(context.Context, GomegaAssertions) bool) error {
+	select {
+	case <-ctx.Done():
+		return errContextCancelled
+	default:
+	}
+
+	var err error
+
+	g := gomega.NewGomega(func(message string, callerSkip ...int) {
+		err = errors.New(message) //nolint:goerr113
+	})
+
+	if !assertion(ctx, g) {
+		return err
+	}
+
+	return nil
+}
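
A minimal sketch of how the helpers above could be combined in a Ginkgo spec. The controlPlaneIsHealthy and rolloutComplete predicates are placeholders invented for illustration; only Async, RunCheckUntil, GomegaAssertions, PreSubmit and DefaultTimeout come from the vendored file.

```go
package e2e_test

import (
	"context"
	"sync"

	"github.com/onsi/ginkgo/v2"
	"github.com/onsi/gomega"

	"github.com/openshift/cluster-control-plane-machine-set-operator/test/e2e/framework"
)

// Placeholder predicates standing in for real cluster checks.
func controlPlaneIsHealthy(ctx context.Context) bool { return true }
func rolloutComplete(ctx context.Context) bool       { return true }

var _ = ginkgo.It("keeps the control plane healthy while waiting for a rollout", framework.PreSubmit(), func() {
	ctx, cancel := context.WithTimeout(context.Background(), framework.DefaultTimeout)
	defer cancel()

	wg := &sync.WaitGroup{}

	// Run the invariant/condition pair in the background; if it fails,
	// the shared context is cancelled so sibling goroutines stop early.
	framework.Async(wg, cancel, func() bool {
		return framework.RunCheckUntil(ctx,
			// Invariant that must hold for the whole duration of the wait.
			func(ctx context.Context, g framework.GomegaAssertions) bool {
				return g.Expect(controlPlaneIsHealthy(ctx)).To(gomega.BeTrue())
			},
			// Condition that ends the wait once it succeeds.
			func(ctx context.Context, g framework.GomegaAssertions) bool {
				return g.Expect(rolloutComplete(ctx)).To(gomega.BeTrue())
			},
		)
	})

	wg.Wait()
})
```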
diff --git a/vendor/github.com/pelletier/go-toml/v2/.gitignore b/vendor/github.com/pelletier/go-toml/v2/.gitignore
index a69e2b0ebd73761fa6e619f12cfb8557c2a137e1..4b7c4eda3a915508ac70769f3621d76c38a75650 100644
--- a/vendor/github.com/pelletier/go-toml/v2/.gitignore
+++ b/vendor/github.com/pelletier/go-toml/v2/.gitignore
@@ -3,4 +3,5 @@ fuzz/
 cmd/tomll/tomll
 cmd/tomljson/tomljson
 cmd/tomltestgen/tomltestgen
-dist
\ No newline at end of file
+dist
+tests/
diff --git a/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md b/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md
index 04dd12bcbcad059a9b24e0ceb1c6a191e2c66bb4..96ecf9e2b3ac4968f641f25105c686f6aee2cf80 100644
--- a/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md
+++ b/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md
@@ -165,25 +165,22 @@ Checklist:
 
 ### New release
 
-1. Decide on the next version number. Use semver.
-2. Generate release notes using [`gh`][gh]. Example:
+1. Decide on the next version number. Use semver. Review commits since last
+   version to assess.
+2. Tag release. For example:
 ```
-$ gh api -X POST \
-  -F tag_name='v2.0.0-beta.5' \
-  -F target_commitish='v2' \
-  -F previous_tag_name='v2.0.0-beta.4' \
-  --jq '.body' \
-  repos/pelletier/go-toml/releases/generate-notes
+git checkout v2
+git pull
+git tag v2.2.0
+git push --tags
 ```
-3. Look for "Other changes". That would indicate a pull request not labeled
-   properly. Tweak labels and pull request titles until changelog looks good for
-   users.
-4. [Draft new release][new-release].
-5. Fill tag and target with the same value used to generate the changelog.
-6. Set title to the new tag value.
-7. Paste the generated changelog.
-8. Check "create discussion", in the "Releases" category.
-9. Check pre-release if new version is an alpha or beta.
+3. CI automatically builds a draft Github release. Review it and edit as
+   necessary. Look for "Other changes". That would indicate a pull request not
+   labeled properly. Tweak labels and pull request titles until changelog looks
+   good for users.
+4. Check "create discussion" box, in the "Releases" category.
+5. If new version is an alpha or beta only, check pre-release box.
+
 
 [issues-tracker]: https://github.com/pelletier/go-toml/issues
 [bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md
diff --git a/vendor/github.com/pelletier/go-toml/v2/README.md b/vendor/github.com/pelletier/go-toml/v2/README.md
index 63b92f3b0b2b592804b8545dc2cf369c2cd2b00f..d964b25fe19fe6e023948089bbdc2a8323dc73a6 100644
--- a/vendor/github.com/pelletier/go-toml/v2/README.md
+++ b/vendor/github.com/pelletier/go-toml/v2/README.md
@@ -98,9 +98,9 @@ Given the following struct, let's see how to read it and write it as TOML:
 
 ```go
 type MyConfig struct {
-      Version int
-      Name    string
-      Tags    []string
+	Version int
+	Name    string
+	Tags    []string
 }
 ```
 
@@ -119,7 +119,7 @@ tags = ["go", "toml"]
 var cfg MyConfig
 err := toml.Unmarshal([]byte(doc), &cfg)
 if err != nil {
-      panic(err)
+	panic(err)
 }
 fmt.Println("version:", cfg.Version)
 fmt.Println("name:", cfg.Name)
@@ -140,14 +140,14 @@ as a TOML document:
 
 ```go
 cfg := MyConfig{
-      Version: 2,
-      Name:    "go-toml",
-      Tags:    []string{"go", "toml"},
+	Version: 2,
+	Name:    "go-toml",
+	Tags:    []string{"go", "toml"},
 }
 
 b, err := toml.Marshal(cfg)
 if err != nil {
-      panic(err)
+	panic(err)
 }
 fmt.Println(string(b))
 
@@ -175,17 +175,17 @@ the AST level. See https://pkg.go.dev/github.com/pelletier/go-toml/v2/unstable.
 Execution time speedup compared to other Go TOML libraries:
 
 <table>
-    <thead>
-        <tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
-    </thead>
-    <tbody>
-        <tr><td>Marshal/HugoFrontMatter-2</td><td>1.9x</td><td>1.9x</td></tr>
-        <tr><td>Marshal/ReferenceFile/map-2</td><td>1.7x</td><td>1.8x</td></tr>
-        <tr><td>Marshal/ReferenceFile/struct-2</td><td>2.2x</td><td>2.5x</td></tr>
-        <tr><td>Unmarshal/HugoFrontMatter-2</td><td>2.9x</td><td>2.9x</td></tr>
-        <tr><td>Unmarshal/ReferenceFile/map-2</td><td>2.6x</td><td>2.9x</td></tr>
-        <tr><td>Unmarshal/ReferenceFile/struct-2</td><td>4.4x</td><td>5.3x</td></tr>
-     </tbody>
+	<thead>
+		<tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
+	</thead>
+	<tbody>
+		<tr><td>Marshal/HugoFrontMatter-2</td><td>1.9x</td><td>2.2x</td></tr>
+		<tr><td>Marshal/ReferenceFile/map-2</td><td>1.7x</td><td>2.1x</td></tr>
+		<tr><td>Marshal/ReferenceFile/struct-2</td><td>2.2x</td><td>3.0x</td></tr>
+		<tr><td>Unmarshal/HugoFrontMatter-2</td><td>2.9x</td><td>2.7x</td></tr>
+		<tr><td>Unmarshal/ReferenceFile/map-2</td><td>2.6x</td><td>2.7x</td></tr>
+		<tr><td>Unmarshal/ReferenceFile/struct-2</td><td>4.6x</td><td>5.1x</td></tr>
+	 </tbody>
 </table>
 <details><summary>See more</summary>
 <p>The table above has the results of the most common use-cases. The table below
@@ -193,22 +193,22 @@ contains the results of all benchmarks, including unrealistic ones. It is
 provided for completeness.</p>
 
 <table>
-    <thead>
-        <tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
-    </thead>
-    <tbody>
-        <tr><td>Marshal/SimpleDocument/map-2</td><td>1.8x</td><td>2.9x</td></tr>
-        <tr><td>Marshal/SimpleDocument/struct-2</td><td>2.7x</td><td>4.2x</td></tr>
-        <tr><td>Unmarshal/SimpleDocument/map-2</td><td>4.5x</td><td>3.1x</td></tr>
-        <tr><td>Unmarshal/SimpleDocument/struct-2</td><td>6.2x</td><td>3.9x</td></tr>
-        <tr><td>UnmarshalDataset/example-2</td><td>3.1x</td><td>3.5x</td></tr>
-        <tr><td>UnmarshalDataset/code-2</td><td>2.3x</td><td>3.1x</td></tr>
-        <tr><td>UnmarshalDataset/twitter-2</td><td>2.5x</td><td>2.6x</td></tr>
-        <tr><td>UnmarshalDataset/citm_catalog-2</td><td>2.1x</td><td>2.2x</td></tr>
-        <tr><td>UnmarshalDataset/canada-2</td><td>1.6x</td><td>1.3x</td></tr>
-        <tr><td>UnmarshalDataset/config-2</td><td>4.3x</td><td>3.2x</td></tr>
-        <tr><td>[Geo mean]</td><td>2.7x</td><td>2.8x</td></tr>
-     </tbody>
+	<thead>
+		<tr><th>Benchmark</th><th>go-toml v1</th><th>BurntSushi/toml</th></tr>
+	</thead>
+	<tbody>
+		<tr><td>Marshal/SimpleDocument/map-2</td><td>1.8x</td><td>2.7x</td></tr>
+		<tr><td>Marshal/SimpleDocument/struct-2</td><td>2.7x</td><td>3.8x</td></tr>
+		<tr><td>Unmarshal/SimpleDocument/map-2</td><td>3.8x</td><td>3.0x</td></tr>
+		<tr><td>Unmarshal/SimpleDocument/struct-2</td><td>5.6x</td><td>4.1x</td></tr>
+		<tr><td>UnmarshalDataset/example-2</td><td>3.0x</td><td>3.2x</td></tr>
+		<tr><td>UnmarshalDataset/code-2</td><td>2.3x</td><td>2.9x</td></tr>
+		<tr><td>UnmarshalDataset/twitter-2</td><td>2.6x</td><td>2.7x</td></tr>
+		<tr><td>UnmarshalDataset/citm_catalog-2</td><td>2.2x</td><td>2.3x</td></tr>
+		<tr><td>UnmarshalDataset/canada-2</td><td>1.8x</td><td>1.5x</td></tr>
+		<tr><td>UnmarshalDataset/config-2</td><td>4.1x</td><td>2.9x</td></tr>
+		<tr><td>geomean</td><td>2.7x</td><td>2.8x</td></tr>
+	 </tbody>
 </table>
 <p>This table can be generated with <code>./ci.sh benchmark -a -html</code>.</p>
 </details>
@@ -233,24 +233,24 @@ Go-toml provides three handy command line tools:
 
  * `tomljson`: Reads a TOML file and outputs its JSON representation.
 
-    ```
-    $ go install github.com/pelletier/go-toml/v2/cmd/tomljson@latest
-    $ tomljson --help
-    ```
+	```
+	$ go install github.com/pelletier/go-toml/v2/cmd/tomljson@latest
+	$ tomljson --help
+	```
 
  * `jsontoml`: Reads a JSON file and outputs a TOML representation.
 
-    ```
-    $ go install github.com/pelletier/go-toml/v2/cmd/jsontoml@latest
-    $ jsontoml --help
-    ```
+	```
+	$ go install github.com/pelletier/go-toml/v2/cmd/jsontoml@latest
+	$ jsontoml --help
+	```
 
  * `tomll`: Lints and reformats a TOML file.
 
-    ```
-    $ go install github.com/pelletier/go-toml/v2/cmd/tomll@latest
-    $ tomll --help
-    ```
+	```
+	$ go install github.com/pelletier/go-toml/v2/cmd/tomll@latest
+	$ tomll --help
+	```
 
 ### Docker image
 
@@ -261,7 +261,7 @@ Those tools are also available as a [Docker image][docker]. For example, to use
 docker run -i ghcr.io/pelletier/go-toml:v2 tomljson < example.toml
 ```
 
-Multiple versions are availble on [ghcr.io][docker].
+Multiple versions are available on [ghcr.io][docker].
 
 [docker]: https://github.com/pelletier/go-toml/pkgs/container/go-toml
 
@@ -293,16 +293,16 @@ element in the interface to decode the object. For example:
 
 ```go
 type inner struct {
-  B interface{}
+	B interface{}
 }
 type doc struct {
-  A interface{}
+	A interface{}
 }
 
 d := doc{
-  A: inner{
-    B: "Before",
-  },
+	A: inner{
+		B: "Before",
+	},
 }
 
 data := `
@@ -341,7 +341,7 @@ contained in the doc is superior to the capacity of the array. For example:
 
 ```go
 type doc struct {
-  A [2]string
+	A [2]string
 }
 d := doc{}
 err := toml.Unmarshal([]byte(`A = ["one", "two", "many"]`), &d)
@@ -565,10 +565,11 @@ complete solutions exist out there.
 
 ## Versioning
 
-Go-toml follows [Semantic Versioning](https://semver.org). The supported version
-of [TOML](https://github.com/toml-lang/toml) is indicated at the beginning of
-this document. The last two major versions of Go are supported
-(see [Go Release Policy](https://golang.org/doc/devel/release.html#policy)).
+Except for parts explicitly marked otherwise, go-toml follows [Semantic
+Versioning](https://semver.org). The supported version of
+[TOML](https://github.com/toml-lang/toml) is indicated at the beginning of this
+document. The last two major versions of Go are supported (see [Go Release
+Policy](https://golang.org/doc/devel/release.html#policy)).
 
 ## License
 
diff --git a/vendor/github.com/pelletier/go-toml/v2/SECURITY.md b/vendor/github.com/pelletier/go-toml/v2/SECURITY.md
index b2f21cfc92c98b1dec0d5504b8bd794f87406c77..d4d554fda9d1e6f31a205795acd82db78f2a9bd2 100644
--- a/vendor/github.com/pelletier/go-toml/v2/SECURITY.md
+++ b/vendor/github.com/pelletier/go-toml/v2/SECURITY.md
@@ -2,9 +2,6 @@
 
 ## Supported Versions
 
-Use this section to tell people about which versions of your project are
-currently being supported with security updates.
-
 | Version    | Supported          |
 | ---------- | ------------------ |
 | Latest 2.x | :white_check_mark: |
diff --git a/vendor/github.com/pelletier/go-toml/v2/ci.sh b/vendor/github.com/pelletier/go-toml/v2/ci.sh
index 9ae8b7537597efcc70d1329fae00c0ed80b93e46..86217a9b0979bf9445c0ae7eedbe628ae9f9b5db 100644
--- a/vendor/github.com/pelletier/go-toml/v2/ci.sh
+++ b/vendor/github.com/pelletier/go-toml/v2/ci.sh
@@ -77,7 +77,7 @@ cover() {
 
     pushd "$dir"
     go test -covermode=atomic  -coverpkg=./... -coverprofile=coverage.out.tmp ./...
-    cat coverage.out.tmp | grep -v fuzz | grep -v testsuite | grep -v tomltestgen | grep -v gotoml-test-decoder > coverage.out
+    grep -Ev '(fuzz|testsuite|tomltestgen|gotoml-test-decoder|gotoml-test-encoder)' coverage.out.tmp > coverage.out
     go tool cover -func=coverage.out
     echo "Coverage profile for ${branch}: ${dir}/coverage.out" >&2
     popd
@@ -152,7 +152,7 @@ bench() {
     fi
 
     export GOMAXPROCS=2
-    nice -n -19 taskset --cpu-list 0,1 go test '-bench=^Benchmark(Un)?[mM]arshal' -count=5 -run=Nothing ./... | tee "${out}"
+    go test '-bench=^Benchmark(Un)?[mM]arshal' -count=10 -run=Nothing ./... | tee "${out}"
     popd
 
     if [ "${branch}" != "HEAD" ]; then
@@ -161,10 +161,12 @@ bench() {
 }
 
 fmktemp() {
-    if mktemp --version|grep GNU >/dev/null; then
-        mktemp --suffix=-$1;
+    if mktemp --version &> /dev/null; then
+	# GNU
+        mktemp --suffix=-$1
     else
-        mktemp -t $1;
+	# BSD
+	mktemp -t $1
     fi
 }
 
@@ -184,12 +186,14 @@ with open(sys.argv[1]) as f:
             lines.append(line.split(','))
 
 results = []
-for line in reversed(lines[1:]):
+for line in reversed(lines[2:]):
+    if len(line) < 8 or line[0] == "":
+        continue
     v2 = float(line[1])
     results.append([
         line[0].replace("-32", ""),
         "%.1fx" % (float(line[3])/v2),  # v1
-        "%.1fx" % (float(line[5])/v2),  # bs
+        "%.1fx" % (float(line[7])/v2),  # bs
     ])
 # move geomean to the end
 results.append(results[0])
@@ -260,10 +264,10 @@ benchmark() {
 
         if [ "$1" = "-html" ]; then
             tmpcsv=`fmktemp csv`
-            benchstat -csv -geomean go-toml-v2.txt go-toml-v1.txt bs-toml.txt > $tmpcsv
+            benchstat -format csv go-toml-v2.txt go-toml-v1.txt bs-toml.txt > $tmpcsv
             benchstathtml $tmpcsv
         else
-            benchstat -geomean go-toml-v2.txt go-toml-v1.txt bs-toml.txt
+            benchstat go-toml-v2.txt go-toml-v1.txt bs-toml.txt
         fi
 
         rm -f go-toml-v2.txt go-toml-v1.txt bs-toml.txt
diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go
index 40e23f8304a7f98799776e3073bdfe19beb511f5..76df2d5b6a95f58fe29d1e4ce80b3e134d6437d8 100644
--- a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go
+++ b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go
@@ -57,7 +57,11 @@ type SeenTracker struct {
 	currentIdx int
 }
 
-var pool sync.Pool
+var pool = sync.Pool{
+	New: func() interface{} {
+		return &SeenTracker{}
+	},
+}
 
 func (s *SeenTracker) reset() {
 	// Always contains a root element at index 0.
@@ -149,8 +153,9 @@ func (s *SeenTracker) setExplicitFlag(parentIdx int) {
 
 // CheckExpression takes a top-level node and checks that it does not contain
 // keys that have been seen in previous calls, and validates that types are
-// consistent.
-func (s *SeenTracker) CheckExpression(node *unstable.Node) error {
+// consistent. It returns true if it is the first time this node's key is seen.
+// Useful to clear array tables on first use.
+func (s *SeenTracker) CheckExpression(node *unstable.Node) (bool, error) {
 	if s.entries == nil {
 		s.reset()
 	}
@@ -166,7 +171,7 @@ func (s *SeenTracker) CheckExpression(node *unstable.Node) error {
 	}
 }
 
-func (s *SeenTracker) checkTable(node *unstable.Node) error {
+func (s *SeenTracker) checkTable(node *unstable.Node) (bool, error) {
 	if s.currentIdx >= 0 {
 		s.setExplicitFlag(s.currentIdx)
 	}
@@ -192,7 +197,7 @@ func (s *SeenTracker) checkTable(node *unstable.Node) error {
 		} else {
 			entry := s.entries[idx]
 			if entry.kind == valueKind {
-				return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
+				return false, fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
 			}
 		}
 		parentIdx = idx
@@ -201,25 +206,27 @@ func (s *SeenTracker) checkTable(node *unstable.Node) error {
 	k := it.Node().Data
 	idx := s.find(parentIdx, k)
 
+	first := false
 	if idx >= 0 {
 		kind := s.entries[idx].kind
 		if kind != tableKind {
-			return fmt.Errorf("toml: key %s should be a table, not a %s", string(k), kind)
+			return false, fmt.Errorf("toml: key %s should be a table, not a %s", string(k), kind)
 		}
 		if s.entries[idx].explicit {
-			return fmt.Errorf("toml: table %s already exists", string(k))
+			return false, fmt.Errorf("toml: table %s already exists", string(k))
 		}
 		s.entries[idx].explicit = true
 	} else {
 		idx = s.create(parentIdx, k, tableKind, true, false)
+		first = true
 	}
 
 	s.currentIdx = idx
 
-	return nil
+	return first, nil
 }
 
-func (s *SeenTracker) checkArrayTable(node *unstable.Node) error {
+func (s *SeenTracker) checkArrayTable(node *unstable.Node) (bool, error) {
 	if s.currentIdx >= 0 {
 		s.setExplicitFlag(s.currentIdx)
 	}
@@ -242,7 +249,7 @@ func (s *SeenTracker) checkArrayTable(node *unstable.Node) error {
 		} else {
 			entry := s.entries[idx]
 			if entry.kind == valueKind {
-				return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
+				return false, fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
 			}
 		}
 
@@ -252,22 +259,23 @@ func (s *SeenTracker) checkArrayTable(node *unstable.Node) error {
 	k := it.Node().Data
 	idx := s.find(parentIdx, k)
 
-	if idx >= 0 {
+	firstTime := idx < 0
+	if firstTime {
+		idx = s.create(parentIdx, k, arrayTableKind, true, false)
+	} else {
 		kind := s.entries[idx].kind
 		if kind != arrayTableKind {
-			return fmt.Errorf("toml: key %s already exists as a %s,  but should be an array table", kind, string(k))
+			return false, fmt.Errorf("toml: key %s already exists as a %s,  but should be an array table", kind, string(k))
 		}
 		s.clear(idx)
-	} else {
-		idx = s.create(parentIdx, k, arrayTableKind, true, false)
 	}
 
 	s.currentIdx = idx
 
-	return nil
+	return firstTime, nil
 }
 
-func (s *SeenTracker) checkKeyValue(node *unstable.Node) error {
+func (s *SeenTracker) checkKeyValue(node *unstable.Node) (bool, error) {
 	parentIdx := s.currentIdx
 	it := node.Key()
 
@@ -281,11 +289,11 @@ func (s *SeenTracker) checkKeyValue(node *unstable.Node) error {
 		} else {
 			entry := s.entries[idx]
 			if it.IsLast() {
-				return fmt.Errorf("toml: key %s is already defined", string(k))
+				return false, fmt.Errorf("toml: key %s is already defined", string(k))
 			} else if entry.kind != tableKind {
-				return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
+				return false, fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind)
 			} else if entry.explicit {
-				return fmt.Errorf("toml: cannot redefine table %s that has already been explicitly defined", string(k))
+				return false, fmt.Errorf("toml: cannot redefine table %s that has already been explicitly defined", string(k))
 			}
 		}
 
@@ -303,45 +311,39 @@ func (s *SeenTracker) checkKeyValue(node *unstable.Node) error {
 		return s.checkArray(value)
 	}
 
-	return nil
+	return false, nil
 }
 
-func (s *SeenTracker) checkArray(node *unstable.Node) error {
+func (s *SeenTracker) checkArray(node *unstable.Node) (first bool, err error) {
 	it := node.Children()
 	for it.Next() {
 		n := it.Node()
 		switch n.Kind {
 		case unstable.InlineTable:
-			err := s.checkInlineTable(n)
+			first, err = s.checkInlineTable(n)
 			if err != nil {
-				return err
+				return false, err
 			}
 		case unstable.Array:
-			err := s.checkArray(n)
+			first, err = s.checkArray(n)
 			if err != nil {
-				return err
+				return false, err
 			}
 		}
 	}
-	return nil
+	return first, nil
 }
 
-func (s *SeenTracker) checkInlineTable(node *unstable.Node) error {
-	if pool.New == nil {
-		pool.New = func() interface{} {
-			return &SeenTracker{}
-		}
-	}
-
+func (s *SeenTracker) checkInlineTable(node *unstable.Node) (first bool, err error) {
 	s = pool.Get().(*SeenTracker)
 	s.reset()
 
 	it := node.Children()
 	for it.Next() {
 		n := it.Node()
-		err := s.checkKeyValue(n)
+		first, err = s.checkKeyValue(n)
 		if err != nil {
-			return err
+			return false, err
 		}
 	}
 
@@ -352,5 +354,5 @@ func (s *SeenTracker) checkInlineTable(node *unstable.Node) error {
 	// redefinition of its keys: check* functions cannot walk into
 	// a value.
 	pool.Put(s)
-	return nil
+	return first, nil
 }
diff --git a/vendor/github.com/pelletier/go-toml/v2/marshaler.go b/vendor/github.com/pelletier/go-toml/v2/marshaler.go
index 6fe78533c1c5437cc607686b4945b6924b193d6f..7f4e20c1285aaa513935a275ce7b4c0cc2c2b74b 100644
--- a/vendor/github.com/pelletier/go-toml/v2/marshaler.go
+++ b/vendor/github.com/pelletier/go-toml/v2/marshaler.go
@@ -3,6 +3,7 @@ package toml
 import (
 	"bytes"
 	"encoding"
+	"encoding/json"
 	"fmt"
 	"io"
 	"math"
@@ -37,10 +38,11 @@ type Encoder struct {
 	w io.Writer
 
 	// global settings
-	tablesInline    bool
-	arraysMultiline bool
-	indentSymbol    string
-	indentTables    bool
+	tablesInline       bool
+	arraysMultiline    bool
+	indentSymbol       string
+	indentTables       bool
+	marshalJsonNumbers bool
 }
 
 // NewEncoder returns a new Encoder that writes to w.
@@ -87,6 +89,17 @@ func (enc *Encoder) SetIndentTables(indent bool) *Encoder {
 	return enc
 }
 
+// SetMarshalJsonNumbers forces the encoder to serialize `json.Number` as a
+// float or integer instead of relying on TextMarshaler to emit a string.
+//
+// *Unstable:* This method does not follow the compatibility guarantees of
+// semver. It can be changed or removed without a new major version being
+// issued.
+func (enc *Encoder) SetMarshalJsonNumbers(enabled bool) *Encoder {
+	enc.marshalJsonNumbers = enabled
+	return enc
+}
+
 // Encode writes a TOML representation of v to the stream.
 //
 // If v cannot be represented to TOML it returns an error.
@@ -252,6 +265,18 @@ func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, e
 		return append(b, x.String()...), nil
 	case LocalDateTime:
 		return append(b, x.String()...), nil
+	case json.Number:
+		if enc.marshalJsonNumbers {
+			if x == "" { // Useful zero value.
+				return append(b, "0"...), nil
+			} else if v, err := x.Int64(); err == nil {
+				return enc.encode(b, ctx, reflect.ValueOf(v))
+			} else if f, err := x.Float64(); err == nil {
+				return enc.encode(b, ctx, reflect.ValueOf(f))
+			} else {
+				return nil, fmt.Errorf("toml: unable to convert %q to int64 or float64", x)
+			}
+		}
 	}
 
 	hasTextMarshaler := v.Type().Implements(textMarshalerType)
@@ -707,6 +732,8 @@ func walkStruct(ctx encoderCtx, t *table, v reflect.Value) {
 			if fieldType.Anonymous {
 				if fieldType.Type.Kind() == reflect.Struct {
 					walkStruct(ctx, t, f)
+				} else if fieldType.Type.Kind() == reflect.Pointer && !f.IsNil() && f.Elem().Kind() == reflect.Struct {
+					walkStruct(ctx, t, f.Elem())
 				}
 				continue
 			} else {
@@ -998,6 +1025,10 @@ func (enc *Encoder) encodeSliceAsArrayTable(b []byte, ctx encoderCtx, v reflect.
 
 	scratch = enc.commented(ctx.commented, scratch)
 
+	if enc.indentTables {
+		scratch = enc.indent(ctx.indent, scratch)
+	}
+
 	scratch = append(scratch, "[["...)
 
 	for i, k := range ctx.parentKey {
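
A short, self-contained sketch of the new SetMarshalJsonNumbers encoder option added above; the document content is made up for illustration.

```go
package main

import (
	"encoding/json"
	"os"

	"github.com/pelletier/go-toml/v2"
)

func main() {
	doc := map[string]interface{}{
		"count": json.Number("3"),
		"price": json.Number("42.5"),
	}

	enc := toml.NewEncoder(os.Stdout)
	// Without this option json.Number is emitted as a quoted string; with it,
	// "count" becomes a TOML integer and "price" a TOML float.
	enc.SetMarshalJsonNumbers(true)

	if err := enc.Encode(doc); err != nil {
		panic(err)
	}
}
```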
diff --git a/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go b/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go
index 868c74c1577bbe27ab09a2ea538125f41bce5c34..98231bae65b6165c14c7f1cb68e46ceac070f7f9 100644
--- a/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go
+++ b/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go
@@ -35,6 +35,9 @@ type Decoder struct {
 
 	// global settings
 	strict bool
+
+	// toggles unmarshaler interface
+	unmarshalerInterface bool
 }
 
 // NewDecoder creates a new Decoder that will read from r.
@@ -54,6 +57,24 @@ func (d *Decoder) DisallowUnknownFields() *Decoder {
 	return d
 }
 
+// EnableUnmarshalerInterface enables support for the Unmarshaler interface.
+//
+// With this feature enabled, types implementing the unstable/Unmarshaler
+// interface can be decoded from any structure of the document. It allows types
+// that don't have a straightforward TOML representation to provide their own
+// decoding logic.
+//
+// Currently, types can only decode from a single value. Tables and array tables
+// are not supported.
+//
+// *Unstable:* This method does not follow the compatibility guarantees of
+// semver. It can be changed or removed without a new major version being
+// issued.
+func (d *Decoder) EnableUnmarshalerInterface() *Decoder {
+	d.unmarshalerInterface = true
+	return d
+}
+
 // Decode the whole content of r into v.
 //
 // By default, values in the document that don't exist in the target Go value
@@ -108,6 +129,7 @@ func (d *Decoder) Decode(v interface{}) error {
 		strict: strict{
 			Enabled: d.strict,
 		},
+		unmarshalerInterface: d.unmarshalerInterface,
 	}
 
 	return dec.FromParser(v)
@@ -127,6 +149,10 @@ type decoder struct {
 	// need to be skipped.
 	skipUntilTable bool
 
+	// Flag indicating that the current array/slice table should be cleared because
+	// it is the first encounter of an array table.
+	clearArrayTable bool
+
 	// Tracks position in Go arrays.
 	// This is used when decoding [[array tables]] into Go arrays. Given array
 	// tables are separate TOML expression, we need to keep track of where we
@@ -139,6 +165,9 @@ type decoder struct {
 	// Strict mode
 	strict strict
 
+	// Flag that enables/disables unmarshaler interface.
+	unmarshalerInterface bool
+
 	// Current context for the error.
 	errorContext *errorContext
 }
@@ -246,9 +275,10 @@ Rules for the unmarshal code:
 func (d *decoder) handleRootExpression(expr *unstable.Node, v reflect.Value) error {
 	var x reflect.Value
 	var err error
+	var first bool // used to clear array tables on first use
 
 	if !(d.skipUntilTable && expr.Kind == unstable.KeyValue) {
-		err = d.seen.CheckExpression(expr)
+		first, err = d.seen.CheckExpression(expr)
 		if err != nil {
 			return err
 		}
@@ -267,6 +297,7 @@ func (d *decoder) handleRootExpression(expr *unstable.Node, v reflect.Value) err
 	case unstable.ArrayTable:
 		d.skipUntilTable = false
 		d.strict.EnterArrayTable(expr)
+		d.clearArrayTable = first
 		x, err = d.handleArrayTable(expr.Key(), v)
 	default:
 		panic(fmt.Errorf("parser should not permit expression of kind %s at document root", expr.Kind))
@@ -307,6 +338,10 @@ func (d *decoder) handleArrayTableCollectionLast(key unstable.Iterator, v reflec
 				reflect.Copy(nelem, elem)
 				elem = nelem
 			}
+			if d.clearArrayTable && elem.Len() > 0 {
+				elem.SetLen(0)
+				d.clearArrayTable = false
+			}
 		}
 		return d.handleArrayTableCollectionLast(key, elem)
 	case reflect.Ptr:
@@ -325,6 +360,10 @@ func (d *decoder) handleArrayTableCollectionLast(key unstable.Iterator, v reflec
 
 		return v, nil
 	case reflect.Slice:
+		if d.clearArrayTable && v.Len() > 0 {
+			v.SetLen(0)
+			d.clearArrayTable = false
+		}
 		elemType := v.Type().Elem()
 		var elem reflect.Value
 		if elemType.Kind() == reflect.Interface {
@@ -576,7 +615,7 @@ func (d *decoder) handleKeyValues(v reflect.Value) (reflect.Value, error) {
 			break
 		}
 
-		err := d.seen.CheckExpression(expr)
+		_, err := d.seen.CheckExpression(expr)
 		if err != nil {
 			return reflect.Value{}, err
 		}
@@ -634,6 +673,14 @@ func (d *decoder) handleValue(value *unstable.Node, v reflect.Value) error {
 		v = initAndDereferencePointer(v)
 	}
 
+	if d.unmarshalerInterface {
+		if v.CanAddr() && v.Addr().CanInterface() {
+			if outi, ok := v.Addr().Interface().(unstable.Unmarshaler); ok {
+				return outi.UnmarshalTOML(value)
+			}
+		}
+	}
+
 	ok, err := d.tryTextUnmarshaler(value, v)
 	if ok || err != nil {
 		return err
@@ -1097,9 +1144,9 @@ func (d *decoder) handleKeyValuePart(key unstable.Iterator, value *unstable.Node
 
 		f := fieldByIndex(v, path)
 
-		if !f.CanSet() {
-			// If the field is not settable, need to take a slower path and make a copy of
-			// the struct itself to a new location.
+		if !f.CanAddr() {
+			// If the field is not addressable, need to take a slower path and
+			// make a copy of the struct itself to a new location.
 			nvp := reflect.New(v.Type())
 			nvp.Elem().Set(v)
 			v = nvp.Elem()
diff --git a/vendor/github.com/pelletier/go-toml/v2/unstable/unmarshaler.go b/vendor/github.com/pelletier/go-toml/v2/unstable/unmarshaler.go
new file mode 100644
index 0000000000000000000000000000000000000000..00cfd6de4581b2c6828b43c184aedc803190d407
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/v2/unstable/unmarshaler.go
@@ -0,0 +1,7 @@
+package unstable
+
+// The Unmarshaler interface may be implemented by types to customize their
+// behavior when being unmarshaled from a TOML document.
+type Unmarshaler interface {
+	UnmarshalTOML(value *Node) error
+}
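
A hypothetical end-to-end sketch of the new EnableUnmarshalerInterface decoder option together with the unstable.Unmarshaler interface defined above. The Celsius type and the document are invented, and the example assumes Node.Data carries the literal text of scalar values.

```go
package main

import (
	"fmt"
	"strconv"
	"strings"

	"github.com/pelletier/go-toml/v2"
	"github.com/pelletier/go-toml/v2/unstable"
)

// Celsius is a hypothetical type with its own TOML decoding logic.
type Celsius int

func (c *Celsius) UnmarshalTOML(node *unstable.Node) error {
	if node.Kind != unstable.Integer {
		return fmt.Errorf("expected an integer, got %s", node.Kind)
	}

	// Assumption: Data holds the literal text of the integer value.
	v, err := strconv.Atoi(string(node.Data))
	if err != nil {
		return err
	}

	*c = Celsius(v)

	return nil
}

func main() {
	var cfg struct {
		Temp Celsius
	}

	dec := toml.NewDecoder(strings.NewReader(`Temp = 21`))
	dec.EnableUnmarshalerInterface()

	if err := dec.Decode(&cfg); err != nil {
		panic(err)
	}

	fmt.Println(cfg.Temp) // 21
}
```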
diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go
index be4debf9b35e23a9d060e4453df3946c9e5903c8..7490ceff63006c8f7b3eb783528164b7e476387c 100644
--- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go
+++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go
@@ -3,95 +3,127 @@ package errorlint
 import (
 	"fmt"
 	"go/ast"
+	"go/types"
 	"strings"
 )
 
-var allowedErrors = []struct {
-	err string
-	fun string
-}{
-	// pkg/archive/tar
-	{err: "io.EOF", fun: "(*archive/tar.Reader).Next"},
-	{err: "io.EOF", fun: "(*archive/tar.Reader).Read"},
-	// pkg/bufio
-	{err: "io.EOF", fun: "(*bufio.Reader).Discard"},
-	{err: "io.EOF", fun: "(*bufio.Reader).Peek"},
-	{err: "io.EOF", fun: "(*bufio.Reader).Read"},
-	{err: "io.EOF", fun: "(*bufio.Reader).ReadByte"},
-	{err: "io.EOF", fun: "(*bufio.Reader).ReadBytes"},
-	{err: "io.EOF", fun: "(*bufio.Reader).ReadLine"},
-	{err: "io.EOF", fun: "(*bufio.Reader).ReadSlice"},
-	{err: "io.EOF", fun: "(*bufio.Reader).ReadString"},
-	{err: "io.EOF", fun: "(*bufio.Scanner).Scan"},
-	// pkg/bytes
-	{err: "io.EOF", fun: "(*bytes.Buffer).Read"},
-	{err: "io.EOF", fun: "(*bytes.Buffer).ReadByte"},
-	{err: "io.EOF", fun: "(*bytes.Buffer).ReadBytes"},
-	{err: "io.EOF", fun: "(*bytes.Buffer).ReadRune"},
-	{err: "io.EOF", fun: "(*bytes.Buffer).ReadString"},
-	{err: "io.EOF", fun: "(*bytes.Reader).Read"},
-	{err: "io.EOF", fun: "(*bytes.Reader).ReadAt"},
-	{err: "io.EOF", fun: "(*bytes.Reader).ReadByte"},
-	{err: "io.EOF", fun: "(*bytes.Reader).ReadRune"},
-	{err: "io.EOF", fun: "(*bytes.Reader).ReadString"},
-	// pkg/database/sql
-	{err: "database/sql.ErrNoRows", fun: "(*database/sql.Row).Scan"},
-	// pkg/debug/elf
-	{err: "io.EOF", fun: "debug/elf.Open"},
-	{err: "io.EOF", fun: "debug/elf.NewFile"},
-	// pkg/io
-	{err: "io.EOF", fun: "(io.Reader).Read"},
-	{err: "io.EOF", fun: "(io.ReaderAt).ReadAt"},
-	{err: "io.EOF", fun: "(*io.LimitedReader).Read"},
-	{err: "io.EOF", fun: "(*io.SectionReader).Read"},
-	{err: "io.EOF", fun: "(*io.SectionReader).ReadAt"},
-	{err: "io.ErrClosedPipe", fun: "(*io.PipeWriter).Write"},
-	{err: "io.ErrShortBuffer", fun: "io.ReadAtLeast"},
-	{err: "io.ErrUnexpectedEOF", fun: "io.ReadAtLeast"},
-	{err: "io.EOF", fun: "io.ReadFull"},
-	{err: "io.ErrUnexpectedEOF", fun: "io.ReadFull"},
-	// pkg/net/http
-	{err: "net/http.ErrServerClosed", fun: "(*net/http.Server).ListenAndServe"},
-	{err: "net/http.ErrServerClosed", fun: "(*net/http.Server).ListenAndServeTLS"},
-	{err: "net/http.ErrServerClosed", fun: "(*net/http.Server).Serve"},
-	{err: "net/http.ErrServerClosed", fun: "(*net/http.Server).ServeTLS"},
-	{err: "net/http.ErrServerClosed", fun: "net/http.ListenAndServe"},
-	{err: "net/http.ErrServerClosed", fun: "net/http.ListenAndServeTLS"},
-	{err: "net/http.ErrServerClosed", fun: "net/http.Serve"},
-	{err: "net/http.ErrServerClosed", fun: "net/http.ServeTLS"},
-	// pkg/os
-	{err: "io.EOF", fun: "(*os.File).Read"},
-	{err: "io.EOF", fun: "(*os.File).ReadAt"},
-	{err: "io.EOF", fun: "(*os.File).ReadDir"},
-	{err: "io.EOF", fun: "(*os.File).Readdir"},
-	{err: "io.EOF", fun: "(*os.File).Readdirnames"},
-	// pkg/strings
-	{err: "io.EOF", fun: "(*strings.Reader).Read"},
-	{err: "io.EOF", fun: "(*strings.Reader).ReadAt"},
-	{err: "io.EOF", fun: "(*strings.Reader).ReadByte"},
-	{err: "io.EOF", fun: "(*strings.Reader).ReadRune"},
+type AllowPair struct {
+	Err string
+	Fun string
 }
 
-var allowedErrorWildcards = []struct {
-	err string
-	fun string
-}{
+var allowedErrorsMap = make(map[string]map[string]struct{})
+
+func setDefaultAllowedErrors() {
+	allowedMapAppend([]AllowPair{
+		// pkg/archive/tar
+		{Err: "io.EOF", Fun: "(*archive/tar.Reader).Next"},
+		{Err: "io.EOF", Fun: "(*archive/tar.Reader).Read"},
+		// pkg/bufio
+		{Err: "io.EOF", Fun: "(*bufio.Reader).Discard"},
+		{Err: "io.EOF", Fun: "(*bufio.Reader).Peek"},
+		{Err: "io.EOF", Fun: "(*bufio.Reader).Read"},
+		{Err: "io.EOF", Fun: "(*bufio.Reader).ReadByte"},
+		{Err: "io.EOF", Fun: "(*bufio.Reader).ReadBytes"},
+		{Err: "io.EOF", Fun: "(*bufio.Reader).ReadLine"},
+		{Err: "io.EOF", Fun: "(*bufio.Reader).ReadSlice"},
+		{Err: "io.EOF", Fun: "(*bufio.Reader).ReadString"},
+		{Err: "io.EOF", Fun: "(*bufio.Scanner).Scan"},
+		// pkg/bytes
+		{Err: "io.EOF", Fun: "(*bytes.Buffer).Read"},
+		{Err: "io.EOF", Fun: "(*bytes.Buffer).ReadByte"},
+		{Err: "io.EOF", Fun: "(*bytes.Buffer).ReadBytes"},
+		{Err: "io.EOF", Fun: "(*bytes.Buffer).ReadRune"},
+		{Err: "io.EOF", Fun: "(*bytes.Buffer).ReadString"},
+		{Err: "io.EOF", Fun: "(*bytes.Reader).Read"},
+		{Err: "io.EOF", Fun: "(*bytes.Reader).ReadAt"},
+		{Err: "io.EOF", Fun: "(*bytes.Reader).ReadByte"},
+		{Err: "io.EOF", Fun: "(*bytes.Reader).ReadRune"},
+		{Err: "io.EOF", Fun: "(*bytes.Reader).ReadString"},
+		// pkg/database/sql
+		{Err: "database/sql.ErrNoRows", Fun: "(*database/sql.Row).Scan"},
+		// pkg/debug/elf
+		{Err: "io.EOF", Fun: "debug/elf.Open"},
+		{Err: "io.EOF", Fun: "debug/elf.NewFile"},
+		// pkg/io
+		{Err: "io.EOF", Fun: "(io.ReadCloser).Read"},
+		{Err: "io.EOF", Fun: "(io.Reader).Read"},
+		{Err: "io.EOF", Fun: "(io.ReaderAt).ReadAt"},
+		{Err: "io.EOF", Fun: "(*io.LimitedReader).Read"},
+		{Err: "io.EOF", Fun: "(*io.SectionReader).Read"},
+		{Err: "io.EOF", Fun: "(*io.SectionReader).ReadAt"},
+		{Err: "io.ErrClosedPipe", Fun: "(*io.PipeWriter).Write"},
+		{Err: "io.ErrShortBuffer", Fun: "io.ReadAtLeast"},
+		{Err: "io.ErrUnexpectedEOF", Fun: "io.ReadAtLeast"},
+		{Err: "io.EOF", Fun: "io.ReadFull"},
+		{Err: "io.ErrUnexpectedEOF", Fun: "io.ReadFull"},
+		// pkg/net/http
+		{Err: "net/http.ErrServerClosed", Fun: "(*net/http.Server).ListenAndServe"},
+		{Err: "net/http.ErrServerClosed", Fun: "(*net/http.Server).ListenAndServeTLS"},
+		{Err: "net/http.ErrServerClosed", Fun: "(*net/http.Server).Serve"},
+		{Err: "net/http.ErrServerClosed", Fun: "(*net/http.Server).ServeTLS"},
+		{Err: "net/http.ErrServerClosed", Fun: "net/http.ListenAndServe"},
+		{Err: "net/http.ErrServerClosed", Fun: "net/http.ListenAndServeTLS"},
+		{Err: "net/http.ErrServerClosed", Fun: "net/http.Serve"},
+		{Err: "net/http.ErrServerClosed", Fun: "net/http.ServeTLS"},
+		// pkg/os
+		{Err: "io.EOF", Fun: "(*os.File).Read"},
+		{Err: "io.EOF", Fun: "(*os.File).ReadAt"},
+		{Err: "io.EOF", Fun: "(*os.File).ReadDir"},
+		{Err: "io.EOF", Fun: "(*os.File).Readdir"},
+		{Err: "io.EOF", Fun: "(*os.File).Readdirnames"},
+		// pkg/strings
+		{Err: "io.EOF", Fun: "(*strings.Reader).Read"},
+		{Err: "io.EOF", Fun: "(*strings.Reader).ReadAt"},
+		{Err: "io.EOF", Fun: "(*strings.Reader).ReadByte"},
+		{Err: "io.EOF", Fun: "(*strings.Reader).ReadRune"},
+		// pkg/context
+		{Err: "context.DeadlineExceeded", Fun: "(context.Context).Err"},
+		{Err: "context.Canceled", Fun: "(context.Context).Err"},
+		// pkg/encoding/json
+		{Err: "io.EOF", Fun: "(*encoding/json.Decoder).Decode"},
+		// pkg/encoding/csv
+		{Err: "io.EOF", Fun: "(*encoding/csv.Reader).Read"},
+		// pkg/mime/multipart
+		{Err: "io.EOF", Fun: "(*mime/multipart.Reader).NextPart"},
+		{Err: "io.EOF", Fun: "(*mime/multipart.Reader).NextRawPart"},
+		{Err: "mime/multipart.ErrMessageTooLarge", Fun: "(*mime/multipart.Reader).ReadForm"},
+	})
+}
+
+func allowedMapAppend(ap []AllowPair) {
+	for _, pair := range ap {
+		if _, ok := allowedErrorsMap[pair.Err]; !ok {
+			allowedErrorsMap[pair.Err] = make(map[string]struct{})
+		}
+		allowedErrorsMap[pair.Err][pair.Fun] = struct{}{}
+	}
+}
+
+var allowedErrorWildcards = []AllowPair{
+	// pkg/syscall
+	{Err: "syscall.E", Fun: "syscall."},
 	// golang.org/x/sys/unix
-	{err: "golang.org/x/sys/unix.E", fun: "golang.org/x/sys/unix."},
+	{Err: "golang.org/x/sys/unix.E", Fun: "golang.org/x/sys/unix."},
+}
+
+func allowedWildcardAppend(ap []AllowPair) {
+	allowedErrorWildcards = append(allowedErrorWildcards, ap...)
 }
 
 func isAllowedErrAndFunc(err, fun string) bool {
-	for _, allow := range allowedErrorWildcards {
-		if strings.HasPrefix(fun, allow.fun) && strings.HasPrefix(err, allow.err) {
+	if allowedFuncs, allowErr := allowedErrorsMap[err]; allowErr {
+		if _, allow := allowedFuncs[fun]; allow {
 			return true
 		}
 	}
 
-	for _, allow := range allowedErrors {
-		if allow.fun == fun && allow.err == err {
+	for _, allow := range allowedErrorWildcards {
+		if strings.HasPrefix(fun, allow.Fun) && strings.HasPrefix(err, allow.Err) {
 			return true
 		}
 	}
+
 	return false
 }
 
@@ -110,7 +142,7 @@ func isAllowedErrorComparison(pass *TypesInfoExt, binExpr *ast.BinaryExpr) bool
 		case *ast.Ident:
 			// Identifier, most likely to be the `err` variable or whatever
 			// produces it.
-			callExprs = assigningCallExprs(pass, t)
+			callExprs = assigningCallExprs(pass, t, map[types.Object]bool{})
 		case *ast.CallExpr:
 			callExprs = append(callExprs, t)
 		}
@@ -149,15 +181,21 @@ func isAllowedErrorComparison(pass *TypesInfoExt, binExpr *ast.BinaryExpr) bool
 
 // assigningCallExprs finds all *ast.CallExpr nodes that are part of an
 // *ast.AssignStmt that assign to the subject identifier.
-func assigningCallExprs(pass *TypesInfoExt, subject *ast.Ident) []*ast.CallExpr {
+func assigningCallExprs(pass *TypesInfoExt, subject *ast.Ident, visitedObjects map[types.Object]bool) []*ast.CallExpr {
 	if subject.Obj == nil {
 		return nil
 	}
 
-	// Find other identifiers that reference this same object. Make sure to
-	// exclude the subject identifier as it will cause an infinite recursion
-	// and is being used in a read operation anyway.
+	// Find other identifiers that reference this same object.
 	sobj := pass.TypesInfo.ObjectOf(subject)
+
+	if visitedObjects[sobj] {
+		return nil
+	}
+	visitedObjects[sobj] = true
+
+	// Make sure to exclude the subject identifier as it will cause an infinite recursion and is
+	// being used in a read operation anyway.
 	identifiers := []*ast.Ident{}
 	for _, ident := range pass.IdentifiersForObject[sobj] {
 		if subject.Pos() != ident.Pos() {
@@ -196,7 +234,7 @@ func assigningCallExprs(pass *TypesInfoExt, subject *ast.Ident) []*ast.CallExpr
 					continue
 				}
 				// The subject was the result of assigning from another identifier.
-				callExprs = append(callExprs, assigningCallExprs(pass, assignT)...)
+				callExprs = append(callExprs, assigningCallExprs(pass, assignT, visitedObjects)...)
 			default:
 				// TODO: inconclusive?
 			}
diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go
index f034913ea324e9d059f4a11672a1b4861cc4f78c..84ebd6cf8e5f97f20a42188e36d6bcd028cb391f 100644
--- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go
+++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go
@@ -1,7 +1,6 @@
 package errorlint
 
 import (
-	"flag"
 	"go/ast"
 	"go/types"
 	"sort"
@@ -9,32 +8,36 @@ import (
 	"golang.org/x/tools/go/analysis"
 )
 
-func NewAnalyzer() *analysis.Analyzer {
-	return &analysis.Analyzer{
-		Name:  "errorlint",
-		Doc:   "Source code linter for Go software that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13.",
-		Run:   run,
-		Flags: flagSet,
+func NewAnalyzer(opts ...Option) *analysis.Analyzer {
+	for _, o := range opts {
+		o()
 	}
+
+	setDefaultAllowedErrors()
+
+	a := &analysis.Analyzer{
+		Name: "errorlint",
+		Doc:  "Source code linter for Go software that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13.",
+		Run:  run,
+	}
+
+	a.Flags.BoolVar(&checkComparison, "comparison", true, "Check for plain error comparisons")
+	a.Flags.BoolVar(&checkAsserts, "asserts", true, "Check for plain type assertions and type switches")
+	a.Flags.BoolVar(&checkErrorf, "errorf", false, "Check whether fmt.Errorf uses the %w verb for formatting errors. See the readme for caveats")
+	a.Flags.BoolVar(&checkErrorfMulti, "errorf-multi", true, "Permit more than 1 %w verb, valid per Go 1.20 (Requires -errorf=true)")
+
+	return a
 }
 
 var (
-	flagSet          flag.FlagSet
 	checkComparison  bool
 	checkAsserts     bool
 	checkErrorf      bool
 	checkErrorfMulti bool
 )
 
-func init() {
-	flagSet.BoolVar(&checkComparison, "comparison", true, "Check for plain error comparisons")
-	flagSet.BoolVar(&checkAsserts, "asserts", true, "Check for plain type assertions and type switches")
-	flagSet.BoolVar(&checkErrorf, "errorf", false, "Check whether fmt.Errorf uses the %w verb for formatting errors. See the readme for caveats")
-	flagSet.BoolVar(&checkErrorfMulti, "errorf-multi", true, "Permit more than 1 %w verb, valid per Go 1.20 (Requires -errorf=true)")
-}
-
 func run(pass *analysis.Pass) (interface{}, error) {
-	lints := []analysis.Diagnostic{}
+	var lints []analysis.Diagnostic
 	extInfo := newTypesInfoExt(pass)
 	if checkComparison {
 		l := LintErrorComparisons(extInfo)
diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go
index 817cd6904c2a3a3e2ad7a4124e23452b0979935a..6648d3179001baef72e9c259433894ff9b0d671e 100644
--- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go
+++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go
@@ -20,7 +20,8 @@ func (l ByPosition) Less(i, j int) bool {
 }
 
 func LintFmtErrorfCalls(fset *token.FileSet, info types.Info, multipleWraps bool) []analysis.Diagnostic {
-	lints := []analysis.Diagnostic{}
+	var lints []analysis.Diagnostic
+
 	for expr, t := range info.Types {
 		// Search for error expressions that are the result of fmt.Errorf
 		// invocations.
@@ -159,7 +160,7 @@ func isFmtErrorfCallExpr(info types.Info, expr ast.Expr) (*ast.CallExpr, bool) {
 }
 
 func LintErrorComparisons(info *TypesInfoExt) []analysis.Diagnostic {
-	lints := []analysis.Diagnostic{}
+	var lints []analysis.Diagnostic
 
 	for expr := range info.TypesInfo.Types {
 		// Find == and != operations.
@@ -289,7 +290,7 @@ func switchComparesNonNil(switchStmt *ast.SwitchStmt) bool {
 }
 
 func LintErrorTypeAssertions(fset *token.FileSet, info *TypesInfoExt) []analysis.Diagnostic {
-	lints := []analysis.Diagnostic{}
+	var lints []analysis.Diagnostic
 
 	for expr := range info.TypesInfo.Types {
 		// Find type assertions.
@@ -307,6 +308,11 @@ func LintErrorTypeAssertions(fset *token.FileSet, info *TypesInfoExt) []analysis
 			continue
 		}
 
+		// If the asserted type is not an error, allow the expression.
+		if !implementsError(info.TypesInfo.Types[typeAssert.Type].Type) {
+			continue
+		}
+
 		lints = append(lints, analysis.Diagnostic{
 			Message: "type assertion on error will fail on wrapped errors. Use errors.As to check for specific errors",
 			Pos:     typeAssert.Pos(),
diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/options.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/options.go
new file mode 100644
index 0000000000000000000000000000000000000000..4d7c742d839db711a84943a89c4ffb107fdcba0e
--- /dev/null
+++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/options.go
@@ -0,0 +1,15 @@
+package errorlint
+
+type Option func()
+
+func WithAllowedErrors(ap []AllowPair) Option {
+	return func() {
+		allowedMapAppend(ap)
+	}
+}
+
+func WithAllowedWildcard(ap []AllowPair) Option {
+	return func() {
+		allowedWildcardAppend(ap)
+	}
+}
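
A small sketch of the new functional-options API above, wiring the analyzer into x/tools' singlechecker; the extra Err/Fun pair is made up for illustration.

```go
package main

import (
	"github.com/polyfloyd/go-errorlint/errorlint"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// Allow direct comparison against a sentinel error returned by a
	// hypothetical third-party function, in addition to the defaults.
	a := errorlint.NewAnalyzer(
		errorlint.WithAllowedErrors([]errorlint.AllowPair{
			{Err: "example.com/streaming.ErrDone", Fun: "(*example.com/streaming.Reader).Next"},
		}),
	)

	singlechecker.Main(a)
}
```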
diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go
index 9737525924135dd3e742fb9deafc13fb7d332b9a..4c0e12525de1f5157358b04a3c681325d38b29cc 100644
--- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go
+++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go
@@ -7,23 +7,6 @@ import (
 	"strings"
 )
 
-func verbOrder(verbs []verb, numArgs int) [][]verb {
-	orderedVerbs := make([][]verb, numArgs)
-	i := 0
-	for _, v := range verbs {
-		if v.index != -1 {
-			i = v.index - 1
-		}
-		if i >= len(orderedVerbs) {
-			continue
-		}
-		orderedVerbs[i] = append(orderedVerbs[i], v)
-		verbs = verbs[1:]
-		i++
-	}
-	return orderedVerbs
-}
-
 type verb struct {
 	format       string
 	formatOffset int
diff --git a/vendor/github.com/quasilyte/go-ruleguard/dsl/LICENSE b/vendor/github.com/quasilyte/go-ruleguard/dsl/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..558f81ff223d7a1dec5233dd9f1a3f520eb174c3
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/dsl/LICENSE
@@ -0,0 +1,29 @@
+BSD 3-Clause License
+
+Copyright (c) 2022, Iskander (Alex) Sharipov / quasilyte
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+   contributors may be used to endorse or promote products derived from
+   this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/quasilyte/go-ruleguard/dsl/bundle.go b/vendor/github.com/quasilyte/go-ruleguard/dsl/bundle.go
new file mode 100644
index 0000000000000000000000000000000000000000..45a9455d868e7cce252060bd8af39ee1dc2a59c1
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/dsl/bundle.go
@@ -0,0 +1,19 @@
+package dsl
+
+// Bundle is a rules file export manifest.
+type Bundle struct {
+	// TODO: figure out which fields we might want to add here.
+}
+
+// ImportRules imports all rules from the bundle and prefixes them with a specified string.
+//
+// Empty string prefix is something like "dot import" in Go.
+// Group name collisions will result in an error.
+//
+// Only packages that have an exported Bundle variable can be imported.
+//
+// Note: right now an imported bundle can't import other bundles.
+// This is not a fundamental limitation but rather a precautionary
+// measure until we understand how it should work better.
+// If you need this feature, please open an issue at github.com/quasilyte/go-ruleguard.
+func ImportRules(prefix string, bundle Bundle) {}
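
For context, a hedged sketch of how a rules file typically consumes a bundle via ImportRules; the bundle import path is hypothetical and the rule body is ordinary Matcher usage documented further down in dsl.go.

```go
//go:build ignore

package gorules

import (
	"github.com/quasilyte/go-ruleguard/dsl"

	// Hypothetical bundle package exporting `var Bundle = dsl.Bundle{}`.
	otherrules "example.com/ruleguard-bundle"
)

func init() {
	// Imported rules get the "other" prefix to avoid group-name collisions.
	dsl.ImportRules("other", otherrules.Bundle)
}

func emptyStringCheck(m dsl.Matcher) {
	m.Match(`len($s) == 0`).
		Where(m["s"].Type.Is("string")).
		Report(`consider $s == "" instead`)
}
```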
diff --git a/vendor/github.com/quasilyte/go-ruleguard/dsl/do.go b/vendor/github.com/quasilyte/go-ruleguard/dsl/do.go
new file mode 100644
index 0000000000000000000000000000000000000000..86bc163a77f40d7fcb6b239e1804a132c43baa91
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/dsl/do.go
@@ -0,0 +1,19 @@
+package dsl
+
+import (
+	"github.com/quasilyte/go-ruleguard/dsl/types"
+)
+
+type DoContext struct{}
+
+func (*DoContext) Var(varname string) *DoVar { return nil }
+
+func (*DoContext) SetReport(report string) {}
+
+func (*DoContext) SetSuggest(suggest string) {}
+
+type DoVar struct{}
+
+func (*DoVar) Text() string { return "" }
+
+func (*DoVar) Type() types.Type { return nil }
diff --git a/vendor/github.com/quasilyte/go-ruleguard/dsl/dsl.go b/vendor/github.com/quasilyte/go-ruleguard/dsl/dsl.go
new file mode 100644
index 0000000000000000000000000000000000000000..d3c73bddd8604d914f65c23eb10a253f7ab1a785
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/dsl/dsl.go
@@ -0,0 +1,361 @@
+package dsl
+
+// Matcher is a main API group-level entry point.
+// It's used to define and configure the group rules.
+// It also represents a map of all rule-local variables.
+type Matcher map[string]Var
+
+// Import loads given package path into a rule group imports table.
+//
+// That table is used during the rules compilation.
+//
+// The table has the following effect on the rules:
+//	* For type expressions, it's used to resolve the
+//	  full package paths of qualified types, like `foo.Bar`.
+//	  If Import(`a/b/foo`) is called, `foo.Bar` will match
+//	  `a/b/foo.Bar` type during the pattern execution.
+func (m Matcher) Import(pkgPath string) {}
+
+// Match specifies a set of patterns that match a rule being defined.
+// Pattern matching succeeds if at least 1 pattern matches.
+//
+// If none of the given patterns matched, rule execution stops.
+func (m Matcher) Match(pattern string, alternatives ...string) Matcher {
+	return m
+}
+
+// MatchComment is like Match, but handles only comments and uses regexp patterns.
+//
+// Multi-line /**/ comments are passed as a single string.
+// Single-line // comments are passed line-by-line.
+//
+// Hint: if you want to match a plain text and don't want to do meta char escaping,
+// prepend `\Q` to your pattern. `\Qf(x)` will match `f(x)` as a plain text
+// and there is no need to escape the `(` and `)` chars.
+//
+// Named regexp capture groups can be accessed using the usual indexing notation.
+//
+// Given this pattern:
+//
+//     `(?P<first>\d+)\.(\d+).(?P<second>\d+)`
+//
+// And this input comment: `// 14.6.600`
+//
+// We'll get these submatches:
+//
+//     m["$$"] => `14.6.600`
+//     m["first"] => `14`
+//     m["second"] => `600`
+//
+// All usual filters can be applied:
+//
+//     Where(!m["first"].Text.Matches(`foo`))
+//
+// You can use this to reject some matches (allow-list behavior).
+func (m Matcher) MatchComment(pattern string, alternatives ...string) Matcher {
+	return m
+}
+
+// Where applies additional constraint to a match.
+// If a given cond is not satisfied, a match is rejected and
+// rule execution stops.
+func (m Matcher) Where(cond bool) Matcher {
+	return m
+}
+
+// Report prints a message if associated rule match is successful.
+//
+// A message is a string that can contain interpolated expressions.
+// For every matched variable it's possible to interpolate
+// their printed representation into the message text with $<name>.
+// An entire match can be addressed with $$.
+func (m Matcher) Report(message string) Matcher {
+	return m
+}
+
+// Suggest assigns a quickfix suggestion for the matched code.
+func (m Matcher) Suggest(suggestion string) Matcher {
+	return m
+}
+
+func (m Matcher) Do(fn func(*DoContext)) Matcher {
+	return m
+}
+
+// At binds the reported node to a named submatch.
+// If no explicit location is given, the outermost node ($$) is used.
+func (m Matcher) At(v Var) Matcher {
+	return m
+}
+
+// File returns the current file context.
+func (m Matcher) File() File { return File{} }
+
+// GoVersion returns the analyzer associated target Go language version.
+func (m Matcher) GoVersion() GoVersion { return GoVersion{} }
+
+// Deadcode reports whether this match is contained inside a dead code path.
+func (m Matcher) Deadcode() bool { return boolResult }
+
+// Var is a pattern variable that describes a named submatch.
+type Var struct {
+	// Pure reports whether expr matched by var is side-effect-free.
+	Pure bool
+
+	// Const reports whether expr matched by var is a constant value.
+	Const bool
+
+	// ConstSlice reports whether expr matched by var is a slice literal
+	// consisting of constant elements.
+	//
+	// We need a separate Const-like predicate here because Go doesn't
+	// treat slices of const elements as constants, so including
+	// them in Const would be incorrect.
+	// Use `m["x"].Const || m["x"].ConstSlice` when you need
+	// to have extended definition of "constant value".
+	//
+	// Some examples:
+	//     []byte("foo") -- constant byte slice
+	//     []byte{'f', 'o', 'o'} -- same constant byte slice
+	//     []int{1, 2} -- constant int slice
+	ConstSlice bool
+
+	// Value is a compile-time computable value of the expression.
+	Value ExprValue
+
+	// Addressable reports whether the corresponding expression is addressable.
+	// See https://golang.org/ref/spec#Address_operators.
+	Addressable bool
+
+	// Comparable reports whether the corresponding expression value is comparable.
+	// See https://pkg.go.dev/go/types#Comparable.
+	Comparable bool
+
+	// Type is a type of a matched expr.
+	//
+	// For function call expressions, a type is a function result type,
+	// but for a function expression itself it's a *types.Signature.
+	//
+	// Suppose we have a `a.b()` expression:
+	//	`$x()` m["x"].Type is `a.b` function type
+	//	`$x` m["x"].Type is `a.b()` function call result type
+	Type ExprType
+
+	SinkType SinkType
+
+	// Object is an associated "go/types" Object.
+	Object TypesObject
+
+	// Text is a captured node text as in the source code.
+	Text MatchedText
+
+	// Node is a captured AST node.
+	Node MatchedNode
+
+	// Line is a source code line number that contains this match.
+	// If this match is multi-line, this is the first line number.
+	Line int
+}
+
+// Filter applies a custom predicate function on a submatch.
+//
+// The callback function should use VarFilterContext to access the
+// information that is usually accessed through Var.
+// For example, `VarFilterContext.Type` is mapped to `Var.Type`.
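+//
+// A sketch of a custom predicate (the function name is made up):
+//
+//	func isSmallType(ctx *dsl.VarFilterContext) bool {
+//		return ctx.SizeOf(ctx.Type) <= 8
+//	}
+//
+//	// inside a rule:
+//	m.Match(`$y = $x`).Where(m["x"].Filter(isSmallType))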
+func (Var) Filter(pred func(*VarFilterContext) bool) bool { return boolResult }
+
+// Contains runs a sub-search from a given pattern using the captured
+// vars from the original pattern match.
+//
+// For example, given the Match(`$lhs = append($lhs, $x)`) pattern,
+// we can do m["lhs"].Contains(`$x`) and learn whether $lhs contains
+// $x as its sub-expression.
+//
+// Experimental: this function is not part of the stable API.
+func (Var) Contains(pattern string) bool { return boolResult }
+
+// MatchedNode represents an AST node associated with a named submatch.
+type MatchedNode struct{}
+
+// Is reports whether a matched node AST type is compatible with the specified type.
+// A valid argument is an ast.Node-implementing type name from the "go/ast" package.
+// Examples: "BasicLit", "Expr", "Stmt", "Ident", "ParenExpr".
+// See https://golang.org/pkg/go/ast/.
+func (MatchedNode) Is(typ string) bool { return boolResult }
+
+// Parent returns a matched node parent.
+func (MatchedNode) Parent() Node { return Node{} }
+
+// Node represents an AST node somewhere inside a match.
+// Unlike MatchedNode, it doesn't have to be associated with a named submatch.
+type Node struct{}
+
+// Is reports whether a node AST type is compatible with the specified type.
+// See `MatchedNode.Is` for the full reference.
+func (Node) Is(typ string) bool { return boolResult }
+
+// ExprValue describes a compile-time computable value of a matched expr.
+type ExprValue struct{}
+
+// Int returns compile-time computable int value of the expression.
+// If value can't be computed, condition will fail.
+func (ExprValue) Int() int { return intResult }
+
+// TypesObject is a types.Object mapping.
+type TypesObject struct{}
+
+// Is reports whether an associated types.Object is compatible with the specified type.
+// A valid argument is a types.Object type name from the "go/types" package.
+// Examples: "Func", "Var", "Const", "TypeName", "Label", "PkgName", "Builtin", "Nil"
+// See https://golang.org/pkg/go/types/.
+func (TypesObject) Is(typ string) bool { return boolResult }
+
+// IsGlobal reports whether an associated types.Object is defined in global scope.
+func (TypesObject) IsGlobal() bool { return boolResult }
+
+// IsVariadicParam reports whether this object represents a function variadic param.
+// This property is not propagated between the assignments.
+func (TypesObject) IsVariadicParam() bool { return boolResult }
+
+type SinkType struct{}
+
+// Is reports whether a type is identical to a given type.
+// Works like the ExprType.Is method.
+func (SinkType) Is(typ string) bool { return boolResult }
+
+// ExprType describes the type of a matched expr.
+type ExprType struct {
+	// Size represents expression type size in bytes.
+	//
+	// For expressions of unknown size, like type params in generics,
+	// any filter using this operand will fail.
+	Size int
+}
+
+// IdenticalTo applies types.Identical(this, v.Type) operation.
+// See https://golang.org/pkg/go/types/#Identical function documentation.
+//
+// Experimental: this function is not part of the stable API.
+func (ExprType) IdenticalTo(v Var) bool { return boolResult }
+
+// Underlying returns expression type underlying type.
+// See https://golang.org/pkg/go/types/#Type Underlying() method documentation.
+// Read https://golang.org/ref/spec#Types section to learn more about underlying types.
+func (ExprType) Underlying() ExprType { return underlyingType }
+
+// AssignableTo reports whether a type is assign-compatible with a given type.
+// See https://golang.org/pkg/go/types/#AssignableTo.
+func (ExprType) AssignableTo(typ string) bool { return boolResult }
+
+// ConvertibleTo reports whether a type is convertible to a given type.
+// See https://golang.org/pkg/go/types/#ConvertibleTo.
+func (ExprType) ConvertibleTo(typ string) bool { return boolResult }
+
+// Implements reports whether a type implements a given interface.
+// See https://golang.org/pkg/go/types/#Implements.
+func (ExprType) Implements(typ typeName) bool { return boolResult }
+
+// HasMethod reports whether a type has a given method.
+// Unlike Implements(), it will work for both value and pointer types.
+//
+// fn argument is a function signature, like `WriteString(string) (int, error)`.
+// It can also be given in the form of a method reference for importable types: `io.StringWriter.WriteString`.
+//
+// To avoid confusion with Implements() method, here is a hint when to use which:
+//
+//	- To check if it's possible to call F on x, use HasMethod(F)
+//	- To check if x can be passed as I interface, use Implements(I)
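+//
+// For example (illustrative):
+//
+//	m.Match(`$w.WriteString($s)`).
+//		Where(m["w"].Type.HasMethod(`io.StringWriter.WriteString`))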
+func (ExprType) HasMethod(fn string) bool { return boolResult }
+
+// Is reports whether a type is identical to a given type.
+func (ExprType) Is(typ string) bool { return boolResult }
+
+// HasPointers reports whether a type contains at least one pointer.
+//
+// We try to be as close to the Go sense of pointer-free objects as possible,
+// therefore string type is not considered to be a pointer-free type.
+//
+// This function may return "true" for some complicated cases as a
+// conservative result. It never returns "false" for a type that
+// actually contains a pointer.
+//
+// So this function is mostly useful for !HasPointers() form.
+func (ExprType) HasPointers() bool { return boolResult }
+
+// OfKind reports whether a matched expr type is compatible with the specified kind.
+//
+// Only a few "kinds" are recognized; the list is provided below.
+//
+//	"integer"  -- typ is *types.Basic, where typ.Info()&types.Integer != 0
+//	"unsigned" -- typ is *types.Basic, where typ.Info()&types.Unsigned != 0
+//	"float"    -- typ is *types.Basic, where typ.Info()&types.Float != 0
+//	"complex"  -- typ is *types.Basic, where typ.Info()&types.Complex != 0
+//	"untyped"  -- typ is *types.Basic, where typ.Info()&types.Untyped != 0
+//	"numeric"  -- typ is *types.Basic, where typ.Info()&types.Numeric != 0
+//  "signed"   -- identical to `OfKind("integer") && !OfKind("unsigned")`
+//  "int"      -- int, int8, int16, int32, int64
+//  "uint"     -- uint, uint8, uint16, uint32, uint64
+//
+// Note: "int" will include "rune" as well, as it's an alias.
+// In the same manner, "uint" includes the "byte" type.
+//
+// Using OfKind("unsigned") is more efficient (and concise) than using a set
+// of or-conditions with Is("uint8"), Is("uint16") and so on.
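+//
+// For example (an illustrative rule):
+//
+//	m.Match(`$x % 2 == 1`).
+//		Where(!m["x"].Type.OfKind("unsigned")).
+//		Report(`odd check will fail for negative $x; use $x % 2 != 0 instead`)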
+func (ExprType) OfKind(kind string) bool { return boolResult }
+
+// MatchedText represents a source text associated with a matched node.
+type MatchedText string
+
+// Matches reports whether the text matches the given regexp pattern.
+func (MatchedText) Matches(pattern string) bool { return boolResult }
+
+// String represents an arbitrary string-typed data.
+type String string
+
+// Matches reports whether a string matches the given regexp pattern.
+func (String) Matches(pattern string) bool { return boolResult }
+
+// File represents the current Go source file.
+type File struct {
+	// Name is a file base name.
+	Name String
+
+	// PkgPath is a file package path.
+	// Examples: "io/ioutil", "strings", "github.com/quasilyte/go-ruleguard/dsl".
+	PkgPath String
+}
+
+// Imports reports whether the current file imports the given path.
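+//
+// For example (an illustrative condition):
+//
+//	Where(m.File().Imports("unsafe"))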
+func (File) Imports(path string) bool { return boolResult }
+
+// GoVersion is the analysis target Go language version.
+// It can be compared to Go versions like "1.10", "1.16" using
+// the associated methods.
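+//
+// For example (an illustrative rule):
+//
+//	m.Match(`interface{}`).
+//		Where(m.GoVersion().GreaterEqThan("1.18")).
+//		Report(`may use 'any' instead of 'interface{}'`)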
+type GoVersion struct{}
+
+// Eq asserts that target Go version is equal to (==) specified version.
+func (GoVersion) Eq(version string) bool { return boolResult }
+
+// GreaterEqThan asserts that target Go version is greater or equal than (>=) specified version.
+func (GoVersion) GreaterEqThan(version string) bool { return boolResult }
+
+// GreaterThan asserts that target Go version is greater than (>) specified version.
+func (GoVersion) GreaterThan(version string) bool { return boolResult }
+
+// LessThan asserts that target Go version is less than (<) specified version.
+func (GoVersion) LessThan(version string) bool { return boolResult }
+
+// LessEqThan asserts that target Go version is less or equal than (<=) specified version.
+func (GoVersion) LessEqThan(version string) bool { return boolResult }
+
+// typeName is a helper type used to document function params better.
+//
+// A type name can be:
+//	- builtin type name: `error`, `string`, etc.
+//	- qualified name from a standard library: `io.Reader`, etc.
+//	- fully-qualified type name, like `github.com/username/pkgname.TypeName`
+//
+// typeName is also affected by a local import table, which can override
+// how qualified names are interpreted.
+// See `Matcher.Import` for more info.
+type typeName = string
diff --git a/vendor/github.com/quasilyte/go-ruleguard/dsl/filter.go b/vendor/github.com/quasilyte/go-ruleguard/dsl/filter.go
new file mode 100644
index 0000000000000000000000000000000000000000..cef880098d38f4337b5758cab7b3457fa8200d9c
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/dsl/filter.go
@@ -0,0 +1,24 @@
+package dsl
+
+import (
+	"github.com/quasilyte/go-ruleguard/dsl/types"
+)
+
+// VarFilterContext carries Var and environment information into the filter function.
+// It's an input parameter type for the Var.Filter function callback.
+type VarFilterContext struct {
+	// Type is mapped to Var.Type field.
+	Type types.Type
+}
+
+// SizeOf returns the size of the given type.
+// It uses the ruleguard.Context.Sizes to calculate the result.
+func (*VarFilterContext) SizeOf(x types.Type) int { return 0 }
+
+// GetType finds a type value by a given name.
+// If a type can't be found (or a name is malformed), this function panics.
+func (*VarFilterContext) GetType(name typeName) types.Type { return nil }
+
+// GetInterface finds a type value that represents an interface by a given name.
+// Works like `types.AsInterface(ctx.GetType(name))`.
+func (*VarFilterContext) GetInterface(name typeName) *types.Interface { return nil }
diff --git a/vendor/github.com/quasilyte/go-ruleguard/dsl/internal.go b/vendor/github.com/quasilyte/go-ruleguard/dsl/internal.go
new file mode 100644
index 0000000000000000000000000000000000000000..3bb6d85dc70c2d7314c385f05e52e76a88a01da3
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/dsl/internal.go
@@ -0,0 +1,6 @@
+package dsl
+
+var boolResult bool
+var intResult int
+
+var underlyingType ExprType
diff --git a/vendor/github.com/quasilyte/go-ruleguard/dsl/types/ext.go b/vendor/github.com/quasilyte/go-ruleguard/dsl/types/ext.go
new file mode 100644
index 0000000000000000000000000000000000000000..82595ff30ddcac1f42cc14362e94daabc6c44d3e
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/dsl/types/ext.go
@@ -0,0 +1,21 @@
+package types
+
+// AsArray is a type-assert like operation, x.(*Array), but never panics.
+// Returns nil if type is not an array.
+func AsArray(x Type) *Array { return nil }
+
+// AsSlice is a type-assert like operation, x.(*Slice), but never panics.
+// Returns nil if type is not a slice.
+func AsSlice(x Type) *Slice { return nil }
+
+// AsPointer is a type-assert like operation, x.(*Pointer), but never panics.
+// Returns nil if type is not a pointer.
+func AsPointer(x Type) *Pointer { return nil }
+
+// AsStruct is a type-assert like operation, x.(*Struct), but never panics.
+// Returns nil if type is not a struct.
+func AsStruct(x Type) *Struct { return nil }
+
+// AsInterface is a type-assert like operation, x.(*Interface), but never panics.
+// Returns nil if type is not an interface.
+func AsInterface(x Type) *Interface { return nil }
diff --git a/vendor/github.com/quasilyte/go-ruleguard/dsl/types/type_impl.go b/vendor/github.com/quasilyte/go-ruleguard/dsl/types/type_impl.go
new file mode 100644
index 0000000000000000000000000000000000000000..9fc71d75318f3ab8c63f4ec4e447e0ad64ad2a4c
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/dsl/types/type_impl.go
@@ -0,0 +1,17 @@
+package types
+
+// Method stubs to make various types implement Type interface.
+//
+// Nothing interesting here, hence it's moved to a separate file.
+
+func (*Array) String() string     { return "" }
+func (*Slice) String() string     { return "" }
+func (*Pointer) String() string   { return "" }
+func (*Interface) String() string { return "" }
+func (*Struct) String() string    { return "" }
+
+func (*Array) Underlying() Type     { return nil }
+func (*Slice) Underlying() Type     { return nil }
+func (*Pointer) Underlying() Type   { return nil }
+func (*Interface) Underlying() Type { return nil }
+func (*Struct) Underlying() Type    { return nil }
diff --git a/vendor/github.com/quasilyte/go-ruleguard/dsl/types/types.go b/vendor/github.com/quasilyte/go-ruleguard/dsl/types/types.go
new file mode 100644
index 0000000000000000000000000000000000000000..b6be7cc352e7f8d70835ce881ad74f6f2933b0f7
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/dsl/types/types.go
@@ -0,0 +1,68 @@
+// Package types mimics the https://golang.org/pkg/go/types/ package.
+// It also contains some extra utility functions; they are defined in the ext.go file.
+package types
+
+// Implements reports whether a given type implements the specified interface.
+func Implements(typ Type, iface *Interface) bool { return false }
+
+// Identical reports whether x and y are identical types. Receivers of Signature types are ignored.
+func Identical(x, y Type) bool { return false }
+
+// A Type represents a type of Go. All types implement the Type interface.
+type Type interface {
+	// Underlying returns the underlying type of a type.
+	Underlying() Type
+
+	// String returns a string representation of a type.
+	String() string
+}
+
+type (
+	// An Array represents an array type.
+	Array struct{}
+
+	// A Slice represents a slice type.
+	Slice struct{}
+
+	// A Pointer represents a pointer type.
+	Pointer struct{}
+
+	// An Interface represents an interface type.
+	Interface struct{}
+
+	// A Struct represents a struct type.
+	Struct struct{}
+)
+
+// NewArray returns a new array type for the given element type and length.
+// A negative length indicates an unknown length.
+func NewArray(elem Type, len int) *Array { return nil }
+
+// Elem returns the element type of the array.
+func (*Array) Elem() Type { return nil }
+
+// NewSlice returns a new slice type for the given element type.
+func NewSlice(elem Type) *Slice { return nil }
+
+// Elem returns the element type of the slice.
+func (*Slice) Elem() Type { return nil }
+
+// Len returns the length of the array.
+// A negative result indicates an unknown length.
+func (*Array) Len() int { return 0 }
+
+// NewPointer returns a new pointer type for the given element (base) type.
+func NewPointer(elem Type) *Pointer { return nil }
+
+// Elem returns the element type for the given pointer.
+func (*Pointer) Elem() Type { return nil }
+
+func (*Struct) NumFields() int { return 0 }
+
+func (*Struct) Field(i int) *Var { return nil }
+
+type Var struct{}
+
+func (*Var) Embedded() bool { return false }
+
+func (*Var) Type() Type { return nil }
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go
index dcb2fd2afa02284f29c5cfbc4a7542b76af78cc0..90dea56acdc5b25d81ffdfe0a99a4f9f77180d19 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go
@@ -195,7 +195,7 @@ func (l *irLoader) compileFilterFuncs(filename string, irfile *ir.File) error {
 	if err != nil {
 		// If this ever happens, user will get unexpected error
 		// lines for it; but we should trust that 99.9% errors
-		// should be catched at irconv phase so we get a valid Go
+		// should be caught at irconv phase so we get a valid Go
 		// source here as well?
 		return fmt.Errorf("parse custom decls: %w", err)
 	}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/textmatch.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/textmatch.go
index a3787e2c1690ebc7206990095b21368aefb7d92c..135f95740e55090515b0a9e9f95b2c3027175d97 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/textmatch.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/textmatch.go
@@ -9,7 +9,7 @@ type Pattern interface {
 }
 
 // Compile parses a regular expression and returns a compiled
-// pattern that can match inputs descriped by the regexp.
+// pattern that can match inputs described by the regexp.
 //
 // Semantically it's close to the regexp.Compile, but
 // it does recognize some common patterns and creates
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go
index d3226db22bd36e231d691b5f6ea05743dae5f967..6403d91cdce17ee80bb22a64d97eb98fd442de81 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go
@@ -273,7 +273,7 @@ func isTypeExpr(info *types.Info, x ast.Expr) bool {
 
 	case *ast.Ident:
 		// Identifier may be a type expression if object
-		// it reffers to is a type name.
+		// it refers to is a type name.
 		_, ok := info.ObjectOf(x).(*types.TypeName)
 		return ok
 
diff --git a/vendor/github.com/ryancurrah/gomodguard/.golangci.yml b/vendor/github.com/ryancurrah/gomodguard/.golangci.yml
index a0e6fd55ea8039e7bf7522f1932064db1e550690..e39b1445eec859d96b3049753594bb36ccded16b 100644
--- a/vendor/github.com/ryancurrah/gomodguard/.golangci.yml
+++ b/vendor/github.com/ryancurrah/gomodguard/.golangci.yml
@@ -1,111 +1,32 @@
 # See https://golangci-lint.run/usage/configuration/
-
-linters-settings:
-  revive:
-    # see https://github.com/mgechev/revive#available-rules for details.
-    ignore-generated-header: true
-    severity: warning
-    rules:
-      - name: atomic
-      - name: blank-imports
-      - name: bool-literal-in-expr
-      - name: call-to-gc
-      - name: confusing-naming
-      - name: confusing-results
-      - name: constant-logical-expr
-      - name: context-as-argument
-      - name: context-keys-type
-      - name: deep-exit
-      - name: defer
-      - name: dot-imports
-      - name: duplicated-imports
-      - name: early-return
-      - name: empty-block
-      - name: empty-lines
-      - name: error-naming
-      - name: error-return
-      - name: error-strings
-      - name: errorf
-      - name: exported
-      - name: get-return
-      - name: identical-branches
-      - name: if-return
-      - name: import-shadowing
-      - name: increment-decrement
-      - name: indent-error-flow
-      - name: modifies-parameter
-      - name: modifies-value-receiver
-      - name: package-comments
-      - name: range
-      - name: range-val-address
-      - name: range-val-in-closure
-      - name: receiver-naming
-      - name: redefines-builtin-id
-      - name: string-of-int
-      - name: struct-tag
-      - name: superfluous-else
-      - name: time-naming
-      - name: unconditional-recursion
-      - name: unexported-naming
-      - name: unexported-return
-      - name: unnecessary-stmt
-      - name: unreachable-code
-      - name: unused-parameter
-      - name: var-declaration
-      - name: var-naming
-      - name: waitgroup-by-value
-
 linters:
-  disable-all: true
-  enable:
-    - asciicheck
-    - bodyclose
-    - dogsled
-    - dupl
-    - durationcheck
-    - errcheck
-    - errorlint
-    - exhaustive
-    - exportloopref
-    - forcetypeassert
-    - funlen
-    - gochecknoinits
-    - gocognit
-    - goconst
-    - gocritic
-    - gocyclo
-    - godot
-    - godox
-    - goimports
-    - gomoddirectives
-    - gomodguard
-    - goprintffuncname
-    - gosec
-    - gosimple
-    - govet
-    - importas
-    - ineffassign
+  enable-all: true
+  disable:
     - lll
-    - makezero
-    - misspell
-    - nakedret
-    - nestif
-    - nilerr
-    - noctx
-    - nolintlint
-    - prealloc
-    - predeclared
-    - revive
-    - rowserrcheck
-    - sqlclosecheck
-    - staticcheck
-    - stylecheck
-    - testpackage
-    - thelper
-    - tparallel
-    - typecheck
-    - unconvert
-    - unparam
-    - unused
-    - whitespace
-    - wsl
+    - golint
+    - deadcode
+    - maligned
+    - scopelint
+    - nosnakecase
+    - exhaustivestruct
+    - ifshort
+    - varcheck
+    - structcheck
+    - interfacer
+    - gomodguard
+    - gochecknoglobals
+    - paralleltest
+    - varnamelen
+    - exhaustruct
+    - gomnd
+    - depguard
+    - forbidigo
+    - funlen
+    - nlreturn
+    - gofumpt
+    - nonamedreturns
+    - cyclop
+    - goerr113
+    - perfsprint
+    - tagliatelle
+    - wrapcheck
diff --git a/vendor/github.com/ryancurrah/gomodguard/Makefile b/vendor/github.com/ryancurrah/gomodguard/Makefile
index 5235d5aade30a57e6c2e91fd0ceff89eb7fec4d4..a44b707a66d01bffd618dce1e424e2a1088cc9a6 100644
--- a/vendor/github.com/ryancurrah/gomodguard/Makefile
+++ b/vendor/github.com/ryancurrah/gomodguard/Makefile
@@ -6,7 +6,7 @@ lint:
 
 .PHONY: build
 build:
-	go build -o gomodguard cmd/gomodguard/main.go
+	go build -o "$$(go env GOPATH)/bin/gomodguard" cmd/gomodguard/main.go
 
 .PHONY: run
 run: build
diff --git a/vendor/github.com/ryancurrah/gomodguard/README.md b/vendor/github.com/ryancurrah/gomodguard/README.md
index 4945f010121087f552b9b34baf8d5d9a0cc80df7..68dc8604468ec7f94dc168449ed2dc39e45e6387 100644
--- a/vendor/github.com/ryancurrah/gomodguard/README.md
+++ b/vendor/github.com/ryancurrah/gomodguard/README.md
@@ -1,7 +1,7 @@
 # gomodguard
 [![License](https://img.shields.io/github/license/ryancurrah/gomodguard?style=flat-square)](/LICENSE)
 [![Codecov](https://img.shields.io/codecov/c/gh/ryancurrah/gomodguard?style=flat-square)](https://codecov.io/gh/ryancurrah/gomodguard)
-[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/ryancurrah/gomodguard/Go?logo=Go&style=flat-square)](https://github.com/ryancurrah/gomodguard/actions?query=workflow%3AGo)
+[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/ryancurrah/gomodguard/go.yml?branch=main&logo=Go&style=flat-square)](https://github.com/ryancurrah/gomodguard/actions?query=workflow%3AGo)
 [![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/ryancurrah/gomodguard?style=flat-square)](https://github.com/ryancurrah/gomodguard/releases/latest)
 [![Docker](https://img.shields.io/docker/pulls/ryancurrah/gomodguard?style=flat-square)](https://hub.docker.com/r/ryancurrah/gomodguard)
 [![Github Releases Stats of golangci-lint](https://img.shields.io/github/downloads/ryancurrah/gomodguard/total.svg?logo=github&style=flat-square)](https://somsubhra.com/github-release-stats/?username=ryancurrah&repository=gomodguard)
diff --git a/vendor/github.com/ryancurrah/gomodguard/blocked.go b/vendor/github.com/ryancurrah/gomodguard/blocked.go
index 2a6e5c215963bf93bc5e18a0db201eaa116146ee..df24b85ac72daa1a1865520eaf900658429999ec 100644
--- a/vendor/github.com/ryancurrah/gomodguard/blocked.go
+++ b/vendor/github.com/ryancurrah/gomodguard/blocked.go
@@ -4,7 +4,7 @@ import (
 	"fmt"
 	"strings"
 
-	"github.com/Masterminds/semver"
+	"github.com/Masterminds/semver/v3"
 )
 
 // Blocked is a list of modules that are
@@ -15,7 +15,7 @@ type Blocked struct {
 	LocalReplaceDirectives bool            `yaml:"local_replace_directives"`
 }
 
+// BlockedVersion has a version constraint and a reason why the module version is blocked.
+// BlockedVersion has a version constraint a reason why the module version is blocked.
 type BlockedVersion struct {
 	Version string `yaml:"version"`
 	Reason  string `yaml:"reason"`
@@ -23,24 +23,22 @@ type BlockedVersion struct {
 
 // IsLintedModuleVersionBlocked returns true if a version constraint is specified and the
 // linted module version matches the constraint.
-func (r *BlockedVersion) IsLintedModuleVersionBlocked(lintedModuleVersion string) bool {
+func (r *BlockedVersion) IsLintedModuleVersionBlocked(lintedModuleVersion string) (bool, error) {
 	if r.Version == "" {
-		return false
+		return false, nil
 	}
 
 	constraint, err := semver.NewConstraint(r.Version)
 	if err != nil {
-		return false
+		return true, err
 	}
 
 	version, err := semver.NewVersion(lintedModuleVersion)
 	if err != nil {
-		return false
+		return true, err
 	}
 
-	meet := constraint.Check(version)
-
-	return meet
+	return constraint.Check(version), nil
 }
 
 // Message returns the reason why the module version is blocked.
diff --git a/vendor/github.com/ryancurrah/gomodguard/processor.go b/vendor/github.com/ryancurrah/gomodguard/processor.go
index 8457e3b0705b2ad3620e37c28cb7c3bc087dff3f..970dbc45748fe9e625bec59a8faf197692f5be3d 100644
--- a/vendor/github.com/ryancurrah/gomodguard/processor.go
+++ b/vendor/github.com/ryancurrah/gomodguard/processor.go
@@ -22,12 +22,10 @@ const (
 )
 
 var (
-	blockReasonNotInAllowedList = "import of package `%s` is blocked because the module is not in the " +
-		"allowed modules list."
-	blockReasonInBlockedList = "import of package `%s` is blocked because the module is in the " +
-		"blocked modules list."
-	blockReasonHasLocalReplaceDirective = "import of package `%s` is blocked because the module has a " +
-		"local replace directive."
+	blockReasonNotInAllowedList         = "import of package `%s` is blocked because the module is not in the allowed modules list."
+	blockReasonInBlockedList            = "import of package `%s` is blocked because the module is in the blocked modules list."
+	blockReasonHasLocalReplaceDirective = "import of package `%s` is blocked because the module has a local replace directive."
+	blockReasonInvalidVersionConstraint = "import of package `%s` is blocked because the version constraint is invalid."
 
 	// startsWithVersion is used to test when a string begins with the version identifier of a module,
 	// after having stripped the prefix base module name. IE "github.com/foo/bar/v2/baz" => "v2/baz"
@@ -141,7 +139,7 @@ func (p *Processor) addError(fileset *token.FileSet, pos token.Pos, reason strin
 //
 // It works by iterating over the dependant modules specified in the require
 // directive, checking if the module domain or full name is in the allowed list.
-func (p *Processor) SetBlockedModules() { //nolint:gocognit,funlen
+func (p *Processor) SetBlockedModules() { //nolint:funlen
 	blockedModules := make(map[string][]string, len(p.Modfile.Require))
 	currentModuleName := p.Modfile.Module.Mod.Path
 	lintedModules := p.Modfile.Require
@@ -181,9 +179,21 @@ func (p *Processor) SetBlockedModules() { //nolint:gocognit,funlen
 				fmt.Sprintf("%s %s", blockReasonInBlockedList, blockModuleReason.Message()))
 		}
 
-		if blockVersionReason != nil && blockVersionReason.IsLintedModuleVersionBlocked(lintedModuleVersion) {
-			blockedModules[lintedModuleName] = append(blockedModules[lintedModuleName],
-				fmt.Sprintf("%s %s", blockReasonInBlockedList, blockVersionReason.Message(lintedModuleVersion)))
+		if blockVersionReason != nil {
+			isVersBlocked, err := blockVersionReason.IsLintedModuleVersionBlocked(lintedModuleVersion)
+
+			var msg string
+
+			switch err {
+			case nil:
+				msg = fmt.Sprintf("%s %s", blockReasonInBlockedList, blockVersionReason.Message(lintedModuleVersion))
+			default:
+				msg = fmt.Sprintf("%s %s", blockReasonInvalidVersionConstraint, err)
+			}
+
+			if isVersBlocked {
+				blockedModules[lintedModuleName] = append(blockedModules[lintedModuleName], msg)
+			}
 		}
 	}
 
@@ -223,6 +233,11 @@ func (p *Processor) isBlockedPackageFromModFile(packageName string) []string {
 	return nil
 }
 
+// loadGoModFile loads the contents of the relevant go.mod file and returns them as a byte
+// slice, along with any error encountered. It first checks the "GOMOD" environment variable
+// to determine the path of the go.mod file; if the variable is not set or the file does not
+// exist, it falls back to reading the go.mod file in the current directory. If "GOMOD" is
+// set to "/dev/null", it returns an error: the current working directory must have a go.mod file.
 func loadGoModFile() ([]byte, error) {
 	cmd := exec.Command("go", "env", "-json")
 	stdout, _ := cmd.StdoutPipe()
@@ -257,7 +272,7 @@ func loadGoModFile() ([]byte, error) {
 	return os.ReadFile(goEnv["GOMOD"])
 }
 
-// isPackageInModule determines if a package is apart of the specified go module.
+// isPackageInModule determines if a package is a part of the specified Go module.
 func isPackageInModule(pkg, mod string) bool {
 	// Split pkg and mod paths into parts
 	pkgPart := strings.Split(pkg, "/")
diff --git a/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go b/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go
index c22817cafcf81e5d6e54debe9c41384a8dded4b1..55e931a898ceddb9f7c73d0605aa7a561f777f06 100644
--- a/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go
+++ b/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go
@@ -9,9 +9,10 @@ import (
 )
 
 const (
-	rowsName    = "Rows"
-	stmtName    = "Stmt"
-	closeMethod = "Close"
+	rowsName      = "Rows"
+	stmtName      = "Stmt"
+	namedStmtName = "NamedStmt"
+	closeMethod   = "Close"
 )
 
 type action uint8
@@ -31,13 +32,15 @@ var (
 	sqlPackages = []string{
 		"database/sql",
 		"github.com/jmoiron/sqlx",
+		"github.com/jackc/pgx/v5",
+		"github.com/jackc/pgx/v5/pgxpool",
 	}
 )
 
 func NewAnalyzer() *analysis.Analyzer {
 	return &analysis.Analyzer{
 		Name: "sqlclosecheck",
-		Doc:  "Checks that sql.Rows and sql.Stmt are closed.",
+		Doc:  "Checks that sql.Rows, sql.Stmt, sqlx.NamedStmt, pgx.Query are closed.",
 		Run:  run,
 		Requires: []*analysis.Analyzer{
 			buildssa.Analyzer,
@@ -63,20 +66,18 @@ func run(pass *analysis.Pass) (interface{}, error) {
 	for _, f := range funcs {
 		for _, b := range f.Blocks {
 			for i := range b.Instrs {
-				// Check if instruction is call that returns a target type
+				// Check if instruction is a call that returns a target pointer type
 				targetValues := getTargetTypesValues(b, i, targetTypes)
 				if len(targetValues) == 0 {
 					continue
 				}
 
-				// log.Printf("%s", f.Name())
-
 				// For each found target check if they are closed and deferred
 				for _, targetValue := range targetValues {
 					refs := (*targetValue.value).Referrers()
 					isClosed := checkClosed(refs, targetTypes)
 					if !isClosed {
-						pass.Reportf((targetValue.instr).Pos(), "Rows/Stmt was not closed")
+						pass.Reportf((targetValue.instr).Pos(), "Rows/Stmt/NamedStmt was not closed")
 					}
 
 					checkDeferred(pass, refs, targetTypes, false)
@@ -88,17 +89,22 @@ func run(pass *analysis.Pass) (interface{}, error) {
 	return nil, nil
 }
 
-func getTargetTypes(pssa *buildssa.SSA, targetPackages []string) []*types.Pointer {
-	targets := []*types.Pointer{}
+func getTargetTypes(pssa *buildssa.SSA, targetPackages []string) []any {
+	targets := []any{}
 
 	for _, sqlPkg := range targetPackages {
 		pkg := pssa.Pkg.Prog.ImportedPackage(sqlPkg)
 		if pkg == nil {
 			// the SQL package being checked isn't imported
-			return targets
+			continue
+		}
+
+		rowsPtrType := getTypePointerFromName(pkg, rowsName)
+		if rowsPtrType != nil {
+			targets = append(targets, rowsPtrType)
 		}
 
-		rowsType := getTypePointerFromName(pkg, rowsName)
+		rowsType := getTypeFromName(pkg, rowsName)
 		if rowsType != nil {
 			targets = append(targets, rowsType)
 		}
@@ -107,6 +113,11 @@ func getTargetTypes(pssa *buildssa.SSA, targetPackages []string) []*types.Pointe
 		if stmtType != nil {
 			targets = append(targets, stmtType)
 		}
+
+		namedStmtType := getTypePointerFromName(pkg, namedStmtName)
+		if namedStmtType != nil {
+			targets = append(targets, namedStmtType)
+		}
 	}
 
 	return targets
@@ -115,7 +126,7 @@ func getTargetTypes(pssa *buildssa.SSA, targetPackages []string) []*types.Pointe
 func getTypePointerFromName(pkg *ssa.Package, name string) *types.Pointer {
 	pkgType := pkg.Type(name)
 	if pkgType == nil {
-		// this package does not use Rows/Stmt
+		// this package does not use Rows/Stmt/NamedStmt
 		return nil
 	}
 
@@ -128,12 +139,28 @@ func getTypePointerFromName(pkg *ssa.Package, name string) *types.Pointer {
 	return types.NewPointer(named)
 }
 
+func getTypeFromName(pkg *ssa.Package, name string) *types.Named {
+	pkgType := pkg.Type(name)
+	if pkgType == nil {
+		// this package does not use Rows/Stmt
+		return nil
+	}
+
+	obj := pkgType.Object()
+	named, ok := obj.Type().(*types.Named)
+	if !ok {
+		return nil
+	}
+
+	return named
+}
+
 type targetValue struct {
 	value *ssa.Value
 	instr ssa.Instruction
 }
 
-func getTargetTypesValues(b *ssa.BasicBlock, i int, targetTypes []*types.Pointer) []targetValue {
+func getTargetTypesValues(b *ssa.BasicBlock, i int, targetTypes []any) []targetValue {
 	targetValues := []targetValue{}
 
 	instr := b.Instrs[i]
@@ -149,21 +176,32 @@ func getTargetTypesValues(b *ssa.BasicBlock, i int, targetTypes []*types.Pointer
 		varType := v.Type()
 
 		for _, targetType := range targetTypes {
-			if !types.Identical(varType, targetType) {
+			var tt types.Type
+
+			switch t := targetType.(type) {
+			case *types.Pointer:
+				tt = t
+			case *types.Named:
+				tt = t
+			default:
+				continue
+			}
+
+			if !types.Identical(varType, tt) {
 				continue
 			}
 
 			for _, cRef := range *call.Referrers() {
 				switch instr := cRef.(type) {
 				case *ssa.Call:
-					if len(instr.Call.Args) >= 1 && types.Identical(instr.Call.Args[0].Type(), targetType) {
+					if len(instr.Call.Args) >= 1 && types.Identical(instr.Call.Args[0].Type(), tt) {
 						targetValues = append(targetValues, targetValue{
 							value: &instr.Call.Args[0],
 							instr: call,
 						})
 					}
 				case ssa.Value:
-					if types.Identical(instr.Type(), targetType) {
+					if types.Identical(instr.Type(), tt) {
 						targetValues = append(targetValues, targetValue{
 							value: &instr,
 							instr: call,
@@ -177,43 +215,42 @@ func getTargetTypesValues(b *ssa.BasicBlock, i int, targetTypes []*types.Pointer
 	return targetValues
 }
 
-func checkClosed(refs *[]ssa.Instruction, targetTypes []*types.Pointer) bool {
+func checkClosed(refs *[]ssa.Instruction, targetTypes []any) bool {
 	numInstrs := len(*refs)
 	for idx, ref := range *refs {
-		// log.Printf("%T - %s", ref, ref)
-
 		action := getAction(ref, targetTypes)
 		switch action {
-		case actionClosed:
+		case actionClosed, actionReturned, actionHandled:
 			return true
 		case actionPassed:
 			// Passed and not used after
 			if numInstrs == idx+1 {
 				return true
 			}
-		case actionReturned:
-			return true
-		case actionHandled:
-			return true
-		default:
-			// log.Printf(action)
 		}
 	}
 
 	return false
 }
 
-func getAction(instr ssa.Instruction, targetTypes []*types.Pointer) action {
+func getAction(instr ssa.Instruction, targetTypes []any) action {
 	switch instr := instr.(type) {
 	case *ssa.Defer:
-		if instr.Call.Value == nil {
-			return actionUnvaluedDefer
+		if instr.Call.Value != nil {
+			name := instr.Call.Value.Name()
+			if name == closeMethod {
+				return actionClosed
+			}
 		}
 
-		name := instr.Call.Value.Name()
-		if name == closeMethod {
-			return actionClosed
+		if instr.Call.Method != nil {
+			name := instr.Call.Method.Name()
+			if name == closeMethod {
+				return actionClosed
+			}
 		}
+
+		return actionUnvaluedDefer
 	case *ssa.Call:
 		if instr.Call.Value == nil {
 			return actionUnvaluedCall
@@ -265,7 +302,18 @@ func getAction(instr ssa.Instruction, targetTypes []*types.Pointer) action {
 	case *ssa.UnOp:
 		instrType := instr.Type()
 		for _, targetType := range targetTypes {
-			if types.Identical(instrType, targetType) {
+			var tt types.Type
+
+			switch t := targetType.(type) {
+			case *types.Pointer:
+				tt = t
+			case *types.Named:
+				tt = t
+			default:
+				continue
+			}
+
+			if types.Identical(instrType, tt) {
 				if checkClosed(instr.Referrers(), targetTypes) {
 					return actionHandled
 				}
@@ -277,20 +325,22 @@ func getAction(instr ssa.Instruction, targetTypes []*types.Pointer) action {
 		}
 	case *ssa.Return:
 		return actionReturned
-	default:
-		// log.Printf("%s", instr)
 	}
 
 	return actionUnhandled
 }
 
-func checkDeferred(pass *analysis.Pass, instrs *[]ssa.Instruction, targetTypes []*types.Pointer, inDefer bool) {
+func checkDeferred(pass *analysis.Pass, instrs *[]ssa.Instruction, targetTypes []any, inDefer bool) {
 	for _, instr := range *instrs {
 		switch instr := instr.(type) {
 		case *ssa.Defer:
 			if instr.Call.Value != nil && instr.Call.Value.Name() == closeMethod {
 				return
 			}
+
+			if instr.Call.Method != nil && instr.Call.Method.Name() == closeMethod {
+				return
+			}
 		case *ssa.Call:
 			if instr.Call.Value != nil && instr.Call.Value.Name() == closeMethod {
 				if !inDefer {
@@ -316,7 +366,18 @@ func checkDeferred(pass *analysis.Pass, instrs *[]ssa.Instruction, targetTypes [
 		case *ssa.UnOp:
 			instrType := instr.Type()
 			for _, targetType := range targetTypes {
-				if types.Identical(instrType, targetType) {
+				var tt types.Type
+
+				switch t := targetType.(type) {
+				case *types.Pointer:
+					tt = t
+				case *types.Named:
+					tt = t
+				default:
+					continue
+				}
+
+				if types.Identical(instrType, tt) {
 					checkDeferred(pass, instr.Referrers(), targetTypes, inDefer)
 				}
 			}
@@ -326,10 +387,17 @@ func checkDeferred(pass *analysis.Pass, instrs *[]ssa.Instruction, targetTypes [
 	}
 }
 
-func isTargetType(t types.Type, targetTypes []*types.Pointer) bool {
+func isTargetType(t types.Type, targetTypes []any) bool {
 	for _, targetType := range targetTypes {
-		if types.Identical(t, targetType) {
-			return true
+		switch tt := targetType.(type) {
+		case *types.Pointer:
+			if types.Identical(t, tt) {
+				return true
+			}
+		case *types.Named:
+			if types.Identical(t, tt) {
+				return true
+			}
 		}
 	}
 
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore b/vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..3c0af382598770feafd01fb3c10839b079b6b92a
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore
@@ -0,0 +1,4 @@
+.vscode
+.idea
+*.swp
+cmd/jv/jv
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitmodules b/vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitmodules
new file mode 100644
index 0000000000000000000000000000000000000000..314da31c5ebfe15fb8f8478edfc692ac704579a5
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "testdata/JSON-Schema-Test-Suite"]
+	path = testdata/JSON-Schema-Test-Suite
+	url = https://github.com/json-schema-org/JSON-Schema-Test-Suite.git
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/LICENSE b/vendor/github.com/santhosh-tekuri/jsonschema/v5/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..19dc35b2433851a0e8fd866a5d323b2ba18c12ed
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/LICENSE
@@ -0,0 +1,175 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
\ No newline at end of file
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md b/vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..b0d05054ca39392cc0f842a1cacbd46391bcb429
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md
@@ -0,0 +1,220 @@
+# jsonschema v5.3.1
+
+[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
+[![GoDoc](https://godoc.org/github.com/santhosh-tekuri/jsonschema?status.svg)](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5)
+[![Go Report Card](https://goreportcard.com/badge/github.com/santhosh-tekuri/jsonschema/v5)](https://goreportcard.com/report/github.com/santhosh-tekuri/jsonschema/v5)
+[![Build Status](https://github.com/santhosh-tekuri/jsonschema/actions/workflows/go.yaml/badge.svg?branch=master)](https://github.com/santhosh-tekuri/jsonschema/actions/workflows/go.yaml)
+[![codecov](https://codecov.io/gh/santhosh-tekuri/jsonschema/branch/master/graph/badge.svg?token=JMVj1pFT2l)](https://codecov.io/gh/santhosh-tekuri/jsonschema)
+
+Package jsonschema provides json-schema compilation and validation.
+
+[Benchmarks](https://dev.to/vearutop/benchmarking-correctness-and-performance-of-go-json-schema-validators-3247)
+
+### Features:
+ - implements
+   [draft 2020-12](https://json-schema.org/specification-links.html#2020-12),
+   [draft 2019-09](https://json-schema.org/specification-links.html#draft-2019-09-formerly-known-as-draft-8),
+   [draft-7](https://json-schema.org/specification-links.html#draft-7),
+   [draft-6](https://json-schema.org/specification-links.html#draft-6),
+   [draft-4](https://json-schema.org/specification-links.html#draft-4)
+ - fully compliant with the [JSON-Schema-Test-Suite](https://github.com/json-schema-org/JSON-Schema-Test-Suite), excluding some optional tests
+   - the list of excluded optional tests can be found in schema_test.go (variable [skipTests](https://github.com/santhosh-tekuri/jsonschema/blob/master/schema_test.go#L24))
+ - validates schemas against meta-schema
+ - full support of remote references
+ - support of recursive references between schemas
+ - detects infinite loop in schemas
+ - thread safe validation
+ - rich, intuitive hierarchical error messages with json-pointers to exact location
+ - supports output formats flag, basic and detailed
+ - supports enabling format and content Assertions in draft2019-09 or above
+   - change `Compiler.AssertFormat`, `Compiler.AssertContent` to `true`
+ - compiled schema can be introspected, making it easier to develop tools such as generating Go structs from a schema
+ - supports user-defined keywords via [extensions](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-Extension)
+ - implements following formats (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedFormat))
+   - date-time, date, time, duration, period (supports leap-second)
+   - uuid, hostname, email
+   - ip-address, ipv4, ipv6
+   - uri, uriref, uri-template(limited validation)
+   - json-pointer, relative-json-pointer
+   - regex, format
+ - implements following contentEncoding (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedContent))
+   - base64
+ - implements following contentMediaType (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedContent))
+   - application/json
+ - can load from files/http/https/[string](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-FromString)/[]byte/io.Reader (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedLoader))
+
+
+See examples in the [godoc](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5).
+
+The schema is compiled against the version specified in the `$schema` property.
+If the `$schema` property is missing, the latest draft currently implemented
+by this library is used.
+
+You can force a specific version to be used when `$schema` is missing, as follows:
+
+```go
+compiler := jsonschema.NewCompiler()
+compiler.Draft = jsonschema.Draft4
+```
+
+This package supports loading a json-schema from a file path or file URL.
+
+To load a json-schema from an HTTP URL, add the following import:
+
+```go
+import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
+```
+
+## Rich Errors
+
+The ValidationError returned by the Validate method contains detailed context describing why and where validation failed.
+
+schema.json:
+```json
+{
+      "$ref": "t.json#/definitions/employee"
+}
+```
+
+t.json:
+```json
+{
+    "definitions": {
+        "employee": {
+            "type": "string"
+        }
+    }
+}
+```
+
+doc.json:
+```json
+1
+```
+
+Assuming `err` is the ValidationError returned when `doc.json` is validated against `schema.json`:
+```go
+fmt.Printf("%#v\n", err) // using %#v prints errors hierarchy
+```
+Prints:
+```
+[I#] [S#] doesn't validate with file:///Users/santhosh/jsonschema/schema.json#
+  [I#] [S#/$ref] doesn't validate with 'file:///Users/santhosh/jsonschema/t.json#/definitions/employee'
+    [I#] [S#/definitions/employee/type] expected string, but got number
+```
+
+Here `I` stands for instance document and `S` stands for schema document.  
+The json-fragments that caused error in instance and schema documents are represented using json-pointer notation.  
+Nested causes are printed with indent.
+
+To output `err` in `flag` output format:
+```go
+b, _ := json.MarshalIndent(err.FlagOutput(), "", "  ")
+fmt.Println(string(b))
+```
+Prints:
+```json
+{
+  "valid": false
+}
+```
+To output `err` in `basic` output format:
+```go
+b, _ := json.MarshalIndent(err.BasicOutput(), "", "  ")
+fmt.Println(string(b))
+```
+Prints:
+```json
+{
+  "valid": false,
+  "errors": [
+    {
+      "keywordLocation": "",
+      "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#",
+      "instanceLocation": "",
+      "error": "doesn't validate with file:///Users/santhosh/jsonschema/schema.json#"
+    },
+    {
+      "keywordLocation": "/$ref",
+      "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#/$ref",
+      "instanceLocation": "",
+      "error": "doesn't validate with 'file:///Users/santhosh/jsonschema/t.json#/definitions/employee'"
+    },
+    {
+      "keywordLocation": "/$ref/type",
+      "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/t.json#/definitions/employee/type",
+      "instanceLocation": "",
+      "error": "expected string, but got number"
+    }
+  ]
+}
+```
+To output `err` in `detailed` output format:
+```go
+b, _ := json.MarshalIndent(err.DetailedOutput(), "", "  ")
+fmt.Println(string(b))
+```
+Prints:
+```json
+{
+  "valid": false,
+  "keywordLocation": "",
+  "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#",
+  "instanceLocation": "",
+  "errors": [
+    {
+      "valid": false,
+      "keywordLocation": "/$ref",
+      "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#/$ref",
+      "instanceLocation": "",
+      "errors": [
+        {
+          "valid": false,
+          "keywordLocation": "/$ref/type",
+          "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/t.json#/definitions/employee/type",
+          "instanceLocation": "",
+          "error": "expected string, but got number"
+        }
+      ]
+    }
+  ]
+}
+```
+
+## CLI
+
+To install: `go install github.com/santhosh-tekuri/jsonschema/cmd/jv@latest`
+
+```bash
+jv [-draft INT] [-output FORMAT] [-assertformat] [-assertcontent] <json-schema> [<json-or-yaml-doc>]...
+  -assertcontent
+    	enable content assertions with draft >= 2019
+  -assertformat
+    	enable format assertions with draft >= 2019
+  -draft int
+    	draft used when '$schema' attribute is missing. valid values 4, 5, 7, 2019, 2020 (default 2020)
+  -output string
+    	output format. valid values flag, basic, detailed
+```
+
+If no `<json-or-yaml-doc>` arguments are passed, it simply validates the `<json-schema>`.  
+If the `$schema` attribute is missing in the schema, the latest version is used; this can be overridden by passing the `-draft` flag.
+
+The exit code is 1 if there are any validation errors.
+
+`jv` can also validate YAML files, and it accepts schemas written in YAML as well.
+
+## Validating YAML Documents
+
+Since YAML supports non-string keys, such YAML documents are invalid when treated as JSON documents.  
+
+Most YAML parsers use `map[interface{}]interface{}` for objects,  
+whereas JSON parsers use `map[string]interface{}`.  
+
+So we need to manually convert them to `map[string]interface{}`.  
+The code below shows such a conversion using a `toStringKeys` function.
+
+https://play.golang.org/p/Hhax3MrtD8r
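+
+A minimal sketch of such a `toStringKeys` helper (illustrative, assuming `errors` is imported; the playground link above has the full example):
+
+```go
+// toStringKeys recursively converts map[interface{}]interface{} values,
+// as produced by some YAML parsers, into map[string]interface{} so the
+// document can be validated as a JSON value.
+func toStringKeys(val interface{}) (interface{}, error) {
+	switch val := val.(type) {
+	case map[interface{}]interface{}:
+		m := make(map[string]interface{}, len(val))
+		for k, v := range val {
+			ks, ok := k.(string)
+			if !ok {
+				return nil, errors.New("found non-string key")
+			}
+			cv, err := toStringKeys(v)
+			if err != nil {
+				return nil, err
+			}
+			m[ks] = cv
+		}
+		return m, nil
+	case []interface{}:
+		l := make([]interface{}, len(val))
+		for i, v := range val {
+			cv, err := toStringKeys(v)
+			if err != nil {
+				return nil, err
+			}
+			l[i] = cv
+		}
+		return l, nil
+	default:
+		return val, nil
+	}
+}
+```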
+
+NOTE: if you are using `gopkg.in/yaml.v3`, you do not need such a conversion, since that library
+returns `map[string]interface{}` when all keys are strings.
\ No newline at end of file
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go
new file mode 100644
index 0000000000000000000000000000000000000000..fdb68e6480f94b6b644f60c9b451540afb5a904e
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go
@@ -0,0 +1,812 @@
+package jsonschema
+
+import (
+	"encoding/json"
+	"fmt"
+	"io"
+	"math/big"
+	"regexp"
+	"strconv"
+	"strings"
+)
+
+// A Compiler represents a json-schema compiler.
+type Compiler struct {
+	// Draft represents the draft used when '$schema' attribute is missing.
+	//
+	// This defaults to latest supported draft (currently 2020-12).
+	Draft     *Draft
+	resources map[string]*resource
+
+	// Extensions is used to register extensions.
+	extensions map[string]extension
+
+	// ExtractAnnotations tells whether schema annotations has to be extracted
+	// in compiled Schema or not.
+	ExtractAnnotations bool
+
+	// LoadURL loads the document at given absolute URL.
+	//
+	// If nil, package global LoadURL is used.
+	LoadURL func(s string) (io.ReadCloser, error)
+
+	// Formats can be registered by adding to this map. Key is format name,
+	// value is function that knows how to validate that format.
+	Formats map[string]func(interface{}) bool
+
+	// AssertFormat for specifications >= draft2019-09.
+	AssertFormat bool
+
+	// Decoders can be registered by adding to this map. Key is encoding name,
+	// value is function that knows how to decode string in that format.
+	Decoders map[string]func(string) ([]byte, error)
+
+	// MediaTypes can be registered by adding to this map. Key is mediaType name,
+	// value is function that knows how to validate that mediaType.
+	MediaTypes map[string]func([]byte) error
+
+	// AssertContent for specifications >= draft2019-09.
+	AssertContent bool
+}
+
+// Compile parses json-schema at the given url and returns, if successful,
+// a Schema object that can be used to match against json.
+//
+// Returned error can be *SchemaError
+func Compile(url string) (*Schema, error) {
+	return NewCompiler().Compile(url)
+}
+
+// MustCompile is like Compile but panics if the url cannot be compiled to *Schema.
+// It simplifies safe initialization of global variables holding compiled Schemas.
+func MustCompile(url string) *Schema {
+	return NewCompiler().MustCompile(url)
+}
+
+// CompileString parses and compiles the given schema with given base url.
+func CompileString(url, schema string) (*Schema, error) {
+	c := NewCompiler()
+	if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
+		return nil, err
+	}
+	return c.Compile(url)
+}
+
+// MustCompileString is like CompileString but panics on error.
+// It simplifies safe initialization of global variables holding compiled Schemas.
+func MustCompileString(url, schema string) *Schema {
+	c := NewCompiler()
+	if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
+		panic(err)
+	}
+	return c.MustCompile(url)
+}
+
+// NewCompiler returns a json-schema Compiler object.
+// If the '$schema' attribute is missing, the latest supported draft is assumed.
+// To change this behavior, set Compiler.Draft.
+func NewCompiler() *Compiler {
+	return &Compiler{
+		Draft:      latest,
+		resources:  make(map[string]*resource),
+		Formats:    make(map[string]func(interface{}) bool),
+		Decoders:   make(map[string]func(string) ([]byte, error)),
+		MediaTypes: make(map[string]func([]byte) error),
+		extensions: make(map[string]extension),
+	}
+}
+
+// AddResource adds in-memory resource to the compiler.
+//
+// Note that the url must not have a fragment.
+func (c *Compiler) AddResource(url string, r io.Reader) error {
+	res, err := newResource(url, r)
+	if err != nil {
+		return err
+	}
+	c.resources[res.url] = res
+	return nil
+}
+
+// MustCompile is like Compile but panics if the url cannot be compiled to *Schema.
+// It simplifies safe initialization of global variables holding compiled Schemas.
+func (c *Compiler) MustCompile(url string) *Schema {
+	s, err := c.Compile(url)
+	if err != nil {
+		panic(fmt.Sprintf("jsonschema: %#v", err))
+	}
+	return s
+}
+
+// Compile parses json-schema at the given url and returns, if successful,
+// a Schema object that can be used to match against json.
+//
+// error returned will be of type *SchemaError
+func (c *Compiler) Compile(url string) (*Schema, error) {
+	// make url absolute
+	u, err := toAbs(url)
+	if err != nil {
+		return nil, &SchemaError{url, err}
+	}
+	url = u
+
+	sch, err := c.compileURL(url, nil, "#")
+	if err != nil {
+		err = &SchemaError{url, err}
+	}
+	return sch, err
+}
+
+func (c *Compiler) findResource(url string) (*resource, error) {
+	if _, ok := c.resources[url]; !ok {
+		// load resource
+		var rdr io.Reader
+		if sch, ok := vocabSchemas[url]; ok {
+			rdr = strings.NewReader(sch)
+		} else {
+			loadURL := LoadURL
+			if c.LoadURL != nil {
+				loadURL = c.LoadURL
+			}
+			r, err := loadURL(url)
+			if err != nil {
+				return nil, err
+			}
+			defer r.Close()
+			rdr = r
+		}
+		if err := c.AddResource(url, rdr); err != nil {
+			return nil, err
+		}
+	}
+
+	r := c.resources[url]
+	if r.draft != nil {
+		return r, nil
+	}
+
+	// set draft
+	r.draft = c.Draft
+	if m, ok := r.doc.(map[string]interface{}); ok {
+		if sch, ok := m["$schema"]; ok {
+			sch, ok := sch.(string)
+			if !ok {
+				return nil, fmt.Errorf("jsonschema: invalid $schema in %s", url)
+			}
+			if !isURI(sch) {
+				return nil, fmt.Errorf("jsonschema: $schema must be uri in %s", url)
+			}
+			r.draft = findDraft(sch)
+			if r.draft == nil {
+				sch, _ := split(sch)
+				if sch == url {
+					return nil, fmt.Errorf("jsonschema: unsupported draft in %s", url)
+				}
+				mr, err := c.findResource(sch)
+				if err != nil {
+					return nil, err
+				}
+				r.draft = mr.draft
+			}
+		}
+	}
+
+	id, err := r.draft.resolveID(r.url, r.doc)
+	if err != nil {
+		return nil, err
+	}
+	if id != "" {
+		r.url = id
+	}
+
+	if err := r.fillSubschemas(c, r); err != nil {
+		return nil, err
+	}
+
+	return r, nil
+}
+
+func (c *Compiler) compileURL(url string, stack []schemaRef, ptr string) (*Schema, error) {
+	// if url points to a draft, return Draft.meta
+	if d := findDraft(url); d != nil && d.meta != nil {
+		return d.meta, nil
+	}
+
+	b, f := split(url)
+	r, err := c.findResource(b)
+	if err != nil {
+		return nil, err
+	}
+	return c.compileRef(r, stack, ptr, r, f)
+}
+
+func (c *Compiler) compileRef(r *resource, stack []schemaRef, refPtr string, res *resource, ref string) (*Schema, error) {
+	base := r.baseURL(res.floc)
+	ref, err := resolveURL(base, ref)
+	if err != nil {
+		return nil, err
+	}
+
+	u, f := split(ref)
+	sr := r.findResource(u)
+	if sr == nil {
+		// external resource
+		return c.compileURL(ref, stack, refPtr)
+	}
+
+	// ensure root resource is always compiled first.
+	// this is required to get schema.meta from root resource
+	if r.schema == nil {
+		r.schema = newSchema(r.url, r.floc, r.draft, r.doc)
+		if _, err := c.compile(r, nil, schemaRef{"#", r.schema, false}, r); err != nil {
+			return nil, err
+		}
+	}
+
+	sr, err = r.resolveFragment(c, sr, f)
+	if err != nil {
+		return nil, err
+	}
+	if sr == nil {
+		return nil, fmt.Errorf("jsonschema: %s not found", ref)
+	}
+
+	if sr.schema != nil {
+		if err := checkLoop(stack, schemaRef{refPtr, sr.schema, false}); err != nil {
+			return nil, err
+		}
+		return sr.schema, nil
+	}
+
+	sr.schema = newSchema(r.url, sr.floc, r.draft, sr.doc)
+	return c.compile(r, stack, schemaRef{refPtr, sr.schema, false}, sr)
+}
+
+func (c *Compiler) compileDynamicAnchors(r *resource, res *resource) error {
+	if r.draft.version < 2020 {
+		return nil
+	}
+
+	rr := r.listResources(res)
+	rr = append(rr, res)
+	for _, sr := range rr {
+		if m, ok := sr.doc.(map[string]interface{}); ok {
+			if _, ok := m["$dynamicAnchor"]; ok {
+				sch, err := c.compileRef(r, nil, "IGNORED", r, sr.floc)
+				if err != nil {
+					return err
+				}
+				res.schema.dynamicAnchors = append(res.schema.dynamicAnchors, sch)
+			}
+		}
+	}
+	return nil
+}
+
+func (c *Compiler) compile(r *resource, stack []schemaRef, sref schemaRef, res *resource) (*Schema, error) {
+	if err := c.compileDynamicAnchors(r, res); err != nil {
+		return nil, err
+	}
+
+	switch v := res.doc.(type) {
+	case bool:
+		res.schema.Always = &v
+		return res.schema, nil
+	default:
+		return res.schema, c.compileMap(r, stack, sref, res)
+	}
+}
+
+func (c *Compiler) compileMap(r *resource, stack []schemaRef, sref schemaRef, res *resource) error {
+	m := res.doc.(map[string]interface{})
+
+	if err := checkLoop(stack, sref); err != nil {
+		return err
+	}
+	stack = append(stack, sref)
+
+	var s = res.schema
+	var err error
+
+	if r == res { // root schema
+		if sch, ok := m["$schema"]; ok {
+			sch := sch.(string)
+			if d := findDraft(sch); d != nil {
+				s.meta = d.meta
+			} else {
+				if s.meta, err = c.compileRef(r, stack, "$schema", res, sch); err != nil {
+					return err
+				}
+			}
+		}
+	}
+
+	if ref, ok := m["$ref"]; ok {
+		s.Ref, err = c.compileRef(r, stack, "$ref", res, ref.(string))
+		if err != nil {
+			return err
+		}
+		if r.draft.version < 2019 {
+			// All other properties in a "$ref" object MUST be ignored
+			return nil
+		}
+	}
+
+	if r.draft.version >= 2019 {
+		if r == res { // root schema
+			if vocab, ok := m["$vocabulary"]; ok {
+				for url, reqd := range vocab.(map[string]interface{}) {
+					if reqd, ok := reqd.(bool); ok && !reqd {
+						continue
+					}
+					if !r.draft.isVocab(url) {
+						return fmt.Errorf("jsonschema: unsupported vocab %q in %s", url, res)
+					}
+					s.vocab = append(s.vocab, url)
+				}
+			} else {
+				s.vocab = r.draft.defaultVocab
+			}
+		}
+
+		if ref, ok := m["$recursiveRef"]; ok {
+			s.RecursiveRef, err = c.compileRef(r, stack, "$recursiveRef", res, ref.(string))
+			if err != nil {
+				return err
+			}
+		}
+	}
+	if r.draft.version >= 2020 {
+		if dref, ok := m["$dynamicRef"]; ok {
+			s.DynamicRef, err = c.compileRef(r, stack, "$dynamicRef", res, dref.(string))
+			if err != nil {
+				return err
+			}
+			if dref, ok := dref.(string); ok {
+				_, frag := split(dref)
+				if frag != "#" && !strings.HasPrefix(frag, "#/") {
+					// frag is anchor
+					s.dynamicRefAnchor = frag[1:]
+				}
+			}
+		}
+	}
+
+	loadInt := func(pname string) int {
+		if num, ok := m[pname]; ok {
+			i, _ := num.(json.Number).Float64()
+			return int(i)
+		}
+		return -1
+	}
+
+	loadRat := func(pname string) *big.Rat {
+		if num, ok := m[pname]; ok {
+			r, _ := new(big.Rat).SetString(string(num.(json.Number)))
+			return r
+		}
+		return nil
+	}
+
+	if r.draft.version < 2019 || r.schema.meta.hasVocab("validation") {
+		if t, ok := m["type"]; ok {
+			switch t := t.(type) {
+			case string:
+				s.Types = []string{t}
+			case []interface{}:
+				s.Types = toStrings(t)
+			}
+		}
+
+		if e, ok := m["enum"]; ok {
+			s.Enum = e.([]interface{})
+			allPrimitives := true
+			for _, item := range s.Enum {
+				switch jsonType(item) {
+				case "object", "array":
+					allPrimitives = false
+					break
+				}
+			}
+			s.enumError = "enum failed"
+			if allPrimitives {
+				if len(s.Enum) == 1 {
+					s.enumError = fmt.Sprintf("value must be %#v", s.Enum[0])
+				} else {
+					strEnum := make([]string, len(s.Enum))
+					for i, item := range s.Enum {
+						strEnum[i] = fmt.Sprintf("%#v", item)
+					}
+					s.enumError = fmt.Sprintf("value must be one of %s", strings.Join(strEnum, ", "))
+				}
+			}
+		}
+
+		s.Minimum = loadRat("minimum")
+		if exclusive, ok := m["exclusiveMinimum"]; ok {
+			if exclusive, ok := exclusive.(bool); ok {
+				if exclusive {
+					s.Minimum, s.ExclusiveMinimum = nil, s.Minimum
+				}
+			} else {
+				s.ExclusiveMinimum = loadRat("exclusiveMinimum")
+			}
+		}
+
+		s.Maximum = loadRat("maximum")
+		if exclusive, ok := m["exclusiveMaximum"]; ok {
+			if exclusive, ok := exclusive.(bool); ok {
+				if exclusive {
+					s.Maximum, s.ExclusiveMaximum = nil, s.Maximum
+				}
+			} else {
+				s.ExclusiveMaximum = loadRat("exclusiveMaximum")
+			}
+		}
+
+		s.MultipleOf = loadRat("multipleOf")
+
+		s.MinProperties, s.MaxProperties = loadInt("minProperties"), loadInt("maxProperties")
+
+		if req, ok := m["required"]; ok {
+			s.Required = toStrings(req.([]interface{}))
+		}
+
+		s.MinItems, s.MaxItems = loadInt("minItems"), loadInt("maxItems")
+
+		if unique, ok := m["uniqueItems"]; ok {
+			s.UniqueItems = unique.(bool)
+		}
+
+		s.MinLength, s.MaxLength = loadInt("minLength"), loadInt("maxLength")
+
+		if pattern, ok := m["pattern"]; ok {
+			s.Pattern = regexp.MustCompile(pattern.(string))
+		}
+
+		if r.draft.version >= 2019 {
+			s.MinContains, s.MaxContains = loadInt("minContains"), loadInt("maxContains")
+			if s.MinContains == -1 {
+				s.MinContains = 1
+			}
+
+			if deps, ok := m["dependentRequired"]; ok {
+				deps := deps.(map[string]interface{})
+				s.DependentRequired = make(map[string][]string, len(deps))
+				for pname, pvalue := range deps {
+					s.DependentRequired[pname] = toStrings(pvalue.([]interface{}))
+				}
+			}
+		}
+	}
+
+	compile := func(stack []schemaRef, ptr string) (*Schema, error) {
+		return c.compileRef(r, stack, ptr, res, r.url+res.floc+"/"+ptr)
+	}
+
+	loadSchema := func(pname string, stack []schemaRef) (*Schema, error) {
+		if _, ok := m[pname]; ok {
+			return compile(stack, escape(pname))
+		}
+		return nil, nil
+	}
+
+	loadSchemas := func(pname string, stack []schemaRef) ([]*Schema, error) {
+		if pvalue, ok := m[pname]; ok {
+			pvalue := pvalue.([]interface{})
+			schemas := make([]*Schema, len(pvalue))
+			for i := range pvalue {
+				sch, err := compile(stack, escape(pname)+"/"+strconv.Itoa(i))
+				if err != nil {
+					return nil, err
+				}
+				schemas[i] = sch
+			}
+			return schemas, nil
+		}
+		return nil, nil
+	}
+
+	if r.draft.version < 2019 || r.schema.meta.hasVocab("applicator") {
+		if s.Not, err = loadSchema("not", stack); err != nil {
+			return err
+		}
+		if s.AllOf, err = loadSchemas("allOf", stack); err != nil {
+			return err
+		}
+		if s.AnyOf, err = loadSchemas("anyOf", stack); err != nil {
+			return err
+		}
+		if s.OneOf, err = loadSchemas("oneOf", stack); err != nil {
+			return err
+		}
+
+		if props, ok := m["properties"]; ok {
+			props := props.(map[string]interface{})
+			s.Properties = make(map[string]*Schema, len(props))
+			for pname := range props {
+				s.Properties[pname], err = compile(nil, "properties/"+escape(pname))
+				if err != nil {
+					return err
+				}
+			}
+		}
+
+		if regexProps, ok := m["regexProperties"]; ok {
+			s.RegexProperties = regexProps.(bool)
+		}
+
+		if patternProps, ok := m["patternProperties"]; ok {
+			patternProps := patternProps.(map[string]interface{})
+			s.PatternProperties = make(map[*regexp.Regexp]*Schema, len(patternProps))
+			for pattern := range patternProps {
+				s.PatternProperties[regexp.MustCompile(pattern)], err = compile(nil, "patternProperties/"+escape(pattern))
+				if err != nil {
+					return err
+				}
+			}
+		}
+
+		if additionalProps, ok := m["additionalProperties"]; ok {
+			switch additionalProps := additionalProps.(type) {
+			case bool:
+				s.AdditionalProperties = additionalProps
+			case map[string]interface{}:
+				s.AdditionalProperties, err = compile(nil, "additionalProperties")
+				if err != nil {
+					return err
+				}
+			}
+		}
+
+		if deps, ok := m["dependencies"]; ok {
+			deps := deps.(map[string]interface{})
+			s.Dependencies = make(map[string]interface{}, len(deps))
+			for pname, pvalue := range deps {
+				switch pvalue := pvalue.(type) {
+				case []interface{}:
+					s.Dependencies[pname] = toStrings(pvalue)
+				default:
+					s.Dependencies[pname], err = compile(stack, "dependencies/"+escape(pname))
+					if err != nil {
+						return err
+					}
+				}
+			}
+		}
+
+		if r.draft.version >= 6 {
+			if s.PropertyNames, err = loadSchema("propertyNames", nil); err != nil {
+				return err
+			}
+			if s.Contains, err = loadSchema("contains", nil); err != nil {
+				return err
+			}
+		}
+
+		if r.draft.version >= 7 {
+			if m["if"] != nil {
+				if s.If, err = loadSchema("if", stack); err != nil {
+					return err
+				}
+				if s.Then, err = loadSchema("then", stack); err != nil {
+					return err
+				}
+				if s.Else, err = loadSchema("else", stack); err != nil {
+					return err
+				}
+			}
+		}
+		if r.draft.version >= 2019 {
+			if deps, ok := m["dependentSchemas"]; ok {
+				deps := deps.(map[string]interface{})
+				s.DependentSchemas = make(map[string]*Schema, len(deps))
+				for pname := range deps {
+					s.DependentSchemas[pname], err = compile(stack, "dependentSchemas/"+escape(pname))
+					if err != nil {
+						return err
+					}
+				}
+			}
+		}
+
+		if r.draft.version >= 2020 {
+			if s.PrefixItems, err = loadSchemas("prefixItems", nil); err != nil {
+				return err
+			}
+			if s.Items2020, err = loadSchema("items", nil); err != nil {
+				return err
+			}
+		} else {
+			if items, ok := m["items"]; ok {
+				switch items.(type) {
+				case []interface{}:
+					s.Items, err = loadSchemas("items", nil)
+					if err != nil {
+						return err
+					}
+					if additionalItems, ok := m["additionalItems"]; ok {
+						switch additionalItems := additionalItems.(type) {
+						case bool:
+							s.AdditionalItems = additionalItems
+						case map[string]interface{}:
+							s.AdditionalItems, err = compile(nil, "additionalItems")
+							if err != nil {
+								return err
+							}
+						}
+					}
+				default:
+					s.Items, err = compile(nil, "items")
+					if err != nil {
+						return err
+					}
+				}
+			}
+		}
+
+	}
+
+	// unevaluatedXXX keywords were in "applicator" vocab in 2019, but moved to new vocab "unevaluated" in 2020
+	if (r.draft.version == 2019 && r.schema.meta.hasVocab("applicator")) || (r.draft.version >= 2020 && r.schema.meta.hasVocab("unevaluated")) {
+		if s.UnevaluatedProperties, err = loadSchema("unevaluatedProperties", nil); err != nil {
+			return err
+		}
+		if s.UnevaluatedItems, err = loadSchema("unevaluatedItems", nil); err != nil {
+			return err
+		}
+		if r.draft.version >= 2020 {
+			// any item in an array that passes validation of the contains schema is considered "evaluated"
+			s.ContainsEval = true
+		}
+	}
+
+	if format, ok := m["format"]; ok {
+		s.Format = format.(string)
+		if r.draft.version < 2019 || c.AssertFormat || r.schema.meta.hasVocab("format-assertion") {
+			if format, ok := c.Formats[s.Format]; ok {
+				s.format = format
+			} else {
+				s.format, _ = Formats[s.Format]
+			}
+		}
+	}
+
+	if c.ExtractAnnotations {
+		if title, ok := m["title"]; ok {
+			s.Title = title.(string)
+		}
+		if description, ok := m["description"]; ok {
+			s.Description = description.(string)
+		}
+		s.Default = m["default"]
+	}
+
+	if r.draft.version >= 6 {
+		if c, ok := m["const"]; ok {
+			s.Constant = []interface{}{c}
+		}
+	}
+
+	if r.draft.version >= 7 {
+		if encoding, ok := m["contentEncoding"]; ok {
+			s.ContentEncoding = encoding.(string)
+			if decoder, ok := c.Decoders[s.ContentEncoding]; ok {
+				s.decoder = decoder
+			} else {
+				s.decoder, _ = Decoders[s.ContentEncoding]
+			}
+		}
+		if mediaType, ok := m["contentMediaType"]; ok {
+			s.ContentMediaType = mediaType.(string)
+			if mediaType, ok := c.MediaTypes[s.ContentMediaType]; ok {
+				s.mediaType = mediaType
+			} else {
+				s.mediaType, _ = MediaTypes[s.ContentMediaType]
+			}
+			if s.ContentSchema, err = loadSchema("contentSchema", stack); err != nil {
+				return err
+			}
+		}
+		if c.ExtractAnnotations {
+			if comment, ok := m["$comment"]; ok {
+				s.Comment = comment.(string)
+			}
+			if readOnly, ok := m["readOnly"]; ok {
+				s.ReadOnly = readOnly.(bool)
+			}
+			if writeOnly, ok := m["writeOnly"]; ok {
+				s.WriteOnly = writeOnly.(bool)
+			}
+			if examples, ok := m["examples"]; ok {
+				s.Examples = examples.([]interface{})
+			}
+		}
+	}
+
+	if r.draft.version >= 2019 {
+		if !c.AssertContent {
+			s.decoder = nil
+			s.mediaType = nil
+			s.ContentSchema = nil
+		}
+		if c.ExtractAnnotations {
+			if deprecated, ok := m["deprecated"]; ok {
+				s.Deprecated = deprecated.(bool)
+			}
+		}
+	}
+
+	for name, ext := range c.extensions {
+		es, err := ext.compiler.Compile(CompilerContext{c, r, stack, res}, m)
+		if err != nil {
+			return err
+		}
+		if es != nil {
+			if s.Extensions == nil {
+				s.Extensions = make(map[string]ExtSchema)
+			}
+			s.Extensions[name] = es
+		}
+	}
+
+	return nil
+}
+
+func (c *Compiler) validateSchema(r *resource, v interface{}, vloc string) error {
+	validate := func(meta *Schema) error {
+		if meta == nil {
+			return nil
+		}
+		return meta.validateValue(v, vloc)
+	}
+
+	if err := validate(r.draft.meta); err != nil {
+		return err
+	}
+	for _, ext := range c.extensions {
+		if err := validate(ext.meta); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func toStrings(arr []interface{}) []string {
+	s := make([]string, len(arr))
+	for i, v := range arr {
+		s[i] = v.(string)
+	}
+	return s
+}
+
+// SchemaRef captures schema and the path referring to it.
+type schemaRef struct {
+	path    string  // relative-json-pointer to schema
+	schema  *Schema // target schema
+	discard bool    // true when scope left
+}
+
+func (sr schemaRef) String() string {
+	return fmt.Sprintf("(%s)%v", sr.path, sr.schema)
+}
+
+func checkLoop(stack []schemaRef, sref schemaRef) error {
+	for _, ref := range stack {
+		if ref.schema == sref.schema {
+			return infiniteLoopError(stack, sref)
+		}
+	}
+	return nil
+}
+
+func keywordLocation(stack []schemaRef, path string) string {
+	var loc string
+	for _, ref := range stack[1:] {
+		loc += "/" + ref.path
+	}
+	if path != "" {
+		loc = loc + "/" + path
+	}
+	return loc
+}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go
new file mode 100644
index 0000000000000000000000000000000000000000..7570b8b5a940968061b944af8d2c8d010b851879
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go
@@ -0,0 +1,29 @@
+package jsonschema
+
+import (
+	"encoding/base64"
+	"encoding/json"
+)
+
+// Decoders is a registry of functions, which know how to decode
+// string encoded in specific format.
+//
+// New Decoders can be registered by adding to this map. Key is encoding name,
+// value is function that knows how to decode string in that format.
+var Decoders = map[string]func(string) ([]byte, error){
+	"base64": base64.StdEncoding.DecodeString,
+}
+
+// MediaTypes is a registry of functions, which know how to validate
+// whether the bytes represent data of that mediaType.
+//
+// New mediaTypes can be registered by adding to this map. Key is mediaType name,
+// value is function that knows how to validate that mediaType.
+var MediaTypes = map[string]func([]byte) error{
+	"application/json": validateJSON,
+}
+
+func validateJSON(b []byte) error {
+	var v interface{}
+	return json.Unmarshal(b, &v)
+}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go
new file mode 100644
index 0000000000000000000000000000000000000000..a124262a51d9baa431e64e24c624677f43b547ee
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go
@@ -0,0 +1,49 @@
+/*
+Package jsonschema provides json-schema compilation and validation.
+
+Features:
+  - implements draft 2020-12, 2019-09, draft-7, draft-6, draft-4
+  - fully compliant with JSON-Schema-Test-Suite (excluding some optional tests)
+  - list of optional tests that are excluded can be found in schema_test.go(variable skipTests)
+  - validates schemas against meta-schema
+  - full support of remote references
+  - support of recursive references between schemas
+  - detects infinite loop in schemas
+  - thread safe validation
+  - rich, intuitive hierarchical error messages with json-pointers to exact location
+  - supports output formats flag, basic and detailed
+  - supports enabling format and content Assertions in draft2019-09 or above
+  - change Compiler.AssertFormat, Compiler.AssertContent to true
+  - compiled schema can be introspected. easier to develop tools like generating go structs given schema
+  - supports user-defined keywords via extensions
+  - implements following formats (supports user-defined)
+  - date-time, date, time, duration (supports leap-second)
+  - uuid, hostname, email
+  - ip-address, ipv4, ipv6
+  - uri, uriref, uri-template(limited validation)
+  - json-pointer, relative-json-pointer
+  - regex, format
+  - implements following contentEncoding (supports user-defined)
+  - base64
+  - implements following contentMediaType (supports user-defined)
+  - application/json
+  - can load from files/http/https/string/[]byte/io.Reader (supports user-defined)
+
+The schema is compiled against the version specified in the "$schema" property.
+If the "$schema" property is missing, the latest draft currently implemented
+by this library is used.
+
+You can force a specific draft to be used when "$schema" is missing, as follows:
+
+	compiler := jsonschema.NewCompiler()
+	compiler.Draft = jsonschema.Draft4
+
+This package supports loading json-schema from filePath and fileURL.
+
+To load json-schema from an HTTP URL, add the following import:
+
+	import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
+
+You can validate yaml documents. See https://play.golang.org/p/sJy1qY7dXgA
+*/
+package jsonschema
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go
new file mode 100644
index 0000000000000000000000000000000000000000..154fa5837dc67eb70968c476caf70d75857738f8
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go
@@ -0,0 +1,1454 @@
+package jsonschema
+
+import (
+	"fmt"
+	"strconv"
+	"strings"
+)
+
+// A Draft represents json-schema draft
+type Draft struct {
+	version      int
+	meta         *Schema
+	id           string   // property name used to represent schema id.
+	boolSchema   bool     // is boolean valid schema
+	vocab        []string // built-in vocab
+	defaultVocab []string // vocabs when $vocabulary is not used
+	subschemas   map[string]position
+}
+
+func (d *Draft) URL() string {
+	switch d.version {
+	case 2020:
+		return "https://json-schema.org/draft/2020-12/schema"
+	case 2019:
+		return "https://json-schema.org/draft/2019-09/schema"
+	case 7:
+		return "https://json-schema.org/draft-07/schema"
+	case 6:
+		return "https://json-schema.org/draft-06/schema"
+	case 4:
+		return "https://json-schema.org/draft-04/schema"
+	}
+	return ""
+}
+
+func (d *Draft) String() string {
+	return fmt.Sprintf("Draft%d", d.version)
+}
+
+func (d *Draft) loadMeta(url, schema string) {
+	c := NewCompiler()
+	c.AssertFormat = true
+	if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
+		panic(err)
+	}
+	d.meta = c.MustCompile(url)
+	d.meta.meta = d.meta
+}
+
+func (d *Draft) getID(sch interface{}) string {
+	m, ok := sch.(map[string]interface{})
+	if !ok {
+		return ""
+	}
+	if _, ok := m["$ref"]; ok && d.version <= 7 {
+		// $ref prevents a sibling id from changing the base uri
+		return ""
+	}
+	v, ok := m[d.id]
+	if !ok {
+		return ""
+	}
+	id, ok := v.(string)
+	if !ok {
+		return ""
+	}
+	return id
+}
+
+func (d *Draft) resolveID(base string, sch interface{}) (string, error) {
+	id, _ := split(d.getID(sch)) // strip fragment
+	if id == "" {
+		return "", nil
+	}
+	url, err := resolveURL(base, id)
+	url, _ = split(url) // strip fragment
+	return url, err
+}
+
+func (d *Draft) anchors(sch interface{}) []string {
+	m, ok := sch.(map[string]interface{})
+	if !ok {
+		return nil
+	}
+
+	var anchors []string
+
+	// before draft2019, anchor is specified in id
+	_, f := split(d.getID(m))
+	if f != "#" {
+		anchors = append(anchors, f[1:])
+	}
+
+	if v, ok := m["$anchor"]; ok && d.version >= 2019 {
+		anchors = append(anchors, v.(string))
+	}
+	if v, ok := m["$dynamicAnchor"]; ok && d.version >= 2020 {
+		anchors = append(anchors, v.(string))
+	}
+	return anchors
+}
+
+// listSubschemas collects subschemas in r into rr.
+func (d *Draft) listSubschemas(r *resource, base string, rr map[string]*resource) error {
+	add := func(loc string, sch interface{}) error {
+		url, err := d.resolveID(base, sch)
+		if err != nil {
+			return err
+		}
+		floc := r.floc + "/" + loc
+		sr := &resource{url: url, floc: floc, doc: sch}
+		rr[floc] = sr
+
+		base := base
+		if url != "" {
+			base = url
+		}
+		return d.listSubschemas(sr, base, rr)
+	}
+
+	sch, ok := r.doc.(map[string]interface{})
+	if !ok {
+		return nil
+	}
+	for kw, pos := range d.subschemas {
+		v, ok := sch[kw]
+		if !ok {
+			continue
+		}
+		if pos&self != 0 {
+			switch v := v.(type) {
+			case map[string]interface{}:
+				if err := add(kw, v); err != nil {
+					return err
+				}
+			case bool:
+				if d.boolSchema {
+					if err := add(kw, v); err != nil {
+						return err
+					}
+				}
+			}
+		}
+		if pos&item != 0 {
+			if v, ok := v.([]interface{}); ok {
+				for i, item := range v {
+					if err := add(kw+"/"+strconv.Itoa(i), item); err != nil {
+						return err
+					}
+				}
+			}
+		}
+		if pos&prop != 0 {
+			if v, ok := v.(map[string]interface{}); ok {
+				for pname, pval := range v {
+					if err := add(kw+"/"+escape(pname), pval); err != nil {
+						return err
+					}
+				}
+			}
+		}
+	}
+	return nil
+}
+
+// isVocab tells whether url is built-in vocab.
+func (d *Draft) isVocab(url string) bool {
+	for _, v := range d.vocab {
+		if url == v {
+			return true
+		}
+	}
+	return false
+}
+
+type position uint
+
+const (
+	self position = 1 << iota
+	prop
+	item
+)
+
+// supported drafts
+var (
+	Draft4    = &Draft{version: 4, id: "id", boolSchema: false}
+	Draft6    = &Draft{version: 6, id: "$id", boolSchema: true}
+	Draft7    = &Draft{version: 7, id: "$id", boolSchema: true}
+	Draft2019 = &Draft{
+		version:    2019,
+		id:         "$id",
+		boolSchema: true,
+		vocab: []string{
+			"https://json-schema.org/draft/2019-09/vocab/core",
+			"https://json-schema.org/draft/2019-09/vocab/applicator",
+			"https://json-schema.org/draft/2019-09/vocab/validation",
+			"https://json-schema.org/draft/2019-09/vocab/meta-data",
+			"https://json-schema.org/draft/2019-09/vocab/format",
+			"https://json-schema.org/draft/2019-09/vocab/content",
+		},
+		defaultVocab: []string{
+			"https://json-schema.org/draft/2019-09/vocab/core",
+			"https://json-schema.org/draft/2019-09/vocab/applicator",
+			"https://json-schema.org/draft/2019-09/vocab/validation",
+		},
+	}
+	Draft2020 = &Draft{
+		version:    2020,
+		id:         "$id",
+		boolSchema: true,
+		vocab: []string{
+			"https://json-schema.org/draft/2020-12/vocab/core",
+			"https://json-schema.org/draft/2020-12/vocab/applicator",
+			"https://json-schema.org/draft/2020-12/vocab/unevaluated",
+			"https://json-schema.org/draft/2020-12/vocab/validation",
+			"https://json-schema.org/draft/2020-12/vocab/meta-data",
+			"https://json-schema.org/draft/2020-12/vocab/format-annotation",
+			"https://json-schema.org/draft/2020-12/vocab/format-assertion",
+			"https://json-schema.org/draft/2020-12/vocab/content",
+		},
+		defaultVocab: []string{
+			"https://json-schema.org/draft/2020-12/vocab/core",
+			"https://json-schema.org/draft/2020-12/vocab/applicator",
+			"https://json-schema.org/draft/2020-12/vocab/unevaluated",
+			"https://json-schema.org/draft/2020-12/vocab/validation",
+		},
+	}
+
+	latest = Draft2020
+)
+
+func findDraft(url string) *Draft {
+	if strings.HasPrefix(url, "http://") {
+		url = "https://" + strings.TrimPrefix(url, "http://")
+	}
+	if strings.HasSuffix(url, "#") || strings.HasSuffix(url, "#/") {
+		url = url[:strings.IndexByte(url, '#')]
+	}
+	switch url {
+	case "https://json-schema.org/schema":
+		return latest
+	case "https://json-schema.org/draft/2020-12/schema":
+		return Draft2020
+	case "https://json-schema.org/draft/2019-09/schema":
+		return Draft2019
+	case "https://json-schema.org/draft-07/schema":
+		return Draft7
+	case "https://json-schema.org/draft-06/schema":
+		return Draft6
+	case "https://json-schema.org/draft-04/schema":
+		return Draft4
+	}
+	return nil
+}
+
+func init() {
+	subschemas := map[string]position{
+		// type agnostic
+		"definitions": prop,
+		"not":         self,
+		"allOf":       item,
+		"anyOf":       item,
+		"oneOf":       item,
+		// object
+		"properties":           prop,
+		"additionalProperties": self,
+		"patternProperties":    prop,
+		// array
+		"items":           self | item,
+		"additionalItems": self,
+		"dependencies":    prop,
+	}
+	Draft4.subschemas = clone(subschemas)
+
+	subschemas["propertyNames"] = self
+	subschemas["contains"] = self
+	Draft6.subschemas = clone(subschemas)
+
+	subschemas["if"] = self
+	subschemas["then"] = self
+	subschemas["else"] = self
+	Draft7.subschemas = clone(subschemas)
+
+	subschemas["$defs"] = prop
+	subschemas["dependentSchemas"] = prop
+	subschemas["unevaluatedProperties"] = self
+	subschemas["unevaluatedItems"] = self
+	subschemas["contentSchema"] = self
+	Draft2019.subschemas = clone(subschemas)
+
+	subschemas["prefixItems"] = item
+	Draft2020.subschemas = clone(subschemas)
+
+	Draft4.loadMeta("http://json-schema.org/draft-04/schema", `{
+		"$schema": "http://json-schema.org/draft-04/schema#",
+		"description": "Core schema meta-schema",
+		"definitions": {
+			"schemaArray": {
+				"type": "array",
+				"minItems": 1,
+				"items": { "$ref": "#" }
+			},
+			"positiveInteger": {
+				"type": "integer",
+				"minimum": 0
+			},
+			"positiveIntegerDefault0": {
+				"allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ]
+			},
+			"simpleTypes": {
+				"enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ]
+			},
+			"stringArray": {
+				"type": "array",
+				"items": { "type": "string" },
+				"minItems": 1,
+				"uniqueItems": true
+			}
+		},
+		"type": "object",
+		"properties": {
+			"id": {
+				"type": "string",
+				"format": "uriref"
+			},
+			"$schema": {
+				"type": "string",
+				"format": "uri"
+			},
+			"title": {
+				"type": "string"
+			},
+			"description": {
+				"type": "string"
+			},
+			"default": {},
+			"multipleOf": {
+				"type": "number",
+				"minimum": 0,
+				"exclusiveMinimum": true
+			},
+			"maximum": {
+				"type": "number"
+			},
+			"exclusiveMaximum": {
+				"type": "boolean",
+				"default": false
+			},
+			"minimum": {
+				"type": "number"
+			},
+			"exclusiveMinimum": {
+				"type": "boolean",
+				"default": false
+			},
+			"maxLength": { "$ref": "#/definitions/positiveInteger" },
+			"minLength": { "$ref": "#/definitions/positiveIntegerDefault0" },
+			"pattern": {
+				"type": "string",
+				"format": "regex"
+			},
+			"additionalItems": {
+				"anyOf": [
+					{ "type": "boolean" },
+					{ "$ref": "#" }
+				],
+				"default": {}
+			},
+			"items": {
+				"anyOf": [
+					{ "$ref": "#" },
+					{ "$ref": "#/definitions/schemaArray" }
+				],
+				"default": {}
+			},
+			"maxItems": { "$ref": "#/definitions/positiveInteger" },
+			"minItems": { "$ref": "#/definitions/positiveIntegerDefault0" },
+			"uniqueItems": {
+				"type": "boolean",
+				"default": false
+			},
+			"maxProperties": { "$ref": "#/definitions/positiveInteger" },
+			"minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" },
+			"required": { "$ref": "#/definitions/stringArray" },
+			"additionalProperties": {
+				"anyOf": [
+					{ "type": "boolean" },
+					{ "$ref": "#" }
+				],
+				"default": {}
+			},
+			"definitions": {
+				"type": "object",
+				"additionalProperties": { "$ref": "#" },
+				"default": {}
+			},
+			"properties": {
+				"type": "object",
+				"additionalProperties": { "$ref": "#" },
+				"default": {}
+			},
+			"patternProperties": {
+				"type": "object",
+				"regexProperties": true,
+				"additionalProperties": { "$ref": "#" },
+				"default": {}
+			},
+			"regexProperties": { "type": "boolean" },
+			"dependencies": {
+				"type": "object",
+				"additionalProperties": {
+					"anyOf": [
+						{ "$ref": "#" },
+						{ "$ref": "#/definitions/stringArray" }
+					]
+				}
+			},
+			"enum": {
+				"type": "array",
+				"minItems": 1,
+				"uniqueItems": true
+			},
+			"type": {
+				"anyOf": [
+					{ "$ref": "#/definitions/simpleTypes" },
+					{
+						"type": "array",
+						"items": { "$ref": "#/definitions/simpleTypes" },
+						"minItems": 1,
+						"uniqueItems": true
+					}
+				]
+			},
+			"allOf": { "$ref": "#/definitions/schemaArray" },
+			"anyOf": { "$ref": "#/definitions/schemaArray" },
+			"oneOf": { "$ref": "#/definitions/schemaArray" },
+			"not": { "$ref": "#" },
+			"format": { "type": "string" },
+			"$ref": { "type": "string" }
+		},
+		"dependencies": {
+			"exclusiveMaximum": [ "maximum" ],
+			"exclusiveMinimum": [ "minimum" ]
+		},
+		"default": {}
+	}`)
+	Draft6.loadMeta("http://json-schema.org/draft-06/schema", `{
+		"$schema": "http://json-schema.org/draft-06/schema#",
+		"$id": "http://json-schema.org/draft-06/schema#",
+		"title": "Core schema meta-schema",
+		"definitions": {
+			"schemaArray": {
+				"type": "array",
+				"minItems": 1,
+				"items": { "$ref": "#" }
+			},
+			"nonNegativeInteger": {
+				"type": "integer",
+				"minimum": 0
+			},
+			"nonNegativeIntegerDefault0": {
+				"allOf": [
+					{ "$ref": "#/definitions/nonNegativeInteger" },
+					{ "default": 0 }
+				]
+			},
+			"simpleTypes": {
+				"enum": [
+					"array",
+					"boolean",
+					"integer",
+					"null",
+					"number",
+					"object",
+					"string"
+				]
+			},
+			"stringArray": {
+				"type": "array",
+				"items": { "type": "string" },
+				"uniqueItems": true,
+				"default": []
+			}
+		},
+		"type": ["object", "boolean"],
+		"properties": {
+			"$id": {
+				"type": "string",
+				"format": "uri-reference"
+			},
+			"$schema": {
+				"type": "string",
+				"format": "uri"
+			},
+			"$ref": {
+				"type": "string",
+				"format": "uri-reference"
+			},
+			"title": {
+				"type": "string"
+			},
+			"description": {
+				"type": "string"
+			},
+			"default": {},
+			"multipleOf": {
+				"type": "number",
+				"exclusiveMinimum": 0
+			},
+			"maximum": {
+				"type": "number"
+			},
+			"exclusiveMaximum": {
+				"type": "number"
+			},
+			"minimum": {
+				"type": "number"
+			},
+			"exclusiveMinimum": {
+				"type": "number"
+			},
+			"maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
+			"minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+			"pattern": {
+				"type": "string",
+				"format": "regex"
+			},
+			"additionalItems": { "$ref": "#" },
+			"items": {
+				"anyOf": [
+					{ "$ref": "#" },
+					{ "$ref": "#/definitions/schemaArray" }
+				],
+				"default": {}
+			},
+			"maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
+			"minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+			"uniqueItems": {
+				"type": "boolean",
+				"default": false
+			},
+			"contains": { "$ref": "#" },
+			"maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
+			"minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+			"required": { "$ref": "#/definitions/stringArray" },
+			"additionalProperties": { "$ref": "#" },
+			"definitions": {
+				"type": "object",
+				"additionalProperties": { "$ref": "#" },
+				"default": {}
+			},
+			"properties": {
+				"type": "object",
+				"additionalProperties": { "$ref": "#" },
+				"default": {}
+			},
+			"patternProperties": {
+				"type": "object",
+				"regexProperties": true,
+				"additionalProperties": { "$ref": "#" },
+				"default": {}
+			},
+			"dependencies": {
+				"type": "object",
+				"additionalProperties": {
+					"anyOf": [
+						{ "$ref": "#" },
+						{ "$ref": "#/definitions/stringArray" }
+					]
+				}
+			},
+			"propertyNames": { "$ref": "#" },
+			"const": {},
+			"enum": {
+				"type": "array",
+				"minItems": 1,
+				"uniqueItems": true
+			},
+			"type": {
+				"anyOf": [
+					{ "$ref": "#/definitions/simpleTypes" },
+					{
+						"type": "array",
+						"items": { "$ref": "#/definitions/simpleTypes" },
+						"minItems": 1,
+						"uniqueItems": true
+					}
+				]
+			},
+			"format": { "type": "string" },
+			"allOf": { "$ref": "#/definitions/schemaArray" },
+			"anyOf": { "$ref": "#/definitions/schemaArray" },
+			"oneOf": { "$ref": "#/definitions/schemaArray" },
+			"not": { "$ref": "#" }
+		},
+		"default": {}
+	}`)
+	Draft7.loadMeta("http://json-schema.org/draft-07/schema", `{
+		"$schema": "http://json-schema.org/draft-07/schema#",
+		"$id": "http://json-schema.org/draft-07/schema#",
+		"title": "Core schema meta-schema",
+		"definitions": {
+			"schemaArray": {
+				"type": "array",
+				"minItems": 1,
+				"items": { "$ref": "#" }
+			},
+			"nonNegativeInteger": {
+				"type": "integer",
+				"minimum": 0
+			},
+			"nonNegativeIntegerDefault0": {
+				"allOf": [
+					{ "$ref": "#/definitions/nonNegativeInteger" },
+					{ "default": 0 }
+				]
+			},
+			"simpleTypes": {
+				"enum": [
+					"array",
+					"boolean",
+					"integer",
+					"null",
+					"number",
+					"object",
+					"string"
+				]
+			},
+			"stringArray": {
+				"type": "array",
+				"items": { "type": "string" },
+				"uniqueItems": true,
+				"default": []
+			}
+		},
+		"type": ["object", "boolean"],
+		"properties": {
+			"$id": {
+				"type": "string",
+				"format": "uri-reference"
+			},
+			"$schema": {
+				"type": "string",
+				"format": "uri"
+			},
+			"$ref": {
+				"type": "string",
+				"format": "uri-reference"
+			},
+			"$comment": {
+				"type": "string"
+			},
+			"title": {
+				"type": "string"
+			},
+			"description": {
+				"type": "string"
+			},
+			"default": true,
+			"readOnly": {
+				"type": "boolean",
+				"default": false
+			},
+			"writeOnly": {
+				"type": "boolean",
+				"default": false
+			},
+			"examples": {
+				"type": "array",
+				"items": true
+			},
+			"multipleOf": {
+				"type": "number",
+				"exclusiveMinimum": 0
+			},
+			"maximum": {
+				"type": "number"
+			},
+			"exclusiveMaximum": {
+				"type": "number"
+			},
+			"minimum": {
+				"type": "number"
+			},
+			"exclusiveMinimum": {
+				"type": "number"
+			},
+			"maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
+			"minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+			"pattern": {
+				"type": "string",
+				"format": "regex"
+			},
+			"additionalItems": { "$ref": "#" },
+			"items": {
+				"anyOf": [
+					{ "$ref": "#" },
+					{ "$ref": "#/definitions/schemaArray" }
+				],
+				"default": true
+			},
+			"maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
+			"minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+			"uniqueItems": {
+				"type": "boolean",
+				"default": false
+			},
+			"contains": { "$ref": "#" },
+			"maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
+			"minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+			"required": { "$ref": "#/definitions/stringArray" },
+			"additionalProperties": { "$ref": "#" },
+			"definitions": {
+				"type": "object",
+				"additionalProperties": { "$ref": "#" },
+				"default": {}
+			},
+			"properties": {
+				"type": "object",
+				"additionalProperties": { "$ref": "#" },
+				"default": {}
+			},
+			"patternProperties": {
+				"type": "object",
+				"additionalProperties": { "$ref": "#" },
+				"propertyNames": { "format": "regex" },
+				"default": {}
+			},
+			"dependencies": {
+				"type": "object",
+				"additionalProperties": {
+					"anyOf": [
+						{ "$ref": "#" },
+						{ "$ref": "#/definitions/stringArray" }
+					]
+				}
+			},
+			"propertyNames": { "$ref": "#" },
+			"const": true,
+			"enum": {
+				"type": "array",
+				"items": true,
+				"minItems": 1,
+				"uniqueItems": true
+			},
+			"type": {
+				"anyOf": [
+					{ "$ref": "#/definitions/simpleTypes" },
+					{
+						"type": "array",
+						"items": { "$ref": "#/definitions/simpleTypes" },
+						"minItems": 1,
+						"uniqueItems": true
+					}
+				]
+			},
+			"format": { "type": "string" },
+			"contentMediaType": { "type": "string" },
+			"contentEncoding": { "type": "string" },
+			"if": { "$ref": "#" },
+			"then": { "$ref": "#" },
+			"else": { "$ref": "#" },
+			"allOf": { "$ref": "#/definitions/schemaArray" },
+			"anyOf": { "$ref": "#/definitions/schemaArray" },
+			"oneOf": { "$ref": "#/definitions/schemaArray" },
+			"not": { "$ref": "#" }
+		},
+		"default": true
+	}`)
+	Draft2019.loadMeta("https://json-schema.org/draft/2019-09/schema", `{
+		"$schema": "https://json-schema.org/draft/2019-09/schema",
+		"$id": "https://json-schema.org/draft/2019-09/schema",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2019-09/vocab/core": true,
+			"https://json-schema.org/draft/2019-09/vocab/applicator": true,
+			"https://json-schema.org/draft/2019-09/vocab/validation": true,
+			"https://json-schema.org/draft/2019-09/vocab/meta-data": true,
+			"https://json-schema.org/draft/2019-09/vocab/format": false,
+			"https://json-schema.org/draft/2019-09/vocab/content": true
+		},
+		"$recursiveAnchor": true,
+
+		"title": "Core and Validation specifications meta-schema",
+		"allOf": [
+			{"$ref": "meta/core"},
+			{"$ref": "meta/applicator"},
+			{"$ref": "meta/validation"},
+			{"$ref": "meta/meta-data"},
+			{"$ref": "meta/format"},
+			{"$ref": "meta/content"}
+		],
+		"type": ["object", "boolean"],
+		"properties": {
+			"definitions": {
+				"$comment": "While no longer an official keyword as it is replaced by $defs, this keyword is retained in the meta-schema to prevent incompatible extensions as it remains in common use.",
+				"type": "object",
+				"additionalProperties": { "$recursiveRef": "#" },
+				"default": {}
+			},
+			"dependencies": {
+				"$comment": "\"dependencies\" is no longer a keyword, but schema authors should avoid redefining it to facilitate a smooth transition to \"dependentSchemas\" and \"dependentRequired\"",
+				"type": "object",
+				"additionalProperties": {
+					"anyOf": [
+						{ "$recursiveRef": "#" },
+						{ "$ref": "meta/validation#/$defs/stringArray" }
+					]
+				}
+			}
+		}
+	}`)
+	Draft2020.loadMeta("https://json-schema.org/draft/2020-12/schema", `{
+		"$schema": "https://json-schema.org/draft/2020-12/schema",
+		"$id": "https://json-schema.org/draft/2020-12/schema",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2020-12/vocab/core": true,
+			"https://json-schema.org/draft/2020-12/vocab/applicator": true,
+			"https://json-schema.org/draft/2020-12/vocab/unevaluated": true,
+			"https://json-schema.org/draft/2020-12/vocab/validation": true,
+			"https://json-schema.org/draft/2020-12/vocab/meta-data": true,
+			"https://json-schema.org/draft/2020-12/vocab/format-annotation": true,
+			"https://json-schema.org/draft/2020-12/vocab/content": true
+		},
+		"$dynamicAnchor": "meta",
+
+		"title": "Core and Validation specifications meta-schema",
+		"allOf": [
+			{"$ref": "meta/core"},
+			{"$ref": "meta/applicator"},
+			{"$ref": "meta/unevaluated"},
+			{"$ref": "meta/validation"},
+			{"$ref": "meta/meta-data"},
+			{"$ref": "meta/format-annotation"},
+			{"$ref": "meta/content"}
+		],
+		"type": ["object", "boolean"],
+		"$comment": "This meta-schema also defines keywords that have appeared in previous drafts in order to prevent incompatible extensions as they remain in common use.",
+		"properties": {
+			"definitions": {
+				"$comment": "\"definitions\" has been replaced by \"$defs\".",
+				"type": "object",
+				"additionalProperties": { "$dynamicRef": "#meta" },
+				"deprecated": true,
+				"default": {}
+			},
+			"dependencies": {
+				"$comment": "\"dependencies\" has been split and replaced by \"dependentSchemas\" and \"dependentRequired\" in order to serve their differing semantics.",
+				"type": "object",
+				"additionalProperties": {
+					"anyOf": [
+						{ "$dynamicRef": "#meta" },
+						{ "$ref": "meta/validation#/$defs/stringArray" }
+					]
+				},
+				"deprecated": true,
+				"default": {}
+			},
+			"$recursiveAnchor": {
+				"$comment": "\"$recursiveAnchor\" has been replaced by \"$dynamicAnchor\".",
+				"$ref": "meta/core#/$defs/anchorString",
+				"deprecated": true
+			},
+			"$recursiveRef": {
+				"$comment": "\"$recursiveRef\" has been replaced by \"$dynamicRef\".",
+				"$ref": "meta/core#/$defs/uriReferenceString",
+				"deprecated": true
+			}
+		}
+	}`)
+}
+
+var vocabSchemas = map[string]string{
+	"https://json-schema.org/draft/2019-09/meta/core": `{
+		"$schema": "https://json-schema.org/draft/2019-09/schema",
+		"$id": "https://json-schema.org/draft/2019-09/meta/core",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2019-09/vocab/core": true
+		},
+		"$recursiveAnchor": true,
+
+		"title": "Core vocabulary meta-schema",
+		"type": ["object", "boolean"],
+		"properties": {
+			"$id": {
+				"type": "string",
+				"format": "uri-reference",
+				"$comment": "Non-empty fragments not allowed.",
+				"pattern": "^[^#]*#?$"
+			},
+			"$schema": {
+				"type": "string",
+				"format": "uri"
+			},
+			"$anchor": {
+				"type": "string",
+				"pattern": "^[A-Za-z][-A-Za-z0-9.:_]*$"
+			},
+			"$ref": {
+				"type": "string",
+				"format": "uri-reference"
+			},
+			"$recursiveRef": {
+				"type": "string",
+				"format": "uri-reference"
+			},
+			"$recursiveAnchor": {
+				"type": "boolean",
+				"default": false
+			},
+			"$vocabulary": {
+				"type": "object",
+				"propertyNames": {
+					"type": "string",
+					"format": "uri"
+				},
+				"additionalProperties": {
+					"type": "boolean"
+				}
+			},
+			"$comment": {
+				"type": "string"
+			},
+			"$defs": {
+				"type": "object",
+				"additionalProperties": { "$recursiveRef": "#" },
+				"default": {}
+			}
+		}
+	}`,
+	"https://json-schema.org/draft/2019-09/meta/applicator": `{
+		"$schema": "https://json-schema.org/draft/2019-09/schema",
+		"$id": "https://json-schema.org/draft/2019-09/meta/applicator",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2019-09/vocab/applicator": true
+		},
+		"$recursiveAnchor": true,
+
+		"title": "Applicator vocabulary meta-schema",
+		"type": ["object", "boolean"],
+		"properties": {
+			"additionalItems": { "$recursiveRef": "#" },
+			"unevaluatedItems": { "$recursiveRef": "#" },
+			"items": {
+				"anyOf": [
+					{ "$recursiveRef": "#" },
+					{ "$ref": "#/$defs/schemaArray" }
+				]
+			},
+			"contains": { "$recursiveRef": "#" },
+			"additionalProperties": { "$recursiveRef": "#" },
+			"unevaluatedProperties": { "$recursiveRef": "#" },
+			"properties": {
+				"type": "object",
+				"additionalProperties": { "$recursiveRef": "#" },
+				"default": {}
+			},
+			"patternProperties": {
+				"type": "object",
+				"additionalProperties": { "$recursiveRef": "#" },
+				"propertyNames": { "format": "regex" },
+				"default": {}
+			},
+			"dependentSchemas": {
+				"type": "object",
+				"additionalProperties": {
+					"$recursiveRef": "#"
+				}
+			},
+			"propertyNames": { "$recursiveRef": "#" },
+			"if": { "$recursiveRef": "#" },
+			"then": { "$recursiveRef": "#" },
+			"else": { "$recursiveRef": "#" },
+			"allOf": { "$ref": "#/$defs/schemaArray" },
+			"anyOf": { "$ref": "#/$defs/schemaArray" },
+			"oneOf": { "$ref": "#/$defs/schemaArray" },
+			"not": { "$recursiveRef": "#" }
+		},
+		"$defs": {
+			"schemaArray": {
+				"type": "array",
+				"minItems": 1,
+				"items": { "$recursiveRef": "#" }
+			}
+		}
+	}`,
+	"https://json-schema.org/draft/2019-09/meta/validation": `{
+		"$schema": "https://json-schema.org/draft/2019-09/schema",
+		"$id": "https://json-schema.org/draft/2019-09/meta/validation",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2019-09/vocab/validation": true
+		},
+		"$recursiveAnchor": true,
+
+		"title": "Validation vocabulary meta-schema",
+		"type": ["object", "boolean"],
+		"properties": {
+			"multipleOf": {
+				"type": "number",
+				"exclusiveMinimum": 0
+			},
+			"maximum": {
+				"type": "number"
+			},
+			"exclusiveMaximum": {
+				"type": "number"
+			},
+			"minimum": {
+				"type": "number"
+			},
+			"exclusiveMinimum": {
+				"type": "number"
+			},
+			"maxLength": { "$ref": "#/$defs/nonNegativeInteger" },
+			"minLength": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+			"pattern": {
+				"type": "string",
+				"format": "regex"
+			},
+			"maxItems": { "$ref": "#/$defs/nonNegativeInteger" },
+			"minItems": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+			"uniqueItems": {
+				"type": "boolean",
+				"default": false
+			},
+			"maxContains": { "$ref": "#/$defs/nonNegativeInteger" },
+			"minContains": {
+				"$ref": "#/$defs/nonNegativeInteger",
+				"default": 1
+			},
+			"maxProperties": { "$ref": "#/$defs/nonNegativeInteger" },
+			"minProperties": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+			"required": { "$ref": "#/$defs/stringArray" },
+			"dependentRequired": {
+				"type": "object",
+				"additionalProperties": {
+					"$ref": "#/$defs/stringArray"
+				}
+			},
+			"const": true,
+			"enum": {
+				"type": "array",
+				"items": true
+			},
+			"type": {
+				"anyOf": [
+					{ "$ref": "#/$defs/simpleTypes" },
+					{
+						"type": "array",
+						"items": { "$ref": "#/$defs/simpleTypes" },
+						"minItems": 1,
+						"uniqueItems": true
+					}
+				]
+			}
+		},
+		"$defs": {
+			"nonNegativeInteger": {
+				"type": "integer",
+				"minimum": 0
+			},
+			"nonNegativeIntegerDefault0": {
+				"$ref": "#/$defs/nonNegativeInteger",
+				"default": 0
+			},
+			"simpleTypes": {
+				"enum": [
+					"array",
+					"boolean",
+					"integer",
+					"null",
+					"number",
+					"object",
+					"string"
+				]
+			},
+			"stringArray": {
+				"type": "array",
+				"items": { "type": "string" },
+				"uniqueItems": true,
+				"default": []
+			}
+		}
+	}`,
+	"https://json-schema.org/draft/2019-09/meta/meta-data": `{
+		"$schema": "https://json-schema.org/draft/2019-09/schema",
+		"$id": "https://json-schema.org/draft/2019-09/meta/meta-data",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2019-09/vocab/meta-data": true
+		},
+		"$recursiveAnchor": true,
+
+		"title": "Meta-data vocabulary meta-schema",
+
+		"type": ["object", "boolean"],
+		"properties": {
+			"title": {
+				"type": "string"
+			},
+			"description": {
+				"type": "string"
+			},
+			"default": true,
+			"deprecated": {
+				"type": "boolean",
+				"default": false
+			},
+			"readOnly": {
+				"type": "boolean",
+				"default": false
+			},
+			"writeOnly": {
+				"type": "boolean",
+				"default": false
+			},
+			"examples": {
+				"type": "array",
+				"items": true
+			}
+		}
+	}`,
+	"https://json-schema.org/draft/2019-09/meta/format": `{
+		"$schema": "https://json-schema.org/draft/2019-09/schema",
+		"$id": "https://json-schema.org/draft/2019-09/meta/format",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2019-09/vocab/format": true
+		},
+		"$recursiveAnchor": true,
+
+		"title": "Format vocabulary meta-schema",
+		"type": ["object", "boolean"],
+		"properties": {
+			"format": { "type": "string" }
+		}
+	}`,
+	"https://json-schema.org/draft/2019-09/meta/content": `{
+		"$schema": "https://json-schema.org/draft/2019-09/schema",
+		"$id": "https://json-schema.org/draft/2019-09/meta/content",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2019-09/vocab/content": true
+		},
+		"$recursiveAnchor": true,
+
+		"title": "Content vocabulary meta-schema",
+
+		"type": ["object", "boolean"],
+		"properties": {
+			"contentMediaType": { "type": "string" },
+			"contentEncoding": { "type": "string" },
+			"contentSchema": { "$recursiveRef": "#" }
+		}
+	}`,
+	"https://json-schema.org/draft/2020-12/meta/core": `{
+		"$schema": "https://json-schema.org/draft/2020-12/schema",
+		"$id": "https://json-schema.org/draft/2020-12/meta/core",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2020-12/vocab/core": true
+		},
+		"$dynamicAnchor": "meta",
+
+		"title": "Core vocabulary meta-schema",
+		"type": ["object", "boolean"],
+		"properties": {
+			"$id": {
+				"$ref": "#/$defs/uriReferenceString",
+				"$comment": "Non-empty fragments not allowed.",
+				"pattern": "^[^#]*#?$"
+			},
+			"$schema": { "$ref": "#/$defs/uriString" },
+			"$ref": { "$ref": "#/$defs/uriReferenceString" },
+			"$anchor": { "$ref": "#/$defs/anchorString" },
+			"$dynamicRef": { "$ref": "#/$defs/uriReferenceString" },
+			"$dynamicAnchor": { "$ref": "#/$defs/anchorString" },
+			"$vocabulary": {
+				"type": "object",
+				"propertyNames": { "$ref": "#/$defs/uriString" },
+				"additionalProperties": {
+					"type": "boolean"
+				}
+			},
+			"$comment": {
+				"type": "string"
+			},
+			"$defs": {
+				"type": "object",
+				"additionalProperties": { "$dynamicRef": "#meta" }
+			}
+		},
+		"$defs": {
+			"anchorString": {
+				"type": "string",
+				"pattern": "^[A-Za-z_][-A-Za-z0-9._]*$"
+			},
+			"uriString": {
+				"type": "string",
+				"format": "uri"
+			},
+			"uriReferenceString": {
+				"type": "string",
+				"format": "uri-reference"
+			}
+		}
+	}`,
+	"https://json-schema.org/draft/2020-12/meta/applicator": `{
+		"$schema": "https://json-schema.org/draft/2020-12/schema",
+		"$id": "https://json-schema.org/draft/2020-12/meta/applicator",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2020-12/vocab/applicator": true
+		},
+		"$dynamicAnchor": "meta",
+
+		"title": "Applicator vocabulary meta-schema",
+		"type": ["object", "boolean"],
+		"properties": {
+			"prefixItems": { "$ref": "#/$defs/schemaArray" },
+			"items": { "$dynamicRef": "#meta" },
+			"contains": { "$dynamicRef": "#meta" },
+			"additionalProperties": { "$dynamicRef": "#meta" },
+			"properties": {
+				"type": "object",
+				"additionalProperties": { "$dynamicRef": "#meta" },
+				"default": {}
+			},
+			"patternProperties": {
+				"type": "object",
+				"additionalProperties": { "$dynamicRef": "#meta" },
+				"propertyNames": { "format": "regex" },
+				"default": {}
+			},
+			"dependentSchemas": {
+				"type": "object",
+				"additionalProperties": { "$dynamicRef": "#meta" },
+				"default": {}
+			},
+			"propertyNames": { "$dynamicRef": "#meta" },
+			"if": { "$dynamicRef": "#meta" },
+			"then": { "$dynamicRef": "#meta" },
+			"else": { "$dynamicRef": "#meta" },
+			"allOf": { "$ref": "#/$defs/schemaArray" },
+			"anyOf": { "$ref": "#/$defs/schemaArray" },
+			"oneOf": { "$ref": "#/$defs/schemaArray" },
+			"not": { "$dynamicRef": "#meta" }
+		},
+		"$defs": {
+			"schemaArray": {
+				"type": "array",
+				"minItems": 1,
+				"items": { "$dynamicRef": "#meta" }
+			}
+		}
+	}`,
+	"https://json-schema.org/draft/2020-12/meta/unevaluated": `{
+		"$schema": "https://json-schema.org/draft/2020-12/schema",
+		"$id": "https://json-schema.org/draft/2020-12/meta/unevaluated",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2020-12/vocab/unevaluated": true
+		},
+		"$dynamicAnchor": "meta",
+
+		"title": "Unevaluated applicator vocabulary meta-schema",
+		"type": ["object", "boolean"],
+		"properties": {
+			"unevaluatedItems": { "$dynamicRef": "#meta" },
+			"unevaluatedProperties": { "$dynamicRef": "#meta" }
+		}
+	}`,
+	"https://json-schema.org/draft/2020-12/meta/validation": `{
+		"$schema": "https://json-schema.org/draft/2020-12/schema",
+		"$id": "https://json-schema.org/draft/2020-12/meta/validation",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2020-12/vocab/validation": true
+		},
+		"$dynamicAnchor": "meta",
+
+		"title": "Validation vocabulary meta-schema",
+		"type": ["object", "boolean"],
+		"properties": {
+			"type": {
+				"anyOf": [
+					{ "$ref": "#/$defs/simpleTypes" },
+					{
+						"type": "array",
+						"items": { "$ref": "#/$defs/simpleTypes" },
+						"minItems": 1,
+						"uniqueItems": true
+					}
+				]
+			},
+			"const": true,
+			"enum": {
+				"type": "array",
+				"items": true
+			},
+			"multipleOf": {
+				"type": "number",
+				"exclusiveMinimum": 0
+			},
+			"maximum": {
+				"type": "number"
+			},
+			"exclusiveMaximum": {
+				"type": "number"
+			},
+			"minimum": {
+				"type": "number"
+			},
+			"exclusiveMinimum": {
+				"type": "number"
+			},
+			"maxLength": { "$ref": "#/$defs/nonNegativeInteger" },
+			"minLength": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+			"pattern": {
+				"type": "string",
+				"format": "regex"
+			},
+			"maxItems": { "$ref": "#/$defs/nonNegativeInteger" },
+			"minItems": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+			"uniqueItems": {
+				"type": "boolean",
+				"default": false
+			},
+			"maxContains": { "$ref": "#/$defs/nonNegativeInteger" },
+			"minContains": {
+				"$ref": "#/$defs/nonNegativeInteger",
+				"default": 1
+			},
+			"maxProperties": { "$ref": "#/$defs/nonNegativeInteger" },
+			"minProperties": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+			"required": { "$ref": "#/$defs/stringArray" },
+			"dependentRequired": {
+				"type": "object",
+				"additionalProperties": {
+					"$ref": "#/$defs/stringArray"
+				}
+			}
+		},
+		"$defs": {
+			"nonNegativeInteger": {
+				"type": "integer",
+				"minimum": 0
+			},
+			"nonNegativeIntegerDefault0": {
+				"$ref": "#/$defs/nonNegativeInteger",
+				"default": 0
+			},
+			"simpleTypes": {
+				"enum": [
+					"array",
+					"boolean",
+					"integer",
+					"null",
+					"number",
+					"object",
+					"string"
+				]
+			},
+			"stringArray": {
+				"type": "array",
+				"items": { "type": "string" },
+				"uniqueItems": true,
+				"default": []
+			}
+		}
+	}`,
+	"https://json-schema.org/draft/2020-12/meta/meta-data": `{
+		"$schema": "https://json-schema.org/draft/2020-12/schema",
+		"$id": "https://json-schema.org/draft/2020-12/meta/meta-data",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2020-12/vocab/meta-data": true
+		},
+		"$dynamicAnchor": "meta",
+
+		"title": "Meta-data vocabulary meta-schema",
+
+		"type": ["object", "boolean"],
+		"properties": {
+			"title": {
+				"type": "string"
+			},
+			"description": {
+				"type": "string"
+			},
+			"default": true,
+			"deprecated": {
+				"type": "boolean",
+				"default": false
+			},
+			"readOnly": {
+				"type": "boolean",
+				"default": false
+			},
+			"writeOnly": {
+				"type": "boolean",
+				"default": false
+			},
+			"examples": {
+				"type": "array",
+				"items": true
+			}
+		}
+	}`,
+	"https://json-schema.org/draft/2020-12/meta/format-annotation": `{
+		"$schema": "https://json-schema.org/draft/2020-12/schema",
+		"$id": "https://json-schema.org/draft/2020-12/meta/format-annotation",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2020-12/vocab/format-annotation": true
+		},
+		"$dynamicAnchor": "meta",
+
+		"title": "Format vocabulary meta-schema for annotation results",
+		"type": ["object", "boolean"],
+		"properties": {
+			"format": { "type": "string" }
+		}
+	}`,
+	"https://json-schema.org/draft/2020-12/meta/format-assertion": `{
+		"$schema": "https://json-schema.org/draft/2020-12/schema",
+		"$id": "https://json-schema.org/draft/2020-12/meta/format-assertion",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2020-12/vocab/format-assertion": true
+		},
+		"$dynamicAnchor": "meta",
+
+		"title": "Format vocabulary meta-schema for assertion results",
+		"type": ["object", "boolean"],
+		"properties": {
+			"format": { "type": "string" }
+		}
+	}`,
+	"https://json-schema.org/draft/2020-12/meta/content": `{
+		"$schema": "https://json-schema.org/draft/2020-12/schema",
+		"$id": "https://json-schema.org/draft/2020-12/meta/content",
+		"$vocabulary": {
+			"https://json-schema.org/draft/2020-12/vocab/content": true
+		},
+		"$dynamicAnchor": "meta",
+
+		"title": "Content vocabulary meta-schema",
+
+		"type": ["object", "boolean"],
+		"properties": {
+			"contentEncoding": { "type": "string" },
+			"contentMediaType": { "type": "string" },
+			"contentSchema": { "$dynamicRef": "#meta" }
+		}
+	}`,
+}
+
+func clone(m map[string]position) map[string]position {
+	mm := make(map[string]position)
+	for k, v := range m {
+		mm[k] = v
+	}
+	return mm
+}
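
That closes the embedded meta-schema registry and its clone helper: the draft 2019-09 and 2020-12 vocabulary meta-schemas ship inside the package, so compiling against those drafts never touches the network. A minimal sketch of driving this from consuming code, assuming the library's public Compiler API (NewCompiler, Draft2020, AddResource, Compile); the resource name and schema are illustrative only.

```go
package main

import (
	"fmt"
	"strings"

	"github.com/santhosh-tekuri/jsonschema/v5"
)

func main() {
	c := jsonschema.NewCompiler()
	// Force draft 2020-12 for schemas that omit "$schema"; its meta-schemas
	// come from the embedded map above, not from the network.
	c.Draft = jsonschema.Draft2020

	schema := `{"type": "object", "required": ["name"]}`
	if err := c.AddResource("demo.json", strings.NewReader(schema)); err != nil {
		panic(err)
	}
	sch, err := c.Compile("demo.json")
	if err != nil {
		panic(err)
	}
	fmt.Println(sch.Validate(map[string]interface{}{"name": "x"})) // <nil>
	fmt.Println(sch.Validate(map[string]interface{}{}) != nil)     // true: "name" missing
}
```
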
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go
new file mode 100644
index 0000000000000000000000000000000000000000..deaded89f7a858b409346e558fb916bd20d0dde3
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go
@@ -0,0 +1,129 @@
+package jsonschema
+
+import (
+	"fmt"
+	"strings"
+)
+
+// InvalidJSONTypeError is the error type returned by ValidateInterface.
+// It indicates that the specified Go object is not a valid jsonType.
+type InvalidJSONTypeError string
+
+func (e InvalidJSONTypeError) Error() string {
+	return fmt.Sprintf("jsonschema: invalid jsonType: %s", string(e))
+}
+
+// InfiniteLoopError is returned by Compile/Validate.
+// It gives the url#keywordLocation that leads to the infinite loop.
+type InfiniteLoopError string
+
+func (e InfiniteLoopError) Error() string {
+	return "jsonschema: infinite loop " + string(e)
+}
+
+func infiniteLoopError(stack []schemaRef, sref schemaRef) InfiniteLoopError {
+	var path string
+	for _, ref := range stack {
+		if path == "" {
+			path += ref.schema.Location
+		} else {
+			path += "/" + ref.path
+		}
+	}
+	return InfiniteLoopError(path + "/" + sref.path)
+}
+
+// SchemaError is the error type returned by Compile.
+type SchemaError struct {
+	// SchemaURL is the url of the json-schema that failed to compile.
+	// This is helpful if your schema refers to external schemas.
+	SchemaURL string
+
+	// Err is the error that occurred during compilation.
+	// It could be ValidationError, because compilation validates
+	// given schema against the json meta-schema
+	Err error
+}
+
+func (se *SchemaError) Unwrap() error {
+	return se.Err
+}
+
+func (se *SchemaError) Error() string {
+	s := fmt.Sprintf("jsonschema %s compilation failed", se.SchemaURL)
+	if se.Err != nil {
+		return fmt.Sprintf("%s: %v", s, strings.TrimPrefix(se.Err.Error(), "jsonschema: "))
+	}
+	return s
+}
+
+func (se *SchemaError) GoString() string {
+	if _, ok := se.Err.(*ValidationError); ok {
+		return fmt.Sprintf("jsonschema %s compilation failed\n%#v", se.SchemaURL, se.Err)
+	}
+	return se.Error()
+}
+
+// ValidationError is the error type returned by Validate.
+type ValidationError struct {
+	KeywordLocation         string             // validation path of validating keyword or schema
+	AbsoluteKeywordLocation string             // absolute location of validating keyword or schema
+	InstanceLocation        string             // location of the json value within the instance being validated
+	Message                 string             // describes error
+	Causes                  []*ValidationError // nested validation errors
+}
+
+func (ve *ValidationError) add(causes ...error) error {
+	for _, cause := range causes {
+		ve.Causes = append(ve.Causes, cause.(*ValidationError))
+	}
+	return ve
+}
+
+func (ve *ValidationError) causes(err error) error {
+	if err := err.(*ValidationError); err.Message == "" {
+		ve.Causes = err.Causes
+	} else {
+		ve.add(err)
+	}
+	return ve
+}
+
+func (ve *ValidationError) Error() string {
+	leaf := ve
+	for len(leaf.Causes) > 0 {
+		leaf = leaf.Causes[0]
+	}
+	u, _ := split(ve.AbsoluteKeywordLocation)
+	return fmt.Sprintf("jsonschema: %s does not validate with %s: %s", quote(leaf.InstanceLocation), u+"#"+leaf.KeywordLocation, leaf.Message)
+}
+
+func (ve *ValidationError) GoString() string {
+	sloc := ve.AbsoluteKeywordLocation
+	sloc = sloc[strings.IndexByte(sloc, '#')+1:]
+	msg := fmt.Sprintf("[I#%s] [S#%s] %s", ve.InstanceLocation, sloc, ve.Message)
+	for _, c := range ve.Causes {
+		for _, line := range strings.Split(c.GoString(), "\n") {
+			msg += "\n  " + line
+		}
+	}
+	return msg
+}
+
+func joinPtr(ptr1, ptr2 string) string {
+	if len(ptr1) == 0 {
+		return ptr2
+	}
+	if len(ptr2) == 0 {
+		return ptr1
+	}
+	return ptr1 + "/" + ptr2
+}
+
+// quote returns single-quoted string
+func quote(s string) string {
+	s = fmt.Sprintf("%q", s)
+	s = strings.ReplaceAll(s, `\"`, `"`)
+	s = strings.ReplaceAll(s, `'`, `\'`)
+	return "'" + s[1:len(s)-1] + "'"
+}
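
Error() above surfaces only the first leaf cause, while GoString() renders the whole cause tree in the `[I#instance] [S#schema]` layout. A sketch of how calling code might inspect both, assuming MustCompileString from the library's public API; the schema and instance are illustrative.

```go
package main

import (
	"errors"
	"fmt"

	"github.com/santhosh-tekuri/jsonschema/v5"
)

func main() {
	sch := jsonschema.MustCompileString("point.json", `{
		"type": "object",
		"required": ["x", "y"]
	}`)

	err := sch.Validate(map[string]interface{}{"x": 1.0}) // "y" is missing

	var ve *jsonschema.ValidationError
	if errors.As(err, &ve) {
		fmt.Println(ve)         // one-line summary built from the leaf cause
		fmt.Printf("%#v\n", ve) // full cause hierarchy via GoString
		for _, c := range ve.Causes {
			fmt.Println("cause:", c.InstanceLocation, c.Message)
		}
	}
}
```
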
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go
new file mode 100644
index 0000000000000000000000000000000000000000..452ba118c505be66d3e1473991eaa51ff5d0b1a8
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go
@@ -0,0 +1,116 @@
+package jsonschema
+
+// ExtCompiler compiles custom keyword(s) into ExtSchema.
+type ExtCompiler interface {
+	// Compile compiles the custom keywords in schema m and returns its compiled representation.
+	// if the schema m does not contain the keywords defined by this extension,
+	// a nil compiled representation should be returned.
+	Compile(ctx CompilerContext, m map[string]interface{}) (ExtSchema, error)
+}
+
+// ExtSchema is schema representation of custom keyword(s)
+type ExtSchema interface {
+	// Validate validates the json value v with this ExtSchema.
+	// Returned error must be *ValidationError.
+	Validate(ctx ValidationContext, v interface{}) error
+}
+
+type extension struct {
+	meta     *Schema
+	compiler ExtCompiler
+}
+
+// RegisterExtension registers custom keyword(s) into this compiler.
+//
+// name is extension name, used only to avoid name collisions.
+// meta captures the metaschema for the new keywords.
+// This is used to validate the schema before calling ext.Compile.
+func (c *Compiler) RegisterExtension(name string, meta *Schema, ext ExtCompiler) {
+	c.extensions[name] = extension{meta, ext}
+}
+
+// CompilerContext ---
+
+// CompilerContext provides additional context required in compiling for extension.
+type CompilerContext struct {
+	c     *Compiler
+	r     *resource
+	stack []schemaRef
+	res   *resource
+}
+
+// Compile compiles given value at ptr into *Schema. This is useful in implementing
+// keywords like allOf/not/patternProperties.
+//
+// schPath is the relative-json-pointer to the schema to be compiled from parent schema.
+//
+// applicableOnSameInstance tells whether the current schema and the given schema
+// are applied to the same instance value. This is used to detect infinite loops in the schema.
+func (ctx CompilerContext) Compile(schPath string, applicableOnSameInstance bool) (*Schema, error) {
+	var stack []schemaRef
+	if applicableOnSameInstance {
+		stack = ctx.stack
+	}
+	return ctx.c.compileRef(ctx.r, stack, schPath, ctx.res, ctx.r.url+ctx.res.floc+"/"+schPath)
+}
+
+// CompileRef compiles the schema referenced by ref uri
+//
+// refPath is the relative-json-pointer to ref.
+//
+// applicableOnSameInstance tells whether the current schema and the given schema
+// are applied to the same instance value. This is used to detect infinite loops in the schema.
+func (ctx CompilerContext) CompileRef(ref string, refPath string, applicableOnSameInstance bool) (*Schema, error) {
+	var stack []schemaRef
+	if applicableOnSameInstance {
+		stack = ctx.stack
+	}
+	return ctx.c.compileRef(ctx.r, stack, refPath, ctx.res, ref)
+}
+
+// ValidationContext ---
+
+// ValidationContext provides additional context required in validating for extension.
+type ValidationContext struct {
+	result          validationResult
+	validate        func(sch *Schema, schPath string, v interface{}, vpath string) error
+	validateInplace func(sch *Schema, schPath string) error
+	validationError func(keywordPath string, format string, a ...interface{}) *ValidationError
+}
+
+// EvaluatedProp marks given property of object as evaluated.
+func (ctx ValidationContext) EvaluatedProp(prop string) {
+	delete(ctx.result.unevalProps, prop)
+}
+
+// EvaluatedItem marks given index of array as evaluated.
+func (ctx ValidationContext) EvaluatedItem(index int) {
+	delete(ctx.result.unevalItems, index)
+}
+
+// Validate validates schema s with value v. Extension must use this method instead of
+// *Schema.ValidateInterface method. This will be useful in implementing keywords like
+// allOf/oneOf.
+//
+// spath is relative-json-pointer to s
+// vpath is relative-json-pointer to v.
+func (ctx ValidationContext) Validate(s *Schema, spath string, v interface{}, vpath string) error {
+	if vpath == "" {
+		return ctx.validateInplace(s, spath)
+	}
+	return ctx.validate(s, spath, v, vpath)
+}
+
+// Error is used by extensions to construct validation errors.
+//
+// keywordPath is relative-json-pointer to keyword.
+func (ctx ValidationContext) Error(keywordPath string, format string, a ...interface{}) *ValidationError {
+	return ctx.validationError(keywordPath, format, a...)
+}
+
+// Group is used by extensions to group multiple errors as causes to parent error.
+// This is useful in implementing keywords like allOf where each schema specified
+// in allOf can result in a ValidationError.
+func (ValidationError) Group(parent *ValidationError, causes ...error) error {
+	return parent.add(causes...)
+}
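
RegisterExtension ties a meta-schema to an ExtCompiler, and the CompilerContext/ValidationContext helpers above are what an extension gets to work with. A hedged sketch of a hypothetical `powerOf` keyword wired through this interface; the keyword, meta-schema, and resource names are invented for illustration, and NewCompiler, MustCompileString, AddResource, and Compile are assumed from the library's public API.

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"

	"github.com/santhosh-tekuri/jsonschema/v5"
)

// powerOfCompiler compiles the hypothetical "powerOf" keyword.
type powerOfCompiler struct{}

func (powerOfCompiler) Compile(ctx jsonschema.CompilerContext, m map[string]interface{}) (jsonschema.ExtSchema, error) {
	pow, ok := m["powerOf"]
	if !ok {
		return nil, nil // keyword absent: no compiled representation, per ExtCompiler
	}
	num, ok := pow.(json.Number) // schema documents are decoded with UseNumber
	if !ok {
		return nil, fmt.Errorf("powerOf must be an integer")
	}
	n, err := num.Int64()
	if err != nil || n < 2 {
		return nil, fmt.Errorf("powerOf must be an integer >= 2")
	}
	return powerOf(n), nil
}

// powerOf is the compiled representation; it validates numeric instances.
type powerOf int64

func (p powerOf) Validate(ctx jsonschema.ValidationContext, v interface{}) error {
	num, ok := v.(json.Number)
	if !ok {
		return nil // applies to numbers only, like the built-in keywords
	}
	i, err := num.Int64()
	if err != nil {
		return nil
	}
	for i > 1 && i%int64(p) == 0 {
		i /= int64(p)
	}
	if i != 1 {
		return ctx.Error("powerOf", "%v is not a power of %d", v, p)
	}
	return nil
}

func main() {
	meta := jsonschema.MustCompileString("powerOf-meta.json", `{
		"properties": { "powerOf": { "type": "integer", "minimum": 2 } }
	}`)

	c := jsonschema.NewCompiler()
	c.RegisterExtension("powerOf", meta, powerOfCompiler{})
	if err := c.AddResource("pow.json", strings.NewReader(`{"powerOf": 10}`)); err != nil {
		panic(err)
	}
	sch, err := c.Compile("pow.json")
	if err != nil {
		panic(err)
	}
	fmt.Println(sch.Validate(json.Number("1000"))) // <nil>
	fmt.Println(sch.Validate(json.Number("999")))  // extension error
}
```
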
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go
new file mode 100644
index 0000000000000000000000000000000000000000..05686073f0bc44703d1d518a3acb43917d0f42cd
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go
@@ -0,0 +1,567 @@
+package jsonschema
+
+import (
+	"errors"
+	"net"
+	"net/mail"
+	"net/url"
+	"regexp"
+	"strconv"
+	"strings"
+	"time"
+)
+
+// Formats is a registry of functions, which know how to validate
+// a specific format.
+//
+// New Formats can be registered by adding to this map. Key is format name,
+// value is function that knows how to validate that format.
+var Formats = map[string]func(interface{}) bool{
+	"date-time":             isDateTime,
+	"date":                  isDate,
+	"time":                  isTime,
+	"duration":              isDuration,
+	"period":                isPeriod,
+	"hostname":              isHostname,
+	"email":                 isEmail,
+	"ip-address":            isIPV4,
+	"ipv4":                  isIPV4,
+	"ipv6":                  isIPV6,
+	"uri":                   isURI,
+	"iri":                   isURI,
+	"uri-reference":         isURIReference,
+	"uriref":                isURIReference,
+	"iri-reference":         isURIReference,
+	"uri-template":          isURITemplate,
+	"regex":                 isRegex,
+	"json-pointer":          isJSONPointer,
+	"relative-json-pointer": isRelativeJSONPointer,
+	"uuid":                  isUUID,
+}
+
+// isDateTime tells whether the given string is a valid date-time representation
+// as defined by RFC 3339, section 5.6.
+//
+// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
+func isDateTime(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	if len(s) < 20 { // yyyy-mm-ddThh:mm:ssZ
+		return false
+	}
+	if s[10] != 'T' && s[10] != 't' {
+		return false
+	}
+	return isDate(s[:10]) && isTime(s[11:])
+}
+
+// isDate tells whether given string is a valid full-date production
+// as defined by RFC 3339, section 5.6.
+//
+// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
+func isDate(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	_, err := time.Parse("2006-01-02", s)
+	return err == nil
+}
+
+// isTime tells whether given string is a valid full-time production
+// as defined by RFC 3339, section 5.6.
+//
+// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
+func isTime(v interface{}) bool {
+	str, ok := v.(string)
+	if !ok {
+		return true
+	}
+
+	// golang time package does not support leap seconds.
+	// so we are parsing it manually here.
+
+	// hh:mm:ss
+	// 01234567
+	if len(str) < 9 || str[2] != ':' || str[5] != ':' {
+		return false
+	}
+	isInRange := func(str string, min, max int) (int, bool) {
+		n, err := strconv.Atoi(str)
+		if err != nil {
+			return 0, false
+		}
+		if n < min || n > max {
+			return 0, false
+		}
+		return n, true
+	}
+	var h, m, s int
+	if h, ok = isInRange(str[0:2], 0, 23); !ok {
+		return false
+	}
+	if m, ok = isInRange(str[3:5], 0, 59); !ok {
+		return false
+	}
+	if s, ok = isInRange(str[6:8], 0, 60); !ok {
+		return false
+	}
+	str = str[8:]
+
+	// parse secfrac if present
+	if str[0] == '.' {
+		// dot must be followed by at least one digit
+		str = str[1:]
+		var numDigits int
+		for str != "" {
+			if str[0] < '0' || str[0] > '9' {
+				break
+			}
+			numDigits++
+			str = str[1:]
+		}
+		if numDigits == 0 {
+			return false
+		}
+	}
+
+	if len(str) == 0 {
+		return false
+	}
+
+	if str[0] == 'z' || str[0] == 'Z' {
+		if len(str) != 1 {
+			return false
+		}
+	} else {
+		// time-numoffset
+		// +hh:mm
+		// 012345
+		if len(str) != 6 || str[3] != ':' {
+			return false
+		}
+
+		var sign int
+		if str[0] == '+' {
+			sign = -1
+		} else if str[0] == '-' {
+			sign = +1
+		} else {
+			return false
+		}
+
+		var zh, zm int
+		if zh, ok = isInRange(str[1:3], 0, 23); !ok {
+			return false
+		}
+		if zm, ok = isInRange(str[4:6], 0, 59); !ok {
+			return false
+		}
+
+		// apply timezone offset
+		hm := (h*60 + m) + sign*(zh*60+zm)
+		if hm < 0 {
+			hm += 24 * 60
+		}
+		h, m = hm/60, hm%60
+	}
+
+	// check leapsecond
+	if s == 60 { // leap second
+		if h != 23 || m != 59 {
+			return false
+		}
+	}
+
+	return true
+}
+
+// isDuration tells whether given string is a valid duration format
+// from the ISO 8601 ABNF as given in Appendix A of RFC 3339.
+//
+// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A, for details
+func isDuration(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	if len(s) == 0 || s[0] != 'P' {
+		return false
+	}
+	s = s[1:]
+	parseUnits := func() (units string, ok bool) {
+		for len(s) > 0 && s[0] != 'T' {
+			digits := false
+			for {
+				if len(s) == 0 {
+					break
+				}
+				if s[0] < '0' || s[0] > '9' {
+					break
+				}
+				digits = true
+				s = s[1:]
+			}
+			if !digits || len(s) == 0 {
+				return units, false
+			}
+			units += s[:1]
+			s = s[1:]
+		}
+		return units, true
+	}
+	units, ok := parseUnits()
+	if !ok {
+		return false
+	}
+	if units == "W" {
+		return len(s) == 0 // P_W
+	}
+	if len(units) > 0 {
+		if strings.Index("YMD", units) == -1 {
+			return false
+		}
+		if len(s) == 0 {
+			return true // "P" dur-date
+		}
+	}
+	if len(s) == 0 || s[0] != 'T' {
+		return false
+	}
+	s = s[1:]
+	units, ok = parseUnits()
+	return ok && len(s) == 0 && len(units) > 0 && strings.Index("HMS", units) != -1
+}
+
+// isPeriod tells whether given string is a valid period format
+// from the ISO 8601 ABNF as given in Appendix A of RFC 3339.
+//
+// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A, for details
+func isPeriod(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	slash := strings.IndexByte(s, '/')
+	if slash == -1 {
+		return false
+	}
+	start, end := s[:slash], s[slash+1:]
+	if isDateTime(start) {
+		return isDateTime(end) || isDuration(end)
+	}
+	return isDuration(start) && isDateTime(end)
+}
+
+// isHostname tells whether given string is a valid representation
+// for an Internet host name, as defined by RFC 1034 section 3.1 and
+// RFC 1123 section 2.1.
+//
+// See https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names, for details.
+func isHostname(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	// entire hostname (including the delimiting dots but not a trailing dot) has a maximum of 253 ASCII characters
+	s = strings.TrimSuffix(s, ".")
+	if len(s) > 253 {
+		return false
+	}
+
+	// Hostnames are composed of series of labels concatenated with dots, as are all domain names
+	for _, label := range strings.Split(s, ".") {
+		// Each label must be from 1 to 63 characters long
+		if labelLen := len(label); labelLen < 1 || labelLen > 63 {
+			return false
+		}
+
+		// labels must not start with a hyphen
+		// RFC 1123 section 2.1: restriction on the first character
+		// is relaxed to allow either a letter or a digit
+		if first := s[0]; first == '-' {
+			return false
+		}
+
+		// must not end with a hyphen
+		if label[len(label)-1] == '-' {
+			return false
+		}
+
+		// labels may contain only the ASCII letters 'a' through 'z' (in a case-insensitive manner),
+		// the digits '0' through '9', and the hyphen ('-')
+		for _, c := range label {
+			if valid := (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '-'); !valid {
+				return false
+			}
+		}
+	}
+
+	return true
+}
+
+// isEmail tells whether given string is a valid Internet email address
+// as defined by RFC 5322, section 3.4.1.
+//
+// See https://en.wikipedia.org/wiki/Email_address, for details.
+func isEmail(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	// entire email address must be no more than 254 characters long
+	if len(s) > 254 {
+		return false
+	}
+
+	// email address is generally recognized as having two parts joined with an at-sign
+	at := strings.LastIndexByte(s, '@')
+	if at == -1 {
+		return false
+	}
+	local := s[0:at]
+	domain := s[at+1:]
+
+	// local part may be up to 64 characters long
+	if len(local) > 64 {
+		return false
+	}
+
+	// domain if enclosed in brackets, must match an IP address
+	if len(domain) >= 2 && domain[0] == '[' && domain[len(domain)-1] == ']' {
+		ip := domain[1 : len(domain)-1]
+		if strings.HasPrefix(ip, "IPv6:") {
+			return isIPV6(strings.TrimPrefix(ip, "IPv6:"))
+		}
+		return isIPV4(ip)
+	}
+
+	// domain must match the requirements for a hostname
+	if !isHostname(domain) {
+		return false
+	}
+
+	_, err := mail.ParseAddress(s)
+	return err == nil
+}
+
+// isIPV4 tells whether given string is a valid representation of an IPv4 address
+// according to the "dotted-quad" ABNF syntax as defined in RFC 2673, section 3.2.
+func isIPV4(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	groups := strings.Split(s, ".")
+	if len(groups) != 4 {
+		return false
+	}
+	for _, group := range groups {
+		n, err := strconv.Atoi(group)
+		if err != nil {
+			return false
+		}
+		if n < 0 || n > 255 {
+			return false
+		}
+		if n != 0 && group[0] == '0' {
+			return false // leading zeroes should be rejected, as they are treated as octals
+		}
+	}
+	return true
+}
+
+// isIPV6 tells whether given string is a valid representation of an IPv6 address
+// as defined in RFC 2373, section 2.2.
+func isIPV6(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	if !strings.Contains(s, ":") {
+		return false
+	}
+	return net.ParseIP(s) != nil
+}
+
+// isURI tells whether given string is valid URI, according to RFC 3986.
+func isURI(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	u, err := urlParse(s)
+	return err == nil && u.IsAbs()
+}
+
+func urlParse(s string) (*url.URL, error) {
+	u, err := url.Parse(s)
+	if err != nil {
+		return nil, err
+	}
+
+	// if hostname is ipv6, validate it
+	hostname := u.Hostname()
+	if strings.IndexByte(hostname, ':') != -1 {
+		if strings.IndexByte(u.Host, '[') == -1 || strings.IndexByte(u.Host, ']') == -1 {
+			return nil, errors.New("ipv6 address is not enclosed in brackets")
+		}
+		if !isIPV6(hostname) {
+			return nil, errors.New("invalid ipv6 address")
+		}
+	}
+	return u, nil
+}
+
+// isURIReference tells whether given string is a valid URI Reference
+// (either a URI or a relative-reference), according to RFC 3986.
+func isURIReference(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	_, err := urlParse(s)
+	return err == nil && !strings.Contains(s, `\`)
+}
+
+// isURITemplate tells whether given string is a valid URI Template
+// according to RFC6570.
+//
+// Current implementation does minimal validation.
+func isURITemplate(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	u, err := urlParse(s)
+	if err != nil {
+		return false
+	}
+	for _, item := range strings.Split(u.RawPath, "/") {
+		depth := 0
+		for _, ch := range item {
+			switch ch {
+			case '{':
+				depth++
+				if depth != 1 {
+					return false
+				}
+			case '}':
+				depth--
+				if depth != 0 {
+					return false
+				}
+			}
+		}
+		if depth != 0 {
+			return false
+		}
+	}
+	return true
+}
+
+// isRegex tells whether given string is a valid regular expression,
+// according to the ECMA 262 regular expression dialect.
+//
+// The implementation uses go-lang regexp package.
+func isRegex(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	_, err := regexp.Compile(s)
+	return err == nil
+}
+
+// isJSONPointer tells whether given string is a valid JSON Pointer.
+//
+// Note: It returns false for JSON Pointer URI fragments.
+func isJSONPointer(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	if s != "" && !strings.HasPrefix(s, "/") {
+		return false
+	}
+	for _, item := range strings.Split(s, "/") {
+		for i := 0; i < len(item); i++ {
+			if item[i] == '~' {
+				if i == len(item)-1 {
+					return false
+				}
+				switch item[i+1] {
+				case '0', '1':
+					// valid
+				default:
+					return false
+				}
+			}
+		}
+	}
+	return true
+}
+
+// isRelativeJSONPointer tells whether given string is a valid Relative JSON Pointer.
+//
+// see https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
+func isRelativeJSONPointer(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	if s == "" {
+		return false
+	}
+	if s[0] == '0' {
+		s = s[1:]
+	} else if s[0] >= '0' && s[0] <= '9' {
+		for s != "" && s[0] >= '0' && s[0] <= '9' {
+			s = s[1:]
+		}
+	} else {
+		return false
+	}
+	return s == "#" || isJSONPointer(s)
+}
+
+// isUUID tells whether given string is a valid uuid format
+// as specified in RFC4122.
+//
+// see https://datatracker.ietf.org/doc/html/rfc4122#page-4, for details
+func isUUID(v interface{}) bool {
+	s, ok := v.(string)
+	if !ok {
+		return true
+	}
+	parseHex := func(n int) bool {
+		for n > 0 {
+			if len(s) == 0 {
+				return false
+			}
+			hex := (s[0] >= '0' && s[0] <= '9') || (s[0] >= 'a' && s[0] <= 'f') || (s[0] >= 'A' && s[0] <= 'F')
+			if !hex {
+				return false
+			}
+			s = s[1:]
+			n--
+		}
+		return true
+	}
+	groups := []int{8, 4, 4, 4, 12}
+	for i, numDigits := range groups {
+		if !parseHex(numDigits) {
+			return false
+		}
+		if i == len(groups)-1 {
+			break
+		}
+		if len(s) == 0 || s[0] != '-' {
+			return false
+		}
+		s = s[1:]
+	}
+	return len(s) == 0
+}
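
Since Formats is an exported map, a new named format is registered by adding an entry before compiling. A sketch using a made-up `palindrome` format; AssertFormat is assumed to be the Compiler switch that turns format checks into assertions on drafts where `format` is annotation-only by default.

```go
package main

import (
	"fmt"
	"strings"

	"github.com/santhosh-tekuri/jsonschema/v5"
)

func main() {
	// Like the built-in validators above, return true for non-strings so the
	// format only constrains string instances.
	jsonschema.Formats["palindrome"] = func(v interface{}) bool {
		s, ok := v.(string)
		if !ok {
			return true
		}
		for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 {
			if s[i] != s[j] {
				return false
			}
		}
		return true
	}

	c := jsonschema.NewCompiler()
	c.AssertFormat = true // assumed Compiler switch; format is annotation-only in 2020-12 by default
	if err := c.AddResource("p.json", strings.NewReader(`{"type": "string", "format": "palindrome"}`)); err != nil {
		panic(err)
	}
	sch, err := c.Compile("p.json")
	if err != nil {
		panic(err)
	}
	fmt.Println(sch.Validate("level")) // <nil>
	fmt.Println(sch.Validate("hello")) // format error
}
```
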
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/httploader/httploader.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/httploader/httploader.go
new file mode 100644
index 0000000000000000000000000000000000000000..4198cfe37c3a7d09816e440873ca5e7193bdf81b
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/httploader/httploader.go
@@ -0,0 +1,38 @@
+// Package httploader implements loader.Loader for http/https url.
+//
+// The package is typically only imported for the side effect of
+// registering its Loaders.
+//
+// To use httploader, link this package into your program:
+//
+//	import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
+package httploader
+
+import (
+	"fmt"
+	"io"
+	"net/http"
+
+	"github.com/santhosh-tekuri/jsonschema/v5"
+)
+
+// Client is the default HTTP Client used to Get the resource.
+var Client = http.DefaultClient
+
+// Load loads resource from given http(s) url.
+func Load(url string) (io.ReadCloser, error) {
+	resp, err := Client.Get(url)
+	if err != nil {
+		return nil, err
+	}
+	if resp.StatusCode != http.StatusOK {
+		_ = resp.Body.Close()
+		return nil, fmt.Errorf("%s returned status code %d", url, resp.StatusCode)
+	}
+	return resp.Body, nil
+}
+
+func init() {
+	jsonschema.Loaders["http"] = Load
+	jsonschema.Loaders["https"] = Load
+}
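
The init function above registers the http and https schemes in the Loaders map (defined in loader.go, the next file), so importing httploader is enough to make remote `$ref`s resolvable. A sketch, assuming the package-level jsonschema.Compile helper; the schema URL is illustrative and the replacement Client simply bounds fetch time.

```go
package main

import (
	"fmt"
	"net/http"
	"time"

	"github.com/santhosh-tekuri/jsonschema/v5"
	"github.com/santhosh-tekuri/jsonschema/v5/httploader" // init() registers "http" and "https"
)

func main() {
	// Replace the default client so remote $ref fetches cannot hang forever.
	httploader.Client = &http.Client{Timeout: 15 * time.Second}

	// Illustrative URL; any http(s) document reachable by the client works.
	sch, err := jsonschema.Compile("https://example.com/schemas/config.json")
	if err != nil {
		panic(err)
	}
	fmt.Println(sch.Validate(map[string]interface{}{"retries": 3.0}))
}
```
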
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go
new file mode 100644
index 0000000000000000000000000000000000000000..c94195c335cc6f86f02e0ed2dfc7f5090cd62701
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go
@@ -0,0 +1,60 @@
+package jsonschema
+
+import (
+	"fmt"
+	"io"
+	"net/url"
+	"os"
+	"path/filepath"
+	"runtime"
+	"strings"
+)
+
+func loadFileURL(s string) (io.ReadCloser, error) {
+	u, err := url.Parse(s)
+	if err != nil {
+		return nil, err
+	}
+	f := u.Path
+	if runtime.GOOS == "windows" {
+		f = strings.TrimPrefix(f, "/")
+		f = filepath.FromSlash(f)
+	}
+	return os.Open(f)
+}
+
+// Loaders is a registry of functions, which know how to load
+// absolute urls of a specific scheme.
+//
+// New loaders can be registered by adding to this map. Key is the url scheme,
+// value is a function that knows how to load urls of that scheme.
+var Loaders = map[string]func(url string) (io.ReadCloser, error){
+	"file": loadFileURL,
+}
+
+// LoaderNotFoundError is the error type returned by Load function.
+// It tells that no Loader is registered for that URL Scheme.
+type LoaderNotFoundError string
+
+func (e LoaderNotFoundError) Error() string {
+	return fmt.Sprintf("jsonschema: no Loader found for %s", string(e))
+}
+
+// LoadURL loads document at given absolute URL. The default implementation
+// uses the Loaders registry to look up the url scheme and uses that loader.
+//
+// Users can change this variable if they would like to take complete
+// responsibility for loading the given URL. Used by Compiler if its LoadURL
+// field is nil.
+var LoadURL = func(s string) (io.ReadCloser, error) {
+	u, err := url.Parse(s)
+	if err != nil {
+		return nil, err
+	}
+	loader, ok := Loaders[u.Scheme]
+	if !ok {
+		return nil, LoaderNotFoundError(s)
+
+	}
+	return loader(s)
+}
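
Loaders is keyed by URL scheme, so schemes beyond `file` (and the http/https pair registered by httploader) can be added the same way. A sketch of a toy in-memory `mem` scheme; the scheme name, map contents, and URLs are invented for illustration.

```go
package main

import (
	"fmt"
	"io"
	"strings"

	"github.com/santhosh-tekuri/jsonschema/v5"
)

func main() {
	memDocs := map[string]string{
		"mem://schemas/user.json": `{"type": "object", "required": ["id"]}`,
	}
	jsonschema.Loaders["mem"] = func(url string) (io.ReadCloser, error) {
		doc, ok := memDocs[url]
		if !ok {
			// Unknown URLs fail loudly, in the spirit of LoaderNotFoundError.
			return nil, fmt.Errorf("mem loader: %s not found", url)
		}
		return io.NopCloser(strings.NewReader(doc)), nil
	}

	sch, err := jsonschema.Compile("mem://schemas/user.json")
	if err != nil {
		panic(err)
	}
	fmt.Println(sch.Validate(map[string]interface{}{"id": "u-1"})) // <nil>
	fmt.Println(sch.Validate(map[string]interface{}{}) != nil)     // true: "id" missing
}
```
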
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go
new file mode 100644
index 0000000000000000000000000000000000000000..d65ae2a9295907e429bdb80d20e616e60fcff517
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go
@@ -0,0 +1,77 @@
+package jsonschema
+
+// Flag is output format with simple boolean property valid.
+type Flag struct {
+	Valid bool `json:"valid"`
+}
+
+// FlagOutput returns output in flag format
+func (ve *ValidationError) FlagOutput() Flag {
+	return Flag{}
+}
+
+// Basic ---
+
+// Basic is output format with flat list of output units.
+type Basic struct {
+	Valid  bool         `json:"valid"`
+	Errors []BasicError `json:"errors"`
+}
+
+// BasicError is output unit in basic format.
+type BasicError struct {
+	KeywordLocation         string `json:"keywordLocation"`
+	AbsoluteKeywordLocation string `json:"absoluteKeywordLocation"`
+	InstanceLocation        string `json:"instanceLocation"`
+	Error                   string `json:"error"`
+}
+
+// BasicOutput returns output in basic format
+func (ve *ValidationError) BasicOutput() Basic {
+	var errors []BasicError
+	var flatten func(*ValidationError)
+	flatten = func(ve *ValidationError) {
+		errors = append(errors, BasicError{
+			KeywordLocation:         ve.KeywordLocation,
+			AbsoluteKeywordLocation: ve.AbsoluteKeywordLocation,
+			InstanceLocation:        ve.InstanceLocation,
+			Error:                   ve.Message,
+		})
+		for _, cause := range ve.Causes {
+			flatten(cause)
+		}
+	}
+	flatten(ve)
+	return Basic{Errors: errors}
+}
+
+// Detailed ---
+
+// Detailed is output format based on structure of schema.
+type Detailed struct {
+	Valid                   bool       `json:"valid"`
+	KeywordLocation         string     `json:"keywordLocation"`
+	AbsoluteKeywordLocation string     `json:"absoluteKeywordLocation"`
+	InstanceLocation        string     `json:"instanceLocation"`
+	Error                   string     `json:"error,omitempty"`
+	Errors                  []Detailed `json:"errors,omitempty"`
+}
+
+// DetailedOutput returns output in detailed format
+func (ve *ValidationError) DetailedOutput() Detailed {
+	var errors []Detailed
+	for _, cause := range ve.Causes {
+		errors = append(errors, cause.DetailedOutput())
+	}
+	var message = ve.Message
+	if len(ve.Causes) > 0 {
+		message = ""
+	}
+	return Detailed{
+		KeywordLocation:         ve.KeywordLocation,
+		AbsoluteKeywordLocation: ve.AbsoluteKeywordLocation,
+		InstanceLocation:        ve.InstanceLocation,
+		Error:                   message,
+		Errors:                  errors,
+	}
+}
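
The Flag, Basic, and Detailed types above map a ValidationError onto the standard JSON Schema output formats. A sketch of producing all three from a failed validation, assuming MustCompileString from the library's public API; the schema and instance are illustrative.

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"

	"github.com/santhosh-tekuri/jsonschema/v5"
)

func main() {
	sch := jsonschema.MustCompileString("person.json", `{
		"type": "object",
		"required": ["name"]
	}`)

	err := sch.Validate(map[string]interface{}{}) // "name" is missing
	ve, ok := err.(*jsonschema.ValidationError)
	if !ok {
		fmt.Println("valid")
		return
	}

	fmt.Printf("flag: %+v\n", ve.FlagOutput()) // {Valid:false}

	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", "  ")
	_ = enc.Encode(ve.BasicOutput())    // flat list of output units
	_ = enc.Encode(ve.DetailedOutput()) // hierarchy mirroring the schema structure
}
```
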
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go
new file mode 100644
index 0000000000000000000000000000000000000000..18349daac73082860204fde7f6a24d5c391d9bd6
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go
@@ -0,0 +1,280 @@
+package jsonschema
+
+import (
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/url"
+	"path/filepath"
+	"runtime"
+	"strconv"
+	"strings"
+)
+
+type resource struct {
+	url          string // base url of resource. can be empty
+	floc         string // fragment with json-pointer from root resource
+	doc          interface{}
+	draft        *Draft
+	subresources map[string]*resource // key is floc. only applicable for root resource
+	schema       *Schema
+}
+
+func (r *resource) String() string {
+	return r.url + r.floc
+}
+
+func newResource(url string, r io.Reader) (*resource, error) {
+	if strings.IndexByte(url, '#') != -1 {
+		panic(fmt.Sprintf("BUG: newResource(%q)", url))
+	}
+	doc, err := unmarshal(r)
+	if err != nil {
+		return nil, fmt.Errorf("jsonschema: invalid json %s: %v", url, err)
+	}
+	url, err = toAbs(url)
+	if err != nil {
+		return nil, err
+	}
+	return &resource{
+		url:  url,
+		floc: "#",
+		doc:  doc,
+	}, nil
+}
+
+// fillSubschemas fills subschemas in res into r.subresources
+func (r *resource) fillSubschemas(c *Compiler, res *resource) error {
+	if err := c.validateSchema(r, res.doc, res.floc[1:]); err != nil {
+		return err
+	}
+
+	if r.subresources == nil {
+		r.subresources = make(map[string]*resource)
+	}
+	if err := r.draft.listSubschemas(res, r.baseURL(res.floc), r.subresources); err != nil {
+		return err
+	}
+
+	// ensure subresource.url uniqueness
+	url2floc := make(map[string]string)
+	for _, sr := range r.subresources {
+		if sr.url != "" {
+			if floc, ok := url2floc[sr.url]; ok {
+				return fmt.Errorf("jsonschema: %q and %q in %s have same canonical-uri", floc[1:], sr.floc[1:], r.url)
+			}
+			url2floc[sr.url] = sr.floc
+		}
+	}
+
+	return nil
+}
+
+// listResources lists all subresources in res
+func (r *resource) listResources(res *resource) []*resource {
+	var result []*resource
+	prefix := res.floc + "/"
+	for _, sr := range r.subresources {
+		if strings.HasPrefix(sr.floc, prefix) {
+			result = append(result, sr)
+		}
+	}
+	return result
+}
+
+func (r *resource) findResource(url string) *resource {
+	if r.url == url {
+		return r
+	}
+	for _, res := range r.subresources {
+		if res.url == url {
+			return res
+		}
+	}
+	return nil
+}
+
+// resolve fragment f with sr as base
+func (r *resource) resolveFragment(c *Compiler, sr *resource, f string) (*resource, error) {
+	if f == "#" || f == "#/" {
+		return sr, nil
+	}
+
+	// resolve by anchor
+	if !strings.HasPrefix(f, "#/") {
+		// check in given resource
+		for _, anchor := range r.draft.anchors(sr.doc) {
+			if anchor == f[1:] {
+				return sr, nil
+			}
+		}
+
+		// check in subresources that has same base url
+		prefix := sr.floc + "/"
+		for _, res := range r.subresources {
+			if strings.HasPrefix(res.floc, prefix) && r.baseURL(res.floc) == sr.url {
+				for _, anchor := range r.draft.anchors(res.doc) {
+					if anchor == f[1:] {
+						return res, nil
+					}
+				}
+			}
+		}
+		return nil, nil
+	}
+
+	// resolve by ptr
+	floc := sr.floc + f[1:]
+	if res, ok := r.subresources[floc]; ok {
+		return res, nil
+	}
+
+	// non-standard location
+	doc := r.doc
+	for _, item := range strings.Split(floc[2:], "/") {
+		item = strings.Replace(item, "~1", "/", -1)
+		item = strings.Replace(item, "~0", "~", -1)
+		item, err := url.PathUnescape(item)
+		if err != nil {
+			return nil, err
+		}
+		switch d := doc.(type) {
+		case map[string]interface{}:
+			if _, ok := d[item]; !ok {
+				return nil, nil
+			}
+			doc = d[item]
+		case []interface{}:
+			index, err := strconv.Atoi(item)
+			if err != nil {
+				return nil, err
+			}
+			if index < 0 || index >= len(d) {
+				return nil, nil
+			}
+			doc = d[index]
+		default:
+			return nil, nil
+		}
+	}
+
+	id, err := r.draft.resolveID(r.baseURL(floc), doc)
+	if err != nil {
+		return nil, err
+	}
+	res := &resource{url: id, floc: floc, doc: doc}
+	r.subresources[floc] = res
+	if err := r.fillSubschemas(c, res); err != nil {
+		return nil, err
+	}
+	return res, nil
+}
+
+func (r *resource) baseURL(floc string) string {
+	for {
+		if sr, ok := r.subresources[floc]; ok {
+			if sr.url != "" {
+				return sr.url
+			}
+		}
+		slash := strings.LastIndexByte(floc, '/')
+		if slash == -1 {
+			break
+		}
+		floc = floc[:slash]
+	}
+	return r.url
+}
+
+// url helpers ---
+
+func toAbs(s string) (string, error) {
+	// if windows absolute file path, convert to file url
+	// because: net/url parses the drive letter as the scheme
+	if runtime.GOOS == "windows" && len(s) >= 3 && s[1:3] == `:\` {
+		s = "file:///" + filepath.ToSlash(s)
+	}
+
+	u, err := url.Parse(s)
+	if err != nil {
+		return "", err
+	}
+	if u.IsAbs() {
+		return s, nil
+	}
+
+	// s is filepath
+	if s, err = filepath.Abs(s); err != nil {
+		return "", err
+	}
+	if runtime.GOOS == "windows" {
+		s = "file:///" + filepath.ToSlash(s)
+	} else {
+		s = "file://" + s
+	}
+	u, err = url.Parse(s) // to fix spaces in filepath
+	return u.String(), err
+}
+
+func resolveURL(base, ref string) (string, error) {
+	if ref == "" {
+		return base, nil
+	}
+	if strings.HasPrefix(ref, "urn:") {
+		return ref, nil
+	}
+
+	refURL, err := url.Parse(ref)
+	if err != nil {
+		return "", err
+	}
+	if refURL.IsAbs() {
+		return ref, nil
+	}
+
+	if strings.HasPrefix(base, "urn:") {
+		base, _ = split(base)
+		return base + ref, nil
+	}
+
+	baseURL, err := url.Parse(base)
+	if err != nil {
+		return "", err
+	}
+	return baseURL.ResolveReference(refURL).String(), nil
+}
+
+func split(uri string) (string, string) {
+	hash := strings.IndexByte(uri, '#')
+	if hash == -1 {
+		return uri, "#"
+	}
+	f := uri[hash:]
+	if f == "#/" {
+		f = "#"
+	}
+	return uri[0:hash], f
+}
+
+func (s *Schema) url() string {
+	u, _ := split(s.Location)
+	return u
+}
+
+func (s *Schema) loc() string {
+	_, f := split(s.Location)
+	return f[1:]
+}
+
+func unmarshal(r io.Reader) (interface{}, error) {
+	decoder := json.NewDecoder(r)
+	decoder.UseNumber()
+	var doc interface{}
+	if err := decoder.Decode(&doc); err != nil {
+		return nil, err
+	}
+	if t, _ := decoder.Token(); t != nil {
+		return nil, fmt.Errorf("invalid character %v after top-level value", t)
+	}
+	return doc, nil
+}
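
resolveURL and baseURL above are what let a relative `$ref` resolve against the enclosing resource's URL. A sketch of that behaviour from the caller's side, assuming NewCompiler, AddResource, and Compile; the example.com URLs and schemas are illustrative, and nothing is fetched because both resources are pre-registered with the compiler.

```go
package main

import (
	"fmt"
	"strings"

	"github.com/santhosh-tekuri/jsonschema/v5"
)

func main() {
	c := jsonschema.NewCompiler()
	add := func(url, doc string) {
		if err := c.AddResource(url, strings.NewReader(doc)); err != nil {
			panic(err)
		}
	}
	add("https://example.com/address.json", `{
		"type": "object",
		"required": ["city"]
	}`)
	// The relative "$ref": "address.json" below resolves against
	// person.json's base URL, i.e. https://example.com/address.json.
	add("https://example.com/person.json", `{
		"type": "object",
		"properties": { "address": { "$ref": "address.json" } }
	}`)

	sch, err := c.Compile("https://example.com/person.json")
	if err != nil {
		panic(err)
	}
	inst := map[string]interface{}{
		"address": map[string]interface{}{"street": "main"},
	}
	fmt.Println(sch.Validate(inst) != nil) // true: nested address is missing "city"
}
```
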
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go
new file mode 100644
index 0000000000000000000000000000000000000000..688f0a6fee941bb078538bb11946af6b1247d6eb
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go
@@ -0,0 +1,900 @@
+package jsonschema
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"hash/maphash"
+	"math/big"
+	"net/url"
+	"regexp"
+	"sort"
+	"strconv"
+	"strings"
+	"unicode/utf8"
+)
+
+// A Schema represents a compiled version of a json-schema.
+type Schema struct {
+	Location string // absolute location
+
+	Draft          *Draft // draft used by schema.
+	meta           *Schema
+	vocab          []string
+	dynamicAnchors []*Schema
+
+	// type agnostic validations
+	Format           string
+	format           func(interface{}) bool
+	Always           *bool // always pass/fail. used when booleans are used as schemas in draft-07.
+	Ref              *Schema
+	RecursiveAnchor  bool
+	RecursiveRef     *Schema
+	DynamicAnchor    string
+	DynamicRef       *Schema
+	dynamicRefAnchor string
+	Types            []string      // allowed types.
+	Constant         []interface{} // first element in slice is constant value. note: slice is used to capture nil constant.
+	Enum             []interface{} // allowed values.
+	enumError        string        // error message for enum fail. captured here to avoid constructing error message every time.
+	Not              *Schema
+	AllOf            []*Schema
+	AnyOf            []*Schema
+	OneOf            []*Schema
+	If               *Schema
+	Then             *Schema // nil, when If is nil.
+	Else             *Schema // nil, when If is nil.
+
+	// object validations
+	MinProperties         int      // -1 if not specified.
+	MaxProperties         int      // -1 if not specified.
+	Required              []string // list of required properties.
+	Properties            map[string]*Schema
+	PropertyNames         *Schema
+	RegexProperties       bool // property names must be valid regex. used only in draft4 as workaround in metaschema.
+	PatternProperties     map[*regexp.Regexp]*Schema
+	AdditionalProperties  interface{}            // nil or bool or *Schema.
+	Dependencies          map[string]interface{} // map value is *Schema or []string.
+	DependentRequired     map[string][]string
+	DependentSchemas      map[string]*Schema
+	UnevaluatedProperties *Schema
+
+	// array validations
+	MinItems         int // -1 if not specified.
+	MaxItems         int // -1 if not specified.
+	UniqueItems      bool
+	Items            interface{} // nil or *Schema or []*Schema
+	AdditionalItems  interface{} // nil or bool or *Schema.
+	PrefixItems      []*Schema
+	Items2020        *Schema // items keyword reintroduced in draft 2020-12
+	Contains         *Schema
+	ContainsEval     bool // whether any item in an array that passes validation of the contains schema is considered "evaluated"
+	MinContains      int  // 1 if not specified
+	MaxContains      int  // -1 if not specified
+	UnevaluatedItems *Schema
+
+	// string validations
+	MinLength        int // -1 if not specified.
+	MaxLength        int // -1 if not specified.
+	Pattern          *regexp.Regexp
+	ContentEncoding  string
+	decoder          func(string) ([]byte, error)
+	ContentMediaType string
+	mediaType        func([]byte) error
+	ContentSchema    *Schema
+
+	// number validators
+	Minimum          *big.Rat
+	ExclusiveMinimum *big.Rat
+	Maximum          *big.Rat
+	ExclusiveMaximum *big.Rat
+	MultipleOf       *big.Rat
+
+	// annotations. captured only when Compiler.ExtractAnnotations is true.
+	Title       string
+	Description string
+	Default     interface{}
+	Comment     string
+	ReadOnly    bool
+	WriteOnly   bool
+	Examples    []interface{}
+	Deprecated  bool
+
+	// user defined extensions
+	Extensions map[string]ExtSchema
+}
+
+func (s *Schema) String() string {
+	return s.Location
+}
+
+func newSchema(url, floc string, draft *Draft, doc interface{}) *Schema {
+	// fill with default values
+	s := &Schema{
+		Location:      url + floc,
+		Draft:         draft,
+		MinProperties: -1,
+		MaxProperties: -1,
+		MinItems:      -1,
+		MaxItems:      -1,
+		MinContains:   1,
+		MaxContains:   -1,
+		MinLength:     -1,
+		MaxLength:     -1,
+	}
+
+	if doc, ok := doc.(map[string]interface{}); ok {
+		if ra, ok := doc["$recursiveAnchor"]; ok {
+			if ra, ok := ra.(bool); ok {
+				s.RecursiveAnchor = ra
+			}
+		}
+		if da, ok := doc["$dynamicAnchor"]; ok {
+			if da, ok := da.(string); ok {
+				s.DynamicAnchor = da
+			}
+		}
+	}
+	return s
+}
+
+func (s *Schema) hasVocab(name string) bool {
+	if s == nil { // during bootstrap
+		return true
+	}
+	if name == "core" {
+		return true
+	}
+	for _, url := range s.vocab {
+		if url == "https://json-schema.org/draft/2019-09/vocab/"+name {
+			return true
+		}
+		if url == "https://json-schema.org/draft/2020-12/vocab/"+name {
+			return true
+		}
+	}
+	return false
+}
+
+// Validate validates given doc, against the json-schema s.
+//
+// v must be the raw json value. For number precision,
+// unmarshal with json.UseNumber().
+//
+// returns *ValidationError if v does not conform to schema s.
+// returns InfiniteLoopError if it detects loop during validation.
+// returns InvalidJSONTypeError if it detects any non json value in v.
+func (s *Schema) Validate(v interface{}) (err error) {
+	return s.validateValue(v, "")
+}
+
+func (s *Schema) validateValue(v interface{}, vloc string) (err error) {
+	defer func() {
+		if r := recover(); r != nil {
+			switch r := r.(type) {
+			case InfiniteLoopError, InvalidJSONTypeError:
+				err = r.(error)
+			default:
+				panic(r)
+			}
+		}
+	}()
+	if _, err := s.validate(nil, 0, "", v, vloc); err != nil {
+		ve := ValidationError{
+			KeywordLocation:         "",
+			AbsoluteKeywordLocation: s.Location,
+			InstanceLocation:        vloc,
+			Message:                 fmt.Sprintf("doesn't validate with %s", s.Location),
+		}
+		return ve.causes(err)
+	}
+	return nil
+}
+
+// validate validates given value v with this schema.
+func (s *Schema) validate(scope []schemaRef, vscope int, spath string, v interface{}, vloc string) (result validationResult, err error) {
+	validationError := func(keywordPath string, format string, a ...interface{}) *ValidationError {
+		return &ValidationError{
+			KeywordLocation:         keywordLocation(scope, keywordPath),
+			AbsoluteKeywordLocation: joinPtr(s.Location, keywordPath),
+			InstanceLocation:        vloc,
+			Message:                 fmt.Sprintf(format, a...),
+		}
+	}
+
+	sref := schemaRef{spath, s, false}
+	if err := checkLoop(scope[len(scope)-vscope:], sref); err != nil {
+		panic(err)
+	}
+	scope = append(scope, sref)
+	vscope++
+
+	// populate result
+	switch v := v.(type) {
+	case map[string]interface{}:
+		result.unevalProps = make(map[string]struct{})
+		for pname := range v {
+			result.unevalProps[pname] = struct{}{}
+		}
+	case []interface{}:
+		result.unevalItems = make(map[int]struct{})
+		for i := range v {
+			result.unevalItems[i] = struct{}{}
+		}
+	}
+
+	validate := func(sch *Schema, schPath string, v interface{}, vpath string) error {
+		vloc := vloc
+		if vpath != "" {
+			vloc += "/" + vpath
+		}
+		_, err := sch.validate(scope, 0, schPath, v, vloc)
+		return err
+	}
+
+	validateInplace := func(sch *Schema, schPath string) error {
+		vr, err := sch.validate(scope, vscope, schPath, v, vloc)
+		if err == nil {
+			// update result
+			for pname := range result.unevalProps {
+				if _, ok := vr.unevalProps[pname]; !ok {
+					delete(result.unevalProps, pname)
+				}
+			}
+			for i := range result.unevalItems {
+				if _, ok := vr.unevalItems[i]; !ok {
+					delete(result.unevalItems, i)
+				}
+			}
+		}
+		return err
+	}
+
+	if s.Always != nil {
+		if !*s.Always {
+			return result, validationError("", "not allowed")
+		}
+		return result, nil
+	}
+
+	if len(s.Types) > 0 {
+		vType := jsonType(v)
+		matched := false
+		for _, t := range s.Types {
+			if vType == t {
+				matched = true
+				break
+			} else if t == "integer" && vType == "number" {
+				num, _ := new(big.Rat).SetString(fmt.Sprint(v))
+				if num.IsInt() {
+					matched = true
+					break
+				}
+			}
+		}
+		if !matched {
+			return result, validationError("type", "expected %s, but got %s", strings.Join(s.Types, " or "), vType)
+		}
+	}
+
+	var errors []error
+
+	if len(s.Constant) > 0 {
+		if !equals(v, s.Constant[0]) {
+			switch jsonType(s.Constant[0]) {
+			case "object", "array":
+				errors = append(errors, validationError("const", "const failed"))
+			default:
+				errors = append(errors, validationError("const", "value must be %#v", s.Constant[0]))
+			}
+		}
+	}
+
+	if len(s.Enum) > 0 {
+		matched := false
+		for _, item := range s.Enum {
+			if equals(v, item) {
+				matched = true
+				break
+			}
+		}
+		if !matched {
+			errors = append(errors, validationError("enum", s.enumError))
+		}
+	}
+
+	if s.format != nil && !s.format(v) {
+		var val = v
+		if v, ok := v.(string); ok {
+			val = quote(v)
+		}
+		errors = append(errors, validationError("format", "%v is not valid %s", val, quote(s.Format)))
+	}
+
+	switch v := v.(type) {
+	case map[string]interface{}:
+		if s.MinProperties != -1 && len(v) < s.MinProperties {
+			errors = append(errors, validationError("minProperties", "minimum %d properties allowed, but found %d properties", s.MinProperties, len(v)))
+		}
+		if s.MaxProperties != -1 && len(v) > s.MaxProperties {
+			errors = append(errors, validationError("maxProperties", "maximum %d properties allowed, but found %d properties", s.MaxProperties, len(v)))
+		}
+		if len(s.Required) > 0 {
+			var missing []string
+			for _, pname := range s.Required {
+				if _, ok := v[pname]; !ok {
+					missing = append(missing, quote(pname))
+				}
+			}
+			if len(missing) > 0 {
+				errors = append(errors, validationError("required", "missing properties: %s", strings.Join(missing, ", ")))
+			}
+		}
+
+		for pname, sch := range s.Properties {
+			if pvalue, ok := v[pname]; ok {
+				delete(result.unevalProps, pname)
+				if err := validate(sch, "properties/"+escape(pname), pvalue, escape(pname)); err != nil {
+					errors = append(errors, err)
+				}
+			}
+		}
+
+		if s.PropertyNames != nil {
+			for pname := range v {
+				if err := validate(s.PropertyNames, "propertyNames", pname, escape(pname)); err != nil {
+					errors = append(errors, err)
+				}
+			}
+		}
+
+		if s.RegexProperties {
+			for pname := range v {
+				if !isRegex(pname) {
+					errors = append(errors, validationError("", "patternProperty %s is not valid regex", quote(pname)))
+				}
+			}
+		}
+		for pattern, sch := range s.PatternProperties {
+			for pname, pvalue := range v {
+				if pattern.MatchString(pname) {
+					delete(result.unevalProps, pname)
+					if err := validate(sch, "patternProperties/"+escape(pattern.String()), pvalue, escape(pname)); err != nil {
+						errors = append(errors, err)
+					}
+				}
+			}
+		}
+		if s.AdditionalProperties != nil {
+			if allowed, ok := s.AdditionalProperties.(bool); ok {
+				if !allowed && len(result.unevalProps) > 0 {
+					errors = append(errors, validationError("additionalProperties", "additionalProperties %s not allowed", result.unevalPnames()))
+				}
+			} else {
+				schema := s.AdditionalProperties.(*Schema)
+				for pname := range result.unevalProps {
+					if pvalue, ok := v[pname]; ok {
+						if err := validate(schema, "additionalProperties", pvalue, escape(pname)); err != nil {
+							errors = append(errors, err)
+						}
+					}
+				}
+			}
+			result.unevalProps = nil
+		}
+		for dname, dvalue := range s.Dependencies {
+			if _, ok := v[dname]; ok {
+				switch dvalue := dvalue.(type) {
+				case *Schema:
+					if err := validateInplace(dvalue, "dependencies/"+escape(dname)); err != nil {
+						errors = append(errors, err)
+					}
+				case []string:
+					for i, pname := range dvalue {
+						if _, ok := v[pname]; !ok {
+							errors = append(errors, validationError("dependencies/"+escape(dname)+"/"+strconv.Itoa(i), "property %s is required, if %s property exists", quote(pname), quote(dname)))
+						}
+					}
+				}
+			}
+		}
+		for dname, dvalue := range s.DependentRequired {
+			if _, ok := v[dname]; ok {
+				for i, pname := range dvalue {
+					if _, ok := v[pname]; !ok {
+						errors = append(errors, validationError("dependentRequired/"+escape(dname)+"/"+strconv.Itoa(i), "property %s is required, if %s property exists", quote(pname), quote(dname)))
+					}
+				}
+			}
+		}
+		for dname, sch := range s.DependentSchemas {
+			if _, ok := v[dname]; ok {
+				if err := validateInplace(sch, "dependentSchemas/"+escape(dname)); err != nil {
+					errors = append(errors, err)
+				}
+			}
+		}
+
+	case []interface{}:
+		if s.MinItems != -1 && len(v) < s.MinItems {
+			errors = append(errors, validationError("minItems", "minimum %d items required, but found %d items", s.MinItems, len(v)))
+		}
+		if s.MaxItems != -1 && len(v) > s.MaxItems {
+			errors = append(errors, validationError("maxItems", "maximum %d items required, but found %d items", s.MaxItems, len(v)))
+		}
+		if s.UniqueItems {
+			if len(v) <= 20 {
+			outer1:
+				for i := 1; i < len(v); i++ {
+					for j := 0; j < i; j++ {
+						if equals(v[i], v[j]) {
+							errors = append(errors, validationError("uniqueItems", "items at index %d and %d are equal", j, i))
+							break outer1
+						}
+					}
+				}
+			} else {
+				m := make(map[uint64][]int)
+				var h maphash.Hash
+			outer2:
+				for i, item := range v {
+					h.Reset()
+					hash(item, &h)
+					k := h.Sum64()
+					if err != nil {
+						panic(err)
+					}
+					arr, ok := m[k]
+					if ok {
+						for _, j := range arr {
+							if equals(v[j], item) {
+								errors = append(errors, validationError("uniqueItems", "items at index %d and %d are equal", j, i))
+								break outer2
+							}
+						}
+					}
+					arr = append(arr, i)
+					m[k] = arr
+				}
+			}
+		}
+
+		// items + additionalItems
+		switch items := s.Items.(type) {
+		case *Schema:
+			for i, item := range v {
+				if err := validate(items, "items", item, strconv.Itoa(i)); err != nil {
+					errors = append(errors, err)
+				}
+			}
+			result.unevalItems = nil
+		case []*Schema:
+			for i, item := range v {
+				if i < len(items) {
+					delete(result.unevalItems, i)
+					if err := validate(items[i], "items/"+strconv.Itoa(i), item, strconv.Itoa(i)); err != nil {
+						errors = append(errors, err)
+					}
+				} else if sch, ok := s.AdditionalItems.(*Schema); ok {
+					delete(result.unevalItems, i)
+					if err := validate(sch, "additionalItems", item, strconv.Itoa(i)); err != nil {
+						errors = append(errors, err)
+					}
+				} else {
+					break
+				}
+			}
+			if additionalItems, ok := s.AdditionalItems.(bool); ok {
+				if additionalItems {
+					result.unevalItems = nil
+				} else if len(v) > len(items) {
+					errors = append(errors, validationError("additionalItems", "only %d items are allowed, but found %d items", len(items), len(v)))
+				}
+			}
+		}
+
+		// prefixItems + items
+		for i, item := range v {
+			if i < len(s.PrefixItems) {
+				delete(result.unevalItems, i)
+				if err := validate(s.PrefixItems[i], "prefixItems/"+strconv.Itoa(i), item, strconv.Itoa(i)); err != nil {
+					errors = append(errors, err)
+				}
+			} else if s.Items2020 != nil {
+				delete(result.unevalItems, i)
+				if err := validate(s.Items2020, "items", item, strconv.Itoa(i)); err != nil {
+					errors = append(errors, err)
+				}
+			} else {
+				break
+			}
+		}
+
+		// contains + minContains + maxContains
+		if s.Contains != nil && (s.MinContains != -1 || s.MaxContains != -1) {
+			matched := 0
+			var causes []error
+			for i, item := range v {
+				if err := validate(s.Contains, "contains", item, strconv.Itoa(i)); err != nil {
+					causes = append(causes, err)
+				} else {
+					matched++
+					if s.ContainsEval {
+						delete(result.unevalItems, i)
+					}
+				}
+			}
+			if s.MinContains != -1 && matched < s.MinContains {
+				errors = append(errors, validationError("minContains", "valid must be >= %d, but got %d", s.MinContains, matched).add(causes...))
+			}
+			if s.MaxContains != -1 && matched > s.MaxContains {
+				errors = append(errors, validationError("maxContains", "valid must be <= %d, but got %d", s.MaxContains, matched))
+			}
+		}
+
+	case string:
+		// minLength + maxLength
+		if s.MinLength != -1 || s.MaxLength != -1 {
+			length := utf8.RuneCount([]byte(v))
+			if s.MinLength != -1 && length < s.MinLength {
+				errors = append(errors, validationError("minLength", "length must be >= %d, but got %d", s.MinLength, length))
+			}
+			if s.MaxLength != -1 && length > s.MaxLength {
+				errors = append(errors, validationError("maxLength", "length must be <= %d, but got %d", s.MaxLength, length))
+			}
+		}
+
+		if s.Pattern != nil && !s.Pattern.MatchString(v) {
+			errors = append(errors, validationError("pattern", "does not match pattern %s", quote(s.Pattern.String())))
+		}
+
+		// contentEncoding + contentMediaType
+		if s.decoder != nil || s.mediaType != nil {
+			decoded := s.ContentEncoding == ""
+			var content []byte
+			if s.decoder != nil {
+				b, err := s.decoder(v)
+				if err != nil {
+					errors = append(errors, validationError("contentEncoding", "value is not %s encoded", s.ContentEncoding))
+				} else {
+					content, decoded = b, true
+				}
+			}
+			if decoded && s.mediaType != nil {
+				if s.decoder == nil {
+					content = []byte(v)
+				}
+				if err := s.mediaType(content); err != nil {
+					errors = append(errors, validationError("contentMediaType", "value is not of mediatype %s", quote(s.ContentMediaType)))
+				}
+			}
+			if decoded && s.ContentSchema != nil {
+				contentJSON, err := unmarshal(bytes.NewReader(content))
+				if err != nil {
+					errors = append(errors, validationError("contentSchema", "value is not valid json"))
+				} else {
+					err := validate(s.ContentSchema, "contentSchema", contentJSON, "")
+					if err != nil {
+						errors = append(errors, err)
+					}
+				}
+			}
+		}
+
+	case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
+		// lazy convert to *big.Rat to avoid allocation
+		var numVal *big.Rat
+		num := func() *big.Rat {
+			if numVal == nil {
+				numVal, _ = new(big.Rat).SetString(fmt.Sprint(v))
+			}
+			return numVal
+		}
+		f64 := func(r *big.Rat) float64 {
+			f, _ := r.Float64()
+			return f
+		}
+		if s.Minimum != nil && num().Cmp(s.Minimum) < 0 {
+			errors = append(errors, validationError("minimum", "must be >= %v but found %v", f64(s.Minimum), v))
+		}
+		if s.ExclusiveMinimum != nil && num().Cmp(s.ExclusiveMinimum) <= 0 {
+			errors = append(errors, validationError("exclusiveMinimum", "must be > %v but found %v", f64(s.ExclusiveMinimum), v))
+		}
+		if s.Maximum != nil && num().Cmp(s.Maximum) > 0 {
+			errors = append(errors, validationError("maximum", "must be <= %v but found %v", f64(s.Maximum), v))
+		}
+		if s.ExclusiveMaximum != nil && num().Cmp(s.ExclusiveMaximum) >= 0 {
+			errors = append(errors, validationError("exclusiveMaximum", "must be < %v but found %v", f64(s.ExclusiveMaximum), v))
+		}
+		if s.MultipleOf != nil {
+			if q := new(big.Rat).Quo(num(), s.MultipleOf); !q.IsInt() {
+				errors = append(errors, validationError("multipleOf", "%v not multipleOf %v", v, f64(s.MultipleOf)))
+			}
+		}
+	}
+
+	// $ref + $recursiveRef + $dynamicRef
+	validateRef := func(sch *Schema, refPath string) error {
+		if sch != nil {
+			if err := validateInplace(sch, refPath); err != nil {
+				var url = sch.Location
+				if s.url() == sch.url() {
+					url = sch.loc()
+				}
+				return validationError(refPath, "doesn't validate with %s", quote(url)).causes(err)
+			}
+		}
+		return nil
+	}
+	if err := validateRef(s.Ref, "$ref"); err != nil {
+		errors = append(errors, err)
+	}
+	if s.RecursiveRef != nil {
+		sch := s.RecursiveRef
+		if sch.RecursiveAnchor {
+			// recursiveRef based on scope
+			for _, e := range scope {
+				if e.schema.RecursiveAnchor {
+					sch = e.schema
+					break
+				}
+			}
+		}
+		if err := validateRef(sch, "$recursiveRef"); err != nil {
+			errors = append(errors, err)
+		}
+	}
+	if s.DynamicRef != nil {
+		sch := s.DynamicRef
+		if s.dynamicRefAnchor != "" && sch.DynamicAnchor == s.dynamicRefAnchor {
+			// dynamicRef based on scope
+			for i := len(scope) - 1; i >= 0; i-- {
+				sr := scope[i]
+				if sr.discard {
+					break
+				}
+				for _, da := range sr.schema.dynamicAnchors {
+					if da.DynamicAnchor == s.DynamicRef.DynamicAnchor && da != s.DynamicRef {
+						sch = da
+						break
+					}
+				}
+			}
+		}
+		if err := validateRef(sch, "$dynamicRef"); err != nil {
+			errors = append(errors, err)
+		}
+	}
+
+	if s.Not != nil && validateInplace(s.Not, "not") == nil {
+		errors = append(errors, validationError("not", "not failed"))
+	}
+
+	for i, sch := range s.AllOf {
+		schPath := "allOf/" + strconv.Itoa(i)
+		if err := validateInplace(sch, schPath); err != nil {
+			errors = append(errors, validationError(schPath, "allOf failed").add(err))
+		}
+	}
+
+	if len(s.AnyOf) > 0 {
+		matched := false
+		var causes []error
+		for i, sch := range s.AnyOf {
+			if err := validateInplace(sch, "anyOf/"+strconv.Itoa(i)); err == nil {
+				matched = true
+			} else {
+				causes = append(causes, err)
+			}
+		}
+		if !matched {
+			errors = append(errors, validationError("anyOf", "anyOf failed").add(causes...))
+		}
+	}
+
+	if len(s.OneOf) > 0 {
+		matched := -1
+		var causes []error
+		for i, sch := range s.OneOf {
+			if err := validateInplace(sch, "oneOf/"+strconv.Itoa(i)); err == nil {
+				if matched == -1 {
+					matched = i
+				} else {
+					errors = append(errors, validationError("oneOf", "valid against schemas at indexes %d and %d", matched, i))
+					break
+				}
+			} else {
+				causes = append(causes, err)
+			}
+		}
+		if matched == -1 {
+			errors = append(errors, validationError("oneOf", "oneOf failed").add(causes...))
+		}
+	}
+
+	// if + then + else
+	if s.If != nil {
+		err := validateInplace(s.If, "if")
+		// "if" leaves dynamic scope
+		scope[len(scope)-1].discard = true
+		if err == nil {
+			if s.Then != nil {
+				if err := validateInplace(s.Then, "then"); err != nil {
+					errors = append(errors, validationError("then", "if-then failed").add(err))
+				}
+			}
+		} else {
+			if s.Else != nil {
+				if err := validateInplace(s.Else, "else"); err != nil {
+					errors = append(errors, validationError("else", "if-else failed").add(err))
+				}
+			}
+		}
+		// restore dynamic scope
+		scope[len(scope)-1].discard = false
+	}
+
+	for _, ext := range s.Extensions {
+		if err := ext.Validate(ValidationContext{result, validate, validateInplace, validationError}, v); err != nil {
+			errors = append(errors, err)
+		}
+	}
+
+	// unevaluatedProperties + unevaluatedItems
+	switch v := v.(type) {
+	case map[string]interface{}:
+		if s.UnevaluatedProperties != nil {
+			for pname := range result.unevalProps {
+				if pvalue, ok := v[pname]; ok {
+					if err := validate(s.UnevaluatedProperties, "unevaluatedProperties", pvalue, escape(pname)); err != nil {
+						errors = append(errors, err)
+					}
+				}
+			}
+			result.unevalProps = nil
+		}
+	case []interface{}:
+		if s.UnevaluatedItems != nil {
+			for i := range result.unevalItems {
+				if err := validate(s.UnevaluatedItems, "unevaluatedItems", v[i], strconv.Itoa(i)); err != nil {
+					errors = append(errors, err)
+				}
+			}
+			result.unevalItems = nil
+		}
+	}
+
+	switch len(errors) {
+	case 0:
+		return result, nil
+	case 1:
+		return result, errors[0]
+	default:
+		return result, validationError("", "").add(errors...) // empty message, used just for wrapping
+	}
+}
+
+type validationResult struct {
+	unevalProps map[string]struct{}
+	unevalItems map[int]struct{}
+}
+
+func (vr validationResult) unevalPnames() string {
+	pnames := make([]string, 0, len(vr.unevalProps))
+	for pname := range vr.unevalProps {
+		pnames = append(pnames, quote(pname))
+	}
+	return strings.Join(pnames, ", ")
+}
+
+// jsonType returns the json type of given value v.
+//
+// It panics if the given value is not valid json value
+func jsonType(v interface{}) string {
+	switch v.(type) {
+	case nil:
+		return "null"
+	case bool:
+		return "boolean"
+	case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
+		return "number"
+	case string:
+		return "string"
+	case []interface{}:
+		return "array"
+	case map[string]interface{}:
+		return "object"
+	}
+	panic(InvalidJSONTypeError(fmt.Sprintf("%T", v)))
+}
+
+// equals tells if given two json values are equal or not.
+func equals(v1, v2 interface{}) bool {
+	v1Type := jsonType(v1)
+	if v1Type != jsonType(v2) {
+		return false
+	}
+	switch v1Type {
+	case "array":
+		arr1, arr2 := v1.([]interface{}), v2.([]interface{})
+		if len(arr1) != len(arr2) {
+			return false
+		}
+		for i := range arr1 {
+			if !equals(arr1[i], arr2[i]) {
+				return false
+			}
+		}
+		return true
+	case "object":
+		obj1, obj2 := v1.(map[string]interface{}), v2.(map[string]interface{})
+		if len(obj1) != len(obj2) {
+			return false
+		}
+		for k, v1 := range obj1 {
+			if v2, ok := obj2[k]; ok {
+				if !equals(v1, v2) {
+					return false
+				}
+			} else {
+				return false
+			}
+		}
+		return true
+	case "number":
+		num1, _ := new(big.Rat).SetString(fmt.Sprint(v1))
+		num2, _ := new(big.Rat).SetString(fmt.Sprint(v2))
+		return num1.Cmp(num2) == 0
+	default:
+		return v1 == v2
+	}
+}
+
+func hash(v interface{}, h *maphash.Hash) {
+	switch v := v.(type) {
+	case nil:
+		h.WriteByte(0)
+	case bool:
+		h.WriteByte(1)
+		if v {
+			h.WriteByte(1)
+		} else {
+			h.WriteByte(0)
+		}
+	case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
+		h.WriteByte(2)
+		num, _ := new(big.Rat).SetString(fmt.Sprint(v))
+		h.Write(num.Num().Bytes())
+		h.Write(num.Denom().Bytes())
+	case string:
+		h.WriteByte(3)
+		h.WriteString(v)
+	case []interface{}:
+		h.WriteByte(4)
+		for _, item := range v {
+			hash(item, h)
+		}
+	case map[string]interface{}:
+		h.WriteByte(5)
+		props := make([]string, 0, len(v))
+		for prop := range v {
+			props = append(props, prop)
+		}
+		sort.Slice(props, func(i, j int) bool {
+			return props[i] < props[j]
+		})
+		for _, prop := range props {
+			hash(prop, h)
+			hash(v[prop], h)
+		}
+	default:
+		panic(InvalidJSONTypeError(fmt.Sprintf("%T", v)))
+	}
+}
+
+// escape converts given token to valid json-pointer token
+func escape(token string) string {
+	token = strings.ReplaceAll(token, "~", "~0")
+	token = strings.ReplaceAll(token, "/", "~1")
+	return url.PathEscape(token)
+}
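
The `equals` and `hash` helpers in the vendored validator above normalize every JSON number through `*big.Rat`, so values that differ only in representation (`json.Number`, `float64`, `int`) compare and hash identically for `uniqueItems` checks. Below is a minimal standalone sketch of that normalization; the `ratOf` helper and sample values are illustrative and not part of the vendored file.

```go
package main

import (
	"encoding/json"
	"fmt"
	"math/big"
)

// ratOf converts any JSON number representation to an exact rational,
// mirroring how the vendored equals/hash helpers normalize numbers
// via fmt.Sprint + big.Rat.SetString.
func ratOf(v interface{}) *big.Rat {
	r, _ := new(big.Rat).SetString(fmt.Sprint(v))
	return r
}

func main() {
	a := json.Number("1.000")
	b := float64(1)
	// Exact rational comparison: representation differences do not matter.
	fmt.Println(ratOf(a).Cmp(ratOf(b)) == 0) // true
}
```

Because the comparison uses exact rational arithmetic rather than float equality, `1`, `1.0`, and `json.Number("1.000")` are all treated as the same JSON value.
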
diff --git a/vendor/github.com/securego/gosec/v2/.golangci.yml b/vendor/github.com/securego/gosec/v2/.golangci.yml
index d6c5de7ba29721c4cfe8dadff7bad44887827c42..d591dc2493517a1fcdc869274e59b5c96608d5f7 100644
--- a/vendor/github.com/securego/gosec/v2/.golangci.yml
+++ b/vendor/github.com/securego/gosec/v2/.golangci.yml
@@ -9,6 +9,7 @@ linters:
     - exportloopref
     - gci
     - ginkgolinter
+    - gochecknoinits
     - gofmt
     - gofumpt
     - goimports
@@ -35,6 +36,10 @@ linters-settings:
       - standard
       - default
       - prefix(github.com/securego)
+  revive:
+    rules:
+      - name: dot-imports
+        disabled: true
 
 run:
   timeout: 5m
diff --git a/vendor/github.com/securego/gosec/v2/.goreleaser.yml b/vendor/github.com/securego/gosec/v2/.goreleaser.yml
index e3c903e7a78de2fbe30f47ee181090f5693d27e0..bd85bab3ac2108918553572b0f2eee9f0e7b7f7d 100644
--- a/vendor/github.com/securego/gosec/v2/.goreleaser.yml
+++ b/vendor/github.com/securego/gosec/v2/.goreleaser.yml
@@ -19,6 +19,7 @@ builds:
       - amd64
       - arm64
       - s390x
+      - ppc64le
     ldflags: -X main.Version={{.Version}} -X main.GitTag={{.Tag}} -X main.BuildDate={{.Date}}
     env:
       - CGO_ENABLED=0
diff --git a/vendor/github.com/securego/gosec/v2/Makefile b/vendor/github.com/securego/gosec/v2/Makefile
index 09303d11a0877e539e5bd23a94d1cb0ea941b33d..4f6cce765111840d1c466f6350425e55fe2d6fef 100644
--- a/vendor/github.com/securego/gosec/v2/Makefile
+++ b/vendor/github.com/securego/gosec/v2/Makefile
@@ -11,29 +11,28 @@ endif
 BUILDFLAGS := "-w -s -X 'main.Version=$(GIT_TAG)' -X 'main.GitTag=$(GIT_TAG)' -X 'main.BuildDate=$(BUILD_DATE)'"
 CGO_ENABLED = 0
 GO := GO111MODULE=on go
-GO_NOMOD :=GO111MODULE=off go
 GOPATH ?= $(shell $(GO) env GOPATH)
 GOBIN ?= $(GOPATH)/bin
 GOSEC ?= $(GOBIN)/gosec
 GINKGO ?= $(GOBIN)/ginkgo
 GO_MINOR_VERSION = $(shell $(GO) version | cut -c 14- | cut -d' ' -f1 | cut -d'.' -f2)
 GOVULN_MIN_VERSION = 17
-GO_VERSION = 1.20
+GO_VERSION = 1.22
 
 default:
 	$(MAKE) build
 
 install-test-deps:
 	go install github.com/onsi/ginkgo/v2/ginkgo@latest
-	$(GO_NOMOD) get -u golang.org/x/crypto/ssh
-	$(GO_NOMOD) get -u github.com/lib/pq
+	go install golang.org/x/crypto/...@latest
+	go install github.com/lib/pq/...@latest
 
 install-govulncheck:
 	@if [ $(GO_MINOR_VERSION) -gt $(GOVULN_MIN_VERSION) ]; then \
 		go install golang.org/x/vuln/cmd/govulncheck@latest; \
 	fi
 
-test: install-test-deps build fmt vet sec govulncheck
+test: install-test-deps build-race fmt vet sec govulncheck
 	$(GINKGO) -v --fail-fast
 
 fmt:
@@ -65,6 +64,9 @@ test-coverage: install-test-deps
 build:
 	go build -o $(BIN) ./cmd/gosec/
 
+build-race:
+	go build -race -o $(BIN) ./cmd/gosec/
+
 clean:
 	rm -rf build vendor dist coverage.txt
 	rm -f release image $(BIN)
@@ -74,7 +76,7 @@ release:
 	goreleaser release
 
 build-linux:
-	CGO_ENABLED=$(CGO_ENABLED) GOOS=linux GOARCH=amd64 go build -ldflags=$(BUILDFLAGS) -o $(BIN) ./cmd/gosec/
+	CGO_ENABLED=$(CGO_ENABLED) GOOS=linux go build -ldflags=$(BUILDFLAGS) -o $(BIN) ./cmd/gosec/
 
 image:
 	@echo "Building the Docker image..."
@@ -89,5 +91,5 @@ image-push: image
 
 tlsconfig:
 	go generate ./...
-	
+
 .PHONY: test build clean release image image-push tlsconfig
diff --git a/vendor/github.com/securego/gosec/v2/README.md b/vendor/github.com/securego/gosec/v2/README.md
index 6c6d2982c8b1e74de649307019a8c362e9873977..70633028bb9897f536d4afe545fdf585276fbe1c 100644
--- a/vendor/github.com/securego/gosec/v2/README.md
+++ b/vendor/github.com/securego/gosec/v2/README.md
@@ -1,7 +1,7 @@
 
-# gosec - Golang Security Checker
+# gosec - Go Security Checker
 
-Inspects source code for security problems by scanning the Go AST.
+Inspects source code for security problems by scanning the Go AST and SSA code representation.
 
 <img src="https://securego.io/img/gosec.png" width="320">
 
@@ -105,7 +105,7 @@ jobs:
           # we let the report trigger content trigger a failure using the GitHub Security features.
           args: '-no-fail -fmt sarif -out results.sarif ./...'
       - name: Upload SARIF file
-        uses: github/codeql-action/upload-sarif@v1
+        uses: github/codeql-action/upload-sarif@v2
         with:
           # Path to SARIF file relative to the root of the repository
           sarif_file: results.sarif
@@ -113,18 +113,10 @@ jobs:
 
 ### Local Installation
 
-#### Go 1.16+
-
 ```bash
 go install github.com/securego/gosec/v2/cmd/gosec@latest
 ```
 
-#### Go version < 1.16
-
-```bash
-go get -u github.com/securego/gosec/v2/cmd/gosec
-```
-
 ## Usage
 
 Gosec can be configured to only run a subset of rules, to exclude certain file
@@ -157,6 +149,7 @@ directory you can supply `./...` as the input argument.
 - G304: File path provided as taint input
 - G305: File traversal when extracting zip/tar archive
 - G306: Poor file permissions used when writing to a new file
+- G307: Poor file permissions used when creating a file with os.Create
 - G401: Detect the usage of DES, RC4, MD5 or SHA1
 - G402: Look for bad TLS connection settings
 - G403: Ensure minimum RSA key length of 2048 bits
@@ -166,7 +159,7 @@ directory you can supply `./...` as the input argument.
 - G503: Import blocklist: crypto/rc4
 - G504: Import blocklist: net/http/cgi
 - G505: Import blocklist: crypto/sha1
-- G601: Implicit memory aliasing of items from a range statement
+- G601: Implicit memory aliasing of items from a range statement (only for Go 1.21 or lower)
 - G602: Slice access out of bounds
 
 ### Retired rules
@@ -273,31 +266,33 @@ gosec -exclude-generated ./...
 
 ### Annotating code
 
-As with all automated detection tools, there will be cases of false positives. In cases where gosec reports a failure that has been manually verified as being safe,
+As with all automated detection tools, there will be cases of false positives.
+In cases where gosec reports a failure that has been manually verified as being safe,
 it is possible to annotate the code with a comment that starts with `#nosec`.
+
 The `#nosec` comment should have the format `#nosec [RuleList] [-- Justification]`.
 
-The annotation causes gosec to stop processing any further nodes within the
-AST so can apply to a whole block or more granularly to a single expression.
+The `#nosec` comment needs to be placed on the line where the warning is reported.
 
 ```go
-
-import "md5" //#nosec
-
-
-func main(){
-
-    /* #nosec */
-    if x > y {
-        h := md5.New() // this will also be ignored
-    }
-
+func main() {
+	tr := &http.Transport{
+		TLSClientConfig: &tls.Config{
+			InsecureSkipVerify: true, // #nosec G402
+		},
+	}
+
+	client := &http.Client{Transport: tr}
+	_, err := client.Get("https://golang.org/")
+	if err != nil {
+		fmt.Println(err)
+	}
 }
-
 ```
 
-When a specific false positive has been identified and verified as safe, you may wish to suppress only that single rule (or a specific set of rules)
-within a section of code, while continuing to scan for other problems. To do this, you can list the rule(s) to be suppressed within
+When a specific false positive has been identified and verified as safe, you may
+wish to suppress only that single rule (or a specific set of rules) within a section of code,
+while continuing to scan for other problems. To do this, you can list the rule(s) to be suppressed within
 the `#nosec` annotation, e.g: `/* #nosec G401 */` or `//#nosec G201 G202 G203`
 
 You could put the description or justification text for the annotation. The
@@ -390,7 +385,7 @@ schema-generate -i sarif-schema-2.1.0.json -o mypath/types.go
 ```
 
 Most of the MarshallJSON/UnmarshalJSON are removed except the one for PropertyBag which is handy to inline the additional properties. The rest can be removed.
-The URI,ID, UUID, GUID were renamed so it fits the Golang convention defined [here](https://github.com/golang/lint/blob/master/lint.go#L700)
+The URI,ID, UUID, GUID were renamed so it fits the Go convention defined [here](https://github.com/golang/lint/blob/master/lint.go#L700)
 
 ### Tests
 
diff --git a/vendor/github.com/securego/gosec/v2/USERS.md b/vendor/github.com/securego/gosec/v2/USERS.md
index ffc05608141617429b1d46302a88ab2b825626a5..9b6e4eeee41cc99546250b6fa0c5b09b069a3ad2 100644
--- a/vendor/github.com/securego/gosec/v2/USERS.md
+++ b/vendor/github.com/securego/gosec/v2/USERS.md
@@ -15,6 +15,7 @@ This is a list of gosec's users. Please send a pull request with your organisati
 9. [PingCAP/tidb](https://github.com/pingcap/tidb)
 10. [Checkmarx](https://www.checkmarx.com/)
 11. [SeatGeek](https://www.seatgeek.com/)
+12. [reMarkable](https://remarkable.com)
 
 ## Projects
 
diff --git a/vendor/github.com/securego/gosec/v2/action.yml b/vendor/github.com/securego/gosec/v2/action.yml
index 8e28c346d3504d43ab5ffa74f11349c31efaf1dd..eb5307f040e87852a773361b9c3195c5d8180be9 100644
--- a/vendor/github.com/securego/gosec/v2/action.yml
+++ b/vendor/github.com/securego/gosec/v2/action.yml
@@ -10,7 +10,7 @@ inputs:
 
 runs:
     using: 'docker'
-    image: 'docker://securego/gosec:2.16.0'
+    image: 'docker://securego/gosec:2.19.0'
     args:
       - ${{ inputs.args }}
 
diff --git a/vendor/github.com/securego/gosec/v2/analyzer.go b/vendor/github.com/securego/gosec/v2/analyzer.go
index 023514b8aab50f20c0321e60653e4336c4881b10..f7dd895ed2ef9486d4c302cd83284cfa450dc8cc 100644
--- a/vendor/github.com/securego/gosec/v2/analyzer.go
+++ b/vendor/github.com/securego/gosec/v2/analyzer.go
@@ -31,11 +31,12 @@ import (
 	"strings"
 	"sync"
 
-	"github.com/securego/gosec/v2/analyzers"
-	"github.com/securego/gosec/v2/issue"
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/analysis/passes/buildssa"
 	"golang.org/x/tools/go/packages"
+
+	"github.com/securego/gosec/v2/analyzers"
+	"github.com/securego/gosec/v2/issue"
 )
 
 // LoadMode controls the amount of details to return when loading the packages
@@ -55,7 +56,79 @@ const externalSuppressionJustification = "Globally suppressed."
 
 const aliasOfAllRules = "*"
 
-var generatedCodePattern = regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`)
+type ignore struct {
+	start        int
+	end          int
+	suppressions map[string][]issue.SuppressionInfo
+}
+
+type ignores map[string][]ignore
+
+func newIgnores() ignores {
+	return make(map[string][]ignore)
+}
+
+func (i ignores) parseLine(line string) (int, int) {
+	parts := strings.Split(line, "-")
+	start, err := strconv.Atoi(parts[0])
+	if err != nil {
+		start = 0
+	}
+	end := start
+	if len(parts) > 1 {
+		if e, err := strconv.Atoi(parts[1]); err == nil {
+			end = e
+		}
+	}
+	return start, end
+}
+
+func (i ignores) add(file string, line string, suppressions map[string]issue.SuppressionInfo) {
+	is := []ignore{}
+	if _, ok := i[file]; ok {
+		is = i[file]
+	}
+	found := false
+	start, end := i.parseLine(line)
+	for _, ig := range is {
+		if ig.start <= start && ig.end >= end {
+			found = true
+			for r, s := range suppressions {
+				ss, ok := ig.suppressions[r]
+				if !ok {
+					ss = []issue.SuppressionInfo{}
+				}
+				ss = append(ss, s)
+				ig.suppressions[r] = ss
+			}
+			break
+		}
+	}
+	if !found {
+		ig := ignore{
+			start:        start,
+			end:          end,
+			suppressions: map[string][]issue.SuppressionInfo{},
+		}
+		for r, s := range suppressions {
+			ig.suppressions[r] = []issue.SuppressionInfo{s}
+		}
+		is = append(is, ig)
+	}
+	i[file] = is
+}
+
+func (i ignores) get(file string, line string) map[string][]issue.SuppressionInfo {
+	start, end := i.parseLine(line)
+	if is, ok := i[file]; ok {
+		for _, i := range is {
+			if start <= i.start && end >= i.end {
+				return i.suppressions
+			}
+		}
+	}
+	return map[string][]issue.SuppressionInfo{}
+}
 
 // The Context is populated with data parsed from the source code as it is scanned.
 // It is passed through to all rule functions as they are called. Rules may use
@@ -69,7 +142,7 @@ type Context struct {
 	Root         *ast.File
 	Imports      *ImportTracker
 	Config       Config
-	Ignores      []map[string][]issue.SuppressionInfo
+	Ignores      ignores
 	PassedValues map[string]interface{}
 }
 
@@ -110,6 +183,7 @@ type Analyzer struct {
 	trackSuppressions bool
 	concurrency       int
 	analyzerList      []*analysis.Analyzer
+	mu                sync.Mutex
 }
 
 // NewAnalyzer builds a new analyzer.
@@ -231,9 +305,7 @@ func (gosec *Analyzer) Process(buildTags []string, packagePaths ...string) error
 					return fmt.Errorf("parsing errors in pkg %q: %w", pkg.Name, err)
 				}
 				gosec.CheckRules(pkg)
-				if on, err := gosec.config.IsGlobalEnabled(SSA); err == nil && on {
-					gosec.CheckAnalyzers(pkg)
-				}
+				gosec.CheckAnalyzers(pkg)
 			}
 		}
 	}
@@ -252,7 +324,9 @@ func (gosec *Analyzer) load(pkgPath string, conf *packages.Config) ([]*packages.
 	// step 1/3 create build context.
 	buildD := build.Default
 	// step 2/3: add build tags to get env dependent files into basePackage.
+	gosec.mu.Lock()
 	buildD.BuildTags = conf.BuildFlags
+	gosec.mu.Unlock()
 	basePackage, err := buildD.ImportDir(pkgPath, build.ImportComment)
 	if err != nil {
 		return []*packages.Package{}, fmt.Errorf("importing dir %q: %w", pkgPath, err)
@@ -276,7 +350,9 @@ func (gosec *Analyzer) load(pkgPath string, conf *packages.Config) ([]*packages.
 	}
 
 	// step 3/3 remove build tags from conf to proceed build correctly.
+	gosec.mu.Lock()
 	conf.BuildFlags = nil
+	defer gosec.mu.Unlock()
 	pkgs, err := packages.Load(conf, packageFiles...)
 	if err != nil {
 		return []*packages.Package{}, fmt.Errorf("loading files from package %q: %w", pkgPath, err)
@@ -284,7 +360,7 @@ func (gosec *Analyzer) load(pkgPath string, conf *packages.Config) ([]*packages.
 	return pkgs, nil
 }
 
-// CheckRules runs analysis on the given package
+// CheckRules runs analysis on the given package.
 func (gosec *Analyzer) CheckRules(pkg *packages.Package) {
 	gosec.logger.Println("Checking package:", pkg.Name)
 	for _, file := range pkg.Syntax {
@@ -299,7 +375,7 @@ func (gosec *Analyzer) CheckRules(pkg *packages.Package) {
 		if filepath.Ext(checkedFile) != ".go" {
 			continue
 		}
-		if gosec.excludeGenerated && isGeneratedFile(file) {
+		if gosec.excludeGenerated && ast.IsGenerated(file) {
 			gosec.logger.Println("Ignoring generated file:", checkedFile)
 			continue
 		}
@@ -314,37 +390,22 @@ func (gosec *Analyzer) CheckRules(pkg *packages.Package) {
 		gosec.context.PkgFiles = pkg.Syntax
 		gosec.context.Imports = NewImportTracker()
 		gosec.context.PassedValues = make(map[string]interface{})
+		gosec.context.Ignores = newIgnores()
+		gosec.updateIgnores()
 		ast.Walk(gosec, file)
 		gosec.stats.NumFiles++
 		gosec.stats.NumLines += pkg.Fset.File(file.Pos()).LineCount()
 	}
 }
 
-// CheckAnalyzers runs analyzers on a given package
+// CheckAnalyzers runs analyzers on a given package.
 func (gosec *Analyzer) CheckAnalyzers(pkg *packages.Package) {
-	ssaPass := &analysis.Pass{
-		Analyzer:          buildssa.Analyzer,
-		Fset:              pkg.Fset,
-		Files:             pkg.Syntax,
-		OtherFiles:        pkg.OtherFiles,
-		IgnoredFiles:      pkg.IgnoredFiles,
-		Pkg:               pkg.Types,
-		TypesInfo:         pkg.TypesInfo,
-		TypesSizes:        pkg.TypesSizes,
-		ResultOf:          nil,
-		Report:            nil,
-		ImportObjectFact:  nil,
-		ExportObjectFact:  nil,
-		ImportPackageFact: nil,
-		ExportPackageFact: nil,
-		AllObjectFacts:    nil,
-		AllPackageFacts:   nil,
-	}
-	ssaResult, err := ssaPass.Analyzer.Run(ssaPass)
-	if err != nil {
-		gosec.logger.Printf("Error running SSA analyser on package %q: %s", pkg.Name, err)
+	ssaResult, err := gosec.buildSSA(pkg)
+	if err != nil || ssaResult == nil {
+		gosec.logger.Printf("Error building the SSA representation of the package %q: %s", pkg.Name, err)
 		return
 	}
+
 	resultMap := map[*analysis.Analyzer]interface{}{
 		buildssa.Analyzer: &analyzers.SSAAnalyzerResult{
 			Config: gosec.Config(),
@@ -352,6 +413,9 @@ func (gosec *Analyzer) CheckAnalyzers(pkg *packages.Package) {
 			SSA:    ssaResult.(*buildssa.SSA),
 		},
 	}
+
+	generatedFiles := gosec.generatedFiles(pkg)
+
 	for _, analyzer := range gosec.analyzerList {
 		pass := &analysis.Pass{
 			Analyzer:          analyzer,
@@ -377,22 +441,62 @@ func (gosec *Analyzer) CheckAnalyzers(pkg *packages.Package) {
 			continue
 		}
 		if result != nil {
-			if aissue, ok := result.(*issue.Issue); ok {
-				gosec.updateIssues(aissue, false, []issue.SuppressionInfo{})
+			if passIssues, ok := result.([]*issue.Issue); ok {
+				for _, iss := range passIssues {
+					if gosec.excludeGenerated {
+						if _, ok := generatedFiles[iss.File]; ok {
+							continue
+						}
+					}
+					gosec.updateIssues(iss)
+				}
 			}
 		}
 	}
 }
 
-func isGeneratedFile(file *ast.File) bool {
-	for _, comment := range file.Comments {
-		for _, row := range comment.List {
-			if generatedCodePattern.MatchString(row.Text) {
-				return true
+func (gosec *Analyzer) generatedFiles(pkg *packages.Package) map[string]bool {
+	generatedFiles := map[string]bool{}
+	for _, file := range pkg.Syntax {
+		if ast.IsGenerated(file) {
+			fp := pkg.Fset.File(file.Pos())
+			if fp == nil {
+				// skip files which cannot be located
+				continue
 			}
+			generatedFiles[fp.Name()] = true
 		}
 	}
-	return false
+	return generatedFiles
+}
+
+// buildSSA runs the SSA pass which builds the SSA representation of the package. It handles gracefully any panic.
+func (gosec *Analyzer) buildSSA(pkg *packages.Package) (interface{}, error) {
+	defer func() {
+		if r := recover(); r != nil {
+			gosec.logger.Printf("Panic when running SSA analyser on package: %s", pkg.Name)
+		}
+	}()
+	ssaPass := &analysis.Pass{
+		Analyzer:          buildssa.Analyzer,
+		Fset:              pkg.Fset,
+		Files:             pkg.Syntax,
+		OtherFiles:        pkg.OtherFiles,
+		IgnoredFiles:      pkg.IgnoredFiles,
+		Pkg:               pkg.Types,
+		TypesInfo:         pkg.TypesInfo,
+		TypesSizes:        pkg.TypesSizes,
+		ResultOf:          nil,
+		Report:            nil,
+		ImportObjectFact:  nil,
+		ExportObjectFact:  nil,
+		ImportPackageFact: nil,
+		ExportPackageFact: nil,
+		AllObjectFacts:    nil,
+		AllPackageFacts:   nil,
+	}
+
+	return ssaPass.Analyzer.Run(ssaPass)
 }
 
 // ParseErrors parses the errors from given package
@@ -449,7 +553,12 @@ func (gosec *Analyzer) ignore(n ast.Node) map[string]issue.SuppressionInfo {
 	if groups, ok := gosec.context.Comments[n]; ok && !gosec.ignoreNosec {
 
 		// Checks if an alternative for #nosec is set and, if not, uses the default.
-		noSecDefaultTag := NoSecTag(string(Nosec))
+		noSecDefaultTag, err := gosec.config.GetGlobal(Nosec)
+		if err != nil {
+			noSecDefaultTag = NoSecTag(string(Nosec))
+		} else {
+			noSecDefaultTag = NoSecTag(noSecDefaultTag)
+		}
 		noSecAlternativeTag, err := gosec.config.GetGlobal(NoSecAlternative)
 		if err != nil {
 			noSecAlternativeTag = noSecDefaultTag
@@ -459,8 +568,8 @@ func (gosec *Analyzer) ignore(n ast.Node) map[string]issue.SuppressionInfo {
 
 		for _, group := range groups {
 			comment := strings.TrimSpace(group.Text())
-			foundDefaultTag := strings.HasPrefix(comment, noSecDefaultTag) || regexp.MustCompile("\n *"+noSecDefaultTag).Match([]byte(comment))
-			foundAlternativeTag := strings.HasPrefix(comment, noSecAlternativeTag) || regexp.MustCompile("\n *"+noSecAlternativeTag).Match([]byte(comment))
+			foundDefaultTag := strings.HasPrefix(comment, noSecDefaultTag) || regexp.MustCompile("\n *"+noSecDefaultTag).MatchString(comment)
+			foundAlternativeTag := strings.HasPrefix(comment, noSecAlternativeTag) || regexp.MustCompile("\n *"+noSecAlternativeTag).MatchString(comment)
 
 			if foundDefaultTag || foundAlternativeTag {
 				gosec.stats.NumNosec++
@@ -509,11 +618,6 @@ func (gosec *Analyzer) ignore(n ast.Node) map[string]issue.SuppressionInfo {
 // Visit runs the gosec visitor logic over an AST created by parsing go code.
 // Rule methods added with AddRule will be invoked as necessary.
 func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor {
-	ignores, ok := gosec.updateIgnoredRules(n)
-	if !ok {
-		return gosec
-	}
-
 	// Using ast.File instead of ast.ImportSpec, so that we can track all imports at once.
 	switch i := n.(type) {
 	case *ast.File:
@@ -521,56 +625,48 @@ func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor {
 	}
 
 	for _, rule := range gosec.ruleset.RegisteredFor(n) {
-		suppressions, ignored := gosec.updateSuppressions(rule.ID(), ignores)
 		issue, err := rule.Match(n, gosec.context)
 		if err != nil {
 			file, line := GetLocation(n, gosec.context)
 			file = path.Base(file)
 			gosec.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line)
 		}
-		gosec.updateIssues(issue, ignored, suppressions)
+		gosec.updateIssues(issue)
 	}
 	return gosec
 }
 
-func (gosec *Analyzer) updateIgnoredRules(n ast.Node) (map[string][]issue.SuppressionInfo, bool) {
-	if n == nil {
-		if len(gosec.context.Ignores) > 0 {
-			gosec.context.Ignores = gosec.context.Ignores[1:]
-		}
-		return nil, false
+func (gosec *Analyzer) updateIgnores() {
+	for n := range gosec.context.Comments {
+		gosec.updateIgnoredRulesForNode(n)
 	}
-	// Get any new rule exclusions.
-	ignoredRules := gosec.ignore(n)
+}
 
-	// Now create the union of exclusions.
-	ignores := map[string][]issue.SuppressionInfo{}
-	if len(gosec.context.Ignores) > 0 {
-		for k, v := range gosec.context.Ignores[0] {
-			ignores[k] = v
+func (gosec *Analyzer) updateIgnoredRulesForNode(n ast.Node) {
+	ignoredRules := gosec.ignore(n)
+	if len(ignoredRules) > 0 {
+		if gosec.context.Ignores == nil {
+			gosec.context.Ignores = newIgnores()
 		}
+		line := issue.GetLine(gosec.context.FileSet.File(n.Pos()), n)
+		gosec.context.Ignores.add(
+			gosec.context.FileSet.File(n.Pos()).Name(),
+			line,
+			ignoredRules,
+		)
 	}
-
-	for ruleID, suppression := range ignoredRules {
-		ignores[ruleID] = append(ignores[ruleID], suppression)
-	}
-
-	// Push the new set onto the stack.
-	gosec.context.Ignores = append([]map[string][]issue.SuppressionInfo{ignores}, gosec.context.Ignores...)
-
-	return ignores, true
 }
 
-func (gosec *Analyzer) updateSuppressions(id string, ignores map[string][]issue.SuppressionInfo) ([]issue.SuppressionInfo, bool) {
-	// Check if all rules are ignored.
-	generalSuppressions, generalIgnored := ignores[aliasOfAllRules]
-	// Check if the specific rule is ignored
-	ruleSuppressions, ruleIgnored := ignores[id]
+func (gosec *Analyzer) getSuppressionsAtLineInFile(file string, line string, id string) ([]issue.SuppressionInfo, bool) {
+	ignoredRules := gosec.context.Ignores.get(file, line)
 
+	// Check if the rule was specifically suppressed at this location.
+	generalSuppressions, generalIgnored := ignoredRules[aliasOfAllRules]
+	ruleSuppressions, ruleIgnored := ignoredRules[id]
 	ignored := generalIgnored || ruleIgnored
 	suppressions := append(generalSuppressions, ruleSuppressions...)
 
-	// Track external suppressions.
+	// Track external suppressions of this rule.
 	if gosec.ruleset.IsRuleSuppressed(id) {
 		ignored = true
 		suppressions = append(suppressions, issue.SuppressionInfo{
@@ -581,8 +677,9 @@ func (gosec *Analyzer) updateSuppressions(id string, ignores map[string][]issue.
 	return suppressions, ignored
 }
 
-func (gosec *Analyzer) updateIssues(issue *issue.Issue, ignored bool, suppressions []issue.SuppressionInfo) {
+func (gosec *Analyzer) updateIssues(issue *issue.Issue) {
 	if issue != nil {
+		suppressions, ignored := gosec.getSuppressionsAtLineInFile(issue.File, issue.Line, issue.RuleID)
 		if gosec.showIgnored {
 			issue.NoSec = ignored
 		}
diff --git a/vendor/github.com/securego/gosec/v2/analyzers/slice_bounds.go b/vendor/github.com/securego/gosec/v2/analyzers/slice_bounds.go
new file mode 100644
index 0000000000000000000000000000000000000000..08a55eb4290ccbb2e73b3e20ca334f58eb0d3ee3
--- /dev/null
+++ b/vendor/github.com/securego/gosec/v2/analyzers/slice_bounds.go
@@ -0,0 +1,386 @@
+// (c) Copyright gosec's authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analyzers
+
+import (
+	"errors"
+	"fmt"
+	"go/token"
+	"regexp"
+	"strconv"
+	"strings"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/buildssa"
+	"golang.org/x/tools/go/ssa"
+
+	"github.com/securego/gosec/v2/issue"
+)
+
+type bound int
+
+const (
+	lowerUnbounded bound = iota
+	upperUnbounded
+	unbounded
+	upperBounded
+)
+
+const maxDepth = 20
+
+func newSliceBoundsAnalyzer(id string, description string) *analysis.Analyzer {
+	return &analysis.Analyzer{
+		Name:     id,
+		Doc:      description,
+		Run:      runSliceBounds,
+		Requires: []*analysis.Analyzer{buildssa.Analyzer},
+	}
+}
+
+func runSliceBounds(pass *analysis.Pass) (interface{}, error) {
+	ssaResult, err := getSSAResult(pass)
+	if err != nil {
+		return nil, err
+	}
+
+	issues := map[ssa.Instruction]*issue.Issue{}
+	ifs := map[ssa.If]*ssa.BinOp{}
+	for _, mcall := range ssaResult.SSA.SrcFuncs {
+		for _, block := range mcall.DomPreorder() {
+			for _, instr := range block.Instrs {
+				switch instr := instr.(type) {
+				case *ssa.Alloc:
+					sliceCap, err := extractSliceCapFromAlloc(instr.String())
+					if err != nil {
+						break
+					}
+					allocRefs := instr.Referrers()
+					if allocRefs == nil {
+						break
+					}
+					for _, instr := range *allocRefs {
+						if slice, ok := instr.(*ssa.Slice); ok {
+							if _, ok := slice.X.(*ssa.Alloc); ok {
+								if slice.Parent() != nil {
+									l, h := extractSliceBounds(slice)
+									newCap := computeSliceNewCap(l, h, sliceCap)
+									violations := []ssa.Instruction{}
+									trackSliceBounds(0, newCap, slice, &violations, ifs)
+									for _, s := range violations {
+										switch s := s.(type) {
+										case *ssa.Slice:
+											issue := newIssue(
+												pass.Analyzer.Name,
+												"slice bounds out of range",
+												pass.Fset,
+												s.Pos(),
+												issue.Low,
+												issue.High)
+											issues[s] = issue
+										case *ssa.IndexAddr:
+											issue := newIssue(
+												pass.Analyzer.Name,
+												"slice index out of range",
+												pass.Fset,
+												s.Pos(),
+												issue.Low,
+												issue.High)
+											issues[s] = issue
+										}
+									}
+								}
+							}
+						}
+					}
+				}
+			}
+		}
+	}
+
+	for ifref, binop := range ifs {
+		bound, value, err := extractBinOpBound(binop)
+		if err != nil {
+			continue
+		}
+		for i, block := range ifref.Block().Succs {
+			if i == 1 {
+				bound = invBound(bound)
+			}
+			for _, instr := range block.Instrs {
+				if _, ok := issues[instr]; ok {
+					switch bound {
+					case lowerUnbounded:
+						break
+					case upperUnbounded, unbounded:
+						delete(issues, instr)
+					case upperBounded:
+						switch tinstr := instr.(type) {
+						case *ssa.Slice:
+							lower, upper := extractSliceBounds(tinstr)
+							if isSliceInsideBounds(0, value, lower, upper) {
+								delete(issues, instr)
+							}
+						case *ssa.IndexAddr:
+							indexValue, err := extractIntValue(tinstr.Index.String())
+							if err != nil {
+								break
+							}
+							if isSliceIndexInsideBounds(0, value, indexValue) {
+								delete(issues, instr)
+							}
+						}
+					}
+				}
+			}
+		}
+	}
+
+	foundIssues := []*issue.Issue{}
+	for _, issue := range issues {
+		foundIssues = append(foundIssues, issue)
+	}
+	if len(foundIssues) > 0 {
+		return foundIssues, nil
+	}
+	return nil, nil
+}
+
+func trackSliceBounds(depth int, sliceCap int, slice ssa.Node, violations *[]ssa.Instruction, ifs map[ssa.If]*ssa.BinOp) {
+	if depth == maxDepth {
+		return
+	}
+	depth++
+	if violations == nil {
+		violations = &[]ssa.Instruction{}
+	}
+	referrers := slice.Referrers()
+	if referrers != nil {
+		for _, refinstr := range *referrers {
+			switch refinstr := refinstr.(type) {
+			case *ssa.Slice:
+				checkAllSlicesBounds(depth, sliceCap, refinstr, violations, ifs)
+				switch refinstr.X.(type) {
+				case *ssa.Alloc, *ssa.Parameter:
+					l, h := extractSliceBounds(refinstr)
+					newCap := computeSliceNewCap(l, h, sliceCap)
+					trackSliceBounds(depth, newCap, refinstr, violations, ifs)
+				}
+			case *ssa.IndexAddr:
+				indexValue, err := extractIntValue(refinstr.Index.String())
+				if err == nil && !isSliceIndexInsideBounds(0, sliceCap, indexValue) {
+					*violations = append(*violations, refinstr)
+				}
+			case *ssa.Call:
+				if ifref, cond := extractSliceIfLenCondition(refinstr); ifref != nil && cond != nil {
+					ifs[*ifref] = cond
+				} else {
+					parPos := -1
+					for pos, arg := range refinstr.Call.Args {
+						if a, ok := arg.(*ssa.Slice); ok && a == slice {
+							parPos = pos
+						}
+					}
+					if fn, ok := refinstr.Call.Value.(*ssa.Function); ok {
+						if len(fn.Params) > parPos && parPos > -1 {
+							param := fn.Params[parPos]
+							trackSliceBounds(depth, sliceCap, param, violations, ifs)
+						}
+					}
+				}
+			}
+		}
+	}
+}
+
+func checkAllSlicesBounds(depth int, sliceCap int, slice *ssa.Slice, violations *[]ssa.Instruction, ifs map[ssa.If]*ssa.BinOp) {
+	if depth == maxDepth {
+		return
+	}
+	depth++
+	if violations == nil {
+		violations = &[]ssa.Instruction{}
+	}
+	sliceLow, sliceHigh := extractSliceBounds(slice)
+	if !isSliceInsideBounds(0, sliceCap, sliceLow, sliceHigh) {
+		*violations = append(*violations, slice)
+	}
+	switch slice.X.(type) {
+	case *ssa.Alloc, *ssa.Parameter, *ssa.Slice:
+		l, h := extractSliceBounds(slice)
+		newCap := computeSliceNewCap(l, h, sliceCap)
+		trackSliceBounds(depth, newCap, slice, violations, ifs)
+	}
+
+	references := slice.Referrers()
+	if references == nil {
+		return
+	}
+	for _, ref := range *references {
+		switch s := ref.(type) {
+		case *ssa.Slice:
+			checkAllSlicesBounds(depth, sliceCap, s, violations, ifs)
+			switch s.X.(type) {
+			case *ssa.Alloc, *ssa.Parameter:
+				l, h := extractSliceBounds(s)
+				newCap := computeSliceNewCap(l, h, sliceCap)
+				trackSliceBounds(depth, newCap, s, violations, ifs)
+			}
+		}
+	}
+}
+
+func extractSliceIfLenCondition(call *ssa.Call) (*ssa.If, *ssa.BinOp) {
+	if builtInLen, ok := call.Call.Value.(*ssa.Builtin); ok {
+		if builtInLen.Name() == "len" {
+			refs := call.Referrers()
+			if refs != nil {
+				for _, ref := range *refs {
+					if binop, ok := ref.(*ssa.BinOp); ok {
+						binoprefs := binop.Referrers()
+						for _, ref := range *binoprefs {
+							if ifref, ok := ref.(*ssa.If); ok {
+								return ifref, binop
+							}
+						}
+					}
+				}
+			}
+		}
+	}
+	return nil, nil
+}
+
+func computeSliceNewCap(l, h, oldCap int) int {
+	if l == 0 && h == 0 {
+		return oldCap
+	}
+	if l > 0 && h == 0 {
+		return oldCap - l
+	}
+	if l == 0 && h > 0 {
+		return h
+	}
+	return h - l
+}
+
+func invBound(bound bound) bound {
+	switch bound {
+	case lowerUnbounded:
+		return upperUnbounded
+	case upperUnbounded:
+		return lowerUnbounded
+	case upperBounded:
+		return unbounded
+	case unbounded:
+		return upperBounded
+	default:
+		return unbounded
+	}
+}
+
+func extractBinOpBound(binop *ssa.BinOp) (bound, int, error) {
+	if binop.X != nil {
+		if x, ok := binop.X.(*ssa.Const); ok {
+			value, err := strconv.Atoi(x.Value.String())
+			if err != nil {
+				return lowerUnbounded, value, err
+			}
+			switch binop.Op {
+			case token.LSS, token.LEQ:
+				return upperUnbounded, value, nil
+			case token.GTR, token.GEQ:
+				return lowerUnbounded, value, nil
+			case token.EQL:
+				return upperBounded, value, nil
+			case token.NEQ:
+				return unbounded, value, nil
+			}
+		}
+	}
+	if binop.Y != nil {
+		if y, ok := binop.Y.(*ssa.Const); ok {
+			value, err := strconv.Atoi(y.Value.String())
+			if err != nil {
+				return lowerUnbounded, value, err
+			}
+			switch binop.Op {
+			case token.LSS, token.LEQ:
+				return lowerUnbounded, value, nil
+			case token.GTR, token.GEQ:
+				return upperUnbounded, value, nil
+			case token.EQL:
+				return upperBounded, value, nil
+			case token.NEQ:
+				return unbounded, value, nil
+			}
+		}
+	}
+	return lowerUnbounded, 0, fmt.Errorf("unable to extract constant from binop")
+}
+
+func isSliceIndexInsideBounds(l, h int, index int) bool {
+	return (l <= index && index < h)
+}
+
+func isSliceInsideBounds(l, h int, cl, ch int) bool {
+	return (l <= cl && h >= ch) && (l <= ch && h >= cl)
+}
+
+func extractSliceBounds(slice *ssa.Slice) (int, int) {
+	var low int
+	if slice.Low != nil {
+		l, err := extractIntValue(slice.Low.String())
+		if err == nil {
+			low = l
+		}
+	}
+	var high int
+	if slice.High != nil {
+		h, err := extractIntValue(slice.High.String())
+		if err == nil {
+			high = h
+		}
+	}
+	return low, high
+}
+
+func extractIntValue(value string) (int, error) {
+	parts := strings.Split(value, ":")
+	if len(parts) != 2 {
+		return 0, fmt.Errorf("invalid value: %s", value)
+	}
+	if parts[1] != "int" {
+		return 0, fmt.Errorf("invalid value: %s", value)
+	}
+	return strconv.Atoi(parts[0])
+}
+
+func extractSliceCapFromAlloc(instr string) (int, error) {
+	re := regexp.MustCompile(`new \[(\d+)\]*`)
+	var sliceCap int
+	matches := re.FindAllStringSubmatch(instr, -1)
+	if matches == nil {
+		return sliceCap, errors.New("no slice cap found")
+	}
+
+	if len(matches) > 0 {
+		m := matches[0]
+		if len(m) > 1 {
+			return strconv.Atoi(m[1])
+		}
+	}
+
+	return 0, errors.New("no slice cap found")
+}
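
The slice-bounds analyzer added above (registered as G602 further down in analyzers/util.go) tracks the capacity recorded at an `ssa.Alloc` and reports slice or index expressions that exceed it. The following small program is an illustrative example of the kind of pattern it is meant to flag; it is not part of the vendored sources.

```go
package main

import "fmt"

func main() {
	s := make([]byte, 0, 4)

	// Re-slicing beyond the capacity of 4 established at allocation
	// panics at runtime with "slice bounds out of range"; this is the
	// pattern the analyzer aims to report statically from the SSA form.
	t := s[:8]
	fmt.Println(len(t))
}
```

The value of the analyzer is catching such bounds violations at analysis time rather than waiting for the runtime panic.
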
diff --git a/vendor/github.com/securego/gosec/v2/analyzers/ssrf.go b/vendor/github.com/securego/gosec/v2/analyzers/ssrf.go
deleted file mode 100644
index 70e0211f10ec3c666a6acc6ebdc565e73f070d09..0000000000000000000000000000000000000000
--- a/vendor/github.com/securego/gosec/v2/analyzers/ssrf.go
+++ /dev/null
@@ -1,57 +0,0 @@
-// (c) Copyright gosec's authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package analyzers
-
-import (
-	"golang.org/x/tools/go/analysis"
-	"golang.org/x/tools/go/analysis/passes/buildssa"
-	"golang.org/x/tools/go/ssa"
-
-	"github.com/securego/gosec/v2/issue"
-)
-
-func newSSRFAnalyzer(id string, description string) *analysis.Analyzer {
-	return &analysis.Analyzer{
-		Name:     id,
-		Doc:      description,
-		Run:      runSSRF,
-		Requires: []*analysis.Analyzer{buildssa.Analyzer},
-	}
-}
-
-func runSSRF(pass *analysis.Pass) (interface{}, error) {
-	ssaResult, err := getSSAResult(pass)
-	if err != nil {
-		return nil, err
-	}
-	// TODO: implement the analysis
-	for _, fn := range ssaResult.SSA.SrcFuncs {
-		for _, block := range fn.DomPreorder() {
-			for _, instr := range block.Instrs {
-				switch instr := instr.(type) {
-				case *ssa.Call:
-					callee := instr.Call.StaticCallee()
-					if callee != nil {
-						ssaResult.Logger.Printf("callee: %s\n", callee)
-						return newIssue(pass.Analyzer.Name,
-							"not implemented",
-							pass.Fset, instr.Call.Pos(), issue.Low, issue.High), nil
-					}
-				}
-			}
-		}
-	}
-	return nil, nil
-}
diff --git a/vendor/github.com/securego/gosec/v2/analyzers/util.go b/vendor/github.com/securego/gosec/v2/analyzers/util.go
index f1bd867ae56d23037e0a2fce5686bd20fe234962..5941184aa2835d88d7afde40ebbef642a43b7f00 100644
--- a/vendor/github.com/securego/gosec/v2/analyzers/util.go
+++ b/vendor/github.com/securego/gosec/v2/analyzers/util.go
@@ -38,7 +38,7 @@ type SSAAnalyzerResult struct {
 // BuildDefaultAnalyzers returns the default list of analyzers
 func BuildDefaultAnalyzers() []*analysis.Analyzer {
 	return []*analysis.Analyzer{
-		newSSRFAnalyzer("G107", "URL provided to HTTP request as taint input"),
+		newSliceBoundsAnalyzer("G602", "Possible slice bounds out of range"),
 	}
 }
 
diff --git a/vendor/github.com/securego/gosec/v2/cwe/data.go b/vendor/github.com/securego/gosec/v2/cwe/data.go
index ff1ad3c7d8fa94d3e99d81d5a8603f3303a72705..79a6b9d2314d287a7422113b5e367396198fbfe7 100644
--- a/vendor/github.com/securego/gosec/v2/cwe/data.go
+++ b/vendor/github.com/securego/gosec/v2/cwe/data.go
@@ -1,7 +1,5 @@
 package cwe
 
-import "fmt"
-
 const (
 	// Acronym is the acronym of CWE
 	Acronym = "CWE"
@@ -13,139 +11,128 @@ const (
 	Organization = "MITRE"
 	// Description the description of CWE
 	Description = "The MITRE Common Weakness Enumeration"
-)
-
-var (
 	// InformationURI link to the published CWE PDF
-	InformationURI = fmt.Sprintf("https://cwe.mitre.org/data/published/cwe_v%s.pdf/", Version)
+	InformationURI = "https://cwe.mitre.org/data/published/cwe_v" + Version + ".pdf/"
 	// DownloadURI link to the zipped XML of the CWE list
-	DownloadURI = fmt.Sprintf("https://cwe.mitre.org/data/xml/cwec_v%s.xml.zip", Version)
-
-	data = map[string]*Weakness{}
-
-	weaknesses = []*Weakness{
-		{
-			ID:          "118",
-			Description: "The software does not restrict or incorrectly restricts operations within the boundaries of a resource that is accessed using an index or pointer, such as memory or files.",
-			Name:        "Incorrect Access of Indexable Resource ('Range Error')",
-		},
-		{
-			ID:          "190",
-			Description: "The software performs a calculation that can produce an integer overflow or wraparound, when the logic assumes that the resulting value will always be larger than the original value. This can introduce other weaknesses when the calculation is used for resource management or execution control.",
-			Name:        "Integer Overflow or Wraparound",
-		},
-		{
-			ID:          "200",
-			Description: "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information.",
-			Name:        "Exposure of Sensitive Information to an Unauthorized Actor",
-		},
-		{
-			ID:          "22",
-			Description: "The software uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the software does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory.",
-			Name:        "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')",
-		},
-		{
-			ID:          "242",
-			Description: "The program calls a function that can never be guaranteed to work safely.",
-			Name:        "Use of Inherently Dangerous Function",
-		},
-		{
-			ID:          "276",
-			Description: "During installation, installed file permissions are set to allow anyone to modify those files.",
-			Name:        "Incorrect Default Permissions",
-		},
-		{
-			ID:          "295",
-			Description: "The software does not validate, or incorrectly validates, a certificate.",
-			Name:        "Improper Certificate Validation",
-		},
-		{
-			ID:          "310",
-			Description: "Weaknesses in this category are related to the design and implementation of data confidentiality and integrity. Frequently these deal with the use of encoding techniques, encryption libraries, and hashing algorithms. The weaknesses in this category could lead to a degradation of the quality data if they are not addressed.",
-			Name:        "Cryptographic Issues",
-		},
-		{
-			ID:          "322",
-			Description: "The software performs a key exchange with an actor without verifying the identity of that actor.",
-			Name:        "Key Exchange without Entity Authentication",
-		},
-		{
-			ID:          "326",
-			Description: "The software stores or transmits sensitive data using an encryption scheme that is theoretically sound, but is not strong enough for the level of protection required.",
-			Name:        "Inadequate Encryption Strength",
-		},
-		{
-			ID:          "327",
-			Description: "The use of a broken or risky cryptographic algorithm is an unnecessary risk that may result in the exposure of sensitive information.",
-			Name:        "Use of a Broken or Risky Cryptographic Algorithm",
-		},
-		{
-			ID:          "338",
-			Description: "The product uses a Pseudo-Random Number Generator (PRNG) in a security context, but the PRNG's algorithm is not cryptographically strong.",
-			Name:        "Use of Cryptographically Weak Pseudo-Random Number Generator (PRNG)",
-		},
-		{
-			ID:          "377",
-			Description: "Creating and using insecure temporary files can leave application and system data vulnerable to attack.",
-			Name:        "Insecure Temporary File",
-		},
-		{
-			ID:          "400",
-			Description: "The software does not properly control the allocation and maintenance of a limited resource, thereby enabling an actor to influence the amount of resources consumed, eventually leading to the exhaustion of available resources.",
-			Name:        "Uncontrolled Resource Consumption",
-		},
-		{
-			ID:          "409",
-			Description: "The software does not handle or incorrectly handles a compressed input with a very high compression ratio that produces a large output.",
-			Name:        "Improper Handling of Highly Compressed Data (Data Amplification)",
-		},
-		{
-			ID:          "703",
-			Description: "The software does not properly anticipate or handle exceptional conditions that rarely occur during normal operation of the software.",
-			Name:        "Improper Check or Handling of Exceptional Conditions",
-		},
-		{
-			ID:          "78",
-			Description: "The software constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component.",
-			Name:        "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')",
-		},
-		{
-			ID:          "79",
-			Description: "The software does not neutralize or incorrectly neutralizes user-controllable input before it is placed in output that is used as a web page that is served to other users.",
-			Name:        "Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')",
-		},
-		{
-			ID:          "798",
-			Description: "The software contains hard-coded credentials, such as a password or cryptographic key, which it uses for its own inbound authentication, outbound communication to external components, or encryption of internal data.",
-			Name:        "Use of Hard-coded Credentials",
-		},
-		{
-			ID:          "88",
-			Description: "The software constructs a string for a command to executed by a separate component\nin another control sphere, but it does not properly delimit the\nintended arguments, options, or switches within that command string.",
-			Name:        "Improper Neutralization of Argument Delimiters in a Command ('Argument Injection')",
-		},
-		{
-			ID:          "89",
-			Description: "The software constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component.",
-			Name:        "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')",
-		},
-		{
-			ID:          "676",
-			Description: "The program invokes a potentially dangerous function that could introduce a vulnerability if it is used incorrectly, but the function can also be used safely.",
-			Name:        "Use of Potentially Dangerous Function",
-		},
-	}
+	DownloadURI = "https://cwe.mitre.org/data/xml/cwec_v" + Version + ".xml.zip"
 )
 
-func init() {
-	for _, weakness := range weaknesses {
-		data[weakness.ID] = weakness
-	}
+var idWeaknesses = map[string]*Weakness{
+	"118": {
+		ID:          "118",
+		Description: "The software does not restrict or incorrectly restricts operations within the boundaries of a resource that is accessed using an index or pointer, such as memory or files.",
+		Name:        "Incorrect Access of Indexable Resource ('Range Error')",
+	},
+	"190": {
+		ID:          "190",
+		Description: "The software performs a calculation that can produce an integer overflow or wraparound, when the logic assumes that the resulting value will always be larger than the original value. This can introduce other weaknesses when the calculation is used for resource management or execution control.",
+		Name:        "Integer Overflow or Wraparound",
+	},
+	"200": {
+		ID:          "200",
+		Description: "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information.",
+		Name:        "Exposure of Sensitive Information to an Unauthorized Actor",
+	},
+	"22": {
+		ID:          "22",
+		Description: "The software uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the software does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory.",
+		Name:        "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')",
+	},
+	"242": {
+		ID:          "242",
+		Description: "The program calls a function that can never be guaranteed to work safely.",
+		Name:        "Use of Inherently Dangerous Function",
+	},
+	"276": {
+		ID:          "276",
+		Description: "During installation, installed file permissions are set to allow anyone to modify those files.",
+		Name:        "Incorrect Default Permissions",
+	},
+	"295": {
+		ID:          "295",
+		Description: "The software does not validate, or incorrectly validates, a certificate.",
+		Name:        "Improper Certificate Validation",
+	},
+	"310": {
+		ID:          "310",
+		Description: "Weaknesses in this category are related to the design and implementation of data confidentiality and integrity. Frequently these deal with the use of encoding techniques, encryption libraries, and hashing algorithms. The weaknesses in this category could lead to a degradation of the quality data if they are not addressed.",
+		Name:        "Cryptographic Issues",
+	},
+	"322": {
+		ID:          "322",
+		Description: "The software performs a key exchange with an actor without verifying the identity of that actor.",
+		Name:        "Key Exchange without Entity Authentication",
+	},
+	"326": {
+		ID:          "326",
+		Description: "The software stores or transmits sensitive data using an encryption scheme that is theoretically sound, but is not strong enough for the level of protection required.",
+		Name:        "Inadequate Encryption Strength",
+	},
+	"327": {
+		ID:          "327",
+		Description: "The use of a broken or risky cryptographic algorithm is an unnecessary risk that may result in the exposure of sensitive information.",
+		Name:        "Use of a Broken or Risky Cryptographic Algorithm",
+	},
+	"338": {
+		ID:          "338",
+		Description: "The product uses a Pseudo-Random Number Generator (PRNG) in a security context, but the PRNG's algorithm is not cryptographically strong.",
+		Name:        "Use of Cryptographically Weak Pseudo-Random Number Generator (PRNG)",
+	},
+	"377": {
+		ID:          "377",
+		Description: "Creating and using insecure temporary files can leave application and system data vulnerable to attack.",
+		Name:        "Insecure Temporary File",
+	},
+	"400": {
+		ID:          "400",
+		Description: "The software does not properly control the allocation and maintenance of a limited resource, thereby enabling an actor to influence the amount of resources consumed, eventually leading to the exhaustion of available resources.",
+		Name:        "Uncontrolled Resource Consumption",
+	},
+	"409": {
+		ID:          "409",
+		Description: "The software does not handle or incorrectly handles a compressed input with a very high compression ratio that produces a large output.",
+		Name:        "Improper Handling of Highly Compressed Data (Data Amplification)",
+	},
+	"703": {
+		ID:          "703",
+		Description: "The software does not properly anticipate or handle exceptional conditions that rarely occur during normal operation of the software.",
+		Name:        "Improper Check or Handling of Exceptional Conditions",
+	},
+	"78": {
+		ID:          "78",
+		Description: "The software constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component.",
+		Name:        "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')",
+	},
+	"79": {
+		ID:          "79",
+		Description: "The software does not neutralize or incorrectly neutralizes user-controllable input before it is placed in output that is used as a web page that is served to other users.",
+		Name:        "Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')",
+	},
+	"798": {
+		ID:          "798",
+		Description: "The software contains hard-coded credentials, such as a password or cryptographic key, which it uses for its own inbound authentication, outbound communication to external components, or encryption of internal data.",
+		Name:        "Use of Hard-coded Credentials",
+	},
+	"88": {
+		ID:          "88",
+		Description: "The software constructs a string for a command to executed by a separate component\nin another control sphere, but it does not properly delimit the\nintended arguments, options, or switches within that command string.",
+		Name:        "Improper Neutralization of Argument Delimiters in a Command ('Argument Injection')",
+	},
+	"89": {
+		ID:          "89",
+		Description: "The software constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component.",
+		Name:        "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')",
+	},
+	"676": {
+		ID:          "676",
+		Description: "The program invokes a potentially dangerous function that could introduce a vulnerability if it is used incorrectly, but the function can also be used safely.",
+		Name:        "Use of Potentially Dangerous Function",
+	},
 }
 
 // Get retrieves a CWE weakness by its ID
 func Get(id string) *Weakness {
-	weakness, ok := data[id]
+	weakness, ok := idWeaknesses[id]
 	if ok && weakness != nil {
 		return weakness
 	}
diff --git a/vendor/github.com/securego/gosec/v2/helpers.go b/vendor/github.com/securego/gosec/v2/helpers.go
index b4c23e5bba4d191954c3b9822bb302408d8f342e..2e28b318714a27c58685258db247f13d587b5793 100644
--- a/vendor/github.com/securego/gosec/v2/helpers.go
+++ b/vendor/github.com/securego/gosec/v2/helpers.go
@@ -15,12 +15,15 @@
 package gosec
 
 import (
+	"bytes"
+	"encoding/json"
 	"errors"
 	"fmt"
 	"go/ast"
 	"go/token"
 	"go/types"
 	"os"
+	"os/exec"
 	"os/user"
 	"path/filepath"
 	"regexp"
@@ -100,7 +103,7 @@ func GetChar(n ast.Node) (byte, error) {
 // Unlike the other getters, it does _not_ raise an error for unknown ast.Node types. At the base, the recursion will hit a non-BinaryExpr type,
 // either BasicLit or other, so it's not an error case. It will only error if `strconv.Unquote` errors. This matters, because there's
 // currently functionality that relies on error values being returned by GetString if and when it hits a non-basiclit string node type,
-// hence for cases where recursion is needed, we use this separate function, so that we can still be backwards compatbile.
+// hence for cases where recursion is needed, we use this separate function, so that we can still be backwards compatible.
 //
 // This was added to handle a SQL injection concatenation case where the injected value is infixed between two strings, not at the start or end. See example below
 //
@@ -183,7 +186,7 @@ func GetCallInfo(n ast.Node, ctx *Context) (string, string, error) {
 			case *ast.CallExpr:
 				switch call := expr.Fun.(type) {
 				case *ast.Ident:
-					if call.Name == "new" {
+					if call.Name == "new" && len(expr.Args) > 0 {
 						t := ctx.Info.TypeOf(expr.Args[0])
 						if t != nil {
 							return t.String(), fn.Sel.Name, nil
@@ -260,7 +263,7 @@ func getIdentStringValues(ident *ast.Ident, stringFinder func(ast.Node) (string,
 	return values
 }
 
-// getIdentStringRecursive returns the string of values of an Ident if they can be resolved
+// GetIdentStringValuesRecursive returns the string values of an Ident if they can be resolved
 // The difference between this and GetIdentStringValues is that it will attempt to resolve the strings recursively,
 // if it is passed a *ast.BinaryExpr. See GetStringRecursive for details
 func GetIdentStringValuesRecursive(ident *ast.Ident) []string {
@@ -493,19 +496,45 @@ func RootPath(root string) (string, error) {
 	return filepath.Abs(root)
 }
 
-// GoVersion returns parsed version of Go from runtime
+// GoVersion returns the parsed Go version from the module's go.mod, falling back to the runtime version if it cannot be determined.
 func GoVersion() (int, int, int) {
-	return parseGoVersion(runtime.Version())
+	goVersion, err := goModVersion()
+	if err != nil {
+		return parseGoVersion(strings.TrimPrefix(runtime.Version(), "go"))
+	}
+
+	return parseGoVersion(goVersion)
+}
+
+type goListOutput struct {
+	GoVersion string `json:"GoVersion"`
+}
+
+func goModVersion() (string, error) {
+	cmd := exec.Command("go", "list", "-m", "-json")
+
+	raw, err := cmd.CombinedOutput()
+	if err != nil {
+		return "", fmt.Errorf("command go list: %w: %s", err, string(raw))
+	}
+
+	var v goListOutput
+	err = json.NewDecoder(bytes.NewBuffer(raw)).Decode(&v)
+	if err != nil {
+		return "", fmt.Errorf("unmarshaling error: %w: %s", err, string(raw))
+	}
+
+	return v.GoVersion, nil
 }
 
 // parseGoVersion parses Go version.
 // example:
-// - go1.19rc2
-// - go1.19beta2
-// - go1.19.4
-// - go1.19
+// - 1.19rc2
+// - 1.19beta2
+// - 1.19.4
+// - 1.19
 func parseGoVersion(version string) (int, int, int) {
-	exp := regexp.MustCompile(`go(\d+).(\d+)(?:.(\d+))?.*`)
+	exp := regexp.MustCompile(`(\d+).(\d+)(?:.(\d+))?.*`)
 	parts := exp.FindStringSubmatch(version)
 	if len(parts) <= 1 {
 		return 0, 0, 0
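
A minimal sketch of how the reworked pattern behaves now that callers strip the "go" prefix; the strconv conversion of the captured groups is an assumption about the unchanged remainder of parseGoVersion, which falls outside this hunk.

```go
package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// parse mirrors the updated parseGoVersion: the "go" prefix is stripped by the
// callers, so the pattern matches bare versions such as "1.22.3" or "1.19rc2".
func parse(version string) (int, int, int) {
	exp := regexp.MustCompile(`(\d+).(\d+)(?:.(\d+))?.*`)
	parts := exp.FindStringSubmatch(version)
	if len(parts) <= 1 {
		return 0, 0, 0
	}
	major, _ := strconv.Atoi(parts[1])
	minor, _ := strconv.Atoi(parts[2])
	patch := 0
	if parts[3] != "" {
		patch, _ = strconv.Atoi(parts[3])
	}
	return major, minor, patch
}

func main() {
	fmt.Println(parse("1.22.3")) // 1 22 3
	fmt.Println(parse("1.19rc2")) // 1 19 0
}
```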
diff --git a/vendor/github.com/securego/gosec/v2/import_tracker.go b/vendor/github.com/securego/gosec/v2/import_tracker.go
index 7984e99f428ad39525e3441a0a0acd0bb94e63e5..0d9ebfe16a44890e7397d579a4dde0ca56b93816 100644
--- a/vendor/github.com/securego/gosec/v2/import_tracker.go
+++ b/vendor/github.com/securego/gosec/v2/import_tracker.go
@@ -15,9 +15,12 @@ package gosec
 import (
 	"go/ast"
 	"go/types"
+	"regexp"
 	"strings"
 )
 
+var versioningPackagePattern = regexp.MustCompile(`v[0-9]+$`)
+
 // ImportTracker is used to normalize the packages that have been imported
 // by a source file. It is able to differentiate between plain imports, aliased
 // imports and init only imports.
@@ -66,5 +69,10 @@ func importName(importPath string) string {
 	if len(parts) > 0 {
 		name = parts[len(parts)-1]
 	}
+	// If the last segment of the path is version information, consider the second to last segment as the package name.
+	// (e.g., `math/rand/v2` would be `rand`)
+	if len(parts) > 1 && versioningPackagePattern.MatchString(name) {
+		name = parts[len(parts)-2]
+	}
 	return name
 }
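
A small standalone sketch of the normalization the new versioningPackagePattern enables; the helper below mirrors the importName logic shown above, and the sample import paths are only illustrative.

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

var versionSuffix = regexp.MustCompile(`v[0-9]+$`)

// normalize mirrors importName from the hunk above: take the last path
// segment, but fall back to the second-to-last segment when the last one is
// only a major-version marker such as "v2".
func normalize(importPath string) string {
	parts := strings.Split(importPath, "/")
	name := importPath
	if len(parts) > 0 {
		name = parts[len(parts)-1]
	}
	if len(parts) > 1 && versionSuffix.MatchString(name) {
		name = parts[len(parts)-2]
	}
	return name
}

func main() {
	fmt.Println(normalize("math/rand/v2")) // rand
	fmt.Println(normalize("math/rand"))    // rand
	fmt.Println(normalize("go/ast"))       // ast
}
```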
diff --git a/vendor/github.com/securego/gosec/v2/issue/issue.go b/vendor/github.com/securego/gosec/v2/issue/issue.go
index db4d630fab452cbb6233608451b9b6167eda09ff..1000b20423abc81f83ac3cad27bc68e01993aaf7 100644
--- a/vendor/github.com/securego/gosec/v2/issue/issue.go
+++ b/vendor/github.com/securego/gosec/v2/issue/issue.go
@@ -178,11 +178,7 @@ func codeSnippetEndLine(node ast.Node, fobj *token.File) int64 {
 // New creates a new Issue
 func New(fobj *token.File, node ast.Node, ruleID, desc string, severity, confidence Score) *Issue {
 	name := fobj.Name()
-	start, end := fobj.Line(node.Pos()), fobj.Line(node.End())
-	line := strconv.Itoa(start)
-	if start != end {
-		line = fmt.Sprintf("%d-%d", start, end)
-	}
+	line := GetLine(fobj, node)
 	col := strconv.Itoa(fobj.Position(node.Pos()).Column)
 
 	var code string
@@ -217,3 +213,13 @@ func (i *Issue) WithSuppressions(suppressions []SuppressionInfo) *Issue {
 	i.Suppressions = suppressions
 	return i
 }
+
+// GetLine returns the line number of a given ast.Node
+func GetLine(fobj *token.File, node ast.Node) string {
+	start, end := fobj.Line(node.Pos()), fobj.Line(node.End())
+	line := strconv.Itoa(start)
+	if start != end {
+		line = fmt.Sprintf("%d-%d", start, end)
+	}
+	return line
+}
diff --git a/vendor/github.com/securego/gosec/v2/rules/fileperms.go b/vendor/github.com/securego/gosec/v2/rules/fileperms.go
index 0376b6a03c606ae945ccd24ba9c3de3f10e068a8..eb1fa2eee9c0b5af052665bc2d2743aec8d347e9 100644
--- a/vendor/github.com/securego/gosec/v2/rules/fileperms.go
+++ b/vendor/github.com/securego/gosec/v2/rules/fileperms.go
@@ -30,6 +30,7 @@ type filePermissions struct {
 	calls []string
 }
 
+// ID returns the ID of the rule.
 func (r *filePermissions) ID() string {
 	return r.MetaData.ID
 }
@@ -55,11 +56,12 @@ func modeIsSubset(subset int64, superset int64) bool {
 	return (subset | superset) == superset
 }
 
+// Match checks if the rule is matched.
 func (r *filePermissions) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) {
 	for _, pkg := range r.pkgs {
 		if callexpr, matched := gosec.MatchCallByPackage(n, c, pkg, r.calls...); matched {
 			modeArg := callexpr.Args[len(callexpr.Args)-1]
-			if mode, err := gosec.GetInt(modeArg); err == nil && !modeIsSubset(mode, r.mode) {
+			if mode, err := gosec.GetInt(modeArg); err == nil && !modeIsSubset(mode, r.mode) || isOsPerm(modeArg) {
 				return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil
 			}
 		}
@@ -67,6 +69,18 @@ func (r *filePermissions) Match(n ast.Node, c *gosec.Context) (*issue.Issue, err
 	return nil, nil
 }
 
+// isOsPerm checks if the provided ast node contains an os.ModePerm symbol
+func isOsPerm(n ast.Node) bool {
+	if node, ok := n.(*ast.SelectorExpr); ok {
+		if identX, ok := node.X.(*ast.Ident); ok {
+			if identX.Name == "os" && node.Sel != nil && node.Sel.Name == "ModePerm" {
+				return true
+			}
+		}
+	}
+	return false
+}
+
 // NewWritePerms creates a rule to detect file Writes with bad permissions.
 func NewWritePerms(id string, conf gosec.Config) (gosec.Rule, []ast.Node) {
 	mode := getConfiguredMode(conf, id, 0o600)
@@ -116,3 +130,47 @@ func NewMkdirPerms(id string, conf gosec.Config) (gosec.Rule, []ast.Node) {
 		},
 	}, []ast.Node{(*ast.CallExpr)(nil)}
 }
+
+type osCreatePermissions struct {
+	issue.MetaData
+	mode  int64
+	pkgs  []string
+	calls []string
+}
+
+const defaultOsCreateMode = 0o666
+
+// ID returns the ID of the rule.
+func (r *osCreatePermissions) ID() string {
+	return r.MetaData.ID
+}
+
+// Match checks if the rule is matched.
+func (r *osCreatePermissions) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) {
+	for _, pkg := range r.pkgs {
+		if _, matched := gosec.MatchCallByPackage(n, c, pkg, r.calls...); matched {
+			if !modeIsSubset(defaultOsCreateMode, r.mode) {
+				return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil
+			}
+		}
+	}
+	return nil, nil
+}
+
+// NewOsCreatePerms creates a rule to detect file creation with a permission
+// mask that is more permissive than configured.
+func NewOsCreatePerms(id string, conf gosec.Config) (gosec.Rule, []ast.Node) {
+	mode := getConfiguredMode(conf, id, 0o666)
+	return &osCreatePermissions{
+		mode:  mode,
+		pkgs:  []string{"os"},
+		calls: []string{"Create"},
+		MetaData: issue.MetaData{
+			ID:         id,
+			Severity:   issue.Medium,
+			Confidence: issue.High,
+			What: fmt.Sprintf("Expect file permissions to be %#o or less but os.Create used with default permissions %#o",
+				mode, defaultOsCreateMode),
+		},
+	}, []ast.Node{(*ast.CallExpr)(nil)}
+}
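
For context, a hedged sketch of the call the new osCreatePermissions rule targets; with the shipped default limit of 0o666 nothing is reported, and the finding only appears once a stricter mode (for example 0o600) is configured for G307.

```go
package main

import (
	"log"
	"os"
)

func main() {
	// os.Create always requests mode 0o666 (before umask). With the default
	// configured limit of 0o666 this passes; tightening the limit to 0o600
	// makes the rule above report every call like this one.
	f, err := os.Create("example.txt")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
}
```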
diff --git a/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go b/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go
index ea83860845f3c3b71af5f8448b04ea662817c54d..c10d18b3071220e88106224f7e1fbfe9eb8c3ee3 100644
--- a/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go
+++ b/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go
@@ -15,6 +15,7 @@
 package rules
 
 import (
+	"fmt"
 	"go/ast"
 	"go/token"
 	"regexp"
@@ -26,10 +27,165 @@ import (
 	"github.com/securego/gosec/v2/issue"
 )
 
+type secretPattern struct {
+	name   string
+	regexp *regexp.Regexp
+}
+
+var secretsPatterns = [...]secretPattern{
+	{
+		name:   "RSA private key",
+		regexp: regexp.MustCompile(`-----BEGIN RSA PRIVATE KEY-----`),
+	},
+	{
+		name:   "SSH (DSA) private key",
+		regexp: regexp.MustCompile(`-----BEGIN DSA PRIVATE KEY-----`),
+	},
+	{
+		name:   "SSH (EC) private key",
+		regexp: regexp.MustCompile(`-----BEGIN EC PRIVATE KEY-----`),
+	},
+	{
+		name:   "PGP private key block",
+		regexp: regexp.MustCompile(`-----BEGIN PGP PRIVATE KEY BLOCK-----`),
+	},
+	{
+		name:   "Slack Token",
+		regexp: regexp.MustCompile(`xox[pborsa]-[0-9]{12}-[0-9]{12}-[0-9]{12}-[a-z0-9]{32}`),
+	},
+	{
+		name:   "AWS API Key",
+		regexp: regexp.MustCompile(`AKIA[0-9A-Z]{16}`),
+	},
+	{
+		name:   "Amazon MWS Auth Token",
+		regexp: regexp.MustCompile(`amzn\.mws\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}`),
+	},
+	{
+		name:   "AWS AppSync GraphQL Key",
+		regexp: regexp.MustCompile(`da2-[a-z0-9]{26}`),
+	},
+	{
+		name:   "GitHub personal access token",
+		regexp: regexp.MustCompile(`ghp_[a-zA-Z0-9]{36}`),
+	},
+	{
+		name:   "GitHub fine-grained access token",
+		regexp: regexp.MustCompile(`github_pat_[a-zA-Z0-9]{22}_[a-zA-Z0-9]{59}`),
+	},
+	{
+		name:   "GitHub action temporary token",
+		regexp: regexp.MustCompile(`ghs_[a-zA-Z0-9]{36}`),
+	},
+	{
+		name:   "Google API Key",
+		regexp: regexp.MustCompile(`AIza[0-9A-Za-z\-_]{35}`),
+	},
+	{
+		name:   "Google Cloud Platform API Key",
+		regexp: regexp.MustCompile(`AIza[0-9A-Za-z\-_]{35}`),
+	},
+	{
+		name:   "Google Cloud Platform OAuth",
+		regexp: regexp.MustCompile(`[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com`),
+	},
+	{
+		name:   "Google Drive API Key",
+		regexp: regexp.MustCompile(`AIza[0-9A-Za-z\-_]{35}`),
+	},
+	{
+		name:   "Google Drive OAuth",
+		regexp: regexp.MustCompile(`[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com`),
+	},
+	{
+		name:   "Google (GCP) Service-account",
+		regexp: regexp.MustCompile(`"type": "service_account"`),
+	},
+	{
+		name:   "Google Gmail API Key",
+		regexp: regexp.MustCompile(`AIza[0-9A-Za-z\-_]{35}`),
+	},
+	{
+		name:   "Google Gmail OAuth",
+		regexp: regexp.MustCompile(`[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com`),
+	},
+	{
+		name:   "Google OAuth Access Token",
+		regexp: regexp.MustCompile(`ya29\.[0-9A-Za-z\-_]+`),
+	},
+	{
+		name:   "Google YouTube API Key",
+		regexp: regexp.MustCompile(`AIza[0-9A-Za-z\-_]{35}`),
+	},
+	{
+		name:   "Google YouTube OAuth",
+		regexp: regexp.MustCompile(`[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com`),
+	},
+	{
+		name:   "Generic API Key",
+		regexp: regexp.MustCompile(`[aA][pP][iI]_?[kK][eE][yY].*[''|"][0-9a-zA-Z]{32,45}[''|"]`),
+	},
+	{
+		name:   "Generic Secret",
+		regexp: regexp.MustCompile(`[sS][eE][cC][rR][eE][tT].*[''|"][0-9a-zA-Z]{32,45}[''|"]`),
+	},
+	{
+		name:   "Heroku API Key",
+		regexp: regexp.MustCompile(`[hH][eE][rR][oO][kK][uU].*[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}`),
+	},
+	{
+		name:   "MailChimp API Key",
+		regexp: regexp.MustCompile(`[0-9a-f]{32}-us[0-9]{1,2}`),
+	},
+	{
+		name:   "Mailgun API Key",
+		regexp: regexp.MustCompile(`key-[0-9a-zA-Z]{32}`),
+	},
+	{
+		name:   "Password in URL",
+		regexp: regexp.MustCompile(`[a-zA-Z]{3,10}://[^/\\s:@]{3,20}:[^/\\s:@]{3,20}@.{1,100}["'\\s]`),
+	},
+	{
+		name:   "Slack Webhook",
+		regexp: regexp.MustCompile(`https://hooks\.slack\.com/services/T[a-zA-Z0-9_]{8}/B[a-zA-Z0-9_]{8}/[a-zA-Z0-9_]{24}`),
+	},
+	{
+		name:   "Stripe API Key",
+		regexp: regexp.MustCompile(`sk_live_[0-9a-zA-Z]{24}`),
+	},
+	{
+		name:   "Stripe Restricted API Key",
+		regexp: regexp.MustCompile(`rk_live_[0-9a-zA-Z]{24}`),
+	},
+	{
+		name:   "Square Access Token",
+		regexp: regexp.MustCompile(`sq0atp-[0-9A-Za-z\-_]{22}`),
+	},
+	{
+		name:   "Square OAuth Secret",
+		regexp: regexp.MustCompile(`sq0csp-[0-9A-Za-z\-_]{43}`),
+	},
+	{
+		name:   "Telegram Bot API Key",
+		regexp: regexp.MustCompile(`[0-9]+:AA[0-9A-Za-z\-_]{33}`),
+	},
+	{
+		name:   "Twilio API Key",
+		regexp: regexp.MustCompile(`SK[0-9a-fA-F]{32}`),
+	},
+	{
+		name:   "Twitter Access Token",
+		regexp: regexp.MustCompile(`[tT][wW][iI][tT][tT][eE][rR].*[1-9][0-9]+-[0-9a-zA-Z]{40}`),
+	},
+	{
+		name:   "Twitter OAuth",
+		regexp: regexp.MustCompile(`[tT][wW][iI][tT][tT][eE][rR].*[''|"][0-9a-zA-Z]{35,44}[''|"]`),
+	},
+}
+
 type credentials struct {
 	issue.MetaData
 	pattern          *regexp.Regexp
-	patternValue     *regexp.Regexp // Pattern for matching string values (LHS on assign statements)
 	entropyThreshold float64
 	perCharThreshold float64
 	truncate         int
@@ -56,6 +212,15 @@ func (r *credentials) isHighEntropyString(str string) bool {
 			entropyPerChar >= r.perCharThreshold))
 }
 
+func (r *credentials) isSecretPattern(str string) (bool, string) {
+	for _, pattern := range secretsPatterns {
+		if pattern.regexp.MatchString(str) {
+			return true, pattern.name
+		}
+	}
+	return false, ""
+}
+
 func (r *credentials) Match(n ast.Node, ctx *gosec.Context) (*issue.Issue, error) {
 	switch node := n.(type) {
 	case *ast.AssignStmt:
@@ -89,9 +254,9 @@ func (r *credentials) matchAssign(assign *ast.AssignStmt, ctx *gosec.Context) (*
 					continue
 				}
 
-				if r.patternValue.MatchString(val) {
-					if r.ignoreEntropy || r.isHighEntropyString(val) {
-						return ctx.NewIssue(assign, r.ID(), r.What, r.Severity, r.Confidence), nil
+				if r.ignoreEntropy || r.isHighEntropyString(val) {
+					if ok, patternName := r.isSecretPattern(val); ok {
+						return ctx.NewIssue(assign, r.ID(), fmt.Sprintf("%s: %s", r.What, patternName), r.Severity, r.Confidence), nil
 					}
 				}
 			}
@@ -120,9 +285,9 @@ func (r *credentials) matchValueSpec(valueSpec *ast.ValueSpec, ctx *gosec.Contex
 	// Now that no variable names have been matched, match the actual values to find any creds
 	for _, ident := range valueSpec.Values {
 		if val, err := gosec.GetString(ident); err == nil {
-			if r.patternValue.MatchString(val) {
-				if r.ignoreEntropy || r.isHighEntropyString(val) {
-					return ctx.NewIssue(valueSpec, r.ID(), r.What, r.Severity, r.Confidence), nil
+			if r.ignoreEntropy || r.isHighEntropyString(val) {
+				if ok, patternName := r.isSecretPattern(val); ok {
+					return ctx.NewIssue(valueSpec, r.ID(), fmt.Sprintf("%s: %s", r.What, patternName), r.Severity, r.Confidence), nil
 				}
 			}
 		}
@@ -159,9 +324,9 @@ func (r *credentials) matchEqualityCheck(binaryExpr *ast.BinaryExpr, ctx *gosec.
 
 		if ok && identStrConst.Kind == token.STRING {
 			s, _ := gosec.GetString(identStrConst)
-			if r.patternValue.MatchString(s) {
-				if r.ignoreEntropy || r.isHighEntropyString(s) {
-					return ctx.NewIssue(binaryExpr, r.ID(), r.What, r.Severity, r.Confidence), nil
+			if r.ignoreEntropy || r.isHighEntropyString(s) {
+				if ok, patternName := r.isSecretPattern(s); ok {
+					return ctx.NewIssue(binaryExpr, r.ID(), fmt.Sprintf("%s: %s", r.What, patternName), r.Severity, r.Confidence), nil
 				}
 			}
 		}
@@ -173,7 +338,6 @@ func (r *credentials) matchEqualityCheck(binaryExpr *ast.BinaryExpr, ctx *gosec.
 // assigned to variables that appear to be related to credentials.
 func NewHardcodedCredentials(id string, conf gosec.Config) (gosec.Rule, []ast.Node) {
 	pattern := `(?i)passwd|pass|password|pwd|secret|token|pw|apiKey|bearer|cred`
-	patternValue := "(?i)(^(.*[:;,](\\s)*)?[a-f0-9]{64}$)|(AIza[0-9A-Za-z-_]{35})|(^(.*[:;,](\\s)*)?github_pat_[a-zA-Z0-9]{22}_[a-zA-Z0-9]{59}$)|(^(.*[:;,](\\s)*)?[0-9a-zA-Z-_]{24}$)"
 	entropyThreshold := 80.0
 	perCharThreshold := 3.0
 	ignoreEntropy := false
@@ -186,12 +350,6 @@ func NewHardcodedCredentials(id string, conf gosec.Config) (gosec.Rule, []ast.No
 			}
 		}
 
-		if configPatternValue, ok := conf["patternValue"]; ok {
-			if cfgPatternValue, ok := configPatternValue.(string); ok {
-				patternValue = cfgPatternValue
-			}
-		}
-
 		if configIgnoreEntropy, ok := conf["ignore_entropy"]; ok {
 			if cfgIgnoreEntropy, ok := configIgnoreEntropy.(bool); ok {
 				ignoreEntropy = cfgIgnoreEntropy
@@ -222,7 +380,6 @@ func NewHardcodedCredentials(id string, conf gosec.Config) (gosec.Rule, []ast.No
 
 	return &credentials{
 		pattern:          regexp.MustCompile(pattern),
-		patternValue:     regexp.MustCompile(patternValue),
 		entropyThreshold: entropyThreshold,
 		perCharThreshold: perCharThreshold,
 		ignoreEntropy:    ignoreEntropy,
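
A hedged sketch of an assignment the reworked value matching can now report from the secret-pattern table alone; the token characters are made up, and whether the finding fires also depends on the entropy gate (or ignore_entropy).

```go
package main

import "fmt"

func main() {
	// A value shaped like a GitHub personal access token (ghp_ plus 36
	// alphanumerics); the concrete characters here are invented.
	authHeader := "ghp_0123456789abcdefghijklmnopqrstuvwxyz"

	// With the pattern table above, G101 can report this assignment from the
	// value alone, even though the variable name matches none of the
	// credential keywords in the name pattern.
	fmt.Println(len(authHeader))
}
```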
diff --git a/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go b/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go
index 32e2fd205f3997d4eb64a7bd4e530030f75583fd..75de4ed8cf6b1af281b1fa7e728b3593084cdd98 100644
--- a/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go
+++ b/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go
@@ -3,6 +3,7 @@ package rules
 import (
 	"go/ast"
 	"go/token"
+	"go/types"
 
 	"github.com/securego/gosec/v2"
 	"github.com/securego/gosec/v2/issue"
@@ -28,27 +29,30 @@ func containsUnary(exprs []*ast.UnaryExpr, expr *ast.UnaryExpr) bool {
 	return false
 }
 
-func getIdentExpr(expr ast.Expr) *ast.Ident {
+func getIdentExpr(expr ast.Expr) (*ast.Ident, bool) {
+	return doGetIdentExpr(expr, false)
+}
+
+func doGetIdentExpr(expr ast.Expr, hasSelector bool) (*ast.Ident, bool) {
 	switch node := expr.(type) {
 	case *ast.Ident:
-		return node
+		return node, hasSelector
 	case *ast.SelectorExpr:
-		return getIdentExpr(node.X)
+		return doGetIdentExpr(node.X, true)
 	case *ast.UnaryExpr:
-		switch e := node.X.(type) {
-		case *ast.Ident:
-			return e
-		case *ast.SelectorExpr:
-			return getIdentExpr(e.X)
-		default:
-			return nil
-		}
+		return doGetIdentExpr(node.X, hasSelector)
 	default:
-		return nil
+		return nil, false
 	}
 }
 
 func (r *implicitAliasing) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) {
+	// This rule does not apply for Go 1.22, see https://tip.golang.org/doc/go1.22#language.
+	major, minor, _ := gosec.GoVersion()
+	if major >= 1 && minor >= 22 {
+		return nil, nil
+	}
+
 	switch node := n.(type) {
 	case *ast.RangeStmt:
 		// When presented with a range statement, get the underlying Object bound to
@@ -92,9 +96,13 @@ func (r *implicitAliasing) Match(n ast.Node, c *gosec.Context) (*issue.Issue, er
 		}
 
 		// If we find a unary op of & (reference) of an object within r.aliases, complain.
-		if identExpr := getIdentExpr(node); identExpr != nil && node.Op.String() == "&" {
+		if identExpr, hasSelector := getIdentExpr(node); identExpr != nil && node.Op.String() == "&" {
 			if _, contains := r.aliases[identExpr.Obj]; contains {
-				return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil
+				_, isPointer := c.Info.TypeOf(identExpr).(*types.Pointer)
+
+				if !hasSelector || !isPointer {
+					return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil
+				}
 			}
 		}
 	case *ast.ReturnStmt:
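
A short sketch of the loop-variable aliasing G601 still targets on Go versions before 1.22; from Go 1.22 onward each iteration binds a fresh variable, which is why the rule now short-circuits on the detected version.

```go
package main

import "fmt"

func main() {
	values := []int{1, 2, 3}
	var ptrs []*int
	for _, v := range values {
		// Before Go 1.22 every iteration reuses the same v, so all stored
		// pointers alias one variable; G601 flags this &v.
		ptrs = append(ptrs, &v)
	}
	for _, p := range ptrs {
		fmt.Println(*p)
	}
}
```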
diff --git a/vendor/github.com/securego/gosec/v2/rules/rand.go b/vendor/github.com/securego/gosec/v2/rules/rand.go
index 4491fd9284bed739d7e299af226b24f476bff83e..fe34ca9c3a3c36bd4f17895eb5e9c0867c78e2a0 100644
--- a/vendor/github.com/securego/gosec/v2/rules/rand.go
+++ b/vendor/github.com/securego/gosec/v2/rules/rand.go
@@ -23,8 +23,7 @@ import (
 
 type weakRand struct {
 	issue.MetaData
-	funcNames   []string
-	packagePath string
+	blocklist map[string][]string
 }
 
 func (w *weakRand) ID() string {
@@ -32,8 +31,8 @@ func (w *weakRand) ID() string {
 }
 
 func (w *weakRand) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) {
-	for _, funcName := range w.funcNames {
-		if _, matched := gosec.MatchCallByPackage(n, c, w.packagePath, funcName); matched {
+	for pkg, funcs := range w.blocklist {
+		if _, matched := gosec.MatchCallByPackage(n, c, pkg, funcs...); matched {
 			return c.NewIssue(n, w.ID(), w.What, w.Severity, w.Confidence), nil
 		}
 	}
@@ -43,17 +42,22 @@ func (w *weakRand) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) {
 
 // NewWeakRandCheck detects the use of random number generator that isn't cryptographically secure
 func NewWeakRandCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) {
+	calls := make(map[string][]string)
+	calls["math/rand"] = []string{
+		"New", "Read", "Float32", "Float64", "Int", "Int31", "Int31n",
+		"Int63", "Int63n", "Intn", "NormFloat64", "Uint32", "Uint64",
+	}
+	calls["math/rand/v2"] = []string{
+		"New", "Float32", "Float64", "Int", "Int32", "Int32N",
+		"Int64", "Int64N", "IntN", "N", "NormFloat64", "Uint32", "Uint32N", "Uint64", "Uint64N", "UintN",
+	}
 	return &weakRand{
-		funcNames: []string{
-			"New", "Read", "Float32", "Float64", "Int", "Int31",
-			"Int31n", "Int63", "Int63n", "Intn", "NormalFloat64", "Uint32", "Uint64",
-		},
-		packagePath: "math/rand",
+		blocklist: calls,
 		MetaData: issue.MetaData{
 			ID:         id,
 			Severity:   issue.High,
 			Confidence: issue.Medium,
-			What:       "Use of weak random number generator (math/rand instead of crypto/rand)",
+			What:       "Use of weak random number generator (math/rand or math/rand/v2 instead of crypto/rand)",
 		},
 	}, []ast.Node{(*ast.CallExpr)(nil)}
 }
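
A minimal sketch contrasting the newly blocklisted math/rand/v2 calls with the crypto/rand alternative the rule message points to.

```go
package main

import (
	cryptorand "crypto/rand"
	"fmt"
	mathrand "math/rand/v2"
)

func main() {
	// Flagged: math/rand/v2 is now on the weak-RNG blocklist alongside math/rand.
	weak := mathrand.IntN(100)
	fmt.Println("weak:", weak)

	// Not flagged: crypto/rand is a cryptographically secure source.
	buf := make([]byte, 8)
	if _, err := cryptorand.Read(buf); err != nil {
		panic(err)
	}
	fmt.Printf("strong: %x\n", buf)
}
```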
diff --git a/vendor/github.com/securego/gosec/v2/rules/readfile.go b/vendor/github.com/securego/gosec/v2/rules/readfile.go
index 7ef4bbad138f94001c8048e32b5611ab4e0819fb..da6b9c965e1285891c62061a4d9581c2273d973c 100644
--- a/vendor/github.com/securego/gosec/v2/rules/readfile.go
+++ b/vendor/github.com/securego/gosec/v2/rules/readfile.go
@@ -143,6 +143,7 @@ func NewReadFile(id string, _ gosec.Config) (gosec.Rule, []ast.Node) {
 	rule.pathJoin.Add("path", "Join")
 	rule.clean.Add("path/filepath", "Clean")
 	rule.clean.Add("path/filepath", "Rel")
+	rule.clean.Add("path/filepath", "EvalSymlinks")
 	rule.Add("io/ioutil", "ReadFile")
 	rule.Add("os", "ReadFile")
 	rule.Add("os", "Open")
diff --git a/vendor/github.com/securego/gosec/v2/rules/rulelist.go b/vendor/github.com/securego/gosec/v2/rules/rulelist.go
index 316691f6144ba5f71f305f332fb6fa2859183c4e..f9ca4f52c48be2c55551dd2939284a05d73e3a54 100644
--- a/vendor/github.com/securego/gosec/v2/rules/rulelist.go
+++ b/vendor/github.com/securego/gosec/v2/rules/rulelist.go
@@ -91,6 +91,7 @@ func Generate(trackSuppressions bool, filters ...RuleFilter) RuleList {
 		{"G304", "File path provided as taint input", NewReadFile},
 		{"G305", "File path traversal when extracting zip archive", NewArchive},
 		{"G306", "Poor file permissions used when writing to a file", NewWritePerms},
+		{"G307", "Poor file permissions used when creating a file with os.Create", NewOsCreatePerms},
 
 		// crypto
 		{"G401", "Detect the usage of DES, RC4, MD5 or SHA1", NewUsesWeakCryptography},
@@ -107,7 +108,6 @@ func Generate(trackSuppressions bool, filters ...RuleFilter) RuleList {
 
 		// memory safety
 		{"G601", "Implicit memory aliasing in RangeStmt", NewImplicitAliasing},
-		{"G602", "Slice access out of bounds", NewSliceBoundCheck},
 	}
 
 	ruleMap := make(map[string]RuleDefinition)
diff --git a/vendor/github.com/securego/gosec/v2/rules/slice_bounds.go b/vendor/github.com/securego/gosec/v2/rules/slice_bounds.go
deleted file mode 100644
index 04811bb50f24a4855dd70bca94686ed61f9415f4..0000000000000000000000000000000000000000
--- a/vendor/github.com/securego/gosec/v2/rules/slice_bounds.go
+++ /dev/null
@@ -1,405 +0,0 @@
-package rules
-
-import (
-	"fmt"
-	"go/ast"
-	"go/types"
-
-	"github.com/securego/gosec/v2"
-	"github.com/securego/gosec/v2/issue"
-)
-
-// sliceOutOfBounds is a rule which checks for slices which are accessed outside their capacity,
-// either through indexing it out of bounds or through slice expressions whose low or high index
-// are out of bounds.
-type sliceOutOfBounds struct {
-	sliceCaps       map[*ast.CallExpr]map[string]*int64 // Capacities of slices. Maps function call -> var name -> value.
-	currentScope    *types.Scope                        // Current scope. Map is cleared when scope changes.
-	currentFuncName string                              // Current function.
-	funcCallArgs    map[string][]*int64                 // Caps to load once a func declaration is scanned.
-	issue.MetaData                                      // Metadata for this rule.
-}
-
-// ID returns the rule ID for sliceOutOfBounds: G602.
-func (s *sliceOutOfBounds) ID() string {
-	return s.MetaData.ID
-}
-
-func (s *sliceOutOfBounds) Match(node ast.Node, ctx *gosec.Context) (*issue.Issue, error) {
-	if s.currentScope == nil {
-		s.currentScope = ctx.Pkg.Scope()
-	} else if s.currentScope != ctx.Pkg.Scope() {
-		s.currentScope = ctx.Pkg.Scope()
-
-		// Clear slice map, since we are in a new scope
-		sliceMapNil := make(map[string]*int64)
-		sliceCaps := make(map[*ast.CallExpr]map[string]*int64)
-		sliceCaps[nil] = sliceMapNil
-		s.sliceCaps = sliceCaps
-	}
-
-	switch node := node.(type) {
-	case *ast.AssignStmt:
-		return s.matchAssign(node, ctx)
-	case *ast.SliceExpr:
-		return s.matchSliceExpr(node, ctx)
-	case *ast.IndexExpr:
-		return s.matchIndexExpr(node, ctx)
-	case *ast.FuncDecl:
-		s.currentFuncName = node.Name.Name
-		s.loadArgCaps(node)
-	case *ast.CallExpr:
-		if _, ok := node.Fun.(*ast.FuncLit); ok {
-			// Do nothing with func literals for now.
-			break
-		}
-
-		sliceMap := make(map[string]*int64)
-		s.sliceCaps[node] = sliceMap
-		s.setupCallArgCaps(node, ctx)
-	}
-	return nil, nil
-}
-
-// updateSliceCaps takes in a variable name and a map of calls we are updating the variables for to the updated values
-// and will add it to the sliceCaps map.
-func (s *sliceOutOfBounds) updateSliceCaps(varName string, caps map[*ast.CallExpr]*int64) {
-	for callExpr, cap := range caps {
-		s.sliceCaps[callExpr][varName] = cap
-	}
-}
-
-// getAllCalls returns all CallExprs that are calls to the given function.
-func (s *sliceOutOfBounds) getAllCalls(funcName string, ctx *gosec.Context) []*ast.CallExpr {
-	calls := []*ast.CallExpr{}
-
-	for callExpr := range s.sliceCaps {
-		if callExpr != nil {
-			// Compare the names of the function the code is scanning with the current call we are iterating over
-			_, callFuncName, err := gosec.GetCallInfo(callExpr, ctx)
-			if err != nil {
-				continue
-			}
-
-			if callFuncName == funcName {
-				calls = append(calls, callExpr)
-			}
-		}
-	}
-	return calls
-}
-
-// getSliceCapsForFunc gets all the capacities for slice with given name that are stored for each call to the passed function.
-func (s *sliceOutOfBounds) getSliceCapsForFunc(funcName string, varName string, ctx *gosec.Context) map[*ast.CallExpr]*int64 {
-	caps := make(map[*ast.CallExpr]*int64)
-
-	calls := s.getAllCalls(funcName, ctx)
-	for _, call := range calls {
-		if callCaps, ok := s.sliceCaps[call]; ok {
-			caps[call] = callCaps[varName]
-		}
-	}
-
-	return caps
-}
-
-// setupCallArgCaps evaluates and saves the caps for any slices in the args so they can be validated when the function is scanned.
-func (s *sliceOutOfBounds) setupCallArgCaps(callExpr *ast.CallExpr, ctx *gosec.Context) {
-	// Array of caps to be loaded once the function declaration is scanned
-	funcCallArgs := []*int64{}
-
-	// Get function name
-	_, funcName, err := gosec.GetCallInfo(callExpr, ctx)
-	if err != nil {
-		return
-	}
-
-	for _, arg := range callExpr.Args {
-		switch node := arg.(type) {
-		case *ast.SliceExpr:
-			caps := s.evaluateSliceExpr(node, ctx)
-
-			// Simplifying assumption: use the lowest capacity. Storing all possible capacities for slices passed
-			// to a function call would catch the most issues, but would require a data structure like a stack and a
-			// reworking of the code for scanning itself. Use the lowest capacity, as this would be more likely to
-			// raise an issue for being out of bounds.
-			var lowestCap *int64
-			for _, cap := range caps {
-				if cap == nil {
-					continue
-				}
-
-				if lowestCap == nil {
-					lowestCap = cap
-				} else if *lowestCap > *cap {
-					lowestCap = cap
-				}
-			}
-
-			if lowestCap == nil {
-				funcCallArgs = append(funcCallArgs, nil)
-				continue
-			}
-
-			// Now create a map of just this value to add it to the sliceCaps
-			funcCallArgs = append(funcCallArgs, lowestCap)
-		case *ast.Ident:
-			ident := arg.(*ast.Ident)
-			caps := s.getSliceCapsForFunc(s.currentFuncName, ident.Name, ctx)
-
-			var lowestCap *int64
-			for _, cap := range caps {
-				if cap == nil {
-					continue
-				}
-
-				if lowestCap == nil {
-					lowestCap = cap
-				} else if *lowestCap > *cap {
-					lowestCap = cap
-				}
-			}
-
-			if lowestCap == nil {
-				funcCallArgs = append(funcCallArgs, nil)
-				continue
-			}
-
-			// Now create a map of just this value to add it to the sliceCaps
-			funcCallArgs = append(funcCallArgs, lowestCap)
-		default:
-			funcCallArgs = append(funcCallArgs, nil)
-		}
-	}
-	s.funcCallArgs[funcName] = funcCallArgs
-}
-
-// loadArgCaps loads caps that were saved for a call to this function.
-func (s *sliceOutOfBounds) loadArgCaps(funcDecl *ast.FuncDecl) {
-	sliceMap := make(map[string]*int64)
-	funcName := funcDecl.Name.Name
-
-	// Create a dummmy call expr for the new function. This is so we can still store args for
-	// functions which are not explicitly called in the code by other functions (specifically, main).
-	ident := ast.NewIdent(funcName)
-	dummyCallExpr := ast.CallExpr{
-		Fun: ident,
-	}
-
-	argCaps, ok := s.funcCallArgs[funcName]
-	if !ok || len(argCaps) == 0 {
-		s.sliceCaps[&dummyCallExpr] = sliceMap
-		return
-	}
-
-	params := funcDecl.Type.Params.List
-	if len(params) > len(argCaps) {
-		return // Length of params and args doesn't match, so don't do anything with this.
-	}
-
-	for it := range params {
-		capacity := argCaps[it]
-		if capacity == nil {
-			continue
-		}
-
-		if len(params[it].Names) == 0 {
-			continue
-		}
-
-		if paramName := params[it].Names[0]; paramName != nil {
-			sliceMap[paramName.Name] = capacity
-		}
-	}
-
-	s.sliceCaps[&dummyCallExpr] = sliceMap
-}
-
-// matchSliceMake matches calls to make() and stores the capacity of the new slice in the map to compare against future slice usage.
-func (s *sliceOutOfBounds) matchSliceMake(funcCall *ast.CallExpr, sliceName string, ctx *gosec.Context) (*issue.Issue, error) {
-	_, funcName, err := gosec.GetCallInfo(funcCall, ctx)
-	if err != nil || funcName != "make" {
-		return nil, nil
-	}
-
-	var capacityArg int
-	if len(funcCall.Args) < 2 {
-		return nil, nil // No size passed
-	} else if len(funcCall.Args) == 2 {
-		capacityArg = 1
-	} else if len(funcCall.Args) == 3 {
-		capacityArg = 2
-	} else {
-		return nil, nil // Unexpected, args should always be 2 or 3
-	}
-
-	// Check and get the capacity of the slice passed to make. It must be a literal value, since we aren't evaluating the expression.
-	sliceCapLit, ok := funcCall.Args[capacityArg].(*ast.BasicLit)
-	if !ok {
-		return nil, nil
-	}
-
-	capacity, err := gosec.GetInt(sliceCapLit)
-	if err != nil {
-		return nil, nil
-	}
-
-	caps := s.getSliceCapsForFunc(s.currentFuncName, sliceName, ctx)
-	for callExpr := range caps {
-		caps[callExpr] = &capacity
-	}
-
-	s.updateSliceCaps(sliceName, caps)
-	return nil, nil
-}
-
-// evaluateSliceExpr takes a slice expression and evaluates what the capacity of said slice is for each of the
-// calls to the current function. Returns map of the call expressions of each call to the current function to
-// the evaluated capacities.
-func (s *sliceOutOfBounds) evaluateSliceExpr(node *ast.SliceExpr, ctx *gosec.Context) map[*ast.CallExpr]*int64 {
-	// Get ident to get name
-	ident, ok := node.X.(*ast.Ident)
-	if !ok {
-		return nil
-	}
-
-	// Get cap of old slice to calculate this new slice's cap
-	caps := s.getSliceCapsForFunc(s.currentFuncName, ident.Name, ctx)
-	for callExpr, oldCap := range caps {
-		if oldCap == nil {
-			continue
-		}
-
-		// Get and check low value
-		lowIdent, ok := node.Low.(*ast.BasicLit)
-		if ok && lowIdent != nil {
-			low, _ := gosec.GetInt(lowIdent)
-
-			newCap := *oldCap - low
-			caps[callExpr] = &newCap
-		} else if lowIdent == nil { // If no lower bound, capacity will be same
-			continue
-		}
-	}
-
-	return caps
-}
-
-// matchSliceAssignment matches slice assignments, calculates capacity of slice if possible to store it in map.
-func (s *sliceOutOfBounds) matchSliceAssignment(node *ast.SliceExpr, sliceName string, ctx *gosec.Context) (*issue.Issue, error) {
-	// First do the normal match that verifies the slice expr is not out of bounds
-	if i, err := s.matchSliceExpr(node, ctx); err != nil {
-		return i, fmt.Errorf("There was an error while matching a slice expression to check slice bounds for %s: %w", sliceName, err)
-	}
-
-	// Now that the assignment is (presumably) successfully, we can calculate the capacity and add this new slice to the map
-	caps := s.evaluateSliceExpr(node, ctx)
-	s.updateSliceCaps(sliceName, caps)
-
-	return nil, nil
-}
-
-// matchAssign matches checks if an assignment statement is making a slice, or if it is assigning a slice.
-func (s *sliceOutOfBounds) matchAssign(node *ast.AssignStmt, ctx *gosec.Context) (*issue.Issue, error) {
-	// Check RHS for calls to make() so we can get the actual size of the slice
-	for it, i := range node.Rhs {
-		// Get the slice name so we can associate the cap with the slice in the map
-		sliceIdent, ok := node.Lhs[it].(*ast.Ident)
-		if !ok {
-			return nil, nil
-		}
-		sliceName := sliceIdent.Name
-
-		switch expr := i.(type) {
-		case *ast.CallExpr: // Check for and handle call to make()
-			return s.matchSliceMake(expr, sliceName, ctx)
-		case *ast.SliceExpr: // Handle assignments to a slice
-			return s.matchSliceAssignment(expr, sliceName, ctx)
-		}
-	}
-	return nil, nil
-}
-
-// matchSliceExpr validates that a given slice expression (eg, slice[10:30]) is not out of bounds.
-func (s *sliceOutOfBounds) matchSliceExpr(node *ast.SliceExpr, ctx *gosec.Context) (*issue.Issue, error) {
-	// First get the slice name so we can check the size in our map
-	ident, ok := node.X.(*ast.Ident)
-	if !ok {
-		return nil, nil
-	}
-
-	// Get slice cap from the map to compare it against high and low
-	caps := s.getSliceCapsForFunc(s.currentFuncName, ident.Name, ctx)
-
-	for _, cap := range caps {
-		if cap == nil {
-			continue
-		}
-
-		// Get and check high value
-		highIdent, ok := node.High.(*ast.BasicLit)
-		if ok && highIdent != nil {
-			high, _ := gosec.GetInt(highIdent)
-			if high > *cap {
-				return ctx.NewIssue(node, s.ID(), s.What, s.Severity, s.Confidence), nil
-			}
-		}
-
-		// Get and check low value
-		lowIdent, ok := node.Low.(*ast.BasicLit)
-		if ok && lowIdent != nil {
-			low, _ := gosec.GetInt(lowIdent)
-			if low > *cap {
-				return ctx.NewIssue(node, s.ID(), s.What, s.Severity, s.Confidence), nil
-			}
-		}
-	}
-
-	return nil, nil
-}
-
-// matchIndexExpr validates that an index into a slice is not out of bounds.
-func (s *sliceOutOfBounds) matchIndexExpr(node *ast.IndexExpr, ctx *gosec.Context) (*issue.Issue, error) {
-	// First get the slice name so we can check the size in our map
-	ident, ok := node.X.(*ast.Ident)
-	if !ok {
-		return nil, nil
-	}
-
-	// Get slice cap from the map to compare it against high and low
-	caps := s.getSliceCapsForFunc(s.currentFuncName, ident.Name, ctx)
-
-	for _, cap := range caps {
-		if cap == nil {
-			continue
-		}
-		// Get the index literal
-		indexIdent, ok := node.Index.(*ast.BasicLit)
-		if ok && indexIdent != nil {
-			index, _ := gosec.GetInt(indexIdent)
-			if index >= *cap {
-				return ctx.NewIssue(node, s.ID(), s.What, s.Severity, s.Confidence), nil
-			}
-		}
-	}
-
-	return nil, nil
-}
-
-// NewSliceBoundCheck attempts to find any slices being accessed out of bounds
-// by reslicing or by being indexed.
-func NewSliceBoundCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) {
-	sliceMap := make(map[*ast.CallExpr]map[string]*int64)
-
-	return &sliceOutOfBounds{
-		sliceCaps:       sliceMap,
-		currentFuncName: "",
-		funcCallArgs:    make(map[string][]*int64),
-		MetaData: issue.MetaData{
-			ID:         id,
-			Severity:   issue.Medium,
-			Confidence: issue.Medium,
-			What:       "Potentially accessing slice out of bounds",
-		},
-	}, []ast.Node{(*ast.CallExpr)(nil), (*ast.FuncDecl)(nil), (*ast.AssignStmt)(nil), (*ast.SliceExpr)(nil), (*ast.IndexExpr)(nil)}
-}
diff --git a/vendor/github.com/securego/gosec/v2/rules/unsafe.go b/vendor/github.com/securego/gosec/v2/rules/unsafe.go
index e1e8d02310de1b885d29ce4b92acf109680dc53f..2e2adca7c7d49db97c10a0a85eaf05195388118e 100644
--- a/vendor/github.com/securego/gosec/v2/rules/unsafe.go
+++ b/vendor/github.com/securego/gosec/v2/rules/unsafe.go
@@ -43,7 +43,7 @@ func (r *usingUnsafe) Match(n ast.Node, c *gosec.Context) (gi *issue.Issue, err
 func NewUsingUnsafe(id string, _ gosec.Config) (gosec.Rule, []ast.Node) {
 	return &usingUnsafe{
 		pkg:   "unsafe",
-		calls: []string{"Alignof", "Offsetof", "Sizeof", "Pointer"},
+		calls: []string{"Pointer", "String", "StringData", "Slice", "SliceData"},
 		MetaData: issue.MetaData{
 			ID:         id,
 			What:       "Use of unsafe calls should be audited",
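
A small sketch of the calls the updated audit list covers; unsafe.String and unsafe.SliceData are flagged for review, while Alignof, Offsetof and Sizeof drop off the list.

```go
package main

import (
	"fmt"
	"unsafe"
)

func main() {
	b := []byte("audit me")

	// Both conversions below use calls on the updated list above and are
	// reported for auditing.
	s := unsafe.String(unsafe.SliceData(b), len(b))
	fmt.Println(s)
}
```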
diff --git a/vendor/github.com/sivchari/nosnakecase/.golangci.yml b/vendor/github.com/sivchari/nosnakecase/.golangci.yml
deleted file mode 100644
index 31e05c4ee11af03fe7ed1cfd03a9a86ec3937ab9..0000000000000000000000000000000000000000
--- a/vendor/github.com/sivchari/nosnakecase/.golangci.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-run:
-  timeout: 5m
-  skip-files: []
-  go: '1.17'
-
-linters-settings:
-  govet:
-    enable-all: true
-    disable:
-      - fieldalignment
-  gocyclo:
-    min-complexity: 18
-  misspell:
-    locale: US
-  godox:
-    keywords:
-      - FIXME
-  gofumpt:
-    extra-rules: true
-
-linters:
-  disable-all: true
-  enable:
-    - govet
-    - revive
-    - goimports
-    - staticcheck
-    - gosimple
-    - unused
-    - godox
-    - gofumpt
-    - misspell
-    - gocyclo
-
-issues:
-  exclude-use-default: true
-  max-per-linter: 0
-  max-same-issues: 0
-  exclude: []
-
diff --git a/vendor/github.com/sivchari/nosnakecase/README.md b/vendor/github.com/sivchari/nosnakecase/README.md
deleted file mode 100644
index 69bb66046231d92679a4da2a90abddceebd6ffb1..0000000000000000000000000000000000000000
--- a/vendor/github.com/sivchari/nosnakecase/README.md
+++ /dev/null
@@ -1,224 +0,0 @@
-# nosnakecase
-nosnakecase is a linter that detects snake case of variable naming and function name.
-
-## Instruction
-
-```sh
-go install github.com/sivchari/nosnakecase/cmd/nosnakecase@latest
-```
-
-## Usage
-
-```go
-package sandbox
-
-// global variable name with underscore.
-var v_v = 0 // want "v_v is used under score. You should use mixedCap or MixedCap."
-
-// global constant name with underscore.
-const c_c = 0 // want "c_c is used under score. You should use mixedCap or MixedCap."
-
-// struct name with underscore.
-type S_a struct { // want "S_a is used under score. You should use mixedCap or MixedCap."
-	fi int
-}
-
-// non-exported struct field name with underscore.
-type Sa struct {
-	fi_a int // // want "fi_a is used under score. You should use mixedCap or MixedCap."
-}
-
-// function as struct field, with parameter name with underscore.
-type Sb struct {
-	fib func(p_a int) // want "p_a is used under score. You should use mixedCap or MixedCap."
-}
-
-// exported struct field with underscore.
-type Sc struct {
-	Fi_A int // want "Fi_A is used under score. You should use mixedCap or MixedCap."
-}
-
-// function as struct field, with return name with underscore.
-type Sd struct {
-	fib func(p int) (r_a int) // want "r_a is used under score. You should use mixedCap or MixedCap."
-}
-
-// interface name with underscore.
-type I_a interface { // want "I_a is used under score. You should use mixedCap or MixedCap."
-	fn(p int)
-}
-
-// interface with parameter name with underscore.
-type Ia interface {
-	fn(p_a int) // want "p_a is used under score. You should use mixedCap or MixedCap."
-}
-
-// interface with parameter name with underscore.
-type Ib interface {
-	Fn(p_a int) // want "p_a is used under score. You should use mixedCap or MixedCap."
-}
-
-// function as struct field, with return name with underscore.
-type Ic interface {
-	Fn_a() // want "Fn_a is used under score. You should use mixedCap or MixedCap."
-}
-
-// interface with return name with underscore.
-type Id interface {
-	Fn() (r_a int) // want "r_a is used under score. You should use mixedCap or MixedCap."
-}
-
-// function name with underscore.
-func f_a() {} // want "f_a is used under score. You should use mixedCap or MixedCap."
-
-// function's parameter name with underscore.
-func fb(p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap."
-
-// named return with underscore.
-func fc() (r_b int) { // want "r_b is used under score. You should use mixedCap or MixedCap."
-	return 0
-}
-
-// local variable (short declaration) with underscore.
-func fd(p int) int {
-	v_b := p * 2 // want "v_b is used under score. You should use mixedCap or MixedCap."
-
-	return v_b // want "v_b is used under score. You should use mixedCap or MixedCap."
-}
-
-// local constant with underscore.
-func fe(p int) int {
-	const v_b = 2 // want "v_b is used under score. You should use mixedCap or MixedCap."
-
-	return v_b * p // want "v_b is used under score. You should use mixedCap or MixedCap."
-}
-
-// local variable with underscore.
-func ff(p int) int {
-	var v_b = 2 // want "v_b is used under score. You should use mixedCap or MixedCap."
-
-	return v_b * p // want "v_b is used under score. You should use mixedCap or MixedCap."
-}
-
-// inner function, parameter name with underscore.
-func fg() {
-	fgl := func(p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap."
-	fgl(1)
-}
-
-type Foo struct{}
-
-// method name with underscore.
-func (f Foo) f_a() {} // want "f_a is used under score. You should use mixedCap or MixedCap."
-
-// method's parameter name with underscore.
-func (f Foo) fb(p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap."
-
-// named return with underscore.
-func (f Foo) fc() (r_b int) { return 0 } // want "r_b is used under score. You should use mixedCap or MixedCap."
-
-// local variable (short declaration) with underscore.
-func (f Foo) fd(p int) int {
-	v_b := p * 2 // want "v_b is used under score. You should use mixedCap or MixedCap."
-
-	return v_b // want "v_b is used under score. You should use mixedCap or MixedCap."
-}
-
-// local constant with underscore.
-func (f Foo) fe(p int) int {
-	const v_b = 2 // want "v_b is used under score. You should use mixedCap or MixedCap."
-
-	return v_b * p // want "v_b is used under score. You should use mixedCap or MixedCap."
-}
-
-// local variable with underscore.
-func (f Foo) ff(p int) int {
-	var v_b = 2 // want "v_b is used under score. You should use mixedCap or MixedCap."
-
-	return v_b * p // want "v_b is used under score. You should use mixedCap or MixedCap."
-}
-
-func fna(a, p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap."
-
-func fna1(a string, p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap."
-
-func fnb(a, b, p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap."
-
-func fnb1(a, b string, p_a int) {} // want "p_a is used under score. You should use mixedCap or MixedCap."
-
-func fnd(
-	p_a int, // want "p_a is used under score. You should use mixedCap or MixedCap."
-	p_b int, // want "p_b is used under score. You should use mixedCap or MixedCap."
-	p_c int, // want "p_c is used under score. You should use mixedCap or MixedCap."
-) {
-}
-```
-
-```console
-go vet -vettool=(which nosnakecase) ./...
-
-# command-line-arguments
-# a
-./a.go:4:5: v_v is used under score. You should use mixedCap or MixedCap.
-./a.go:7:7: c_c is used under score. You should use mixedCap or MixedCap.
-./a.go:10:6: S_a is used under score. You should use mixedCap or MixedCap.
-./a.go:16:2: fi_a is used under score. You should use mixedCap or MixedCap.
-./a.go:21:11: p_a is used under score. You should use mixedCap or MixedCap.
-./a.go:26:2: Fi_A is used under score. You should use mixedCap or MixedCap.
-./a.go:31:19: r_a is used under score. You should use mixedCap or MixedCap.
-./a.go:35:6: I_a is used under score. You should use mixedCap or MixedCap.
-./a.go:41:5: p_a is used under score. You should use mixedCap or MixedCap.
-./a.go:46:5: p_a is used under score. You should use mixedCap or MixedCap.
-./a.go:51:2: Fn_a is used under score. You should use mixedCap or MixedCap.
-./a.go:56:8: r_a is used under score. You should use mixedCap or MixedCap.
-./a.go:60:6: f_a is used under score. You should use mixedCap or MixedCap.
-./a.go:63:9: p_a is used under score. You should use mixedCap or MixedCap.
-./a.go:66:12: r_b is used under score. You should use mixedCap or MixedCap.
-./a.go:72:2: v_b is used under score. You should use mixedCap or MixedCap.
-./a.go:74:9: v_b is used under score. You should use mixedCap or MixedCap.
-./a.go:79:8: v_b is used under score. You should use mixedCap or MixedCap.
-./a.go:81:9: v_b is used under score. You should use mixedCap or MixedCap.
-./a.go:86:6: v_b is used under score. You should use mixedCap or MixedCap.
-./a.go:88:9: v_b is used under score. You should use mixedCap or MixedCap.
-./a.go:93:14: p_a is used under score. You should use mixedCap or MixedCap.
-./a.go:100:14: f_a is used under score. You should use mixedCap or MixedCap.
-./a.go:103:17: p_a is used under score. You should use mixedCap or MixedCap.
-./a.go:106:20: r_b is used under score. You should use mixedCap or MixedCap.
-./a.go:110:2: v_b is used under score. You should use mixedCap or MixedCap.
-./a.go:112:9: v_b is used under score. You should use mixedCap or MixedCap.
-./a.go:117:8: v_b is used under score. You should use mixedCap or MixedCap.
-./a.go:119:9: v_b is used under score. You should use mixedCap or MixedCap.
-./a.go:124:6: v_b is used under score. You should use mixedCap or MixedCap.
-./a.go:126:9: v_b is used under score. You should use mixedCap or MixedCap.
-./a.go:129:13: p_a is used under score. You should use mixedCap or MixedCap.
-./a.go:131:21: p_a is used under score. You should use mixedCap or MixedCap.
-./a.go:133:16: p_a is used under score. You should use mixedCap or MixedCap.
-./a.go:135:24: p_a is used under score. You should use mixedCap or MixedCap.
-./a.go:138:2: p_a is used under score. You should use mixedCap or MixedCap.
-./a.go:139:2: p_b is used under score. You should use mixedCap or MixedCap.
-./a.go:140:2: p_c is used under score. You should use mixedCap or MixedCap.
-```
-
-## CI
-
-### CircleCI
-
-```yaml
-- run:
-    name: install nosnakecase
-    command: go install github.com/sivchari/nosnakecase/cmd/nosnakecase@latest
-
-- run:
-    name: run nosnakecase
-    command: go vet -vettool=`which nosnakecase` ./...
-```
-
-### GitHub Actions
-
-```yaml
-- name: install nosnakecase
-  run: go install github.com/sivchari/nosnakecase/cmd/nosnakecase@latest
-
-- name: run nosnakecase
-  run: go vet -vettool=`which nosnakecase` ./...
-```
diff --git a/vendor/github.com/sivchari/nosnakecase/nosnakecase.go b/vendor/github.com/sivchari/nosnakecase/nosnakecase.go
deleted file mode 100644
index 88cf70e3f052100882a3c6ddf3bb6236c9b10686..0000000000000000000000000000000000000000
--- a/vendor/github.com/sivchari/nosnakecase/nosnakecase.go
+++ /dev/null
@@ -1,63 +0,0 @@
-package nosnakecase
-
-import (
-	"go/ast"
-	"go/token"
-	"strings"
-
-	"golang.org/x/tools/go/analysis"
-	"golang.org/x/tools/go/analysis/passes/inspect"
-	"golang.org/x/tools/go/ast/inspector"
-)
-
-const doc = "nosnakecase is a linter that detects snake case of variable naming and function name."
-
-// Analyzer is a nosnakecase linter.
-var Analyzer = &analysis.Analyzer{
-	Name: "nosnakecase",
-	Doc:  doc,
-	Run:  run,
-	Requires: []*analysis.Analyzer{
-		inspect.Analyzer,
-	},
-}
-
-func run(pass *analysis.Pass) (interface{}, error) {
-	result := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
-
-	nodeFilter := []ast.Node{
-		(*ast.Ident)(nil),
-	}
-
-	result.Preorder(nodeFilter, func(n ast.Node) {
-		switch n := n.(type) {
-		case *ast.Ident:
-			report(pass, n.Pos(), n.Name)
-		}
-	})
-
-	return nil, nil
-}
-
-func report(pass *analysis.Pass, pos token.Pos, name string) {
-	// skip import _ "xxx"
-	if name == "_" {
-		return
-	}
-
-	// skip package xxx_test
-	if strings.Contains(name, "_test") {
-		return
-	}
-
-	// If prefix is Test or Benchmark, Fuzz, skip
-	// FYI https://go.dev/blog/examples
-	if strings.HasPrefix(name, "Test") || strings.HasPrefix(name, "Benchmark") || strings.HasPrefix(name, "Fuzz") {
-		return
-	}
-
-	if strings.Contains(name, "_") {
-		pass.Reportf(pos, "%s contains underscore. You should use mixedCap or MixedCap.", name)
-		return
-	}
-}
diff --git a/vendor/github.com/tetafro/godot/.golangci.yml b/vendor/github.com/tetafro/godot/.golangci.yml
index 920135d40a1e81a71a89dcf7314d98ac13133c95..ea380eb83d7da02e012e70b684491dbc16a57383 100644
--- a/vendor/github.com/tetafro/godot/.golangci.yml
+++ b/vendor/github.com/tetafro/godot/.golangci.yml
@@ -8,45 +8,53 @@ skip-dirs:
 linters:
   disable-all: true
   enable:
-    - deadcode
-    - errcheck
-    - gosimple
-    - govet
-    - ineffassign
-    - staticcheck
-    - structcheck
-    - typecheck
-    - unused
-    - varcheck
+    - asciicheck
     - bodyclose
+    - cyclop
     - dogsled
-    - dupl
-    - funlen
+    - durationcheck
+    - errcheck
+    - errname
+    - errorlint
+    - exhaustive
+    - exportloopref
     - gochecknoinits
+    - gocognit
     - goconst
     - gocritic
     - gocyclo
     - godot
+    - goerr113
     - gofmt
     - gofumpt
     - goimports
-    - golint
-    - gomnd
-    - gomodguard
     - goprintffuncname
     - gosec
+    - gosimple
+    - govet
+    - importas
+    - ineffassign
     - lll
-    - maligned
     - misspell
     - nakedret
     - nestif
+    - noctx
+    - nolintlint
     - prealloc
+    - revive
     - rowserrcheck
-    - scopelint
+    - sqlclosecheck
+    - staticcheck
     - stylecheck
+    - typecheck
     - unconvert
     - unparam
+    - unused
+    - wastedassign
     - whitespace
+    - wrapcheck
 
 linters-settings:
   godot:
@@ -54,13 +62,19 @@ linters-settings:
 
 issues:
   exclude-use-default: false
+  exclude:
+    - "do not define dynamic errors, use wrapped static errors instead"
   exclude-rules:
     - path: _test\.go
       linters:
         - dupl
         - errcheck
         - funlen
+        - gocognit
+        - cyclop
         - gosec
-    - path: cmd/godot/main\.go
+        - noctx
+    - path: main\.go
       linters:
+        - cyclop
         - gomnd
diff --git a/vendor/github.com/tetafro/godot/README.md b/vendor/github.com/tetafro/godot/README.md
index e8d85fb0a7795f3f8349c7fe74d29f7290a4c457..6b2e530b93c604c9cce7c026ccb3d29171f926d9 100644
--- a/vendor/github.com/tetafro/godot/README.md
+++ b/vendor/github.com/tetafro/godot/README.md
@@ -21,7 +21,7 @@ end of the last sentence if needed.
 Build from source
 
 ```sh
-go get -u github.com/tetafro/godot/cmd/godot
+go install github.com/tetafro/godot/cmd/godot@latest
 ```
 
 or download binary from [releases page](https://github.com/tetafro/godot/releases).
diff --git a/vendor/github.com/tetafro/godot/checks.go b/vendor/github.com/tetafro/godot/checks.go
index cba54f310c4d5999957544cfcb28359e106e10ea..0e53c220a2cd6a267e701c8aeee4f02c11103d04 100644
--- a/vendor/github.com/tetafro/godot/checks.go
+++ b/vendor/github.com/tetafro/godot/checks.go
@@ -21,7 +21,7 @@ var (
 	// Abbreviations to exclude from capital letters check.
 	abbreviations = []string{"i.e.", "i. e.", "e.g.", "e. g.", "etc."}
 
-	// Special tags in comments like "// nolint:", or "// +k8s:".
+	// Special tags in comments like "//nolint:", or "//+k8s:".
 	tags = regexp.MustCompile(`^\+?[a-z0-9]+:`)
 
 	// Special hashtags in comments like "// #nosec".
@@ -31,18 +31,24 @@ var (
 	endURL = regexp.MustCompile(`[a-z]+://[^\s]+$`)
 )
 
+// position is a position inside a comment (might be multiline comment).
+type position struct {
+	line   int // starts at 1
+	column int // starts at 1, byte count
+}
+
 // checkComments checks every comment accordings to the rules from
 // `settings` argument.
 func checkComments(comments []comment, settings Settings) []Issue {
-	var issues []Issue // nolint: prealloc
+	var issues []Issue
 	for _, c := range comments {
 		if settings.Period {
-			if iss := checkCommentForPeriod(c); iss != nil {
+			if iss := checkPeriod(c); iss != nil {
 				issues = append(issues, *iss)
 			}
 		}
 		if settings.Capital {
-			if iss := checkCommentForCapital(c); len(iss) > 0 {
+			if iss := checkCapital(c); len(iss) > 0 {
 				issues = append(issues, iss...)
 			}
 		}
@@ -50,14 +56,34 @@ func checkComments(comments []comment, settings Settings) []Issue {
 	return issues
 }
 
-// checkCommentForPeriod checks that the last sentense of the comment ends
+// checkPeriod checks that the last sentence of the comment ends
 // in a period.
-func checkCommentForPeriod(c comment) *Issue {
-	pos, ok := checkPeriod(c.text)
-	if ok {
+func checkPeriod(c comment) *Issue {
+	// Check last non-empty line
+	var found bool
+	var line string
+	var pos position
+	lines := strings.Split(c.text, "\n")
+	for i := len(lines) - 1; i >= 0; i-- {
+		line = strings.TrimRightFunc(lines[i], unicode.IsSpace)
+		if line == "" {
+			continue
+		}
+		found = true
+		pos.line = i + 1
+		break
+	}
+	// All lines are empty
+	if !found {
+		return nil
+	}
+	// Correct line
+	if hasSuffix(line, lastChars) {
 		return nil
 	}
 
+	pos.column = len(line) + 1
+
 	// Shift position to its real value. `c.text` doesn't contain comment's
 	// special symbols: /* or //, and line indentations inside. It also
 	// contains */ in the end in case of block comment.
@@ -94,95 +120,15 @@ func checkCommentForPeriod(c comment) *Issue {
 	return &iss
 }
 
-// checkCommentForCapital checks that each sentense of the comment starts with
-// a capital letter.
-// nolint: unparam
-func checkCommentForCapital(c comment) []Issue {
-	pp := checkCapital(c.text, c.decl)
-	if len(pp) == 0 {
-		return nil
-	}
-
-	issues := make([]Issue, len(pp))
-	for i, pos := range pp {
-		// Shift position by the length of comment's special symbols: /* or //
-		isBlock := strings.HasPrefix(c.lines[0], "/*")
-		if (isBlock && pos.line == 1) || !isBlock {
-			pos.column += 2
-		}
-
-		iss := Issue{
-			Pos: token.Position{
-				Filename: c.start.Filename,
-				Offset:   c.start.Offset,
-				Line:     pos.line + c.start.Line - 1,
-				Column:   pos.column + c.start.Column - 1,
-			},
-			Message: noCapitalMessage,
-		}
-
-		// Make a replacement. Use `pos.original` to get an original original from
-		// attached lines. Use `iss.Pos.Column` because it's a position in
-		// the original original.
-		original := c.lines[pos.line-1]
-		col := byteToRuneColumn(original, iss.Pos.Column) - 1
-		rep := string(unicode.ToTitle([]rune(original)[col])) // capital letter
-		if len(original) < iss.Pos.Column-1+len(rep) {
-			// This should never happen. Avoid panics, skip this check.
-			continue
-		}
-		iss.Replacement = original[:iss.Pos.Column-1] + rep +
-			original[iss.Pos.Column-1+len(rep):]
-
-		// Save replacement to raw lines to be able to combine it with
-		// further replacements
-		c.lines[pos.line-1] = iss.Replacement
-
-		issues[i] = iss
-	}
-
-	return issues
-}
-
-// checkPeriod checks that the last sentense of the text ends in a period.
-// NOTE: Returned position is a position inside given text, not in the
-// original file.
-func checkPeriod(comment string) (pos position, ok bool) {
-	// Check last non-empty line
-	var found bool
-	var line string
-	lines := strings.Split(comment, "\n")
-	for i := len(lines) - 1; i >= 0; i-- {
-		line = strings.TrimRightFunc(lines[i], unicode.IsSpace)
-		if line == "" {
-			continue
-		}
-		found = true
-		pos.line = i + 1
-		break
-	}
-	// All lines are empty
-	if !found {
-		return position{}, true
-	}
-	// Correct line
-	if hasSuffix(line, lastChars) {
-		return position{}, true
-	}
-
-	pos.column = len(line) + 1
-	return pos, false
-}
-
-// checkCapital checks that each sentense of the text starts with
+// checkCapital checks that each sentence of the comment starts with
 // a capital letter.
-// NOTE: First letter is not checked in declaration comments, because they
-// can describe unexported functions, which start with small letter.
-func checkCapital(comment string, skipFirst bool) (pp []position) {
+//
+//nolint:cyclop,funlen
+func checkCapital(c comment) []Issue {
 	// Remove common abbreviations from the comment
 	for _, abbr := range abbreviations {
 		repl := strings.ReplaceAll(abbr, ".", "_")
-		comment = strings.ReplaceAll(comment, abbr, repl)
+		c.text = strings.ReplaceAll(c.text, abbr, repl)
 	}
 
 	// List of states during the scan: `empty` - nothing special,
@@ -190,12 +136,14 @@ func checkCapital(comment string, skipFirst bool) (pp []position) {
 	// `endOfSentence` - found `endChar`, and then space or newline.
 	const empty, endChar, endOfSentence = 1, 2, 3
 
+	var pp []position
 	pos := position{line: 1}
 	state := endOfSentence
-	if skipFirst {
+	if c.decl {
+		// Skip first
 		state = empty
 	}
-	for _, r := range comment {
+	for _, r := range c.text {
 		s := string(r)
 
 		pos.column++
@@ -223,12 +171,54 @@ func checkCapital(comment string, skipFirst bool) (pp []position) {
 		if state == endOfSentence && unicode.IsLower(r) {
 			pp = append(pp, position{
 				line:   pos.line,
-				column: runeToByteColumn(comment, pos.column),
+				column: runeToByteColumn(c.text, pos.column),
 			})
 		}
 		state = empty
 	}
-	return pp
+	if len(pp) == 0 {
+		return nil
+	}
+
+	issues := make([]Issue, len(pp))
+	for i, pos := range pp {
+		// Shift position by the length of comment's special symbols: /* or //
+		isBlock := strings.HasPrefix(c.lines[0], "/*")
+		if (isBlock && pos.line == 1) || !isBlock {
+			pos.column += 2
+		}
+
+		iss := Issue{
+			Pos: token.Position{
+				Filename: c.start.Filename,
+				Offset:   c.start.Offset,
+				Line:     pos.line + c.start.Line - 1,
+				Column:   pos.column + c.start.Column - 1,
+			},
+			Message: noCapitalMessage,
+		}
+
+		// Make a replacement. Use `pos.line` to get the original line from
+		// attached lines. Use `iss.Pos.Column` because it's a position in
+		// the original line.
+		original := c.lines[pos.line-1]
+		col := byteToRuneColumn(original, iss.Pos.Column) - 1
+		rep := string(unicode.ToTitle([]rune(original)[col])) // capital letter
+		if len(original) < iss.Pos.Column-1+len(rep) {
+			// This should never happen. Avoid panics, skip this check.
+			continue
+		}
+		iss.Replacement = original[:iss.Pos.Column-1] + rep +
+			original[iss.Pos.Column-1+len(rep):]
+
+		// Save replacement to raw lines to be able to combine it with
+		// further replacements
+		c.lines[pos.line-1] = iss.Replacement
+
+		issues[i] = iss
+	}
+
+	return issues
 }
 
 // isSpecialBlock checks that given block of comment lines is special and
@@ -240,10 +230,13 @@ func isSpecialBlock(comment string) bool {
 		strings.Contains(comment, "#define")) {
 		return true
 	}
+	if strings.HasPrefix(comment, "// Output: ") {
+		return true
+	}
 	return false
 }
 
-// isSpecialBlock checks that given comment line is special and
+// isSpecialLine checks that given comment line is special and
 // shouldn't be checked as a regular sentence.
 func isSpecialLine(comment string) bool {
 	// Skip cgo export tags: https://golang.org/cmd/cgo/#hdr-C_references_to_Go
diff --git a/vendor/github.com/tetafro/godot/getters.go b/vendor/github.com/tetafro/godot/getters.go
index 8adcc46ae84b3dd13c1e60d1e7f3e6b4fd9a4f0d..7d3d22fb13d1a0c92f68c5d39f1da7cb458e9e9b 100644
--- a/vendor/github.com/tetafro/godot/getters.go
+++ b/vendor/github.com/tetafro/godot/getters.go
@@ -44,7 +44,7 @@ func newParsedFile(file *ast.File, fset *token.FileSet) (*parsedFile, error) {
 	// from "go/format" won't help here if the original file is not gofmt-ed.
 	pf.lines, err = readFile(file, fset)
 	if err != nil {
-		return nil, fmt.Errorf("read file: %v", err)
+		return nil, fmt.Errorf("read file: %w", err)
 	}
 
 	// Dirty hack. For some cases Go generates temporary files during
@@ -58,9 +58,13 @@ func newParsedFile(file *ast.File, fset *token.FileSet) (*parsedFile, error) {
 		return nil, errUnsuitableInput
 	}
 
-	// Check consistency to avoid checking slice indexes in each function
+	// Check consistency to avoid checking slice indexes in each function.
+	// Note that `PositionFor` is used with `adjusted=false` to skip `//line`
+	// directives that can set references to other files (e.g. templates)
+	// instead of the real ones, and break consistency here.
+	// Issue: https://github.com/tetafro/godot/issues/32
 	lastComment := pf.file.Comments[len(pf.file.Comments)-1]
-	if p := pf.fset.Position(lastComment.End()); len(pf.lines) < p.Line {
+	if p := pf.fset.PositionFor(lastComment.End(), false); len(pf.lines) < p.Line {
 		return nil, fmt.Errorf("inconsistency between file and AST: %s", p.Filename)
 	}
 
@@ -82,7 +86,7 @@ func (pf *parsedFile) getComments(scope Scope, exclude []*regexp.Regexp) []comme
 			pf.getBlockComments(exclude),
 			pf.getTopLevelComments(exclude)...,
 		)
-	default:
+	case DeclScope:
 		// Top level declaration comments and comments from the inside
 		// of top level blocks
 		comments = append(pf.getBlockComments(exclude), decl...)
@@ -118,7 +122,7 @@ func (pf *parsedFile) getBlockComments(exclude []*regexp.Regexp) []comment {
 			// Skip comments that are not top-level for this block
 			// (the block itself is top level, so comments inside this block
 			// would be on column 2)
-			// nolint: gomnd
+			//nolint:gomnd
 			if pf.fset.Position(c.Pos()).Column != 2 {
 				continue
 			}
@@ -136,7 +140,7 @@ func (pf *parsedFile) getBlockComments(exclude []*regexp.Regexp) []comment {
 
 // getTopLevelComments gets all top level comments.
 func (pf *parsedFile) getTopLevelComments(exclude []*regexp.Regexp) []comment {
-	var comments []comment // nolint: prealloc
+	var comments []comment //nolint:prealloc
 	for _, c := range pf.file.Comments {
 		if c == nil || len(c.List) == 0 {
 			continue
@@ -157,7 +161,7 @@ func (pf *parsedFile) getTopLevelComments(exclude []*regexp.Regexp) []comment {
 
 // getDeclarationComments gets top level declaration comments.
 func (pf *parsedFile) getDeclarationComments(exclude []*regexp.Regexp) []comment {
-	var comments []comment // nolint: prealloc
+	var comments []comment //nolint:prealloc
 	for _, decl := range pf.file.Decls {
 		var cg *ast.CommentGroup
 		switch d := decl.(type) {
@@ -184,7 +188,7 @@ func (pf *parsedFile) getDeclarationComments(exclude []*regexp.Regexp) []comment
 
 // getAllComments gets every single comment from the file.
 func (pf *parsedFile) getAllComments(exclude []*regexp.Regexp) []comment {
-	var comments []comment //nolint: prealloc
+	var comments []comment //nolint:prealloc
 	for _, c := range pf.file.Comments {
 		if c == nil || len(c.List) == 0 {
 			continue
@@ -200,11 +204,13 @@ func (pf *parsedFile) getAllComments(exclude []*regexp.Regexp) []comment {
 	return comments
 }
 
-// getText extracts text from comment. If comment is a special block
+// getText extracts text from comment. If the comment is a special block
 // (e.g., CGO code), a block of empty lines is returned. If comment contains
 // special lines (e.g., tags or indented code examples), they are replaced
-// with `specialReplacer` to skip checks for it.
+// with `specialReplacer` to skip checks for them.
 // The result can be multiline.
+//
+//nolint:cyclop
 func getText(comment *ast.CommentGroup, exclude []*regexp.Regexp) (s string) {
 	if len(comment.List) == 1 &&
 		strings.HasPrefix(comment.List[0].Text, "/*") &&
@@ -241,12 +247,12 @@ func getText(comment *ast.CommentGroup, exclude []*regexp.Regexp) (s string) {
 	return s[:len(s)-1] // trim last "\n"
 }
 
-// readFile reads file and returns it's lines as strings.
+// readFile reads file and returns its lines as strings.
 func readFile(file *ast.File, fset *token.FileSet) ([]string, error) {
 	fname := fset.File(file.Package)
 	f, err := os.ReadFile(fname.Name())
 	if err != nil {
-		return nil, err
+		return nil, err //nolint:wrapcheck
 	}
 	return strings.Split(string(f), "\n"), nil
 }
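A note on the `PositionFor(..., false)` change above: `//line` directives remap adjusted positions to another file, which is exactly what broke the consistency check against the raw source lines. A minimal, self-contained sketch of how adjusted and unadjusted positions diverge (the generated file and directive below are hypothetical, not from godot's test suite):

```go
package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

// A hypothetical generated file: the //line directive remaps all following
// positions to template.tmpl, which is what the adjusted Position reports.
const src = `package main

//line template.tmpl:100
// Trailing comment whose reported position depends on the directive.
func main() {}
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "gen.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	end := f.Comments[len(f.Comments)-1].End()

	// Adjusted position follows the //line directive and points at template.tmpl.
	fmt.Println("adjusted:  ", fset.Position(end))
	// Unadjusted position stays in gen.go, matching the file's real line count,
	// which is what godot needs when indexing into the raw source lines.
	fmt.Println("unadjusted:", fset.PositionFor(end, false))
}
```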
diff --git a/vendor/github.com/tetafro/godot/godot.go b/vendor/github.com/tetafro/godot/godot.go
index 19a652fba4e68365624cad129dcf99d3bbd916e5..e825e9a6dee1ed1a97035443a7b040130685357e 100644
--- a/vendor/github.com/tetafro/godot/godot.go
+++ b/vendor/github.com/tetafro/godot/godot.go
@@ -3,6 +3,7 @@
 package godot
 
 import (
+	"errors"
 	"fmt"
 	"go/ast"
 	"go/token"
@@ -24,12 +25,6 @@ type Issue struct {
 	Replacement string
 }
 
-// position is a position inside a comment (might be multiline comment).
-type position struct {
-	line   int // starts at 1
-	column int // starts at 1, byte count
-}
-
 // comment is an internal representation of AST comment entity with additional
 // data attached. The latter is used for creating a full replacement for
 // the line with issues.
@@ -43,18 +38,18 @@ type comment struct {
 // Run runs this linter on the provided code.
 func Run(file *ast.File, fset *token.FileSet, settings Settings) ([]Issue, error) {
 	pf, err := newParsedFile(file, fset)
-	if err == errEmptyInput || err == errUnsuitableInput {
+	if errors.Is(err, errEmptyInput) || errors.Is(err, errUnsuitableInput) {
 		return nil, nil
 	}
 	if err != nil {
-		return nil, fmt.Errorf("parse input file: %v", err)
+		return nil, fmt.Errorf("parse input file: %w", err)
 	}
 
 	exclude := make([]*regexp.Regexp, len(settings.Exclude))
 	for i := 0; i < len(settings.Exclude); i++ {
 		exclude[i], err = regexp.Compile(settings.Exclude[i])
 		if err != nil {
-			return nil, fmt.Errorf("invalid regexp: %v", err)
+			return nil, fmt.Errorf("invalid regexp: %w", err)
 		}
 	}
 
@@ -68,9 +63,9 @@ func Run(file *ast.File, fset *token.FileSet, settings Settings) ([]Issue, error
 // Fix fixes all issues and returns new version of file content.
 func Fix(path string, file *ast.File, fset *token.FileSet, settings Settings) ([]byte, error) {
 	// Read file
-	content, err := os.ReadFile(path) // nolint: gosec
+	content, err := os.ReadFile(path) //nolint:gosec
 	if err != nil {
-		return nil, fmt.Errorf("read file: %v", err)
+		return nil, fmt.Errorf("read file: %w", err)
 	}
 	if len(content) == 0 {
 		return nil, nil
@@ -78,7 +73,7 @@ func Fix(path string, file *ast.File, fset *token.FileSet, settings Settings) ([
 
 	issues, err := Run(file, fset, settings)
 	if err != nil {
-		return nil, fmt.Errorf("run linter: %v", err)
+		return nil, fmt.Errorf("run linter: %w", err)
 	}
 
 	// slice -> map
@@ -101,21 +96,21 @@ func Fix(path string, file *ast.File, fset *token.FileSet, settings Settings) ([
 	return fixed, nil
 }
 
-// Replace rewrites original file with it's fixed version.
+// Replace rewrites original file with its fixed version.
 func Replace(path string, file *ast.File, fset *token.FileSet, settings Settings) error {
 	info, err := os.Stat(path)
 	if err != nil {
-		return fmt.Errorf("check file: %v", err)
+		return fmt.Errorf("check file: %w", err)
 	}
 	mode := info.Mode()
 
 	fixed, err := Fix(path, file, fset, settings)
 	if err != nil {
-		return fmt.Errorf("fix issues: %v", err)
+		return fmt.Errorf("fix issues: %w", err)
 	}
 
 	if err := os.WriteFile(path, fixed, mode); err != nil {
-		return fmt.Errorf("write file: %v", err)
+		return fmt.Errorf("write file: %w", err)
 	}
 	return nil
 }
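For context on the error-handling changes above (replacing `%v` with `%w` and `==` with `errors.Is`), a minimal sketch of why the distinction matters; `errEmptyInput` mirrors the sentinel used in the hunk, the rest is illustrative:

```go
package main

import (
	"errors"
	"fmt"
)

var errEmptyInput = errors.New("empty input")

// parse wraps the sentinel with %w, so it stays discoverable in the chain.
func parse() error {
	return fmt.Errorf("parse input file: %w", errEmptyInput)
}

func main() {
	err := parse()

	// Direct comparison no longer matches once the error is wrapped...
	fmt.Println(err == errEmptyInput) // false

	// ...but errors.Is walks the wrap chain and still finds the sentinel.
	fmt.Println(errors.Is(err, errEmptyInput)) // true
}
```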
diff --git a/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go b/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go
index 6da17bd867838ddf748619acbae9ba7f52e76ed9..79e7bba8631ae1d85cbac64115e2f2b85cdea2bf 100644
--- a/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go
+++ b/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go
@@ -121,7 +121,20 @@ func run(cfg WrapcheckConfig) func(*analysis.Pass) (interface{}, error) {
 		}
 
 		for _, file := range pass.Files {
+			// Keep track of parents so that we can traverse upwards to check for
+			// FuncDecls and FuncLits.
+			var parents []ast.Node
+
 			ast.Inspect(file, func(n ast.Node) bool {
+				if n == nil {
+					// Pop, since we're done with this node and its children.
+					parents = parents[:len(parents)-1]
+				} else {
+					// Push this node on the stack, since its children will be visited
+					// next.
+					parents = append(parents, n)
+				}
+
 				ret, ok := n.(*ast.ReturnStmt)
 				if !ok {
 					return true
@@ -137,6 +150,17 @@ func run(cfg WrapcheckConfig) func(*analysis.Pass) (interface{}, error) {
 					// to handle it by checking the return params of the function.
 					retFn, ok := expr.(*ast.CallExpr)
 					if ok {
+						// If you go up, and the parent is a FuncLit, then don't report an
+						// error as you are in an anonymous function. If you are inside a
+						// FuncDecl, then continue as normal.
+						for i := len(parents) - 1; i > 0; i-- {
+							if _, ok := parents[i].(*ast.FuncLit); ok {
+								return true
+							} else if _, ok := parents[i].(*ast.FuncDecl); ok {
+								break
+							}
+						}
+
 						// If the return type of the function is a single error. This will not
 						// match an error within multiple return values, for that, the below
 						// tuple check is required.
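The parent bookkeeping added above leans on `ast.Inspect` reporting the end of each subtree with a nil node. A standalone sketch of the same push/pop pattern, detached from wrapcheck's configuration and reporting (the source snippet is made up for illustration):

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

const src = `package p

import "errors"

func f() error {
	g := func() error { return errors.New("inner") }
	_ = g
	return errors.New("outer")
}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}

	var parents []ast.Node
	ast.Inspect(file, func(n ast.Node) bool {
		if n == nil {
			// Leaving the most recently entered subtree: pop.
			parents = parents[:len(parents)-1]
			return true
		}
		// Entering a node: push, so descendants can inspect their ancestry.
		parents = append(parents, n)

		if ret, ok := n.(*ast.ReturnStmt); ok {
			// Walk upwards; the nearest FuncLit or FuncDecl decides whether this
			// return belongs to an anonymous function or a named one.
			for i := len(parents) - 2; i >= 0; i-- {
				switch parents[i].(type) {
				case *ast.FuncLit:
					fmt.Println("return inside FuncLit:", fset.Position(ret.Pos()))
					return true
				case *ast.FuncDecl:
					fmt.Println("return inside FuncDecl:", fset.Position(ret.Pos()))
					return true
				}
			}
		}
		return true
	})
}
```

Because the callback always returns true, every push is matched by a pop when the nil end-of-subtree node arrives, so the stack stays balanced.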
diff --git a/vendor/github.com/ultraware/whitespace/README.md b/vendor/github.com/ultraware/whitespace/README.md
index 2a88f13388a2727752f3483c838c1240c1aa1c8c..f99ecce36c80c3e8728da108943183bbf200b0ac 100644
--- a/vendor/github.com/ultraware/whitespace/README.md
+++ b/vendor/github.com/ultraware/whitespace/README.md
@@ -4,4 +4,6 @@ Whitespace is a linter that checks for unnecessary newlines at the start and end
 
 ## Installation guide
 
-Whitespace is included in [golangci-lint](https://github.com/golangci/golangci-lint/). Install it and enable whitespace.
+To install as a standalone linter, run `go install github.com/ultraware/whitespace`.
+
+Whitespace is also included in [golangci-lint](https://github.com/golangci/golangci-lint/). Install it and enable whitespace.
diff --git a/vendor/github.com/ultraware/whitespace/main.go b/vendor/github.com/ultraware/whitespace/main.go
deleted file mode 100644
index d178ea29394536391d03a58f3110586f5b784425..0000000000000000000000000000000000000000
--- a/vendor/github.com/ultraware/whitespace/main.go
+++ /dev/null
@@ -1,162 +0,0 @@
-package whitespace
-
-import (
-	"go/ast"
-	"go/token"
-)
-
-// Message contains a message
-type Message struct {
-	Pos     token.Position
-	Type    MessageType
-	Message string
-}
-
-// MessageType describes what should happen to fix the warning
-type MessageType uint8
-
-// List of MessageTypes
-const (
-	MessageTypeLeading MessageType = iota + 1
-	MessageTypeTrailing
-	MessageTypeAddAfter
-)
-
-// Settings contains settings for edge-cases
-type Settings struct {
-	MultiIf   bool
-	MultiFunc bool
-}
-
-// Run runs this linter on the provided code
-func Run(file *ast.File, fset *token.FileSet, settings Settings) []Message {
-	var messages []Message
-
-	for _, f := range file.Decls {
-		decl, ok := f.(*ast.FuncDecl)
-		if !ok || decl.Body == nil { // decl.Body can be nil for e.g. cgo
-			continue
-		}
-
-		vis := visitor{file.Comments, fset, nil, make(map[*ast.BlockStmt]bool), settings}
-		ast.Walk(&vis, decl)
-
-		messages = append(messages, vis.messages...)
-	}
-
-	return messages
-}
-
-type visitor struct {
-	comments    []*ast.CommentGroup
-	fset        *token.FileSet
-	messages    []Message
-	wantNewline map[*ast.BlockStmt]bool
-	settings    Settings
-}
-
-func (v *visitor) Visit(node ast.Node) ast.Visitor {
-	if node == nil {
-		return v
-	}
-
-	if stmt, ok := node.(*ast.IfStmt); ok && v.settings.MultiIf {
-		checkMultiLine(v, stmt.Body, stmt.Cond)
-	}
-
-	if stmt, ok := node.(*ast.FuncLit); ok && v.settings.MultiFunc {
-		checkMultiLine(v, stmt.Body, stmt.Type)
-	}
-
-	if stmt, ok := node.(*ast.FuncDecl); ok && v.settings.MultiFunc {
-		checkMultiLine(v, stmt.Body, stmt.Type)
-	}
-
-	if stmt, ok := node.(*ast.BlockStmt); ok {
-		wantNewline := v.wantNewline[stmt]
-
-		comments := v.comments
-		if wantNewline {
-			comments = nil // Comments also count as a newline if we want a newline
-		}
-		first, last := firstAndLast(comments, v.fset, stmt.Pos(), stmt.End(), stmt.List)
-
-		startMsg := checkStart(v.fset, stmt.Lbrace, first)
-
-		if wantNewline && startMsg == nil {
-			v.messages = append(v.messages, Message{v.fset.Position(stmt.Pos()), MessageTypeAddAfter, `multi-line statement should be followed by a newline`})
-		} else if !wantNewline && startMsg != nil {
-			v.messages = append(v.messages, *startMsg)
-		}
-
-		if msg := checkEnd(v.fset, stmt.Rbrace, last); msg != nil {
-			v.messages = append(v.messages, *msg)
-		}
-	}
-
-	return v
-}
-
-func checkMultiLine(v *visitor, body *ast.BlockStmt, stmtStart ast.Node) {
-	start, end := posLine(v.fset, stmtStart.Pos()), posLine(v.fset, stmtStart.End())
-
-	if end > start { // Check only multi line conditions
-		v.wantNewline[body] = true
-	}
-}
-
-func posLine(fset *token.FileSet, pos token.Pos) int {
-	return fset.Position(pos).Line
-}
-
-func firstAndLast(comments []*ast.CommentGroup, fset *token.FileSet, start, end token.Pos, stmts []ast.Stmt) (ast.Node, ast.Node) {
-	if len(stmts) == 0 {
-		return nil, nil
-	}
-
-	first, last := ast.Node(stmts[0]), ast.Node(stmts[len(stmts)-1])
-
-	for _, c := range comments {
-		if posLine(fset, c.Pos()) == posLine(fset, start) || posLine(fset, c.End()) == posLine(fset, end) {
-			continue
-		}
-
-		if c.Pos() < start || c.End() > end {
-			continue
-		}
-		if c.Pos() < first.Pos() {
-			first = c
-		}
-		if c.End() > last.End() {
-			last = c
-		}
-	}
-
-	return first, last
-}
-
-func checkStart(fset *token.FileSet, start token.Pos, first ast.Node) *Message {
-	if first == nil {
-		return nil
-	}
-
-	if posLine(fset, start)+1 < posLine(fset, first.Pos()) {
-		pos := fset.Position(start)
-		return &Message{pos, MessageTypeLeading, `unnecessary leading newline`}
-	}
-
-	return nil
-}
-
-func checkEnd(fset *token.FileSet, end token.Pos, last ast.Node) *Message {
-	if last == nil {
-		return nil
-	}
-
-	if posLine(fset, end)-1 > posLine(fset, last.End()) {
-		pos := fset.Position(end)
-		return &Message{pos, MessageTypeTrailing, `unnecessary trailing newline`}
-	}
-
-	return nil
-}
diff --git a/vendor/github.com/ultraware/whitespace/whitespace.go b/vendor/github.com/ultraware/whitespace/whitespace.go
new file mode 100644
index 0000000000000000000000000000000000000000..350e9b7e4eb073e57bc050c95da773cbef960582
--- /dev/null
+++ b/vendor/github.com/ultraware/whitespace/whitespace.go
@@ -0,0 +1,307 @@
+package whitespace
+
+import (
+	"flag"
+	"go/ast"
+	"go/token"
+	"strings"
+
+	"golang.org/x/tools/go/analysis"
+)
+
+// MessageType describes what should happen to fix the warning.
+type MessageType uint8
+
+// List of MessageTypes.
+const (
+	MessageTypeRemove MessageType = iota + 1
+	MessageTypeAdd
+)
+
+// RunningMode describes the mode the linter is run in. This can be either
+// native or golangci-lint.
+type RunningMode uint8
+
+const (
+	RunningModeNative RunningMode = iota
+	RunningModeGolangCI
+)
+
+// Message contains a message and diagnostic information.
+type Message struct {
+	// Diagnostic is what position the diagnostic should be put at. This isn't
+	// always the same as the fix start, f.ex. when we fix trailing newlines we
+	// put the diagnostic at the right bracket but we fix between the end of the
+	// last statement and the bracket.
+	Diagnostic token.Pos
+
+	// FixStart is the span start of the fix.
+	FixStart token.Pos
+
+	// FixEnd is the span end of the fix.
+	FixEnd token.Pos
+
+	// LineNumbers represent the actual line numbers in the file. This is set
+	// when finding the diagnostic to make it easier to suggest fixes in
+	// golangci-lint.
+	LineNumbers []int
+
+	// MessageType represents the type of message it is.
+	MessageType MessageType
+
+	// Message is the diagnostic to show.
+	Message string
+}
+
+// Settings contains settings for edge-cases.
+type Settings struct {
+	Mode      RunningMode
+	MultiIf   bool
+	MultiFunc bool
+}
+
+// NewAnalyzer creates a new whitespace analyzer.
+func NewAnalyzer(settings *Settings) *analysis.Analyzer {
+	if settings == nil {
+		settings = &Settings{}
+	}
+
+	return &analysis.Analyzer{
+		Name:  "whitespace",
+		Doc:   "Whitespace is a linter that checks for unnecessary newlines at the start and end of functions, if, for, etc.",
+		Flags: flags(settings),
+		Run: func(p *analysis.Pass) (any, error) {
+			Run(p, settings)
+			return nil, nil
+		},
+		RunDespiteErrors: true,
+	}
+}
+
+func flags(settings *Settings) flag.FlagSet {
+	flags := flag.NewFlagSet("", flag.ExitOnError)
+	flags.BoolVar(&settings.MultiIf, "multi-if", settings.MultiIf, "Check that multi line if-statements have a leading newline")
+	flags.BoolVar(&settings.MultiFunc, "multi-func", settings.MultiFunc, "Check that multi line functions have a leading newline")
+
+	return *flags
+}
+
+func Run(pass *analysis.Pass, settings *Settings) []Message {
+	messages := []Message{}
+
+	for _, file := range pass.Files {
+		filename := pass.Fset.Position(file.Pos()).Filename
+		if !strings.HasSuffix(filename, ".go") {
+			continue
+		}
+
+		fileMessages := runFile(file, pass.Fset, *settings)
+
+		if settings.Mode == RunningModeGolangCI {
+			messages = append(messages, fileMessages...)
+			continue
+		}
+
+		for _, message := range fileMessages {
+			pass.Report(analysis.Diagnostic{
+				Pos:      message.Diagnostic,
+				Category: "whitespace",
+				Message:  message.Message,
+				SuggestedFixes: []analysis.SuggestedFix{
+					{
+						TextEdits: []analysis.TextEdit{
+							{
+								Pos:     message.FixStart,
+								End:     message.FixEnd,
+								NewText: []byte("\n"),
+							},
+						},
+					},
+				},
+			})
+		}
+	}
+
+	return messages
+}
+
+func runFile(file *ast.File, fset *token.FileSet, settings Settings) []Message {
+	var messages []Message
+
+	for _, f := range file.Decls {
+		decl, ok := f.(*ast.FuncDecl)
+		if !ok || decl.Body == nil { // decl.Body can be nil for e.g. cgo
+			continue
+		}
+
+		vis := visitor{file.Comments, fset, nil, make(map[*ast.BlockStmt]bool), settings}
+		ast.Walk(&vis, decl)
+
+		messages = append(messages, vis.messages...)
+	}
+
+	return messages
+}
+
+type visitor struct {
+	comments    []*ast.CommentGroup
+	fset        *token.FileSet
+	messages    []Message
+	wantNewline map[*ast.BlockStmt]bool
+	settings    Settings
+}
+
+func (v *visitor) Visit(node ast.Node) ast.Visitor {
+	if node == nil {
+		return v
+	}
+
+	if stmt, ok := node.(*ast.IfStmt); ok && v.settings.MultiIf {
+		checkMultiLine(v, stmt.Body, stmt.Cond)
+	}
+
+	if stmt, ok := node.(*ast.FuncLit); ok && v.settings.MultiFunc {
+		checkMultiLine(v, stmt.Body, stmt.Type)
+	}
+
+	if stmt, ok := node.(*ast.FuncDecl); ok && v.settings.MultiFunc {
+		checkMultiLine(v, stmt.Body, stmt.Type)
+	}
+
+	if stmt, ok := node.(*ast.BlockStmt); ok {
+		wantNewline := v.wantNewline[stmt]
+
+		comments := v.comments
+		if wantNewline {
+			comments = nil // Comments also count as a newline if we want a newline
+		}
+
+		opening, first, last := firstAndLast(comments, v.fset, stmt)
+		startMsg := checkStart(v.fset, opening, first)
+
+		if wantNewline && startMsg == nil && len(stmt.List) >= 1 {
+			v.messages = append(v.messages, Message{
+				Diagnostic:  opening,
+				FixStart:    stmt.List[0].Pos(),
+				FixEnd:      stmt.List[0].Pos(),
+				LineNumbers: []int{v.fset.PositionFor(stmt.List[0].Pos(), false).Line},
+				MessageType: MessageTypeAdd,
+				Message:     "multi-line statement should be followed by a newline",
+			})
+		} else if !wantNewline && startMsg != nil {
+			v.messages = append(v.messages, *startMsg)
+		}
+
+		if msg := checkEnd(v.fset, stmt.Rbrace, last); msg != nil {
+			v.messages = append(v.messages, *msg)
+		}
+	}
+
+	return v
+}
+
+func checkMultiLine(v *visitor, body *ast.BlockStmt, stmtStart ast.Node) {
+	start, end := posLine(v.fset, stmtStart.Pos()), posLine(v.fset, stmtStart.End())
+
+	if end > start { // Check only multi line conditions
+		v.wantNewline[body] = true
+	}
+}
+
+func posLine(fset *token.FileSet, pos token.Pos) int {
+	return fset.PositionFor(pos, false).Line
+}
+
+func firstAndLast(comments []*ast.CommentGroup, fset *token.FileSet, stmt *ast.BlockStmt) (token.Pos, ast.Node, ast.Node) {
+	openingPos := stmt.Lbrace + 1
+
+	if len(stmt.List) == 0 {
+		return openingPos, nil, nil
+	}
+
+	first, last := ast.Node(stmt.List[0]), ast.Node(stmt.List[len(stmt.List)-1])
+
+	for _, c := range comments {
+		// If the comment is on the same line as the opening pos (initially the
+		// left bracket) but it starts after the pos the comment must be after
+		// the bracket and where that comment ends should be considered where
+		// the fix should start.
+		if posLine(fset, c.Pos()) == posLine(fset, openingPos) && c.Pos() > openingPos {
+			if posLine(fset, c.End()) != posLine(fset, openingPos) {
+				// This is a multiline comment that spans from the `LBrace` line
+				// to a line further down. This should always be seen as ok!
+				first = c
+			} else {
+				openingPos = c.End()
+			}
+		}
+
+		if posLine(fset, c.Pos()) == posLine(fset, stmt.Pos()) || posLine(fset, c.End()) == posLine(fset, stmt.End()) {
+			continue
+		}
+
+		if c.Pos() < stmt.Pos() || c.End() > stmt.End() {
+			continue
+		}
+
+		if c.Pos() < first.Pos() {
+			first = c
+		}
+
+		if c.End() > last.End() {
+			last = c
+		}
+	}
+
+	return openingPos, first, last
+}
+
+func checkStart(fset *token.FileSet, start token.Pos, first ast.Node) *Message {
+	if first == nil {
+		return nil
+	}
+
+	if posLine(fset, start)+1 < posLine(fset, first.Pos()) {
+		return &Message{
+			Diagnostic:  start,
+			FixStart:    start,
+			FixEnd:      first.Pos(),
+			LineNumbers: linesBetween(fset, start, first.Pos()),
+			MessageType: MessageTypeRemove,
+			Message:     "unnecessary leading newline",
+		}
+	}
+
+	return nil
+}
+
+func checkEnd(fset *token.FileSet, end token.Pos, last ast.Node) *Message {
+	if last == nil {
+		return nil
+	}
+
+	if posLine(fset, end)-1 > posLine(fset, last.End()) {
+		return &Message{
+			Diagnostic:  end,
+			FixStart:    last.End(),
+			FixEnd:      end,
+			LineNumbers: linesBetween(fset, last.End(), end),
+			MessageType: MessageTypeRemove,
+			Message:     "unnecessary trailing newline",
+		}
+	}
+
+	return nil
+}
+
+func linesBetween(fset *token.FileSet, a, b token.Pos) []int {
+	lines := []int{}
+	aPosition := fset.PositionFor(a, false)
+	bPosition := fset.PositionFor(b, false)
+
+	for i := aPosition.Line + 1; i < bPosition.Line; i++ {
+		lines = append(lines, i)
+	}
+
+	return lines
+}
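The new file above turns whitespace into a regular `go/analysis` analyzer. Assuming nothing beyond what the hunk shows plus the standard `singlechecker` driver, a minimal standalone runner could look like this (the `main` package is hypothetical):

```go
// Hypothetical standalone driver for the analyzer defined above.
package main

import (
	"github.com/ultraware/whitespace"

	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// MultiIf and MultiFunc correspond to the -multi-if and -multi-func flags
	// registered by NewAnalyzer; passing nil instead would keep the defaults.
	singlechecker.Main(whitespace.NewAnalyzer(&whitespace.Settings{
		MultiIf:   true,
		MultiFunc: true,
	}))
}
```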
diff --git a/vendor/github.com/uudashr/gocognit/README.md b/vendor/github.com/uudashr/gocognit/README.md
index 1e028c7897342daa05e133b2f1a157bc6497b790..57f31cf740f9b1bc2a0c7121faf2e5764b42d4a6 100644
--- a/vendor/github.com/uudashr/gocognit/README.md
+++ b/vendor/github.com/uudashr/gocognit/README.md
@@ -1,6 +1,6 @@
 [![GoDoc](https://godoc.org/github.com/uudashr/gocognit?status.svg)](https://godoc.org/github.com/uudashr/gocognit)
 # Gocognit
-Gocognit calculates cognitive complexities of functions in Go source code. A measurement of how hard does the code is intuitively to understand.
+Gocognit calculates cognitive complexities of functions (and methods) in Go source code. This is a measure of how hard the code is to understand intuitively.
 
 ## Understanding the complexity
 
@@ -37,10 +37,10 @@ func GetWords(number int) string {
 
 As you can see, the code above is the same, but the second version is easier to understand; that is why its cognitive complexity score is lower compared to the first one.
 
-## Comparison with cyclometic complexity
+## Comparison with cyclomatic complexity
 
 ### Example 1
-#### Cyclometic complexity
+#### Cyclomatic complexity
 ```go
 func GetWords(number int) string {      // +1
     switch number {
@@ -160,16 +160,40 @@ $ go get github.com/uudashr/gocognit/cmd/gocognit
 ```
 $ gocognit
 Calculate cognitive complexities of Go functions.
+
 Usage:
-        gocognit [flags] <Go file or directory> ...
+
+  gocognit [<flag> ...] <Go file or directory> ...
+
 Flags:
-        -over N   show functions with complexity > N only and
-                  return exit code 1 if the set is non-empty
-        -top N    show the top N most complex functions only
-        -avg      show the average complexity over all functions,
-                  not depending on whether -over or -top are set
-The output fields for each line are:
-<complexity> <package> <function> <file:row:column>
+
+  -over N    show functions with complexity > N only
+             and return exit code 1 if the output is non-empty
+  -top N     show the top N most complex functions only
+  -avg       show the average complexity over all functions,
+             not depending on whether -over or -top are set
+  -json      encode the output as JSON
+  -f format  the output format template to use
+             (default "{{.PkgName}}.{{.FuncName}}:{{.Complexity}}:{{.Pos}}")
+
+The (default) output fields for each line are:
+
+  <complexity> <package> <function> <file:row:column>
+
+which corresponds to the default format template:
+
+  {{.Complexity}} {{.PkgName}} {{.FuncName}} {{.Pos}}
+
+The struct being passed to the template is:
+
+  type Stat struct {
+    PkgName    string
+    FuncName   string
+    Complexity int
+    Pos        token.Position
+  }
 ```
 
 Examples:
@@ -180,6 +204,7 @@ $ gocognit main.go
 $ gocognit -top 10 src/
 $ gocognit -over 25 docker
 $ gocognit -avg .
+$ gocognit -ignore "_test|testdata" .
 ```
 
 The output fields for each line are:
@@ -187,6 +212,15 @@ The output fields for each line are:
 <complexity> <package> <function> <file:row:column>
 ```
 
+## Ignore individual functions
+Ignore individual functions by adding the `//gocognit:ignore` directive to the function's doc comment.
+```go
+//gocognit:ignore
+func IgnoreMe() {
+    // ...
+}
+```
+
 ## Related project
 - [Gocyclo](https://github.com/fzipp/gocyclo), which this code is based on.
 - [Cognitive Complexity: A new way of measuring understandability](https://www.sonarsource.com/docs/CognitiveComplexity.pdf) white paper by G. Ann Campbell.
\ No newline at end of file
diff --git a/vendor/github.com/uudashr/gocognit/gocognit.go b/vendor/github.com/uudashr/gocognit/gocognit.go
index 2fe22abc4bfa4e12fde46fe2637fa53b5f712fb7..2bba2eb4f0f094b2273cc590baf80b026f457d8e 100644
--- a/vendor/github.com/uudashr/gocognit/gocognit.go
+++ b/vendor/github.com/uudashr/gocognit/gocognit.go
@@ -26,6 +26,11 @@ func (s Stat) String() string {
 func ComplexityStats(f *ast.File, fset *token.FileSet, stats []Stat) []Stat {
 	for _, decl := range f.Decls {
 		if fn, ok := decl.(*ast.FuncDecl); ok {
+			d := parseDirective(fn.Doc)
+			if d.Ignore {
+				continue
+			}
+
 			stats = append(stats, Stat{
 				PkgName:    f.Name.Name,
 				FuncName:   funcName(fn),
@@ -37,6 +42,24 @@ func ComplexityStats(f *ast.File, fset *token.FileSet, stats []Stat) []Stat {
 	return stats
 }
 
+type directive struct {
+	Ignore bool
+}
+
+func parseDirective(doc *ast.CommentGroup) directive {
+	if doc == nil {
+		return directive{}
+	}
+
+	for _, c := range doc.List {
+		if c.Text == "//gocognit:ignore" {
+			return directive{Ignore: true}
+		}
+	}
+
+	return directive{}
+}
+
 // funcName returns the name representation of a function or method:
 // "(Type).Name" for methods or simply "Name" for functions.
 func funcName(fn *ast.FuncDecl) string {
@@ -356,13 +379,19 @@ func run(pass *analysis.Pass) (interface{}, error) {
 		(*ast.FuncDecl)(nil),
 	}
 	inspect.Preorder(nodeFilter, func(n ast.Node) {
-		fnDecl := n.(*ast.FuncDecl)
+		funcDecl := n.(*ast.FuncDecl)
+
+		d := parseDirective(funcDecl.Doc)
+		if d.Ignore {
+			return
+		}
+
+		fnName := funcName(funcDecl)
 
-		fnName := funcName(fnDecl)
-		fnComplexity := Complexity(fnDecl)
+		fnComplexity := Complexity(funcDecl)
 
 		if fnComplexity > over {
-			pass.Reportf(fnDecl.Pos(), "cognitive complexity %d of func %s is high (> %d)", fnComplexity, fnName, over)
+			pass.Reportf(funcDecl.Pos(), "cognitive complexity %d of func %s is high (> %d)", fnComplexity, fnName, over)
 		}
 	})
 
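To make the new `parseDirective` behaviour above concrete: the directive has to appear verbatim as `//gocognit:ignore` inside the function's doc comment group, and both `ComplexityStats` and the analyzer now skip such functions. A small illustrative example (names are made up):

```go
package example

// legacyStateMachine is deliberately branchy legacy code that we accept as-is,
// so it is excluded from cognitive-complexity reporting.
//
//gocognit:ignore
func legacyStateMachine(states []int) int {
	total := 0
	for _, s := range states {
		if s > 0 {
			if s%2 == 0 { // nested branching that would otherwise add to the score
				total += s
			} else {
				total -= s
			}
		}
	}
	return total
}
```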
diff --git a/vendor/github.com/xen0n/gosmopolitan/README.md b/vendor/github.com/xen0n/gosmopolitan/README.md
index 93e3701e5d5b6f8b927e5e93e920f0c862f85e98..86a5e64e00f4654f172de64d8237cf10e967af03 100644
--- a/vendor/github.com/xen0n/gosmopolitan/README.md
+++ b/vendor/github.com/xen0n/gosmopolitan/README.md
@@ -54,7 +54,9 @@ following characteristics, and may not suit your particular project's needs:
 
 * Originally developed for an audience using non-Latin writing system(s),
 * Returns bare strings intended for humans containing such non-Latin characters, and
-* May occasionally (or frequently) refer to the local timezone.
+* May occasionally (or frequently) refer to the system timezone, but is
+  architecturally forbidden/discouraged to just treat the system timezone as
+  the reference timezone.
 
 For example, the lints may prove valuable if you're revamping a web service
 originally targetting the Chinese market (hence producing strings with Chinese
@@ -64,71 +66,18 @@ will output nothing.
 
 ## golangci-lint integration
 
-`gosmopolitan` is not integrated into [`golangci-lint`][gcl-home] yet, but
-you can nevertheless run it [as a custom plugin][gcl-plugin].
+`gosmopolitan` support [has been merged][gcl-pr] into [`golangci-lint`][gcl-home],
+and will be usable out-of-the-box in golangci-lint v1.53.0 or later.
 
+Due to the opinionated coding style this linter advocates and checks for, if
+you have `enable-all: true` in your `golangci.yml` and your project deals a
+lot with Chinese text and/or `time.Local`, then you'll get flooded with lints
+when you upgrade to golangci-lint v1.53.0. Just disable this linter (and
+better yet, move away from `enable-all: true`) if the style does not suit your
+specific use case.
+
+[gcl-pr]: https://github.com/golangci/golangci-lint/pull/3458
 [gcl-home]: https://golangci-lint.run
-[gcl-plugin]: https://golangci-lint.run/contributing/new-linters/#how-to-add-a-private-linter-to-golangci-lint
-
-First make yourself a plugin `.so` file like this:
-
-```go
-// compile this with something like `go build -buildmode=plugin`
-
-package main
-
-import (
-	"github.com/xen0n/gosmopolitan"
-	"golang.org/x/tools/go/analysis"
-)
-
-type analyzerPlugin struct{}
-
-func (analyzerPlugin) GetAnalyzers() []*analysis.Analyzer {
-	// You can customize the options via gosmopolitan.NewAnalyzerWithConfig
-	// instead.
-	return []*analysis.Analyzer{
-		gosmopolitan.DefaultAnalyzer,
-	}
-}
-
-var AnalyzerPlugin analyzerPlugin
-```
-
-You just need to make sure the `golang.org/x/tools` version used to build the
-plugin is consistent with that of your `golangci-lint` binary. (Of course the
-`golangci-lint` binary should be built with plugin support enabled too;
-notably, [the Homebrew `golangci-lint` is built without plugin support][hb-issue],
-so beware of this.)
-
-[hb-issue]: https://github.com/golangci/golangci-lint/issues/1182
-
-|`golangci-lint` version|`gosmopolitan` tag to use|
-|-----------------------|-------------------------|
-|1.50.x|v1.0.0|
-
-Then reference it in your `.golangci.yml`, and enable it in the `linters`
-section:
-
-```yaml
-linters:
-  # ...
-  enable:
-    # ...
-    - gosmopolitan
-    # ...
-
-linters-settings:
-  custom:
-    gosmopolitan:
-      path: 'path/to/your/plugin.so'
-      description: 'Report certain i18n/l10n anti-patterns in your Go codebase'
-      original-url: 'https://github.com/xen0n/gosmopolitan'
-  # ...
-```
-
-Then you can `golangci-lint run` and `//nolint:gosmopolitan` as you would
-with any other supported linter.
 
 ## License
 
diff --git a/vendor/github.com/xen0n/gosmopolitan/README.zh-Hans.md b/vendor/github.com/xen0n/gosmopolitan/README.zh-Hans.md
index 7f1b7b7adf71a0369f14efb3dbab0208005cd6d4..682d10880eb23eeef1125cd7ff91e77a0de6531e 100644
--- a/vendor/github.com/xen0n/gosmopolitan/README.zh-Hans.md
+++ b/vendor/github.com/xen0n/gosmopolitan/README.zh-Hans.md
@@ -46,7 +46,7 @@
 
 * 项目原先是为使用非拉丁字母书写系统的受众群体开发的,
 * 项目会返回包含这些非拉丁字母字符的裸的字符串(即,未经处理或变换的),
-* 项目可能偶尔(或者经常)引用程序当前运行环境的本地时区。
+* The project may occasionally (or frequently) refer to the system timezone of its runtime environment, but is architecturally forbidden or discouraged from treating the system timezone directly as the business reference timezone.
 
 举个例子:如果您在翻新一个本来面向中国用户群体(因此到处都在产生含有汉字的字符串)的
 web 服务,以使其更加国际化,这里的 lints 可能会很有价值。
@@ -55,66 +55,14 @@ linter 则什么都不会输出。
 
 ## 与 golangci-lint 集成
 
-`gosmopolitan` 目前没有集成进上游 [`golangci-lint`][gcl-home],但您仍然可以[以自定义插件的方式][gcl-plugin]使用本项目。
+`gosmopolitan` support [has been merged][gcl-pr] upstream into [`golangci-lint`][gcl-home], and is available out-of-the-box in golangci-lint v1.53.0 and later.
 
+[gcl-pr]: https://github.com/golangci/golangci-lint/pull/3458
 [gcl-home]: https://golangci-lint.run
-[gcl-plugin]: https://golangci-lint.run/contributing/new-linters/#how-to-add-a-private-linter-to-golangci-lint
 
-首先像这样做一个插件 `.so` 文件:
-
-```go
-// 用类似 `go build -buildmode=plugin` 的方式编译
-
-package main
-
-import (
-	"github.com/xen0n/gosmopolitan"
-	"golang.org/x/tools/go/analysis"
-)
-
-type analyzerPlugin struct{}
-
-func (analyzerPlugin) GetAnalyzers() []*analysis.Analyzer {
-	// 你可以用 gosmopolitan.NewAnalyzer 来自定义配置。
-	return []*analysis.Analyzer{
-		gosmopolitan.DefaultAnalyzer,
-	}
-}
-
-var AnalyzerPlugin analyzerPlugin
-```
-
-您只需要保证构建时使用的 `golang.org/x/tools` 模块版本和您的 `golangci-lint`
-二进制的相应模块版本一致。(当然,`golangci-lint` 二进制也应该包含插件支持;
-[Homebrew 的 `golangci-lint` 没有插件支持][hb-issue],尤其需要注意。)
-
-[hb-issue]: https://github.com/golangci/golangci-lint/issues/1182
-
-|`golangci-lint` 版本|对应可用的 `gosmopolitan` tag|
-|--------------------|-----------------------------|
-|1.50.x|v1.0.0|
-
-然后在您的 `.golangci.yml` 中引用它,在 `linters` 一节中启用它:
-
-```yaml
-linters:
-  # ...
-  enable:
-    # ...
-    - gosmopolitan
-    # ...
-
-linters-settings:
-  custom:
-    gosmopolitan:
-      path: 'path/to/your/plugin.so'
-      description: 'Report certain i18n/l10n anti-patterns in your Go codebase'
-      original-url: 'https://github.com/xen0n/gosmopolitan'
-  # ...
-```
-
-这样您就可以像使用其他 linters 一样 `golangci-lint run` 和
-`//nolint:gosmopolitan` 了。
+Because the coding style this linter advocates and checks for is highly opinionated, if you have
+`enable-all: true` in your `golangci.yml` and your project handles a lot of Chinese text and/or `time.Local`,
+you will be flooded with lints as soon as you upgrade to golangci-lint v1.53.0. If this style does not fit your specific use case, simply disable this linter (or, better yet, drop `enable-all: true` altogether).
 
 ## 许可证
 
diff --git a/vendor/github.com/yeya24/promlinter/promlinter.go b/vendor/github.com/yeya24/promlinter/promlinter.go
index 2ed4d60a8afa5eb25af9a9f223acff3e1eaded46..17a7f4c3e49217bebca0fb3fe1df306985774af1 100644
--- a/vendor/github.com/yeya24/promlinter/promlinter.go
+++ b/vendor/github.com/yeya24/promlinter/promlinter.go
@@ -82,6 +82,23 @@ type MetricFamilyWithPos struct {
 	Pos          token.Position
 }
 
+func (m *MetricFamilyWithPos) Labels() []string {
+	var arr []string
+	if len(m.MetricFamily.Metric) > 0 {
+		for _, label := range m.MetricFamily.Metric[0].Label {
+			if label.Value != nil {
+				arr = append(arr,
+					fmt.Sprintf("%s=%s",
+						strings.Trim(*label.Name, `"`),
+						strings.Trim(*label.Value, `"`)))
+			} else {
+				arr = append(arr, strings.Trim(*label.Name, `"`))
+			}
+		}
+	}
+	return arr
+}
+
 type visitor struct {
 	fs      *token.FileSet
 	metrics []MetricFamilyWithPos
@@ -93,6 +110,12 @@ type opt struct {
 	namespace string
 	subsystem string
 	name      string
+
+	help    string
+	helpSet bool
+
+	labels      []string
+	constLabels map[string]string
 }
 
 func RunList(fs *token.FileSet, files []*ast.File, strict bool) []MetricFamilyWithPos {
@@ -151,9 +174,6 @@ func RunLint(fs *token.FileSet, files []*ast.File, s Setting) []Issue {
 		}
 	}
 
-	sort.Slice(v.issues, func(i, j int) bool {
-		return v.issues[i].Pos.String() < v.issues[j].Pos.String()
-	})
 	return v.issues
 }
 
@@ -168,11 +188,23 @@ func (v *visitor) Visit(n ast.Node) ast.Visitor {
 
 	case *ast.SendStmt:
 		return v.parseSendMetricChanExpr(t)
+
+	default:
 	}
 
 	return v
 }
 
+func (v *visitor) addMetric(mfp *MetricFamilyWithPos) {
+	for _, m := range v.metrics {
+		if mfp.MetricFamily.String() == m.MetricFamily.String() {
+			return
+		}
+	}
+
+	v.metrics = append(v.metrics, *mfp)
+}
+
 func (v *visitor) parseCallerExpr(call *ast.CallExpr) ast.Visitor {
 	var (
 		metricType dto.MetricType
@@ -190,16 +222,17 @@ func (v *visitor) parseCallerExpr(call *ast.CallExpr) ast.Visitor {
 	*/
 	case *ast.Ident:
 		if stmt.Name == "NewCounterFunc" {
-			return v.parseOpts(call.Args[0], dto.MetricType_COUNTER)
+			return v.parseOpts(call.Args, dto.MetricType_COUNTER)
 		}
 
 		if stmt.Name == "NewGaugeFunc" {
-			return v.parseOpts(call.Args[0], dto.MetricType_GAUGE)
+			return v.parseOpts(call.Args, dto.MetricType_GAUGE)
 		}
 
 		if metricType, ok = metricsType[stmt.Name]; !ok {
 			return v
 		}
+
 		methodName = stmt.Name
 
 	/*
@@ -216,11 +249,11 @@ func (v *visitor) parseCallerExpr(call *ast.CallExpr) ast.Visitor {
 	*/
 	case *ast.SelectorExpr:
 		if stmt.Sel.Name == "NewCounterFunc" {
-			return v.parseOpts(call.Args[0], dto.MetricType_COUNTER)
+			return v.parseOpts(call.Args, dto.MetricType_COUNTER)
 		}
 
 		if stmt.Sel.Name == "NewGaugeFunc" {
-			return v.parseOpts(call.Args[0], dto.MetricType_GAUGE)
+			return v.parseOpts(call.Args, dto.MetricType_GAUGE)
 		}
 
 		if stmt.Sel.Name == "NewFamilyGenerator" && len(call.Args) == 5 {
@@ -230,6 +263,7 @@ func (v *visitor) parseCallerExpr(call *ast.CallExpr) ast.Visitor {
 		if metricType, ok = metricsType[stmt.Sel.Name]; !ok {
 			return v
 		}
+
 		methodName = stmt.Sel.Name
 
 	default:
@@ -254,19 +288,44 @@ func (v *visitor) parseCallerExpr(call *ast.CallExpr) ast.Visitor {
 		return v
 	}
 
-	return v.parseOpts(call.Args[0], metricType)
+	return v.parseOpts(call.Args, metricType)
 }
 
-func (v *visitor) parseOpts(optArg ast.Node, metricType dto.MetricType) ast.Visitor {
+func (v *visitor) parseOpts(optArgs []ast.Expr, metricType dto.MetricType) ast.Visitor {
 	// position for the first arg of the CallExpr
-	optsPosition := v.fs.Position(optArg.Pos())
-	opts, help := v.parseOptsExpr(optArg)
+	optsPosition := v.fs.Position(optArgs[0].Pos())
+	opts := v.parseOptsExpr(optArgs[0])
+
+	var metric *dto.Metric
+	if len(optArgs) > 1 {
+		// parse labels
+		if labelOpts := v.parseOptsExpr(optArgs[1]); labelOpts != nil && len(labelOpts.labels) > 0 {
+			metric = &dto.Metric{}
+			for idx, _ := range labelOpts.labels {
+				metric.Label = append(metric.Label,
+					&dto.LabelPair{
+						Name: &labelOpts.labels[idx],
+					})
+			}
+		}
+	}
+
 	if opts == nil {
 		return v
 	}
+
 	currentMetric := dto.MetricFamily{
 		Type: &metricType,
-		Help: help,
+	}
+
+	if !opts.helpSet {
+		currentMetric.Help = nil
+	} else {
+		currentMetric.Help = &opts.help
+	}
+
+	if metric != nil {
+		currentMetric.Metric = append(currentMetric.Metric, metric)
 	}
 
 	metricName := prometheus.BuildFQName(opts.namespace, opts.subsystem, opts.name)
@@ -278,7 +337,7 @@ func (v *visitor) parseOpts(optArg ast.Node, metricType dto.MetricType) ast.Visi
 	}
 	currentMetric.Name = &metricName
 
-	v.metrics = append(v.metrics, MetricFamilyWithPos{MetricFamily: &currentMetric, Pos: optsPosition})
+	v.addMetric(&MetricFamilyWithPos{MetricFamily: &currentMetric, Pos: optsPosition})
 	return v
 }
 
@@ -307,7 +366,8 @@ func (v *visitor) parseKSMMetrics(nameArg ast.Node, helpArg ast.Node, metricType
 		}
 	}
 
-	v.metrics = append(v.metrics, MetricFamilyWithPos{MetricFamily: &currentMetric, Pos: optsPosition})
+	v.addMetric(&MetricFamilyWithPos{MetricFamily: &currentMetric, Pos: optsPosition})
+
 	return v
 }
 
@@ -347,15 +407,40 @@ func (v *visitor) parseSendMetricChanExpr(chExpr *ast.SendStmt) ast.Visitor {
 		return v
 	}
 
-	name, help := v.parseConstMetricOptsExpr(call.Args[0])
-	if name == nil {
+	if len(call.Args) == 0 {
+		return v
+	}
+
+	descCall := v.parseConstMetricOptsExpr(call.Args[0])
+	if descCall == nil {
 		return v
 	}
 
 	metric := &dto.MetricFamily{
-		Name: name,
-		Help: help,
+		Name: descCall.name,
+		Help: descCall.help,
 	}
+
+	if len(descCall.labels) > 0 {
+		m := &dto.Metric{}
+		for idx, _ := range descCall.labels {
+			m.Label = append(m.Label,
+				&dto.LabelPair{
+					Name: &descCall.labels[idx],
+				})
+		}
+
+		for idx, _ := range descCall.constLabels {
+			m.Label = append(m.Label,
+				&dto.LabelPair{
+					Name:  &descCall.constLabels[idx][0],
+					Value: &descCall.constLabels[idx][1],
+				})
+		}
+
+		metric.Metric = append(metric.Metric, m)
+	}
+
 	switch methodName {
 	case "MustNewConstMetric", "NewLazyConstMetric":
 		switch t := call.Args[1].(type) {
@@ -373,11 +458,11 @@ func (v *visitor) parseSendMetricChanExpr(chExpr *ast.SendStmt) ast.Visitor {
 		metric.Type = &metricType
 	}
 
-	v.metrics = append(v.metrics, MetricFamilyWithPos{MetricFamily: metric, Pos: v.fs.Position(call.Pos())})
+	v.addMetric(&MetricFamilyWithPos{MetricFamily: metric, Pos: v.fs.Position(call.Pos())})
 	return v
 }
 
-func (v *visitor) parseOptsExpr(n ast.Node) (*opt, *string) {
+func (v *visitor) parseOptsExpr(n ast.Node) *opt {
 	switch stmt := n.(type) {
 	case *ast.CompositeLit:
 		return v.parseCompositeOpts(stmt)
@@ -395,17 +480,49 @@ func (v *visitor) parseOptsExpr(n ast.Node) (*opt, *string) {
 		return v.parseOptsExpr(stmt.X)
 	}
 
-	return nil, nil
+	return nil
 }
 
-func (v *visitor) parseCompositeOpts(stmt *ast.CompositeLit) (*opt, *string) {
+func (v *visitor) parseCompositeOpts(stmt *ast.CompositeLit) *opt {
 	metricOption := &opt{}
-	var help *string
+
 	for _, elt := range stmt.Elts {
+
+		// labels
+		label, ok := elt.(*ast.BasicLit)
+		if ok {
+			metricOption.labels = append(metricOption.labels, strings.Trim(label.Value, `"`))
+			continue
+		}
+
 		kvExpr, ok := elt.(*ast.KeyValueExpr)
 		if !ok {
 			continue
 		}
+
+		// const labels
+		if key, ok := kvExpr.Key.(*ast.BasicLit); ok {
+
+			if metricOption.constLabels == nil {
+				metricOption.constLabels = map[string]string{}
+			}
+
+			// only accept literal string value
+			//
+			//  {
+			//  	"key": "some-string-literal",
+			//  }
+			switch val := kvExpr.Value.(type) {
+			case *ast.BasicLit:
+				metricOption.constLabels[key.Value] = val.Value
+
+			default:
+				metricOption.constLabels[key.Value] = "?" // use a placeholder for the const label
+			}
+
+			continue
+		}
+
 		object, ok := kvExpr.Key.(*ast.Ident)
 		if !ok {
 			continue
@@ -418,7 +535,7 @@ func (v *visitor) parseCompositeOpts(stmt *ast.CompositeLit) (*opt, *string) {
 		// If failed to parse field value, stop parsing.
 		stringLiteral, ok := v.parseValue(object.Name, kvExpr.Value)
 		if !ok {
-			return nil, nil
+			return nil
 		}
 
 		switch object.Name {
@@ -429,14 +546,16 @@ func (v *visitor) parseCompositeOpts(stmt *ast.CompositeLit) (*opt, *string) {
 		case "Name":
 			metricOption.name = stringLiteral
 		case "Help":
-			help = &stringLiteral
+			metricOption.help = stringLiteral
+			metricOption.helpSet = true
 		}
 	}
 
-	return metricOption, help
+	return metricOption
 }
 
 func (v *visitor) parseValue(object string, n ast.Node) (string, bool) {
+
 	switch t := n.(type) {
 
 	// make sure it is string literal value
@@ -452,14 +571,26 @@ func (v *visitor) parseValue(object string, n ast.Node) (string, bool) {
 			return "", false
 		}
 
-		if vs, ok := t.Obj.Decl.(*ast.ValueSpec); ok {
+		switch vs := t.Obj.Decl.(type) {
+
+		case *ast.ValueSpec:
+			// var some string = "some string"
 			return v.parseValue(object, vs)
+
+		case *ast.AssignStmt:
+			// TODO:
+			// some := "some string"
+			return "", false
+
+		default:
+			return "", false
 		}
 
 	case *ast.ValueSpec:
 		if len(t.Values) == 0 {
 			return "", false
 		}
+
 		return v.parseValue(object, t.Values[0])
 
 	// For binary expr, we only support adding two strings like `foo` + `bar`.
@@ -539,7 +670,7 @@ func (v *visitor) parseValueCallExpr(object string, call *ast.CallExpr) (string,
 	return "", false
 }
 
-func (v *visitor) parseConstMetricOptsExpr(n ast.Node) (*string, *string) {
+func (v *visitor) parseConstMetricOptsExpr(n ast.Node) *descCallExpr {
 	switch stmt := n.(type) {
 	case *ast.CallExpr:
 		return v.parseNewDescCallExpr(stmt)
@@ -580,10 +711,16 @@ func (v *visitor) parseConstMetricOptsExpr(n ast.Node) (*string, *string) {
 		}
 	}
 
-	return nil, nil
+	return nil
 }
 
-func (v *visitor) parseNewDescCallExpr(call *ast.CallExpr) (*string, *string) {
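+// descCallExpr holds the pieces parsed from a NewDesc call: the metric's name
+// and help text, plus any variable labels and const label pairs.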
+type descCallExpr struct {
+	name, help  *string
+	labels      []string
+	constLabels [][2]string
+}
+
+func (v *visitor) parseNewDescCallExpr(call *ast.CallExpr) *descCallExpr {
 	var (
 		help string
 		name string
@@ -600,7 +737,7 @@ func (v *visitor) parseNewDescCallExpr(call *ast.CallExpr) (*string, *string) {
 					Text:   fmt.Sprintf("parsing desc with function %s is not supported", expr.Name),
 				})
 			}
-			return nil, nil
+			return nil
 		}
 	case *ast.SelectorExpr:
 		if expr.Sel.Name != "NewDesc" {
@@ -611,7 +748,7 @@ func (v *visitor) parseNewDescCallExpr(call *ast.CallExpr) (*string, *string) {
 					Text:   fmt.Sprintf("parsing desc with function %s is not supported", expr.Sel.Name),
 				})
 			}
-			return nil, nil
+			return nil
 		}
 	default:
 		if v.strict {
@@ -621,7 +758,7 @@ func (v *visitor) parseNewDescCallExpr(call *ast.CallExpr) (*string, *string) {
 				Text:   fmt.Sprintf("parsing desc of %T is not supported", expr),
 			})
 		}
-		return nil, nil
+		return nil
 	}
 
 	// k8s.io/component-base/metrics.NewDesc has 6 args
@@ -632,19 +769,45 @@ func (v *visitor) parseNewDescCallExpr(call *ast.CallExpr) (*string, *string) {
 			Pos:    v.fs.Position(call.Pos()),
 			Text:   "NewDesc should have at least 4 args",
 		})
-		return nil, nil
+		return nil
 	}
 
 	name, ok = v.parseValue("fqName", call.Args[0])
 	if !ok {
-		return nil, nil
+		return nil
 	}
+
 	help, ok = v.parseValue("help", call.Args[1])
 	if !ok {
-		return nil, nil
+		return nil
+	}
+
+	res := &descCallExpr{
+		name: &name,
+		help: &help,
+	}
+
+	if x, ok := call.Args[2].(*ast.CompositeLit); ok {
+		opt := v.parseCompositeOpts(x)
+		if opt == nil {
+			return nil
+		}
+
+		res.labels = opt.labels
+	}
+
+	if x, ok := call.Args[3].(*ast.CompositeLit); ok {
+		opt := v.parseCompositeOpts(x)
+		if opt == nil {
+			return nil
+		}
+
+		for k, v := range opt.constLabels {
+			res.constLabels = append(res.constLabels, [2]string{k, v})
+		}
 	}
 
-	return &name, &help
+	return res
 }
 
 func mustUnquote(str string) string {
diff --git a/vendor/github.com/ykadowak/zerologlint/.goreleaser.yaml b/vendor/github.com/ykadowak/zerologlint/.goreleaser.yaml
index c1b23f00e9edff46d63d92bbc5d16bc345035169..f3af3f21218d78d55baeeed80e906876c399cd9b 100644
--- a/vendor/github.com/ykadowak/zerologlint/.goreleaser.yaml
+++ b/vendor/github.com/ykadowak/zerologlint/.goreleaser.yaml
@@ -11,13 +11,6 @@ builds:
       - linux
       - windows
       - darwin
-archives:
-  - replacements:
-      darwin: Darwin
-      linux: Linux
-      windows: Windows
-      386: i386
-      amd64: x86_64
 checksum:
   name_template: 'checksums.txt'
 snapshot:
diff --git a/vendor/github.com/ykadowak/zerologlint/zerologlint.go b/vendor/github.com/ykadowak/zerologlint/zerologlint.go
index bec50e52bca125d8c758d9cb9ba3aecdd6025444..8c8fb74fc3de3e9fffb909b6321bdb49ac4ebd88 100644
--- a/vendor/github.com/ykadowak/zerologlint/zerologlint.go
+++ b/vendor/github.com/ykadowak/zerologlint/zerologlint.go
@@ -13,8 +13,8 @@ import (
 )
 
 var Analyzer = &analysis.Analyzer{
-	Name: "zerologlinter",
-	Doc:  "finds cases where zerolog methods are not followed by Msg or Send",
+	Name: "zerologlint",
+	Doc:  "Detects the wrong usage of `zerolog` that a user forgets to dispatch with `Send` or `Msg`",
 	Run:  run,
 	Requires: []*analysis.Analyzer{
 		buildssa.Analyzer,
@@ -26,42 +26,65 @@ type posser interface {
 	Pos() token.Pos
 }
 
-// posser is an interface just to hold both ssa.Call and ssa.Defer in our set
+// callDefer is an interface just to hold both ssa.Call and ssa.Defer in our set
 type callDefer interface {
 	Common() *ssa.CallCommon
 	Pos() token.Pos
 }
 
-func run(pass *analysis.Pass) (interface{}, error) {
-	srcFuncs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs
-
-	// This set holds all the ssa block that is a zerolog.Event type instance
+type linter struct {
+	// eventSet holds every ssa block that is a zerolog.Event type instance
 	// that should be dispatched.
 	// Everytime the zerolog.Event is dispatched with Msg() or Send(),
 	// deletes that block from this set.
 	// At the end, check if the set is empty, or report the not dispatched block.
-	set := make(map[posser]struct{})
+	eventSet    map[posser]struct{}
+	// deleteLater holds the ssa blocks that should be deleted from eventSet after
+	// all the inspection is done.
+	// This is required because the `else` ssa block comes after the dispatch of the `if` block.
+	// e.g., if err != nil { log.Error() } else { log.Info() } log.Send()
+	//       deleteLater takes care of the log.Info() block.
+	deleteLater map[posser]struct{}
+	recLimit    uint
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+	srcFuncs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs
+
+	l := &linter{
+		eventSet:    make(map[posser]struct{}),
+		deleteLater: make(map[posser]struct{}),
+		recLimit:    100,
+	}
 
 	for _, sf := range srcFuncs {
 		for _, b := range sf.Blocks {
 			for _, instr := range b.Instrs {
 				if c, ok := instr.(*ssa.Call); ok {
-					inspect(c, &set)
+					l.inspect(c)
 				} else if c, ok := instr.(*ssa.Defer); ok {
-					inspect(c, &set)
+					l.inspect(c)
 				}
 			}
 		}
 	}
+
+	// apply deleteLater to eventSet for else branches of if-else cases
+
+	for k := range l.deleteLater {
+		delete(l.eventSet, k)
+	}
+
 	// At the end, if the set is clear -> ok.
-	// Otherwise, there must be a left zerolog.Event var that weren't dispached. So report it.
-	for k := range set {
+	// Otherwise, there must be a leftover zerolog.Event var that wasn't dispatched. So report it.
+	for k := range l.eventSet {
 		pass.Reportf(k.Pos(), "must be dispatched by Msg or Send method")
 	}
+
 	return nil, nil
 }
 
-func inspect(cd callDefer, set *map[posser]struct{}) {
+func (l *linter) inspect(cd callDefer) {
 	c := cd.Common()
 
 	// check if it's in github.com/rs/zerolog/log since there's some
@@ -70,7 +93,7 @@ func inspect(cd callDefer, set *map[posser]struct{}) {
 	if isInLogPkg(*c) || isLoggerRecv(*c) {
 		if isZerologEvent(c.Value) {
 			// this ssa block should be dispatched afterwards at some point
-			(*set)[cd] = struct{}{}
+			l.eventSet[cd] = struct{}{}
 			return
 		}
 	}
@@ -88,7 +111,7 @@ func inspect(cd callDefer, set *map[posser]struct{}) {
 		for _, p := range f.Params {
 			if isZerologEvent(p) {
 				// check if this zerolog.Event as a parameter is dispatched in the function
-				// TODO: specifically, it can be dispatched in another function that is called in this function, and
+				// TODO: technically, it can be dispatched in another function that is called in this function, and
 				//       this algorithm cannot track that. But I'm tired of thinking about that for now.
 				for _, b := range f.Blocks {
 					for _, instr := range b.Instrs {
@@ -96,10 +119,12 @@ func inspect(cd callDefer, set *map[posser]struct{}) {
 						case *ssa.Call:
 							if inspectDispatchInFunction(v.Common()) {
 								shouldReturn = false
+								break
 							}
 						case *ssa.Defer:
 							if inspectDispatchInFunction(v.Common()) {
 								shouldReturn = false
+								break
 							}
 						}
 					}
@@ -112,19 +137,56 @@ func inspect(cd callDefer, set *map[posser]struct{}) {
 	}
 	for _, arg := range c.Args {
 		if isZerologEvent(arg) {
-			val := getRootSsaValue(arg)
-			// if there's branch, remove both ways from the set
-			if phi, ok := val.(*ssa.Phi); ok {
+			// if there's branch, track both ways
+			// this is for the case like:
+			//   logger := log.Info()
+			//   if err != nil {
+			//     logger = log.Error()
+			//   }
+			//   logger.Send()
+			//
+			// Similar case like below goes to the same root but that doesn't
+			// have any side effect.
+			//   logger := log.Info()
+			//   if err != nil {
+			//     logger = logger.Str("a", "b")
+			//   }
+			//   logger.Send()
+			if phi, ok := arg.(*ssa.Phi); ok {
 				for _, edge := range phi.Edges {
-					delete(*set, edge)
+					l.dfsEdge(edge, make(map[ssa.Value]struct{}), 0)
 				}
 			} else {
-				delete(*set, val)
+				val := getRootSsaValue(arg)
+				delete(l.eventSet, val)
 			}
 		}
 	}
 }
 
+func (l *linter) dfsEdge(v ssa.Value, visit map[ssa.Value]struct{}, cnt uint) {
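+	// Walk the phi edges depth-first: every non-phi root value reached is added
+	// to deleteLater so it is removed from eventSet once inspection finishes.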
+	// only for safety
+	if cnt > l.recLimit {
+		return
+	}
+	cnt++
+
+	if _, ok := visit[v]; ok {
+		return
+	}
+	visit[v] = struct{}{}
+
+	val := getRootSsaValue(v)
+	phi, ok := val.(*ssa.Phi)
+	if !ok {
+		l.deleteLater[val] = struct{}{}
+		return
+	}
+	for _, edge := range phi.Edges {
+		l.dfsEdge(edge, visit, cnt)
+	}
+}
+
 func inspectDispatchInFunction(cc *ssa.CallCommon) bool {
 	if isDispatchMethod(cc.StaticCallee()) {
 		for _, arg := range cc.Args {
diff --git a/vendor/gitlab.com/bosi/decorder/.gitlab-ci.yml b/vendor/gitlab.com/bosi/decorder/.gitlab-ci.yml
index f3ec4f21a206c06109d1869388198909cf1d4382..0e7ed1b7a97ee1f2125cd57ef0951cf5d7f80338 100644
--- a/vendor/gitlab.com/bosi/decorder/.gitlab-ci.yml
+++ b/vendor/gitlab.com/bosi/decorder/.gitlab-ci.yml
@@ -12,7 +12,7 @@ stages:
 
 test:
     stage: test
-    image: golang:1.20.6@sha256:cfc9d1b07b1ef4f7a4571f0b60a99646a92ef76adb7d9943f4cb7b606c6554e2
+    image: golang:1.22.2@sha256:450e3822c7a135e1463cd83e51c8e2eb03b86a02113c89424e6f0f8344bb4168
     before_script:
         - set -eu
         - if [[ -f .env.pipeline ]];then cp .env.pipeline .env;fi
@@ -27,7 +27,7 @@ test:
 
 lint:source-code:
     stage: test
-    image: golangci/golangci-lint:v1.53.3-alpine@sha256:b61d8503f0ad16499c023772301ec8c0f2559bf76c28d228c390446c5e647f55
+    image: golangci/golangci-lint:v1.55.2-alpine@sha256:22e4dd2bba6ad3c6ef918432b92329b51e82d55e470d268d315bfff6a160bceb
     script:
         - apk add make bash
         - make settings
@@ -36,7 +36,7 @@ lint:source-code:
 
 license-check:
     stage: test
-    image: golang:1.20.6@sha256:cfc9d1b07b1ef4f7a4571f0b60a99646a92ef76adb7d9943f4cb7b606c6554e2
+    image: golang:1.22.2@sha256:450e3822c7a135e1463cd83e51c8e2eb03b86a02113c89424e6f0f8344bb4168
     before_script:
         - set -eu
         - if [[ -f .env.pipeline ]];then cp .env.pipeline .env;fi
@@ -53,7 +53,7 @@ license-check:
 
 pages:
     stage: release
-    image: golang:1.20.6@sha256:cfc9d1b07b1ef4f7a4571f0b60a99646a92ef76adb7d9943f4cb7b606c6554e2
+    image: golang:1.22.2@sha256:450e3822c7a135e1463cd83e51c8e2eb03b86a02113c89424e6f0f8344bb4168
     only:
         - tags
     script:
diff --git a/vendor/gitlab.com/bosi/decorder/analyzer.go b/vendor/gitlab.com/bosi/decorder/analyzer.go
index f486129447dcaee60c2d6f5b50d46fe47a718499..08f82ccc116f6baed733971195a82b0bb05f5d41 100644
--- a/vendor/gitlab.com/bosi/decorder/analyzer.go
+++ b/vendor/gitlab.com/bosi/decorder/analyzer.go
@@ -5,6 +5,7 @@ import (
 	"go/ast"
 	"go/token"
 	"strings"
+	"sync"
 
 	"golang.org/x/tools/go/analysis"
 )
@@ -65,6 +66,7 @@ var (
 		token.CONST: false,
 		token.VAR:   false,
 	}
+	decLock sync.Mutex
 )
 
 //nolint:lll
@@ -79,10 +81,16 @@ func init() {
 	Analyzer.Flags.BoolVar(&opts.disableInitFuncFirstCheck, FlagDiffc, false, "option to disable check that init function is always first function in file")
 }
 
-func run(pass *analysis.Pass) (interface{}, error) {
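+// initDec copies the parsed flag options into decNumConf under a lock; run may
+// be invoked concurrently for different packages, so the shared map should not
+// be written without synchronization.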
+func initDec() {
+	decLock.Lock()
 	decNumConf[token.TYPE] = opts.disableTypeDecNumCheck
 	decNumConf[token.CONST] = opts.disableConstDecNumCheck
 	decNumConf[token.VAR] = opts.disableVarDecNumCheck
+	decLock.Unlock()
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+	initDec()
 
 	for _, f := range pass.Files {
 		ast.Inspect(f, runDeclNumAndDecOrderCheck(pass))
diff --git a/vendor/go.tmz.dev/musttag/.golangci.yml b/vendor/go-simpler.org/musttag/.golangci.yml
similarity index 100%
rename from vendor/go.tmz.dev/musttag/.golangci.yml
rename to vendor/go-simpler.org/musttag/.golangci.yml
diff --git a/vendor/go.tmz.dev/musttag/.goreleaser.yml b/vendor/go-simpler.org/musttag/.goreleaser.yml
similarity index 55%
rename from vendor/go.tmz.dev/musttag/.goreleaser.yml
rename to vendor/go-simpler.org/musttag/.goreleaser.yml
index 259080886f677266c4efb12b4f052d44b3551964..dd75ac945b2723f491dee968a3cd0563bc16fac2 100644
--- a/vendor/go.tmz.dev/musttag/.goreleaser.yml
+++ b/vendor/go-simpler.org/musttag/.goreleaser.yml
@@ -16,12 +16,3 @@ archives:
   - format_overrides:
       - goos: windows
         format: zip
-
-brews:
-  - tap:
-      owner: tmzane
-      name: homebrew-tap
-      token: '{{ .Env.HOMEBREW_TAP_TOKEN }}'
-    homepage: https://github.com/tmzane/musttag
-    description: A Go linter that enforces field tags in (un)marshaled structs
-    license: MPL-2.0
diff --git a/vendor/go.tmz.dev/musttag/LICENSE b/vendor/go-simpler.org/musttag/LICENSE
similarity index 100%
rename from vendor/go.tmz.dev/musttag/LICENSE
rename to vendor/go-simpler.org/musttag/LICENSE
diff --git a/vendor/go-simpler.org/musttag/Makefile b/vendor/go-simpler.org/musttag/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..6165b16f474e1794ab7a84098c13d7b237add564
--- /dev/null
+++ b/vendor/go-simpler.org/musttag/Makefile
@@ -0,0 +1,28 @@
+.POSIX:
+.SUFFIXES:
+
+all: test lint
+
+test:
+	go test -race -shuffle=on -cover ./...
+
+test/cover:
+	go test -race -shuffle=on -coverprofile=coverage.out ./...
+	go tool cover -html=coverage.out
+
+lint:
+	golangci-lint run
+
+tidy:
+	go mod tidy
+
+generate:
+	go generate ./...
+
+# run `make pre-commit` once to install the hook.
+pre-commit: .git/hooks/pre-commit test lint tidy generate
+	git diff --exit-code
+
+.git/hooks/pre-commit:
+	echo "make pre-commit" > .git/hooks/pre-commit
+	chmod +x .git/hooks/pre-commit
diff --git a/vendor/go-simpler.org/musttag/README.md b/vendor/go-simpler.org/musttag/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..3f3a25330235945b6c6cc5785bbfb0e8b258b34b
--- /dev/null
+++ b/vendor/go-simpler.org/musttag/README.md
@@ -0,0 +1,102 @@
+# musttag
+
+[![checks](https://github.com/go-simpler/musttag/actions/workflows/checks.yml/badge.svg)](https://github.com/go-simpler/musttag/actions/workflows/checks.yml)
+[![pkg.go.dev](https://pkg.go.dev/badge/go-simpler.org/musttag.svg)](https://pkg.go.dev/go-simpler.org/musttag)
+[![goreportcard](https://goreportcard.com/badge/go-simpler.org/musttag)](https://goreportcard.com/report/go-simpler.org/musttag)
+[![codecov](https://codecov.io/gh/go-simpler/musttag/branch/main/graph/badge.svg)](https://codecov.io/gh/go-simpler/musttag)
+
+A Go linter that enforces field tags in (un)marshaled structs.
+
+## 📌 About
+
+`musttag` checks that exported fields of a struct passed to a `Marshal`-like function are annotated with the relevant tag:
+
+```go
+// BAD:
+var user struct {
+    Name string
+}
+data, err := json.Marshal(user)
+
+// GOOD:
+var user struct {
+    Name string `json:"name"`
+}
+data, err := json.Marshal(user)
+```
+
+The rationale, from the [Uber Style Guide][1]:
+
+> The serialized form of the structure is a contract between different systems.
+> Changes to the structure of the serialized form, including field names, break this contract.
+> Specifying field names inside tags makes the contract explicit,
+> and it guards against accidentally breaking the contract by refactoring or renaming fields.
+
+## 🚀 Features
+
+The following packages are supported out of the box:
+
+* [encoding/json][2]
+* [encoding/xml][3]
+* [gopkg.in/yaml.v3][4]
+* [github.com/BurntSushi/toml][5]
+* [github.com/mitchellh/mapstructure][6]
+* [github.com/jmoiron/sqlx][7]
+
+In addition, any [custom package](#custom-packages) can be added to the list.
+
+## 📦 Install
+
+`musttag` is integrated into [`golangci-lint`][8], and this is the recommended way to use it.
+
+To enable the linter, add the following lines to `.golangci.yml`:
+
+```yaml
+linters:
+  enable:
+    - musttag
+```
+
+Alternatively, you can download a prebuilt binary from the [Releases][9] page to use `musttag` standalone.
+
+## 📋 Usage
+
+Run `golangci-lint` with `musttag` enabled.
+See the list of [available options][10] to configure the linter.
+
+When using `musttag` standalone, pass the options as flags.
+
+### Custom packages
+
+To report a custom function, you need to add its description to `.golangci.yml`.
+The following is an example of adding support for [`hclsimple.Decode`][11]:
+
+```yaml
+linters-settings:
+  musttag:
+    functions:
+        # The full name of the function, including the package.
+      - name: github.com/hashicorp/hcl/v2/hclsimple.Decode
+        # The struct tag whose presence should be ensured.
+        tag: hcl
+        # The position of the argument to check.
+        arg-pos: 2
+```
+
+The same can be done via the `-fn=<name:tag:arg-pos>` flag when using `musttag` standalone:
+
+```shell
+musttag -fn="github.com/hashicorp/hcl/v2/hclsimple.DecodeFile:hcl:2" ./...
+```
+
+[1]: https://github.com/uber-go/guide/blob/master/style.md#use-field-tags-in-marshaled-structs
+[2]: https://pkg.go.dev/encoding/json
+[3]: https://pkg.go.dev/encoding/xml
+[4]: https://pkg.go.dev/gopkg.in/yaml.v3
+[5]: https://pkg.go.dev/github.com/BurntSushi/toml
+[6]: https://pkg.go.dev/github.com/mitchellh/mapstructure
+[7]: https://pkg.go.dev/github.com/jmoiron/sqlx
+[8]: https://golangci-lint.run
+[9]: https://github.com/go-simpler/musttag/releases
+[10]: https://golangci-lint.run/usage/linters/#musttag
+[11]: https://pkg.go.dev/github.com/hashicorp/hcl/v2/hclsimple#Decode
diff --git a/vendor/go-simpler.org/musttag/builtins.go b/vendor/go-simpler.org/musttag/builtins.go
new file mode 100644
index 0000000000000000000000000000000000000000..3305513f8f59832034551be1acb1716a9c4f2608
--- /dev/null
+++ b/vendor/go-simpler.org/musttag/builtins.go
@@ -0,0 +1,133 @@
+package musttag
+
+// builtins is a set of functions supported out of the box.
+var builtins = []Func{
+	// https://pkg.go.dev/encoding/json
+	{
+		Name: "encoding/json.Marshal", Tag: "json", ArgPos: 0,
+		ifaceWhitelist: []string{"encoding/json.Marshaler", "encoding.TextMarshaler"},
+	},
+	{
+		Name: "encoding/json.MarshalIndent", Tag: "json", ArgPos: 0,
+		ifaceWhitelist: []string{"encoding/json.Marshaler", "encoding.TextMarshaler"},
+	},
+	{
+		Name: "encoding/json.Unmarshal", Tag: "json", ArgPos: 1,
+		ifaceWhitelist: []string{"encoding/json.Unmarshaler", "encoding.TextUnmarshaler"},
+	},
+	{
+		Name: "(*encoding/json.Encoder).Encode", Tag: "json", ArgPos: 0,
+		ifaceWhitelist: []string{"encoding/json.Marshaler", "encoding.TextMarshaler"},
+	},
+	{
+		Name: "(*encoding/json.Decoder).Decode", Tag: "json", ArgPos: 0,
+		ifaceWhitelist: []string{"encoding/json.Unmarshaler", "encoding.TextUnmarshaler"},
+	},
+
+	// https://pkg.go.dev/encoding/xml
+	{
+		Name: "encoding/xml.Marshal", Tag: "xml", ArgPos: 0,
+		ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"},
+	},
+	{
+		Name: "encoding/xml.MarshalIndent", Tag: "xml", ArgPos: 0,
+		ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"},
+	},
+	{
+		Name: "encoding/xml.Unmarshal", Tag: "xml", ArgPos: 1,
+		ifaceWhitelist: []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"},
+	},
+	{
+		Name: "(*encoding/xml.Encoder).Encode", Tag: "xml", ArgPos: 0,
+		ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"},
+	},
+	{
+		Name: "(*encoding/xml.Decoder).Decode", Tag: "xml", ArgPos: 0,
+		ifaceWhitelist: []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"},
+	},
+	{
+		Name: "(*encoding/xml.Encoder).EncodeElement", Tag: "xml", ArgPos: 0,
+		ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"},
+	},
+	{
+		Name: "(*encoding/xml.Decoder).DecodeElement", Tag: "xml", ArgPos: 0,
+		ifaceWhitelist: []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"},
+	},
+
+	// https://pkg.go.dev/gopkg.in/yaml.v3
+	{
+		Name: "gopkg.in/yaml.v3.Marshal", Tag: "yaml", ArgPos: 0,
+		ifaceWhitelist: []string{"gopkg.in/yaml.v3.Marshaler"},
+	},
+	{
+		Name: "gopkg.in/yaml.v3.Unmarshal", Tag: "yaml", ArgPos: 1,
+		ifaceWhitelist: []string{"gopkg.in/yaml.v3.Unmarshaler"},
+	},
+	{
+		Name: "(*gopkg.in/yaml.v3.Encoder).Encode", Tag: "yaml", ArgPos: 0,
+		ifaceWhitelist: []string{"gopkg.in/yaml.v3.Marshaler"},
+	},
+	{
+		Name: "(*gopkg.in/yaml.v3.Decoder).Decode", Tag: "yaml", ArgPos: 0,
+		ifaceWhitelist: []string{"gopkg.in/yaml.v3.Unmarshaler"},
+	},
+
+	// https://pkg.go.dev/github.com/BurntSushi/toml
+	{
+		Name: "github.com/BurntSushi/toml.Unmarshal", Tag: "toml", ArgPos: 1,
+		ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"},
+	},
+	{
+		Name: "github.com/BurntSushi/toml.Decode", Tag: "toml", ArgPos: 1,
+		ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"},
+	},
+	{
+		Name: "github.com/BurntSushi/toml.DecodeFS", Tag: "toml", ArgPos: 2,
+		ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"},
+	},
+	{
+		Name: "github.com/BurntSushi/toml.DecodeFile", Tag: "toml", ArgPos: 1,
+		ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"},
+	},
+	{
+		Name: "(*github.com/BurntSushi/toml.Encoder).Encode", Tag: "toml", ArgPos: 0,
+		ifaceWhitelist: []string{"encoding.TextMarshaler"},
+	},
+	{
+		Name: "(*github.com/BurntSushi/toml.Decoder).Decode", Tag: "toml", ArgPos: 0,
+		ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"},
+	},
+
+	// https://pkg.go.dev/github.com/mitchellh/mapstructure
+	{Name: "github.com/mitchellh/mapstructure.Decode", Tag: "mapstructure", ArgPos: 1},
+	{Name: "github.com/mitchellh/mapstructure.DecodeMetadata", Tag: "mapstructure", ArgPos: 1},
+	{Name: "github.com/mitchellh/mapstructure.WeakDecode", Tag: "mapstructure", ArgPos: 1},
+	{Name: "github.com/mitchellh/mapstructure.WeakDecodeMetadata", Tag: "mapstructure", ArgPos: 1},
+
+	// https://pkg.go.dev/github.com/jmoiron/sqlx
+	{Name: "github.com/jmoiron/sqlx.Get", Tag: "db", ArgPos: 1},
+	{Name: "github.com/jmoiron/sqlx.GetContext", Tag: "db", ArgPos: 2},
+	{Name: "github.com/jmoiron/sqlx.Select", Tag: "db", ArgPos: 1},
+	{Name: "github.com/jmoiron/sqlx.SelectContext", Tag: "db", ArgPos: 2},
+	{Name: "github.com/jmoiron/sqlx.StructScan", Tag: "db", ArgPos: 1},
+	{Name: "(*github.com/jmoiron/sqlx.Conn).GetContext", Tag: "db", ArgPos: 1},
+	{Name: "(*github.com/jmoiron/sqlx.Conn).SelectContext", Tag: "db", ArgPos: 1},
+	{Name: "(*github.com/jmoiron/sqlx.DB).Get", Tag: "db", ArgPos: 0},
+	{Name: "(*github.com/jmoiron/sqlx.DB).GetContext", Tag: "db", ArgPos: 1},
+	{Name: "(*github.com/jmoiron/sqlx.DB).Select", Tag: "db", ArgPos: 0},
+	{Name: "(*github.com/jmoiron/sqlx.DB).SelectContext", Tag: "db", ArgPos: 1},
+	{Name: "(*github.com/jmoiron/sqlx.NamedStmt).Get", Tag: "db", ArgPos: 0},
+	{Name: "(*github.com/jmoiron/sqlx.NamedStmt).GetContext", Tag: "db", ArgPos: 1},
+	{Name: "(*github.com/jmoiron/sqlx.NamedStmt).Select", Tag: "db", ArgPos: 0},
+	{Name: "(*github.com/jmoiron/sqlx.NamedStmt).SelectContext", Tag: "db", ArgPos: 1},
+	{Name: "(*github.com/jmoiron/sqlx.Row).StructScan", Tag: "db", ArgPos: 0},
+	{Name: "(*github.com/jmoiron/sqlx.Rows).StructScan", Tag: "db", ArgPos: 0},
+	{Name: "(*github.com/jmoiron/sqlx.Stmt).Get", Tag: "db", ArgPos: 0},
+	{Name: "(*github.com/jmoiron/sqlx.Stmt).GetContext", Tag: "db", ArgPos: 1},
+	{Name: "(*github.com/jmoiron/sqlx.Stmt).Select", Tag: "db", ArgPos: 0},
+	{Name: "(*github.com/jmoiron/sqlx.Stmt).SelectContext", Tag: "db", ArgPos: 1},
+	{Name: "(*github.com/jmoiron/sqlx.Tx).Get", Tag: "db", ArgPos: 0},
+	{Name: "(*github.com/jmoiron/sqlx.Tx).GetContext", Tag: "db", ArgPos: 1},
+	{Name: "(*github.com/jmoiron/sqlx.Tx).Select", Tag: "db", ArgPos: 0},
+	{Name: "(*github.com/jmoiron/sqlx.Tx).SelectContext", Tag: "db", ArgPos: 1},
+}
diff --git a/vendor/go-simpler.org/musttag/musttag.go b/vendor/go-simpler.org/musttag/musttag.go
new file mode 100644
index 0000000000000000000000000000000000000000..70c84201b009ae70325bcee66dc85f678fad99b0
--- /dev/null
+++ b/vendor/go-simpler.org/musttag/musttag.go
@@ -0,0 +1,282 @@
+// Package musttag implements the musttag analyzer.
+package musttag
+
+import (
+	"flag"
+	"fmt"
+	"go/ast"
+	"go/types"
+	"reflect"
+	"strconv"
+	"strings"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/go/types/typeutil"
+)
+
+// Func describes a function call to look for, e.g. [json.Marshal].
+type Func struct {
+	Name   string // The full name of the function, including the package.
+	Tag    string // The struct tag whose presence should be ensured.
+	ArgPos int    // The position of the argument to check.
+
+	// a list of interface names (including the package);
+	// if at least one is implemented by the argument, no check is performed.
+	ifaceWhitelist []string
+}
+
+// New creates a new musttag analyzer.
+// To report a custom function, provide its description as [Func].
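+//
+// For example (an illustrative call only; the function name, tag, and argument
+// position below are hypothetical):
+//
+//	analyzer := New(Func{Name: "example.com/codec.Marshal", Tag: "codec", ArgPos: 0})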
+func New(funcs ...Func) *analysis.Analyzer {
+	var flagFuncs []Func
+	return &analysis.Analyzer{
+		Name:     "musttag",
+		Doc:      "enforce field tags in (un)marshaled structs",
+		Flags:    flags(&flagFuncs),
+		Requires: []*analysis.Analyzer{inspect.Analyzer},
+		Run: func(pass *analysis.Pass) (any, error) {
+			l := len(builtins) + len(funcs) + len(flagFuncs)
+			allFuncs := make(map[string]Func, l)
+
+			merge := func(slice []Func) {
+				for _, fn := range slice {
+					allFuncs[fn.Name] = fn
+				}
+			}
+			merge(builtins)
+			merge(funcs)
+			merge(flagFuncs)
+
+			mainModule, err := getMainModule()
+			if err != nil {
+				return nil, err
+			}
+
+			return run(pass, mainModule, allFuncs)
+		},
+	}
+}
+
+func flags(funcs *[]Func) flag.FlagSet {
+	fs := flag.NewFlagSet("musttag", flag.ContinueOnError)
+	fs.Func("fn", "report a custom function (name:tag:arg-pos)", func(s string) error {
+		parts := strings.Split(s, ":")
+		if len(parts) != 3 || parts[0] == "" || parts[1] == "" {
+			return strconv.ErrSyntax
+		}
+		pos, err := strconv.Atoi(parts[2])
+		if err != nil {
+			return err
+		}
+		*funcs = append(*funcs, Func{
+			Name:   parts[0],
+			Tag:    parts[1],
+			ArgPos: pos,
+		})
+		return nil
+	})
+	return *fs
+}
+
+func run(pass *analysis.Pass, mainModule string, funcs map[string]Func) (_ any, err error) {
+	visit := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+	filter := []ast.Node{(*ast.CallExpr)(nil)}
+
+	visit.Preorder(filter, func(node ast.Node) {
+		if err != nil {
+			return // there is already an error.
+		}
+
+		call, ok := node.(*ast.CallExpr)
+		if !ok {
+			return // not a function call.
+		}
+
+		callee := typeutil.StaticCallee(pass.TypesInfo, call)
+		if callee == nil {
+			return // not a static call.
+		}
+
+		fn, ok := funcs[cutVendor(callee.FullName())]
+		if !ok {
+			return // unsupported function.
+		}
+
+		if len(call.Args) <= fn.ArgPos {
+			err = fmt.Errorf("musttag: Func.ArgPos cannot be %d: %s accepts only %d argument(s)", fn.ArgPos, fn.Name, len(call.Args))
+			return
+		}
+
+		arg := call.Args[fn.ArgPos]
+		if ident, ok := arg.(*ast.Ident); ok && ident.Obj == nil {
+			return // e.g. json.Marshal(nil)
+		}
+
+		typ := pass.TypesInfo.TypeOf(arg)
+		if typ == nil {
+			return // no type info found.
+		}
+
+		checker := checker{
+			mainModule:     mainModule,
+			seenTypes:      make(map[string]struct{}),
+			ifaceWhitelist: fn.ifaceWhitelist,
+			imports:        pass.Pkg.Imports(),
+		}
+
+		if valid := checker.checkType(typ, fn.Tag); valid {
+			return // nothing to report.
+		}
+
+		pass.Reportf(arg.Pos(), "the given struct should be annotated with the `%s` tag", fn.Tag)
+	})
+
+	return nil, err
+}
+
+type checker struct {
+	mainModule     string
+	seenTypes      map[string]struct{}
+	ifaceWhitelist []string
+	imports        []*types.Package
+}
+
+func (c *checker) checkType(typ types.Type, tag string) bool {
+	if _, ok := c.seenTypes[typ.String()]; ok {
+		return true // already checked.
+	}
+	c.seenTypes[typ.String()] = struct{}{}
+
+	styp, ok := c.parseStruct(typ)
+	if !ok {
+		return true // not a struct.
+	}
+
+	return c.checkStruct(styp, tag)
+}
+
+// recursively unwrap a type until we get to an underlying
+// raw struct type that should have its fields checked
+//
+//	SomeStruct -> struct{SomeStructField: ... }
+//	[]*SomeStruct -> struct{SomeStructField: ... }
+//	...
+//
+// exits early if it hits a type that implements a whitelisted interface
+func (c *checker) parseStruct(typ types.Type) (*types.Struct, bool) {
+	if implementsInterface(typ, c.ifaceWhitelist, c.imports) {
+		return nil, false // the type implements a Marshaler interface; see issue #64.
+	}
+
+	switch typ := typ.(type) {
+	case *types.Pointer:
+		return c.parseStruct(typ.Elem())
+
+	case *types.Array:
+		return c.parseStruct(typ.Elem())
+
+	case *types.Slice:
+		return c.parseStruct(typ.Elem())
+
+	case *types.Map:
+		return c.parseStruct(typ.Elem())
+
+	case *types.Named: // a struct of the named type.
+		pkg := typ.Obj().Pkg()
+		if pkg == nil {
+			return nil, false
+		}
+		if !strings.HasPrefix(pkg.Path(), c.mainModule) {
+			return nil, false
+		}
+		styp, ok := typ.Underlying().(*types.Struct)
+		if !ok {
+			return nil, false
+		}
+		return styp, true
+
+	case *types.Struct: // an anonymous struct.
+		return typ, true
+
+	default:
+		return nil, false
+	}
+}
+
+func (c *checker) checkStruct(styp *types.Struct, tag string) (valid bool) {
+	for i := 0; i < styp.NumFields(); i++ {
+		field := styp.Field(i)
+		if !field.Exported() {
+			continue
+		}
+
+		tagValue, ok := reflect.StructTag(styp.Tag(i)).Lookup(tag)
+		if !ok {
+			// tag is not required for embedded types; see issue #12.
+			if !field.Embedded() {
+				return false
+			}
+		}
+
+		// Do not recurse into ignored fields.
+		if tagValue == "-" {
+			continue
+		}
+
+		if valid := c.checkType(field.Type(), tag); !valid {
+			return false
+		}
+	}
+
+	return true
+}
+
+func implementsInterface(typ types.Type, ifaces []string, imports []*types.Package) bool {
+	findScope := func(pkgName string) (*types.Scope, bool) {
+		// fast path: check direct imports (e.g. looking for "encoding/json.Marshaler").
+		for _, direct := range imports {
+			if pkgName == cutVendor(direct.Path()) {
+				return direct.Scope(), true
+			}
+		}
+		// slow path: check indirect imports (e.g. looking for "encoding.TextMarshaler").
+		// TODO: only check indirect imports from the package (e.g. "encoding/json") of the analyzed function (e.g. "encoding/json.Marshal").
+		for _, direct := range imports {
+			for _, indirect := range direct.Imports() {
+				if pkgName == cutVendor(indirect.Path()) {
+					return indirect.Scope(), true
+				}
+			}
+		}
+		return nil, false
+	}
+
+	for _, ifacePath := range ifaces {
+		// "encoding/json.Marshaler" -> "encoding/json" + "Marshaler"
+		idx := strings.LastIndex(ifacePath, ".")
+		if idx == -1 {
+			continue
+		}
+		pkgName, ifaceName := ifacePath[:idx], ifacePath[idx+1:]
+
+		scope, ok := findScope(pkgName)
+		if !ok {
+			continue
+		}
+		obj := scope.Lookup(ifaceName)
+		if obj == nil {
+			continue
+		}
+		iface, ok := obj.Type().Underlying().(*types.Interface)
+		if !ok {
+			continue
+		}
+		if types.Implements(typ, iface) || types.Implements(types.NewPointer(typ), iface) {
+			return true
+		}
+	}
+
+	return false
+}
diff --git a/vendor/go-simpler.org/musttag/utils.go b/vendor/go-simpler.org/musttag/utils.go
new file mode 100644
index 0000000000000000000000000000000000000000..1a13f96c2830e93deaa76c4fabab08600ec8b093
--- /dev/null
+++ b/vendor/go-simpler.org/musttag/utils.go
@@ -0,0 +1,49 @@
+package musttag
+
+import (
+	"encoding/json"
+	"fmt"
+	"os/exec"
+	"strings"
+)
+
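+// getMainModule returns the path of the main module, as reported by
+// `go mod edit -json`.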
+func getMainModule() (string, error) {
+	args := [...]string{"go", "mod", "edit", "-json"}
+
+	out, err := exec.Command(args[0], args[1:]...).Output()
+	if err != nil {
+		return "", fmt.Errorf("running %q: %w", strings.Join(args[:], " "), err)
+	}
+
+	var info struct {
+		Module struct {
+			Path string `json:"Path"`
+		} `json:"Module"`
+	}
+	if err := json.Unmarshal(out, &info); err != nil {
+		return "", fmt.Errorf("decoding module info: %w\n%s", err, out)
+	}
+
+	return info.Module.Path, nil
+}
+
+// based on golang.org/x/tools/imports.VendorlessPath
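+// e.g. "(*example.com/vendor/encoding/json.Encoder).Encode" becomes
+// "(*encoding/json.Encoder).Encode".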
+func cutVendor(path string) string {
+	var prefix string
+
+	switch {
+	case strings.HasPrefix(path, "(*"):
+		prefix, path = "(*", path[len("(*"):]
+	case strings.HasPrefix(path, "("):
+		prefix, path = "(", path[len("("):]
+	}
+
+	if i := strings.LastIndex(path, "/vendor/"); i >= 0 {
+		return prefix + path[i+len("/vendor/"):]
+	}
+	if strings.HasPrefix(path, "vendor/") {
+		return prefix + path[len("vendor/"):]
+	}
+
+	return prefix + path
+}
diff --git a/vendor/go-simpler.org/sloglint/.golangci.yml b/vendor/go-simpler.org/sloglint/.golangci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..ef926a0562d20196a16536daf776784a8377cfce
--- /dev/null
+++ b/vendor/go-simpler.org/sloglint/.golangci.yml
@@ -0,0 +1,22 @@
+linters:
+  disable-all: true
+  enable:
+    # enabled by default:
+    - errcheck
+    - gosimple
+    - govet
+    - ineffassign
+    - staticcheck
+    - unused
+    # disabled by default:
+    - gocritic
+    - gofumpt
+
+linters-settings:
+  gocritic:
+    enabled-tags:
+      - diagnostic
+      - style
+      - performance
+      - experimental
+      - opinionated
diff --git a/vendor/go-simpler.org/sloglint/.goreleaser.yml b/vendor/go-simpler.org/sloglint/.goreleaser.yml
new file mode 100644
index 0000000000000000000000000000000000000000..d31ea11d3901e948fa68129f0322a8f12dbde427
--- /dev/null
+++ b/vendor/go-simpler.org/sloglint/.goreleaser.yml
@@ -0,0 +1,18 @@
+builds:
+  - main: ./cmd/sloglint
+    env:
+      - CGO_ENABLED=0
+    flags:
+      - -trimpath
+    ldflags:
+      - -s -w -X main.version={{.Version}}
+    targets:
+      - darwin_amd64
+      - darwin_arm64
+      - linux_amd64
+      - windows_amd64
+
+archives:
+  - format_overrides:
+      - goos: windows
+        format: zip
diff --git a/vendor/go-simpler.org/sloglint/LICENSE b/vendor/go-simpler.org/sloglint/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..a612ad9813b006ce81d1ee438dd784da99a54007
--- /dev/null
+++ b/vendor/go-simpler.org/sloglint/LICENSE
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+    means each individual or legal entity that creates, contributes to
+    the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+    means the combination of the Contributions of others (if any) used
+    by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+    means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+    means Source Code Form to which the initial Contributor has attached
+    the notice in Exhibit A, the Executable Form of such Source Code
+    Form, and Modifications of such Source Code Form, in each case
+    including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+    means
+
+    (a) that the initial Contributor has attached the notice described
+        in Exhibit B to the Covered Software; or
+
+    (b) that the Covered Software was made available under the terms of
+        version 1.1 or earlier of the License, but not also under the
+        terms of a Secondary License.
+
+1.6. "Executable Form"
+    means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+    means a work that combines Covered Software with other material, in
+    a separate file or files, that is not Covered Software.
+
+1.8. "License"
+    means this document.
+
+1.9. "Licensable"
+    means having the right to grant, to the maximum extent possible,
+    whether at the time of the initial grant or subsequently, any and
+    all of the rights conveyed by this License.
+
+1.10. "Modifications"
+    means any of the following:
+
+    (a) any file in Source Code Form that results from an addition to,
+        deletion from, or modification of the contents of Covered
+        Software; or
+
+    (b) any new file in Source Code Form that contains any Covered
+        Software.
+
+1.11. "Patent Claims" of a Contributor
+    means any patent claim(s), including without limitation, method,
+    process, and apparatus claims, in any patent Licensable by such
+    Contributor that would be infringed, but for the grant of the
+    License, by the making, using, selling, offering for sale, having
+    made, import, or transfer of either its Contributions or its
+    Contributor Version.
+
+1.12. "Secondary License"
+    means either the GNU General Public License, Version 2.0, the GNU
+    Lesser General Public License, Version 2.1, the GNU Affero General
+    Public License, Version 3.0, or any later versions of those
+    licenses.
+
+1.13. "Source Code Form"
+    means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+    means an individual or a legal entity exercising rights under this
+    License. For legal entities, "You" includes any entity that
+    controls, is controlled by, or is under common control with You. For
+    purposes of this definition, "control" means (a) the power, direct
+    or indirect, to cause the direction or management of such entity,
+    whether by contract or otherwise, or (b) ownership of more than
+    fifty percent (50%) of the outstanding shares or beneficial
+    ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+    Licensable by such Contributor to use, reproduce, make available,
+    modify, display, perform, distribute, and otherwise exploit its
+    Contributions, either on an unmodified basis, with Modifications, or
+    as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+    for sale, have made, import, and otherwise transfer either its
+    Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+    or
+
+(b) for infringements caused by: (i) Your and any other third party's
+    modifications of Covered Software, or (ii) the combination of its
+    Contributions with other software (except as part of its Contributor
+    Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+    its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+    Form, as described in Section 3.1, and You must inform recipients of
+    the Executable Form how they can obtain a copy of such Source Code
+    Form by reasonable means in a timely manner, at a charge no more
+    than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+    License, or sublicense it under different terms, provided that the
+    license for the Executable Form does not attempt to limit or alter
+    the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+*                                                                      *
+*  6. Disclaimer of Warranty                                           *
+*  -------------------------                                           *
+*                                                                      *
+*  Covered Software is provided under this License on an "as is"       *
+*  basis, without warranty of any kind, either expressed, implied, or  *
+*  statutory, including, without limitation, warranties that the       *
+*  Covered Software is free of defects, merchantable, fit for a        *
+*  particular purpose or non-infringing. The entire risk as to the     *
+*  quality and performance of the Covered Software is with You.        *
+*  Should any Covered Software prove defective in any respect, You     *
+*  (not any Contributor) assume the cost of any necessary servicing,   *
+*  repair, or correction. This disclaimer of warranty constitutes an   *
+*  essential part of this License. No use of any Covered Software is   *
+*  authorized under this License except under this disclaimer.         *
+*                                                                      *
+************************************************************************
+
+************************************************************************
+*                                                                      *
+*  7. Limitation of Liability                                          *
+*  --------------------------                                          *
+*                                                                      *
+*  Under no circumstances and under no legal theory, whether tort      *
+*  (including negligence), contract, or otherwise, shall any           *
+*  Contributor, or anyone who distributes Covered Software as          *
+*  permitted above, be liable to You for any direct, indirect,         *
+*  special, incidental, or consequential damages of any character      *
+*  including, without limitation, damages for lost profits, loss of    *
+*  goodwill, work stoppage, computer failure or malfunction, or any    *
+*  and all other commercial damages or losses, even if such party      *
+*  shall have been informed of the possibility of such damages. This   *
+*  limitation of liability shall not apply to liability for death or   *
+*  personal injury resulting from such party's negligence to the       *
+*  extent applicable law prohibits such limitation. Some               *
+*  jurisdictions do not allow the exclusion or limitation of           *
+*  incidental or consequential damages, so this exclusion and          *
+*  limitation may not apply to You.                                    *
+*                                                                      *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+  This Source Code Form is subject to the terms of the Mozilla Public
+  License, v. 2.0. If a copy of the MPL was not distributed with this
+  file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+  This Source Code Form is "Incompatible With Secondary Licenses", as
+  defined by the Mozilla Public License, v. 2.0.
diff --git a/vendor/go-simpler.org/sloglint/Makefile b/vendor/go-simpler.org/sloglint/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..6165b16f474e1794ab7a84098c13d7b237add564
--- /dev/null
+++ b/vendor/go-simpler.org/sloglint/Makefile
@@ -0,0 +1,28 @@
+.POSIX:
+.SUFFIXES:
+
+all: test lint
+
+test:
+	go test -race -shuffle=on -cover ./...
+
+test/cover:
+	go test -race -shuffle=on -coverprofile=coverage.out ./...
+	go tool cover -html=coverage.out
+
+lint:
+	golangci-lint run
+
+tidy:
+	go mod tidy
+
+generate:
+	go generate ./...
+
+# run `make pre-commit` once to install the hook.
+pre-commit: .git/hooks/pre-commit test lint tidy generate
+	git diff --exit-code
+
+.git/hooks/pre-commit:
+	echo "make pre-commit" > .git/hooks/pre-commit
+	chmod +x .git/hooks/pre-commit
diff --git a/vendor/go-simpler.org/sloglint/README.md b/vendor/go-simpler.org/sloglint/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..c080c2bea797d3ac279ea9d0195e537b102ac270
--- /dev/null
+++ b/vendor/go-simpler.org/sloglint/README.md
@@ -0,0 +1,173 @@
+# sloglint
+
+[![checks](https://github.com/go-simpler/sloglint/actions/workflows/checks.yml/badge.svg)](https://github.com/go-simpler/sloglint/actions/workflows/checks.yml)
+[![pkg.go.dev](https://pkg.go.dev/badge/go-simpler.org/sloglint.svg)](https://pkg.go.dev/go-simpler.org/sloglint)
+[![goreportcard](https://goreportcard.com/badge/go-simpler.org/sloglint)](https://goreportcard.com/report/go-simpler.org/sloglint)
+[![codecov](https://codecov.io/gh/go-simpler/sloglint/branch/main/graph/badge.svg)](https://codecov.io/gh/go-simpler/sloglint)
+
+A Go linter that ensures consistent code style when using `log/slog`.
+
+## 📌 About
+
+The `log/slog` API allows two different types of arguments: key-value pairs and attributes.
+While people may have different opinions about which one is better, most seem to agree on one thing: it should be consistent.
+With `sloglint` you can enforce various rules for `log/slog` based on your preferred code style.
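+
+For example, both of the following calls produce the same log record and differ only in argument style (a minimal illustration using the standard `log/slog` API):
+
+```go
+slog.Info("a user has logged in", "user_id", 42)           // key-value pair
+slog.Info("a user has logged in", slog.Int("user_id", 42)) // attribute
+```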
+
+## 🚀 Features
+
+* Enforce not mixing key-value pairs and attributes (default)
+* Enforce using either key-value pairs only or attributes only (optional)
+* Enforce not using global loggers (optional)
+* Enforce using methods that accept a context (optional)
+* Enforce using static log messages (optional)
+* Enforce using constants instead of raw keys (optional)
+* Enforce a single key naming convention (optional)
+* Enforce putting arguments on separate lines (optional)
+
+## 📦 Install
+
+`sloglint` is integrated into [`golangci-lint`][1], and this is the recommended way to use it.
+
+To enable the linter, add the following lines to `.golangci.yml`:
+
+```yaml
+linters:
+  enable:
+    - sloglint
+```
+
+Alternatively, you can download a prebuilt binary from the [Releases][2] page to use `sloglint` standalone.
+
+## 📋 Usage
+
+Run `golangci-lint` with `sloglint` enabled.
+See the list of [available options][3] to configure the linter.
+
+When using `sloglint` standalone, pass the options as flags of the same name.
+
+### No mixed arguments
+
+The `no-mixed-args` option causes `sloglint` to report mixing key-value pairs and attributes within a single function call:
+
+```go
+slog.Info("a user has logged in", "user_id", 42, slog.String("ip_address", "192.0.2.0")) // sloglint: key-value pairs and attributes should not be mixed
+```
+
+It is enabled by default.
+
+### Key-value pairs only
+
+The `kv-only` option causes `sloglint` to report any use of attributes:
+
+```go
+slog.Info("a user has logged in", slog.Int("user_id", 42)) // sloglint: attributes should not be used
+```
+
+### Attributes only
+
+In contrast, the `attr-only` option causes `sloglint` to report any use of key-value pairs:
+
+```go
+slog.Info("a user has logged in", "user_id", 42) // sloglint: key-value pairs should not be used
+```
+
+### No global
+
+Some projects prefer to pass loggers as explicit dependencies.
+The `no-global` option causes `sloglint` to report the usage of global loggers:
+
+```go
+slog.Info("a user has logged in", "user_id", 42) // sloglint: global logger should not be used
+```
+
+Possible values are `all` (report all global loggers) and `default` (report only the default `slog` logger).
+
+### Context only
+
+Some `slog.Handler` implementations make use of the given `context.Context` (e.g. to access context values).
+For them to work properly, you need to pass a context to all logger calls.
+The `context-only` option causes `sloglint` to report the use of methods without a context:
+
+```go
+slog.Info("a user has logged in") // sloglint: InfoContext should be used instead
+```
+
+Possible values are `all` (report all contextless calls) and `scope` (report only if a context exists in the scope of the outermost function).
+
+### Static messages
+
+To get the most out of structured logging, you may want to require log messages to be static.
+The `static-msg` option causes `sloglint` to report non-static messages:
+
+```go
+slog.Info(fmt.Sprintf("a user with id %d has logged in", 42)) // sloglint: message should be a string literal or a constant
+```
+
+The report can be fixed by moving dynamic values to arguments:
+
+```go
+slog.Info("a user has logged in", "user_id", 42)
+```
+
+### No raw keys
+
+To prevent typos, you may want to forbid the use of raw keys altogether.
+The `no-raw-keys` option causes `sloglint` to report the use of strings as keys
+(including `slog.Attr` calls, e.g. `slog.Int("user_id", 42)`):
+
+```go
+slog.Info("a user has logged in", "user_id", 42) // sloglint: raw keys should not be used
+```
+
+This report can be fixed by using either constants...
+
+```go
+const UserId = "user_id"
+
+slog.Info("a user has logged in", UserId, 42)
+```
+
+...or custom `slog.Attr` constructors:
+
+```go
+func UserId(value int) slog.Attr { return slog.Int("user_id", value) }
+
+slog.Info("a user has logged in", UserId(42))
+```
+
+> [!TIP]
+> Such helpers can be automatically generated for you by the [`sloggen`][4] tool. Give it a try too!
+
+### Key naming convention
+
+To ensure consistency in logs, you may want to enforce a single key naming convention.
+The `key-naming-case` option causes `sloglint` to report keys written in a case other than the given one:
+
+```go
+slog.Info("a user has logged in", "user-id", 42) // sloglint: keys should be written in snake_case
+```
+
+Possible values are `snake`, `kebab`, `camel`, or `pascal`.
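+
+This report can be fixed by renaming the key to match the configured convention, e.g. for `snake`:
+
+```go
+slog.Info("a user has logged in", "user_id", 42)
+```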
+
+### Arguments on separate lines
+
+To improve code readability, you may want to put arguments on separate lines, especially when using key-value pairs.
+The `args-on-sep-lines` option causes `sloglint` to report 2+ arguments on the same line:
+
+```go
+slog.Info("a user has logged in", "user_id", 42, "ip_address", "192.0.2.0") // sloglint: arguments should be put on separate lines
+```
+
+This report can be fixed by reformatting the code:
+
+```go
+slog.Info("a user has logged in",
+    "user_id", 42,
+    "ip_address", "192.0.2.0",
+)
+```
+
+[1]: https://golangci-lint.run
+[2]: https://github.com/go-simpler/sloglint/releases
+[3]: https://golangci-lint.run/usage/linters/#sloglint
+[4]: https://github.com/go-simpler/sloggen
diff --git a/vendor/go-simpler.org/sloglint/sloglint.go b/vendor/go-simpler.org/sloglint/sloglint.go
new file mode 100644
index 0000000000000000000000000000000000000000..d3b0176a6ed89f81539f3e42fada61b77dafe3da
--- /dev/null
+++ b/vendor/go-simpler.org/sloglint/sloglint.go
@@ -0,0 +1,440 @@
+// Package sloglint implements the sloglint analyzer.
+package sloglint
+
+import (
+	"errors"
+	"flag"
+	"fmt"
+	"go/ast"
+	"go/token"
+	"go/types"
+	"strconv"
+	"strings"
+
+	"github.com/ettle/strcase"
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/go/types/typeutil"
+)
+
+// Options are options for the sloglint analyzer.
+type Options struct {
+	NoMixedArgs    bool   // Enforce not mixing key-value pairs and attributes (default true).
+	KVOnly         bool   // Enforce using key-value pairs only (overrides NoMixedArgs, incompatible with AttrOnly).
+	AttrOnly       bool   // Enforce using attributes only (overrides NoMixedArgs, incompatible with KVOnly).
+	NoGlobal       string // Enforce not using global loggers ("all" or "default").
+	ContextOnly    string // Enforce using methods that accept a context ("all" or "scope").
+	StaticMsg      bool   // Enforce using static log messages.
+	NoRawKeys      bool   // Enforce using constants instead of raw keys.
+	KeyNamingCase  string // Enforce a single key naming convention ("snake", "kebab", "camel", or "pascal").
+	ArgsOnSepLines bool   // Enforce putting arguments on separate lines.
+}
+
+// New creates a new sloglint analyzer.
+func New(opts *Options) *analysis.Analyzer {
+	if opts == nil {
+		opts = &Options{NoMixedArgs: true}
+	}
+
+	return &analysis.Analyzer{
+		Name:     "sloglint",
+		Doc:      "ensure consistent code style when using log/slog",
+		Flags:    flags(opts),
+		Requires: []*analysis.Analyzer{inspect.Analyzer},
+		Run: func(pass *analysis.Pass) (any, error) {
+			if opts.KVOnly && opts.AttrOnly {
+				return nil, fmt.Errorf("sloglint: Options.KVOnly and Options.AttrOnly: %w", errIncompatible)
+			}
+
+			switch opts.NoGlobal {
+			case "", "all", "default":
+			default:
+				return nil, fmt.Errorf("sloglint: Options.NoGlobal=%s: %w", opts.NoGlobal, errInvalidValue)
+			}
+
+			switch opts.ContextOnly {
+			case "", "all", "scope":
+			default:
+				return nil, fmt.Errorf("sloglint: Options.ContextOnly=%s: %w", opts.ContextOnly, errInvalidValue)
+			}
+
+			switch opts.KeyNamingCase {
+			case "", snakeCase, kebabCase, camelCase, pascalCase:
+			default:
+				return nil, fmt.Errorf("sloglint: Options.KeyNamingCase=%s: %w", opts.KeyNamingCase, errInvalidValue)
+			}
+
+			run(pass, opts)
+			return nil, nil
+		},
+	}
+}
+
+var (
+	errIncompatible = errors.New("incompatible options")
+	errInvalidValue = errors.New("invalid value")
+)
+
+func flags(opts *Options) flag.FlagSet {
+	fset := flag.NewFlagSet("sloglint", flag.ContinueOnError)
+
+	boolVar := func(value *bool, name, usage string) {
+		fset.Func(name, usage, func(s string) error {
+			v, err := strconv.ParseBool(s)
+			*value = v
+			return err
+		})
+	}
+
+	strVar := func(value *string, name, usage string) {
+		fset.Func(name, usage, func(s string) error {
+			*value = s
+			return nil
+		})
+	}
+
+	boolVar(&opts.NoMixedArgs, "no-mixed-args", "enforce not mixing key-value pairs and attributes (default true)")
+	boolVar(&opts.KVOnly, "kv-only", "enforce using key-value pairs only (overrides -no-mixed-args, incompatible with -attr-only)")
+	boolVar(&opts.AttrOnly, "attr-only", "enforce using attributes only (overrides -no-mixed-args, incompatible with -kv-only)")
+	strVar(&opts.NoGlobal, "no-global", "enforce not using global loggers (all|default)")
+	strVar(&opts.ContextOnly, "context-only", "enforce using methods that accept a context (all|scope)")
+	boolVar(&opts.StaticMsg, "static-msg", "enforce using static log messages")
+	boolVar(&opts.NoRawKeys, "no-raw-keys", "enforce using constants instead of raw keys")
+	strVar(&opts.KeyNamingCase, "key-naming-case", "enforce a single key naming convention (snake|kebab|camel|pascal)")
+	boolVar(&opts.ArgsOnSepLines, "args-on-sep-lines", "enforce putting arguments on separate lines")
+
+	return *fset
+}
+
+var slogFuncs = map[string]int{ // funcName:argsPos
+	"log/slog.Log":                    3,
+	"log/slog.Debug":                  1,
+	"log/slog.Info":                   1,
+	"log/slog.Warn":                   1,
+	"log/slog.Error":                  1,
+	"log/slog.DebugContext":           2,
+	"log/slog.InfoContext":            2,
+	"log/slog.WarnContext":            2,
+	"log/slog.ErrorContext":           2,
+	"(*log/slog.Logger).Log":          3,
+	"(*log/slog.Logger).Debug":        1,
+	"(*log/slog.Logger).Info":         1,
+	"(*log/slog.Logger).Warn":         1,
+	"(*log/slog.Logger).Error":        1,
+	"(*log/slog.Logger).DebugContext": 2,
+	"(*log/slog.Logger).InfoContext":  2,
+	"(*log/slog.Logger).WarnContext":  2,
+	"(*log/slog.Logger).ErrorContext": 2,
+}
+
+var attrFuncs = map[string]struct{}{
+	"log/slog.String":   {},
+	"log/slog.Int64":    {},
+	"log/slog.Int":      {},
+	"log/slog.Uint64":   {},
+	"log/slog.Float64":  {},
+	"log/slog.Bool":     {},
+	"log/slog.Time":     {},
+	"log/slog.Duration": {},
+	"log/slog.Group":    {},
+	"log/slog.Any":      {},
+}
+
+const (
+	snakeCase  = "snake"
+	kebabCase  = "kebab"
+	camelCase  = "camel"
+	pascalCase = "pascal"
+)
+
+func run(pass *analysis.Pass, opts *Options) {
+	visitor := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+	filter := []ast.Node{(*ast.CallExpr)(nil)}
+
+	// WithStack is ~2x slower than Preorder, use it only when stack is needed.
+	if opts.ContextOnly == "scope" {
+		visitor.WithStack(filter, func(node ast.Node, _ bool, stack []ast.Node) bool {
+			visit(pass, opts, node, stack)
+			return false
+		})
+		return
+	}
+
+	visitor.Preorder(filter, func(node ast.Node) {
+		visit(pass, opts, node, nil)
+	})
+}
+
+// NOTE: stack is nil if Preorder is used.
+func visit(pass *analysis.Pass, opts *Options, node ast.Node, stack []ast.Node) {
+	call := node.(*ast.CallExpr)
+
+	fn := typeutil.StaticCallee(pass.TypesInfo, call)
+	if fn == nil {
+		return
+	}
+
+	name := fn.FullName()
+	argsPos, ok := slogFuncs[name]
+	if !ok {
+		return
+	}
+
+	switch opts.NoGlobal {
+	case "all":
+		if strings.HasPrefix(name, "log/slog.") || globalLoggerUsed(pass.TypesInfo, call.Fun) {
+			pass.Reportf(call.Pos(), "global logger should not be used")
+		}
+	case "default":
+		if strings.HasPrefix(name, "log/slog.") {
+			pass.Reportf(call.Pos(), "default logger should not be used")
+		}
+	}
+
+	switch opts.ContextOnly {
+	case "all":
+		typ := pass.TypesInfo.TypeOf(call.Args[0])
+		if typ != nil && typ.String() != "context.Context" {
+			pass.Reportf(call.Pos(), "%sContext should be used instead", fn.Name())
+		}
+	case "scope":
+		typ := pass.TypesInfo.TypeOf(call.Args[0])
+		if typ != nil && typ.String() != "context.Context" && hasContextInScope(pass.TypesInfo, stack) {
+			pass.Reportf(call.Pos(), "%sContext should be used instead", fn.Name())
+		}
+	}
+
+	if opts.StaticMsg && !staticMsg(call.Args[argsPos-1]) {
+		pass.Reportf(call.Pos(), "message should be a string literal or a constant")
+	}
+
+	// NOTE: we assume that the arguments have already been validated by govet.
+	args := call.Args[argsPos:]
+	if len(args) == 0 {
+		return
+	}
+
+	var keys []ast.Expr
+	var attrs []ast.Expr
+
+	for i := 0; i < len(args); i++ {
+		typ := pass.TypesInfo.TypeOf(args[i])
+		if typ == nil {
+			continue
+		}
+		switch typ.String() {
+		case "string":
+			keys = append(keys, args[i])
+			i++ // skip the value.
+		case "log/slog.Attr":
+			attrs = append(attrs, args[i])
+		}
+	}
+
+	switch {
+	case opts.KVOnly && len(attrs) > 0:
+		pass.Reportf(call.Pos(), "attributes should not be used")
+	case opts.AttrOnly && len(attrs) < len(args):
+		pass.Reportf(call.Pos(), "key-value pairs should not be used")
+	case opts.NoMixedArgs && 0 < len(attrs) && len(attrs) < len(args):
+		pass.Reportf(call.Pos(), "key-value pairs and attributes should not be mixed")
+	}
+
+	if opts.NoRawKeys && rawKeysUsed(pass.TypesInfo, keys, attrs) {
+		pass.Reportf(call.Pos(), "raw keys should not be used")
+	}
+
+	if opts.ArgsOnSepLines && argsOnSameLine(pass.Fset, call, keys, attrs) {
+		pass.Reportf(call.Pos(), "arguments should be put on separate lines")
+	}
+
+	switch {
+	case opts.KeyNamingCase == snakeCase && badKeyNames(pass.TypesInfo, strcase.ToSnake, keys, attrs):
+		pass.Reportf(call.Pos(), "keys should be written in snake_case")
+	case opts.KeyNamingCase == kebabCase && badKeyNames(pass.TypesInfo, strcase.ToKebab, keys, attrs):
+		pass.Reportf(call.Pos(), "keys should be written in kebab-case")
+	case opts.KeyNamingCase == camelCase && badKeyNames(pass.TypesInfo, strcase.ToCamel, keys, attrs):
+		pass.Reportf(call.Pos(), "keys should be written in camelCase")
+	case opts.KeyNamingCase == pascalCase && badKeyNames(pass.TypesInfo, strcase.ToPascal, keys, attrs):
+		pass.Reportf(call.Pos(), "keys should be written in PascalCase")
+	}
+}
+
+func globalLoggerUsed(info *types.Info, expr ast.Expr) bool {
+	selector, ok := expr.(*ast.SelectorExpr)
+	if !ok {
+		return false
+	}
+	ident, ok := selector.X.(*ast.Ident)
+	if !ok {
+		return false
+	}
+	obj := info.ObjectOf(ident)
+	return obj.Parent() == obj.Pkg().Scope()
+}
+
+func hasContextInScope(info *types.Info, stack []ast.Node) bool {
+	for i := len(stack) - 1; i >= 0; i-- {
+		decl, ok := stack[i].(*ast.FuncDecl)
+		if !ok {
+			continue
+		}
+		params := decl.Type.Params
+		if len(params.List) == 0 || len(params.List[0].Names) == 0 {
+			continue
+		}
+		typ := info.TypeOf(params.List[0].Names[0])
+		if typ != nil && typ.String() == "context.Context" {
+			return true
+		}
+	}
+	return false
+}
+
+func staticMsg(expr ast.Expr) bool {
+	switch msg := expr.(type) {
+	case *ast.BasicLit: // e.g. slog.Info("msg")
+		return msg.Kind == token.STRING
+	case *ast.Ident: // e.g. const msg = "msg"; slog.Info(msg)
+		return msg.Obj != nil && msg.Obj.Kind == ast.Con
+	default:
+		return false
+	}
+}
+
+func rawKeysUsed(info *types.Info, keys, attrs []ast.Expr) bool {
+	isConst := func(expr ast.Expr) bool {
+		ident, ok := expr.(*ast.Ident)
+		return ok && ident.Obj != nil && ident.Obj.Kind == ast.Con
+	}
+
+	for _, key := range keys {
+		if !isConst(key) {
+			return true
+		}
+	}
+
+	for _, attr := range attrs {
+		switch attr := attr.(type) {
+		case *ast.CallExpr: // e.g. slog.Int()
+			fn := typeutil.StaticCallee(info, attr)
+			if _, ok := attrFuncs[fn.FullName()]; ok && !isConst(attr.Args[0]) {
+				return true
+			}
+
+		case *ast.CompositeLit: // slog.Attr{}
+			isRawKey := func(kv *ast.KeyValueExpr) bool {
+				return kv.Key.(*ast.Ident).Name == "Key" && !isConst(kv.Value)
+			}
+
+			switch len(attr.Elts) {
+			case 1: // slog.Attr{Key: ...} | slog.Attr{Value: ...}
+				kv := attr.Elts[0].(*ast.KeyValueExpr)
+				if isRawKey(kv) {
+					return true
+				}
+			case 2: // slog.Attr{..., ...} | slog.Attr{Key: ..., Value: ...}
+				kv1, ok := attr.Elts[0].(*ast.KeyValueExpr)
+				if ok {
+					kv2 := attr.Elts[1].(*ast.KeyValueExpr)
+					if isRawKey(kv1) || isRawKey(kv2) {
+						return true
+					}
+				} else if !isConst(attr.Elts[0]) {
+					return true
+				}
+			}
+		}
+	}
+
+	return false
+}
+
+func badKeyNames(info *types.Info, caseFn func(string) string, keys, attrs []ast.Expr) bool {
+	for _, key := range keys {
+		if name, ok := getKeyName(key); ok && name != caseFn(name) {
+			return true
+		}
+	}
+
+	for _, attr := range attrs {
+		var expr ast.Expr
+
+		switch attr := attr.(type) {
+		case *ast.CallExpr: // e.g. slog.Int()
+			fn := typeutil.StaticCallee(info, attr)
+			if fn == nil {
+				continue
+			}
+			if _, ok := attrFuncs[fn.FullName()]; !ok {
+				continue
+			}
+			expr = attr.Args[0]
+
+		case *ast.CompositeLit: // slog.Attr{}
+			switch len(attr.Elts) {
+			case 1: // slog.Attr{Key: ...} | slog.Attr{Value: ...}
+				if kv := attr.Elts[0].(*ast.KeyValueExpr); kv.Key.(*ast.Ident).Name == "Key" {
+					expr = kv.Value
+				}
+			case 2: // slog.Attr{..., ...} | slog.Attr{Key: ..., Value: ...}
+				expr = attr.Elts[0]
+				if kv1, ok := attr.Elts[0].(*ast.KeyValueExpr); ok && kv1.Key.(*ast.Ident).Name == "Key" {
+					expr = kv1.Value
+				}
+				if kv2, ok := attr.Elts[1].(*ast.KeyValueExpr); ok && kv2.Key.(*ast.Ident).Name == "Key" {
+					expr = kv2.Value
+				}
+			}
+		}
+
+		if name, ok := getKeyName(expr); ok && name != caseFn(name) {
+			return true
+		}
+	}
+
+	return false
+}
+
+func getKeyName(expr ast.Expr) (string, bool) {
+	if expr == nil {
+		return "", false
+	}
+	if ident, ok := expr.(*ast.Ident); ok {
+		if ident.Obj == nil || ident.Obj.Decl == nil || ident.Obj.Kind != ast.Con {
+			return "", false
+		}
+		if spec, ok := ident.Obj.Decl.(*ast.ValueSpec); ok && len(spec.Values) > 0 {
+			// TODO: support len(spec.Values) > 1; e.g. "const foo, bar = 1, 2"
+			expr = spec.Values[0]
+		}
+	}
+	if lit, ok := expr.(*ast.BasicLit); ok && lit.Kind == token.STRING {
+		return lit.Value, true
+	}
+	return "", false
+}
+
+func argsOnSameLine(fset *token.FileSet, call ast.Expr, keys, attrs []ast.Expr) bool {
+	if len(keys)+len(attrs) <= 1 {
+		return false // special case: slog.Info("msg", "key", "value") is ok.
+	}
+
+	l := len(keys) + len(attrs) + 1
+	args := make([]ast.Expr, 0, l)
+	args = append(args, call)
+	args = append(args, keys...)
+	args = append(args, attrs...)
+
+	lines := make(map[int]struct{}, l)
+	for _, arg := range args {
+		line := fset.Position(arg.Pos()).Line
+		if _, ok := lines[line]; ok {
+			return true
+		}
+		lines[line] = struct{}{}
+	}
+
+	return false
+}
diff --git a/vendor/go.tmz.dev/musttag/README.md b/vendor/go.tmz.dev/musttag/README.md
deleted file mode 100644
index 621c7a9a3d3e764c78b9a0a10dda91b0704c5319..0000000000000000000000000000000000000000
--- a/vendor/go.tmz.dev/musttag/README.md
+++ /dev/null
@@ -1,106 +0,0 @@
-# musttag
-
-[![checks](https://github.com/tmzane/musttag/actions/workflows/checks.yml/badge.svg)](https://github.com/tmzane/musttag/actions/workflows/checks.yml)
-[![pkg.go.dev](https://pkg.go.dev/badge/go.tmz.dev/musttag.svg)](https://pkg.go.dev/go.tmz.dev/musttag)
-[![goreportcard](https://goreportcard.com/badge/go.tmz.dev/musttag)](https://goreportcard.com/report/go.tmz.dev/musttag)
-[![codecov](https://codecov.io/gh/tmzane/musttag/branch/main/graph/badge.svg)](https://codecov.io/gh/tmzane/musttag)
-
-A Go linter that enforces field tags in (un)marshaled structs
-
-## 📌 About
-
-`musttag` checks that exported fields of a struct passed to a `Marshal`-like function are annotated with the relevant tag:
-
-```go
-// BAD:
-var user struct {
-	Name string
-}
-data, err := json.Marshal(user)
-
-// GOOD:
-var user struct {
-	Name string `json:"name"`
-}
-data, err := json.Marshal(user)
-```
-
-The rational from [Uber Style Guide][1]:
-
-> The serialized form of the structure is a contract between different systems.
-> Changes to the structure of the serialized form, including field names, break this contract.
-> Specifying field names inside tags makes the contract explicit,
-> and it guards against accidentally breaking the contract by refactoring or renaming fields.
-
-## 🚀 Features
-
-The following packages are supported out of the box:
-
-* [`encoding/json`][2]
-* [`encoding/xml`][3]
-* [`gopkg.in/yaml.v3`][4]
-* [`github.com/BurntSushi/toml`][5]
-* [`github.com/mitchellh/mapstructure`][6]
-* [`github.com/jmoiron/sqlx`][7]
-
-In addition, any [custom package](#custom-packages) can be added to the list.
-
-## 📋 Usage
-
-`musttag` is already integrated into `golangci-lint`, and this is the recommended way to use it.
-
-To enable the linter, add the following lines to `.golangci.yml`:
-
-```yaml
-linters:
-  enable:
-    - musttag
-```
-
-If you'd rather prefer to use `musttag` standalone, you can install it via `brew`...
-
-```shell
-brew install tmzane/tap/musttag
-```
-
-...or download a prebuilt binary from the [Releases][9] page.
-
-Then run it either directly or as a `go vet` tool:
-
-```shell
-go vet -vettool=$(which musttag) ./...
-```
-
-### Custom packages
-
-To enable reporting a custom function, you need to add its description to `.golangci.yml`.
-
-The following is an example of adding support for the `hclsimple.DecodeFile` function from [`github.com/hashicorp/hcl`][8]:
-
-```yaml
-linters-settings:
-  musttag:
-    functions:
-        # The full name of the function, including the package.
-      - name: github.com/hashicorp/hcl/v2/hclsimple.DecodeFile
-        # The struct tag whose presence should be ensured.
-        tag: hcl
-        # The position of the argument to check.
-        arg-pos: 2
-```
-
-The same can be done via the `-fn=name:tag:arg-pos` flag when using `musttag` standalone:
-
-```shell
-musttag -fn="github.com/hashicorp/hcl/v2/hclsimple.DecodeFile:hcl:2" ./...
-```
-
-[1]: https://github.com/uber-go/guide/blob/master/style.md#use-field-tags-in-marshaled-structs
-[2]: https://pkg.go.dev/encoding/json
-[3]: https://pkg.go.dev/encoding/xml
-[4]: https://github.com/go-yaml/yaml
-[5]: https://github.com/BurntSushi/toml
-[6]: https://github.com/mitchellh/mapstructure
-[7]: https://github.com/jmoiron/sqlx
-[8]: https://github.com/hashicorp/hcl
-[9]: https://github.com/tmzane/musttag/releases
diff --git a/vendor/go.tmz.dev/musttag/builtins.go b/vendor/go.tmz.dev/musttag/builtins.go
deleted file mode 100644
index 66914fa9e8229a6819fe516d399368669dd9e849..0000000000000000000000000000000000000000
--- a/vendor/go.tmz.dev/musttag/builtins.go
+++ /dev/null
@@ -1,67 +0,0 @@
-package musttag
-
-// builtins is a set of functions supported out of the box.
-var builtins = []Func{
-	// https://pkg.go.dev/encoding/json
-	{Name: "encoding/json.Marshal", Tag: "json", ArgPos: 0},
-	{Name: "encoding/json.MarshalIndent", Tag: "json", ArgPos: 0},
-	{Name: "encoding/json.Unmarshal", Tag: "json", ArgPos: 1},
-	{Name: "(*encoding/json.Encoder).Encode", Tag: "json", ArgPos: 0},
-	{Name: "(*encoding/json.Decoder).Decode", Tag: "json", ArgPos: 0},
-
-	// https://pkg.go.dev/encoding/xml
-	{Name: "encoding/xml.Marshal", Tag: "xml", ArgPos: 0},
-	{Name: "encoding/xml.MarshalIndent", Tag: "xml", ArgPos: 0},
-	{Name: "encoding/xml.Unmarshal", Tag: "xml", ArgPos: 1},
-	{Name: "(*encoding/xml.Encoder).Encode", Tag: "xml", ArgPos: 0},
-	{Name: "(*encoding/xml.Decoder).Decode", Tag: "xml", ArgPos: 0},
-	{Name: "(*encoding/xml.Encoder).EncodeElement", Tag: "xml", ArgPos: 0},
-	{Name: "(*encoding/xml.Decoder).DecodeElement", Tag: "xml", ArgPos: 0},
-
-	// https://github.com/go-yaml/yaml
-	{Name: "gopkg.in/yaml.v3.Marshal", Tag: "yaml", ArgPos: 0},
-	{Name: "gopkg.in/yaml.v3.Unmarshal", Tag: "yaml", ArgPos: 1},
-	{Name: "(*gopkg.in/yaml.v3.Encoder).Encode", Tag: "yaml", ArgPos: 0},
-	{Name: "(*gopkg.in/yaml.v3.Decoder).Decode", Tag: "yaml", ArgPos: 0},
-
-	// https://github.com/BurntSushi/toml
-	{Name: "github.com/BurntSushi/toml.Unmarshal", Tag: "toml", ArgPos: 1},
-	{Name: "github.com/BurntSushi/toml.Decode", Tag: "toml", ArgPos: 1},
-	{Name: "github.com/BurntSushi/toml.DecodeFS", Tag: "toml", ArgPos: 2},
-	{Name: "github.com/BurntSushi/toml.DecodeFile", Tag: "toml", ArgPos: 1},
-	{Name: "(*github.com/BurntSushi/toml.Encoder).Encode", Tag: "toml", ArgPos: 0},
-	{Name: "(*github.com/BurntSushi/toml.Decoder).Decode", Tag: "toml", ArgPos: 0},
-
-	// https://github.com/mitchellh/mapstructure
-	{Name: "github.com/mitchellh/mapstructure.Decode", Tag: "mapstructure", ArgPos: 1},
-	{Name: "github.com/mitchellh/mapstructure.DecodeMetadata", Tag: "mapstructure", ArgPos: 1},
-	{Name: "github.com/mitchellh/mapstructure.WeakDecode", Tag: "mapstructure", ArgPos: 1},
-	{Name: "github.com/mitchellh/mapstructure.WeakDecodeMetadata", Tag: "mapstructure", ArgPos: 1},
-
-	// https://github.com/jmoiron/sqlx
-	{Name: "github.com/jmoiron/sqlx.Get", Tag: "db", ArgPos: 1},
-	{Name: "github.com/jmoiron/sqlx.GetContext", Tag: "db", ArgPos: 2},
-	{Name: "github.com/jmoiron/sqlx.Select", Tag: "db", ArgPos: 1},
-	{Name: "github.com/jmoiron/sqlx.SelectContext", Tag: "db", ArgPos: 2},
-	{Name: "github.com/jmoiron/sqlx.StructScan", Tag: "db", ArgPos: 1},
-	{Name: "(*github.com/jmoiron/sqlx.Conn).GetContext", Tag: "db", ArgPos: 1},
-	{Name: "(*github.com/jmoiron/sqlx.Conn).SelectContext", Tag: "db", ArgPos: 1},
-	{Name: "(*github.com/jmoiron/sqlx.DB).Get", Tag: "db", ArgPos: 0},
-	{Name: "(*github.com/jmoiron/sqlx.DB).GetContext", Tag: "db", ArgPos: 1},
-	{Name: "(*github.com/jmoiron/sqlx.DB).Select", Tag: "db", ArgPos: 0},
-	{Name: "(*github.com/jmoiron/sqlx.DB).SelectContext", Tag: "db", ArgPos: 1},
-	{Name: "(*github.com/jmoiron/sqlx.NamedStmt).Get", Tag: "db", ArgPos: 0},
-	{Name: "(*github.com/jmoiron/sqlx.NamedStmt).GetContext", Tag: "db", ArgPos: 1},
-	{Name: "(*github.com/jmoiron/sqlx.NamedStmt).Select", Tag: "db", ArgPos: 0},
-	{Name: "(*github.com/jmoiron/sqlx.NamedStmt).SelectContext", Tag: "db", ArgPos: 1},
-	{Name: "(*github.com/jmoiron/sqlx.Row).StructScan", Tag: "db", ArgPos: 0},
-	{Name: "(*github.com/jmoiron/sqlx.Rows).StructScan", Tag: "db", ArgPos: 0},
-	{Name: "(*github.com/jmoiron/sqlx.Stmt).Get", Tag: "db", ArgPos: 0},
-	{Name: "(*github.com/jmoiron/sqlx.Stmt).GetContext", Tag: "db", ArgPos: 1},
-	{Name: "(*github.com/jmoiron/sqlx.Stmt).Select", Tag: "db", ArgPos: 0},
-	{Name: "(*github.com/jmoiron/sqlx.Stmt).SelectContext", Tag: "db", ArgPos: 1},
-	{Name: "(*github.com/jmoiron/sqlx.Tx).Get", Tag: "db", ArgPos: 0},
-	{Name: "(*github.com/jmoiron/sqlx.Tx).GetContext", Tag: "db", ArgPos: 1},
-	{Name: "(*github.com/jmoiron/sqlx.Tx).Select", Tag: "db", ArgPos: 0},
-	{Name: "(*github.com/jmoiron/sqlx.Tx).SelectContext", Tag: "db", ArgPos: 1},
-}
diff --git a/vendor/go.tmz.dev/musttag/musttag.go b/vendor/go.tmz.dev/musttag/musttag.go
deleted file mode 100644
index 7f4e05e75504c4c6ddcffdf6a39339fa88df6b78..0000000000000000000000000000000000000000
--- a/vendor/go.tmz.dev/musttag/musttag.go
+++ /dev/null
@@ -1,259 +0,0 @@
-// Package musttag implements the musttag analyzer.
-package musttag
-
-import (
-	"flag"
-	"fmt"
-	"go/ast"
-	"go/token"
-	"go/types"
-	"path"
-	"reflect"
-	"regexp"
-	"strconv"
-	"strings"
-
-	"golang.org/x/tools/go/analysis"
-	"golang.org/x/tools/go/analysis/passes/inspect"
-	"golang.org/x/tools/go/ast/inspector"
-	"golang.org/x/tools/go/types/typeutil"
-)
-
-// Func describes a function call to look for, e.g. json.Marshal.
-type Func struct {
-	Name   string // Name is the full name of the function, including the package.
-	Tag    string // Tag is the struct tag whose presence should be ensured.
-	ArgPos int    // ArgPos is the position of the argument to check.
-}
-
-func (fn Func) shortName() string {
-	name := strings.NewReplacer("*", "", "(", "", ")", "").Replace(fn.Name)
-	return path.Base(name)
-}
-
-// New creates a new musttag analyzer.
-// To report a custom function provide its description via Func,
-// it will be added to the builtin ones.
-func New(funcs ...Func) *analysis.Analyzer {
-	var flagFuncs []Func
-	return &analysis.Analyzer{
-		Name:     "musttag",
-		Doc:      "enforce field tags in (un)marshaled structs",
-		Flags:    flags(&flagFuncs),
-		Requires: []*analysis.Analyzer{inspect.Analyzer},
-		Run: func(pass *analysis.Pass) (any, error) {
-			l := len(builtins) + len(funcs) + len(flagFuncs)
-			f := make(map[string]Func, l)
-
-			toMap := func(slice []Func) {
-				for _, fn := range slice {
-					f[fn.Name] = fn
-				}
-			}
-			toMap(builtins)
-			toMap(funcs)
-			toMap(flagFuncs)
-
-			mainModule, err := getMainModule()
-			if err != nil {
-				return nil, err
-			}
-
-			return run(pass, mainModule, f)
-		},
-	}
-}
-
-// flags creates a flag set for the analyzer.
-// The funcs slice will be filled with custom functions passed via CLI flags.
-func flags(funcs *[]Func) flag.FlagSet {
-	fs := flag.NewFlagSet("musttag", flag.ContinueOnError)
-	fs.Func("fn", "report custom function (name:tag:argpos)", func(s string) error {
-		parts := strings.Split(s, ":")
-		if len(parts) != 3 || parts[0] == "" || parts[1] == "" {
-			return strconv.ErrSyntax
-		}
-		pos, err := strconv.Atoi(parts[2])
-		if err != nil {
-			return err
-		}
-		*funcs = append(*funcs, Func{
-			Name:   parts[0],
-			Tag:    parts[1],
-			ArgPos: pos,
-		})
-		return nil
-	})
-	return *fs
-}
-
-// for tests only.
-var report = func(pass *analysis.Pass, st *structType, fn Func, fnPos token.Position) {
-	const format = "`%s` should be annotated with the `%s` tag as it is passed to `%s` at %s"
-	pass.Reportf(st.Pos, format, st.Name, fn.Tag, fn.shortName(), fnPos)
-}
-
-var cleanFullName = regexp.MustCompile(`([^*/(]+/vendor/)`)
-
-// run starts the analysis.
-func run(pass *analysis.Pass, mainModule string, funcs map[string]Func) (any, error) {
-	var err error
-
-	walk := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
-	filter := []ast.Node{(*ast.CallExpr)(nil)}
-
-	walk.Preorder(filter, func(n ast.Node) {
-		if err != nil {
-			return // there is already an error.
-		}
-
-		call, ok := n.(*ast.CallExpr)
-		if !ok {
-			return // not a function call.
-		}
-
-		callee := typeutil.StaticCallee(pass.TypesInfo, call)
-		if callee == nil {
-			return // not a static call.
-		}
-
-		name := cleanFullName.ReplaceAllString(callee.FullName(), "")
-		fn, ok := funcs[name]
-		if !ok {
-			return // the function is not supported.
-		}
-
-		if len(call.Args) <= fn.ArgPos {
-			err = fmt.Errorf("Func.ArgPos cannot be %d: %s accepts only %d argument(s)", fn.ArgPos, fn.Name, len(call.Args))
-			return
-		}
-
-		arg := call.Args[fn.ArgPos]
-		if unary, ok := arg.(*ast.UnaryExpr); ok {
-			arg = unary.X // e.g. json.Marshal(&foo)
-		}
-
-		initialPos := token.NoPos
-		switch arg := arg.(type) {
-		case *ast.Ident: // e.g. json.Marshal(foo)
-			if arg.Obj == nil {
-				return // e.g. json.Marshal(nil)
-			}
-			initialPos = arg.Obj.Pos()
-		case *ast.CompositeLit: // e.g. json.Marshal(struct{}{})
-			initialPos = arg.Pos()
-		}
-
-		checker := checker{
-			mainModule: mainModule,
-			seenTypes:  make(map[string]struct{}),
-		}
-
-		t := pass.TypesInfo.TypeOf(arg)
-		st, ok := checker.parseStructType(t, initialPos)
-		if !ok {
-			return // not a struct argument.
-		}
-
-		result, ok := checker.checkStructType(st, fn.Tag)
-		if ok {
-			return // nothing to report.
-		}
-
-		p := pass.Fset.Position(call.Pos())
-		report(pass, result, fn, p)
-	})
-
-	return nil, err
-}
-
-// structType is an extension for types.Struct.
-// The content of the fields depends on whether the type is named or not.
-type structType struct {
-	*types.Struct
-	Name string    // for types.Named: the type's name; for anonymous: a placeholder string.
-	Pos  token.Pos // for types.Named: the type's position; for anonymous: the corresponding identifier's position.
-}
-
-// checker parses and checks struct types.
-type checker struct {
-	mainModule string
-	seenTypes  map[string]struct{} // prevent panic on recursive types; see issue #16.
-}
-
-// parseStructType parses the given types.Type, returning the underlying struct type.
-func (c *checker) parseStructType(t types.Type, pos token.Pos) (*structType, bool) {
-	for {
-		// unwrap pointers (if any) first.
-		ptr, ok := t.(*types.Pointer)
-		if !ok {
-			break
-		}
-		t = ptr.Elem()
-	}
-
-	switch t := t.(type) {
-	case *types.Named: // a struct of the named type.
-		pkg := t.Obj().Pkg() // may be nil; see issue #38.
-		if pkg == nil {
-			return nil, false
-		}
-
-		if !strings.HasPrefix(pkg.Path(), c.mainModule) {
-			return nil, false
-		}
-
-		s, ok := t.Underlying().(*types.Struct)
-		if !ok {
-			return nil, false
-		}
-
-		return &structType{
-			Struct: s,
-			Pos:    t.Obj().Pos(),
-			Name:   t.Obj().Name(),
-		}, true
-
-	case *types.Struct: // an anonymous struct.
-		return &structType{
-			Struct: t,
-			Pos:    pos,
-			Name:   "anonymous struct",
-		}, true
-	}
-
-	return nil, false
-}
-
-// checkStructType recursively checks whether the given struct type is annotated with the tag.
-// The result is the type of the first nested struct which fields are not properly annotated.
-func (c *checker) checkStructType(st *structType, tag string) (*structType, bool) {
-	c.seenTypes[st.String()] = struct{}{}
-
-	for i := 0; i < st.NumFields(); i++ {
-		field := st.Field(i)
-		if !field.Exported() {
-			continue
-		}
-
-		if _, ok := reflect.StructTag(st.Tag(i)).Lookup(tag); !ok {
-			// tag is not required for embedded types; see issue #12.
-			if !field.Embedded() {
-				return st, false
-			}
-		}
-
-		nested, ok := c.parseStructType(field.Type(), st.Pos) // TODO: or field.Pos()?
-		if !ok {
-			continue
-		}
-		if _, ok := c.seenTypes[nested.String()]; ok {
-			continue
-		}
-		if result, ok := c.checkStructType(nested, tag); !ok {
-			return result, false
-		}
-	}
-
-	return nil, true
-}
diff --git a/vendor/go.tmz.dev/musttag/utils.go b/vendor/go.tmz.dev/musttag/utils.go
deleted file mode 100644
index 673747f15f4fc40cff2f32a0056e9a935463f76d..0000000000000000000000000000000000000000
--- a/vendor/go.tmz.dev/musttag/utils.go
+++ /dev/null
@@ -1,57 +0,0 @@
-package musttag
-
-import (
-	"encoding/json"
-	"errors"
-	"fmt"
-	"io"
-	"os"
-	"os/exec"
-	"strings"
-)
-
-var (
-	getwd = os.Getwd
-
-	commandOutput = func(name string, args ...string) (string, error) {
-		output, err := exec.Command(name, args...).Output()
-		return string(output), err
-	}
-)
-
-func getMainModule() (string, error) {
-	args := []string{"go", "list", "-m", "-json"}
-
-	output, err := commandOutput(args[0], args[1:]...)
-	if err != nil {
-		return "", fmt.Errorf("running `%s`: %w", strings.Join(args, " "), err)
-	}
-
-	cwd, err := getwd()
-	if err != nil {
-		return "", fmt.Errorf("getting wd: %w", err)
-	}
-
-	decoder := json.NewDecoder(strings.NewReader(output))
-
-	for {
-		// multiple JSON objects will be returned when using Go workspaces; see #63 for details.
-		var module struct {
-			Path      string `json:"Path"`
-			Main      bool   `json:"Main"`
-			Dir       string `json:"Dir"`
-			GoMod     string `json:"GoMod"`
-			GoVersion string `json:"GoVersion"`
-		}
-		if err := decoder.Decode(&module); err != nil {
-			if errors.Is(err, io.EOF) {
-				return "", fmt.Errorf("main module not found\n%s", output)
-			}
-			return "", fmt.Errorf("decoding json: %w\n%s", err, output)
-		}
-
-		if module.Main && strings.HasPrefix(cwd, module.Dir) {
-			return module.Path, nil
-		}
-	}
-}
diff --git a/vendor/go.uber.org/automaxprocs/LICENSE b/vendor/go.uber.org/automaxprocs/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..20dcf51d96d71b38860412ee6ecaf4fd91ab379c
--- /dev/null
+++ b/vendor/go.uber.org/automaxprocs/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2017 Uber Technologies, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
\ No newline at end of file
diff --git a/vendor/go.uber.org/automaxprocs/internal/cgroups/cgroup.go b/vendor/go.uber.org/automaxprocs/internal/cgroups/cgroup.go
new file mode 100644
index 0000000000000000000000000000000000000000..fe4ecf561e27f9296a0a1f3bc8797d2c7d54f332
--- /dev/null
+++ b/vendor/go.uber.org/automaxprocs/internal/cgroups/cgroup.go
@@ -0,0 +1,79 @@
+// Copyright (c) 2017 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+//go:build linux
+// +build linux
+
+package cgroups
+
+import (
+	"bufio"
+	"io"
+	"os"
+	"path/filepath"
+	"strconv"
+)
+
+// CGroup represents the data structure for a Linux control group.
+type CGroup struct {
+	path string
+}
+
+// NewCGroup returns a new *CGroup from a given path.
+func NewCGroup(path string) *CGroup {
+	return &CGroup{path: path}
+}
+
+// Path returns the path of the CGroup*.
+func (cg *CGroup) Path() string {
+	return cg.path
+}
+
+// ParamPath returns the path of the given cgroup param under itself.
+func (cg *CGroup) ParamPath(param string) string {
+	return filepath.Join(cg.path, param)
+}
+
+// readFirstLine reads the first line from a cgroup param file.
+func (cg *CGroup) readFirstLine(param string) (string, error) {
+	paramFile, err := os.Open(cg.ParamPath(param))
+	if err != nil {
+		return "", err
+	}
+	defer paramFile.Close()
+
+	scanner := bufio.NewScanner(paramFile)
+	if scanner.Scan() {
+		return scanner.Text(), nil
+	}
+	if err := scanner.Err(); err != nil {
+		return "", err
+	}
+	return "", io.ErrUnexpectedEOF
+}
+
+// readInt parses the first line from a cgroup param file as int.
+func (cg *CGroup) readInt(param string) (int, error) {
+	text, err := cg.readFirstLine(param)
+	if err != nil {
+		return 0, err
+	}
+	return strconv.Atoi(text)
+}
diff --git a/vendor/go.uber.org/automaxprocs/internal/cgroups/cgroups.go b/vendor/go.uber.org/automaxprocs/internal/cgroups/cgroups.go
new file mode 100644
index 0000000000000000000000000000000000000000..e89f5436028fcb5b37da126ce98b4b1071c4bc3b
--- /dev/null
+++ b/vendor/go.uber.org/automaxprocs/internal/cgroups/cgroups.go
@@ -0,0 +1,118 @@
+// Copyright (c) 2017 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+//go:build linux
+// +build linux
+
+package cgroups
+
+const (
+	// _cgroupFSType is the Linux CGroup file system type used in
+	// `/proc/$PID/mountinfo`.
+	_cgroupFSType = "cgroup"
+	// _cgroupSubsysCPU is the CPU CGroup subsystem.
+	_cgroupSubsysCPU = "cpu"
+	// _cgroupSubsysCPUAcct is the CPU accounting CGroup subsystem.
+	_cgroupSubsysCPUAcct = "cpuacct"
+	// _cgroupSubsysCPUSet is the CPUSet CGroup subsystem.
+	_cgroupSubsysCPUSet = "cpuset"
+	// _cgroupSubsysMemory is the Memory CGroup subsystem.
+	_cgroupSubsysMemory = "memory"
+
+	// _cgroupCPUCFSQuotaUsParam is the file name for the CGroup CFS quota
+	// parameter.
+	_cgroupCPUCFSQuotaUsParam = "cpu.cfs_quota_us"
+	// _cgroupCPUCFSPeriodUsParam is the file name for the CGroup CFS period
+	// parameter.
+	_cgroupCPUCFSPeriodUsParam = "cpu.cfs_period_us"
+)
+
+const (
+	_procPathCGroup    = "/proc/self/cgroup"
+	_procPathMountInfo = "/proc/self/mountinfo"
+)
+
+// CGroups is a map that associates each CGroup with its subsystem name.
+type CGroups map[string]*CGroup
+
+// NewCGroups returns a new *CGroups from the given `mountinfo` and `cgroup`
+// files of a process under the `/proc` file system (see also proc(5) for more
+// information).
+func NewCGroups(procPathMountInfo, procPathCGroup string) (CGroups, error) {
+	cgroupSubsystems, err := parseCGroupSubsystems(procPathCGroup)
+	if err != nil {
+		return nil, err
+	}
+
+	cgroups := make(CGroups)
+	newMountPoint := func(mp *MountPoint) error {
+		if mp.FSType != _cgroupFSType {
+			return nil
+		}
+
+		for _, opt := range mp.SuperOptions {
+			subsys, exists := cgroupSubsystems[opt]
+			if !exists {
+				continue
+			}
+
+			cgroupPath, err := mp.Translate(subsys.Name)
+			if err != nil {
+				return err
+			}
+			cgroups[opt] = NewCGroup(cgroupPath)
+		}
+
+		return nil
+	}
+
+	if err := parseMountInfo(procPathMountInfo, newMountPoint); err != nil {
+		return nil, err
+	}
+	return cgroups, nil
+}
+
+// NewCGroupsForCurrentProcess returns a new *CGroups instance for the current
+// process.
+func NewCGroupsForCurrentProcess() (CGroups, error) {
+	return NewCGroups(_procPathMountInfo, _procPathCGroup)
+}
+
+// CPUQuota returns the CPU quota applied with the CPU cgroup controller.
+// It is a result of `cpu.cfs_quota_us / cpu.cfs_period_us`. If the value of
+// `cpu.cfs_quota_us` was not set (-1), the method returns `(-1, false, nil)`.
+func (cg CGroups) CPUQuota() (float64, bool, error) {
+	cpuCGroup, exists := cg[_cgroupSubsysCPU]
+	if !exists {
+		return -1, false, nil
+	}
+
+	cfsQuotaUs, err := cpuCGroup.readInt(_cgroupCPUCFSQuotaUsParam)
+	if defined := cfsQuotaUs > 0; err != nil || !defined {
+		return -1, defined, err
+	}
+
+	cfsPeriodUs, err := cpuCGroup.readInt(_cgroupCPUCFSPeriodUsParam)
+	if defined := cfsPeriodUs > 0; err != nil || !defined {
+		return -1, defined, err
+	}
+
+	return float64(cfsQuotaUs) / float64(cfsPeriodUs), true, nil
+}
diff --git a/vendor/go.uber.org/automaxprocs/internal/cgroups/cgroups2.go b/vendor/go.uber.org/automaxprocs/internal/cgroups/cgroups2.go
new file mode 100644
index 0000000000000000000000000000000000000000..78556062fe2789434fc9d3056f4b0e4349d6e98b
--- /dev/null
+++ b/vendor/go.uber.org/automaxprocs/internal/cgroups/cgroups2.go
@@ -0,0 +1,176 @@
+// Copyright (c) 2022 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+//go:build linux
+// +build linux
+
+package cgroups
+
+import (
+	"bufio"
+	"errors"
+	"fmt"
+	"io"
+	"os"
+	"path"
+	"strconv"
+	"strings"
+)
+
+const (
+	// _cgroupv2CPUMax is the file name for the CGroup-V2 CPU max and period
+	// parameter.
+	_cgroupv2CPUMax = "cpu.max"
+	// _cgroupFSType is the Linux CGroup-V2 file system type used in
+	// `/proc/$PID/mountinfo`.
+	_cgroupv2FSType = "cgroup2"
+
+	_cgroupv2MountPoint = "/sys/fs/cgroup"
+
+	_cgroupV2CPUMaxDefaultPeriod = 100000
+	_cgroupV2CPUMaxQuotaMax      = "max"
+)
+
+const (
+	_cgroupv2CPUMaxQuotaIndex = iota
+	_cgroupv2CPUMaxPeriodIndex
+)
+
+// ErrNotV2 indicates that the system is not using cgroups2.
+var ErrNotV2 = errors.New("not using cgroups2")
+
+// CGroups2 provides access to cgroups data for systems using cgroups2.
+type CGroups2 struct {
+	mountPoint string
+	groupPath  string
+	cpuMaxFile string
+}
+
+// NewCGroups2ForCurrentProcess builds a CGroups2 for the current process.
+//
+// This returns ErrNotV2 if the system is not using cgroups2.
+func NewCGroups2ForCurrentProcess() (*CGroups2, error) {
+	return newCGroups2From(_procPathMountInfo, _procPathCGroup)
+}
+
+func newCGroups2From(mountInfoPath, procPathCGroup string) (*CGroups2, error) {
+	isV2, err := isCGroupV2(mountInfoPath)
+	if err != nil {
+		return nil, err
+	}
+
+	if !isV2 {
+		return nil, ErrNotV2
+	}
+
+	subsystems, err := parseCGroupSubsystems(procPathCGroup)
+	if err != nil {
+		return nil, err
+	}
+
+	// Find v2 subsystem by looking for the `0` id
+	var v2subsys *CGroupSubsys
+	for _, subsys := range subsystems {
+		if subsys.ID == 0 {
+			v2subsys = subsys
+			break
+		}
+	}
+
+	if v2subsys == nil {
+		return nil, ErrNotV2
+	}
+
+	return &CGroups2{
+		mountPoint: _cgroupv2MountPoint,
+		groupPath:  v2subsys.Name,
+		cpuMaxFile: _cgroupv2CPUMax,
+	}, nil
+}
+
+func isCGroupV2(procPathMountInfo string) (bool, error) {
+	var (
+		isV2          bool
+		newMountPoint = func(mp *MountPoint) error {
+			isV2 = isV2 || (mp.FSType == _cgroupv2FSType && mp.MountPoint == _cgroupv2MountPoint)
+			return nil
+		}
+	)
+
+	if err := parseMountInfo(procPathMountInfo, newMountPoint); err != nil {
+		return false, err
+	}
+
+	return isV2, nil
+}
+
+// CPUQuota returns the CPU quota applied with the CPU cgroup2 controller.
+// It is a result of reading the cpu quota and period from the cpu.max file.
+// It returns the ratio quota/period. If the quota is set to `max`, it returns
+// (-1, false, nil).
+func (cg *CGroups2) CPUQuota() (float64, bool, error) {
+	cpuMaxParams, err := os.Open(path.Join(cg.mountPoint, cg.groupPath, cg.cpuMaxFile))
+	if err != nil {
+		if os.IsNotExist(err) {
+			return -1, false, nil
+		}
+		return -1, false, err
+	}
+	defer cpuMaxParams.Close()
+
+	scanner := bufio.NewScanner(cpuMaxParams)
+	if scanner.Scan() {
+		fields := strings.Fields(scanner.Text())
+		if len(fields) == 0 || len(fields) > 2 {
+			return -1, false, fmt.Errorf("invalid format")
+		}
+
+		if fields[_cgroupv2CPUMaxQuotaIndex] == _cgroupV2CPUMaxQuotaMax {
+			return -1, false, nil
+		}
+
+		max, err := strconv.Atoi(fields[_cgroupv2CPUMaxQuotaIndex])
+		if err != nil {
+			return -1, false, err
+		}
+
+		var period int
+		if len(fields) == 1 {
+			period = _cgroupV2CPUMaxDefaultPeriod
+		} else {
+			period, err = strconv.Atoi(fields[_cgroupv2CPUMaxPeriodIndex])
+			if err != nil {
+				return -1, false, err
+			}
+
+			if period == 0 {
+				return -1, false, errors.New("zero value for period is not allowed")
+			}
+		}
+
+		return float64(max) / float64(period), true, nil
+	}
+
+	if err := scanner.Err(); err != nil {
+		return -1, false, err
+	}
+
+	return 0, false, io.ErrUnexpectedEOF
+}
diff --git a/vendor/go.uber.org/automaxprocs/internal/cgroups/doc.go b/vendor/go.uber.org/automaxprocs/internal/cgroups/doc.go
new file mode 100644
index 0000000000000000000000000000000000000000..113555f63da1c50d8ce2580ddd58c458bda66018
--- /dev/null
+++ b/vendor/go.uber.org/automaxprocs/internal/cgroups/doc.go
@@ -0,0 +1,23 @@
+// Copyright (c) 2017 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+// Package cgroups provides utilities to access Linux control group (CGroups)
+// parameters (CPU quota, for example) for a given process.
+package cgroups
diff --git a/vendor/go.uber.org/automaxprocs/internal/cgroups/errors.go b/vendor/go.uber.org/automaxprocs/internal/cgroups/errors.go
new file mode 100644
index 0000000000000000000000000000000000000000..94ac75a46e8bf82fe1b471e3144370d128934083
--- /dev/null
+++ b/vendor/go.uber.org/automaxprocs/internal/cgroups/errors.go
@@ -0,0 +1,52 @@
+// Copyright (c) 2017 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+//go:build linux
+// +build linux
+
+package cgroups
+
+import "fmt"
+
+type cgroupSubsysFormatInvalidError struct {
+	line string
+}
+
+type mountPointFormatInvalidError struct {
+	line string
+}
+
+type pathNotExposedFromMountPointError struct {
+	mountPoint string
+	root       string
+	path       string
+}
+
+func (err cgroupSubsysFormatInvalidError) Error() string {
+	return fmt.Sprintf("invalid format for CGroupSubsys: %q", err.line)
+}
+
+func (err mountPointFormatInvalidError) Error() string {
+	return fmt.Sprintf("invalid format for MountPoint: %q", err.line)
+}
+
+func (err pathNotExposedFromMountPointError) Error() string {
+	return fmt.Sprintf("path %q is not a descendant of mount point root %q and cannot be exposed from %q", err.path, err.root, err.mountPoint)
+}
diff --git a/vendor/go.uber.org/automaxprocs/internal/cgroups/mountpoint.go b/vendor/go.uber.org/automaxprocs/internal/cgroups/mountpoint.go
new file mode 100644
index 0000000000000000000000000000000000000000..f3877f78aa604c9534d300f246c9cd268985cbe2
--- /dev/null
+++ b/vendor/go.uber.org/automaxprocs/internal/cgroups/mountpoint.go
@@ -0,0 +1,171 @@
+// Copyright (c) 2017 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+//go:build linux
+// +build linux
+
+package cgroups
+
+import (
+	"bufio"
+	"os"
+	"path/filepath"
+	"strconv"
+	"strings"
+)
+
+const (
+	_mountInfoSep               = " "
+	_mountInfoOptsSep           = ","
+	_mountInfoOptionalFieldsSep = "-"
+)
+
+const (
+	_miFieldIDMountID = iota
+	_miFieldIDParentID
+	_miFieldIDDeviceID
+	_miFieldIDRoot
+	_miFieldIDMountPoint
+	_miFieldIDOptions
+	_miFieldIDOptionalFields
+
+	_miFieldCountFirstHalf
+)
+
+const (
+	_miFieldOffsetFSType = iota
+	_miFieldOffsetMountSource
+	_miFieldOffsetSuperOptions
+
+	_miFieldCountSecondHalf
+)
+
+const _miFieldCountMin = _miFieldCountFirstHalf + _miFieldCountSecondHalf
+
+// MountPoint is the data structure for the mount points in
+// `/proc/$PID/mountinfo`. See also proc(5) for more information.
+type MountPoint struct {
+	MountID        int
+	ParentID       int
+	DeviceID       string
+	Root           string
+	MountPoint     string
+	Options        []string
+	OptionalFields []string
+	FSType         string
+	MountSource    string
+	SuperOptions   []string
+}
+
+// NewMountPointFromLine parses a line read from `/proc/$PID/mountinfo` and
+// returns a new *MountPoint.
+func NewMountPointFromLine(line string) (*MountPoint, error) {
+	fields := strings.Split(line, _mountInfoSep)
+
+	if len(fields) < _miFieldCountMin {
+		return nil, mountPointFormatInvalidError{line}
+	}
+
+	mountID, err := strconv.Atoi(fields[_miFieldIDMountID])
+	if err != nil {
+		return nil, err
+	}
+
+	parentID, err := strconv.Atoi(fields[_miFieldIDParentID])
+	if err != nil {
+		return nil, err
+	}
+
+	for i, field := range fields[_miFieldIDOptionalFields:] {
+		if field == _mountInfoOptionalFieldsSep {
+			// End of optional fields.
+			fsTypeStart := _miFieldIDOptionalFields + i + 1
+
+			// Now we know where the optional fields end, split the line again with a
+			// limit to avoid issues with spaces in super options as present on WSL.
+			fields = strings.SplitN(line, _mountInfoSep, fsTypeStart+_miFieldCountSecondHalf)
+			if len(fields) != fsTypeStart+_miFieldCountSecondHalf {
+				return nil, mountPointFormatInvalidError{line}
+			}
+
+			miFieldIDFSType := _miFieldOffsetFSType + fsTypeStart
+			miFieldIDMountSource := _miFieldOffsetMountSource + fsTypeStart
+			miFieldIDSuperOptions := _miFieldOffsetSuperOptions + fsTypeStart
+
+			return &MountPoint{
+				MountID:        mountID,
+				ParentID:       parentID,
+				DeviceID:       fields[_miFieldIDDeviceID],
+				Root:           fields[_miFieldIDRoot],
+				MountPoint:     fields[_miFieldIDMountPoint],
+				Options:        strings.Split(fields[_miFieldIDOptions], _mountInfoOptsSep),
+				OptionalFields: fields[_miFieldIDOptionalFields:(fsTypeStart - 1)],
+				FSType:         fields[miFieldIDFSType],
+				MountSource:    fields[miFieldIDMountSource],
+				SuperOptions:   strings.Split(fields[miFieldIDSuperOptions], _mountInfoOptsSep),
+			}, nil
+		}
+	}
+
+	return nil, mountPointFormatInvalidError{line}
+}
+
+// Translate converts an absolute path inside the *MountPoint's file system to
+// the host file system path in the mount namespace the *MountPoint belongs to.
+func (mp *MountPoint) Translate(absPath string) (string, error) {
+	relPath, err := filepath.Rel(mp.Root, absPath)
+
+	if err != nil {
+		return "", err
+	}
+	if relPath == ".." || strings.HasPrefix(relPath, "../") {
+		return "", pathNotExposedFromMountPointError{
+			mountPoint: mp.MountPoint,
+			root:       mp.Root,
+			path:       absPath,
+		}
+	}
+
+	return filepath.Join(mp.MountPoint, relPath), nil
+}
+
+// parseMountInfo parses procPathMountInfo (usually at `/proc/$PID/mountinfo`)
+// and yields parsed *MountPoint into newMountPoint.
+func parseMountInfo(procPathMountInfo string, newMountPoint func(*MountPoint) error) error {
+	mountInfoFile, err := os.Open(procPathMountInfo)
+	if err != nil {
+		return err
+	}
+	defer mountInfoFile.Close()
+
+	scanner := bufio.NewScanner(mountInfoFile)
+
+	for scanner.Scan() {
+		mountPoint, err := NewMountPointFromLine(scanner.Text())
+		if err != nil {
+			return err
+		}
+		if err := newMountPoint(mountPoint); err != nil {
+			return err
+		}
+	}
+
+	return scanner.Err()
+}
diff --git a/vendor/go.uber.org/automaxprocs/internal/cgroups/subsys.go b/vendor/go.uber.org/automaxprocs/internal/cgroups/subsys.go
new file mode 100644
index 0000000000000000000000000000000000000000..cddc3eaec39575ec5773b4aa9af23ae0979b0c34
--- /dev/null
+++ b/vendor/go.uber.org/automaxprocs/internal/cgroups/subsys.go
@@ -0,0 +1,103 @@
+// Copyright (c) 2017 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+//go:build linux
+// +build linux
+
+package cgroups
+
+import (
+	"bufio"
+	"os"
+	"strconv"
+	"strings"
+)
+
+const (
+	_cgroupSep       = ":"
+	_cgroupSubsysSep = ","
+)
+
+const (
+	_csFieldIDID = iota
+	_csFieldIDSubsystems
+	_csFieldIDName
+	_csFieldCount
+)
+
+// CGroupSubsys represents the data structure for entities in
+// `/proc/$PID/cgroup`. See also proc(5) for more information.
+type CGroupSubsys struct {
+	ID         int
+	Subsystems []string
+	Name       string
+}
+
+// NewCGroupSubsysFromLine returns a new *CGroupSubsys by parsing a string in
+// the format of `/proc/$PID/cgroup`
+func NewCGroupSubsysFromLine(line string) (*CGroupSubsys, error) {
+	fields := strings.SplitN(line, _cgroupSep, _csFieldCount)
+
+	if len(fields) != _csFieldCount {
+		return nil, cgroupSubsysFormatInvalidError{line}
+	}
+
+	id, err := strconv.Atoi(fields[_csFieldIDID])
+	if err != nil {
+		return nil, err
+	}
+
+	cgroup := &CGroupSubsys{
+		ID:         id,
+		Subsystems: strings.Split(fields[_csFieldIDSubsystems], _cgroupSubsysSep),
+		Name:       fields[_csFieldIDName],
+	}
+
+	return cgroup, nil
+}
+
+// parseCGroupSubsystems parses procPathCGroup (usually at `/proc/$PID/cgroup`)
+// and returns a new map[string]*CGroupSubsys.
+func parseCGroupSubsystems(procPathCGroup string) (map[string]*CGroupSubsys, error) {
+	cgroupFile, err := os.Open(procPathCGroup)
+	if err != nil {
+		return nil, err
+	}
+	defer cgroupFile.Close()
+
+	scanner := bufio.NewScanner(cgroupFile)
+	subsystems := make(map[string]*CGroupSubsys)
+
+	for scanner.Scan() {
+		cgroup, err := NewCGroupSubsysFromLine(scanner.Text())
+		if err != nil {
+			return nil, err
+		}
+		for _, subsys := range cgroup.Subsystems {
+			subsystems[subsys] = cgroup
+		}
+	}
+
+	if err := scanner.Err(); err != nil {
+		return nil, err
+	}
+
+	return subsystems, nil
+}
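
As an illustration of the parsing above: each line of /proc/$PID/cgroup has three colon-separated fields (hierarchy ID, comma-separated controllers, cgroup path). The standalone sketch below mirrors what NewCGroupSubsysFromLine does on a made-up sample line, since the internal package cannot be imported directly.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

func main() {
	line := "4:cpu,cpuacct:/docker/0123abc"

	// SplitN with a limit of 3 keeps any ':' inside the cgroup path intact.
	fields := strings.SplitN(line, ":", 3)
	if len(fields) != 3 {
		panic("unexpected /proc/$PID/cgroup line format")
	}

	id, err := strconv.Atoi(fields[0])
	if err != nil {
		panic(err)
	}
	subsystems := strings.Split(fields[1], ",")
	name := fields[2]

	fmt.Println(id, subsystems, name) // 4 [cpu cpuacct] /docker/0123abc
}
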
diff --git a/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_linux.go b/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_linux.go
new file mode 100644
index 0000000000000000000000000000000000000000..3b974754c3ee1d6fd36988e66295d154b6c79efd
--- /dev/null
+++ b/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_linux.go
@@ -0,0 +1,71 @@
+// Copyright (c) 2017 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+//go:build linux
+// +build linux
+
+package runtime
+
+import (
+	"errors"
+	"math"
+
+	cg "go.uber.org/automaxprocs/internal/cgroups"
+)
+
+// CPUQuotaToGOMAXPROCS converts the CPU quota applied to the calling process
+// to a valid GOMAXPROCS value.
+func CPUQuotaToGOMAXPROCS(minValue int) (int, CPUQuotaStatus, error) {
+	cgroups, err := newQueryer()
+	if err != nil {
+		return -1, CPUQuotaUndefined, err
+	}
+
+	quota, defined, err := cgroups.CPUQuota()
+	if !defined || err != nil {
+		return -1, CPUQuotaUndefined, err
+	}
+
+	maxProcs := int(math.Floor(quota))
+	if minValue > 0 && maxProcs < minValue {
+		return minValue, CPUQuotaMinUsed, nil
+	}
+	return maxProcs, CPUQuotaUsed, nil
+}
+
+type queryer interface {
+	CPUQuota() (float64, bool, error)
+}
+
+var (
+	_newCgroups2 = cg.NewCGroups2ForCurrentProcess
+	_newCgroups  = cg.NewCGroupsForCurrentProcess
+)
+
+func newQueryer() (queryer, error) {
+	cgroups, err := _newCgroups2()
+	if err == nil {
+		return cgroups, nil
+	}
+	if errors.Is(err, cg.ErrNotV2) {
+		return _newCgroups()
+	}
+	return nil, err
+}
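
The conversion above reduces to flooring the fractional CPU quota and enforcing a caller-supplied minimum. A minimal worked sketch of that arithmetic, with illustrative quota values:

package main

import (
	"fmt"
	"math"
)

// quotaToMaxProcs repeats the core calculation of CPUQuotaToGOMAXPROCS.
func quotaToMaxProcs(quota float64, minValue int) int {
	maxProcs := int(math.Floor(quota))
	if minValue > 0 && maxProcs < minValue {
		return minValue
	}
	return maxProcs
}

func main() {
	fmt.Println(quotaToMaxProcs(2.5, 1)) // 2: a 2.5-CPU quota floors to 2
	fmt.Println(quotaToMaxProcs(0.5, 1)) // 1: below the minimum, so the minimum is used
}
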
diff --git a/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_unsupported.go b/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_unsupported.go
new file mode 100644
index 0000000000000000000000000000000000000000..6922554484e7b8dff6d597a6a3443f7152cfcd28
--- /dev/null
+++ b/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_unsupported.go
@@ -0,0 +1,31 @@
+// Copyright (c) 2017 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+//go:build !linux
+// +build !linux
+
+package runtime
+
+// CPUQuotaToGOMAXPROCS converts the CPU quota applied to the calling process
+// to a valid GOMAXPROCS value. This is Linux-specific and not supported in the
+// current OS.
+func CPUQuotaToGOMAXPROCS(_ int) (int, CPUQuotaStatus, error) {
+	return -1, CPUQuotaUndefined, nil
+}
diff --git a/vendor/go.uber.org/automaxprocs/internal/runtime/runtime.go b/vendor/go.uber.org/automaxprocs/internal/runtime/runtime.go
new file mode 100644
index 0000000000000000000000000000000000000000..df6eacf0530c5d7ff1e01f719410e6f167bd55a8
--- /dev/null
+++ b/vendor/go.uber.org/automaxprocs/internal/runtime/runtime.go
@@ -0,0 +1,33 @@
+// Copyright (c) 2017 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+package runtime
+
+// CPUQuotaStatus presents the status of how CPU quota is used
+type CPUQuotaStatus int
+
+const (
+	// CPUQuotaUndefined is returned when CPU quota is undefined
+	CPUQuotaUndefined CPUQuotaStatus = iota
+	// CPUQuotaUsed is returned when a valid CPU quota can be used
+	CPUQuotaUsed
+	// CPUQuotaMinUsed is returned when CPU quota is smaller than the min value
+	CPUQuotaMinUsed
+)
diff --git a/vendor/go.uber.org/automaxprocs/maxprocs/maxprocs.go b/vendor/go.uber.org/automaxprocs/maxprocs/maxprocs.go
new file mode 100644
index 0000000000000000000000000000000000000000..98176d6457529eabc2f7e6489437229ac101429a
--- /dev/null
+++ b/vendor/go.uber.org/automaxprocs/maxprocs/maxprocs.go
@@ -0,0 +1,130 @@
+// Copyright (c) 2017 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+// Package maxprocs lets Go programs easily configure runtime.GOMAXPROCS to
+// match the configured Linux CPU quota. Unlike the top-level automaxprocs
+// package, it lets the caller configure logging and handle errors.
+package maxprocs // import "go.uber.org/automaxprocs/maxprocs"
+
+import (
+	"os"
+	"runtime"
+
+	iruntime "go.uber.org/automaxprocs/internal/runtime"
+)
+
+const _maxProcsKey = "GOMAXPROCS"
+
+func currentMaxProcs() int {
+	return runtime.GOMAXPROCS(0)
+}
+
+type config struct {
+	printf        func(string, ...interface{})
+	procs         func(int) (int, iruntime.CPUQuotaStatus, error)
+	minGOMAXPROCS int
+}
+
+func (c *config) log(fmt string, args ...interface{}) {
+	if c.printf != nil {
+		c.printf(fmt, args...)
+	}
+}
+
+// An Option alters the behavior of Set.
+type Option interface {
+	apply(*config)
+}
+
+// Logger uses the supplied printf implementation for log output. By default,
+// Set doesn't log anything.
+func Logger(printf func(string, ...interface{})) Option {
+	return optionFunc(func(cfg *config) {
+		cfg.printf = printf
+	})
+}
+
+// Min sets the minimum GOMAXPROCS value that will be used.
+// Any value below 1 is ignored.
+func Min(n int) Option {
+	return optionFunc(func(cfg *config) {
+		if n >= 1 {
+			cfg.minGOMAXPROCS = n
+		}
+	})
+}
+
+type optionFunc func(*config)
+
+func (of optionFunc) apply(cfg *config) { of(cfg) }
+
+// Set GOMAXPROCS to match the Linux container CPU quota (if any), returning
+// any error encountered and an undo function.
+//
+// Set is a no-op on non-Linux systems and in Linux environments without a
+// configured CPU quota.
+func Set(opts ...Option) (func(), error) {
+	cfg := &config{
+		procs:         iruntime.CPUQuotaToGOMAXPROCS,
+		minGOMAXPROCS: 1,
+	}
+	for _, o := range opts {
+		o.apply(cfg)
+	}
+
+	undoNoop := func() {
+		cfg.log("maxprocs: No GOMAXPROCS change to reset")
+	}
+
+	// Honor the GOMAXPROCS environment variable if present. Otherwise, amend
+	// `runtime.GOMAXPROCS()` with the current process' CPU quota if the OS is
+	// Linux, and guarantee a minimum value of 1. The minimum guaranteed value
+	// can be overridden using `maxprocs.Min()`.
+	if max, exists := os.LookupEnv(_maxProcsKey); exists {
+		cfg.log("maxprocs: Honoring GOMAXPROCS=%q as set in environment", max)
+		return undoNoop, nil
+	}
+
+	maxProcs, status, err := cfg.procs(cfg.minGOMAXPROCS)
+	if err != nil {
+		return undoNoop, err
+	}
+
+	if status == iruntime.CPUQuotaUndefined {
+		cfg.log("maxprocs: Leaving GOMAXPROCS=%v: CPU quota undefined", currentMaxProcs())
+		return undoNoop, nil
+	}
+
+	prev := currentMaxProcs()
+	undo := func() {
+		cfg.log("maxprocs: Resetting GOMAXPROCS to %v", prev)
+		runtime.GOMAXPROCS(prev)
+	}
+
+	switch status {
+	case iruntime.CPUQuotaMinUsed:
+		cfg.log("maxprocs: Updating GOMAXPROCS=%v: using minimum allowed GOMAXPROCS", maxProcs)
+	case iruntime.CPUQuotaUsed:
+		cfg.log("maxprocs: Updating GOMAXPROCS=%v: determined from CPU quota", maxProcs)
+	}
+
+	runtime.GOMAXPROCS(maxProcs)
+	return undo, nil
+}
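
For context, Set is the public entry point gained by vendoring this package. A typical call site looks roughly like the sketch below (illustrative only, not code from this repository):

package main

import (
	"log"

	"go.uber.org/automaxprocs/maxprocs"
)

func main() {
	// Set adjusts GOMAXPROCS to the container CPU quota (Linux only) and
	// returns a function that restores the previous value.
	undo, err := maxprocs.Set(maxprocs.Logger(log.Printf), maxprocs.Min(2))
	defer undo()
	if err != nil {
		log.Printf("failed to set GOMAXPROCS: %v", err)
	}

	// ... the rest of the program runs with the adjusted GOMAXPROCS ...
}
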
diff --git a/vendor/go.uber.org/automaxprocs/maxprocs/version.go b/vendor/go.uber.org/automaxprocs/maxprocs/version.go
new file mode 100644
index 0000000000000000000000000000000000000000..108a95535e5e1f92bf4db61ae67a58c6c58190bd
--- /dev/null
+++ b/vendor/go.uber.org/automaxprocs/maxprocs/version.go
@@ -0,0 +1,24 @@
+// Copyright (c) 2017 Uber Technologies, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+package maxprocs
+
+// Version is the current package version.
+const Version = "1.5.2"
diff --git a/vendor/golang.org/x/exp/slog/handler.go b/vendor/golang.org/x/exp/slog/handler.go
index 74f88738c9a8e9d5cf4eb52c14fcdd87f4662d64..bd635cb8180707814ba33ca8a87ec22ed7329de5 100644
--- a/vendor/golang.org/x/exp/slog/handler.go
+++ b/vendor/golang.org/x/exp/slog/handler.go
@@ -8,6 +8,7 @@ import (
 	"context"
 	"fmt"
 	"io"
+	"reflect"
 	"strconv"
 	"sync"
 	"time"
@@ -504,6 +505,23 @@ func (s *handleState) appendString(str string) {
 }
 
 func (s *handleState) appendValue(v Value) {
+	defer func() {
+		if r := recover(); r != nil {
+			// If it panics with a nil pointer, the most likely cases are
+			// an encoding.TextMarshaler or error fails to guard against nil,
+			// in which case "<nil>" seems to be the feasible choice.
+			//
+			// Adapted from the code in fmt/print.go.
+			if v := reflect.ValueOf(v.any); v.Kind() == reflect.Pointer && v.IsNil() {
+				s.appendString("<nil>")
+				return
+			}
+
+			// Otherwise just print the original panic message.
+			s.appendString(fmt.Sprintf("!PANIC: %v", r))
+		}
+	}()
+
 	var err error
 	if s.h.json {
 		err = appendJSONValue(s, v)
diff --git a/vendor/golang.org/x/exp/typeparams/typeparams_go117.go b/vendor/golang.org/x/exp/typeparams/typeparams_go117.go
index efc33f10f3ccd6a14a1e4b11bcfacd9e97ec822c..c1da793168205971eefb7cbd6af3f4b5f473ba50 100644
--- a/vendor/golang.org/x/exp/typeparams/typeparams_go117.go
+++ b/vendor/golang.org/x/exp/typeparams/typeparams_go117.go
@@ -3,7 +3,6 @@
 // license that can be found in the LICENSE file.
 
 //go:build !go1.18
-// +build !go1.18
 
 package typeparams
 
diff --git a/vendor/golang.org/x/exp/typeparams/typeparams_go118.go b/vendor/golang.org/x/exp/typeparams/typeparams_go118.go
index 1176104b20381f542608fbbb89f3f337213f3767..0b35449d15346e026f84c6aa2710173c081fe010 100644
--- a/vendor/golang.org/x/exp/typeparams/typeparams_go118.go
+++ b/vendor/golang.org/x/exp/typeparams/typeparams_go118.go
@@ -3,7 +3,6 @@
 // license that can be found in the LICENSE file.
 
 //go:build go1.18
-// +build go1.18
 
 package typeparams
 
diff --git a/vendor/golang.org/x/mod/modfile/read.go b/vendor/golang.org/x/mod/modfile/read.go
index 5b5bb5e115b37d037dbe609ae4e30510c7939d98..2205682591587f10120227ed91d6c0d90d36816e 100644
--- a/vendor/golang.org/x/mod/modfile/read.go
+++ b/vendor/golang.org/x/mod/modfile/read.go
@@ -225,7 +225,7 @@ func (x *FileSyntax) Cleanup() {
 			if ww == 0 {
 				continue
 			}
-			if ww == 1 {
+			if ww == 1 && len(stmt.RParen.Comments.Before) == 0 {
 				// Collapse block into single line.
 				line := &Line{
 					Comments: Comments{
diff --git a/vendor/golang.org/x/mod/modfile/rule.go b/vendor/golang.org/x/mod/modfile/rule.go
index 26acaa5f7c4174b29cd1b310a41c9929c314736c..0e7b7e26792b24e70e7fed28b9d9c6667efbded1 100644
--- a/vendor/golang.org/x/mod/modfile/rule.go
+++ b/vendor/golang.org/x/mod/modfile/rule.go
@@ -975,6 +975,8 @@ func (f *File) AddGoStmt(version string) error {
 		var hint Expr
 		if f.Module != nil && f.Module.Syntax != nil {
 			hint = f.Module.Syntax
+		} else if f.Syntax == nil {
+			f.Syntax = new(FileSyntax)
 		}
 		f.Go = &Go{
 			Version: version,
diff --git a/vendor/golang.org/x/sync/semaphore/semaphore.go b/vendor/golang.org/x/sync/semaphore/semaphore.go
index 30f632c577bd1aa41489c813ed1cd580b6f52d45..b618162aab648f61fedf72757e0e46cca3af2c9a 100644
--- a/vendor/golang.org/x/sync/semaphore/semaphore.go
+++ b/vendor/golang.org/x/sync/semaphore/semaphore.go
@@ -35,11 +35,25 @@ type Weighted struct {
 // Acquire acquires the semaphore with a weight of n, blocking until resources
 // are available or ctx is done. On success, returns nil. On failure, returns
 // ctx.Err() and leaves the semaphore unchanged.
-//
-// If ctx is already done, Acquire may still succeed without blocking.
 func (s *Weighted) Acquire(ctx context.Context, n int64) error {
+	done := ctx.Done()
+
 	s.mu.Lock()
+	select {
+	case <-done:
+		// ctx becoming done has "happened before" acquiring the semaphore,
+		// whether it became done before the call began or while we were
+		// waiting for the mutex. We prefer to fail even if we could acquire
+		// the mutex without blocking.
+		s.mu.Unlock()
+		return ctx.Err()
+	default:
+	}
 	if s.size-s.cur >= n && s.waiters.Len() == 0 {
+		// Since we hold s.mu and haven't synchronized since checking done, if
+		// ctx becomes done before we return here, it becoming done must have
+		// "happened concurrently" with this call - it cannot "happen before"
+		// we return in this branch. So, we're ok to always acquire here.
 		s.cur += n
 		s.mu.Unlock()
 		return nil
@@ -48,7 +62,7 @@ func (s *Weighted) Acquire(ctx context.Context, n int64) error {
 	if n > s.size {
 		// Don't make other Acquire calls block on one that's doomed to fail.
 		s.mu.Unlock()
-		<-ctx.Done()
+		<-done
 		return ctx.Err()
 	}
 
@@ -58,14 +72,14 @@ func (s *Weighted) Acquire(ctx context.Context, n int64) error {
 	s.mu.Unlock()
 
 	select {
-	case <-ctx.Done():
-		err := ctx.Err()
+	case <-done:
 		s.mu.Lock()
 		select {
 		case <-ready:
-			// Acquired the semaphore after we were canceled.  Rather than trying to
-			// fix up the queue, just pretend we didn't notice the cancelation.
-			err = nil
+			// Acquired the semaphore after we were canceled.
+			// Pretend we didn't and put the tokens back.
+			s.cur -= n
+			s.notifyWaiters()
 		default:
 			isFront := s.waiters.Front() == elem
 			s.waiters.Remove(elem)
@@ -75,9 +89,19 @@ func (s *Weighted) Acquire(ctx context.Context, n int64) error {
 			}
 		}
 		s.mu.Unlock()
-		return err
+		return ctx.Err()
 
 	case <-ready:
+		// Acquired the semaphore. Check that ctx isn't already done.
+		// We check the done channel instead of calling ctx.Err because we
+		// already have the channel, and ctx.Err is O(n) with the nesting
+		// depth of ctx.
+		select {
+		case <-done:
+			s.Release(n)
+			return ctx.Err()
+		default:
+		}
 		return nil
 	}
 }
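
The behavioural consequence of this change, sketched with the package's public API: Acquire now fails on an already-cancelled context even when capacity is available, rather than possibly succeeding without blocking.

package main

import (
	"context"
	"fmt"

	"golang.org/x/sync/semaphore"
)

func main() {
	sem := semaphore.NewWeighted(1)

	ctx, cancel := context.WithCancel(context.Background())
	cancel() // ctx is done before Acquire is called

	// Previously this could still succeed without blocking; with the change
	// above it returns ctx.Err() and leaves the semaphore unchanged.
	if err := sem.Acquire(ctx, 1); err != nil {
		fmt.Println("acquire failed:", err) // acquire failed: context canceled
	}
}
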
diff --git a/vendor/golang.org/x/tools/go/analysis/analysis.go b/vendor/golang.org/x/tools/go/analysis/analysis.go
index 5da33c7e6e13e7cfc25290459c68b661332cfc4d..521177365744343f5987d6a18781021d939473b2 100644
--- a/vendor/golang.org/x/tools/go/analysis/analysis.go
+++ b/vendor/golang.org/x/tools/go/analysis/analysis.go
@@ -112,6 +112,19 @@ type Pass struct {
 	// analysis's ResultType.
 	ResultOf map[*Analyzer]interface{}
 
+	// ReadFile returns the contents of the named file.
+	//
+	// The only valid file names are the elements of OtherFiles
+	// and IgnoredFiles, and names returned by
+	// Fset.File(f.FileStart).Name() for each f in Files.
+	//
+	// Analyzers must use this function (if provided) instead of
+	// accessing the file system directly. This allows a driver to
+	// provide a virtualized file tree (including, for example,
+	// unsaved editor buffers) and to track dependencies precisely
+	// to avoid unnecessary recomputation.
+	ReadFile func(filename string) ([]byte, error)
+
 	// -- facts --
 
 	// ImportObjectFact retrieves a fact associated with obj.
diff --git a/vendor/golang.org/x/tools/go/analysis/doc.go b/vendor/golang.org/x/tools/go/analysis/doc.go
index 44867d599e4a38f4af0e0e2083bcaa76a9a97a23..2a0aa5771267aef54f9b6f913b6250c7a0f06dfc 100644
--- a/vendor/golang.org/x/tools/go/analysis/doc.go
+++ b/vendor/golang.org/x/tools/go/analysis/doc.go
@@ -32,7 +32,7 @@ bases, and so on.
 
 # Analyzer
 
-The primary type in the API is Analyzer. An Analyzer statically
+The primary type in the API is [Analyzer]. An Analyzer statically
 describes an analysis function: its name, documentation, flags,
 relationship to other analyzers, and of course, its logic.
 
@@ -72,7 +72,7 @@ help that describes the analyses it performs.
 The doc comment contains a brief one-line summary,
 optionally followed by paragraphs of explanation.
 
-The Analyzer type has more fields besides those shown above:
+The [Analyzer] type has more fields besides those shown above:
 
 	type Analyzer struct {
 		Name             string
@@ -114,7 +114,7 @@ instance of the Pass type.
 
 # Pass
 
-A Pass describes a single unit of work: the application of a particular
+A [Pass] describes a single unit of work: the application of a particular
 Analyzer to a particular package of Go code.
 The Pass provides information to the Analyzer's Run function about the
 package being analyzed, and provides operations to the Run function for
@@ -135,16 +135,14 @@ reporting diagnostics and other information back to the driver.
 The Fset, Files, Pkg, and TypesInfo fields provide the syntax trees,
 type information, and source positions for a single package of Go code.
 
-The OtherFiles field provides the names, but not the contents, of non-Go
-files such as assembly that are part of this package. See the "asmdecl"
-or "buildtags" analyzers for examples of loading non-Go files and reporting
-diagnostics against them.
-
-The IgnoredFiles field provides the names, but not the contents,
-of ignored Go and non-Go source files that are not part of this package
-with the current build configuration but may be part of other build
-configurations. See the "buildtags" analyzer for an example of loading
-and checking IgnoredFiles.
+The OtherFiles field provides the names of non-Go
+files such as assembly that are part of this package.
+Similarly, the IgnoredFiles field provides the names of Go and non-Go
+source files that are not part of this package with the current build
+configuration but may be part of other build configurations.
+The contents of these files may be read using Pass.ReadFile;
+see the "asmdecl" or "buildtags" analyzers for examples of loading
+non-Go files and reporting diagnostics against them.
 
 The ResultOf field provides the results computed by the analyzers
 required by this one, as expressed in its Analyzer.Requires field. The
@@ -177,7 +175,7 @@ Diagnostic is defined as:
 The optional Category field is a short identifier that classifies the
 kind of message when an analysis produces several kinds of diagnostic.
 
-The Diagnostic struct does not have a field to indicate its severity
+The [Diagnostic] struct does not have a field to indicate its severity
 because opinions about the relative importance of Analyzers and their
 diagnostics vary widely among users. The design of this framework does
 not hold each Analyzer responsible for identifying the severity of its
@@ -191,7 +189,7 @@ and buildtag, inspect the raw text of Go source files or even non-Go
 files such as assembly. To report a diagnostic against a line of a
 raw text file, use the following sequence:
 
-	content, err := os.ReadFile(filename)
+	content, err := pass.ReadFile(filename)
 	if err != nil { ... }
 	tf := fset.AddFile(filename, -1, len(content))
 	tf.SetLinesForContent(content)
@@ -216,7 +214,7 @@ addition, it records which functions are printf wrappers for use by
 later analysis passes to identify other printf wrappers by induction.
 A result such as “f is a printf wrapper” that is not interesting by
 itself but serves as a stepping stone to an interesting result (such as
-a diagnostic) is called a "fact".
+a diagnostic) is called a [Fact].
 
 The analysis API allows an analysis to define new types of facts, to
 associate facts of these types with objects (named entities) declared
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/appends/appends.go b/vendor/golang.org/x/tools/go/analysis/passes/appends/appends.go
new file mode 100644
index 0000000000000000000000000000000000000000..6976f0d909025bad9ff06ccd326a9e2716f367e3
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/analysis/passes/appends/appends.go
@@ -0,0 +1,47 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package appends defines an Analyzer that detects
+// if there is only one variable in append.
+package appends
+
+import (
+	_ "embed"
+	"go/ast"
+	"go/types"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/go/types/typeutil"
+)
+
+//go:embed doc.go
+var doc string
+
+var Analyzer = &analysis.Analyzer{
+	Name:     "appends",
+	Doc:      analysisutil.MustExtractDoc(doc, "appends"),
+	URL:      "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/appends",
+	Requires: []*analysis.Analyzer{inspect.Analyzer},
+	Run:      run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+	nodeFilter := []ast.Node{
+		(*ast.CallExpr)(nil),
+	}
+	inspect.Preorder(nodeFilter, func(n ast.Node) {
+		call := n.(*ast.CallExpr)
+		b, ok := typeutil.Callee(pass.TypesInfo, call).(*types.Builtin)
+		if ok && b.Name() == "append" && len(call.Args) == 1 {
+			pass.ReportRangef(call, "append with no values")
+		}
+	})
+
+	return nil, nil
+}
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/appends/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/appends/doc.go
new file mode 100644
index 0000000000000000000000000000000000000000..2e6a2e010bae65189b9adf5b1414b9449f6ef167
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/analysis/passes/appends/doc.go
@@ -0,0 +1,20 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package appends defines an Analyzer that detects
+// if there is only one variable in append.
+//
+// # Analyzer appends
+//
+// appends: check for missing values after append
+//
+// This checker reports calls to append that pass
+// no values to be appended to the slice.
+//
+//	s := []string{"a", "b", "c"}
+//	_ = append(s)
+//
+// Such calls are always no-ops and often indicate an
+// underlying mistake.
+package appends
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go b/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go
index f2ca95aa9ebe89ffeb9295b362cef3830d837348..3417232ce35f43bd8fc3d32662817ebf29ab1ff7 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go
@@ -173,7 +173,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
 
 Files:
 	for _, fname := range sfiles {
-		content, tf, err := analysisutil.ReadFile(pass.Fset, fname)
+		content, tf, err := analysisutil.ReadFile(pass, fname)
 		if err != nil {
 			return nil, err
 		}
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go b/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go
index 55bdad78b76a2c398324e805edee541ea20a3a5c..51ba2a91e5b94d2cbf570296ffd0139cd7416fbb 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go
@@ -89,7 +89,7 @@ func checkOtherFile(pass *analysis.Pass, filename string) error {
 
 	// We cannot use the Go parser, since this may not be a Go source file.
 	// Read the raw bytes instead.
-	content, tf, err := analysisutil.ReadFile(pass.Fset, filename)
+	content, tf, err := analysisutil.ReadFile(pass, filename)
 	if err != nil {
 		return err
 	}
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag_old.go b/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag_old.go
index 0001ba536397806ca6d66dd3789dea1b75604366..19ef6b9bce401d118ea2f0a62e8d8f8b72797add 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag_old.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag_old.go
@@ -83,7 +83,7 @@ func checkGoFile(pass *analysis.Pass, f *ast.File) {
 }
 
 func checkOtherFile(pass *analysis.Pass, filename string) error {
-	content, tf, err := analysisutil.ReadFile(pass.Fset, filename)
+	content, tf, err := analysisutil.ReadFile(pass, filename)
 	if err != nil {
 		return err
 	}
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go b/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go
index 6b126f897d89f27fcb7027f7d598613fef92ce58..8cc6c4a058b53dea2e66a68f4e9504eb52935df7 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go
@@ -84,10 +84,9 @@ func run(pass *analysis.Pass) (interface{}, error) {
 		default:
 			structuralTypes = append(structuralTypes, typ)
 		}
+
 		for _, typ := range structuralTypes {
-			// TODO(adonovan): this operation is questionable.
-			under := aliases.Unalias(deref(typ.Underlying()))
-			strct, ok := under.(*types.Struct)
+			strct, ok := typeparams.Deref(typ).Underlying().(*types.Struct)
 			if !ok {
 				// skip non-struct composite literals
 				continue
@@ -144,19 +143,6 @@ func run(pass *analysis.Pass) (interface{}, error) {
 	return nil, nil
 }
 
-// Note: this is not the usual deref operator!
-// It strips off all Pointer constructors (and their Aliases).
-func deref(typ types.Type) types.Type {
-	for {
-		ptr, ok := aliases.Unalias(typ).(*types.Pointer)
-		if !ok {
-			break
-		}
-		typ = ptr.Elem().Underlying()
-	}
-	return typ
-}
-
 // isLocalType reports whether typ belongs to the same package as pass.
 // TODO(adonovan): local means "internal to a function"; rename to isSamePackageType.
 func isLocalType(pass *analysis.Pass, typ types.Type) bool {
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/directive/directive.go b/vendor/golang.org/x/tools/go/analysis/passes/directive/directive.go
index 2691f189aaec456a4e71fd670431ca998af4a005..f6727c5ada05da764fda793522311d5dcb1019ff 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/directive/directive.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/directive/directive.go
@@ -90,7 +90,7 @@ func checkGoFile(pass *analysis.Pass, f *ast.File) {
 func checkOtherFile(pass *analysis.Pass, filename string) error {
 	// We cannot use the Go parser, since this is not a Go source file.
 	// Read the raw bytes instead.
-	content, tf, err := analysisutil.ReadFile(pass.Fset, filename)
+	content, tf, err := analysisutil.ReadFile(pass, filename)
 	if err != nil {
 		return err
 	}
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go b/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go
index 0b3ded47eaf3e309e7f83626ffc7506f4411ebb8..6eff3a20feae11090a1781200f3c9b9918a751b7 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go
@@ -48,7 +48,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
 	}
 
 	for _, fname := range sfiles {
-		content, tf, err := analysisutil.ReadFile(pass.Fset, fname)
+		content, tf, err := analysisutil.ReadFile(pass, fname)
 		if err != nil {
 			return nil, err
 		}
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go b/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go
index 047ae07cca129ec684a948770a0a470553e5ef68..e1ca9b2f514cb3879b93c812f33040dd94d20b14 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go
@@ -119,7 +119,7 @@ func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool {
 		return false // the function called does not return two values.
 	}
 	isPtr, named := typesinternal.ReceiverNamed(res.At(0))
-	if !isPtr || !analysisutil.IsNamedType(named, "net/http", "Response") {
+	if !isPtr || named == nil || !analysisutil.IsNamedType(named, "net/http", "Response") {
 		return false // the first return type is not *http.Response.
 	}
 
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go b/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go
index cd4a477626d612bacfd8bbb897451b7c1c08bfcb..5f07ed3ffde9f44fe0d1c7454dce6f37e7b3f135 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go
@@ -13,6 +13,7 @@ import (
 	"golang.org/x/tools/go/analysis/passes/inspect"
 	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
 	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 //go:embed doc.go
@@ -28,7 +29,7 @@ var Analyzer = &analysis.Analyzer{
 
 // assertableTo checks whether interface v can be asserted into t. It returns
 // nil on success, or the first conflicting method on failure.
-func assertableTo(v, t types.Type) *types.Func {
+func assertableTo(free *typeparams.Free, v, t types.Type) *types.Func {
 	if t == nil || v == nil {
 		// not assertable to, but there is no missing method
 		return nil
@@ -42,7 +43,7 @@ func assertableTo(v, t types.Type) *types.Func {
 
 	// Mitigations for interface comparisons and generics.
 	// TODO(https://github.com/golang/go/issues/50658): Support more precise conclusion.
-	if isParameterized(V) || isParameterized(T) {
+	if free.Has(V) || free.Has(T) {
 		return nil
 	}
 	if f, wrongType := types.MissingMethod(V, T, false); wrongType {
@@ -57,6 +58,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
 		(*ast.TypeAssertExpr)(nil),
 		(*ast.TypeSwitchStmt)(nil),
 	}
+	var free typeparams.Free
 	inspect.Preorder(nodeFilter, func(n ast.Node) {
 		var (
 			assert  *ast.TypeAssertExpr // v.(T) expression
@@ -86,7 +88,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
 		V := pass.TypesInfo.TypeOf(assert.X)
 		for _, target := range targets {
 			T := pass.TypesInfo.TypeOf(target)
-			if f := assertableTo(V, T); f != nil {
+			if f := assertableTo(&free, V, T); f != nil {
 				pass.Reportf(
 					target.Pos(),
 					"impossible type assertion: no type can implement both %v and %v (conflicting types for %v method)",
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go b/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go
index 89291602a5ba86038becc70c424cb8bbb2d1697d..f7f071dc8be98f730476f0b08b415e1ba095f4bd 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go
@@ -14,6 +14,7 @@ import (
 	"go/types"
 	"os"
 
+	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/internal/aliases"
 	"golang.org/x/tools/internal/analysisinternal"
 )
@@ -60,12 +61,16 @@ func HasSideEffects(info *types.Info, e ast.Expr) bool {
 
 // ReadFile reads a file and adds it to the FileSet
 // so that we can report errors against it using lineStart.
-func ReadFile(fset *token.FileSet, filename string) ([]byte, *token.File, error) {
-	content, err := os.ReadFile(filename)
+func ReadFile(pass *analysis.Pass, filename string) ([]byte, *token.File, error) {
+	readFile := pass.ReadFile
+	if readFile == nil {
+		readFile = os.ReadFile
+	}
+	content, err := readFile(filename)
 	if err != nil {
 		return nil, nil, err
 	}
-	tf := fset.AddFile(filename, -1, len(content))
+	tf := pass.Fset.AddFile(filename, -1, len(content))
 	tf.SetLinesForContent(content)
 	return content, tf, nil
 }
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/nilness/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/nilness/doc.go
index 212263741d8b9e58ce9f4ceb75936c1aa8218d0f..e27863e83377ad7d6af4f90f2849d31db9ed0a9d 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/nilness/doc.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/nilness/doc.go
@@ -42,4 +42,31 @@
 //	if p == nil {
 //		panic(p)
 //	}
+//
+// Sometimes the control flow may be quite complex, making bugs hard
+// to spot. In the example below, the err.Error expression is
+// guaranteed to panic because, after the first return, err must be
+// nil. The intervening loop is just a distraction.
+//
+//	...
+//	err := g.Wait()
+//	if err != nil {
+//		return err
+//	}
+//	partialSuccess := false
+//	for _, err := range errs {
+//		if err == nil {
+//			partialSuccess = true
+//			break
+//		}
+//	}
+//	if partialSuccess {
+//		reportStatus(StatusMessage{
+//			Code:   code.ERROR,
+//			Detail: err.Error(), // "nil dereference in dynamic method call"
+//		})
+//		return nil
+//	}
+//
+// ...
 package nilness
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go
index 1ee16126ade3ad706d77370774863f9080a45d44..85da8346f752eb48c1769c9f236472f0860fe193 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go
@@ -11,14 +11,53 @@
 //
 // The check applies to calls of the formatting functions such as
 // [fmt.Printf] and [fmt.Sprintf], as well as any detected wrappers of
-// those functions.
+// those functions such as [log.Printf]. It reports a variety of
+// mistakes such as syntax errors in the format string and mismatches
+// (of number and type) between the verbs and their arguments.
 //
-// In this example, the %d format operator requires an integer operand:
+// See the documentation of the fmt package for the complete set of
+// format operators and their operand types.
+//
+// # Examples
+//
+// The %d format operator requires an integer operand.
+// Here it is incorrectly applied to a string:
 //
 //	fmt.Printf("%d", "hello") // fmt.Printf format %d has arg "hello" of wrong type string
 //
-// See the documentation of the fmt package for the complete set of
-// format operators and their operand types.
+// A call to Printf must have as many operands as there are "verbs" in
+// the format string, not too few:
+//
+//	fmt.Printf("%d") // fmt.Printf format reads arg 1, but call has 0 args
+//
+// nor too many:
+//
+//	fmt.Printf("%d", 1, 2) // fmt.Printf call needs 1 arg, but has 2 args
+//
+// Explicit argument indexes must be no greater than the number of
+// arguments:
+//
+//	fmt.Printf("%[3]d", 1, 2) // fmt.Printf call has invalid argument index 3
+//
+// The checker also uses a heuristic to report calls to Print-like
+// functions that appear to have been intended for their Printf-like
+// counterpart:
+//
+//	log.Print("%d", 123) // log.Print call has possible formatting directive %d
+//
+// # Inferred printf wrappers
+//
+// Functions that delegate their arguments to fmt.Printf are
+// considered "printf wrappers"; calls to them are subject to the same
+// checking. In this example, logf is a printf wrapper:
+//
+//	func logf(level int, format string, args ...any) {
+//		if enabled(level) {
+//			log.Printf(format, args...)
+//		}
+//	}
+//
+//	logf(3, "invalid request: %v") // logf format reads arg 1, but call has 0 args
 //
 // To enable printf checking on a function that is not found by this
 // analyzer's heuristics (for example, because control is obscured by
@@ -26,14 +65,19 @@
 //
 //	func MyPrintf(format string, args ...any) {
 //		if false {
-//			_ = fmt.Sprintf(format, args...) // enable printf checker
+//			_ = fmt.Sprintf(format, args...) // enable printf checking
 //		}
 //		...
 //	}
 //
-// The -funcs flag specifies a comma-separated list of names of additional
-// known formatting functions or methods. If the name contains a period,
-// it must denote a specific function using one of the following forms:
+// # Specifying printf wrappers by flag
+//
+// The -funcs flag specifies a comma-separated list of names of
+// additional known formatting functions or methods. (This legacy flag
+// is rarely used due to the automatic inference described above.)
+//
+// If the name contains a period, it must denote a specific function
+// using one of the following forms:
 //
 //	dir/pkg.Function
 //	dir/pkg.Type.Method
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go b/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go
index b3c683b61cba50af5c4e94a6322c44a266cd5c06..0cade7bad7e100ee983e66a6c3bb79073b4fcdcb 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go
@@ -49,6 +49,7 @@ const (
 )
 
 func run(pass *analysis.Pass) (any, error) {
+	var attrType types.Type // The type of slog.Attr
 	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
 	nodeFilter := []ast.Node{
 		(*ast.CallExpr)(nil),
@@ -67,6 +68,11 @@ func run(pass *analysis.Pass) (any, error) {
 			// Not a slog function that takes key-value pairs.
 			return
 		}
+		// Here we know that fn.Pkg() is "log/slog".
+		if attrType == nil {
+			attrType = fn.Pkg().Scope().Lookup("Attr").Type()
+		}
+
 		if isMethodExpr(pass.TypesInfo, call) {
 			// Call is to a method value. Skip the first argument.
 			skipArgs++
@@ -92,8 +98,19 @@ func run(pass *analysis.Pass) (any, error) {
 					pos = key
 				case types.IsInterface(t):
 					// As we do not do dataflow, we do not know what the dynamic type is.
-					// It could be a string or an Attr so we don't know what to expect next.
-					pos = unknown
+					// But we might be able to learn enough to make a decision.
+					if types.AssignableTo(stringType, t) {
+						// t must be an empty interface. So it can also be an Attr.
+						// We don't know enough to make an assumption.
+						pos = unknown
+						continue
+					} else if attrType != nil && types.AssignableTo(attrType, t) {
+						// Assume it is an Attr.
+						pos = key
+						continue
+					}
+					// Can't be either a string or Attr. Definitely an error.
+					fallthrough
 				default:
 					if unknownArg == nil {
 						pass.ReportRangef(arg, "%s arg %q should be a string or a slog.Attr (possible missing key or value)",
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go b/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go
index 39d0d9e429e63112b1455e298607a4532b0eb711..f5e760ca265e4c5b9789d0d214e691765073b924 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go
@@ -447,6 +447,18 @@ func checkExampleName(pass *analysis.Pass, fn *ast.FuncDecl) {
 	}
 }
 
+type tokenRange struct {
+	p, e token.Pos
+}
+
+func (r tokenRange) Pos() token.Pos {
+	return r.p
+}
+
+func (r tokenRange) End() token.Pos {
+	return r.e
+}
+
 func checkTest(pass *analysis.Pass, fn *ast.FuncDecl, prefix string) {
 	// Want functions with 0 results and 1 parameter.
 	if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 ||
@@ -464,12 +476,11 @@ func checkTest(pass *analysis.Pass, fn *ast.FuncDecl, prefix string) {
 	if tparams := fn.Type.TypeParams; tparams != nil && len(tparams.List) > 0 {
 		// Note: cmd/go/internal/load also errors about TestXXX and BenchmarkXXX functions with type parameters.
 		// We have currently decided to also warn before compilation/package loading. This can help users in IDEs.
-		// TODO(adonovan): use ReportRangef(tparams).
-		pass.Reportf(fn.Pos(), "%s has type parameters: it will not be run by go test as a %sXXX function", fn.Name.Name, prefix)
+		at := tokenRange{tparams.Opening, tparams.Closing}
+		pass.ReportRangef(at, "%s has type parameters: it will not be run by go test as a %sXXX function", fn.Name.Name, prefix)
 	}
 
 	if !isTestSuffix(fn.Name.Name[len(prefix):]) {
-		// TODO(adonovan): use ReportRangef(fn.Name).
-		pass.Reportf(fn.Pos(), "%s has malformed name: first letter after '%s' must not be lowercase", fn.Name.Name, prefix)
+		pass.ReportRangef(fn.Name, "%s has malformed name: first letter after '%s' must not be lowercase", fn.Name.Name, prefix)
 	}
 }
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go b/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go
index a01cbb8f83a2e52294237561b07f2eaff1db1549..a99c5483351fc88781cc87b9e885a40c46a58e52 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go
@@ -6,7 +6,6 @@ package unusedwrite
 
 import (
 	_ "embed"
-	"fmt"
 	"go/types"
 
 	"golang.org/x/tools/go/analysis"
@@ -14,6 +13,7 @@ import (
 	"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
 	"golang.org/x/tools/go/ssa"
 	"golang.org/x/tools/internal/aliases"
+	"golang.org/x/tools/internal/typeparams"
 )
 
 //go:embed doc.go
@@ -37,9 +37,9 @@ func run(pass *analysis.Pass) (interface{}, error) {
 		for _, store := range reports {
 			switch addr := store.Addr.(type) {
 			case *ssa.FieldAddr:
+				field := typeparams.CoreType(typeparams.MustDeref(addr.X.Type())).(*types.Struct).Field(addr.Field)
 				pass.Reportf(store.Pos(),
-					"unused write to field %s",
-					getFieldName(addr.X.Type(), addr.Field))
+					"unused write to field %s", field.Name())
 			case *ssa.IndexAddr:
 				pass.Reportf(store.Pos(),
 					"unused write to array index %s", addr.Index)
@@ -151,21 +151,3 @@ func hasStructOrArrayType(v ssa.Value) bool {
 	}
 	return isStructOrArray(v.Type())
 }
-
-// getFieldName returns the name of a field in a struct.
-// It the field is not found, then it returns the string format of the index.
-//
-// For example, for struct T {x int, y int), getFieldName(*T, 1) returns "y".
-func getFieldName(tp types.Type, index int) string {
-	// TODO(adonovan): use
-	//   stp, ok := typeparams.Deref(tp).Underlying().(*types.Struct); ok {
-	// when Deref is defined. But see CL 565456 for a better fix.
-
-	if pt, ok := aliases.Unalias(tp).(*types.Pointer); ok {
-		tp = pt.Elem()
-	}
-	if stp, ok := tp.Underlying().(*types.Struct); ok {
-		return stp.Field(index).Name()
-	}
-	return fmt.Sprintf("%d", index)
-}
diff --git a/vendor/golang.org/x/tools/go/cfg/cfg.go b/vendor/golang.org/x/tools/go/cfg/cfg.go
index 01668359af2d27c4e95b55ea90b74ea02c6656ac..fad4530ff3c239a8e3aa51deb905409c1687f9f2 100644
--- a/vendor/golang.org/x/tools/go/cfg/cfg.go
+++ b/vendor/golang.org/x/tools/go/cfg/cfg.go
@@ -211,9 +211,13 @@ func (g *CFG) Format(fset *token.FileSet) string {
 	return buf.String()
 }
 
-// digraph emits AT&T GraphViz (dot) syntax for the CFG.
-// TODO(adonovan): publish; needs a proposal.
-func (g *CFG) digraph(fset *token.FileSet) string {
+// Dot returns the control-flow graph in the [Dot graph description language].
+// Use a command such as 'dot -Tsvg' to render it in a form viewable in a browser.
+// This method is provided as a debugging aid; the details of the
+// output are unspecified and may change.
+//
+// [Dot graph description language]: https://en.wikipedia.org/wiki/DOT_(graph_description_language)
+func (g *CFG) Dot(fset *token.FileSet) string {
 	var buf bytes.Buffer
 	buf.WriteString("digraph CFG {\n")
 	buf.WriteString("  node [shape=box];\n")
@@ -235,11 +239,6 @@ func (g *CFG) digraph(fset *token.FileSet) string {
 	return buf.String()
 }
 
-// exposed to main.go
-func digraph(g *CFG, fset *token.FileSet) string {
-	return g.digraph(fset)
-}
-
 func formatNode(fset *token.FileSet, n ast.Node) string {
 	var buf bytes.Buffer
 	format.Node(&buf, fset, n)
diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go
index f33b0afc22cf5255924629489fdaa9fc820e0af7..3ea1b3fa46d0a58f9095bf9ab502f3d530bae777 100644
--- a/vendor/golang.org/x/tools/go/packages/packages.go
+++ b/vendor/golang.org/x/tools/go/packages/packages.go
@@ -9,6 +9,7 @@ package packages
 import (
 	"context"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"go/ast"
 	"go/parser"
@@ -24,6 +25,8 @@ import (
 	"sync"
 	"time"
 
+	"golang.org/x/sync/errgroup"
+
 	"golang.org/x/tools/go/gcexportdata"
 	"golang.org/x/tools/internal/gocommand"
 	"golang.org/x/tools/internal/packagesinternal"
@@ -126,9 +129,8 @@ type Config struct {
 	Mode LoadMode
 
 	// Context specifies the context for the load operation.
-	// If the context is cancelled, the loader may stop early
-	// and return an ErrCancelled error.
-	// If Context is nil, the load cannot be cancelled.
+	// Cancelling the context may cause [Load] to abort and
+	// return an error.
 	Context context.Context
 
 	// Logf is the logger for the config.
@@ -211,8 +213,8 @@ type Config struct {
 // Config specifies loading options;
 // nil behaves the same as an empty Config.
 //
-// Load returns an error if any of the patterns was invalid
-// as defined by the underlying build system.
+// If any of the patterns was invalid as defined by the
+// underlying build system, Load returns an error.
 // It may return an empty list of packages without an error,
 // for instance for an empty expansion of a valid wildcard.
 // Errors associated with a particular package are recorded in the
@@ -255,8 +257,27 @@ func Load(cfg *Config, patterns ...string) ([]*Package, error) {
 // defaultDriver will fall back to the go list driver.
 // The boolean result indicates that an external driver handled the request.
 func defaultDriver(cfg *Config, patterns ...string) (*DriverResponse, bool, error) {
+	const (
+		// windowsArgMax specifies the maximum command line length for
+		// the Windows' CreateProcess function.
+		windowsArgMax = 32767
+		// maxEnvSize is a very rough estimation of the maximum environment
+		// size of a user.
+		maxEnvSize = 16384
+		// safeArgMax specifies the maximum safe command line length to use
+		// by the underlying driver excl. the environment. We choose the Windows'
+		// ARG_MAX as the starting point because it's one of the lowest ARG_MAX
+		// constants out of the different supported platforms,
+		// e.g., https://www.in-ulm.de/~mascheck/various/argmax/#results.
+		safeArgMax = windowsArgMax - maxEnvSize
+	)
+	chunks, err := splitIntoChunks(patterns, safeArgMax)
+	if err != nil {
+		return nil, false, err
+	}
+
 	if driver := findExternalDriver(cfg); driver != nil {
-		response, err := driver(cfg, patterns...)
+		response, err := callDriverOnChunks(driver, cfg, chunks)
 		if err != nil {
 			return nil, false, err
 		} else if !response.NotHandled {
@@ -265,11 +286,82 @@ func defaultDriver(cfg *Config, patterns ...string) (*DriverResponse, bool, erro
 		// (fall through)
 	}
 
-	response, err := goListDriver(cfg, patterns...)
+	response, err := callDriverOnChunks(goListDriver, cfg, chunks)
 	if err != nil {
 		return nil, false, err
 	}
-	return response, false, nil
+	return response, false, err
+}
+
+// splitIntoChunks chunks the slice so that the total number of characters
+// in a chunk is no longer than argMax.
+func splitIntoChunks(patterns []string, argMax int) ([][]string, error) {
+	if argMax <= 0 {
+		return nil, errors.New("failed to split patterns into chunks, negative safe argMax value")
+	}
+	var chunks [][]string
+	charsInChunk := 0
+	nextChunkStart := 0
+	for i, v := range patterns {
+		vChars := len(v)
+		if vChars > argMax {
+			// a single pattern is longer than the maximum safe ARG_MAX, hardly should happen
+			return nil, errors.New("failed to split patterns into chunks, a pattern is too long")
+		}
+		charsInChunk += vChars + 1 // +1 is for a whitespace between patterns that has to be counted too
+		if charsInChunk > argMax {
+			chunks = append(chunks, patterns[nextChunkStart:i])
+			nextChunkStart = i
+			charsInChunk = vChars
+		}
+	}
+	// add the last chunk
+	if nextChunkStart < len(patterns) {
+		chunks = append(chunks, patterns[nextChunkStart:])
+	}
+	return chunks, nil
+}
+
+func callDriverOnChunks(driver driver, cfg *Config, chunks [][]string) (*DriverResponse, error) {
+	if len(chunks) == 0 {
+		return driver(cfg)
+	}
+	responses := make([]*DriverResponse, len(chunks))
+	errNotHandled := errors.New("driver returned NotHandled")
+	var g errgroup.Group
+	for i, chunk := range chunks {
+		i := i
+		chunk := chunk
+		g.Go(func() (err error) {
+			responses[i], err = driver(cfg, chunk...)
+			if responses[i] != nil && responses[i].NotHandled {
+				err = errNotHandled
+			}
+			return err
+		})
+	}
+	if err := g.Wait(); err != nil {
+		if errors.Is(err, errNotHandled) {
+			return &DriverResponse{NotHandled: true}, nil
+		}
+		return nil, err
+	}
+	return mergeResponses(responses...), nil
+}
+
+func mergeResponses(responses ...*DriverResponse) *DriverResponse {
+	if len(responses) == 0 {
+		return nil
+	}
+	response := newDeduper()
+	response.dr.NotHandled = false
+	response.dr.Compiler = responses[0].Compiler
+	response.dr.Arch = responses[0].Arch
+	response.dr.GoVersion = responses[0].GoVersion
+	for _, v := range responses {
+		response.addAll(v)
+	}
+	return response.dr
 }
 
 // A Package describes a loaded Go package.
@@ -335,6 +427,10 @@ type Package struct {
 	// The NeedTypes LoadMode bit sets this field for packages matching the
 	// patterns; type information for dependencies may be missing or incomplete,
 	// unless NeedDeps and NeedImports are also set.
+	//
+	// Each call to [Load] returns a consistent set of type
+	// symbols, as defined by the comment at [types.Identical].
+	// Avoid mixing type information from two or more calls to [Load].
 	Types *types.Package
 
 	// Fset provides position information for Types, TypesInfo, and Syntax.
@@ -761,6 +857,12 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) {
 		wg.Wait()
 	}
 
+	// If the context is done, return its error and
+	// throw out [likely] incomplete packages.
+	if err := ld.Context.Err(); err != nil {
+		return nil, err
+	}
+
 	result := make([]*Package, len(initial))
 	for i, lpkg := range initial {
 		result[i] = lpkg.Package
@@ -856,6 +958,14 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
 	lpkg.Types = types.NewPackage(lpkg.PkgPath, lpkg.Name)
 	lpkg.Fset = ld.Fset
 
+	// Start shutting down if the context is done and do not load
+	// source or export data files.
+	// Packages that import this one will have ld.Context.Err() != nil.
+	// ld.Context.Err() will be returned later by refine.
+	if ld.Context.Err() != nil {
+		return
+	}
+
 	// Subtle: we populate all Types fields with an empty Package
 	// before loading export data so that export data processing
 	// never has to create a types.Package for an indirect dependency,
@@ -975,6 +1085,13 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
 		return
 	}
 
+	// Start shutting down if the context is done and do not type check.
+	// Packages that import this one will have ld.Context.Err() != nil.
+	// ld.Context.Err() will be returned later by refine.
+	if ld.Context.Err() != nil {
+		return
+	}
+
 	lpkg.TypesInfo = &types.Info{
 		Types:      make(map[ast.Expr]types.TypeAndValue),
 		Defs:       make(map[*ast.Ident]types.Object),
@@ -1025,7 +1142,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
 		Sizes: ld.sizes, // may be nil
 	}
 	if lpkg.Module != nil && lpkg.Module.GoVersion != "" {
-		typesinternal.SetGoVersion(tc, "go"+lpkg.Module.GoVersion)
+		tc.GoVersion = "go" + lpkg.Module.GoVersion
 	}
 	if (ld.Mode & typecheckCgo) != 0 {
 		if !typesinternal.SetUsesCgo(tc) {
@@ -1036,10 +1153,24 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
 			return
 		}
 	}
-	types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax)
 
+	typErr := types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax)
 	lpkg.importErrors = nil // no longer needed
 
+	// In go/types go1.21 and go1.22, Checker.Files failed fast with a
+	// "too new" error, without calling tc.Error and without
+	// proceeding to type-check the package (#66525).
+	// We rely on the runtimeVersion error to give the suggested remedy.
+	if typErr != nil && len(lpkg.Errors) == 0 && len(lpkg.Syntax) > 0 {
+		if msg := typErr.Error(); strings.HasPrefix(msg, "package requires newer Go version") {
+			appendError(types.Error{
+				Fset: ld.Fset,
+				Pos:  lpkg.Syntax[0].Package,
+				Msg:  msg,
+			})
+		}
+	}
+
 	// If !Cgo, the type-checker uses FakeImportC mode, so
 	// it doesn't invoke the importer for import "C",
 	// nor report an error for the import,
@@ -1061,6 +1192,12 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
 		}
 	}
 
+	// If types.Checker.Files had an error that was unreported,
+	// make sure to report the unknown error so the package is illTyped.
+	if typErr != nil && len(lpkg.Errors) == 0 {
+		appendError(typErr)
+	}
+
 	// Record accumulated errors.
 	illTyped := len(lpkg.Errors) > 0
 	if !illTyped {
@@ -1132,11 +1269,6 @@ func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
 	parsed := make([]*ast.File, n)
 	errors := make([]error, n)
 	for i, file := range filenames {
-		if ld.Config.Context.Err() != nil {
-			parsed[i] = nil
-			errors[i] = ld.Config.Context.Err()
-			continue
-		}
 		wg.Add(1)
 		go func(i int, filename string) {
 			parsed[i], errors[i] = ld.parseFile(filename)
diff --git a/vendor/golang.org/x/tools/go/ssa/builder.go b/vendor/golang.org/x/tools/go/ssa/builder.go
index 72e906c38506183687c3a146acdbc7af56d36e49..1f7f364eef0404231c67d30fe046436b09c29fdd 100644
--- a/vendor/golang.org/x/tools/go/ssa/builder.go
+++ b/vendor/golang.org/x/tools/go/ssa/builder.go
@@ -79,6 +79,7 @@ import (
 	"go/token"
 	"go/types"
 	"os"
+	"runtime"
 	"sync"
 
 	"golang.org/x/tools/internal/aliases"
@@ -335,7 +336,7 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ
 		// We must still evaluate the value, though.  (If it
 		// was side-effect free, the whole call would have
 		// been constant-folded.)
-		t, _ := deref(fn.typeOf(args[0]))
+		t := typeparams.Deref(fn.typeOf(args[0]))
 		if at, ok := typeparams.CoreType(t).(*types.Array); ok {
 			b.expr(fn, args[0]) // for effects only
 			return intConst(at.Len())
@@ -391,7 +392,7 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
 		return &address{addr: v, pos: e.Pos(), expr: e}
 
 	case *ast.CompositeLit:
-		typ, _ := deref(fn.typeOf(e))
+		typ := typeparams.Deref(fn.typeOf(e))
 		var v *Alloc
 		if escaping {
 			v = emitNew(fn, typ, e.Lbrace, "complit")
@@ -512,17 +513,15 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *
 		// A CompositeLit never evaluates to a pointer,
 		// so if the type of the location is a pointer,
 		// an &-operation is implied.
-		if _, ok := loc.(blank); !ok { // avoid calling blank.typ()
-			if _, ok := deref(loc.typ()); ok {
-				ptr := b.addr(fn, e, true).address(fn)
-				// copy address
-				if sb != nil {
-					sb.store(loc, ptr)
-				} else {
-					loc.store(fn, ptr)
-				}
-				return
+		if !is[blank](loc) && isPointerCore(loc.typ()) { // avoid calling blank.typ()
+			ptr := b.addr(fn, e, true).address(fn)
+			// copy address
+			if sb != nil {
+				sb.store(loc, ptr)
+			} else {
+				loc.store(fn, ptr)
 			}
+			return
 		}
 
 		if _, ok := loc.(*address); ok {
@@ -794,7 +793,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
 			// The result is a "bound".
 			obj := sel.obj.(*types.Func)
 			rt := fn.typ(recvType(obj))
-			_, wantAddr := deref(rt)
+			wantAddr := isPointer(rt)
 			escaping := true
 			v := b.receiver(fn, e.X, wantAddr, escaping, sel)
 
@@ -922,7 +921,7 @@ func (b *builder) stmtList(fn *Function, list []ast.Stmt) {
 // escaping is defined as per builder.addr().
 func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *selection) Value {
 	var v Value
-	if _, eptr := deref(fn.typeOf(e)); wantAddr && !sel.indirect && !eptr {
+	if wantAddr && !sel.indirect && !isPointerCore(fn.typeOf(e)) {
 		v = b.addr(fn, e, escaping).address(fn)
 	} else {
 		v = b.expr(fn, e)
@@ -934,7 +933,7 @@ func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, se
 	if types.IsInterface(v.Type()) {
 		// When v is an interface, sel.Kind()==MethodValue and v.f is invoked.
 		// So v is not loaded, even if v has a pointer core type.
-	} else if _, vptr := deref(v.Type()); !wantAddr && vptr {
+	} else if !wantAddr && isPointerCore(v.Type()) {
 		v = emitLoad(fn, v)
 	}
 	return v
@@ -953,7 +952,7 @@ func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
 			obj := sel.obj.(*types.Func)
 			recv := recvType(obj)
 
-			_, wantAddr := deref(recv)
+			wantAddr := isPointer(recv)
 			escaping := true
 			v := b.receiver(fn, selector.X, wantAddr, escaping, sel)
 			if types.IsInterface(recv) {
@@ -1214,12 +1213,12 @@ func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 {
 // literal has type *T behaves like &T{}.
 // In that case, addr must hold a T, not a *T.
 func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) {
-	typ, _ := deref(fn.typeOf(e)) // type with name [may be type param]
+	typ := typeparams.Deref(fn.typeOf(e)) // retain the named/alias/param type, if any
 	switch t := typeparams.CoreType(typ).(type) {
 	case *types.Struct:
 		if !isZero && len(e.Elts) != t.NumFields() {
 			// memclear
-			zt, _ := deref(addr.Type())
+			zt := typeparams.MustDeref(addr.Type())
 			sb.store(&address{addr, e.Lbrace, nil}, zeroConst(zt))
 			isZero = true
 		}
@@ -1252,7 +1251,7 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero
 	case *types.Array, *types.Slice:
 		var at *types.Array
 		var array Value
-		switch t := aliases.Unalias(t).(type) {
+		switch t := t.(type) {
 		case *types.Slice:
 			at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts))
 			array = emitNew(fn, at, e.Lbrace, "slicelit")
@@ -1262,7 +1261,7 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero
 
 			if !isZero && int64(len(e.Elts)) != at.Len() {
 				// memclear
-				zt, _ := deref(array.Type())
+				zt := typeparams.MustDeref(array.Type())
 				sb.store(&address{array, e.Lbrace, nil}, zeroConst(zt))
 			}
 		}
@@ -1318,7 +1317,7 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero
 			//	map[*struct{}]bool{&struct{}{}: true}
 			wantAddr := false
 			if _, ok := unparen(e.Key).(*ast.CompositeLit); ok {
-				_, wantAddr = deref(t.Key())
+				wantAddr = isPointerCore(t.Key())
 			}
 
 			var key Value
@@ -1865,7 +1864,7 @@ func (b *builder) forStmtGo122(fn *Function, s *ast.ForStmt, label *lblock) {
 		fn.emit(phi)
 
 		fn.currentBlock = post
-		// If next is is local, it reuses the address and zeroes the old value so
+		// If next is local, it reuses the address and zeroes the old value so
 		// load before allocating next.
 		load := emitLoad(fn, phi)
 		next := emitLocal(fn, typ, v.Pos(), v.Name())
@@ -1991,7 +1990,7 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.P
 
 	// Determine number of iterations.
 	var length Value
-	dt, _ := deref(x.Type())
+	dt := typeparams.Deref(x.Type())
 	if arr, ok := typeparams.CoreType(dt).(*types.Array); ok {
 		// For array or *array, the number of iterations is
 		// known statically thanks to the type.  We avoid a
@@ -2338,6 +2337,12 @@ start:
 		}
 
 	case *ast.LabeledStmt:
+		if s.Label.Name == "_" {
+			// Blank labels can't be the target of a goto, break,
+			// or continue statement, so we don't need a new block.
+			_s = s.Stmt
+			goto start
+		}
 		label = fn.labelledBlock(s.Label)
 		emitJump(fn, label._goto)
 		fn.currentBlock = label._goto
@@ -2616,15 +2621,20 @@ func (prog *Program) Build() {
 			p.Build()
 		} else {
 			wg.Add(1)
+			cpuLimit <- struct{}{} // acquire a token
 			go func(p *Package) {
 				p.Build()
 				wg.Done()
+				<-cpuLimit // release a token
 			}(p)
 		}
 	}
 	wg.Wait()
 }
 
+// cpuLimit is a counting semaphore to limit CPU parallelism.
+var cpuLimit = make(chan struct{}, runtime.GOMAXPROCS(0))
+
 // Build builds SSA code for all functions and vars in package p.
 //
 // CreatePackage must have been called for all of p's direct imports
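
The cpuLimit channel introduced above is the standard Go counting-semaphore idiom: acquire a token before launching each goroutine and release it when the work is done, so at most GOMAXPROCS package builds run concurrently. A hedged, standalone sketch of the same pattern (the names and the printed message are illustrative, not from the vendored package):

package main

import (
	"fmt"
	"runtime"
	"sync"
)

// tokens caps the number of concurrently running workers at GOMAXPROCS.
var tokens = make(chan struct{}, runtime.GOMAXPROCS(0))

func main() {
	var wg sync.WaitGroup
	for i := 0; i < 16; i++ {
		wg.Add(1)
		tokens <- struct{}{} // acquire a token before spawning
		go func(i int) {
			defer wg.Done()
			defer func() { <-tokens }() // release the token when finished
			fmt.Println("building package", i) // stand-in for p.Build()
		}(i)
	}
	wg.Wait()
}
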
diff --git a/vendor/golang.org/x/tools/go/ssa/const.go b/vendor/golang.org/x/tools/go/ssa/const.go
index e0d79f5ef72e262a26d09b01ce77b85f341599c2..2a4e0dde28afbbcc388ddfeb2957ac0fa4b24ac1 100644
--- a/vendor/golang.org/x/tools/go/ssa/const.go
+++ b/vendor/golang.org/x/tools/go/ssa/const.go
@@ -46,9 +46,9 @@ func soleTypeKind(typ types.Type) types.BasicInfo {
 	// Candidates (perhaps all) are eliminated during the type-set
 	// iteration, which executes at least once.
 	state := types.IsBoolean | types.IsInteger | types.IsString
-	underIs(typeSetOf(typ), func(t types.Type) bool {
+	underIs(typeSetOf(typ), func(ut types.Type) bool {
 		var c types.BasicInfo
-		if t, ok := aliases.Unalias(t).(*types.Basic); ok {
+		if t, ok := ut.(*types.Basic); ok {
 			c = t.Info()
 		}
 		if c&types.IsNumeric != 0 { // int/float/complex
diff --git a/vendor/golang.org/x/tools/go/ssa/coretype.go b/vendor/golang.org/x/tools/go/ssa/coretype.go
index 3a512830b1f24f3f70b23e0418a6ddd0f63af409..8c218f919fa0c1615593cc7d215b7d9e1fc371af 100644
--- a/vendor/golang.org/x/tools/go/ssa/coretype.go
+++ b/vendor/golang.org/x/tools/go/ssa/coretype.go
@@ -50,6 +50,7 @@ func typeSetOf(typ types.Type) termList {
 	// This is a adaptation of x/exp/typeparams.NormalTerms which x/tools cannot depend on.
 	var terms []*types.Term
 	var err error
+	// typeSetOf(t) == typeSetOf(Unalias(t))
 	switch typ := aliases.Unalias(typ).(type) {
 	case *types.TypeParam:
 		terms, err = typeparams.StructuralTerms(typ)
diff --git a/vendor/golang.org/x/tools/go/ssa/create.go b/vendor/golang.org/x/tools/go/ssa/create.go
index f8f584a1a560ac1877409477669feedaff1048c1..f4dab2decddbead70b586b2dc890dead25eb4f06 100644
--- a/vendor/golang.org/x/tools/go/ssa/create.go
+++ b/vendor/golang.org/x/tools/go/ssa/create.go
@@ -34,13 +34,12 @@ import (
 // See the Example tests for simple examples.
 func NewProgram(fset *token.FileSet, mode BuilderMode) *Program {
 	return &Program{
-		Fset:          fset,
-		imported:      make(map[string]*Package),
-		packages:      make(map[*types.Package]*Package),
-		mode:          mode,
-		canon:         newCanonizer(),
-		ctxt:          types.NewContext(),
-		parameterized: tpWalker{seen: make(map[types.Type]bool)},
+		Fset:     fset,
+		imported: make(map[string]*Package),
+		packages: make(map[*types.Package]*Package),
+		mode:     mode,
+		canon:    newCanonizer(),
+		ctxt:     types.NewContext(),
 	}
 }
 
@@ -301,7 +300,7 @@ func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *
 var printMu sync.Mutex
 
 // AllPackages returns a new slice containing all packages created by
-// prog.CreatePackage in in unspecified order.
+// prog.CreatePackage in unspecified order.
 func (prog *Program) AllPackages() []*Package {
 	pkgs := make([]*Package, 0, len(prog.packages))
 	for _, pkg := range prog.packages {
diff --git a/vendor/golang.org/x/tools/go/ssa/emit.go b/vendor/golang.org/x/tools/go/ssa/emit.go
index 549c9114d43b953419ba6c2fcc2e236a7176658f..716299ffe680bd558d9a002878d458dac6cef06c 100644
--- a/vendor/golang.org/x/tools/go/ssa/emit.go
+++ b/vendor/golang.org/x/tools/go/ssa/emit.go
@@ -12,7 +12,6 @@ import (
 	"go/token"
 	"go/types"
 
-	"golang.org/x/tools/internal/aliases"
 	"golang.org/x/tools/internal/typeparams"
 )
 
@@ -249,7 +248,7 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
 		// Record the types of operands to MakeInterface, if
 		// non-parameterized, as they are the set of runtime types.
 		t := val.Type()
-		if f.typeparams.Len() == 0 || !f.Prog.parameterized.isParameterized(t) {
+		if f.typeparams.Len() == 0 || !f.Prog.isParameterized(t) {
 			addRuntimeType(f.Prog, t)
 		}
 
@@ -277,18 +276,20 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
 		sliceTo0ArrayPtr
 		convert
 	)
-	classify := func(s, d types.Type) conversionCase {
+	// classify the conversion case of a source type us to a destination type ud.
+	// us and ud are underlying types (not *Named or *Alias)
+	classify := func(us, ud types.Type) conversionCase {
 		// Just a change of type, but not value or representation?
-		if isValuePreserving(s, d) {
+		if isValuePreserving(us, ud) {
 			return changeType
 		}
 
 		// Conversion from slice to array or slice to array pointer?
-		if slice, ok := aliases.Unalias(s).(*types.Slice); ok {
+		if slice, ok := us.(*types.Slice); ok {
 			var arr *types.Array
 			var ptr bool
 			// Conversion from slice to array pointer?
-			switch d := aliases.Unalias(d).(type) {
+			switch d := ud.(type) {
 			case *types.Array:
 				arr = d
 			case *types.Pointer:
@@ -313,8 +314,8 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
 
 		// The only remaining case in well-typed code is a representation-
 		// changing conversion of basic types (possibly with []byte/[]rune).
-		if !isBasic(s) && !isBasic(d) {
-			panic(fmt.Sprintf("in %s: cannot convert term %s (%s [within %s]) to type %s [within %s]", f, val, val.Type(), s, typ, d))
+		if !isBasic(us) && !isBasic(ud) {
+			panic(fmt.Sprintf("in %s: cannot convert term %s (%s [within %s]) to type %s [within %s]", f, val, val.Type(), us, typ, ud))
 		}
 		return convert
 	}
@@ -523,8 +524,8 @@ func emitTailCall(f *Function, call *Call) {
 // value of a field.
 func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos) Value {
 	for _, index := range indices {
-		if st, vptr := deref(v.Type()); vptr {
-			fld := fieldOf(st, index)
+		if isPointerCore(v.Type()) {
+			fld := fieldOf(typeparams.MustDeref(v.Type()), index)
 			instr := &FieldAddr{
 				X:     v,
 				Field: index,
@@ -533,7 +534,7 @@ func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos)
 			instr.setType(types.NewPointer(fld.Type()))
 			v = f.emit(instr)
 			// Load the field's value iff indirectly embedded.
-			if _, fldptr := deref(fld.Type()); fldptr {
+			if isPointerCore(fld.Type()) {
 				v = emitLoad(f, v)
 			}
 		} else {
@@ -557,8 +558,8 @@ func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos)
 // field's value.
 // Ident id is used for position and debug info.
 func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value {
-	if st, vptr := deref(v.Type()); vptr {
-		fld := fieldOf(st, index)
+	if isPointerCore(v.Type()) {
+		fld := fieldOf(typeparams.MustDeref(v.Type()), index)
 		instr := &FieldAddr{
 			X:     v,
 			Field: index,
diff --git a/vendor/golang.org/x/tools/go/ssa/func.go b/vendor/golang.org/x/tools/go/ssa/func.go
index 4d3e39129c516e53f40f1be8a0b29f212fff63ff..f645fa1d8b002be41f90f88bfd5fd1b29ce647e0 100644
--- a/vendor/golang.org/x/tools/go/ssa/func.go
+++ b/vendor/golang.org/x/tools/go/ssa/func.go
@@ -37,7 +37,8 @@ func (f *Function) typeOf(e ast.Expr) types.Type {
 	panic(fmt.Sprintf("no type for %T @ %s", e, f.Prog.Fset.Position(e.Pos())))
 }
 
-// typ is the locally instantiated type of T. T==typ(T) if f is not an instantiation.
+// typ is the locally instantiated type of T.
+// If f is not an instantiation, then f.typ(T)==T.
 func (f *Function) typ(T types.Type) types.Type {
 	return f.subst.typ(T)
 }
@@ -106,6 +107,7 @@ type lblock struct {
 
 // labelledBlock returns the branch target associated with the
 // specified label, creating it if needed.
+// label should be a non-blank identifier (label.Name != "_").
 func (f *Function) labelledBlock(label *ast.Ident) *lblock {
 	obj := f.objectOf(label).(*types.Label)
 	lb := f.lblocks[obj]
diff --git a/vendor/golang.org/x/tools/go/ssa/instantiate.go b/vendor/golang.org/x/tools/go/ssa/instantiate.go
index c155f6736af9a1d3f6c9d259097d5a66ae906aeb..e5e7162a8a21f6ae5c3e304073c3259486e9b9c9 100644
--- a/vendor/golang.org/x/tools/go/ssa/instantiate.go
+++ b/vendor/golang.org/x/tools/go/ssa/instantiate.go
@@ -8,8 +8,6 @@ import (
 	"fmt"
 	"go/types"
 	"sync"
-
-	"golang.org/x/tools/internal/typeparams"
 )
 
 // A generic records information about a generic origin function,
@@ -77,10 +75,10 @@ func createInstance(fn *Function, targs []types.Type, cr *creator) *Function {
 		subst     *subster
 		build     buildFunc
 	)
-	if prog.mode&InstantiateGenerics != 0 && !prog.parameterized.anyParameterized(targs) {
+	if prog.mode&InstantiateGenerics != 0 && !prog.isParameterized(targs...) {
 		synthetic = fmt.Sprintf("instance of %s", fn.Name())
 		if fn.syntax != nil {
-			scope := typeparams.OriginMethod(obj).Scope()
+			scope := obj.Origin().Scope()
 			subst = makeSubster(prog.ctxt, scope, fn.typeparams, targs, false)
 			build = (*builder).buildFromSyntax
 		} else {
@@ -112,3 +110,21 @@ func createInstance(fn *Function, targs []types.Type, cr *creator) *Function {
 	cr.Add(instance)
 	return instance
 }
+
+// isParameterized reports whether any of the specified types contains
+// a free type parameter. It is safe to call concurrently.
+func (prog *Program) isParameterized(ts ...types.Type) bool {
+	prog.hasParamsMu.Lock()
+	defer prog.hasParamsMu.Unlock()
+
+	// TODO(adonovan): profile. If this operation is expensive,
+	// handle the most common but shallow cases such as T, pkg.T,
+	// *T without consulting the cache under the lock.
+
+	for _, t := range ts {
+		if prog.hasParams.Has(t) {
+			return true
+		}
+	}
+	return false
+}
diff --git a/vendor/golang.org/x/tools/go/ssa/methods.go b/vendor/golang.org/x/tools/go/ssa/methods.go
index 5f46a18484cc8eaf9adbd4ff3ca226b3a9e42cbf..58bd45b81462ca6c59c54b88a59562fca2683354 100644
--- a/vendor/golang.org/x/tools/go/ssa/methods.go
+++ b/vendor/golang.org/x/tools/go/ssa/methods.go
@@ -12,7 +12,6 @@ import (
 
 	"golang.org/x/tools/go/types/typeutil"
 	"golang.org/x/tools/internal/aliases"
-	"golang.org/x/tools/internal/typeparams"
 )
 
 // MethodValue returns the Function implementing method sel, building
@@ -33,7 +32,7 @@ func (prog *Program) MethodValue(sel *types.Selection) *Function {
 		return nil // interface method or type parameter
 	}
 
-	if prog.parameterized.isParameterized(T) {
+	if prog.isParameterized(T) {
 		return nil // generic method
 	}
 
@@ -59,10 +58,8 @@ func (prog *Program) MethodValue(sel *types.Selection) *Function {
 		fn, ok := mset.mapping[id]
 		if !ok {
 			obj := sel.Obj().(*types.Func)
-			_, ptrObj := deptr(recvType(obj))
-			_, ptrRecv := deptr(T)
 			needsPromotion := len(sel.Index()) > 1
-			needsIndirection := !ptrObj && ptrRecv
+			needsIndirection := !isPointer(recvType(obj)) && isPointer(T)
 			if needsPromotion || needsIndirection {
 				fn = createWrapper(prog, toSelection(sel), &cr)
 			} else {
@@ -103,7 +100,7 @@ func (prog *Program) objectMethod(obj *types.Func, cr *creator) *Function {
 	}
 
 	// Instantiation of generic?
-	if originObj := typeparams.OriginMethod(obj); originObj != obj {
+	if originObj := obj.Origin(); originObj != obj {
 		origin := prog.objectMethod(originObj, cr)
 		assert(origin.typeparams.Len() > 0, "origin is not generic")
 		targs := receiverTypeArgs(obj)
@@ -211,7 +208,7 @@ func forEachReachable(msets *typeutil.MethodSetCache, T types.Type, f func(types
 
 		switch T := T.(type) {
 		case *aliases.Alias:
-			visit(aliases.Unalias(T), false)
+			visit(aliases.Unalias(T), skip) // emulates the pre-Alias behavior
 
 		case *types.Basic:
 			// nop
diff --git a/vendor/golang.org/x/tools/go/ssa/parameterized.go b/vendor/golang.org/x/tools/go/ssa/parameterized.go
deleted file mode 100644
index 74c541107ef6a5cc89f06e6d344ea94d7c6d5e76..0000000000000000000000000000000000000000
--- a/vendor/golang.org/x/tools/go/ssa/parameterized.go
+++ /dev/null
@@ -1,145 +0,0 @@
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package ssa
-
-import (
-	"go/types"
-	"sync"
-
-	"golang.org/x/tools/internal/aliases"
-	"golang.org/x/tools/internal/typeparams"
-)
-
-// tpWalker walks over types looking for parameterized types.
-//
-// NOTE: Adapted from go/types/infer.go. If that is exported in a future release remove this copy.
-type tpWalker struct {
-	mu   sync.Mutex
-	seen map[types.Type]bool
-}
-
-// isParameterized reports whether t recursively contains a type parameter.
-// Thread-safe.
-func (w *tpWalker) isParameterized(t types.Type) bool {
-	// TODO(adonovan): profile. If this operation is expensive,
-	// handle the most common but shallow cases such as T, pkg.T,
-	// *T without consulting the cache under the lock.
-
-	w.mu.Lock()
-	defer w.mu.Unlock()
-	return w.isParameterizedLocked(t)
-}
-
-// Requires w.mu.
-func (w *tpWalker) isParameterizedLocked(typ types.Type) (res bool) {
-	// NOTE: Adapted from go/types/infer.go. Try to keep in sync.
-
-	// detect cycles
-	if x, ok := w.seen[typ]; ok {
-		return x
-	}
-	w.seen[typ] = false
-	defer func() {
-		w.seen[typ] = res
-	}()
-
-	switch t := typ.(type) {
-	case nil, *types.Basic: // TODO(gri) should nil be handled here?
-		break
-
-	case *aliases.Alias:
-		return w.isParameterizedLocked(aliases.Unalias(t))
-
-	case *types.Array:
-		return w.isParameterizedLocked(t.Elem())
-
-	case *types.Slice:
-		return w.isParameterizedLocked(t.Elem())
-
-	case *types.Struct:
-		for i, n := 0, t.NumFields(); i < n; i++ {
-			if w.isParameterizedLocked(t.Field(i).Type()) {
-				return true
-			}
-		}
-
-	case *types.Pointer:
-		return w.isParameterizedLocked(t.Elem())
-
-	case *types.Tuple:
-		n := t.Len()
-		for i := 0; i < n; i++ {
-			if w.isParameterizedLocked(t.At(i).Type()) {
-				return true
-			}
-		}
-
-	case *types.Signature:
-		// t.tparams may not be nil if we are looking at a signature
-		// of a generic function type (or an interface method) that is
-		// part of the type we're testing. We don't care about these type
-		// parameters.
-		// Similarly, the receiver of a method may declare (rather than
-		// use) type parameters, we don't care about those either.
-		// Thus, we only need to look at the input and result parameters.
-		return w.isParameterizedLocked(t.Params()) || w.isParameterizedLocked(t.Results())
-
-	case *types.Interface:
-		for i, n := 0, t.NumMethods(); i < n; i++ {
-			if w.isParameterizedLocked(t.Method(i).Type()) {
-				return true
-			}
-		}
-		terms, err := typeparams.InterfaceTermSet(t)
-		if err != nil {
-			panic(err)
-		}
-		for _, term := range terms {
-			if w.isParameterizedLocked(term.Type()) {
-				return true
-			}
-		}
-
-	case *types.Map:
-		return w.isParameterizedLocked(t.Key()) || w.isParameterizedLocked(t.Elem())
-
-	case *types.Chan:
-		return w.isParameterizedLocked(t.Elem())
-
-	case *types.Named:
-		args := t.TypeArgs()
-		// TODO(taking): this does not match go/types/infer.go. Check with rfindley.
-		if params := t.TypeParams(); params.Len() > args.Len() {
-			return true
-		}
-		for i, n := 0, args.Len(); i < n; i++ {
-			if w.isParameterizedLocked(args.At(i)) {
-				return true
-			}
-		}
-		return w.isParameterizedLocked(t.Underlying()) // recurse for types local to parameterized functions
-
-	case *types.TypeParam:
-		return true
-
-	default:
-		panic(t) // unreachable
-	}
-
-	return false
-}
-
-// anyParameterized reports whether any element of ts is parameterized.
-// Thread-safe.
-func (w *tpWalker) anyParameterized(ts []types.Type) bool {
-	w.mu.Lock()
-	defer w.mu.Unlock()
-	for _, t := range ts {
-		if w.isParameterizedLocked(t) {
-			return true
-		}
-	}
-	return false
-}
diff --git a/vendor/golang.org/x/tools/go/ssa/source.go b/vendor/golang.org/x/tools/go/ssa/source.go
index 6700305bd9abd0cbf57bcec7db750c1722da3b24..7b71c88d1208a0a28070e271053cfacd0655b3e7 100644
--- a/vendor/golang.org/x/tools/go/ssa/source.go
+++ b/vendor/golang.org/x/tools/go/ssa/source.go
@@ -14,8 +14,6 @@ import (
 	"go/ast"
 	"go/token"
 	"go/types"
-
-	"golang.org/x/tools/internal/typeparams"
 )
 
 // EnclosingFunction returns the function that contains the syntax
@@ -122,7 +120,7 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function {
 				obj := mset.At(i).Obj().(*types.Func)
 				if obj.Pos() == pos {
 					// obj from MethodSet may not be the origin type.
-					m := typeparams.OriginMethod(obj)
+					m := obj.Origin()
 					return pkg.objects[m].(*Function)
 				}
 			}
diff --git a/vendor/golang.org/x/tools/go/ssa/ssa.go b/vendor/golang.org/x/tools/go/ssa/ssa.go
index 30bf4bc67771a08dd913da5fc5751dc4f8a645ee..5ff12d2f572cd94419ec1ce419e015dd66e0bb37 100644
--- a/vendor/golang.org/x/tools/go/ssa/ssa.go
+++ b/vendor/golang.org/x/tools/go/ssa/ssa.go
@@ -33,7 +33,9 @@ type Program struct {
 	methodsMu  sync.Mutex
 	methodSets typeutil.Map // maps type to its concrete *methodSet
 
-	parameterized tpWalker // memoization of whether a type refers to type parameters
+	// memoization of whether a type refers to type parameters
+	hasParamsMu sync.Mutex
+	hasParams   typeparams.Free
 
 	runtimeTypesMu sync.Mutex
 	runtimeTypes   typeutil.Map // set of runtime types (from MakeInterface)
diff --git a/vendor/golang.org/x/tools/go/ssa/subst.go b/vendor/golang.org/x/tools/go/ssa/subst.go
index 9f2f2f300085c5b06d3a28a04be3a7037a540b37..e1b8e198c0363e33188627ed9e4903d0a4cc7612 100644
--- a/vendor/golang.org/x/tools/go/ssa/subst.go
+++ b/vendor/golang.org/x/tools/go/ssa/subst.go
@@ -80,11 +80,7 @@ func (subst *subster) typ(t types.Type) (res types.Type) {
 		subst.cache[t] = res
 	}()
 
-	// fall through if result r will be identical to t, types.Identical(r, t).
 	switch t := t.(type) {
-	case *aliases.Alias:
-		return subst.typ(aliases.Unalias(t))
-
 	case *types.TypeParam:
 		r := subst.replacements[t]
 		assert(r != nil, "type param without replacement encountered")
@@ -140,6 +136,9 @@ func (subst *subster) typ(t types.Type) (res types.Type) {
 	case *types.Interface:
 		return subst.interface_(t)
 
+	case *aliases.Alias:
+		return subst.alias(t)
+
 	case *types.Named:
 		return subst.named(t)
 
@@ -307,6 +306,18 @@ func (subst *subster) interface_(iface *types.Interface) *types.Interface {
 	return types.NewInterfaceType(methods, embeds).Complete()
 }
 
+func (subst *subster) alias(t *aliases.Alias) types.Type {
+	// TODO(go.dev/issues/46477): support TypeParameters once these are available from go/types.
+	u := aliases.Unalias(t)
+	if s := subst.typ(u); s != u {
+		// If there is any change, do not create a new alias.
+		return s
+	}
+	// If there is no change, t did not reach any type parameter.
+	// Keep the Alias.
+	return t
+}
+
 func (subst *subster) named(t *types.Named) types.Type {
 	// A named type may be:
 	// (1) ordinary named type (non-local scope, no type parameters, no type arguments),
diff --git a/vendor/golang.org/x/tools/go/ssa/util.go b/vendor/golang.org/x/tools/go/ssa/util.go
index 4d65259ed9c512a1af6de8ca1bc4e6605b2f1018..314ca2b6f7a8426698a61a777d7407b83ed2cddf 100644
--- a/vendor/golang.org/x/tools/go/ssa/util.go
+++ b/vendor/golang.org/x/tools/go/ssa/util.go
@@ -51,8 +51,9 @@ func isNonTypeParamInterface(t types.Type) bool {
 }
 
 // isBasic reports whether t is a basic type.
+// t is assumed to be an Underlying type (not Named or Alias).
 func isBasic(t types.Type) bool {
-	_, ok := aliases.Unalias(t).(*types.Basic)
+	_, ok := t.(*types.Basic)
 	return ok
 }
 
@@ -100,24 +101,22 @@ func isBasicConvTypes(tset termList) bool {
 	return all && basics >= 1 && tset.Len()-basics <= 1
 }
 
-// deptr returns a pointer's element type and true; otherwise it returns (typ, false).
-// This function is oblivious to core types and is not suitable for generics.
+// isPointer reports whether t's underlying type is a pointer.
+func isPointer(t types.Type) bool {
+	return is[*types.Pointer](t.Underlying())
+}
+
+// isPointerCore reports whether t's core type is a pointer.
 //
-// TODO: Deprecate this function once all usages have been audited.
-func deptr(typ types.Type) (types.Type, bool) {
-	if p, ok := typ.Underlying().(*types.Pointer); ok {
-		return p.Elem(), true
-	}
-	return typ, false
+// (Most pointer manipulation is related to receivers, in which case
+// isPointer is appropriate; such callers can use isPointer(t).)
+func isPointerCore(t types.Type) bool {
+	return is[*types.Pointer](typeparams.CoreType(t))
 }
 
-// deref returns the element type of a type with a pointer core type and true;
-// otherwise it returns (typ, false).
-func deref(typ types.Type) (types.Type, bool) {
-	if p, ok := typeparams.CoreType(typ).(*types.Pointer); ok {
-		return p.Elem(), true
-	}
-	return typ, false
+func is[T any](x any) bool {
+	_, ok := x.(T)
+	return ok
 }
 
 // recvType returns the receiver type of method obj.
@@ -263,13 +262,40 @@ func (c *canonizer) List(ts []types.Type) *typeList {
 		return nil
 	}
 
+	unaliasAll := func(ts []types.Type) []types.Type {
+		// Is there some top level alias?
+		var found bool
+		for _, t := range ts {
+			if _, ok := t.(*aliases.Alias); ok {
+				found = true
+				break
+			}
+		}
+		if !found {
+			return ts // no top level alias
+		}
+
+		cp := make([]types.Type, len(ts)) // copy with top level aliases removed.
+		for i, t := range ts {
+			cp[i] = aliases.Unalias(t)
+		}
+		return cp
+	}
+	l := unaliasAll(ts)
+
 	c.mu.Lock()
 	defer c.mu.Unlock()
-	return c.lists.rep(ts)
+	return c.lists.rep(l)
 }
 
 // Type returns a canonical representative of type T.
+// Removes top-level aliases.
+//
+// For performance reasons, the canonical instance is order-dependent,
+// and may contain deeply nested aliases.
 func (c *canonizer) Type(T types.Type) types.Type {
+	T = aliases.Unalias(T) // remove the top level alias.
+
 	c.mu.Lock()
 	defer c.mu.Unlock()
 
diff --git a/vendor/golang.org/x/tools/go/ssa/wrappers.go b/vendor/golang.org/x/tools/go/ssa/wrappers.go
index 7c7ee4099e3b804a50fa35e8f2bf63ca3231d398..b25c4c789799d24b76211c600f2e50b314c60a30 100644
--- a/vendor/golang.org/x/tools/go/ssa/wrappers.go
+++ b/vendor/golang.org/x/tools/go/ssa/wrappers.go
@@ -24,6 +24,8 @@ import (
 
 	"go/token"
 	"go/types"
+
+	"golang.org/x/tools/internal/typeparams"
 )
 
 // -- wrappers -----------------------------------------------------------
@@ -97,14 +99,12 @@ func (b *builder) buildWrapper(fn *Function) {
 	indices := fn.method.index
 
 	var v Value = fn.Locals[0] // spilled receiver
-	srdt, ptrRecv := deptr(fn.method.recv)
-	if ptrRecv {
+	if isPointer(fn.method.recv) {
 		v = emitLoad(fn, v)
 
 		// For simple indirection wrappers, perform an informative nil-check:
 		// "value method (T).f called using nil *T pointer"
-		_, ptrObj := deptr(recvType(fn.object))
-		if len(indices) == 1 && !ptrObj {
+		if len(indices) == 1 && !isPointer(recvType(fn.object)) {
 			var c Call
 			c.Call.Value = &Builtin{
 				name: "ssa:wrapnilchk",
@@ -114,7 +114,7 @@ func (b *builder) buildWrapper(fn *Function) {
 			}
 			c.Call.Args = []Value{
 				v,
-				stringConst(srdt.String()),
+				stringConst(typeparams.MustDeref(fn.method.recv).String()),
 				stringConst(fn.method.obj.Name()),
 			}
 			c.setType(v.Type())
@@ -138,7 +138,7 @@ func (b *builder) buildWrapper(fn *Function) {
 
 	var c Call
 	if r := recvType(fn.object); !types.IsInterface(r) { // concrete method
-		if _, ptrObj := deptr(r); !ptrObj {
+		if !isPointer(r) {
 			v = emitLoad(fn, v)
 		}
 		c.Call.Value = fn.Prog.objectMethod(fn.object, b.created)
diff --git a/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go b/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go
index 6a57ce3b13695944ca5963d40c706761bada6468..a2386c347a25020511b08c93d30c0b554bd1cb49 100644
--- a/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go
+++ b/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go
@@ -30,7 +30,6 @@ import (
 	"strings"
 
 	"golang.org/x/tools/internal/aliases"
-	"golang.org/x/tools/internal/typeparams"
 	"golang.org/x/tools/internal/typesinternal"
 )
 
@@ -395,7 +394,7 @@ func (enc *Encoder) concreteMethod(meth *types.Func) (Path, bool) {
 	// of objectpath will only be giving us origin methods, anyway, as referring
 	// to instantiated methods is usually not useful.
 
-	if typeparams.OriginMethod(meth) != meth {
+	if meth.Origin() != meth {
 		return "", false
 	}
 
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/map.go b/vendor/golang.org/x/tools/go/types/typeutil/map.go
index e154be0bd608375f014cbf13fd19937fb5aa9bd1..a92f80dd2da99cb536b339bece9f43b5f031d1ac 100644
--- a/vendor/golang.org/x/tools/go/types/typeutil/map.go
+++ b/vendor/golang.org/x/tools/go/types/typeutil/map.go
@@ -3,7 +3,7 @@
 // license that can be found in the LICENSE file.
 
 // Package typeutil defines various utilities for types, such as Map,
-// a mapping from types.Type to interface{} values.
+// a mapping from types.Type to any values.
 package typeutil // import "golang.org/x/tools/go/types/typeutil"
 
 import (
@@ -17,7 +17,7 @@ import (
 )
 
 // Map is a hash-table-based mapping from types (types.Type) to
-// arbitrary interface{} values.  The concrete types that implement
+// arbitrary any values.  The concrete types that implement
 // the Type interface are pointers.  Since they are not canonicalized,
 // == cannot be used to check for equivalence, and thus we cannot
 // simply use a Go map.
@@ -34,7 +34,7 @@ type Map struct {
 // entry is an entry (key/value association) in a hash bucket.
 type entry struct {
 	key   types.Type
-	value interface{}
+	value any
 }
 
 // SetHasher sets the hasher used by Map.
@@ -82,7 +82,7 @@ func (m *Map) Delete(key types.Type) bool {
 
 // At returns the map entry for the given key.
 // The result is nil if the entry is not present.
-func (m *Map) At(key types.Type) interface{} {
+func (m *Map) At(key types.Type) any {
 	if m != nil && m.table != nil {
 		for _, e := range m.table[m.hasher.Hash(key)] {
 			if e.key != nil && types.Identical(key, e.key) {
@@ -95,7 +95,7 @@ func (m *Map) At(key types.Type) interface{} {
 
 // Set sets the map entry for key to val,
 // and returns the previous entry, if any.
-func (m *Map) Set(key types.Type, value interface{}) (prev interface{}) {
+func (m *Map) Set(key types.Type, value any) (prev any) {
 	if m.table != nil {
 		hash := m.hasher.Hash(key)
 		bucket := m.table[hash]
@@ -142,7 +142,7 @@ func (m *Map) Len() int {
 // f will not be invoked for it, but if f inserts a map entry that
 // Iterate has not yet reached, whether or not f will be invoked for
 // it is unspecified.
-func (m *Map) Iterate(f func(key types.Type, value interface{})) {
+func (m *Map) Iterate(f func(key types.Type, value any)) {
 	if m != nil {
 		for _, bucket := range m.table {
 			for _, e := range bucket {
@@ -158,7 +158,7 @@ func (m *Map) Iterate(f func(key types.Type, value interface{})) {
 // The order is unspecified.
 func (m *Map) Keys() []types.Type {
 	keys := make([]types.Type, 0, m.Len())
-	m.Iterate(func(key types.Type, _ interface{}) {
+	m.Iterate(func(key types.Type, _ any) {
 		keys = append(keys, key)
 	})
 	return keys
@@ -171,7 +171,7 @@ func (m *Map) toString(values bool) string {
 	var buf bytes.Buffer
 	fmt.Fprint(&buf, "{")
 	sep := ""
-	m.Iterate(func(key types.Type, value interface{}) {
+	m.Iterate(func(key types.Type, value any) {
 		fmt.Fprint(&buf, sep)
 		sep = ", "
 		fmt.Fprint(&buf, key)
@@ -209,7 +209,7 @@ type Hasher struct {
 	memo map[types.Type]uint32
 
 	// ptrMap records pointer identity.
-	ptrMap map[interface{}]uint32
+	ptrMap map[any]uint32
 
 	// sigTParams holds type parameters from the signature being hashed.
 	// Signatures are considered identical modulo renaming of type parameters, so
@@ -227,7 +227,7 @@ type Hasher struct {
 func MakeHasher() Hasher {
 	return Hasher{
 		memo:       make(map[types.Type]uint32),
-		ptrMap:     make(map[interface{}]uint32),
+		ptrMap:     make(map[any]uint32),
 		sigTParams: nil,
 	}
 }
@@ -261,7 +261,7 @@ func (h Hasher) hashFor(t types.Type) uint32 {
 		return uint32(t.Kind())
 
 	case *aliases.Alias:
-		return h.Hash(t.Underlying())
+		return h.Hash(aliases.Unalias(t))
 
 	case *types.Array:
 		return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem())
@@ -432,7 +432,7 @@ func (h Hasher) hashTypeParam(t *types.TypeParam) uint32 {
 
 // hashPtr hashes the pointer identity of ptr. It uses h.ptrMap to ensure that
 // pointers values are not dependent on the GC.
-func (h Hasher) hashPtr(ptr interface{}) uint32 {
+func (h Hasher) hashPtr(ptr any) uint32 {
 	if hash, ok := h.ptrMap[ptr]; ok {
 		return hash
 	}
@@ -462,7 +462,7 @@ func (h Hasher) shallowHash(t types.Type) uint32 {
 	// so there's no need to optimize anything else.
 	switch t := t.(type) {
 	case *aliases.Alias:
-		return h.shallowHash(t.Underlying())
+		return h.shallowHash(aliases.Unalias(t))
 
 	case *types.Signature:
 		var hash uint32 = 604171
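
For context on the API whose signatures are migrating from interface{} to any above, here is a brief usage sketch of the exported typeutil.Map; the example keys and values are illustrative.

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	var m typeutil.Map // the zero value is ready to use
	m.Set(types.Typ[types.Int], "an int")
	m.Set(types.NewSlice(types.Typ[types.Int]), "a slice of int")

	// Lookup is by structural identity (types.Identical), so a freshly
	// constructed, pointer-distinct *types.Slice still finds its entry.
	fmt.Println(m.At(types.NewSlice(types.Typ[types.Int]))) // a slice of int
	fmt.Println(m.Len())                                    // 2
}
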
diff --git a/vendor/golang.org/x/tools/internal/aliases/aliases.go b/vendor/golang.org/x/tools/internal/aliases/aliases.go
index f89112c8ee573dc119438244d6d5784abf63e3e1..c24c2eee457f2e2dc1fcf6daa337bed2ed4cba9c 100644
--- a/vendor/golang.org/x/tools/internal/aliases/aliases.go
+++ b/vendor/golang.org/x/tools/internal/aliases/aliases.go
@@ -16,10 +16,14 @@ import (
 // NewAlias creates a new TypeName in Package pkg that
 // is an alias for the type rhs.
 //
-// When GoVersion>=1.22 and GODEBUG=gotypesalias=1,
-// the Type() of the return value is a *types.Alias.
-func NewAlias(pos token.Pos, pkg *types.Package, name string, rhs types.Type) *types.TypeName {
-	if enabled() {
+// The enabled parameter determines whether the resulting [TypeName]'s
+// type is an [types.Alias]. Its value must be the result of a call to
+// [Enabled], which computes the effective value of
+// GODEBUG=gotypesalias=... by invoking the type checker. The Enabled
+// function is expensive and should be called once per task (e.g.
+// package import), not once per call to NewAlias.
+func NewAlias(enabled bool, pos token.Pos, pkg *types.Package, name string, rhs types.Type) *types.TypeName {
+	if enabled {
 		tname := types.NewTypeName(pos, pkg, name, nil)
 		newAlias(tname, rhs)
 		return tname
diff --git a/vendor/golang.org/x/tools/internal/aliases/aliases_go121.go b/vendor/golang.org/x/tools/internal/aliases/aliases_go121.go
index 1872b56ff8fc2b6103d47789e9e0aeb713d0e3ee..c027b9f315f6803c1b72cfbf7d0db47724e4eeee 100644
--- a/vendor/golang.org/x/tools/internal/aliases/aliases_go121.go
+++ b/vendor/golang.org/x/tools/internal/aliases/aliases_go121.go
@@ -15,16 +15,17 @@ import (
 // It will never be created by go/types.
 type Alias struct{}
 
-func (*Alias) String() string { panic("unreachable") }
-
+func (*Alias) String() string         { panic("unreachable") }
 func (*Alias) Underlying() types.Type { panic("unreachable") }
-
-func (*Alias) Obj() *types.TypeName { panic("unreachable") }
+func (*Alias) Obj() *types.TypeName   { panic("unreachable") }
+func Rhs(alias *Alias) types.Type     { panic("unreachable") }
 
 // Unalias returns the type t for go <=1.21.
 func Unalias(t types.Type) types.Type { return t }
 
-// Always false for go <=1.21. Ignores GODEBUG.
-func enabled() bool { return false }
-
 func newAlias(name *types.TypeName, rhs types.Type) *Alias { panic("unreachable") }
+
+// Enabled reports whether [NewAlias] should create [types.Alias] types.
+//
+// Before go1.22, this function always returns false.
+func Enabled() bool { return false }
diff --git a/vendor/golang.org/x/tools/internal/aliases/aliases_go122.go b/vendor/golang.org/x/tools/internal/aliases/aliases_go122.go
index 8b92116284d0b37bf289a65abacecfdff5fc492f..b32995484190af15e881214c8c94eb1154436095 100644
--- a/vendor/golang.org/x/tools/internal/aliases/aliases_go122.go
+++ b/vendor/golang.org/x/tools/internal/aliases/aliases_go122.go
@@ -12,14 +12,22 @@ import (
 	"go/parser"
 	"go/token"
 	"go/types"
-	"os"
-	"strings"
-	"sync"
 )
 
 // Alias is an alias of types.Alias.
 type Alias = types.Alias
 
+// Rhs returns the type on the right-hand side of the alias declaration.
+func Rhs(alias *Alias) types.Type {
+	if alias, ok := any(alias).(interface{ Rhs() types.Type }); ok {
+		return alias.Rhs() // go1.23+
+	}
+
+	// go1.22's Alias didn't have the Rhs method,
+	// so Unalias is the best we can do.
+	return Unalias(alias)
+}
+
 // Unalias is a wrapper of types.Unalias.
 func Unalias(t types.Type) types.Type { return types.Unalias(t) }
 
@@ -33,40 +41,23 @@ func newAlias(tname *types.TypeName, rhs types.Type) *Alias {
 	return a
 }
 
-// enabled returns true when types.Aliases are enabled.
-func enabled() bool {
-	// Use the gotypesalias value in GODEBUG if set.
-	godebug := os.Getenv("GODEBUG")
-	value := -1 // last set value.
-	for _, f := range strings.Split(godebug, ",") {
-		switch f {
-		case "gotypesalias=1":
-			value = 1
-		case "gotypesalias=0":
-			value = 0
-		}
-	}
-	switch value {
-	case 0:
-		return false
-	case 1:
-		return true
-	default:
-		return aliasesDefault()
-	}
-}
-
-// aliasesDefault reports if aliases are enabled by default.
-func aliasesDefault() bool {
-	// Dynamically check if Aliases will be produced from go/types.
-	aliasesDefaultOnce.Do(func() {
-		fset := token.NewFileSet()
-		f, _ := parser.ParseFile(fset, "a.go", "package p; type A = int", 0)
-		pkg, _ := new(types.Config).Check("p", fset, []*ast.File{f}, nil)
-		_, gotypesaliasDefault = pkg.Scope().Lookup("A").Type().(*types.Alias)
-	})
-	return gotypesaliasDefault
+// Enabled reports whether [NewAlias] should create [types.Alias] types.
+//
+// This function is expensive! Call it sparingly.
+func Enabled() bool {
+	// The only reliable way to compute the answer is to invoke go/types.
+	// We don't parse the GODEBUG environment variable, because
+	// (a) it's tricky to do so in a manner that is consistent
+	//     with the godebug package; in particular, a simple
+	//     substring check is not good enough. The value is a
+	//     rightmost-wins list of options. But more importantly:
+	// (b) it is impossible to detect changes to the effective
+	//     setting caused by os.Setenv("GODEBUG"), as happens in
+	//     many tests. Therefore any attempt to cache the result
+	//     is just incorrect.
+	fset := token.NewFileSet()
+	f, _ := parser.ParseFile(fset, "a.go", "package p; type A = int", 0)
+	pkg, _ := new(types.Config).Check("p", fset, []*ast.File{f}, nil)
+	_, enabled := pkg.Scope().Lookup("A").Type().(*types.Alias)
+	return enabled
 }
-
-var gotypesaliasDefault bool
-var aliasesDefaultOnce sync.Once
diff --git a/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go b/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go
index c3022a286255d326ec0cd70f9c60f8d49011c6c9..2c406ded0c9df27bd59a2f8e7011f6ffd5579134 100644
--- a/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go
+++ b/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go
@@ -12,8 +12,10 @@ import (
 	"go/ast"
 	"go/token"
 	"go/types"
+	"os"
 	"strconv"
 
+	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/internal/aliases"
 )
 
@@ -32,22 +34,22 @@ func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos
 func ZeroValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 	// TODO(adonovan): think about generics, and also generic aliases.
 	under := aliases.Unalias(typ)
-	// Don't call Underlying unconditionally: although it removed
+	// Don't call Underlying unconditionally: although it removes
 	// Named and Alias, it also removes TypeParam.
-	if n, ok := typ.(*types.Named); ok {
+	if n, ok := under.(*types.Named); ok {
 		under = n.Underlying()
 	}
-	switch u := under.(type) {
+	switch under := under.(type) {
 	case *types.Basic:
 		switch {
-		case u.Info()&types.IsNumeric != 0:
+		case under.Info()&types.IsNumeric != 0:
 			return &ast.BasicLit{Kind: token.INT, Value: "0"}
-		case u.Info()&types.IsBoolean != 0:
+		case under.Info()&types.IsBoolean != 0:
 			return &ast.Ident{Name: "false"}
-		case u.Info()&types.IsString != 0:
+		case under.Info()&types.IsString != 0:
 			return &ast.BasicLit{Kind: token.STRING, Value: `""`}
 		default:
-			panic(fmt.Sprintf("unknown basic type %v", u))
+			panic(fmt.Sprintf("unknown basic type %v", under))
 		}
 	case *types.Chan, *types.Interface, *types.Map, *types.Pointer, *types.Signature, *types.Slice, *types.Array:
 		return ast.NewIdent("nil")
@@ -178,7 +180,7 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 				List: returns,
 			},
 		}
-	case *types.Named:
+	case interface{ Obj() *types.TypeName }: // *types.{Alias,Named,TypeParam}
 		if t.Obj().Pkg() == nil {
 			return ast.NewIdent(t.Obj().Name())
 		}
@@ -393,3 +395,38 @@ func equivalentTypes(want, got types.Type) bool {
 	}
 	return types.AssignableTo(want, got)
 }
+
+// MakeReadFile returns a simple implementation of the Pass.ReadFile function.
+func MakeReadFile(pass *analysis.Pass) func(filename string) ([]byte, error) {
+	return func(filename string) ([]byte, error) {
+		if err := checkReadable(pass, filename); err != nil {
+			return nil, err
+		}
+		return os.ReadFile(filename)
+	}
+}
+
+// checkReadable enforces the access policy defined by the ReadFile field of [analysis.Pass].
+func checkReadable(pass *analysis.Pass, filename string) error {
+	if slicesContains(pass.OtherFiles, filename) ||
+		slicesContains(pass.IgnoredFiles, filename) {
+		return nil
+	}
+	for _, f := range pass.Files {
+		// TODO(adonovan): use go1.20 f.FileStart
+		if pass.Fset.File(f.Pos()).Name() == filename {
+			return nil
+		}
+	}
+	return fmt.Errorf("Pass.ReadFile: %s is not among OtherFiles, IgnoredFiles, or names of Files", filename)
+}
+
+// TODO(adonovan): use go1.21 slices.Contains.
+func slicesContains[S ~[]E, E comparable](slice S, x E) bool {
+	for _, elem := range slice {
+		if elem == x {
+			return true
+		}
+	}
+	return false
+}
diff --git a/vendor/golang.org/x/tools/internal/event/tag/tag.go b/vendor/golang.org/x/tools/internal/event/tag/tag.go
deleted file mode 100644
index 581b26c2041fe63e8b4950ae3b6868bb3cc31575..0000000000000000000000000000000000000000
--- a/vendor/golang.org/x/tools/internal/event/tag/tag.go
+++ /dev/null
@@ -1,59 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package tag provides the labels used for telemetry throughout gopls.
-package tag
-
-import (
-	"golang.org/x/tools/internal/event/keys"
-)
-
-var (
-	// create the label keys we use
-	Method        = keys.NewString("method", "")
-	StatusCode    = keys.NewString("status.code", "")
-	StatusMessage = keys.NewString("status.message", "")
-	RPCID         = keys.NewString("id", "")
-	RPCDirection  = keys.NewString("direction", "")
-	File          = keys.NewString("file", "")
-	Directory     = keys.New("directory", "")
-	URI           = keys.New("URI", "")
-	Package       = keys.NewString("package", "") // sorted comma-separated list of Package IDs
-	PackagePath   = keys.NewString("package_path", "")
-	Query         = keys.New("query", "")
-	Snapshot      = keys.NewUInt64("snapshot", "")
-	Operation     = keys.NewString("operation", "")
-
-	Position     = keys.New("position", "")
-	Category     = keys.NewString("category", "")
-	PackageCount = keys.NewInt("packages", "")
-	Files        = keys.New("files", "")
-	Port         = keys.NewInt("port", "")
-	Type         = keys.New("type", "")
-	HoverKind    = keys.NewString("hoverkind", "")
-
-	NewServer = keys.NewString("new_server", "A new server was added")
-	EndServer = keys.NewString("end_server", "A server was shut down")
-
-	ServerID     = keys.NewString("server", "The server ID an event is related to")
-	Logfile      = keys.NewString("logfile", "")
-	DebugAddress = keys.NewString("debug_address", "")
-	GoplsPath    = keys.NewString("gopls_path", "")
-	ClientID     = keys.NewString("client_id", "")
-
-	Level = keys.NewInt("level", "The logging level")
-)
-
-var (
-	// create the stats we measure
-	Started       = keys.NewInt64("started", "Count of started RPCs.")
-	ReceivedBytes = keys.NewInt64("received_bytes", "Bytes received.")            //, unit.Bytes)
-	SentBytes     = keys.NewInt64("sent_bytes", "Bytes sent.")                    //, unit.Bytes)
-	Latency       = keys.NewFloat64("latency_ms", "Elapsed time in milliseconds") //, unit.Milliseconds)
-)
-
-const (
-	Inbound  = "in"
-	Outbound = "out"
-)
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/iexport.go b/vendor/golang.org/x/tools/internal/gcimporter/iexport.go
index 638fc1d3b86ae99bde148132a038f10691e79be0..deeb67f315af817906cd2cc09fd80351ec12207a 100644
--- a/vendor/golang.org/x/tools/internal/gcimporter/iexport.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/iexport.go
@@ -464,7 +464,7 @@ func (p *iexporter) doDecl(obj types.Object) {
 
 	switch obj := obj.(type) {
 	case *types.Var:
-		w.tag('V')
+		w.tag(varTag)
 		w.pos(obj.Pos())
 		w.typ(obj.Type(), obj.Pkg())
 
@@ -482,9 +482,9 @@ func (p *iexporter) doDecl(obj types.Object) {
 
 		// Function.
 		if sig.TypeParams().Len() == 0 {
-			w.tag('F')
+			w.tag(funcTag)
 		} else {
-			w.tag('G')
+			w.tag(genericFuncTag)
 		}
 		w.pos(obj.Pos())
 		// The tparam list of the function type is the declaration of the type
@@ -500,7 +500,7 @@ func (p *iexporter) doDecl(obj types.Object) {
 		w.signature(sig)
 
 	case *types.Const:
-		w.tag('C')
+		w.tag(constTag)
 		w.pos(obj.Pos())
 		w.value(obj.Type(), obj.Val())
 
@@ -508,7 +508,7 @@ func (p *iexporter) doDecl(obj types.Object) {
 		t := obj.Type()
 
 		if tparam, ok := aliases.Unalias(t).(*types.TypeParam); ok {
-			w.tag('P')
+			w.tag(typeParamTag)
 			w.pos(obj.Pos())
 			constraint := tparam.Constraint()
 			if p.version >= iexportVersionGo1_18 {
@@ -523,8 +523,13 @@ func (p *iexporter) doDecl(obj types.Object) {
 		}
 
 		if obj.IsAlias() {
-			w.tag('A')
+			w.tag(aliasTag)
 			w.pos(obj.Pos())
+			if alias, ok := t.(*aliases.Alias); ok {
+				// Preserve materialized aliases,
+				// even of non-exported types.
+				t = aliases.Rhs(alias)
+			}
 			w.typ(t, obj.Pkg())
 			break
 		}
@@ -536,9 +541,9 @@ func (p *iexporter) doDecl(obj types.Object) {
 		}
 
 		if named.TypeParams().Len() == 0 {
-			w.tag('T')
+			w.tag(typeTag)
 		} else {
-			w.tag('U')
+			w.tag(genericTypeTag)
 		}
 		w.pos(obj.Pos())
 
@@ -548,7 +553,7 @@ func (p *iexporter) doDecl(obj types.Object) {
 			w.tparamList(obj.Name(), named.TypeParams(), obj.Pkg())
 		}
 
-		underlying := obj.Type().Underlying()
+		underlying := named.Underlying()
 		w.typ(underlying, obj.Pkg())
 
 		if types.IsInterface(t) {
@@ -739,7 +744,10 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
 		}()
 	}
 	switch t := t.(type) {
-	// TODO(adonovan): support types.Alias.
+	case *aliases.Alias:
+		// TODO(adonovan): support parameterized aliases, following *types.Named.
+		w.startType(aliasType)
+		w.qualifiedType(t.Obj())
 
 	case *types.Named:
 		if targs := t.TypeArgs(); targs.Len() > 0 {
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/iimport.go b/vendor/golang.org/x/tools/internal/gcimporter/iimport.go
index 4d50eb8e58780b568646a12b5c105f402c8fa055..136aa03653ccededbf5097fa25938018fa165ca2 100644
--- a/vendor/golang.org/x/tools/internal/gcimporter/iimport.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/iimport.go
@@ -80,6 +80,20 @@ const (
 	typeParamType
 	instanceType
 	unionType
+	aliasType
+)
+
+// Object tags
+const (
+	varTag          = 'V'
+	funcTag         = 'F'
+	genericFuncTag  = 'G'
+	constTag        = 'C'
+	aliasTag        = 'A'
+	genericAliasTag = 'B'
+	typeParamTag    = 'P'
+	typeTag         = 'T'
+	genericTypeTag  = 'U'
 )
 
 // IImportData imports a package from the serialized package data
@@ -196,6 +210,7 @@ func iimportCommon(fset *token.FileSet, getPackages GetPackagesFunc, data []byte
 	p := iimporter{
 		version: int(version),
 		ipath:   path,
+		aliases: aliases.Enabled(),
 		shallow: shallow,
 		reportf: reportf,
 
@@ -324,7 +339,7 @@ func iimportCommon(fset *token.FileSet, getPackages GetPackagesFunc, data []byte
 	}
 
 	// SetConstraint can't be called if the constraint type is not yet complete.
-	// When type params are created in the 'P' case of (*importReader).obj(),
+	// When type params are created in the typeParamTag case of (*importReader).obj(),
 	// the associated constraint type may not be complete due to recursion.
 	// Therefore, we defer calling SetConstraint there, and call it here instead
 	// after all types are complete.
@@ -355,6 +370,7 @@ type iimporter struct {
 	version int
 	ipath   string
 
+	aliases bool
 	shallow bool
 	reportf ReportFunc // if non-nil, used to report bugs
 
@@ -546,25 +562,29 @@ func (r *importReader) obj(name string) {
 	pos := r.pos()
 
 	switch tag {
-	case 'A':
+	case aliasTag:
 		typ := r.typ()
-
-		r.declare(types.NewTypeName(pos, r.currPkg, name, typ))
-
-	case 'C':
+		// TODO(adonovan): support generic aliases:
+		// if tag == genericAliasTag {
+		// 	tparams := r.tparamList()
+		// 	alias.SetTypeParams(tparams)
+		// }
+		r.declare(aliases.NewAlias(r.p.aliases, pos, r.currPkg, name, typ))
+
+	case constTag:
 		typ, val := r.value()
 
 		r.declare(types.NewConst(pos, r.currPkg, name, typ, val))
 
-	case 'F', 'G':
+	case funcTag, genericFuncTag:
 		var tparams []*types.TypeParam
-		if tag == 'G' {
+		if tag == genericFuncTag {
 			tparams = r.tparamList()
 		}
 		sig := r.signature(nil, nil, tparams)
 		r.declare(types.NewFunc(pos, r.currPkg, name, sig))
 
-	case 'T', 'U':
+	case typeTag, genericTypeTag:
 		// Types can be recursive. We need to setup a stub
 		// declaration before recursing.
 		obj := types.NewTypeName(pos, r.currPkg, name, nil)
@@ -572,7 +592,7 @@ func (r *importReader) obj(name string) {
 		// Declare obj before calling r.tparamList, so the new type name is recognized
 		// if used in the constraint of one of its own typeparams (see #48280).
 		r.declare(obj)
-		if tag == 'U' {
+		if tag == genericTypeTag {
 			tparams := r.tparamList()
 			named.SetTypeParams(tparams)
 		}
@@ -604,7 +624,7 @@ func (r *importReader) obj(name string) {
 			}
 		}
 
-	case 'P':
+	case typeParamTag:
 		// We need to "declare" a typeparam in order to have a name that
 		// can be referenced recursively (if needed) in the type param's
 		// bound.
@@ -637,7 +657,7 @@ func (r *importReader) obj(name string) {
 		// completely set up all types in ImportData.
 		r.p.later = append(r.p.later, setConstraintArgs{t: t, constraint: constraint})
 
-	case 'V':
+	case varTag:
 		typ := r.typ()
 
 		r.declare(types.NewVar(pos, r.currPkg, name, typ))
@@ -854,7 +874,7 @@ func (r *importReader) doType(base *types.Named) (res types.Type) {
 		errorf("unexpected kind tag in %q: %v", r.p.ipath, k)
 		return nil
 
-	case definedType:
+	case aliasType, definedType:
 		pkg, name := r.qualifiedIdent()
 		r.p.doDecl(pkg, name)
 		return pkg.Scope().Lookup(name).(*types.TypeName).Type()
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go b/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go
index f4edc46ab74b7f6251dffedf729cb52fc031df5a..2c0770688771a0e9e6fc05758089dd3a21075cb4 100644
--- a/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go
@@ -26,6 +26,7 @@ type pkgReader struct {
 
 	ctxt    *types.Context
 	imports map[string]*types.Package // previously imported packages, indexed by path
+	aliases bool                      // create types.Alias nodes
 
 	// lazily initialized arrays corresponding to the unified IR
 	// PosBase, Pkg, and Type sections, respectively.
@@ -99,6 +100,7 @@ func readUnifiedPackage(fset *token.FileSet, ctxt *types.Context, imports map[st
 
 		ctxt:    ctxt,
 		imports: imports,
+		aliases: aliases.Enabled(),
 
 		posBases: make([]string, input.NumElems(pkgbits.RelocPosBase)),
 		pkgs:     make([]*types.Package, input.NumElems(pkgbits.RelocPkg)),
@@ -524,7 +526,7 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
 		case pkgbits.ObjAlias:
 			pos := r.pos()
 			typ := r.typ()
-			declare(types.NewTypeName(pos, objPkg, objName, typ))
+			declare(aliases.NewAlias(r.p.aliases, pos, objPkg, objName, typ))
 
 		case pkgbits.ObjConst:
 			pos := r.pos()
diff --git a/vendor/golang.org/x/tools/internal/gocommand/invoke.go b/vendor/golang.org/x/tools/internal/gocommand/invoke.go
index 55312522dc2d61ac5079bffdf62f83c02727fffb..eb7a8282f9e7d012c100097d89325eb29fb46308 100644
--- a/vendor/golang.org/x/tools/internal/gocommand/invoke.go
+++ b/vendor/golang.org/x/tools/internal/gocommand/invoke.go
@@ -25,7 +25,6 @@ import (
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/event/keys"
 	"golang.org/x/tools/internal/event/label"
-	"golang.org/x/tools/internal/event/tag"
 )
 
 // An Runner will run go command invocations and serialize
@@ -55,11 +54,14 @@ func (runner *Runner) initialize() {
 // 1.14: go: updating go.mod: existing contents have changed since last read
 var modConcurrencyError = regexp.MustCompile(`go:.*go.mod.*contents have changed`)
 
-// verb is an event label for the go command verb.
-var verb = keys.NewString("verb", "go command verb")
+// event keys for go command invocations
+var (
+	verb      = keys.NewString("verb", "go command verb")
+	directory = keys.NewString("directory", "")
+)
 
 func invLabels(inv Invocation) []label.Label {
-	return []label.Label{verb.Of(inv.Verb), tag.Directory.Of(inv.WorkingDir)}
+	return []label.Label{verb.Of(inv.Verb), directory.Of(inv.WorkingDir)}
 }
 
 // Run is a convenience wrapper around RunRaw.
@@ -158,12 +160,15 @@ type Invocation struct {
 	BuildFlags []string
 
 	// If ModFlag is set, the go command is invoked with -mod=ModFlag.
+	// TODO(rfindley): remove, in favor of Args.
 	ModFlag string
 
 	// If ModFile is set, the go command is invoked with -modfile=ModFile.
+	// TODO(rfindley): remove, in favor of Args.
 	ModFile string
 
 	// If Overlay is set, the go command is invoked with -overlay=Overlay.
+	// TODO(rfindley): remove, in favor of Args.
 	Overlay string
 
 	// If CleanEnv is set, the invocation will run only with the environment
diff --git a/vendor/golang.org/x/tools/internal/gocommand/vendor.go b/vendor/golang.org/x/tools/internal/gocommand/vendor.go
index 2d3d408c0bed3f2ce5fd6fe52ec280b3e3848e4e..e38d1fb48888c05e1353bae7961e8ab2dd21ba2c 100644
--- a/vendor/golang.org/x/tools/internal/gocommand/vendor.go
+++ b/vendor/golang.org/x/tools/internal/gocommand/vendor.go
@@ -107,3 +107,57 @@ func getMainModuleAnd114(ctx context.Context, inv Invocation, r *Runner) (*Modul
 	}
 	return mod, lines[4] == "go1.14", nil
 }
+
+// WorkspaceVendorEnabled reports whether workspace vendoring is enabled. It takes a *Runner to execute Go commands
+// with the supplied context.Context and Invocation. The Invocation can contain pre-defined fields,
+// of which only Verb and Args are modified to run the appropriate Go command.
+// Inspired by setDefaultBuildMod in modload/init.go
+func WorkspaceVendorEnabled(ctx context.Context, inv Invocation, r *Runner) (bool, []*ModuleJSON, error) {
+	inv.Verb = "env"
+	inv.Args = []string{"GOWORK"}
+	stdout, err := r.Run(ctx, inv)
+	if err != nil {
+		return false, nil, err
+	}
+	goWork := string(bytes.TrimSpace(stdout.Bytes()))
+	if fi, err := os.Stat(filepath.Join(filepath.Dir(goWork), "vendor")); err == nil && fi.IsDir() {
+		mainMods, err := getWorkspaceMainModules(ctx, inv, r)
+		if err != nil {
+			return false, nil, err
+		}
+		return true, mainMods, nil
+	}
+	return false, nil, nil
+}
+
+// getWorkspaceMainModules gets the main modules' information.
+// This is the information needed to figure out if vendoring should be enabled.
+func getWorkspaceMainModules(ctx context.Context, inv Invocation, r *Runner) ([]*ModuleJSON, error) {
+	const format = `{{.Path}}
+{{.Dir}}
+{{.GoMod}}
+{{.GoVersion}}
+`
+	inv.Verb = "list"
+	inv.Args = []string{"-m", "-f", format}
+	stdout, err := r.Run(ctx, inv)
+	if err != nil {
+		return nil, err
+	}
+
+	lines := strings.Split(strings.TrimSuffix(stdout.String(), "\n"), "\n")
+	if len(lines) < 4 {
+		return nil, fmt.Errorf("unexpected stdout: %q", stdout.String())
+	}
+	mods := make([]*ModuleJSON, 0, len(lines)/4)
+	for i := 0; i < len(lines); i += 4 {
+		mods = append(mods, &ModuleJSON{
+			Path:      lines[i],
+			Dir:       lines[i+1],
+			GoMod:     lines[i+2],
+			GoVersion: lines[i+3],
+			Main:      true,
+		})
+	}
+	return mods, nil
+}
diff --git a/vendor/golang.org/x/tools/internal/imports/fix.go b/vendor/golang.org/x/tools/internal/imports/fix.go
index 6a18f63a44dc807c2e4341ae6410686683344bdc..93d49a6efd0cdbe87d1c6158ac1ca0c8f1ace581 100644
--- a/vendor/golang.org/x/tools/internal/imports/fix.go
+++ b/vendor/golang.org/x/tools/internal/imports/fix.go
@@ -31,6 +31,7 @@ import (
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/gocommand"
 	"golang.org/x/tools/internal/gopathwalk"
+	"golang.org/x/tools/internal/stdlib"
 )
 
 // importToGroup is a list of functions which map from an import path to
@@ -300,6 +301,20 @@ func (p *pass) loadPackageNames(imports []*ImportInfo) error {
 	return nil
 }
 
+// if there is a trailing major version, remove it
+func withoutVersion(nm string) string {
+	if v := path.Base(nm); len(v) > 0 && v[0] == 'v' {
+		if _, err := strconv.Atoi(v[1:]); err == nil {
+			// this is, for instance, called with rand/v2 and returns rand
+			if len(v) < len(nm) {
+				xnm := nm[:len(nm)-len(v)-1]
+				return path.Base(xnm)
+			}
+		}
+	}
+	return nm
+}
+
 // importIdentifier returns the identifier that imp will introduce. It will
 // guess if the package name has not been loaded, e.g. because the source
 // is not available.
@@ -309,7 +324,7 @@ func (p *pass) importIdentifier(imp *ImportInfo) string {
 	}
 	known := p.knownPackages[imp.ImportPath]
 	if known != nil && known.name != "" {
-		return known.name
+		return withoutVersion(known.name)
 	}
 	return ImportPathToAssumedName(imp.ImportPath)
 }
@@ -511,9 +526,9 @@ func (p *pass) assumeSiblingImportsValid() {
 		}
 		for left, rights := range refs {
 			if imp, ok := importsByName[left]; ok {
-				if m, ok := stdlib[imp.ImportPath]; ok {
+				if m, ok := stdlib.PackageSymbols[imp.ImportPath]; ok {
 					// We have the stdlib in memory; no need to guess.
-					rights = copyExports(m)
+					rights = symbolNameSet(m)
 				}
 				p.addCandidate(imp, &packageInfo{
 					// no name; we already know it.
@@ -641,7 +656,7 @@ func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filena
 	dupCheck := map[string]struct{}{}
 
 	// Start off with the standard library.
-	for importPath, exports := range stdlib {
+	for importPath, symbols := range stdlib.PackageSymbols {
 		p := &pkg{
 			dir:             filepath.Join(goenv["GOROOT"], "src", importPath),
 			importPathShort: importPath,
@@ -650,6 +665,13 @@ func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filena
 		}
 		dupCheck[importPath] = struct{}{}
 		if notSelf(p) && wrappedCallback.dirFound(p) && wrappedCallback.packageNameLoaded(p) {
+			var exports []stdlib.Symbol
+			for _, sym := range symbols {
+				switch sym.Kind {
+				case stdlib.Func, stdlib.Type, stdlib.Var, stdlib.Const:
+					exports = append(exports, sym)
+				}
+			}
 			wrappedCallback.exportsLoaded(p, exports)
 		}
 	}
@@ -670,7 +692,7 @@ func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filena
 			dupCheck[pkg.importPathShort] = struct{}{}
 			return notSelf(pkg) && wrappedCallback.packageNameLoaded(pkg)
 		},
-		exportsLoaded: func(pkg *pkg, exports []string) {
+		exportsLoaded: func(pkg *pkg, exports []stdlib.Symbol) {
 			// If we're an x_test, load the package under test's test variant.
 			if strings.HasSuffix(filePkg, "_test") && pkg.dir == filepath.Dir(filename) {
 				var err error
@@ -795,7 +817,7 @@ func GetImportPaths(ctx context.Context, wrapped func(ImportFix), searchPrefix,
 // A PackageExport is a package and its exports.
 type PackageExport struct {
 	Fix     *ImportFix
-	Exports []string
+	Exports []stdlib.Symbol
 }
 
 // GetPackageExports returns all known packages with name pkg and their exports.
@@ -810,8 +832,8 @@ func GetPackageExports(ctx context.Context, wrapped func(PackageExport), searchP
 		packageNameLoaded: func(pkg *pkg) bool {
 			return pkg.packageName == searchPkg
 		},
-		exportsLoaded: func(pkg *pkg, exports []string) {
-			sort.Strings(exports)
+		exportsLoaded: func(pkg *pkg, exports []stdlib.Symbol) {
+			sortSymbols(exports)
 			wrapped(PackageExport{
 				Fix: &ImportFix{
 					StmtInfo: ImportInfo{
@@ -988,8 +1010,10 @@ func (e *ProcessEnv) GetResolver() (Resolver, error) {
 		// already know the view type.
 		if len(e.Env["GOMOD"]) == 0 && len(e.Env["GOWORK"]) == 0 {
 			e.resolver = newGopathResolver(e)
+		} else if r, err := newModuleResolver(e, e.ModCache); err != nil {
+			e.resolverErr = err
 		} else {
-			e.resolver, e.resolverErr = newModuleResolver(e, e.ModCache)
+			e.resolver = Resolver(r)
 		}
 	}
 
@@ -1049,24 +1073,40 @@ func addStdlibCandidates(pass *pass, refs references) error {
 	if err != nil {
 		return err
 	}
+	localbase := func(nm string) string {
+		ans := path.Base(nm)
+		if ans[0] == 'v' {
+			// this is called, for instance, with math/rand/v2 and returns rand/v2
+			if _, err := strconv.Atoi(ans[1:]); err == nil {
+				ix := strings.LastIndex(nm, ans)
+				more := path.Base(nm[:ix])
+				ans = path.Join(more, ans)
+			}
+		}
+		return ans
+	}
 	add := func(pkg string) {
 		// Prevent self-imports.
 		if path.Base(pkg) == pass.f.Name.Name && filepath.Join(goenv["GOROOT"], "src", pkg) == pass.srcDir {
 			return
 		}
-		exports := copyExports(stdlib[pkg])
+		exports := symbolNameSet(stdlib.PackageSymbols[pkg])
 		pass.addCandidate(
 			&ImportInfo{ImportPath: pkg},
-			&packageInfo{name: path.Base(pkg), exports: exports})
+			&packageInfo{name: localbase(pkg), exports: exports})
 	}
 	for left := range refs {
 		if left == "rand" {
-			// Make sure we try crypto/rand before math/rand.
+			// Make sure we try crypto/rand before any version of math/rand as both have Int()
+			// and our policy is to recommend crypto
 			add("crypto/rand")
-			add("math/rand")
+			// if the user's no later than go1.21, this should be "math/rand"
+			// but we have no way of figuring out what the user is using
+			// TODO: investigate using the toolchain version to disambiguate in the stdlib
+			add("math/rand/v2")
 			continue
 		}
-		for importPath := range stdlib {
+		for importPath := range stdlib.PackageSymbols {
 			if path.Base(importPath) == left {
 				add(importPath)
 			}
@@ -1085,7 +1125,7 @@ type Resolver interface {
 
 	// loadExports returns the set of exported symbols in the package at dir.
 	// loadExports may be called concurrently.
-	loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error)
+	loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []stdlib.Symbol, error)
 
 	// scoreImportPath returns the relevance for an import path.
 	scoreImportPath(ctx context.Context, path string) float64
@@ -1114,7 +1154,7 @@ type scanCallback struct {
 	// If it returns true, the package's exports will be loaded.
 	packageNameLoaded func(pkg *pkg) bool
 	// exportsLoaded is called when a package's exports have been loaded.
-	exportsLoaded func(pkg *pkg, exports []string)
+	exportsLoaded func(pkg *pkg, exports []stdlib.Symbol)
 }
 
 func addExternalCandidates(ctx context.Context, pass *pass, refs references, filename string) error {
@@ -1295,7 +1335,7 @@ func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (
 // importPathToName finds out the actual package name, as declared in its .go files.
 func importPathToName(bctx *build.Context, importPath, srcDir string) string {
 	// Fast path for standard library without going to disk.
-	if _, ok := stdlib[importPath]; ok {
+	if stdlib.HasPackage(importPath) {
 		return path.Base(importPath) // stdlib packages always match their paths.
 	}
 
@@ -1493,7 +1533,7 @@ func (r *gopathResolver) scan(ctx context.Context, callback *scanCallback) error
 }
 
 func (r *gopathResolver) scoreImportPath(ctx context.Context, path string) float64 {
-	if _, ok := stdlib[path]; ok {
+	if stdlib.HasPackage(path) {
 		return MaxRelevance
 	}
 	return MaxRelevance - 1
@@ -1510,7 +1550,7 @@ func filterRoots(roots []gopathwalk.Root, include func(gopathwalk.Root) bool) []
 	return result
 }
 
-func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
+func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []stdlib.Symbol, error) {
 	if info, ok := r.cache.Load(pkg.dir); ok && !includeTest {
 		return r.cache.CacheExports(ctx, r.env, info)
 	}
@@ -1530,7 +1570,7 @@ func VendorlessPath(ipath string) string {
 	return ipath
 }
 
-func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, includeTest bool) (string, []string, error) {
+func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, includeTest bool) (string, []stdlib.Symbol, error) {
 	// Look for non-test, buildable .go files which could provide exports.
 	all, err := os.ReadDir(dir)
 	if err != nil {
@@ -1554,7 +1594,7 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, incl
 	}
 
 	var pkgName string
-	var exports []string
+	var exports []stdlib.Symbol
 	fset := token.NewFileSet()
 	for _, fi := range files {
 		select {
@@ -1581,21 +1621,41 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, incl
 			continue
 		}
 		pkgName = f.Name.Name
-		for name := range f.Scope.Objects {
+		for name, obj := range f.Scope.Objects {
 			if ast.IsExported(name) {
-				exports = append(exports, name)
+				var kind stdlib.Kind
+				switch obj.Kind {
+				case ast.Con:
+					kind = stdlib.Const
+				case ast.Typ:
+					kind = stdlib.Type
+				case ast.Var:
+					kind = stdlib.Var
+				case ast.Fun:
+					kind = stdlib.Func
+				}
+				exports = append(exports, stdlib.Symbol{
+					Name:    name,
+					Kind:    kind,
+					Version: 0, // unknown; be permissive
+				})
 			}
 		}
 	}
+	sortSymbols(exports)
 
 	if env.Logf != nil {
-		sortedExports := append([]string(nil), exports...)
-		sort.Strings(sortedExports)
-		env.Logf("loaded exports in dir %v (package %v): %v", dir, pkgName, strings.Join(sortedExports, ", "))
+		env.Logf("loaded exports in dir %v (package %v): %v", dir, pkgName, exports)
 	}
 	return pkgName, exports, nil
 }
 
+func sortSymbols(syms []stdlib.Symbol) {
+	sort.Slice(syms, func(i, j int) bool {
+		return syms[i].Name < syms[j].Name
+	})
+}
+
 // findImport searches for a package with the given symbols.
 // If no package is found, findImport returns ("", false, nil)
 func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgName string, symbols map[string]bool) (*pkg, error) {
@@ -1662,7 +1722,7 @@ func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgNa
 
 				exportsMap := make(map[string]bool, len(exports))
 				for _, sym := range exports {
-					exportsMap[sym] = true
+					exportsMap[sym.Name] = true
 				}
 
 				// If it doesn't have the right
@@ -1820,10 +1880,13 @@ func (fn visitFn) Visit(node ast.Node) ast.Visitor {
 	return fn(node)
 }
 
-func copyExports(pkg []string) map[string]bool {
-	m := make(map[string]bool, len(pkg))
-	for _, v := range pkg {
-		m[v] = true
+func symbolNameSet(symbols []stdlib.Symbol) map[string]bool {
+	names := make(map[string]bool)
+	for _, sym := range symbols {
+		switch sym.Kind {
+		case stdlib.Const, stdlib.Var, stdlib.Type, stdlib.Func:
+			names[sym.Name] = true
+		}
 	}
-	return m
+	return names
 }
diff --git a/vendor/golang.org/x/tools/internal/imports/imports.go b/vendor/golang.org/x/tools/internal/imports/imports.go
index 660407548e5ab4fab48abef7a571a08df3cf9ea3..f83465520a45420ec0d905e860778bc8a858bb6a 100644
--- a/vendor/golang.org/x/tools/internal/imports/imports.go
+++ b/vendor/golang.org/x/tools/internal/imports/imports.go
@@ -2,8 +2,6 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-//go:generate go run mkstdlib.go
-
 // Package imports implements a Go pretty-printer (like package "go/format")
 // that also adds or removes import statements as necessary.
 package imports
@@ -109,7 +107,7 @@ func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, e
 }
 
 // formatFile formats the file syntax tree.
-// It may mutate the token.FileSet.
+// It may mutate the token.FileSet and the ast.File.
 //
 // If an adjust function is provided, it is called after formatting
 // with the original source (formatFile's src parameter) and the
diff --git a/vendor/golang.org/x/tools/internal/imports/mod.go b/vendor/golang.org/x/tools/internal/imports/mod.go
index 3d0f38f6c23125b9465e0539457db3eed29f3ee7..82fe644a189b4142834735db8ceb81fd7adca3be 100644
--- a/vendor/golang.org/x/tools/internal/imports/mod.go
+++ b/vendor/golang.org/x/tools/internal/imports/mod.go
@@ -21,6 +21,7 @@ import (
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/gocommand"
 	"golang.org/x/tools/internal/gopathwalk"
+	"golang.org/x/tools/internal/stdlib"
 )
 
 // Notes(rfindley): ModuleResolver appears to be heavily optimized for scanning
@@ -111,11 +112,11 @@ func newModuleResolver(e *ProcessEnv, moduleCacheCache *DirInfoCache) (*ModuleRe
 	}
 
 	vendorEnabled := false
-	var mainModVendor *gocommand.ModuleJSON
+	var mainModVendor *gocommand.ModuleJSON    // for module vendoring
+	var mainModsVendor []*gocommand.ModuleJSON // for workspace vendoring
 
-	// Module vendor directories are ignored in workspace mode:
-	// https://go.googlesource.com/proposal/+/master/design/45713-workspace.md
-	if len(r.env.Env["GOWORK"]) == 0 {
+	goWork := r.env.Env["GOWORK"]
+	if len(goWork) == 0 {
 		// TODO(rfindley): VendorEnabled runs the go command to get GOFLAGS, but
 		// they should be available from the ProcessEnv. Can we avoid the redundant
 		// invocation?
@@ -123,18 +124,35 @@ func newModuleResolver(e *ProcessEnv, moduleCacheCache *DirInfoCache) (*ModuleRe
 		if err != nil {
 			return nil, err
 		}
+	} else {
+		vendorEnabled, mainModsVendor, err = gocommand.WorkspaceVendorEnabled(context.Background(), inv, r.env.GocmdRunner)
+		if err != nil {
+			return nil, err
+		}
 	}
 
-	if mainModVendor != nil && vendorEnabled {
-		// Vendor mode is on, so all the non-Main modules are irrelevant,
-		// and we need to search /vendor for everything.
-		r.mains = []*gocommand.ModuleJSON{mainModVendor}
-		r.dummyVendorMod = &gocommand.ModuleJSON{
-			Path: "",
-			Dir:  filepath.Join(mainModVendor.Dir, "vendor"),
+	if vendorEnabled {
+		if mainModVendor != nil {
+			// Module vendor mode is on, so all the non-Main modules are irrelevant,
+			// and we need to search /vendor for everything.
+			r.mains = []*gocommand.ModuleJSON{mainModVendor}
+			r.dummyVendorMod = &gocommand.ModuleJSON{
+				Path: "",
+				Dir:  filepath.Join(mainModVendor.Dir, "vendor"),
+			}
+			r.modsByModPath = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
+			r.modsByDir = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
+		} else {
+			// Workspace vendor mode is on, so all the non-Main modules are irrelevant,
+			// and we need to search /vendor for everything.
+			r.mains = mainModsVendor
+			r.dummyVendorMod = &gocommand.ModuleJSON{
+				Path: "",
+				Dir:  filepath.Join(filepath.Dir(goWork), "vendor"),
+			}
+			r.modsByModPath = append(append([]*gocommand.ModuleJSON{}, mainModsVendor...), r.dummyVendorMod)
+			r.modsByDir = append(append([]*gocommand.ModuleJSON{}, mainModsVendor...), r.dummyVendorMod)
 		}
-		r.modsByModPath = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
-		r.modsByDir = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
 	} else {
 		// Vendor mode is off, so run go list -m ... to find everything.
 		err := r.initAllMods()
@@ -165,8 +183,9 @@ func newModuleResolver(e *ProcessEnv, moduleCacheCache *DirInfoCache) (*ModuleRe
 		return count(j) < count(i) // descending order
 	})
 
-	r.roots = []gopathwalk.Root{
-		{Path: filepath.Join(goenv["GOROOT"], "/src"), Type: gopathwalk.RootGOROOT},
+	r.roots = []gopathwalk.Root{}
+	if goenv["GOROOT"] != "" { // "" happens in tests
+		r.roots = append(r.roots, gopathwalk.Root{Path: filepath.Join(goenv["GOROOT"], "/src"), Type: gopathwalk.RootGOROOT})
 	}
 	r.mainByDir = make(map[string]*gocommand.ModuleJSON)
 	for _, main := range r.mains {
@@ -313,15 +332,19 @@ func (r *ModuleResolver) ClearForNewScan() Resolver {
 // TODO(rfindley): move this to a new env.go, consolidating ProcessEnv methods.
 func (e *ProcessEnv) ClearModuleInfo() {
 	if r, ok := e.resolver.(*ModuleResolver); ok {
-		resolver, resolverErr := newModuleResolver(e, e.ModCache)
-		if resolverErr == nil {
-			<-r.scanSema // acquire (guards caches)
-			resolver.moduleCacheCache = r.moduleCacheCache
-			resolver.otherCache = r.otherCache
-			r.scanSema <- struct{}{} // release
+		resolver, err := newModuleResolver(e, e.ModCache)
+		if err != nil {
+			e.resolver = nil
+			e.resolverErr = err
+			return
 		}
-		e.resolver = resolver
-		e.resolverErr = resolverErr
+
+		<-r.scanSema // acquire (guards caches)
+		resolver.moduleCacheCache = r.moduleCacheCache
+		resolver.otherCache = r.otherCache
+		r.scanSema <- struct{}{} // release
+
+		e.UpdateResolver(resolver)
 	}
 }
 
@@ -412,7 +435,7 @@ func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (string, er
 	return r.otherCache.CachePackageName(info)
 }
 
-func (r *ModuleResolver) cacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) {
+func (r *ModuleResolver) cacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []stdlib.Symbol, error) {
 	if info.rootType == gopathwalk.RootModuleCache {
 		return r.moduleCacheCache.CacheExports(ctx, env, info)
 	}
@@ -632,7 +655,7 @@ func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error
 }
 
 func (r *ModuleResolver) scoreImportPath(ctx context.Context, path string) float64 {
-	if _, ok := stdlib[path]; ok {
+	if stdlib.HasPackage(path) {
 		return MaxRelevance
 	}
 	mod, _ := r.findPackage(path)
@@ -710,7 +733,7 @@ func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
 	return res, nil
 }
 
-func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
+func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []stdlib.Symbol, error) {
 	if info, ok := r.cacheLoad(pkg.dir); ok && !includeTest {
 		return r.cacheExports(ctx, r.env, info)
 	}
diff --git a/vendor/golang.org/x/tools/internal/imports/mod_cache.go b/vendor/golang.org/x/tools/internal/imports/mod_cache.go
index cfc54657656deb703f610e9644b3016efc619a1f..b1192696b28e4983ac690632be26670ea0ea9d9a 100644
--- a/vendor/golang.org/x/tools/internal/imports/mod_cache.go
+++ b/vendor/golang.org/x/tools/internal/imports/mod_cache.go
@@ -14,6 +14,7 @@ import (
 
 	"golang.org/x/mod/module"
 	"golang.org/x/tools/internal/gopathwalk"
+	"golang.org/x/tools/internal/stdlib"
 )
 
 // To find packages to import, the resolver needs to know about all of
@@ -73,7 +74,7 @@ type directoryPackageInfo struct {
 	// the default build context GOOS and GOARCH.
 	//
 	// We can make this explicit, and key exports by GOOS, GOARCH.
-	exports []string
+	exports []stdlib.Symbol
 }
 
 // reachedStatus returns true when info has a status at least target and any error associated with
@@ -229,7 +230,7 @@ func (d *DirInfoCache) CachePackageName(info directoryPackageInfo) (string, erro
 	return info.packageName, info.err
 }
 
-func (d *DirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) {
+func (d *DirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []stdlib.Symbol, error) {
 	if reached, _ := info.reachedStatus(exportsLoaded); reached {
 		return info.packageName, info.exports, info.err
 	}
diff --git a/vendor/golang.org/x/tools/internal/imports/sortimports.go b/vendor/golang.org/x/tools/internal/imports/sortimports.go
index 1a0a7ebd9e4d078be3d0b35dade18cecef16594a..da8194fd965b45f666ccebb6e3939646112a9175 100644
--- a/vendor/golang.org/x/tools/internal/imports/sortimports.go
+++ b/vendor/golang.org/x/tools/internal/imports/sortimports.go
@@ -18,7 +18,7 @@ import (
 // sortImports sorts runs of consecutive import lines in import blocks in f.
 // It also removes duplicate imports when it is possible to do so without data loss.
 //
-// It may mutate the token.File.
+// It may mutate the token.File and the ast.File.
 func sortImports(localPrefix string, tokFile *token.File, f *ast.File) {
 	for i, d := range f.Decls {
 		d, ok := d.(*ast.GenDecl)
diff --git a/vendor/golang.org/x/tools/internal/imports/zstdlib.go b/vendor/golang.org/x/tools/internal/imports/zstdlib.go
deleted file mode 100644
index 8db24df2ff46811a1e2c46aaa8b59de3b2aca7f2..0000000000000000000000000000000000000000
--- a/vendor/golang.org/x/tools/internal/imports/zstdlib.go
+++ /dev/null
@@ -1,11406 +0,0 @@
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Code generated by mkstdlib.go. DO NOT EDIT.
-
-package imports
-
-var stdlib = map[string][]string{
-	"archive/tar": {
-		"ErrFieldTooLong",
-		"ErrHeader",
-		"ErrInsecurePath",
-		"ErrWriteAfterClose",
-		"ErrWriteTooLong",
-		"FileInfoHeader",
-		"Format",
-		"FormatGNU",
-		"FormatPAX",
-		"FormatUSTAR",
-		"FormatUnknown",
-		"Header",
-		"NewReader",
-		"NewWriter",
-		"Reader",
-		"TypeBlock",
-		"TypeChar",
-		"TypeCont",
-		"TypeDir",
-		"TypeFifo",
-		"TypeGNULongLink",
-		"TypeGNULongName",
-		"TypeGNUSparse",
-		"TypeLink",
-		"TypeReg",
-		"TypeRegA",
-		"TypeSymlink",
-		"TypeXGlobalHeader",
-		"TypeXHeader",
-		"Writer",
-	},
-	"archive/zip": {
-		"Compressor",
-		"Decompressor",
-		"Deflate",
-		"ErrAlgorithm",
-		"ErrChecksum",
-		"ErrFormat",
-		"ErrInsecurePath",
-		"File",
-		"FileHeader",
-		"FileInfoHeader",
-		"NewReader",
-		"NewWriter",
-		"OpenReader",
-		"ReadCloser",
-		"Reader",
-		"RegisterCompressor",
-		"RegisterDecompressor",
-		"Store",
-		"Writer",
-	},
-	"bufio": {
-		"ErrAdvanceTooFar",
-		"ErrBadReadCount",
-		"ErrBufferFull",
-		"ErrFinalToken",
-		"ErrInvalidUnreadByte",
-		"ErrInvalidUnreadRune",
-		"ErrNegativeAdvance",
-		"ErrNegativeCount",
-		"ErrTooLong",
-		"MaxScanTokenSize",
-		"NewReadWriter",
-		"NewReader",
-		"NewReaderSize",
-		"NewScanner",
-		"NewWriter",
-		"NewWriterSize",
-		"ReadWriter",
-		"Reader",
-		"ScanBytes",
-		"ScanLines",
-		"ScanRunes",
-		"ScanWords",
-		"Scanner",
-		"SplitFunc",
-		"Writer",
-	},
-	"bytes": {
-		"Buffer",
-		"Clone",
-		"Compare",
-		"Contains",
-		"ContainsAny",
-		"ContainsFunc",
-		"ContainsRune",
-		"Count",
-		"Cut",
-		"CutPrefix",
-		"CutSuffix",
-		"Equal",
-		"EqualFold",
-		"ErrTooLarge",
-		"Fields",
-		"FieldsFunc",
-		"HasPrefix",
-		"HasSuffix",
-		"Index",
-		"IndexAny",
-		"IndexByte",
-		"IndexFunc",
-		"IndexRune",
-		"Join",
-		"LastIndex",
-		"LastIndexAny",
-		"LastIndexByte",
-		"LastIndexFunc",
-		"Map",
-		"MinRead",
-		"NewBuffer",
-		"NewBufferString",
-		"NewReader",
-		"Reader",
-		"Repeat",
-		"Replace",
-		"ReplaceAll",
-		"Runes",
-		"Split",
-		"SplitAfter",
-		"SplitAfterN",
-		"SplitN",
-		"Title",
-		"ToLower",
-		"ToLowerSpecial",
-		"ToTitle",
-		"ToTitleSpecial",
-		"ToUpper",
-		"ToUpperSpecial",
-		"ToValidUTF8",
-		"Trim",
-		"TrimFunc",
-		"TrimLeft",
-		"TrimLeftFunc",
-		"TrimPrefix",
-		"TrimRight",
-		"TrimRightFunc",
-		"TrimSpace",
-		"TrimSuffix",
-	},
-	"cmp": {
-		"Compare",
-		"Less",
-		"Or",
-		"Ordered",
-	},
-	"compress/bzip2": {
-		"NewReader",
-		"StructuralError",
-	},
-	"compress/flate": {
-		"BestCompression",
-		"BestSpeed",
-		"CorruptInputError",
-		"DefaultCompression",
-		"HuffmanOnly",
-		"InternalError",
-		"NewReader",
-		"NewReaderDict",
-		"NewWriter",
-		"NewWriterDict",
-		"NoCompression",
-		"ReadError",
-		"Reader",
-		"Resetter",
-		"WriteError",
-		"Writer",
-	},
-	"compress/gzip": {
-		"BestCompression",
-		"BestSpeed",
-		"DefaultCompression",
-		"ErrChecksum",
-		"ErrHeader",
-		"Header",
-		"HuffmanOnly",
-		"NewReader",
-		"NewWriter",
-		"NewWriterLevel",
-		"NoCompression",
-		"Reader",
-		"Writer",
-	},
-	"compress/lzw": {
-		"LSB",
-		"MSB",
-		"NewReader",
-		"NewWriter",
-		"Order",
-		"Reader",
-		"Writer",
-	},
-	"compress/zlib": {
-		"BestCompression",
-		"BestSpeed",
-		"DefaultCompression",
-		"ErrChecksum",
-		"ErrDictionary",
-		"ErrHeader",
-		"HuffmanOnly",
-		"NewReader",
-		"NewReaderDict",
-		"NewWriter",
-		"NewWriterLevel",
-		"NewWriterLevelDict",
-		"NoCompression",
-		"Resetter",
-		"Writer",
-	},
-	"container/heap": {
-		"Fix",
-		"Init",
-		"Interface",
-		"Pop",
-		"Push",
-		"Remove",
-	},
-	"container/list": {
-		"Element",
-		"List",
-		"New",
-	},
-	"container/ring": {
-		"New",
-		"Ring",
-	},
-	"context": {
-		"AfterFunc",
-		"Background",
-		"CancelCauseFunc",
-		"CancelFunc",
-		"Canceled",
-		"Cause",
-		"Context",
-		"DeadlineExceeded",
-		"TODO",
-		"WithCancel",
-		"WithCancelCause",
-		"WithDeadline",
-		"WithDeadlineCause",
-		"WithTimeout",
-		"WithTimeoutCause",
-		"WithValue",
-		"WithoutCancel",
-	},
-	"crypto": {
-		"BLAKE2b_256",
-		"BLAKE2b_384",
-		"BLAKE2b_512",
-		"BLAKE2s_256",
-		"Decrypter",
-		"DecrypterOpts",
-		"Hash",
-		"MD4",
-		"MD5",
-		"MD5SHA1",
-		"PrivateKey",
-		"PublicKey",
-		"RIPEMD160",
-		"RegisterHash",
-		"SHA1",
-		"SHA224",
-		"SHA256",
-		"SHA384",
-		"SHA3_224",
-		"SHA3_256",
-		"SHA3_384",
-		"SHA3_512",
-		"SHA512",
-		"SHA512_224",
-		"SHA512_256",
-		"Signer",
-		"SignerOpts",
-	},
-	"crypto/aes": {
-		"BlockSize",
-		"KeySizeError",
-		"NewCipher",
-	},
-	"crypto/cipher": {
-		"AEAD",
-		"Block",
-		"BlockMode",
-		"NewCBCDecrypter",
-		"NewCBCEncrypter",
-		"NewCFBDecrypter",
-		"NewCFBEncrypter",
-		"NewCTR",
-		"NewGCM",
-		"NewGCMWithNonceSize",
-		"NewGCMWithTagSize",
-		"NewOFB",
-		"Stream",
-		"StreamReader",
-		"StreamWriter",
-	},
-	"crypto/des": {
-		"BlockSize",
-		"KeySizeError",
-		"NewCipher",
-		"NewTripleDESCipher",
-	},
-	"crypto/dsa": {
-		"ErrInvalidPublicKey",
-		"GenerateKey",
-		"GenerateParameters",
-		"L1024N160",
-		"L2048N224",
-		"L2048N256",
-		"L3072N256",
-		"ParameterSizes",
-		"Parameters",
-		"PrivateKey",
-		"PublicKey",
-		"Sign",
-		"Verify",
-	},
-	"crypto/ecdh": {
-		"Curve",
-		"P256",
-		"P384",
-		"P521",
-		"PrivateKey",
-		"PublicKey",
-		"X25519",
-	},
-	"crypto/ecdsa": {
-		"GenerateKey",
-		"PrivateKey",
-		"PublicKey",
-		"Sign",
-		"SignASN1",
-		"Verify",
-		"VerifyASN1",
-	},
-	"crypto/ed25519": {
-		"GenerateKey",
-		"NewKeyFromSeed",
-		"Options",
-		"PrivateKey",
-		"PrivateKeySize",
-		"PublicKey",
-		"PublicKeySize",
-		"SeedSize",
-		"Sign",
-		"SignatureSize",
-		"Verify",
-		"VerifyWithOptions",
-	},
-	"crypto/elliptic": {
-		"Curve",
-		"CurveParams",
-		"GenerateKey",
-		"Marshal",
-		"MarshalCompressed",
-		"P224",
-		"P256",
-		"P384",
-		"P521",
-		"Unmarshal",
-		"UnmarshalCompressed",
-	},
-	"crypto/hmac": {
-		"Equal",
-		"New",
-	},
-	"crypto/md5": {
-		"BlockSize",
-		"New",
-		"Size",
-		"Sum",
-	},
-	"crypto/rand": {
-		"Int",
-		"Prime",
-		"Read",
-		"Reader",
-	},
-	"crypto/rc4": {
-		"Cipher",
-		"KeySizeError",
-		"NewCipher",
-	},
-	"crypto/rsa": {
-		"CRTValue",
-		"DecryptOAEP",
-		"DecryptPKCS1v15",
-		"DecryptPKCS1v15SessionKey",
-		"EncryptOAEP",
-		"EncryptPKCS1v15",
-		"ErrDecryption",
-		"ErrMessageTooLong",
-		"ErrVerification",
-		"GenerateKey",
-		"GenerateMultiPrimeKey",
-		"OAEPOptions",
-		"PKCS1v15DecryptOptions",
-		"PSSOptions",
-		"PSSSaltLengthAuto",
-		"PSSSaltLengthEqualsHash",
-		"PrecomputedValues",
-		"PrivateKey",
-		"PublicKey",
-		"SignPKCS1v15",
-		"SignPSS",
-		"VerifyPKCS1v15",
-		"VerifyPSS",
-	},
-	"crypto/sha1": {
-		"BlockSize",
-		"New",
-		"Size",
-		"Sum",
-	},
-	"crypto/sha256": {
-		"BlockSize",
-		"New",
-		"New224",
-		"Size",
-		"Size224",
-		"Sum224",
-		"Sum256",
-	},
-	"crypto/sha512": {
-		"BlockSize",
-		"New",
-		"New384",
-		"New512_224",
-		"New512_256",
-		"Size",
-		"Size224",
-		"Size256",
-		"Size384",
-		"Sum384",
-		"Sum512",
-		"Sum512_224",
-		"Sum512_256",
-	},
-	"crypto/subtle": {
-		"ConstantTimeByteEq",
-		"ConstantTimeCompare",
-		"ConstantTimeCopy",
-		"ConstantTimeEq",
-		"ConstantTimeLessOrEq",
-		"ConstantTimeSelect",
-		"XORBytes",
-	},
-	"crypto/tls": {
-		"AlertError",
-		"Certificate",
-		"CertificateRequestInfo",
-		"CertificateVerificationError",
-		"CipherSuite",
-		"CipherSuiteName",
-		"CipherSuites",
-		"Client",
-		"ClientAuthType",
-		"ClientHelloInfo",
-		"ClientSessionCache",
-		"ClientSessionState",
-		"Config",
-		"Conn",
-		"ConnectionState",
-		"CurveID",
-		"CurveP256",
-		"CurveP384",
-		"CurveP521",
-		"Dial",
-		"DialWithDialer",
-		"Dialer",
-		"ECDSAWithP256AndSHA256",
-		"ECDSAWithP384AndSHA384",
-		"ECDSAWithP521AndSHA512",
-		"ECDSAWithSHA1",
-		"Ed25519",
-		"InsecureCipherSuites",
-		"Listen",
-		"LoadX509KeyPair",
-		"NewLRUClientSessionCache",
-		"NewListener",
-		"NewResumptionState",
-		"NoClientCert",
-		"PKCS1WithSHA1",
-		"PKCS1WithSHA256",
-		"PKCS1WithSHA384",
-		"PKCS1WithSHA512",
-		"PSSWithSHA256",
-		"PSSWithSHA384",
-		"PSSWithSHA512",
-		"ParseSessionState",
-		"QUICClient",
-		"QUICConfig",
-		"QUICConn",
-		"QUICEncryptionLevel",
-		"QUICEncryptionLevelApplication",
-		"QUICEncryptionLevelEarly",
-		"QUICEncryptionLevelHandshake",
-		"QUICEncryptionLevelInitial",
-		"QUICEvent",
-		"QUICEventKind",
-		"QUICHandshakeDone",
-		"QUICNoEvent",
-		"QUICRejectedEarlyData",
-		"QUICServer",
-		"QUICSessionTicketOptions",
-		"QUICSetReadSecret",
-		"QUICSetWriteSecret",
-		"QUICTransportParameters",
-		"QUICTransportParametersRequired",
-		"QUICWriteData",
-		"RecordHeaderError",
-		"RenegotiateFreelyAsClient",
-		"RenegotiateNever",
-		"RenegotiateOnceAsClient",
-		"RenegotiationSupport",
-		"RequestClientCert",
-		"RequireAndVerifyClientCert",
-		"RequireAnyClientCert",
-		"Server",
-		"SessionState",
-		"SignatureScheme",
-		"TLS_AES_128_GCM_SHA256",
-		"TLS_AES_256_GCM_SHA384",
-		"TLS_CHACHA20_POLY1305_SHA256",
-		"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA",
-		"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256",
-		"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
-		"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA",
-		"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
-		"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305",
-		"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256",
-		"TLS_ECDHE_ECDSA_WITH_RC4_128_SHA",
-		"TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA",
-		"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
-		"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256",
-		"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
-		"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
-		"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
-		"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305",
-		"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
-		"TLS_ECDHE_RSA_WITH_RC4_128_SHA",
-		"TLS_FALLBACK_SCSV",
-		"TLS_RSA_WITH_3DES_EDE_CBC_SHA",
-		"TLS_RSA_WITH_AES_128_CBC_SHA",
-		"TLS_RSA_WITH_AES_128_CBC_SHA256",
-		"TLS_RSA_WITH_AES_128_GCM_SHA256",
-		"TLS_RSA_WITH_AES_256_CBC_SHA",
-		"TLS_RSA_WITH_AES_256_GCM_SHA384",
-		"TLS_RSA_WITH_RC4_128_SHA",
-		"VerifyClientCertIfGiven",
-		"VersionName",
-		"VersionSSL30",
-		"VersionTLS10",
-		"VersionTLS11",
-		"VersionTLS12",
-		"VersionTLS13",
-		"X25519",
-		"X509KeyPair",
-	},
-	"crypto/x509": {
-		"CANotAuthorizedForExtKeyUsage",
-		"CANotAuthorizedForThisName",
-		"CertPool",
-		"Certificate",
-		"CertificateInvalidError",
-		"CertificateRequest",
-		"ConstraintViolationError",
-		"CreateCertificate",
-		"CreateCertificateRequest",
-		"CreateRevocationList",
-		"DSA",
-		"DSAWithSHA1",
-		"DSAWithSHA256",
-		"DecryptPEMBlock",
-		"ECDSA",
-		"ECDSAWithSHA1",
-		"ECDSAWithSHA256",
-		"ECDSAWithSHA384",
-		"ECDSAWithSHA512",
-		"Ed25519",
-		"EncryptPEMBlock",
-		"ErrUnsupportedAlgorithm",
-		"Expired",
-		"ExtKeyUsage",
-		"ExtKeyUsageAny",
-		"ExtKeyUsageClientAuth",
-		"ExtKeyUsageCodeSigning",
-		"ExtKeyUsageEmailProtection",
-		"ExtKeyUsageIPSECEndSystem",
-		"ExtKeyUsageIPSECTunnel",
-		"ExtKeyUsageIPSECUser",
-		"ExtKeyUsageMicrosoftCommercialCodeSigning",
-		"ExtKeyUsageMicrosoftKernelCodeSigning",
-		"ExtKeyUsageMicrosoftServerGatedCrypto",
-		"ExtKeyUsageNetscapeServerGatedCrypto",
-		"ExtKeyUsageOCSPSigning",
-		"ExtKeyUsageServerAuth",
-		"ExtKeyUsageTimeStamping",
-		"HostnameError",
-		"IncompatibleUsage",
-		"IncorrectPasswordError",
-		"InsecureAlgorithmError",
-		"InvalidReason",
-		"IsEncryptedPEMBlock",
-		"KeyUsage",
-		"KeyUsageCRLSign",
-		"KeyUsageCertSign",
-		"KeyUsageContentCommitment",
-		"KeyUsageDataEncipherment",
-		"KeyUsageDecipherOnly",
-		"KeyUsageDigitalSignature",
-		"KeyUsageEncipherOnly",
-		"KeyUsageKeyAgreement",
-		"KeyUsageKeyEncipherment",
-		"MD2WithRSA",
-		"MD5WithRSA",
-		"MarshalECPrivateKey",
-		"MarshalPKCS1PrivateKey",
-		"MarshalPKCS1PublicKey",
-		"MarshalPKCS8PrivateKey",
-		"MarshalPKIXPublicKey",
-		"NameConstraintsWithoutSANs",
-		"NameMismatch",
-		"NewCertPool",
-		"NotAuthorizedToSign",
-		"OID",
-		"OIDFromInts",
-		"PEMCipher",
-		"PEMCipher3DES",
-		"PEMCipherAES128",
-		"PEMCipherAES192",
-		"PEMCipherAES256",
-		"PEMCipherDES",
-		"ParseCRL",
-		"ParseCertificate",
-		"ParseCertificateRequest",
-		"ParseCertificates",
-		"ParseDERCRL",
-		"ParseECPrivateKey",
-		"ParsePKCS1PrivateKey",
-		"ParsePKCS1PublicKey",
-		"ParsePKCS8PrivateKey",
-		"ParsePKIXPublicKey",
-		"ParseRevocationList",
-		"PublicKeyAlgorithm",
-		"PureEd25519",
-		"RSA",
-		"RevocationList",
-		"RevocationListEntry",
-		"SHA1WithRSA",
-		"SHA256WithRSA",
-		"SHA256WithRSAPSS",
-		"SHA384WithRSA",
-		"SHA384WithRSAPSS",
-		"SHA512WithRSA",
-		"SHA512WithRSAPSS",
-		"SetFallbackRoots",
-		"SignatureAlgorithm",
-		"SystemCertPool",
-		"SystemRootsError",
-		"TooManyConstraints",
-		"TooManyIntermediates",
-		"UnconstrainedName",
-		"UnhandledCriticalExtension",
-		"UnknownAuthorityError",
-		"UnknownPublicKeyAlgorithm",
-		"UnknownSignatureAlgorithm",
-		"VerifyOptions",
-	},
-	"crypto/x509/pkix": {
-		"AlgorithmIdentifier",
-		"AttributeTypeAndValue",
-		"AttributeTypeAndValueSET",
-		"CertificateList",
-		"Extension",
-		"Name",
-		"RDNSequence",
-		"RelativeDistinguishedNameSET",
-		"RevokedCertificate",
-		"TBSCertificateList",
-	},
-	"database/sql": {
-		"ColumnType",
-		"Conn",
-		"DB",
-		"DBStats",
-		"Drivers",
-		"ErrConnDone",
-		"ErrNoRows",
-		"ErrTxDone",
-		"IsolationLevel",
-		"LevelDefault",
-		"LevelLinearizable",
-		"LevelReadCommitted",
-		"LevelReadUncommitted",
-		"LevelRepeatableRead",
-		"LevelSerializable",
-		"LevelSnapshot",
-		"LevelWriteCommitted",
-		"Named",
-		"NamedArg",
-		"Null",
-		"NullBool",
-		"NullByte",
-		"NullFloat64",
-		"NullInt16",
-		"NullInt32",
-		"NullInt64",
-		"NullString",
-		"NullTime",
-		"Open",
-		"OpenDB",
-		"Out",
-		"RawBytes",
-		"Register",
-		"Result",
-		"Row",
-		"Rows",
-		"Scanner",
-		"Stmt",
-		"Tx",
-		"TxOptions",
-	},
-	"database/sql/driver": {
-		"Bool",
-		"ColumnConverter",
-		"Conn",
-		"ConnBeginTx",
-		"ConnPrepareContext",
-		"Connector",
-		"DefaultParameterConverter",
-		"Driver",
-		"DriverContext",
-		"ErrBadConn",
-		"ErrRemoveArgument",
-		"ErrSkip",
-		"Execer",
-		"ExecerContext",
-		"Int32",
-		"IsScanValue",
-		"IsValue",
-		"IsolationLevel",
-		"NamedValue",
-		"NamedValueChecker",
-		"NotNull",
-		"Null",
-		"Pinger",
-		"Queryer",
-		"QueryerContext",
-		"Result",
-		"ResultNoRows",
-		"Rows",
-		"RowsAffected",
-		"RowsColumnTypeDatabaseTypeName",
-		"RowsColumnTypeLength",
-		"RowsColumnTypeNullable",
-		"RowsColumnTypePrecisionScale",
-		"RowsColumnTypeScanType",
-		"RowsNextResultSet",
-		"SessionResetter",
-		"Stmt",
-		"StmtExecContext",
-		"StmtQueryContext",
-		"String",
-		"Tx",
-		"TxOptions",
-		"Validator",
-		"Value",
-		"ValueConverter",
-		"Valuer",
-	},
-	"debug/buildinfo": {
-		"BuildInfo",
-		"Read",
-		"ReadFile",
-	},
-	"debug/dwarf": {
-		"AddrType",
-		"ArrayType",
-		"Attr",
-		"AttrAbstractOrigin",
-		"AttrAccessibility",
-		"AttrAddrBase",
-		"AttrAddrClass",
-		"AttrAlignment",
-		"AttrAllocated",
-		"AttrArtificial",
-		"AttrAssociated",
-		"AttrBaseTypes",
-		"AttrBinaryScale",
-		"AttrBitOffset",
-		"AttrBitSize",
-		"AttrByteSize",
-		"AttrCallAllCalls",
-		"AttrCallAllSourceCalls",
-		"AttrCallAllTailCalls",
-		"AttrCallColumn",
-		"AttrCallDataLocation",
-		"AttrCallDataValue",
-		"AttrCallFile",
-		"AttrCallLine",
-		"AttrCallOrigin",
-		"AttrCallPC",
-		"AttrCallParameter",
-		"AttrCallReturnPC",
-		"AttrCallTailCall",
-		"AttrCallTarget",
-		"AttrCallTargetClobbered",
-		"AttrCallValue",
-		"AttrCalling",
-		"AttrCommonRef",
-		"AttrCompDir",
-		"AttrConstExpr",
-		"AttrConstValue",
-		"AttrContainingType",
-		"AttrCount",
-		"AttrDataBitOffset",
-		"AttrDataLocation",
-		"AttrDataMemberLoc",
-		"AttrDecimalScale",
-		"AttrDecimalSign",
-		"AttrDeclColumn",
-		"AttrDeclFile",
-		"AttrDeclLine",
-		"AttrDeclaration",
-		"AttrDefaultValue",
-		"AttrDefaulted",
-		"AttrDeleted",
-		"AttrDescription",
-		"AttrDigitCount",
-		"AttrDiscr",
-		"AttrDiscrList",
-		"AttrDiscrValue",
-		"AttrDwoName",
-		"AttrElemental",
-		"AttrEncoding",
-		"AttrEndianity",
-		"AttrEntrypc",
-		"AttrEnumClass",
-		"AttrExplicit",
-		"AttrExportSymbols",
-		"AttrExtension",
-		"AttrExternal",
-		"AttrFrameBase",
-		"AttrFriend",
-		"AttrHighpc",
-		"AttrIdentifierCase",
-		"AttrImport",
-		"AttrInline",
-		"AttrIsOptional",
-		"AttrLanguage",
-		"AttrLinkageName",
-		"AttrLocation",
-		"AttrLoclistsBase",
-		"AttrLowerBound",
-		"AttrLowpc",
-		"AttrMacroInfo",
-		"AttrMacros",
-		"AttrMainSubprogram",
-		"AttrMutable",
-		"AttrName",
-		"AttrNamelistItem",
-		"AttrNoreturn",
-		"AttrObjectPointer",
-		"AttrOrdering",
-		"AttrPictureString",
-		"AttrPriority",
-		"AttrProducer",
-		"AttrPrototyped",
-		"AttrPure",
-		"AttrRanges",
-		"AttrRank",
-		"AttrRecursive",
-		"AttrReference",
-		"AttrReturnAddr",
-		"AttrRnglistsBase",
-		"AttrRvalueReference",
-		"AttrSegment",
-		"AttrSibling",
-		"AttrSignature",
-		"AttrSmall",
-		"AttrSpecification",
-		"AttrStartScope",
-		"AttrStaticLink",
-		"AttrStmtList",
-		"AttrStrOffsetsBase",
-		"AttrStride",
-		"AttrStrideSize",
-		"AttrStringLength",
-		"AttrStringLengthBitSize",
-		"AttrStringLengthByteSize",
-		"AttrThreadsScaled",
-		"AttrTrampoline",
-		"AttrType",
-		"AttrUpperBound",
-		"AttrUseLocation",
-		"AttrUseUTF8",
-		"AttrVarParam",
-		"AttrVirtuality",
-		"AttrVisibility",
-		"AttrVtableElemLoc",
-		"BasicType",
-		"BoolType",
-		"CharType",
-		"Class",
-		"ClassAddrPtr",
-		"ClassAddress",
-		"ClassBlock",
-		"ClassConstant",
-		"ClassExprLoc",
-		"ClassFlag",
-		"ClassLinePtr",
-		"ClassLocList",
-		"ClassLocListPtr",
-		"ClassMacPtr",
-		"ClassRangeListPtr",
-		"ClassReference",
-		"ClassReferenceAlt",
-		"ClassReferenceSig",
-		"ClassRngList",
-		"ClassRngListsPtr",
-		"ClassStrOffsetsPtr",
-		"ClassString",
-		"ClassStringAlt",
-		"ClassUnknown",
-		"CommonType",
-		"ComplexType",
-		"Data",
-		"DecodeError",
-		"DotDotDotType",
-		"Entry",
-		"EnumType",
-		"EnumValue",
-		"ErrUnknownPC",
-		"Field",
-		"FloatType",
-		"FuncType",
-		"IntType",
-		"LineEntry",
-		"LineFile",
-		"LineReader",
-		"LineReaderPos",
-		"New",
-		"Offset",
-		"PtrType",
-		"QualType",
-		"Reader",
-		"StructField",
-		"StructType",
-		"Tag",
-		"TagAccessDeclaration",
-		"TagArrayType",
-		"TagAtomicType",
-		"TagBaseType",
-		"TagCallSite",
-		"TagCallSiteParameter",
-		"TagCatchDwarfBlock",
-		"TagClassType",
-		"TagCoarrayType",
-		"TagCommonDwarfBlock",
-		"TagCommonInclusion",
-		"TagCompileUnit",
-		"TagCondition",
-		"TagConstType",
-		"TagConstant",
-		"TagDwarfProcedure",
-		"TagDynamicType",
-		"TagEntryPoint",
-		"TagEnumerationType",
-		"TagEnumerator",
-		"TagFileType",
-		"TagFormalParameter",
-		"TagFriend",
-		"TagGenericSubrange",
-		"TagImmutableType",
-		"TagImportedDeclaration",
-		"TagImportedModule",
-		"TagImportedUnit",
-		"TagInheritance",
-		"TagInlinedSubroutine",
-		"TagInterfaceType",
-		"TagLabel",
-		"TagLexDwarfBlock",
-		"TagMember",
-		"TagModule",
-		"TagMutableType",
-		"TagNamelist",
-		"TagNamelistItem",
-		"TagNamespace",
-		"TagPackedType",
-		"TagPartialUnit",
-		"TagPointerType",
-		"TagPtrToMemberType",
-		"TagReferenceType",
-		"TagRestrictType",
-		"TagRvalueReferenceType",
-		"TagSetType",
-		"TagSharedType",
-		"TagSkeletonUnit",
-		"TagStringType",
-		"TagStructType",
-		"TagSubprogram",
-		"TagSubrangeType",
-		"TagSubroutineType",
-		"TagTemplateAlias",
-		"TagTemplateTypeParameter",
-		"TagTemplateValueParameter",
-		"TagThrownType",
-		"TagTryDwarfBlock",
-		"TagTypeUnit",
-		"TagTypedef",
-		"TagUnionType",
-		"TagUnspecifiedParameters",
-		"TagUnspecifiedType",
-		"TagVariable",
-		"TagVariant",
-		"TagVariantPart",
-		"TagVolatileType",
-		"TagWithStmt",
-		"Type",
-		"TypedefType",
-		"UcharType",
-		"UintType",
-		"UnspecifiedType",
-		"UnsupportedType",
-		"VoidType",
-	},
-	"debug/elf": {
-		"ARM_MAGIC_TRAMP_NUMBER",
-		"COMPRESS_HIOS",
-		"COMPRESS_HIPROC",
-		"COMPRESS_LOOS",
-		"COMPRESS_LOPROC",
-		"COMPRESS_ZLIB",
-		"COMPRESS_ZSTD",
-		"Chdr32",
-		"Chdr64",
-		"Class",
-		"CompressionType",
-		"DF_1_CONFALT",
-		"DF_1_DIRECT",
-		"DF_1_DISPRELDNE",
-		"DF_1_DISPRELPND",
-		"DF_1_EDITED",
-		"DF_1_ENDFILTEE",
-		"DF_1_GLOBAL",
-		"DF_1_GLOBAUDIT",
-		"DF_1_GROUP",
-		"DF_1_IGNMULDEF",
-		"DF_1_INITFIRST",
-		"DF_1_INTERPOSE",
-		"DF_1_KMOD",
-		"DF_1_LOADFLTR",
-		"DF_1_NOCOMMON",
-		"DF_1_NODEFLIB",
-		"DF_1_NODELETE",
-		"DF_1_NODIRECT",
-		"DF_1_NODUMP",
-		"DF_1_NOHDR",
-		"DF_1_NOKSYMS",
-		"DF_1_NOOPEN",
-		"DF_1_NORELOC",
-		"DF_1_NOW",
-		"DF_1_ORIGIN",
-		"DF_1_PIE",
-		"DF_1_SINGLETON",
-		"DF_1_STUB",
-		"DF_1_SYMINTPOSE",
-		"DF_1_TRANS",
-		"DF_1_WEAKFILTER",
-		"DF_BIND_NOW",
-		"DF_ORIGIN",
-		"DF_STATIC_TLS",
-		"DF_SYMBOLIC",
-		"DF_TEXTREL",
-		"DT_ADDRRNGHI",
-		"DT_ADDRRNGLO",
-		"DT_AUDIT",
-		"DT_AUXILIARY",
-		"DT_BIND_NOW",
-		"DT_CHECKSUM",
-		"DT_CONFIG",
-		"DT_DEBUG",
-		"DT_DEPAUDIT",
-		"DT_ENCODING",
-		"DT_FEATURE",
-		"DT_FILTER",
-		"DT_FINI",
-		"DT_FINI_ARRAY",
-		"DT_FINI_ARRAYSZ",
-		"DT_FLAGS",
-		"DT_FLAGS_1",
-		"DT_GNU_CONFLICT",
-		"DT_GNU_CONFLICTSZ",
-		"DT_GNU_HASH",
-		"DT_GNU_LIBLIST",
-		"DT_GNU_LIBLISTSZ",
-		"DT_GNU_PRELINKED",
-		"DT_HASH",
-		"DT_HIOS",
-		"DT_HIPROC",
-		"DT_INIT",
-		"DT_INIT_ARRAY",
-		"DT_INIT_ARRAYSZ",
-		"DT_JMPREL",
-		"DT_LOOS",
-		"DT_LOPROC",
-		"DT_MIPS_AUX_DYNAMIC",
-		"DT_MIPS_BASE_ADDRESS",
-		"DT_MIPS_COMPACT_SIZE",
-		"DT_MIPS_CONFLICT",
-		"DT_MIPS_CONFLICTNO",
-		"DT_MIPS_CXX_FLAGS",
-		"DT_MIPS_DELTA_CLASS",
-		"DT_MIPS_DELTA_CLASSSYM",
-		"DT_MIPS_DELTA_CLASSSYM_NO",
-		"DT_MIPS_DELTA_CLASS_NO",
-		"DT_MIPS_DELTA_INSTANCE",
-		"DT_MIPS_DELTA_INSTANCE_NO",
-		"DT_MIPS_DELTA_RELOC",
-		"DT_MIPS_DELTA_RELOC_NO",
-		"DT_MIPS_DELTA_SYM",
-		"DT_MIPS_DELTA_SYM_NO",
-		"DT_MIPS_DYNSTR_ALIGN",
-		"DT_MIPS_FLAGS",
-		"DT_MIPS_GOTSYM",
-		"DT_MIPS_GP_VALUE",
-		"DT_MIPS_HIDDEN_GOTIDX",
-		"DT_MIPS_HIPAGENO",
-		"DT_MIPS_ICHECKSUM",
-		"DT_MIPS_INTERFACE",
-		"DT_MIPS_INTERFACE_SIZE",
-		"DT_MIPS_IVERSION",
-		"DT_MIPS_LIBLIST",
-		"DT_MIPS_LIBLISTNO",
-		"DT_MIPS_LOCALPAGE_GOTIDX",
-		"DT_MIPS_LOCAL_GOTIDX",
-		"DT_MIPS_LOCAL_GOTNO",
-		"DT_MIPS_MSYM",
-		"DT_MIPS_OPTIONS",
-		"DT_MIPS_PERF_SUFFIX",
-		"DT_MIPS_PIXIE_INIT",
-		"DT_MIPS_PLTGOT",
-		"DT_MIPS_PROTECTED_GOTIDX",
-		"DT_MIPS_RLD_MAP",
-		"DT_MIPS_RLD_MAP_REL",
-		"DT_MIPS_RLD_TEXT_RESOLVE_ADDR",
-		"DT_MIPS_RLD_VERSION",
-		"DT_MIPS_RWPLT",
-		"DT_MIPS_SYMBOL_LIB",
-		"DT_MIPS_SYMTABNO",
-		"DT_MIPS_TIME_STAMP",
-		"DT_MIPS_UNREFEXTNO",
-		"DT_MOVEENT",
-		"DT_MOVESZ",
-		"DT_MOVETAB",
-		"DT_NEEDED",
-		"DT_NULL",
-		"DT_PLTGOT",
-		"DT_PLTPAD",
-		"DT_PLTPADSZ",
-		"DT_PLTREL",
-		"DT_PLTRELSZ",
-		"DT_POSFLAG_1",
-		"DT_PPC64_GLINK",
-		"DT_PPC64_OPD",
-		"DT_PPC64_OPDSZ",
-		"DT_PPC64_OPT",
-		"DT_PPC_GOT",
-		"DT_PPC_OPT",
-		"DT_PREINIT_ARRAY",
-		"DT_PREINIT_ARRAYSZ",
-		"DT_REL",
-		"DT_RELA",
-		"DT_RELACOUNT",
-		"DT_RELAENT",
-		"DT_RELASZ",
-		"DT_RELCOUNT",
-		"DT_RELENT",
-		"DT_RELSZ",
-		"DT_RPATH",
-		"DT_RUNPATH",
-		"DT_SONAME",
-		"DT_SPARC_REGISTER",
-		"DT_STRSZ",
-		"DT_STRTAB",
-		"DT_SYMBOLIC",
-		"DT_SYMENT",
-		"DT_SYMINENT",
-		"DT_SYMINFO",
-		"DT_SYMINSZ",
-		"DT_SYMTAB",
-		"DT_SYMTAB_SHNDX",
-		"DT_TEXTREL",
-		"DT_TLSDESC_GOT",
-		"DT_TLSDESC_PLT",
-		"DT_USED",
-		"DT_VALRNGHI",
-		"DT_VALRNGLO",
-		"DT_VERDEF",
-		"DT_VERDEFNUM",
-		"DT_VERNEED",
-		"DT_VERNEEDNUM",
-		"DT_VERSYM",
-		"Data",
-		"Dyn32",
-		"Dyn64",
-		"DynFlag",
-		"DynFlag1",
-		"DynTag",
-		"EI_ABIVERSION",
-		"EI_CLASS",
-		"EI_DATA",
-		"EI_NIDENT",
-		"EI_OSABI",
-		"EI_PAD",
-		"EI_VERSION",
-		"ELFCLASS32",
-		"ELFCLASS64",
-		"ELFCLASSNONE",
-		"ELFDATA2LSB",
-		"ELFDATA2MSB",
-		"ELFDATANONE",
-		"ELFMAG",
-		"ELFOSABI_86OPEN",
-		"ELFOSABI_AIX",
-		"ELFOSABI_ARM",
-		"ELFOSABI_AROS",
-		"ELFOSABI_CLOUDABI",
-		"ELFOSABI_FENIXOS",
-		"ELFOSABI_FREEBSD",
-		"ELFOSABI_HPUX",
-		"ELFOSABI_HURD",
-		"ELFOSABI_IRIX",
-		"ELFOSABI_LINUX",
-		"ELFOSABI_MODESTO",
-		"ELFOSABI_NETBSD",
-		"ELFOSABI_NONE",
-		"ELFOSABI_NSK",
-		"ELFOSABI_OPENBSD",
-		"ELFOSABI_OPENVMS",
-		"ELFOSABI_SOLARIS",
-		"ELFOSABI_STANDALONE",
-		"ELFOSABI_TRU64",
-		"EM_386",
-		"EM_486",
-		"EM_56800EX",
-		"EM_68HC05",
-		"EM_68HC08",
-		"EM_68HC11",
-		"EM_68HC12",
-		"EM_68HC16",
-		"EM_68K",
-		"EM_78KOR",
-		"EM_8051",
-		"EM_860",
-		"EM_88K",
-		"EM_960",
-		"EM_AARCH64",
-		"EM_ALPHA",
-		"EM_ALPHA_STD",
-		"EM_ALTERA_NIOS2",
-		"EM_AMDGPU",
-		"EM_ARC",
-		"EM_ARCA",
-		"EM_ARC_COMPACT",
-		"EM_ARC_COMPACT2",
-		"EM_ARM",
-		"EM_AVR",
-		"EM_AVR32",
-		"EM_BA1",
-		"EM_BA2",
-		"EM_BLACKFIN",
-		"EM_BPF",
-		"EM_C166",
-		"EM_CDP",
-		"EM_CE",
-		"EM_CLOUDSHIELD",
-		"EM_COGE",
-		"EM_COLDFIRE",
-		"EM_COOL",
-		"EM_COREA_1ST",
-		"EM_COREA_2ND",
-		"EM_CR",
-		"EM_CR16",
-		"EM_CRAYNV2",
-		"EM_CRIS",
-		"EM_CRX",
-		"EM_CSR_KALIMBA",
-		"EM_CUDA",
-		"EM_CYPRESS_M8C",
-		"EM_D10V",
-		"EM_D30V",
-		"EM_DSP24",
-		"EM_DSPIC30F",
-		"EM_DXP",
-		"EM_ECOG1",
-		"EM_ECOG16",
-		"EM_ECOG1X",
-		"EM_ECOG2",
-		"EM_ETPU",
-		"EM_EXCESS",
-		"EM_F2MC16",
-		"EM_FIREPATH",
-		"EM_FR20",
-		"EM_FR30",
-		"EM_FT32",
-		"EM_FX66",
-		"EM_H8S",
-		"EM_H8_300",
-		"EM_H8_300H",
-		"EM_H8_500",
-		"EM_HUANY",
-		"EM_IA_64",
-		"EM_INTEL205",
-		"EM_INTEL206",
-		"EM_INTEL207",
-		"EM_INTEL208",
-		"EM_INTEL209",
-		"EM_IP2K",
-		"EM_JAVELIN",
-		"EM_K10M",
-		"EM_KM32",
-		"EM_KMX16",
-		"EM_KMX32",
-		"EM_KMX8",
-		"EM_KVARC",
-		"EM_L10M",
-		"EM_LANAI",
-		"EM_LATTICEMICO32",
-		"EM_LOONGARCH",
-		"EM_M16C",
-		"EM_M32",
-		"EM_M32C",
-		"EM_M32R",
-		"EM_MANIK",
-		"EM_MAX",
-		"EM_MAXQ30",
-		"EM_MCHP_PIC",
-		"EM_MCST_ELBRUS",
-		"EM_ME16",
-		"EM_METAG",
-		"EM_MICROBLAZE",
-		"EM_MIPS",
-		"EM_MIPS_RS3_LE",
-		"EM_MIPS_RS4_BE",
-		"EM_MIPS_X",
-		"EM_MMA",
-		"EM_MMDSP_PLUS",
-		"EM_MMIX",
-		"EM_MN10200",
-		"EM_MN10300",
-		"EM_MOXIE",
-		"EM_MSP430",
-		"EM_NCPU",
-		"EM_NDR1",
-		"EM_NDS32",
-		"EM_NONE",
-		"EM_NORC",
-		"EM_NS32K",
-		"EM_OPEN8",
-		"EM_OPENRISC",
-		"EM_PARISC",
-		"EM_PCP",
-		"EM_PDP10",
-		"EM_PDP11",
-		"EM_PDSP",
-		"EM_PJ",
-		"EM_PPC",
-		"EM_PPC64",
-		"EM_PRISM",
-		"EM_QDSP6",
-		"EM_R32C",
-		"EM_RCE",
-		"EM_RH32",
-		"EM_RISCV",
-		"EM_RL78",
-		"EM_RS08",
-		"EM_RX",
-		"EM_S370",
-		"EM_S390",
-		"EM_SCORE7",
-		"EM_SEP",
-		"EM_SE_C17",
-		"EM_SE_C33",
-		"EM_SH",
-		"EM_SHARC",
-		"EM_SLE9X",
-		"EM_SNP1K",
-		"EM_SPARC",
-		"EM_SPARC32PLUS",
-		"EM_SPARCV9",
-		"EM_ST100",
-		"EM_ST19",
-		"EM_ST200",
-		"EM_ST7",
-		"EM_ST9PLUS",
-		"EM_STARCORE",
-		"EM_STM8",
-		"EM_STXP7X",
-		"EM_SVX",
-		"EM_TILE64",
-		"EM_TILEGX",
-		"EM_TILEPRO",
-		"EM_TINYJ",
-		"EM_TI_ARP32",
-		"EM_TI_C2000",
-		"EM_TI_C5500",
-		"EM_TI_C6000",
-		"EM_TI_PRU",
-		"EM_TMM_GPP",
-		"EM_TPC",
-		"EM_TRICORE",
-		"EM_TRIMEDIA",
-		"EM_TSK3000",
-		"EM_UNICORE",
-		"EM_V800",
-		"EM_V850",
-		"EM_VAX",
-		"EM_VIDEOCORE",
-		"EM_VIDEOCORE3",
-		"EM_VIDEOCORE5",
-		"EM_VISIUM",
-		"EM_VPP500",
-		"EM_X86_64",
-		"EM_XCORE",
-		"EM_XGATE",
-		"EM_XIMO16",
-		"EM_XTENSA",
-		"EM_Z80",
-		"EM_ZSP",
-		"ET_CORE",
-		"ET_DYN",
-		"ET_EXEC",
-		"ET_HIOS",
-		"ET_HIPROC",
-		"ET_LOOS",
-		"ET_LOPROC",
-		"ET_NONE",
-		"ET_REL",
-		"EV_CURRENT",
-		"EV_NONE",
-		"ErrNoSymbols",
-		"File",
-		"FileHeader",
-		"FormatError",
-		"Header32",
-		"Header64",
-		"ImportedSymbol",
-		"Machine",
-		"NT_FPREGSET",
-		"NT_PRPSINFO",
-		"NT_PRSTATUS",
-		"NType",
-		"NewFile",
-		"OSABI",
-		"Open",
-		"PF_MASKOS",
-		"PF_MASKPROC",
-		"PF_R",
-		"PF_W",
-		"PF_X",
-		"PT_AARCH64_ARCHEXT",
-		"PT_AARCH64_UNWIND",
-		"PT_ARM_ARCHEXT",
-		"PT_ARM_EXIDX",
-		"PT_DYNAMIC",
-		"PT_GNU_EH_FRAME",
-		"PT_GNU_MBIND_HI",
-		"PT_GNU_MBIND_LO",
-		"PT_GNU_PROPERTY",
-		"PT_GNU_RELRO",
-		"PT_GNU_STACK",
-		"PT_HIOS",
-		"PT_HIPROC",
-		"PT_INTERP",
-		"PT_LOAD",
-		"PT_LOOS",
-		"PT_LOPROC",
-		"PT_MIPS_ABIFLAGS",
-		"PT_MIPS_OPTIONS",
-		"PT_MIPS_REGINFO",
-		"PT_MIPS_RTPROC",
-		"PT_NOTE",
-		"PT_NULL",
-		"PT_OPENBSD_BOOTDATA",
-		"PT_OPENBSD_RANDOMIZE",
-		"PT_OPENBSD_WXNEEDED",
-		"PT_PAX_FLAGS",
-		"PT_PHDR",
-		"PT_S390_PGSTE",
-		"PT_SHLIB",
-		"PT_SUNWSTACK",
-		"PT_SUNW_EH_FRAME",
-		"PT_TLS",
-		"Prog",
-		"Prog32",
-		"Prog64",
-		"ProgFlag",
-		"ProgHeader",
-		"ProgType",
-		"R_386",
-		"R_386_16",
-		"R_386_32",
-		"R_386_32PLT",
-		"R_386_8",
-		"R_386_COPY",
-		"R_386_GLOB_DAT",
-		"R_386_GOT32",
-		"R_386_GOT32X",
-		"R_386_GOTOFF",
-		"R_386_GOTPC",
-		"R_386_IRELATIVE",
-		"R_386_JMP_SLOT",
-		"R_386_NONE",
-		"R_386_PC16",
-		"R_386_PC32",
-		"R_386_PC8",
-		"R_386_PLT32",
-		"R_386_RELATIVE",
-		"R_386_SIZE32",
-		"R_386_TLS_DESC",
-		"R_386_TLS_DESC_CALL",
-		"R_386_TLS_DTPMOD32",
-		"R_386_TLS_DTPOFF32",
-		"R_386_TLS_GD",
-		"R_386_TLS_GD_32",
-		"R_386_TLS_GD_CALL",
-		"R_386_TLS_GD_POP",
-		"R_386_TLS_GD_PUSH",
-		"R_386_TLS_GOTDESC",
-		"R_386_TLS_GOTIE",
-		"R_386_TLS_IE",
-		"R_386_TLS_IE_32",
-		"R_386_TLS_LDM",
-		"R_386_TLS_LDM_32",
-		"R_386_TLS_LDM_CALL",
-		"R_386_TLS_LDM_POP",
-		"R_386_TLS_LDM_PUSH",
-		"R_386_TLS_LDO_32",
-		"R_386_TLS_LE",
-		"R_386_TLS_LE_32",
-		"R_386_TLS_TPOFF",
-		"R_386_TLS_TPOFF32",
-		"R_390",
-		"R_390_12",
-		"R_390_16",
-		"R_390_20",
-		"R_390_32",
-		"R_390_64",
-		"R_390_8",
-		"R_390_COPY",
-		"R_390_GLOB_DAT",
-		"R_390_GOT12",
-		"R_390_GOT16",
-		"R_390_GOT20",
-		"R_390_GOT32",
-		"R_390_GOT64",
-		"R_390_GOTENT",
-		"R_390_GOTOFF",
-		"R_390_GOTOFF16",
-		"R_390_GOTOFF64",
-		"R_390_GOTPC",
-		"R_390_GOTPCDBL",
-		"R_390_GOTPLT12",
-		"R_390_GOTPLT16",
-		"R_390_GOTPLT20",
-		"R_390_GOTPLT32",
-		"R_390_GOTPLT64",
-		"R_390_GOTPLTENT",
-		"R_390_GOTPLTOFF16",
-		"R_390_GOTPLTOFF32",
-		"R_390_GOTPLTOFF64",
-		"R_390_JMP_SLOT",
-		"R_390_NONE",
-		"R_390_PC16",
-		"R_390_PC16DBL",
-		"R_390_PC32",
-		"R_390_PC32DBL",
-		"R_390_PC64",
-		"R_390_PLT16DBL",
-		"R_390_PLT32",
-		"R_390_PLT32DBL",
-		"R_390_PLT64",
-		"R_390_RELATIVE",
-		"R_390_TLS_DTPMOD",
-		"R_390_TLS_DTPOFF",
-		"R_390_TLS_GD32",
-		"R_390_TLS_GD64",
-		"R_390_TLS_GDCALL",
-		"R_390_TLS_GOTIE12",
-		"R_390_TLS_GOTIE20",
-		"R_390_TLS_GOTIE32",
-		"R_390_TLS_GOTIE64",
-		"R_390_TLS_IE32",
-		"R_390_TLS_IE64",
-		"R_390_TLS_IEENT",
-		"R_390_TLS_LDCALL",
-		"R_390_TLS_LDM32",
-		"R_390_TLS_LDM64",
-		"R_390_TLS_LDO32",
-		"R_390_TLS_LDO64",
-		"R_390_TLS_LE32",
-		"R_390_TLS_LE64",
-		"R_390_TLS_LOAD",
-		"R_390_TLS_TPOFF",
-		"R_AARCH64",
-		"R_AARCH64_ABS16",
-		"R_AARCH64_ABS32",
-		"R_AARCH64_ABS64",
-		"R_AARCH64_ADD_ABS_LO12_NC",
-		"R_AARCH64_ADR_GOT_PAGE",
-		"R_AARCH64_ADR_PREL_LO21",
-		"R_AARCH64_ADR_PREL_PG_HI21",
-		"R_AARCH64_ADR_PREL_PG_HI21_NC",
-		"R_AARCH64_CALL26",
-		"R_AARCH64_CONDBR19",
-		"R_AARCH64_COPY",
-		"R_AARCH64_GLOB_DAT",
-		"R_AARCH64_GOT_LD_PREL19",
-		"R_AARCH64_IRELATIVE",
-		"R_AARCH64_JUMP26",
-		"R_AARCH64_JUMP_SLOT",
-		"R_AARCH64_LD64_GOTOFF_LO15",
-		"R_AARCH64_LD64_GOTPAGE_LO15",
-		"R_AARCH64_LD64_GOT_LO12_NC",
-		"R_AARCH64_LDST128_ABS_LO12_NC",
-		"R_AARCH64_LDST16_ABS_LO12_NC",
-		"R_AARCH64_LDST32_ABS_LO12_NC",
-		"R_AARCH64_LDST64_ABS_LO12_NC",
-		"R_AARCH64_LDST8_ABS_LO12_NC",
-		"R_AARCH64_LD_PREL_LO19",
-		"R_AARCH64_MOVW_SABS_G0",
-		"R_AARCH64_MOVW_SABS_G1",
-		"R_AARCH64_MOVW_SABS_G2",
-		"R_AARCH64_MOVW_UABS_G0",
-		"R_AARCH64_MOVW_UABS_G0_NC",
-		"R_AARCH64_MOVW_UABS_G1",
-		"R_AARCH64_MOVW_UABS_G1_NC",
-		"R_AARCH64_MOVW_UABS_G2",
-		"R_AARCH64_MOVW_UABS_G2_NC",
-		"R_AARCH64_MOVW_UABS_G3",
-		"R_AARCH64_NONE",
-		"R_AARCH64_NULL",
-		"R_AARCH64_P32_ABS16",
-		"R_AARCH64_P32_ABS32",
-		"R_AARCH64_P32_ADD_ABS_LO12_NC",
-		"R_AARCH64_P32_ADR_GOT_PAGE",
-		"R_AARCH64_P32_ADR_PREL_LO21",
-		"R_AARCH64_P32_ADR_PREL_PG_HI21",
-		"R_AARCH64_P32_CALL26",
-		"R_AARCH64_P32_CONDBR19",
-		"R_AARCH64_P32_COPY",
-		"R_AARCH64_P32_GLOB_DAT",
-		"R_AARCH64_P32_GOT_LD_PREL19",
-		"R_AARCH64_P32_IRELATIVE",
-		"R_AARCH64_P32_JUMP26",
-		"R_AARCH64_P32_JUMP_SLOT",
-		"R_AARCH64_P32_LD32_GOT_LO12_NC",
-		"R_AARCH64_P32_LDST128_ABS_LO12_NC",
-		"R_AARCH64_P32_LDST16_ABS_LO12_NC",
-		"R_AARCH64_P32_LDST32_ABS_LO12_NC",
-		"R_AARCH64_P32_LDST64_ABS_LO12_NC",
-		"R_AARCH64_P32_LDST8_ABS_LO12_NC",
-		"R_AARCH64_P32_LD_PREL_LO19",
-		"R_AARCH64_P32_MOVW_SABS_G0",
-		"R_AARCH64_P32_MOVW_UABS_G0",
-		"R_AARCH64_P32_MOVW_UABS_G0_NC",
-		"R_AARCH64_P32_MOVW_UABS_G1",
-		"R_AARCH64_P32_PREL16",
-		"R_AARCH64_P32_PREL32",
-		"R_AARCH64_P32_RELATIVE",
-		"R_AARCH64_P32_TLSDESC",
-		"R_AARCH64_P32_TLSDESC_ADD_LO12_NC",
-		"R_AARCH64_P32_TLSDESC_ADR_PAGE21",
-		"R_AARCH64_P32_TLSDESC_ADR_PREL21",
-		"R_AARCH64_P32_TLSDESC_CALL",
-		"R_AARCH64_P32_TLSDESC_LD32_LO12_NC",
-		"R_AARCH64_P32_TLSDESC_LD_PREL19",
-		"R_AARCH64_P32_TLSGD_ADD_LO12_NC",
-		"R_AARCH64_P32_TLSGD_ADR_PAGE21",
-		"R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21",
-		"R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC",
-		"R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19",
-		"R_AARCH64_P32_TLSLE_ADD_TPREL_HI12",
-		"R_AARCH64_P32_TLSLE_ADD_TPREL_LO12",
-		"R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC",
-		"R_AARCH64_P32_TLSLE_MOVW_TPREL_G0",
-		"R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC",
-		"R_AARCH64_P32_TLSLE_MOVW_TPREL_G1",
-		"R_AARCH64_P32_TLS_DTPMOD",
-		"R_AARCH64_P32_TLS_DTPREL",
-		"R_AARCH64_P32_TLS_TPREL",
-		"R_AARCH64_P32_TSTBR14",
-		"R_AARCH64_PREL16",
-		"R_AARCH64_PREL32",
-		"R_AARCH64_PREL64",
-		"R_AARCH64_RELATIVE",
-		"R_AARCH64_TLSDESC",
-		"R_AARCH64_TLSDESC_ADD",
-		"R_AARCH64_TLSDESC_ADD_LO12_NC",
-		"R_AARCH64_TLSDESC_ADR_PAGE21",
-		"R_AARCH64_TLSDESC_ADR_PREL21",
-		"R_AARCH64_TLSDESC_CALL",
-		"R_AARCH64_TLSDESC_LD64_LO12_NC",
-		"R_AARCH64_TLSDESC_LDR",
-		"R_AARCH64_TLSDESC_LD_PREL19",
-		"R_AARCH64_TLSDESC_OFF_G0_NC",
-		"R_AARCH64_TLSDESC_OFF_G1",
-		"R_AARCH64_TLSGD_ADD_LO12_NC",
-		"R_AARCH64_TLSGD_ADR_PAGE21",
-		"R_AARCH64_TLSGD_ADR_PREL21",
-		"R_AARCH64_TLSGD_MOVW_G0_NC",
-		"R_AARCH64_TLSGD_MOVW_G1",
-		"R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21",
-		"R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC",
-		"R_AARCH64_TLSIE_LD_GOTTPREL_PREL19",
-		"R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC",
-		"R_AARCH64_TLSIE_MOVW_GOTTPREL_G1",
-		"R_AARCH64_TLSLD_ADR_PAGE21",
-		"R_AARCH64_TLSLD_ADR_PREL21",
-		"R_AARCH64_TLSLD_LDST128_DTPREL_LO12",
-		"R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC",
-		"R_AARCH64_TLSLE_ADD_TPREL_HI12",
-		"R_AARCH64_TLSLE_ADD_TPREL_LO12",
-		"R_AARCH64_TLSLE_ADD_TPREL_LO12_NC",
-		"R_AARCH64_TLSLE_LDST128_TPREL_LO12",
-		"R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC",
-		"R_AARCH64_TLSLE_MOVW_TPREL_G0",
-		"R_AARCH64_TLSLE_MOVW_TPREL_G0_NC",
-		"R_AARCH64_TLSLE_MOVW_TPREL_G1",
-		"R_AARCH64_TLSLE_MOVW_TPREL_G1_NC",
-		"R_AARCH64_TLSLE_MOVW_TPREL_G2",
-		"R_AARCH64_TLS_DTPMOD64",
-		"R_AARCH64_TLS_DTPREL64",
-		"R_AARCH64_TLS_TPREL64",
-		"R_AARCH64_TSTBR14",
-		"R_ALPHA",
-		"R_ALPHA_BRADDR",
-		"R_ALPHA_COPY",
-		"R_ALPHA_GLOB_DAT",
-		"R_ALPHA_GPDISP",
-		"R_ALPHA_GPREL32",
-		"R_ALPHA_GPRELHIGH",
-		"R_ALPHA_GPRELLOW",
-		"R_ALPHA_GPVALUE",
-		"R_ALPHA_HINT",
-		"R_ALPHA_IMMED_BR_HI32",
-		"R_ALPHA_IMMED_GP_16",
-		"R_ALPHA_IMMED_GP_HI32",
-		"R_ALPHA_IMMED_LO32",
-		"R_ALPHA_IMMED_SCN_HI32",
-		"R_ALPHA_JMP_SLOT",
-		"R_ALPHA_LITERAL",
-		"R_ALPHA_LITUSE",
-		"R_ALPHA_NONE",
-		"R_ALPHA_OP_PRSHIFT",
-		"R_ALPHA_OP_PSUB",
-		"R_ALPHA_OP_PUSH",
-		"R_ALPHA_OP_STORE",
-		"R_ALPHA_REFLONG",
-		"R_ALPHA_REFQUAD",
-		"R_ALPHA_RELATIVE",
-		"R_ALPHA_SREL16",
-		"R_ALPHA_SREL32",
-		"R_ALPHA_SREL64",
-		"R_ARM",
-		"R_ARM_ABS12",
-		"R_ARM_ABS16",
-		"R_ARM_ABS32",
-		"R_ARM_ABS32_NOI",
-		"R_ARM_ABS8",
-		"R_ARM_ALU_PCREL_15_8",
-		"R_ARM_ALU_PCREL_23_15",
-		"R_ARM_ALU_PCREL_7_0",
-		"R_ARM_ALU_PC_G0",
-		"R_ARM_ALU_PC_G0_NC",
-		"R_ARM_ALU_PC_G1",
-		"R_ARM_ALU_PC_G1_NC",
-		"R_ARM_ALU_PC_G2",
-		"R_ARM_ALU_SBREL_19_12_NC",
-		"R_ARM_ALU_SBREL_27_20_CK",
-		"R_ARM_ALU_SB_G0",
-		"R_ARM_ALU_SB_G0_NC",
-		"R_ARM_ALU_SB_G1",
-		"R_ARM_ALU_SB_G1_NC",
-		"R_ARM_ALU_SB_G2",
-		"R_ARM_AMP_VCALL9",
-		"R_ARM_BASE_ABS",
-		"R_ARM_CALL",
-		"R_ARM_COPY",
-		"R_ARM_GLOB_DAT",
-		"R_ARM_GNU_VTENTRY",
-		"R_ARM_GNU_VTINHERIT",
-		"R_ARM_GOT32",
-		"R_ARM_GOTOFF",
-		"R_ARM_GOTOFF12",
-		"R_ARM_GOTPC",
-		"R_ARM_GOTRELAX",
-		"R_ARM_GOT_ABS",
-		"R_ARM_GOT_BREL12",
-		"R_ARM_GOT_PREL",
-		"R_ARM_IRELATIVE",
-		"R_ARM_JUMP24",
-		"R_ARM_JUMP_SLOT",
-		"R_ARM_LDC_PC_G0",
-		"R_ARM_LDC_PC_G1",
-		"R_ARM_LDC_PC_G2",
-		"R_ARM_LDC_SB_G0",
-		"R_ARM_LDC_SB_G1",
-		"R_ARM_LDC_SB_G2",
-		"R_ARM_LDRS_PC_G0",
-		"R_ARM_LDRS_PC_G1",
-		"R_ARM_LDRS_PC_G2",
-		"R_ARM_LDRS_SB_G0",
-		"R_ARM_LDRS_SB_G1",
-		"R_ARM_LDRS_SB_G2",
-		"R_ARM_LDR_PC_G1",
-		"R_ARM_LDR_PC_G2",
-		"R_ARM_LDR_SBREL_11_10_NC",
-		"R_ARM_LDR_SB_G0",
-		"R_ARM_LDR_SB_G1",
-		"R_ARM_LDR_SB_G2",
-		"R_ARM_ME_TOO",
-		"R_ARM_MOVT_ABS",
-		"R_ARM_MOVT_BREL",
-		"R_ARM_MOVT_PREL",
-		"R_ARM_MOVW_ABS_NC",
-		"R_ARM_MOVW_BREL",
-		"R_ARM_MOVW_BREL_NC",
-		"R_ARM_MOVW_PREL_NC",
-		"R_ARM_NONE",
-		"R_ARM_PC13",
-		"R_ARM_PC24",
-		"R_ARM_PLT32",
-		"R_ARM_PLT32_ABS",
-		"R_ARM_PREL31",
-		"R_ARM_PRIVATE_0",
-		"R_ARM_PRIVATE_1",
-		"R_ARM_PRIVATE_10",
-		"R_ARM_PRIVATE_11",
-		"R_ARM_PRIVATE_12",
-		"R_ARM_PRIVATE_13",
-		"R_ARM_PRIVATE_14",
-		"R_ARM_PRIVATE_15",
-		"R_ARM_PRIVATE_2",
-		"R_ARM_PRIVATE_3",
-		"R_ARM_PRIVATE_4",
-		"R_ARM_PRIVATE_5",
-		"R_ARM_PRIVATE_6",
-		"R_ARM_PRIVATE_7",
-		"R_ARM_PRIVATE_8",
-		"R_ARM_PRIVATE_9",
-		"R_ARM_RABS32",
-		"R_ARM_RBASE",
-		"R_ARM_REL32",
-		"R_ARM_REL32_NOI",
-		"R_ARM_RELATIVE",
-		"R_ARM_RPC24",
-		"R_ARM_RREL32",
-		"R_ARM_RSBREL32",
-		"R_ARM_RXPC25",
-		"R_ARM_SBREL31",
-		"R_ARM_SBREL32",
-		"R_ARM_SWI24",
-		"R_ARM_TARGET1",
-		"R_ARM_TARGET2",
-		"R_ARM_THM_ABS5",
-		"R_ARM_THM_ALU_ABS_G0_NC",
-		"R_ARM_THM_ALU_ABS_G1_NC",
-		"R_ARM_THM_ALU_ABS_G2_NC",
-		"R_ARM_THM_ALU_ABS_G3",
-		"R_ARM_THM_ALU_PREL_11_0",
-		"R_ARM_THM_GOT_BREL12",
-		"R_ARM_THM_JUMP11",
-		"R_ARM_THM_JUMP19",
-		"R_ARM_THM_JUMP24",
-		"R_ARM_THM_JUMP6",
-		"R_ARM_THM_JUMP8",
-		"R_ARM_THM_MOVT_ABS",
-		"R_ARM_THM_MOVT_BREL",
-		"R_ARM_THM_MOVT_PREL",
-		"R_ARM_THM_MOVW_ABS_NC",
-		"R_ARM_THM_MOVW_BREL",
-		"R_ARM_THM_MOVW_BREL_NC",
-		"R_ARM_THM_MOVW_PREL_NC",
-		"R_ARM_THM_PC12",
-		"R_ARM_THM_PC22",
-		"R_ARM_THM_PC8",
-		"R_ARM_THM_RPC22",
-		"R_ARM_THM_SWI8",
-		"R_ARM_THM_TLS_CALL",
-		"R_ARM_THM_TLS_DESCSEQ16",
-		"R_ARM_THM_TLS_DESCSEQ32",
-		"R_ARM_THM_XPC22",
-		"R_ARM_TLS_CALL",
-		"R_ARM_TLS_DESCSEQ",
-		"R_ARM_TLS_DTPMOD32",
-		"R_ARM_TLS_DTPOFF32",
-		"R_ARM_TLS_GD32",
-		"R_ARM_TLS_GOTDESC",
-		"R_ARM_TLS_IE12GP",
-		"R_ARM_TLS_IE32",
-		"R_ARM_TLS_LDM32",
-		"R_ARM_TLS_LDO12",
-		"R_ARM_TLS_LDO32",
-		"R_ARM_TLS_LE12",
-		"R_ARM_TLS_LE32",
-		"R_ARM_TLS_TPOFF32",
-		"R_ARM_V4BX",
-		"R_ARM_XPC25",
-		"R_INFO",
-		"R_INFO32",
-		"R_LARCH",
-		"R_LARCH_32",
-		"R_LARCH_32_PCREL",
-		"R_LARCH_64",
-		"R_LARCH_64_PCREL",
-		"R_LARCH_ABS64_HI12",
-		"R_LARCH_ABS64_LO20",
-		"R_LARCH_ABS_HI20",
-		"R_LARCH_ABS_LO12",
-		"R_LARCH_ADD16",
-		"R_LARCH_ADD24",
-		"R_LARCH_ADD32",
-		"R_LARCH_ADD6",
-		"R_LARCH_ADD64",
-		"R_LARCH_ADD8",
-		"R_LARCH_ADD_ULEB128",
-		"R_LARCH_ALIGN",
-		"R_LARCH_B16",
-		"R_LARCH_B21",
-		"R_LARCH_B26",
-		"R_LARCH_CFA",
-		"R_LARCH_COPY",
-		"R_LARCH_DELETE",
-		"R_LARCH_GNU_VTENTRY",
-		"R_LARCH_GNU_VTINHERIT",
-		"R_LARCH_GOT64_HI12",
-		"R_LARCH_GOT64_LO20",
-		"R_LARCH_GOT64_PC_HI12",
-		"R_LARCH_GOT64_PC_LO20",
-		"R_LARCH_GOT_HI20",
-		"R_LARCH_GOT_LO12",
-		"R_LARCH_GOT_PC_HI20",
-		"R_LARCH_GOT_PC_LO12",
-		"R_LARCH_IRELATIVE",
-		"R_LARCH_JUMP_SLOT",
-		"R_LARCH_MARK_LA",
-		"R_LARCH_MARK_PCREL",
-		"R_LARCH_NONE",
-		"R_LARCH_PCALA64_HI12",
-		"R_LARCH_PCALA64_LO20",
-		"R_LARCH_PCALA_HI20",
-		"R_LARCH_PCALA_LO12",
-		"R_LARCH_PCREL20_S2",
-		"R_LARCH_RELATIVE",
-		"R_LARCH_RELAX",
-		"R_LARCH_SOP_ADD",
-		"R_LARCH_SOP_AND",
-		"R_LARCH_SOP_ASSERT",
-		"R_LARCH_SOP_IF_ELSE",
-		"R_LARCH_SOP_NOT",
-		"R_LARCH_SOP_POP_32_S_0_10_10_16_S2",
-		"R_LARCH_SOP_POP_32_S_0_5_10_16_S2",
-		"R_LARCH_SOP_POP_32_S_10_12",
-		"R_LARCH_SOP_POP_32_S_10_16",
-		"R_LARCH_SOP_POP_32_S_10_16_S2",
-		"R_LARCH_SOP_POP_32_S_10_5",
-		"R_LARCH_SOP_POP_32_S_5_20",
-		"R_LARCH_SOP_POP_32_U",
-		"R_LARCH_SOP_POP_32_U_10_12",
-		"R_LARCH_SOP_PUSH_ABSOLUTE",
-		"R_LARCH_SOP_PUSH_DUP",
-		"R_LARCH_SOP_PUSH_GPREL",
-		"R_LARCH_SOP_PUSH_PCREL",
-		"R_LARCH_SOP_PUSH_PLT_PCREL",
-		"R_LARCH_SOP_PUSH_TLS_GD",
-		"R_LARCH_SOP_PUSH_TLS_GOT",
-		"R_LARCH_SOP_PUSH_TLS_TPREL",
-		"R_LARCH_SOP_SL",
-		"R_LARCH_SOP_SR",
-		"R_LARCH_SOP_SUB",
-		"R_LARCH_SUB16",
-		"R_LARCH_SUB24",
-		"R_LARCH_SUB32",
-		"R_LARCH_SUB6",
-		"R_LARCH_SUB64",
-		"R_LARCH_SUB8",
-		"R_LARCH_SUB_ULEB128",
-		"R_LARCH_TLS_DTPMOD32",
-		"R_LARCH_TLS_DTPMOD64",
-		"R_LARCH_TLS_DTPREL32",
-		"R_LARCH_TLS_DTPREL64",
-		"R_LARCH_TLS_GD_HI20",
-		"R_LARCH_TLS_GD_PC_HI20",
-		"R_LARCH_TLS_IE64_HI12",
-		"R_LARCH_TLS_IE64_LO20",
-		"R_LARCH_TLS_IE64_PC_HI12",
-		"R_LARCH_TLS_IE64_PC_LO20",
-		"R_LARCH_TLS_IE_HI20",
-		"R_LARCH_TLS_IE_LO12",
-		"R_LARCH_TLS_IE_PC_HI20",
-		"R_LARCH_TLS_IE_PC_LO12",
-		"R_LARCH_TLS_LD_HI20",
-		"R_LARCH_TLS_LD_PC_HI20",
-		"R_LARCH_TLS_LE64_HI12",
-		"R_LARCH_TLS_LE64_LO20",
-		"R_LARCH_TLS_LE_HI20",
-		"R_LARCH_TLS_LE_LO12",
-		"R_LARCH_TLS_TPREL32",
-		"R_LARCH_TLS_TPREL64",
-		"R_MIPS",
-		"R_MIPS_16",
-		"R_MIPS_26",
-		"R_MIPS_32",
-		"R_MIPS_64",
-		"R_MIPS_ADD_IMMEDIATE",
-		"R_MIPS_CALL16",
-		"R_MIPS_CALL_HI16",
-		"R_MIPS_CALL_LO16",
-		"R_MIPS_DELETE",
-		"R_MIPS_GOT16",
-		"R_MIPS_GOT_DISP",
-		"R_MIPS_GOT_HI16",
-		"R_MIPS_GOT_LO16",
-		"R_MIPS_GOT_OFST",
-		"R_MIPS_GOT_PAGE",
-		"R_MIPS_GPREL16",
-		"R_MIPS_GPREL32",
-		"R_MIPS_HI16",
-		"R_MIPS_HIGHER",
-		"R_MIPS_HIGHEST",
-		"R_MIPS_INSERT_A",
-		"R_MIPS_INSERT_B",
-		"R_MIPS_JALR",
-		"R_MIPS_LITERAL",
-		"R_MIPS_LO16",
-		"R_MIPS_NONE",
-		"R_MIPS_PC16",
-		"R_MIPS_PC32",
-		"R_MIPS_PJUMP",
-		"R_MIPS_REL16",
-		"R_MIPS_REL32",
-		"R_MIPS_RELGOT",
-		"R_MIPS_SCN_DISP",
-		"R_MIPS_SHIFT5",
-		"R_MIPS_SHIFT6",
-		"R_MIPS_SUB",
-		"R_MIPS_TLS_DTPMOD32",
-		"R_MIPS_TLS_DTPMOD64",
-		"R_MIPS_TLS_DTPREL32",
-		"R_MIPS_TLS_DTPREL64",
-		"R_MIPS_TLS_DTPREL_HI16",
-		"R_MIPS_TLS_DTPREL_LO16",
-		"R_MIPS_TLS_GD",
-		"R_MIPS_TLS_GOTTPREL",
-		"R_MIPS_TLS_LDM",
-		"R_MIPS_TLS_TPREL32",
-		"R_MIPS_TLS_TPREL64",
-		"R_MIPS_TLS_TPREL_HI16",
-		"R_MIPS_TLS_TPREL_LO16",
-		"R_PPC",
-		"R_PPC64",
-		"R_PPC64_ADDR14",
-		"R_PPC64_ADDR14_BRNTAKEN",
-		"R_PPC64_ADDR14_BRTAKEN",
-		"R_PPC64_ADDR16",
-		"R_PPC64_ADDR16_DS",
-		"R_PPC64_ADDR16_HA",
-		"R_PPC64_ADDR16_HI",
-		"R_PPC64_ADDR16_HIGH",
-		"R_PPC64_ADDR16_HIGHA",
-		"R_PPC64_ADDR16_HIGHER",
-		"R_PPC64_ADDR16_HIGHER34",
-		"R_PPC64_ADDR16_HIGHERA",
-		"R_PPC64_ADDR16_HIGHERA34",
-		"R_PPC64_ADDR16_HIGHEST",
-		"R_PPC64_ADDR16_HIGHEST34",
-		"R_PPC64_ADDR16_HIGHESTA",
-		"R_PPC64_ADDR16_HIGHESTA34",
-		"R_PPC64_ADDR16_LO",
-		"R_PPC64_ADDR16_LO_DS",
-		"R_PPC64_ADDR24",
-		"R_PPC64_ADDR32",
-		"R_PPC64_ADDR64",
-		"R_PPC64_ADDR64_LOCAL",
-		"R_PPC64_COPY",
-		"R_PPC64_D28",
-		"R_PPC64_D34",
-		"R_PPC64_D34_HA30",
-		"R_PPC64_D34_HI30",
-		"R_PPC64_D34_LO",
-		"R_PPC64_DTPMOD64",
-		"R_PPC64_DTPREL16",
-		"R_PPC64_DTPREL16_DS",
-		"R_PPC64_DTPREL16_HA",
-		"R_PPC64_DTPREL16_HI",
-		"R_PPC64_DTPREL16_HIGH",
-		"R_PPC64_DTPREL16_HIGHA",
-		"R_PPC64_DTPREL16_HIGHER",
-		"R_PPC64_DTPREL16_HIGHERA",
-		"R_PPC64_DTPREL16_HIGHEST",
-		"R_PPC64_DTPREL16_HIGHESTA",
-		"R_PPC64_DTPREL16_LO",
-		"R_PPC64_DTPREL16_LO_DS",
-		"R_PPC64_DTPREL34",
-		"R_PPC64_DTPREL64",
-		"R_PPC64_ENTRY",
-		"R_PPC64_GLOB_DAT",
-		"R_PPC64_GNU_VTENTRY",
-		"R_PPC64_GNU_VTINHERIT",
-		"R_PPC64_GOT16",
-		"R_PPC64_GOT16_DS",
-		"R_PPC64_GOT16_HA",
-		"R_PPC64_GOT16_HI",
-		"R_PPC64_GOT16_LO",
-		"R_PPC64_GOT16_LO_DS",
-		"R_PPC64_GOT_DTPREL16_DS",
-		"R_PPC64_GOT_DTPREL16_HA",
-		"R_PPC64_GOT_DTPREL16_HI",
-		"R_PPC64_GOT_DTPREL16_LO_DS",
-		"R_PPC64_GOT_DTPREL_PCREL34",
-		"R_PPC64_GOT_PCREL34",
-		"R_PPC64_GOT_TLSGD16",
-		"R_PPC64_GOT_TLSGD16_HA",
-		"R_PPC64_GOT_TLSGD16_HI",
-		"R_PPC64_GOT_TLSGD16_LO",
-		"R_PPC64_GOT_TLSGD_PCREL34",
-		"R_PPC64_GOT_TLSLD16",
-		"R_PPC64_GOT_TLSLD16_HA",
-		"R_PPC64_GOT_TLSLD16_HI",
-		"R_PPC64_GOT_TLSLD16_LO",
-		"R_PPC64_GOT_TLSLD_PCREL34",
-		"R_PPC64_GOT_TPREL16_DS",
-		"R_PPC64_GOT_TPREL16_HA",
-		"R_PPC64_GOT_TPREL16_HI",
-		"R_PPC64_GOT_TPREL16_LO_DS",
-		"R_PPC64_GOT_TPREL_PCREL34",
-		"R_PPC64_IRELATIVE",
-		"R_PPC64_JMP_IREL",
-		"R_PPC64_JMP_SLOT",
-		"R_PPC64_NONE",
-		"R_PPC64_PCREL28",
-		"R_PPC64_PCREL34",
-		"R_PPC64_PCREL_OPT",
-		"R_PPC64_PLT16_HA",
-		"R_PPC64_PLT16_HI",
-		"R_PPC64_PLT16_LO",
-		"R_PPC64_PLT16_LO_DS",
-		"R_PPC64_PLT32",
-		"R_PPC64_PLT64",
-		"R_PPC64_PLTCALL",
-		"R_PPC64_PLTCALL_NOTOC",
-		"R_PPC64_PLTGOT16",
-		"R_PPC64_PLTGOT16_DS",
-		"R_PPC64_PLTGOT16_HA",
-		"R_PPC64_PLTGOT16_HI",
-		"R_PPC64_PLTGOT16_LO",
-		"R_PPC64_PLTGOT_LO_DS",
-		"R_PPC64_PLTREL32",
-		"R_PPC64_PLTREL64",
-		"R_PPC64_PLTSEQ",
-		"R_PPC64_PLTSEQ_NOTOC",
-		"R_PPC64_PLT_PCREL34",
-		"R_PPC64_PLT_PCREL34_NOTOC",
-		"R_PPC64_REL14",
-		"R_PPC64_REL14_BRNTAKEN",
-		"R_PPC64_REL14_BRTAKEN",
-		"R_PPC64_REL16",
-		"R_PPC64_REL16DX_HA",
-		"R_PPC64_REL16_HA",
-		"R_PPC64_REL16_HI",
-		"R_PPC64_REL16_HIGH",
-		"R_PPC64_REL16_HIGHA",
-		"R_PPC64_REL16_HIGHER",
-		"R_PPC64_REL16_HIGHER34",
-		"R_PPC64_REL16_HIGHERA",
-		"R_PPC64_REL16_HIGHERA34",
-		"R_PPC64_REL16_HIGHEST",
-		"R_PPC64_REL16_HIGHEST34",
-		"R_PPC64_REL16_HIGHESTA",
-		"R_PPC64_REL16_HIGHESTA34",
-		"R_PPC64_REL16_LO",
-		"R_PPC64_REL24",
-		"R_PPC64_REL24_NOTOC",
-		"R_PPC64_REL24_P9NOTOC",
-		"R_PPC64_REL30",
-		"R_PPC64_REL32",
-		"R_PPC64_REL64",
-		"R_PPC64_RELATIVE",
-		"R_PPC64_SECTOFF",
-		"R_PPC64_SECTOFF_DS",
-		"R_PPC64_SECTOFF_HA",
-		"R_PPC64_SECTOFF_HI",
-		"R_PPC64_SECTOFF_LO",
-		"R_PPC64_SECTOFF_LO_DS",
-		"R_PPC64_TLS",
-		"R_PPC64_TLSGD",
-		"R_PPC64_TLSLD",
-		"R_PPC64_TOC",
-		"R_PPC64_TOC16",
-		"R_PPC64_TOC16_DS",
-		"R_PPC64_TOC16_HA",
-		"R_PPC64_TOC16_HI",
-		"R_PPC64_TOC16_LO",
-		"R_PPC64_TOC16_LO_DS",
-		"R_PPC64_TOCSAVE",
-		"R_PPC64_TPREL16",
-		"R_PPC64_TPREL16_DS",
-		"R_PPC64_TPREL16_HA",
-		"R_PPC64_TPREL16_HI",
-		"R_PPC64_TPREL16_HIGH",
-		"R_PPC64_TPREL16_HIGHA",
-		"R_PPC64_TPREL16_HIGHER",
-		"R_PPC64_TPREL16_HIGHERA",
-		"R_PPC64_TPREL16_HIGHEST",
-		"R_PPC64_TPREL16_HIGHESTA",
-		"R_PPC64_TPREL16_LO",
-		"R_PPC64_TPREL16_LO_DS",
-		"R_PPC64_TPREL34",
-		"R_PPC64_TPREL64",
-		"R_PPC64_UADDR16",
-		"R_PPC64_UADDR32",
-		"R_PPC64_UADDR64",
-		"R_PPC_ADDR14",
-		"R_PPC_ADDR14_BRNTAKEN",
-		"R_PPC_ADDR14_BRTAKEN",
-		"R_PPC_ADDR16",
-		"R_PPC_ADDR16_HA",
-		"R_PPC_ADDR16_HI",
-		"R_PPC_ADDR16_LO",
-		"R_PPC_ADDR24",
-		"R_PPC_ADDR32",
-		"R_PPC_COPY",
-		"R_PPC_DTPMOD32",
-		"R_PPC_DTPREL16",
-		"R_PPC_DTPREL16_HA",
-		"R_PPC_DTPREL16_HI",
-		"R_PPC_DTPREL16_LO",
-		"R_PPC_DTPREL32",
-		"R_PPC_EMB_BIT_FLD",
-		"R_PPC_EMB_MRKREF",
-		"R_PPC_EMB_NADDR16",
-		"R_PPC_EMB_NADDR16_HA",
-		"R_PPC_EMB_NADDR16_HI",
-		"R_PPC_EMB_NADDR16_LO",
-		"R_PPC_EMB_NADDR32",
-		"R_PPC_EMB_RELSDA",
-		"R_PPC_EMB_RELSEC16",
-		"R_PPC_EMB_RELST_HA",
-		"R_PPC_EMB_RELST_HI",
-		"R_PPC_EMB_RELST_LO",
-		"R_PPC_EMB_SDA21",
-		"R_PPC_EMB_SDA2I16",
-		"R_PPC_EMB_SDA2REL",
-		"R_PPC_EMB_SDAI16",
-		"R_PPC_GLOB_DAT",
-		"R_PPC_GOT16",
-		"R_PPC_GOT16_HA",
-		"R_PPC_GOT16_HI",
-		"R_PPC_GOT16_LO",
-		"R_PPC_GOT_TLSGD16",
-		"R_PPC_GOT_TLSGD16_HA",
-		"R_PPC_GOT_TLSGD16_HI",
-		"R_PPC_GOT_TLSGD16_LO",
-		"R_PPC_GOT_TLSLD16",
-		"R_PPC_GOT_TLSLD16_HA",
-		"R_PPC_GOT_TLSLD16_HI",
-		"R_PPC_GOT_TLSLD16_LO",
-		"R_PPC_GOT_TPREL16",
-		"R_PPC_GOT_TPREL16_HA",
-		"R_PPC_GOT_TPREL16_HI",
-		"R_PPC_GOT_TPREL16_LO",
-		"R_PPC_JMP_SLOT",
-		"R_PPC_LOCAL24PC",
-		"R_PPC_NONE",
-		"R_PPC_PLT16_HA",
-		"R_PPC_PLT16_HI",
-		"R_PPC_PLT16_LO",
-		"R_PPC_PLT32",
-		"R_PPC_PLTREL24",
-		"R_PPC_PLTREL32",
-		"R_PPC_REL14",
-		"R_PPC_REL14_BRNTAKEN",
-		"R_PPC_REL14_BRTAKEN",
-		"R_PPC_REL24",
-		"R_PPC_REL32",
-		"R_PPC_RELATIVE",
-		"R_PPC_SDAREL16",
-		"R_PPC_SECTOFF",
-		"R_PPC_SECTOFF_HA",
-		"R_PPC_SECTOFF_HI",
-		"R_PPC_SECTOFF_LO",
-		"R_PPC_TLS",
-		"R_PPC_TPREL16",
-		"R_PPC_TPREL16_HA",
-		"R_PPC_TPREL16_HI",
-		"R_PPC_TPREL16_LO",
-		"R_PPC_TPREL32",
-		"R_PPC_UADDR16",
-		"R_PPC_UADDR32",
-		"R_RISCV",
-		"R_RISCV_32",
-		"R_RISCV_32_PCREL",
-		"R_RISCV_64",
-		"R_RISCV_ADD16",
-		"R_RISCV_ADD32",
-		"R_RISCV_ADD64",
-		"R_RISCV_ADD8",
-		"R_RISCV_ALIGN",
-		"R_RISCV_BRANCH",
-		"R_RISCV_CALL",
-		"R_RISCV_CALL_PLT",
-		"R_RISCV_COPY",
-		"R_RISCV_GNU_VTENTRY",
-		"R_RISCV_GNU_VTINHERIT",
-		"R_RISCV_GOT_HI20",
-		"R_RISCV_GPREL_I",
-		"R_RISCV_GPREL_S",
-		"R_RISCV_HI20",
-		"R_RISCV_JAL",
-		"R_RISCV_JUMP_SLOT",
-		"R_RISCV_LO12_I",
-		"R_RISCV_LO12_S",
-		"R_RISCV_NONE",
-		"R_RISCV_PCREL_HI20",
-		"R_RISCV_PCREL_LO12_I",
-		"R_RISCV_PCREL_LO12_S",
-		"R_RISCV_RELATIVE",
-		"R_RISCV_RELAX",
-		"R_RISCV_RVC_BRANCH",
-		"R_RISCV_RVC_JUMP",
-		"R_RISCV_RVC_LUI",
-		"R_RISCV_SET16",
-		"R_RISCV_SET32",
-		"R_RISCV_SET6",
-		"R_RISCV_SET8",
-		"R_RISCV_SUB16",
-		"R_RISCV_SUB32",
-		"R_RISCV_SUB6",
-		"R_RISCV_SUB64",
-		"R_RISCV_SUB8",
-		"R_RISCV_TLS_DTPMOD32",
-		"R_RISCV_TLS_DTPMOD64",
-		"R_RISCV_TLS_DTPREL32",
-		"R_RISCV_TLS_DTPREL64",
-		"R_RISCV_TLS_GD_HI20",
-		"R_RISCV_TLS_GOT_HI20",
-		"R_RISCV_TLS_TPREL32",
-		"R_RISCV_TLS_TPREL64",
-		"R_RISCV_TPREL_ADD",
-		"R_RISCV_TPREL_HI20",
-		"R_RISCV_TPREL_I",
-		"R_RISCV_TPREL_LO12_I",
-		"R_RISCV_TPREL_LO12_S",
-		"R_RISCV_TPREL_S",
-		"R_SPARC",
-		"R_SPARC_10",
-		"R_SPARC_11",
-		"R_SPARC_13",
-		"R_SPARC_16",
-		"R_SPARC_22",
-		"R_SPARC_32",
-		"R_SPARC_5",
-		"R_SPARC_6",
-		"R_SPARC_64",
-		"R_SPARC_7",
-		"R_SPARC_8",
-		"R_SPARC_COPY",
-		"R_SPARC_DISP16",
-		"R_SPARC_DISP32",
-		"R_SPARC_DISP64",
-		"R_SPARC_DISP8",
-		"R_SPARC_GLOB_DAT",
-		"R_SPARC_GLOB_JMP",
-		"R_SPARC_GOT10",
-		"R_SPARC_GOT13",
-		"R_SPARC_GOT22",
-		"R_SPARC_H44",
-		"R_SPARC_HH22",
-		"R_SPARC_HI22",
-		"R_SPARC_HIPLT22",
-		"R_SPARC_HIX22",
-		"R_SPARC_HM10",
-		"R_SPARC_JMP_SLOT",
-		"R_SPARC_L44",
-		"R_SPARC_LM22",
-		"R_SPARC_LO10",
-		"R_SPARC_LOPLT10",
-		"R_SPARC_LOX10",
-		"R_SPARC_M44",
-		"R_SPARC_NONE",
-		"R_SPARC_OLO10",
-		"R_SPARC_PC10",
-		"R_SPARC_PC22",
-		"R_SPARC_PCPLT10",
-		"R_SPARC_PCPLT22",
-		"R_SPARC_PCPLT32",
-		"R_SPARC_PC_HH22",
-		"R_SPARC_PC_HM10",
-		"R_SPARC_PC_LM22",
-		"R_SPARC_PLT32",
-		"R_SPARC_PLT64",
-		"R_SPARC_REGISTER",
-		"R_SPARC_RELATIVE",
-		"R_SPARC_UA16",
-		"R_SPARC_UA32",
-		"R_SPARC_UA64",
-		"R_SPARC_WDISP16",
-		"R_SPARC_WDISP19",
-		"R_SPARC_WDISP22",
-		"R_SPARC_WDISP30",
-		"R_SPARC_WPLT30",
-		"R_SYM32",
-		"R_SYM64",
-		"R_TYPE32",
-		"R_TYPE64",
-		"R_X86_64",
-		"R_X86_64_16",
-		"R_X86_64_32",
-		"R_X86_64_32S",
-		"R_X86_64_64",
-		"R_X86_64_8",
-		"R_X86_64_COPY",
-		"R_X86_64_DTPMOD64",
-		"R_X86_64_DTPOFF32",
-		"R_X86_64_DTPOFF64",
-		"R_X86_64_GLOB_DAT",
-		"R_X86_64_GOT32",
-		"R_X86_64_GOT64",
-		"R_X86_64_GOTOFF64",
-		"R_X86_64_GOTPC32",
-		"R_X86_64_GOTPC32_TLSDESC",
-		"R_X86_64_GOTPC64",
-		"R_X86_64_GOTPCREL",
-		"R_X86_64_GOTPCREL64",
-		"R_X86_64_GOTPCRELX",
-		"R_X86_64_GOTPLT64",
-		"R_X86_64_GOTTPOFF",
-		"R_X86_64_IRELATIVE",
-		"R_X86_64_JMP_SLOT",
-		"R_X86_64_NONE",
-		"R_X86_64_PC16",
-		"R_X86_64_PC32",
-		"R_X86_64_PC32_BND",
-		"R_X86_64_PC64",
-		"R_X86_64_PC8",
-		"R_X86_64_PLT32",
-		"R_X86_64_PLT32_BND",
-		"R_X86_64_PLTOFF64",
-		"R_X86_64_RELATIVE",
-		"R_X86_64_RELATIVE64",
-		"R_X86_64_REX_GOTPCRELX",
-		"R_X86_64_SIZE32",
-		"R_X86_64_SIZE64",
-		"R_X86_64_TLSDESC",
-		"R_X86_64_TLSDESC_CALL",
-		"R_X86_64_TLSGD",
-		"R_X86_64_TLSLD",
-		"R_X86_64_TPOFF32",
-		"R_X86_64_TPOFF64",
-		"Rel32",
-		"Rel64",
-		"Rela32",
-		"Rela64",
-		"SHF_ALLOC",
-		"SHF_COMPRESSED",
-		"SHF_EXECINSTR",
-		"SHF_GROUP",
-		"SHF_INFO_LINK",
-		"SHF_LINK_ORDER",
-		"SHF_MASKOS",
-		"SHF_MASKPROC",
-		"SHF_MERGE",
-		"SHF_OS_NONCONFORMING",
-		"SHF_STRINGS",
-		"SHF_TLS",
-		"SHF_WRITE",
-		"SHN_ABS",
-		"SHN_COMMON",
-		"SHN_HIOS",
-		"SHN_HIPROC",
-		"SHN_HIRESERVE",
-		"SHN_LOOS",
-		"SHN_LOPROC",
-		"SHN_LORESERVE",
-		"SHN_UNDEF",
-		"SHN_XINDEX",
-		"SHT_DYNAMIC",
-		"SHT_DYNSYM",
-		"SHT_FINI_ARRAY",
-		"SHT_GNU_ATTRIBUTES",
-		"SHT_GNU_HASH",
-		"SHT_GNU_LIBLIST",
-		"SHT_GNU_VERDEF",
-		"SHT_GNU_VERNEED",
-		"SHT_GNU_VERSYM",
-		"SHT_GROUP",
-		"SHT_HASH",
-		"SHT_HIOS",
-		"SHT_HIPROC",
-		"SHT_HIUSER",
-		"SHT_INIT_ARRAY",
-		"SHT_LOOS",
-		"SHT_LOPROC",
-		"SHT_LOUSER",
-		"SHT_MIPS_ABIFLAGS",
-		"SHT_NOBITS",
-		"SHT_NOTE",
-		"SHT_NULL",
-		"SHT_PREINIT_ARRAY",
-		"SHT_PROGBITS",
-		"SHT_REL",
-		"SHT_RELA",
-		"SHT_SHLIB",
-		"SHT_STRTAB",
-		"SHT_SYMTAB",
-		"SHT_SYMTAB_SHNDX",
-		"STB_GLOBAL",
-		"STB_HIOS",
-		"STB_HIPROC",
-		"STB_LOCAL",
-		"STB_LOOS",
-		"STB_LOPROC",
-		"STB_WEAK",
-		"STT_COMMON",
-		"STT_FILE",
-		"STT_FUNC",
-		"STT_HIOS",
-		"STT_HIPROC",
-		"STT_LOOS",
-		"STT_LOPROC",
-		"STT_NOTYPE",
-		"STT_OBJECT",
-		"STT_SECTION",
-		"STT_TLS",
-		"STV_DEFAULT",
-		"STV_HIDDEN",
-		"STV_INTERNAL",
-		"STV_PROTECTED",
-		"ST_BIND",
-		"ST_INFO",
-		"ST_TYPE",
-		"ST_VISIBILITY",
-		"Section",
-		"Section32",
-		"Section64",
-		"SectionFlag",
-		"SectionHeader",
-		"SectionIndex",
-		"SectionType",
-		"Sym32",
-		"Sym32Size",
-		"Sym64",
-		"Sym64Size",
-		"SymBind",
-		"SymType",
-		"SymVis",
-		"Symbol",
-		"Type",
-		"Version",
-	},
-	"debug/gosym": {
-		"DecodingError",
-		"Func",
-		"LineTable",
-		"NewLineTable",
-		"NewTable",
-		"Obj",
-		"Sym",
-		"Table",
-		"UnknownFileError",
-		"UnknownLineError",
-	},
-	"debug/macho": {
-		"ARM64_RELOC_ADDEND",
-		"ARM64_RELOC_BRANCH26",
-		"ARM64_RELOC_GOT_LOAD_PAGE21",
-		"ARM64_RELOC_GOT_LOAD_PAGEOFF12",
-		"ARM64_RELOC_PAGE21",
-		"ARM64_RELOC_PAGEOFF12",
-		"ARM64_RELOC_POINTER_TO_GOT",
-		"ARM64_RELOC_SUBTRACTOR",
-		"ARM64_RELOC_TLVP_LOAD_PAGE21",
-		"ARM64_RELOC_TLVP_LOAD_PAGEOFF12",
-		"ARM64_RELOC_UNSIGNED",
-		"ARM_RELOC_BR24",
-		"ARM_RELOC_HALF",
-		"ARM_RELOC_HALF_SECTDIFF",
-		"ARM_RELOC_LOCAL_SECTDIFF",
-		"ARM_RELOC_PAIR",
-		"ARM_RELOC_PB_LA_PTR",
-		"ARM_RELOC_SECTDIFF",
-		"ARM_RELOC_VANILLA",
-		"ARM_THUMB_32BIT_BRANCH",
-		"ARM_THUMB_RELOC_BR22",
-		"Cpu",
-		"Cpu386",
-		"CpuAmd64",
-		"CpuArm",
-		"CpuArm64",
-		"CpuPpc",
-		"CpuPpc64",
-		"Dylib",
-		"DylibCmd",
-		"Dysymtab",
-		"DysymtabCmd",
-		"ErrNotFat",
-		"FatArch",
-		"FatArchHeader",
-		"FatFile",
-		"File",
-		"FileHeader",
-		"FlagAllModsBound",
-		"FlagAllowStackExecution",
-		"FlagAppExtensionSafe",
-		"FlagBindAtLoad",
-		"FlagBindsToWeak",
-		"FlagCanonical",
-		"FlagDeadStrippableDylib",
-		"FlagDyldLink",
-		"FlagForceFlat",
-		"FlagHasTLVDescriptors",
-		"FlagIncrLink",
-		"FlagLazyInit",
-		"FlagNoFixPrebinding",
-		"FlagNoHeapExecution",
-		"FlagNoMultiDefs",
-		"FlagNoReexportedDylibs",
-		"FlagNoUndefs",
-		"FlagPIE",
-		"FlagPrebindable",
-		"FlagPrebound",
-		"FlagRootSafe",
-		"FlagSetuidSafe",
-		"FlagSplitSegs",
-		"FlagSubsectionsViaSymbols",
-		"FlagTwoLevel",
-		"FlagWeakDefines",
-		"FormatError",
-		"GENERIC_RELOC_LOCAL_SECTDIFF",
-		"GENERIC_RELOC_PAIR",
-		"GENERIC_RELOC_PB_LA_PTR",
-		"GENERIC_RELOC_SECTDIFF",
-		"GENERIC_RELOC_TLV",
-		"GENERIC_RELOC_VANILLA",
-		"Load",
-		"LoadBytes",
-		"LoadCmd",
-		"LoadCmdDylib",
-		"LoadCmdDylinker",
-		"LoadCmdDysymtab",
-		"LoadCmdRpath",
-		"LoadCmdSegment",
-		"LoadCmdSegment64",
-		"LoadCmdSymtab",
-		"LoadCmdThread",
-		"LoadCmdUnixThread",
-		"Magic32",
-		"Magic64",
-		"MagicFat",
-		"NewFatFile",
-		"NewFile",
-		"Nlist32",
-		"Nlist64",
-		"Open",
-		"OpenFat",
-		"Regs386",
-		"RegsAMD64",
-		"Reloc",
-		"RelocTypeARM",
-		"RelocTypeARM64",
-		"RelocTypeGeneric",
-		"RelocTypeX86_64",
-		"Rpath",
-		"RpathCmd",
-		"Section",
-		"Section32",
-		"Section64",
-		"SectionHeader",
-		"Segment",
-		"Segment32",
-		"Segment64",
-		"SegmentHeader",
-		"Symbol",
-		"Symtab",
-		"SymtabCmd",
-		"Thread",
-		"Type",
-		"TypeBundle",
-		"TypeDylib",
-		"TypeExec",
-		"TypeObj",
-		"X86_64_RELOC_BRANCH",
-		"X86_64_RELOC_GOT",
-		"X86_64_RELOC_GOT_LOAD",
-		"X86_64_RELOC_SIGNED",
-		"X86_64_RELOC_SIGNED_1",
-		"X86_64_RELOC_SIGNED_2",
-		"X86_64_RELOC_SIGNED_4",
-		"X86_64_RELOC_SUBTRACTOR",
-		"X86_64_RELOC_TLV",
-		"X86_64_RELOC_UNSIGNED",
-	},
-	"debug/pe": {
-		"COFFSymbol",
-		"COFFSymbolAuxFormat5",
-		"COFFSymbolSize",
-		"DataDirectory",
-		"File",
-		"FileHeader",
-		"FormatError",
-		"IMAGE_COMDAT_SELECT_ANY",
-		"IMAGE_COMDAT_SELECT_ASSOCIATIVE",
-		"IMAGE_COMDAT_SELECT_EXACT_MATCH",
-		"IMAGE_COMDAT_SELECT_LARGEST",
-		"IMAGE_COMDAT_SELECT_NODUPLICATES",
-		"IMAGE_COMDAT_SELECT_SAME_SIZE",
-		"IMAGE_DIRECTORY_ENTRY_ARCHITECTURE",
-		"IMAGE_DIRECTORY_ENTRY_BASERELOC",
-		"IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT",
-		"IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR",
-		"IMAGE_DIRECTORY_ENTRY_DEBUG",
-		"IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT",
-		"IMAGE_DIRECTORY_ENTRY_EXCEPTION",
-		"IMAGE_DIRECTORY_ENTRY_EXPORT",
-		"IMAGE_DIRECTORY_ENTRY_GLOBALPTR",
-		"IMAGE_DIRECTORY_ENTRY_IAT",
-		"IMAGE_DIRECTORY_ENTRY_IMPORT",
-		"IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG",
-		"IMAGE_DIRECTORY_ENTRY_RESOURCE",
-		"IMAGE_DIRECTORY_ENTRY_SECURITY",
-		"IMAGE_DIRECTORY_ENTRY_TLS",
-		"IMAGE_DLLCHARACTERISTICS_APPCONTAINER",
-		"IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE",
-		"IMAGE_DLLCHARACTERISTICS_FORCE_INTEGRITY",
-		"IMAGE_DLLCHARACTERISTICS_GUARD_CF",
-		"IMAGE_DLLCHARACTERISTICS_HIGH_ENTROPY_VA",
-		"IMAGE_DLLCHARACTERISTICS_NO_BIND",
-		"IMAGE_DLLCHARACTERISTICS_NO_ISOLATION",
-		"IMAGE_DLLCHARACTERISTICS_NO_SEH",
-		"IMAGE_DLLCHARACTERISTICS_NX_COMPAT",
-		"IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE",
-		"IMAGE_DLLCHARACTERISTICS_WDM_DRIVER",
-		"IMAGE_FILE_32BIT_MACHINE",
-		"IMAGE_FILE_AGGRESIVE_WS_TRIM",
-		"IMAGE_FILE_BYTES_REVERSED_HI",
-		"IMAGE_FILE_BYTES_REVERSED_LO",
-		"IMAGE_FILE_DEBUG_STRIPPED",
-		"IMAGE_FILE_DLL",
-		"IMAGE_FILE_EXECUTABLE_IMAGE",
-		"IMAGE_FILE_LARGE_ADDRESS_AWARE",
-		"IMAGE_FILE_LINE_NUMS_STRIPPED",
-		"IMAGE_FILE_LOCAL_SYMS_STRIPPED",
-		"IMAGE_FILE_MACHINE_AM33",
-		"IMAGE_FILE_MACHINE_AMD64",
-		"IMAGE_FILE_MACHINE_ARM",
-		"IMAGE_FILE_MACHINE_ARM64",
-		"IMAGE_FILE_MACHINE_ARMNT",
-		"IMAGE_FILE_MACHINE_EBC",
-		"IMAGE_FILE_MACHINE_I386",
-		"IMAGE_FILE_MACHINE_IA64",
-		"IMAGE_FILE_MACHINE_LOONGARCH32",
-		"IMAGE_FILE_MACHINE_LOONGARCH64",
-		"IMAGE_FILE_MACHINE_M32R",
-		"IMAGE_FILE_MACHINE_MIPS16",
-		"IMAGE_FILE_MACHINE_MIPSFPU",
-		"IMAGE_FILE_MACHINE_MIPSFPU16",
-		"IMAGE_FILE_MACHINE_POWERPC",
-		"IMAGE_FILE_MACHINE_POWERPCFP",
-		"IMAGE_FILE_MACHINE_R4000",
-		"IMAGE_FILE_MACHINE_RISCV128",
-		"IMAGE_FILE_MACHINE_RISCV32",
-		"IMAGE_FILE_MACHINE_RISCV64",
-		"IMAGE_FILE_MACHINE_SH3",
-		"IMAGE_FILE_MACHINE_SH3DSP",
-		"IMAGE_FILE_MACHINE_SH4",
-		"IMAGE_FILE_MACHINE_SH5",
-		"IMAGE_FILE_MACHINE_THUMB",
-		"IMAGE_FILE_MACHINE_UNKNOWN",
-		"IMAGE_FILE_MACHINE_WCEMIPSV2",
-		"IMAGE_FILE_NET_RUN_FROM_SWAP",
-		"IMAGE_FILE_RELOCS_STRIPPED",
-		"IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP",
-		"IMAGE_FILE_SYSTEM",
-		"IMAGE_FILE_UP_SYSTEM_ONLY",
-		"IMAGE_SCN_CNT_CODE",
-		"IMAGE_SCN_CNT_INITIALIZED_DATA",
-		"IMAGE_SCN_CNT_UNINITIALIZED_DATA",
-		"IMAGE_SCN_LNK_COMDAT",
-		"IMAGE_SCN_MEM_DISCARDABLE",
-		"IMAGE_SCN_MEM_EXECUTE",
-		"IMAGE_SCN_MEM_READ",
-		"IMAGE_SCN_MEM_WRITE",
-		"IMAGE_SUBSYSTEM_EFI_APPLICATION",
-		"IMAGE_SUBSYSTEM_EFI_BOOT_SERVICE_DRIVER",
-		"IMAGE_SUBSYSTEM_EFI_ROM",
-		"IMAGE_SUBSYSTEM_EFI_RUNTIME_DRIVER",
-		"IMAGE_SUBSYSTEM_NATIVE",
-		"IMAGE_SUBSYSTEM_NATIVE_WINDOWS",
-		"IMAGE_SUBSYSTEM_OS2_CUI",
-		"IMAGE_SUBSYSTEM_POSIX_CUI",
-		"IMAGE_SUBSYSTEM_UNKNOWN",
-		"IMAGE_SUBSYSTEM_WINDOWS_BOOT_APPLICATION",
-		"IMAGE_SUBSYSTEM_WINDOWS_CE_GUI",
-		"IMAGE_SUBSYSTEM_WINDOWS_CUI",
-		"IMAGE_SUBSYSTEM_WINDOWS_GUI",
-		"IMAGE_SUBSYSTEM_XBOX",
-		"ImportDirectory",
-		"NewFile",
-		"Open",
-		"OptionalHeader32",
-		"OptionalHeader64",
-		"Reloc",
-		"Section",
-		"SectionHeader",
-		"SectionHeader32",
-		"StringTable",
-		"Symbol",
-	},
-	"debug/plan9obj": {
-		"ErrNoSymbols",
-		"File",
-		"FileHeader",
-		"Magic386",
-		"Magic64",
-		"MagicAMD64",
-		"MagicARM",
-		"NewFile",
-		"Open",
-		"Section",
-		"SectionHeader",
-		"Sym",
-	},
-	"embed": {
-		"FS",
-	},
-	"encoding": {
-		"BinaryMarshaler",
-		"BinaryUnmarshaler",
-		"TextMarshaler",
-		"TextUnmarshaler",
-	},
-	"encoding/ascii85": {
-		"CorruptInputError",
-		"Decode",
-		"Encode",
-		"MaxEncodedLen",
-		"NewDecoder",
-		"NewEncoder",
-	},
-	"encoding/asn1": {
-		"BitString",
-		"ClassApplication",
-		"ClassContextSpecific",
-		"ClassPrivate",
-		"ClassUniversal",
-		"Enumerated",
-		"Flag",
-		"Marshal",
-		"MarshalWithParams",
-		"NullBytes",
-		"NullRawValue",
-		"ObjectIdentifier",
-		"RawContent",
-		"RawValue",
-		"StructuralError",
-		"SyntaxError",
-		"TagBMPString",
-		"TagBitString",
-		"TagBoolean",
-		"TagEnum",
-		"TagGeneralString",
-		"TagGeneralizedTime",
-		"TagIA5String",
-		"TagInteger",
-		"TagNull",
-		"TagNumericString",
-		"TagOID",
-		"TagOctetString",
-		"TagPrintableString",
-		"TagSequence",
-		"TagSet",
-		"TagT61String",
-		"TagUTCTime",
-		"TagUTF8String",
-		"Unmarshal",
-		"UnmarshalWithParams",
-	},
-	"encoding/base32": {
-		"CorruptInputError",
-		"Encoding",
-		"HexEncoding",
-		"NewDecoder",
-		"NewEncoder",
-		"NewEncoding",
-		"NoPadding",
-		"StdEncoding",
-		"StdPadding",
-	},
-	"encoding/base64": {
-		"CorruptInputError",
-		"Encoding",
-		"NewDecoder",
-		"NewEncoder",
-		"NewEncoding",
-		"NoPadding",
-		"RawStdEncoding",
-		"RawURLEncoding",
-		"StdEncoding",
-		"StdPadding",
-		"URLEncoding",
-	},
-	"encoding/binary": {
-		"AppendByteOrder",
-		"AppendUvarint",
-		"AppendVarint",
-		"BigEndian",
-		"ByteOrder",
-		"LittleEndian",
-		"MaxVarintLen16",
-		"MaxVarintLen32",
-		"MaxVarintLen64",
-		"NativeEndian",
-		"PutUvarint",
-		"PutVarint",
-		"Read",
-		"ReadUvarint",
-		"ReadVarint",
-		"Size",
-		"Uvarint",
-		"Varint",
-		"Write",
-	},
-	"encoding/csv": {
-		"ErrBareQuote",
-		"ErrFieldCount",
-		"ErrQuote",
-		"ErrTrailingComma",
-		"NewReader",
-		"NewWriter",
-		"ParseError",
-		"Reader",
-		"Writer",
-	},
-	"encoding/gob": {
-		"CommonType",
-		"Decoder",
-		"Encoder",
-		"GobDecoder",
-		"GobEncoder",
-		"NewDecoder",
-		"NewEncoder",
-		"Register",
-		"RegisterName",
-	},
-	"encoding/hex": {
-		"AppendDecode",
-		"AppendEncode",
-		"Decode",
-		"DecodeString",
-		"DecodedLen",
-		"Dump",
-		"Dumper",
-		"Encode",
-		"EncodeToString",
-		"EncodedLen",
-		"ErrLength",
-		"InvalidByteError",
-		"NewDecoder",
-		"NewEncoder",
-	},
-	"encoding/json": {
-		"Compact",
-		"Decoder",
-		"Delim",
-		"Encoder",
-		"HTMLEscape",
-		"Indent",
-		"InvalidUTF8Error",
-		"InvalidUnmarshalError",
-		"Marshal",
-		"MarshalIndent",
-		"Marshaler",
-		"MarshalerError",
-		"NewDecoder",
-		"NewEncoder",
-		"Number",
-		"RawMessage",
-		"SyntaxError",
-		"Token",
-		"Unmarshal",
-		"UnmarshalFieldError",
-		"UnmarshalTypeError",
-		"Unmarshaler",
-		"UnsupportedTypeError",
-		"UnsupportedValueError",
-		"Valid",
-	},
-	"encoding/pem": {
-		"Block",
-		"Decode",
-		"Encode",
-		"EncodeToMemory",
-	},
-	"encoding/xml": {
-		"Attr",
-		"CharData",
-		"Comment",
-		"CopyToken",
-		"Decoder",
-		"Directive",
-		"Encoder",
-		"EndElement",
-		"Escape",
-		"EscapeText",
-		"HTMLAutoClose",
-		"HTMLEntity",
-		"Header",
-		"Marshal",
-		"MarshalIndent",
-		"Marshaler",
-		"MarshalerAttr",
-		"Name",
-		"NewDecoder",
-		"NewEncoder",
-		"NewTokenDecoder",
-		"ProcInst",
-		"StartElement",
-		"SyntaxError",
-		"TagPathError",
-		"Token",
-		"TokenReader",
-		"Unmarshal",
-		"UnmarshalError",
-		"Unmarshaler",
-		"UnmarshalerAttr",
-		"UnsupportedTypeError",
-	},
-	"errors": {
-		"As",
-		"ErrUnsupported",
-		"Is",
-		"Join",
-		"New",
-		"Unwrap",
-	},
-	"expvar": {
-		"Do",
-		"Float",
-		"Func",
-		"Get",
-		"Handler",
-		"Int",
-		"KeyValue",
-		"Map",
-		"NewFloat",
-		"NewInt",
-		"NewMap",
-		"NewString",
-		"Publish",
-		"String",
-		"Var",
-	},
-	"flag": {
-		"Arg",
-		"Args",
-		"Bool",
-		"BoolFunc",
-		"BoolVar",
-		"CommandLine",
-		"ContinueOnError",
-		"Duration",
-		"DurationVar",
-		"ErrHelp",
-		"ErrorHandling",
-		"ExitOnError",
-		"Flag",
-		"FlagSet",
-		"Float64",
-		"Float64Var",
-		"Func",
-		"Getter",
-		"Int",
-		"Int64",
-		"Int64Var",
-		"IntVar",
-		"Lookup",
-		"NArg",
-		"NFlag",
-		"NewFlagSet",
-		"PanicOnError",
-		"Parse",
-		"Parsed",
-		"PrintDefaults",
-		"Set",
-		"String",
-		"StringVar",
-		"TextVar",
-		"Uint",
-		"Uint64",
-		"Uint64Var",
-		"UintVar",
-		"UnquoteUsage",
-		"Usage",
-		"Value",
-		"Var",
-		"Visit",
-		"VisitAll",
-	},
-	"fmt": {
-		"Append",
-		"Appendf",
-		"Appendln",
-		"Errorf",
-		"FormatString",
-		"Formatter",
-		"Fprint",
-		"Fprintf",
-		"Fprintln",
-		"Fscan",
-		"Fscanf",
-		"Fscanln",
-		"GoStringer",
-		"Print",
-		"Printf",
-		"Println",
-		"Scan",
-		"ScanState",
-		"Scanf",
-		"Scanln",
-		"Scanner",
-		"Sprint",
-		"Sprintf",
-		"Sprintln",
-		"Sscan",
-		"Sscanf",
-		"Sscanln",
-		"State",
-		"Stringer",
-	},
-	"go/ast": {
-		"ArrayType",
-		"AssignStmt",
-		"Bad",
-		"BadDecl",
-		"BadExpr",
-		"BadStmt",
-		"BasicLit",
-		"BinaryExpr",
-		"BlockStmt",
-		"BranchStmt",
-		"CallExpr",
-		"CaseClause",
-		"ChanDir",
-		"ChanType",
-		"CommClause",
-		"Comment",
-		"CommentGroup",
-		"CommentMap",
-		"CompositeLit",
-		"Con",
-		"Decl",
-		"DeclStmt",
-		"DeferStmt",
-		"Ellipsis",
-		"EmptyStmt",
-		"Expr",
-		"ExprStmt",
-		"Field",
-		"FieldFilter",
-		"FieldList",
-		"File",
-		"FileExports",
-		"Filter",
-		"FilterDecl",
-		"FilterFile",
-		"FilterFuncDuplicates",
-		"FilterImportDuplicates",
-		"FilterPackage",
-		"FilterUnassociatedComments",
-		"ForStmt",
-		"Fprint",
-		"Fun",
-		"FuncDecl",
-		"FuncLit",
-		"FuncType",
-		"GenDecl",
-		"GoStmt",
-		"Ident",
-		"IfStmt",
-		"ImportSpec",
-		"Importer",
-		"IncDecStmt",
-		"IndexExpr",
-		"IndexListExpr",
-		"Inspect",
-		"InterfaceType",
-		"IsExported",
-		"IsGenerated",
-		"KeyValueExpr",
-		"LabeledStmt",
-		"Lbl",
-		"MapType",
-		"MergeMode",
-		"MergePackageFiles",
-		"NewCommentMap",
-		"NewIdent",
-		"NewObj",
-		"NewPackage",
-		"NewScope",
-		"Node",
-		"NotNilFilter",
-		"ObjKind",
-		"Object",
-		"Package",
-		"PackageExports",
-		"ParenExpr",
-		"Pkg",
-		"Print",
-		"RECV",
-		"RangeStmt",
-		"ReturnStmt",
-		"SEND",
-		"Scope",
-		"SelectStmt",
-		"SelectorExpr",
-		"SendStmt",
-		"SliceExpr",
-		"SortImports",
-		"Spec",
-		"StarExpr",
-		"Stmt",
-		"StructType",
-		"SwitchStmt",
-		"Typ",
-		"TypeAssertExpr",
-		"TypeSpec",
-		"TypeSwitchStmt",
-		"UnaryExpr",
-		"Unparen",
-		"ValueSpec",
-		"Var",
-		"Visitor",
-		"Walk",
-	},
-	"go/build": {
-		"AllowBinary",
-		"ArchChar",
-		"Context",
-		"Default",
-		"Directive",
-		"FindOnly",
-		"IgnoreVendor",
-		"Import",
-		"ImportComment",
-		"ImportDir",
-		"ImportMode",
-		"IsLocalImport",
-		"MultiplePackageError",
-		"NoGoError",
-		"Package",
-		"ToolDir",
-	},
-	"go/build/constraint": {
-		"AndExpr",
-		"Expr",
-		"GoVersion",
-		"IsGoBuild",
-		"IsPlusBuild",
-		"NotExpr",
-		"OrExpr",
-		"Parse",
-		"PlusBuildLines",
-		"SyntaxError",
-		"TagExpr",
-	},
-	"go/constant": {
-		"BinaryOp",
-		"BitLen",
-		"Bool",
-		"BoolVal",
-		"Bytes",
-		"Compare",
-		"Complex",
-		"Denom",
-		"Float",
-		"Float32Val",
-		"Float64Val",
-		"Imag",
-		"Int",
-		"Int64Val",
-		"Kind",
-		"Make",
-		"MakeBool",
-		"MakeFloat64",
-		"MakeFromBytes",
-		"MakeFromLiteral",
-		"MakeImag",
-		"MakeInt64",
-		"MakeString",
-		"MakeUint64",
-		"MakeUnknown",
-		"Num",
-		"Real",
-		"Shift",
-		"Sign",
-		"String",
-		"StringVal",
-		"ToComplex",
-		"ToFloat",
-		"ToInt",
-		"Uint64Val",
-		"UnaryOp",
-		"Unknown",
-		"Val",
-		"Value",
-	},
-	"go/doc": {
-		"AllDecls",
-		"AllMethods",
-		"Example",
-		"Examples",
-		"Filter",
-		"Func",
-		"IllegalPrefixes",
-		"IsPredeclared",
-		"Mode",
-		"New",
-		"NewFromFiles",
-		"Note",
-		"Package",
-		"PreserveAST",
-		"Synopsis",
-		"ToHTML",
-		"ToText",
-		"Type",
-		"Value",
-	},
-	"go/doc/comment": {
-		"Block",
-		"Code",
-		"DefaultLookupPackage",
-		"Doc",
-		"DocLink",
-		"Heading",
-		"Italic",
-		"Link",
-		"LinkDef",
-		"List",
-		"ListItem",
-		"Paragraph",
-		"Parser",
-		"Plain",
-		"Printer",
-		"Text",
-	},
-	"go/format": {
-		"Node",
-		"Source",
-	},
-	"go/importer": {
-		"Default",
-		"For",
-		"ForCompiler",
-		"Lookup",
-	},
-	"go/parser": {
-		"AllErrors",
-		"DeclarationErrors",
-		"ImportsOnly",
-		"Mode",
-		"PackageClauseOnly",
-		"ParseComments",
-		"ParseDir",
-		"ParseExpr",
-		"ParseExprFrom",
-		"ParseFile",
-		"SkipObjectResolution",
-		"SpuriousErrors",
-		"Trace",
-	},
-	"go/printer": {
-		"CommentedNode",
-		"Config",
-		"Fprint",
-		"Mode",
-		"RawFormat",
-		"SourcePos",
-		"TabIndent",
-		"UseSpaces",
-	},
-	"go/scanner": {
-		"Error",
-		"ErrorHandler",
-		"ErrorList",
-		"Mode",
-		"PrintError",
-		"ScanComments",
-		"Scanner",
-	},
-	"go/token": {
-		"ADD",
-		"ADD_ASSIGN",
-		"AND",
-		"AND_ASSIGN",
-		"AND_NOT",
-		"AND_NOT_ASSIGN",
-		"ARROW",
-		"ASSIGN",
-		"BREAK",
-		"CASE",
-		"CHAN",
-		"CHAR",
-		"COLON",
-		"COMMA",
-		"COMMENT",
-		"CONST",
-		"CONTINUE",
-		"DEC",
-		"DEFAULT",
-		"DEFER",
-		"DEFINE",
-		"ELLIPSIS",
-		"ELSE",
-		"EOF",
-		"EQL",
-		"FALLTHROUGH",
-		"FLOAT",
-		"FOR",
-		"FUNC",
-		"File",
-		"FileSet",
-		"GEQ",
-		"GO",
-		"GOTO",
-		"GTR",
-		"HighestPrec",
-		"IDENT",
-		"IF",
-		"ILLEGAL",
-		"IMAG",
-		"IMPORT",
-		"INC",
-		"INT",
-		"INTERFACE",
-		"IsExported",
-		"IsIdentifier",
-		"IsKeyword",
-		"LAND",
-		"LBRACE",
-		"LBRACK",
-		"LEQ",
-		"LOR",
-		"LPAREN",
-		"LSS",
-		"Lookup",
-		"LowestPrec",
-		"MAP",
-		"MUL",
-		"MUL_ASSIGN",
-		"NEQ",
-		"NOT",
-		"NewFileSet",
-		"NoPos",
-		"OR",
-		"OR_ASSIGN",
-		"PACKAGE",
-		"PERIOD",
-		"Pos",
-		"Position",
-		"QUO",
-		"QUO_ASSIGN",
-		"RANGE",
-		"RBRACE",
-		"RBRACK",
-		"REM",
-		"REM_ASSIGN",
-		"RETURN",
-		"RPAREN",
-		"SELECT",
-		"SEMICOLON",
-		"SHL",
-		"SHL_ASSIGN",
-		"SHR",
-		"SHR_ASSIGN",
-		"STRING",
-		"STRUCT",
-		"SUB",
-		"SUB_ASSIGN",
-		"SWITCH",
-		"TILDE",
-		"TYPE",
-		"Token",
-		"UnaryPrec",
-		"VAR",
-		"XOR",
-		"XOR_ASSIGN",
-	},
-	"go/types": {
-		"Alias",
-		"ArgumentError",
-		"Array",
-		"AssertableTo",
-		"AssignableTo",
-		"Basic",
-		"BasicInfo",
-		"BasicKind",
-		"Bool",
-		"Builtin",
-		"Byte",
-		"Chan",
-		"ChanDir",
-		"CheckExpr",
-		"Checker",
-		"Comparable",
-		"Complex128",
-		"Complex64",
-		"Config",
-		"Const",
-		"Context",
-		"ConvertibleTo",
-		"DefPredeclaredTestFuncs",
-		"Default",
-		"Error",
-		"Eval",
-		"ExprString",
-		"FieldVal",
-		"Float32",
-		"Float64",
-		"Func",
-		"Id",
-		"Identical",
-		"IdenticalIgnoreTags",
-		"Implements",
-		"ImportMode",
-		"Importer",
-		"ImporterFrom",
-		"Info",
-		"Initializer",
-		"Instance",
-		"Instantiate",
-		"Int",
-		"Int16",
-		"Int32",
-		"Int64",
-		"Int8",
-		"Interface",
-		"Invalid",
-		"IsBoolean",
-		"IsComplex",
-		"IsConstType",
-		"IsFloat",
-		"IsInteger",
-		"IsInterface",
-		"IsNumeric",
-		"IsOrdered",
-		"IsString",
-		"IsUnsigned",
-		"IsUntyped",
-		"Label",
-		"LookupFieldOrMethod",
-		"Map",
-		"MethodExpr",
-		"MethodSet",
-		"MethodVal",
-		"MissingMethod",
-		"Named",
-		"NewAlias",
-		"NewArray",
-		"NewChan",
-		"NewChecker",
-		"NewConst",
-		"NewContext",
-		"NewField",
-		"NewFunc",
-		"NewInterface",
-		"NewInterfaceType",
-		"NewLabel",
-		"NewMap",
-		"NewMethodSet",
-		"NewNamed",
-		"NewPackage",
-		"NewParam",
-		"NewPkgName",
-		"NewPointer",
-		"NewScope",
-		"NewSignature",
-		"NewSignatureType",
-		"NewSlice",
-		"NewStruct",
-		"NewTerm",
-		"NewTuple",
-		"NewTypeName",
-		"NewTypeParam",
-		"NewUnion",
-		"NewVar",
-		"Nil",
-		"Object",
-		"ObjectString",
-		"Package",
-		"PkgName",
-		"Pointer",
-		"Qualifier",
-		"RecvOnly",
-		"RelativeTo",
-		"Rune",
-		"Satisfies",
-		"Scope",
-		"Selection",
-		"SelectionKind",
-		"SelectionString",
-		"SendOnly",
-		"SendRecv",
-		"Signature",
-		"Sizes",
-		"SizesFor",
-		"Slice",
-		"StdSizes",
-		"String",
-		"Struct",
-		"Term",
-		"Tuple",
-		"Typ",
-		"Type",
-		"TypeAndValue",
-		"TypeList",
-		"TypeName",
-		"TypeParam",
-		"TypeParamList",
-		"TypeString",
-		"Uint",
-		"Uint16",
-		"Uint32",
-		"Uint64",
-		"Uint8",
-		"Uintptr",
-		"Unalias",
-		"Union",
-		"Universe",
-		"Unsafe",
-		"UnsafePointer",
-		"UntypedBool",
-		"UntypedComplex",
-		"UntypedFloat",
-		"UntypedInt",
-		"UntypedNil",
-		"UntypedRune",
-		"UntypedString",
-		"Var",
-		"WriteExpr",
-		"WriteSignature",
-		"WriteType",
-	},
-	"go/version": {
-		"Compare",
-		"IsValid",
-		"Lang",
-	},
-	"hash": {
-		"Hash",
-		"Hash32",
-		"Hash64",
-	},
-	"hash/adler32": {
-		"Checksum",
-		"New",
-		"Size",
-	},
-	"hash/crc32": {
-		"Castagnoli",
-		"Checksum",
-		"ChecksumIEEE",
-		"IEEE",
-		"IEEETable",
-		"Koopman",
-		"MakeTable",
-		"New",
-		"NewIEEE",
-		"Size",
-		"Table",
-		"Update",
-	},
-	"hash/crc64": {
-		"Checksum",
-		"ECMA",
-		"ISO",
-		"MakeTable",
-		"New",
-		"Size",
-		"Table",
-		"Update",
-	},
-	"hash/fnv": {
-		"New128",
-		"New128a",
-		"New32",
-		"New32a",
-		"New64",
-		"New64a",
-	},
-	"hash/maphash": {
-		"Bytes",
-		"Hash",
-		"MakeSeed",
-		"Seed",
-		"String",
-	},
-	"html": {
-		"EscapeString",
-		"UnescapeString",
-	},
-	"html/template": {
-		"CSS",
-		"ErrAmbigContext",
-		"ErrBadHTML",
-		"ErrBranchEnd",
-		"ErrEndContext",
-		"ErrJSTemplate",
-		"ErrNoSuchTemplate",
-		"ErrOutputContext",
-		"ErrPartialCharset",
-		"ErrPartialEscape",
-		"ErrPredefinedEscaper",
-		"ErrRangeLoopReentry",
-		"ErrSlashAmbig",
-		"Error",
-		"ErrorCode",
-		"FuncMap",
-		"HTML",
-		"HTMLAttr",
-		"HTMLEscape",
-		"HTMLEscapeString",
-		"HTMLEscaper",
-		"IsTrue",
-		"JS",
-		"JSEscape",
-		"JSEscapeString",
-		"JSEscaper",
-		"JSStr",
-		"Must",
-		"New",
-		"OK",
-		"ParseFS",
-		"ParseFiles",
-		"ParseGlob",
-		"Srcset",
-		"Template",
-		"URL",
-		"URLQueryEscaper",
-	},
-	"image": {
-		"Alpha",
-		"Alpha16",
-		"Black",
-		"CMYK",
-		"Config",
-		"Decode",
-		"DecodeConfig",
-		"ErrFormat",
-		"Gray",
-		"Gray16",
-		"Image",
-		"NRGBA",
-		"NRGBA64",
-		"NYCbCrA",
-		"NewAlpha",
-		"NewAlpha16",
-		"NewCMYK",
-		"NewGray",
-		"NewGray16",
-		"NewNRGBA",
-		"NewNRGBA64",
-		"NewNYCbCrA",
-		"NewPaletted",
-		"NewRGBA",
-		"NewRGBA64",
-		"NewUniform",
-		"NewYCbCr",
-		"Opaque",
-		"Paletted",
-		"PalettedImage",
-		"Point",
-		"Pt",
-		"RGBA",
-		"RGBA64",
-		"RGBA64Image",
-		"Rect",
-		"Rectangle",
-		"RegisterFormat",
-		"Transparent",
-		"Uniform",
-		"White",
-		"YCbCr",
-		"YCbCrSubsampleRatio",
-		"YCbCrSubsampleRatio410",
-		"YCbCrSubsampleRatio411",
-		"YCbCrSubsampleRatio420",
-		"YCbCrSubsampleRatio422",
-		"YCbCrSubsampleRatio440",
-		"YCbCrSubsampleRatio444",
-		"ZP",
-		"ZR",
-	},
-	"image/color": {
-		"Alpha",
-		"Alpha16",
-		"Alpha16Model",
-		"AlphaModel",
-		"Black",
-		"CMYK",
-		"CMYKModel",
-		"CMYKToRGB",
-		"Color",
-		"Gray",
-		"Gray16",
-		"Gray16Model",
-		"GrayModel",
-		"Model",
-		"ModelFunc",
-		"NRGBA",
-		"NRGBA64",
-		"NRGBA64Model",
-		"NRGBAModel",
-		"NYCbCrA",
-		"NYCbCrAModel",
-		"Opaque",
-		"Palette",
-		"RGBA",
-		"RGBA64",
-		"RGBA64Model",
-		"RGBAModel",
-		"RGBToCMYK",
-		"RGBToYCbCr",
-		"Transparent",
-		"White",
-		"YCbCr",
-		"YCbCrModel",
-		"YCbCrToRGB",
-	},
-	"image/color/palette": {
-		"Plan9",
-		"WebSafe",
-	},
-	"image/draw": {
-		"Draw",
-		"DrawMask",
-		"Drawer",
-		"FloydSteinberg",
-		"Image",
-		"Op",
-		"Over",
-		"Quantizer",
-		"RGBA64Image",
-		"Src",
-	},
-	"image/gif": {
-		"Decode",
-		"DecodeAll",
-		"DecodeConfig",
-		"DisposalBackground",
-		"DisposalNone",
-		"DisposalPrevious",
-		"Encode",
-		"EncodeAll",
-		"GIF",
-		"Options",
-	},
-	"image/jpeg": {
-		"Decode",
-		"DecodeConfig",
-		"DefaultQuality",
-		"Encode",
-		"FormatError",
-		"Options",
-		"Reader",
-		"UnsupportedError",
-	},
-	"image/png": {
-		"BestCompression",
-		"BestSpeed",
-		"CompressionLevel",
-		"Decode",
-		"DecodeConfig",
-		"DefaultCompression",
-		"Encode",
-		"Encoder",
-		"EncoderBuffer",
-		"EncoderBufferPool",
-		"FormatError",
-		"NoCompression",
-		"UnsupportedError",
-	},
-	"index/suffixarray": {
-		"Index",
-		"New",
-	},
-	"io": {
-		"ByteReader",
-		"ByteScanner",
-		"ByteWriter",
-		"Closer",
-		"Copy",
-		"CopyBuffer",
-		"CopyN",
-		"Discard",
-		"EOF",
-		"ErrClosedPipe",
-		"ErrNoProgress",
-		"ErrShortBuffer",
-		"ErrShortWrite",
-		"ErrUnexpectedEOF",
-		"LimitReader",
-		"LimitedReader",
-		"MultiReader",
-		"MultiWriter",
-		"NewOffsetWriter",
-		"NewSectionReader",
-		"NopCloser",
-		"OffsetWriter",
-		"Pipe",
-		"PipeReader",
-		"PipeWriter",
-		"ReadAll",
-		"ReadAtLeast",
-		"ReadCloser",
-		"ReadFull",
-		"ReadSeekCloser",
-		"ReadSeeker",
-		"ReadWriteCloser",
-		"ReadWriteSeeker",
-		"ReadWriter",
-		"Reader",
-		"ReaderAt",
-		"ReaderFrom",
-		"RuneReader",
-		"RuneScanner",
-		"SectionReader",
-		"SeekCurrent",
-		"SeekEnd",
-		"SeekStart",
-		"Seeker",
-		"StringWriter",
-		"TeeReader",
-		"WriteCloser",
-		"WriteSeeker",
-		"WriteString",
-		"Writer",
-		"WriterAt",
-		"WriterTo",
-	},
-	"io/fs": {
-		"DirEntry",
-		"ErrClosed",
-		"ErrExist",
-		"ErrInvalid",
-		"ErrNotExist",
-		"ErrPermission",
-		"FS",
-		"File",
-		"FileInfo",
-		"FileInfoToDirEntry",
-		"FileMode",
-		"FormatDirEntry",
-		"FormatFileInfo",
-		"Glob",
-		"GlobFS",
-		"ModeAppend",
-		"ModeCharDevice",
-		"ModeDevice",
-		"ModeDir",
-		"ModeExclusive",
-		"ModeIrregular",
-		"ModeNamedPipe",
-		"ModePerm",
-		"ModeSetgid",
-		"ModeSetuid",
-		"ModeSocket",
-		"ModeSticky",
-		"ModeSymlink",
-		"ModeTemporary",
-		"ModeType",
-		"PathError",
-		"ReadDir",
-		"ReadDirFS",
-		"ReadDirFile",
-		"ReadFile",
-		"ReadFileFS",
-		"SkipAll",
-		"SkipDir",
-		"Stat",
-		"StatFS",
-		"Sub",
-		"SubFS",
-		"ValidPath",
-		"WalkDir",
-		"WalkDirFunc",
-	},
-	"io/ioutil": {
-		"Discard",
-		"NopCloser",
-		"ReadAll",
-		"ReadDir",
-		"ReadFile",
-		"TempDir",
-		"TempFile",
-		"WriteFile",
-	},
-	"log": {
-		"Default",
-		"Fatal",
-		"Fatalf",
-		"Fatalln",
-		"Flags",
-		"LUTC",
-		"Ldate",
-		"Llongfile",
-		"Lmicroseconds",
-		"Lmsgprefix",
-		"Logger",
-		"Lshortfile",
-		"LstdFlags",
-		"Ltime",
-		"New",
-		"Output",
-		"Panic",
-		"Panicf",
-		"Panicln",
-		"Prefix",
-		"Print",
-		"Printf",
-		"Println",
-		"SetFlags",
-		"SetOutput",
-		"SetPrefix",
-		"Writer",
-	},
-	"log/slog": {
-		"Any",
-		"AnyValue",
-		"Attr",
-		"Bool",
-		"BoolValue",
-		"Debug",
-		"DebugContext",
-		"Default",
-		"Duration",
-		"DurationValue",
-		"Error",
-		"ErrorContext",
-		"Float64",
-		"Float64Value",
-		"Group",
-		"GroupValue",
-		"Handler",
-		"HandlerOptions",
-		"Info",
-		"InfoContext",
-		"Int",
-		"Int64",
-		"Int64Value",
-		"IntValue",
-		"JSONHandler",
-		"Kind",
-		"KindAny",
-		"KindBool",
-		"KindDuration",
-		"KindFloat64",
-		"KindGroup",
-		"KindInt64",
-		"KindLogValuer",
-		"KindString",
-		"KindTime",
-		"KindUint64",
-		"Level",
-		"LevelDebug",
-		"LevelError",
-		"LevelInfo",
-		"LevelKey",
-		"LevelVar",
-		"LevelWarn",
-		"Leveler",
-		"Log",
-		"LogAttrs",
-		"LogValuer",
-		"Logger",
-		"MessageKey",
-		"New",
-		"NewJSONHandler",
-		"NewLogLogger",
-		"NewRecord",
-		"NewTextHandler",
-		"Record",
-		"SetDefault",
-		"SetLogLoggerLevel",
-		"Source",
-		"SourceKey",
-		"String",
-		"StringValue",
-		"TextHandler",
-		"Time",
-		"TimeKey",
-		"TimeValue",
-		"Uint64",
-		"Uint64Value",
-		"Value",
-		"Warn",
-		"WarnContext",
-		"With",
-	},
-	"log/syslog": {
-		"Dial",
-		"LOG_ALERT",
-		"LOG_AUTH",
-		"LOG_AUTHPRIV",
-		"LOG_CRIT",
-		"LOG_CRON",
-		"LOG_DAEMON",
-		"LOG_DEBUG",
-		"LOG_EMERG",
-		"LOG_ERR",
-		"LOG_FTP",
-		"LOG_INFO",
-		"LOG_KERN",
-		"LOG_LOCAL0",
-		"LOG_LOCAL1",
-		"LOG_LOCAL2",
-		"LOG_LOCAL3",
-		"LOG_LOCAL4",
-		"LOG_LOCAL5",
-		"LOG_LOCAL6",
-		"LOG_LOCAL7",
-		"LOG_LPR",
-		"LOG_MAIL",
-		"LOG_NEWS",
-		"LOG_NOTICE",
-		"LOG_SYSLOG",
-		"LOG_USER",
-		"LOG_UUCP",
-		"LOG_WARNING",
-		"New",
-		"NewLogger",
-		"Priority",
-		"Writer",
-	},
-	"maps": {
-		"Clone",
-		"Copy",
-		"DeleteFunc",
-		"Equal",
-		"EqualFunc",
-	},
-	"math": {
-		"Abs",
-		"Acos",
-		"Acosh",
-		"Asin",
-		"Asinh",
-		"Atan",
-		"Atan2",
-		"Atanh",
-		"Cbrt",
-		"Ceil",
-		"Copysign",
-		"Cos",
-		"Cosh",
-		"Dim",
-		"E",
-		"Erf",
-		"Erfc",
-		"Erfcinv",
-		"Erfinv",
-		"Exp",
-		"Exp2",
-		"Expm1",
-		"FMA",
-		"Float32bits",
-		"Float32frombits",
-		"Float64bits",
-		"Float64frombits",
-		"Floor",
-		"Frexp",
-		"Gamma",
-		"Hypot",
-		"Ilogb",
-		"Inf",
-		"IsInf",
-		"IsNaN",
-		"J0",
-		"J1",
-		"Jn",
-		"Ldexp",
-		"Lgamma",
-		"Ln10",
-		"Ln2",
-		"Log",
-		"Log10",
-		"Log10E",
-		"Log1p",
-		"Log2",
-		"Log2E",
-		"Logb",
-		"Max",
-		"MaxFloat32",
-		"MaxFloat64",
-		"MaxInt",
-		"MaxInt16",
-		"MaxInt32",
-		"MaxInt64",
-		"MaxInt8",
-		"MaxUint",
-		"MaxUint16",
-		"MaxUint32",
-		"MaxUint64",
-		"MaxUint8",
-		"Min",
-		"MinInt",
-		"MinInt16",
-		"MinInt32",
-		"MinInt64",
-		"MinInt8",
-		"Mod",
-		"Modf",
-		"NaN",
-		"Nextafter",
-		"Nextafter32",
-		"Phi",
-		"Pi",
-		"Pow",
-		"Pow10",
-		"Remainder",
-		"Round",
-		"RoundToEven",
-		"Signbit",
-		"Sin",
-		"Sincos",
-		"Sinh",
-		"SmallestNonzeroFloat32",
-		"SmallestNonzeroFloat64",
-		"Sqrt",
-		"Sqrt2",
-		"SqrtE",
-		"SqrtPhi",
-		"SqrtPi",
-		"Tan",
-		"Tanh",
-		"Trunc",
-		"Y0",
-		"Y1",
-		"Yn",
-	},
-	"math/big": {
-		"Above",
-		"Accuracy",
-		"AwayFromZero",
-		"Below",
-		"ErrNaN",
-		"Exact",
-		"Float",
-		"Int",
-		"Jacobi",
-		"MaxBase",
-		"MaxExp",
-		"MaxPrec",
-		"MinExp",
-		"NewFloat",
-		"NewInt",
-		"NewRat",
-		"ParseFloat",
-		"Rat",
-		"RoundingMode",
-		"ToNearestAway",
-		"ToNearestEven",
-		"ToNegativeInf",
-		"ToPositiveInf",
-		"ToZero",
-		"Word",
-	},
-	"math/bits": {
-		"Add",
-		"Add32",
-		"Add64",
-		"Div",
-		"Div32",
-		"Div64",
-		"LeadingZeros",
-		"LeadingZeros16",
-		"LeadingZeros32",
-		"LeadingZeros64",
-		"LeadingZeros8",
-		"Len",
-		"Len16",
-		"Len32",
-		"Len64",
-		"Len8",
-		"Mul",
-		"Mul32",
-		"Mul64",
-		"OnesCount",
-		"OnesCount16",
-		"OnesCount32",
-		"OnesCount64",
-		"OnesCount8",
-		"Rem",
-		"Rem32",
-		"Rem64",
-		"Reverse",
-		"Reverse16",
-		"Reverse32",
-		"Reverse64",
-		"Reverse8",
-		"ReverseBytes",
-		"ReverseBytes16",
-		"ReverseBytes32",
-		"ReverseBytes64",
-		"RotateLeft",
-		"RotateLeft16",
-		"RotateLeft32",
-		"RotateLeft64",
-		"RotateLeft8",
-		"Sub",
-		"Sub32",
-		"Sub64",
-		"TrailingZeros",
-		"TrailingZeros16",
-		"TrailingZeros32",
-		"TrailingZeros64",
-		"TrailingZeros8",
-		"UintSize",
-	},
-	"math/cmplx": {
-		"Abs",
-		"Acos",
-		"Acosh",
-		"Asin",
-		"Asinh",
-		"Atan",
-		"Atanh",
-		"Conj",
-		"Cos",
-		"Cosh",
-		"Cot",
-		"Exp",
-		"Inf",
-		"IsInf",
-		"IsNaN",
-		"Log",
-		"Log10",
-		"NaN",
-		"Phase",
-		"Polar",
-		"Pow",
-		"Rect",
-		"Sin",
-		"Sinh",
-		"Sqrt",
-		"Tan",
-		"Tanh",
-	},
-	"math/rand": {
-		"ExpFloat64",
-		"Float32",
-		"Float64",
-		"Int",
-		"Int31",
-		"Int31n",
-		"Int63",
-		"Int63n",
-		"Intn",
-		"New",
-		"NewSource",
-		"NewZipf",
-		"NormFloat64",
-		"Perm",
-		"Rand",
-		"Read",
-		"Seed",
-		"Shuffle",
-		"Source",
-		"Source64",
-		"Uint32",
-		"Uint64",
-		"Zipf",
-	},
-	"math/rand/v2": {
-		"ChaCha8",
-		"ExpFloat64",
-		"Float32",
-		"Float64",
-		"Int",
-		"Int32",
-		"Int32N",
-		"Int64",
-		"Int64N",
-		"IntN",
-		"N",
-		"New",
-		"NewChaCha8",
-		"NewPCG",
-		"NewZipf",
-		"NormFloat64",
-		"PCG",
-		"Perm",
-		"Rand",
-		"Shuffle",
-		"Source",
-		"Uint32",
-		"Uint32N",
-		"Uint64",
-		"Uint64N",
-		"UintN",
-		"Zipf",
-	},
-	"mime": {
-		"AddExtensionType",
-		"BEncoding",
-		"ErrInvalidMediaParameter",
-		"ExtensionsByType",
-		"FormatMediaType",
-		"ParseMediaType",
-		"QEncoding",
-		"TypeByExtension",
-		"WordDecoder",
-		"WordEncoder",
-	},
-	"mime/multipart": {
-		"ErrMessageTooLarge",
-		"File",
-		"FileHeader",
-		"Form",
-		"NewReader",
-		"NewWriter",
-		"Part",
-		"Reader",
-		"Writer",
-	},
-	"mime/quotedprintable": {
-		"NewReader",
-		"NewWriter",
-		"Reader",
-		"Writer",
-	},
-	"net": {
-		"Addr",
-		"AddrError",
-		"Buffers",
-		"CIDRMask",
-		"Conn",
-		"DNSConfigError",
-		"DNSError",
-		"DefaultResolver",
-		"Dial",
-		"DialIP",
-		"DialTCP",
-		"DialTimeout",
-		"DialUDP",
-		"DialUnix",
-		"Dialer",
-		"ErrClosed",
-		"ErrWriteToConnected",
-		"Error",
-		"FileConn",
-		"FileListener",
-		"FilePacketConn",
-		"FlagBroadcast",
-		"FlagLoopback",
-		"FlagMulticast",
-		"FlagPointToPoint",
-		"FlagRunning",
-		"FlagUp",
-		"Flags",
-		"HardwareAddr",
-		"IP",
-		"IPAddr",
-		"IPConn",
-		"IPMask",
-		"IPNet",
-		"IPv4",
-		"IPv4Mask",
-		"IPv4allrouter",
-		"IPv4allsys",
-		"IPv4bcast",
-		"IPv4len",
-		"IPv4zero",
-		"IPv6interfacelocalallnodes",
-		"IPv6len",
-		"IPv6linklocalallnodes",
-		"IPv6linklocalallrouters",
-		"IPv6loopback",
-		"IPv6unspecified",
-		"IPv6zero",
-		"Interface",
-		"InterfaceAddrs",
-		"InterfaceByIndex",
-		"InterfaceByName",
-		"Interfaces",
-		"InvalidAddrError",
-		"JoinHostPort",
-		"Listen",
-		"ListenConfig",
-		"ListenIP",
-		"ListenMulticastUDP",
-		"ListenPacket",
-		"ListenTCP",
-		"ListenUDP",
-		"ListenUnix",
-		"ListenUnixgram",
-		"Listener",
-		"LookupAddr",
-		"LookupCNAME",
-		"LookupHost",
-		"LookupIP",
-		"LookupMX",
-		"LookupNS",
-		"LookupPort",
-		"LookupSRV",
-		"LookupTXT",
-		"MX",
-		"NS",
-		"OpError",
-		"PacketConn",
-		"ParseCIDR",
-		"ParseError",
-		"ParseIP",
-		"ParseMAC",
-		"Pipe",
-		"ResolveIPAddr",
-		"ResolveTCPAddr",
-		"ResolveUDPAddr",
-		"ResolveUnixAddr",
-		"Resolver",
-		"SRV",
-		"SplitHostPort",
-		"TCPAddr",
-		"TCPAddrFromAddrPort",
-		"TCPConn",
-		"TCPListener",
-		"UDPAddr",
-		"UDPAddrFromAddrPort",
-		"UDPConn",
-		"UnixAddr",
-		"UnixConn",
-		"UnixListener",
-		"UnknownNetworkError",
-	},
-	"net/http": {
-		"AllowQuerySemicolons",
-		"CanonicalHeaderKey",
-		"Client",
-		"CloseNotifier",
-		"ConnState",
-		"Cookie",
-		"CookieJar",
-		"DefaultClient",
-		"DefaultMaxHeaderBytes",
-		"DefaultMaxIdleConnsPerHost",
-		"DefaultServeMux",
-		"DefaultTransport",
-		"DetectContentType",
-		"Dir",
-		"ErrAbortHandler",
-		"ErrBodyNotAllowed",
-		"ErrBodyReadAfterClose",
-		"ErrContentLength",
-		"ErrHandlerTimeout",
-		"ErrHeaderTooLong",
-		"ErrHijacked",
-		"ErrLineTooLong",
-		"ErrMissingBoundary",
-		"ErrMissingContentLength",
-		"ErrMissingFile",
-		"ErrNoCookie",
-		"ErrNoLocation",
-		"ErrNotMultipart",
-		"ErrNotSupported",
-		"ErrSchemeMismatch",
-		"ErrServerClosed",
-		"ErrShortBody",
-		"ErrSkipAltProtocol",
-		"ErrUnexpectedTrailer",
-		"ErrUseLastResponse",
-		"ErrWriteAfterFlush",
-		"Error",
-		"FS",
-		"File",
-		"FileServer",
-		"FileServerFS",
-		"FileSystem",
-		"Flusher",
-		"Get",
-		"Handle",
-		"HandleFunc",
-		"Handler",
-		"HandlerFunc",
-		"Head",
-		"Header",
-		"Hijacker",
-		"ListenAndServe",
-		"ListenAndServeTLS",
-		"LocalAddrContextKey",
-		"MaxBytesError",
-		"MaxBytesHandler",
-		"MaxBytesReader",
-		"MethodConnect",
-		"MethodDelete",
-		"MethodGet",
-		"MethodHead",
-		"MethodOptions",
-		"MethodPatch",
-		"MethodPost",
-		"MethodPut",
-		"MethodTrace",
-		"NewFileTransport",
-		"NewFileTransportFS",
-		"NewRequest",
-		"NewRequestWithContext",
-		"NewResponseController",
-		"NewServeMux",
-		"NoBody",
-		"NotFound",
-		"NotFoundHandler",
-		"ParseHTTPVersion",
-		"ParseTime",
-		"Post",
-		"PostForm",
-		"ProtocolError",
-		"ProxyFromEnvironment",
-		"ProxyURL",
-		"PushOptions",
-		"Pusher",
-		"ReadRequest",
-		"ReadResponse",
-		"Redirect",
-		"RedirectHandler",
-		"Request",
-		"Response",
-		"ResponseController",
-		"ResponseWriter",
-		"RoundTripper",
-		"SameSite",
-		"SameSiteDefaultMode",
-		"SameSiteLaxMode",
-		"SameSiteNoneMode",
-		"SameSiteStrictMode",
-		"Serve",
-		"ServeContent",
-		"ServeFile",
-		"ServeFileFS",
-		"ServeMux",
-		"ServeTLS",
-		"Server",
-		"ServerContextKey",
-		"SetCookie",
-		"StateActive",
-		"StateClosed",
-		"StateHijacked",
-		"StateIdle",
-		"StateNew",
-		"StatusAccepted",
-		"StatusAlreadyReported",
-		"StatusBadGateway",
-		"StatusBadRequest",
-		"StatusConflict",
-		"StatusContinue",
-		"StatusCreated",
-		"StatusEarlyHints",
-		"StatusExpectationFailed",
-		"StatusFailedDependency",
-		"StatusForbidden",
-		"StatusFound",
-		"StatusGatewayTimeout",
-		"StatusGone",
-		"StatusHTTPVersionNotSupported",
-		"StatusIMUsed",
-		"StatusInsufficientStorage",
-		"StatusInternalServerError",
-		"StatusLengthRequired",
-		"StatusLocked",
-		"StatusLoopDetected",
-		"StatusMethodNotAllowed",
-		"StatusMisdirectedRequest",
-		"StatusMovedPermanently",
-		"StatusMultiStatus",
-		"StatusMultipleChoices",
-		"StatusNetworkAuthenticationRequired",
-		"StatusNoContent",
-		"StatusNonAuthoritativeInfo",
-		"StatusNotAcceptable",
-		"StatusNotExtended",
-		"StatusNotFound",
-		"StatusNotImplemented",
-		"StatusNotModified",
-		"StatusOK",
-		"StatusPartialContent",
-		"StatusPaymentRequired",
-		"StatusPermanentRedirect",
-		"StatusPreconditionFailed",
-		"StatusPreconditionRequired",
-		"StatusProcessing",
-		"StatusProxyAuthRequired",
-		"StatusRequestEntityTooLarge",
-		"StatusRequestHeaderFieldsTooLarge",
-		"StatusRequestTimeout",
-		"StatusRequestURITooLong",
-		"StatusRequestedRangeNotSatisfiable",
-		"StatusResetContent",
-		"StatusSeeOther",
-		"StatusServiceUnavailable",
-		"StatusSwitchingProtocols",
-		"StatusTeapot",
-		"StatusTemporaryRedirect",
-		"StatusText",
-		"StatusTooEarly",
-		"StatusTooManyRequests",
-		"StatusUnauthorized",
-		"StatusUnavailableForLegalReasons",
-		"StatusUnprocessableEntity",
-		"StatusUnsupportedMediaType",
-		"StatusUpgradeRequired",
-		"StatusUseProxy",
-		"StatusVariantAlsoNegotiates",
-		"StripPrefix",
-		"TimeFormat",
-		"TimeoutHandler",
-		"TrailerPrefix",
-		"Transport",
-	},
-	"net/http/cgi": {
-		"Handler",
-		"Request",
-		"RequestFromMap",
-		"Serve",
-	},
-	"net/http/cookiejar": {
-		"Jar",
-		"New",
-		"Options",
-		"PublicSuffixList",
-	},
-	"net/http/fcgi": {
-		"ErrConnClosed",
-		"ErrRequestAborted",
-		"ProcessEnv",
-		"Serve",
-	},
-	"net/http/httptest": {
-		"DefaultRemoteAddr",
-		"NewRecorder",
-		"NewRequest",
-		"NewServer",
-		"NewTLSServer",
-		"NewUnstartedServer",
-		"ResponseRecorder",
-		"Server",
-	},
-	"net/http/httptrace": {
-		"ClientTrace",
-		"ContextClientTrace",
-		"DNSDoneInfo",
-		"DNSStartInfo",
-		"GotConnInfo",
-		"WithClientTrace",
-		"WroteRequestInfo",
-	},
-	"net/http/httputil": {
-		"BufferPool",
-		"ClientConn",
-		"DumpRequest",
-		"DumpRequestOut",
-		"DumpResponse",
-		"ErrClosed",
-		"ErrLineTooLong",
-		"ErrPersistEOF",
-		"ErrPipeline",
-		"NewChunkedReader",
-		"NewChunkedWriter",
-		"NewClientConn",
-		"NewProxyClientConn",
-		"NewServerConn",
-		"NewSingleHostReverseProxy",
-		"ProxyRequest",
-		"ReverseProxy",
-		"ServerConn",
-	},
-	"net/http/pprof": {
-		"Cmdline",
-		"Handler",
-		"Index",
-		"Profile",
-		"Symbol",
-		"Trace",
-	},
-	"net/mail": {
-		"Address",
-		"AddressParser",
-		"ErrHeaderNotPresent",
-		"Header",
-		"Message",
-		"ParseAddress",
-		"ParseAddressList",
-		"ParseDate",
-		"ReadMessage",
-	},
-	"net/netip": {
-		"Addr",
-		"AddrFrom16",
-		"AddrFrom4",
-		"AddrFromSlice",
-		"AddrPort",
-		"AddrPortFrom",
-		"IPv4Unspecified",
-		"IPv6LinkLocalAllNodes",
-		"IPv6LinkLocalAllRouters",
-		"IPv6Loopback",
-		"IPv6Unspecified",
-		"MustParseAddr",
-		"MustParseAddrPort",
-		"MustParsePrefix",
-		"ParseAddr",
-		"ParseAddrPort",
-		"ParsePrefix",
-		"Prefix",
-		"PrefixFrom",
-	},
-	"net/rpc": {
-		"Accept",
-		"Call",
-		"Client",
-		"ClientCodec",
-		"DefaultDebugPath",
-		"DefaultRPCPath",
-		"DefaultServer",
-		"Dial",
-		"DialHTTP",
-		"DialHTTPPath",
-		"ErrShutdown",
-		"HandleHTTP",
-		"NewClient",
-		"NewClientWithCodec",
-		"NewServer",
-		"Register",
-		"RegisterName",
-		"Request",
-		"Response",
-		"ServeCodec",
-		"ServeConn",
-		"ServeRequest",
-		"Server",
-		"ServerCodec",
-		"ServerError",
-	},
-	"net/rpc/jsonrpc": {
-		"Dial",
-		"NewClient",
-		"NewClientCodec",
-		"NewServerCodec",
-		"ServeConn",
-	},
-	"net/smtp": {
-		"Auth",
-		"CRAMMD5Auth",
-		"Client",
-		"Dial",
-		"NewClient",
-		"PlainAuth",
-		"SendMail",
-		"ServerInfo",
-	},
-	"net/textproto": {
-		"CanonicalMIMEHeaderKey",
-		"Conn",
-		"Dial",
-		"Error",
-		"MIMEHeader",
-		"NewConn",
-		"NewReader",
-		"NewWriter",
-		"Pipeline",
-		"ProtocolError",
-		"Reader",
-		"TrimBytes",
-		"TrimString",
-		"Writer",
-	},
-	"net/url": {
-		"Error",
-		"EscapeError",
-		"InvalidHostError",
-		"JoinPath",
-		"Parse",
-		"ParseQuery",
-		"ParseRequestURI",
-		"PathEscape",
-		"PathUnescape",
-		"QueryEscape",
-		"QueryUnescape",
-		"URL",
-		"User",
-		"UserPassword",
-		"Userinfo",
-		"Values",
-	},
-	"os": {
-		"Args",
-		"Chdir",
-		"Chmod",
-		"Chown",
-		"Chtimes",
-		"Clearenv",
-		"Create",
-		"CreateTemp",
-		"DevNull",
-		"DirEntry",
-		"DirFS",
-		"Environ",
-		"ErrClosed",
-		"ErrDeadlineExceeded",
-		"ErrExist",
-		"ErrInvalid",
-		"ErrNoDeadline",
-		"ErrNotExist",
-		"ErrPermission",
-		"ErrProcessDone",
-		"Executable",
-		"Exit",
-		"Expand",
-		"ExpandEnv",
-		"File",
-		"FileInfo",
-		"FileMode",
-		"FindProcess",
-		"Getegid",
-		"Getenv",
-		"Geteuid",
-		"Getgid",
-		"Getgroups",
-		"Getpagesize",
-		"Getpid",
-		"Getppid",
-		"Getuid",
-		"Getwd",
-		"Hostname",
-		"Interrupt",
-		"IsExist",
-		"IsNotExist",
-		"IsPathSeparator",
-		"IsPermission",
-		"IsTimeout",
-		"Kill",
-		"Lchown",
-		"Link",
-		"LinkError",
-		"LookupEnv",
-		"Lstat",
-		"Mkdir",
-		"MkdirAll",
-		"MkdirTemp",
-		"ModeAppend",
-		"ModeCharDevice",
-		"ModeDevice",
-		"ModeDir",
-		"ModeExclusive",
-		"ModeIrregular",
-		"ModeNamedPipe",
-		"ModePerm",
-		"ModeSetgid",
-		"ModeSetuid",
-		"ModeSocket",
-		"ModeSticky",
-		"ModeSymlink",
-		"ModeTemporary",
-		"ModeType",
-		"NewFile",
-		"NewSyscallError",
-		"O_APPEND",
-		"O_CREATE",
-		"O_EXCL",
-		"O_RDONLY",
-		"O_RDWR",
-		"O_SYNC",
-		"O_TRUNC",
-		"O_WRONLY",
-		"Open",
-		"OpenFile",
-		"PathError",
-		"PathListSeparator",
-		"PathSeparator",
-		"Pipe",
-		"ProcAttr",
-		"Process",
-		"ProcessState",
-		"ReadDir",
-		"ReadFile",
-		"Readlink",
-		"Remove",
-		"RemoveAll",
-		"Rename",
-		"SEEK_CUR",
-		"SEEK_END",
-		"SEEK_SET",
-		"SameFile",
-		"Setenv",
-		"Signal",
-		"StartProcess",
-		"Stat",
-		"Stderr",
-		"Stdin",
-		"Stdout",
-		"Symlink",
-		"SyscallError",
-		"TempDir",
-		"Truncate",
-		"Unsetenv",
-		"UserCacheDir",
-		"UserConfigDir",
-		"UserHomeDir",
-		"WriteFile",
-	},
-	"os/exec": {
-		"Cmd",
-		"Command",
-		"CommandContext",
-		"ErrDot",
-		"ErrNotFound",
-		"ErrWaitDelay",
-		"Error",
-		"ExitError",
-		"LookPath",
-	},
-	"os/signal": {
-		"Ignore",
-		"Ignored",
-		"Notify",
-		"NotifyContext",
-		"Reset",
-		"Stop",
-	},
-	"os/user": {
-		"Current",
-		"Group",
-		"Lookup",
-		"LookupGroup",
-		"LookupGroupId",
-		"LookupId",
-		"UnknownGroupError",
-		"UnknownGroupIdError",
-		"UnknownUserError",
-		"UnknownUserIdError",
-		"User",
-	},
-	"path": {
-		"Base",
-		"Clean",
-		"Dir",
-		"ErrBadPattern",
-		"Ext",
-		"IsAbs",
-		"Join",
-		"Match",
-		"Split",
-	},
-	"path/filepath": {
-		"Abs",
-		"Base",
-		"Clean",
-		"Dir",
-		"ErrBadPattern",
-		"EvalSymlinks",
-		"Ext",
-		"FromSlash",
-		"Glob",
-		"HasPrefix",
-		"IsAbs",
-		"IsLocal",
-		"Join",
-		"ListSeparator",
-		"Match",
-		"Rel",
-		"Separator",
-		"SkipAll",
-		"SkipDir",
-		"Split",
-		"SplitList",
-		"ToSlash",
-		"VolumeName",
-		"Walk",
-		"WalkDir",
-		"WalkFunc",
-	},
-	"plugin": {
-		"Open",
-		"Plugin",
-		"Symbol",
-	},
-	"reflect": {
-		"Append",
-		"AppendSlice",
-		"Array",
-		"ArrayOf",
-		"Bool",
-		"BothDir",
-		"Chan",
-		"ChanDir",
-		"ChanOf",
-		"Complex128",
-		"Complex64",
-		"Copy",
-		"DeepEqual",
-		"Float32",
-		"Float64",
-		"Func",
-		"FuncOf",
-		"Indirect",
-		"Int",
-		"Int16",
-		"Int32",
-		"Int64",
-		"Int8",
-		"Interface",
-		"Invalid",
-		"Kind",
-		"MakeChan",
-		"MakeFunc",
-		"MakeMap",
-		"MakeMapWithSize",
-		"MakeSlice",
-		"Map",
-		"MapIter",
-		"MapOf",
-		"Method",
-		"New",
-		"NewAt",
-		"Pointer",
-		"PointerTo",
-		"Ptr",
-		"PtrTo",
-		"RecvDir",
-		"Select",
-		"SelectCase",
-		"SelectDefault",
-		"SelectDir",
-		"SelectRecv",
-		"SelectSend",
-		"SendDir",
-		"Slice",
-		"SliceHeader",
-		"SliceOf",
-		"String",
-		"StringHeader",
-		"Struct",
-		"StructField",
-		"StructOf",
-		"StructTag",
-		"Swapper",
-		"Type",
-		"TypeFor",
-		"TypeOf",
-		"Uint",
-		"Uint16",
-		"Uint32",
-		"Uint64",
-		"Uint8",
-		"Uintptr",
-		"UnsafePointer",
-		"Value",
-		"ValueError",
-		"ValueOf",
-		"VisibleFields",
-		"Zero",
-	},
-	"regexp": {
-		"Compile",
-		"CompilePOSIX",
-		"Match",
-		"MatchReader",
-		"MatchString",
-		"MustCompile",
-		"MustCompilePOSIX",
-		"QuoteMeta",
-		"Regexp",
-	},
-	"regexp/syntax": {
-		"ClassNL",
-		"Compile",
-		"DotNL",
-		"EmptyBeginLine",
-		"EmptyBeginText",
-		"EmptyEndLine",
-		"EmptyEndText",
-		"EmptyNoWordBoundary",
-		"EmptyOp",
-		"EmptyOpContext",
-		"EmptyWordBoundary",
-		"ErrInternalError",
-		"ErrInvalidCharClass",
-		"ErrInvalidCharRange",
-		"ErrInvalidEscape",
-		"ErrInvalidNamedCapture",
-		"ErrInvalidPerlOp",
-		"ErrInvalidRepeatOp",
-		"ErrInvalidRepeatSize",
-		"ErrInvalidUTF8",
-		"ErrLarge",
-		"ErrMissingBracket",
-		"ErrMissingParen",
-		"ErrMissingRepeatArgument",
-		"ErrNestingDepth",
-		"ErrTrailingBackslash",
-		"ErrUnexpectedParen",
-		"Error",
-		"ErrorCode",
-		"Flags",
-		"FoldCase",
-		"Inst",
-		"InstAlt",
-		"InstAltMatch",
-		"InstCapture",
-		"InstEmptyWidth",
-		"InstFail",
-		"InstMatch",
-		"InstNop",
-		"InstOp",
-		"InstRune",
-		"InstRune1",
-		"InstRuneAny",
-		"InstRuneAnyNotNL",
-		"IsWordChar",
-		"Literal",
-		"MatchNL",
-		"NonGreedy",
-		"OneLine",
-		"Op",
-		"OpAlternate",
-		"OpAnyChar",
-		"OpAnyCharNotNL",
-		"OpBeginLine",
-		"OpBeginText",
-		"OpCapture",
-		"OpCharClass",
-		"OpConcat",
-		"OpEmptyMatch",
-		"OpEndLine",
-		"OpEndText",
-		"OpLiteral",
-		"OpNoMatch",
-		"OpNoWordBoundary",
-		"OpPlus",
-		"OpQuest",
-		"OpRepeat",
-		"OpStar",
-		"OpWordBoundary",
-		"POSIX",
-		"Parse",
-		"Perl",
-		"PerlX",
-		"Prog",
-		"Regexp",
-		"Simple",
-		"UnicodeGroups",
-		"WasDollar",
-	},
-	"runtime": {
-		"BlockProfile",
-		"BlockProfileRecord",
-		"Breakpoint",
-		"CPUProfile",
-		"Caller",
-		"Callers",
-		"CallersFrames",
-		"Compiler",
-		"Error",
-		"Frame",
-		"Frames",
-		"Func",
-		"FuncForPC",
-		"GC",
-		"GOARCH",
-		"GOMAXPROCS",
-		"GOOS",
-		"GOROOT",
-		"Goexit",
-		"GoroutineProfile",
-		"Gosched",
-		"KeepAlive",
-		"LockOSThread",
-		"MemProfile",
-		"MemProfileRate",
-		"MemProfileRecord",
-		"MemStats",
-		"MutexProfile",
-		"NumCPU",
-		"NumCgoCall",
-		"NumGoroutine",
-		"PanicNilError",
-		"Pinner",
-		"ReadMemStats",
-		"ReadTrace",
-		"SetBlockProfileRate",
-		"SetCPUProfileRate",
-		"SetCgoTraceback",
-		"SetFinalizer",
-		"SetMutexProfileFraction",
-		"Stack",
-		"StackRecord",
-		"StartTrace",
-		"StopTrace",
-		"ThreadCreateProfile",
-		"TypeAssertionError",
-		"UnlockOSThread",
-		"Version",
-	},
-	"runtime/cgo": {
-		"Handle",
-		"Incomplete",
-		"NewHandle",
-	},
-	"runtime/coverage": {
-		"ClearCounters",
-		"WriteCounters",
-		"WriteCountersDir",
-		"WriteMeta",
-		"WriteMetaDir",
-	},
-	"runtime/debug": {
-		"BuildInfo",
-		"BuildSetting",
-		"FreeOSMemory",
-		"GCStats",
-		"Module",
-		"ParseBuildInfo",
-		"PrintStack",
-		"ReadBuildInfo",
-		"ReadGCStats",
-		"SetGCPercent",
-		"SetMaxStack",
-		"SetMaxThreads",
-		"SetMemoryLimit",
-		"SetPanicOnFault",
-		"SetTraceback",
-		"Stack",
-		"WriteHeapDump",
-	},
-	"runtime/metrics": {
-		"All",
-		"Description",
-		"Float64Histogram",
-		"KindBad",
-		"KindFloat64",
-		"KindFloat64Histogram",
-		"KindUint64",
-		"Read",
-		"Sample",
-		"Value",
-		"ValueKind",
-	},
-	"runtime/pprof": {
-		"Do",
-		"ForLabels",
-		"Label",
-		"LabelSet",
-		"Labels",
-		"Lookup",
-		"NewProfile",
-		"Profile",
-		"Profiles",
-		"SetGoroutineLabels",
-		"StartCPUProfile",
-		"StopCPUProfile",
-		"WithLabels",
-		"WriteHeapProfile",
-	},
-	"runtime/trace": {
-		"IsEnabled",
-		"Log",
-		"Logf",
-		"NewTask",
-		"Region",
-		"Start",
-		"StartRegion",
-		"Stop",
-		"Task",
-		"WithRegion",
-	},
-	"slices": {
-		"BinarySearch",
-		"BinarySearchFunc",
-		"Clip",
-		"Clone",
-		"Compact",
-		"CompactFunc",
-		"Compare",
-		"CompareFunc",
-		"Concat",
-		"Contains",
-		"ContainsFunc",
-		"Delete",
-		"DeleteFunc",
-		"Equal",
-		"EqualFunc",
-		"Grow",
-		"Index",
-		"IndexFunc",
-		"Insert",
-		"IsSorted",
-		"IsSortedFunc",
-		"Max",
-		"MaxFunc",
-		"Min",
-		"MinFunc",
-		"Replace",
-		"Reverse",
-		"Sort",
-		"SortFunc",
-		"SortStableFunc",
-	},
-	"sort": {
-		"Find",
-		"Float64Slice",
-		"Float64s",
-		"Float64sAreSorted",
-		"IntSlice",
-		"Interface",
-		"Ints",
-		"IntsAreSorted",
-		"IsSorted",
-		"Reverse",
-		"Search",
-		"SearchFloat64s",
-		"SearchInts",
-		"SearchStrings",
-		"Slice",
-		"SliceIsSorted",
-		"SliceStable",
-		"Sort",
-		"Stable",
-		"StringSlice",
-		"Strings",
-		"StringsAreSorted",
-	},
-	"strconv": {
-		"AppendBool",
-		"AppendFloat",
-		"AppendInt",
-		"AppendQuote",
-		"AppendQuoteRune",
-		"AppendQuoteRuneToASCII",
-		"AppendQuoteRuneToGraphic",
-		"AppendQuoteToASCII",
-		"AppendQuoteToGraphic",
-		"AppendUint",
-		"Atoi",
-		"CanBackquote",
-		"ErrRange",
-		"ErrSyntax",
-		"FormatBool",
-		"FormatComplex",
-		"FormatFloat",
-		"FormatInt",
-		"FormatUint",
-		"IntSize",
-		"IsGraphic",
-		"IsPrint",
-		"Itoa",
-		"NumError",
-		"ParseBool",
-		"ParseComplex",
-		"ParseFloat",
-		"ParseInt",
-		"ParseUint",
-		"Quote",
-		"QuoteRune",
-		"QuoteRuneToASCII",
-		"QuoteRuneToGraphic",
-		"QuoteToASCII",
-		"QuoteToGraphic",
-		"QuotedPrefix",
-		"Unquote",
-		"UnquoteChar",
-	},
-	"strings": {
-		"Builder",
-		"Clone",
-		"Compare",
-		"Contains",
-		"ContainsAny",
-		"ContainsFunc",
-		"ContainsRune",
-		"Count",
-		"Cut",
-		"CutPrefix",
-		"CutSuffix",
-		"EqualFold",
-		"Fields",
-		"FieldsFunc",
-		"HasPrefix",
-		"HasSuffix",
-		"Index",
-		"IndexAny",
-		"IndexByte",
-		"IndexFunc",
-		"IndexRune",
-		"Join",
-		"LastIndex",
-		"LastIndexAny",
-		"LastIndexByte",
-		"LastIndexFunc",
-		"Map",
-		"NewReader",
-		"NewReplacer",
-		"Reader",
-		"Repeat",
-		"Replace",
-		"ReplaceAll",
-		"Replacer",
-		"Split",
-		"SplitAfter",
-		"SplitAfterN",
-		"SplitN",
-		"Title",
-		"ToLower",
-		"ToLowerSpecial",
-		"ToTitle",
-		"ToTitleSpecial",
-		"ToUpper",
-		"ToUpperSpecial",
-		"ToValidUTF8",
-		"Trim",
-		"TrimFunc",
-		"TrimLeft",
-		"TrimLeftFunc",
-		"TrimPrefix",
-		"TrimRight",
-		"TrimRightFunc",
-		"TrimSpace",
-		"TrimSuffix",
-	},
-	"sync": {
-		"Cond",
-		"Locker",
-		"Map",
-		"Mutex",
-		"NewCond",
-		"Once",
-		"OnceFunc",
-		"OnceValue",
-		"OnceValues",
-		"Pool",
-		"RWMutex",
-		"WaitGroup",
-	},
-	"sync/atomic": {
-		"AddInt32",
-		"AddInt64",
-		"AddUint32",
-		"AddUint64",
-		"AddUintptr",
-		"Bool",
-		"CompareAndSwapInt32",
-		"CompareAndSwapInt64",
-		"CompareAndSwapPointer",
-		"CompareAndSwapUint32",
-		"CompareAndSwapUint64",
-		"CompareAndSwapUintptr",
-		"Int32",
-		"Int64",
-		"LoadInt32",
-		"LoadInt64",
-		"LoadPointer",
-		"LoadUint32",
-		"LoadUint64",
-		"LoadUintptr",
-		"Pointer",
-		"StoreInt32",
-		"StoreInt64",
-		"StorePointer",
-		"StoreUint32",
-		"StoreUint64",
-		"StoreUintptr",
-		"SwapInt32",
-		"SwapInt64",
-		"SwapPointer",
-		"SwapUint32",
-		"SwapUint64",
-		"SwapUintptr",
-		"Uint32",
-		"Uint64",
-		"Uintptr",
-		"Value",
-	},
-	"syscall": {
-		"AF_ALG",
-		"AF_APPLETALK",
-		"AF_ARP",
-		"AF_ASH",
-		"AF_ATM",
-		"AF_ATMPVC",
-		"AF_ATMSVC",
-		"AF_AX25",
-		"AF_BLUETOOTH",
-		"AF_BRIDGE",
-		"AF_CAIF",
-		"AF_CAN",
-		"AF_CCITT",
-		"AF_CHAOS",
-		"AF_CNT",
-		"AF_COIP",
-		"AF_DATAKIT",
-		"AF_DECnet",
-		"AF_DLI",
-		"AF_E164",
-		"AF_ECMA",
-		"AF_ECONET",
-		"AF_ENCAP",
-		"AF_FILE",
-		"AF_HYLINK",
-		"AF_IEEE80211",
-		"AF_IEEE802154",
-		"AF_IMPLINK",
-		"AF_INET",
-		"AF_INET6",
-		"AF_INET6_SDP",
-		"AF_INET_SDP",
-		"AF_IPX",
-		"AF_IRDA",
-		"AF_ISDN",
-		"AF_ISO",
-		"AF_IUCV",
-		"AF_KEY",
-		"AF_LAT",
-		"AF_LINK",
-		"AF_LLC",
-		"AF_LOCAL",
-		"AF_MAX",
-		"AF_MPLS",
-		"AF_NATM",
-		"AF_NDRV",
-		"AF_NETBEUI",
-		"AF_NETBIOS",
-		"AF_NETGRAPH",
-		"AF_NETLINK",
-		"AF_NETROM",
-		"AF_NS",
-		"AF_OROUTE",
-		"AF_OSI",
-		"AF_PACKET",
-		"AF_PHONET",
-		"AF_PPP",
-		"AF_PPPOX",
-		"AF_PUP",
-		"AF_RDS",
-		"AF_RESERVED_36",
-		"AF_ROSE",
-		"AF_ROUTE",
-		"AF_RXRPC",
-		"AF_SCLUSTER",
-		"AF_SECURITY",
-		"AF_SIP",
-		"AF_SLOW",
-		"AF_SNA",
-		"AF_SYSTEM",
-		"AF_TIPC",
-		"AF_UNIX",
-		"AF_UNSPEC",
-		"AF_UTUN",
-		"AF_VENDOR00",
-		"AF_VENDOR01",
-		"AF_VENDOR02",
-		"AF_VENDOR03",
-		"AF_VENDOR04",
-		"AF_VENDOR05",
-		"AF_VENDOR06",
-		"AF_VENDOR07",
-		"AF_VENDOR08",
-		"AF_VENDOR09",
-		"AF_VENDOR10",
-		"AF_VENDOR11",
-		"AF_VENDOR12",
-		"AF_VENDOR13",
-		"AF_VENDOR14",
-		"AF_VENDOR15",
-		"AF_VENDOR16",
-		"AF_VENDOR17",
-		"AF_VENDOR18",
-		"AF_VENDOR19",
-		"AF_VENDOR20",
-		"AF_VENDOR21",
-		"AF_VENDOR22",
-		"AF_VENDOR23",
-		"AF_VENDOR24",
-		"AF_VENDOR25",
-		"AF_VENDOR26",
-		"AF_VENDOR27",
-		"AF_VENDOR28",
-		"AF_VENDOR29",
-		"AF_VENDOR30",
-		"AF_VENDOR31",
-		"AF_VENDOR32",
-		"AF_VENDOR33",
-		"AF_VENDOR34",
-		"AF_VENDOR35",
-		"AF_VENDOR36",
-		"AF_VENDOR37",
-		"AF_VENDOR38",
-		"AF_VENDOR39",
-		"AF_VENDOR40",
-		"AF_VENDOR41",
-		"AF_VENDOR42",
-		"AF_VENDOR43",
-		"AF_VENDOR44",
-		"AF_VENDOR45",
-		"AF_VENDOR46",
-		"AF_VENDOR47",
-		"AF_WANPIPE",
-		"AF_X25",
-		"AI_CANONNAME",
-		"AI_NUMERICHOST",
-		"AI_PASSIVE",
-		"APPLICATION_ERROR",
-		"ARPHRD_ADAPT",
-		"ARPHRD_APPLETLK",
-		"ARPHRD_ARCNET",
-		"ARPHRD_ASH",
-		"ARPHRD_ATM",
-		"ARPHRD_AX25",
-		"ARPHRD_BIF",
-		"ARPHRD_CHAOS",
-		"ARPHRD_CISCO",
-		"ARPHRD_CSLIP",
-		"ARPHRD_CSLIP6",
-		"ARPHRD_DDCMP",
-		"ARPHRD_DLCI",
-		"ARPHRD_ECONET",
-		"ARPHRD_EETHER",
-		"ARPHRD_ETHER",
-		"ARPHRD_EUI64",
-		"ARPHRD_FCAL",
-		"ARPHRD_FCFABRIC",
-		"ARPHRD_FCPL",
-		"ARPHRD_FCPP",
-		"ARPHRD_FDDI",
-		"ARPHRD_FRAD",
-		"ARPHRD_FRELAY",
-		"ARPHRD_HDLC",
-		"ARPHRD_HIPPI",
-		"ARPHRD_HWX25",
-		"ARPHRD_IEEE1394",
-		"ARPHRD_IEEE802",
-		"ARPHRD_IEEE80211",
-		"ARPHRD_IEEE80211_PRISM",
-		"ARPHRD_IEEE80211_RADIOTAP",
-		"ARPHRD_IEEE802154",
-		"ARPHRD_IEEE802154_PHY",
-		"ARPHRD_IEEE802_TR",
-		"ARPHRD_INFINIBAND",
-		"ARPHRD_IPDDP",
-		"ARPHRD_IPGRE",
-		"ARPHRD_IRDA",
-		"ARPHRD_LAPB",
-		"ARPHRD_LOCALTLK",
-		"ARPHRD_LOOPBACK",
-		"ARPHRD_METRICOM",
-		"ARPHRD_NETROM",
-		"ARPHRD_NONE",
-		"ARPHRD_PIMREG",
-		"ARPHRD_PPP",
-		"ARPHRD_PRONET",
-		"ARPHRD_RAWHDLC",
-		"ARPHRD_ROSE",
-		"ARPHRD_RSRVD",
-		"ARPHRD_SIT",
-		"ARPHRD_SKIP",
-		"ARPHRD_SLIP",
-		"ARPHRD_SLIP6",
-		"ARPHRD_STRIP",
-		"ARPHRD_TUNNEL",
-		"ARPHRD_TUNNEL6",
-		"ARPHRD_VOID",
-		"ARPHRD_X25",
-		"AUTHTYPE_CLIENT",
-		"AUTHTYPE_SERVER",
-		"Accept",
-		"Accept4",
-		"AcceptEx",
-		"Access",
-		"Acct",
-		"AddrinfoW",
-		"Adjtime",
-		"Adjtimex",
-		"AllThreadsSyscall",
-		"AllThreadsSyscall6",
-		"AttachLsf",
-		"B0",
-		"B1000000",
-		"B110",
-		"B115200",
-		"B1152000",
-		"B1200",
-		"B134",
-		"B14400",
-		"B150",
-		"B1500000",
-		"B1800",
-		"B19200",
-		"B200",
-		"B2000000",
-		"B230400",
-		"B2400",
-		"B2500000",
-		"B28800",
-		"B300",
-		"B3000000",
-		"B3500000",
-		"B38400",
-		"B4000000",
-		"B460800",
-		"B4800",
-		"B50",
-		"B500000",
-		"B57600",
-		"B576000",
-		"B600",
-		"B7200",
-		"B75",
-		"B76800",
-		"B921600",
-		"B9600",
-		"BASE_PROTOCOL",
-		"BIOCFEEDBACK",
-		"BIOCFLUSH",
-		"BIOCGBLEN",
-		"BIOCGDIRECTION",
-		"BIOCGDIRFILT",
-		"BIOCGDLT",
-		"BIOCGDLTLIST",
-		"BIOCGETBUFMODE",
-		"BIOCGETIF",
-		"BIOCGETZMAX",
-		"BIOCGFEEDBACK",
-		"BIOCGFILDROP",
-		"BIOCGHDRCMPLT",
-		"BIOCGRSIG",
-		"BIOCGRTIMEOUT",
-		"BIOCGSEESENT",
-		"BIOCGSTATS",
-		"BIOCGSTATSOLD",
-		"BIOCGTSTAMP",
-		"BIOCIMMEDIATE",
-		"BIOCLOCK",
-		"BIOCPROMISC",
-		"BIOCROTZBUF",
-		"BIOCSBLEN",
-		"BIOCSDIRECTION",
-		"BIOCSDIRFILT",
-		"BIOCSDLT",
-		"BIOCSETBUFMODE",
-		"BIOCSETF",
-		"BIOCSETFNR",
-		"BIOCSETIF",
-		"BIOCSETWF",
-		"BIOCSETZBUF",
-		"BIOCSFEEDBACK",
-		"BIOCSFILDROP",
-		"BIOCSHDRCMPLT",
-		"BIOCSRSIG",
-		"BIOCSRTIMEOUT",
-		"BIOCSSEESENT",
-		"BIOCSTCPF",
-		"BIOCSTSTAMP",
-		"BIOCSUDPF",
-		"BIOCVERSION",
-		"BPF_A",
-		"BPF_ABS",
-		"BPF_ADD",
-		"BPF_ALIGNMENT",
-		"BPF_ALIGNMENT32",
-		"BPF_ALU",
-		"BPF_AND",
-		"BPF_B",
-		"BPF_BUFMODE_BUFFER",
-		"BPF_BUFMODE_ZBUF",
-		"BPF_DFLTBUFSIZE",
-		"BPF_DIRECTION_IN",
-		"BPF_DIRECTION_OUT",
-		"BPF_DIV",
-		"BPF_H",
-		"BPF_IMM",
-		"BPF_IND",
-		"BPF_JA",
-		"BPF_JEQ",
-		"BPF_JGE",
-		"BPF_JGT",
-		"BPF_JMP",
-		"BPF_JSET",
-		"BPF_K",
-		"BPF_LD",
-		"BPF_LDX",
-		"BPF_LEN",
-		"BPF_LSH",
-		"BPF_MAJOR_VERSION",
-		"BPF_MAXBUFSIZE",
-		"BPF_MAXINSNS",
-		"BPF_MEM",
-		"BPF_MEMWORDS",
-		"BPF_MINBUFSIZE",
-		"BPF_MINOR_VERSION",
-		"BPF_MISC",
-		"BPF_MSH",
-		"BPF_MUL",
-		"BPF_NEG",
-		"BPF_OR",
-		"BPF_RELEASE",
-		"BPF_RET",
-		"BPF_RSH",
-		"BPF_ST",
-		"BPF_STX",
-		"BPF_SUB",
-		"BPF_TAX",
-		"BPF_TXA",
-		"BPF_T_BINTIME",
-		"BPF_T_BINTIME_FAST",
-		"BPF_T_BINTIME_MONOTONIC",
-		"BPF_T_BINTIME_MONOTONIC_FAST",
-		"BPF_T_FAST",
-		"BPF_T_FLAG_MASK",
-		"BPF_T_FORMAT_MASK",
-		"BPF_T_MICROTIME",
-		"BPF_T_MICROTIME_FAST",
-		"BPF_T_MICROTIME_MONOTONIC",
-		"BPF_T_MICROTIME_MONOTONIC_FAST",
-		"BPF_T_MONOTONIC",
-		"BPF_T_MONOTONIC_FAST",
-		"BPF_T_NANOTIME",
-		"BPF_T_NANOTIME_FAST",
-		"BPF_T_NANOTIME_MONOTONIC",
-		"BPF_T_NANOTIME_MONOTONIC_FAST",
-		"BPF_T_NONE",
-		"BPF_T_NORMAL",
-		"BPF_W",
-		"BPF_X",
-		"BRKINT",
-		"Bind",
-		"BindToDevice",
-		"BpfBuflen",
-		"BpfDatalink",
-		"BpfHdr",
-		"BpfHeadercmpl",
-		"BpfInsn",
-		"BpfInterface",
-		"BpfJump",
-		"BpfProgram",
-		"BpfStat",
-		"BpfStats",
-		"BpfStmt",
-		"BpfTimeout",
-		"BpfTimeval",
-		"BpfVersion",
-		"BpfZbuf",
-		"BpfZbufHeader",
-		"ByHandleFileInformation",
-		"BytePtrFromString",
-		"ByteSliceFromString",
-		"CCR0_FLUSH",
-		"CERT_CHAIN_POLICY_AUTHENTICODE",
-		"CERT_CHAIN_POLICY_AUTHENTICODE_TS",
-		"CERT_CHAIN_POLICY_BASE",
-		"CERT_CHAIN_POLICY_BASIC_CONSTRAINTS",
-		"CERT_CHAIN_POLICY_EV",
-		"CERT_CHAIN_POLICY_MICROSOFT_ROOT",
-		"CERT_CHAIN_POLICY_NT_AUTH",
-		"CERT_CHAIN_POLICY_SSL",
-		"CERT_E_CN_NO_MATCH",
-		"CERT_E_EXPIRED",
-		"CERT_E_PURPOSE",
-		"CERT_E_ROLE",
-		"CERT_E_UNTRUSTEDROOT",
-		"CERT_STORE_ADD_ALWAYS",
-		"CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG",
-		"CERT_STORE_PROV_MEMORY",
-		"CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT",
-		"CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT",
-		"CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT",
-		"CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT",
-		"CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT",
-		"CERT_TRUST_INVALID_BASIC_CONSTRAINTS",
-		"CERT_TRUST_INVALID_EXTENSION",
-		"CERT_TRUST_INVALID_NAME_CONSTRAINTS",
-		"CERT_TRUST_INVALID_POLICY_CONSTRAINTS",
-		"CERT_TRUST_IS_CYCLIC",
-		"CERT_TRUST_IS_EXPLICIT_DISTRUST",
-		"CERT_TRUST_IS_NOT_SIGNATURE_VALID",
-		"CERT_TRUST_IS_NOT_TIME_VALID",
-		"CERT_TRUST_IS_NOT_VALID_FOR_USAGE",
-		"CERT_TRUST_IS_OFFLINE_REVOCATION",
-		"CERT_TRUST_IS_REVOKED",
-		"CERT_TRUST_IS_UNTRUSTED_ROOT",
-		"CERT_TRUST_NO_ERROR",
-		"CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY",
-		"CERT_TRUST_REVOCATION_STATUS_UNKNOWN",
-		"CFLUSH",
-		"CLOCAL",
-		"CLONE_CHILD_CLEARTID",
-		"CLONE_CHILD_SETTID",
-		"CLONE_CLEAR_SIGHAND",
-		"CLONE_CSIGNAL",
-		"CLONE_DETACHED",
-		"CLONE_FILES",
-		"CLONE_FS",
-		"CLONE_INTO_CGROUP",
-		"CLONE_IO",
-		"CLONE_NEWCGROUP",
-		"CLONE_NEWIPC",
-		"CLONE_NEWNET",
-		"CLONE_NEWNS",
-		"CLONE_NEWPID",
-		"CLONE_NEWTIME",
-		"CLONE_NEWUSER",
-		"CLONE_NEWUTS",
-		"CLONE_PARENT",
-		"CLONE_PARENT_SETTID",
-		"CLONE_PID",
-		"CLONE_PIDFD",
-		"CLONE_PTRACE",
-		"CLONE_SETTLS",
-		"CLONE_SIGHAND",
-		"CLONE_SYSVSEM",
-		"CLONE_THREAD",
-		"CLONE_UNTRACED",
-		"CLONE_VFORK",
-		"CLONE_VM",
-		"CPUID_CFLUSH",
-		"CREAD",
-		"CREATE_ALWAYS",
-		"CREATE_NEW",
-		"CREATE_NEW_PROCESS_GROUP",
-		"CREATE_UNICODE_ENVIRONMENT",
-		"CRYPT_DEFAULT_CONTAINER_OPTIONAL",
-		"CRYPT_DELETEKEYSET",
-		"CRYPT_MACHINE_KEYSET",
-		"CRYPT_NEWKEYSET",
-		"CRYPT_SILENT",
-		"CRYPT_VERIFYCONTEXT",
-		"CS5",
-		"CS6",
-		"CS7",
-		"CS8",
-		"CSIZE",
-		"CSTART",
-		"CSTATUS",
-		"CSTOP",
-		"CSTOPB",
-		"CSUSP",
-		"CTL_MAXNAME",
-		"CTL_NET",
-		"CTL_QUERY",
-		"CTRL_BREAK_EVENT",
-		"CTRL_CLOSE_EVENT",
-		"CTRL_C_EVENT",
-		"CTRL_LOGOFF_EVENT",
-		"CTRL_SHUTDOWN_EVENT",
-		"CancelIo",
-		"CancelIoEx",
-		"CertAddCertificateContextToStore",
-		"CertChainContext",
-		"CertChainElement",
-		"CertChainPara",
-		"CertChainPolicyPara",
-		"CertChainPolicyStatus",
-		"CertCloseStore",
-		"CertContext",
-		"CertCreateCertificateContext",
-		"CertEnhKeyUsage",
-		"CertEnumCertificatesInStore",
-		"CertFreeCertificateChain",
-		"CertFreeCertificateContext",
-		"CertGetCertificateChain",
-		"CertInfo",
-		"CertOpenStore",
-		"CertOpenSystemStore",
-		"CertRevocationCrlInfo",
-		"CertRevocationInfo",
-		"CertSimpleChain",
-		"CertTrustListInfo",
-		"CertTrustStatus",
-		"CertUsageMatch",
-		"CertVerifyCertificateChainPolicy",
-		"Chdir",
-		"CheckBpfVersion",
-		"Chflags",
-		"Chmod",
-		"Chown",
-		"Chroot",
-		"Clearenv",
-		"Close",
-		"CloseHandle",
-		"CloseOnExec",
-		"Closesocket",
-		"CmsgLen",
-		"CmsgSpace",
-		"Cmsghdr",
-		"CommandLineToArgv",
-		"ComputerName",
-		"Conn",
-		"Connect",
-		"ConnectEx",
-		"ConvertSidToStringSid",
-		"ConvertStringSidToSid",
-		"CopySid",
-		"Creat",
-		"CreateDirectory",
-		"CreateFile",
-		"CreateFileMapping",
-		"CreateHardLink",
-		"CreateIoCompletionPort",
-		"CreatePipe",
-		"CreateProcess",
-		"CreateProcessAsUser",
-		"CreateSymbolicLink",
-		"CreateToolhelp32Snapshot",
-		"Credential",
-		"CryptAcquireContext",
-		"CryptGenRandom",
-		"CryptReleaseContext",
-		"DIOCBSFLUSH",
-		"DIOCOSFPFLUSH",
-		"DLL",
-		"DLLError",
-		"DLT_A429",
-		"DLT_A653_ICM",
-		"DLT_AIRONET_HEADER",
-		"DLT_AOS",
-		"DLT_APPLE_IP_OVER_IEEE1394",
-		"DLT_ARCNET",
-		"DLT_ARCNET_LINUX",
-		"DLT_ATM_CLIP",
-		"DLT_ATM_RFC1483",
-		"DLT_AURORA",
-		"DLT_AX25",
-		"DLT_AX25_KISS",
-		"DLT_BACNET_MS_TP",
-		"DLT_BLUETOOTH_HCI_H4",
-		"DLT_BLUETOOTH_HCI_H4_WITH_PHDR",
-		"DLT_CAN20B",
-		"DLT_CAN_SOCKETCAN",
-		"DLT_CHAOS",
-		"DLT_CHDLC",
-		"DLT_CISCO_IOS",
-		"DLT_C_HDLC",
-		"DLT_C_HDLC_WITH_DIR",
-		"DLT_DBUS",
-		"DLT_DECT",
-		"DLT_DOCSIS",
-		"DLT_DVB_CI",
-		"DLT_ECONET",
-		"DLT_EN10MB",
-		"DLT_EN3MB",
-		"DLT_ENC",
-		"DLT_ERF",
-		"DLT_ERF_ETH",
-		"DLT_ERF_POS",
-		"DLT_FC_2",
-		"DLT_FC_2_WITH_FRAME_DELIMS",
-		"DLT_FDDI",
-		"DLT_FLEXRAY",
-		"DLT_FRELAY",
-		"DLT_FRELAY_WITH_DIR",
-		"DLT_GCOM_SERIAL",
-		"DLT_GCOM_T1E1",
-		"DLT_GPF_F",
-		"DLT_GPF_T",
-		"DLT_GPRS_LLC",
-		"DLT_GSMTAP_ABIS",
-		"DLT_GSMTAP_UM",
-		"DLT_HDLC",
-		"DLT_HHDLC",
-		"DLT_HIPPI",
-		"DLT_IBM_SN",
-		"DLT_IBM_SP",
-		"DLT_IEEE802",
-		"DLT_IEEE802_11",
-		"DLT_IEEE802_11_RADIO",
-		"DLT_IEEE802_11_RADIO_AVS",
-		"DLT_IEEE802_15_4",
-		"DLT_IEEE802_15_4_LINUX",
-		"DLT_IEEE802_15_4_NOFCS",
-		"DLT_IEEE802_15_4_NONASK_PHY",
-		"DLT_IEEE802_16_MAC_CPS",
-		"DLT_IEEE802_16_MAC_CPS_RADIO",
-		"DLT_IPFILTER",
-		"DLT_IPMB",
-		"DLT_IPMB_LINUX",
-		"DLT_IPNET",
-		"DLT_IPOIB",
-		"DLT_IPV4",
-		"DLT_IPV6",
-		"DLT_IP_OVER_FC",
-		"DLT_JUNIPER_ATM1",
-		"DLT_JUNIPER_ATM2",
-		"DLT_JUNIPER_ATM_CEMIC",
-		"DLT_JUNIPER_CHDLC",
-		"DLT_JUNIPER_ES",
-		"DLT_JUNIPER_ETHER",
-		"DLT_JUNIPER_FIBRECHANNEL",
-		"DLT_JUNIPER_FRELAY",
-		"DLT_JUNIPER_GGSN",
-		"DLT_JUNIPER_ISM",
-		"DLT_JUNIPER_MFR",
-		"DLT_JUNIPER_MLFR",
-		"DLT_JUNIPER_MLPPP",
-		"DLT_JUNIPER_MONITOR",
-		"DLT_JUNIPER_PIC_PEER",
-		"DLT_JUNIPER_PPP",
-		"DLT_JUNIPER_PPPOE",
-		"DLT_JUNIPER_PPPOE_ATM",
-		"DLT_JUNIPER_SERVICES",
-		"DLT_JUNIPER_SRX_E2E",
-		"DLT_JUNIPER_ST",
-		"DLT_JUNIPER_VP",
-		"DLT_JUNIPER_VS",
-		"DLT_LAPB_WITH_DIR",
-		"DLT_LAPD",
-		"DLT_LIN",
-		"DLT_LINUX_EVDEV",
-		"DLT_LINUX_IRDA",
-		"DLT_LINUX_LAPD",
-		"DLT_LINUX_PPP_WITHDIRECTION",
-		"DLT_LINUX_SLL",
-		"DLT_LOOP",
-		"DLT_LTALK",
-		"DLT_MATCHING_MAX",
-		"DLT_MATCHING_MIN",
-		"DLT_MFR",
-		"DLT_MOST",
-		"DLT_MPEG_2_TS",
-		"DLT_MPLS",
-		"DLT_MTP2",
-		"DLT_MTP2_WITH_PHDR",
-		"DLT_MTP3",
-		"DLT_MUX27010",
-		"DLT_NETANALYZER",
-		"DLT_NETANALYZER_TRANSPARENT",
-		"DLT_NFC_LLCP",
-		"DLT_NFLOG",
-		"DLT_NG40",
-		"DLT_NULL",
-		"DLT_PCI_EXP",
-		"DLT_PFLOG",
-		"DLT_PFSYNC",
-		"DLT_PPI",
-		"DLT_PPP",
-		"DLT_PPP_BSDOS",
-		"DLT_PPP_ETHER",
-		"DLT_PPP_PPPD",
-		"DLT_PPP_SERIAL",
-		"DLT_PPP_WITH_DIR",
-		"DLT_PPP_WITH_DIRECTION",
-		"DLT_PRISM_HEADER",
-		"DLT_PRONET",
-		"DLT_RAIF1",
-		"DLT_RAW",
-		"DLT_RAWAF_MASK",
-		"DLT_RIO",
-		"DLT_SCCP",
-		"DLT_SITA",
-		"DLT_SLIP",
-		"DLT_SLIP_BSDOS",
-		"DLT_STANAG_5066_D_PDU",
-		"DLT_SUNATM",
-		"DLT_SYMANTEC_FIREWALL",
-		"DLT_TZSP",
-		"DLT_USB",
-		"DLT_USB_LINUX",
-		"DLT_USB_LINUX_MMAPPED",
-		"DLT_USER0",
-		"DLT_USER1",
-		"DLT_USER10",
-		"DLT_USER11",
-		"DLT_USER12",
-		"DLT_USER13",
-		"DLT_USER14",
-		"DLT_USER15",
-		"DLT_USER2",
-		"DLT_USER3",
-		"DLT_USER4",
-		"DLT_USER5",
-		"DLT_USER6",
-		"DLT_USER7",
-		"DLT_USER8",
-		"DLT_USER9",
-		"DLT_WIHART",
-		"DLT_X2E_SERIAL",
-		"DLT_X2E_XORAYA",
-		"DNSMXData",
-		"DNSPTRData",
-		"DNSRecord",
-		"DNSSRVData",
-		"DNSTXTData",
-		"DNS_INFO_NO_RECORDS",
-		"DNS_TYPE_A",
-		"DNS_TYPE_A6",
-		"DNS_TYPE_AAAA",
-		"DNS_TYPE_ADDRS",
-		"DNS_TYPE_AFSDB",
-		"DNS_TYPE_ALL",
-		"DNS_TYPE_ANY",
-		"DNS_TYPE_ATMA",
-		"DNS_TYPE_AXFR",
-		"DNS_TYPE_CERT",
-		"DNS_TYPE_CNAME",
-		"DNS_TYPE_DHCID",
-		"DNS_TYPE_DNAME",
-		"DNS_TYPE_DNSKEY",
-		"DNS_TYPE_DS",
-		"DNS_TYPE_EID",
-		"DNS_TYPE_GID",
-		"DNS_TYPE_GPOS",
-		"DNS_TYPE_HINFO",
-		"DNS_TYPE_ISDN",
-		"DNS_TYPE_IXFR",
-		"DNS_TYPE_KEY",
-		"DNS_TYPE_KX",
-		"DNS_TYPE_LOC",
-		"DNS_TYPE_MAILA",
-		"DNS_TYPE_MAILB",
-		"DNS_TYPE_MB",
-		"DNS_TYPE_MD",
-		"DNS_TYPE_MF",
-		"DNS_TYPE_MG",
-		"DNS_TYPE_MINFO",
-		"DNS_TYPE_MR",
-		"DNS_TYPE_MX",
-		"DNS_TYPE_NAPTR",
-		"DNS_TYPE_NBSTAT",
-		"DNS_TYPE_NIMLOC",
-		"DNS_TYPE_NS",
-		"DNS_TYPE_NSAP",
-		"DNS_TYPE_NSAPPTR",
-		"DNS_TYPE_NSEC",
-		"DNS_TYPE_NULL",
-		"DNS_TYPE_NXT",
-		"DNS_TYPE_OPT",
-		"DNS_TYPE_PTR",
-		"DNS_TYPE_PX",
-		"DNS_TYPE_RP",
-		"DNS_TYPE_RRSIG",
-		"DNS_TYPE_RT",
-		"DNS_TYPE_SIG",
-		"DNS_TYPE_SINK",
-		"DNS_TYPE_SOA",
-		"DNS_TYPE_SRV",
-		"DNS_TYPE_TEXT",
-		"DNS_TYPE_TKEY",
-		"DNS_TYPE_TSIG",
-		"DNS_TYPE_UID",
-		"DNS_TYPE_UINFO",
-		"DNS_TYPE_UNSPEC",
-		"DNS_TYPE_WINS",
-		"DNS_TYPE_WINSR",
-		"DNS_TYPE_WKS",
-		"DNS_TYPE_X25",
-		"DT_BLK",
-		"DT_CHR",
-		"DT_DIR",
-		"DT_FIFO",
-		"DT_LNK",
-		"DT_REG",
-		"DT_SOCK",
-		"DT_UNKNOWN",
-		"DT_WHT",
-		"DUPLICATE_CLOSE_SOURCE",
-		"DUPLICATE_SAME_ACCESS",
-		"DeleteFile",
-		"DetachLsf",
-		"DeviceIoControl",
-		"Dirent",
-		"DnsNameCompare",
-		"DnsQuery",
-		"DnsRecordListFree",
-		"DnsSectionAdditional",
-		"DnsSectionAnswer",
-		"DnsSectionAuthority",
-		"DnsSectionQuestion",
-		"Dup",
-		"Dup2",
-		"Dup3",
-		"DuplicateHandle",
-		"E2BIG",
-		"EACCES",
-		"EADDRINUSE",
-		"EADDRNOTAVAIL",
-		"EADV",
-		"EAFNOSUPPORT",
-		"EAGAIN",
-		"EALREADY",
-		"EAUTH",
-		"EBADARCH",
-		"EBADE",
-		"EBADEXEC",
-		"EBADF",
-		"EBADFD",
-		"EBADMACHO",
-		"EBADMSG",
-		"EBADR",
-		"EBADRPC",
-		"EBADRQC",
-		"EBADSLT",
-		"EBFONT",
-		"EBUSY",
-		"ECANCELED",
-		"ECAPMODE",
-		"ECHILD",
-		"ECHO",
-		"ECHOCTL",
-		"ECHOE",
-		"ECHOK",
-		"ECHOKE",
-		"ECHONL",
-		"ECHOPRT",
-		"ECHRNG",
-		"ECOMM",
-		"ECONNABORTED",
-		"ECONNREFUSED",
-		"ECONNRESET",
-		"EDEADLK",
-		"EDEADLOCK",
-		"EDESTADDRREQ",
-		"EDEVERR",
-		"EDOM",
-		"EDOOFUS",
-		"EDOTDOT",
-		"EDQUOT",
-		"EEXIST",
-		"EFAULT",
-		"EFBIG",
-		"EFER_LMA",
-		"EFER_LME",
-		"EFER_NXE",
-		"EFER_SCE",
-		"EFTYPE",
-		"EHOSTDOWN",
-		"EHOSTUNREACH",
-		"EHWPOISON",
-		"EIDRM",
-		"EILSEQ",
-		"EINPROGRESS",
-		"EINTR",
-		"EINVAL",
-		"EIO",
-		"EIPSEC",
-		"EISCONN",
-		"EISDIR",
-		"EISNAM",
-		"EKEYEXPIRED",
-		"EKEYREJECTED",
-		"EKEYREVOKED",
-		"EL2HLT",
-		"EL2NSYNC",
-		"EL3HLT",
-		"EL3RST",
-		"ELAST",
-		"ELF_NGREG",
-		"ELF_PRARGSZ",
-		"ELIBACC",
-		"ELIBBAD",
-		"ELIBEXEC",
-		"ELIBMAX",
-		"ELIBSCN",
-		"ELNRNG",
-		"ELOOP",
-		"EMEDIUMTYPE",
-		"EMFILE",
-		"EMLINK",
-		"EMSGSIZE",
-		"EMT_TAGOVF",
-		"EMULTIHOP",
-		"EMUL_ENABLED",
-		"EMUL_LINUX",
-		"EMUL_LINUX32",
-		"EMUL_MAXID",
-		"EMUL_NATIVE",
-		"ENAMETOOLONG",
-		"ENAVAIL",
-		"ENDRUNDISC",
-		"ENEEDAUTH",
-		"ENETDOWN",
-		"ENETRESET",
-		"ENETUNREACH",
-		"ENFILE",
-		"ENOANO",
-		"ENOATTR",
-		"ENOBUFS",
-		"ENOCSI",
-		"ENODATA",
-		"ENODEV",
-		"ENOENT",
-		"ENOEXEC",
-		"ENOKEY",
-		"ENOLCK",
-		"ENOLINK",
-		"ENOMEDIUM",
-		"ENOMEM",
-		"ENOMSG",
-		"ENONET",
-		"ENOPKG",
-		"ENOPOLICY",
-		"ENOPROTOOPT",
-		"ENOSPC",
-		"ENOSR",
-		"ENOSTR",
-		"ENOSYS",
-		"ENOTBLK",
-		"ENOTCAPABLE",
-		"ENOTCONN",
-		"ENOTDIR",
-		"ENOTEMPTY",
-		"ENOTNAM",
-		"ENOTRECOVERABLE",
-		"ENOTSOCK",
-		"ENOTSUP",
-		"ENOTTY",
-		"ENOTUNIQ",
-		"ENXIO",
-		"EN_SW_CTL_INF",
-		"EN_SW_CTL_PREC",
-		"EN_SW_CTL_ROUND",
-		"EN_SW_DATACHAIN",
-		"EN_SW_DENORM",
-		"EN_SW_INVOP",
-		"EN_SW_OVERFLOW",
-		"EN_SW_PRECLOSS",
-		"EN_SW_UNDERFLOW",
-		"EN_SW_ZERODIV",
-		"EOPNOTSUPP",
-		"EOVERFLOW",
-		"EOWNERDEAD",
-		"EPERM",
-		"EPFNOSUPPORT",
-		"EPIPE",
-		"EPOLLERR",
-		"EPOLLET",
-		"EPOLLHUP",
-		"EPOLLIN",
-		"EPOLLMSG",
-		"EPOLLONESHOT",
-		"EPOLLOUT",
-		"EPOLLPRI",
-		"EPOLLRDBAND",
-		"EPOLLRDHUP",
-		"EPOLLRDNORM",
-		"EPOLLWRBAND",
-		"EPOLLWRNORM",
-		"EPOLL_CLOEXEC",
-		"EPOLL_CTL_ADD",
-		"EPOLL_CTL_DEL",
-		"EPOLL_CTL_MOD",
-		"EPOLL_NONBLOCK",
-		"EPROCLIM",
-		"EPROCUNAVAIL",
-		"EPROGMISMATCH",
-		"EPROGUNAVAIL",
-		"EPROTO",
-		"EPROTONOSUPPORT",
-		"EPROTOTYPE",
-		"EPWROFF",
-		"EQFULL",
-		"ERANGE",
-		"EREMCHG",
-		"EREMOTE",
-		"EREMOTEIO",
-		"ERESTART",
-		"ERFKILL",
-		"EROFS",
-		"ERPCMISMATCH",
-		"ERROR_ACCESS_DENIED",
-		"ERROR_ALREADY_EXISTS",
-		"ERROR_BROKEN_PIPE",
-		"ERROR_BUFFER_OVERFLOW",
-		"ERROR_DIR_NOT_EMPTY",
-		"ERROR_ENVVAR_NOT_FOUND",
-		"ERROR_FILE_EXISTS",
-		"ERROR_FILE_NOT_FOUND",
-		"ERROR_HANDLE_EOF",
-		"ERROR_INSUFFICIENT_BUFFER",
-		"ERROR_IO_PENDING",
-		"ERROR_MOD_NOT_FOUND",
-		"ERROR_MORE_DATA",
-		"ERROR_NETNAME_DELETED",
-		"ERROR_NOT_FOUND",
-		"ERROR_NO_MORE_FILES",
-		"ERROR_OPERATION_ABORTED",
-		"ERROR_PATH_NOT_FOUND",
-		"ERROR_PRIVILEGE_NOT_HELD",
-		"ERROR_PROC_NOT_FOUND",
-		"ESHLIBVERS",
-		"ESHUTDOWN",
-		"ESOCKTNOSUPPORT",
-		"ESPIPE",
-		"ESRCH",
-		"ESRMNT",
-		"ESTALE",
-		"ESTRPIPE",
-		"ETHERCAP_JUMBO_MTU",
-		"ETHERCAP_VLAN_HWTAGGING",
-		"ETHERCAP_VLAN_MTU",
-		"ETHERMIN",
-		"ETHERMTU",
-		"ETHERMTU_JUMBO",
-		"ETHERTYPE_8023",
-		"ETHERTYPE_AARP",
-		"ETHERTYPE_ACCTON",
-		"ETHERTYPE_AEONIC",
-		"ETHERTYPE_ALPHA",
-		"ETHERTYPE_AMBER",
-		"ETHERTYPE_AMOEBA",
-		"ETHERTYPE_AOE",
-		"ETHERTYPE_APOLLO",
-		"ETHERTYPE_APOLLODOMAIN",
-		"ETHERTYPE_APPLETALK",
-		"ETHERTYPE_APPLITEK",
-		"ETHERTYPE_ARGONAUT",
-		"ETHERTYPE_ARP",
-		"ETHERTYPE_AT",
-		"ETHERTYPE_ATALK",
-		"ETHERTYPE_ATOMIC",
-		"ETHERTYPE_ATT",
-		"ETHERTYPE_ATTSTANFORD",
-		"ETHERTYPE_AUTOPHON",
-		"ETHERTYPE_AXIS",
-		"ETHERTYPE_BCLOOP",
-		"ETHERTYPE_BOFL",
-		"ETHERTYPE_CABLETRON",
-		"ETHERTYPE_CHAOS",
-		"ETHERTYPE_COMDESIGN",
-		"ETHERTYPE_COMPUGRAPHIC",
-		"ETHERTYPE_COUNTERPOINT",
-		"ETHERTYPE_CRONUS",
-		"ETHERTYPE_CRONUSVLN",
-		"ETHERTYPE_DCA",
-		"ETHERTYPE_DDE",
-		"ETHERTYPE_DEBNI",
-		"ETHERTYPE_DECAM",
-		"ETHERTYPE_DECCUST",
-		"ETHERTYPE_DECDIAG",
-		"ETHERTYPE_DECDNS",
-		"ETHERTYPE_DECDTS",
-		"ETHERTYPE_DECEXPER",
-		"ETHERTYPE_DECLAST",
-		"ETHERTYPE_DECLTM",
-		"ETHERTYPE_DECMUMPS",
-		"ETHERTYPE_DECNETBIOS",
-		"ETHERTYPE_DELTACON",
-		"ETHERTYPE_DIDDLE",
-		"ETHERTYPE_DLOG1",
-		"ETHERTYPE_DLOG2",
-		"ETHERTYPE_DN",
-		"ETHERTYPE_DOGFIGHT",
-		"ETHERTYPE_DSMD",
-		"ETHERTYPE_ECMA",
-		"ETHERTYPE_ENCRYPT",
-		"ETHERTYPE_ES",
-		"ETHERTYPE_EXCELAN",
-		"ETHERTYPE_EXPERDATA",
-		"ETHERTYPE_FLIP",
-		"ETHERTYPE_FLOWCONTROL",
-		"ETHERTYPE_FRARP",
-		"ETHERTYPE_GENDYN",
-		"ETHERTYPE_HAYES",
-		"ETHERTYPE_HIPPI_FP",
-		"ETHERTYPE_HITACHI",
-		"ETHERTYPE_HP",
-		"ETHERTYPE_IEEEPUP",
-		"ETHERTYPE_IEEEPUPAT",
-		"ETHERTYPE_IMLBL",
-		"ETHERTYPE_IMLBLDIAG",
-		"ETHERTYPE_IP",
-		"ETHERTYPE_IPAS",
-		"ETHERTYPE_IPV6",
-		"ETHERTYPE_IPX",
-		"ETHERTYPE_IPXNEW",
-		"ETHERTYPE_KALPANA",
-		"ETHERTYPE_LANBRIDGE",
-		"ETHERTYPE_LANPROBE",
-		"ETHERTYPE_LAT",
-		"ETHERTYPE_LBACK",
-		"ETHERTYPE_LITTLE",
-		"ETHERTYPE_LLDP",
-		"ETHERTYPE_LOGICRAFT",
-		"ETHERTYPE_LOOPBACK",
-		"ETHERTYPE_MATRA",
-		"ETHERTYPE_MAX",
-		"ETHERTYPE_MERIT",
-		"ETHERTYPE_MICP",
-		"ETHERTYPE_MOPDL",
-		"ETHERTYPE_MOPRC",
-		"ETHERTYPE_MOTOROLA",
-		"ETHERTYPE_MPLS",
-		"ETHERTYPE_MPLS_MCAST",
-		"ETHERTYPE_MUMPS",
-		"ETHERTYPE_NBPCC",
-		"ETHERTYPE_NBPCLAIM",
-		"ETHERTYPE_NBPCLREQ",
-		"ETHERTYPE_NBPCLRSP",
-		"ETHERTYPE_NBPCREQ",
-		"ETHERTYPE_NBPCRSP",
-		"ETHERTYPE_NBPDG",
-		"ETHERTYPE_NBPDGB",
-		"ETHERTYPE_NBPDLTE",
-		"ETHERTYPE_NBPRAR",
-		"ETHERTYPE_NBPRAS",
-		"ETHERTYPE_NBPRST",
-		"ETHERTYPE_NBPSCD",
-		"ETHERTYPE_NBPVCD",
-		"ETHERTYPE_NBS",
-		"ETHERTYPE_NCD",
-		"ETHERTYPE_NESTAR",
-		"ETHERTYPE_NETBEUI",
-		"ETHERTYPE_NOVELL",
-		"ETHERTYPE_NS",
-		"ETHERTYPE_NSAT",
-		"ETHERTYPE_NSCOMPAT",
-		"ETHERTYPE_NTRAILER",
-		"ETHERTYPE_OS9",
-		"ETHERTYPE_OS9NET",
-		"ETHERTYPE_PACER",
-		"ETHERTYPE_PAE",
-		"ETHERTYPE_PCS",
-		"ETHERTYPE_PLANNING",
-		"ETHERTYPE_PPP",
-		"ETHERTYPE_PPPOE",
-		"ETHERTYPE_PPPOEDISC",
-		"ETHERTYPE_PRIMENTS",
-		"ETHERTYPE_PUP",
-		"ETHERTYPE_PUPAT",
-		"ETHERTYPE_QINQ",
-		"ETHERTYPE_RACAL",
-		"ETHERTYPE_RATIONAL",
-		"ETHERTYPE_RAWFR",
-		"ETHERTYPE_RCL",
-		"ETHERTYPE_RDP",
-		"ETHERTYPE_RETIX",
-		"ETHERTYPE_REVARP",
-		"ETHERTYPE_SCA",
-		"ETHERTYPE_SECTRA",
-		"ETHERTYPE_SECUREDATA",
-		"ETHERTYPE_SGITW",
-		"ETHERTYPE_SG_BOUNCE",
-		"ETHERTYPE_SG_DIAG",
-		"ETHERTYPE_SG_NETGAMES",
-		"ETHERTYPE_SG_RESV",
-		"ETHERTYPE_SIMNET",
-		"ETHERTYPE_SLOW",
-		"ETHERTYPE_SLOWPROTOCOLS",
-		"ETHERTYPE_SNA",
-		"ETHERTYPE_SNMP",
-		"ETHERTYPE_SONIX",
-		"ETHERTYPE_SPIDER",
-		"ETHERTYPE_SPRITE",
-		"ETHERTYPE_STP",
-		"ETHERTYPE_TALARIS",
-		"ETHERTYPE_TALARISMC",
-		"ETHERTYPE_TCPCOMP",
-		"ETHERTYPE_TCPSM",
-		"ETHERTYPE_TEC",
-		"ETHERTYPE_TIGAN",
-		"ETHERTYPE_TRAIL",
-		"ETHERTYPE_TRANSETHER",
-		"ETHERTYPE_TYMSHARE",
-		"ETHERTYPE_UBBST",
-		"ETHERTYPE_UBDEBUG",
-		"ETHERTYPE_UBDIAGLOOP",
-		"ETHERTYPE_UBDL",
-		"ETHERTYPE_UBNIU",
-		"ETHERTYPE_UBNMC",
-		"ETHERTYPE_VALID",
-		"ETHERTYPE_VARIAN",
-		"ETHERTYPE_VAXELN",
-		"ETHERTYPE_VEECO",
-		"ETHERTYPE_VEXP",
-		"ETHERTYPE_VGLAB",
-		"ETHERTYPE_VINES",
-		"ETHERTYPE_VINESECHO",
-		"ETHERTYPE_VINESLOOP",
-		"ETHERTYPE_VITAL",
-		"ETHERTYPE_VLAN",
-		"ETHERTYPE_VLTLMAN",
-		"ETHERTYPE_VPROD",
-		"ETHERTYPE_VURESERVED",
-		"ETHERTYPE_WATERLOO",
-		"ETHERTYPE_WELLFLEET",
-		"ETHERTYPE_X25",
-		"ETHERTYPE_X75",
-		"ETHERTYPE_XNSSM",
-		"ETHERTYPE_XTP",
-		"ETHER_ADDR_LEN",
-		"ETHER_ALIGN",
-		"ETHER_CRC_LEN",
-		"ETHER_CRC_POLY_BE",
-		"ETHER_CRC_POLY_LE",
-		"ETHER_HDR_LEN",
-		"ETHER_MAX_DIX_LEN",
-		"ETHER_MAX_LEN",
-		"ETHER_MAX_LEN_JUMBO",
-		"ETHER_MIN_LEN",
-		"ETHER_PPPOE_ENCAP_LEN",
-		"ETHER_TYPE_LEN",
-		"ETHER_VLAN_ENCAP_LEN",
-		"ETH_P_1588",
-		"ETH_P_8021Q",
-		"ETH_P_802_2",
-		"ETH_P_802_3",
-		"ETH_P_AARP",
-		"ETH_P_ALL",
-		"ETH_P_AOE",
-		"ETH_P_ARCNET",
-		"ETH_P_ARP",
-		"ETH_P_ATALK",
-		"ETH_P_ATMFATE",
-		"ETH_P_ATMMPOA",
-		"ETH_P_AX25",
-		"ETH_P_BPQ",
-		"ETH_P_CAIF",
-		"ETH_P_CAN",
-		"ETH_P_CONTROL",
-		"ETH_P_CUST",
-		"ETH_P_DDCMP",
-		"ETH_P_DEC",
-		"ETH_P_DIAG",
-		"ETH_P_DNA_DL",
-		"ETH_P_DNA_RC",
-		"ETH_P_DNA_RT",
-		"ETH_P_DSA",
-		"ETH_P_ECONET",
-		"ETH_P_EDSA",
-		"ETH_P_FCOE",
-		"ETH_P_FIP",
-		"ETH_P_HDLC",
-		"ETH_P_IEEE802154",
-		"ETH_P_IEEEPUP",
-		"ETH_P_IEEEPUPAT",
-		"ETH_P_IP",
-		"ETH_P_IPV6",
-		"ETH_P_IPX",
-		"ETH_P_IRDA",
-		"ETH_P_LAT",
-		"ETH_P_LINK_CTL",
-		"ETH_P_LOCALTALK",
-		"ETH_P_LOOP",
-		"ETH_P_MOBITEX",
-		"ETH_P_MPLS_MC",
-		"ETH_P_MPLS_UC",
-		"ETH_P_PAE",
-		"ETH_P_PAUSE",
-		"ETH_P_PHONET",
-		"ETH_P_PPPTALK",
-		"ETH_P_PPP_DISC",
-		"ETH_P_PPP_MP",
-		"ETH_P_PPP_SES",
-		"ETH_P_PUP",
-		"ETH_P_PUPAT",
-		"ETH_P_RARP",
-		"ETH_P_SCA",
-		"ETH_P_SLOW",
-		"ETH_P_SNAP",
-		"ETH_P_TEB",
-		"ETH_P_TIPC",
-		"ETH_P_TRAILER",
-		"ETH_P_TR_802_2",
-		"ETH_P_WAN_PPP",
-		"ETH_P_WCCP",
-		"ETH_P_X25",
-		"ETIME",
-		"ETIMEDOUT",
-		"ETOOMANYREFS",
-		"ETXTBSY",
-		"EUCLEAN",
-		"EUNATCH",
-		"EUSERS",
-		"EVFILT_AIO",
-		"EVFILT_FS",
-		"EVFILT_LIO",
-		"EVFILT_MACHPORT",
-		"EVFILT_PROC",
-		"EVFILT_READ",
-		"EVFILT_SIGNAL",
-		"EVFILT_SYSCOUNT",
-		"EVFILT_THREADMARKER",
-		"EVFILT_TIMER",
-		"EVFILT_USER",
-		"EVFILT_VM",
-		"EVFILT_VNODE",
-		"EVFILT_WRITE",
-		"EV_ADD",
-		"EV_CLEAR",
-		"EV_DELETE",
-		"EV_DISABLE",
-		"EV_DISPATCH",
-		"EV_DROP",
-		"EV_ENABLE",
-		"EV_EOF",
-		"EV_ERROR",
-		"EV_FLAG0",
-		"EV_FLAG1",
-		"EV_ONESHOT",
-		"EV_OOBAND",
-		"EV_POLL",
-		"EV_RECEIPT",
-		"EV_SYSFLAGS",
-		"EWINDOWS",
-		"EWOULDBLOCK",
-		"EXDEV",
-		"EXFULL",
-		"EXTA",
-		"EXTB",
-		"EXTPROC",
-		"Environ",
-		"EpollCreate",
-		"EpollCreate1",
-		"EpollCtl",
-		"EpollEvent",
-		"EpollWait",
-		"Errno",
-		"EscapeArg",
-		"Exchangedata",
-		"Exec",
-		"Exit",
-		"ExitProcess",
-		"FD_CLOEXEC",
-		"FD_SETSIZE",
-		"FILE_ACTION_ADDED",
-		"FILE_ACTION_MODIFIED",
-		"FILE_ACTION_REMOVED",
-		"FILE_ACTION_RENAMED_NEW_NAME",
-		"FILE_ACTION_RENAMED_OLD_NAME",
-		"FILE_APPEND_DATA",
-		"FILE_ATTRIBUTE_ARCHIVE",
-		"FILE_ATTRIBUTE_DIRECTORY",
-		"FILE_ATTRIBUTE_HIDDEN",
-		"FILE_ATTRIBUTE_NORMAL",
-		"FILE_ATTRIBUTE_READONLY",
-		"FILE_ATTRIBUTE_REPARSE_POINT",
-		"FILE_ATTRIBUTE_SYSTEM",
-		"FILE_BEGIN",
-		"FILE_CURRENT",
-		"FILE_END",
-		"FILE_FLAG_BACKUP_SEMANTICS",
-		"FILE_FLAG_OPEN_REPARSE_POINT",
-		"FILE_FLAG_OVERLAPPED",
-		"FILE_LIST_DIRECTORY",
-		"FILE_MAP_COPY",
-		"FILE_MAP_EXECUTE",
-		"FILE_MAP_READ",
-		"FILE_MAP_WRITE",
-		"FILE_NOTIFY_CHANGE_ATTRIBUTES",
-		"FILE_NOTIFY_CHANGE_CREATION",
-		"FILE_NOTIFY_CHANGE_DIR_NAME",
-		"FILE_NOTIFY_CHANGE_FILE_NAME",
-		"FILE_NOTIFY_CHANGE_LAST_ACCESS",
-		"FILE_NOTIFY_CHANGE_LAST_WRITE",
-		"FILE_NOTIFY_CHANGE_SIZE",
-		"FILE_SHARE_DELETE",
-		"FILE_SHARE_READ",
-		"FILE_SHARE_WRITE",
-		"FILE_SKIP_COMPLETION_PORT_ON_SUCCESS",
-		"FILE_SKIP_SET_EVENT_ON_HANDLE",
-		"FILE_TYPE_CHAR",
-		"FILE_TYPE_DISK",
-		"FILE_TYPE_PIPE",
-		"FILE_TYPE_REMOTE",
-		"FILE_TYPE_UNKNOWN",
-		"FILE_WRITE_ATTRIBUTES",
-		"FLUSHO",
-		"FORMAT_MESSAGE_ALLOCATE_BUFFER",
-		"FORMAT_MESSAGE_ARGUMENT_ARRAY",
-		"FORMAT_MESSAGE_FROM_HMODULE",
-		"FORMAT_MESSAGE_FROM_STRING",
-		"FORMAT_MESSAGE_FROM_SYSTEM",
-		"FORMAT_MESSAGE_IGNORE_INSERTS",
-		"FORMAT_MESSAGE_MAX_WIDTH_MASK",
-		"FSCTL_GET_REPARSE_POINT",
-		"F_ADDFILESIGS",
-		"F_ADDSIGS",
-		"F_ALLOCATEALL",
-		"F_ALLOCATECONTIG",
-		"F_CANCEL",
-		"F_CHKCLEAN",
-		"F_CLOSEM",
-		"F_DUP2FD",
-		"F_DUP2FD_CLOEXEC",
-		"F_DUPFD",
-		"F_DUPFD_CLOEXEC",
-		"F_EXLCK",
-		"F_FINDSIGS",
-		"F_FLUSH_DATA",
-		"F_FREEZE_FS",
-		"F_FSCTL",
-		"F_FSDIRMASK",
-		"F_FSIN",
-		"F_FSINOUT",
-		"F_FSOUT",
-		"F_FSPRIV",
-		"F_FSVOID",
-		"F_FULLFSYNC",
-		"F_GETCODEDIR",
-		"F_GETFD",
-		"F_GETFL",
-		"F_GETLEASE",
-		"F_GETLK",
-		"F_GETLK64",
-		"F_GETLKPID",
-		"F_GETNOSIGPIPE",
-		"F_GETOWN",
-		"F_GETOWN_EX",
-		"F_GETPATH",
-		"F_GETPATH_MTMINFO",
-		"F_GETPIPE_SZ",
-		"F_GETPROTECTIONCLASS",
-		"F_GETPROTECTIONLEVEL",
-		"F_GETSIG",
-		"F_GLOBAL_NOCACHE",
-		"F_LOCK",
-		"F_LOG2PHYS",
-		"F_LOG2PHYS_EXT",
-		"F_MARKDEPENDENCY",
-		"F_MAXFD",
-		"F_NOCACHE",
-		"F_NODIRECT",
-		"F_NOTIFY",
-		"F_OGETLK",
-		"F_OK",
-		"F_OSETLK",
-		"F_OSETLKW",
-		"F_PARAM_MASK",
-		"F_PARAM_MAX",
-		"F_PATHPKG_CHECK",
-		"F_PEOFPOSMODE",
-		"F_PREALLOCATE",
-		"F_RDADVISE",
-		"F_RDAHEAD",
-		"F_RDLCK",
-		"F_READAHEAD",
-		"F_READBOOTSTRAP",
-		"F_SETBACKINGSTORE",
-		"F_SETFD",
-		"F_SETFL",
-		"F_SETLEASE",
-		"F_SETLK",
-		"F_SETLK64",
-		"F_SETLKW",
-		"F_SETLKW64",
-		"F_SETLKWTIMEOUT",
-		"F_SETLK_REMOTE",
-		"F_SETNOSIGPIPE",
-		"F_SETOWN",
-		"F_SETOWN_EX",
-		"F_SETPIPE_SZ",
-		"F_SETPROTECTIONCLASS",
-		"F_SETSIG",
-		"F_SETSIZE",
-		"F_SHLCK",
-		"F_SINGLE_WRITER",
-		"F_TEST",
-		"F_THAW_FS",
-		"F_TLOCK",
-		"F_TRANSCODEKEY",
-		"F_ULOCK",
-		"F_UNLCK",
-		"F_UNLCKSYS",
-		"F_VOLPOSMODE",
-		"F_WRITEBOOTSTRAP",
-		"F_WRLCK",
-		"Faccessat",
-		"Fallocate",
-		"Fbootstraptransfer_t",
-		"Fchdir",
-		"Fchflags",
-		"Fchmod",
-		"Fchmodat",
-		"Fchown",
-		"Fchownat",
-		"FcntlFlock",
-		"FdSet",
-		"Fdatasync",
-		"FileNotifyInformation",
-		"Filetime",
-		"FindClose",
-		"FindFirstFile",
-		"FindNextFile",
-		"Flock",
-		"Flock_t",
-		"FlushBpf",
-		"FlushFileBuffers",
-		"FlushViewOfFile",
-		"ForkExec",
-		"ForkLock",
-		"FormatMessage",
-		"Fpathconf",
-		"FreeAddrInfoW",
-		"FreeEnvironmentStrings",
-		"FreeLibrary",
-		"Fsid",
-		"Fstat",
-		"Fstatat",
-		"Fstatfs",
-		"Fstore_t",
-		"Fsync",
-		"Ftruncate",
-		"FullPath",
-		"Futimes",
-		"Futimesat",
-		"GENERIC_ALL",
-		"GENERIC_EXECUTE",
-		"GENERIC_READ",
-		"GENERIC_WRITE",
-		"GUID",
-		"GetAcceptExSockaddrs",
-		"GetAdaptersInfo",
-		"GetAddrInfoW",
-		"GetCommandLine",
-		"GetComputerName",
-		"GetConsoleMode",
-		"GetCurrentDirectory",
-		"GetCurrentProcess",
-		"GetEnvironmentStrings",
-		"GetEnvironmentVariable",
-		"GetExitCodeProcess",
-		"GetFileAttributes",
-		"GetFileAttributesEx",
-		"GetFileExInfoStandard",
-		"GetFileExMaxInfoLevel",
-		"GetFileInformationByHandle",
-		"GetFileType",
-		"GetFullPathName",
-		"GetHostByName",
-		"GetIfEntry",
-		"GetLastError",
-		"GetLengthSid",
-		"GetLongPathName",
-		"GetProcAddress",
-		"GetProcessTimes",
-		"GetProtoByName",
-		"GetQueuedCompletionStatus",
-		"GetServByName",
-		"GetShortPathName",
-		"GetStartupInfo",
-		"GetStdHandle",
-		"GetSystemTimeAsFileTime",
-		"GetTempPath",
-		"GetTimeZoneInformation",
-		"GetTokenInformation",
-		"GetUserNameEx",
-		"GetUserProfileDirectory",
-		"GetVersion",
-		"Getcwd",
-		"Getdents",
-		"Getdirentries",
-		"Getdtablesize",
-		"Getegid",
-		"Getenv",
-		"Geteuid",
-		"Getfsstat",
-		"Getgid",
-		"Getgroups",
-		"Getpagesize",
-		"Getpeername",
-		"Getpgid",
-		"Getpgrp",
-		"Getpid",
-		"Getppid",
-		"Getpriority",
-		"Getrlimit",
-		"Getrusage",
-		"Getsid",
-		"Getsockname",
-		"Getsockopt",
-		"GetsockoptByte",
-		"GetsockoptICMPv6Filter",
-		"GetsockoptIPMreq",
-		"GetsockoptIPMreqn",
-		"GetsockoptIPv6MTUInfo",
-		"GetsockoptIPv6Mreq",
-		"GetsockoptInet4Addr",
-		"GetsockoptInt",
-		"GetsockoptUcred",
-		"Gettid",
-		"Gettimeofday",
-		"Getuid",
-		"Getwd",
-		"Getxattr",
-		"HANDLE_FLAG_INHERIT",
-		"HKEY_CLASSES_ROOT",
-		"HKEY_CURRENT_CONFIG",
-		"HKEY_CURRENT_USER",
-		"HKEY_DYN_DATA",
-		"HKEY_LOCAL_MACHINE",
-		"HKEY_PERFORMANCE_DATA",
-		"HKEY_USERS",
-		"HUPCL",
-		"Handle",
-		"Hostent",
-		"ICANON",
-		"ICMP6_FILTER",
-		"ICMPV6_FILTER",
-		"ICMPv6Filter",
-		"ICRNL",
-		"IEXTEN",
-		"IFAN_ARRIVAL",
-		"IFAN_DEPARTURE",
-		"IFA_ADDRESS",
-		"IFA_ANYCAST",
-		"IFA_BROADCAST",
-		"IFA_CACHEINFO",
-		"IFA_F_DADFAILED",
-		"IFA_F_DEPRECATED",
-		"IFA_F_HOMEADDRESS",
-		"IFA_F_NODAD",
-		"IFA_F_OPTIMISTIC",
-		"IFA_F_PERMANENT",
-		"IFA_F_SECONDARY",
-		"IFA_F_TEMPORARY",
-		"IFA_F_TENTATIVE",
-		"IFA_LABEL",
-		"IFA_LOCAL",
-		"IFA_MAX",
-		"IFA_MULTICAST",
-		"IFA_ROUTE",
-		"IFA_UNSPEC",
-		"IFF_ALLMULTI",
-		"IFF_ALTPHYS",
-		"IFF_AUTOMEDIA",
-		"IFF_BROADCAST",
-		"IFF_CANTCHANGE",
-		"IFF_CANTCONFIG",
-		"IFF_DEBUG",
-		"IFF_DRV_OACTIVE",
-		"IFF_DRV_RUNNING",
-		"IFF_DYING",
-		"IFF_DYNAMIC",
-		"IFF_LINK0",
-		"IFF_LINK1",
-		"IFF_LINK2",
-		"IFF_LOOPBACK",
-		"IFF_MASTER",
-		"IFF_MONITOR",
-		"IFF_MULTICAST",
-		"IFF_NOARP",
-		"IFF_NOTRAILERS",
-		"IFF_NO_PI",
-		"IFF_OACTIVE",
-		"IFF_ONE_QUEUE",
-		"IFF_POINTOPOINT",
-		"IFF_POINTTOPOINT",
-		"IFF_PORTSEL",
-		"IFF_PPROMISC",
-		"IFF_PROMISC",
-		"IFF_RENAMING",
-		"IFF_RUNNING",
-		"IFF_SIMPLEX",
-		"IFF_SLAVE",
-		"IFF_SMART",
-		"IFF_STATICARP",
-		"IFF_TAP",
-		"IFF_TUN",
-		"IFF_TUN_EXCL",
-		"IFF_UP",
-		"IFF_VNET_HDR",
-		"IFLA_ADDRESS",
-		"IFLA_BROADCAST",
-		"IFLA_COST",
-		"IFLA_IFALIAS",
-		"IFLA_IFNAME",
-		"IFLA_LINK",
-		"IFLA_LINKINFO",
-		"IFLA_LINKMODE",
-		"IFLA_MAP",
-		"IFLA_MASTER",
-		"IFLA_MAX",
-		"IFLA_MTU",
-		"IFLA_NET_NS_PID",
-		"IFLA_OPERSTATE",
-		"IFLA_PRIORITY",
-		"IFLA_PROTINFO",
-		"IFLA_QDISC",
-		"IFLA_STATS",
-		"IFLA_TXQLEN",
-		"IFLA_UNSPEC",
-		"IFLA_WEIGHT",
-		"IFLA_WIRELESS",
-		"IFNAMSIZ",
-		"IFT_1822",
-		"IFT_A12MPPSWITCH",
-		"IFT_AAL2",
-		"IFT_AAL5",
-		"IFT_ADSL",
-		"IFT_AFLANE8023",
-		"IFT_AFLANE8025",
-		"IFT_ARAP",
-		"IFT_ARCNET",
-		"IFT_ARCNETPLUS",
-		"IFT_ASYNC",
-		"IFT_ATM",
-		"IFT_ATMDXI",
-		"IFT_ATMFUNI",
-		"IFT_ATMIMA",
-		"IFT_ATMLOGICAL",
-		"IFT_ATMRADIO",
-		"IFT_ATMSUBINTERFACE",
-		"IFT_ATMVCIENDPT",
-		"IFT_ATMVIRTUAL",
-		"IFT_BGPPOLICYACCOUNTING",
-		"IFT_BLUETOOTH",
-		"IFT_BRIDGE",
-		"IFT_BSC",
-		"IFT_CARP",
-		"IFT_CCTEMUL",
-		"IFT_CELLULAR",
-		"IFT_CEPT",
-		"IFT_CES",
-		"IFT_CHANNEL",
-		"IFT_CNR",
-		"IFT_COFFEE",
-		"IFT_COMPOSITELINK",
-		"IFT_DCN",
-		"IFT_DIGITALPOWERLINE",
-		"IFT_DIGITALWRAPPEROVERHEADCHANNEL",
-		"IFT_DLSW",
-		"IFT_DOCSCABLEDOWNSTREAM",
-		"IFT_DOCSCABLEMACLAYER",
-		"IFT_DOCSCABLEUPSTREAM",
-		"IFT_DOCSCABLEUPSTREAMCHANNEL",
-		"IFT_DS0",
-		"IFT_DS0BUNDLE",
-		"IFT_DS1FDL",
-		"IFT_DS3",
-		"IFT_DTM",
-		"IFT_DUMMY",
-		"IFT_DVBASILN",
-		"IFT_DVBASIOUT",
-		"IFT_DVBRCCDOWNSTREAM",
-		"IFT_DVBRCCMACLAYER",
-		"IFT_DVBRCCUPSTREAM",
-		"IFT_ECONET",
-		"IFT_ENC",
-		"IFT_EON",
-		"IFT_EPLRS",
-		"IFT_ESCON",
-		"IFT_ETHER",
-		"IFT_FAITH",
-		"IFT_FAST",
-		"IFT_FASTETHER",
-		"IFT_FASTETHERFX",
-		"IFT_FDDI",
-		"IFT_FIBRECHANNEL",
-		"IFT_FRAMERELAYINTERCONNECT",
-		"IFT_FRAMERELAYMPI",
-		"IFT_FRDLCIENDPT",
-		"IFT_FRELAY",
-		"IFT_FRELAYDCE",
-		"IFT_FRF16MFRBUNDLE",
-		"IFT_FRFORWARD",
-		"IFT_G703AT2MB",
-		"IFT_G703AT64K",
-		"IFT_GIF",
-		"IFT_GIGABITETHERNET",
-		"IFT_GR303IDT",
-		"IFT_GR303RDT",
-		"IFT_H323GATEKEEPER",
-		"IFT_H323PROXY",
-		"IFT_HDH1822",
-		"IFT_HDLC",
-		"IFT_HDSL2",
-		"IFT_HIPERLAN2",
-		"IFT_HIPPI",
-		"IFT_HIPPIINTERFACE",
-		"IFT_HOSTPAD",
-		"IFT_HSSI",
-		"IFT_HY",
-		"IFT_IBM370PARCHAN",
-		"IFT_IDSL",
-		"IFT_IEEE1394",
-		"IFT_IEEE80211",
-		"IFT_IEEE80212",
-		"IFT_IEEE8023ADLAG",
-		"IFT_IFGSN",
-		"IFT_IMT",
-		"IFT_INFINIBAND",
-		"IFT_INTERLEAVE",
-		"IFT_IP",
-		"IFT_IPFORWARD",
-		"IFT_IPOVERATM",
-		"IFT_IPOVERCDLC",
-		"IFT_IPOVERCLAW",
-		"IFT_IPSWITCH",
-		"IFT_IPXIP",
-		"IFT_ISDN",
-		"IFT_ISDNBASIC",
-		"IFT_ISDNPRIMARY",
-		"IFT_ISDNS",
-		"IFT_ISDNU",
-		"IFT_ISO88022LLC",
-		"IFT_ISO88023",
-		"IFT_ISO88024",
-		"IFT_ISO88025",
-		"IFT_ISO88025CRFPINT",
-		"IFT_ISO88025DTR",
-		"IFT_ISO88025FIBER",
-		"IFT_ISO88026",
-		"IFT_ISUP",
-		"IFT_L2VLAN",
-		"IFT_L3IPVLAN",
-		"IFT_L3IPXVLAN",
-		"IFT_LAPB",
-		"IFT_LAPD",
-		"IFT_LAPF",
-		"IFT_LINEGROUP",
-		"IFT_LOCALTALK",
-		"IFT_LOOP",
-		"IFT_MEDIAMAILOVERIP",
-		"IFT_MFSIGLINK",
-		"IFT_MIOX25",
-		"IFT_MODEM",
-		"IFT_MPC",
-		"IFT_MPLS",
-		"IFT_MPLSTUNNEL",
-		"IFT_MSDSL",
-		"IFT_MVL",
-		"IFT_MYRINET",
-		"IFT_NFAS",
-		"IFT_NSIP",
-		"IFT_OPTICALCHANNEL",
-		"IFT_OPTICALTRANSPORT",
-		"IFT_OTHER",
-		"IFT_P10",
-		"IFT_P80",
-		"IFT_PARA",
-		"IFT_PDP",
-		"IFT_PFLOG",
-		"IFT_PFLOW",
-		"IFT_PFSYNC",
-		"IFT_PLC",
-		"IFT_PON155",
-		"IFT_PON622",
-		"IFT_POS",
-		"IFT_PPP",
-		"IFT_PPPMULTILINKBUNDLE",
-		"IFT_PROPATM",
-		"IFT_PROPBWAP2MP",
-		"IFT_PROPCNLS",
-		"IFT_PROPDOCSWIRELESSDOWNSTREAM",
-		"IFT_PROPDOCSWIRELESSMACLAYER",
-		"IFT_PROPDOCSWIRELESSUPSTREAM",
-		"IFT_PROPMUX",
-		"IFT_PROPVIRTUAL",
-		"IFT_PROPWIRELESSP2P",
-		"IFT_PTPSERIAL",
-		"IFT_PVC",
-		"IFT_Q2931",
-		"IFT_QLLC",
-		"IFT_RADIOMAC",
-		"IFT_RADSL",
-		"IFT_REACHDSL",
-		"IFT_RFC1483",
-		"IFT_RS232",
-		"IFT_RSRB",
-		"IFT_SDLC",
-		"IFT_SDSL",
-		"IFT_SHDSL",
-		"IFT_SIP",
-		"IFT_SIPSIG",
-		"IFT_SIPTG",
-		"IFT_SLIP",
-		"IFT_SMDSDXI",
-		"IFT_SMDSICIP",
-		"IFT_SONET",
-		"IFT_SONETOVERHEADCHANNEL",
-		"IFT_SONETPATH",
-		"IFT_SONETVT",
-		"IFT_SRP",
-		"IFT_SS7SIGLINK",
-		"IFT_STACKTOSTACK",
-		"IFT_STARLAN",
-		"IFT_STF",
-		"IFT_T1",
-		"IFT_TDLC",
-		"IFT_TELINK",
-		"IFT_TERMPAD",
-		"IFT_TR008",
-		"IFT_TRANSPHDLC",
-		"IFT_TUNNEL",
-		"IFT_ULTRA",
-		"IFT_USB",
-		"IFT_V11",
-		"IFT_V35",
-		"IFT_V36",
-		"IFT_V37",
-		"IFT_VDSL",
-		"IFT_VIRTUALIPADDRESS",
-		"IFT_VIRTUALTG",
-		"IFT_VOICEDID",
-		"IFT_VOICEEM",
-		"IFT_VOICEEMFGD",
-		"IFT_VOICEENCAP",
-		"IFT_VOICEFGDEANA",
-		"IFT_VOICEFXO",
-		"IFT_VOICEFXS",
-		"IFT_VOICEOVERATM",
-		"IFT_VOICEOVERCABLE",
-		"IFT_VOICEOVERFRAMERELAY",
-		"IFT_VOICEOVERIP",
-		"IFT_X213",
-		"IFT_X25",
-		"IFT_X25DDN",
-		"IFT_X25HUNTGROUP",
-		"IFT_X25MLP",
-		"IFT_X25PLE",
-		"IFT_XETHER",
-		"IGNBRK",
-		"IGNCR",
-		"IGNORE",
-		"IGNPAR",
-		"IMAXBEL",
-		"INFINITE",
-		"INLCR",
-		"INPCK",
-		"INVALID_FILE_ATTRIBUTES",
-		"IN_ACCESS",
-		"IN_ALL_EVENTS",
-		"IN_ATTRIB",
-		"IN_CLASSA_HOST",
-		"IN_CLASSA_MAX",
-		"IN_CLASSA_NET",
-		"IN_CLASSA_NSHIFT",
-		"IN_CLASSB_HOST",
-		"IN_CLASSB_MAX",
-		"IN_CLASSB_NET",
-		"IN_CLASSB_NSHIFT",
-		"IN_CLASSC_HOST",
-		"IN_CLASSC_NET",
-		"IN_CLASSC_NSHIFT",
-		"IN_CLASSD_HOST",
-		"IN_CLASSD_NET",
-		"IN_CLASSD_NSHIFT",
-		"IN_CLOEXEC",
-		"IN_CLOSE",
-		"IN_CLOSE_NOWRITE",
-		"IN_CLOSE_WRITE",
-		"IN_CREATE",
-		"IN_DELETE",
-		"IN_DELETE_SELF",
-		"IN_DONT_FOLLOW",
-		"IN_EXCL_UNLINK",
-		"IN_IGNORED",
-		"IN_ISDIR",
-		"IN_LINKLOCALNETNUM",
-		"IN_LOOPBACKNET",
-		"IN_MASK_ADD",
-		"IN_MODIFY",
-		"IN_MOVE",
-		"IN_MOVED_FROM",
-		"IN_MOVED_TO",
-		"IN_MOVE_SELF",
-		"IN_NONBLOCK",
-		"IN_ONESHOT",
-		"IN_ONLYDIR",
-		"IN_OPEN",
-		"IN_Q_OVERFLOW",
-		"IN_RFC3021_HOST",
-		"IN_RFC3021_MASK",
-		"IN_RFC3021_NET",
-		"IN_RFC3021_NSHIFT",
-		"IN_UNMOUNT",
-		"IOC_IN",
-		"IOC_INOUT",
-		"IOC_OUT",
-		"IOC_VENDOR",
-		"IOC_WS2",
-		"IO_REPARSE_TAG_SYMLINK",
-		"IPMreq",
-		"IPMreqn",
-		"IPPROTO_3PC",
-		"IPPROTO_ADFS",
-		"IPPROTO_AH",
-		"IPPROTO_AHIP",
-		"IPPROTO_APES",
-		"IPPROTO_ARGUS",
-		"IPPROTO_AX25",
-		"IPPROTO_BHA",
-		"IPPROTO_BLT",
-		"IPPROTO_BRSATMON",
-		"IPPROTO_CARP",
-		"IPPROTO_CFTP",
-		"IPPROTO_CHAOS",
-		"IPPROTO_CMTP",
-		"IPPROTO_COMP",
-		"IPPROTO_CPHB",
-		"IPPROTO_CPNX",
-		"IPPROTO_DCCP",
-		"IPPROTO_DDP",
-		"IPPROTO_DGP",
-		"IPPROTO_DIVERT",
-		"IPPROTO_DIVERT_INIT",
-		"IPPROTO_DIVERT_RESP",
-		"IPPROTO_DONE",
-		"IPPROTO_DSTOPTS",
-		"IPPROTO_EGP",
-		"IPPROTO_EMCON",
-		"IPPROTO_ENCAP",
-		"IPPROTO_EON",
-		"IPPROTO_ESP",
-		"IPPROTO_ETHERIP",
-		"IPPROTO_FRAGMENT",
-		"IPPROTO_GGP",
-		"IPPROTO_GMTP",
-		"IPPROTO_GRE",
-		"IPPROTO_HELLO",
-		"IPPROTO_HMP",
-		"IPPROTO_HOPOPTS",
-		"IPPROTO_ICMP",
-		"IPPROTO_ICMPV6",
-		"IPPROTO_IDP",
-		"IPPROTO_IDPR",
-		"IPPROTO_IDRP",
-		"IPPROTO_IGMP",
-		"IPPROTO_IGP",
-		"IPPROTO_IGRP",
-		"IPPROTO_IL",
-		"IPPROTO_INLSP",
-		"IPPROTO_INP",
-		"IPPROTO_IP",
-		"IPPROTO_IPCOMP",
-		"IPPROTO_IPCV",
-		"IPPROTO_IPEIP",
-		"IPPROTO_IPIP",
-		"IPPROTO_IPPC",
-		"IPPROTO_IPV4",
-		"IPPROTO_IPV6",
-		"IPPROTO_IPV6_ICMP",
-		"IPPROTO_IRTP",
-		"IPPROTO_KRYPTOLAN",
-		"IPPROTO_LARP",
-		"IPPROTO_LEAF1",
-		"IPPROTO_LEAF2",
-		"IPPROTO_MAX",
-		"IPPROTO_MAXID",
-		"IPPROTO_MEAS",
-		"IPPROTO_MH",
-		"IPPROTO_MHRP",
-		"IPPROTO_MICP",
-		"IPPROTO_MOBILE",
-		"IPPROTO_MPLS",
-		"IPPROTO_MTP",
-		"IPPROTO_MUX",
-		"IPPROTO_ND",
-		"IPPROTO_NHRP",
-		"IPPROTO_NONE",
-		"IPPROTO_NSP",
-		"IPPROTO_NVPII",
-		"IPPROTO_OLD_DIVERT",
-		"IPPROTO_OSPFIGP",
-		"IPPROTO_PFSYNC",
-		"IPPROTO_PGM",
-		"IPPROTO_PIGP",
-		"IPPROTO_PIM",
-		"IPPROTO_PRM",
-		"IPPROTO_PUP",
-		"IPPROTO_PVP",
-		"IPPROTO_RAW",
-		"IPPROTO_RCCMON",
-		"IPPROTO_RDP",
-		"IPPROTO_ROUTING",
-		"IPPROTO_RSVP",
-		"IPPROTO_RVD",
-		"IPPROTO_SATEXPAK",
-		"IPPROTO_SATMON",
-		"IPPROTO_SCCSP",
-		"IPPROTO_SCTP",
-		"IPPROTO_SDRP",
-		"IPPROTO_SEND",
-		"IPPROTO_SEP",
-		"IPPROTO_SKIP",
-		"IPPROTO_SPACER",
-		"IPPROTO_SRPC",
-		"IPPROTO_ST",
-		"IPPROTO_SVMTP",
-		"IPPROTO_SWIPE",
-		"IPPROTO_TCF",
-		"IPPROTO_TCP",
-		"IPPROTO_TLSP",
-		"IPPROTO_TP",
-		"IPPROTO_TPXX",
-		"IPPROTO_TRUNK1",
-		"IPPROTO_TRUNK2",
-		"IPPROTO_TTP",
-		"IPPROTO_UDP",
-		"IPPROTO_UDPLITE",
-		"IPPROTO_VINES",
-		"IPPROTO_VISA",
-		"IPPROTO_VMTP",
-		"IPPROTO_VRRP",
-		"IPPROTO_WBEXPAK",
-		"IPPROTO_WBMON",
-		"IPPROTO_WSN",
-		"IPPROTO_XNET",
-		"IPPROTO_XTP",
-		"IPV6_2292DSTOPTS",
-		"IPV6_2292HOPLIMIT",
-		"IPV6_2292HOPOPTS",
-		"IPV6_2292NEXTHOP",
-		"IPV6_2292PKTINFO",
-		"IPV6_2292PKTOPTIONS",
-		"IPV6_2292RTHDR",
-		"IPV6_ADDRFORM",
-		"IPV6_ADD_MEMBERSHIP",
-		"IPV6_AUTHHDR",
-		"IPV6_AUTH_LEVEL",
-		"IPV6_AUTOFLOWLABEL",
-		"IPV6_BINDANY",
-		"IPV6_BINDV6ONLY",
-		"IPV6_BOUND_IF",
-		"IPV6_CHECKSUM",
-		"IPV6_DEFAULT_MULTICAST_HOPS",
-		"IPV6_DEFAULT_MULTICAST_LOOP",
-		"IPV6_DEFHLIM",
-		"IPV6_DONTFRAG",
-		"IPV6_DROP_MEMBERSHIP",
-		"IPV6_DSTOPTS",
-		"IPV6_ESP_NETWORK_LEVEL",
-		"IPV6_ESP_TRANS_LEVEL",
-		"IPV6_FAITH",
-		"IPV6_FLOWINFO_MASK",
-		"IPV6_FLOWLABEL_MASK",
-		"IPV6_FRAGTTL",
-		"IPV6_FW_ADD",
-		"IPV6_FW_DEL",
-		"IPV6_FW_FLUSH",
-		"IPV6_FW_GET",
-		"IPV6_FW_ZERO",
-		"IPV6_HLIMDEC",
-		"IPV6_HOPLIMIT",
-		"IPV6_HOPOPTS",
-		"IPV6_IPCOMP_LEVEL",
-		"IPV6_IPSEC_POLICY",
-		"IPV6_JOIN_ANYCAST",
-		"IPV6_JOIN_GROUP",
-		"IPV6_LEAVE_ANYCAST",
-		"IPV6_LEAVE_GROUP",
-		"IPV6_MAXHLIM",
-		"IPV6_MAXOPTHDR",
-		"IPV6_MAXPACKET",
-		"IPV6_MAX_GROUP_SRC_FILTER",
-		"IPV6_MAX_MEMBERSHIPS",
-		"IPV6_MAX_SOCK_SRC_FILTER",
-		"IPV6_MIN_MEMBERSHIPS",
-		"IPV6_MMTU",
-		"IPV6_MSFILTER",
-		"IPV6_MTU",
-		"IPV6_MTU_DISCOVER",
-		"IPV6_MULTICAST_HOPS",
-		"IPV6_MULTICAST_IF",
-		"IPV6_MULTICAST_LOOP",
-		"IPV6_NEXTHOP",
-		"IPV6_OPTIONS",
-		"IPV6_PATHMTU",
-		"IPV6_PIPEX",
-		"IPV6_PKTINFO",
-		"IPV6_PMTUDISC_DO",
-		"IPV6_PMTUDISC_DONT",
-		"IPV6_PMTUDISC_PROBE",
-		"IPV6_PMTUDISC_WANT",
-		"IPV6_PORTRANGE",
-		"IPV6_PORTRANGE_DEFAULT",
-		"IPV6_PORTRANGE_HIGH",
-		"IPV6_PORTRANGE_LOW",
-		"IPV6_PREFER_TEMPADDR",
-		"IPV6_RECVDSTOPTS",
-		"IPV6_RECVDSTPORT",
-		"IPV6_RECVERR",
-		"IPV6_RECVHOPLIMIT",
-		"IPV6_RECVHOPOPTS",
-		"IPV6_RECVPATHMTU",
-		"IPV6_RECVPKTINFO",
-		"IPV6_RECVRTHDR",
-		"IPV6_RECVTCLASS",
-		"IPV6_ROUTER_ALERT",
-		"IPV6_RTABLE",
-		"IPV6_RTHDR",
-		"IPV6_RTHDRDSTOPTS",
-		"IPV6_RTHDR_LOOSE",
-		"IPV6_RTHDR_STRICT",
-		"IPV6_RTHDR_TYPE_0",
-		"IPV6_RXDSTOPTS",
-		"IPV6_RXHOPOPTS",
-		"IPV6_SOCKOPT_RESERVED1",
-		"IPV6_TCLASS",
-		"IPV6_UNICAST_HOPS",
-		"IPV6_USE_MIN_MTU",
-		"IPV6_V6ONLY",
-		"IPV6_VERSION",
-		"IPV6_VERSION_MASK",
-		"IPV6_XFRM_POLICY",
-		"IP_ADD_MEMBERSHIP",
-		"IP_ADD_SOURCE_MEMBERSHIP",
-		"IP_AUTH_LEVEL",
-		"IP_BINDANY",
-		"IP_BLOCK_SOURCE",
-		"IP_BOUND_IF",
-		"IP_DEFAULT_MULTICAST_LOOP",
-		"IP_DEFAULT_MULTICAST_TTL",
-		"IP_DF",
-		"IP_DIVERTFL",
-		"IP_DONTFRAG",
-		"IP_DROP_MEMBERSHIP",
-		"IP_DROP_SOURCE_MEMBERSHIP",
-		"IP_DUMMYNET3",
-		"IP_DUMMYNET_CONFIGURE",
-		"IP_DUMMYNET_DEL",
-		"IP_DUMMYNET_FLUSH",
-		"IP_DUMMYNET_GET",
-		"IP_EF",
-		"IP_ERRORMTU",
-		"IP_ESP_NETWORK_LEVEL",
-		"IP_ESP_TRANS_LEVEL",
-		"IP_FAITH",
-		"IP_FREEBIND",
-		"IP_FW3",
-		"IP_FW_ADD",
-		"IP_FW_DEL",
-		"IP_FW_FLUSH",
-		"IP_FW_GET",
-		"IP_FW_NAT_CFG",
-		"IP_FW_NAT_DEL",
-		"IP_FW_NAT_GET_CONFIG",
-		"IP_FW_NAT_GET_LOG",
-		"IP_FW_RESETLOG",
-		"IP_FW_TABLE_ADD",
-		"IP_FW_TABLE_DEL",
-		"IP_FW_TABLE_FLUSH",
-		"IP_FW_TABLE_GETSIZE",
-		"IP_FW_TABLE_LIST",
-		"IP_FW_ZERO",
-		"IP_HDRINCL",
-		"IP_IPCOMP_LEVEL",
-		"IP_IPSECFLOWINFO",
-		"IP_IPSEC_LOCAL_AUTH",
-		"IP_IPSEC_LOCAL_CRED",
-		"IP_IPSEC_LOCAL_ID",
-		"IP_IPSEC_POLICY",
-		"IP_IPSEC_REMOTE_AUTH",
-		"IP_IPSEC_REMOTE_CRED",
-		"IP_IPSEC_REMOTE_ID",
-		"IP_MAXPACKET",
-		"IP_MAX_GROUP_SRC_FILTER",
-		"IP_MAX_MEMBERSHIPS",
-		"IP_MAX_SOCK_MUTE_FILTER",
-		"IP_MAX_SOCK_SRC_FILTER",
-		"IP_MAX_SOURCE_FILTER",
-		"IP_MF",
-		"IP_MINFRAGSIZE",
-		"IP_MINTTL",
-		"IP_MIN_MEMBERSHIPS",
-		"IP_MSFILTER",
-		"IP_MSS",
-		"IP_MTU",
-		"IP_MTU_DISCOVER",
-		"IP_MULTICAST_IF",
-		"IP_MULTICAST_IFINDEX",
-		"IP_MULTICAST_LOOP",
-		"IP_MULTICAST_TTL",
-		"IP_MULTICAST_VIF",
-		"IP_NAT__XXX",
-		"IP_OFFMASK",
-		"IP_OLD_FW_ADD",
-		"IP_OLD_FW_DEL",
-		"IP_OLD_FW_FLUSH",
-		"IP_OLD_FW_GET",
-		"IP_OLD_FW_RESETLOG",
-		"IP_OLD_FW_ZERO",
-		"IP_ONESBCAST",
-		"IP_OPTIONS",
-		"IP_ORIGDSTADDR",
-		"IP_PASSSEC",
-		"IP_PIPEX",
-		"IP_PKTINFO",
-		"IP_PKTOPTIONS",
-		"IP_PMTUDISC",
-		"IP_PMTUDISC_DO",
-		"IP_PMTUDISC_DONT",
-		"IP_PMTUDISC_PROBE",
-		"IP_PMTUDISC_WANT",
-		"IP_PORTRANGE",
-		"IP_PORTRANGE_DEFAULT",
-		"IP_PORTRANGE_HIGH",
-		"IP_PORTRANGE_LOW",
-		"IP_RECVDSTADDR",
-		"IP_RECVDSTPORT",
-		"IP_RECVERR",
-		"IP_RECVIF",
-		"IP_RECVOPTS",
-		"IP_RECVORIGDSTADDR",
-		"IP_RECVPKTINFO",
-		"IP_RECVRETOPTS",
-		"IP_RECVRTABLE",
-		"IP_RECVTOS",
-		"IP_RECVTTL",
-		"IP_RETOPTS",
-		"IP_RF",
-		"IP_ROUTER_ALERT",
-		"IP_RSVP_OFF",
-		"IP_RSVP_ON",
-		"IP_RSVP_VIF_OFF",
-		"IP_RSVP_VIF_ON",
-		"IP_RTABLE",
-		"IP_SENDSRCADDR",
-		"IP_STRIPHDR",
-		"IP_TOS",
-		"IP_TRAFFIC_MGT_BACKGROUND",
-		"IP_TRANSPARENT",
-		"IP_TTL",
-		"IP_UNBLOCK_SOURCE",
-		"IP_XFRM_POLICY",
-		"IPv6MTUInfo",
-		"IPv6Mreq",
-		"ISIG",
-		"ISTRIP",
-		"IUCLC",
-		"IUTF8",
-		"IXANY",
-		"IXOFF",
-		"IXON",
-		"IfAddrmsg",
-		"IfAnnounceMsghdr",
-		"IfData",
-		"IfInfomsg",
-		"IfMsghdr",
-		"IfaMsghdr",
-		"IfmaMsghdr",
-		"IfmaMsghdr2",
-		"ImplementsGetwd",
-		"Inet4Pktinfo",
-		"Inet6Pktinfo",
-		"InotifyAddWatch",
-		"InotifyEvent",
-		"InotifyInit",
-		"InotifyInit1",
-		"InotifyRmWatch",
-		"InterfaceAddrMessage",
-		"InterfaceAnnounceMessage",
-		"InterfaceInfo",
-		"InterfaceMessage",
-		"InterfaceMulticastAddrMessage",
-		"InvalidHandle",
-		"Ioperm",
-		"Iopl",
-		"Iovec",
-		"IpAdapterInfo",
-		"IpAddrString",
-		"IpAddressString",
-		"IpMaskString",
-		"Issetugid",
-		"KEY_ALL_ACCESS",
-		"KEY_CREATE_LINK",
-		"KEY_CREATE_SUB_KEY",
-		"KEY_ENUMERATE_SUB_KEYS",
-		"KEY_EXECUTE",
-		"KEY_NOTIFY",
-		"KEY_QUERY_VALUE",
-		"KEY_READ",
-		"KEY_SET_VALUE",
-		"KEY_WOW64_32KEY",
-		"KEY_WOW64_64KEY",
-		"KEY_WRITE",
-		"Kevent",
-		"Kevent_t",
-		"Kill",
-		"Klogctl",
-		"Kqueue",
-		"LANG_ENGLISH",
-		"LAYERED_PROTOCOL",
-		"LCNT_OVERLOAD_FLUSH",
-		"LINUX_REBOOT_CMD_CAD_OFF",
-		"LINUX_REBOOT_CMD_CAD_ON",
-		"LINUX_REBOOT_CMD_HALT",
-		"LINUX_REBOOT_CMD_KEXEC",
-		"LINUX_REBOOT_CMD_POWER_OFF",
-		"LINUX_REBOOT_CMD_RESTART",
-		"LINUX_REBOOT_CMD_RESTART2",
-		"LINUX_REBOOT_CMD_SW_SUSPEND",
-		"LINUX_REBOOT_MAGIC1",
-		"LINUX_REBOOT_MAGIC2",
-		"LOCK_EX",
-		"LOCK_NB",
-		"LOCK_SH",
-		"LOCK_UN",
-		"LazyDLL",
-		"LazyProc",
-		"Lchown",
-		"Linger",
-		"Link",
-		"Listen",
-		"Listxattr",
-		"LoadCancelIoEx",
-		"LoadConnectEx",
-		"LoadCreateSymbolicLink",
-		"LoadDLL",
-		"LoadGetAddrInfo",
-		"LoadLibrary",
-		"LoadSetFileCompletionNotificationModes",
-		"LocalFree",
-		"Log2phys_t",
-		"LookupAccountName",
-		"LookupAccountSid",
-		"LookupSID",
-		"LsfJump",
-		"LsfSocket",
-		"LsfStmt",
-		"Lstat",
-		"MADV_AUTOSYNC",
-		"MADV_CAN_REUSE",
-		"MADV_CORE",
-		"MADV_DOFORK",
-		"MADV_DONTFORK",
-		"MADV_DONTNEED",
-		"MADV_FREE",
-		"MADV_FREE_REUSABLE",
-		"MADV_FREE_REUSE",
-		"MADV_HUGEPAGE",
-		"MADV_HWPOISON",
-		"MADV_MERGEABLE",
-		"MADV_NOCORE",
-		"MADV_NOHUGEPAGE",
-		"MADV_NORMAL",
-		"MADV_NOSYNC",
-		"MADV_PROTECT",
-		"MADV_RANDOM",
-		"MADV_REMOVE",
-		"MADV_SEQUENTIAL",
-		"MADV_SPACEAVAIL",
-		"MADV_UNMERGEABLE",
-		"MADV_WILLNEED",
-		"MADV_ZERO_WIRED_PAGES",
-		"MAP_32BIT",
-		"MAP_ALIGNED_SUPER",
-		"MAP_ALIGNMENT_16MB",
-		"MAP_ALIGNMENT_1TB",
-		"MAP_ALIGNMENT_256TB",
-		"MAP_ALIGNMENT_4GB",
-		"MAP_ALIGNMENT_64KB",
-		"MAP_ALIGNMENT_64PB",
-		"MAP_ALIGNMENT_MASK",
-		"MAP_ALIGNMENT_SHIFT",
-		"MAP_ANON",
-		"MAP_ANONYMOUS",
-		"MAP_COPY",
-		"MAP_DENYWRITE",
-		"MAP_EXECUTABLE",
-		"MAP_FILE",
-		"MAP_FIXED",
-		"MAP_FLAGMASK",
-		"MAP_GROWSDOWN",
-		"MAP_HASSEMAPHORE",
-		"MAP_HUGETLB",
-		"MAP_INHERIT",
-		"MAP_INHERIT_COPY",
-		"MAP_INHERIT_DEFAULT",
-		"MAP_INHERIT_DONATE_COPY",
-		"MAP_INHERIT_NONE",
-		"MAP_INHERIT_SHARE",
-		"MAP_JIT",
-		"MAP_LOCKED",
-		"MAP_NOCACHE",
-		"MAP_NOCORE",
-		"MAP_NOEXTEND",
-		"MAP_NONBLOCK",
-		"MAP_NORESERVE",
-		"MAP_NOSYNC",
-		"MAP_POPULATE",
-		"MAP_PREFAULT_READ",
-		"MAP_PRIVATE",
-		"MAP_RENAME",
-		"MAP_RESERVED0080",
-		"MAP_RESERVED0100",
-		"MAP_SHARED",
-		"MAP_STACK",
-		"MAP_TRYFIXED",
-		"MAP_TYPE",
-		"MAP_WIRED",
-		"MAXIMUM_REPARSE_DATA_BUFFER_SIZE",
-		"MAXLEN_IFDESCR",
-		"MAXLEN_PHYSADDR",
-		"MAX_ADAPTER_ADDRESS_LENGTH",
-		"MAX_ADAPTER_DESCRIPTION_LENGTH",
-		"MAX_ADAPTER_NAME_LENGTH",
-		"MAX_COMPUTERNAME_LENGTH",
-		"MAX_INTERFACE_NAME_LEN",
-		"MAX_LONG_PATH",
-		"MAX_PATH",
-		"MAX_PROTOCOL_CHAIN",
-		"MCL_CURRENT",
-		"MCL_FUTURE",
-		"MNT_DETACH",
-		"MNT_EXPIRE",
-		"MNT_FORCE",
-		"MSG_BCAST",
-		"MSG_CMSG_CLOEXEC",
-		"MSG_COMPAT",
-		"MSG_CONFIRM",
-		"MSG_CONTROLMBUF",
-		"MSG_CTRUNC",
-		"MSG_DONTROUTE",
-		"MSG_DONTWAIT",
-		"MSG_EOF",
-		"MSG_EOR",
-		"MSG_ERRQUEUE",
-		"MSG_FASTOPEN",
-		"MSG_FIN",
-		"MSG_FLUSH",
-		"MSG_HAVEMORE",
-		"MSG_HOLD",
-		"MSG_IOVUSRSPACE",
-		"MSG_LENUSRSPACE",
-		"MSG_MCAST",
-		"MSG_MORE",
-		"MSG_NAMEMBUF",
-		"MSG_NBIO",
-		"MSG_NEEDSA",
-		"MSG_NOSIGNAL",
-		"MSG_NOTIFICATION",
-		"MSG_OOB",
-		"MSG_PEEK",
-		"MSG_PROXY",
-		"MSG_RCVMORE",
-		"MSG_RST",
-		"MSG_SEND",
-		"MSG_SYN",
-		"MSG_TRUNC",
-		"MSG_TRYHARD",
-		"MSG_USERFLAGS",
-		"MSG_WAITALL",
-		"MSG_WAITFORONE",
-		"MSG_WAITSTREAM",
-		"MS_ACTIVE",
-		"MS_ASYNC",
-		"MS_BIND",
-		"MS_DEACTIVATE",
-		"MS_DIRSYNC",
-		"MS_INVALIDATE",
-		"MS_I_VERSION",
-		"MS_KERNMOUNT",
-		"MS_KILLPAGES",
-		"MS_MANDLOCK",
-		"MS_MGC_MSK",
-		"MS_MGC_VAL",
-		"MS_MOVE",
-		"MS_NOATIME",
-		"MS_NODEV",
-		"MS_NODIRATIME",
-		"MS_NOEXEC",
-		"MS_NOSUID",
-		"MS_NOUSER",
-		"MS_POSIXACL",
-		"MS_PRIVATE",
-		"MS_RDONLY",
-		"MS_REC",
-		"MS_RELATIME",
-		"MS_REMOUNT",
-		"MS_RMT_MASK",
-		"MS_SHARED",
-		"MS_SILENT",
-		"MS_SLAVE",
-		"MS_STRICTATIME",
-		"MS_SYNC",
-		"MS_SYNCHRONOUS",
-		"MS_UNBINDABLE",
-		"Madvise",
-		"MapViewOfFile",
-		"MaxTokenInfoClass",
-		"Mclpool",
-		"MibIfRow",
-		"Mkdir",
-		"Mkdirat",
-		"Mkfifo",
-		"Mknod",
-		"Mknodat",
-		"Mlock",
-		"Mlockall",
-		"Mmap",
-		"Mount",
-		"MoveFile",
-		"Mprotect",
-		"Msghdr",
-		"Munlock",
-		"Munlockall",
-		"Munmap",
-		"MustLoadDLL",
-		"NAME_MAX",
-		"NETLINK_ADD_MEMBERSHIP",
-		"NETLINK_AUDIT",
-		"NETLINK_BROADCAST_ERROR",
-		"NETLINK_CONNECTOR",
-		"NETLINK_DNRTMSG",
-		"NETLINK_DROP_MEMBERSHIP",
-		"NETLINK_ECRYPTFS",
-		"NETLINK_FIB_LOOKUP",
-		"NETLINK_FIREWALL",
-		"NETLINK_GENERIC",
-		"NETLINK_INET_DIAG",
-		"NETLINK_IP6_FW",
-		"NETLINK_ISCSI",
-		"NETLINK_KOBJECT_UEVENT",
-		"NETLINK_NETFILTER",
-		"NETLINK_NFLOG",
-		"NETLINK_NO_ENOBUFS",
-		"NETLINK_PKTINFO",
-		"NETLINK_RDMA",
-		"NETLINK_ROUTE",
-		"NETLINK_SCSITRANSPORT",
-		"NETLINK_SELINUX",
-		"NETLINK_UNUSED",
-		"NETLINK_USERSOCK",
-		"NETLINK_XFRM",
-		"NET_RT_DUMP",
-		"NET_RT_DUMP2",
-		"NET_RT_FLAGS",
-		"NET_RT_IFLIST",
-		"NET_RT_IFLIST2",
-		"NET_RT_IFLISTL",
-		"NET_RT_IFMALIST",
-		"NET_RT_MAXID",
-		"NET_RT_OIFLIST",
-		"NET_RT_OOIFLIST",
-		"NET_RT_STAT",
-		"NET_RT_STATS",
-		"NET_RT_TABLE",
-		"NET_RT_TRASH",
-		"NLA_ALIGNTO",
-		"NLA_F_NESTED",
-		"NLA_F_NET_BYTEORDER",
-		"NLA_HDRLEN",
-		"NLMSG_ALIGNTO",
-		"NLMSG_DONE",
-		"NLMSG_ERROR",
-		"NLMSG_HDRLEN",
-		"NLMSG_MIN_TYPE",
-		"NLMSG_NOOP",
-		"NLMSG_OVERRUN",
-		"NLM_F_ACK",
-		"NLM_F_APPEND",
-		"NLM_F_ATOMIC",
-		"NLM_F_CREATE",
-		"NLM_F_DUMP",
-		"NLM_F_ECHO",
-		"NLM_F_EXCL",
-		"NLM_F_MATCH",
-		"NLM_F_MULTI",
-		"NLM_F_REPLACE",
-		"NLM_F_REQUEST",
-		"NLM_F_ROOT",
-		"NOFLSH",
-		"NOTE_ABSOLUTE",
-		"NOTE_ATTRIB",
-		"NOTE_BACKGROUND",
-		"NOTE_CHILD",
-		"NOTE_CRITICAL",
-		"NOTE_DELETE",
-		"NOTE_EOF",
-		"NOTE_EXEC",
-		"NOTE_EXIT",
-		"NOTE_EXITSTATUS",
-		"NOTE_EXIT_CSERROR",
-		"NOTE_EXIT_DECRYPTFAIL",
-		"NOTE_EXIT_DETAIL",
-		"NOTE_EXIT_DETAIL_MASK",
-		"NOTE_EXIT_MEMORY",
-		"NOTE_EXIT_REPARENTED",
-		"NOTE_EXTEND",
-		"NOTE_FFAND",
-		"NOTE_FFCOPY",
-		"NOTE_FFCTRLMASK",
-		"NOTE_FFLAGSMASK",
-		"NOTE_FFNOP",
-		"NOTE_FFOR",
-		"NOTE_FORK",
-		"NOTE_LEEWAY",
-		"NOTE_LINK",
-		"NOTE_LOWAT",
-		"NOTE_NONE",
-		"NOTE_NSECONDS",
-		"NOTE_PCTRLMASK",
-		"NOTE_PDATAMASK",
-		"NOTE_REAP",
-		"NOTE_RENAME",
-		"NOTE_RESOURCEEND",
-		"NOTE_REVOKE",
-		"NOTE_SECONDS",
-		"NOTE_SIGNAL",
-		"NOTE_TRACK",
-		"NOTE_TRACKERR",
-		"NOTE_TRIGGER",
-		"NOTE_TRUNCATE",
-		"NOTE_USECONDS",
-		"NOTE_VM_ERROR",
-		"NOTE_VM_PRESSURE",
-		"NOTE_VM_PRESSURE_SUDDEN_TERMINATE",
-		"NOTE_VM_PRESSURE_TERMINATE",
-		"NOTE_WRITE",
-		"NameCanonical",
-		"NameCanonicalEx",
-		"NameDisplay",
-		"NameDnsDomain",
-		"NameFullyQualifiedDN",
-		"NameSamCompatible",
-		"NameServicePrincipal",
-		"NameUniqueId",
-		"NameUnknown",
-		"NameUserPrincipal",
-		"Nanosleep",
-		"NetApiBufferFree",
-		"NetGetJoinInformation",
-		"NetSetupDomainName",
-		"NetSetupUnjoined",
-		"NetSetupUnknownStatus",
-		"NetSetupWorkgroupName",
-		"NetUserGetInfo",
-		"NetlinkMessage",
-		"NetlinkRIB",
-		"NetlinkRouteAttr",
-		"NetlinkRouteRequest",
-		"NewCallback",
-		"NewCallbackCDecl",
-		"NewLazyDLL",
-		"NlAttr",
-		"NlMsgerr",
-		"NlMsghdr",
-		"NsecToFiletime",
-		"NsecToTimespec",
-		"NsecToTimeval",
-		"Ntohs",
-		"OCRNL",
-		"OFDEL",
-		"OFILL",
-		"OFIOGETBMAP",
-		"OID_PKIX_KP_SERVER_AUTH",
-		"OID_SERVER_GATED_CRYPTO",
-		"OID_SGC_NETSCAPE",
-		"OLCUC",
-		"ONLCR",
-		"ONLRET",
-		"ONOCR",
-		"ONOEOT",
-		"OPEN_ALWAYS",
-		"OPEN_EXISTING",
-		"OPOST",
-		"O_ACCMODE",
-		"O_ALERT",
-		"O_ALT_IO",
-		"O_APPEND",
-		"O_ASYNC",
-		"O_CLOEXEC",
-		"O_CREAT",
-		"O_DIRECT",
-		"O_DIRECTORY",
-		"O_DP_GETRAWENCRYPTED",
-		"O_DSYNC",
-		"O_EVTONLY",
-		"O_EXCL",
-		"O_EXEC",
-		"O_EXLOCK",
-		"O_FSYNC",
-		"O_LARGEFILE",
-		"O_NDELAY",
-		"O_NOATIME",
-		"O_NOCTTY",
-		"O_NOFOLLOW",
-		"O_NONBLOCK",
-		"O_NOSIGPIPE",
-		"O_POPUP",
-		"O_RDONLY",
-		"O_RDWR",
-		"O_RSYNC",
-		"O_SHLOCK",
-		"O_SYMLINK",
-		"O_SYNC",
-		"O_TRUNC",
-		"O_TTY_INIT",
-		"O_WRONLY",
-		"Open",
-		"OpenCurrentProcessToken",
-		"OpenProcess",
-		"OpenProcessToken",
-		"Openat",
-		"Overlapped",
-		"PACKET_ADD_MEMBERSHIP",
-		"PACKET_BROADCAST",
-		"PACKET_DROP_MEMBERSHIP",
-		"PACKET_FASTROUTE",
-		"PACKET_HOST",
-		"PACKET_LOOPBACK",
-		"PACKET_MR_ALLMULTI",
-		"PACKET_MR_MULTICAST",
-		"PACKET_MR_PROMISC",
-		"PACKET_MULTICAST",
-		"PACKET_OTHERHOST",
-		"PACKET_OUTGOING",
-		"PACKET_RECV_OUTPUT",
-		"PACKET_RX_RING",
-		"PACKET_STATISTICS",
-		"PAGE_EXECUTE_READ",
-		"PAGE_EXECUTE_READWRITE",
-		"PAGE_EXECUTE_WRITECOPY",
-		"PAGE_READONLY",
-		"PAGE_READWRITE",
-		"PAGE_WRITECOPY",
-		"PARENB",
-		"PARMRK",
-		"PARODD",
-		"PENDIN",
-		"PFL_HIDDEN",
-		"PFL_MATCHES_PROTOCOL_ZERO",
-		"PFL_MULTIPLE_PROTO_ENTRIES",
-		"PFL_NETWORKDIRECT_PROVIDER",
-		"PFL_RECOMMENDED_PROTO_ENTRY",
-		"PF_FLUSH",
-		"PKCS_7_ASN_ENCODING",
-		"PMC5_PIPELINE_FLUSH",
-		"PRIO_PGRP",
-		"PRIO_PROCESS",
-		"PRIO_USER",
-		"PRI_IOFLUSH",
-		"PROCESS_QUERY_INFORMATION",
-		"PROCESS_TERMINATE",
-		"PROT_EXEC",
-		"PROT_GROWSDOWN",
-		"PROT_GROWSUP",
-		"PROT_NONE",
-		"PROT_READ",
-		"PROT_WRITE",
-		"PROV_DH_SCHANNEL",
-		"PROV_DSS",
-		"PROV_DSS_DH",
-		"PROV_EC_ECDSA_FULL",
-		"PROV_EC_ECDSA_SIG",
-		"PROV_EC_ECNRA_FULL",
-		"PROV_EC_ECNRA_SIG",
-		"PROV_FORTEZZA",
-		"PROV_INTEL_SEC",
-		"PROV_MS_EXCHANGE",
-		"PROV_REPLACE_OWF",
-		"PROV_RNG",
-		"PROV_RSA_AES",
-		"PROV_RSA_FULL",
-		"PROV_RSA_SCHANNEL",
-		"PROV_RSA_SIG",
-		"PROV_SPYRUS_LYNKS",
-		"PROV_SSL",
-		"PR_CAPBSET_DROP",
-		"PR_CAPBSET_READ",
-		"PR_CLEAR_SECCOMP_FILTER",
-		"PR_ENDIAN_BIG",
-		"PR_ENDIAN_LITTLE",
-		"PR_ENDIAN_PPC_LITTLE",
-		"PR_FPEMU_NOPRINT",
-		"PR_FPEMU_SIGFPE",
-		"PR_FP_EXC_ASYNC",
-		"PR_FP_EXC_DISABLED",
-		"PR_FP_EXC_DIV",
-		"PR_FP_EXC_INV",
-		"PR_FP_EXC_NONRECOV",
-		"PR_FP_EXC_OVF",
-		"PR_FP_EXC_PRECISE",
-		"PR_FP_EXC_RES",
-		"PR_FP_EXC_SW_ENABLE",
-		"PR_FP_EXC_UND",
-		"PR_GET_DUMPABLE",
-		"PR_GET_ENDIAN",
-		"PR_GET_FPEMU",
-		"PR_GET_FPEXC",
-		"PR_GET_KEEPCAPS",
-		"PR_GET_NAME",
-		"PR_GET_PDEATHSIG",
-		"PR_GET_SECCOMP",
-		"PR_GET_SECCOMP_FILTER",
-		"PR_GET_SECUREBITS",
-		"PR_GET_TIMERSLACK",
-		"PR_GET_TIMING",
-		"PR_GET_TSC",
-		"PR_GET_UNALIGN",
-		"PR_MCE_KILL",
-		"PR_MCE_KILL_CLEAR",
-		"PR_MCE_KILL_DEFAULT",
-		"PR_MCE_KILL_EARLY",
-		"PR_MCE_KILL_GET",
-		"PR_MCE_KILL_LATE",
-		"PR_MCE_KILL_SET",
-		"PR_SECCOMP_FILTER_EVENT",
-		"PR_SECCOMP_FILTER_SYSCALL",
-		"PR_SET_DUMPABLE",
-		"PR_SET_ENDIAN",
-		"PR_SET_FPEMU",
-		"PR_SET_FPEXC",
-		"PR_SET_KEEPCAPS",
-		"PR_SET_NAME",
-		"PR_SET_PDEATHSIG",
-		"PR_SET_PTRACER",
-		"PR_SET_SECCOMP",
-		"PR_SET_SECCOMP_FILTER",
-		"PR_SET_SECUREBITS",
-		"PR_SET_TIMERSLACK",
-		"PR_SET_TIMING",
-		"PR_SET_TSC",
-		"PR_SET_UNALIGN",
-		"PR_TASK_PERF_EVENTS_DISABLE",
-		"PR_TASK_PERF_EVENTS_ENABLE",
-		"PR_TIMING_STATISTICAL",
-		"PR_TIMING_TIMESTAMP",
-		"PR_TSC_ENABLE",
-		"PR_TSC_SIGSEGV",
-		"PR_UNALIGN_NOPRINT",
-		"PR_UNALIGN_SIGBUS",
-		"PTRACE_ARCH_PRCTL",
-		"PTRACE_ATTACH",
-		"PTRACE_CONT",
-		"PTRACE_DETACH",
-		"PTRACE_EVENT_CLONE",
-		"PTRACE_EVENT_EXEC",
-		"PTRACE_EVENT_EXIT",
-		"PTRACE_EVENT_FORK",
-		"PTRACE_EVENT_VFORK",
-		"PTRACE_EVENT_VFORK_DONE",
-		"PTRACE_GETCRUNCHREGS",
-		"PTRACE_GETEVENTMSG",
-		"PTRACE_GETFPREGS",
-		"PTRACE_GETFPXREGS",
-		"PTRACE_GETHBPREGS",
-		"PTRACE_GETREGS",
-		"PTRACE_GETREGSET",
-		"PTRACE_GETSIGINFO",
-		"PTRACE_GETVFPREGS",
-		"PTRACE_GETWMMXREGS",
-		"PTRACE_GET_THREAD_AREA",
-		"PTRACE_KILL",
-		"PTRACE_OLDSETOPTIONS",
-		"PTRACE_O_MASK",
-		"PTRACE_O_TRACECLONE",
-		"PTRACE_O_TRACEEXEC",
-		"PTRACE_O_TRACEEXIT",
-		"PTRACE_O_TRACEFORK",
-		"PTRACE_O_TRACESYSGOOD",
-		"PTRACE_O_TRACEVFORK",
-		"PTRACE_O_TRACEVFORKDONE",
-		"PTRACE_PEEKDATA",
-		"PTRACE_PEEKTEXT",
-		"PTRACE_PEEKUSR",
-		"PTRACE_POKEDATA",
-		"PTRACE_POKETEXT",
-		"PTRACE_POKEUSR",
-		"PTRACE_SETCRUNCHREGS",
-		"PTRACE_SETFPREGS",
-		"PTRACE_SETFPXREGS",
-		"PTRACE_SETHBPREGS",
-		"PTRACE_SETOPTIONS",
-		"PTRACE_SETREGS",
-		"PTRACE_SETREGSET",
-		"PTRACE_SETSIGINFO",
-		"PTRACE_SETVFPREGS",
-		"PTRACE_SETWMMXREGS",
-		"PTRACE_SET_SYSCALL",
-		"PTRACE_SET_THREAD_AREA",
-		"PTRACE_SINGLEBLOCK",
-		"PTRACE_SINGLESTEP",
-		"PTRACE_SYSCALL",
-		"PTRACE_SYSEMU",
-		"PTRACE_SYSEMU_SINGLESTEP",
-		"PTRACE_TRACEME",
-		"PT_ATTACH",
-		"PT_ATTACHEXC",
-		"PT_CONTINUE",
-		"PT_DATA_ADDR",
-		"PT_DENY_ATTACH",
-		"PT_DETACH",
-		"PT_FIRSTMACH",
-		"PT_FORCEQUOTA",
-		"PT_KILL",
-		"PT_MASK",
-		"PT_READ_D",
-		"PT_READ_I",
-		"PT_READ_U",
-		"PT_SIGEXC",
-		"PT_STEP",
-		"PT_TEXT_ADDR",
-		"PT_TEXT_END_ADDR",
-		"PT_THUPDATE",
-		"PT_TRACE_ME",
-		"PT_WRITE_D",
-		"PT_WRITE_I",
-		"PT_WRITE_U",
-		"ParseDirent",
-		"ParseNetlinkMessage",
-		"ParseNetlinkRouteAttr",
-		"ParseRoutingMessage",
-		"ParseRoutingSockaddr",
-		"ParseSocketControlMessage",
-		"ParseUnixCredentials",
-		"ParseUnixRights",
-		"PathMax",
-		"Pathconf",
-		"Pause",
-		"Pipe",
-		"Pipe2",
-		"PivotRoot",
-		"Pointer",
-		"PostQueuedCompletionStatus",
-		"Pread",
-		"Proc",
-		"ProcAttr",
-		"Process32First",
-		"Process32Next",
-		"ProcessEntry32",
-		"ProcessInformation",
-		"Protoent",
-		"PtraceAttach",
-		"PtraceCont",
-		"PtraceDetach",
-		"PtraceGetEventMsg",
-		"PtraceGetRegs",
-		"PtracePeekData",
-		"PtracePeekText",
-		"PtracePokeData",
-		"PtracePokeText",
-		"PtraceRegs",
-		"PtraceSetOptions",
-		"PtraceSetRegs",
-		"PtraceSingleStep",
-		"PtraceSyscall",
-		"Pwrite",
-		"REG_BINARY",
-		"REG_DWORD",
-		"REG_DWORD_BIG_ENDIAN",
-		"REG_DWORD_LITTLE_ENDIAN",
-		"REG_EXPAND_SZ",
-		"REG_FULL_RESOURCE_DESCRIPTOR",
-		"REG_LINK",
-		"REG_MULTI_SZ",
-		"REG_NONE",
-		"REG_QWORD",
-		"REG_QWORD_LITTLE_ENDIAN",
-		"REG_RESOURCE_LIST",
-		"REG_RESOURCE_REQUIREMENTS_LIST",
-		"REG_SZ",
-		"RLIMIT_AS",
-		"RLIMIT_CORE",
-		"RLIMIT_CPU",
-		"RLIMIT_CPU_USAGE_MONITOR",
-		"RLIMIT_DATA",
-		"RLIMIT_FSIZE",
-		"RLIMIT_NOFILE",
-		"RLIMIT_STACK",
-		"RLIM_INFINITY",
-		"RTAX_ADVMSS",
-		"RTAX_AUTHOR",
-		"RTAX_BRD",
-		"RTAX_CWND",
-		"RTAX_DST",
-		"RTAX_FEATURES",
-		"RTAX_FEATURE_ALLFRAG",
-		"RTAX_FEATURE_ECN",
-		"RTAX_FEATURE_SACK",
-		"RTAX_FEATURE_TIMESTAMP",
-		"RTAX_GATEWAY",
-		"RTAX_GENMASK",
-		"RTAX_HOPLIMIT",
-		"RTAX_IFA",
-		"RTAX_IFP",
-		"RTAX_INITCWND",
-		"RTAX_INITRWND",
-		"RTAX_LABEL",
-		"RTAX_LOCK",
-		"RTAX_MAX",
-		"RTAX_MTU",
-		"RTAX_NETMASK",
-		"RTAX_REORDERING",
-		"RTAX_RTO_MIN",
-		"RTAX_RTT",
-		"RTAX_RTTVAR",
-		"RTAX_SRC",
-		"RTAX_SRCMASK",
-		"RTAX_SSTHRESH",
-		"RTAX_TAG",
-		"RTAX_UNSPEC",
-		"RTAX_WINDOW",
-		"RTA_ALIGNTO",
-		"RTA_AUTHOR",
-		"RTA_BRD",
-		"RTA_CACHEINFO",
-		"RTA_DST",
-		"RTA_FLOW",
-		"RTA_GATEWAY",
-		"RTA_GENMASK",
-		"RTA_IFA",
-		"RTA_IFP",
-		"RTA_IIF",
-		"RTA_LABEL",
-		"RTA_MAX",
-		"RTA_METRICS",
-		"RTA_MULTIPATH",
-		"RTA_NETMASK",
-		"RTA_OIF",
-		"RTA_PREFSRC",
-		"RTA_PRIORITY",
-		"RTA_SRC",
-		"RTA_SRCMASK",
-		"RTA_TABLE",
-		"RTA_TAG",
-		"RTA_UNSPEC",
-		"RTCF_DIRECTSRC",
-		"RTCF_DOREDIRECT",
-		"RTCF_LOG",
-		"RTCF_MASQ",
-		"RTCF_NAT",
-		"RTCF_VALVE",
-		"RTF_ADDRCLASSMASK",
-		"RTF_ADDRCONF",
-		"RTF_ALLONLINK",
-		"RTF_ANNOUNCE",
-		"RTF_BLACKHOLE",
-		"RTF_BROADCAST",
-		"RTF_CACHE",
-		"RTF_CLONED",
-		"RTF_CLONING",
-		"RTF_CONDEMNED",
-		"RTF_DEFAULT",
-		"RTF_DELCLONE",
-		"RTF_DONE",
-		"RTF_DYNAMIC",
-		"RTF_FLOW",
-		"RTF_FMASK",
-		"RTF_GATEWAY",
-		"RTF_GWFLAG_COMPAT",
-		"RTF_HOST",
-		"RTF_IFREF",
-		"RTF_IFSCOPE",
-		"RTF_INTERFACE",
-		"RTF_IRTT",
-		"RTF_LINKRT",
-		"RTF_LLDATA",
-		"RTF_LLINFO",
-		"RTF_LOCAL",
-		"RTF_MASK",
-		"RTF_MODIFIED",
-		"RTF_MPATH",
-		"RTF_MPLS",
-		"RTF_MSS",
-		"RTF_MTU",
-		"RTF_MULTICAST",
-		"RTF_NAT",
-		"RTF_NOFORWARD",
-		"RTF_NONEXTHOP",
-		"RTF_NOPMTUDISC",
-		"RTF_PERMANENT_ARP",
-		"RTF_PINNED",
-		"RTF_POLICY",
-		"RTF_PRCLONING",
-		"RTF_PROTO1",
-		"RTF_PROTO2",
-		"RTF_PROTO3",
-		"RTF_PROXY",
-		"RTF_REINSTATE",
-		"RTF_REJECT",
-		"RTF_RNH_LOCKED",
-		"RTF_ROUTER",
-		"RTF_SOURCE",
-		"RTF_SRC",
-		"RTF_STATIC",
-		"RTF_STICKY",
-		"RTF_THROW",
-		"RTF_TUNNEL",
-		"RTF_UP",
-		"RTF_USETRAILERS",
-		"RTF_WASCLONED",
-		"RTF_WINDOW",
-		"RTF_XRESOLVE",
-		"RTM_ADD",
-		"RTM_BASE",
-		"RTM_CHANGE",
-		"RTM_CHGADDR",
-		"RTM_DELACTION",
-		"RTM_DELADDR",
-		"RTM_DELADDRLABEL",
-		"RTM_DELETE",
-		"RTM_DELLINK",
-		"RTM_DELMADDR",
-		"RTM_DELNEIGH",
-		"RTM_DELQDISC",
-		"RTM_DELROUTE",
-		"RTM_DELRULE",
-		"RTM_DELTCLASS",
-		"RTM_DELTFILTER",
-		"RTM_DESYNC",
-		"RTM_F_CLONED",
-		"RTM_F_EQUALIZE",
-		"RTM_F_NOTIFY",
-		"RTM_F_PREFIX",
-		"RTM_GET",
-		"RTM_GET2",
-		"RTM_GETACTION",
-		"RTM_GETADDR",
-		"RTM_GETADDRLABEL",
-		"RTM_GETANYCAST",
-		"RTM_GETDCB",
-		"RTM_GETLINK",
-		"RTM_GETMULTICAST",
-		"RTM_GETNEIGH",
-		"RTM_GETNEIGHTBL",
-		"RTM_GETQDISC",
-		"RTM_GETROUTE",
-		"RTM_GETRULE",
-		"RTM_GETTCLASS",
-		"RTM_GETTFILTER",
-		"RTM_IEEE80211",
-		"RTM_IFANNOUNCE",
-		"RTM_IFINFO",
-		"RTM_IFINFO2",
-		"RTM_LLINFO_UPD",
-		"RTM_LOCK",
-		"RTM_LOSING",
-		"RTM_MAX",
-		"RTM_MAXSIZE",
-		"RTM_MISS",
-		"RTM_NEWACTION",
-		"RTM_NEWADDR",
-		"RTM_NEWADDRLABEL",
-		"RTM_NEWLINK",
-		"RTM_NEWMADDR",
-		"RTM_NEWMADDR2",
-		"RTM_NEWNDUSEROPT",
-		"RTM_NEWNEIGH",
-		"RTM_NEWNEIGHTBL",
-		"RTM_NEWPREFIX",
-		"RTM_NEWQDISC",
-		"RTM_NEWROUTE",
-		"RTM_NEWRULE",
-		"RTM_NEWTCLASS",
-		"RTM_NEWTFILTER",
-		"RTM_NR_FAMILIES",
-		"RTM_NR_MSGTYPES",
-		"RTM_OIFINFO",
-		"RTM_OLDADD",
-		"RTM_OLDDEL",
-		"RTM_OOIFINFO",
-		"RTM_REDIRECT",
-		"RTM_RESOLVE",
-		"RTM_RTTUNIT",
-		"RTM_SETDCB",
-		"RTM_SETGATE",
-		"RTM_SETLINK",
-		"RTM_SETNEIGHTBL",
-		"RTM_VERSION",
-		"RTNH_ALIGNTO",
-		"RTNH_F_DEAD",
-		"RTNH_F_ONLINK",
-		"RTNH_F_PERVASIVE",
-		"RTNLGRP_IPV4_IFADDR",
-		"RTNLGRP_IPV4_MROUTE",
-		"RTNLGRP_IPV4_ROUTE",
-		"RTNLGRP_IPV4_RULE",
-		"RTNLGRP_IPV6_IFADDR",
-		"RTNLGRP_IPV6_IFINFO",
-		"RTNLGRP_IPV6_MROUTE",
-		"RTNLGRP_IPV6_PREFIX",
-		"RTNLGRP_IPV6_ROUTE",
-		"RTNLGRP_IPV6_RULE",
-		"RTNLGRP_LINK",
-		"RTNLGRP_ND_USEROPT",
-		"RTNLGRP_NEIGH",
-		"RTNLGRP_NONE",
-		"RTNLGRP_NOTIFY",
-		"RTNLGRP_TC",
-		"RTN_ANYCAST",
-		"RTN_BLACKHOLE",
-		"RTN_BROADCAST",
-		"RTN_LOCAL",
-		"RTN_MAX",
-		"RTN_MULTICAST",
-		"RTN_NAT",
-		"RTN_PROHIBIT",
-		"RTN_THROW",
-		"RTN_UNICAST",
-		"RTN_UNREACHABLE",
-		"RTN_UNSPEC",
-		"RTN_XRESOLVE",
-		"RTPROT_BIRD",
-		"RTPROT_BOOT",
-		"RTPROT_DHCP",
-		"RTPROT_DNROUTED",
-		"RTPROT_GATED",
-		"RTPROT_KERNEL",
-		"RTPROT_MRT",
-		"RTPROT_NTK",
-		"RTPROT_RA",
-		"RTPROT_REDIRECT",
-		"RTPROT_STATIC",
-		"RTPROT_UNSPEC",
-		"RTPROT_XORP",
-		"RTPROT_ZEBRA",
-		"RTV_EXPIRE",
-		"RTV_HOPCOUNT",
-		"RTV_MTU",
-		"RTV_RPIPE",
-		"RTV_RTT",
-		"RTV_RTTVAR",
-		"RTV_SPIPE",
-		"RTV_SSTHRESH",
-		"RTV_WEIGHT",
-		"RT_CACHING_CONTEXT",
-		"RT_CLASS_DEFAULT",
-		"RT_CLASS_LOCAL",
-		"RT_CLASS_MAIN",
-		"RT_CLASS_MAX",
-		"RT_CLASS_UNSPEC",
-		"RT_DEFAULT_FIB",
-		"RT_NORTREF",
-		"RT_SCOPE_HOST",
-		"RT_SCOPE_LINK",
-		"RT_SCOPE_NOWHERE",
-		"RT_SCOPE_SITE",
-		"RT_SCOPE_UNIVERSE",
-		"RT_TABLEID_MAX",
-		"RT_TABLE_COMPAT",
-		"RT_TABLE_DEFAULT",
-		"RT_TABLE_LOCAL",
-		"RT_TABLE_MAIN",
-		"RT_TABLE_MAX",
-		"RT_TABLE_UNSPEC",
-		"RUSAGE_CHILDREN",
-		"RUSAGE_SELF",
-		"RUSAGE_THREAD",
-		"Radvisory_t",
-		"RawConn",
-		"RawSockaddr",
-		"RawSockaddrAny",
-		"RawSockaddrDatalink",
-		"RawSockaddrInet4",
-		"RawSockaddrInet6",
-		"RawSockaddrLinklayer",
-		"RawSockaddrNetlink",
-		"RawSockaddrUnix",
-		"RawSyscall",
-		"RawSyscall6",
-		"Read",
-		"ReadConsole",
-		"ReadDirectoryChanges",
-		"ReadDirent",
-		"ReadFile",
-		"Readlink",
-		"Reboot",
-		"Recvfrom",
-		"Recvmsg",
-		"RegCloseKey",
-		"RegEnumKeyEx",
-		"RegOpenKeyEx",
-		"RegQueryInfoKey",
-		"RegQueryValueEx",
-		"RemoveDirectory",
-		"Removexattr",
-		"Rename",
-		"Renameat",
-		"Revoke",
-		"Rlimit",
-		"Rmdir",
-		"RouteMessage",
-		"RouteRIB",
-		"RoutingMessage",
-		"RtAttr",
-		"RtGenmsg",
-		"RtMetrics",
-		"RtMsg",
-		"RtMsghdr",
-		"RtNexthop",
-		"Rusage",
-		"SCM_BINTIME",
-		"SCM_CREDENTIALS",
-		"SCM_CREDS",
-		"SCM_RIGHTS",
-		"SCM_TIMESTAMP",
-		"SCM_TIMESTAMPING",
-		"SCM_TIMESTAMPNS",
-		"SCM_TIMESTAMP_MONOTONIC",
-		"SHUT_RD",
-		"SHUT_RDWR",
-		"SHUT_WR",
-		"SID",
-		"SIDAndAttributes",
-		"SIGABRT",
-		"SIGALRM",
-		"SIGBUS",
-		"SIGCHLD",
-		"SIGCLD",
-		"SIGCONT",
-		"SIGEMT",
-		"SIGFPE",
-		"SIGHUP",
-		"SIGILL",
-		"SIGINFO",
-		"SIGINT",
-		"SIGIO",
-		"SIGIOT",
-		"SIGKILL",
-		"SIGLIBRT",
-		"SIGLWP",
-		"SIGPIPE",
-		"SIGPOLL",
-		"SIGPROF",
-		"SIGPWR",
-		"SIGQUIT",
-		"SIGSEGV",
-		"SIGSTKFLT",
-		"SIGSTOP",
-		"SIGSYS",
-		"SIGTERM",
-		"SIGTHR",
-		"SIGTRAP",
-		"SIGTSTP",
-		"SIGTTIN",
-		"SIGTTOU",
-		"SIGUNUSED",
-		"SIGURG",
-		"SIGUSR1",
-		"SIGUSR2",
-		"SIGVTALRM",
-		"SIGWINCH",
-		"SIGXCPU",
-		"SIGXFSZ",
-		"SIOCADDDLCI",
-		"SIOCADDMULTI",
-		"SIOCADDRT",
-		"SIOCAIFADDR",
-		"SIOCAIFGROUP",
-		"SIOCALIFADDR",
-		"SIOCARPIPLL",
-		"SIOCATMARK",
-		"SIOCAUTOADDR",
-		"SIOCAUTONETMASK",
-		"SIOCBRDGADD",
-		"SIOCBRDGADDS",
-		"SIOCBRDGARL",
-		"SIOCBRDGDADDR",
-		"SIOCBRDGDEL",
-		"SIOCBRDGDELS",
-		"SIOCBRDGFLUSH",
-		"SIOCBRDGFRL",
-		"SIOCBRDGGCACHE",
-		"SIOCBRDGGFD",
-		"SIOCBRDGGHT",
-		"SIOCBRDGGIFFLGS",
-		"SIOCBRDGGMA",
-		"SIOCBRDGGPARAM",
-		"SIOCBRDGGPRI",
-		"SIOCBRDGGRL",
-		"SIOCBRDGGSIFS",
-		"SIOCBRDGGTO",
-		"SIOCBRDGIFS",
-		"SIOCBRDGRTS",
-		"SIOCBRDGSADDR",
-		"SIOCBRDGSCACHE",
-		"SIOCBRDGSFD",
-		"SIOCBRDGSHT",
-		"SIOCBRDGSIFCOST",
-		"SIOCBRDGSIFFLGS",
-		"SIOCBRDGSIFPRIO",
-		"SIOCBRDGSMA",
-		"SIOCBRDGSPRI",
-		"SIOCBRDGSPROTO",
-		"SIOCBRDGSTO",
-		"SIOCBRDGSTXHC",
-		"SIOCDARP",
-		"SIOCDELDLCI",
-		"SIOCDELMULTI",
-		"SIOCDELRT",
-		"SIOCDEVPRIVATE",
-		"SIOCDIFADDR",
-		"SIOCDIFGROUP",
-		"SIOCDIFPHYADDR",
-		"SIOCDLIFADDR",
-		"SIOCDRARP",
-		"SIOCGARP",
-		"SIOCGDRVSPEC",
-		"SIOCGETKALIVE",
-		"SIOCGETLABEL",
-		"SIOCGETPFLOW",
-		"SIOCGETPFSYNC",
-		"SIOCGETSGCNT",
-		"SIOCGETVIFCNT",
-		"SIOCGETVLAN",
-		"SIOCGHIWAT",
-		"SIOCGIFADDR",
-		"SIOCGIFADDRPREF",
-		"SIOCGIFALIAS",
-		"SIOCGIFALTMTU",
-		"SIOCGIFASYNCMAP",
-		"SIOCGIFBOND",
-		"SIOCGIFBR",
-		"SIOCGIFBRDADDR",
-		"SIOCGIFCAP",
-		"SIOCGIFCONF",
-		"SIOCGIFCOUNT",
-		"SIOCGIFDATA",
-		"SIOCGIFDESCR",
-		"SIOCGIFDEVMTU",
-		"SIOCGIFDLT",
-		"SIOCGIFDSTADDR",
-		"SIOCGIFENCAP",
-		"SIOCGIFFIB",
-		"SIOCGIFFLAGS",
-		"SIOCGIFGATTR",
-		"SIOCGIFGENERIC",
-		"SIOCGIFGMEMB",
-		"SIOCGIFGROUP",
-		"SIOCGIFHARDMTU",
-		"SIOCGIFHWADDR",
-		"SIOCGIFINDEX",
-		"SIOCGIFKPI",
-		"SIOCGIFMAC",
-		"SIOCGIFMAP",
-		"SIOCGIFMEDIA",
-		"SIOCGIFMEM",
-		"SIOCGIFMETRIC",
-		"SIOCGIFMTU",
-		"SIOCGIFNAME",
-		"SIOCGIFNETMASK",
-		"SIOCGIFPDSTADDR",
-		"SIOCGIFPFLAGS",
-		"SIOCGIFPHYS",
-		"SIOCGIFPRIORITY",
-		"SIOCGIFPSRCADDR",
-		"SIOCGIFRDOMAIN",
-		"SIOCGIFRTLABEL",
-		"SIOCGIFSLAVE",
-		"SIOCGIFSTATUS",
-		"SIOCGIFTIMESLOT",
-		"SIOCGIFTXQLEN",
-		"SIOCGIFVLAN",
-		"SIOCGIFWAKEFLAGS",
-		"SIOCGIFXFLAGS",
-		"SIOCGLIFADDR",
-		"SIOCGLIFPHYADDR",
-		"SIOCGLIFPHYRTABLE",
-		"SIOCGLIFPHYTTL",
-		"SIOCGLINKSTR",
-		"SIOCGLOWAT",
-		"SIOCGPGRP",
-		"SIOCGPRIVATE_0",
-		"SIOCGPRIVATE_1",
-		"SIOCGRARP",
-		"SIOCGSPPPPARAMS",
-		"SIOCGSTAMP",
-		"SIOCGSTAMPNS",
-		"SIOCGVH",
-		"SIOCGVNETID",
-		"SIOCIFCREATE",
-		"SIOCIFCREATE2",
-		"SIOCIFDESTROY",
-		"SIOCIFGCLONERS",
-		"SIOCINITIFADDR",
-		"SIOCPROTOPRIVATE",
-		"SIOCRSLVMULTI",
-		"SIOCRTMSG",
-		"SIOCSARP",
-		"SIOCSDRVSPEC",
-		"SIOCSETKALIVE",
-		"SIOCSETLABEL",
-		"SIOCSETPFLOW",
-		"SIOCSETPFSYNC",
-		"SIOCSETVLAN",
-		"SIOCSHIWAT",
-		"SIOCSIFADDR",
-		"SIOCSIFADDRPREF",
-		"SIOCSIFALTMTU",
-		"SIOCSIFASYNCMAP",
-		"SIOCSIFBOND",
-		"SIOCSIFBR",
-		"SIOCSIFBRDADDR",
-		"SIOCSIFCAP",
-		"SIOCSIFDESCR",
-		"SIOCSIFDSTADDR",
-		"SIOCSIFENCAP",
-		"SIOCSIFFIB",
-		"SIOCSIFFLAGS",
-		"SIOCSIFGATTR",
-		"SIOCSIFGENERIC",
-		"SIOCSIFHWADDR",
-		"SIOCSIFHWBROADCAST",
-		"SIOCSIFKPI",
-		"SIOCSIFLINK",
-		"SIOCSIFLLADDR",
-		"SIOCSIFMAC",
-		"SIOCSIFMAP",
-		"SIOCSIFMEDIA",
-		"SIOCSIFMEM",
-		"SIOCSIFMETRIC",
-		"SIOCSIFMTU",
-		"SIOCSIFNAME",
-		"SIOCSIFNETMASK",
-		"SIOCSIFPFLAGS",
-		"SIOCSIFPHYADDR",
-		"SIOCSIFPHYS",
-		"SIOCSIFPRIORITY",
-		"SIOCSIFRDOMAIN",
-		"SIOCSIFRTLABEL",
-		"SIOCSIFRVNET",
-		"SIOCSIFSLAVE",
-		"SIOCSIFTIMESLOT",
-		"SIOCSIFTXQLEN",
-		"SIOCSIFVLAN",
-		"SIOCSIFVNET",
-		"SIOCSIFXFLAGS",
-		"SIOCSLIFPHYADDR",
-		"SIOCSLIFPHYRTABLE",
-		"SIOCSLIFPHYTTL",
-		"SIOCSLINKSTR",
-		"SIOCSLOWAT",
-		"SIOCSPGRP",
-		"SIOCSRARP",
-		"SIOCSSPPPPARAMS",
-		"SIOCSVH",
-		"SIOCSVNETID",
-		"SIOCZIFDATA",
-		"SIO_GET_EXTENSION_FUNCTION_POINTER",
-		"SIO_GET_INTERFACE_LIST",
-		"SIO_KEEPALIVE_VALS",
-		"SIO_UDP_CONNRESET",
-		"SOCK_CLOEXEC",
-		"SOCK_DCCP",
-		"SOCK_DGRAM",
-		"SOCK_FLAGS_MASK",
-		"SOCK_MAXADDRLEN",
-		"SOCK_NONBLOCK",
-		"SOCK_NOSIGPIPE",
-		"SOCK_PACKET",
-		"SOCK_RAW",
-		"SOCK_RDM",
-		"SOCK_SEQPACKET",
-		"SOCK_STREAM",
-		"SOL_AAL",
-		"SOL_ATM",
-		"SOL_DECNET",
-		"SOL_ICMPV6",
-		"SOL_IP",
-		"SOL_IPV6",
-		"SOL_IRDA",
-		"SOL_PACKET",
-		"SOL_RAW",
-		"SOL_SOCKET",
-		"SOL_TCP",
-		"SOL_X25",
-		"SOMAXCONN",
-		"SO_ACCEPTCONN",
-		"SO_ACCEPTFILTER",
-		"SO_ATTACH_FILTER",
-		"SO_BINDANY",
-		"SO_BINDTODEVICE",
-		"SO_BINTIME",
-		"SO_BROADCAST",
-		"SO_BSDCOMPAT",
-		"SO_DEBUG",
-		"SO_DETACH_FILTER",
-		"SO_DOMAIN",
-		"SO_DONTROUTE",
-		"SO_DONTTRUNC",
-		"SO_ERROR",
-		"SO_KEEPALIVE",
-		"SO_LABEL",
-		"SO_LINGER",
-		"SO_LINGER_SEC",
-		"SO_LISTENINCQLEN",
-		"SO_LISTENQLEN",
-		"SO_LISTENQLIMIT",
-		"SO_MARK",
-		"SO_NETPROC",
-		"SO_NKE",
-		"SO_NOADDRERR",
-		"SO_NOHEADER",
-		"SO_NOSIGPIPE",
-		"SO_NOTIFYCONFLICT",
-		"SO_NO_CHECK",
-		"SO_NO_DDP",
-		"SO_NO_OFFLOAD",
-		"SO_NP_EXTENSIONS",
-		"SO_NREAD",
-		"SO_NUMRCVPKT",
-		"SO_NWRITE",
-		"SO_OOBINLINE",
-		"SO_OVERFLOWED",
-		"SO_PASSCRED",
-		"SO_PASSSEC",
-		"SO_PEERCRED",
-		"SO_PEERLABEL",
-		"SO_PEERNAME",
-		"SO_PEERSEC",
-		"SO_PRIORITY",
-		"SO_PROTOCOL",
-		"SO_PROTOTYPE",
-		"SO_RANDOMPORT",
-		"SO_RCVBUF",
-		"SO_RCVBUFFORCE",
-		"SO_RCVLOWAT",
-		"SO_RCVTIMEO",
-		"SO_RESTRICTIONS",
-		"SO_RESTRICT_DENYIN",
-		"SO_RESTRICT_DENYOUT",
-		"SO_RESTRICT_DENYSET",
-		"SO_REUSEADDR",
-		"SO_REUSEPORT",
-		"SO_REUSESHAREUID",
-		"SO_RTABLE",
-		"SO_RXQ_OVFL",
-		"SO_SECURITY_AUTHENTICATION",
-		"SO_SECURITY_ENCRYPTION_NETWORK",
-		"SO_SECURITY_ENCRYPTION_TRANSPORT",
-		"SO_SETFIB",
-		"SO_SNDBUF",
-		"SO_SNDBUFFORCE",
-		"SO_SNDLOWAT",
-		"SO_SNDTIMEO",
-		"SO_SPLICE",
-		"SO_TIMESTAMP",
-		"SO_TIMESTAMPING",
-		"SO_TIMESTAMPNS",
-		"SO_TIMESTAMP_MONOTONIC",
-		"SO_TYPE",
-		"SO_UPCALLCLOSEWAIT",
-		"SO_UPDATE_ACCEPT_CONTEXT",
-		"SO_UPDATE_CONNECT_CONTEXT",
-		"SO_USELOOPBACK",
-		"SO_USER_COOKIE",
-		"SO_VENDOR",
-		"SO_WANTMORE",
-		"SO_WANTOOBFLAG",
-		"SSLExtraCertChainPolicyPara",
-		"STANDARD_RIGHTS_ALL",
-		"STANDARD_RIGHTS_EXECUTE",
-		"STANDARD_RIGHTS_READ",
-		"STANDARD_RIGHTS_REQUIRED",
-		"STANDARD_RIGHTS_WRITE",
-		"STARTF_USESHOWWINDOW",
-		"STARTF_USESTDHANDLES",
-		"STD_ERROR_HANDLE",
-		"STD_INPUT_HANDLE",
-		"STD_OUTPUT_HANDLE",
-		"SUBLANG_ENGLISH_US",
-		"SW_FORCEMINIMIZE",
-		"SW_HIDE",
-		"SW_MAXIMIZE",
-		"SW_MINIMIZE",
-		"SW_NORMAL",
-		"SW_RESTORE",
-		"SW_SHOW",
-		"SW_SHOWDEFAULT",
-		"SW_SHOWMAXIMIZED",
-		"SW_SHOWMINIMIZED",
-		"SW_SHOWMINNOACTIVE",
-		"SW_SHOWNA",
-		"SW_SHOWNOACTIVATE",
-		"SW_SHOWNORMAL",
-		"SYMBOLIC_LINK_FLAG_DIRECTORY",
-		"SYNCHRONIZE",
-		"SYSCTL_VERSION",
-		"SYSCTL_VERS_0",
-		"SYSCTL_VERS_1",
-		"SYSCTL_VERS_MASK",
-		"SYS_ABORT2",
-		"SYS_ACCEPT",
-		"SYS_ACCEPT4",
-		"SYS_ACCEPT_NOCANCEL",
-		"SYS_ACCESS",
-		"SYS_ACCESS_EXTENDED",
-		"SYS_ACCT",
-		"SYS_ADD_KEY",
-		"SYS_ADD_PROFIL",
-		"SYS_ADJFREQ",
-		"SYS_ADJTIME",
-		"SYS_ADJTIMEX",
-		"SYS_AFS_SYSCALL",
-		"SYS_AIO_CANCEL",
-		"SYS_AIO_ERROR",
-		"SYS_AIO_FSYNC",
-		"SYS_AIO_MLOCK",
-		"SYS_AIO_READ",
-		"SYS_AIO_RETURN",
-		"SYS_AIO_SUSPEND",
-		"SYS_AIO_SUSPEND_NOCANCEL",
-		"SYS_AIO_WAITCOMPLETE",
-		"SYS_AIO_WRITE",
-		"SYS_ALARM",
-		"SYS_ARCH_PRCTL",
-		"SYS_ARM_FADVISE64_64",
-		"SYS_ARM_SYNC_FILE_RANGE",
-		"SYS_ATGETMSG",
-		"SYS_ATPGETREQ",
-		"SYS_ATPGETRSP",
-		"SYS_ATPSNDREQ",
-		"SYS_ATPSNDRSP",
-		"SYS_ATPUTMSG",
-		"SYS_ATSOCKET",
-		"SYS_AUDIT",
-		"SYS_AUDITCTL",
-		"SYS_AUDITON",
-		"SYS_AUDIT_SESSION_JOIN",
-		"SYS_AUDIT_SESSION_PORT",
-		"SYS_AUDIT_SESSION_SELF",
-		"SYS_BDFLUSH",
-		"SYS_BIND",
-		"SYS_BINDAT",
-		"SYS_BREAK",
-		"SYS_BRK",
-		"SYS_BSDTHREAD_CREATE",
-		"SYS_BSDTHREAD_REGISTER",
-		"SYS_BSDTHREAD_TERMINATE",
-		"SYS_CAPGET",
-		"SYS_CAPSET",
-		"SYS_CAP_ENTER",
-		"SYS_CAP_FCNTLS_GET",
-		"SYS_CAP_FCNTLS_LIMIT",
-		"SYS_CAP_GETMODE",
-		"SYS_CAP_GETRIGHTS",
-		"SYS_CAP_IOCTLS_GET",
-		"SYS_CAP_IOCTLS_LIMIT",
-		"SYS_CAP_NEW",
-		"SYS_CAP_RIGHTS_GET",
-		"SYS_CAP_RIGHTS_LIMIT",
-		"SYS_CHDIR",
-		"SYS_CHFLAGS",
-		"SYS_CHFLAGSAT",
-		"SYS_CHMOD",
-		"SYS_CHMOD_EXTENDED",
-		"SYS_CHOWN",
-		"SYS_CHOWN32",
-		"SYS_CHROOT",
-		"SYS_CHUD",
-		"SYS_CLOCK_ADJTIME",
-		"SYS_CLOCK_GETCPUCLOCKID2",
-		"SYS_CLOCK_GETRES",
-		"SYS_CLOCK_GETTIME",
-		"SYS_CLOCK_NANOSLEEP",
-		"SYS_CLOCK_SETTIME",
-		"SYS_CLONE",
-		"SYS_CLOSE",
-		"SYS_CLOSEFROM",
-		"SYS_CLOSE_NOCANCEL",
-		"SYS_CONNECT",
-		"SYS_CONNECTAT",
-		"SYS_CONNECT_NOCANCEL",
-		"SYS_COPYFILE",
-		"SYS_CPUSET",
-		"SYS_CPUSET_GETAFFINITY",
-		"SYS_CPUSET_GETID",
-		"SYS_CPUSET_SETAFFINITY",
-		"SYS_CPUSET_SETID",
-		"SYS_CREAT",
-		"SYS_CREATE_MODULE",
-		"SYS_CSOPS",
-		"SYS_CSOPS_AUDITTOKEN",
-		"SYS_DELETE",
-		"SYS_DELETE_MODULE",
-		"SYS_DUP",
-		"SYS_DUP2",
-		"SYS_DUP3",
-		"SYS_EACCESS",
-		"SYS_EPOLL_CREATE",
-		"SYS_EPOLL_CREATE1",
-		"SYS_EPOLL_CTL",
-		"SYS_EPOLL_CTL_OLD",
-		"SYS_EPOLL_PWAIT",
-		"SYS_EPOLL_WAIT",
-		"SYS_EPOLL_WAIT_OLD",
-		"SYS_EVENTFD",
-		"SYS_EVENTFD2",
-		"SYS_EXCHANGEDATA",
-		"SYS_EXECVE",
-		"SYS_EXIT",
-		"SYS_EXIT_GROUP",
-		"SYS_EXTATTRCTL",
-		"SYS_EXTATTR_DELETE_FD",
-		"SYS_EXTATTR_DELETE_FILE",
-		"SYS_EXTATTR_DELETE_LINK",
-		"SYS_EXTATTR_GET_FD",
-		"SYS_EXTATTR_GET_FILE",
-		"SYS_EXTATTR_GET_LINK",
-		"SYS_EXTATTR_LIST_FD",
-		"SYS_EXTATTR_LIST_FILE",
-		"SYS_EXTATTR_LIST_LINK",
-		"SYS_EXTATTR_SET_FD",
-		"SYS_EXTATTR_SET_FILE",
-		"SYS_EXTATTR_SET_LINK",
-		"SYS_FACCESSAT",
-		"SYS_FADVISE64",
-		"SYS_FADVISE64_64",
-		"SYS_FALLOCATE",
-		"SYS_FANOTIFY_INIT",
-		"SYS_FANOTIFY_MARK",
-		"SYS_FCHDIR",
-		"SYS_FCHFLAGS",
-		"SYS_FCHMOD",
-		"SYS_FCHMODAT",
-		"SYS_FCHMOD_EXTENDED",
-		"SYS_FCHOWN",
-		"SYS_FCHOWN32",
-		"SYS_FCHOWNAT",
-		"SYS_FCHROOT",
-		"SYS_FCNTL",
-		"SYS_FCNTL64",
-		"SYS_FCNTL_NOCANCEL",
-		"SYS_FDATASYNC",
-		"SYS_FEXECVE",
-		"SYS_FFCLOCK_GETCOUNTER",
-		"SYS_FFCLOCK_GETESTIMATE",
-		"SYS_FFCLOCK_SETESTIMATE",
-		"SYS_FFSCTL",
-		"SYS_FGETATTRLIST",
-		"SYS_FGETXATTR",
-		"SYS_FHOPEN",
-		"SYS_FHSTAT",
-		"SYS_FHSTATFS",
-		"SYS_FILEPORT_MAKEFD",
-		"SYS_FILEPORT_MAKEPORT",
-		"SYS_FKTRACE",
-		"SYS_FLISTXATTR",
-		"SYS_FLOCK",
-		"SYS_FORK",
-		"SYS_FPATHCONF",
-		"SYS_FREEBSD6_FTRUNCATE",
-		"SYS_FREEBSD6_LSEEK",
-		"SYS_FREEBSD6_MMAP",
-		"SYS_FREEBSD6_PREAD",
-		"SYS_FREEBSD6_PWRITE",
-		"SYS_FREEBSD6_TRUNCATE",
-		"SYS_FREMOVEXATTR",
-		"SYS_FSCTL",
-		"SYS_FSETATTRLIST",
-		"SYS_FSETXATTR",
-		"SYS_FSGETPATH",
-		"SYS_FSTAT",
-		"SYS_FSTAT64",
-		"SYS_FSTAT64_EXTENDED",
-		"SYS_FSTATAT",
-		"SYS_FSTATAT64",
-		"SYS_FSTATFS",
-		"SYS_FSTATFS64",
-		"SYS_FSTATV",
-		"SYS_FSTATVFS1",
-		"SYS_FSTAT_EXTENDED",
-		"SYS_FSYNC",
-		"SYS_FSYNC_NOCANCEL",
-		"SYS_FSYNC_RANGE",
-		"SYS_FTIME",
-		"SYS_FTRUNCATE",
-		"SYS_FTRUNCATE64",
-		"SYS_FUTEX",
-		"SYS_FUTIMENS",
-		"SYS_FUTIMES",
-		"SYS_FUTIMESAT",
-		"SYS_GETATTRLIST",
-		"SYS_GETAUDIT",
-		"SYS_GETAUDIT_ADDR",
-		"SYS_GETAUID",
-		"SYS_GETCONTEXT",
-		"SYS_GETCPU",
-		"SYS_GETCWD",
-		"SYS_GETDENTS",
-		"SYS_GETDENTS64",
-		"SYS_GETDIRENTRIES",
-		"SYS_GETDIRENTRIES64",
-		"SYS_GETDIRENTRIESATTR",
-		"SYS_GETDTABLECOUNT",
-		"SYS_GETDTABLESIZE",
-		"SYS_GETEGID",
-		"SYS_GETEGID32",
-		"SYS_GETEUID",
-		"SYS_GETEUID32",
-		"SYS_GETFH",
-		"SYS_GETFSSTAT",
-		"SYS_GETFSSTAT64",
-		"SYS_GETGID",
-		"SYS_GETGID32",
-		"SYS_GETGROUPS",
-		"SYS_GETGROUPS32",
-		"SYS_GETHOSTUUID",
-		"SYS_GETITIMER",
-		"SYS_GETLCID",
-		"SYS_GETLOGIN",
-		"SYS_GETLOGINCLASS",
-		"SYS_GETPEERNAME",
-		"SYS_GETPGID",
-		"SYS_GETPGRP",
-		"SYS_GETPID",
-		"SYS_GETPMSG",
-		"SYS_GETPPID",
-		"SYS_GETPRIORITY",
-		"SYS_GETRESGID",
-		"SYS_GETRESGID32",
-		"SYS_GETRESUID",
-		"SYS_GETRESUID32",
-		"SYS_GETRLIMIT",
-		"SYS_GETRTABLE",
-		"SYS_GETRUSAGE",
-		"SYS_GETSGROUPS",
-		"SYS_GETSID",
-		"SYS_GETSOCKNAME",
-		"SYS_GETSOCKOPT",
-		"SYS_GETTHRID",
-		"SYS_GETTID",
-		"SYS_GETTIMEOFDAY",
-		"SYS_GETUID",
-		"SYS_GETUID32",
-		"SYS_GETVFSSTAT",
-		"SYS_GETWGROUPS",
-		"SYS_GETXATTR",
-		"SYS_GET_KERNEL_SYMS",
-		"SYS_GET_MEMPOLICY",
-		"SYS_GET_ROBUST_LIST",
-		"SYS_GET_THREAD_AREA",
-		"SYS_GSSD_SYSCALL",
-		"SYS_GTTY",
-		"SYS_IDENTITYSVC",
-		"SYS_IDLE",
-		"SYS_INITGROUPS",
-		"SYS_INIT_MODULE",
-		"SYS_INOTIFY_ADD_WATCH",
-		"SYS_INOTIFY_INIT",
-		"SYS_INOTIFY_INIT1",
-		"SYS_INOTIFY_RM_WATCH",
-		"SYS_IOCTL",
-		"SYS_IOPERM",
-		"SYS_IOPL",
-		"SYS_IOPOLICYSYS",
-		"SYS_IOPRIO_GET",
-		"SYS_IOPRIO_SET",
-		"SYS_IO_CANCEL",
-		"SYS_IO_DESTROY",
-		"SYS_IO_GETEVENTS",
-		"SYS_IO_SETUP",
-		"SYS_IO_SUBMIT",
-		"SYS_IPC",
-		"SYS_ISSETUGID",
-		"SYS_JAIL",
-		"SYS_JAIL_ATTACH",
-		"SYS_JAIL_GET",
-		"SYS_JAIL_REMOVE",
-		"SYS_JAIL_SET",
-		"SYS_KAS_INFO",
-		"SYS_KDEBUG_TRACE",
-		"SYS_KENV",
-		"SYS_KEVENT",
-		"SYS_KEVENT64",
-		"SYS_KEXEC_LOAD",
-		"SYS_KEYCTL",
-		"SYS_KILL",
-		"SYS_KLDFIND",
-		"SYS_KLDFIRSTMOD",
-		"SYS_KLDLOAD",
-		"SYS_KLDNEXT",
-		"SYS_KLDSTAT",
-		"SYS_KLDSYM",
-		"SYS_KLDUNLOAD",
-		"SYS_KLDUNLOADF",
-		"SYS_KMQ_NOTIFY",
-		"SYS_KMQ_OPEN",
-		"SYS_KMQ_SETATTR",
-		"SYS_KMQ_TIMEDRECEIVE",
-		"SYS_KMQ_TIMEDSEND",
-		"SYS_KMQ_UNLINK",
-		"SYS_KQUEUE",
-		"SYS_KQUEUE1",
-		"SYS_KSEM_CLOSE",
-		"SYS_KSEM_DESTROY",
-		"SYS_KSEM_GETVALUE",
-		"SYS_KSEM_INIT",
-		"SYS_KSEM_OPEN",
-		"SYS_KSEM_POST",
-		"SYS_KSEM_TIMEDWAIT",
-		"SYS_KSEM_TRYWAIT",
-		"SYS_KSEM_UNLINK",
-		"SYS_KSEM_WAIT",
-		"SYS_KTIMER_CREATE",
-		"SYS_KTIMER_DELETE",
-		"SYS_KTIMER_GETOVERRUN",
-		"SYS_KTIMER_GETTIME",
-		"SYS_KTIMER_SETTIME",
-		"SYS_KTRACE",
-		"SYS_LCHFLAGS",
-		"SYS_LCHMOD",
-		"SYS_LCHOWN",
-		"SYS_LCHOWN32",
-		"SYS_LEDGER",
-		"SYS_LGETFH",
-		"SYS_LGETXATTR",
-		"SYS_LINK",
-		"SYS_LINKAT",
-		"SYS_LIO_LISTIO",
-		"SYS_LISTEN",
-		"SYS_LISTXATTR",
-		"SYS_LLISTXATTR",
-		"SYS_LOCK",
-		"SYS_LOOKUP_DCOOKIE",
-		"SYS_LPATHCONF",
-		"SYS_LREMOVEXATTR",
-		"SYS_LSEEK",
-		"SYS_LSETXATTR",
-		"SYS_LSTAT",
-		"SYS_LSTAT64",
-		"SYS_LSTAT64_EXTENDED",
-		"SYS_LSTATV",
-		"SYS_LSTAT_EXTENDED",
-		"SYS_LUTIMES",
-		"SYS_MAC_SYSCALL",
-		"SYS_MADVISE",
-		"SYS_MADVISE1",
-		"SYS_MAXSYSCALL",
-		"SYS_MBIND",
-		"SYS_MIGRATE_PAGES",
-		"SYS_MINCORE",
-		"SYS_MINHERIT",
-		"SYS_MKCOMPLEX",
-		"SYS_MKDIR",
-		"SYS_MKDIRAT",
-		"SYS_MKDIR_EXTENDED",
-		"SYS_MKFIFO",
-		"SYS_MKFIFOAT",
-		"SYS_MKFIFO_EXTENDED",
-		"SYS_MKNOD",
-		"SYS_MKNODAT",
-		"SYS_MLOCK",
-		"SYS_MLOCKALL",
-		"SYS_MMAP",
-		"SYS_MMAP2",
-		"SYS_MODCTL",
-		"SYS_MODFIND",
-		"SYS_MODFNEXT",
-		"SYS_MODIFY_LDT",
-		"SYS_MODNEXT",
-		"SYS_MODSTAT",
-		"SYS_MODWATCH",
-		"SYS_MOUNT",
-		"SYS_MOVE_PAGES",
-		"SYS_MPROTECT",
-		"SYS_MPX",
-		"SYS_MQUERY",
-		"SYS_MQ_GETSETATTR",
-		"SYS_MQ_NOTIFY",
-		"SYS_MQ_OPEN",
-		"SYS_MQ_TIMEDRECEIVE",
-		"SYS_MQ_TIMEDSEND",
-		"SYS_MQ_UNLINK",
-		"SYS_MREMAP",
-		"SYS_MSGCTL",
-		"SYS_MSGGET",
-		"SYS_MSGRCV",
-		"SYS_MSGRCV_NOCANCEL",
-		"SYS_MSGSND",
-		"SYS_MSGSND_NOCANCEL",
-		"SYS_MSGSYS",
-		"SYS_MSYNC",
-		"SYS_MSYNC_NOCANCEL",
-		"SYS_MUNLOCK",
-		"SYS_MUNLOCKALL",
-		"SYS_MUNMAP",
-		"SYS_NAME_TO_HANDLE_AT",
-		"SYS_NANOSLEEP",
-		"SYS_NEWFSTATAT",
-		"SYS_NFSCLNT",
-		"SYS_NFSSERVCTL",
-		"SYS_NFSSVC",
-		"SYS_NFSTAT",
-		"SYS_NICE",
-		"SYS_NLM_SYSCALL",
-		"SYS_NLSTAT",
-		"SYS_NMOUNT",
-		"SYS_NSTAT",
-		"SYS_NTP_ADJTIME",
-		"SYS_NTP_GETTIME",
-		"SYS_NUMA_GETAFFINITY",
-		"SYS_NUMA_SETAFFINITY",
-		"SYS_OABI_SYSCALL_BASE",
-		"SYS_OBREAK",
-		"SYS_OLDFSTAT",
-		"SYS_OLDLSTAT",
-		"SYS_OLDOLDUNAME",
-		"SYS_OLDSTAT",
-		"SYS_OLDUNAME",
-		"SYS_OPEN",
-		"SYS_OPENAT",
-		"SYS_OPENBSD_POLL",
-		"SYS_OPEN_BY_HANDLE_AT",
-		"SYS_OPEN_DPROTECTED_NP",
-		"SYS_OPEN_EXTENDED",
-		"SYS_OPEN_NOCANCEL",
-		"SYS_OVADVISE",
-		"SYS_PACCEPT",
-		"SYS_PATHCONF",
-		"SYS_PAUSE",
-		"SYS_PCICONFIG_IOBASE",
-		"SYS_PCICONFIG_READ",
-		"SYS_PCICONFIG_WRITE",
-		"SYS_PDFORK",
-		"SYS_PDGETPID",
-		"SYS_PDKILL",
-		"SYS_PERF_EVENT_OPEN",
-		"SYS_PERSONALITY",
-		"SYS_PID_HIBERNATE",
-		"SYS_PID_RESUME",
-		"SYS_PID_SHUTDOWN_SOCKETS",
-		"SYS_PID_SUSPEND",
-		"SYS_PIPE",
-		"SYS_PIPE2",
-		"SYS_PIVOT_ROOT",
-		"SYS_PMC_CONTROL",
-		"SYS_PMC_GET_INFO",
-		"SYS_POLL",
-		"SYS_POLLTS",
-		"SYS_POLL_NOCANCEL",
-		"SYS_POSIX_FADVISE",
-		"SYS_POSIX_FALLOCATE",
-		"SYS_POSIX_OPENPT",
-		"SYS_POSIX_SPAWN",
-		"SYS_PPOLL",
-		"SYS_PRCTL",
-		"SYS_PREAD",
-		"SYS_PREAD64",
-		"SYS_PREADV",
-		"SYS_PREAD_NOCANCEL",
-		"SYS_PRLIMIT64",
-		"SYS_PROCCTL",
-		"SYS_PROCESS_POLICY",
-		"SYS_PROCESS_VM_READV",
-		"SYS_PROCESS_VM_WRITEV",
-		"SYS_PROC_INFO",
-		"SYS_PROF",
-		"SYS_PROFIL",
-		"SYS_PSELECT",
-		"SYS_PSELECT6",
-		"SYS_PSET_ASSIGN",
-		"SYS_PSET_CREATE",
-		"SYS_PSET_DESTROY",
-		"SYS_PSYNCH_CVBROAD",
-		"SYS_PSYNCH_CVCLRPREPOST",
-		"SYS_PSYNCH_CVSIGNAL",
-		"SYS_PSYNCH_CVWAIT",
-		"SYS_PSYNCH_MUTEXDROP",
-		"SYS_PSYNCH_MUTEXWAIT",
-		"SYS_PSYNCH_RW_DOWNGRADE",
-		"SYS_PSYNCH_RW_LONGRDLOCK",
-		"SYS_PSYNCH_RW_RDLOCK",
-		"SYS_PSYNCH_RW_UNLOCK",
-		"SYS_PSYNCH_RW_UNLOCK2",
-		"SYS_PSYNCH_RW_UPGRADE",
-		"SYS_PSYNCH_RW_WRLOCK",
-		"SYS_PSYNCH_RW_YIELDWRLOCK",
-		"SYS_PTRACE",
-		"SYS_PUTPMSG",
-		"SYS_PWRITE",
-		"SYS_PWRITE64",
-		"SYS_PWRITEV",
-		"SYS_PWRITE_NOCANCEL",
-		"SYS_QUERY_MODULE",
-		"SYS_QUOTACTL",
-		"SYS_RASCTL",
-		"SYS_RCTL_ADD_RULE",
-		"SYS_RCTL_GET_LIMITS",
-		"SYS_RCTL_GET_RACCT",
-		"SYS_RCTL_GET_RULES",
-		"SYS_RCTL_REMOVE_RULE",
-		"SYS_READ",
-		"SYS_READAHEAD",
-		"SYS_READDIR",
-		"SYS_READLINK",
-		"SYS_READLINKAT",
-		"SYS_READV",
-		"SYS_READV_NOCANCEL",
-		"SYS_READ_NOCANCEL",
-		"SYS_REBOOT",
-		"SYS_RECV",
-		"SYS_RECVFROM",
-		"SYS_RECVFROM_NOCANCEL",
-		"SYS_RECVMMSG",
-		"SYS_RECVMSG",
-		"SYS_RECVMSG_NOCANCEL",
-		"SYS_REMAP_FILE_PAGES",
-		"SYS_REMOVEXATTR",
-		"SYS_RENAME",
-		"SYS_RENAMEAT",
-		"SYS_REQUEST_KEY",
-		"SYS_RESTART_SYSCALL",
-		"SYS_REVOKE",
-		"SYS_RFORK",
-		"SYS_RMDIR",
-		"SYS_RTPRIO",
-		"SYS_RTPRIO_THREAD",
-		"SYS_RT_SIGACTION",
-		"SYS_RT_SIGPENDING",
-		"SYS_RT_SIGPROCMASK",
-		"SYS_RT_SIGQUEUEINFO",
-		"SYS_RT_SIGRETURN",
-		"SYS_RT_SIGSUSPEND",
-		"SYS_RT_SIGTIMEDWAIT",
-		"SYS_RT_TGSIGQUEUEINFO",
-		"SYS_SBRK",
-		"SYS_SCHED_GETAFFINITY",
-		"SYS_SCHED_GETPARAM",
-		"SYS_SCHED_GETSCHEDULER",
-		"SYS_SCHED_GET_PRIORITY_MAX",
-		"SYS_SCHED_GET_PRIORITY_MIN",
-		"SYS_SCHED_RR_GET_INTERVAL",
-		"SYS_SCHED_SETAFFINITY",
-		"SYS_SCHED_SETPARAM",
-		"SYS_SCHED_SETSCHEDULER",
-		"SYS_SCHED_YIELD",
-		"SYS_SCTP_GENERIC_RECVMSG",
-		"SYS_SCTP_GENERIC_SENDMSG",
-		"SYS_SCTP_GENERIC_SENDMSG_IOV",
-		"SYS_SCTP_PEELOFF",
-		"SYS_SEARCHFS",
-		"SYS_SECURITY",
-		"SYS_SELECT",
-		"SYS_SELECT_NOCANCEL",
-		"SYS_SEMCONFIG",
-		"SYS_SEMCTL",
-		"SYS_SEMGET",
-		"SYS_SEMOP",
-		"SYS_SEMSYS",
-		"SYS_SEMTIMEDOP",
-		"SYS_SEM_CLOSE",
-		"SYS_SEM_DESTROY",
-		"SYS_SEM_GETVALUE",
-		"SYS_SEM_INIT",
-		"SYS_SEM_OPEN",
-		"SYS_SEM_POST",
-		"SYS_SEM_TRYWAIT",
-		"SYS_SEM_UNLINK",
-		"SYS_SEM_WAIT",
-		"SYS_SEM_WAIT_NOCANCEL",
-		"SYS_SEND",
-		"SYS_SENDFILE",
-		"SYS_SENDFILE64",
-		"SYS_SENDMMSG",
-		"SYS_SENDMSG",
-		"SYS_SENDMSG_NOCANCEL",
-		"SYS_SENDTO",
-		"SYS_SENDTO_NOCANCEL",
-		"SYS_SETATTRLIST",
-		"SYS_SETAUDIT",
-		"SYS_SETAUDIT_ADDR",
-		"SYS_SETAUID",
-		"SYS_SETCONTEXT",
-		"SYS_SETDOMAINNAME",
-		"SYS_SETEGID",
-		"SYS_SETEUID",
-		"SYS_SETFIB",
-		"SYS_SETFSGID",
-		"SYS_SETFSGID32",
-		"SYS_SETFSUID",
-		"SYS_SETFSUID32",
-		"SYS_SETGID",
-		"SYS_SETGID32",
-		"SYS_SETGROUPS",
-		"SYS_SETGROUPS32",
-		"SYS_SETHOSTNAME",
-		"SYS_SETITIMER",
-		"SYS_SETLCID",
-		"SYS_SETLOGIN",
-		"SYS_SETLOGINCLASS",
-		"SYS_SETNS",
-		"SYS_SETPGID",
-		"SYS_SETPRIORITY",
-		"SYS_SETPRIVEXEC",
-		"SYS_SETREGID",
-		"SYS_SETREGID32",
-		"SYS_SETRESGID",
-		"SYS_SETRESGID32",
-		"SYS_SETRESUID",
-		"SYS_SETRESUID32",
-		"SYS_SETREUID",
-		"SYS_SETREUID32",
-		"SYS_SETRLIMIT",
-		"SYS_SETRTABLE",
-		"SYS_SETSGROUPS",
-		"SYS_SETSID",
-		"SYS_SETSOCKOPT",
-		"SYS_SETTID",
-		"SYS_SETTID_WITH_PID",
-		"SYS_SETTIMEOFDAY",
-		"SYS_SETUID",
-		"SYS_SETUID32",
-		"SYS_SETWGROUPS",
-		"SYS_SETXATTR",
-		"SYS_SET_MEMPOLICY",
-		"SYS_SET_ROBUST_LIST",
-		"SYS_SET_THREAD_AREA",
-		"SYS_SET_TID_ADDRESS",
-		"SYS_SGETMASK",
-		"SYS_SHARED_REGION_CHECK_NP",
-		"SYS_SHARED_REGION_MAP_AND_SLIDE_NP",
-		"SYS_SHMAT",
-		"SYS_SHMCTL",
-		"SYS_SHMDT",
-		"SYS_SHMGET",
-		"SYS_SHMSYS",
-		"SYS_SHM_OPEN",
-		"SYS_SHM_UNLINK",
-		"SYS_SHUTDOWN",
-		"SYS_SIGACTION",
-		"SYS_SIGALTSTACK",
-		"SYS_SIGNAL",
-		"SYS_SIGNALFD",
-		"SYS_SIGNALFD4",
-		"SYS_SIGPENDING",
-		"SYS_SIGPROCMASK",
-		"SYS_SIGQUEUE",
-		"SYS_SIGQUEUEINFO",
-		"SYS_SIGRETURN",
-		"SYS_SIGSUSPEND",
-		"SYS_SIGSUSPEND_NOCANCEL",
-		"SYS_SIGTIMEDWAIT",
-		"SYS_SIGWAIT",
-		"SYS_SIGWAITINFO",
-		"SYS_SOCKET",
-		"SYS_SOCKETCALL",
-		"SYS_SOCKETPAIR",
-		"SYS_SPLICE",
-		"SYS_SSETMASK",
-		"SYS_SSTK",
-		"SYS_STACK_SNAPSHOT",
-		"SYS_STAT",
-		"SYS_STAT64",
-		"SYS_STAT64_EXTENDED",
-		"SYS_STATFS",
-		"SYS_STATFS64",
-		"SYS_STATV",
-		"SYS_STATVFS1",
-		"SYS_STAT_EXTENDED",
-		"SYS_STIME",
-		"SYS_STTY",
-		"SYS_SWAPCONTEXT",
-		"SYS_SWAPCTL",
-		"SYS_SWAPOFF",
-		"SYS_SWAPON",
-		"SYS_SYMLINK",
-		"SYS_SYMLINKAT",
-		"SYS_SYNC",
-		"SYS_SYNCFS",
-		"SYS_SYNC_FILE_RANGE",
-		"SYS_SYSARCH",
-		"SYS_SYSCALL",
-		"SYS_SYSCALL_BASE",
-		"SYS_SYSFS",
-		"SYS_SYSINFO",
-		"SYS_SYSLOG",
-		"SYS_TEE",
-		"SYS_TGKILL",
-		"SYS_THREAD_SELFID",
-		"SYS_THR_CREATE",
-		"SYS_THR_EXIT",
-		"SYS_THR_KILL",
-		"SYS_THR_KILL2",
-		"SYS_THR_NEW",
-		"SYS_THR_SELF",
-		"SYS_THR_SET_NAME",
-		"SYS_THR_SUSPEND",
-		"SYS_THR_WAKE",
-		"SYS_TIME",
-		"SYS_TIMERFD_CREATE",
-		"SYS_TIMERFD_GETTIME",
-		"SYS_TIMERFD_SETTIME",
-		"SYS_TIMER_CREATE",
-		"SYS_TIMER_DELETE",
-		"SYS_TIMER_GETOVERRUN",
-		"SYS_TIMER_GETTIME",
-		"SYS_TIMER_SETTIME",
-		"SYS_TIMES",
-		"SYS_TKILL",
-		"SYS_TRUNCATE",
-		"SYS_TRUNCATE64",
-		"SYS_TUXCALL",
-		"SYS_UGETRLIMIT",
-		"SYS_ULIMIT",
-		"SYS_UMASK",
-		"SYS_UMASK_EXTENDED",
-		"SYS_UMOUNT",
-		"SYS_UMOUNT2",
-		"SYS_UNAME",
-		"SYS_UNDELETE",
-		"SYS_UNLINK",
-		"SYS_UNLINKAT",
-		"SYS_UNMOUNT",
-		"SYS_UNSHARE",
-		"SYS_USELIB",
-		"SYS_USTAT",
-		"SYS_UTIME",
-		"SYS_UTIMENSAT",
-		"SYS_UTIMES",
-		"SYS_UTRACE",
-		"SYS_UUIDGEN",
-		"SYS_VADVISE",
-		"SYS_VFORK",
-		"SYS_VHANGUP",
-		"SYS_VM86",
-		"SYS_VM86OLD",
-		"SYS_VMSPLICE",
-		"SYS_VM_PRESSURE_MONITOR",
-		"SYS_VSERVER",
-		"SYS_WAIT4",
-		"SYS_WAIT4_NOCANCEL",
-		"SYS_WAIT6",
-		"SYS_WAITEVENT",
-		"SYS_WAITID",
-		"SYS_WAITID_NOCANCEL",
-		"SYS_WAITPID",
-		"SYS_WATCHEVENT",
-		"SYS_WORKQ_KERNRETURN",
-		"SYS_WORKQ_OPEN",
-		"SYS_WRITE",
-		"SYS_WRITEV",
-		"SYS_WRITEV_NOCANCEL",
-		"SYS_WRITE_NOCANCEL",
-		"SYS_YIELD",
-		"SYS__LLSEEK",
-		"SYS__LWP_CONTINUE",
-		"SYS__LWP_CREATE",
-		"SYS__LWP_CTL",
-		"SYS__LWP_DETACH",
-		"SYS__LWP_EXIT",
-		"SYS__LWP_GETNAME",
-		"SYS__LWP_GETPRIVATE",
-		"SYS__LWP_KILL",
-		"SYS__LWP_PARK",
-		"SYS__LWP_SELF",
-		"SYS__LWP_SETNAME",
-		"SYS__LWP_SETPRIVATE",
-		"SYS__LWP_SUSPEND",
-		"SYS__LWP_UNPARK",
-		"SYS__LWP_UNPARK_ALL",
-		"SYS__LWP_WAIT",
-		"SYS__LWP_WAKEUP",
-		"SYS__NEWSELECT",
-		"SYS__PSET_BIND",
-		"SYS__SCHED_GETAFFINITY",
-		"SYS__SCHED_GETPARAM",
-		"SYS__SCHED_SETAFFINITY",
-		"SYS__SCHED_SETPARAM",
-		"SYS__SYSCTL",
-		"SYS__UMTX_LOCK",
-		"SYS__UMTX_OP",
-		"SYS__UMTX_UNLOCK",
-		"SYS___ACL_ACLCHECK_FD",
-		"SYS___ACL_ACLCHECK_FILE",
-		"SYS___ACL_ACLCHECK_LINK",
-		"SYS___ACL_DELETE_FD",
-		"SYS___ACL_DELETE_FILE",
-		"SYS___ACL_DELETE_LINK",
-		"SYS___ACL_GET_FD",
-		"SYS___ACL_GET_FILE",
-		"SYS___ACL_GET_LINK",
-		"SYS___ACL_SET_FD",
-		"SYS___ACL_SET_FILE",
-		"SYS___ACL_SET_LINK",
-		"SYS___CAP_RIGHTS_GET",
-		"SYS___CLONE",
-		"SYS___DISABLE_THREADSIGNAL",
-		"SYS___GETCWD",
-		"SYS___GETLOGIN",
-		"SYS___GET_TCB",
-		"SYS___MAC_EXECVE",
-		"SYS___MAC_GETFSSTAT",
-		"SYS___MAC_GET_FD",
-		"SYS___MAC_GET_FILE",
-		"SYS___MAC_GET_LCID",
-		"SYS___MAC_GET_LCTX",
-		"SYS___MAC_GET_LINK",
-		"SYS___MAC_GET_MOUNT",
-		"SYS___MAC_GET_PID",
-		"SYS___MAC_GET_PROC",
-		"SYS___MAC_MOUNT",
-		"SYS___MAC_SET_FD",
-		"SYS___MAC_SET_FILE",
-		"SYS___MAC_SET_LCTX",
-		"SYS___MAC_SET_LINK",
-		"SYS___MAC_SET_PROC",
-		"SYS___MAC_SYSCALL",
-		"SYS___OLD_SEMWAIT_SIGNAL",
-		"SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL",
-		"SYS___POSIX_CHOWN",
-		"SYS___POSIX_FCHOWN",
-		"SYS___POSIX_LCHOWN",
-		"SYS___POSIX_RENAME",
-		"SYS___PTHREAD_CANCELED",
-		"SYS___PTHREAD_CHDIR",
-		"SYS___PTHREAD_FCHDIR",
-		"SYS___PTHREAD_KILL",
-		"SYS___PTHREAD_MARKCANCEL",
-		"SYS___PTHREAD_SIGMASK",
-		"SYS___QUOTACTL",
-		"SYS___SEMCTL",
-		"SYS___SEMWAIT_SIGNAL",
-		"SYS___SEMWAIT_SIGNAL_NOCANCEL",
-		"SYS___SETLOGIN",
-		"SYS___SETUGID",
-		"SYS___SET_TCB",
-		"SYS___SIGACTION_SIGTRAMP",
-		"SYS___SIGTIMEDWAIT",
-		"SYS___SIGWAIT",
-		"SYS___SIGWAIT_NOCANCEL",
-		"SYS___SYSCTL",
-		"SYS___TFORK",
-		"SYS___THREXIT",
-		"SYS___THRSIGDIVERT",
-		"SYS___THRSLEEP",
-		"SYS___THRWAKEUP",
-		"S_ARCH1",
-		"S_ARCH2",
-		"S_BLKSIZE",
-		"S_IEXEC",
-		"S_IFBLK",
-		"S_IFCHR",
-		"S_IFDIR",
-		"S_IFIFO",
-		"S_IFLNK",
-		"S_IFMT",
-		"S_IFREG",
-		"S_IFSOCK",
-		"S_IFWHT",
-		"S_IREAD",
-		"S_IRGRP",
-		"S_IROTH",
-		"S_IRUSR",
-		"S_IRWXG",
-		"S_IRWXO",
-		"S_IRWXU",
-		"S_ISGID",
-		"S_ISTXT",
-		"S_ISUID",
-		"S_ISVTX",
-		"S_IWGRP",
-		"S_IWOTH",
-		"S_IWRITE",
-		"S_IWUSR",
-		"S_IXGRP",
-		"S_IXOTH",
-		"S_IXUSR",
-		"S_LOGIN_SET",
-		"SecurityAttributes",
-		"Seek",
-		"Select",
-		"Sendfile",
-		"Sendmsg",
-		"SendmsgN",
-		"Sendto",
-		"Servent",
-		"SetBpf",
-		"SetBpfBuflen",
-		"SetBpfDatalink",
-		"SetBpfHeadercmpl",
-		"SetBpfImmediate",
-		"SetBpfInterface",
-		"SetBpfPromisc",
-		"SetBpfTimeout",
-		"SetCurrentDirectory",
-		"SetEndOfFile",
-		"SetEnvironmentVariable",
-		"SetFileAttributes",
-		"SetFileCompletionNotificationModes",
-		"SetFilePointer",
-		"SetFileTime",
-		"SetHandleInformation",
-		"SetKevent",
-		"SetLsfPromisc",
-		"SetNonblock",
-		"Setdomainname",
-		"Setegid",
-		"Setenv",
-		"Seteuid",
-		"Setfsgid",
-		"Setfsuid",
-		"Setgid",
-		"Setgroups",
-		"Sethostname",
-		"Setlogin",
-		"Setpgid",
-		"Setpriority",
-		"Setprivexec",
-		"Setregid",
-		"Setresgid",
-		"Setresuid",
-		"Setreuid",
-		"Setrlimit",
-		"Setsid",
-		"Setsockopt",
-		"SetsockoptByte",
-		"SetsockoptICMPv6Filter",
-		"SetsockoptIPMreq",
-		"SetsockoptIPMreqn",
-		"SetsockoptIPv6Mreq",
-		"SetsockoptInet4Addr",
-		"SetsockoptInt",
-		"SetsockoptLinger",
-		"SetsockoptString",
-		"SetsockoptTimeval",
-		"Settimeofday",
-		"Setuid",
-		"Setxattr",
-		"Shutdown",
-		"SidTypeAlias",
-		"SidTypeComputer",
-		"SidTypeDeletedAccount",
-		"SidTypeDomain",
-		"SidTypeGroup",
-		"SidTypeInvalid",
-		"SidTypeLabel",
-		"SidTypeUnknown",
-		"SidTypeUser",
-		"SidTypeWellKnownGroup",
-		"Signal",
-		"SizeofBpfHdr",
-		"SizeofBpfInsn",
-		"SizeofBpfProgram",
-		"SizeofBpfStat",
-		"SizeofBpfVersion",
-		"SizeofBpfZbuf",
-		"SizeofBpfZbufHeader",
-		"SizeofCmsghdr",
-		"SizeofICMPv6Filter",
-		"SizeofIPMreq",
-		"SizeofIPMreqn",
-		"SizeofIPv6MTUInfo",
-		"SizeofIPv6Mreq",
-		"SizeofIfAddrmsg",
-		"SizeofIfAnnounceMsghdr",
-		"SizeofIfData",
-		"SizeofIfInfomsg",
-		"SizeofIfMsghdr",
-		"SizeofIfaMsghdr",
-		"SizeofIfmaMsghdr",
-		"SizeofIfmaMsghdr2",
-		"SizeofInet4Pktinfo",
-		"SizeofInet6Pktinfo",
-		"SizeofInotifyEvent",
-		"SizeofLinger",
-		"SizeofMsghdr",
-		"SizeofNlAttr",
-		"SizeofNlMsgerr",
-		"SizeofNlMsghdr",
-		"SizeofRtAttr",
-		"SizeofRtGenmsg",
-		"SizeofRtMetrics",
-		"SizeofRtMsg",
-		"SizeofRtMsghdr",
-		"SizeofRtNexthop",
-		"SizeofSockFilter",
-		"SizeofSockFprog",
-		"SizeofSockaddrAny",
-		"SizeofSockaddrDatalink",
-		"SizeofSockaddrInet4",
-		"SizeofSockaddrInet6",
-		"SizeofSockaddrLinklayer",
-		"SizeofSockaddrNetlink",
-		"SizeofSockaddrUnix",
-		"SizeofTCPInfo",
-		"SizeofUcred",
-		"SlicePtrFromStrings",
-		"SockFilter",
-		"SockFprog",
-		"Sockaddr",
-		"SockaddrDatalink",
-		"SockaddrGen",
-		"SockaddrInet4",
-		"SockaddrInet6",
-		"SockaddrLinklayer",
-		"SockaddrNetlink",
-		"SockaddrUnix",
-		"Socket",
-		"SocketControlMessage",
-		"SocketDisableIPv6",
-		"Socketpair",
-		"Splice",
-		"StartProcess",
-		"StartupInfo",
-		"Stat",
-		"Stat_t",
-		"Statfs",
-		"Statfs_t",
-		"Stderr",
-		"Stdin",
-		"Stdout",
-		"StringBytePtr",
-		"StringByteSlice",
-		"StringSlicePtr",
-		"StringToSid",
-		"StringToUTF16",
-		"StringToUTF16Ptr",
-		"Symlink",
-		"Sync",
-		"SyncFileRange",
-		"SysProcAttr",
-		"SysProcIDMap",
-		"Syscall",
-		"Syscall12",
-		"Syscall15",
-		"Syscall18",
-		"Syscall6",
-		"Syscall9",
-		"SyscallN",
-		"Sysctl",
-		"SysctlUint32",
-		"Sysctlnode",
-		"Sysinfo",
-		"Sysinfo_t",
-		"Systemtime",
-		"TCGETS",
-		"TCIFLUSH",
-		"TCIOFLUSH",
-		"TCOFLUSH",
-		"TCPInfo",
-		"TCPKeepalive",
-		"TCP_CA_NAME_MAX",
-		"TCP_CONGCTL",
-		"TCP_CONGESTION",
-		"TCP_CONNECTIONTIMEOUT",
-		"TCP_CORK",
-		"TCP_DEFER_ACCEPT",
-		"TCP_ENABLE_ECN",
-		"TCP_INFO",
-		"TCP_KEEPALIVE",
-		"TCP_KEEPCNT",
-		"TCP_KEEPIDLE",
-		"TCP_KEEPINIT",
-		"TCP_KEEPINTVL",
-		"TCP_LINGER2",
-		"TCP_MAXBURST",
-		"TCP_MAXHLEN",
-		"TCP_MAXOLEN",
-		"TCP_MAXSEG",
-		"TCP_MAXWIN",
-		"TCP_MAX_SACK",
-		"TCP_MAX_WINSHIFT",
-		"TCP_MD5SIG",
-		"TCP_MD5SIG_MAXKEYLEN",
-		"TCP_MINMSS",
-		"TCP_MINMSSOVERLOAD",
-		"TCP_MSS",
-		"TCP_NODELAY",
-		"TCP_NOOPT",
-		"TCP_NOPUSH",
-		"TCP_NOTSENT_LOWAT",
-		"TCP_NSTATES",
-		"TCP_QUICKACK",
-		"TCP_RXT_CONNDROPTIME",
-		"TCP_RXT_FINDROP",
-		"TCP_SACK_ENABLE",
-		"TCP_SENDMOREACKS",
-		"TCP_SYNCNT",
-		"TCP_VENDOR",
-		"TCP_WINDOW_CLAMP",
-		"TCSAFLUSH",
-		"TCSETS",
-		"TF_DISCONNECT",
-		"TF_REUSE_SOCKET",
-		"TF_USE_DEFAULT_WORKER",
-		"TF_USE_KERNEL_APC",
-		"TF_USE_SYSTEM_THREAD",
-		"TF_WRITE_BEHIND",
-		"TH32CS_INHERIT",
-		"TH32CS_SNAPALL",
-		"TH32CS_SNAPHEAPLIST",
-		"TH32CS_SNAPMODULE",
-		"TH32CS_SNAPMODULE32",
-		"TH32CS_SNAPPROCESS",
-		"TH32CS_SNAPTHREAD",
-		"TIME_ZONE_ID_DAYLIGHT",
-		"TIME_ZONE_ID_STANDARD",
-		"TIME_ZONE_ID_UNKNOWN",
-		"TIOCCBRK",
-		"TIOCCDTR",
-		"TIOCCONS",
-		"TIOCDCDTIMESTAMP",
-		"TIOCDRAIN",
-		"TIOCDSIMICROCODE",
-		"TIOCEXCL",
-		"TIOCEXT",
-		"TIOCFLAG_CDTRCTS",
-		"TIOCFLAG_CLOCAL",
-		"TIOCFLAG_CRTSCTS",
-		"TIOCFLAG_MDMBUF",
-		"TIOCFLAG_PPS",
-		"TIOCFLAG_SOFTCAR",
-		"TIOCFLUSH",
-		"TIOCGDEV",
-		"TIOCGDRAINWAIT",
-		"TIOCGETA",
-		"TIOCGETD",
-		"TIOCGFLAGS",
-		"TIOCGICOUNT",
-		"TIOCGLCKTRMIOS",
-		"TIOCGLINED",
-		"TIOCGPGRP",
-		"TIOCGPTN",
-		"TIOCGQSIZE",
-		"TIOCGRANTPT",
-		"TIOCGRS485",
-		"TIOCGSERIAL",
-		"TIOCGSID",
-		"TIOCGSIZE",
-		"TIOCGSOFTCAR",
-		"TIOCGTSTAMP",
-		"TIOCGWINSZ",
-		"TIOCINQ",
-		"TIOCIXOFF",
-		"TIOCIXON",
-		"TIOCLINUX",
-		"TIOCMBIC",
-		"TIOCMBIS",
-		"TIOCMGDTRWAIT",
-		"TIOCMGET",
-		"TIOCMIWAIT",
-		"TIOCMODG",
-		"TIOCMODS",
-		"TIOCMSDTRWAIT",
-		"TIOCMSET",
-		"TIOCM_CAR",
-		"TIOCM_CD",
-		"TIOCM_CTS",
-		"TIOCM_DCD",
-		"TIOCM_DSR",
-		"TIOCM_DTR",
-		"TIOCM_LE",
-		"TIOCM_RI",
-		"TIOCM_RNG",
-		"TIOCM_RTS",
-		"TIOCM_SR",
-		"TIOCM_ST",
-		"TIOCNOTTY",
-		"TIOCNXCL",
-		"TIOCOUTQ",
-		"TIOCPKT",
-		"TIOCPKT_DATA",
-		"TIOCPKT_DOSTOP",
-		"TIOCPKT_FLUSHREAD",
-		"TIOCPKT_FLUSHWRITE",
-		"TIOCPKT_IOCTL",
-		"TIOCPKT_NOSTOP",
-		"TIOCPKT_START",
-		"TIOCPKT_STOP",
-		"TIOCPTMASTER",
-		"TIOCPTMGET",
-		"TIOCPTSNAME",
-		"TIOCPTYGNAME",
-		"TIOCPTYGRANT",
-		"TIOCPTYUNLK",
-		"TIOCRCVFRAME",
-		"TIOCREMOTE",
-		"TIOCSBRK",
-		"TIOCSCONS",
-		"TIOCSCTTY",
-		"TIOCSDRAINWAIT",
-		"TIOCSDTR",
-		"TIOCSERCONFIG",
-		"TIOCSERGETLSR",
-		"TIOCSERGETMULTI",
-		"TIOCSERGSTRUCT",
-		"TIOCSERGWILD",
-		"TIOCSERSETMULTI",
-		"TIOCSERSWILD",
-		"TIOCSER_TEMT",
-		"TIOCSETA",
-		"TIOCSETAF",
-		"TIOCSETAW",
-		"TIOCSETD",
-		"TIOCSFLAGS",
-		"TIOCSIG",
-		"TIOCSLCKTRMIOS",
-		"TIOCSLINED",
-		"TIOCSPGRP",
-		"TIOCSPTLCK",
-		"TIOCSQSIZE",
-		"TIOCSRS485",
-		"TIOCSSERIAL",
-		"TIOCSSIZE",
-		"TIOCSSOFTCAR",
-		"TIOCSTART",
-		"TIOCSTAT",
-		"TIOCSTI",
-		"TIOCSTOP",
-		"TIOCSTSTAMP",
-		"TIOCSWINSZ",
-		"TIOCTIMESTAMP",
-		"TIOCUCNTL",
-		"TIOCVHANGUP",
-		"TIOCXMTFRAME",
-		"TOKEN_ADJUST_DEFAULT",
-		"TOKEN_ADJUST_GROUPS",
-		"TOKEN_ADJUST_PRIVILEGES",
-		"TOKEN_ADJUST_SESSIONID",
-		"TOKEN_ALL_ACCESS",
-		"TOKEN_ASSIGN_PRIMARY",
-		"TOKEN_DUPLICATE",
-		"TOKEN_EXECUTE",
-		"TOKEN_IMPERSONATE",
-		"TOKEN_QUERY",
-		"TOKEN_QUERY_SOURCE",
-		"TOKEN_READ",
-		"TOKEN_WRITE",
-		"TOSTOP",
-		"TRUNCATE_EXISTING",
-		"TUNATTACHFILTER",
-		"TUNDETACHFILTER",
-		"TUNGETFEATURES",
-		"TUNGETIFF",
-		"TUNGETSNDBUF",
-		"TUNGETVNETHDRSZ",
-		"TUNSETDEBUG",
-		"TUNSETGROUP",
-		"TUNSETIFF",
-		"TUNSETLINK",
-		"TUNSETNOCSUM",
-		"TUNSETOFFLOAD",
-		"TUNSETOWNER",
-		"TUNSETPERSIST",
-		"TUNSETSNDBUF",
-		"TUNSETTXFILTER",
-		"TUNSETVNETHDRSZ",
-		"Tee",
-		"TerminateProcess",
-		"Termios",
-		"Tgkill",
-		"Time",
-		"Time_t",
-		"Times",
-		"Timespec",
-		"TimespecToNsec",
-		"Timeval",
-		"Timeval32",
-		"TimevalToNsec",
-		"Timex",
-		"Timezoneinformation",
-		"Tms",
-		"Token",
-		"TokenAccessInformation",
-		"TokenAuditPolicy",
-		"TokenDefaultDacl",
-		"TokenElevation",
-		"TokenElevationType",
-		"TokenGroups",
-		"TokenGroupsAndPrivileges",
-		"TokenHasRestrictions",
-		"TokenImpersonationLevel",
-		"TokenIntegrityLevel",
-		"TokenLinkedToken",
-		"TokenLogonSid",
-		"TokenMandatoryPolicy",
-		"TokenOrigin",
-		"TokenOwner",
-		"TokenPrimaryGroup",
-		"TokenPrivileges",
-		"TokenRestrictedSids",
-		"TokenSandBoxInert",
-		"TokenSessionId",
-		"TokenSessionReference",
-		"TokenSource",
-		"TokenStatistics",
-		"TokenType",
-		"TokenUIAccess",
-		"TokenUser",
-		"TokenVirtualizationAllowed",
-		"TokenVirtualizationEnabled",
-		"Tokenprimarygroup",
-		"Tokenuser",
-		"TranslateAccountName",
-		"TranslateName",
-		"TransmitFile",
-		"TransmitFileBuffers",
-		"Truncate",
-		"UNIX_PATH_MAX",
-		"USAGE_MATCH_TYPE_AND",
-		"USAGE_MATCH_TYPE_OR",
-		"UTF16FromString",
-		"UTF16PtrFromString",
-		"UTF16ToString",
-		"Ucred",
-		"Umask",
-		"Uname",
-		"Undelete",
-		"UnixCredentials",
-		"UnixRights",
-		"Unlink",
-		"Unlinkat",
-		"UnmapViewOfFile",
-		"Unmount",
-		"Unsetenv",
-		"Unshare",
-		"UserInfo10",
-		"Ustat",
-		"Ustat_t",
-		"Utimbuf",
-		"Utime",
-		"Utimes",
-		"UtimesNano",
-		"Utsname",
-		"VDISCARD",
-		"VDSUSP",
-		"VEOF",
-		"VEOL",
-		"VEOL2",
-		"VERASE",
-		"VERASE2",
-		"VINTR",
-		"VKILL",
-		"VLNEXT",
-		"VMIN",
-		"VQUIT",
-		"VREPRINT",
-		"VSTART",
-		"VSTATUS",
-		"VSTOP",
-		"VSUSP",
-		"VSWTC",
-		"VT0",
-		"VT1",
-		"VTDLY",
-		"VTIME",
-		"VWERASE",
-		"VirtualLock",
-		"VirtualUnlock",
-		"WAIT_ABANDONED",
-		"WAIT_FAILED",
-		"WAIT_OBJECT_0",
-		"WAIT_TIMEOUT",
-		"WALL",
-		"WALLSIG",
-		"WALTSIG",
-		"WCLONE",
-		"WCONTINUED",
-		"WCOREFLAG",
-		"WEXITED",
-		"WLINUXCLONE",
-		"WNOHANG",
-		"WNOTHREAD",
-		"WNOWAIT",
-		"WNOZOMBIE",
-		"WOPTSCHECKED",
-		"WORDSIZE",
-		"WSABuf",
-		"WSACleanup",
-		"WSADESCRIPTION_LEN",
-		"WSAData",
-		"WSAEACCES",
-		"WSAECONNABORTED",
-		"WSAECONNRESET",
-		"WSAEnumProtocols",
-		"WSAID_CONNECTEX",
-		"WSAIoctl",
-		"WSAPROTOCOL_LEN",
-		"WSAProtocolChain",
-		"WSAProtocolInfo",
-		"WSARecv",
-		"WSARecvFrom",
-		"WSASYS_STATUS_LEN",
-		"WSASend",
-		"WSASendTo",
-		"WSASendto",
-		"WSAStartup",
-		"WSTOPPED",
-		"WTRAPPED",
-		"WUNTRACED",
-		"Wait4",
-		"WaitForSingleObject",
-		"WaitStatus",
-		"Win32FileAttributeData",
-		"Win32finddata",
-		"Write",
-		"WriteConsole",
-		"WriteFile",
-		"X509_ASN_ENCODING",
-		"XCASE",
-		"XP1_CONNECTIONLESS",
-		"XP1_CONNECT_DATA",
-		"XP1_DISCONNECT_DATA",
-		"XP1_EXPEDITED_DATA",
-		"XP1_GRACEFUL_CLOSE",
-		"XP1_GUARANTEED_DELIVERY",
-		"XP1_GUARANTEED_ORDER",
-		"XP1_IFS_HANDLES",
-		"XP1_MESSAGE_ORIENTED",
-		"XP1_MULTIPOINT_CONTROL_PLANE",
-		"XP1_MULTIPOINT_DATA_PLANE",
-		"XP1_PARTIAL_MESSAGE",
-		"XP1_PSEUDO_STREAM",
-		"XP1_QOS_SUPPORTED",
-		"XP1_SAN_SUPPORT_SDP",
-		"XP1_SUPPORT_BROADCAST",
-		"XP1_SUPPORT_MULTIPOINT",
-		"XP1_UNI_RECV",
-		"XP1_UNI_SEND",
-	},
-	"syscall/js": {
-		"CopyBytesToGo",
-		"CopyBytesToJS",
-		"Error",
-		"Func",
-		"FuncOf",
-		"Global",
-		"Null",
-		"Type",
-		"TypeBoolean",
-		"TypeFunction",
-		"TypeNull",
-		"TypeNumber",
-		"TypeObject",
-		"TypeString",
-		"TypeSymbol",
-		"TypeUndefined",
-		"Undefined",
-		"Value",
-		"ValueError",
-		"ValueOf",
-	},
-	"testing": {
-		"AllocsPerRun",
-		"B",
-		"Benchmark",
-		"BenchmarkResult",
-		"Cover",
-		"CoverBlock",
-		"CoverMode",
-		"Coverage",
-		"F",
-		"Init",
-		"InternalBenchmark",
-		"InternalExample",
-		"InternalFuzzTarget",
-		"InternalTest",
-		"M",
-		"Main",
-		"MainStart",
-		"PB",
-		"RegisterCover",
-		"RunBenchmarks",
-		"RunExamples",
-		"RunTests",
-		"Short",
-		"T",
-		"TB",
-		"Testing",
-		"Verbose",
-	},
-	"testing/fstest": {
-		"MapFS",
-		"MapFile",
-		"TestFS",
-	},
-	"testing/iotest": {
-		"DataErrReader",
-		"ErrReader",
-		"ErrTimeout",
-		"HalfReader",
-		"NewReadLogger",
-		"NewWriteLogger",
-		"OneByteReader",
-		"TestReader",
-		"TimeoutReader",
-		"TruncateWriter",
-	},
-	"testing/quick": {
-		"Check",
-		"CheckEqual",
-		"CheckEqualError",
-		"CheckError",
-		"Config",
-		"Generator",
-		"SetupError",
-		"Value",
-	},
-	"testing/slogtest": {
-		"Run",
-		"TestHandler",
-	},
-	"text/scanner": {
-		"Char",
-		"Comment",
-		"EOF",
-		"Float",
-		"GoTokens",
-		"GoWhitespace",
-		"Ident",
-		"Int",
-		"Position",
-		"RawString",
-		"ScanChars",
-		"ScanComments",
-		"ScanFloats",
-		"ScanIdents",
-		"ScanInts",
-		"ScanRawStrings",
-		"ScanStrings",
-		"Scanner",
-		"SkipComments",
-		"String",
-		"TokenString",
-	},
-	"text/tabwriter": {
-		"AlignRight",
-		"Debug",
-		"DiscardEmptyColumns",
-		"Escape",
-		"FilterHTML",
-		"NewWriter",
-		"StripEscape",
-		"TabIndent",
-		"Writer",
-	},
-	"text/template": {
-		"ExecError",
-		"FuncMap",
-		"HTMLEscape",
-		"HTMLEscapeString",
-		"HTMLEscaper",
-		"IsTrue",
-		"JSEscape",
-		"JSEscapeString",
-		"JSEscaper",
-		"Must",
-		"New",
-		"ParseFS",
-		"ParseFiles",
-		"ParseGlob",
-		"Template",
-		"URLQueryEscaper",
-	},
-	"text/template/parse": {
-		"ActionNode",
-		"BoolNode",
-		"BranchNode",
-		"BreakNode",
-		"ChainNode",
-		"CommandNode",
-		"CommentNode",
-		"ContinueNode",
-		"DotNode",
-		"FieldNode",
-		"IdentifierNode",
-		"IfNode",
-		"IsEmptyTree",
-		"ListNode",
-		"Mode",
-		"New",
-		"NewIdentifier",
-		"NilNode",
-		"Node",
-		"NodeAction",
-		"NodeBool",
-		"NodeBreak",
-		"NodeChain",
-		"NodeCommand",
-		"NodeComment",
-		"NodeContinue",
-		"NodeDot",
-		"NodeField",
-		"NodeIdentifier",
-		"NodeIf",
-		"NodeList",
-		"NodeNil",
-		"NodeNumber",
-		"NodePipe",
-		"NodeRange",
-		"NodeString",
-		"NodeTemplate",
-		"NodeText",
-		"NodeType",
-		"NodeVariable",
-		"NodeWith",
-		"NumberNode",
-		"Parse",
-		"ParseComments",
-		"PipeNode",
-		"Pos",
-		"RangeNode",
-		"SkipFuncCheck",
-		"StringNode",
-		"TemplateNode",
-		"TextNode",
-		"Tree",
-		"VariableNode",
-		"WithNode",
-	},
-	"time": {
-		"ANSIC",
-		"After",
-		"AfterFunc",
-		"April",
-		"August",
-		"Date",
-		"DateOnly",
-		"DateTime",
-		"December",
-		"Duration",
-		"February",
-		"FixedZone",
-		"Friday",
-		"Hour",
-		"January",
-		"July",
-		"June",
-		"Kitchen",
-		"Layout",
-		"LoadLocation",
-		"LoadLocationFromTZData",
-		"Local",
-		"Location",
-		"March",
-		"May",
-		"Microsecond",
-		"Millisecond",
-		"Minute",
-		"Monday",
-		"Month",
-		"Nanosecond",
-		"NewTicker",
-		"NewTimer",
-		"November",
-		"Now",
-		"October",
-		"Parse",
-		"ParseDuration",
-		"ParseError",
-		"ParseInLocation",
-		"RFC1123",
-		"RFC1123Z",
-		"RFC3339",
-		"RFC3339Nano",
-		"RFC822",
-		"RFC822Z",
-		"RFC850",
-		"RubyDate",
-		"Saturday",
-		"Second",
-		"September",
-		"Since",
-		"Sleep",
-		"Stamp",
-		"StampMicro",
-		"StampMilli",
-		"StampNano",
-		"Sunday",
-		"Thursday",
-		"Tick",
-		"Ticker",
-		"Time",
-		"TimeOnly",
-		"Timer",
-		"Tuesday",
-		"UTC",
-		"Unix",
-		"UnixDate",
-		"UnixMicro",
-		"UnixMilli",
-		"Until",
-		"Wednesday",
-		"Weekday",
-	},
-	"unicode": {
-		"ASCII_Hex_Digit",
-		"Adlam",
-		"Ahom",
-		"Anatolian_Hieroglyphs",
-		"Arabic",
-		"Armenian",
-		"Avestan",
-		"AzeriCase",
-		"Balinese",
-		"Bamum",
-		"Bassa_Vah",
-		"Batak",
-		"Bengali",
-		"Bhaiksuki",
-		"Bidi_Control",
-		"Bopomofo",
-		"Brahmi",
-		"Braille",
-		"Buginese",
-		"Buhid",
-		"C",
-		"Canadian_Aboriginal",
-		"Carian",
-		"CaseRange",
-		"CaseRanges",
-		"Categories",
-		"Caucasian_Albanian",
-		"Cc",
-		"Cf",
-		"Chakma",
-		"Cham",
-		"Cherokee",
-		"Chorasmian",
-		"Co",
-		"Common",
-		"Coptic",
-		"Cs",
-		"Cuneiform",
-		"Cypriot",
-		"Cypro_Minoan",
-		"Cyrillic",
-		"Dash",
-		"Deprecated",
-		"Deseret",
-		"Devanagari",
-		"Diacritic",
-		"Digit",
-		"Dives_Akuru",
-		"Dogra",
-		"Duployan",
-		"Egyptian_Hieroglyphs",
-		"Elbasan",
-		"Elymaic",
-		"Ethiopic",
-		"Extender",
-		"FoldCategory",
-		"FoldScript",
-		"Georgian",
-		"Glagolitic",
-		"Gothic",
-		"Grantha",
-		"GraphicRanges",
-		"Greek",
-		"Gujarati",
-		"Gunjala_Gondi",
-		"Gurmukhi",
-		"Han",
-		"Hangul",
-		"Hanifi_Rohingya",
-		"Hanunoo",
-		"Hatran",
-		"Hebrew",
-		"Hex_Digit",
-		"Hiragana",
-		"Hyphen",
-		"IDS_Binary_Operator",
-		"IDS_Trinary_Operator",
-		"Ideographic",
-		"Imperial_Aramaic",
-		"In",
-		"Inherited",
-		"Inscriptional_Pahlavi",
-		"Inscriptional_Parthian",
-		"Is",
-		"IsControl",
-		"IsDigit",
-		"IsGraphic",
-		"IsLetter",
-		"IsLower",
-		"IsMark",
-		"IsNumber",
-		"IsOneOf",
-		"IsPrint",
-		"IsPunct",
-		"IsSpace",
-		"IsSymbol",
-		"IsTitle",
-		"IsUpper",
-		"Javanese",
-		"Join_Control",
-		"Kaithi",
-		"Kannada",
-		"Katakana",
-		"Kawi",
-		"Kayah_Li",
-		"Kharoshthi",
-		"Khitan_Small_Script",
-		"Khmer",
-		"Khojki",
-		"Khudawadi",
-		"L",
-		"Lao",
-		"Latin",
-		"Lepcha",
-		"Letter",
-		"Limbu",
-		"Linear_A",
-		"Linear_B",
-		"Lisu",
-		"Ll",
-		"Lm",
-		"Lo",
-		"Logical_Order_Exception",
-		"Lower",
-		"LowerCase",
-		"Lt",
-		"Lu",
-		"Lycian",
-		"Lydian",
-		"M",
-		"Mahajani",
-		"Makasar",
-		"Malayalam",
-		"Mandaic",
-		"Manichaean",
-		"Marchen",
-		"Mark",
-		"Masaram_Gondi",
-		"MaxASCII",
-		"MaxCase",
-		"MaxLatin1",
-		"MaxRune",
-		"Mc",
-		"Me",
-		"Medefaidrin",
-		"Meetei_Mayek",
-		"Mende_Kikakui",
-		"Meroitic_Cursive",
-		"Meroitic_Hieroglyphs",
-		"Miao",
-		"Mn",
-		"Modi",
-		"Mongolian",
-		"Mro",
-		"Multani",
-		"Myanmar",
-		"N",
-		"Nabataean",
-		"Nag_Mundari",
-		"Nandinagari",
-		"Nd",
-		"New_Tai_Lue",
-		"Newa",
-		"Nko",
-		"Nl",
-		"No",
-		"Noncharacter_Code_Point",
-		"Number",
-		"Nushu",
-		"Nyiakeng_Puachue_Hmong",
-		"Ogham",
-		"Ol_Chiki",
-		"Old_Hungarian",
-		"Old_Italic",
-		"Old_North_Arabian",
-		"Old_Permic",
-		"Old_Persian",
-		"Old_Sogdian",
-		"Old_South_Arabian",
-		"Old_Turkic",
-		"Old_Uyghur",
-		"Oriya",
-		"Osage",
-		"Osmanya",
-		"Other",
-		"Other_Alphabetic",
-		"Other_Default_Ignorable_Code_Point",
-		"Other_Grapheme_Extend",
-		"Other_ID_Continue",
-		"Other_ID_Start",
-		"Other_Lowercase",
-		"Other_Math",
-		"Other_Uppercase",
-		"P",
-		"Pahawh_Hmong",
-		"Palmyrene",
-		"Pattern_Syntax",
-		"Pattern_White_Space",
-		"Pau_Cin_Hau",
-		"Pc",
-		"Pd",
-		"Pe",
-		"Pf",
-		"Phags_Pa",
-		"Phoenician",
-		"Pi",
-		"Po",
-		"Prepended_Concatenation_Mark",
-		"PrintRanges",
-		"Properties",
-		"Ps",
-		"Psalter_Pahlavi",
-		"Punct",
-		"Quotation_Mark",
-		"Radical",
-		"Range16",
-		"Range32",
-		"RangeTable",
-		"Regional_Indicator",
-		"Rejang",
-		"ReplacementChar",
-		"Runic",
-		"S",
-		"STerm",
-		"Samaritan",
-		"Saurashtra",
-		"Sc",
-		"Scripts",
-		"Sentence_Terminal",
-		"Sharada",
-		"Shavian",
-		"Siddham",
-		"SignWriting",
-		"SimpleFold",
-		"Sinhala",
-		"Sk",
-		"Sm",
-		"So",
-		"Soft_Dotted",
-		"Sogdian",
-		"Sora_Sompeng",
-		"Soyombo",
-		"Space",
-		"SpecialCase",
-		"Sundanese",
-		"Syloti_Nagri",
-		"Symbol",
-		"Syriac",
-		"Tagalog",
-		"Tagbanwa",
-		"Tai_Le",
-		"Tai_Tham",
-		"Tai_Viet",
-		"Takri",
-		"Tamil",
-		"Tangsa",
-		"Tangut",
-		"Telugu",
-		"Terminal_Punctuation",
-		"Thaana",
-		"Thai",
-		"Tibetan",
-		"Tifinagh",
-		"Tirhuta",
-		"Title",
-		"TitleCase",
-		"To",
-		"ToLower",
-		"ToTitle",
-		"ToUpper",
-		"Toto",
-		"TurkishCase",
-		"Ugaritic",
-		"Unified_Ideograph",
-		"Upper",
-		"UpperCase",
-		"UpperLower",
-		"Vai",
-		"Variation_Selector",
-		"Version",
-		"Vithkuqi",
-		"Wancho",
-		"Warang_Citi",
-		"White_Space",
-		"Yezidi",
-		"Yi",
-		"Z",
-		"Zanabazar_Square",
-		"Zl",
-		"Zp",
-		"Zs",
-	},
-	"unicode/utf16": {
-		"AppendRune",
-		"Decode",
-		"DecodeRune",
-		"Encode",
-		"EncodeRune",
-		"IsSurrogate",
-	},
-	"unicode/utf8": {
-		"AppendRune",
-		"DecodeLastRune",
-		"DecodeLastRuneInString",
-		"DecodeRune",
-		"DecodeRuneInString",
-		"EncodeRune",
-		"FullRune",
-		"FullRuneInString",
-		"MaxRune",
-		"RuneCount",
-		"RuneCountInString",
-		"RuneError",
-		"RuneLen",
-		"RuneSelf",
-		"RuneStart",
-		"UTFMax",
-		"Valid",
-		"ValidRune",
-		"ValidString",
-	},
-	"unsafe": {
-		"Add",
-		"Alignof",
-		"Offsetof",
-		"Pointer",
-		"Sizeof",
-		"Slice",
-		"SliceData",
-		"String",
-		"StringData",
-	},
-}
diff --git a/vendor/golang.org/x/tools/internal/pkgbits/decoder.go b/vendor/golang.org/x/tools/internal/pkgbits/decoder.go
index b92e8e6eb3299a9cb58f9861ce3a79436015202d..2acd85851e362b893627d039520068f2b38f70a2 100644
--- a/vendor/golang.org/x/tools/internal/pkgbits/decoder.go
+++ b/vendor/golang.org/x/tools/internal/pkgbits/decoder.go
@@ -23,6 +23,9 @@ type PkgDecoder struct {
 	// version is the file format version.
 	version uint32
 
+	// aliases determines whether types.Aliases should be created
+	aliases bool
+
 	// sync indicates whether the file uses sync markers.
 	sync bool
 
@@ -73,6 +76,7 @@ func (pr *PkgDecoder) SyncMarkers() bool { return pr.sync }
 func NewPkgDecoder(pkgPath, input string) PkgDecoder {
 	pr := PkgDecoder{
 		pkgPath: pkgPath,
+		//aliases: aliases.Enabled(),
 	}
 
 	// TODO(mdempsky): Implement direct indexing of input string to
diff --git a/vendor/golang.org/x/tools/internal/stdlib/manifest.go b/vendor/golang.org/x/tools/internal/stdlib/manifest.go
new file mode 100644
index 0000000000000000000000000000000000000000..fd6892075ee4bed106b1cdea50607d8e0e5c98c2
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/stdlib/manifest.go
@@ -0,0 +1,17320 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Code generated by generate.go. DO NOT EDIT.
+
+package stdlib
+
+var PackageSymbols = map[string][]Symbol{
+	"archive/tar": {
+		{"(*Header).FileInfo", Method, 1},
+		{"(*Reader).Next", Method, 0},
+		{"(*Reader).Read", Method, 0},
+		{"(*Writer).AddFS", Method, 22},
+		{"(*Writer).Close", Method, 0},
+		{"(*Writer).Flush", Method, 0},
+		{"(*Writer).Write", Method, 0},
+		{"(*Writer).WriteHeader", Method, 0},
+		{"(Format).String", Method, 10},
+		{"ErrFieldTooLong", Var, 0},
+		{"ErrHeader", Var, 0},
+		{"ErrInsecurePath", Var, 20},
+		{"ErrWriteAfterClose", Var, 0},
+		{"ErrWriteTooLong", Var, 0},
+		{"FileInfoHeader", Func, 1},
+		{"Format", Type, 10},
+		{"FormatGNU", Const, 10},
+		{"FormatPAX", Const, 10},
+		{"FormatUSTAR", Const, 10},
+		{"FormatUnknown", Const, 10},
+		{"Header", Type, 0},
+		{"Header.AccessTime", Field, 0},
+		{"Header.ChangeTime", Field, 0},
+		{"Header.Devmajor", Field, 0},
+		{"Header.Devminor", Field, 0},
+		{"Header.Format", Field, 10},
+		{"Header.Gid", Field, 0},
+		{"Header.Gname", Field, 0},
+		{"Header.Linkname", Field, 0},
+		{"Header.ModTime", Field, 0},
+		{"Header.Mode", Field, 0},
+		{"Header.Name", Field, 0},
+		{"Header.PAXRecords", Field, 10},
+		{"Header.Size", Field, 0},
+		{"Header.Typeflag", Field, 0},
+		{"Header.Uid", Field, 0},
+		{"Header.Uname", Field, 0},
+		{"Header.Xattrs", Field, 3},
+		{"NewReader", Func, 0},
+		{"NewWriter", Func, 0},
+		{"Reader", Type, 0},
+		{"TypeBlock", Const, 0},
+		{"TypeChar", Const, 0},
+		{"TypeCont", Const, 0},
+		{"TypeDir", Const, 0},
+		{"TypeFifo", Const, 0},
+		{"TypeGNULongLink", Const, 1},
+		{"TypeGNULongName", Const, 1},
+		{"TypeGNUSparse", Const, 3},
+		{"TypeLink", Const, 0},
+		{"TypeReg", Const, 0},
+		{"TypeRegA", Const, 0},
+		{"TypeSymlink", Const, 0},
+		{"TypeXGlobalHeader", Const, 0},
+		{"TypeXHeader", Const, 0},
+		{"Writer", Type, 0},
+	},
+	"archive/zip": {
+		{"(*File).DataOffset", Method, 2},
+		{"(*File).FileInfo", Method, 0},
+		{"(*File).ModTime", Method, 0},
+		{"(*File).Mode", Method, 0},
+		{"(*File).Open", Method, 0},
+		{"(*File).OpenRaw", Method, 17},
+		{"(*File).SetModTime", Method, 0},
+		{"(*File).SetMode", Method, 0},
+		{"(*FileHeader).FileInfo", Method, 0},
+		{"(*FileHeader).ModTime", Method, 0},
+		{"(*FileHeader).Mode", Method, 0},
+		{"(*FileHeader).SetModTime", Method, 0},
+		{"(*FileHeader).SetMode", Method, 0},
+		{"(*ReadCloser).Close", Method, 0},
+		{"(*ReadCloser).Open", Method, 16},
+		{"(*ReadCloser).RegisterDecompressor", Method, 6},
+		{"(*Reader).Open", Method, 16},
+		{"(*Reader).RegisterDecompressor", Method, 6},
+		{"(*Writer).AddFS", Method, 22},
+		{"(*Writer).Close", Method, 0},
+		{"(*Writer).Copy", Method, 17},
+		{"(*Writer).Create", Method, 0},
+		{"(*Writer).CreateHeader", Method, 0},
+		{"(*Writer).CreateRaw", Method, 17},
+		{"(*Writer).Flush", Method, 4},
+		{"(*Writer).RegisterCompressor", Method, 6},
+		{"(*Writer).SetComment", Method, 10},
+		{"(*Writer).SetOffset", Method, 5},
+		{"Compressor", Type, 2},
+		{"Decompressor", Type, 2},
+		{"Deflate", Const, 0},
+		{"ErrAlgorithm", Var, 0},
+		{"ErrChecksum", Var, 0},
+		{"ErrFormat", Var, 0},
+		{"ErrInsecurePath", Var, 20},
+		{"File", Type, 0},
+		{"File.FileHeader", Field, 0},
+		{"FileHeader", Type, 0},
+		{"FileHeader.CRC32", Field, 0},
+		{"FileHeader.Comment", Field, 0},
+		{"FileHeader.CompressedSize", Field, 0},
+		{"FileHeader.CompressedSize64", Field, 1},
+		{"FileHeader.CreatorVersion", Field, 0},
+		{"FileHeader.ExternalAttrs", Field, 0},
+		{"FileHeader.Extra", Field, 0},
+		{"FileHeader.Flags", Field, 0},
+		{"FileHeader.Method", Field, 0},
+		{"FileHeader.Modified", Field, 10},
+		{"FileHeader.ModifiedDate", Field, 0},
+		{"FileHeader.ModifiedTime", Field, 0},
+		{"FileHeader.Name", Field, 0},
+		{"FileHeader.NonUTF8", Field, 10},
+		{"FileHeader.ReaderVersion", Field, 0},
+		{"FileHeader.UncompressedSize", Field, 0},
+		{"FileHeader.UncompressedSize64", Field, 1},
+		{"FileInfoHeader", Func, 0},
+		{"NewReader", Func, 0},
+		{"NewWriter", Func, 0},
+		{"OpenReader", Func, 0},
+		{"ReadCloser", Type, 0},
+		{"ReadCloser.Reader", Field, 0},
+		{"Reader", Type, 0},
+		{"Reader.Comment", Field, 0},
+		{"Reader.File", Field, 0},
+		{"RegisterCompressor", Func, 2},
+		{"RegisterDecompressor", Func, 2},
+		{"Store", Const, 0},
+		{"Writer", Type, 0},
+	},
+	"bufio": {
+		{"(*Reader).Buffered", Method, 0},
+		{"(*Reader).Discard", Method, 5},
+		{"(*Reader).Peek", Method, 0},
+		{"(*Reader).Read", Method, 0},
+		{"(*Reader).ReadByte", Method, 0},
+		{"(*Reader).ReadBytes", Method, 0},
+		{"(*Reader).ReadLine", Method, 0},
+		{"(*Reader).ReadRune", Method, 0},
+		{"(*Reader).ReadSlice", Method, 0},
+		{"(*Reader).ReadString", Method, 0},
+		{"(*Reader).Reset", Method, 2},
+		{"(*Reader).Size", Method, 10},
+		{"(*Reader).UnreadByte", Method, 0},
+		{"(*Reader).UnreadRune", Method, 0},
+		{"(*Reader).WriteTo", Method, 1},
+		{"(*Scanner).Buffer", Method, 6},
+		{"(*Scanner).Bytes", Method, 1},
+		{"(*Scanner).Err", Method, 1},
+		{"(*Scanner).Scan", Method, 1},
+		{"(*Scanner).Split", Method, 1},
+		{"(*Scanner).Text", Method, 1},
+		{"(*Writer).Available", Method, 0},
+		{"(*Writer).AvailableBuffer", Method, 18},
+		{"(*Writer).Buffered", Method, 0},
+		{"(*Writer).Flush", Method, 0},
+		{"(*Writer).ReadFrom", Method, 1},
+		{"(*Writer).Reset", Method, 2},
+		{"(*Writer).Size", Method, 10},
+		{"(*Writer).Write", Method, 0},
+		{"(*Writer).WriteByte", Method, 0},
+		{"(*Writer).WriteRune", Method, 0},
+		{"(*Writer).WriteString", Method, 0},
+		{"(ReadWriter).Available", Method, 0},
+		{"(ReadWriter).AvailableBuffer", Method, 18},
+		{"(ReadWriter).Discard", Method, 5},
+		{"(ReadWriter).Flush", Method, 0},
+		{"(ReadWriter).Peek", Method, 0},
+		{"(ReadWriter).Read", Method, 0},
+		{"(ReadWriter).ReadByte", Method, 0},
+		{"(ReadWriter).ReadBytes", Method, 0},
+		{"(ReadWriter).ReadFrom", Method, 1},
+		{"(ReadWriter).ReadLine", Method, 0},
+		{"(ReadWriter).ReadRune", Method, 0},
+		{"(ReadWriter).ReadSlice", Method, 0},
+		{"(ReadWriter).ReadString", Method, 0},
+		{"(ReadWriter).UnreadByte", Method, 0},
+		{"(ReadWriter).UnreadRune", Method, 0},
+		{"(ReadWriter).Write", Method, 0},
+		{"(ReadWriter).WriteByte", Method, 0},
+		{"(ReadWriter).WriteRune", Method, 0},
+		{"(ReadWriter).WriteString", Method, 0},
+		{"(ReadWriter).WriteTo", Method, 1},
+		{"ErrAdvanceTooFar", Var, 1},
+		{"ErrBadReadCount", Var, 15},
+		{"ErrBufferFull", Var, 0},
+		{"ErrFinalToken", Var, 6},
+		{"ErrInvalidUnreadByte", Var, 0},
+		{"ErrInvalidUnreadRune", Var, 0},
+		{"ErrNegativeAdvance", Var, 1},
+		{"ErrNegativeCount", Var, 0},
+		{"ErrTooLong", Var, 1},
+		{"MaxScanTokenSize", Const, 1},
+		{"NewReadWriter", Func, 0},
+		{"NewReader", Func, 0},
+		{"NewReaderSize", Func, 0},
+		{"NewScanner", Func, 1},
+		{"NewWriter", Func, 0},
+		{"NewWriterSize", Func, 0},
+		{"ReadWriter", Type, 0},
+		{"ReadWriter.Reader", Field, 0},
+		{"ReadWriter.Writer", Field, 0},
+		{"Reader", Type, 0},
+		{"ScanBytes", Func, 1},
+		{"ScanLines", Func, 1},
+		{"ScanRunes", Func, 1},
+		{"ScanWords", Func, 1},
+		{"Scanner", Type, 1},
+		{"SplitFunc", Type, 1},
+		{"Writer", Type, 0},
+	},
+	"bytes": {
+		{"(*Buffer).Available", Method, 21},
+		{"(*Buffer).AvailableBuffer", Method, 21},
+		{"(*Buffer).Bytes", Method, 0},
+		{"(*Buffer).Cap", Method, 5},
+		{"(*Buffer).Grow", Method, 1},
+		{"(*Buffer).Len", Method, 0},
+		{"(*Buffer).Next", Method, 0},
+		{"(*Buffer).Read", Method, 0},
+		{"(*Buffer).ReadByte", Method, 0},
+		{"(*Buffer).ReadBytes", Method, 0},
+		{"(*Buffer).ReadFrom", Method, 0},
+		{"(*Buffer).ReadRune", Method, 0},
+		{"(*Buffer).ReadString", Method, 0},
+		{"(*Buffer).Reset", Method, 0},
+		{"(*Buffer).String", Method, 0},
+		{"(*Buffer).Truncate", Method, 0},
+		{"(*Buffer).UnreadByte", Method, 0},
+		{"(*Buffer).UnreadRune", Method, 0},
+		{"(*Buffer).Write", Method, 0},
+		{"(*Buffer).WriteByte", Method, 0},
+		{"(*Buffer).WriteRune", Method, 0},
+		{"(*Buffer).WriteString", Method, 0},
+		{"(*Buffer).WriteTo", Method, 0},
+		{"(*Reader).Len", Method, 0},
+		{"(*Reader).Read", Method, 0},
+		{"(*Reader).ReadAt", Method, 0},
+		{"(*Reader).ReadByte", Method, 0},
+		{"(*Reader).ReadRune", Method, 0},
+		{"(*Reader).Reset", Method, 7},
+		{"(*Reader).Seek", Method, 0},
+		{"(*Reader).Size", Method, 5},
+		{"(*Reader).UnreadByte", Method, 0},
+		{"(*Reader).UnreadRune", Method, 0},
+		{"(*Reader).WriteTo", Method, 1},
+		{"Buffer", Type, 0},
+		{"Clone", Func, 20},
+		{"Compare", Func, 0},
+		{"Contains", Func, 0},
+		{"ContainsAny", Func, 7},
+		{"ContainsFunc", Func, 21},
+		{"ContainsRune", Func, 7},
+		{"Count", Func, 0},
+		{"Cut", Func, 18},
+		{"CutPrefix", Func, 20},
+		{"CutSuffix", Func, 20},
+		{"Equal", Func, 0},
+		{"EqualFold", Func, 0},
+		{"ErrTooLarge", Var, 0},
+		{"Fields", Func, 0},
+		{"FieldsFunc", Func, 0},
+		{"HasPrefix", Func, 0},
+		{"HasSuffix", Func, 0},
+		{"Index", Func, 0},
+		{"IndexAny", Func, 0},
+		{"IndexByte", Func, 0},
+		{"IndexFunc", Func, 0},
+		{"IndexRune", Func, 0},
+		{"Join", Func, 0},
+		{"LastIndex", Func, 0},
+		{"LastIndexAny", Func, 0},
+		{"LastIndexByte", Func, 5},
+		{"LastIndexFunc", Func, 0},
+		{"Map", Func, 0},
+		{"MinRead", Const, 0},
+		{"NewBuffer", Func, 0},
+		{"NewBufferString", Func, 0},
+		{"NewReader", Func, 0},
+		{"Reader", Type, 0},
+		{"Repeat", Func, 0},
+		{"Replace", Func, 0},
+		{"ReplaceAll", Func, 12},
+		{"Runes", Func, 0},
+		{"Split", Func, 0},
+		{"SplitAfter", Func, 0},
+		{"SplitAfterN", Func, 0},
+		{"SplitN", Func, 0},
+		{"Title", Func, 0},
+		{"ToLower", Func, 0},
+		{"ToLowerSpecial", Func, 0},
+		{"ToTitle", Func, 0},
+		{"ToTitleSpecial", Func, 0},
+		{"ToUpper", Func, 0},
+		{"ToUpperSpecial", Func, 0},
+		{"ToValidUTF8", Func, 13},
+		{"Trim", Func, 0},
+		{"TrimFunc", Func, 0},
+		{"TrimLeft", Func, 0},
+		{"TrimLeftFunc", Func, 0},
+		{"TrimPrefix", Func, 1},
+		{"TrimRight", Func, 0},
+		{"TrimRightFunc", Func, 0},
+		{"TrimSpace", Func, 0},
+		{"TrimSuffix", Func, 1},
+	},
+	"cmp": {
+		{"Compare", Func, 21},
+		{"Less", Func, 21},
+		{"Or", Func, 22},
+		{"Ordered", Type, 21},
+	},
+	"compress/bzip2": {
+		{"(StructuralError).Error", Method, 0},
+		{"NewReader", Func, 0},
+		{"StructuralError", Type, 0},
+	},
+	"compress/flate": {
+		{"(*ReadError).Error", Method, 0},
+		{"(*WriteError).Error", Method, 0},
+		{"(*Writer).Close", Method, 0},
+		{"(*Writer).Flush", Method, 0},
+		{"(*Writer).Reset", Method, 2},
+		{"(*Writer).Write", Method, 0},
+		{"(CorruptInputError).Error", Method, 0},
+		{"(InternalError).Error", Method, 0},
+		{"BestCompression", Const, 0},
+		{"BestSpeed", Const, 0},
+		{"CorruptInputError", Type, 0},
+		{"DefaultCompression", Const, 0},
+		{"HuffmanOnly", Const, 7},
+		{"InternalError", Type, 0},
+		{"NewReader", Func, 0},
+		{"NewReaderDict", Func, 0},
+		{"NewWriter", Func, 0},
+		{"NewWriterDict", Func, 0},
+		{"NoCompression", Const, 0},
+		{"ReadError", Type, 0},
+		{"ReadError.Err", Field, 0},
+		{"ReadError.Offset", Field, 0},
+		{"Reader", Type, 0},
+		{"Resetter", Type, 4},
+		{"WriteError", Type, 0},
+		{"WriteError.Err", Field, 0},
+		{"WriteError.Offset", Field, 0},
+		{"Writer", Type, 0},
+	},
+	"compress/gzip": {
+		{"(*Reader).Close", Method, 0},
+		{"(*Reader).Multistream", Method, 4},
+		{"(*Reader).Read", Method, 0},
+		{"(*Reader).Reset", Method, 3},
+		{"(*Writer).Close", Method, 0},
+		{"(*Writer).Flush", Method, 1},
+		{"(*Writer).Reset", Method, 2},
+		{"(*Writer).Write", Method, 0},
+		{"BestCompression", Const, 0},
+		{"BestSpeed", Const, 0},
+		{"DefaultCompression", Const, 0},
+		{"ErrChecksum", Var, 0},
+		{"ErrHeader", Var, 0},
+		{"Header", Type, 0},
+		{"Header.Comment", Field, 0},
+		{"Header.Extra", Field, 0},
+		{"Header.ModTime", Field, 0},
+		{"Header.Name", Field, 0},
+		{"Header.OS", Field, 0},
+		{"HuffmanOnly", Const, 8},
+		{"NewReader", Func, 0},
+		{"NewWriter", Func, 0},
+		{"NewWriterLevel", Func, 0},
+		{"NoCompression", Const, 0},
+		{"Reader", Type, 0},
+		{"Reader.Header", Field, 0},
+		{"Writer", Type, 0},
+		{"Writer.Header", Field, 0},
+	},
+	"compress/lzw": {
+		{"(*Reader).Close", Method, 17},
+		{"(*Reader).Read", Method, 17},
+		{"(*Reader).Reset", Method, 17},
+		{"(*Writer).Close", Method, 17},
+		{"(*Writer).Reset", Method, 17},
+		{"(*Writer).Write", Method, 17},
+		{"LSB", Const, 0},
+		{"MSB", Const, 0},
+		{"NewReader", Func, 0},
+		{"NewWriter", Func, 0},
+		{"Order", Type, 0},
+		{"Reader", Type, 17},
+		{"Writer", Type, 17},
+	},
+	"compress/zlib": {
+		{"(*Writer).Close", Method, 0},
+		{"(*Writer).Flush", Method, 0},
+		{"(*Writer).Reset", Method, 2},
+		{"(*Writer).Write", Method, 0},
+		{"BestCompression", Const, 0},
+		{"BestSpeed", Const, 0},
+		{"DefaultCompression", Const, 0},
+		{"ErrChecksum", Var, 0},
+		{"ErrDictionary", Var, 0},
+		{"ErrHeader", Var, 0},
+		{"HuffmanOnly", Const, 8},
+		{"NewReader", Func, 0},
+		{"NewReaderDict", Func, 0},
+		{"NewWriter", Func, 0},
+		{"NewWriterLevel", Func, 0},
+		{"NewWriterLevelDict", Func, 0},
+		{"NoCompression", Const, 0},
+		{"Resetter", Type, 4},
+		{"Writer", Type, 0},
+	},
+	"container/heap": {
+		{"Fix", Func, 2},
+		{"Init", Func, 0},
+		{"Interface", Type, 0},
+		{"Pop", Func, 0},
+		{"Push", Func, 0},
+		{"Remove", Func, 0},
+	},
+	"container/list": {
+		{"(*Element).Next", Method, 0},
+		{"(*Element).Prev", Method, 0},
+		{"(*List).Back", Method, 0},
+		{"(*List).Front", Method, 0},
+		{"(*List).Init", Method, 0},
+		{"(*List).InsertAfter", Method, 0},
+		{"(*List).InsertBefore", Method, 0},
+		{"(*List).Len", Method, 0},
+		{"(*List).MoveAfter", Method, 2},
+		{"(*List).MoveBefore", Method, 2},
+		{"(*List).MoveToBack", Method, 0},
+		{"(*List).MoveToFront", Method, 0},
+		{"(*List).PushBack", Method, 0},
+		{"(*List).PushBackList", Method, 0},
+		{"(*List).PushFront", Method, 0},
+		{"(*List).PushFrontList", Method, 0},
+		{"(*List).Remove", Method, 0},
+		{"Element", Type, 0},
+		{"Element.Value", Field, 0},
+		{"List", Type, 0},
+		{"New", Func, 0},
+	},
+	"container/ring": {
+		{"(*Ring).Do", Method, 0},
+		{"(*Ring).Len", Method, 0},
+		{"(*Ring).Link", Method, 0},
+		{"(*Ring).Move", Method, 0},
+		{"(*Ring).Next", Method, 0},
+		{"(*Ring).Prev", Method, 0},
+		{"(*Ring).Unlink", Method, 0},
+		{"New", Func, 0},
+		{"Ring", Type, 0},
+		{"Ring.Value", Field, 0},
+	},
+	"context": {
+		{"AfterFunc", Func, 21},
+		{"Background", Func, 7},
+		{"CancelCauseFunc", Type, 20},
+		{"CancelFunc", Type, 7},
+		{"Canceled", Var, 7},
+		{"Cause", Func, 20},
+		{"Context", Type, 7},
+		{"DeadlineExceeded", Var, 7},
+		{"TODO", Func, 7},
+		{"WithCancel", Func, 7},
+		{"WithCancelCause", Func, 20},
+		{"WithDeadline", Func, 7},
+		{"WithDeadlineCause", Func, 21},
+		{"WithTimeout", Func, 7},
+		{"WithTimeoutCause", Func, 21},
+		{"WithValue", Func, 7},
+		{"WithoutCancel", Func, 21},
+	},
+	"crypto": {
+		{"(Hash).Available", Method, 0},
+		{"(Hash).HashFunc", Method, 4},
+		{"(Hash).New", Method, 0},
+		{"(Hash).Size", Method, 0},
+		{"(Hash).String", Method, 15},
+		{"BLAKE2b_256", Const, 9},
+		{"BLAKE2b_384", Const, 9},
+		{"BLAKE2b_512", Const, 9},
+		{"BLAKE2s_256", Const, 9},
+		{"Decrypter", Type, 5},
+		{"DecrypterOpts", Type, 5},
+		{"Hash", Type, 0},
+		{"MD4", Const, 0},
+		{"MD5", Const, 0},
+		{"MD5SHA1", Const, 0},
+		{"PrivateKey", Type, 0},
+		{"PublicKey", Type, 2},
+		{"RIPEMD160", Const, 0},
+		{"RegisterHash", Func, 0},
+		{"SHA1", Const, 0},
+		{"SHA224", Const, 0},
+		{"SHA256", Const, 0},
+		{"SHA384", Const, 0},
+		{"SHA3_224", Const, 4},
+		{"SHA3_256", Const, 4},
+		{"SHA3_384", Const, 4},
+		{"SHA3_512", Const, 4},
+		{"SHA512", Const, 0},
+		{"SHA512_224", Const, 5},
+		{"SHA512_256", Const, 5},
+		{"Signer", Type, 4},
+		{"SignerOpts", Type, 4},
+	},
+	"crypto/aes": {
+		{"(KeySizeError).Error", Method, 0},
+		{"BlockSize", Const, 0},
+		{"KeySizeError", Type, 0},
+		{"NewCipher", Func, 0},
+	},
+	"crypto/cipher": {
+		{"(StreamReader).Read", Method, 0},
+		{"(StreamWriter).Close", Method, 0},
+		{"(StreamWriter).Write", Method, 0},
+		{"AEAD", Type, 2},
+		{"Block", Type, 0},
+		{"BlockMode", Type, 0},
+		{"NewCBCDecrypter", Func, 0},
+		{"NewCBCEncrypter", Func, 0},
+		{"NewCFBDecrypter", Func, 0},
+		{"NewCFBEncrypter", Func, 0},
+		{"NewCTR", Func, 0},
+		{"NewGCM", Func, 2},
+		{"NewGCMWithNonceSize", Func, 5},
+		{"NewGCMWithTagSize", Func, 11},
+		{"NewOFB", Func, 0},
+		{"Stream", Type, 0},
+		{"StreamReader", Type, 0},
+		{"StreamReader.R", Field, 0},
+		{"StreamReader.S", Field, 0},
+		{"StreamWriter", Type, 0},
+		{"StreamWriter.Err", Field, 0},
+		{"StreamWriter.S", Field, 0},
+		{"StreamWriter.W", Field, 0},
+	},
+	"crypto/des": {
+		{"(KeySizeError).Error", Method, 0},
+		{"BlockSize", Const, 0},
+		{"KeySizeError", Type, 0},
+		{"NewCipher", Func, 0},
+		{"NewTripleDESCipher", Func, 0},
+	},
+	"crypto/dsa": {
+		{"ErrInvalidPublicKey", Var, 0},
+		{"GenerateKey", Func, 0},
+		{"GenerateParameters", Func, 0},
+		{"L1024N160", Const, 0},
+		{"L2048N224", Const, 0},
+		{"L2048N256", Const, 0},
+		{"L3072N256", Const, 0},
+		{"ParameterSizes", Type, 0},
+		{"Parameters", Type, 0},
+		{"Parameters.G", Field, 0},
+		{"Parameters.P", Field, 0},
+		{"Parameters.Q", Field, 0},
+		{"PrivateKey", Type, 0},
+		{"PrivateKey.PublicKey", Field, 0},
+		{"PrivateKey.X", Field, 0},
+		{"PublicKey", Type, 0},
+		{"PublicKey.Parameters", Field, 0},
+		{"PublicKey.Y", Field, 0},
+		{"Sign", Func, 0},
+		{"Verify", Func, 0},
+	},
+	"crypto/ecdh": {
+		{"(*PrivateKey).Bytes", Method, 20},
+		{"(*PrivateKey).Curve", Method, 20},
+		{"(*PrivateKey).ECDH", Method, 20},
+		{"(*PrivateKey).Equal", Method, 20},
+		{"(*PrivateKey).Public", Method, 20},
+		{"(*PrivateKey).PublicKey", Method, 20},
+		{"(*PublicKey).Bytes", Method, 20},
+		{"(*PublicKey).Curve", Method, 20},
+		{"(*PublicKey).Equal", Method, 20},
+		{"Curve", Type, 20},
+		{"P256", Func, 20},
+		{"P384", Func, 20},
+		{"P521", Func, 20},
+		{"PrivateKey", Type, 20},
+		{"PublicKey", Type, 20},
+		{"X25519", Func, 20},
+	},
+	"crypto/ecdsa": {
+		{"(*PrivateKey).ECDH", Method, 20},
+		{"(*PrivateKey).Equal", Method, 15},
+		{"(*PrivateKey).Public", Method, 4},
+		{"(*PrivateKey).Sign", Method, 4},
+		{"(*PublicKey).ECDH", Method, 20},
+		{"(*PublicKey).Equal", Method, 15},
+		{"(PrivateKey).Add", Method, 0},
+		{"(PrivateKey).Double", Method, 0},
+		{"(PrivateKey).IsOnCurve", Method, 0},
+		{"(PrivateKey).Params", Method, 0},
+		{"(PrivateKey).ScalarBaseMult", Method, 0},
+		{"(PrivateKey).ScalarMult", Method, 0},
+		{"(PublicKey).Add", Method, 0},
+		{"(PublicKey).Double", Method, 0},
+		{"(PublicKey).IsOnCurve", Method, 0},
+		{"(PublicKey).Params", Method, 0},
+		{"(PublicKey).ScalarBaseMult", Method, 0},
+		{"(PublicKey).ScalarMult", Method, 0},
+		{"GenerateKey", Func, 0},
+		{"PrivateKey", Type, 0},
+		{"PrivateKey.D", Field, 0},
+		{"PrivateKey.PublicKey", Field, 0},
+		{"PublicKey", Type, 0},
+		{"PublicKey.Curve", Field, 0},
+		{"PublicKey.X", Field, 0},
+		{"PublicKey.Y", Field, 0},
+		{"Sign", Func, 0},
+		{"SignASN1", Func, 15},
+		{"Verify", Func, 0},
+		{"VerifyASN1", Func, 15},
+	},
+	"crypto/ed25519": {
+		{"(*Options).HashFunc", Method, 20},
+		{"(PrivateKey).Equal", Method, 15},
+		{"(PrivateKey).Public", Method, 13},
+		{"(PrivateKey).Seed", Method, 13},
+		{"(PrivateKey).Sign", Method, 13},
+		{"(PublicKey).Equal", Method, 15},
+		{"GenerateKey", Func, 13},
+		{"NewKeyFromSeed", Func, 13},
+		{"Options", Type, 20},
+		{"Options.Context", Field, 20},
+		{"Options.Hash", Field, 20},
+		{"PrivateKey", Type, 13},
+		{"PrivateKeySize", Const, 13},
+		{"PublicKey", Type, 13},
+		{"PublicKeySize", Const, 13},
+		{"SeedSize", Const, 13},
+		{"Sign", Func, 13},
+		{"SignatureSize", Const, 13},
+		{"Verify", Func, 13},
+		{"VerifyWithOptions", Func, 20},
+	},
+	"crypto/elliptic": {
+		{"(*CurveParams).Add", Method, 0},
+		{"(*CurveParams).Double", Method, 0},
+		{"(*CurveParams).IsOnCurve", Method, 0},
+		{"(*CurveParams).Params", Method, 0},
+		{"(*CurveParams).ScalarBaseMult", Method, 0},
+		{"(*CurveParams).ScalarMult", Method, 0},
+		{"Curve", Type, 0},
+		{"CurveParams", Type, 0},
+		{"CurveParams.B", Field, 0},
+		{"CurveParams.BitSize", Field, 0},
+		{"CurveParams.Gx", Field, 0},
+		{"CurveParams.Gy", Field, 0},
+		{"CurveParams.N", Field, 0},
+		{"CurveParams.Name", Field, 5},
+		{"CurveParams.P", Field, 0},
+		{"GenerateKey", Func, 0},
+		{"Marshal", Func, 0},
+		{"MarshalCompressed", Func, 15},
+		{"P224", Func, 0},
+		{"P256", Func, 0},
+		{"P384", Func, 0},
+		{"P521", Func, 0},
+		{"Unmarshal", Func, 0},
+		{"UnmarshalCompressed", Func, 15},
+	},
+	"crypto/hmac": {
+		{"Equal", Func, 1},
+		{"New", Func, 0},
+	},
+	"crypto/md5": {
+		{"BlockSize", Const, 0},
+		{"New", Func, 0},
+		{"Size", Const, 0},
+		{"Sum", Func, 2},
+	},
+	"crypto/rand": {
+		{"Int", Func, 0},
+		{"Prime", Func, 0},
+		{"Read", Func, 0},
+		{"Reader", Var, 0},
+	},
+	"crypto/rc4": {
+		{"(*Cipher).Reset", Method, 0},
+		{"(*Cipher).XORKeyStream", Method, 0},
+		{"(KeySizeError).Error", Method, 0},
+		{"Cipher", Type, 0},
+		{"KeySizeError", Type, 0},
+		{"NewCipher", Func, 0},
+	},
+	"crypto/rsa": {
+		{"(*PSSOptions).HashFunc", Method, 4},
+		{"(*PrivateKey).Decrypt", Method, 5},
+		{"(*PrivateKey).Equal", Method, 15},
+		{"(*PrivateKey).Precompute", Method, 0},
+		{"(*PrivateKey).Public", Method, 4},
+		{"(*PrivateKey).Sign", Method, 4},
+		{"(*PrivateKey).Size", Method, 11},
+		{"(*PrivateKey).Validate", Method, 0},
+		{"(*PublicKey).Equal", Method, 15},
+		{"(*PublicKey).Size", Method, 11},
+		{"CRTValue", Type, 0},
+		{"CRTValue.Coeff", Field, 0},
+		{"CRTValue.Exp", Field, 0},
+		{"CRTValue.R", Field, 0},
+		{"DecryptOAEP", Func, 0},
+		{"DecryptPKCS1v15", Func, 0},
+		{"DecryptPKCS1v15SessionKey", Func, 0},
+		{"EncryptOAEP", Func, 0},
+		{"EncryptPKCS1v15", Func, 0},
+		{"ErrDecryption", Var, 0},
+		{"ErrMessageTooLong", Var, 0},
+		{"ErrVerification", Var, 0},
+		{"GenerateKey", Func, 0},
+		{"GenerateMultiPrimeKey", Func, 0},
+		{"OAEPOptions", Type, 5},
+		{"OAEPOptions.Hash", Field, 5},
+		{"OAEPOptions.Label", Field, 5},
+		{"OAEPOptions.MGFHash", Field, 20},
+		{"PKCS1v15DecryptOptions", Type, 5},
+		{"PKCS1v15DecryptOptions.SessionKeyLen", Field, 5},
+		{"PSSOptions", Type, 2},
+		{"PSSOptions.Hash", Field, 4},
+		{"PSSOptions.SaltLength", Field, 2},
+		{"PSSSaltLengthAuto", Const, 2},
+		{"PSSSaltLengthEqualsHash", Const, 2},
+		{"PrecomputedValues", Type, 0},
+		{"PrecomputedValues.CRTValues", Field, 0},
+		{"PrecomputedValues.Dp", Field, 0},
+		{"PrecomputedValues.Dq", Field, 0},
+		{"PrecomputedValues.Qinv", Field, 0},
+		{"PrivateKey", Type, 0},
+		{"PrivateKey.D", Field, 0},
+		{"PrivateKey.Precomputed", Field, 0},
+		{"PrivateKey.Primes", Field, 0},
+		{"PrivateKey.PublicKey", Field, 0},
+		{"PublicKey", Type, 0},
+		{"PublicKey.E", Field, 0},
+		{"PublicKey.N", Field, 0},
+		{"SignPKCS1v15", Func, 0},
+		{"SignPSS", Func, 2},
+		{"VerifyPKCS1v15", Func, 0},
+		{"VerifyPSS", Func, 2},
+	},
+	"crypto/sha1": {
+		{"BlockSize", Const, 0},
+		{"New", Func, 0},
+		{"Size", Const, 0},
+		{"Sum", Func, 2},
+	},
+	"crypto/sha256": {
+		{"BlockSize", Const, 0},
+		{"New", Func, 0},
+		{"New224", Func, 0},
+		{"Size", Const, 0},
+		{"Size224", Const, 0},
+		{"Sum224", Func, 2},
+		{"Sum256", Func, 2},
+	},
+	"crypto/sha512": {
+		{"BlockSize", Const, 0},
+		{"New", Func, 0},
+		{"New384", Func, 0},
+		{"New512_224", Func, 5},
+		{"New512_256", Func, 5},
+		{"Size", Const, 0},
+		{"Size224", Const, 5},
+		{"Size256", Const, 5},
+		{"Size384", Const, 0},
+		{"Sum384", Func, 2},
+		{"Sum512", Func, 2},
+		{"Sum512_224", Func, 5},
+		{"Sum512_256", Func, 5},
+	},
+	"crypto/subtle": {
+		{"ConstantTimeByteEq", Func, 0},
+		{"ConstantTimeCompare", Func, 0},
+		{"ConstantTimeCopy", Func, 0},
+		{"ConstantTimeEq", Func, 0},
+		{"ConstantTimeLessOrEq", Func, 2},
+		{"ConstantTimeSelect", Func, 0},
+		{"XORBytes", Func, 20},
+	},
+	"crypto/tls": {
+		{"(*CertificateRequestInfo).Context", Method, 17},
+		{"(*CertificateRequestInfo).SupportsCertificate", Method, 14},
+		{"(*CertificateVerificationError).Error", Method, 20},
+		{"(*CertificateVerificationError).Unwrap", Method, 20},
+		{"(*ClientHelloInfo).Context", Method, 17},
+		{"(*ClientHelloInfo).SupportsCertificate", Method, 14},
+		{"(*ClientSessionState).ResumptionState", Method, 21},
+		{"(*Config).BuildNameToCertificate", Method, 0},
+		{"(*Config).Clone", Method, 8},
+		{"(*Config).DecryptTicket", Method, 21},
+		{"(*Config).EncryptTicket", Method, 21},
+		{"(*Config).SetSessionTicketKeys", Method, 5},
+		{"(*Conn).Close", Method, 0},
+		{"(*Conn).CloseWrite", Method, 8},
+		{"(*Conn).ConnectionState", Method, 0},
+		{"(*Conn).Handshake", Method, 0},
+		{"(*Conn).HandshakeContext", Method, 17},
+		{"(*Conn).LocalAddr", Method, 0},
+		{"(*Conn).NetConn", Method, 18},
+		{"(*Conn).OCSPResponse", Method, 0},
+		{"(*Conn).Read", Method, 0},
+		{"(*Conn).RemoteAddr", Method, 0},
+		{"(*Conn).SetDeadline", Method, 0},
+		{"(*Conn).SetReadDeadline", Method, 0},
+		{"(*Conn).SetWriteDeadline", Method, 0},
+		{"(*Conn).VerifyHostname", Method, 0},
+		{"(*Conn).Write", Method, 0},
+		{"(*ConnectionState).ExportKeyingMaterial", Method, 11},
+		{"(*Dialer).Dial", Method, 15},
+		{"(*Dialer).DialContext", Method, 15},
+		{"(*QUICConn).Close", Method, 21},
+		{"(*QUICConn).ConnectionState", Method, 21},
+		{"(*QUICConn).HandleData", Method, 21},
+		{"(*QUICConn).NextEvent", Method, 21},
+		{"(*QUICConn).SendSessionTicket", Method, 21},
+		{"(*QUICConn).SetTransportParameters", Method, 21},
+		{"(*QUICConn).Start", Method, 21},
+		{"(*SessionState).Bytes", Method, 21},
+		{"(AlertError).Error", Method, 21},
+		{"(ClientAuthType).String", Method, 15},
+		{"(CurveID).String", Method, 15},
+		{"(QUICEncryptionLevel).String", Method, 21},
+		{"(RecordHeaderError).Error", Method, 6},
+		{"(SignatureScheme).String", Method, 15},
+		{"AlertError", Type, 21},
+		{"Certificate", Type, 0},
+		{"Certificate.Certificate", Field, 0},
+		{"Certificate.Leaf", Field, 0},
+		{"Certificate.OCSPStaple", Field, 0},
+		{"Certificate.PrivateKey", Field, 0},
+		{"Certificate.SignedCertificateTimestamps", Field, 5},
+		{"Certificate.SupportedSignatureAlgorithms", Field, 14},
+		{"CertificateRequestInfo", Type, 8},
+		{"CertificateRequestInfo.AcceptableCAs", Field, 8},
+		{"CertificateRequestInfo.SignatureSchemes", Field, 8},
+		{"CertificateRequestInfo.Version", Field, 14},
+		{"CertificateVerificationError", Type, 20},
+		{"CertificateVerificationError.Err", Field, 20},
+		{"CertificateVerificationError.UnverifiedCertificates", Field, 20},
+		{"CipherSuite", Type, 14},
+		{"CipherSuite.ID", Field, 14},
+		{"CipherSuite.Insecure", Field, 14},
+		{"CipherSuite.Name", Field, 14},
+		{"CipherSuite.SupportedVersions", Field, 14},
+		{"CipherSuiteName", Func, 14},
+		{"CipherSuites", Func, 14},
+		{"Client", Func, 0},
+		{"ClientAuthType", Type, 0},
+		{"ClientHelloInfo", Type, 4},
+		{"ClientHelloInfo.CipherSuites", Field, 4},
+		{"ClientHelloInfo.Conn", Field, 8},
+		{"ClientHelloInfo.ServerName", Field, 4},
+		{"ClientHelloInfo.SignatureSchemes", Field, 8},
+		{"ClientHelloInfo.SupportedCurves", Field, 4},
+		{"ClientHelloInfo.SupportedPoints", Field, 4},
+		{"ClientHelloInfo.SupportedProtos", Field, 8},
+		{"ClientHelloInfo.SupportedVersions", Field, 8},
+		{"ClientSessionCache", Type, 3},
+		{"ClientSessionState", Type, 3},
+		{"Config", Type, 0},
+		{"Config.Certificates", Field, 0},
+		{"Config.CipherSuites", Field, 0},
+		{"Config.ClientAuth", Field, 0},
+		{"Config.ClientCAs", Field, 0},
+		{"Config.ClientSessionCache", Field, 3},
+		{"Config.CurvePreferences", Field, 3},
+		{"Config.DynamicRecordSizingDisabled", Field, 7},
+		{"Config.GetCertificate", Field, 4},
+		{"Config.GetClientCertificate", Field, 8},
+		{"Config.GetConfigForClient", Field, 8},
+		{"Config.InsecureSkipVerify", Field, 0},
+		{"Config.KeyLogWriter", Field, 8},
+		{"Config.MaxVersion", Field, 2},
+		{"Config.MinVersion", Field, 2},
+		{"Config.NameToCertificate", Field, 0},
+		{"Config.NextProtos", Field, 0},
+		{"Config.PreferServerCipherSuites", Field, 1},
+		{"Config.Rand", Field, 0},
+		{"Config.Renegotiation", Field, 7},
+		{"Config.RootCAs", Field, 0},
+		{"Config.ServerName", Field, 0},
+		{"Config.SessionTicketKey", Field, 1},
+		{"Config.SessionTicketsDisabled", Field, 1},
+		{"Config.Time", Field, 0},
+		{"Config.UnwrapSession", Field, 21},
+		{"Config.VerifyConnection", Field, 15},
+		{"Config.VerifyPeerCertificate", Field, 8},
+		{"Config.WrapSession", Field, 21},
+		{"Conn", Type, 0},
+		{"ConnectionState", Type, 0},
+		{"ConnectionState.CipherSuite", Field, 0},
+		{"ConnectionState.DidResume", Field, 1},
+		{"ConnectionState.HandshakeComplete", Field, 0},
+		{"ConnectionState.NegotiatedProtocol", Field, 0},
+		{"ConnectionState.NegotiatedProtocolIsMutual", Field, 0},
+		{"ConnectionState.OCSPResponse", Field, 5},
+		{"ConnectionState.PeerCertificates", Field, 0},
+		{"ConnectionState.ServerName", Field, 0},
+		{"ConnectionState.SignedCertificateTimestamps", Field, 5},
+		{"ConnectionState.TLSUnique", Field, 4},
+		{"ConnectionState.VerifiedChains", Field, 0},
+		{"ConnectionState.Version", Field, 3},
+		{"CurveID", Type, 3},
+		{"CurveP256", Const, 3},
+		{"CurveP384", Const, 3},
+		{"CurveP521", Const, 3},
+		{"Dial", Func, 0},
+		{"DialWithDialer", Func, 3},
+		{"Dialer", Type, 15},
+		{"Dialer.Config", Field, 15},
+		{"Dialer.NetDialer", Field, 15},
+		{"ECDSAWithP256AndSHA256", Const, 8},
+		{"ECDSAWithP384AndSHA384", Const, 8},
+		{"ECDSAWithP521AndSHA512", Const, 8},
+		{"ECDSAWithSHA1", Const, 10},
+		{"Ed25519", Const, 13},
+		{"InsecureCipherSuites", Func, 14},
+		{"Listen", Func, 0},
+		{"LoadX509KeyPair", Func, 0},
+		{"NewLRUClientSessionCache", Func, 3},
+		{"NewListener", Func, 0},
+		{"NewResumptionState", Func, 21},
+		{"NoClientCert", Const, 0},
+		{"PKCS1WithSHA1", Const, 8},
+		{"PKCS1WithSHA256", Const, 8},
+		{"PKCS1WithSHA384", Const, 8},
+		{"PKCS1WithSHA512", Const, 8},
+		{"PSSWithSHA256", Const, 8},
+		{"PSSWithSHA384", Const, 8},
+		{"PSSWithSHA512", Const, 8},
+		{"ParseSessionState", Func, 21},
+		{"QUICClient", Func, 21},
+		{"QUICConfig", Type, 21},
+		{"QUICConfig.TLSConfig", Field, 21},
+		{"QUICConn", Type, 21},
+		{"QUICEncryptionLevel", Type, 21},
+		{"QUICEncryptionLevelApplication", Const, 21},
+		{"QUICEncryptionLevelEarly", Const, 21},
+		{"QUICEncryptionLevelHandshake", Const, 21},
+		{"QUICEncryptionLevelInitial", Const, 21},
+		{"QUICEvent", Type, 21},
+		{"QUICEvent.Data", Field, 21},
+		{"QUICEvent.Kind", Field, 21},
+		{"QUICEvent.Level", Field, 21},
+		{"QUICEvent.Suite", Field, 21},
+		{"QUICEventKind", Type, 21},
+		{"QUICHandshakeDone", Const, 21},
+		{"QUICNoEvent", Const, 21},
+		{"QUICRejectedEarlyData", Const, 21},
+		{"QUICServer", Func, 21},
+		{"QUICSessionTicketOptions", Type, 21},
+		{"QUICSessionTicketOptions.EarlyData", Field, 21},
+		{"QUICSetReadSecret", Const, 21},
+		{"QUICSetWriteSecret", Const, 21},
+		{"QUICTransportParameters", Const, 21},
+		{"QUICTransportParametersRequired", Const, 21},
+		{"QUICWriteData", Const, 21},
+		{"RecordHeaderError", Type, 6},
+		{"RecordHeaderError.Conn", Field, 12},
+		{"RecordHeaderError.Msg", Field, 6},
+		{"RecordHeaderError.RecordHeader", Field, 6},
+		{"RenegotiateFreelyAsClient", Const, 7},
+		{"RenegotiateNever", Const, 7},
+		{"RenegotiateOnceAsClient", Const, 7},
+		{"RenegotiationSupport", Type, 7},
+		{"RequestClientCert", Const, 0},
+		{"RequireAndVerifyClientCert", Const, 0},
+		{"RequireAnyClientCert", Const, 0},
+		{"Server", Func, 0},
+		{"SessionState", Type, 21},
+		{"SessionState.EarlyData", Field, 21},
+		{"SessionState.Extra", Field, 21},
+		{"SignatureScheme", Type, 8},
+		{"TLS_AES_128_GCM_SHA256", Const, 12},
+		{"TLS_AES_256_GCM_SHA384", Const, 12},
+		{"TLS_CHACHA20_POLY1305_SHA256", Const, 12},
+		{"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", Const, 2},
+		{"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", Const, 8},
+		{"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", Const, 2},
+		{"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", Const, 2},
+		{"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", Const, 5},
+		{"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", Const, 8},
+		{"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", Const, 14},
+		{"TLS_ECDHE_ECDSA_WITH_RC4_128_SHA", Const, 2},
+		{"TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA", Const, 0},
+		{"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", Const, 0},
+		{"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", Const, 8},
+		{"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", Const, 2},
+		{"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", Const, 1},
+		{"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", Const, 5},
+		{"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", Const, 8},
+		{"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", Const, 14},
+		{"TLS_ECDHE_RSA_WITH_RC4_128_SHA", Const, 0},
+		{"TLS_FALLBACK_SCSV", Const, 4},
+		{"TLS_RSA_WITH_3DES_EDE_CBC_SHA", Const, 0},
+		{"TLS_RSA_WITH_AES_128_CBC_SHA", Const, 0},
+		{"TLS_RSA_WITH_AES_128_CBC_SHA256", Const, 8},
+		{"TLS_RSA_WITH_AES_128_GCM_SHA256", Const, 6},
+		{"TLS_RSA_WITH_AES_256_CBC_SHA", Const, 1},
+		{"TLS_RSA_WITH_AES_256_GCM_SHA384", Const, 6},
+		{"TLS_RSA_WITH_RC4_128_SHA", Const, 0},
+		{"VerifyClientCertIfGiven", Const, 0},
+		{"VersionName", Func, 21},
+		{"VersionSSL30", Const, 2},
+		{"VersionTLS10", Const, 2},
+		{"VersionTLS11", Const, 2},
+		{"VersionTLS12", Const, 2},
+		{"VersionTLS13", Const, 12},
+		{"X25519", Const, 8},
+		{"X509KeyPair", Func, 0},
+	},
+	"crypto/x509": {
+		{"(*CertPool).AddCert", Method, 0},
+		{"(*CertPool).AddCertWithConstraint", Method, 22},
+		{"(*CertPool).AppendCertsFromPEM", Method, 0},
+		{"(*CertPool).Clone", Method, 19},
+		{"(*CertPool).Equal", Method, 19},
+		{"(*CertPool).Subjects", Method, 0},
+		{"(*Certificate).CheckCRLSignature", Method, 0},
+		{"(*Certificate).CheckSignature", Method, 0},
+		{"(*Certificate).CheckSignatureFrom", Method, 0},
+		{"(*Certificate).CreateCRL", Method, 0},
+		{"(*Certificate).Equal", Method, 0},
+		{"(*Certificate).Verify", Method, 0},
+		{"(*Certificate).VerifyHostname", Method, 0},
+		{"(*CertificateRequest).CheckSignature", Method, 5},
+		{"(*RevocationList).CheckSignatureFrom", Method, 19},
+		{"(CertificateInvalidError).Error", Method, 0},
+		{"(ConstraintViolationError).Error", Method, 0},
+		{"(HostnameError).Error", Method, 0},
+		{"(InsecureAlgorithmError).Error", Method, 6},
+		{"(OID).Equal", Method, 22},
+		{"(OID).EqualASN1OID", Method, 22},
+		{"(OID).String", Method, 22},
+		{"(PublicKeyAlgorithm).String", Method, 10},
+		{"(SignatureAlgorithm).String", Method, 6},
+		{"(SystemRootsError).Error", Method, 1},
+		{"(SystemRootsError).Unwrap", Method, 16},
+		{"(UnhandledCriticalExtension).Error", Method, 0},
+		{"(UnknownAuthorityError).Error", Method, 0},
+		{"CANotAuthorizedForExtKeyUsage", Const, 10},
+		{"CANotAuthorizedForThisName", Const, 0},
+		{"CertPool", Type, 0},
+		{"Certificate", Type, 0},
+		{"Certificate.AuthorityKeyId", Field, 0},
+		{"Certificate.BasicConstraintsValid", Field, 0},
+		{"Certificate.CRLDistributionPoints", Field, 2},
+		{"Certificate.DNSNames", Field, 0},
+		{"Certificate.EmailAddresses", Field, 0},
+		{"Certificate.ExcludedDNSDomains", Field, 9},
+		{"Certificate.ExcludedEmailAddresses", Field, 10},
+		{"Certificate.ExcludedIPRanges", Field, 10},
+		{"Certificate.ExcludedURIDomains", Field, 10},
+		{"Certificate.ExtKeyUsage", Field, 0},
+		{"Certificate.Extensions", Field, 2},
+		{"Certificate.ExtraExtensions", Field, 2},
+		{"Certificate.IPAddresses", Field, 1},
+		{"Certificate.IsCA", Field, 0},
+		{"Certificate.Issuer", Field, 0},
+		{"Certificate.IssuingCertificateURL", Field, 2},
+		{"Certificate.KeyUsage", Field, 0},
+		{"Certificate.MaxPathLen", Field, 0},
+		{"Certificate.MaxPathLenZero", Field, 4},
+		{"Certificate.NotAfter", Field, 0},
+		{"Certificate.NotBefore", Field, 0},
+		{"Certificate.OCSPServer", Field, 2},
+		{"Certificate.PermittedDNSDomains", Field, 0},
+		{"Certificate.PermittedDNSDomainsCritical", Field, 0},
+		{"Certificate.PermittedEmailAddresses", Field, 10},
+		{"Certificate.PermittedIPRanges", Field, 10},
+		{"Certificate.PermittedURIDomains", Field, 10},
+		{"Certificate.Policies", Field, 22},
+		{"Certificate.PolicyIdentifiers", Field, 0},
+		{"Certificate.PublicKey", Field, 0},
+		{"Certificate.PublicKeyAlgorithm", Field, 0},
+		{"Certificate.Raw", Field, 0},
+		{"Certificate.RawIssuer", Field, 0},
+		{"Certificate.RawSubject", Field, 0},
+		{"Certificate.RawSubjectPublicKeyInfo", Field, 0},
+		{"Certificate.RawTBSCertificate", Field, 0},
+		{"Certificate.SerialNumber", Field, 0},
+		{"Certificate.Signature", Field, 0},
+		{"Certificate.SignatureAlgorithm", Field, 0},
+		{"Certificate.Subject", Field, 0},
+		{"Certificate.SubjectKeyId", Field, 0},
+		{"Certificate.URIs", Field, 10},
+		{"Certificate.UnhandledCriticalExtensions", Field, 5},
+		{"Certificate.UnknownExtKeyUsage", Field, 0},
+		{"Certificate.Version", Field, 0},
+		{"CertificateInvalidError", Type, 0},
+		{"CertificateInvalidError.Cert", Field, 0},
+		{"CertificateInvalidError.Detail", Field, 10},
+		{"CertificateInvalidError.Reason", Field, 0},
+		{"CertificateRequest", Type, 3},
+		{"CertificateRequest.Attributes", Field, 3},
+		{"CertificateRequest.DNSNames", Field, 3},
+		{"CertificateRequest.EmailAddresses", Field, 3},
+		{"CertificateRequest.Extensions", Field, 3},
+		{"CertificateRequest.ExtraExtensions", Field, 3},
+		{"CertificateRequest.IPAddresses", Field, 3},
+		{"CertificateRequest.PublicKey", Field, 3},
+		{"CertificateRequest.PublicKeyAlgorithm", Field, 3},
+		{"CertificateRequest.Raw", Field, 3},
+		{"CertificateRequest.RawSubject", Field, 3},
+		{"CertificateRequest.RawSubjectPublicKeyInfo", Field, 3},
+		{"CertificateRequest.RawTBSCertificateRequest", Field, 3},
+		{"CertificateRequest.Signature", Field, 3},
+		{"CertificateRequest.SignatureAlgorithm", Field, 3},
+		{"CertificateRequest.Subject", Field, 3},
+		{"CertificateRequest.URIs", Field, 10},
+		{"CertificateRequest.Version", Field, 3},
+		{"ConstraintViolationError", Type, 0},
+		{"CreateCertificate", Func, 0},
+		{"CreateCertificateRequest", Func, 3},
+		{"CreateRevocationList", Func, 15},
+		{"DSA", Const, 0},
+		{"DSAWithSHA1", Const, 0},
+		{"DSAWithSHA256", Const, 0},
+		{"DecryptPEMBlock", Func, 1},
+		{"ECDSA", Const, 1},
+		{"ECDSAWithSHA1", Const, 1},
+		{"ECDSAWithSHA256", Const, 1},
+		{"ECDSAWithSHA384", Const, 1},
+		{"ECDSAWithSHA512", Const, 1},
+		{"Ed25519", Const, 13},
+		{"EncryptPEMBlock", Func, 1},
+		{"ErrUnsupportedAlgorithm", Var, 0},
+		{"Expired", Const, 0},
+		{"ExtKeyUsage", Type, 0},
+		{"ExtKeyUsageAny", Const, 0},
+		{"ExtKeyUsageClientAuth", Const, 0},
+		{"ExtKeyUsageCodeSigning", Const, 0},
+		{"ExtKeyUsageEmailProtection", Const, 0},
+		{"ExtKeyUsageIPSECEndSystem", Const, 1},
+		{"ExtKeyUsageIPSECTunnel", Const, 1},
+		{"ExtKeyUsageIPSECUser", Const, 1},
+		{"ExtKeyUsageMicrosoftCommercialCodeSigning", Const, 10},
+		{"ExtKeyUsageMicrosoftKernelCodeSigning", Const, 10},
+		{"ExtKeyUsageMicrosoftServerGatedCrypto", Const, 1},
+		{"ExtKeyUsageNetscapeServerGatedCrypto", Const, 1},
+		{"ExtKeyUsageOCSPSigning", Const, 0},
+		{"ExtKeyUsageServerAuth", Const, 0},
+		{"ExtKeyUsageTimeStamping", Const, 0},
+		{"HostnameError", Type, 0},
+		{"HostnameError.Certificate", Field, 0},
+		{"HostnameError.Host", Field, 0},
+		{"IncompatibleUsage", Const, 1},
+		{"IncorrectPasswordError", Var, 1},
+		{"InsecureAlgorithmError", Type, 6},
+		{"InvalidReason", Type, 0},
+		{"IsEncryptedPEMBlock", Func, 1},
+		{"KeyUsage", Type, 0},
+		{"KeyUsageCRLSign", Const, 0},
+		{"KeyUsageCertSign", Const, 0},
+		{"KeyUsageContentCommitment", Const, 0},
+		{"KeyUsageDataEncipherment", Const, 0},
+		{"KeyUsageDecipherOnly", Const, 0},
+		{"KeyUsageDigitalSignature", Const, 0},
+		{"KeyUsageEncipherOnly", Const, 0},
+		{"KeyUsageKeyAgreement", Const, 0},
+		{"KeyUsageKeyEncipherment", Const, 0},
+		{"MD2WithRSA", Const, 0},
+		{"MD5WithRSA", Const, 0},
+		{"MarshalECPrivateKey", Func, 2},
+		{"MarshalPKCS1PrivateKey", Func, 0},
+		{"MarshalPKCS1PublicKey", Func, 10},
+		{"MarshalPKCS8PrivateKey", Func, 10},
+		{"MarshalPKIXPublicKey", Func, 0},
+		{"NameConstraintsWithoutSANs", Const, 10},
+		{"NameMismatch", Const, 8},
+		{"NewCertPool", Func, 0},
+		{"NotAuthorizedToSign", Const, 0},
+		{"OID", Type, 22},
+		{"OIDFromInts", Func, 22},
+		{"PEMCipher", Type, 1},
+		{"PEMCipher3DES", Const, 1},
+		{"PEMCipherAES128", Const, 1},
+		{"PEMCipherAES192", Const, 1},
+		{"PEMCipherAES256", Const, 1},
+		{"PEMCipherDES", Const, 1},
+		{"ParseCRL", Func, 0},
+		{"ParseCertificate", Func, 0},
+		{"ParseCertificateRequest", Func, 3},
+		{"ParseCertificates", Func, 0},
+		{"ParseDERCRL", Func, 0},
+		{"ParseECPrivateKey", Func, 1},
+		{"ParsePKCS1PrivateKey", Func, 0},
+		{"ParsePKCS1PublicKey", Func, 10},
+		{"ParsePKCS8PrivateKey", Func, 0},
+		{"ParsePKIXPublicKey", Func, 0},
+		{"ParseRevocationList", Func, 19},
+		{"PublicKeyAlgorithm", Type, 0},
+		{"PureEd25519", Const, 13},
+		{"RSA", Const, 0},
+		{"RevocationList", Type, 15},
+		{"RevocationList.AuthorityKeyId", Field, 19},
+		{"RevocationList.Extensions", Field, 19},
+		{"RevocationList.ExtraExtensions", Field, 15},
+		{"RevocationList.Issuer", Field, 19},
+		{"RevocationList.NextUpdate", Field, 15},
+		{"RevocationList.Number", Field, 15},
+		{"RevocationList.Raw", Field, 19},
+		{"RevocationList.RawIssuer", Field, 19},
+		{"RevocationList.RawTBSRevocationList", Field, 19},
+		{"RevocationList.RevokedCertificateEntries", Field, 21},
+		{"RevocationList.RevokedCertificates", Field, 15},
+		{"RevocationList.Signature", Field, 19},
+		{"RevocationList.SignatureAlgorithm", Field, 15},
+		{"RevocationList.ThisUpdate", Field, 15},
+		{"RevocationListEntry", Type, 21},
+		{"RevocationListEntry.Extensions", Field, 21},
+		{"RevocationListEntry.ExtraExtensions", Field, 21},
+		{"RevocationListEntry.Raw", Field, 21},
+		{"RevocationListEntry.ReasonCode", Field, 21},
+		{"RevocationListEntry.RevocationTime", Field, 21},
+		{"RevocationListEntry.SerialNumber", Field, 21},
+		{"SHA1WithRSA", Const, 0},
+		{"SHA256WithRSA", Const, 0},
+		{"SHA256WithRSAPSS", Const, 8},
+		{"SHA384WithRSA", Const, 0},
+		{"SHA384WithRSAPSS", Const, 8},
+		{"SHA512WithRSA", Const, 0},
+		{"SHA512WithRSAPSS", Const, 8},
+		{"SetFallbackRoots", Func, 20},
+		{"SignatureAlgorithm", Type, 0},
+		{"SystemCertPool", Func, 7},
+		{"SystemRootsError", Type, 1},
+		{"SystemRootsError.Err", Field, 7},
+		{"TooManyConstraints", Const, 10},
+		{"TooManyIntermediates", Const, 0},
+		{"UnconstrainedName", Const, 10},
+		{"UnhandledCriticalExtension", Type, 0},
+		{"UnknownAuthorityError", Type, 0},
+		{"UnknownAuthorityError.Cert", Field, 8},
+		{"UnknownPublicKeyAlgorithm", Const, 0},
+		{"UnknownSignatureAlgorithm", Const, 0},
+		{"VerifyOptions", Type, 0},
+		{"VerifyOptions.CurrentTime", Field, 0},
+		{"VerifyOptions.DNSName", Field, 0},
+		{"VerifyOptions.Intermediates", Field, 0},
+		{"VerifyOptions.KeyUsages", Field, 1},
+		{"VerifyOptions.MaxConstraintComparisions", Field, 10},
+		{"VerifyOptions.Roots", Field, 0},
+	},
+	"crypto/x509/pkix": {
+		{"(*CertificateList).HasExpired", Method, 0},
+		{"(*Name).FillFromRDNSequence", Method, 0},
+		{"(Name).String", Method, 10},
+		{"(Name).ToRDNSequence", Method, 0},
+		{"(RDNSequence).String", Method, 10},
+		{"AlgorithmIdentifier", Type, 0},
+		{"AlgorithmIdentifier.Algorithm", Field, 0},
+		{"AlgorithmIdentifier.Parameters", Field, 0},
+		{"AttributeTypeAndValue", Type, 0},
+		{"AttributeTypeAndValue.Type", Field, 0},
+		{"AttributeTypeAndValue.Value", Field, 0},
+		{"AttributeTypeAndValueSET", Type, 3},
+		{"AttributeTypeAndValueSET.Type", Field, 3},
+		{"AttributeTypeAndValueSET.Value", Field, 3},
+		{"CertificateList", Type, 0},
+		{"CertificateList.SignatureAlgorithm", Field, 0},
+		{"CertificateList.SignatureValue", Field, 0},
+		{"CertificateList.TBSCertList", Field, 0},
+		{"Extension", Type, 0},
+		{"Extension.Critical", Field, 0},
+		{"Extension.Id", Field, 0},
+		{"Extension.Value", Field, 0},
+		{"Name", Type, 0},
+		{"Name.CommonName", Field, 0},
+		{"Name.Country", Field, 0},
+		{"Name.ExtraNames", Field, 5},
+		{"Name.Locality", Field, 0},
+		{"Name.Names", Field, 0},
+		{"Name.Organization", Field, 0},
+		{"Name.OrganizationalUnit", Field, 0},
+		{"Name.PostalCode", Field, 0},
+		{"Name.Province", Field, 0},
+		{"Name.SerialNumber", Field, 0},
+		{"Name.StreetAddress", Field, 0},
+		{"RDNSequence", Type, 0},
+		{"RelativeDistinguishedNameSET", Type, 0},
+		{"RevokedCertificate", Type, 0},
+		{"RevokedCertificate.Extensions", Field, 0},
+		{"RevokedCertificate.RevocationTime", Field, 0},
+		{"RevokedCertificate.SerialNumber", Field, 0},
+		{"TBSCertificateList", Type, 0},
+		{"TBSCertificateList.Extensions", Field, 0},
+		{"TBSCertificateList.Issuer", Field, 0},
+		{"TBSCertificateList.NextUpdate", Field, 0},
+		{"TBSCertificateList.Raw", Field, 0},
+		{"TBSCertificateList.RevokedCertificates", Field, 0},
+		{"TBSCertificateList.Signature", Field, 0},
+		{"TBSCertificateList.ThisUpdate", Field, 0},
+		{"TBSCertificateList.Version", Field, 0},
+	},
+	"database/sql": {
+		{"(*ColumnType).DatabaseTypeName", Method, 8},
+		{"(*ColumnType).DecimalSize", Method, 8},
+		{"(*ColumnType).Length", Method, 8},
+		{"(*ColumnType).Name", Method, 8},
+		{"(*ColumnType).Nullable", Method, 8},
+		{"(*ColumnType).ScanType", Method, 8},
+		{"(*Conn).BeginTx", Method, 9},
+		{"(*Conn).Close", Method, 9},
+		{"(*Conn).ExecContext", Method, 9},
+		{"(*Conn).PingContext", Method, 9},
+		{"(*Conn).PrepareContext", Method, 9},
+		{"(*Conn).QueryContext", Method, 9},
+		{"(*Conn).QueryRowContext", Method, 9},
+		{"(*Conn).Raw", Method, 13},
+		{"(*DB).Begin", Method, 0},
+		{"(*DB).BeginTx", Method, 8},
+		{"(*DB).Close", Method, 0},
+		{"(*DB).Conn", Method, 9},
+		{"(*DB).Driver", Method, 0},
+		{"(*DB).Exec", Method, 0},
+		{"(*DB).ExecContext", Method, 8},
+		{"(*DB).Ping", Method, 1},
+		{"(*DB).PingContext", Method, 8},
+		{"(*DB).Prepare", Method, 0},
+		{"(*DB).PrepareContext", Method, 8},
+		{"(*DB).Query", Method, 0},
+		{"(*DB).QueryContext", Method, 8},
+		{"(*DB).QueryRow", Method, 0},
+		{"(*DB).QueryRowContext", Method, 8},
+		{"(*DB).SetConnMaxIdleTime", Method, 15},
+		{"(*DB).SetConnMaxLifetime", Method, 6},
+		{"(*DB).SetMaxIdleConns", Method, 1},
+		{"(*DB).SetMaxOpenConns", Method, 2},
+		{"(*DB).Stats", Method, 5},
+		{"(*Null).Scan", Method, 22},
+		{"(*NullBool).Scan", Method, 0},
+		{"(*NullByte).Scan", Method, 17},
+		{"(*NullFloat64).Scan", Method, 0},
+		{"(*NullInt16).Scan", Method, 17},
+		{"(*NullInt32).Scan", Method, 13},
+		{"(*NullInt64).Scan", Method, 0},
+		{"(*NullString).Scan", Method, 0},
+		{"(*NullTime).Scan", Method, 13},
+		{"(*Row).Err", Method, 15},
+		{"(*Row).Scan", Method, 0},
+		{"(*Rows).Close", Method, 0},
+		{"(*Rows).ColumnTypes", Method, 8},
+		{"(*Rows).Columns", Method, 0},
+		{"(*Rows).Err", Method, 0},
+		{"(*Rows).Next", Method, 0},
+		{"(*Rows).NextResultSet", Method, 8},
+		{"(*Rows).Scan", Method, 0},
+		{"(*Stmt).Close", Method, 0},
+		{"(*Stmt).Exec", Method, 0},
+		{"(*Stmt).ExecContext", Method, 8},
+		{"(*Stmt).Query", Method, 0},
+		{"(*Stmt).QueryContext", Method, 8},
+		{"(*Stmt).QueryRow", Method, 0},
+		{"(*Stmt).QueryRowContext", Method, 8},
+		{"(*Tx).Commit", Method, 0},
+		{"(*Tx).Exec", Method, 0},
+		{"(*Tx).ExecContext", Method, 8},
+		{"(*Tx).Prepare", Method, 0},
+		{"(*Tx).PrepareContext", Method, 8},
+		{"(*Tx).Query", Method, 0},
+		{"(*Tx).QueryContext", Method, 8},
+		{"(*Tx).QueryRow", Method, 0},
+		{"(*Tx).QueryRowContext", Method, 8},
+		{"(*Tx).Rollback", Method, 0},
+		{"(*Tx).Stmt", Method, 0},
+		{"(*Tx).StmtContext", Method, 8},
+		{"(IsolationLevel).String", Method, 11},
+		{"(Null).Value", Method, 22},
+		{"(NullBool).Value", Method, 0},
+		{"(NullByte).Value", Method, 17},
+		{"(NullFloat64).Value", Method, 0},
+		{"(NullInt16).Value", Method, 17},
+		{"(NullInt32).Value", Method, 13},
+		{"(NullInt64).Value", Method, 0},
+		{"(NullString).Value", Method, 0},
+		{"(NullTime).Value", Method, 13},
+		{"ColumnType", Type, 8},
+		{"Conn", Type, 9},
+		{"DB", Type, 0},
+		{"DBStats", Type, 5},
+		{"DBStats.Idle", Field, 11},
+		{"DBStats.InUse", Field, 11},
+		{"DBStats.MaxIdleClosed", Field, 11},
+		{"DBStats.MaxIdleTimeClosed", Field, 15},
+		{"DBStats.MaxLifetimeClosed", Field, 11},
+		{"DBStats.MaxOpenConnections", Field, 11},
+		{"DBStats.OpenConnections", Field, 5},
+		{"DBStats.WaitCount", Field, 11},
+		{"DBStats.WaitDuration", Field, 11},
+		{"Drivers", Func, 4},
+		{"ErrConnDone", Var, 9},
+		{"ErrNoRows", Var, 0},
+		{"ErrTxDone", Var, 0},
+		{"IsolationLevel", Type, 8},
+		{"LevelDefault", Const, 8},
+		{"LevelLinearizable", Const, 8},
+		{"LevelReadCommitted", Const, 8},
+		{"LevelReadUncommitted", Const, 8},
+		{"LevelRepeatableRead", Const, 8},
+		{"LevelSerializable", Const, 8},
+		{"LevelSnapshot", Const, 8},
+		{"LevelWriteCommitted", Const, 8},
+		{"Named", Func, 8},
+		{"NamedArg", Type, 8},
+		{"NamedArg.Name", Field, 8},
+		{"NamedArg.Value", Field, 8},
+		{"Null", Type, 22},
+		{"Null.V", Field, 22},
+		{"Null.Valid", Field, 22},
+		{"NullBool", Type, 0},
+		{"NullBool.Bool", Field, 0},
+		{"NullBool.Valid", Field, 0},
+		{"NullByte", Type, 17},
+		{"NullByte.Byte", Field, 17},
+		{"NullByte.Valid", Field, 17},
+		{"NullFloat64", Type, 0},
+		{"NullFloat64.Float64", Field, 0},
+		{"NullFloat64.Valid", Field, 0},
+		{"NullInt16", Type, 17},
+		{"NullInt16.Int16", Field, 17},
+		{"NullInt16.Valid", Field, 17},
+		{"NullInt32", Type, 13},
+		{"NullInt32.Int32", Field, 13},
+		{"NullInt32.Valid", Field, 13},
+		{"NullInt64", Type, 0},
+		{"NullInt64.Int64", Field, 0},
+		{"NullInt64.Valid", Field, 0},
+		{"NullString", Type, 0},
+		{"NullString.String", Field, 0},
+		{"NullString.Valid", Field, 0},
+		{"NullTime", Type, 13},
+		{"NullTime.Time", Field, 13},
+		{"NullTime.Valid", Field, 13},
+		{"Open", Func, 0},
+		{"OpenDB", Func, 10},
+		{"Out", Type, 9},
+		{"Out.Dest", Field, 9},
+		{"Out.In", Field, 9},
+		{"RawBytes", Type, 0},
+		{"Register", Func, 0},
+		{"Result", Type, 0},
+		{"Row", Type, 0},
+		{"Rows", Type, 0},
+		{"Scanner", Type, 0},
+		{"Stmt", Type, 0},
+		{"Tx", Type, 0},
+		{"TxOptions", Type, 8},
+		{"TxOptions.Isolation", Field, 8},
+		{"TxOptions.ReadOnly", Field, 8},
+	},
+	"database/sql/driver": {
+		{"(NotNull).ConvertValue", Method, 0},
+		{"(Null).ConvertValue", Method, 0},
+		{"(RowsAffected).LastInsertId", Method, 0},
+		{"(RowsAffected).RowsAffected", Method, 0},
+		{"Bool", Var, 0},
+		{"ColumnConverter", Type, 0},
+		{"Conn", Type, 0},
+		{"ConnBeginTx", Type, 8},
+		{"ConnPrepareContext", Type, 8},
+		{"Connector", Type, 10},
+		{"DefaultParameterConverter", Var, 0},
+		{"Driver", Type, 0},
+		{"DriverContext", Type, 10},
+		{"ErrBadConn", Var, 0},
+		{"ErrRemoveArgument", Var, 9},
+		{"ErrSkip", Var, 0},
+		{"Execer", Type, 0},
+		{"ExecerContext", Type, 8},
+		{"Int32", Var, 0},
+		{"IsScanValue", Func, 0},
+		{"IsValue", Func, 0},
+		{"IsolationLevel", Type, 8},
+		{"NamedValue", Type, 8},
+		{"NamedValue.Name", Field, 8},
+		{"NamedValue.Ordinal", Field, 8},
+		{"NamedValue.Value", Field, 8},
+		{"NamedValueChecker", Type, 9},
+		{"NotNull", Type, 0},
+		{"NotNull.Converter", Field, 0},
+		{"Null", Type, 0},
+		{"Null.Converter", Field, 0},
+		{"Pinger", Type, 8},
+		{"Queryer", Type, 1},
+		{"QueryerContext", Type, 8},
+		{"Result", Type, 0},
+		{"ResultNoRows", Var, 0},
+		{"Rows", Type, 0},
+		{"RowsAffected", Type, 0},
+		{"RowsColumnTypeDatabaseTypeName", Type, 8},
+		{"RowsColumnTypeLength", Type, 8},
+		{"RowsColumnTypeNullable", Type, 8},
+		{"RowsColumnTypePrecisionScale", Type, 8},
+		{"RowsColumnTypeScanType", Type, 8},
+		{"RowsNextResultSet", Type, 8},
+		{"SessionResetter", Type, 10},
+		{"Stmt", Type, 0},
+		{"StmtExecContext", Type, 8},
+		{"StmtQueryContext", Type, 8},
+		{"String", Var, 0},
+		{"Tx", Type, 0},
+		{"TxOptions", Type, 8},
+		{"TxOptions.Isolation", Field, 8},
+		{"TxOptions.ReadOnly", Field, 8},
+		{"Validator", Type, 15},
+		{"Value", Type, 0},
+		{"ValueConverter", Type, 0},
+		{"Valuer", Type, 0},
+	},
+	"debug/buildinfo": {
+		{"BuildInfo", Type, 18},
+		{"Read", Func, 18},
+		{"ReadFile", Func, 18},
+	},
+	"debug/dwarf": {
+		{"(*AddrType).Basic", Method, 0},
+		{"(*AddrType).Common", Method, 0},
+		{"(*AddrType).Size", Method, 0},
+		{"(*AddrType).String", Method, 0},
+		{"(*ArrayType).Common", Method, 0},
+		{"(*ArrayType).Size", Method, 0},
+		{"(*ArrayType).String", Method, 0},
+		{"(*BasicType).Basic", Method, 0},
+		{"(*BasicType).Common", Method, 0},
+		{"(*BasicType).Size", Method, 0},
+		{"(*BasicType).String", Method, 0},
+		{"(*BoolType).Basic", Method, 0},
+		{"(*BoolType).Common", Method, 0},
+		{"(*BoolType).Size", Method, 0},
+		{"(*BoolType).String", Method, 0},
+		{"(*CharType).Basic", Method, 0},
+		{"(*CharType).Common", Method, 0},
+		{"(*CharType).Size", Method, 0},
+		{"(*CharType).String", Method, 0},
+		{"(*CommonType).Common", Method, 0},
+		{"(*CommonType).Size", Method, 0},
+		{"(*ComplexType).Basic", Method, 0},
+		{"(*ComplexType).Common", Method, 0},
+		{"(*ComplexType).Size", Method, 0},
+		{"(*ComplexType).String", Method, 0},
+		{"(*Data).AddSection", Method, 14},
+		{"(*Data).AddTypes", Method, 3},
+		{"(*Data).LineReader", Method, 5},
+		{"(*Data).Ranges", Method, 7},
+		{"(*Data).Reader", Method, 0},
+		{"(*Data).Type", Method, 0},
+		{"(*DotDotDotType).Common", Method, 0},
+		{"(*DotDotDotType).Size", Method, 0},
+		{"(*DotDotDotType).String", Method, 0},
+		{"(*Entry).AttrField", Method, 5},
+		{"(*Entry).Val", Method, 0},
+		{"(*EnumType).Common", Method, 0},
+		{"(*EnumType).Size", Method, 0},
+		{"(*EnumType).String", Method, 0},
+		{"(*FloatType).Basic", Method, 0},
+		{"(*FloatType).Common", Method, 0},
+		{"(*FloatType).Size", Method, 0},
+		{"(*FloatType).String", Method, 0},
+		{"(*FuncType).Common", Method, 0},
+		{"(*FuncType).Size", Method, 0},
+		{"(*FuncType).String", Method, 0},
+		{"(*IntType).Basic", Method, 0},
+		{"(*IntType).Common", Method, 0},
+		{"(*IntType).Size", Method, 0},
+		{"(*IntType).String", Method, 0},
+		{"(*LineReader).Files", Method, 14},
+		{"(*LineReader).Next", Method, 5},
+		{"(*LineReader).Reset", Method, 5},
+		{"(*LineReader).Seek", Method, 5},
+		{"(*LineReader).SeekPC", Method, 5},
+		{"(*LineReader).Tell", Method, 5},
+		{"(*PtrType).Common", Method, 0},
+		{"(*PtrType).Size", Method, 0},
+		{"(*PtrType).String", Method, 0},
+		{"(*QualType).Common", Method, 0},
+		{"(*QualType).Size", Method, 0},
+		{"(*QualType).String", Method, 0},
+		{"(*Reader).AddressSize", Method, 5},
+		{"(*Reader).ByteOrder", Method, 14},
+		{"(*Reader).Next", Method, 0},
+		{"(*Reader).Seek", Method, 0},
+		{"(*Reader).SeekPC", Method, 7},
+		{"(*Reader).SkipChildren", Method, 0},
+		{"(*StructType).Common", Method, 0},
+		{"(*StructType).Defn", Method, 0},
+		{"(*StructType).Size", Method, 0},
+		{"(*StructType).String", Method, 0},
+		{"(*TypedefType).Common", Method, 0},
+		{"(*TypedefType).Size", Method, 0},
+		{"(*TypedefType).String", Method, 0},
+		{"(*UcharType).Basic", Method, 0},
+		{"(*UcharType).Common", Method, 0},
+		{"(*UcharType).Size", Method, 0},
+		{"(*UcharType).String", Method, 0},
+		{"(*UintType).Basic", Method, 0},
+		{"(*UintType).Common", Method, 0},
+		{"(*UintType).Size", Method, 0},
+		{"(*UintType).String", Method, 0},
+		{"(*UnspecifiedType).Basic", Method, 4},
+		{"(*UnspecifiedType).Common", Method, 4},
+		{"(*UnspecifiedType).Size", Method, 4},
+		{"(*UnspecifiedType).String", Method, 4},
+		{"(*UnsupportedType).Common", Method, 13},
+		{"(*UnsupportedType).Size", Method, 13},
+		{"(*UnsupportedType).String", Method, 13},
+		{"(*VoidType).Common", Method, 0},
+		{"(*VoidType).Size", Method, 0},
+		{"(*VoidType).String", Method, 0},
+		{"(Attr).GoString", Method, 0},
+		{"(Attr).String", Method, 0},
+		{"(Class).GoString", Method, 5},
+		{"(Class).String", Method, 5},
+		{"(DecodeError).Error", Method, 0},
+		{"(Tag).GoString", Method, 0},
+		{"(Tag).String", Method, 0},
+		{"AddrType", Type, 0},
+		{"AddrType.BasicType", Field, 0},
+		{"ArrayType", Type, 0},
+		{"ArrayType.CommonType", Field, 0},
+		{"ArrayType.Count", Field, 0},
+		{"ArrayType.StrideBitSize", Field, 0},
+		{"ArrayType.Type", Field, 0},
+		{"Attr", Type, 0},
+		{"AttrAbstractOrigin", Const, 0},
+		{"AttrAccessibility", Const, 0},
+		{"AttrAddrBase", Const, 14},
+		{"AttrAddrClass", Const, 0},
+		{"AttrAlignment", Const, 14},
+		{"AttrAllocated", Const, 0},
+		{"AttrArtificial", Const, 0},
+		{"AttrAssociated", Const, 0},
+		{"AttrBaseTypes", Const, 0},
+		{"AttrBinaryScale", Const, 14},
+		{"AttrBitOffset", Const, 0},
+		{"AttrBitSize", Const, 0},
+		{"AttrByteSize", Const, 0},
+		{"AttrCallAllCalls", Const, 14},
+		{"AttrCallAllSourceCalls", Const, 14},
+		{"AttrCallAllTailCalls", Const, 14},
+		{"AttrCallColumn", Const, 0},
+		{"AttrCallDataLocation", Const, 14},
+		{"AttrCallDataValue", Const, 14},
+		{"AttrCallFile", Const, 0},
+		{"AttrCallLine", Const, 0},
+		{"AttrCallOrigin", Const, 14},
+		{"AttrCallPC", Const, 14},
+		{"AttrCallParameter", Const, 14},
+		{"AttrCallReturnPC", Const, 14},
+		{"AttrCallTailCall", Const, 14},
+		{"AttrCallTarget", Const, 14},
+		{"AttrCallTargetClobbered", Const, 14},
+		{"AttrCallValue", Const, 14},
+		{"AttrCalling", Const, 0},
+		{"AttrCommonRef", Const, 0},
+		{"AttrCompDir", Const, 0},
+		{"AttrConstExpr", Const, 14},
+		{"AttrConstValue", Const, 0},
+		{"AttrContainingType", Const, 0},
+		{"AttrCount", Const, 0},
+		{"AttrDataBitOffset", Const, 14},
+		{"AttrDataLocation", Const, 0},
+		{"AttrDataMemberLoc", Const, 0},
+		{"AttrDecimalScale", Const, 14},
+		{"AttrDecimalSign", Const, 14},
+		{"AttrDeclColumn", Const, 0},
+		{"AttrDeclFile", Const, 0},
+		{"AttrDeclLine", Const, 0},
+		{"AttrDeclaration", Const, 0},
+		{"AttrDefaultValue", Const, 0},
+		{"AttrDefaulted", Const, 14},
+		{"AttrDeleted", Const, 14},
+		{"AttrDescription", Const, 0},
+		{"AttrDigitCount", Const, 14},
+		{"AttrDiscr", Const, 0},
+		{"AttrDiscrList", Const, 0},
+		{"AttrDiscrValue", Const, 0},
+		{"AttrDwoName", Const, 14},
+		{"AttrElemental", Const, 14},
+		{"AttrEncoding", Const, 0},
+		{"AttrEndianity", Const, 14},
+		{"AttrEntrypc", Const, 0},
+		{"AttrEnumClass", Const, 14},
+		{"AttrExplicit", Const, 14},
+		{"AttrExportSymbols", Const, 14},
+		{"AttrExtension", Const, 0},
+		{"AttrExternal", Const, 0},
+		{"AttrFrameBase", Const, 0},
+		{"AttrFriend", Const, 0},
+		{"AttrHighpc", Const, 0},
+		{"AttrIdentifierCase", Const, 0},
+		{"AttrImport", Const, 0},
+		{"AttrInline", Const, 0},
+		{"AttrIsOptional", Const, 0},
+		{"AttrLanguage", Const, 0},
+		{"AttrLinkageName", Const, 14},
+		{"AttrLocation", Const, 0},
+		{"AttrLoclistsBase", Const, 14},
+		{"AttrLowerBound", Const, 0},
+		{"AttrLowpc", Const, 0},
+		{"AttrMacroInfo", Const, 0},
+		{"AttrMacros", Const, 14},
+		{"AttrMainSubprogram", Const, 14},
+		{"AttrMutable", Const, 14},
+		{"AttrName", Const, 0},
+		{"AttrNamelistItem", Const, 0},
+		{"AttrNoreturn", Const, 14},
+		{"AttrObjectPointer", Const, 14},
+		{"AttrOrdering", Const, 0},
+		{"AttrPictureString", Const, 14},
+		{"AttrPriority", Const, 0},
+		{"AttrProducer", Const, 0},
+		{"AttrPrototyped", Const, 0},
+		{"AttrPure", Const, 14},
+		{"AttrRanges", Const, 0},
+		{"AttrRank", Const, 14},
+		{"AttrRecursive", Const, 14},
+		{"AttrReference", Const, 14},
+		{"AttrReturnAddr", Const, 0},
+		{"AttrRnglistsBase", Const, 14},
+		{"AttrRvalueReference", Const, 14},
+		{"AttrSegment", Const, 0},
+		{"AttrSibling", Const, 0},
+		{"AttrSignature", Const, 14},
+		{"AttrSmall", Const, 14},
+		{"AttrSpecification", Const, 0},
+		{"AttrStartScope", Const, 0},
+		{"AttrStaticLink", Const, 0},
+		{"AttrStmtList", Const, 0},
+		{"AttrStrOffsetsBase", Const, 14},
+		{"AttrStride", Const, 0},
+		{"AttrStrideSize", Const, 0},
+		{"AttrStringLength", Const, 0},
+		{"AttrStringLengthBitSize", Const, 14},
+		{"AttrStringLengthByteSize", Const, 14},
+		{"AttrThreadsScaled", Const, 14},
+		{"AttrTrampoline", Const, 0},
+		{"AttrType", Const, 0},
+		{"AttrUpperBound", Const, 0},
+		{"AttrUseLocation", Const, 0},
+		{"AttrUseUTF8", Const, 0},
+		{"AttrVarParam", Const, 0},
+		{"AttrVirtuality", Const, 0},
+		{"AttrVisibility", Const, 0},
+		{"AttrVtableElemLoc", Const, 0},
+		{"BasicType", Type, 0},
+		{"BasicType.BitOffset", Field, 0},
+		{"BasicType.BitSize", Field, 0},
+		{"BasicType.CommonType", Field, 0},
+		{"BasicType.DataBitOffset", Field, 18},
+		{"BoolType", Type, 0},
+		{"BoolType.BasicType", Field, 0},
+		{"CharType", Type, 0},
+		{"CharType.BasicType", Field, 0},
+		{"Class", Type, 5},
+		{"ClassAddrPtr", Const, 14},
+		{"ClassAddress", Const, 5},
+		{"ClassBlock", Const, 5},
+		{"ClassConstant", Const, 5},
+		{"ClassExprLoc", Const, 5},
+		{"ClassFlag", Const, 5},
+		{"ClassLinePtr", Const, 5},
+		{"ClassLocList", Const, 14},
+		{"ClassLocListPtr", Const, 5},
+		{"ClassMacPtr", Const, 5},
+		{"ClassRangeListPtr", Const, 5},
+		{"ClassReference", Const, 5},
+		{"ClassReferenceAlt", Const, 5},
+		{"ClassReferenceSig", Const, 5},
+		{"ClassRngList", Const, 14},
+		{"ClassRngListsPtr", Const, 14},
+		{"ClassStrOffsetsPtr", Const, 14},
+		{"ClassString", Const, 5},
+		{"ClassStringAlt", Const, 5},
+		{"ClassUnknown", Const, 6},
+		{"CommonType", Type, 0},
+		{"CommonType.ByteSize", Field, 0},
+		{"CommonType.Name", Field, 0},
+		{"ComplexType", Type, 0},
+		{"ComplexType.BasicType", Field, 0},
+		{"Data", Type, 0},
+		{"DecodeError", Type, 0},
+		{"DecodeError.Err", Field, 0},
+		{"DecodeError.Name", Field, 0},
+		{"DecodeError.Offset", Field, 0},
+		{"DotDotDotType", Type, 0},
+		{"DotDotDotType.CommonType", Field, 0},
+		{"Entry", Type, 0},
+		{"Entry.Children", Field, 0},
+		{"Entry.Field", Field, 0},
+		{"Entry.Offset", Field, 0},
+		{"Entry.Tag", Field, 0},
+		{"EnumType", Type, 0},
+		{"EnumType.CommonType", Field, 0},
+		{"EnumType.EnumName", Field, 0},
+		{"EnumType.Val", Field, 0},
+		{"EnumValue", Type, 0},
+		{"EnumValue.Name", Field, 0},
+		{"EnumValue.Val", Field, 0},
+		{"ErrUnknownPC", Var, 5},
+		{"Field", Type, 0},
+		{"Field.Attr", Field, 0},
+		{"Field.Class", Field, 5},
+		{"Field.Val", Field, 0},
+		{"FloatType", Type, 0},
+		{"FloatType.BasicType", Field, 0},
+		{"FuncType", Type, 0},
+		{"FuncType.CommonType", Field, 0},
+		{"FuncType.ParamType", Field, 0},
+		{"FuncType.ReturnType", Field, 0},
+		{"IntType", Type, 0},
+		{"IntType.BasicType", Field, 0},
+		{"LineEntry", Type, 5},
+		{"LineEntry.Address", Field, 5},
+		{"LineEntry.BasicBlock", Field, 5},
+		{"LineEntry.Column", Field, 5},
+		{"LineEntry.Discriminator", Field, 5},
+		{"LineEntry.EndSequence", Field, 5},
+		{"LineEntry.EpilogueBegin", Field, 5},
+		{"LineEntry.File", Field, 5},
+		{"LineEntry.ISA", Field, 5},
+		{"LineEntry.IsStmt", Field, 5},
+		{"LineEntry.Line", Field, 5},
+		{"LineEntry.OpIndex", Field, 5},
+		{"LineEntry.PrologueEnd", Field, 5},
+		{"LineFile", Type, 5},
+		{"LineFile.Length", Field, 5},
+		{"LineFile.Mtime", Field, 5},
+		{"LineFile.Name", Field, 5},
+		{"LineReader", Type, 5},
+		{"LineReaderPos", Type, 5},
+		{"New", Func, 0},
+		{"Offset", Type, 0},
+		{"PtrType", Type, 0},
+		{"PtrType.CommonType", Field, 0},
+		{"PtrType.Type", Field, 0},
+		{"QualType", Type, 0},
+		{"QualType.CommonType", Field, 0},
+		{"QualType.Qual", Field, 0},
+		{"QualType.Type", Field, 0},
+		{"Reader", Type, 0},
+		{"StructField", Type, 0},
+		{"StructField.BitOffset", Field, 0},
+		{"StructField.BitSize", Field, 0},
+		{"StructField.ByteOffset", Field, 0},
+		{"StructField.ByteSize", Field, 0},
+		{"StructField.DataBitOffset", Field, 18},
+		{"StructField.Name", Field, 0},
+		{"StructField.Type", Field, 0},
+		{"StructType", Type, 0},
+		{"StructType.CommonType", Field, 0},
+		{"StructType.Field", Field, 0},
+		{"StructType.Incomplete", Field, 0},
+		{"StructType.Kind", Field, 0},
+		{"StructType.StructName", Field, 0},
+		{"Tag", Type, 0},
+		{"TagAccessDeclaration", Const, 0},
+		{"TagArrayType", Const, 0},
+		{"TagAtomicType", Const, 14},
+		{"TagBaseType", Const, 0},
+		{"TagCallSite", Const, 14},
+		{"TagCallSiteParameter", Const, 14},
+		{"TagCatchDwarfBlock", Const, 0},
+		{"TagClassType", Const, 0},
+		{"TagCoarrayType", Const, 14},
+		{"TagCommonDwarfBlock", Const, 0},
+		{"TagCommonInclusion", Const, 0},
+		{"TagCompileUnit", Const, 0},
+		{"TagCondition", Const, 3},
+		{"TagConstType", Const, 0},
+		{"TagConstant", Const, 0},
+		{"TagDwarfProcedure", Const, 0},
+		{"TagDynamicType", Const, 14},
+		{"TagEntryPoint", Const, 0},
+		{"TagEnumerationType", Const, 0},
+		{"TagEnumerator", Const, 0},
+		{"TagFileType", Const, 0},
+		{"TagFormalParameter", Const, 0},
+		{"TagFriend", Const, 0},
+		{"TagGenericSubrange", Const, 14},
+		{"TagImmutableType", Const, 14},
+		{"TagImportedDeclaration", Const, 0},
+		{"TagImportedModule", Const, 0},
+		{"TagImportedUnit", Const, 0},
+		{"TagInheritance", Const, 0},
+		{"TagInlinedSubroutine", Const, 0},
+		{"TagInterfaceType", Const, 0},
+		{"TagLabel", Const, 0},
+		{"TagLexDwarfBlock", Const, 0},
+		{"TagMember", Const, 0},
+		{"TagModule", Const, 0},
+		{"TagMutableType", Const, 0},
+		{"TagNamelist", Const, 0},
+		{"TagNamelistItem", Const, 0},
+		{"TagNamespace", Const, 0},
+		{"TagPackedType", Const, 0},
+		{"TagPartialUnit", Const, 0},
+		{"TagPointerType", Const, 0},
+		{"TagPtrToMemberType", Const, 0},
+		{"TagReferenceType", Const, 0},
+		{"TagRestrictType", Const, 0},
+		{"TagRvalueReferenceType", Const, 3},
+		{"TagSetType", Const, 0},
+		{"TagSharedType", Const, 3},
+		{"TagSkeletonUnit", Const, 14},
+		{"TagStringType", Const, 0},
+		{"TagStructType", Const, 0},
+		{"TagSubprogram", Const, 0},
+		{"TagSubrangeType", Const, 0},
+		{"TagSubroutineType", Const, 0},
+		{"TagTemplateAlias", Const, 3},
+		{"TagTemplateTypeParameter", Const, 0},
+		{"TagTemplateValueParameter", Const, 0},
+		{"TagThrownType", Const, 0},
+		{"TagTryDwarfBlock", Const, 0},
+		{"TagTypeUnit", Const, 3},
+		{"TagTypedef", Const, 0},
+		{"TagUnionType", Const, 0},
+		{"TagUnspecifiedParameters", Const, 0},
+		{"TagUnspecifiedType", Const, 0},
+		{"TagVariable", Const, 0},
+		{"TagVariant", Const, 0},
+		{"TagVariantPart", Const, 0},
+		{"TagVolatileType", Const, 0},
+		{"TagWithStmt", Const, 0},
+		{"Type", Type, 0},
+		{"TypedefType", Type, 0},
+		{"TypedefType.CommonType", Field, 0},
+		{"TypedefType.Type", Field, 0},
+		{"UcharType", Type, 0},
+		{"UcharType.BasicType", Field, 0},
+		{"UintType", Type, 0},
+		{"UintType.BasicType", Field, 0},
+		{"UnspecifiedType", Type, 4},
+		{"UnspecifiedType.BasicType", Field, 4},
+		{"UnsupportedType", Type, 13},
+		{"UnsupportedType.CommonType", Field, 13},
+		{"UnsupportedType.Tag", Field, 13},
+		{"VoidType", Type, 0},
+		{"VoidType.CommonType", Field, 0},
+	},
+	"debug/elf": {
+		{"(*File).Close", Method, 0},
+		{"(*File).DWARF", Method, 0},
+		{"(*File).DynString", Method, 1},
+		{"(*File).DynValue", Method, 21},
+		{"(*File).DynamicSymbols", Method, 4},
+		{"(*File).ImportedLibraries", Method, 0},
+		{"(*File).ImportedSymbols", Method, 0},
+		{"(*File).Section", Method, 0},
+		{"(*File).SectionByType", Method, 0},
+		{"(*File).Symbols", Method, 0},
+		{"(*FormatError).Error", Method, 0},
+		{"(*Prog).Open", Method, 0},
+		{"(*Section).Data", Method, 0},
+		{"(*Section).Open", Method, 0},
+		{"(Class).GoString", Method, 0},
+		{"(Class).String", Method, 0},
+		{"(CompressionType).GoString", Method, 6},
+		{"(CompressionType).String", Method, 6},
+		{"(Data).GoString", Method, 0},
+		{"(Data).String", Method, 0},
+		{"(DynFlag).GoString", Method, 0},
+		{"(DynFlag).String", Method, 0},
+		{"(DynFlag1).GoString", Method, 21},
+		{"(DynFlag1).String", Method, 21},
+		{"(DynTag).GoString", Method, 0},
+		{"(DynTag).String", Method, 0},
+		{"(Machine).GoString", Method, 0},
+		{"(Machine).String", Method, 0},
+		{"(NType).GoString", Method, 0},
+		{"(NType).String", Method, 0},
+		{"(OSABI).GoString", Method, 0},
+		{"(OSABI).String", Method, 0},
+		{"(Prog).ReadAt", Method, 0},
+		{"(ProgFlag).GoString", Method, 0},
+		{"(ProgFlag).String", Method, 0},
+		{"(ProgType).GoString", Method, 0},
+		{"(ProgType).String", Method, 0},
+		{"(R_386).GoString", Method, 0},
+		{"(R_386).String", Method, 0},
+		{"(R_390).GoString", Method, 7},
+		{"(R_390).String", Method, 7},
+		{"(R_AARCH64).GoString", Method, 4},
+		{"(R_AARCH64).String", Method, 4},
+		{"(R_ALPHA).GoString", Method, 0},
+		{"(R_ALPHA).String", Method, 0},
+		{"(R_ARM).GoString", Method, 0},
+		{"(R_ARM).String", Method, 0},
+		{"(R_LARCH).GoString", Method, 19},
+		{"(R_LARCH).String", Method, 19},
+		{"(R_MIPS).GoString", Method, 6},
+		{"(R_MIPS).String", Method, 6},
+		{"(R_PPC).GoString", Method, 0},
+		{"(R_PPC).String", Method, 0},
+		{"(R_PPC64).GoString", Method, 5},
+		{"(R_PPC64).String", Method, 5},
+		{"(R_RISCV).GoString", Method, 11},
+		{"(R_RISCV).String", Method, 11},
+		{"(R_SPARC).GoString", Method, 0},
+		{"(R_SPARC).String", Method, 0},
+		{"(R_X86_64).GoString", Method, 0},
+		{"(R_X86_64).String", Method, 0},
+		{"(Section).ReadAt", Method, 0},
+		{"(SectionFlag).GoString", Method, 0},
+		{"(SectionFlag).String", Method, 0},
+		{"(SectionIndex).GoString", Method, 0},
+		{"(SectionIndex).String", Method, 0},
+		{"(SectionType).GoString", Method, 0},
+		{"(SectionType).String", Method, 0},
+		{"(SymBind).GoString", Method, 0},
+		{"(SymBind).String", Method, 0},
+		{"(SymType).GoString", Method, 0},
+		{"(SymType).String", Method, 0},
+		{"(SymVis).GoString", Method, 0},
+		{"(SymVis).String", Method, 0},
+		{"(Type).GoString", Method, 0},
+		{"(Type).String", Method, 0},
+		{"(Version).GoString", Method, 0},
+		{"(Version).String", Method, 0},
+		{"ARM_MAGIC_TRAMP_NUMBER", Const, 0},
+		{"COMPRESS_HIOS", Const, 6},
+		{"COMPRESS_HIPROC", Const, 6},
+		{"COMPRESS_LOOS", Const, 6},
+		{"COMPRESS_LOPROC", Const, 6},
+		{"COMPRESS_ZLIB", Const, 6},
+		{"COMPRESS_ZSTD", Const, 21},
+		{"Chdr32", Type, 6},
+		{"Chdr32.Addralign", Field, 6},
+		{"Chdr32.Size", Field, 6},
+		{"Chdr32.Type", Field, 6},
+		{"Chdr64", Type, 6},
+		{"Chdr64.Addralign", Field, 6},
+		{"Chdr64.Size", Field, 6},
+		{"Chdr64.Type", Field, 6},
+		{"Class", Type, 0},
+		{"CompressionType", Type, 6},
+		{"DF_1_CONFALT", Const, 21},
+		{"DF_1_DIRECT", Const, 21},
+		{"DF_1_DISPRELDNE", Const, 21},
+		{"DF_1_DISPRELPND", Const, 21},
+		{"DF_1_EDITED", Const, 21},
+		{"DF_1_ENDFILTEE", Const, 21},
+		{"DF_1_GLOBAL", Const, 21},
+		{"DF_1_GLOBAUDIT", Const, 21},
+		{"DF_1_GROUP", Const, 21},
+		{"DF_1_IGNMULDEF", Const, 21},
+		{"DF_1_INITFIRST", Const, 21},
+		{"DF_1_INTERPOSE", Const, 21},
+		{"DF_1_KMOD", Const, 21},
+		{"DF_1_LOADFLTR", Const, 21},
+		{"DF_1_NOCOMMON", Const, 21},
+		{"DF_1_NODEFLIB", Const, 21},
+		{"DF_1_NODELETE", Const, 21},
+		{"DF_1_NODIRECT", Const, 21},
+		{"DF_1_NODUMP", Const, 21},
+		{"DF_1_NOHDR", Const, 21},
+		{"DF_1_NOKSYMS", Const, 21},
+		{"DF_1_NOOPEN", Const, 21},
+		{"DF_1_NORELOC", Const, 21},
+		{"DF_1_NOW", Const, 21},
+		{"DF_1_ORIGIN", Const, 21},
+		{"DF_1_PIE", Const, 21},
+		{"DF_1_SINGLETON", Const, 21},
+		{"DF_1_STUB", Const, 21},
+		{"DF_1_SYMINTPOSE", Const, 21},
+		{"DF_1_TRANS", Const, 21},
+		{"DF_1_WEAKFILTER", Const, 21},
+		{"DF_BIND_NOW", Const, 0},
+		{"DF_ORIGIN", Const, 0},
+		{"DF_STATIC_TLS", Const, 0},
+		{"DF_SYMBOLIC", Const, 0},
+		{"DF_TEXTREL", Const, 0},
+		{"DT_ADDRRNGHI", Const, 16},
+		{"DT_ADDRRNGLO", Const, 16},
+		{"DT_AUDIT", Const, 16},
+		{"DT_AUXILIARY", Const, 16},
+		{"DT_BIND_NOW", Const, 0},
+		{"DT_CHECKSUM", Const, 16},
+		{"DT_CONFIG", Const, 16},
+		{"DT_DEBUG", Const, 0},
+		{"DT_DEPAUDIT", Const, 16},
+		{"DT_ENCODING", Const, 0},
+		{"DT_FEATURE", Const, 16},
+		{"DT_FILTER", Const, 16},
+		{"DT_FINI", Const, 0},
+		{"DT_FINI_ARRAY", Const, 0},
+		{"DT_FINI_ARRAYSZ", Const, 0},
+		{"DT_FLAGS", Const, 0},
+		{"DT_FLAGS_1", Const, 16},
+		{"DT_GNU_CONFLICT", Const, 16},
+		{"DT_GNU_CONFLICTSZ", Const, 16},
+		{"DT_GNU_HASH", Const, 16},
+		{"DT_GNU_LIBLIST", Const, 16},
+		{"DT_GNU_LIBLISTSZ", Const, 16},
+		{"DT_GNU_PRELINKED", Const, 16},
+		{"DT_HASH", Const, 0},
+		{"DT_HIOS", Const, 0},
+		{"DT_HIPROC", Const, 0},
+		{"DT_INIT", Const, 0},
+		{"DT_INIT_ARRAY", Const, 0},
+		{"DT_INIT_ARRAYSZ", Const, 0},
+		{"DT_JMPREL", Const, 0},
+		{"DT_LOOS", Const, 0},
+		{"DT_LOPROC", Const, 0},
+		{"DT_MIPS_AUX_DYNAMIC", Const, 16},
+		{"DT_MIPS_BASE_ADDRESS", Const, 16},
+		{"DT_MIPS_COMPACT_SIZE", Const, 16},
+		{"DT_MIPS_CONFLICT", Const, 16},
+		{"DT_MIPS_CONFLICTNO", Const, 16},
+		{"DT_MIPS_CXX_FLAGS", Const, 16},
+		{"DT_MIPS_DELTA_CLASS", Const, 16},
+		{"DT_MIPS_DELTA_CLASSSYM", Const, 16},
+		{"DT_MIPS_DELTA_CLASSSYM_NO", Const, 16},
+		{"DT_MIPS_DELTA_CLASS_NO", Const, 16},
+		{"DT_MIPS_DELTA_INSTANCE", Const, 16},
+		{"DT_MIPS_DELTA_INSTANCE_NO", Const, 16},
+		{"DT_MIPS_DELTA_RELOC", Const, 16},
+		{"DT_MIPS_DELTA_RELOC_NO", Const, 16},
+		{"DT_MIPS_DELTA_SYM", Const, 16},
+		{"DT_MIPS_DELTA_SYM_NO", Const, 16},
+		{"DT_MIPS_DYNSTR_ALIGN", Const, 16},
+		{"DT_MIPS_FLAGS", Const, 16},
+		{"DT_MIPS_GOTSYM", Const, 16},
+		{"DT_MIPS_GP_VALUE", Const, 16},
+		{"DT_MIPS_HIDDEN_GOTIDX", Const, 16},
+		{"DT_MIPS_HIPAGENO", Const, 16},
+		{"DT_MIPS_ICHECKSUM", Const, 16},
+		{"DT_MIPS_INTERFACE", Const, 16},
+		{"DT_MIPS_INTERFACE_SIZE", Const, 16},
+		{"DT_MIPS_IVERSION", Const, 16},
+		{"DT_MIPS_LIBLIST", Const, 16},
+		{"DT_MIPS_LIBLISTNO", Const, 16},
+		{"DT_MIPS_LOCALPAGE_GOTIDX", Const, 16},
+		{"DT_MIPS_LOCAL_GOTIDX", Const, 16},
+		{"DT_MIPS_LOCAL_GOTNO", Const, 16},
+		{"DT_MIPS_MSYM", Const, 16},
+		{"DT_MIPS_OPTIONS", Const, 16},
+		{"DT_MIPS_PERF_SUFFIX", Const, 16},
+		{"DT_MIPS_PIXIE_INIT", Const, 16},
+		{"DT_MIPS_PLTGOT", Const, 16},
+		{"DT_MIPS_PROTECTED_GOTIDX", Const, 16},
+		{"DT_MIPS_RLD_MAP", Const, 16},
+		{"DT_MIPS_RLD_MAP_REL", Const, 16},
+		{"DT_MIPS_RLD_TEXT_RESOLVE_ADDR", Const, 16},
+		{"DT_MIPS_RLD_VERSION", Const, 16},
+		{"DT_MIPS_RWPLT", Const, 16},
+		{"DT_MIPS_SYMBOL_LIB", Const, 16},
+		{"DT_MIPS_SYMTABNO", Const, 16},
+		{"DT_MIPS_TIME_STAMP", Const, 16},
+		{"DT_MIPS_UNREFEXTNO", Const, 16},
+		{"DT_MOVEENT", Const, 16},
+		{"DT_MOVESZ", Const, 16},
+		{"DT_MOVETAB", Const, 16},
+		{"DT_NEEDED", Const, 0},
+		{"DT_NULL", Const, 0},
+		{"DT_PLTGOT", Const, 0},
+		{"DT_PLTPAD", Const, 16},
+		{"DT_PLTPADSZ", Const, 16},
+		{"DT_PLTREL", Const, 0},
+		{"DT_PLTRELSZ", Const, 0},
+		{"DT_POSFLAG_1", Const, 16},
+		{"DT_PPC64_GLINK", Const, 16},
+		{"DT_PPC64_OPD", Const, 16},
+		{"DT_PPC64_OPDSZ", Const, 16},
+		{"DT_PPC64_OPT", Const, 16},
+		{"DT_PPC_GOT", Const, 16},
+		{"DT_PPC_OPT", Const, 16},
+		{"DT_PREINIT_ARRAY", Const, 0},
+		{"DT_PREINIT_ARRAYSZ", Const, 0},
+		{"DT_REL", Const, 0},
+		{"DT_RELA", Const, 0},
+		{"DT_RELACOUNT", Const, 16},
+		{"DT_RELAENT", Const, 0},
+		{"DT_RELASZ", Const, 0},
+		{"DT_RELCOUNT", Const, 16},
+		{"DT_RELENT", Const, 0},
+		{"DT_RELSZ", Const, 0},
+		{"DT_RPATH", Const, 0},
+		{"DT_RUNPATH", Const, 0},
+		{"DT_SONAME", Const, 0},
+		{"DT_SPARC_REGISTER", Const, 16},
+		{"DT_STRSZ", Const, 0},
+		{"DT_STRTAB", Const, 0},
+		{"DT_SYMBOLIC", Const, 0},
+		{"DT_SYMENT", Const, 0},
+		{"DT_SYMINENT", Const, 16},
+		{"DT_SYMINFO", Const, 16},
+		{"DT_SYMINSZ", Const, 16},
+		{"DT_SYMTAB", Const, 0},
+		{"DT_SYMTAB_SHNDX", Const, 16},
+		{"DT_TEXTREL", Const, 0},
+		{"DT_TLSDESC_GOT", Const, 16},
+		{"DT_TLSDESC_PLT", Const, 16},
+		{"DT_USED", Const, 16},
+		{"DT_VALRNGHI", Const, 16},
+		{"DT_VALRNGLO", Const, 16},
+		{"DT_VERDEF", Const, 16},
+		{"DT_VERDEFNUM", Const, 16},
+		{"DT_VERNEED", Const, 0},
+		{"DT_VERNEEDNUM", Const, 0},
+		{"DT_VERSYM", Const, 0},
+		{"Data", Type, 0},
+		{"Dyn32", Type, 0},
+		{"Dyn32.Tag", Field, 0},
+		{"Dyn32.Val", Field, 0},
+		{"Dyn64", Type, 0},
+		{"Dyn64.Tag", Field, 0},
+		{"Dyn64.Val", Field, 0},
+		{"DynFlag", Type, 0},
+		{"DynFlag1", Type, 21},
+		{"DynTag", Type, 0},
+		{"EI_ABIVERSION", Const, 0},
+		{"EI_CLASS", Const, 0},
+		{"EI_DATA", Const, 0},
+		{"EI_NIDENT", Const, 0},
+		{"EI_OSABI", Const, 0},
+		{"EI_PAD", Const, 0},
+		{"EI_VERSION", Const, 0},
+		{"ELFCLASS32", Const, 0},
+		{"ELFCLASS64", Const, 0},
+		{"ELFCLASSNONE", Const, 0},
+		{"ELFDATA2LSB", Const, 0},
+		{"ELFDATA2MSB", Const, 0},
+		{"ELFDATANONE", Const, 0},
+		{"ELFMAG", Const, 0},
+		{"ELFOSABI_86OPEN", Const, 0},
+		{"ELFOSABI_AIX", Const, 0},
+		{"ELFOSABI_ARM", Const, 0},
+		{"ELFOSABI_AROS", Const, 11},
+		{"ELFOSABI_CLOUDABI", Const, 11},
+		{"ELFOSABI_FENIXOS", Const, 11},
+		{"ELFOSABI_FREEBSD", Const, 0},
+		{"ELFOSABI_HPUX", Const, 0},
+		{"ELFOSABI_HURD", Const, 0},
+		{"ELFOSABI_IRIX", Const, 0},
+		{"ELFOSABI_LINUX", Const, 0},
+		{"ELFOSABI_MODESTO", Const, 0},
+		{"ELFOSABI_NETBSD", Const, 0},
+		{"ELFOSABI_NONE", Const, 0},
+		{"ELFOSABI_NSK", Const, 0},
+		{"ELFOSABI_OPENBSD", Const, 0},
+		{"ELFOSABI_OPENVMS", Const, 0},
+		{"ELFOSABI_SOLARIS", Const, 0},
+		{"ELFOSABI_STANDALONE", Const, 0},
+		{"ELFOSABI_TRU64", Const, 0},
+		{"EM_386", Const, 0},
+		{"EM_486", Const, 0},
+		{"EM_56800EX", Const, 11},
+		{"EM_68HC05", Const, 11},
+		{"EM_68HC08", Const, 11},
+		{"EM_68HC11", Const, 11},
+		{"EM_68HC12", Const, 0},
+		{"EM_68HC16", Const, 11},
+		{"EM_68K", Const, 0},
+		{"EM_78KOR", Const, 11},
+		{"EM_8051", Const, 11},
+		{"EM_860", Const, 0},
+		{"EM_88K", Const, 0},
+		{"EM_960", Const, 0},
+		{"EM_AARCH64", Const, 4},
+		{"EM_ALPHA", Const, 0},
+		{"EM_ALPHA_STD", Const, 0},
+		{"EM_ALTERA_NIOS2", Const, 11},
+		{"EM_AMDGPU", Const, 11},
+		{"EM_ARC", Const, 0},
+		{"EM_ARCA", Const, 11},
+		{"EM_ARC_COMPACT", Const, 11},
+		{"EM_ARC_COMPACT2", Const, 11},
+		{"EM_ARM", Const, 0},
+		{"EM_AVR", Const, 11},
+		{"EM_AVR32", Const, 11},
+		{"EM_BA1", Const, 11},
+		{"EM_BA2", Const, 11},
+		{"EM_BLACKFIN", Const, 11},
+		{"EM_BPF", Const, 11},
+		{"EM_C166", Const, 11},
+		{"EM_CDP", Const, 11},
+		{"EM_CE", Const, 11},
+		{"EM_CLOUDSHIELD", Const, 11},
+		{"EM_COGE", Const, 11},
+		{"EM_COLDFIRE", Const, 0},
+		{"EM_COOL", Const, 11},
+		{"EM_COREA_1ST", Const, 11},
+		{"EM_COREA_2ND", Const, 11},
+		{"EM_CR", Const, 11},
+		{"EM_CR16", Const, 11},
+		{"EM_CRAYNV2", Const, 11},
+		{"EM_CRIS", Const, 11},
+		{"EM_CRX", Const, 11},
+		{"EM_CSR_KALIMBA", Const, 11},
+		{"EM_CUDA", Const, 11},
+		{"EM_CYPRESS_M8C", Const, 11},
+		{"EM_D10V", Const, 11},
+		{"EM_D30V", Const, 11},
+		{"EM_DSP24", Const, 11},
+		{"EM_DSPIC30F", Const, 11},
+		{"EM_DXP", Const, 11},
+		{"EM_ECOG1", Const, 11},
+		{"EM_ECOG16", Const, 11},
+		{"EM_ECOG1X", Const, 11},
+		{"EM_ECOG2", Const, 11},
+		{"EM_ETPU", Const, 11},
+		{"EM_EXCESS", Const, 11},
+		{"EM_F2MC16", Const, 11},
+		{"EM_FIREPATH", Const, 11},
+		{"EM_FR20", Const, 0},
+		{"EM_FR30", Const, 11},
+		{"EM_FT32", Const, 11},
+		{"EM_FX66", Const, 11},
+		{"EM_H8S", Const, 0},
+		{"EM_H8_300", Const, 0},
+		{"EM_H8_300H", Const, 0},
+		{"EM_H8_500", Const, 0},
+		{"EM_HUANY", Const, 11},
+		{"EM_IA_64", Const, 0},
+		{"EM_INTEL205", Const, 11},
+		{"EM_INTEL206", Const, 11},
+		{"EM_INTEL207", Const, 11},
+		{"EM_INTEL208", Const, 11},
+		{"EM_INTEL209", Const, 11},
+		{"EM_IP2K", Const, 11},
+		{"EM_JAVELIN", Const, 11},
+		{"EM_K10M", Const, 11},
+		{"EM_KM32", Const, 11},
+		{"EM_KMX16", Const, 11},
+		{"EM_KMX32", Const, 11},
+		{"EM_KMX8", Const, 11},
+		{"EM_KVARC", Const, 11},
+		{"EM_L10M", Const, 11},
+		{"EM_LANAI", Const, 11},
+		{"EM_LATTICEMICO32", Const, 11},
+		{"EM_LOONGARCH", Const, 19},
+		{"EM_M16C", Const, 11},
+		{"EM_M32", Const, 0},
+		{"EM_M32C", Const, 11},
+		{"EM_M32R", Const, 11},
+		{"EM_MANIK", Const, 11},
+		{"EM_MAX", Const, 11},
+		{"EM_MAXQ30", Const, 11},
+		{"EM_MCHP_PIC", Const, 11},
+		{"EM_MCST_ELBRUS", Const, 11},
+		{"EM_ME16", Const, 0},
+		{"EM_METAG", Const, 11},
+		{"EM_MICROBLAZE", Const, 11},
+		{"EM_MIPS", Const, 0},
+		{"EM_MIPS_RS3_LE", Const, 0},
+		{"EM_MIPS_RS4_BE", Const, 0},
+		{"EM_MIPS_X", Const, 0},
+		{"EM_MMA", Const, 0},
+		{"EM_MMDSP_PLUS", Const, 11},
+		{"EM_MMIX", Const, 11},
+		{"EM_MN10200", Const, 11},
+		{"EM_MN10300", Const, 11},
+		{"EM_MOXIE", Const, 11},
+		{"EM_MSP430", Const, 11},
+		{"EM_NCPU", Const, 0},
+		{"EM_NDR1", Const, 0},
+		{"EM_NDS32", Const, 11},
+		{"EM_NONE", Const, 0},
+		{"EM_NORC", Const, 11},
+		{"EM_NS32K", Const, 11},
+		{"EM_OPEN8", Const, 11},
+		{"EM_OPENRISC", Const, 11},
+		{"EM_PARISC", Const, 0},
+		{"EM_PCP", Const, 0},
+		{"EM_PDP10", Const, 11},
+		{"EM_PDP11", Const, 11},
+		{"EM_PDSP", Const, 11},
+		{"EM_PJ", Const, 11},
+		{"EM_PPC", Const, 0},
+		{"EM_PPC64", Const, 0},
+		{"EM_PRISM", Const, 11},
+		{"EM_QDSP6", Const, 11},
+		{"EM_R32C", Const, 11},
+		{"EM_RCE", Const, 0},
+		{"EM_RH32", Const, 0},
+		{"EM_RISCV", Const, 11},
+		{"EM_RL78", Const, 11},
+		{"EM_RS08", Const, 11},
+		{"EM_RX", Const, 11},
+		{"EM_S370", Const, 0},
+		{"EM_S390", Const, 0},
+		{"EM_SCORE7", Const, 11},
+		{"EM_SEP", Const, 11},
+		{"EM_SE_C17", Const, 11},
+		{"EM_SE_C33", Const, 11},
+		{"EM_SH", Const, 0},
+		{"EM_SHARC", Const, 11},
+		{"EM_SLE9X", Const, 11},
+		{"EM_SNP1K", Const, 11},
+		{"EM_SPARC", Const, 0},
+		{"EM_SPARC32PLUS", Const, 0},
+		{"EM_SPARCV9", Const, 0},
+		{"EM_ST100", Const, 0},
+		{"EM_ST19", Const, 11},
+		{"EM_ST200", Const, 11},
+		{"EM_ST7", Const, 11},
+		{"EM_ST9PLUS", Const, 11},
+		{"EM_STARCORE", Const, 0},
+		{"EM_STM8", Const, 11},
+		{"EM_STXP7X", Const, 11},
+		{"EM_SVX", Const, 11},
+		{"EM_TILE64", Const, 11},
+		{"EM_TILEGX", Const, 11},
+		{"EM_TILEPRO", Const, 11},
+		{"EM_TINYJ", Const, 0},
+		{"EM_TI_ARP32", Const, 11},
+		{"EM_TI_C2000", Const, 11},
+		{"EM_TI_C5500", Const, 11},
+		{"EM_TI_C6000", Const, 11},
+		{"EM_TI_PRU", Const, 11},
+		{"EM_TMM_GPP", Const, 11},
+		{"EM_TPC", Const, 11},
+		{"EM_TRICORE", Const, 0},
+		{"EM_TRIMEDIA", Const, 11},
+		{"EM_TSK3000", Const, 11},
+		{"EM_UNICORE", Const, 11},
+		{"EM_V800", Const, 0},
+		{"EM_V850", Const, 11},
+		{"EM_VAX", Const, 11},
+		{"EM_VIDEOCORE", Const, 11},
+		{"EM_VIDEOCORE3", Const, 11},
+		{"EM_VIDEOCORE5", Const, 11},
+		{"EM_VISIUM", Const, 11},
+		{"EM_VPP500", Const, 0},
+		{"EM_X86_64", Const, 0},
+		{"EM_XCORE", Const, 11},
+		{"EM_XGATE", Const, 11},
+		{"EM_XIMO16", Const, 11},
+		{"EM_XTENSA", Const, 11},
+		{"EM_Z80", Const, 11},
+		{"EM_ZSP", Const, 11},
+		{"ET_CORE", Const, 0},
+		{"ET_DYN", Const, 0},
+		{"ET_EXEC", Const, 0},
+		{"ET_HIOS", Const, 0},
+		{"ET_HIPROC", Const, 0},
+		{"ET_LOOS", Const, 0},
+		{"ET_LOPROC", Const, 0},
+		{"ET_NONE", Const, 0},
+		{"ET_REL", Const, 0},
+		{"EV_CURRENT", Const, 0},
+		{"EV_NONE", Const, 0},
+		{"ErrNoSymbols", Var, 4},
+		{"File", Type, 0},
+		{"File.FileHeader", Field, 0},
+		{"File.Progs", Field, 0},
+		{"File.Sections", Field, 0},
+		{"FileHeader", Type, 0},
+		{"FileHeader.ABIVersion", Field, 0},
+		{"FileHeader.ByteOrder", Field, 0},
+		{"FileHeader.Class", Field, 0},
+		{"FileHeader.Data", Field, 0},
+		{"FileHeader.Entry", Field, 1},
+		{"FileHeader.Machine", Field, 0},
+		{"FileHeader.OSABI", Field, 0},
+		{"FileHeader.Type", Field, 0},
+		{"FileHeader.Version", Field, 0},
+		{"FormatError", Type, 0},
+		{"Header32", Type, 0},
+		{"Header32.Ehsize", Field, 0},
+		{"Header32.Entry", Field, 0},
+		{"Header32.Flags", Field, 0},
+		{"Header32.Ident", Field, 0},
+		{"Header32.Machine", Field, 0},
+		{"Header32.Phentsize", Field, 0},
+		{"Header32.Phnum", Field, 0},
+		{"Header32.Phoff", Field, 0},
+		{"Header32.Shentsize", Field, 0},
+		{"Header32.Shnum", Field, 0},
+		{"Header32.Shoff", Field, 0},
+		{"Header32.Shstrndx", Field, 0},
+		{"Header32.Type", Field, 0},
+		{"Header32.Version", Field, 0},
+		{"Header64", Type, 0},
+		{"Header64.Ehsize", Field, 0},
+		{"Header64.Entry", Field, 0},
+		{"Header64.Flags", Field, 0},
+		{"Header64.Ident", Field, 0},
+		{"Header64.Machine", Field, 0},
+		{"Header64.Phentsize", Field, 0},
+		{"Header64.Phnum", Field, 0},
+		{"Header64.Phoff", Field, 0},
+		{"Header64.Shentsize", Field, 0},
+		{"Header64.Shnum", Field, 0},
+		{"Header64.Shoff", Field, 0},
+		{"Header64.Shstrndx", Field, 0},
+		{"Header64.Type", Field, 0},
+		{"Header64.Version", Field, 0},
+		{"ImportedSymbol", Type, 0},
+		{"ImportedSymbol.Library", Field, 0},
+		{"ImportedSymbol.Name", Field, 0},
+		{"ImportedSymbol.Version", Field, 0},
+		{"Machine", Type, 0},
+		{"NT_FPREGSET", Const, 0},
+		{"NT_PRPSINFO", Const, 0},
+		{"NT_PRSTATUS", Const, 0},
+		{"NType", Type, 0},
+		{"NewFile", Func, 0},
+		{"OSABI", Type, 0},
+		{"Open", Func, 0},
+		{"PF_MASKOS", Const, 0},
+		{"PF_MASKPROC", Const, 0},
+		{"PF_R", Const, 0},
+		{"PF_W", Const, 0},
+		{"PF_X", Const, 0},
+		{"PT_AARCH64_ARCHEXT", Const, 16},
+		{"PT_AARCH64_UNWIND", Const, 16},
+		{"PT_ARM_ARCHEXT", Const, 16},
+		{"PT_ARM_EXIDX", Const, 16},
+		{"PT_DYNAMIC", Const, 0},
+		{"PT_GNU_EH_FRAME", Const, 16},
+		{"PT_GNU_MBIND_HI", Const, 16},
+		{"PT_GNU_MBIND_LO", Const, 16},
+		{"PT_GNU_PROPERTY", Const, 16},
+		{"PT_GNU_RELRO", Const, 16},
+		{"PT_GNU_STACK", Const, 16},
+		{"PT_HIOS", Const, 0},
+		{"PT_HIPROC", Const, 0},
+		{"PT_INTERP", Const, 0},
+		{"PT_LOAD", Const, 0},
+		{"PT_LOOS", Const, 0},
+		{"PT_LOPROC", Const, 0},
+		{"PT_MIPS_ABIFLAGS", Const, 16},
+		{"PT_MIPS_OPTIONS", Const, 16},
+		{"PT_MIPS_REGINFO", Const, 16},
+		{"PT_MIPS_RTPROC", Const, 16},
+		{"PT_NOTE", Const, 0},
+		{"PT_NULL", Const, 0},
+		{"PT_OPENBSD_BOOTDATA", Const, 16},
+		{"PT_OPENBSD_RANDOMIZE", Const, 16},
+		{"PT_OPENBSD_WXNEEDED", Const, 16},
+		{"PT_PAX_FLAGS", Const, 16},
+		{"PT_PHDR", Const, 0},
+		{"PT_S390_PGSTE", Const, 16},
+		{"PT_SHLIB", Const, 0},
+		{"PT_SUNWSTACK", Const, 16},
+		{"PT_SUNW_EH_FRAME", Const, 16},
+		{"PT_TLS", Const, 0},
+		{"Prog", Type, 0},
+		{"Prog.ProgHeader", Field, 0},
+		{"Prog.ReaderAt", Field, 0},
+		{"Prog32", Type, 0},
+		{"Prog32.Align", Field, 0},
+		{"Prog32.Filesz", Field, 0},
+		{"Prog32.Flags", Field, 0},
+		{"Prog32.Memsz", Field, 0},
+		{"Prog32.Off", Field, 0},
+		{"Prog32.Paddr", Field, 0},
+		{"Prog32.Type", Field, 0},
+		{"Prog32.Vaddr", Field, 0},
+		{"Prog64", Type, 0},
+		{"Prog64.Align", Field, 0},
+		{"Prog64.Filesz", Field, 0},
+		{"Prog64.Flags", Field, 0},
+		{"Prog64.Memsz", Field, 0},
+		{"Prog64.Off", Field, 0},
+		{"Prog64.Paddr", Field, 0},
+		{"Prog64.Type", Field, 0},
+		{"Prog64.Vaddr", Field, 0},
+		{"ProgFlag", Type, 0},
+		{"ProgHeader", Type, 0},
+		{"ProgHeader.Align", Field, 0},
+		{"ProgHeader.Filesz", Field, 0},
+		{"ProgHeader.Flags", Field, 0},
+		{"ProgHeader.Memsz", Field, 0},
+		{"ProgHeader.Off", Field, 0},
+		{"ProgHeader.Paddr", Field, 0},
+		{"ProgHeader.Type", Field, 0},
+		{"ProgHeader.Vaddr", Field, 0},
+		{"ProgType", Type, 0},
+		{"R_386", Type, 0},
+		{"R_386_16", Const, 10},
+		{"R_386_32", Const, 0},
+		{"R_386_32PLT", Const, 10},
+		{"R_386_8", Const, 10},
+		{"R_386_COPY", Const, 0},
+		{"R_386_GLOB_DAT", Const, 0},
+		{"R_386_GOT32", Const, 0},
+		{"R_386_GOT32X", Const, 10},
+		{"R_386_GOTOFF", Const, 0},
+		{"R_386_GOTPC", Const, 0},
+		{"R_386_IRELATIVE", Const, 10},
+		{"R_386_JMP_SLOT", Const, 0},
+		{"R_386_NONE", Const, 0},
+		{"R_386_PC16", Const, 10},
+		{"R_386_PC32", Const, 0},
+		{"R_386_PC8", Const, 10},
+		{"R_386_PLT32", Const, 0},
+		{"R_386_RELATIVE", Const, 0},
+		{"R_386_SIZE32", Const, 10},
+		{"R_386_TLS_DESC", Const, 10},
+		{"R_386_TLS_DESC_CALL", Const, 10},
+		{"R_386_TLS_DTPMOD32", Const, 0},
+		{"R_386_TLS_DTPOFF32", Const, 0},
+		{"R_386_TLS_GD", Const, 0},
+		{"R_386_TLS_GD_32", Const, 0},
+		{"R_386_TLS_GD_CALL", Const, 0},
+		{"R_386_TLS_GD_POP", Const, 0},
+		{"R_386_TLS_GD_PUSH", Const, 0},
+		{"R_386_TLS_GOTDESC", Const, 10},
+		{"R_386_TLS_GOTIE", Const, 0},
+		{"R_386_TLS_IE", Const, 0},
+		{"R_386_TLS_IE_32", Const, 0},
+		{"R_386_TLS_LDM", Const, 0},
+		{"R_386_TLS_LDM_32", Const, 0},
+		{"R_386_TLS_LDM_CALL", Const, 0},
+		{"R_386_TLS_LDM_POP", Const, 0},
+		{"R_386_TLS_LDM_PUSH", Const, 0},
+		{"R_386_TLS_LDO_32", Const, 0},
+		{"R_386_TLS_LE", Const, 0},
+		{"R_386_TLS_LE_32", Const, 0},
+		{"R_386_TLS_TPOFF", Const, 0},
+		{"R_386_TLS_TPOFF32", Const, 0},
+		{"R_390", Type, 7},
+		{"R_390_12", Const, 7},
+		{"R_390_16", Const, 7},
+		{"R_390_20", Const, 7},
+		{"R_390_32", Const, 7},
+		{"R_390_64", Const, 7},
+		{"R_390_8", Const, 7},
+		{"R_390_COPY", Const, 7},
+		{"R_390_GLOB_DAT", Const, 7},
+		{"R_390_GOT12", Const, 7},
+		{"R_390_GOT16", Const, 7},
+		{"R_390_GOT20", Const, 7},
+		{"R_390_GOT32", Const, 7},
+		{"R_390_GOT64", Const, 7},
+		{"R_390_GOTENT", Const, 7},
+		{"R_390_GOTOFF", Const, 7},
+		{"R_390_GOTOFF16", Const, 7},
+		{"R_390_GOTOFF64", Const, 7},
+		{"R_390_GOTPC", Const, 7},
+		{"R_390_GOTPCDBL", Const, 7},
+		{"R_390_GOTPLT12", Const, 7},
+		{"R_390_GOTPLT16", Const, 7},
+		{"R_390_GOTPLT20", Const, 7},
+		{"R_390_GOTPLT32", Const, 7},
+		{"R_390_GOTPLT64", Const, 7},
+		{"R_390_GOTPLTENT", Const, 7},
+		{"R_390_GOTPLTOFF16", Const, 7},
+		{"R_390_GOTPLTOFF32", Const, 7},
+		{"R_390_GOTPLTOFF64", Const, 7},
+		{"R_390_JMP_SLOT", Const, 7},
+		{"R_390_NONE", Const, 7},
+		{"R_390_PC16", Const, 7},
+		{"R_390_PC16DBL", Const, 7},
+		{"R_390_PC32", Const, 7},
+		{"R_390_PC32DBL", Const, 7},
+		{"R_390_PC64", Const, 7},
+		{"R_390_PLT16DBL", Const, 7},
+		{"R_390_PLT32", Const, 7},
+		{"R_390_PLT32DBL", Const, 7},
+		{"R_390_PLT64", Const, 7},
+		{"R_390_RELATIVE", Const, 7},
+		{"R_390_TLS_DTPMOD", Const, 7},
+		{"R_390_TLS_DTPOFF", Const, 7},
+		{"R_390_TLS_GD32", Const, 7},
+		{"R_390_TLS_GD64", Const, 7},
+		{"R_390_TLS_GDCALL", Const, 7},
+		{"R_390_TLS_GOTIE12", Const, 7},
+		{"R_390_TLS_GOTIE20", Const, 7},
+		{"R_390_TLS_GOTIE32", Const, 7},
+		{"R_390_TLS_GOTIE64", Const, 7},
+		{"R_390_TLS_IE32", Const, 7},
+		{"R_390_TLS_IE64", Const, 7},
+		{"R_390_TLS_IEENT", Const, 7},
+		{"R_390_TLS_LDCALL", Const, 7},
+		{"R_390_TLS_LDM32", Const, 7},
+		{"R_390_TLS_LDM64", Const, 7},
+		{"R_390_TLS_LDO32", Const, 7},
+		{"R_390_TLS_LDO64", Const, 7},
+		{"R_390_TLS_LE32", Const, 7},
+		{"R_390_TLS_LE64", Const, 7},
+		{"R_390_TLS_LOAD", Const, 7},
+		{"R_390_TLS_TPOFF", Const, 7},
+		{"R_AARCH64", Type, 4},
+		{"R_AARCH64_ABS16", Const, 4},
+		{"R_AARCH64_ABS32", Const, 4},
+		{"R_AARCH64_ABS64", Const, 4},
+		{"R_AARCH64_ADD_ABS_LO12_NC", Const, 4},
+		{"R_AARCH64_ADR_GOT_PAGE", Const, 4},
+		{"R_AARCH64_ADR_PREL_LO21", Const, 4},
+		{"R_AARCH64_ADR_PREL_PG_HI21", Const, 4},
+		{"R_AARCH64_ADR_PREL_PG_HI21_NC", Const, 4},
+		{"R_AARCH64_CALL26", Const, 4},
+		{"R_AARCH64_CONDBR19", Const, 4},
+		{"R_AARCH64_COPY", Const, 4},
+		{"R_AARCH64_GLOB_DAT", Const, 4},
+		{"R_AARCH64_GOT_LD_PREL19", Const, 4},
+		{"R_AARCH64_IRELATIVE", Const, 4},
+		{"R_AARCH64_JUMP26", Const, 4},
+		{"R_AARCH64_JUMP_SLOT", Const, 4},
+		{"R_AARCH64_LD64_GOTOFF_LO15", Const, 10},
+		{"R_AARCH64_LD64_GOTPAGE_LO15", Const, 10},
+		{"R_AARCH64_LD64_GOT_LO12_NC", Const, 4},
+		{"R_AARCH64_LDST128_ABS_LO12_NC", Const, 4},
+		{"R_AARCH64_LDST16_ABS_LO12_NC", Const, 4},
+		{"R_AARCH64_LDST32_ABS_LO12_NC", Const, 4},
+		{"R_AARCH64_LDST64_ABS_LO12_NC", Const, 4},
+		{"R_AARCH64_LDST8_ABS_LO12_NC", Const, 4},
+		{"R_AARCH64_LD_PREL_LO19", Const, 4},
+		{"R_AARCH64_MOVW_SABS_G0", Const, 4},
+		{"R_AARCH64_MOVW_SABS_G1", Const, 4},
+		{"R_AARCH64_MOVW_SABS_G2", Const, 4},
+		{"R_AARCH64_MOVW_UABS_G0", Const, 4},
+		{"R_AARCH64_MOVW_UABS_G0_NC", Const, 4},
+		{"R_AARCH64_MOVW_UABS_G1", Const, 4},
+		{"R_AARCH64_MOVW_UABS_G1_NC", Const, 4},
+		{"R_AARCH64_MOVW_UABS_G2", Const, 4},
+		{"R_AARCH64_MOVW_UABS_G2_NC", Const, 4},
+		{"R_AARCH64_MOVW_UABS_G3", Const, 4},
+		{"R_AARCH64_NONE", Const, 4},
+		{"R_AARCH64_NULL", Const, 4},
+		{"R_AARCH64_P32_ABS16", Const, 4},
+		{"R_AARCH64_P32_ABS32", Const, 4},
+		{"R_AARCH64_P32_ADD_ABS_LO12_NC", Const, 4},
+		{"R_AARCH64_P32_ADR_GOT_PAGE", Const, 4},
+		{"R_AARCH64_P32_ADR_PREL_LO21", Const, 4},
+		{"R_AARCH64_P32_ADR_PREL_PG_HI21", Const, 4},
+		{"R_AARCH64_P32_CALL26", Const, 4},
+		{"R_AARCH64_P32_CONDBR19", Const, 4},
+		{"R_AARCH64_P32_COPY", Const, 4},
+		{"R_AARCH64_P32_GLOB_DAT", Const, 4},
+		{"R_AARCH64_P32_GOT_LD_PREL19", Const, 4},
+		{"R_AARCH64_P32_IRELATIVE", Const, 4},
+		{"R_AARCH64_P32_JUMP26", Const, 4},
+		{"R_AARCH64_P32_JUMP_SLOT", Const, 4},
+		{"R_AARCH64_P32_LD32_GOT_LO12_NC", Const, 4},
+		{"R_AARCH64_P32_LDST128_ABS_LO12_NC", Const, 4},
+		{"R_AARCH64_P32_LDST16_ABS_LO12_NC", Const, 4},
+		{"R_AARCH64_P32_LDST32_ABS_LO12_NC", Const, 4},
+		{"R_AARCH64_P32_LDST64_ABS_LO12_NC", Const, 4},
+		{"R_AARCH64_P32_LDST8_ABS_LO12_NC", Const, 4},
+		{"R_AARCH64_P32_LD_PREL_LO19", Const, 4},
+		{"R_AARCH64_P32_MOVW_SABS_G0", Const, 4},
+		{"R_AARCH64_P32_MOVW_UABS_G0", Const, 4},
+		{"R_AARCH64_P32_MOVW_UABS_G0_NC", Const, 4},
+		{"R_AARCH64_P32_MOVW_UABS_G1", Const, 4},
+		{"R_AARCH64_P32_PREL16", Const, 4},
+		{"R_AARCH64_P32_PREL32", Const, 4},
+		{"R_AARCH64_P32_RELATIVE", Const, 4},
+		{"R_AARCH64_P32_TLSDESC", Const, 4},
+		{"R_AARCH64_P32_TLSDESC_ADD_LO12_NC", Const, 4},
+		{"R_AARCH64_P32_TLSDESC_ADR_PAGE21", Const, 4},
+		{"R_AARCH64_P32_TLSDESC_ADR_PREL21", Const, 4},
+		{"R_AARCH64_P32_TLSDESC_CALL", Const, 4},
+		{"R_AARCH64_P32_TLSDESC_LD32_LO12_NC", Const, 4},
+		{"R_AARCH64_P32_TLSDESC_LD_PREL19", Const, 4},
+		{"R_AARCH64_P32_TLSGD_ADD_LO12_NC", Const, 4},
+		{"R_AARCH64_P32_TLSGD_ADR_PAGE21", Const, 4},
+		{"R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21", Const, 4},
+		{"R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC", Const, 4},
+		{"R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19", Const, 4},
+		{"R_AARCH64_P32_TLSLE_ADD_TPREL_HI12", Const, 4},
+		{"R_AARCH64_P32_TLSLE_ADD_TPREL_LO12", Const, 4},
+		{"R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC", Const, 4},
+		{"R_AARCH64_P32_TLSLE_MOVW_TPREL_G0", Const, 4},
+		{"R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC", Const, 4},
+		{"R_AARCH64_P32_TLSLE_MOVW_TPREL_G1", Const, 4},
+		{"R_AARCH64_P32_TLS_DTPMOD", Const, 4},
+		{"R_AARCH64_P32_TLS_DTPREL", Const, 4},
+		{"R_AARCH64_P32_TLS_TPREL", Const, 4},
+		{"R_AARCH64_P32_TSTBR14", Const, 4},
+		{"R_AARCH64_PREL16", Const, 4},
+		{"R_AARCH64_PREL32", Const, 4},
+		{"R_AARCH64_PREL64", Const, 4},
+		{"R_AARCH64_RELATIVE", Const, 4},
+		{"R_AARCH64_TLSDESC", Const, 4},
+		{"R_AARCH64_TLSDESC_ADD", Const, 4},
+		{"R_AARCH64_TLSDESC_ADD_LO12_NC", Const, 4},
+		{"R_AARCH64_TLSDESC_ADR_PAGE21", Const, 4},
+		{"R_AARCH64_TLSDESC_ADR_PREL21", Const, 4},
+		{"R_AARCH64_TLSDESC_CALL", Const, 4},
+		{"R_AARCH64_TLSDESC_LD64_LO12_NC", Const, 4},
+		{"R_AARCH64_TLSDESC_LDR", Const, 4},
+		{"R_AARCH64_TLSDESC_LD_PREL19", Const, 4},
+		{"R_AARCH64_TLSDESC_OFF_G0_NC", Const, 4},
+		{"R_AARCH64_TLSDESC_OFF_G1", Const, 4},
+		{"R_AARCH64_TLSGD_ADD_LO12_NC", Const, 4},
+		{"R_AARCH64_TLSGD_ADR_PAGE21", Const, 4},
+		{"R_AARCH64_TLSGD_ADR_PREL21", Const, 10},
+		{"R_AARCH64_TLSGD_MOVW_G0_NC", Const, 10},
+		{"R_AARCH64_TLSGD_MOVW_G1", Const, 10},
+		{"R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21", Const, 4},
+		{"R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC", Const, 4},
+		{"R_AARCH64_TLSIE_LD_GOTTPREL_PREL19", Const, 4},
+		{"R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC", Const, 4},
+		{"R_AARCH64_TLSIE_MOVW_GOTTPREL_G1", Const, 4},
+		{"R_AARCH64_TLSLD_ADR_PAGE21", Const, 10},
+		{"R_AARCH64_TLSLD_ADR_PREL21", Const, 10},
+		{"R_AARCH64_TLSLD_LDST128_DTPREL_LO12", Const, 10},
+		{"R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC", Const, 10},
+		{"R_AARCH64_TLSLE_ADD_TPREL_HI12", Const, 4},
+		{"R_AARCH64_TLSLE_ADD_TPREL_LO12", Const, 4},
+		{"R_AARCH64_TLSLE_ADD_TPREL_LO12_NC", Const, 4},
+		{"R_AARCH64_TLSLE_LDST128_TPREL_LO12", Const, 10},
+		{"R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC", Const, 10},
+		{"R_AARCH64_TLSLE_MOVW_TPREL_G0", Const, 4},
+		{"R_AARCH64_TLSLE_MOVW_TPREL_G0_NC", Const, 4},
+		{"R_AARCH64_TLSLE_MOVW_TPREL_G1", Const, 4},
+		{"R_AARCH64_TLSLE_MOVW_TPREL_G1_NC", Const, 4},
+		{"R_AARCH64_TLSLE_MOVW_TPREL_G2", Const, 4},
+		{"R_AARCH64_TLS_DTPMOD64", Const, 4},
+		{"R_AARCH64_TLS_DTPREL64", Const, 4},
+		{"R_AARCH64_TLS_TPREL64", Const, 4},
+		{"R_AARCH64_TSTBR14", Const, 4},
+		{"R_ALPHA", Type, 0},
+		{"R_ALPHA_BRADDR", Const, 0},
+		{"R_ALPHA_COPY", Const, 0},
+		{"R_ALPHA_GLOB_DAT", Const, 0},
+		{"R_ALPHA_GPDISP", Const, 0},
+		{"R_ALPHA_GPREL32", Const, 0},
+		{"R_ALPHA_GPRELHIGH", Const, 0},
+		{"R_ALPHA_GPRELLOW", Const, 0},
+		{"R_ALPHA_GPVALUE", Const, 0},
+		{"R_ALPHA_HINT", Const, 0},
+		{"R_ALPHA_IMMED_BR_HI32", Const, 0},
+		{"R_ALPHA_IMMED_GP_16", Const, 0},
+		{"R_ALPHA_IMMED_GP_HI32", Const, 0},
+		{"R_ALPHA_IMMED_LO32", Const, 0},
+		{"R_ALPHA_IMMED_SCN_HI32", Const, 0},
+		{"R_ALPHA_JMP_SLOT", Const, 0},
+		{"R_ALPHA_LITERAL", Const, 0},
+		{"R_ALPHA_LITUSE", Const, 0},
+		{"R_ALPHA_NONE", Const, 0},
+		{"R_ALPHA_OP_PRSHIFT", Const, 0},
+		{"R_ALPHA_OP_PSUB", Const, 0},
+		{"R_ALPHA_OP_PUSH", Const, 0},
+		{"R_ALPHA_OP_STORE", Const, 0},
+		{"R_ALPHA_REFLONG", Const, 0},
+		{"R_ALPHA_REFQUAD", Const, 0},
+		{"R_ALPHA_RELATIVE", Const, 0},
+		{"R_ALPHA_SREL16", Const, 0},
+		{"R_ALPHA_SREL32", Const, 0},
+		{"R_ALPHA_SREL64", Const, 0},
+		{"R_ARM", Type, 0},
+		{"R_ARM_ABS12", Const, 0},
+		{"R_ARM_ABS16", Const, 0},
+		{"R_ARM_ABS32", Const, 0},
+		{"R_ARM_ABS32_NOI", Const, 10},
+		{"R_ARM_ABS8", Const, 0},
+		{"R_ARM_ALU_PCREL_15_8", Const, 10},
+		{"R_ARM_ALU_PCREL_23_15", Const, 10},
+		{"R_ARM_ALU_PCREL_7_0", Const, 10},
+		{"R_ARM_ALU_PC_G0", Const, 10},
+		{"R_ARM_ALU_PC_G0_NC", Const, 10},
+		{"R_ARM_ALU_PC_G1", Const, 10},
+		{"R_ARM_ALU_PC_G1_NC", Const, 10},
+		{"R_ARM_ALU_PC_G2", Const, 10},
+		{"R_ARM_ALU_SBREL_19_12_NC", Const, 10},
+		{"R_ARM_ALU_SBREL_27_20_CK", Const, 10},
+		{"R_ARM_ALU_SB_G0", Const, 10},
+		{"R_ARM_ALU_SB_G0_NC", Const, 10},
+		{"R_ARM_ALU_SB_G1", Const, 10},
+		{"R_ARM_ALU_SB_G1_NC", Const, 10},
+		{"R_ARM_ALU_SB_G2", Const, 10},
+		{"R_ARM_AMP_VCALL9", Const, 0},
+		{"R_ARM_BASE_ABS", Const, 10},
+		{"R_ARM_CALL", Const, 10},
+		{"R_ARM_COPY", Const, 0},
+		{"R_ARM_GLOB_DAT", Const, 0},
+		{"R_ARM_GNU_VTENTRY", Const, 0},
+		{"R_ARM_GNU_VTINHERIT", Const, 0},
+		{"R_ARM_GOT32", Const, 0},
+		{"R_ARM_GOTOFF", Const, 0},
+		{"R_ARM_GOTOFF12", Const, 10},
+		{"R_ARM_GOTPC", Const, 0},
+		{"R_ARM_GOTRELAX", Const, 10},
+		{"R_ARM_GOT_ABS", Const, 10},
+		{"R_ARM_GOT_BREL12", Const, 10},
+		{"R_ARM_GOT_PREL", Const, 10},
+		{"R_ARM_IRELATIVE", Const, 10},
+		{"R_ARM_JUMP24", Const, 10},
+		{"R_ARM_JUMP_SLOT", Const, 0},
+		{"R_ARM_LDC_PC_G0", Const, 10},
+		{"R_ARM_LDC_PC_G1", Const, 10},
+		{"R_ARM_LDC_PC_G2", Const, 10},
+		{"R_ARM_LDC_SB_G0", Const, 10},
+		{"R_ARM_LDC_SB_G1", Const, 10},
+		{"R_ARM_LDC_SB_G2", Const, 10},
+		{"R_ARM_LDRS_PC_G0", Const, 10},
+		{"R_ARM_LDRS_PC_G1", Const, 10},
+		{"R_ARM_LDRS_PC_G2", Const, 10},
+		{"R_ARM_LDRS_SB_G0", Const, 10},
+		{"R_ARM_LDRS_SB_G1", Const, 10},
+		{"R_ARM_LDRS_SB_G2", Const, 10},
+		{"R_ARM_LDR_PC_G1", Const, 10},
+		{"R_ARM_LDR_PC_G2", Const, 10},
+		{"R_ARM_LDR_SBREL_11_10_NC", Const, 10},
+		{"R_ARM_LDR_SB_G0", Const, 10},
+		{"R_ARM_LDR_SB_G1", Const, 10},
+		{"R_ARM_LDR_SB_G2", Const, 10},
+		{"R_ARM_ME_TOO", Const, 10},
+		{"R_ARM_MOVT_ABS", Const, 10},
+		{"R_ARM_MOVT_BREL", Const, 10},
+		{"R_ARM_MOVT_PREL", Const, 10},
+		{"R_ARM_MOVW_ABS_NC", Const, 10},
+		{"R_ARM_MOVW_BREL", Const, 10},
+		{"R_ARM_MOVW_BREL_NC", Const, 10},
+		{"R_ARM_MOVW_PREL_NC", Const, 10},
+		{"R_ARM_NONE", Const, 0},
+		{"R_ARM_PC13", Const, 0},
+		{"R_ARM_PC24", Const, 0},
+		{"R_ARM_PLT32", Const, 0},
+		{"R_ARM_PLT32_ABS", Const, 10},
+		{"R_ARM_PREL31", Const, 10},
+		{"R_ARM_PRIVATE_0", Const, 10},
+		{"R_ARM_PRIVATE_1", Const, 10},
+		{"R_ARM_PRIVATE_10", Const, 10},
+		{"R_ARM_PRIVATE_11", Const, 10},
+		{"R_ARM_PRIVATE_12", Const, 10},
+		{"R_ARM_PRIVATE_13", Const, 10},
+		{"R_ARM_PRIVATE_14", Const, 10},
+		{"R_ARM_PRIVATE_15", Const, 10},
+		{"R_ARM_PRIVATE_2", Const, 10},
+		{"R_ARM_PRIVATE_3", Const, 10},
+		{"R_ARM_PRIVATE_4", Const, 10},
+		{"R_ARM_PRIVATE_5", Const, 10},
+		{"R_ARM_PRIVATE_6", Const, 10},
+		{"R_ARM_PRIVATE_7", Const, 10},
+		{"R_ARM_PRIVATE_8", Const, 10},
+		{"R_ARM_PRIVATE_9", Const, 10},
+		{"R_ARM_RABS32", Const, 0},
+		{"R_ARM_RBASE", Const, 0},
+		{"R_ARM_REL32", Const, 0},
+		{"R_ARM_REL32_NOI", Const, 10},
+		{"R_ARM_RELATIVE", Const, 0},
+		{"R_ARM_RPC24", Const, 0},
+		{"R_ARM_RREL32", Const, 0},
+		{"R_ARM_RSBREL32", Const, 0},
+		{"R_ARM_RXPC25", Const, 10},
+		{"R_ARM_SBREL31", Const, 10},
+		{"R_ARM_SBREL32", Const, 0},
+		{"R_ARM_SWI24", Const, 0},
+		{"R_ARM_TARGET1", Const, 10},
+		{"R_ARM_TARGET2", Const, 10},
+		{"R_ARM_THM_ABS5", Const, 0},
+		{"R_ARM_THM_ALU_ABS_G0_NC", Const, 10},
+		{"R_ARM_THM_ALU_ABS_G1_NC", Const, 10},
+		{"R_ARM_THM_ALU_ABS_G2_NC", Const, 10},
+		{"R_ARM_THM_ALU_ABS_G3", Const, 10},
+		{"R_ARM_THM_ALU_PREL_11_0", Const, 10},
+		{"R_ARM_THM_GOT_BREL12", Const, 10},
+		{"R_ARM_THM_JUMP11", Const, 10},
+		{"R_ARM_THM_JUMP19", Const, 10},
+		{"R_ARM_THM_JUMP24", Const, 10},
+		{"R_ARM_THM_JUMP6", Const, 10},
+		{"R_ARM_THM_JUMP8", Const, 10},
+		{"R_ARM_THM_MOVT_ABS", Const, 10},
+		{"R_ARM_THM_MOVT_BREL", Const, 10},
+		{"R_ARM_THM_MOVT_PREL", Const, 10},
+		{"R_ARM_THM_MOVW_ABS_NC", Const, 10},
+		{"R_ARM_THM_MOVW_BREL", Const, 10},
+		{"R_ARM_THM_MOVW_BREL_NC", Const, 10},
+		{"R_ARM_THM_MOVW_PREL_NC", Const, 10},
+		{"R_ARM_THM_PC12", Const, 10},
+		{"R_ARM_THM_PC22", Const, 0},
+		{"R_ARM_THM_PC8", Const, 0},
+		{"R_ARM_THM_RPC22", Const, 0},
+		{"R_ARM_THM_SWI8", Const, 0},
+		{"R_ARM_THM_TLS_CALL", Const, 10},
+		{"R_ARM_THM_TLS_DESCSEQ16", Const, 10},
+		{"R_ARM_THM_TLS_DESCSEQ32", Const, 10},
+		{"R_ARM_THM_XPC22", Const, 0},
+		{"R_ARM_TLS_CALL", Const, 10},
+		{"R_ARM_TLS_DESCSEQ", Const, 10},
+		{"R_ARM_TLS_DTPMOD32", Const, 10},
+		{"R_ARM_TLS_DTPOFF32", Const, 10},
+		{"R_ARM_TLS_GD32", Const, 10},
+		{"R_ARM_TLS_GOTDESC", Const, 10},
+		{"R_ARM_TLS_IE12GP", Const, 10},
+		{"R_ARM_TLS_IE32", Const, 10},
+		{"R_ARM_TLS_LDM32", Const, 10},
+		{"R_ARM_TLS_LDO12", Const, 10},
+		{"R_ARM_TLS_LDO32", Const, 10},
+		{"R_ARM_TLS_LE12", Const, 10},
+		{"R_ARM_TLS_LE32", Const, 10},
+		{"R_ARM_TLS_TPOFF32", Const, 10},
+		{"R_ARM_V4BX", Const, 10},
+		{"R_ARM_XPC25", Const, 0},
+		{"R_INFO", Func, 0},
+		{"R_INFO32", Func, 0},
+		{"R_LARCH", Type, 19},
+		{"R_LARCH_32", Const, 19},
+		{"R_LARCH_32_PCREL", Const, 20},
+		{"R_LARCH_64", Const, 19},
+		{"R_LARCH_64_PCREL", Const, 22},
+		{"R_LARCH_ABS64_HI12", Const, 20},
+		{"R_LARCH_ABS64_LO20", Const, 20},
+		{"R_LARCH_ABS_HI20", Const, 20},
+		{"R_LARCH_ABS_LO12", Const, 20},
+		{"R_LARCH_ADD16", Const, 19},
+		{"R_LARCH_ADD24", Const, 19},
+		{"R_LARCH_ADD32", Const, 19},
+		{"R_LARCH_ADD6", Const, 22},
+		{"R_LARCH_ADD64", Const, 19},
+		{"R_LARCH_ADD8", Const, 19},
+		{"R_LARCH_ADD_ULEB128", Const, 22},
+		{"R_LARCH_ALIGN", Const, 22},
+		{"R_LARCH_B16", Const, 20},
+		{"R_LARCH_B21", Const, 20},
+		{"R_LARCH_B26", Const, 20},
+		{"R_LARCH_CFA", Const, 22},
+		{"R_LARCH_COPY", Const, 19},
+		{"R_LARCH_DELETE", Const, 22},
+		{"R_LARCH_GNU_VTENTRY", Const, 20},
+		{"R_LARCH_GNU_VTINHERIT", Const, 20},
+		{"R_LARCH_GOT64_HI12", Const, 20},
+		{"R_LARCH_GOT64_LO20", Const, 20},
+		{"R_LARCH_GOT64_PC_HI12", Const, 20},
+		{"R_LARCH_GOT64_PC_LO20", Const, 20},
+		{"R_LARCH_GOT_HI20", Const, 20},
+		{"R_LARCH_GOT_LO12", Const, 20},
+		{"R_LARCH_GOT_PC_HI20", Const, 20},
+		{"R_LARCH_GOT_PC_LO12", Const, 20},
+		{"R_LARCH_IRELATIVE", Const, 19},
+		{"R_LARCH_JUMP_SLOT", Const, 19},
+		{"R_LARCH_MARK_LA", Const, 19},
+		{"R_LARCH_MARK_PCREL", Const, 19},
+		{"R_LARCH_NONE", Const, 19},
+		{"R_LARCH_PCALA64_HI12", Const, 20},
+		{"R_LARCH_PCALA64_LO20", Const, 20},
+		{"R_LARCH_PCALA_HI20", Const, 20},
+		{"R_LARCH_PCALA_LO12", Const, 20},
+		{"R_LARCH_PCREL20_S2", Const, 22},
+		{"R_LARCH_RELATIVE", Const, 19},
+		{"R_LARCH_RELAX", Const, 20},
+		{"R_LARCH_SOP_ADD", Const, 19},
+		{"R_LARCH_SOP_AND", Const, 19},
+		{"R_LARCH_SOP_ASSERT", Const, 19},
+		{"R_LARCH_SOP_IF_ELSE", Const, 19},
+		{"R_LARCH_SOP_NOT", Const, 19},
+		{"R_LARCH_SOP_POP_32_S_0_10_10_16_S2", Const, 19},
+		{"R_LARCH_SOP_POP_32_S_0_5_10_16_S2", Const, 19},
+		{"R_LARCH_SOP_POP_32_S_10_12", Const, 19},
+		{"R_LARCH_SOP_POP_32_S_10_16", Const, 19},
+		{"R_LARCH_SOP_POP_32_S_10_16_S2", Const, 19},
+		{"R_LARCH_SOP_POP_32_S_10_5", Const, 19},
+		{"R_LARCH_SOP_POP_32_S_5_20", Const, 19},
+		{"R_LARCH_SOP_POP_32_U", Const, 19},
+		{"R_LARCH_SOP_POP_32_U_10_12", Const, 19},
+		{"R_LARCH_SOP_PUSH_ABSOLUTE", Const, 19},
+		{"R_LARCH_SOP_PUSH_DUP", Const, 19},
+		{"R_LARCH_SOP_PUSH_GPREL", Const, 19},
+		{"R_LARCH_SOP_PUSH_PCREL", Const, 19},
+		{"R_LARCH_SOP_PUSH_PLT_PCREL", Const, 19},
+		{"R_LARCH_SOP_PUSH_TLS_GD", Const, 19},
+		{"R_LARCH_SOP_PUSH_TLS_GOT", Const, 19},
+		{"R_LARCH_SOP_PUSH_TLS_TPREL", Const, 19},
+		{"R_LARCH_SOP_SL", Const, 19},
+		{"R_LARCH_SOP_SR", Const, 19},
+		{"R_LARCH_SOP_SUB", Const, 19},
+		{"R_LARCH_SUB16", Const, 19},
+		{"R_LARCH_SUB24", Const, 19},
+		{"R_LARCH_SUB32", Const, 19},
+		{"R_LARCH_SUB6", Const, 22},
+		{"R_LARCH_SUB64", Const, 19},
+		{"R_LARCH_SUB8", Const, 19},
+		{"R_LARCH_SUB_ULEB128", Const, 22},
+		{"R_LARCH_TLS_DTPMOD32", Const, 19},
+		{"R_LARCH_TLS_DTPMOD64", Const, 19},
+		{"R_LARCH_TLS_DTPREL32", Const, 19},
+		{"R_LARCH_TLS_DTPREL64", Const, 19},
+		{"R_LARCH_TLS_GD_HI20", Const, 20},
+		{"R_LARCH_TLS_GD_PC_HI20", Const, 20},
+		{"R_LARCH_TLS_IE64_HI12", Const, 20},
+		{"R_LARCH_TLS_IE64_LO20", Const, 20},
+		{"R_LARCH_TLS_IE64_PC_HI12", Const, 20},
+		{"R_LARCH_TLS_IE64_PC_LO20", Const, 20},
+		{"R_LARCH_TLS_IE_HI20", Const, 20},
+		{"R_LARCH_TLS_IE_LO12", Const, 20},
+		{"R_LARCH_TLS_IE_PC_HI20", Const, 20},
+		{"R_LARCH_TLS_IE_PC_LO12", Const, 20},
+		{"R_LARCH_TLS_LD_HI20", Const, 20},
+		{"R_LARCH_TLS_LD_PC_HI20", Const, 20},
+		{"R_LARCH_TLS_LE64_HI12", Const, 20},
+		{"R_LARCH_TLS_LE64_LO20", Const, 20},
+		{"R_LARCH_TLS_LE_HI20", Const, 20},
+		{"R_LARCH_TLS_LE_LO12", Const, 20},
+		{"R_LARCH_TLS_TPREL32", Const, 19},
+		{"R_LARCH_TLS_TPREL64", Const, 19},
+		{"R_MIPS", Type, 6},
+		{"R_MIPS_16", Const, 6},
+		{"R_MIPS_26", Const, 6},
+		{"R_MIPS_32", Const, 6},
+		{"R_MIPS_64", Const, 6},
+		{"R_MIPS_ADD_IMMEDIATE", Const, 6},
+		{"R_MIPS_CALL16", Const, 6},
+		{"R_MIPS_CALL_HI16", Const, 6},
+		{"R_MIPS_CALL_LO16", Const, 6},
+		{"R_MIPS_DELETE", Const, 6},
+		{"R_MIPS_GOT16", Const, 6},
+		{"R_MIPS_GOT_DISP", Const, 6},
+		{"R_MIPS_GOT_HI16", Const, 6},
+		{"R_MIPS_GOT_LO16", Const, 6},
+		{"R_MIPS_GOT_OFST", Const, 6},
+		{"R_MIPS_GOT_PAGE", Const, 6},
+		{"R_MIPS_GPREL16", Const, 6},
+		{"R_MIPS_GPREL32", Const, 6},
+		{"R_MIPS_HI16", Const, 6},
+		{"R_MIPS_HIGHER", Const, 6},
+		{"R_MIPS_HIGHEST", Const, 6},
+		{"R_MIPS_INSERT_A", Const, 6},
+		{"R_MIPS_INSERT_B", Const, 6},
+		{"R_MIPS_JALR", Const, 6},
+		{"R_MIPS_LITERAL", Const, 6},
+		{"R_MIPS_LO16", Const, 6},
+		{"R_MIPS_NONE", Const, 6},
+		{"R_MIPS_PC16", Const, 6},
+		{"R_MIPS_PC32", Const, 22},
+		{"R_MIPS_PJUMP", Const, 6},
+		{"R_MIPS_REL16", Const, 6},
+		{"R_MIPS_REL32", Const, 6},
+		{"R_MIPS_RELGOT", Const, 6},
+		{"R_MIPS_SCN_DISP", Const, 6},
+		{"R_MIPS_SHIFT5", Const, 6},
+		{"R_MIPS_SHIFT6", Const, 6},
+		{"R_MIPS_SUB", Const, 6},
+		{"R_MIPS_TLS_DTPMOD32", Const, 6},
+		{"R_MIPS_TLS_DTPMOD64", Const, 6},
+		{"R_MIPS_TLS_DTPREL32", Const, 6},
+		{"R_MIPS_TLS_DTPREL64", Const, 6},
+		{"R_MIPS_TLS_DTPREL_HI16", Const, 6},
+		{"R_MIPS_TLS_DTPREL_LO16", Const, 6},
+		{"R_MIPS_TLS_GD", Const, 6},
+		{"R_MIPS_TLS_GOTTPREL", Const, 6},
+		{"R_MIPS_TLS_LDM", Const, 6},
+		{"R_MIPS_TLS_TPREL32", Const, 6},
+		{"R_MIPS_TLS_TPREL64", Const, 6},
+		{"R_MIPS_TLS_TPREL_HI16", Const, 6},
+		{"R_MIPS_TLS_TPREL_LO16", Const, 6},
+		{"R_PPC", Type, 0},
+		{"R_PPC64", Type, 5},
+		{"R_PPC64_ADDR14", Const, 5},
+		{"R_PPC64_ADDR14_BRNTAKEN", Const, 5},
+		{"R_PPC64_ADDR14_BRTAKEN", Const, 5},
+		{"R_PPC64_ADDR16", Const, 5},
+		{"R_PPC64_ADDR16_DS", Const, 5},
+		{"R_PPC64_ADDR16_HA", Const, 5},
+		{"R_PPC64_ADDR16_HI", Const, 5},
+		{"R_PPC64_ADDR16_HIGH", Const, 10},
+		{"R_PPC64_ADDR16_HIGHA", Const, 10},
+		{"R_PPC64_ADDR16_HIGHER", Const, 5},
+		{"R_PPC64_ADDR16_HIGHER34", Const, 20},
+		{"R_PPC64_ADDR16_HIGHERA", Const, 5},
+		{"R_PPC64_ADDR16_HIGHERA34", Const, 20},
+		{"R_PPC64_ADDR16_HIGHEST", Const, 5},
+		{"R_PPC64_ADDR16_HIGHEST34", Const, 20},
+		{"R_PPC64_ADDR16_HIGHESTA", Const, 5},
+		{"R_PPC64_ADDR16_HIGHESTA34", Const, 20},
+		{"R_PPC64_ADDR16_LO", Const, 5},
+		{"R_PPC64_ADDR16_LO_DS", Const, 5},
+		{"R_PPC64_ADDR24", Const, 5},
+		{"R_PPC64_ADDR32", Const, 5},
+		{"R_PPC64_ADDR64", Const, 5},
+		{"R_PPC64_ADDR64_LOCAL", Const, 10},
+		{"R_PPC64_COPY", Const, 20},
+		{"R_PPC64_D28", Const, 20},
+		{"R_PPC64_D34", Const, 20},
+		{"R_PPC64_D34_HA30", Const, 20},
+		{"R_PPC64_D34_HI30", Const, 20},
+		{"R_PPC64_D34_LO", Const, 20},
+		{"R_PPC64_DTPMOD64", Const, 5},
+		{"R_PPC64_DTPREL16", Const, 5},
+		{"R_PPC64_DTPREL16_DS", Const, 5},
+		{"R_PPC64_DTPREL16_HA", Const, 5},
+		{"R_PPC64_DTPREL16_HI", Const, 5},
+		{"R_PPC64_DTPREL16_HIGH", Const, 10},
+		{"R_PPC64_DTPREL16_HIGHA", Const, 10},
+		{"R_PPC64_DTPREL16_HIGHER", Const, 5},
+		{"R_PPC64_DTPREL16_HIGHERA", Const, 5},
+		{"R_PPC64_DTPREL16_HIGHEST", Const, 5},
+		{"R_PPC64_DTPREL16_HIGHESTA", Const, 5},
+		{"R_PPC64_DTPREL16_LO", Const, 5},
+		{"R_PPC64_DTPREL16_LO_DS", Const, 5},
+		{"R_PPC64_DTPREL34", Const, 20},
+		{"R_PPC64_DTPREL64", Const, 5},
+		{"R_PPC64_ENTRY", Const, 10},
+		{"R_PPC64_GLOB_DAT", Const, 20},
+		{"R_PPC64_GNU_VTENTRY", Const, 20},
+		{"R_PPC64_GNU_VTINHERIT", Const, 20},
+		{"R_PPC64_GOT16", Const, 5},
+		{"R_PPC64_GOT16_DS", Const, 5},
+		{"R_PPC64_GOT16_HA", Const, 5},
+		{"R_PPC64_GOT16_HI", Const, 5},
+		{"R_PPC64_GOT16_LO", Const, 5},
+		{"R_PPC64_GOT16_LO_DS", Const, 5},
+		{"R_PPC64_GOT_DTPREL16_DS", Const, 5},
+		{"R_PPC64_GOT_DTPREL16_HA", Const, 5},
+		{"R_PPC64_GOT_DTPREL16_HI", Const, 5},
+		{"R_PPC64_GOT_DTPREL16_LO_DS", Const, 5},
+		{"R_PPC64_GOT_DTPREL_PCREL34", Const, 20},
+		{"R_PPC64_GOT_PCREL34", Const, 20},
+		{"R_PPC64_GOT_TLSGD16", Const, 5},
+		{"R_PPC64_GOT_TLSGD16_HA", Const, 5},
+		{"R_PPC64_GOT_TLSGD16_HI", Const, 5},
+		{"R_PPC64_GOT_TLSGD16_LO", Const, 5},
+		{"R_PPC64_GOT_TLSGD_PCREL34", Const, 20},
+		{"R_PPC64_GOT_TLSLD16", Const, 5},
+		{"R_PPC64_GOT_TLSLD16_HA", Const, 5},
+		{"R_PPC64_GOT_TLSLD16_HI", Const, 5},
+		{"R_PPC64_GOT_TLSLD16_LO", Const, 5},
+		{"R_PPC64_GOT_TLSLD_PCREL34", Const, 20},
+		{"R_PPC64_GOT_TPREL16_DS", Const, 5},
+		{"R_PPC64_GOT_TPREL16_HA", Const, 5},
+		{"R_PPC64_GOT_TPREL16_HI", Const, 5},
+		{"R_PPC64_GOT_TPREL16_LO_DS", Const, 5},
+		{"R_PPC64_GOT_TPREL_PCREL34", Const, 20},
+		{"R_PPC64_IRELATIVE", Const, 10},
+		{"R_PPC64_JMP_IREL", Const, 10},
+		{"R_PPC64_JMP_SLOT", Const, 5},
+		{"R_PPC64_NONE", Const, 5},
+		{"R_PPC64_PCREL28", Const, 20},
+		{"R_PPC64_PCREL34", Const, 20},
+		{"R_PPC64_PCREL_OPT", Const, 20},
+		{"R_PPC64_PLT16_HA", Const, 20},
+		{"R_PPC64_PLT16_HI", Const, 20},
+		{"R_PPC64_PLT16_LO", Const, 20},
+		{"R_PPC64_PLT16_LO_DS", Const, 10},
+		{"R_PPC64_PLT32", Const, 20},
+		{"R_PPC64_PLT64", Const, 20},
+		{"R_PPC64_PLTCALL", Const, 20},
+		{"R_PPC64_PLTCALL_NOTOC", Const, 20},
+		{"R_PPC64_PLTGOT16", Const, 10},
+		{"R_PPC64_PLTGOT16_DS", Const, 10},
+		{"R_PPC64_PLTGOT16_HA", Const, 10},
+		{"R_PPC64_PLTGOT16_HI", Const, 10},
+		{"R_PPC64_PLTGOT16_LO", Const, 10},
+		{"R_PPC64_PLTGOT_LO_DS", Const, 10},
+		{"R_PPC64_PLTREL32", Const, 20},
+		{"R_PPC64_PLTREL64", Const, 20},
+		{"R_PPC64_PLTSEQ", Const, 20},
+		{"R_PPC64_PLTSEQ_NOTOC", Const, 20},
+		{"R_PPC64_PLT_PCREL34", Const, 20},
+		{"R_PPC64_PLT_PCREL34_NOTOC", Const, 20},
+		{"R_PPC64_REL14", Const, 5},
+		{"R_PPC64_REL14_BRNTAKEN", Const, 5},
+		{"R_PPC64_REL14_BRTAKEN", Const, 5},
+		{"R_PPC64_REL16", Const, 5},
+		{"R_PPC64_REL16DX_HA", Const, 10},
+		{"R_PPC64_REL16_HA", Const, 5},
+		{"R_PPC64_REL16_HI", Const, 5},
+		{"R_PPC64_REL16_HIGH", Const, 20},
+		{"R_PPC64_REL16_HIGHA", Const, 20},
+		{"R_PPC64_REL16_HIGHER", Const, 20},
+		{"R_PPC64_REL16_HIGHER34", Const, 20},
+		{"R_PPC64_REL16_HIGHERA", Const, 20},
+		{"R_PPC64_REL16_HIGHERA34", Const, 20},
+		{"R_PPC64_REL16_HIGHEST", Const, 20},
+		{"R_PPC64_REL16_HIGHEST34", Const, 20},
+		{"R_PPC64_REL16_HIGHESTA", Const, 20},
+		{"R_PPC64_REL16_HIGHESTA34", Const, 20},
+		{"R_PPC64_REL16_LO", Const, 5},
+		{"R_PPC64_REL24", Const, 5},
+		{"R_PPC64_REL24_NOTOC", Const, 10},
+		{"R_PPC64_REL24_P9NOTOC", Const, 21},
+		{"R_PPC64_REL30", Const, 20},
+		{"R_PPC64_REL32", Const, 5},
+		{"R_PPC64_REL64", Const, 5},
+		{"R_PPC64_RELATIVE", Const, 18},
+		{"R_PPC64_SECTOFF", Const, 20},
+		{"R_PPC64_SECTOFF_DS", Const, 10},
+		{"R_PPC64_SECTOFF_HA", Const, 20},
+		{"R_PPC64_SECTOFF_HI", Const, 20},
+		{"R_PPC64_SECTOFF_LO", Const, 20},
+		{"R_PPC64_SECTOFF_LO_DS", Const, 10},
+		{"R_PPC64_TLS", Const, 5},
+		{"R_PPC64_TLSGD", Const, 5},
+		{"R_PPC64_TLSLD", Const, 5},
+		{"R_PPC64_TOC", Const, 5},
+		{"R_PPC64_TOC16", Const, 5},
+		{"R_PPC64_TOC16_DS", Const, 5},
+		{"R_PPC64_TOC16_HA", Const, 5},
+		{"R_PPC64_TOC16_HI", Const, 5},
+		{"R_PPC64_TOC16_LO", Const, 5},
+		{"R_PPC64_TOC16_LO_DS", Const, 5},
+		{"R_PPC64_TOCSAVE", Const, 10},
+		{"R_PPC64_TPREL16", Const, 5},
+		{"R_PPC64_TPREL16_DS", Const, 5},
+		{"R_PPC64_TPREL16_HA", Const, 5},
+		{"R_PPC64_TPREL16_HI", Const, 5},
+		{"R_PPC64_TPREL16_HIGH", Const, 10},
+		{"R_PPC64_TPREL16_HIGHA", Const, 10},
+		{"R_PPC64_TPREL16_HIGHER", Const, 5},
+		{"R_PPC64_TPREL16_HIGHERA", Const, 5},
+		{"R_PPC64_TPREL16_HIGHEST", Const, 5},
+		{"R_PPC64_TPREL16_HIGHESTA", Const, 5},
+		{"R_PPC64_TPREL16_LO", Const, 5},
+		{"R_PPC64_TPREL16_LO_DS", Const, 5},
+		{"R_PPC64_TPREL34", Const, 20},
+		{"R_PPC64_TPREL64", Const, 5},
+		{"R_PPC64_UADDR16", Const, 20},
+		{"R_PPC64_UADDR32", Const, 20},
+		{"R_PPC64_UADDR64", Const, 20},
+		{"R_PPC_ADDR14", Const, 0},
+		{"R_PPC_ADDR14_BRNTAKEN", Const, 0},
+		{"R_PPC_ADDR14_BRTAKEN", Const, 0},
+		{"R_PPC_ADDR16", Const, 0},
+		{"R_PPC_ADDR16_HA", Const, 0},
+		{"R_PPC_ADDR16_HI", Const, 0},
+		{"R_PPC_ADDR16_LO", Const, 0},
+		{"R_PPC_ADDR24", Const, 0},
+		{"R_PPC_ADDR32", Const, 0},
+		{"R_PPC_COPY", Const, 0},
+		{"R_PPC_DTPMOD32", Const, 0},
+		{"R_PPC_DTPREL16", Const, 0},
+		{"R_PPC_DTPREL16_HA", Const, 0},
+		{"R_PPC_DTPREL16_HI", Const, 0},
+		{"R_PPC_DTPREL16_LO", Const, 0},
+		{"R_PPC_DTPREL32", Const, 0},
+		{"R_PPC_EMB_BIT_FLD", Const, 0},
+		{"R_PPC_EMB_MRKREF", Const, 0},
+		{"R_PPC_EMB_NADDR16", Const, 0},
+		{"R_PPC_EMB_NADDR16_HA", Const, 0},
+		{"R_PPC_EMB_NADDR16_HI", Const, 0},
+		{"R_PPC_EMB_NADDR16_LO", Const, 0},
+		{"R_PPC_EMB_NADDR32", Const, 0},
+		{"R_PPC_EMB_RELSDA", Const, 0},
+		{"R_PPC_EMB_RELSEC16", Const, 0},
+		{"R_PPC_EMB_RELST_HA", Const, 0},
+		{"R_PPC_EMB_RELST_HI", Const, 0},
+		{"R_PPC_EMB_RELST_LO", Const, 0},
+		{"R_PPC_EMB_SDA21", Const, 0},
+		{"R_PPC_EMB_SDA2I16", Const, 0},
+		{"R_PPC_EMB_SDA2REL", Const, 0},
+		{"R_PPC_EMB_SDAI16", Const, 0},
+		{"R_PPC_GLOB_DAT", Const, 0},
+		{"R_PPC_GOT16", Const, 0},
+		{"R_PPC_GOT16_HA", Const, 0},
+		{"R_PPC_GOT16_HI", Const, 0},
+		{"R_PPC_GOT16_LO", Const, 0},
+		{"R_PPC_GOT_TLSGD16", Const, 0},
+		{"R_PPC_GOT_TLSGD16_HA", Const, 0},
+		{"R_PPC_GOT_TLSGD16_HI", Const, 0},
+		{"R_PPC_GOT_TLSGD16_LO", Const, 0},
+		{"R_PPC_GOT_TLSLD16", Const, 0},
+		{"R_PPC_GOT_TLSLD16_HA", Const, 0},
+		{"R_PPC_GOT_TLSLD16_HI", Const, 0},
+		{"R_PPC_GOT_TLSLD16_LO", Const, 0},
+		{"R_PPC_GOT_TPREL16", Const, 0},
+		{"R_PPC_GOT_TPREL16_HA", Const, 0},
+		{"R_PPC_GOT_TPREL16_HI", Const, 0},
+		{"R_PPC_GOT_TPREL16_LO", Const, 0},
+		{"R_PPC_JMP_SLOT", Const, 0},
+		{"R_PPC_LOCAL24PC", Const, 0},
+		{"R_PPC_NONE", Const, 0},
+		{"R_PPC_PLT16_HA", Const, 0},
+		{"R_PPC_PLT16_HI", Const, 0},
+		{"R_PPC_PLT16_LO", Const, 0},
+		{"R_PPC_PLT32", Const, 0},
+		{"R_PPC_PLTREL24", Const, 0},
+		{"R_PPC_PLTREL32", Const, 0},
+		{"R_PPC_REL14", Const, 0},
+		{"R_PPC_REL14_BRNTAKEN", Const, 0},
+		{"R_PPC_REL14_BRTAKEN", Const, 0},
+		{"R_PPC_REL24", Const, 0},
+		{"R_PPC_REL32", Const, 0},
+		{"R_PPC_RELATIVE", Const, 0},
+		{"R_PPC_SDAREL16", Const, 0},
+		{"R_PPC_SECTOFF", Const, 0},
+		{"R_PPC_SECTOFF_HA", Const, 0},
+		{"R_PPC_SECTOFF_HI", Const, 0},
+		{"R_PPC_SECTOFF_LO", Const, 0},
+		{"R_PPC_TLS", Const, 0},
+		{"R_PPC_TPREL16", Const, 0},
+		{"R_PPC_TPREL16_HA", Const, 0},
+		{"R_PPC_TPREL16_HI", Const, 0},
+		{"R_PPC_TPREL16_LO", Const, 0},
+		{"R_PPC_TPREL32", Const, 0},
+		{"R_PPC_UADDR16", Const, 0},
+		{"R_PPC_UADDR32", Const, 0},
+		{"R_RISCV", Type, 11},
+		{"R_RISCV_32", Const, 11},
+		{"R_RISCV_32_PCREL", Const, 12},
+		{"R_RISCV_64", Const, 11},
+		{"R_RISCV_ADD16", Const, 11},
+		{"R_RISCV_ADD32", Const, 11},
+		{"R_RISCV_ADD64", Const, 11},
+		{"R_RISCV_ADD8", Const, 11},
+		{"R_RISCV_ALIGN", Const, 11},
+		{"R_RISCV_BRANCH", Const, 11},
+		{"R_RISCV_CALL", Const, 11},
+		{"R_RISCV_CALL_PLT", Const, 11},
+		{"R_RISCV_COPY", Const, 11},
+		{"R_RISCV_GNU_VTENTRY", Const, 11},
+		{"R_RISCV_GNU_VTINHERIT", Const, 11},
+		{"R_RISCV_GOT_HI20", Const, 11},
+		{"R_RISCV_GPREL_I", Const, 11},
+		{"R_RISCV_GPREL_S", Const, 11},
+		{"R_RISCV_HI20", Const, 11},
+		{"R_RISCV_JAL", Const, 11},
+		{"R_RISCV_JUMP_SLOT", Const, 11},
+		{"R_RISCV_LO12_I", Const, 11},
+		{"R_RISCV_LO12_S", Const, 11},
+		{"R_RISCV_NONE", Const, 11},
+		{"R_RISCV_PCREL_HI20", Const, 11},
+		{"R_RISCV_PCREL_LO12_I", Const, 11},
+		{"R_RISCV_PCREL_LO12_S", Const, 11},
+		{"R_RISCV_RELATIVE", Const, 11},
+		{"R_RISCV_RELAX", Const, 11},
+		{"R_RISCV_RVC_BRANCH", Const, 11},
+		{"R_RISCV_RVC_JUMP", Const, 11},
+		{"R_RISCV_RVC_LUI", Const, 11},
+		{"R_RISCV_SET16", Const, 11},
+		{"R_RISCV_SET32", Const, 11},
+		{"R_RISCV_SET6", Const, 11},
+		{"R_RISCV_SET8", Const, 11},
+		{"R_RISCV_SUB16", Const, 11},
+		{"R_RISCV_SUB32", Const, 11},
+		{"R_RISCV_SUB6", Const, 11},
+		{"R_RISCV_SUB64", Const, 11},
+		{"R_RISCV_SUB8", Const, 11},
+		{"R_RISCV_TLS_DTPMOD32", Const, 11},
+		{"R_RISCV_TLS_DTPMOD64", Const, 11},
+		{"R_RISCV_TLS_DTPREL32", Const, 11},
+		{"R_RISCV_TLS_DTPREL64", Const, 11},
+		{"R_RISCV_TLS_GD_HI20", Const, 11},
+		{"R_RISCV_TLS_GOT_HI20", Const, 11},
+		{"R_RISCV_TLS_TPREL32", Const, 11},
+		{"R_RISCV_TLS_TPREL64", Const, 11},
+		{"R_RISCV_TPREL_ADD", Const, 11},
+		{"R_RISCV_TPREL_HI20", Const, 11},
+		{"R_RISCV_TPREL_I", Const, 11},
+		{"R_RISCV_TPREL_LO12_I", Const, 11},
+		{"R_RISCV_TPREL_LO12_S", Const, 11},
+		{"R_RISCV_TPREL_S", Const, 11},
+		{"R_SPARC", Type, 0},
+		{"R_SPARC_10", Const, 0},
+		{"R_SPARC_11", Const, 0},
+		{"R_SPARC_13", Const, 0},
+		{"R_SPARC_16", Const, 0},
+		{"R_SPARC_22", Const, 0},
+		{"R_SPARC_32", Const, 0},
+		{"R_SPARC_5", Const, 0},
+		{"R_SPARC_6", Const, 0},
+		{"R_SPARC_64", Const, 0},
+		{"R_SPARC_7", Const, 0},
+		{"R_SPARC_8", Const, 0},
+		{"R_SPARC_COPY", Const, 0},
+		{"R_SPARC_DISP16", Const, 0},
+		{"R_SPARC_DISP32", Const, 0},
+		{"R_SPARC_DISP64", Const, 0},
+		{"R_SPARC_DISP8", Const, 0},
+		{"R_SPARC_GLOB_DAT", Const, 0},
+		{"R_SPARC_GLOB_JMP", Const, 0},
+		{"R_SPARC_GOT10", Const, 0},
+		{"R_SPARC_GOT13", Const, 0},
+		{"R_SPARC_GOT22", Const, 0},
+		{"R_SPARC_H44", Const, 0},
+		{"R_SPARC_HH22", Const, 0},
+		{"R_SPARC_HI22", Const, 0},
+		{"R_SPARC_HIPLT22", Const, 0},
+		{"R_SPARC_HIX22", Const, 0},
+		{"R_SPARC_HM10", Const, 0},
+		{"R_SPARC_JMP_SLOT", Const, 0},
+		{"R_SPARC_L44", Const, 0},
+		{"R_SPARC_LM22", Const, 0},
+		{"R_SPARC_LO10", Const, 0},
+		{"R_SPARC_LOPLT10", Const, 0},
+		{"R_SPARC_LOX10", Const, 0},
+		{"R_SPARC_M44", Const, 0},
+		{"R_SPARC_NONE", Const, 0},
+		{"R_SPARC_OLO10", Const, 0},
+		{"R_SPARC_PC10", Const, 0},
+		{"R_SPARC_PC22", Const, 0},
+		{"R_SPARC_PCPLT10", Const, 0},
+		{"R_SPARC_PCPLT22", Const, 0},
+		{"R_SPARC_PCPLT32", Const, 0},
+		{"R_SPARC_PC_HH22", Const, 0},
+		{"R_SPARC_PC_HM10", Const, 0},
+		{"R_SPARC_PC_LM22", Const, 0},
+		{"R_SPARC_PLT32", Const, 0},
+		{"R_SPARC_PLT64", Const, 0},
+		{"R_SPARC_REGISTER", Const, 0},
+		{"R_SPARC_RELATIVE", Const, 0},
+		{"R_SPARC_UA16", Const, 0},
+		{"R_SPARC_UA32", Const, 0},
+		{"R_SPARC_UA64", Const, 0},
+		{"R_SPARC_WDISP16", Const, 0},
+		{"R_SPARC_WDISP19", Const, 0},
+		{"R_SPARC_WDISP22", Const, 0},
+		{"R_SPARC_WDISP30", Const, 0},
+		{"R_SPARC_WPLT30", Const, 0},
+		{"R_SYM32", Func, 0},
+		{"R_SYM64", Func, 0},
+		{"R_TYPE32", Func, 0},
+		{"R_TYPE64", Func, 0},
+		{"R_X86_64", Type, 0},
+		{"R_X86_64_16", Const, 0},
+		{"R_X86_64_32", Const, 0},
+		{"R_X86_64_32S", Const, 0},
+		{"R_X86_64_64", Const, 0},
+		{"R_X86_64_8", Const, 0},
+		{"R_X86_64_COPY", Const, 0},
+		{"R_X86_64_DTPMOD64", Const, 0},
+		{"R_X86_64_DTPOFF32", Const, 0},
+		{"R_X86_64_DTPOFF64", Const, 0},
+		{"R_X86_64_GLOB_DAT", Const, 0},
+		{"R_X86_64_GOT32", Const, 0},
+		{"R_X86_64_GOT64", Const, 10},
+		{"R_X86_64_GOTOFF64", Const, 10},
+		{"R_X86_64_GOTPC32", Const, 10},
+		{"R_X86_64_GOTPC32_TLSDESC", Const, 10},
+		{"R_X86_64_GOTPC64", Const, 10},
+		{"R_X86_64_GOTPCREL", Const, 0},
+		{"R_X86_64_GOTPCREL64", Const, 10},
+		{"R_X86_64_GOTPCRELX", Const, 10},
+		{"R_X86_64_GOTPLT64", Const, 10},
+		{"R_X86_64_GOTTPOFF", Const, 0},
+		{"R_X86_64_IRELATIVE", Const, 10},
+		{"R_X86_64_JMP_SLOT", Const, 0},
+		{"R_X86_64_NONE", Const, 0},
+		{"R_X86_64_PC16", Const, 0},
+		{"R_X86_64_PC32", Const, 0},
+		{"R_X86_64_PC32_BND", Const, 10},
+		{"R_X86_64_PC64", Const, 10},
+		{"R_X86_64_PC8", Const, 0},
+		{"R_X86_64_PLT32", Const, 0},
+		{"R_X86_64_PLT32_BND", Const, 10},
+		{"R_X86_64_PLTOFF64", Const, 10},
+		{"R_X86_64_RELATIVE", Const, 0},
+		{"R_X86_64_RELATIVE64", Const, 10},
+		{"R_X86_64_REX_GOTPCRELX", Const, 10},
+		{"R_X86_64_SIZE32", Const, 10},
+		{"R_X86_64_SIZE64", Const, 10},
+		{"R_X86_64_TLSDESC", Const, 10},
+		{"R_X86_64_TLSDESC_CALL", Const, 10},
+		{"R_X86_64_TLSGD", Const, 0},
+		{"R_X86_64_TLSLD", Const, 0},
+		{"R_X86_64_TPOFF32", Const, 0},
+		{"R_X86_64_TPOFF64", Const, 0},
+		{"Rel32", Type, 0},
+		{"Rel32.Info", Field, 0},
+		{"Rel32.Off", Field, 0},
+		{"Rel64", Type, 0},
+		{"Rel64.Info", Field, 0},
+		{"Rel64.Off", Field, 0},
+		{"Rela32", Type, 0},
+		{"Rela32.Addend", Field, 0},
+		{"Rela32.Info", Field, 0},
+		{"Rela32.Off", Field, 0},
+		{"Rela64", Type, 0},
+		{"Rela64.Addend", Field, 0},
+		{"Rela64.Info", Field, 0},
+		{"Rela64.Off", Field, 0},
+		{"SHF_ALLOC", Const, 0},
+		{"SHF_COMPRESSED", Const, 6},
+		{"SHF_EXECINSTR", Const, 0},
+		{"SHF_GROUP", Const, 0},
+		{"SHF_INFO_LINK", Const, 0},
+		{"SHF_LINK_ORDER", Const, 0},
+		{"SHF_MASKOS", Const, 0},
+		{"SHF_MASKPROC", Const, 0},
+		{"SHF_MERGE", Const, 0},
+		{"SHF_OS_NONCONFORMING", Const, 0},
+		{"SHF_STRINGS", Const, 0},
+		{"SHF_TLS", Const, 0},
+		{"SHF_WRITE", Const, 0},
+		{"SHN_ABS", Const, 0},
+		{"SHN_COMMON", Const, 0},
+		{"SHN_HIOS", Const, 0},
+		{"SHN_HIPROC", Const, 0},
+		{"SHN_HIRESERVE", Const, 0},
+		{"SHN_LOOS", Const, 0},
+		{"SHN_LOPROC", Const, 0},
+		{"SHN_LORESERVE", Const, 0},
+		{"SHN_UNDEF", Const, 0},
+		{"SHN_XINDEX", Const, 0},
+		{"SHT_DYNAMIC", Const, 0},
+		{"SHT_DYNSYM", Const, 0},
+		{"SHT_FINI_ARRAY", Const, 0},
+		{"SHT_GNU_ATTRIBUTES", Const, 0},
+		{"SHT_GNU_HASH", Const, 0},
+		{"SHT_GNU_LIBLIST", Const, 0},
+		{"SHT_GNU_VERDEF", Const, 0},
+		{"SHT_GNU_VERNEED", Const, 0},
+		{"SHT_GNU_VERSYM", Const, 0},
+		{"SHT_GROUP", Const, 0},
+		{"SHT_HASH", Const, 0},
+		{"SHT_HIOS", Const, 0},
+		{"SHT_HIPROC", Const, 0},
+		{"SHT_HIUSER", Const, 0},
+		{"SHT_INIT_ARRAY", Const, 0},
+		{"SHT_LOOS", Const, 0},
+		{"SHT_LOPROC", Const, 0},
+		{"SHT_LOUSER", Const, 0},
+		{"SHT_MIPS_ABIFLAGS", Const, 17},
+		{"SHT_NOBITS", Const, 0},
+		{"SHT_NOTE", Const, 0},
+		{"SHT_NULL", Const, 0},
+		{"SHT_PREINIT_ARRAY", Const, 0},
+		{"SHT_PROGBITS", Const, 0},
+		{"SHT_REL", Const, 0},
+		{"SHT_RELA", Const, 0},
+		{"SHT_SHLIB", Const, 0},
+		{"SHT_STRTAB", Const, 0},
+		{"SHT_SYMTAB", Const, 0},
+		{"SHT_SYMTAB_SHNDX", Const, 0},
+		{"STB_GLOBAL", Const, 0},
+		{"STB_HIOS", Const, 0},
+		{"STB_HIPROC", Const, 0},
+		{"STB_LOCAL", Const, 0},
+		{"STB_LOOS", Const, 0},
+		{"STB_LOPROC", Const, 0},
+		{"STB_WEAK", Const, 0},
+		{"STT_COMMON", Const, 0},
+		{"STT_FILE", Const, 0},
+		{"STT_FUNC", Const, 0},
+		{"STT_HIOS", Const, 0},
+		{"STT_HIPROC", Const, 0},
+		{"STT_LOOS", Const, 0},
+		{"STT_LOPROC", Const, 0},
+		{"STT_NOTYPE", Const, 0},
+		{"STT_OBJECT", Const, 0},
+		{"STT_SECTION", Const, 0},
+		{"STT_TLS", Const, 0},
+		{"STV_DEFAULT", Const, 0},
+		{"STV_HIDDEN", Const, 0},
+		{"STV_INTERNAL", Const, 0},
+		{"STV_PROTECTED", Const, 0},
+		{"ST_BIND", Func, 0},
+		{"ST_INFO", Func, 0},
+		{"ST_TYPE", Func, 0},
+		{"ST_VISIBILITY", Func, 0},
+		{"Section", Type, 0},
+		{"Section.ReaderAt", Field, 0},
+		{"Section.SectionHeader", Field, 0},
+		{"Section32", Type, 0},
+		{"Section32.Addr", Field, 0},
+		{"Section32.Addralign", Field, 0},
+		{"Section32.Entsize", Field, 0},
+		{"Section32.Flags", Field, 0},
+		{"Section32.Info", Field, 0},
+		{"Section32.Link", Field, 0},
+		{"Section32.Name", Field, 0},
+		{"Section32.Off", Field, 0},
+		{"Section32.Size", Field, 0},
+		{"Section32.Type", Field, 0},
+		{"Section64", Type, 0},
+		{"Section64.Addr", Field, 0},
+		{"Section64.Addralign", Field, 0},
+		{"Section64.Entsize", Field, 0},
+		{"Section64.Flags", Field, 0},
+		{"Section64.Info", Field, 0},
+		{"Section64.Link", Field, 0},
+		{"Section64.Name", Field, 0},
+		{"Section64.Off", Field, 0},
+		{"Section64.Size", Field, 0},
+		{"Section64.Type", Field, 0},
+		{"SectionFlag", Type, 0},
+		{"SectionHeader", Type, 0},
+		{"SectionHeader.Addr", Field, 0},
+		{"SectionHeader.Addralign", Field, 0},
+		{"SectionHeader.Entsize", Field, 0},
+		{"SectionHeader.FileSize", Field, 6},
+		{"SectionHeader.Flags", Field, 0},
+		{"SectionHeader.Info", Field, 0},
+		{"SectionHeader.Link", Field, 0},
+		{"SectionHeader.Name", Field, 0},
+		{"SectionHeader.Offset", Field, 0},
+		{"SectionHeader.Size", Field, 0},
+		{"SectionHeader.Type", Field, 0},
+		{"SectionIndex", Type, 0},
+		{"SectionType", Type, 0},
+		{"Sym32", Type, 0},
+		{"Sym32.Info", Field, 0},
+		{"Sym32.Name", Field, 0},
+		{"Sym32.Other", Field, 0},
+		{"Sym32.Shndx", Field, 0},
+		{"Sym32.Size", Field, 0},
+		{"Sym32.Value", Field, 0},
+		{"Sym32Size", Const, 0},
+		{"Sym64", Type, 0},
+		{"Sym64.Info", Field, 0},
+		{"Sym64.Name", Field, 0},
+		{"Sym64.Other", Field, 0},
+		{"Sym64.Shndx", Field, 0},
+		{"Sym64.Size", Field, 0},
+		{"Sym64.Value", Field, 0},
+		{"Sym64Size", Const, 0},
+		{"SymBind", Type, 0},
+		{"SymType", Type, 0},
+		{"SymVis", Type, 0},
+		{"Symbol", Type, 0},
+		{"Symbol.Info", Field, 0},
+		{"Symbol.Library", Field, 13},
+		{"Symbol.Name", Field, 0},
+		{"Symbol.Other", Field, 0},
+		{"Symbol.Section", Field, 0},
+		{"Symbol.Size", Field, 0},
+		{"Symbol.Value", Field, 0},
+		{"Symbol.Version", Field, 13},
+		{"Type", Type, 0},
+		{"Version", Type, 0},
+	},
+	"debug/gosym": {
+		{"(*DecodingError).Error", Method, 0},
+		{"(*LineTable).LineToPC", Method, 0},
+		{"(*LineTable).PCToLine", Method, 0},
+		{"(*Sym).BaseName", Method, 0},
+		{"(*Sym).PackageName", Method, 0},
+		{"(*Sym).ReceiverName", Method, 0},
+		{"(*Sym).Static", Method, 0},
+		{"(*Table).LineToPC", Method, 0},
+		{"(*Table).LookupFunc", Method, 0},
+		{"(*Table).LookupSym", Method, 0},
+		{"(*Table).PCToFunc", Method, 0},
+		{"(*Table).PCToLine", Method, 0},
+		{"(*Table).SymByAddr", Method, 0},
+		{"(*UnknownLineError).Error", Method, 0},
+		{"(Func).BaseName", Method, 0},
+		{"(Func).PackageName", Method, 0},
+		{"(Func).ReceiverName", Method, 0},
+		{"(Func).Static", Method, 0},
+		{"(UnknownFileError).Error", Method, 0},
+		{"DecodingError", Type, 0},
+		{"Func", Type, 0},
+		{"Func.End", Field, 0},
+		{"Func.Entry", Field, 0},
+		{"Func.FrameSize", Field, 0},
+		{"Func.LineTable", Field, 0},
+		{"Func.Locals", Field, 0},
+		{"Func.Obj", Field, 0},
+		{"Func.Params", Field, 0},
+		{"Func.Sym", Field, 0},
+		{"LineTable", Type, 0},
+		{"LineTable.Data", Field, 0},
+		{"LineTable.Line", Field, 0},
+		{"LineTable.PC", Field, 0},
+		{"NewLineTable", Func, 0},
+		{"NewTable", Func, 0},
+		{"Obj", Type, 0},
+		{"Obj.Funcs", Field, 0},
+		{"Obj.Paths", Field, 0},
+		{"Sym", Type, 0},
+		{"Sym.Func", Field, 0},
+		{"Sym.GoType", Field, 0},
+		{"Sym.Name", Field, 0},
+		{"Sym.Type", Field, 0},
+		{"Sym.Value", Field, 0},
+		{"Table", Type, 0},
+		{"Table.Files", Field, 0},
+		{"Table.Funcs", Field, 0},
+		{"Table.Objs", Field, 0},
+		{"Table.Syms", Field, 0},
+		{"UnknownFileError", Type, 0},
+		{"UnknownLineError", Type, 0},
+		{"UnknownLineError.File", Field, 0},
+		{"UnknownLineError.Line", Field, 0},
+	},
+	"debug/macho": {
+		{"(*FatFile).Close", Method, 3},
+		{"(*File).Close", Method, 0},
+		{"(*File).DWARF", Method, 0},
+		{"(*File).ImportedLibraries", Method, 0},
+		{"(*File).ImportedSymbols", Method, 0},
+		{"(*File).Section", Method, 0},
+		{"(*File).Segment", Method, 0},
+		{"(*FormatError).Error", Method, 0},
+		{"(*Section).Data", Method, 0},
+		{"(*Section).Open", Method, 0},
+		{"(*Segment).Data", Method, 0},
+		{"(*Segment).Open", Method, 0},
+		{"(Cpu).GoString", Method, 0},
+		{"(Cpu).String", Method, 0},
+		{"(Dylib).Raw", Method, 0},
+		{"(Dysymtab).Raw", Method, 0},
+		{"(FatArch).Close", Method, 3},
+		{"(FatArch).DWARF", Method, 3},
+		{"(FatArch).ImportedLibraries", Method, 3},
+		{"(FatArch).ImportedSymbols", Method, 3},
+		{"(FatArch).Section", Method, 3},
+		{"(FatArch).Segment", Method, 3},
+		{"(LoadBytes).Raw", Method, 0},
+		{"(LoadCmd).GoString", Method, 0},
+		{"(LoadCmd).String", Method, 0},
+		{"(RelocTypeARM).GoString", Method, 10},
+		{"(RelocTypeARM).String", Method, 10},
+		{"(RelocTypeARM64).GoString", Method, 10},
+		{"(RelocTypeARM64).String", Method, 10},
+		{"(RelocTypeGeneric).GoString", Method, 10},
+		{"(RelocTypeGeneric).String", Method, 10},
+		{"(RelocTypeX86_64).GoString", Method, 10},
+		{"(RelocTypeX86_64).String", Method, 10},
+		{"(Rpath).Raw", Method, 10},
+		{"(Section).ReadAt", Method, 0},
+		{"(Segment).Raw", Method, 0},
+		{"(Segment).ReadAt", Method, 0},
+		{"(Symtab).Raw", Method, 0},
+		{"(Type).GoString", Method, 10},
+		{"(Type).String", Method, 10},
+		{"ARM64_RELOC_ADDEND", Const, 10},
+		{"ARM64_RELOC_BRANCH26", Const, 10},
+		{"ARM64_RELOC_GOT_LOAD_PAGE21", Const, 10},
+		{"ARM64_RELOC_GOT_LOAD_PAGEOFF12", Const, 10},
+		{"ARM64_RELOC_PAGE21", Const, 10},
+		{"ARM64_RELOC_PAGEOFF12", Const, 10},
+		{"ARM64_RELOC_POINTER_TO_GOT", Const, 10},
+		{"ARM64_RELOC_SUBTRACTOR", Const, 10},
+		{"ARM64_RELOC_TLVP_LOAD_PAGE21", Const, 10},
+		{"ARM64_RELOC_TLVP_LOAD_PAGEOFF12", Const, 10},
+		{"ARM64_RELOC_UNSIGNED", Const, 10},
+		{"ARM_RELOC_BR24", Const, 10},
+		{"ARM_RELOC_HALF", Const, 10},
+		{"ARM_RELOC_HALF_SECTDIFF", Const, 10},
+		{"ARM_RELOC_LOCAL_SECTDIFF", Const, 10},
+		{"ARM_RELOC_PAIR", Const, 10},
+		{"ARM_RELOC_PB_LA_PTR", Const, 10},
+		{"ARM_RELOC_SECTDIFF", Const, 10},
+		{"ARM_RELOC_VANILLA", Const, 10},
+		{"ARM_THUMB_32BIT_BRANCH", Const, 10},
+		{"ARM_THUMB_RELOC_BR22", Const, 10},
+		{"Cpu", Type, 0},
+		{"Cpu386", Const, 0},
+		{"CpuAmd64", Const, 0},
+		{"CpuArm", Const, 3},
+		{"CpuArm64", Const, 11},
+		{"CpuPpc", Const, 3},
+		{"CpuPpc64", Const, 3},
+		{"Dylib", Type, 0},
+		{"Dylib.CompatVersion", Field, 0},
+		{"Dylib.CurrentVersion", Field, 0},
+		{"Dylib.LoadBytes", Field, 0},
+		{"Dylib.Name", Field, 0},
+		{"Dylib.Time", Field, 0},
+		{"DylibCmd", Type, 0},
+		{"DylibCmd.Cmd", Field, 0},
+		{"DylibCmd.CompatVersion", Field, 0},
+		{"DylibCmd.CurrentVersion", Field, 0},
+		{"DylibCmd.Len", Field, 0},
+		{"DylibCmd.Name", Field, 0},
+		{"DylibCmd.Time", Field, 0},
+		{"Dysymtab", Type, 0},
+		{"Dysymtab.DysymtabCmd", Field, 0},
+		{"Dysymtab.IndirectSyms", Field, 0},
+		{"Dysymtab.LoadBytes", Field, 0},
+		{"DysymtabCmd", Type, 0},
+		{"DysymtabCmd.Cmd", Field, 0},
+		{"DysymtabCmd.Extrefsymoff", Field, 0},
+		{"DysymtabCmd.Extreloff", Field, 0},
+		{"DysymtabCmd.Iextdefsym", Field, 0},
+		{"DysymtabCmd.Ilocalsym", Field, 0},
+		{"DysymtabCmd.Indirectsymoff", Field, 0},
+		{"DysymtabCmd.Iundefsym", Field, 0},
+		{"DysymtabCmd.Len", Field, 0},
+		{"DysymtabCmd.Locreloff", Field, 0},
+		{"DysymtabCmd.Modtaboff", Field, 0},
+		{"DysymtabCmd.Nextdefsym", Field, 0},
+		{"DysymtabCmd.Nextrefsyms", Field, 0},
+		{"DysymtabCmd.Nextrel", Field, 0},
+		{"DysymtabCmd.Nindirectsyms", Field, 0},
+		{"DysymtabCmd.Nlocalsym", Field, 0},
+		{"DysymtabCmd.Nlocrel", Field, 0},
+		{"DysymtabCmd.Nmodtab", Field, 0},
+		{"DysymtabCmd.Ntoc", Field, 0},
+		{"DysymtabCmd.Nundefsym", Field, 0},
+		{"DysymtabCmd.Tocoffset", Field, 0},
+		{"ErrNotFat", Var, 3},
+		{"FatArch", Type, 3},
+		{"FatArch.FatArchHeader", Field, 3},
+		{"FatArch.File", Field, 3},
+		{"FatArchHeader", Type, 3},
+		{"FatArchHeader.Align", Field, 3},
+		{"FatArchHeader.Cpu", Field, 3},
+		{"FatArchHeader.Offset", Field, 3},
+		{"FatArchHeader.Size", Field, 3},
+		{"FatArchHeader.SubCpu", Field, 3},
+		{"FatFile", Type, 3},
+		{"FatFile.Arches", Field, 3},
+		{"FatFile.Magic", Field, 3},
+		{"File", Type, 0},
+		{"File.ByteOrder", Field, 0},
+		{"File.Dysymtab", Field, 0},
+		{"File.FileHeader", Field, 0},
+		{"File.Loads", Field, 0},
+		{"File.Sections", Field, 0},
+		{"File.Symtab", Field, 0},
+		{"FileHeader", Type, 0},
+		{"FileHeader.Cmdsz", Field, 0},
+		{"FileHeader.Cpu", Field, 0},
+		{"FileHeader.Flags", Field, 0},
+		{"FileHeader.Magic", Field, 0},
+		{"FileHeader.Ncmd", Field, 0},
+		{"FileHeader.SubCpu", Field, 0},
+		{"FileHeader.Type", Field, 0},
+		{"FlagAllModsBound", Const, 10},
+		{"FlagAllowStackExecution", Const, 10},
+		{"FlagAppExtensionSafe", Const, 10},
+		{"FlagBindAtLoad", Const, 10},
+		{"FlagBindsToWeak", Const, 10},
+		{"FlagCanonical", Const, 10},
+		{"FlagDeadStrippableDylib", Const, 10},
+		{"FlagDyldLink", Const, 10},
+		{"FlagForceFlat", Const, 10},
+		{"FlagHasTLVDescriptors", Const, 10},
+		{"FlagIncrLink", Const, 10},
+		{"FlagLazyInit", Const, 10},
+		{"FlagNoFixPrebinding", Const, 10},
+		{"FlagNoHeapExecution", Const, 10},
+		{"FlagNoMultiDefs", Const, 10},
+		{"FlagNoReexportedDylibs", Const, 10},
+		{"FlagNoUndefs", Const, 10},
+		{"FlagPIE", Const, 10},
+		{"FlagPrebindable", Const, 10},
+		{"FlagPrebound", Const, 10},
+		{"FlagRootSafe", Const, 10},
+		{"FlagSetuidSafe", Const, 10},
+		{"FlagSplitSegs", Const, 10},
+		{"FlagSubsectionsViaSymbols", Const, 10},
+		{"FlagTwoLevel", Const, 10},
+		{"FlagWeakDefines", Const, 10},
+		{"FormatError", Type, 0},
+		{"GENERIC_RELOC_LOCAL_SECTDIFF", Const, 10},
+		{"GENERIC_RELOC_PAIR", Const, 10},
+		{"GENERIC_RELOC_PB_LA_PTR", Const, 10},
+		{"GENERIC_RELOC_SECTDIFF", Const, 10},
+		{"GENERIC_RELOC_TLV", Const, 10},
+		{"GENERIC_RELOC_VANILLA", Const, 10},
+		{"Load", Type, 0},
+		{"LoadBytes", Type, 0},
+		{"LoadCmd", Type, 0},
+		{"LoadCmdDylib", Const, 0},
+		{"LoadCmdDylinker", Const, 0},
+		{"LoadCmdDysymtab", Const, 0},
+		{"LoadCmdRpath", Const, 10},
+		{"LoadCmdSegment", Const, 0},
+		{"LoadCmdSegment64", Const, 0},
+		{"LoadCmdSymtab", Const, 0},
+		{"LoadCmdThread", Const, 0},
+		{"LoadCmdUnixThread", Const, 0},
+		{"Magic32", Const, 0},
+		{"Magic64", Const, 0},
+		{"MagicFat", Const, 3},
+		{"NewFatFile", Func, 3},
+		{"NewFile", Func, 0},
+		{"Nlist32", Type, 0},
+		{"Nlist32.Desc", Field, 0},
+		{"Nlist32.Name", Field, 0},
+		{"Nlist32.Sect", Field, 0},
+		{"Nlist32.Type", Field, 0},
+		{"Nlist32.Value", Field, 0},
+		{"Nlist64", Type, 0},
+		{"Nlist64.Desc", Field, 0},
+		{"Nlist64.Name", Field, 0},
+		{"Nlist64.Sect", Field, 0},
+		{"Nlist64.Type", Field, 0},
+		{"Nlist64.Value", Field, 0},
+		{"Open", Func, 0},
+		{"OpenFat", Func, 3},
+		{"Regs386", Type, 0},
+		{"Regs386.AX", Field, 0},
+		{"Regs386.BP", Field, 0},
+		{"Regs386.BX", Field, 0},
+		{"Regs386.CS", Field, 0},
+		{"Regs386.CX", Field, 0},
+		{"Regs386.DI", Field, 0},
+		{"Regs386.DS", Field, 0},
+		{"Regs386.DX", Field, 0},
+		{"Regs386.ES", Field, 0},
+		{"Regs386.FLAGS", Field, 0},
+		{"Regs386.FS", Field, 0},
+		{"Regs386.GS", Field, 0},
+		{"Regs386.IP", Field, 0},
+		{"Regs386.SI", Field, 0},
+		{"Regs386.SP", Field, 0},
+		{"Regs386.SS", Field, 0},
+		{"RegsAMD64", Type, 0},
+		{"RegsAMD64.AX", Field, 0},
+		{"RegsAMD64.BP", Field, 0},
+		{"RegsAMD64.BX", Field, 0},
+		{"RegsAMD64.CS", Field, 0},
+		{"RegsAMD64.CX", Field, 0},
+		{"RegsAMD64.DI", Field, 0},
+		{"RegsAMD64.DX", Field, 0},
+		{"RegsAMD64.FLAGS", Field, 0},
+		{"RegsAMD64.FS", Field, 0},
+		{"RegsAMD64.GS", Field, 0},
+		{"RegsAMD64.IP", Field, 0},
+		{"RegsAMD64.R10", Field, 0},
+		{"RegsAMD64.R11", Field, 0},
+		{"RegsAMD64.R12", Field, 0},
+		{"RegsAMD64.R13", Field, 0},
+		{"RegsAMD64.R14", Field, 0},
+		{"RegsAMD64.R15", Field, 0},
+		{"RegsAMD64.R8", Field, 0},
+		{"RegsAMD64.R9", Field, 0},
+		{"RegsAMD64.SI", Field, 0},
+		{"RegsAMD64.SP", Field, 0},
+		{"Reloc", Type, 10},
+		{"Reloc.Addr", Field, 10},
+		{"Reloc.Extern", Field, 10},
+		{"Reloc.Len", Field, 10},
+		{"Reloc.Pcrel", Field, 10},
+		{"Reloc.Scattered", Field, 10},
+		{"Reloc.Type", Field, 10},
+		{"Reloc.Value", Field, 10},
+		{"RelocTypeARM", Type, 10},
+		{"RelocTypeARM64", Type, 10},
+		{"RelocTypeGeneric", Type, 10},
+		{"RelocTypeX86_64", Type, 10},
+		{"Rpath", Type, 10},
+		{"Rpath.LoadBytes", Field, 10},
+		{"Rpath.Path", Field, 10},
+		{"RpathCmd", Type, 10},
+		{"RpathCmd.Cmd", Field, 10},
+		{"RpathCmd.Len", Field, 10},
+		{"RpathCmd.Path", Field, 10},
+		{"Section", Type, 0},
+		{"Section.ReaderAt", Field, 0},
+		{"Section.Relocs", Field, 10},
+		{"Section.SectionHeader", Field, 0},
+		{"Section32", Type, 0},
+		{"Section32.Addr", Field, 0},
+		{"Section32.Align", Field, 0},
+		{"Section32.Flags", Field, 0},
+		{"Section32.Name", Field, 0},
+		{"Section32.Nreloc", Field, 0},
+		{"Section32.Offset", Field, 0},
+		{"Section32.Reloff", Field, 0},
+		{"Section32.Reserve1", Field, 0},
+		{"Section32.Reserve2", Field, 0},
+		{"Section32.Seg", Field, 0},
+		{"Section32.Size", Field, 0},
+		{"Section64", Type, 0},
+		{"Section64.Addr", Field, 0},
+		{"Section64.Align", Field, 0},
+		{"Section64.Flags", Field, 0},
+		{"Section64.Name", Field, 0},
+		{"Section64.Nreloc", Field, 0},
+		{"Section64.Offset", Field, 0},
+		{"Section64.Reloff", Field, 0},
+		{"Section64.Reserve1", Field, 0},
+		{"Section64.Reserve2", Field, 0},
+		{"Section64.Reserve3", Field, 0},
+		{"Section64.Seg", Field, 0},
+		{"Section64.Size", Field, 0},
+		{"SectionHeader", Type, 0},
+		{"SectionHeader.Addr", Field, 0},
+		{"SectionHeader.Align", Field, 0},
+		{"SectionHeader.Flags", Field, 0},
+		{"SectionHeader.Name", Field, 0},
+		{"SectionHeader.Nreloc", Field, 0},
+		{"SectionHeader.Offset", Field, 0},
+		{"SectionHeader.Reloff", Field, 0},
+		{"SectionHeader.Seg", Field, 0},
+		{"SectionHeader.Size", Field, 0},
+		{"Segment", Type, 0},
+		{"Segment.LoadBytes", Field, 0},
+		{"Segment.ReaderAt", Field, 0},
+		{"Segment.SegmentHeader", Field, 0},
+		{"Segment32", Type, 0},
+		{"Segment32.Addr", Field, 0},
+		{"Segment32.Cmd", Field, 0},
+		{"Segment32.Filesz", Field, 0},
+		{"Segment32.Flag", Field, 0},
+		{"Segment32.Len", Field, 0},
+		{"Segment32.Maxprot", Field, 0},
+		{"Segment32.Memsz", Field, 0},
+		{"Segment32.Name", Field, 0},
+		{"Segment32.Nsect", Field, 0},
+		{"Segment32.Offset", Field, 0},
+		{"Segment32.Prot", Field, 0},
+		{"Segment64", Type, 0},
+		{"Segment64.Addr", Field, 0},
+		{"Segment64.Cmd", Field, 0},
+		{"Segment64.Filesz", Field, 0},
+		{"Segment64.Flag", Field, 0},
+		{"Segment64.Len", Field, 0},
+		{"Segment64.Maxprot", Field, 0},
+		{"Segment64.Memsz", Field, 0},
+		{"Segment64.Name", Field, 0},
+		{"Segment64.Nsect", Field, 0},
+		{"Segment64.Offset", Field, 0},
+		{"Segment64.Prot", Field, 0},
+		{"SegmentHeader", Type, 0},
+		{"SegmentHeader.Addr", Field, 0},
+		{"SegmentHeader.Cmd", Field, 0},
+		{"SegmentHeader.Filesz", Field, 0},
+		{"SegmentHeader.Flag", Field, 0},
+		{"SegmentHeader.Len", Field, 0},
+		{"SegmentHeader.Maxprot", Field, 0},
+		{"SegmentHeader.Memsz", Field, 0},
+		{"SegmentHeader.Name", Field, 0},
+		{"SegmentHeader.Nsect", Field, 0},
+		{"SegmentHeader.Offset", Field, 0},
+		{"SegmentHeader.Prot", Field, 0},
+		{"Symbol", Type, 0},
+		{"Symbol.Desc", Field, 0},
+		{"Symbol.Name", Field, 0},
+		{"Symbol.Sect", Field, 0},
+		{"Symbol.Type", Field, 0},
+		{"Symbol.Value", Field, 0},
+		{"Symtab", Type, 0},
+		{"Symtab.LoadBytes", Field, 0},
+		{"Symtab.Syms", Field, 0},
+		{"Symtab.SymtabCmd", Field, 0},
+		{"SymtabCmd", Type, 0},
+		{"SymtabCmd.Cmd", Field, 0},
+		{"SymtabCmd.Len", Field, 0},
+		{"SymtabCmd.Nsyms", Field, 0},
+		{"SymtabCmd.Stroff", Field, 0},
+		{"SymtabCmd.Strsize", Field, 0},
+		{"SymtabCmd.Symoff", Field, 0},
+		{"Thread", Type, 0},
+		{"Thread.Cmd", Field, 0},
+		{"Thread.Data", Field, 0},
+		{"Thread.Len", Field, 0},
+		{"Thread.Type", Field, 0},
+		{"Type", Type, 0},
+		{"TypeBundle", Const, 3},
+		{"TypeDylib", Const, 3},
+		{"TypeExec", Const, 0},
+		{"TypeObj", Const, 0},
+		{"X86_64_RELOC_BRANCH", Const, 10},
+		{"X86_64_RELOC_GOT", Const, 10},
+		{"X86_64_RELOC_GOT_LOAD", Const, 10},
+		{"X86_64_RELOC_SIGNED", Const, 10},
+		{"X86_64_RELOC_SIGNED_1", Const, 10},
+		{"X86_64_RELOC_SIGNED_2", Const, 10},
+		{"X86_64_RELOC_SIGNED_4", Const, 10},
+		{"X86_64_RELOC_SUBTRACTOR", Const, 10},
+		{"X86_64_RELOC_TLV", Const, 10},
+		{"X86_64_RELOC_UNSIGNED", Const, 10},
+	},
+	"debug/pe": {
+		{"(*COFFSymbol).FullName", Method, 8},
+		{"(*File).COFFSymbolReadSectionDefAux", Method, 19},
+		{"(*File).Close", Method, 0},
+		{"(*File).DWARF", Method, 0},
+		{"(*File).ImportedLibraries", Method, 0},
+		{"(*File).ImportedSymbols", Method, 0},
+		{"(*File).Section", Method, 0},
+		{"(*FormatError).Error", Method, 0},
+		{"(*Section).Data", Method, 0},
+		{"(*Section).Open", Method, 0},
+		{"(Section).ReadAt", Method, 0},
+		{"(StringTable).String", Method, 8},
+		{"COFFSymbol", Type, 1},
+		{"COFFSymbol.Name", Field, 1},
+		{"COFFSymbol.NumberOfAuxSymbols", Field, 1},
+		{"COFFSymbol.SectionNumber", Field, 1},
+		{"COFFSymbol.StorageClass", Field, 1},
+		{"COFFSymbol.Type", Field, 1},
+		{"COFFSymbol.Value", Field, 1},
+		{"COFFSymbolAuxFormat5", Type, 19},
+		{"COFFSymbolAuxFormat5.Checksum", Field, 19},
+		{"COFFSymbolAuxFormat5.NumLineNumbers", Field, 19},
+		{"COFFSymbolAuxFormat5.NumRelocs", Field, 19},
+		{"COFFSymbolAuxFormat5.SecNum", Field, 19},
+		{"COFFSymbolAuxFormat5.Selection", Field, 19},
+		{"COFFSymbolAuxFormat5.Size", Field, 19},
+		{"COFFSymbolSize", Const, 1},
+		{"DataDirectory", Type, 3},
+		{"DataDirectory.Size", Field, 3},
+		{"DataDirectory.VirtualAddress", Field, 3},
+		{"File", Type, 0},
+		{"File.COFFSymbols", Field, 8},
+		{"File.FileHeader", Field, 0},
+		{"File.OptionalHeader", Field, 3},
+		{"File.Sections", Field, 0},
+		{"File.StringTable", Field, 8},
+		{"File.Symbols", Field, 1},
+		{"FileHeader", Type, 0},
+		{"FileHeader.Characteristics", Field, 0},
+		{"FileHeader.Machine", Field, 0},
+		{"FileHeader.NumberOfSections", Field, 0},
+		{"FileHeader.NumberOfSymbols", Field, 0},
+		{"FileHeader.PointerToSymbolTable", Field, 0},
+		{"FileHeader.SizeOfOptionalHeader", Field, 0},
+		{"FileHeader.TimeDateStamp", Field, 0},
+		{"FormatError", Type, 0},
+		{"IMAGE_COMDAT_SELECT_ANY", Const, 19},
+		{"IMAGE_COMDAT_SELECT_ASSOCIATIVE", Const, 19},
+		{"IMAGE_COMDAT_SELECT_EXACT_MATCH", Const, 19},
+		{"IMAGE_COMDAT_SELECT_LARGEST", Const, 19},
+		{"IMAGE_COMDAT_SELECT_NODUPLICATES", Const, 19},
+		{"IMAGE_COMDAT_SELECT_SAME_SIZE", Const, 19},
+		{"IMAGE_DIRECTORY_ENTRY_ARCHITECTURE", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_BASERELOC", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_DEBUG", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_EXCEPTION", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_EXPORT", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_GLOBALPTR", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_IAT", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_IMPORT", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_RESOURCE", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_SECURITY", Const, 11},
+		{"IMAGE_DIRECTORY_ENTRY_TLS", Const, 11},
+		{"IMAGE_DLLCHARACTERISTICS_APPCONTAINER", Const, 15},
+		{"IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE", Const, 15},
+		{"IMAGE_DLLCHARACTERISTICS_FORCE_INTEGRITY", Const, 15},
+		{"IMAGE_DLLCHARACTERISTICS_GUARD_CF", Const, 15},
+		{"IMAGE_DLLCHARACTERISTICS_HIGH_ENTROPY_VA", Const, 15},
+		{"IMAGE_DLLCHARACTERISTICS_NO_BIND", Const, 15},
+		{"IMAGE_DLLCHARACTERISTICS_NO_ISOLATION", Const, 15},
+		{"IMAGE_DLLCHARACTERISTICS_NO_SEH", Const, 15},
+		{"IMAGE_DLLCHARACTERISTICS_NX_COMPAT", Const, 15},
+		{"IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE", Const, 15},
+		{"IMAGE_DLLCHARACTERISTICS_WDM_DRIVER", Const, 15},
+		{"IMAGE_FILE_32BIT_MACHINE", Const, 15},
+		{"IMAGE_FILE_AGGRESIVE_WS_TRIM", Const, 15},
+		{"IMAGE_FILE_BYTES_REVERSED_HI", Const, 15},
+		{"IMAGE_FILE_BYTES_REVERSED_LO", Const, 15},
+		{"IMAGE_FILE_DEBUG_STRIPPED", Const, 15},
+		{"IMAGE_FILE_DLL", Const, 15},
+		{"IMAGE_FILE_EXECUTABLE_IMAGE", Const, 15},
+		{"IMAGE_FILE_LARGE_ADDRESS_AWARE", Const, 15},
+		{"IMAGE_FILE_LINE_NUMS_STRIPPED", Const, 15},
+		{"IMAGE_FILE_LOCAL_SYMS_STRIPPED", Const, 15},
+		{"IMAGE_FILE_MACHINE_AM33", Const, 0},
+		{"IMAGE_FILE_MACHINE_AMD64", Const, 0},
+		{"IMAGE_FILE_MACHINE_ARM", Const, 0},
+		{"IMAGE_FILE_MACHINE_ARM64", Const, 11},
+		{"IMAGE_FILE_MACHINE_ARMNT", Const, 12},
+		{"IMAGE_FILE_MACHINE_EBC", Const, 0},
+		{"IMAGE_FILE_MACHINE_I386", Const, 0},
+		{"IMAGE_FILE_MACHINE_IA64", Const, 0},
+		{"IMAGE_FILE_MACHINE_LOONGARCH32", Const, 19},
+		{"IMAGE_FILE_MACHINE_LOONGARCH64", Const, 19},
+		{"IMAGE_FILE_MACHINE_M32R", Const, 0},
+		{"IMAGE_FILE_MACHINE_MIPS16", Const, 0},
+		{"IMAGE_FILE_MACHINE_MIPSFPU", Const, 0},
+		{"IMAGE_FILE_MACHINE_MIPSFPU16", Const, 0},
+		{"IMAGE_FILE_MACHINE_POWERPC", Const, 0},
+		{"IMAGE_FILE_MACHINE_POWERPCFP", Const, 0},
+		{"IMAGE_FILE_MACHINE_R4000", Const, 0},
+		{"IMAGE_FILE_MACHINE_RISCV128", Const, 20},
+		{"IMAGE_FILE_MACHINE_RISCV32", Const, 20},
+		{"IMAGE_FILE_MACHINE_RISCV64", Const, 20},
+		{"IMAGE_FILE_MACHINE_SH3", Const, 0},
+		{"IMAGE_FILE_MACHINE_SH3DSP", Const, 0},
+		{"IMAGE_FILE_MACHINE_SH4", Const, 0},
+		{"IMAGE_FILE_MACHINE_SH5", Const, 0},
+		{"IMAGE_FILE_MACHINE_THUMB", Const, 0},
+		{"IMAGE_FILE_MACHINE_UNKNOWN", Const, 0},
+		{"IMAGE_FILE_MACHINE_WCEMIPSV2", Const, 0},
+		{"IMAGE_FILE_NET_RUN_FROM_SWAP", Const, 15},
+		{"IMAGE_FILE_RELOCS_STRIPPED", Const, 15},
+		{"IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP", Const, 15},
+		{"IMAGE_FILE_SYSTEM", Const, 15},
+		{"IMAGE_FILE_UP_SYSTEM_ONLY", Const, 15},
+		{"IMAGE_SCN_CNT_CODE", Const, 19},
+		{"IMAGE_SCN_CNT_INITIALIZED_DATA", Const, 19},
+		{"IMAGE_SCN_CNT_UNINITIALIZED_DATA", Const, 19},
+		{"IMAGE_SCN_LNK_COMDAT", Const, 19},
+		{"IMAGE_SCN_MEM_DISCARDABLE", Const, 19},
+		{"IMAGE_SCN_MEM_EXECUTE", Const, 19},
+		{"IMAGE_SCN_MEM_READ", Const, 19},
+		{"IMAGE_SCN_MEM_WRITE", Const, 19},
+		{"IMAGE_SUBSYSTEM_EFI_APPLICATION", Const, 15},
+		{"IMAGE_SUBSYSTEM_EFI_BOOT_SERVICE_DRIVER", Const, 15},
+		{"IMAGE_SUBSYSTEM_EFI_ROM", Const, 15},
+		{"IMAGE_SUBSYSTEM_EFI_RUNTIME_DRIVER", Const, 15},
+		{"IMAGE_SUBSYSTEM_NATIVE", Const, 15},
+		{"IMAGE_SUBSYSTEM_NATIVE_WINDOWS", Const, 15},
+		{"IMAGE_SUBSYSTEM_OS2_CUI", Const, 15},
+		{"IMAGE_SUBSYSTEM_POSIX_CUI", Const, 15},
+		{"IMAGE_SUBSYSTEM_UNKNOWN", Const, 15},
+		{"IMAGE_SUBSYSTEM_WINDOWS_BOOT_APPLICATION", Const, 15},
+		{"IMAGE_SUBSYSTEM_WINDOWS_CE_GUI", Const, 15},
+		{"IMAGE_SUBSYSTEM_WINDOWS_CUI", Const, 15},
+		{"IMAGE_SUBSYSTEM_WINDOWS_GUI", Const, 15},
+		{"IMAGE_SUBSYSTEM_XBOX", Const, 15},
+		{"ImportDirectory", Type, 0},
+		{"ImportDirectory.FirstThunk", Field, 0},
+		{"ImportDirectory.ForwarderChain", Field, 0},
+		{"ImportDirectory.Name", Field, 0},
+		{"ImportDirectory.OriginalFirstThunk", Field, 0},
+		{"ImportDirectory.TimeDateStamp", Field, 0},
+		{"NewFile", Func, 0},
+		{"Open", Func, 0},
+		{"OptionalHeader32", Type, 3},
+		{"OptionalHeader32.AddressOfEntryPoint", Field, 3},
+		{"OptionalHeader32.BaseOfCode", Field, 3},
+		{"OptionalHeader32.BaseOfData", Field, 3},
+		{"OptionalHeader32.CheckSum", Field, 3},
+		{"OptionalHeader32.DataDirectory", Field, 3},
+		{"OptionalHeader32.DllCharacteristics", Field, 3},
+		{"OptionalHeader32.FileAlignment", Field, 3},
+		{"OptionalHeader32.ImageBase", Field, 3},
+		{"OptionalHeader32.LoaderFlags", Field, 3},
+		{"OptionalHeader32.Magic", Field, 3},
+		{"OptionalHeader32.MajorImageVersion", Field, 3},
+		{"OptionalHeader32.MajorLinkerVersion", Field, 3},
+		{"OptionalHeader32.MajorOperatingSystemVersion", Field, 3},
+		{"OptionalHeader32.MajorSubsystemVersion", Field, 3},
+		{"OptionalHeader32.MinorImageVersion", Field, 3},
+		{"OptionalHeader32.MinorLinkerVersion", Field, 3},
+		{"OptionalHeader32.MinorOperatingSystemVersion", Field, 3},
+		{"OptionalHeader32.MinorSubsystemVersion", Field, 3},
+		{"OptionalHeader32.NumberOfRvaAndSizes", Field, 3},
+		{"OptionalHeader32.SectionAlignment", Field, 3},
+		{"OptionalHeader32.SizeOfCode", Field, 3},
+		{"OptionalHeader32.SizeOfHeaders", Field, 3},
+		{"OptionalHeader32.SizeOfHeapCommit", Field, 3},
+		{"OptionalHeader32.SizeOfHeapReserve", Field, 3},
+		{"OptionalHeader32.SizeOfImage", Field, 3},
+		{"OptionalHeader32.SizeOfInitializedData", Field, 3},
+		{"OptionalHeader32.SizeOfStackCommit", Field, 3},
+		{"OptionalHeader32.SizeOfStackReserve", Field, 3},
+		{"OptionalHeader32.SizeOfUninitializedData", Field, 3},
+		{"OptionalHeader32.Subsystem", Field, 3},
+		{"OptionalHeader32.Win32VersionValue", Field, 3},
+		{"OptionalHeader64", Type, 3},
+		{"OptionalHeader64.AddressOfEntryPoint", Field, 3},
+		{"OptionalHeader64.BaseOfCode", Field, 3},
+		{"OptionalHeader64.CheckSum", Field, 3},
+		{"OptionalHeader64.DataDirectory", Field, 3},
+		{"OptionalHeader64.DllCharacteristics", Field, 3},
+		{"OptionalHeader64.FileAlignment", Field, 3},
+		{"OptionalHeader64.ImageBase", Field, 3},
+		{"OptionalHeader64.LoaderFlags", Field, 3},
+		{"OptionalHeader64.Magic", Field, 3},
+		{"OptionalHeader64.MajorImageVersion", Field, 3},
+		{"OptionalHeader64.MajorLinkerVersion", Field, 3},
+		{"OptionalHeader64.MajorOperatingSystemVersion", Field, 3},
+		{"OptionalHeader64.MajorSubsystemVersion", Field, 3},
+		{"OptionalHeader64.MinorImageVersion", Field, 3},
+		{"OptionalHeader64.MinorLinkerVersion", Field, 3},
+		{"OptionalHeader64.MinorOperatingSystemVersion", Field, 3},
+		{"OptionalHeader64.MinorSubsystemVersion", Field, 3},
+		{"OptionalHeader64.NumberOfRvaAndSizes", Field, 3},
+		{"OptionalHeader64.SectionAlignment", Field, 3},
+		{"OptionalHeader64.SizeOfCode", Field, 3},
+		{"OptionalHeader64.SizeOfHeaders", Field, 3},
+		{"OptionalHeader64.SizeOfHeapCommit", Field, 3},
+		{"OptionalHeader64.SizeOfHeapReserve", Field, 3},
+		{"OptionalHeader64.SizeOfImage", Field, 3},
+		{"OptionalHeader64.SizeOfInitializedData", Field, 3},
+		{"OptionalHeader64.SizeOfStackCommit", Field, 3},
+		{"OptionalHeader64.SizeOfStackReserve", Field, 3},
+		{"OptionalHeader64.SizeOfUninitializedData", Field, 3},
+		{"OptionalHeader64.Subsystem", Field, 3},
+		{"OptionalHeader64.Win32VersionValue", Field, 3},
+		{"Reloc", Type, 8},
+		{"Reloc.SymbolTableIndex", Field, 8},
+		{"Reloc.Type", Field, 8},
+		{"Reloc.VirtualAddress", Field, 8},
+		{"Section", Type, 0},
+		{"Section.ReaderAt", Field, 0},
+		{"Section.Relocs", Field, 8},
+		{"Section.SectionHeader", Field, 0},
+		{"SectionHeader", Type, 0},
+		{"SectionHeader.Characteristics", Field, 0},
+		{"SectionHeader.Name", Field, 0},
+		{"SectionHeader.NumberOfLineNumbers", Field, 0},
+		{"SectionHeader.NumberOfRelocations", Field, 0},
+		{"SectionHeader.Offset", Field, 0},
+		{"SectionHeader.PointerToLineNumbers", Field, 0},
+		{"SectionHeader.PointerToRelocations", Field, 0},
+		{"SectionHeader.Size", Field, 0},
+		{"SectionHeader.VirtualAddress", Field, 0},
+		{"SectionHeader.VirtualSize", Field, 0},
+		{"SectionHeader32", Type, 0},
+		{"SectionHeader32.Characteristics", Field, 0},
+		{"SectionHeader32.Name", Field, 0},
+		{"SectionHeader32.NumberOfLineNumbers", Field, 0},
+		{"SectionHeader32.NumberOfRelocations", Field, 0},
+		{"SectionHeader32.PointerToLineNumbers", Field, 0},
+		{"SectionHeader32.PointerToRawData", Field, 0},
+		{"SectionHeader32.PointerToRelocations", Field, 0},
+		{"SectionHeader32.SizeOfRawData", Field, 0},
+		{"SectionHeader32.VirtualAddress", Field, 0},
+		{"SectionHeader32.VirtualSize", Field, 0},
+		{"StringTable", Type, 8},
+		{"Symbol", Type, 1},
+		{"Symbol.Name", Field, 1},
+		{"Symbol.SectionNumber", Field, 1},
+		{"Symbol.StorageClass", Field, 1},
+		{"Symbol.Type", Field, 1},
+		{"Symbol.Value", Field, 1},
+	},
+	"debug/plan9obj": {
+		{"(*File).Close", Method, 3},
+		{"(*File).Section", Method, 3},
+		{"(*File).Symbols", Method, 3},
+		{"(*Section).Data", Method, 3},
+		{"(*Section).Open", Method, 3},
+		{"(Section).ReadAt", Method, 3},
+		{"ErrNoSymbols", Var, 18},
+		{"File", Type, 3},
+		{"File.FileHeader", Field, 3},
+		{"File.Sections", Field, 3},
+		{"FileHeader", Type, 3},
+		{"FileHeader.Bss", Field, 3},
+		{"FileHeader.Entry", Field, 3},
+		{"FileHeader.HdrSize", Field, 4},
+		{"FileHeader.LoadAddress", Field, 4},
+		{"FileHeader.Magic", Field, 3},
+		{"FileHeader.PtrSize", Field, 3},
+		{"Magic386", Const, 3},
+		{"Magic64", Const, 3},
+		{"MagicAMD64", Const, 3},
+		{"MagicARM", Const, 3},
+		{"NewFile", Func, 3},
+		{"Open", Func, 3},
+		{"Section", Type, 3},
+		{"Section.ReaderAt", Field, 3},
+		{"Section.SectionHeader", Field, 3},
+		{"SectionHeader", Type, 3},
+		{"SectionHeader.Name", Field, 3},
+		{"SectionHeader.Offset", Field, 3},
+		{"SectionHeader.Size", Field, 3},
+		{"Sym", Type, 3},
+		{"Sym.Name", Field, 3},
+		{"Sym.Type", Field, 3},
+		{"Sym.Value", Field, 3},
+	},
+	"embed": {
+		{"(FS).Open", Method, 16},
+		{"(FS).ReadDir", Method, 16},
+		{"(FS).ReadFile", Method, 16},
+		{"FS", Type, 16},
+	},
+	"encoding": {
+		{"BinaryMarshaler", Type, 2},
+		{"BinaryUnmarshaler", Type, 2},
+		{"TextMarshaler", Type, 2},
+		{"TextUnmarshaler", Type, 2},
+	},
+	"encoding/ascii85": {
+		{"(CorruptInputError).Error", Method, 0},
+		{"CorruptInputError", Type, 0},
+		{"Decode", Func, 0},
+		{"Encode", Func, 0},
+		{"MaxEncodedLen", Func, 0},
+		{"NewDecoder", Func, 0},
+		{"NewEncoder", Func, 0},
+	},
+	"encoding/asn1": {
+		{"(BitString).At", Method, 0},
+		{"(BitString).RightAlign", Method, 0},
+		{"(ObjectIdentifier).Equal", Method, 0},
+		{"(ObjectIdentifier).String", Method, 3},
+		{"(StructuralError).Error", Method, 0},
+		{"(SyntaxError).Error", Method, 0},
+		{"BitString", Type, 0},
+		{"BitString.BitLength", Field, 0},
+		{"BitString.Bytes", Field, 0},
+		{"ClassApplication", Const, 6},
+		{"ClassContextSpecific", Const, 6},
+		{"ClassPrivate", Const, 6},
+		{"ClassUniversal", Const, 6},
+		{"Enumerated", Type, 0},
+		{"Flag", Type, 0},
+		{"Marshal", Func, 0},
+		{"MarshalWithParams", Func, 10},
+		{"NullBytes", Var, 9},
+		{"NullRawValue", Var, 9},
+		{"ObjectIdentifier", Type, 0},
+		{"RawContent", Type, 0},
+		{"RawValue", Type, 0},
+		{"RawValue.Bytes", Field, 0},
+		{"RawValue.Class", Field, 0},
+		{"RawValue.FullBytes", Field, 0},
+		{"RawValue.IsCompound", Field, 0},
+		{"RawValue.Tag", Field, 0},
+		{"StructuralError", Type, 0},
+		{"StructuralError.Msg", Field, 0},
+		{"SyntaxError", Type, 0},
+		{"SyntaxError.Msg", Field, 0},
+		{"TagBMPString", Const, 14},
+		{"TagBitString", Const, 6},
+		{"TagBoolean", Const, 6},
+		{"TagEnum", Const, 6},
+		{"TagGeneralString", Const, 6},
+		{"TagGeneralizedTime", Const, 6},
+		{"TagIA5String", Const, 6},
+		{"TagInteger", Const, 6},
+		{"TagNull", Const, 9},
+		{"TagNumericString", Const, 10},
+		{"TagOID", Const, 6},
+		{"TagOctetString", Const, 6},
+		{"TagPrintableString", Const, 6},
+		{"TagSequence", Const, 6},
+		{"TagSet", Const, 6},
+		{"TagT61String", Const, 6},
+		{"TagUTCTime", Const, 6},
+		{"TagUTF8String", Const, 6},
+		{"Unmarshal", Func, 0},
+		{"UnmarshalWithParams", Func, 0},
+	},
+	"encoding/base32": {
+		{"(*Encoding).AppendDecode", Method, 22},
+		{"(*Encoding).AppendEncode", Method, 22},
+		{"(*Encoding).Decode", Method, 0},
+		{"(*Encoding).DecodeString", Method, 0},
+		{"(*Encoding).DecodedLen", Method, 0},
+		{"(*Encoding).Encode", Method, 0},
+		{"(*Encoding).EncodeToString", Method, 0},
+		{"(*Encoding).EncodedLen", Method, 0},
+		{"(CorruptInputError).Error", Method, 0},
+		{"(Encoding).WithPadding", Method, 9},
+		{"CorruptInputError", Type, 0},
+		{"Encoding", Type, 0},
+		{"HexEncoding", Var, 0},
+		{"NewDecoder", Func, 0},
+		{"NewEncoder", Func, 0},
+		{"NewEncoding", Func, 0},
+		{"NoPadding", Const, 9},
+		{"StdEncoding", Var, 0},
+		{"StdPadding", Const, 9},
+	},
+	"encoding/base64": {
+		{"(*Encoding).AppendDecode", Method, 22},
+		{"(*Encoding).AppendEncode", Method, 22},
+		{"(*Encoding).Decode", Method, 0},
+		{"(*Encoding).DecodeString", Method, 0},
+		{"(*Encoding).DecodedLen", Method, 0},
+		{"(*Encoding).Encode", Method, 0},
+		{"(*Encoding).EncodeToString", Method, 0},
+		{"(*Encoding).EncodedLen", Method, 0},
+		{"(CorruptInputError).Error", Method, 0},
+		{"(Encoding).Strict", Method, 8},
+		{"(Encoding).WithPadding", Method, 5},
+		{"CorruptInputError", Type, 0},
+		{"Encoding", Type, 0},
+		{"NewDecoder", Func, 0},
+		{"NewEncoder", Func, 0},
+		{"NewEncoding", Func, 0},
+		{"NoPadding", Const, 5},
+		{"RawStdEncoding", Var, 5},
+		{"RawURLEncoding", Var, 5},
+		{"StdEncoding", Var, 0},
+		{"StdPadding", Const, 5},
+		{"URLEncoding", Var, 0},
+	},
+	"encoding/binary": {
+		{"AppendByteOrder", Type, 19},
+		{"AppendUvarint", Func, 19},
+		{"AppendVarint", Func, 19},
+		{"BigEndian", Var, 0},
+		{"ByteOrder", Type, 0},
+		{"LittleEndian", Var, 0},
+		{"MaxVarintLen16", Const, 0},
+		{"MaxVarintLen32", Const, 0},
+		{"MaxVarintLen64", Const, 0},
+		{"NativeEndian", Var, 21},
+		{"PutUvarint", Func, 0},
+		{"PutVarint", Func, 0},
+		{"Read", Func, 0},
+		{"ReadUvarint", Func, 0},
+		{"ReadVarint", Func, 0},
+		{"Size", Func, 0},
+		{"Uvarint", Func, 0},
+		{"Varint", Func, 0},
+		{"Write", Func, 0},
+	},
+	"encoding/csv": {
+		{"(*ParseError).Error", Method, 0},
+		{"(*ParseError).Unwrap", Method, 13},
+		{"(*Reader).FieldPos", Method, 17},
+		{"(*Reader).InputOffset", Method, 19},
+		{"(*Reader).Read", Method, 0},
+		{"(*Reader).ReadAll", Method, 0},
+		{"(*Writer).Error", Method, 1},
+		{"(*Writer).Flush", Method, 0},
+		{"(*Writer).Write", Method, 0},
+		{"(*Writer).WriteAll", Method, 0},
+		{"ErrBareQuote", Var, 0},
+		{"ErrFieldCount", Var, 0},
+		{"ErrQuote", Var, 0},
+		{"ErrTrailingComma", Var, 0},
+		{"NewReader", Func, 0},
+		{"NewWriter", Func, 0},
+		{"ParseError", Type, 0},
+		{"ParseError.Column", Field, 0},
+		{"ParseError.Err", Field, 0},
+		{"ParseError.Line", Field, 0},
+		{"ParseError.StartLine", Field, 10},
+		{"Reader", Type, 0},
+		{"Reader.Comma", Field, 0},
+		{"Reader.Comment", Field, 0},
+		{"Reader.FieldsPerRecord", Field, 0},
+		{"Reader.LazyQuotes", Field, 0},
+		{"Reader.ReuseRecord", Field, 9},
+		{"Reader.TrailingComma", Field, 0},
+		{"Reader.TrimLeadingSpace", Field, 0},
+		{"Writer", Type, 0},
+		{"Writer.Comma", Field, 0},
+		{"Writer.UseCRLF", Field, 0},
+	},
+	"encoding/gob": {
+		{"(*Decoder).Decode", Method, 0},
+		{"(*Decoder).DecodeValue", Method, 0},
+		{"(*Encoder).Encode", Method, 0},
+		{"(*Encoder).EncodeValue", Method, 0},
+		{"CommonType", Type, 0},
+		{"CommonType.Id", Field, 0},
+		{"CommonType.Name", Field, 0},
+		{"Decoder", Type, 0},
+		{"Encoder", Type, 0},
+		{"GobDecoder", Type, 0},
+		{"GobEncoder", Type, 0},
+		{"NewDecoder", Func, 0},
+		{"NewEncoder", Func, 0},
+		{"Register", Func, 0},
+		{"RegisterName", Func, 0},
+	},
+	"encoding/hex": {
+		{"(InvalidByteError).Error", Method, 0},
+		{"AppendDecode", Func, 22},
+		{"AppendEncode", Func, 22},
+		{"Decode", Func, 0},
+		{"DecodeString", Func, 0},
+		{"DecodedLen", Func, 0},
+		{"Dump", Func, 0},
+		{"Dumper", Func, 0},
+		{"Encode", Func, 0},
+		{"EncodeToString", Func, 0},
+		{"EncodedLen", Func, 0},
+		{"ErrLength", Var, 0},
+		{"InvalidByteError", Type, 0},
+		{"NewDecoder", Func, 10},
+		{"NewEncoder", Func, 10},
+	},
+	"encoding/json": {
+		{"(*Decoder).Buffered", Method, 1},
+		{"(*Decoder).Decode", Method, 0},
+		{"(*Decoder).DisallowUnknownFields", Method, 10},
+		{"(*Decoder).InputOffset", Method, 14},
+		{"(*Decoder).More", Method, 5},
+		{"(*Decoder).Token", Method, 5},
+		{"(*Decoder).UseNumber", Method, 1},
+		{"(*Encoder).Encode", Method, 0},
+		{"(*Encoder).SetEscapeHTML", Method, 7},
+		{"(*Encoder).SetIndent", Method, 7},
+		{"(*InvalidUTF8Error).Error", Method, 0},
+		{"(*InvalidUnmarshalError).Error", Method, 0},
+		{"(*MarshalerError).Error", Method, 0},
+		{"(*MarshalerError).Unwrap", Method, 13},
+		{"(*RawMessage).MarshalJSON", Method, 0},
+		{"(*RawMessage).UnmarshalJSON", Method, 0},
+		{"(*SyntaxError).Error", Method, 0},
+		{"(*UnmarshalFieldError).Error", Method, 0},
+		{"(*UnmarshalTypeError).Error", Method, 0},
+		{"(*UnsupportedTypeError).Error", Method, 0},
+		{"(*UnsupportedValueError).Error", Method, 0},
+		{"(Delim).String", Method, 5},
+		{"(Number).Float64", Method, 1},
+		{"(Number).Int64", Method, 1},
+		{"(Number).String", Method, 1},
+		{"(RawMessage).MarshalJSON", Method, 8},
+		{"Compact", Func, 0},
+		{"Decoder", Type, 0},
+		{"Delim", Type, 5},
+		{"Encoder", Type, 0},
+		{"HTMLEscape", Func, 0},
+		{"Indent", Func, 0},
+		{"InvalidUTF8Error", Type, 0},
+		{"InvalidUTF8Error.S", Field, 0},
+		{"InvalidUnmarshalError", Type, 0},
+		{"InvalidUnmarshalError.Type", Field, 0},
+		{"Marshal", Func, 0},
+		{"MarshalIndent", Func, 0},
+		{"Marshaler", Type, 0},
+		{"MarshalerError", Type, 0},
+		{"MarshalerError.Err", Field, 0},
+		{"MarshalerError.Type", Field, 0},
+		{"NewDecoder", Func, 0},
+		{"NewEncoder", Func, 0},
+		{"Number", Type, 1},
+		{"RawMessage", Type, 0},
+		{"SyntaxError", Type, 0},
+		{"SyntaxError.Offset", Field, 0},
+		{"Token", Type, 5},
+		{"Unmarshal", Func, 0},
+		{"UnmarshalFieldError", Type, 0},
+		{"UnmarshalFieldError.Field", Field, 0},
+		{"UnmarshalFieldError.Key", Field, 0},
+		{"UnmarshalFieldError.Type", Field, 0},
+		{"UnmarshalTypeError", Type, 0},
+		{"UnmarshalTypeError.Field", Field, 8},
+		{"UnmarshalTypeError.Offset", Field, 5},
+		{"UnmarshalTypeError.Struct", Field, 8},
+		{"UnmarshalTypeError.Type", Field, 0},
+		{"UnmarshalTypeError.Value", Field, 0},
+		{"Unmarshaler", Type, 0},
+		{"UnsupportedTypeError", Type, 0},
+		{"UnsupportedTypeError.Type", Field, 0},
+		{"UnsupportedValueError", Type, 0},
+		{"UnsupportedValueError.Str", Field, 0},
+		{"UnsupportedValueError.Value", Field, 0},
+		{"Valid", Func, 9},
+	},
+	"encoding/pem": {
+		{"Block", Type, 0},
+		{"Block.Bytes", Field, 0},
+		{"Block.Headers", Field, 0},
+		{"Block.Type", Field, 0},
+		{"Decode", Func, 0},
+		{"Encode", Func, 0},
+		{"EncodeToMemory", Func, 0},
+	},
+	"encoding/xml": {
+		{"(*Decoder).Decode", Method, 0},
+		{"(*Decoder).DecodeElement", Method, 0},
+		{"(*Decoder).InputOffset", Method, 4},
+		{"(*Decoder).InputPos", Method, 19},
+		{"(*Decoder).RawToken", Method, 0},
+		{"(*Decoder).Skip", Method, 0},
+		{"(*Decoder).Token", Method, 0},
+		{"(*Encoder).Close", Method, 20},
+		{"(*Encoder).Encode", Method, 0},
+		{"(*Encoder).EncodeElement", Method, 2},
+		{"(*Encoder).EncodeToken", Method, 2},
+		{"(*Encoder).Flush", Method, 2},
+		{"(*Encoder).Indent", Method, 1},
+		{"(*SyntaxError).Error", Method, 0},
+		{"(*TagPathError).Error", Method, 0},
+		{"(*UnsupportedTypeError).Error", Method, 0},
+		{"(CharData).Copy", Method, 0},
+		{"(Comment).Copy", Method, 0},
+		{"(Directive).Copy", Method, 0},
+		{"(ProcInst).Copy", Method, 0},
+		{"(StartElement).Copy", Method, 0},
+		{"(StartElement).End", Method, 2},
+		{"(UnmarshalError).Error", Method, 0},
+		{"Attr", Type, 0},
+		{"Attr.Name", Field, 0},
+		{"Attr.Value", Field, 0},
+		{"CharData", Type, 0},
+		{"Comment", Type, 0},
+		{"CopyToken", Func, 0},
+		{"Decoder", Type, 0},
+		{"Decoder.AutoClose", Field, 0},
+		{"Decoder.CharsetReader", Field, 0},
+		{"Decoder.DefaultSpace", Field, 1},
+		{"Decoder.Entity", Field, 0},
+		{"Decoder.Strict", Field, 0},
+		{"Directive", Type, 0},
+		{"Encoder", Type, 0},
+		{"EndElement", Type, 0},
+		{"EndElement.Name", Field, 0},
+		{"Escape", Func, 0},
+		{"EscapeText", Func, 1},
+		{"HTMLAutoClose", Var, 0},
+		{"HTMLEntity", Var, 0},
+		{"Header", Const, 0},
+		{"Marshal", Func, 0},
+		{"MarshalIndent", Func, 0},
+		{"Marshaler", Type, 2},
+		{"MarshalerAttr", Type, 2},
+		{"Name", Type, 0},
+		{"Name.Local", Field, 0},
+		{"Name.Space", Field, 0},
+		{"NewDecoder", Func, 0},
+		{"NewEncoder", Func, 0},
+		{"NewTokenDecoder", Func, 10},
+		{"ProcInst", Type, 0},
+		{"ProcInst.Inst", Field, 0},
+		{"ProcInst.Target", Field, 0},
+		{"StartElement", Type, 0},
+		{"StartElement.Attr", Field, 0},
+		{"StartElement.Name", Field, 0},
+		{"SyntaxError", Type, 0},
+		{"SyntaxError.Line", Field, 0},
+		{"SyntaxError.Msg", Field, 0},
+		{"TagPathError", Type, 0},
+		{"TagPathError.Field1", Field, 0},
+		{"TagPathError.Field2", Field, 0},
+		{"TagPathError.Struct", Field, 0},
+		{"TagPathError.Tag1", Field, 0},
+		{"TagPathError.Tag2", Field, 0},
+		{"Token", Type, 0},
+		{"TokenReader", Type, 10},
+		{"Unmarshal", Func, 0},
+		{"UnmarshalError", Type, 0},
+		{"Unmarshaler", Type, 2},
+		{"UnmarshalerAttr", Type, 2},
+		{"UnsupportedTypeError", Type, 0},
+		{"UnsupportedTypeError.Type", Field, 0},
+	},
+	"errors": {
+		{"As", Func, 13},
+		{"ErrUnsupported", Var, 21},
+		{"Is", Func, 13},
+		{"Join", Func, 20},
+		{"New", Func, 0},
+		{"Unwrap", Func, 13},
+	},
+	"expvar": {
+		{"(*Float).Add", Method, 0},
+		{"(*Float).Set", Method, 0},
+		{"(*Float).String", Method, 0},
+		{"(*Float).Value", Method, 8},
+		{"(*Int).Add", Method, 0},
+		{"(*Int).Set", Method, 0},
+		{"(*Int).String", Method, 0},
+		{"(*Int).Value", Method, 8},
+		{"(*Map).Add", Method, 0},
+		{"(*Map).AddFloat", Method, 0},
+		{"(*Map).Delete", Method, 12},
+		{"(*Map).Do", Method, 0},
+		{"(*Map).Get", Method, 0},
+		{"(*Map).Init", Method, 0},
+		{"(*Map).Set", Method, 0},
+		{"(*Map).String", Method, 0},
+		{"(*String).Set", Method, 0},
+		{"(*String).String", Method, 0},
+		{"(*String).Value", Method, 8},
+		{"(Func).String", Method, 0},
+		{"(Func).Value", Method, 8},
+		{"Do", Func, 0},
+		{"Float", Type, 0},
+		{"Func", Type, 0},
+		{"Get", Func, 0},
+		{"Handler", Func, 8},
+		{"Int", Type, 0},
+		{"KeyValue", Type, 0},
+		{"KeyValue.Key", Field, 0},
+		{"KeyValue.Value", Field, 0},
+		{"Map", Type, 0},
+		{"NewFloat", Func, 0},
+		{"NewInt", Func, 0},
+		{"NewMap", Func, 0},
+		{"NewString", Func, 0},
+		{"Publish", Func, 0},
+		{"String", Type, 0},
+		{"Var", Type, 0},
+	},
+	"flag": {
+		{"(*FlagSet).Arg", Method, 0},
+		{"(*FlagSet).Args", Method, 0},
+		{"(*FlagSet).Bool", Method, 0},
+		{"(*FlagSet).BoolFunc", Method, 21},
+		{"(*FlagSet).BoolVar", Method, 0},
+		{"(*FlagSet).Duration", Method, 0},
+		{"(*FlagSet).DurationVar", Method, 0},
+		{"(*FlagSet).ErrorHandling", Method, 10},
+		{"(*FlagSet).Float64", Method, 0},
+		{"(*FlagSet).Float64Var", Method, 0},
+		{"(*FlagSet).Func", Method, 16},
+		{"(*FlagSet).Init", Method, 0},
+		{"(*FlagSet).Int", Method, 0},
+		{"(*FlagSet).Int64", Method, 0},
+		{"(*FlagSet).Int64Var", Method, 0},
+		{"(*FlagSet).IntVar", Method, 0},
+		{"(*FlagSet).Lookup", Method, 0},
+		{"(*FlagSet).NArg", Method, 0},
+		{"(*FlagSet).NFlag", Method, 0},
+		{"(*FlagSet).Name", Method, 10},
+		{"(*FlagSet).Output", Method, 10},
+		{"(*FlagSet).Parse", Method, 0},
+		{"(*FlagSet).Parsed", Method, 0},
+		{"(*FlagSet).PrintDefaults", Method, 0},
+		{"(*FlagSet).Set", Method, 0},
+		{"(*FlagSet).SetOutput", Method, 0},
+		{"(*FlagSet).String", Method, 0},
+		{"(*FlagSet).StringVar", Method, 0},
+		{"(*FlagSet).TextVar", Method, 19},
+		{"(*FlagSet).Uint", Method, 0},
+		{"(*FlagSet).Uint64", Method, 0},
+		{"(*FlagSet).Uint64Var", Method, 0},
+		{"(*FlagSet).UintVar", Method, 0},
+		{"(*FlagSet).Var", Method, 0},
+		{"(*FlagSet).Visit", Method, 0},
+		{"(*FlagSet).VisitAll", Method, 0},
+		{"Arg", Func, 0},
+		{"Args", Func, 0},
+		{"Bool", Func, 0},
+		{"BoolFunc", Func, 21},
+		{"BoolVar", Func, 0},
+		{"CommandLine", Var, 2},
+		{"ContinueOnError", Const, 0},
+		{"Duration", Func, 0},
+		{"DurationVar", Func, 0},
+		{"ErrHelp", Var, 0},
+		{"ErrorHandling", Type, 0},
+		{"ExitOnError", Const, 0},
+		{"Flag", Type, 0},
+		{"Flag.DefValue", Field, 0},
+		{"Flag.Name", Field, 0},
+		{"Flag.Usage", Field, 0},
+		{"Flag.Value", Field, 0},
+		{"FlagSet", Type, 0},
+		{"FlagSet.Usage", Field, 0},
+		{"Float64", Func, 0},
+		{"Float64Var", Func, 0},
+		{"Func", Func, 16},
+		{"Getter", Type, 2},
+		{"Int", Func, 0},
+		{"Int64", Func, 0},
+		{"Int64Var", Func, 0},
+		{"IntVar", Func, 0},
+		{"Lookup", Func, 0},
+		{"NArg", Func, 0},
+		{"NFlag", Func, 0},
+		{"NewFlagSet", Func, 0},
+		{"PanicOnError", Const, 0},
+		{"Parse", Func, 0},
+		{"Parsed", Func, 0},
+		{"PrintDefaults", Func, 0},
+		{"Set", Func, 0},
+		{"String", Func, 0},
+		{"StringVar", Func, 0},
+		{"TextVar", Func, 19},
+		{"Uint", Func, 0},
+		{"Uint64", Func, 0},
+		{"Uint64Var", Func, 0},
+		{"UintVar", Func, 0},
+		{"UnquoteUsage", Func, 5},
+		{"Usage", Var, 0},
+		{"Value", Type, 0},
+		{"Var", Func, 0},
+		{"Visit", Func, 0},
+		{"VisitAll", Func, 0},
+	},
+	"fmt": {
+		{"Append", Func, 19},
+		{"Appendf", Func, 19},
+		{"Appendln", Func, 19},
+		{"Errorf", Func, 0},
+		{"FormatString", Func, 20},
+		{"Formatter", Type, 0},
+		{"Fprint", Func, 0},
+		{"Fprintf", Func, 0},
+		{"Fprintln", Func, 0},
+		{"Fscan", Func, 0},
+		{"Fscanf", Func, 0},
+		{"Fscanln", Func, 0},
+		{"GoStringer", Type, 0},
+		{"Print", Func, 0},
+		{"Printf", Func, 0},
+		{"Println", Func, 0},
+		{"Scan", Func, 0},
+		{"ScanState", Type, 0},
+		{"Scanf", Func, 0},
+		{"Scanln", Func, 0},
+		{"Scanner", Type, 0},
+		{"Sprint", Func, 0},
+		{"Sprintf", Func, 0},
+		{"Sprintln", Func, 0},
+		{"Sscan", Func, 0},
+		{"Sscanf", Func, 0},
+		{"Sscanln", Func, 0},
+		{"State", Type, 0},
+		{"Stringer", Type, 0},
+	},
+	"go/ast": {
+		{"(*ArrayType).End", Method, 0},
+		{"(*ArrayType).Pos", Method, 0},
+		{"(*AssignStmt).End", Method, 0},
+		{"(*AssignStmt).Pos", Method, 0},
+		{"(*BadDecl).End", Method, 0},
+		{"(*BadDecl).Pos", Method, 0},
+		{"(*BadExpr).End", Method, 0},
+		{"(*BadExpr).Pos", Method, 0},
+		{"(*BadStmt).End", Method, 0},
+		{"(*BadStmt).Pos", Method, 0},
+		{"(*BasicLit).End", Method, 0},
+		{"(*BasicLit).Pos", Method, 0},
+		{"(*BinaryExpr).End", Method, 0},
+		{"(*BinaryExpr).Pos", Method, 0},
+		{"(*BlockStmt).End", Method, 0},
+		{"(*BlockStmt).Pos", Method, 0},
+		{"(*BranchStmt).End", Method, 0},
+		{"(*BranchStmt).Pos", Method, 0},
+		{"(*CallExpr).End", Method, 0},
+		{"(*CallExpr).Pos", Method, 0},
+		{"(*CaseClause).End", Method, 0},
+		{"(*CaseClause).Pos", Method, 0},
+		{"(*ChanType).End", Method, 0},
+		{"(*ChanType).Pos", Method, 0},
+		{"(*CommClause).End", Method, 0},
+		{"(*CommClause).Pos", Method, 0},
+		{"(*Comment).End", Method, 0},
+		{"(*Comment).Pos", Method, 0},
+		{"(*CommentGroup).End", Method, 0},
+		{"(*CommentGroup).Pos", Method, 0},
+		{"(*CommentGroup).Text", Method, 0},
+		{"(*CompositeLit).End", Method, 0},
+		{"(*CompositeLit).Pos", Method, 0},
+		{"(*DeclStmt).End", Method, 0},
+		{"(*DeclStmt).Pos", Method, 0},
+		{"(*DeferStmt).End", Method, 0},
+		{"(*DeferStmt).Pos", Method, 0},
+		{"(*Ellipsis).End", Method, 0},
+		{"(*Ellipsis).Pos", Method, 0},
+		{"(*EmptyStmt).End", Method, 0},
+		{"(*EmptyStmt).Pos", Method, 0},
+		{"(*ExprStmt).End", Method, 0},
+		{"(*ExprStmt).Pos", Method, 0},
+		{"(*Field).End", Method, 0},
+		{"(*Field).Pos", Method, 0},
+		{"(*FieldList).End", Method, 0},
+		{"(*FieldList).NumFields", Method, 0},
+		{"(*FieldList).Pos", Method, 0},
+		{"(*File).End", Method, 0},
+		{"(*File).Pos", Method, 0},
+		{"(*ForStmt).End", Method, 0},
+		{"(*ForStmt).Pos", Method, 0},
+		{"(*FuncDecl).End", Method, 0},
+		{"(*FuncDecl).Pos", Method, 0},
+		{"(*FuncLit).End", Method, 0},
+		{"(*FuncLit).Pos", Method, 0},
+		{"(*FuncType).End", Method, 0},
+		{"(*FuncType).Pos", Method, 0},
+		{"(*GenDecl).End", Method, 0},
+		{"(*GenDecl).Pos", Method, 0},
+		{"(*GoStmt).End", Method, 0},
+		{"(*GoStmt).Pos", Method, 0},
+		{"(*Ident).End", Method, 0},
+		{"(*Ident).IsExported", Method, 0},
+		{"(*Ident).Pos", Method, 0},
+		{"(*Ident).String", Method, 0},
+		{"(*IfStmt).End", Method, 0},
+		{"(*IfStmt).Pos", Method, 0},
+		{"(*ImportSpec).End", Method, 0},
+		{"(*ImportSpec).Pos", Method, 0},
+		{"(*IncDecStmt).End", Method, 0},
+		{"(*IncDecStmt).Pos", Method, 0},
+		{"(*IndexExpr).End", Method, 0},
+		{"(*IndexExpr).Pos", Method, 0},
+		{"(*IndexListExpr).End", Method, 18},
+		{"(*IndexListExpr).Pos", Method, 18},
+		{"(*InterfaceType).End", Method, 0},
+		{"(*InterfaceType).Pos", Method, 0},
+		{"(*KeyValueExpr).End", Method, 0},
+		{"(*KeyValueExpr).Pos", Method, 0},
+		{"(*LabeledStmt).End", Method, 0},
+		{"(*LabeledStmt).Pos", Method, 0},
+		{"(*MapType).End", Method, 0},
+		{"(*MapType).Pos", Method, 0},
+		{"(*Object).Pos", Method, 0},
+		{"(*Package).End", Method, 0},
+		{"(*Package).Pos", Method, 0},
+		{"(*ParenExpr).End", Method, 0},
+		{"(*ParenExpr).Pos", Method, 0},
+		{"(*RangeStmt).End", Method, 0},
+		{"(*RangeStmt).Pos", Method, 0},
+		{"(*ReturnStmt).End", Method, 0},
+		{"(*ReturnStmt).Pos", Method, 0},
+		{"(*Scope).Insert", Method, 0},
+		{"(*Scope).Lookup", Method, 0},
+		{"(*Scope).String", Method, 0},
+		{"(*SelectStmt).End", Method, 0},
+		{"(*SelectStmt).Pos", Method, 0},
+		{"(*SelectorExpr).End", Method, 0},
+		{"(*SelectorExpr).Pos", Method, 0},
+		{"(*SendStmt).End", Method, 0},
+		{"(*SendStmt).Pos", Method, 0},
+		{"(*SliceExpr).End", Method, 0},
+		{"(*SliceExpr).Pos", Method, 0},
+		{"(*StarExpr).End", Method, 0},
+		{"(*StarExpr).Pos", Method, 0},
+		{"(*StructType).End", Method, 0},
+		{"(*StructType).Pos", Method, 0},
+		{"(*SwitchStmt).End", Method, 0},
+		{"(*SwitchStmt).Pos", Method, 0},
+		{"(*TypeAssertExpr).End", Method, 0},
+		{"(*TypeAssertExpr).Pos", Method, 0},
+		{"(*TypeSpec).End", Method, 0},
+		{"(*TypeSpec).Pos", Method, 0},
+		{"(*TypeSwitchStmt).End", Method, 0},
+		{"(*TypeSwitchStmt).Pos", Method, 0},
+		{"(*UnaryExpr).End", Method, 0},
+		{"(*UnaryExpr).Pos", Method, 0},
+		{"(*ValueSpec).End", Method, 0},
+		{"(*ValueSpec).Pos", Method, 0},
+		{"(CommentMap).Comments", Method, 1},
+		{"(CommentMap).Filter", Method, 1},
+		{"(CommentMap).String", Method, 1},
+		{"(CommentMap).Update", Method, 1},
+		{"(ObjKind).String", Method, 0},
+		{"ArrayType", Type, 0},
+		{"ArrayType.Elt", Field, 0},
+		{"ArrayType.Lbrack", Field, 0},
+		{"ArrayType.Len", Field, 0},
+		{"AssignStmt", Type, 0},
+		{"AssignStmt.Lhs", Field, 0},
+		{"AssignStmt.Rhs", Field, 0},
+		{"AssignStmt.Tok", Field, 0},
+		{"AssignStmt.TokPos", Field, 0},
+		{"Bad", Const, 0},
+		{"BadDecl", Type, 0},
+		{"BadDecl.From", Field, 0},
+		{"BadDecl.To", Field, 0},
+		{"BadExpr", Type, 0},
+		{"BadExpr.From", Field, 0},
+		{"BadExpr.To", Field, 0},
+		{"BadStmt", Type, 0},
+		{"BadStmt.From", Field, 0},
+		{"BadStmt.To", Field, 0},
+		{"BasicLit", Type, 0},
+		{"BasicLit.Kind", Field, 0},
+		{"BasicLit.Value", Field, 0},
+		{"BasicLit.ValuePos", Field, 0},
+		{"BinaryExpr", Type, 0},
+		{"BinaryExpr.Op", Field, 0},
+		{"BinaryExpr.OpPos", Field, 0},
+		{"BinaryExpr.X", Field, 0},
+		{"BinaryExpr.Y", Field, 0},
+		{"BlockStmt", Type, 0},
+		{"BlockStmt.Lbrace", Field, 0},
+		{"BlockStmt.List", Field, 0},
+		{"BlockStmt.Rbrace", Field, 0},
+		{"BranchStmt", Type, 0},
+		{"BranchStmt.Label", Field, 0},
+		{"BranchStmt.Tok", Field, 0},
+		{"BranchStmt.TokPos", Field, 0},
+		{"CallExpr", Type, 0},
+		{"CallExpr.Args", Field, 0},
+		{"CallExpr.Ellipsis", Field, 0},
+		{"CallExpr.Fun", Field, 0},
+		{"CallExpr.Lparen", Field, 0},
+		{"CallExpr.Rparen", Field, 0},
+		{"CaseClause", Type, 0},
+		{"CaseClause.Body", Field, 0},
+		{"CaseClause.Case", Field, 0},
+		{"CaseClause.Colon", Field, 0},
+		{"CaseClause.List", Field, 0},
+		{"ChanDir", Type, 0},
+		{"ChanType", Type, 0},
+		{"ChanType.Arrow", Field, 1},
+		{"ChanType.Begin", Field, 0},
+		{"ChanType.Dir", Field, 0},
+		{"ChanType.Value", Field, 0},
+		{"CommClause", Type, 0},
+		{"CommClause.Body", Field, 0},
+		{"CommClause.Case", Field, 0},
+		{"CommClause.Colon", Field, 0},
+		{"CommClause.Comm", Field, 0},
+		{"Comment", Type, 0},
+		{"Comment.Slash", Field, 0},
+		{"Comment.Text", Field, 0},
+		{"CommentGroup", Type, 0},
+		{"CommentGroup.List", Field, 0},
+		{"CommentMap", Type, 1},
+		{"CompositeLit", Type, 0},
+		{"CompositeLit.Elts", Field, 0},
+		{"CompositeLit.Incomplete", Field, 11},
+		{"CompositeLit.Lbrace", Field, 0},
+		{"CompositeLit.Rbrace", Field, 0},
+		{"CompositeLit.Type", Field, 0},
+		{"Con", Const, 0},
+		{"Decl", Type, 0},
+		{"DeclStmt", Type, 0},
+		{"DeclStmt.Decl", Field, 0},
+		{"DeferStmt", Type, 0},
+		{"DeferStmt.Call", Field, 0},
+		{"DeferStmt.Defer", Field, 0},
+		{"Ellipsis", Type, 0},
+		{"Ellipsis.Ellipsis", Field, 0},
+		{"Ellipsis.Elt", Field, 0},
+		{"EmptyStmt", Type, 0},
+		{"EmptyStmt.Implicit", Field, 5},
+		{"EmptyStmt.Semicolon", Field, 0},
+		{"Expr", Type, 0},
+		{"ExprStmt", Type, 0},
+		{"ExprStmt.X", Field, 0},
+		{"Field", Type, 0},
+		{"Field.Comment", Field, 0},
+		{"Field.Doc", Field, 0},
+		{"Field.Names", Field, 0},
+		{"Field.Tag", Field, 0},
+		{"Field.Type", Field, 0},
+		{"FieldFilter", Type, 0},
+		{"FieldList", Type, 0},
+		{"FieldList.Closing", Field, 0},
+		{"FieldList.List", Field, 0},
+		{"FieldList.Opening", Field, 0},
+		{"File", Type, 0},
+		{"File.Comments", Field, 0},
+		{"File.Decls", Field, 0},
+		{"File.Doc", Field, 0},
+		{"File.FileEnd", Field, 20},
+		{"File.FileStart", Field, 20},
+		{"File.GoVersion", Field, 21},
+		{"File.Imports", Field, 0},
+		{"File.Name", Field, 0},
+		{"File.Package", Field, 0},
+		{"File.Scope", Field, 0},
+		{"File.Unresolved", Field, 0},
+		{"FileExports", Func, 0},
+		{"Filter", Type, 0},
+		{"FilterDecl", Func, 0},
+		{"FilterFile", Func, 0},
+		{"FilterFuncDuplicates", Const, 0},
+		{"FilterImportDuplicates", Const, 0},
+		{"FilterPackage", Func, 0},
+		{"FilterUnassociatedComments", Const, 0},
+		{"ForStmt", Type, 0},
+		{"ForStmt.Body", Field, 0},
+		{"ForStmt.Cond", Field, 0},
+		{"ForStmt.For", Field, 0},
+		{"ForStmt.Init", Field, 0},
+		{"ForStmt.Post", Field, 0},
+		{"Fprint", Func, 0},
+		{"Fun", Const, 0},
+		{"FuncDecl", Type, 0},
+		{"FuncDecl.Body", Field, 0},
+		{"FuncDecl.Doc", Field, 0},
+		{"FuncDecl.Name", Field, 0},
+		{"FuncDecl.Recv", Field, 0},
+		{"FuncDecl.Type", Field, 0},
+		{"FuncLit", Type, 0},
+		{"FuncLit.Body", Field, 0},
+		{"FuncLit.Type", Field, 0},
+		{"FuncType", Type, 0},
+		{"FuncType.Func", Field, 0},
+		{"FuncType.Params", Field, 0},
+		{"FuncType.Results", Field, 0},
+		{"FuncType.TypeParams", Field, 18},
+		{"GenDecl", Type, 0},
+		{"GenDecl.Doc", Field, 0},
+		{"GenDecl.Lparen", Field, 0},
+		{"GenDecl.Rparen", Field, 0},
+		{"GenDecl.Specs", Field, 0},
+		{"GenDecl.Tok", Field, 0},
+		{"GenDecl.TokPos", Field, 0},
+		{"GoStmt", Type, 0},
+		{"GoStmt.Call", Field, 0},
+		{"GoStmt.Go", Field, 0},
+		{"Ident", Type, 0},
+		{"Ident.Name", Field, 0},
+		{"Ident.NamePos", Field, 0},
+		{"Ident.Obj", Field, 0},
+		{"IfStmt", Type, 0},
+		{"IfStmt.Body", Field, 0},
+		{"IfStmt.Cond", Field, 0},
+		{"IfStmt.Else", Field, 0},
+		{"IfStmt.If", Field, 0},
+		{"IfStmt.Init", Field, 0},
+		{"ImportSpec", Type, 0},
+		{"ImportSpec.Comment", Field, 0},
+		{"ImportSpec.Doc", Field, 0},
+		{"ImportSpec.EndPos", Field, 0},
+		{"ImportSpec.Name", Field, 0},
+		{"ImportSpec.Path", Field, 0},
+		{"Importer", Type, 0},
+		{"IncDecStmt", Type, 0},
+		{"IncDecStmt.Tok", Field, 0},
+		{"IncDecStmt.TokPos", Field, 0},
+		{"IncDecStmt.X", Field, 0},
+		{"IndexExpr", Type, 0},
+		{"IndexExpr.Index", Field, 0},
+		{"IndexExpr.Lbrack", Field, 0},
+		{"IndexExpr.Rbrack", Field, 0},
+		{"IndexExpr.X", Field, 0},
+		{"IndexListExpr", Type, 18},
+		{"IndexListExpr.Indices", Field, 18},
+		{"IndexListExpr.Lbrack", Field, 18},
+		{"IndexListExpr.Rbrack", Field, 18},
+		{"IndexListExpr.X", Field, 18},
+		{"Inspect", Func, 0},
+		{"InterfaceType", Type, 0},
+		{"InterfaceType.Incomplete", Field, 0},
+		{"InterfaceType.Interface", Field, 0},
+		{"InterfaceType.Methods", Field, 0},
+		{"IsExported", Func, 0},
+		{"IsGenerated", Func, 21},
+		{"KeyValueExpr", Type, 0},
+		{"KeyValueExpr.Colon", Field, 0},
+		{"KeyValueExpr.Key", Field, 0},
+		{"KeyValueExpr.Value", Field, 0},
+		{"LabeledStmt", Type, 0},
+		{"LabeledStmt.Colon", Field, 0},
+		{"LabeledStmt.Label", Field, 0},
+		{"LabeledStmt.Stmt", Field, 0},
+		{"Lbl", Const, 0},
+		{"MapType", Type, 0},
+		{"MapType.Key", Field, 0},
+		{"MapType.Map", Field, 0},
+		{"MapType.Value", Field, 0},
+		{"MergeMode", Type, 0},
+		{"MergePackageFiles", Func, 0},
+		{"NewCommentMap", Func, 1},
+		{"NewIdent", Func, 0},
+		{"NewObj", Func, 0},
+		{"NewPackage", Func, 0},
+		{"NewScope", Func, 0},
+		{"Node", Type, 0},
+		{"NotNilFilter", Func, 0},
+		{"ObjKind", Type, 0},
+		{"Object", Type, 0},
+		{"Object.Data", Field, 0},
+		{"Object.Decl", Field, 0},
+		{"Object.Kind", Field, 0},
+		{"Object.Name", Field, 0},
+		{"Object.Type", Field, 0},
+		{"Package", Type, 0},
+		{"Package.Files", Field, 0},
+		{"Package.Imports", Field, 0},
+		{"Package.Name", Field, 0},
+		{"Package.Scope", Field, 0},
+		{"PackageExports", Func, 0},
+		{"ParenExpr", Type, 0},
+		{"ParenExpr.Lparen", Field, 0},
+		{"ParenExpr.Rparen", Field, 0},
+		{"ParenExpr.X", Field, 0},
+		{"Pkg", Const, 0},
+		{"Print", Func, 0},
+		{"RECV", Const, 0},
+		{"RangeStmt", Type, 0},
+		{"RangeStmt.Body", Field, 0},
+		{"RangeStmt.For", Field, 0},
+		{"RangeStmt.Key", Field, 0},
+		{"RangeStmt.Range", Field, 20},
+		{"RangeStmt.Tok", Field, 0},
+		{"RangeStmt.TokPos", Field, 0},
+		{"RangeStmt.Value", Field, 0},
+		{"RangeStmt.X", Field, 0},
+		{"ReturnStmt", Type, 0},
+		{"ReturnStmt.Results", Field, 0},
+		{"ReturnStmt.Return", Field, 0},
+		{"SEND", Const, 0},
+		{"Scope", Type, 0},
+		{"Scope.Objects", Field, 0},
+		{"Scope.Outer", Field, 0},
+		{"SelectStmt", Type, 0},
+		{"SelectStmt.Body", Field, 0},
+		{"SelectStmt.Select", Field, 0},
+		{"SelectorExpr", Type, 0},
+		{"SelectorExpr.Sel", Field, 0},
+		{"SelectorExpr.X", Field, 0},
+		{"SendStmt", Type, 0},
+		{"SendStmt.Arrow", Field, 0},
+		{"SendStmt.Chan", Field, 0},
+		{"SendStmt.Value", Field, 0},
+		{"SliceExpr", Type, 0},
+		{"SliceExpr.High", Field, 0},
+		{"SliceExpr.Lbrack", Field, 0},
+		{"SliceExpr.Low", Field, 0},
+		{"SliceExpr.Max", Field, 2},
+		{"SliceExpr.Rbrack", Field, 0},
+		{"SliceExpr.Slice3", Field, 2},
+		{"SliceExpr.X", Field, 0},
+		{"SortImports", Func, 0},
+		{"Spec", Type, 0},
+		{"StarExpr", Type, 0},
+		{"StarExpr.Star", Field, 0},
+		{"StarExpr.X", Field, 0},
+		{"Stmt", Type, 0},
+		{"StructType", Type, 0},
+		{"StructType.Fields", Field, 0},
+		{"StructType.Incomplete", Field, 0},
+		{"StructType.Struct", Field, 0},
+		{"SwitchStmt", Type, 0},
+		{"SwitchStmt.Body", Field, 0},
+		{"SwitchStmt.Init", Field, 0},
+		{"SwitchStmt.Switch", Field, 0},
+		{"SwitchStmt.Tag", Field, 0},
+		{"Typ", Const, 0},
+		{"TypeAssertExpr", Type, 0},
+		{"TypeAssertExpr.Lparen", Field, 2},
+		{"TypeAssertExpr.Rparen", Field, 2},
+		{"TypeAssertExpr.Type", Field, 0},
+		{"TypeAssertExpr.X", Field, 0},
+		{"TypeSpec", Type, 0},
+		{"TypeSpec.Assign", Field, 9},
+		{"TypeSpec.Comment", Field, 0},
+		{"TypeSpec.Doc", Field, 0},
+		{"TypeSpec.Name", Field, 0},
+		{"TypeSpec.Type", Field, 0},
+		{"TypeSpec.TypeParams", Field, 18},
+		{"TypeSwitchStmt", Type, 0},
+		{"TypeSwitchStmt.Assign", Field, 0},
+		{"TypeSwitchStmt.Body", Field, 0},
+		{"TypeSwitchStmt.Init", Field, 0},
+		{"TypeSwitchStmt.Switch", Field, 0},
+		{"UnaryExpr", Type, 0},
+		{"UnaryExpr.Op", Field, 0},
+		{"UnaryExpr.OpPos", Field, 0},
+		{"UnaryExpr.X", Field, 0},
+		{"Unparen", Func, 22},
+		{"ValueSpec", Type, 0},
+		{"ValueSpec.Comment", Field, 0},
+		{"ValueSpec.Doc", Field, 0},
+		{"ValueSpec.Names", Field, 0},
+		{"ValueSpec.Type", Field, 0},
+		{"ValueSpec.Values", Field, 0},
+		{"Var", Const, 0},
+		{"Visitor", Type, 0},
+		{"Walk", Func, 0},
+	},
+	"go/build": {
+		{"(*Context).Import", Method, 0},
+		{"(*Context).ImportDir", Method, 0},
+		{"(*Context).MatchFile", Method, 2},
+		{"(*Context).SrcDirs", Method, 0},
+		{"(*MultiplePackageError).Error", Method, 4},
+		{"(*NoGoError).Error", Method, 0},
+		{"(*Package).IsCommand", Method, 0},
+		{"AllowBinary", Const, 0},
+		{"ArchChar", Func, 0},
+		{"Context", Type, 0},
+		{"Context.BuildTags", Field, 0},
+		{"Context.CgoEnabled", Field, 0},
+		{"Context.Compiler", Field, 0},
+		{"Context.Dir", Field, 14},
+		{"Context.GOARCH", Field, 0},
+		{"Context.GOOS", Field, 0},
+		{"Context.GOPATH", Field, 0},
+		{"Context.GOROOT", Field, 0},
+		{"Context.HasSubdir", Field, 0},
+		{"Context.InstallSuffix", Field, 1},
+		{"Context.IsAbsPath", Field, 0},
+		{"Context.IsDir", Field, 0},
+		{"Context.JoinPath", Field, 0},
+		{"Context.OpenFile", Field, 0},
+		{"Context.ReadDir", Field, 0},
+		{"Context.ReleaseTags", Field, 1},
+		{"Context.SplitPathList", Field, 0},
+		{"Context.ToolTags", Field, 17},
+		{"Context.UseAllFiles", Field, 0},
+		{"Default", Var, 0},
+		{"Directive", Type, 21},
+		{"Directive.Pos", Field, 21},
+		{"Directive.Text", Field, 21},
+		{"FindOnly", Const, 0},
+		{"IgnoreVendor", Const, 6},
+		{"Import", Func, 0},
+		{"ImportComment", Const, 4},
+		{"ImportDir", Func, 0},
+		{"ImportMode", Type, 0},
+		{"IsLocalImport", Func, 0},
+		{"MultiplePackageError", Type, 4},
+		{"MultiplePackageError.Dir", Field, 4},
+		{"MultiplePackageError.Files", Field, 4},
+		{"MultiplePackageError.Packages", Field, 4},
+		{"NoGoError", Type, 0},
+		{"NoGoError.Dir", Field, 0},
+		{"Package", Type, 0},
+		{"Package.AllTags", Field, 2},
+		{"Package.BinDir", Field, 0},
+		{"Package.BinaryOnly", Field, 7},
+		{"Package.CFiles", Field, 0},
+		{"Package.CXXFiles", Field, 2},
+		{"Package.CgoCFLAGS", Field, 0},
+		{"Package.CgoCPPFLAGS", Field, 2},
+		{"Package.CgoCXXFLAGS", Field, 2},
+		{"Package.CgoFFLAGS", Field, 7},
+		{"Package.CgoFiles", Field, 0},
+		{"Package.CgoLDFLAGS", Field, 0},
+		{"Package.CgoPkgConfig", Field, 0},
+		{"Package.ConflictDir", Field, 2},
+		{"Package.Dir", Field, 0},
+		{"Package.Directives", Field, 21},
+		{"Package.Doc", Field, 0},
+		{"Package.EmbedPatternPos", Field, 16},
+		{"Package.EmbedPatterns", Field, 16},
+		{"Package.FFiles", Field, 7},
+		{"Package.GoFiles", Field, 0},
+		{"Package.Goroot", Field, 0},
+		{"Package.HFiles", Field, 0},
+		{"Package.IgnoredGoFiles", Field, 1},
+		{"Package.IgnoredOtherFiles", Field, 16},
+		{"Package.ImportComment", Field, 4},
+		{"Package.ImportPath", Field, 0},
+		{"Package.ImportPos", Field, 0},
+		{"Package.Imports", Field, 0},
+		{"Package.InvalidGoFiles", Field, 6},
+		{"Package.MFiles", Field, 3},
+		{"Package.Name", Field, 0},
+		{"Package.PkgObj", Field, 0},
+		{"Package.PkgRoot", Field, 0},
+		{"Package.PkgTargetRoot", Field, 5},
+		{"Package.Root", Field, 0},
+		{"Package.SFiles", Field, 0},
+		{"Package.SrcRoot", Field, 0},
+		{"Package.SwigCXXFiles", Field, 1},
+		{"Package.SwigFiles", Field, 1},
+		{"Package.SysoFiles", Field, 0},
+		{"Package.TestDirectives", Field, 21},
+		{"Package.TestEmbedPatternPos", Field, 16},
+		{"Package.TestEmbedPatterns", Field, 16},
+		{"Package.TestGoFiles", Field, 0},
+		{"Package.TestImportPos", Field, 0},
+		{"Package.TestImports", Field, 0},
+		{"Package.XTestDirectives", Field, 21},
+		{"Package.XTestEmbedPatternPos", Field, 16},
+		{"Package.XTestEmbedPatterns", Field, 16},
+		{"Package.XTestGoFiles", Field, 0},
+		{"Package.XTestImportPos", Field, 0},
+		{"Package.XTestImports", Field, 0},
+		{"ToolDir", Var, 0},
+	},
+	"go/build/constraint": {
+		{"(*AndExpr).Eval", Method, 16},
+		{"(*AndExpr).String", Method, 16},
+		{"(*NotExpr).Eval", Method, 16},
+		{"(*NotExpr).String", Method, 16},
+		{"(*OrExpr).Eval", Method, 16},
+		{"(*OrExpr).String", Method, 16},
+		{"(*SyntaxError).Error", Method, 16},
+		{"(*TagExpr).Eval", Method, 16},
+		{"(*TagExpr).String", Method, 16},
+		{"AndExpr", Type, 16},
+		{"AndExpr.X", Field, 16},
+		{"AndExpr.Y", Field, 16},
+		{"Expr", Type, 16},
+		{"GoVersion", Func, 21},
+		{"IsGoBuild", Func, 16},
+		{"IsPlusBuild", Func, 16},
+		{"NotExpr", Type, 16},
+		{"NotExpr.X", Field, 16},
+		{"OrExpr", Type, 16},
+		{"OrExpr.X", Field, 16},
+		{"OrExpr.Y", Field, 16},
+		{"Parse", Func, 16},
+		{"PlusBuildLines", Func, 16},
+		{"SyntaxError", Type, 16},
+		{"SyntaxError.Err", Field, 16},
+		{"SyntaxError.Offset", Field, 16},
+		{"TagExpr", Type, 16},
+		{"TagExpr.Tag", Field, 16},
+	},
+	"go/constant": {
+		{"(Kind).String", Method, 18},
+		{"BinaryOp", Func, 5},
+		{"BitLen", Func, 5},
+		{"Bool", Const, 5},
+		{"BoolVal", Func, 5},
+		{"Bytes", Func, 5},
+		{"Compare", Func, 5},
+		{"Complex", Const, 5},
+		{"Denom", Func, 5},
+		{"Float", Const, 5},
+		{"Float32Val", Func, 5},
+		{"Float64Val", Func, 5},
+		{"Imag", Func, 5},
+		{"Int", Const, 5},
+		{"Int64Val", Func, 5},
+		{"Kind", Type, 5},
+		{"Make", Func, 13},
+		{"MakeBool", Func, 5},
+		{"MakeFloat64", Func, 5},
+		{"MakeFromBytes", Func, 5},
+		{"MakeFromLiteral", Func, 5},
+		{"MakeImag", Func, 5},
+		{"MakeInt64", Func, 5},
+		{"MakeString", Func, 5},
+		{"MakeUint64", Func, 5},
+		{"MakeUnknown", Func, 5},
+		{"Num", Func, 5},
+		{"Real", Func, 5},
+		{"Shift", Func, 5},
+		{"Sign", Func, 5},
+		{"String", Const, 5},
+		{"StringVal", Func, 5},
+		{"ToComplex", Func, 6},
+		{"ToFloat", Func, 6},
+		{"ToInt", Func, 6},
+		{"Uint64Val", Func, 5},
+		{"UnaryOp", Func, 5},
+		{"Unknown", Const, 5},
+		{"Val", Func, 13},
+		{"Value", Type, 5},
+	},
+	"go/doc": {
+		{"(*Package).Filter", Method, 0},
+		{"(*Package).HTML", Method, 19},
+		{"(*Package).Markdown", Method, 19},
+		{"(*Package).Parser", Method, 19},
+		{"(*Package).Printer", Method, 19},
+		{"(*Package).Synopsis", Method, 19},
+		{"(*Package).Text", Method, 19},
+		{"AllDecls", Const, 0},
+		{"AllMethods", Const, 0},
+		{"Example", Type, 0},
+		{"Example.Code", Field, 0},
+		{"Example.Comments", Field, 0},
+		{"Example.Doc", Field, 0},
+		{"Example.EmptyOutput", Field, 1},
+		{"Example.Name", Field, 0},
+		{"Example.Order", Field, 1},
+		{"Example.Output", Field, 0},
+		{"Example.Play", Field, 1},
+		{"Example.Suffix", Field, 14},
+		{"Example.Unordered", Field, 7},
+		{"Examples", Func, 0},
+		{"Filter", Type, 0},
+		{"Func", Type, 0},
+		{"Func.Decl", Field, 0},
+		{"Func.Doc", Field, 0},
+		{"Func.Examples", Field, 14},
+		{"Func.Level", Field, 0},
+		{"Func.Name", Field, 0},
+		{"Func.Orig", Field, 0},
+		{"Func.Recv", Field, 0},
+		{"IllegalPrefixes", Var, 1},
+		{"IsPredeclared", Func, 8},
+		{"Mode", Type, 0},
+		{"New", Func, 0},
+		{"NewFromFiles", Func, 14},
+		{"Note", Type, 1},
+		{"Note.Body", Field, 1},
+		{"Note.End", Field, 1},
+		{"Note.Pos", Field, 1},
+		{"Note.UID", Field, 1},
+		{"Package", Type, 0},
+		{"Package.Bugs", Field, 0},
+		{"Package.Consts", Field, 0},
+		{"Package.Doc", Field, 0},
+		{"Package.Examples", Field, 14},
+		{"Package.Filenames", Field, 0},
+		{"Package.Funcs", Field, 0},
+		{"Package.ImportPath", Field, 0},
+		{"Package.Imports", Field, 0},
+		{"Package.Name", Field, 0},
+		{"Package.Notes", Field, 1},
+		{"Package.Types", Field, 0},
+		{"Package.Vars", Field, 0},
+		{"PreserveAST", Const, 12},
+		{"Synopsis", Func, 0},
+		{"ToHTML", Func, 0},
+		{"ToText", Func, 0},
+		{"Type", Type, 0},
+		{"Type.Consts", Field, 0},
+		{"Type.Decl", Field, 0},
+		{"Type.Doc", Field, 0},
+		{"Type.Examples", Field, 14},
+		{"Type.Funcs", Field, 0},
+		{"Type.Methods", Field, 0},
+		{"Type.Name", Field, 0},
+		{"Type.Vars", Field, 0},
+		{"Value", Type, 0},
+		{"Value.Decl", Field, 0},
+		{"Value.Doc", Field, 0},
+		{"Value.Names", Field, 0},
+	},
+	"go/doc/comment": {
+		{"(*DocLink).DefaultURL", Method, 19},
+		{"(*Heading).DefaultID", Method, 19},
+		{"(*List).BlankBefore", Method, 19},
+		{"(*List).BlankBetween", Method, 19},
+		{"(*Parser).Parse", Method, 19},
+		{"(*Printer).Comment", Method, 19},
+		{"(*Printer).HTML", Method, 19},
+		{"(*Printer).Markdown", Method, 19},
+		{"(*Printer).Text", Method, 19},
+		{"Block", Type, 19},
+		{"Code", Type, 19},
+		{"Code.Text", Field, 19},
+		{"DefaultLookupPackage", Func, 19},
+		{"Doc", Type, 19},
+		{"Doc.Content", Field, 19},
+		{"Doc.Links", Field, 19},
+		{"DocLink", Type, 19},
+		{"DocLink.ImportPath", Field, 19},
+		{"DocLink.Name", Field, 19},
+		{"DocLink.Recv", Field, 19},
+		{"DocLink.Text", Field, 19},
+		{"Heading", Type, 19},
+		{"Heading.Text", Field, 19},
+		{"Italic", Type, 19},
+		{"Link", Type, 19},
+		{"Link.Auto", Field, 19},
+		{"Link.Text", Field, 19},
+		{"Link.URL", Field, 19},
+		{"LinkDef", Type, 19},
+		{"LinkDef.Text", Field, 19},
+		{"LinkDef.URL", Field, 19},
+		{"LinkDef.Used", Field, 19},
+		{"List", Type, 19},
+		{"List.ForceBlankBefore", Field, 19},
+		{"List.ForceBlankBetween", Field, 19},
+		{"List.Items", Field, 19},
+		{"ListItem", Type, 19},
+		{"ListItem.Content", Field, 19},
+		{"ListItem.Number", Field, 19},
+		{"Paragraph", Type, 19},
+		{"Paragraph.Text", Field, 19},
+		{"Parser", Type, 19},
+		{"Parser.LookupPackage", Field, 19},
+		{"Parser.LookupSym", Field, 19},
+		{"Parser.Words", Field, 19},
+		{"Plain", Type, 19},
+		{"Printer", Type, 19},
+		{"Printer.DocLinkBaseURL", Field, 19},
+		{"Printer.DocLinkURL", Field, 19},
+		{"Printer.HeadingID", Field, 19},
+		{"Printer.HeadingLevel", Field, 19},
+		{"Printer.TextCodePrefix", Field, 19},
+		{"Printer.TextPrefix", Field, 19},
+		{"Printer.TextWidth", Field, 19},
+		{"Text", Type, 19},
+	},
+	"go/format": {
+		{"Node", Func, 1},
+		{"Source", Func, 1},
+	},
+	"go/importer": {
+		{"Default", Func, 5},
+		{"For", Func, 5},
+		{"ForCompiler", Func, 12},
+		{"Lookup", Type, 5},
+	},
+	"go/parser": {
+		{"AllErrors", Const, 1},
+		{"DeclarationErrors", Const, 0},
+		{"ImportsOnly", Const, 0},
+		{"Mode", Type, 0},
+		{"PackageClauseOnly", Const, 0},
+		{"ParseComments", Const, 0},
+		{"ParseDir", Func, 0},
+		{"ParseExpr", Func, 0},
+		{"ParseExprFrom", Func, 5},
+		{"ParseFile", Func, 0},
+		{"SkipObjectResolution", Const, 17},
+		{"SpuriousErrors", Const, 0},
+		{"Trace", Const, 0},
+	},
+	"go/printer": {
+		{"(*Config).Fprint", Method, 0},
+		{"CommentedNode", Type, 0},
+		{"CommentedNode.Comments", Field, 0},
+		{"CommentedNode.Node", Field, 0},
+		{"Config", Type, 0},
+		{"Config.Indent", Field, 1},
+		{"Config.Mode", Field, 0},
+		{"Config.Tabwidth", Field, 0},
+		{"Fprint", Func, 0},
+		{"Mode", Type, 0},
+		{"RawFormat", Const, 0},
+		{"SourcePos", Const, 0},
+		{"TabIndent", Const, 0},
+		{"UseSpaces", Const, 0},
+	},
+	"go/scanner": {
+		{"(*ErrorList).Add", Method, 0},
+		{"(*ErrorList).RemoveMultiples", Method, 0},
+		{"(*ErrorList).Reset", Method, 0},
+		{"(*Scanner).Init", Method, 0},
+		{"(*Scanner).Scan", Method, 0},
+		{"(Error).Error", Method, 0},
+		{"(ErrorList).Err", Method, 0},
+		{"(ErrorList).Error", Method, 0},
+		{"(ErrorList).Len", Method, 0},
+		{"(ErrorList).Less", Method, 0},
+		{"(ErrorList).Sort", Method, 0},
+		{"(ErrorList).Swap", Method, 0},
+		{"Error", Type, 0},
+		{"Error.Msg", Field, 0},
+		{"Error.Pos", Field, 0},
+		{"ErrorHandler", Type, 0},
+		{"ErrorList", Type, 0},
+		{"Mode", Type, 0},
+		{"PrintError", Func, 0},
+		{"ScanComments", Const, 0},
+		{"Scanner", Type, 0},
+		{"Scanner.ErrorCount", Field, 0},
+	},
+	"go/token": {
+		{"(*File).AddLine", Method, 0},
+		{"(*File).AddLineColumnInfo", Method, 11},
+		{"(*File).AddLineInfo", Method, 0},
+		{"(*File).Base", Method, 0},
+		{"(*File).Line", Method, 0},
+		{"(*File).LineCount", Method, 0},
+		{"(*File).LineStart", Method, 12},
+		{"(*File).Lines", Method, 21},
+		{"(*File).MergeLine", Method, 2},
+		{"(*File).Name", Method, 0},
+		{"(*File).Offset", Method, 0},
+		{"(*File).Pos", Method, 0},
+		{"(*File).Position", Method, 0},
+		{"(*File).PositionFor", Method, 4},
+		{"(*File).SetLines", Method, 0},
+		{"(*File).SetLinesForContent", Method, 0},
+		{"(*File).Size", Method, 0},
+		{"(*FileSet).AddFile", Method, 0},
+		{"(*FileSet).Base", Method, 0},
+		{"(*FileSet).File", Method, 0},
+		{"(*FileSet).Iterate", Method, 0},
+		{"(*FileSet).Position", Method, 0},
+		{"(*FileSet).PositionFor", Method, 4},
+		{"(*FileSet).Read", Method, 0},
+		{"(*FileSet).RemoveFile", Method, 20},
+		{"(*FileSet).Write", Method, 0},
+		{"(*Position).IsValid", Method, 0},
+		{"(Pos).IsValid", Method, 0},
+		{"(Position).String", Method, 0},
+		{"(Token).IsKeyword", Method, 0},
+		{"(Token).IsLiteral", Method, 0},
+		{"(Token).IsOperator", Method, 0},
+		{"(Token).Precedence", Method, 0},
+		{"(Token).String", Method, 0},
+		{"ADD", Const, 0},
+		{"ADD_ASSIGN", Const, 0},
+		{"AND", Const, 0},
+		{"AND_ASSIGN", Const, 0},
+		{"AND_NOT", Const, 0},
+		{"AND_NOT_ASSIGN", Const, 0},
+		{"ARROW", Const, 0},
+		{"ASSIGN", Const, 0},
+		{"BREAK", Const, 0},
+		{"CASE", Const, 0},
+		{"CHAN", Const, 0},
+		{"CHAR", Const, 0},
+		{"COLON", Const, 0},
+		{"COMMA", Const, 0},
+		{"COMMENT", Const, 0},
+		{"CONST", Const, 0},
+		{"CONTINUE", Const, 0},
+		{"DEC", Const, 0},
+		{"DEFAULT", Const, 0},
+		{"DEFER", Const, 0},
+		{"DEFINE", Const, 0},
+		{"ELLIPSIS", Const, 0},
+		{"ELSE", Const, 0},
+		{"EOF", Const, 0},
+		{"EQL", Const, 0},
+		{"FALLTHROUGH", Const, 0},
+		{"FLOAT", Const, 0},
+		{"FOR", Const, 0},
+		{"FUNC", Const, 0},
+		{"File", Type, 0},
+		{"FileSet", Type, 0},
+		{"GEQ", Const, 0},
+		{"GO", Const, 0},
+		{"GOTO", Const, 0},
+		{"GTR", Const, 0},
+		{"HighestPrec", Const, 0},
+		{"IDENT", Const, 0},
+		{"IF", Const, 0},
+		{"ILLEGAL", Const, 0},
+		{"IMAG", Const, 0},
+		{"IMPORT", Const, 0},
+		{"INC", Const, 0},
+		{"INT", Const, 0},
+		{"INTERFACE", Const, 0},
+		{"IsExported", Func, 13},
+		{"IsIdentifier", Func, 13},
+		{"IsKeyword", Func, 13},
+		{"LAND", Const, 0},
+		{"LBRACE", Const, 0},
+		{"LBRACK", Const, 0},
+		{"LEQ", Const, 0},
+		{"LOR", Const, 0},
+		{"LPAREN", Const, 0},
+		{"LSS", Const, 0},
+		{"Lookup", Func, 0},
+		{"LowestPrec", Const, 0},
+		{"MAP", Const, 0},
+		{"MUL", Const, 0},
+		{"MUL_ASSIGN", Const, 0},
+		{"NEQ", Const, 0},
+		{"NOT", Const, 0},
+		{"NewFileSet", Func, 0},
+		{"NoPos", Const, 0},
+		{"OR", Const, 0},
+		{"OR_ASSIGN", Const, 0},
+		{"PACKAGE", Const, 0},
+		{"PERIOD", Const, 0},
+		{"Pos", Type, 0},
+		{"Position", Type, 0},
+		{"Position.Column", Field, 0},
+		{"Position.Filename", Field, 0},
+		{"Position.Line", Field, 0},
+		{"Position.Offset", Field, 0},
+		{"QUO", Const, 0},
+		{"QUO_ASSIGN", Const, 0},
+		{"RANGE", Const, 0},
+		{"RBRACE", Const, 0},
+		{"RBRACK", Const, 0},
+		{"REM", Const, 0},
+		{"REM_ASSIGN", Const, 0},
+		{"RETURN", Const, 0},
+		{"RPAREN", Const, 0},
+		{"SELECT", Const, 0},
+		{"SEMICOLON", Const, 0},
+		{"SHL", Const, 0},
+		{"SHL_ASSIGN", Const, 0},
+		{"SHR", Const, 0},
+		{"SHR_ASSIGN", Const, 0},
+		{"STRING", Const, 0},
+		{"STRUCT", Const, 0},
+		{"SUB", Const, 0},
+		{"SUB_ASSIGN", Const, 0},
+		{"SWITCH", Const, 0},
+		{"TILDE", Const, 18},
+		{"TYPE", Const, 0},
+		{"Token", Type, 0},
+		{"UnaryPrec", Const, 0},
+		{"VAR", Const, 0},
+		{"XOR", Const, 0},
+		{"XOR_ASSIGN", Const, 0},
+	},
+	"go/types": {
+		{"(*Alias).Obj", Method, 22},
+		{"(*Alias).String", Method, 22},
+		{"(*Alias).Underlying", Method, 22},
+		{"(*ArgumentError).Error", Method, 18},
+		{"(*ArgumentError).Unwrap", Method, 18},
+		{"(*Array).Elem", Method, 5},
+		{"(*Array).Len", Method, 5},
+		{"(*Array).String", Method, 5},
+		{"(*Array).Underlying", Method, 5},
+		{"(*Basic).Info", Method, 5},
+		{"(*Basic).Kind", Method, 5},
+		{"(*Basic).Name", Method, 5},
+		{"(*Basic).String", Method, 5},
+		{"(*Basic).Underlying", Method, 5},
+		{"(*Builtin).Exported", Method, 5},
+		{"(*Builtin).Id", Method, 5},
+		{"(*Builtin).Name", Method, 5},
+		{"(*Builtin).Parent", Method, 5},
+		{"(*Builtin).Pkg", Method, 5},
+		{"(*Builtin).Pos", Method, 5},
+		{"(*Builtin).String", Method, 5},
+		{"(*Builtin).Type", Method, 5},
+		{"(*Chan).Dir", Method, 5},
+		{"(*Chan).Elem", Method, 5},
+		{"(*Chan).String", Method, 5},
+		{"(*Chan).Underlying", Method, 5},
+		{"(*Checker).Files", Method, 5},
+		{"(*Config).Check", Method, 5},
+		{"(*Const).Exported", Method, 5},
+		{"(*Const).Id", Method, 5},
+		{"(*Const).Name", Method, 5},
+		{"(*Const).Parent", Method, 5},
+		{"(*Const).Pkg", Method, 5},
+		{"(*Const).Pos", Method, 5},
+		{"(*Const).String", Method, 5},
+		{"(*Const).Type", Method, 5},
+		{"(*Const).Val", Method, 5},
+		{"(*Func).Exported", Method, 5},
+		{"(*Func).FullName", Method, 5},
+		{"(*Func).Id", Method, 5},
+		{"(*Func).Name", Method, 5},
+		{"(*Func).Origin", Method, 19},
+		{"(*Func).Parent", Method, 5},
+		{"(*Func).Pkg", Method, 5},
+		{"(*Func).Pos", Method, 5},
+		{"(*Func).Scope", Method, 5},
+		{"(*Func).String", Method, 5},
+		{"(*Func).Type", Method, 5},
+		{"(*Info).ObjectOf", Method, 5},
+		{"(*Info).PkgNameOf", Method, 22},
+		{"(*Info).TypeOf", Method, 5},
+		{"(*Initializer).String", Method, 5},
+		{"(*Interface).Complete", Method, 5},
+		{"(*Interface).Embedded", Method, 5},
+		{"(*Interface).EmbeddedType", Method, 11},
+		{"(*Interface).Empty", Method, 5},
+		{"(*Interface).ExplicitMethod", Method, 5},
+		{"(*Interface).IsComparable", Method, 18},
+		{"(*Interface).IsImplicit", Method, 18},
+		{"(*Interface).IsMethodSet", Method, 18},
+		{"(*Interface).MarkImplicit", Method, 18},
+		{"(*Interface).Method", Method, 5},
+		{"(*Interface).NumEmbeddeds", Method, 5},
+		{"(*Interface).NumExplicitMethods", Method, 5},
+		{"(*Interface).NumMethods", Method, 5},
+		{"(*Interface).String", Method, 5},
+		{"(*Interface).Underlying", Method, 5},
+		{"(*Label).Exported", Method, 5},
+		{"(*Label).Id", Method, 5},
+		{"(*Label).Name", Method, 5},
+		{"(*Label).Parent", Method, 5},
+		{"(*Label).Pkg", Method, 5},
+		{"(*Label).Pos", Method, 5},
+		{"(*Label).String", Method, 5},
+		{"(*Label).Type", Method, 5},
+		{"(*Map).Elem", Method, 5},
+		{"(*Map).Key", Method, 5},
+		{"(*Map).String", Method, 5},
+		{"(*Map).Underlying", Method, 5},
+		{"(*MethodSet).At", Method, 5},
+		{"(*MethodSet).Len", Method, 5},
+		{"(*MethodSet).Lookup", Method, 5},
+		{"(*MethodSet).String", Method, 5},
+		{"(*Named).AddMethod", Method, 5},
+		{"(*Named).Method", Method, 5},
+		{"(*Named).NumMethods", Method, 5},
+		{"(*Named).Obj", Method, 5},
+		{"(*Named).Origin", Method, 18},
+		{"(*Named).SetTypeParams", Method, 18},
+		{"(*Named).SetUnderlying", Method, 5},
+		{"(*Named).String", Method, 5},
+		{"(*Named).TypeArgs", Method, 18},
+		{"(*Named).TypeParams", Method, 18},
+		{"(*Named).Underlying", Method, 5},
+		{"(*Nil).Exported", Method, 5},
+		{"(*Nil).Id", Method, 5},
+		{"(*Nil).Name", Method, 5},
+		{"(*Nil).Parent", Method, 5},
+		{"(*Nil).Pkg", Method, 5},
+		{"(*Nil).Pos", Method, 5},
+		{"(*Nil).String", Method, 5},
+		{"(*Nil).Type", Method, 5},
+		{"(*Package).Complete", Method, 5},
+		{"(*Package).GoVersion", Method, 21},
+		{"(*Package).Imports", Method, 5},
+		{"(*Package).MarkComplete", Method, 5},
+		{"(*Package).Name", Method, 5},
+		{"(*Package).Path", Method, 5},
+		{"(*Package).Scope", Method, 5},
+		{"(*Package).SetImports", Method, 5},
+		{"(*Package).SetName", Method, 6},
+		{"(*Package).String", Method, 5},
+		{"(*PkgName).Exported", Method, 5},
+		{"(*PkgName).Id", Method, 5},
+		{"(*PkgName).Imported", Method, 5},
+		{"(*PkgName).Name", Method, 5},
+		{"(*PkgName).Parent", Method, 5},
+		{"(*PkgName).Pkg", Method, 5},
+		{"(*PkgName).Pos", Method, 5},
+		{"(*PkgName).String", Method, 5},
+		{"(*PkgName).Type", Method, 5},
+		{"(*Pointer).Elem", Method, 5},
+		{"(*Pointer).String", Method, 5},
+		{"(*Pointer).Underlying", Method, 5},
+		{"(*Scope).Child", Method, 5},
+		{"(*Scope).Contains", Method, 5},
+		{"(*Scope).End", Method, 5},
+		{"(*Scope).Innermost", Method, 5},
+		{"(*Scope).Insert", Method, 5},
+		{"(*Scope).Len", Method, 5},
+		{"(*Scope).Lookup", Method, 5},
+		{"(*Scope).LookupParent", Method, 5},
+		{"(*Scope).Names", Method, 5},
+		{"(*Scope).NumChildren", Method, 5},
+		{"(*Scope).Parent", Method, 5},
+		{"(*Scope).Pos", Method, 5},
+		{"(*Scope).String", Method, 5},
+		{"(*Scope).WriteTo", Method, 5},
+		{"(*Selection).Index", Method, 5},
+		{"(*Selection).Indirect", Method, 5},
+		{"(*Selection).Kind", Method, 5},
+		{"(*Selection).Obj", Method, 5},
+		{"(*Selection).Recv", Method, 5},
+		{"(*Selection).String", Method, 5},
+		{"(*Selection).Type", Method, 5},
+		{"(*Signature).Params", Method, 5},
+		{"(*Signature).Recv", Method, 5},
+		{"(*Signature).RecvTypeParams", Method, 18},
+		{"(*Signature).Results", Method, 5},
+		{"(*Signature).String", Method, 5},
+		{"(*Signature).TypeParams", Method, 18},
+		{"(*Signature).Underlying", Method, 5},
+		{"(*Signature).Variadic", Method, 5},
+		{"(*Slice).Elem", Method, 5},
+		{"(*Slice).String", Method, 5},
+		{"(*Slice).Underlying", Method, 5},
+		{"(*StdSizes).Alignof", Method, 5},
+		{"(*StdSizes).Offsetsof", Method, 5},
+		{"(*StdSizes).Sizeof", Method, 5},
+		{"(*Struct).Field", Method, 5},
+		{"(*Struct).NumFields", Method, 5},
+		{"(*Struct).String", Method, 5},
+		{"(*Struct).Tag", Method, 5},
+		{"(*Struct).Underlying", Method, 5},
+		{"(*Term).String", Method, 18},
+		{"(*Term).Tilde", Method, 18},
+		{"(*Term).Type", Method, 18},
+		{"(*Tuple).At", Method, 5},
+		{"(*Tuple).Len", Method, 5},
+		{"(*Tuple).String", Method, 5},
+		{"(*Tuple).Underlying", Method, 5},
+		{"(*TypeList).At", Method, 18},
+		{"(*TypeList).Len", Method, 18},
+		{"(*TypeName).Exported", Method, 5},
+		{"(*TypeName).Id", Method, 5},
+		{"(*TypeName).IsAlias", Method, 9},
+		{"(*TypeName).Name", Method, 5},
+		{"(*TypeName).Parent", Method, 5},
+		{"(*TypeName).Pkg", Method, 5},
+		{"(*TypeName).Pos", Method, 5},
+		{"(*TypeName).String", Method, 5},
+		{"(*TypeName).Type", Method, 5},
+		{"(*TypeParam).Constraint", Method, 18},
+		{"(*TypeParam).Index", Method, 18},
+		{"(*TypeParam).Obj", Method, 18},
+		{"(*TypeParam).SetConstraint", Method, 18},
+		{"(*TypeParam).String", Method, 18},
+		{"(*TypeParam).Underlying", Method, 18},
+		{"(*TypeParamList).At", Method, 18},
+		{"(*TypeParamList).Len", Method, 18},
+		{"(*Union).Len", Method, 18},
+		{"(*Union).String", Method, 18},
+		{"(*Union).Term", Method, 18},
+		{"(*Union).Underlying", Method, 18},
+		{"(*Var).Anonymous", Method, 5},
+		{"(*Var).Embedded", Method, 11},
+		{"(*Var).Exported", Method, 5},
+		{"(*Var).Id", Method, 5},
+		{"(*Var).IsField", Method, 5},
+		{"(*Var).Name", Method, 5},
+		{"(*Var).Origin", Method, 19},
+		{"(*Var).Parent", Method, 5},
+		{"(*Var).Pkg", Method, 5},
+		{"(*Var).Pos", Method, 5},
+		{"(*Var).String", Method, 5},
+		{"(*Var).Type", Method, 5},
+		{"(Checker).ObjectOf", Method, 5},
+		{"(Checker).PkgNameOf", Method, 22},
+		{"(Checker).TypeOf", Method, 5},
+		{"(Error).Error", Method, 5},
+		{"(TypeAndValue).Addressable", Method, 5},
+		{"(TypeAndValue).Assignable", Method, 5},
+		{"(TypeAndValue).HasOk", Method, 5},
+		{"(TypeAndValue).IsBuiltin", Method, 5},
+		{"(TypeAndValue).IsNil", Method, 5},
+		{"(TypeAndValue).IsType", Method, 5},
+		{"(TypeAndValue).IsValue", Method, 5},
+		{"(TypeAndValue).IsVoid", Method, 5},
+		{"Alias", Type, 22},
+		{"ArgumentError", Type, 18},
+		{"ArgumentError.Err", Field, 18},
+		{"ArgumentError.Index", Field, 18},
+		{"Array", Type, 5},
+		{"AssertableTo", Func, 5},
+		{"AssignableTo", Func, 5},
+		{"Basic", Type, 5},
+		{"BasicInfo", Type, 5},
+		{"BasicKind", Type, 5},
+		{"Bool", Const, 5},
+		{"Builtin", Type, 5},
+		{"Byte", Const, 5},
+		{"Chan", Type, 5},
+		{"ChanDir", Type, 5},
+		{"CheckExpr", Func, 13},
+		{"Checker", Type, 5},
+		{"Checker.Info", Field, 5},
+		{"Comparable", Func, 5},
+		{"Complex128", Const, 5},
+		{"Complex64", Const, 5},
+		{"Config", Type, 5},
+		{"Config.Context", Field, 18},
+		{"Config.DisableUnusedImportCheck", Field, 5},
+		{"Config.Error", Field, 5},
+		{"Config.FakeImportC", Field, 5},
+		{"Config.GoVersion", Field, 18},
+		{"Config.IgnoreFuncBodies", Field, 5},
+		{"Config.Importer", Field, 5},
+		{"Config.Sizes", Field, 5},
+		{"Const", Type, 5},
+		{"Context", Type, 18},
+		{"ConvertibleTo", Func, 5},
+		{"DefPredeclaredTestFuncs", Func, 5},
+		{"Default", Func, 8},
+		{"Error", Type, 5},
+		{"Error.Fset", Field, 5},
+		{"Error.Msg", Field, 5},
+		{"Error.Pos", Field, 5},
+		{"Error.Soft", Field, 5},
+		{"Eval", Func, 5},
+		{"ExprString", Func, 5},
+		{"FieldVal", Const, 5},
+		{"Float32", Const, 5},
+		{"Float64", Const, 5},
+		{"Func", Type, 5},
+		{"Id", Func, 5},
+		{"Identical", Func, 5},
+		{"IdenticalIgnoreTags", Func, 8},
+		{"Implements", Func, 5},
+		{"ImportMode", Type, 6},
+		{"Importer", Type, 5},
+		{"ImporterFrom", Type, 6},
+		{"Info", Type, 5},
+		{"Info.Defs", Field, 5},
+		{"Info.FileVersions", Field, 22},
+		{"Info.Implicits", Field, 5},
+		{"Info.InitOrder", Field, 5},
+		{"Info.Instances", Field, 18},
+		{"Info.Scopes", Field, 5},
+		{"Info.Selections", Field, 5},
+		{"Info.Types", Field, 5},
+		{"Info.Uses", Field, 5},
+		{"Initializer", Type, 5},
+		{"Initializer.Lhs", Field, 5},
+		{"Initializer.Rhs", Field, 5},
+		{"Instance", Type, 18},
+		{"Instance.Type", Field, 18},
+		{"Instance.TypeArgs", Field, 18},
+		{"Instantiate", Func, 18},
+		{"Int", Const, 5},
+		{"Int16", Const, 5},
+		{"Int32", Const, 5},
+		{"Int64", Const, 5},
+		{"Int8", Const, 5},
+		{"Interface", Type, 5},
+		{"Invalid", Const, 5},
+		{"IsBoolean", Const, 5},
+		{"IsComplex", Const, 5},
+		{"IsConstType", Const, 5},
+		{"IsFloat", Const, 5},
+		{"IsInteger", Const, 5},
+		{"IsInterface", Func, 5},
+		{"IsNumeric", Const, 5},
+		{"IsOrdered", Const, 5},
+		{"IsString", Const, 5},
+		{"IsUnsigned", Const, 5},
+		{"IsUntyped", Const, 5},
+		{"Label", Type, 5},
+		{"LookupFieldOrMethod", Func, 5},
+		{"Map", Type, 5},
+		{"MethodExpr", Const, 5},
+		{"MethodSet", Type, 5},
+		{"MethodVal", Const, 5},
+		{"MissingMethod", Func, 5},
+		{"Named", Type, 5},
+		{"NewAlias", Func, 22},
+		{"NewArray", Func, 5},
+		{"NewChan", Func, 5},
+		{"NewChecker", Func, 5},
+		{"NewConst", Func, 5},
+		{"NewContext", Func, 18},
+		{"NewField", Func, 5},
+		{"NewFunc", Func, 5},
+		{"NewInterface", Func, 5},
+		{"NewInterfaceType", Func, 11},
+		{"NewLabel", Func, 5},
+		{"NewMap", Func, 5},
+		{"NewMethodSet", Func, 5},
+		{"NewNamed", Func, 5},
+		{"NewPackage", Func, 5},
+		{"NewParam", Func, 5},
+		{"NewPkgName", Func, 5},
+		{"NewPointer", Func, 5},
+		{"NewScope", Func, 5},
+		{"NewSignature", Func, 5},
+		{"NewSignatureType", Func, 18},
+		{"NewSlice", Func, 5},
+		{"NewStruct", Func, 5},
+		{"NewTerm", Func, 18},
+		{"NewTuple", Func, 5},
+		{"NewTypeName", Func, 5},
+		{"NewTypeParam", Func, 18},
+		{"NewUnion", Func, 18},
+		{"NewVar", Func, 5},
+		{"Nil", Type, 5},
+		{"Object", Type, 5},
+		{"ObjectString", Func, 5},
+		{"Package", Type, 5},
+		{"PkgName", Type, 5},
+		{"Pointer", Type, 5},
+		{"Qualifier", Type, 5},
+		{"RecvOnly", Const, 5},
+		{"RelativeTo", Func, 5},
+		{"Rune", Const, 5},
+		{"Satisfies", Func, 20},
+		{"Scope", Type, 5},
+		{"Selection", Type, 5},
+		{"SelectionKind", Type, 5},
+		{"SelectionString", Func, 5},
+		{"SendOnly", Const, 5},
+		{"SendRecv", Const, 5},
+		{"Signature", Type, 5},
+		{"Sizes", Type, 5},
+		{"SizesFor", Func, 9},
+		{"Slice", Type, 5},
+		{"StdSizes", Type, 5},
+		{"StdSizes.MaxAlign", Field, 5},
+		{"StdSizes.WordSize", Field, 5},
+		{"String", Const, 5},
+		{"Struct", Type, 5},
+		{"Term", Type, 18},
+		{"Tuple", Type, 5},
+		{"Typ", Var, 5},
+		{"Type", Type, 5},
+		{"TypeAndValue", Type, 5},
+		{"TypeAndValue.Type", Field, 5},
+		{"TypeAndValue.Value", Field, 5},
+		{"TypeList", Type, 18},
+		{"TypeName", Type, 5},
+		{"TypeParam", Type, 18},
+		{"TypeParamList", Type, 18},
+		{"TypeString", Func, 5},
+		{"Uint", Const, 5},
+		{"Uint16", Const, 5},
+		{"Uint32", Const, 5},
+		{"Uint64", Const, 5},
+		{"Uint8", Const, 5},
+		{"Uintptr", Const, 5},
+		{"Unalias", Func, 22},
+		{"Union", Type, 18},
+		{"Universe", Var, 5},
+		{"Unsafe", Var, 5},
+		{"UnsafePointer", Const, 5},
+		{"UntypedBool", Const, 5},
+		{"UntypedComplex", Const, 5},
+		{"UntypedFloat", Const, 5},
+		{"UntypedInt", Const, 5},
+		{"UntypedNil", Const, 5},
+		{"UntypedRune", Const, 5},
+		{"UntypedString", Const, 5},
+		{"Var", Type, 5},
+		{"WriteExpr", Func, 5},
+		{"WriteSignature", Func, 5},
+		{"WriteType", Func, 5},
+	},
+	"go/version": {
+		{"Compare", Func, 22},
+		{"IsValid", Func, 22},
+		{"Lang", Func, 22},
+	},
+	"hash": {
+		{"Hash", Type, 0},
+		{"Hash32", Type, 0},
+		{"Hash64", Type, 0},
+	},
+	"hash/adler32": {
+		{"Checksum", Func, 0},
+		{"New", Func, 0},
+		{"Size", Const, 0},
+	},
+	"hash/crc32": {
+		{"Castagnoli", Const, 0},
+		{"Checksum", Func, 0},
+		{"ChecksumIEEE", Func, 0},
+		{"IEEE", Const, 0},
+		{"IEEETable", Var, 0},
+		{"Koopman", Const, 0},
+		{"MakeTable", Func, 0},
+		{"New", Func, 0},
+		{"NewIEEE", Func, 0},
+		{"Size", Const, 0},
+		{"Table", Type, 0},
+		{"Update", Func, 0},
+	},
+	"hash/crc64": {
+		{"Checksum", Func, 0},
+		{"ECMA", Const, 0},
+		{"ISO", Const, 0},
+		{"MakeTable", Func, 0},
+		{"New", Func, 0},
+		{"Size", Const, 0},
+		{"Table", Type, 0},
+		{"Update", Func, 0},
+	},
+	"hash/fnv": {
+		{"New128", Func, 9},
+		{"New128a", Func, 9},
+		{"New32", Func, 0},
+		{"New32a", Func, 0},
+		{"New64", Func, 0},
+		{"New64a", Func, 0},
+	},
+	"hash/maphash": {
+		{"(*Hash).BlockSize", Method, 14},
+		{"(*Hash).Reset", Method, 14},
+		{"(*Hash).Seed", Method, 14},
+		{"(*Hash).SetSeed", Method, 14},
+		{"(*Hash).Size", Method, 14},
+		{"(*Hash).Sum", Method, 14},
+		{"(*Hash).Sum64", Method, 14},
+		{"(*Hash).Write", Method, 14},
+		{"(*Hash).WriteByte", Method, 14},
+		{"(*Hash).WriteString", Method, 14},
+		{"Bytes", Func, 19},
+		{"Hash", Type, 14},
+		{"MakeSeed", Func, 14},
+		{"Seed", Type, 14},
+		{"String", Func, 19},
+	},
+	"html": {
+		{"EscapeString", Func, 0},
+		{"UnescapeString", Func, 0},
+	},
+	"html/template": {
+		{"(*Error).Error", Method, 0},
+		{"(*Template).AddParseTree", Method, 0},
+		{"(*Template).Clone", Method, 0},
+		{"(*Template).DefinedTemplates", Method, 6},
+		{"(*Template).Delims", Method, 0},
+		{"(*Template).Execute", Method, 0},
+		{"(*Template).ExecuteTemplate", Method, 0},
+		{"(*Template).Funcs", Method, 0},
+		{"(*Template).Lookup", Method, 0},
+		{"(*Template).Name", Method, 0},
+		{"(*Template).New", Method, 0},
+		{"(*Template).Option", Method, 5},
+		{"(*Template).Parse", Method, 0},
+		{"(*Template).ParseFS", Method, 16},
+		{"(*Template).ParseFiles", Method, 0},
+		{"(*Template).ParseGlob", Method, 0},
+		{"(*Template).Templates", Method, 0},
+		{"CSS", Type, 0},
+		{"ErrAmbigContext", Const, 0},
+		{"ErrBadHTML", Const, 0},
+		{"ErrBranchEnd", Const, 0},
+		{"ErrEndContext", Const, 0},
+		{"ErrJSTemplate", Const, 21},
+		{"ErrNoSuchTemplate", Const, 0},
+		{"ErrOutputContext", Const, 0},
+		{"ErrPartialCharset", Const, 0},
+		{"ErrPartialEscape", Const, 0},
+		{"ErrPredefinedEscaper", Const, 9},
+		{"ErrRangeLoopReentry", Const, 0},
+		{"ErrSlashAmbig", Const, 0},
+		{"Error", Type, 0},
+		{"Error.Description", Field, 0},
+		{"Error.ErrorCode", Field, 0},
+		{"Error.Line", Field, 0},
+		{"Error.Name", Field, 0},
+		{"Error.Node", Field, 4},
+		{"ErrorCode", Type, 0},
+		{"FuncMap", Type, 0},
+		{"HTML", Type, 0},
+		{"HTMLAttr", Type, 0},
+		{"HTMLEscape", Func, 0},
+		{"HTMLEscapeString", Func, 0},
+		{"HTMLEscaper", Func, 0},
+		{"IsTrue", Func, 6},
+		{"JS", Type, 0},
+		{"JSEscape", Func, 0},
+		{"JSEscapeString", Func, 0},
+		{"JSEscaper", Func, 0},
+		{"JSStr", Type, 0},
+		{"Must", Func, 0},
+		{"New", Func, 0},
+		{"OK", Const, 0},
+		{"ParseFS", Func, 16},
+		{"ParseFiles", Func, 0},
+		{"ParseGlob", Func, 0},
+		{"Srcset", Type, 10},
+		{"Template", Type, 0},
+		{"Template.Tree", Field, 2},
+		{"URL", Type, 0},
+		{"URLQueryEscaper", Func, 0},
+	},
+	"image": {
+		{"(*Alpha).AlphaAt", Method, 4},
+		{"(*Alpha).At", Method, 0},
+		{"(*Alpha).Bounds", Method, 0},
+		{"(*Alpha).ColorModel", Method, 0},
+		{"(*Alpha).Opaque", Method, 0},
+		{"(*Alpha).PixOffset", Method, 0},
+		{"(*Alpha).RGBA64At", Method, 17},
+		{"(*Alpha).Set", Method, 0},
+		{"(*Alpha).SetAlpha", Method, 0},
+		{"(*Alpha).SetRGBA64", Method, 17},
+		{"(*Alpha).SubImage", Method, 0},
+		{"(*Alpha16).Alpha16At", Method, 4},
+		{"(*Alpha16).At", Method, 0},
+		{"(*Alpha16).Bounds", Method, 0},
+		{"(*Alpha16).ColorModel", Method, 0},
+		{"(*Alpha16).Opaque", Method, 0},
+		{"(*Alpha16).PixOffset", Method, 0},
+		{"(*Alpha16).RGBA64At", Method, 17},
+		{"(*Alpha16).Set", Method, 0},
+		{"(*Alpha16).SetAlpha16", Method, 0},
+		{"(*Alpha16).SetRGBA64", Method, 17},
+		{"(*Alpha16).SubImage", Method, 0},
+		{"(*CMYK).At", Method, 5},
+		{"(*CMYK).Bounds", Method, 5},
+		{"(*CMYK).CMYKAt", Method, 5},
+		{"(*CMYK).ColorModel", Method, 5},
+		{"(*CMYK).Opaque", Method, 5},
+		{"(*CMYK).PixOffset", Method, 5},
+		{"(*CMYK).RGBA64At", Method, 17},
+		{"(*CMYK).Set", Method, 5},
+		{"(*CMYK).SetCMYK", Method, 5},
+		{"(*CMYK).SetRGBA64", Method, 17},
+		{"(*CMYK).SubImage", Method, 5},
+		{"(*Gray).At", Method, 0},
+		{"(*Gray).Bounds", Method, 0},
+		{"(*Gray).ColorModel", Method, 0},
+		{"(*Gray).GrayAt", Method, 4},
+		{"(*Gray).Opaque", Method, 0},
+		{"(*Gray).PixOffset", Method, 0},
+		{"(*Gray).RGBA64At", Method, 17},
+		{"(*Gray).Set", Method, 0},
+		{"(*Gray).SetGray", Method, 0},
+		{"(*Gray).SetRGBA64", Method, 17},
+		{"(*Gray).SubImage", Method, 0},
+		{"(*Gray16).At", Method, 0},
+		{"(*Gray16).Bounds", Method, 0},
+		{"(*Gray16).ColorModel", Method, 0},
+		{"(*Gray16).Gray16At", Method, 4},
+		{"(*Gray16).Opaque", Method, 0},
+		{"(*Gray16).PixOffset", Method, 0},
+		{"(*Gray16).RGBA64At", Method, 17},
+		{"(*Gray16).Set", Method, 0},
+		{"(*Gray16).SetGray16", Method, 0},
+		{"(*Gray16).SetRGBA64", Method, 17},
+		{"(*Gray16).SubImage", Method, 0},
+		{"(*NRGBA).At", Method, 0},
+		{"(*NRGBA).Bounds", Method, 0},
+		{"(*NRGBA).ColorModel", Method, 0},
+		{"(*NRGBA).NRGBAAt", Method, 4},
+		{"(*NRGBA).Opaque", Method, 0},
+		{"(*NRGBA).PixOffset", Method, 0},
+		{"(*NRGBA).RGBA64At", Method, 17},
+		{"(*NRGBA).Set", Method, 0},
+		{"(*NRGBA).SetNRGBA", Method, 0},
+		{"(*NRGBA).SetRGBA64", Method, 17},
+		{"(*NRGBA).SubImage", Method, 0},
+		{"(*NRGBA64).At", Method, 0},
+		{"(*NRGBA64).Bounds", Method, 0},
+		{"(*NRGBA64).ColorModel", Method, 0},
+		{"(*NRGBA64).NRGBA64At", Method, 4},
+		{"(*NRGBA64).Opaque", Method, 0},
+		{"(*NRGBA64).PixOffset", Method, 0},
+		{"(*NRGBA64).RGBA64At", Method, 17},
+		{"(*NRGBA64).Set", Method, 0},
+		{"(*NRGBA64).SetNRGBA64", Method, 0},
+		{"(*NRGBA64).SetRGBA64", Method, 17},
+		{"(*NRGBA64).SubImage", Method, 0},
+		{"(*NYCbCrA).AOffset", Method, 6},
+		{"(*NYCbCrA).At", Method, 6},
+		{"(*NYCbCrA).Bounds", Method, 6},
+		{"(*NYCbCrA).COffset", Method, 6},
+		{"(*NYCbCrA).ColorModel", Method, 6},
+		{"(*NYCbCrA).NYCbCrAAt", Method, 6},
+		{"(*NYCbCrA).Opaque", Method, 6},
+		{"(*NYCbCrA).RGBA64At", Method, 17},
+		{"(*NYCbCrA).SubImage", Method, 6},
+		{"(*NYCbCrA).YCbCrAt", Method, 6},
+		{"(*NYCbCrA).YOffset", Method, 6},
+		{"(*Paletted).At", Method, 0},
+		{"(*Paletted).Bounds", Method, 0},
+		{"(*Paletted).ColorIndexAt", Method, 0},
+		{"(*Paletted).ColorModel", Method, 0},
+		{"(*Paletted).Opaque", Method, 0},
+		{"(*Paletted).PixOffset", Method, 0},
+		{"(*Paletted).RGBA64At", Method, 17},
+		{"(*Paletted).Set", Method, 0},
+		{"(*Paletted).SetColorIndex", Method, 0},
+		{"(*Paletted).SetRGBA64", Method, 17},
+		{"(*Paletted).SubImage", Method, 0},
+		{"(*RGBA).At", Method, 0},
+		{"(*RGBA).Bounds", Method, 0},
+		{"(*RGBA).ColorModel", Method, 0},
+		{"(*RGBA).Opaque", Method, 0},
+		{"(*RGBA).PixOffset", Method, 0},
+		{"(*RGBA).RGBA64At", Method, 17},
+		{"(*RGBA).RGBAAt", Method, 4},
+		{"(*RGBA).Set", Method, 0},
+		{"(*RGBA).SetRGBA", Method, 0},
+		{"(*RGBA).SetRGBA64", Method, 17},
+		{"(*RGBA).SubImage", Method, 0},
+		{"(*RGBA64).At", Method, 0},
+		{"(*RGBA64).Bounds", Method, 0},
+		{"(*RGBA64).ColorModel", Method, 0},
+		{"(*RGBA64).Opaque", Method, 0},
+		{"(*RGBA64).PixOffset", Method, 0},
+		{"(*RGBA64).RGBA64At", Method, 4},
+		{"(*RGBA64).Set", Method, 0},
+		{"(*RGBA64).SetRGBA64", Method, 0},
+		{"(*RGBA64).SubImage", Method, 0},
+		{"(*Uniform).At", Method, 0},
+		{"(*Uniform).Bounds", Method, 0},
+		{"(*Uniform).ColorModel", Method, 0},
+		{"(*Uniform).Convert", Method, 0},
+		{"(*Uniform).Opaque", Method, 0},
+		{"(*Uniform).RGBA", Method, 0},
+		{"(*Uniform).RGBA64At", Method, 17},
+		{"(*YCbCr).At", Method, 0},
+		{"(*YCbCr).Bounds", Method, 0},
+		{"(*YCbCr).COffset", Method, 0},
+		{"(*YCbCr).ColorModel", Method, 0},
+		{"(*YCbCr).Opaque", Method, 0},
+		{"(*YCbCr).RGBA64At", Method, 17},
+		{"(*YCbCr).SubImage", Method, 0},
+		{"(*YCbCr).YCbCrAt", Method, 4},
+		{"(*YCbCr).YOffset", Method, 0},
+		{"(Point).Add", Method, 0},
+		{"(Point).Div", Method, 0},
+		{"(Point).Eq", Method, 0},
+		{"(Point).In", Method, 0},
+		{"(Point).Mod", Method, 0},
+		{"(Point).Mul", Method, 0},
+		{"(Point).String", Method, 0},
+		{"(Point).Sub", Method, 0},
+		{"(Rectangle).Add", Method, 0},
+		{"(Rectangle).At", Method, 5},
+		{"(Rectangle).Bounds", Method, 5},
+		{"(Rectangle).Canon", Method, 0},
+		{"(Rectangle).ColorModel", Method, 5},
+		{"(Rectangle).Dx", Method, 0},
+		{"(Rectangle).Dy", Method, 0},
+		{"(Rectangle).Empty", Method, 0},
+		{"(Rectangle).Eq", Method, 0},
+		{"(Rectangle).In", Method, 0},
+		{"(Rectangle).Inset", Method, 0},
+		{"(Rectangle).Intersect", Method, 0},
+		{"(Rectangle).Overlaps", Method, 0},
+		{"(Rectangle).RGBA64At", Method, 17},
+		{"(Rectangle).Size", Method, 0},
+		{"(Rectangle).String", Method, 0},
+		{"(Rectangle).Sub", Method, 0},
+		{"(Rectangle).Union", Method, 0},
+		{"(YCbCrSubsampleRatio).String", Method, 0},
+		{"Alpha", Type, 0},
+		{"Alpha.Pix", Field, 0},
+		{"Alpha.Rect", Field, 0},
+		{"Alpha.Stride", Field, 0},
+		{"Alpha16", Type, 0},
+		{"Alpha16.Pix", Field, 0},
+		{"Alpha16.Rect", Field, 0},
+		{"Alpha16.Stride", Field, 0},
+		{"Black", Var, 0},
+		{"CMYK", Type, 5},
+		{"CMYK.Pix", Field, 5},
+		{"CMYK.Rect", Field, 5},
+		{"CMYK.Stride", Field, 5},
+		{"Config", Type, 0},
+		{"Config.ColorModel", Field, 0},
+		{"Config.Height", Field, 0},
+		{"Config.Width", Field, 0},
+		{"Decode", Func, 0},
+		{"DecodeConfig", Func, 0},
+		{"ErrFormat", Var, 0},
+		{"Gray", Type, 0},
+		{"Gray.Pix", Field, 0},
+		{"Gray.Rect", Field, 0},
+		{"Gray.Stride", Field, 0},
+		{"Gray16", Type, 0},
+		{"Gray16.Pix", Field, 0},
+		{"Gray16.Rect", Field, 0},
+		{"Gray16.Stride", Field, 0},
+		{"Image", Type, 0},
+		{"NRGBA", Type, 0},
+		{"NRGBA.Pix", Field, 0},
+		{"NRGBA.Rect", Field, 0},
+		{"NRGBA.Stride", Field, 0},
+		{"NRGBA64", Type, 0},
+		{"NRGBA64.Pix", Field, 0},
+		{"NRGBA64.Rect", Field, 0},
+		{"NRGBA64.Stride", Field, 0},
+		{"NYCbCrA", Type, 6},
+		{"NYCbCrA.A", Field, 6},
+		{"NYCbCrA.AStride", Field, 6},
+		{"NYCbCrA.YCbCr", Field, 6},
+		{"NewAlpha", Func, 0},
+		{"NewAlpha16", Func, 0},
+		{"NewCMYK", Func, 5},
+		{"NewGray", Func, 0},
+		{"NewGray16", Func, 0},
+		{"NewNRGBA", Func, 0},
+		{"NewNRGBA64", Func, 0},
+		{"NewNYCbCrA", Func, 6},
+		{"NewPaletted", Func, 0},
+		{"NewRGBA", Func, 0},
+		{"NewRGBA64", Func, 0},
+		{"NewUniform", Func, 0},
+		{"NewYCbCr", Func, 0},
+		{"Opaque", Var, 0},
+		{"Paletted", Type, 0},
+		{"Paletted.Palette", Field, 0},
+		{"Paletted.Pix", Field, 0},
+		{"Paletted.Rect", Field, 0},
+		{"Paletted.Stride", Field, 0},
+		{"PalettedImage", Type, 0},
+		{"Point", Type, 0},
+		{"Point.X", Field, 0},
+		{"Point.Y", Field, 0},
+		{"Pt", Func, 0},
+		{"RGBA", Type, 0},
+		{"RGBA.Pix", Field, 0},
+		{"RGBA.Rect", Field, 0},
+		{"RGBA.Stride", Field, 0},
+		{"RGBA64", Type, 0},
+		{"RGBA64.Pix", Field, 0},
+		{"RGBA64.Rect", Field, 0},
+		{"RGBA64.Stride", Field, 0},
+		{"RGBA64Image", Type, 17},
+		{"Rect", Func, 0},
+		{"Rectangle", Type, 0},
+		{"Rectangle.Max", Field, 0},
+		{"Rectangle.Min", Field, 0},
+		{"RegisterFormat", Func, 0},
+		{"Transparent", Var, 0},
+		{"Uniform", Type, 0},
+		{"Uniform.C", Field, 0},
+		{"White", Var, 0},
+		{"YCbCr", Type, 0},
+		{"YCbCr.CStride", Field, 0},
+		{"YCbCr.Cb", Field, 0},
+		{"YCbCr.Cr", Field, 0},
+		{"YCbCr.Rect", Field, 0},
+		{"YCbCr.SubsampleRatio", Field, 0},
+		{"YCbCr.Y", Field, 0},
+		{"YCbCr.YStride", Field, 0},
+		{"YCbCrSubsampleRatio", Type, 0},
+		{"YCbCrSubsampleRatio410", Const, 5},
+		{"YCbCrSubsampleRatio411", Const, 5},
+		{"YCbCrSubsampleRatio420", Const, 0},
+		{"YCbCrSubsampleRatio422", Const, 0},
+		{"YCbCrSubsampleRatio440", Const, 1},
+		{"YCbCrSubsampleRatio444", Const, 0},
+		{"ZP", Var, 0},
+		{"ZR", Var, 0},
+	},
+	"image/color": {
+		{"(Alpha).RGBA", Method, 0},
+		{"(Alpha16).RGBA", Method, 0},
+		{"(CMYK).RGBA", Method, 5},
+		{"(Gray).RGBA", Method, 0},
+		{"(Gray16).RGBA", Method, 0},
+		{"(NRGBA).RGBA", Method, 0},
+		{"(NRGBA64).RGBA", Method, 0},
+		{"(NYCbCrA).RGBA", Method, 6},
+		{"(Palette).Convert", Method, 0},
+		{"(Palette).Index", Method, 0},
+		{"(RGBA).RGBA", Method, 0},
+		{"(RGBA64).RGBA", Method, 0},
+		{"(YCbCr).RGBA", Method, 0},
+		{"Alpha", Type, 0},
+		{"Alpha.A", Field, 0},
+		{"Alpha16", Type, 0},
+		{"Alpha16.A", Field, 0},
+		{"Alpha16Model", Var, 0},
+		{"AlphaModel", Var, 0},
+		{"Black", Var, 0},
+		{"CMYK", Type, 5},
+		{"CMYK.C", Field, 5},
+		{"CMYK.K", Field, 5},
+		{"CMYK.M", Field, 5},
+		{"CMYK.Y", Field, 5},
+		{"CMYKModel", Var, 5},
+		{"CMYKToRGB", Func, 5},
+		{"Color", Type, 0},
+		{"Gray", Type, 0},
+		{"Gray.Y", Field, 0},
+		{"Gray16", Type, 0},
+		{"Gray16.Y", Field, 0},
+		{"Gray16Model", Var, 0},
+		{"GrayModel", Var, 0},
+		{"Model", Type, 0},
+		{"ModelFunc", Func, 0},
+		{"NRGBA", Type, 0},
+		{"NRGBA.A", Field, 0},
+		{"NRGBA.B", Field, 0},
+		{"NRGBA.G", Field, 0},
+		{"NRGBA.R", Field, 0},
+		{"NRGBA64", Type, 0},
+		{"NRGBA64.A", Field, 0},
+		{"NRGBA64.B", Field, 0},
+		{"NRGBA64.G", Field, 0},
+		{"NRGBA64.R", Field, 0},
+		{"NRGBA64Model", Var, 0},
+		{"NRGBAModel", Var, 0},
+		{"NYCbCrA", Type, 6},
+		{"NYCbCrA.A", Field, 6},
+		{"NYCbCrA.YCbCr", Field, 6},
+		{"NYCbCrAModel", Var, 6},
+		{"Opaque", Var, 0},
+		{"Palette", Type, 0},
+		{"RGBA", Type, 0},
+		{"RGBA.A", Field, 0},
+		{"RGBA.B", Field, 0},
+		{"RGBA.G", Field, 0},
+		{"RGBA.R", Field, 0},
+		{"RGBA64", Type, 0},
+		{"RGBA64.A", Field, 0},
+		{"RGBA64.B", Field, 0},
+		{"RGBA64.G", Field, 0},
+		{"RGBA64.R", Field, 0},
+		{"RGBA64Model", Var, 0},
+		{"RGBAModel", Var, 0},
+		{"RGBToCMYK", Func, 5},
+		{"RGBToYCbCr", Func, 0},
+		{"Transparent", Var, 0},
+		{"White", Var, 0},
+		{"YCbCr", Type, 0},
+		{"YCbCr.Cb", Field, 0},
+		{"YCbCr.Cr", Field, 0},
+		{"YCbCr.Y", Field, 0},
+		{"YCbCrModel", Var, 0},
+		{"YCbCrToRGB", Func, 0},
+	},
+	"image/color/palette": {
+		{"Plan9", Var, 2},
+		{"WebSafe", Var, 2},
+	},
+	"image/draw": {
+		{"(Op).Draw", Method, 2},
+		{"Draw", Func, 0},
+		{"DrawMask", Func, 0},
+		{"Drawer", Type, 2},
+		{"FloydSteinberg", Var, 2},
+		{"Image", Type, 0},
+		{"Op", Type, 0},
+		{"Over", Const, 0},
+		{"Quantizer", Type, 2},
+		{"RGBA64Image", Type, 17},
+		{"Src", Const, 0},
+	},
+	"image/gif": {
+		{"Decode", Func, 0},
+		{"DecodeAll", Func, 0},
+		{"DecodeConfig", Func, 0},
+		{"DisposalBackground", Const, 5},
+		{"DisposalNone", Const, 5},
+		{"DisposalPrevious", Const, 5},
+		{"Encode", Func, 2},
+		{"EncodeAll", Func, 2},
+		{"GIF", Type, 0},
+		{"GIF.BackgroundIndex", Field, 5},
+		{"GIF.Config", Field, 5},
+		{"GIF.Delay", Field, 0},
+		{"GIF.Disposal", Field, 5},
+		{"GIF.Image", Field, 0},
+		{"GIF.LoopCount", Field, 0},
+		{"Options", Type, 2},
+		{"Options.Drawer", Field, 2},
+		{"Options.NumColors", Field, 2},
+		{"Options.Quantizer", Field, 2},
+	},
+	"image/jpeg": {
+		{"(FormatError).Error", Method, 0},
+		{"(UnsupportedError).Error", Method, 0},
+		{"Decode", Func, 0},
+		{"DecodeConfig", Func, 0},
+		{"DefaultQuality", Const, 0},
+		{"Encode", Func, 0},
+		{"FormatError", Type, 0},
+		{"Options", Type, 0},
+		{"Options.Quality", Field, 0},
+		{"Reader", Type, 0},
+		{"UnsupportedError", Type, 0},
+	},
+	"image/png": {
+		{"(*Encoder).Encode", Method, 4},
+		{"(FormatError).Error", Method, 0},
+		{"(UnsupportedError).Error", Method, 0},
+		{"BestCompression", Const, 4},
+		{"BestSpeed", Const, 4},
+		{"CompressionLevel", Type, 4},
+		{"Decode", Func, 0},
+		{"DecodeConfig", Func, 0},
+		{"DefaultCompression", Const, 4},
+		{"Encode", Func, 0},
+		{"Encoder", Type, 4},
+		{"Encoder.BufferPool", Field, 9},
+		{"Encoder.CompressionLevel", Field, 4},
+		{"EncoderBuffer", Type, 9},
+		{"EncoderBufferPool", Type, 9},
+		{"FormatError", Type, 0},
+		{"NoCompression", Const, 4},
+		{"UnsupportedError", Type, 0},
+	},
+	"index/suffixarray": {
+		{"(*Index).Bytes", Method, 0},
+		{"(*Index).FindAllIndex", Method, 0},
+		{"(*Index).Lookup", Method, 0},
+		{"(*Index).Read", Method, 0},
+		{"(*Index).Write", Method, 0},
+		{"Index", Type, 0},
+		{"New", Func, 0},
+	},
+	"io": {
+		{"(*LimitedReader).Read", Method, 0},
+		{"(*OffsetWriter).Seek", Method, 20},
+		{"(*OffsetWriter).Write", Method, 20},
+		{"(*OffsetWriter).WriteAt", Method, 20},
+		{"(*PipeReader).Close", Method, 0},
+		{"(*PipeReader).CloseWithError", Method, 0},
+		{"(*PipeReader).Read", Method, 0},
+		{"(*PipeWriter).Close", Method, 0},
+		{"(*PipeWriter).CloseWithError", Method, 0},
+		{"(*PipeWriter).Write", Method, 0},
+		{"(*SectionReader).Outer", Method, 22},
+		{"(*SectionReader).Read", Method, 0},
+		{"(*SectionReader).ReadAt", Method, 0},
+		{"(*SectionReader).Seek", Method, 0},
+		{"(*SectionReader).Size", Method, 0},
+		{"ByteReader", Type, 0},
+		{"ByteScanner", Type, 0},
+		{"ByteWriter", Type, 1},
+		{"Closer", Type, 0},
+		{"Copy", Func, 0},
+		{"CopyBuffer", Func, 5},
+		{"CopyN", Func, 0},
+		{"Discard", Var, 16},
+		{"EOF", Var, 0},
+		{"ErrClosedPipe", Var, 0},
+		{"ErrNoProgress", Var, 1},
+		{"ErrShortBuffer", Var, 0},
+		{"ErrShortWrite", Var, 0},
+		{"ErrUnexpectedEOF", Var, 0},
+		{"LimitReader", Func, 0},
+		{"LimitedReader", Type, 0},
+		{"LimitedReader.N", Field, 0},
+		{"LimitedReader.R", Field, 0},
+		{"MultiReader", Func, 0},
+		{"MultiWriter", Func, 0},
+		{"NewOffsetWriter", Func, 20},
+		{"NewSectionReader", Func, 0},
+		{"NopCloser", Func, 16},
+		{"OffsetWriter", Type, 20},
+		{"Pipe", Func, 0},
+		{"PipeReader", Type, 0},
+		{"PipeWriter", Type, 0},
+		{"ReadAll", Func, 16},
+		{"ReadAtLeast", Func, 0},
+		{"ReadCloser", Type, 0},
+		{"ReadFull", Func, 0},
+		{"ReadSeekCloser", Type, 16},
+		{"ReadSeeker", Type, 0},
+		{"ReadWriteCloser", Type, 0},
+		{"ReadWriteSeeker", Type, 0},
+		{"ReadWriter", Type, 0},
+		{"Reader", Type, 0},
+		{"ReaderAt", Type, 0},
+		{"ReaderFrom", Type, 0},
+		{"RuneReader", Type, 0},
+		{"RuneScanner", Type, 0},
+		{"SectionReader", Type, 0},
+		{"SeekCurrent", Const, 7},
+		{"SeekEnd", Const, 7},
+		{"SeekStart", Const, 7},
+		{"Seeker", Type, 0},
+		{"StringWriter", Type, 12},
+		{"TeeReader", Func, 0},
+		{"WriteCloser", Type, 0},
+		{"WriteSeeker", Type, 0},
+		{"WriteString", Func, 0},
+		{"Writer", Type, 0},
+		{"WriterAt", Type, 0},
+		{"WriterTo", Type, 0},
+	},
+	"io/fs": {
+		{"(*PathError).Error", Method, 16},
+		{"(*PathError).Timeout", Method, 16},
+		{"(*PathError).Unwrap", Method, 16},
+		{"(FileMode).IsDir", Method, 16},
+		{"(FileMode).IsRegular", Method, 16},
+		{"(FileMode).Perm", Method, 16},
+		{"(FileMode).String", Method, 16},
+		{"(FileMode).Type", Method, 16},
+		{"DirEntry", Type, 16},
+		{"ErrClosed", Var, 16},
+		{"ErrExist", Var, 16},
+		{"ErrInvalid", Var, 16},
+		{"ErrNotExist", Var, 16},
+		{"ErrPermission", Var, 16},
+		{"FS", Type, 16},
+		{"File", Type, 16},
+		{"FileInfo", Type, 16},
+		{"FileInfoToDirEntry", Func, 17},
+		{"FileMode", Type, 16},
+		{"FormatDirEntry", Func, 21},
+		{"FormatFileInfo", Func, 21},
+		{"Glob", Func, 16},
+		{"GlobFS", Type, 16},
+		{"ModeAppend", Const, 16},
+		{"ModeCharDevice", Const, 16},
+		{"ModeDevice", Const, 16},
+		{"ModeDir", Const, 16},
+		{"ModeExclusive", Const, 16},
+		{"ModeIrregular", Const, 16},
+		{"ModeNamedPipe", Const, 16},
+		{"ModePerm", Const, 16},
+		{"ModeSetgid", Const, 16},
+		{"ModeSetuid", Const, 16},
+		{"ModeSocket", Const, 16},
+		{"ModeSticky", Const, 16},
+		{"ModeSymlink", Const, 16},
+		{"ModeTemporary", Const, 16},
+		{"ModeType", Const, 16},
+		{"PathError", Type, 16},
+		{"PathError.Err", Field, 16},
+		{"PathError.Op", Field, 16},
+		{"PathError.Path", Field, 16},
+		{"ReadDir", Func, 16},
+		{"ReadDirFS", Type, 16},
+		{"ReadDirFile", Type, 16},
+		{"ReadFile", Func, 16},
+		{"ReadFileFS", Type, 16},
+		{"SkipAll", Var, 20},
+		{"SkipDir", Var, 16},
+		{"Stat", Func, 16},
+		{"StatFS", Type, 16},
+		{"Sub", Func, 16},
+		{"SubFS", Type, 16},
+		{"ValidPath", Func, 16},
+		{"WalkDir", Func, 16},
+		{"WalkDirFunc", Type, 16},
+	},
+	"io/ioutil": {
+		{"Discard", Var, 0},
+		{"NopCloser", Func, 0},
+		{"ReadAll", Func, 0},
+		{"ReadDir", Func, 0},
+		{"ReadFile", Func, 0},
+		{"TempDir", Func, 0},
+		{"TempFile", Func, 0},
+		{"WriteFile", Func, 0},
+	},
+	"log": {
+		{"(*Logger).Fatal", Method, 0},
+		{"(*Logger).Fatalf", Method, 0},
+		{"(*Logger).Fatalln", Method, 0},
+		{"(*Logger).Flags", Method, 0},
+		{"(*Logger).Output", Method, 0},
+		{"(*Logger).Panic", Method, 0},
+		{"(*Logger).Panicf", Method, 0},
+		{"(*Logger).Panicln", Method, 0},
+		{"(*Logger).Prefix", Method, 0},
+		{"(*Logger).Print", Method, 0},
+		{"(*Logger).Printf", Method, 0},
+		{"(*Logger).Println", Method, 0},
+		{"(*Logger).SetFlags", Method, 0},
+		{"(*Logger).SetOutput", Method, 5},
+		{"(*Logger).SetPrefix", Method, 0},
+		{"(*Logger).Writer", Method, 12},
+		{"Default", Func, 16},
+		{"Fatal", Func, 0},
+		{"Fatalf", Func, 0},
+		{"Fatalln", Func, 0},
+		{"Flags", Func, 0},
+		{"LUTC", Const, 5},
+		{"Ldate", Const, 0},
+		{"Llongfile", Const, 0},
+		{"Lmicroseconds", Const, 0},
+		{"Lmsgprefix", Const, 14},
+		{"Logger", Type, 0},
+		{"Lshortfile", Const, 0},
+		{"LstdFlags", Const, 0},
+		{"Ltime", Const, 0},
+		{"New", Func, 0},
+		{"Output", Func, 5},
+		{"Panic", Func, 0},
+		{"Panicf", Func, 0},
+		{"Panicln", Func, 0},
+		{"Prefix", Func, 0},
+		{"Print", Func, 0},
+		{"Printf", Func, 0},
+		{"Println", Func, 0},
+		{"SetFlags", Func, 0},
+		{"SetOutput", Func, 0},
+		{"SetPrefix", Func, 0},
+		{"Writer", Func, 13},
+	},
+	"log/slog": {
+		{"(*JSONHandler).Enabled", Method, 21},
+		{"(*JSONHandler).Handle", Method, 21},
+		{"(*JSONHandler).WithAttrs", Method, 21},
+		{"(*JSONHandler).WithGroup", Method, 21},
+		{"(*Level).UnmarshalJSON", Method, 21},
+		{"(*Level).UnmarshalText", Method, 21},
+		{"(*LevelVar).Level", Method, 21},
+		{"(*LevelVar).MarshalText", Method, 21},
+		{"(*LevelVar).Set", Method, 21},
+		{"(*LevelVar).String", Method, 21},
+		{"(*LevelVar).UnmarshalText", Method, 21},
+		{"(*Logger).Debug", Method, 21},
+		{"(*Logger).DebugContext", Method, 21},
+		{"(*Logger).Enabled", Method, 21},
+		{"(*Logger).Error", Method, 21},
+		{"(*Logger).ErrorContext", Method, 21},
+		{"(*Logger).Handler", Method, 21},
+		{"(*Logger).Info", Method, 21},
+		{"(*Logger).InfoContext", Method, 21},
+		{"(*Logger).Log", Method, 21},
+		{"(*Logger).LogAttrs", Method, 21},
+		{"(*Logger).Warn", Method, 21},
+		{"(*Logger).WarnContext", Method, 21},
+		{"(*Logger).With", Method, 21},
+		{"(*Logger).WithGroup", Method, 21},
+		{"(*Record).Add", Method, 21},
+		{"(*Record).AddAttrs", Method, 21},
+		{"(*TextHandler).Enabled", Method, 21},
+		{"(*TextHandler).Handle", Method, 21},
+		{"(*TextHandler).WithAttrs", Method, 21},
+		{"(*TextHandler).WithGroup", Method, 21},
+		{"(Attr).Equal", Method, 21},
+		{"(Attr).String", Method, 21},
+		{"(Kind).String", Method, 21},
+		{"(Level).Level", Method, 21},
+		{"(Level).MarshalJSON", Method, 21},
+		{"(Level).MarshalText", Method, 21},
+		{"(Level).String", Method, 21},
+		{"(Record).Attrs", Method, 21},
+		{"(Record).Clone", Method, 21},
+		{"(Record).NumAttrs", Method, 21},
+		{"(Value).Any", Method, 21},
+		{"(Value).Bool", Method, 21},
+		{"(Value).Duration", Method, 21},
+		{"(Value).Equal", Method, 21},
+		{"(Value).Float64", Method, 21},
+		{"(Value).Group", Method, 21},
+		{"(Value).Int64", Method, 21},
+		{"(Value).Kind", Method, 21},
+		{"(Value).LogValuer", Method, 21},
+		{"(Value).Resolve", Method, 21},
+		{"(Value).String", Method, 21},
+		{"(Value).Time", Method, 21},
+		{"(Value).Uint64", Method, 21},
+		{"Any", Func, 21},
+		{"AnyValue", Func, 21},
+		{"Attr", Type, 21},
+		{"Attr.Key", Field, 21},
+		{"Attr.Value", Field, 21},
+		{"Bool", Func, 21},
+		{"BoolValue", Func, 21},
+		{"Debug", Func, 21},
+		{"DebugContext", Func, 21},
+		{"Default", Func, 21},
+		{"Duration", Func, 21},
+		{"DurationValue", Func, 21},
+		{"Error", Func, 21},
+		{"ErrorContext", Func, 21},
+		{"Float64", Func, 21},
+		{"Float64Value", Func, 21},
+		{"Group", Func, 21},
+		{"GroupValue", Func, 21},
+		{"Handler", Type, 21},
+		{"HandlerOptions", Type, 21},
+		{"HandlerOptions.AddSource", Field, 21},
+		{"HandlerOptions.Level", Field, 21},
+		{"HandlerOptions.ReplaceAttr", Field, 21},
+		{"Info", Func, 21},
+		{"InfoContext", Func, 21},
+		{"Int", Func, 21},
+		{"Int64", Func, 21},
+		{"Int64Value", Func, 21},
+		{"IntValue", Func, 21},
+		{"JSONHandler", Type, 21},
+		{"Kind", Type, 21},
+		{"KindAny", Const, 21},
+		{"KindBool", Const, 21},
+		{"KindDuration", Const, 21},
+		{"KindFloat64", Const, 21},
+		{"KindGroup", Const, 21},
+		{"KindInt64", Const, 21},
+		{"KindLogValuer", Const, 21},
+		{"KindString", Const, 21},
+		{"KindTime", Const, 21},
+		{"KindUint64", Const, 21},
+		{"Level", Type, 21},
+		{"LevelDebug", Const, 21},
+		{"LevelError", Const, 21},
+		{"LevelInfo", Const, 21},
+		{"LevelKey", Const, 21},
+		{"LevelVar", Type, 21},
+		{"LevelWarn", Const, 21},
+		{"Leveler", Type, 21},
+		{"Log", Func, 21},
+		{"LogAttrs", Func, 21},
+		{"LogValuer", Type, 21},
+		{"Logger", Type, 21},
+		{"MessageKey", Const, 21},
+		{"New", Func, 21},
+		{"NewJSONHandler", Func, 21},
+		{"NewLogLogger", Func, 21},
+		{"NewRecord", Func, 21},
+		{"NewTextHandler", Func, 21},
+		{"Record", Type, 21},
+		{"Record.Level", Field, 21},
+		{"Record.Message", Field, 21},
+		{"Record.PC", Field, 21},
+		{"Record.Time", Field, 21},
+		{"SetDefault", Func, 21},
+		{"SetLogLoggerLevel", Func, 22},
+		{"Source", Type, 21},
+		{"Source.File", Field, 21},
+		{"Source.Function", Field, 21},
+		{"Source.Line", Field, 21},
+		{"SourceKey", Const, 21},
+		{"String", Func, 21},
+		{"StringValue", Func, 21},
+		{"TextHandler", Type, 21},
+		{"Time", Func, 21},
+		{"TimeKey", Const, 21},
+		{"TimeValue", Func, 21},
+		{"Uint64", Func, 21},
+		{"Uint64Value", Func, 21},
+		{"Value", Type, 21},
+		{"Warn", Func, 21},
+		{"WarnContext", Func, 21},
+		{"With", Func, 21},
+	},
+	"log/syslog": {
+		{"(*Writer).Alert", Method, 0},
+		{"(*Writer).Close", Method, 0},
+		{"(*Writer).Crit", Method, 0},
+		{"(*Writer).Debug", Method, 0},
+		{"(*Writer).Emerg", Method, 0},
+		{"(*Writer).Err", Method, 0},
+		{"(*Writer).Info", Method, 0},
+		{"(*Writer).Notice", Method, 0},
+		{"(*Writer).Warning", Method, 0},
+		{"(*Writer).Write", Method, 0},
+		{"Dial", Func, 0},
+		{"LOG_ALERT", Const, 0},
+		{"LOG_AUTH", Const, 1},
+		{"LOG_AUTHPRIV", Const, 1},
+		{"LOG_CRIT", Const, 0},
+		{"LOG_CRON", Const, 1},
+		{"LOG_DAEMON", Const, 1},
+		{"LOG_DEBUG", Const, 0},
+		{"LOG_EMERG", Const, 0},
+		{"LOG_ERR", Const, 0},
+		{"LOG_FTP", Const, 1},
+		{"LOG_INFO", Const, 0},
+		{"LOG_KERN", Const, 1},
+		{"LOG_LOCAL0", Const, 1},
+		{"LOG_LOCAL1", Const, 1},
+		{"LOG_LOCAL2", Const, 1},
+		{"LOG_LOCAL3", Const, 1},
+		{"LOG_LOCAL4", Const, 1},
+		{"LOG_LOCAL5", Const, 1},
+		{"LOG_LOCAL6", Const, 1},
+		{"LOG_LOCAL7", Const, 1},
+		{"LOG_LPR", Const, 1},
+		{"LOG_MAIL", Const, 1},
+		{"LOG_NEWS", Const, 1},
+		{"LOG_NOTICE", Const, 0},
+		{"LOG_SYSLOG", Const, 1},
+		{"LOG_USER", Const, 1},
+		{"LOG_UUCP", Const, 1},
+		{"LOG_WARNING", Const, 0},
+		{"New", Func, 0},
+		{"NewLogger", Func, 0},
+		{"Priority", Type, 0},
+		{"Writer", Type, 0},
+	},
+	"maps": {
+		{"Clone", Func, 21},
+		{"Copy", Func, 21},
+		{"DeleteFunc", Func, 21},
+		{"Equal", Func, 21},
+		{"EqualFunc", Func, 21},
+	},
+	"math": {
+		{"Abs", Func, 0},
+		{"Acos", Func, 0},
+		{"Acosh", Func, 0},
+		{"Asin", Func, 0},
+		{"Asinh", Func, 0},
+		{"Atan", Func, 0},
+		{"Atan2", Func, 0},
+		{"Atanh", Func, 0},
+		{"Cbrt", Func, 0},
+		{"Ceil", Func, 0},
+		{"Copysign", Func, 0},
+		{"Cos", Func, 0},
+		{"Cosh", Func, 0},
+		{"Dim", Func, 0},
+		{"E", Const, 0},
+		{"Erf", Func, 0},
+		{"Erfc", Func, 0},
+		{"Erfcinv", Func, 10},
+		{"Erfinv", Func, 10},
+		{"Exp", Func, 0},
+		{"Exp2", Func, 0},
+		{"Expm1", Func, 0},
+		{"FMA", Func, 14},
+		{"Float32bits", Func, 0},
+		{"Float32frombits", Func, 0},
+		{"Float64bits", Func, 0},
+		{"Float64frombits", Func, 0},
+		{"Floor", Func, 0},
+		{"Frexp", Func, 0},
+		{"Gamma", Func, 0},
+		{"Hypot", Func, 0},
+		{"Ilogb", Func, 0},
+		{"Inf", Func, 0},
+		{"IsInf", Func, 0},
+		{"IsNaN", Func, 0},
+		{"J0", Func, 0},
+		{"J1", Func, 0},
+		{"Jn", Func, 0},
+		{"Ldexp", Func, 0},
+		{"Lgamma", Func, 0},
+		{"Ln10", Const, 0},
+		{"Ln2", Const, 0},
+		{"Log", Func, 0},
+		{"Log10", Func, 0},
+		{"Log10E", Const, 0},
+		{"Log1p", Func, 0},
+		{"Log2", Func, 0},
+		{"Log2E", Const, 0},
+		{"Logb", Func, 0},
+		{"Max", Func, 0},
+		{"MaxFloat32", Const, 0},
+		{"MaxFloat64", Const, 0},
+		{"MaxInt", Const, 17},
+		{"MaxInt16", Const, 0},
+		{"MaxInt32", Const, 0},
+		{"MaxInt64", Const, 0},
+		{"MaxInt8", Const, 0},
+		{"MaxUint", Const, 17},
+		{"MaxUint16", Const, 0},
+		{"MaxUint32", Const, 0},
+		{"MaxUint64", Const, 0},
+		{"MaxUint8", Const, 0},
+		{"Min", Func, 0},
+		{"MinInt", Const, 17},
+		{"MinInt16", Const, 0},
+		{"MinInt32", Const, 0},
+		{"MinInt64", Const, 0},
+		{"MinInt8", Const, 0},
+		{"Mod", Func, 0},
+		{"Modf", Func, 0},
+		{"NaN", Func, 0},
+		{"Nextafter", Func, 0},
+		{"Nextafter32", Func, 4},
+		{"Phi", Const, 0},
+		{"Pi", Const, 0},
+		{"Pow", Func, 0},
+		{"Pow10", Func, 0},
+		{"Remainder", Func, 0},
+		{"Round", Func, 10},
+		{"RoundToEven", Func, 10},
+		{"Signbit", Func, 0},
+		{"Sin", Func, 0},
+		{"Sincos", Func, 0},
+		{"Sinh", Func, 0},
+		{"SmallestNonzeroFloat32", Const, 0},
+		{"SmallestNonzeroFloat64", Const, 0},
+		{"Sqrt", Func, 0},
+		{"Sqrt2", Const, 0},
+		{"SqrtE", Const, 0},
+		{"SqrtPhi", Const, 0},
+		{"SqrtPi", Const, 0},
+		{"Tan", Func, 0},
+		{"Tanh", Func, 0},
+		{"Trunc", Func, 0},
+		{"Y0", Func, 0},
+		{"Y1", Func, 0},
+		{"Yn", Func, 0},
+	},
+	"math/big": {
+		{"(*Float).Abs", Method, 5},
+		{"(*Float).Acc", Method, 5},
+		{"(*Float).Add", Method, 5},
+		{"(*Float).Append", Method, 5},
+		{"(*Float).Cmp", Method, 5},
+		{"(*Float).Copy", Method, 5},
+		{"(*Float).Float32", Method, 5},
+		{"(*Float).Float64", Method, 5},
+		{"(*Float).Format", Method, 5},
+		{"(*Float).GobDecode", Method, 7},
+		{"(*Float).GobEncode", Method, 7},
+		{"(*Float).Int", Method, 5},
+		{"(*Float).Int64", Method, 5},
+		{"(*Float).IsInf", Method, 5},
+		{"(*Float).IsInt", Method, 5},
+		{"(*Float).MantExp", Method, 5},
+		{"(*Float).MarshalText", Method, 6},
+		{"(*Float).MinPrec", Method, 5},
+		{"(*Float).Mode", Method, 5},
+		{"(*Float).Mul", Method, 5},
+		{"(*Float).Neg", Method, 5},
+		{"(*Float).Parse", Method, 5},
+		{"(*Float).Prec", Method, 5},
+		{"(*Float).Quo", Method, 5},
+		{"(*Float).Rat", Method, 5},
+		{"(*Float).Scan", Method, 8},
+		{"(*Float).Set", Method, 5},
+		{"(*Float).SetFloat64", Method, 5},
+		{"(*Float).SetInf", Method, 5},
+		{"(*Float).SetInt", Method, 5},
+		{"(*Float).SetInt64", Method, 5},
+		{"(*Float).SetMantExp", Method, 5},
+		{"(*Float).SetMode", Method, 5},
+		{"(*Float).SetPrec", Method, 5},
+		{"(*Float).SetRat", Method, 5},
+		{"(*Float).SetString", Method, 5},
+		{"(*Float).SetUint64", Method, 5},
+		{"(*Float).Sign", Method, 5},
+		{"(*Float).Signbit", Method, 5},
+		{"(*Float).Sqrt", Method, 10},
+		{"(*Float).String", Method, 5},
+		{"(*Float).Sub", Method, 5},
+		{"(*Float).Text", Method, 5},
+		{"(*Float).Uint64", Method, 5},
+		{"(*Float).UnmarshalText", Method, 6},
+		{"(*Int).Abs", Method, 0},
+		{"(*Int).Add", Method, 0},
+		{"(*Int).And", Method, 0},
+		{"(*Int).AndNot", Method, 0},
+		{"(*Int).Append", Method, 6},
+		{"(*Int).Binomial", Method, 0},
+		{"(*Int).Bit", Method, 0},
+		{"(*Int).BitLen", Method, 0},
+		{"(*Int).Bits", Method, 0},
+		{"(*Int).Bytes", Method, 0},
+		{"(*Int).Cmp", Method, 0},
+		{"(*Int).CmpAbs", Method, 10},
+		{"(*Int).Div", Method, 0},
+		{"(*Int).DivMod", Method, 0},
+		{"(*Int).Exp", Method, 0},
+		{"(*Int).FillBytes", Method, 15},
+		{"(*Int).Float64", Method, 21},
+		{"(*Int).Format", Method, 0},
+		{"(*Int).GCD", Method, 0},
+		{"(*Int).GobDecode", Method, 0},
+		{"(*Int).GobEncode", Method, 0},
+		{"(*Int).Int64", Method, 0},
+		{"(*Int).IsInt64", Method, 9},
+		{"(*Int).IsUint64", Method, 9},
+		{"(*Int).Lsh", Method, 0},
+		{"(*Int).MarshalJSON", Method, 1},
+		{"(*Int).MarshalText", Method, 3},
+		{"(*Int).Mod", Method, 0},
+		{"(*Int).ModInverse", Method, 0},
+		{"(*Int).ModSqrt", Method, 5},
+		{"(*Int).Mul", Method, 0},
+		{"(*Int).MulRange", Method, 0},
+		{"(*Int).Neg", Method, 0},
+		{"(*Int).Not", Method, 0},
+		{"(*Int).Or", Method, 0},
+		{"(*Int).ProbablyPrime", Method, 0},
+		{"(*Int).Quo", Method, 0},
+		{"(*Int).QuoRem", Method, 0},
+		{"(*Int).Rand", Method, 0},
+		{"(*Int).Rem", Method, 0},
+		{"(*Int).Rsh", Method, 0},
+		{"(*Int).Scan", Method, 0},
+		{"(*Int).Set", Method, 0},
+		{"(*Int).SetBit", Method, 0},
+		{"(*Int).SetBits", Method, 0},
+		{"(*Int).SetBytes", Method, 0},
+		{"(*Int).SetInt64", Method, 0},
+		{"(*Int).SetString", Method, 0},
+		{"(*Int).SetUint64", Method, 1},
+		{"(*Int).Sign", Method, 0},
+		{"(*Int).Sqrt", Method, 8},
+		{"(*Int).String", Method, 0},
+		{"(*Int).Sub", Method, 0},
+		{"(*Int).Text", Method, 6},
+		{"(*Int).TrailingZeroBits", Method, 13},
+		{"(*Int).Uint64", Method, 1},
+		{"(*Int).UnmarshalJSON", Method, 1},
+		{"(*Int).UnmarshalText", Method, 3},
+		{"(*Int).Xor", Method, 0},
+		{"(*Rat).Abs", Method, 0},
+		{"(*Rat).Add", Method, 0},
+		{"(*Rat).Cmp", Method, 0},
+		{"(*Rat).Denom", Method, 0},
+		{"(*Rat).Float32", Method, 4},
+		{"(*Rat).Float64", Method, 1},
+		{"(*Rat).FloatPrec", Method, 22},
+		{"(*Rat).FloatString", Method, 0},
+		{"(*Rat).GobDecode", Method, 0},
+		{"(*Rat).GobEncode", Method, 0},
+		{"(*Rat).Inv", Method, 0},
+		{"(*Rat).IsInt", Method, 0},
+		{"(*Rat).MarshalText", Method, 3},
+		{"(*Rat).Mul", Method, 0},
+		{"(*Rat).Neg", Method, 0},
+		{"(*Rat).Num", Method, 0},
+		{"(*Rat).Quo", Method, 0},
+		{"(*Rat).RatString", Method, 0},
+		{"(*Rat).Scan", Method, 0},
+		{"(*Rat).Set", Method, 0},
+		{"(*Rat).SetFloat64", Method, 1},
+		{"(*Rat).SetFrac", Method, 0},
+		{"(*Rat).SetFrac64", Method, 0},
+		{"(*Rat).SetInt", Method, 0},
+		{"(*Rat).SetInt64", Method, 0},
+		{"(*Rat).SetString", Method, 0},
+		{"(*Rat).SetUint64", Method, 13},
+		{"(*Rat).Sign", Method, 0},
+		{"(*Rat).String", Method, 0},
+		{"(*Rat).Sub", Method, 0},
+		{"(*Rat).UnmarshalText", Method, 3},
+		{"(Accuracy).String", Method, 5},
+		{"(ErrNaN).Error", Method, 5},
+		{"(RoundingMode).String", Method, 5},
+		{"Above", Const, 5},
+		{"Accuracy", Type, 5},
+		{"AwayFromZero", Const, 5},
+		{"Below", Const, 5},
+		{"ErrNaN", Type, 5},
+		{"Exact", Const, 5},
+		{"Float", Type, 5},
+		{"Int", Type, 0},
+		{"Jacobi", Func, 5},
+		{"MaxBase", Const, 0},
+		{"MaxExp", Const, 5},
+		{"MaxPrec", Const, 5},
+		{"MinExp", Const, 5},
+		{"NewFloat", Func, 5},
+		{"NewInt", Func, 0},
+		{"NewRat", Func, 0},
+		{"ParseFloat", Func, 5},
+		{"Rat", Type, 0},
+		{"RoundingMode", Type, 5},
+		{"ToNearestAway", Const, 5},
+		{"ToNearestEven", Const, 5},
+		{"ToNegativeInf", Const, 5},
+		{"ToPositiveInf", Const, 5},
+		{"ToZero", Const, 5},
+		{"Word", Type, 0},
+	},
+	"math/bits": {
+		{"Add", Func, 12},
+		{"Add32", Func, 12},
+		{"Add64", Func, 12},
+		{"Div", Func, 12},
+		{"Div32", Func, 12},
+		{"Div64", Func, 12},
+		{"LeadingZeros", Func, 9},
+		{"LeadingZeros16", Func, 9},
+		{"LeadingZeros32", Func, 9},
+		{"LeadingZeros64", Func, 9},
+		{"LeadingZeros8", Func, 9},
+		{"Len", Func, 9},
+		{"Len16", Func, 9},
+		{"Len32", Func, 9},
+		{"Len64", Func, 9},
+		{"Len8", Func, 9},
+		{"Mul", Func, 12},
+		{"Mul32", Func, 12},
+		{"Mul64", Func, 12},
+		{"OnesCount", Func, 9},
+		{"OnesCount16", Func, 9},
+		{"OnesCount32", Func, 9},
+		{"OnesCount64", Func, 9},
+		{"OnesCount8", Func, 9},
+		{"Rem", Func, 14},
+		{"Rem32", Func, 14},
+		{"Rem64", Func, 14},
+		{"Reverse", Func, 9},
+		{"Reverse16", Func, 9},
+		{"Reverse32", Func, 9},
+		{"Reverse64", Func, 9},
+		{"Reverse8", Func, 9},
+		{"ReverseBytes", Func, 9},
+		{"ReverseBytes16", Func, 9},
+		{"ReverseBytes32", Func, 9},
+		{"ReverseBytes64", Func, 9},
+		{"RotateLeft", Func, 9},
+		{"RotateLeft16", Func, 9},
+		{"RotateLeft32", Func, 9},
+		{"RotateLeft64", Func, 9},
+		{"RotateLeft8", Func, 9},
+		{"Sub", Func, 12},
+		{"Sub32", Func, 12},
+		{"Sub64", Func, 12},
+		{"TrailingZeros", Func, 9},
+		{"TrailingZeros16", Func, 9},
+		{"TrailingZeros32", Func, 9},
+		{"TrailingZeros64", Func, 9},
+		{"TrailingZeros8", Func, 9},
+		{"UintSize", Const, 9},
+	},
+	"math/cmplx": {
+		{"Abs", Func, 0},
+		{"Acos", Func, 0},
+		{"Acosh", Func, 0},
+		{"Asin", Func, 0},
+		{"Asinh", Func, 0},
+		{"Atan", Func, 0},
+		{"Atanh", Func, 0},
+		{"Conj", Func, 0},
+		{"Cos", Func, 0},
+		{"Cosh", Func, 0},
+		{"Cot", Func, 0},
+		{"Exp", Func, 0},
+		{"Inf", Func, 0},
+		{"IsInf", Func, 0},
+		{"IsNaN", Func, 0},
+		{"Log", Func, 0},
+		{"Log10", Func, 0},
+		{"NaN", Func, 0},
+		{"Phase", Func, 0},
+		{"Polar", Func, 0},
+		{"Pow", Func, 0},
+		{"Rect", Func, 0},
+		{"Sin", Func, 0},
+		{"Sinh", Func, 0},
+		{"Sqrt", Func, 0},
+		{"Tan", Func, 0},
+		{"Tanh", Func, 0},
+	},
+	"math/rand": {
+		{"(*Rand).ExpFloat64", Method, 0},
+		{"(*Rand).Float32", Method, 0},
+		{"(*Rand).Float64", Method, 0},
+		{"(*Rand).Int", Method, 0},
+		{"(*Rand).Int31", Method, 0},
+		{"(*Rand).Int31n", Method, 0},
+		{"(*Rand).Int63", Method, 0},
+		{"(*Rand).Int63n", Method, 0},
+		{"(*Rand).Intn", Method, 0},
+		{"(*Rand).NormFloat64", Method, 0},
+		{"(*Rand).Perm", Method, 0},
+		{"(*Rand).Read", Method, 6},
+		{"(*Rand).Seed", Method, 0},
+		{"(*Rand).Shuffle", Method, 10},
+		{"(*Rand).Uint32", Method, 0},
+		{"(*Rand).Uint64", Method, 8},
+		{"(*Zipf).Uint64", Method, 0},
+		{"ExpFloat64", Func, 0},
+		{"Float32", Func, 0},
+		{"Float64", Func, 0},
+		{"Int", Func, 0},
+		{"Int31", Func, 0},
+		{"Int31n", Func, 0},
+		{"Int63", Func, 0},
+		{"Int63n", Func, 0},
+		{"Intn", Func, 0},
+		{"New", Func, 0},
+		{"NewSource", Func, 0},
+		{"NewZipf", Func, 0},
+		{"NormFloat64", Func, 0},
+		{"Perm", Func, 0},
+		{"Rand", Type, 0},
+		{"Read", Func, 6},
+		{"Seed", Func, 0},
+		{"Shuffle", Func, 10},
+		{"Source", Type, 0},
+		{"Source64", Type, 8},
+		{"Uint32", Func, 0},
+		{"Uint64", Func, 8},
+		{"Zipf", Type, 0},
+	},
+	"math/rand/v2": {
+		{"(*ChaCha8).MarshalBinary", Method, 22},
+		{"(*ChaCha8).Seed", Method, 22},
+		{"(*ChaCha8).Uint64", Method, 22},
+		{"(*ChaCha8).UnmarshalBinary", Method, 22},
+		{"(*PCG).MarshalBinary", Method, 22},
+		{"(*PCG).Seed", Method, 22},
+		{"(*PCG).Uint64", Method, 22},
+		{"(*PCG).UnmarshalBinary", Method, 22},
+		{"(*Rand).ExpFloat64", Method, 22},
+		{"(*Rand).Float32", Method, 22},
+		{"(*Rand).Float64", Method, 22},
+		{"(*Rand).Int", Method, 22},
+		{"(*Rand).Int32", Method, 22},
+		{"(*Rand).Int32N", Method, 22},
+		{"(*Rand).Int64", Method, 22},
+		{"(*Rand).Int64N", Method, 22},
+		{"(*Rand).IntN", Method, 22},
+		{"(*Rand).NormFloat64", Method, 22},
+		{"(*Rand).Perm", Method, 22},
+		{"(*Rand).Shuffle", Method, 22},
+		{"(*Rand).Uint32", Method, 22},
+		{"(*Rand).Uint32N", Method, 22},
+		{"(*Rand).Uint64", Method, 22},
+		{"(*Rand).Uint64N", Method, 22},
+		{"(*Rand).UintN", Method, 22},
+		{"(*Zipf).Uint64", Method, 22},
+		{"ChaCha8", Type, 22},
+		{"ExpFloat64", Func, 22},
+		{"Float32", Func, 22},
+		{"Float64", Func, 22},
+		{"Int", Func, 22},
+		{"Int32", Func, 22},
+		{"Int32N", Func, 22},
+		{"Int64", Func, 22},
+		{"Int64N", Func, 22},
+		{"IntN", Func, 22},
+		{"N", Func, 22},
+		{"New", Func, 22},
+		{"NewChaCha8", Func, 22},
+		{"NewPCG", Func, 22},
+		{"NewZipf", Func, 22},
+		{"NormFloat64", Func, 22},
+		{"PCG", Type, 22},
+		{"Perm", Func, 22},
+		{"Rand", Type, 22},
+		{"Shuffle", Func, 22},
+		{"Source", Type, 22},
+		{"Uint32", Func, 22},
+		{"Uint32N", Func, 22},
+		{"Uint64", Func, 22},
+		{"Uint64N", Func, 22},
+		{"UintN", Func, 22},
+		{"Zipf", Type, 22},
+	},
+	"mime": {
+		{"(*WordDecoder).Decode", Method, 5},
+		{"(*WordDecoder).DecodeHeader", Method, 5},
+		{"(WordEncoder).Encode", Method, 5},
+		{"AddExtensionType", Func, 0},
+		{"BEncoding", Const, 5},
+		{"ErrInvalidMediaParameter", Var, 9},
+		{"ExtensionsByType", Func, 5},
+		{"FormatMediaType", Func, 0},
+		{"ParseMediaType", Func, 0},
+		{"QEncoding", Const, 5},
+		{"TypeByExtension", Func, 0},
+		{"WordDecoder", Type, 5},
+		{"WordDecoder.CharsetReader", Field, 5},
+		{"WordEncoder", Type, 5},
+	},
+	"mime/multipart": {
+		{"(*FileHeader).Open", Method, 0},
+		{"(*Form).RemoveAll", Method, 0},
+		{"(*Part).Close", Method, 0},
+		{"(*Part).FileName", Method, 0},
+		{"(*Part).FormName", Method, 0},
+		{"(*Part).Read", Method, 0},
+		{"(*Reader).NextPart", Method, 0},
+		{"(*Reader).NextRawPart", Method, 14},
+		{"(*Reader).ReadForm", Method, 0},
+		{"(*Writer).Boundary", Method, 0},
+		{"(*Writer).Close", Method, 0},
+		{"(*Writer).CreateFormField", Method, 0},
+		{"(*Writer).CreateFormFile", Method, 0},
+		{"(*Writer).CreatePart", Method, 0},
+		{"(*Writer).FormDataContentType", Method, 0},
+		{"(*Writer).SetBoundary", Method, 1},
+		{"(*Writer).WriteField", Method, 0},
+		{"ErrMessageTooLarge", Var, 9},
+		{"File", Type, 0},
+		{"FileHeader", Type, 0},
+		{"FileHeader.Filename", Field, 0},
+		{"FileHeader.Header", Field, 0},
+		{"FileHeader.Size", Field, 9},
+		{"Form", Type, 0},
+		{"Form.File", Field, 0},
+		{"Form.Value", Field, 0},
+		{"NewReader", Func, 0},
+		{"NewWriter", Func, 0},
+		{"Part", Type, 0},
+		{"Part.Header", Field, 0},
+		{"Reader", Type, 0},
+		{"Writer", Type, 0},
+	},
+	"mime/quotedprintable": {
+		{"(*Reader).Read", Method, 5},
+		{"(*Writer).Close", Method, 5},
+		{"(*Writer).Write", Method, 5},
+		{"NewReader", Func, 5},
+		{"NewWriter", Func, 5},
+		{"Reader", Type, 5},
+		{"Writer", Type, 5},
+		{"Writer.Binary", Field, 5},
+	},
+	"net": {
+		{"(*AddrError).Error", Method, 0},
+		{"(*AddrError).Temporary", Method, 0},
+		{"(*AddrError).Timeout", Method, 0},
+		{"(*Buffers).Read", Method, 8},
+		{"(*Buffers).WriteTo", Method, 8},
+		{"(*DNSConfigError).Error", Method, 0},
+		{"(*DNSConfigError).Temporary", Method, 0},
+		{"(*DNSConfigError).Timeout", Method, 0},
+		{"(*DNSConfigError).Unwrap", Method, 13},
+		{"(*DNSError).Error", Method, 0},
+		{"(*DNSError).Temporary", Method, 0},
+		{"(*DNSError).Timeout", Method, 0},
+		{"(*Dialer).Dial", Method, 1},
+		{"(*Dialer).DialContext", Method, 7},
+		{"(*Dialer).MultipathTCP", Method, 21},
+		{"(*Dialer).SetMultipathTCP", Method, 21},
+		{"(*IP).UnmarshalText", Method, 2},
+		{"(*IPAddr).Network", Method, 0},
+		{"(*IPAddr).String", Method, 0},
+		{"(*IPConn).Close", Method, 0},
+		{"(*IPConn).File", Method, 0},
+		{"(*IPConn).LocalAddr", Method, 0},
+		{"(*IPConn).Read", Method, 0},
+		{"(*IPConn).ReadFrom", Method, 0},
+		{"(*IPConn).ReadFromIP", Method, 0},
+		{"(*IPConn).ReadMsgIP", Method, 1},
+		{"(*IPConn).RemoteAddr", Method, 0},
+		{"(*IPConn).SetDeadline", Method, 0},
+		{"(*IPConn).SetReadBuffer", Method, 0},
+		{"(*IPConn).SetReadDeadline", Method, 0},
+		{"(*IPConn).SetWriteBuffer", Method, 0},
+		{"(*IPConn).SetWriteDeadline", Method, 0},
+		{"(*IPConn).SyscallConn", Method, 9},
+		{"(*IPConn).Write", Method, 0},
+		{"(*IPConn).WriteMsgIP", Method, 1},
+		{"(*IPConn).WriteTo", Method, 0},
+		{"(*IPConn).WriteToIP", Method, 0},
+		{"(*IPNet).Contains", Method, 0},
+		{"(*IPNet).Network", Method, 0},
+		{"(*IPNet).String", Method, 0},
+		{"(*Interface).Addrs", Method, 0},
+		{"(*Interface).MulticastAddrs", Method, 0},
+		{"(*ListenConfig).Listen", Method, 11},
+		{"(*ListenConfig).ListenPacket", Method, 11},
+		{"(*ListenConfig).MultipathTCP", Method, 21},
+		{"(*ListenConfig).SetMultipathTCP", Method, 21},
+		{"(*OpError).Error", Method, 0},
+		{"(*OpError).Temporary", Method, 0},
+		{"(*OpError).Timeout", Method, 0},
+		{"(*OpError).Unwrap", Method, 13},
+		{"(*ParseError).Error", Method, 0},
+		{"(*ParseError).Temporary", Method, 17},
+		{"(*ParseError).Timeout", Method, 17},
+		{"(*Resolver).LookupAddr", Method, 8},
+		{"(*Resolver).LookupCNAME", Method, 8},
+		{"(*Resolver).LookupHost", Method, 8},
+		{"(*Resolver).LookupIP", Method, 15},
+		{"(*Resolver).LookupIPAddr", Method, 8},
+		{"(*Resolver).LookupMX", Method, 8},
+		{"(*Resolver).LookupNS", Method, 8},
+		{"(*Resolver).LookupNetIP", Method, 18},
+		{"(*Resolver).LookupPort", Method, 8},
+		{"(*Resolver).LookupSRV", Method, 8},
+		{"(*Resolver).LookupTXT", Method, 8},
+		{"(*TCPAddr).AddrPort", Method, 18},
+		{"(*TCPAddr).Network", Method, 0},
+		{"(*TCPAddr).String", Method, 0},
+		{"(*TCPConn).Close", Method, 0},
+		{"(*TCPConn).CloseRead", Method, 0},
+		{"(*TCPConn).CloseWrite", Method, 0},
+		{"(*TCPConn).File", Method, 0},
+		{"(*TCPConn).LocalAddr", Method, 0},
+		{"(*TCPConn).MultipathTCP", Method, 21},
+		{"(*TCPConn).Read", Method, 0},
+		{"(*TCPConn).ReadFrom", Method, 0},
+		{"(*TCPConn).RemoteAddr", Method, 0},
+		{"(*TCPConn).SetDeadline", Method, 0},
+		{"(*TCPConn).SetKeepAlive", Method, 0},
+		{"(*TCPConn).SetKeepAlivePeriod", Method, 2},
+		{"(*TCPConn).SetLinger", Method, 0},
+		{"(*TCPConn).SetNoDelay", Method, 0},
+		{"(*TCPConn).SetReadBuffer", Method, 0},
+		{"(*TCPConn).SetReadDeadline", Method, 0},
+		{"(*TCPConn).SetWriteBuffer", Method, 0},
+		{"(*TCPConn).SetWriteDeadline", Method, 0},
+		{"(*TCPConn).SyscallConn", Method, 9},
+		{"(*TCPConn).Write", Method, 0},
+		{"(*TCPConn).WriteTo", Method, 22},
+		{"(*TCPListener).Accept", Method, 0},
+		{"(*TCPListener).AcceptTCP", Method, 0},
+		{"(*TCPListener).Addr", Method, 0},
+		{"(*TCPListener).Close", Method, 0},
+		{"(*TCPListener).File", Method, 0},
+		{"(*TCPListener).SetDeadline", Method, 0},
+		{"(*TCPListener).SyscallConn", Method, 10},
+		{"(*UDPAddr).AddrPort", Method, 18},
+		{"(*UDPAddr).Network", Method, 0},
+		{"(*UDPAddr).String", Method, 0},
+		{"(*UDPConn).Close", Method, 0},
+		{"(*UDPConn).File", Method, 0},
+		{"(*UDPConn).LocalAddr", Method, 0},
+		{"(*UDPConn).Read", Method, 0},
+		{"(*UDPConn).ReadFrom", Method, 0},
+		{"(*UDPConn).ReadFromUDP", Method, 0},
+		{"(*UDPConn).ReadFromUDPAddrPort", Method, 18},
+		{"(*UDPConn).ReadMsgUDP", Method, 1},
+		{"(*UDPConn).ReadMsgUDPAddrPort", Method, 18},
+		{"(*UDPConn).RemoteAddr", Method, 0},
+		{"(*UDPConn).SetDeadline", Method, 0},
+		{"(*UDPConn).SetReadBuffer", Method, 0},
+		{"(*UDPConn).SetReadDeadline", Method, 0},
+		{"(*UDPConn).SetWriteBuffer", Method, 0},
+		{"(*UDPConn).SetWriteDeadline", Method, 0},
+		{"(*UDPConn).SyscallConn", Method, 9},
+		{"(*UDPConn).Write", Method, 0},
+		{"(*UDPConn).WriteMsgUDP", Method, 1},
+		{"(*UDPConn).WriteMsgUDPAddrPort", Method, 18},
+		{"(*UDPConn).WriteTo", Method, 0},
+		{"(*UDPConn).WriteToUDP", Method, 0},
+		{"(*UDPConn).WriteToUDPAddrPort", Method, 18},
+		{"(*UnixAddr).Network", Method, 0},
+		{"(*UnixAddr).String", Method, 0},
+		{"(*UnixConn).Close", Method, 0},
+		{"(*UnixConn).CloseRead", Method, 1},
+		{"(*UnixConn).CloseWrite", Method, 1},
+		{"(*UnixConn).File", Method, 0},
+		{"(*UnixConn).LocalAddr", Method, 0},
+		{"(*UnixConn).Read", Method, 0},
+		{"(*UnixConn).ReadFrom", Method, 0},
+		{"(*UnixConn).ReadFromUnix", Method, 0},
+		{"(*UnixConn).ReadMsgUnix", Method, 0},
+		{"(*UnixConn).RemoteAddr", Method, 0},
+		{"(*UnixConn).SetDeadline", Method, 0},
+		{"(*UnixConn).SetReadBuffer", Method, 0},
+		{"(*UnixConn).SetReadDeadline", Method, 0},
+		{"(*UnixConn).SetWriteBuffer", Method, 0},
+		{"(*UnixConn).SetWriteDeadline", Method, 0},
+		{"(*UnixConn).SyscallConn", Method, 9},
+		{"(*UnixConn).Write", Method, 0},
+		{"(*UnixConn).WriteMsgUnix", Method, 0},
+		{"(*UnixConn).WriteTo", Method, 0},
+		{"(*UnixConn).WriteToUnix", Method, 0},
+		{"(*UnixListener).Accept", Method, 0},
+		{"(*UnixListener).AcceptUnix", Method, 0},
+		{"(*UnixListener).Addr", Method, 0},
+		{"(*UnixListener).Close", Method, 0},
+		{"(*UnixListener).File", Method, 0},
+		{"(*UnixListener).SetDeadline", Method, 0},
+		{"(*UnixListener).SetUnlinkOnClose", Method, 8},
+		{"(*UnixListener).SyscallConn", Method, 10},
+		{"(Flags).String", Method, 0},
+		{"(HardwareAddr).String", Method, 0},
+		{"(IP).DefaultMask", Method, 0},
+		{"(IP).Equal", Method, 0},
+		{"(IP).IsGlobalUnicast", Method, 0},
+		{"(IP).IsInterfaceLocalMulticast", Method, 0},
+		{"(IP).IsLinkLocalMulticast", Method, 0},
+		{"(IP).IsLinkLocalUnicast", Method, 0},
+		{"(IP).IsLoopback", Method, 0},
+		{"(IP).IsMulticast", Method, 0},
+		{"(IP).IsPrivate", Method, 17},
+		{"(IP).IsUnspecified", Method, 0},
+		{"(IP).MarshalText", Method, 2},
+		{"(IP).Mask", Method, 0},
+		{"(IP).String", Method, 0},
+		{"(IP).To16", Method, 0},
+		{"(IP).To4", Method, 0},
+		{"(IPMask).Size", Method, 0},
+		{"(IPMask).String", Method, 0},
+		{"(InvalidAddrError).Error", Method, 0},
+		{"(InvalidAddrError).Temporary", Method, 0},
+		{"(InvalidAddrError).Timeout", Method, 0},
+		{"(UnknownNetworkError).Error", Method, 0},
+		{"(UnknownNetworkError).Temporary", Method, 0},
+		{"(UnknownNetworkError).Timeout", Method, 0},
+		{"Addr", Type, 0},
+		{"AddrError", Type, 0},
+		{"AddrError.Addr", Field, 0},
+		{"AddrError.Err", Field, 0},
+		{"Buffers", Type, 8},
+		{"CIDRMask", Func, 0},
+		{"Conn", Type, 0},
+		{"DNSConfigError", Type, 0},
+		{"DNSConfigError.Err", Field, 0},
+		{"DNSError", Type, 0},
+		{"DNSError.Err", Field, 0},
+		{"DNSError.IsNotFound", Field, 13},
+		{"DNSError.IsTemporary", Field, 6},
+		{"DNSError.IsTimeout", Field, 0},
+		{"DNSError.Name", Field, 0},
+		{"DNSError.Server", Field, 0},
+		{"DefaultResolver", Var, 8},
+		{"Dial", Func, 0},
+		{"DialIP", Func, 0},
+		{"DialTCP", Func, 0},
+		{"DialTimeout", Func, 0},
+		{"DialUDP", Func, 0},
+		{"DialUnix", Func, 0},
+		{"Dialer", Type, 1},
+		{"Dialer.Cancel", Field, 6},
+		{"Dialer.Control", Field, 11},
+		{"Dialer.ControlContext", Field, 20},
+		{"Dialer.Deadline", Field, 1},
+		{"Dialer.DualStack", Field, 2},
+		{"Dialer.FallbackDelay", Field, 5},
+		{"Dialer.KeepAlive", Field, 3},
+		{"Dialer.LocalAddr", Field, 1},
+		{"Dialer.Resolver", Field, 8},
+		{"Dialer.Timeout", Field, 1},
+		{"ErrClosed", Var, 16},
+		{"ErrWriteToConnected", Var, 0},
+		{"Error", Type, 0},
+		{"FileConn", Func, 0},
+		{"FileListener", Func, 0},
+		{"FilePacketConn", Func, 0},
+		{"FlagBroadcast", Const, 0},
+		{"FlagLoopback", Const, 0},
+		{"FlagMulticast", Const, 0},
+		{"FlagPointToPoint", Const, 0},
+		{"FlagRunning", Const, 20},
+		{"FlagUp", Const, 0},
+		{"Flags", Type, 0},
+		{"HardwareAddr", Type, 0},
+		{"IP", Type, 0},
+		{"IPAddr", Type, 0},
+		{"IPAddr.IP", Field, 0},
+		{"IPAddr.Zone", Field, 1},
+		{"IPConn", Type, 0},
+		{"IPMask", Type, 0},
+		{"IPNet", Type, 0},
+		{"IPNet.IP", Field, 0},
+		{"IPNet.Mask", Field, 0},
+		{"IPv4", Func, 0},
+		{"IPv4Mask", Func, 0},
+		{"IPv4allrouter", Var, 0},
+		{"IPv4allsys", Var, 0},
+		{"IPv4bcast", Var, 0},
+		{"IPv4len", Const, 0},
+		{"IPv4zero", Var, 0},
+		{"IPv6interfacelocalallnodes", Var, 0},
+		{"IPv6len", Const, 0},
+		{"IPv6linklocalallnodes", Var, 0},
+		{"IPv6linklocalallrouters", Var, 0},
+		{"IPv6loopback", Var, 0},
+		{"IPv6unspecified", Var, 0},
+		{"IPv6zero", Var, 0},
+		{"Interface", Type, 0},
+		{"Interface.Flags", Field, 0},
+		{"Interface.HardwareAddr", Field, 0},
+		{"Interface.Index", Field, 0},
+		{"Interface.MTU", Field, 0},
+		{"Interface.Name", Field, 0},
+		{"InterfaceAddrs", Func, 0},
+		{"InterfaceByIndex", Func, 0},
+		{"InterfaceByName", Func, 0},
+		{"Interfaces", Func, 0},
+		{"InvalidAddrError", Type, 0},
+		{"JoinHostPort", Func, 0},
+		{"Listen", Func, 0},
+		{"ListenConfig", Type, 11},
+		{"ListenConfig.Control", Field, 11},
+		{"ListenConfig.KeepAlive", Field, 13},
+		{"ListenIP", Func, 0},
+		{"ListenMulticastUDP", Func, 0},
+		{"ListenPacket", Func, 0},
+		{"ListenTCP", Func, 0},
+		{"ListenUDP", Func, 0},
+		{"ListenUnix", Func, 0},
+		{"ListenUnixgram", Func, 0},
+		{"Listener", Type, 0},
+		{"LookupAddr", Func, 0},
+		{"LookupCNAME", Func, 0},
+		{"LookupHost", Func, 0},
+		{"LookupIP", Func, 0},
+		{"LookupMX", Func, 0},
+		{"LookupNS", Func, 1},
+		{"LookupPort", Func, 0},
+		{"LookupSRV", Func, 0},
+		{"LookupTXT", Func, 0},
+		{"MX", Type, 0},
+		{"MX.Host", Field, 0},
+		{"MX.Pref", Field, 0},
+		{"NS", Type, 1},
+		{"NS.Host", Field, 1},
+		{"OpError", Type, 0},
+		{"OpError.Addr", Field, 0},
+		{"OpError.Err", Field, 0},
+		{"OpError.Net", Field, 0},
+		{"OpError.Op", Field, 0},
+		{"OpError.Source", Field, 5},
+		{"PacketConn", Type, 0},
+		{"ParseCIDR", Func, 0},
+		{"ParseError", Type, 0},
+		{"ParseError.Text", Field, 0},
+		{"ParseError.Type", Field, 0},
+		{"ParseIP", Func, 0},
+		{"ParseMAC", Func, 0},
+		{"Pipe", Func, 0},
+		{"ResolveIPAddr", Func, 0},
+		{"ResolveTCPAddr", Func, 0},
+		{"ResolveUDPAddr", Func, 0},
+		{"ResolveUnixAddr", Func, 0},
+		{"Resolver", Type, 8},
+		{"Resolver.Dial", Field, 9},
+		{"Resolver.PreferGo", Field, 8},
+		{"Resolver.StrictErrors", Field, 9},
+		{"SRV", Type, 0},
+		{"SRV.Port", Field, 0},
+		{"SRV.Priority", Field, 0},
+		{"SRV.Target", Field, 0},
+		{"SRV.Weight", Field, 0},
+		{"SplitHostPort", Func, 0},
+		{"TCPAddr", Type, 0},
+		{"TCPAddr.IP", Field, 0},
+		{"TCPAddr.Port", Field, 0},
+		{"TCPAddr.Zone", Field, 1},
+		{"TCPAddrFromAddrPort", Func, 18},
+		{"TCPConn", Type, 0},
+		{"TCPListener", Type, 0},
+		{"UDPAddr", Type, 0},
+		{"UDPAddr.IP", Field, 0},
+		{"UDPAddr.Port", Field, 0},
+		{"UDPAddr.Zone", Field, 1},
+		{"UDPAddrFromAddrPort", Func, 18},
+		{"UDPConn", Type, 0},
+		{"UnixAddr", Type, 0},
+		{"UnixAddr.Name", Field, 0},
+		{"UnixAddr.Net", Field, 0},
+		{"UnixConn", Type, 0},
+		{"UnixListener", Type, 0},
+		{"UnknownNetworkError", Type, 0},
+	},
+	"net/http": {
+		{"(*Client).CloseIdleConnections", Method, 12},
+		{"(*Client).Do", Method, 0},
+		{"(*Client).Get", Method, 0},
+		{"(*Client).Head", Method, 0},
+		{"(*Client).Post", Method, 0},
+		{"(*Client).PostForm", Method, 0},
+		{"(*Cookie).String", Method, 0},
+		{"(*Cookie).Valid", Method, 18},
+		{"(*MaxBytesError).Error", Method, 19},
+		{"(*ProtocolError).Error", Method, 0},
+		{"(*ProtocolError).Is", Method, 21},
+		{"(*Request).AddCookie", Method, 0},
+		{"(*Request).BasicAuth", Method, 4},
+		{"(*Request).Clone", Method, 13},
+		{"(*Request).Context", Method, 7},
+		{"(*Request).Cookie", Method, 0},
+		{"(*Request).Cookies", Method, 0},
+		{"(*Request).FormFile", Method, 0},
+		{"(*Request).FormValue", Method, 0},
+		{"(*Request).MultipartReader", Method, 0},
+		{"(*Request).ParseForm", Method, 0},
+		{"(*Request).ParseMultipartForm", Method, 0},
+		{"(*Request).PathValue", Method, 22},
+		{"(*Request).PostFormValue", Method, 1},
+		{"(*Request).ProtoAtLeast", Method, 0},
+		{"(*Request).Referer", Method, 0},
+		{"(*Request).SetBasicAuth", Method, 0},
+		{"(*Request).SetPathValue", Method, 22},
+		{"(*Request).UserAgent", Method, 0},
+		{"(*Request).WithContext", Method, 7},
+		{"(*Request).Write", Method, 0},
+		{"(*Request).WriteProxy", Method, 0},
+		{"(*Response).Cookies", Method, 0},
+		{"(*Response).Location", Method, 0},
+		{"(*Response).ProtoAtLeast", Method, 0},
+		{"(*Response).Write", Method, 0},
+		{"(*ResponseController).EnableFullDuplex", Method, 21},
+		{"(*ResponseController).Flush", Method, 20},
+		{"(*ResponseController).Hijack", Method, 20},
+		{"(*ResponseController).SetReadDeadline", Method, 20},
+		{"(*ResponseController).SetWriteDeadline", Method, 20},
+		{"(*ServeMux).Handle", Method, 0},
+		{"(*ServeMux).HandleFunc", Method, 0},
+		{"(*ServeMux).Handler", Method, 1},
+		{"(*ServeMux).ServeHTTP", Method, 0},
+		{"(*Server).Close", Method, 8},
+		{"(*Server).ListenAndServe", Method, 0},
+		{"(*Server).ListenAndServeTLS", Method, 0},
+		{"(*Server).RegisterOnShutdown", Method, 9},
+		{"(*Server).Serve", Method, 0},
+		{"(*Server).ServeTLS", Method, 9},
+		{"(*Server).SetKeepAlivesEnabled", Method, 3},
+		{"(*Server).Shutdown", Method, 8},
+		{"(*Transport).CancelRequest", Method, 1},
+		{"(*Transport).Clone", Method, 13},
+		{"(*Transport).CloseIdleConnections", Method, 0},
+		{"(*Transport).RegisterProtocol", Method, 0},
+		{"(*Transport).RoundTrip", Method, 0},
+		{"(ConnState).String", Method, 3},
+		{"(Dir).Open", Method, 0},
+		{"(HandlerFunc).ServeHTTP", Method, 0},
+		{"(Header).Add", Method, 0},
+		{"(Header).Clone", Method, 13},
+		{"(Header).Del", Method, 0},
+		{"(Header).Get", Method, 0},
+		{"(Header).Set", Method, 0},
+		{"(Header).Values", Method, 14},
+		{"(Header).Write", Method, 0},
+		{"(Header).WriteSubset", Method, 0},
+		{"AllowQuerySemicolons", Func, 17},
+		{"CanonicalHeaderKey", Func, 0},
+		{"Client", Type, 0},
+		{"Client.CheckRedirect", Field, 0},
+		{"Client.Jar", Field, 0},
+		{"Client.Timeout", Field, 3},
+		{"Client.Transport", Field, 0},
+		{"CloseNotifier", Type, 1},
+		{"ConnState", Type, 3},
+		{"Cookie", Type, 0},
+		{"Cookie.Domain", Field, 0},
+		{"Cookie.Expires", Field, 0},
+		{"Cookie.HttpOnly", Field, 0},
+		{"Cookie.MaxAge", Field, 0},
+		{"Cookie.Name", Field, 0},
+		{"Cookie.Path", Field, 0},
+		{"Cookie.Raw", Field, 0},
+		{"Cookie.RawExpires", Field, 0},
+		{"Cookie.SameSite", Field, 11},
+		{"Cookie.Secure", Field, 0},
+		{"Cookie.Unparsed", Field, 0},
+		{"Cookie.Value", Field, 0},
+		{"CookieJar", Type, 0},
+		{"DefaultClient", Var, 0},
+		{"DefaultMaxHeaderBytes", Const, 0},
+		{"DefaultMaxIdleConnsPerHost", Const, 0},
+		{"DefaultServeMux", Var, 0},
+		{"DefaultTransport", Var, 0},
+		{"DetectContentType", Func, 0},
+		{"Dir", Type, 0},
+		{"ErrAbortHandler", Var, 8},
+		{"ErrBodyNotAllowed", Var, 0},
+		{"ErrBodyReadAfterClose", Var, 0},
+		{"ErrContentLength", Var, 0},
+		{"ErrHandlerTimeout", Var, 0},
+		{"ErrHeaderTooLong", Var, 0},
+		{"ErrHijacked", Var, 0},
+		{"ErrLineTooLong", Var, 0},
+		{"ErrMissingBoundary", Var, 0},
+		{"ErrMissingContentLength", Var, 0},
+		{"ErrMissingFile", Var, 0},
+		{"ErrNoCookie", Var, 0},
+		{"ErrNoLocation", Var, 0},
+		{"ErrNotMultipart", Var, 0},
+		{"ErrNotSupported", Var, 0},
+		{"ErrSchemeMismatch", Var, 21},
+		{"ErrServerClosed", Var, 8},
+		{"ErrShortBody", Var, 0},
+		{"ErrSkipAltProtocol", Var, 6},
+		{"ErrUnexpectedTrailer", Var, 0},
+		{"ErrUseLastResponse", Var, 7},
+		{"ErrWriteAfterFlush", Var, 0},
+		{"Error", Func, 0},
+		{"FS", Func, 16},
+		{"File", Type, 0},
+		{"FileServer", Func, 0},
+		{"FileServerFS", Func, 22},
+		{"FileSystem", Type, 0},
+		{"Flusher", Type, 0},
+		{"Get", Func, 0},
+		{"Handle", Func, 0},
+		{"HandleFunc", Func, 0},
+		{"Handler", Type, 0},
+		{"HandlerFunc", Type, 0},
+		{"Head", Func, 0},
+		{"Header", Type, 0},
+		{"Hijacker", Type, 0},
+		{"ListenAndServe", Func, 0},
+		{"ListenAndServeTLS", Func, 0},
+		{"LocalAddrContextKey", Var, 7},
+		{"MaxBytesError", Type, 19},
+		{"MaxBytesError.Limit", Field, 19},
+		{"MaxBytesHandler", Func, 18},
+		{"MaxBytesReader", Func, 0},
+		{"MethodConnect", Const, 6},
+		{"MethodDelete", Const, 6},
+		{"MethodGet", Const, 6},
+		{"MethodHead", Const, 6},
+		{"MethodOptions", Const, 6},
+		{"MethodPatch", Const, 6},
+		{"MethodPost", Const, 6},
+		{"MethodPut", Const, 6},
+		{"MethodTrace", Const, 6},
+		{"NewFileTransport", Func, 0},
+		{"NewFileTransportFS", Func, 22},
+		{"NewRequest", Func, 0},
+		{"NewRequestWithContext", Func, 13},
+		{"NewResponseController", Func, 20},
+		{"NewServeMux", Func, 0},
+		{"NoBody", Var, 8},
+		{"NotFound", Func, 0},
+		{"NotFoundHandler", Func, 0},
+		{"ParseHTTPVersion", Func, 0},
+		{"ParseTime", Func, 1},
+		{"Post", Func, 0},
+		{"PostForm", Func, 0},
+		{"ProtocolError", Type, 0},
+		{"ProtocolError.ErrorString", Field, 0},
+		{"ProxyFromEnvironment", Func, 0},
+		{"ProxyURL", Func, 0},
+		{"PushOptions", Type, 8},
+		{"PushOptions.Header", Field, 8},
+		{"PushOptions.Method", Field, 8},
+		{"Pusher", Type, 8},
+		{"ReadRequest", Func, 0},
+		{"ReadResponse", Func, 0},
+		{"Redirect", Func, 0},
+		{"RedirectHandler", Func, 0},
+		{"Request", Type, 0},
+		{"Request.Body", Field, 0},
+		{"Request.Cancel", Field, 5},
+		{"Request.Close", Field, 0},
+		{"Request.ContentLength", Field, 0},
+		{"Request.Form", Field, 0},
+		{"Request.GetBody", Field, 8},
+		{"Request.Header", Field, 0},
+		{"Request.Host", Field, 0},
+		{"Request.Method", Field, 0},
+		{"Request.MultipartForm", Field, 0},
+		{"Request.PostForm", Field, 1},
+		{"Request.Proto", Field, 0},
+		{"Request.ProtoMajor", Field, 0},
+		{"Request.ProtoMinor", Field, 0},
+		{"Request.RemoteAddr", Field, 0},
+		{"Request.RequestURI", Field, 0},
+		{"Request.Response", Field, 7},
+		{"Request.TLS", Field, 0},
+		{"Request.Trailer", Field, 0},
+		{"Request.TransferEncoding", Field, 0},
+		{"Request.URL", Field, 0},
+		{"Response", Type, 0},
+		{"Response.Body", Field, 0},
+		{"Response.Close", Field, 0},
+		{"Response.ContentLength", Field, 0},
+		{"Response.Header", Field, 0},
+		{"Response.Proto", Field, 0},
+		{"Response.ProtoMajor", Field, 0},
+		{"Response.ProtoMinor", Field, 0},
+		{"Response.Request", Field, 0},
+		{"Response.Status", Field, 0},
+		{"Response.StatusCode", Field, 0},
+		{"Response.TLS", Field, 3},
+		{"Response.Trailer", Field, 0},
+		{"Response.TransferEncoding", Field, 0},
+		{"Response.Uncompressed", Field, 7},
+		{"ResponseController", Type, 20},
+		{"ResponseWriter", Type, 0},
+		{"RoundTripper", Type, 0},
+		{"SameSite", Type, 11},
+		{"SameSiteDefaultMode", Const, 11},
+		{"SameSiteLaxMode", Const, 11},
+		{"SameSiteNoneMode", Const, 13},
+		{"SameSiteStrictMode", Const, 11},
+		{"Serve", Func, 0},
+		{"ServeContent", Func, 0},
+		{"ServeFile", Func, 0},
+		{"ServeFileFS", Func, 22},
+		{"ServeMux", Type, 0},
+		{"ServeTLS", Func, 9},
+		{"Server", Type, 0},
+		{"Server.Addr", Field, 0},
+		{"Server.BaseContext", Field, 13},
+		{"Server.ConnContext", Field, 13},
+		{"Server.ConnState", Field, 3},
+		{"Server.DisableGeneralOptionsHandler", Field, 20},
+		{"Server.ErrorLog", Field, 3},
+		{"Server.Handler", Field, 0},
+		{"Server.IdleTimeout", Field, 8},
+		{"Server.MaxHeaderBytes", Field, 0},
+		{"Server.ReadHeaderTimeout", Field, 8},
+		{"Server.ReadTimeout", Field, 0},
+		{"Server.TLSConfig", Field, 0},
+		{"Server.TLSNextProto", Field, 1},
+		{"Server.WriteTimeout", Field, 0},
+		{"ServerContextKey", Var, 7},
+		{"SetCookie", Func, 0},
+		{"StateActive", Const, 3},
+		{"StateClosed", Const, 3},
+		{"StateHijacked", Const, 3},
+		{"StateIdle", Const, 3},
+		{"StateNew", Const, 3},
+		{"StatusAccepted", Const, 0},
+		{"StatusAlreadyReported", Const, 7},
+		{"StatusBadGateway", Const, 0},
+		{"StatusBadRequest", Const, 0},
+		{"StatusConflict", Const, 0},
+		{"StatusContinue", Const, 0},
+		{"StatusCreated", Const, 0},
+		{"StatusEarlyHints", Const, 13},
+		{"StatusExpectationFailed", Const, 0},
+		{"StatusFailedDependency", Const, 7},
+		{"StatusForbidden", Const, 0},
+		{"StatusFound", Const, 0},
+		{"StatusGatewayTimeout", Const, 0},
+		{"StatusGone", Const, 0},
+		{"StatusHTTPVersionNotSupported", Const, 0},
+		{"StatusIMUsed", Const, 7},
+		{"StatusInsufficientStorage", Const, 7},
+		{"StatusInternalServerError", Const, 0},
+		{"StatusLengthRequired", Const, 0},
+		{"StatusLocked", Const, 7},
+		{"StatusLoopDetected", Const, 7},
+		{"StatusMethodNotAllowed", Const, 0},
+		{"StatusMisdirectedRequest", Const, 11},
+		{"StatusMovedPermanently", Const, 0},
+		{"StatusMultiStatus", Const, 7},
+		{"StatusMultipleChoices", Const, 0},
+		{"StatusNetworkAuthenticationRequired", Const, 6},
+		{"StatusNoContent", Const, 0},
+		{"StatusNonAuthoritativeInfo", Const, 0},
+		{"StatusNotAcceptable", Const, 0},
+		{"StatusNotExtended", Const, 7},
+		{"StatusNotFound", Const, 0},
+		{"StatusNotImplemented", Const, 0},
+		{"StatusNotModified", Const, 0},
+		{"StatusOK", Const, 0},
+		{"StatusPartialContent", Const, 0},
+		{"StatusPaymentRequired", Const, 0},
+		{"StatusPermanentRedirect", Const, 7},
+		{"StatusPreconditionFailed", Const, 0},
+		{"StatusPreconditionRequired", Const, 6},
+		{"StatusProcessing", Const, 7},
+		{"StatusProxyAuthRequired", Const, 0},
+		{"StatusRequestEntityTooLarge", Const, 0},
+		{"StatusRequestHeaderFieldsTooLarge", Const, 6},
+		{"StatusRequestTimeout", Const, 0},
+		{"StatusRequestURITooLong", Const, 0},
+		{"StatusRequestedRangeNotSatisfiable", Const, 0},
+		{"StatusResetContent", Const, 0},
+		{"StatusSeeOther", Const, 0},
+		{"StatusServiceUnavailable", Const, 0},
+		{"StatusSwitchingProtocols", Const, 0},
+		{"StatusTeapot", Const, 0},
+		{"StatusTemporaryRedirect", Const, 0},
+		{"StatusText", Func, 0},
+		{"StatusTooEarly", Const, 12},
+		{"StatusTooManyRequests", Const, 6},
+		{"StatusUnauthorized", Const, 0},
+		{"StatusUnavailableForLegalReasons", Const, 6},
+		{"StatusUnprocessableEntity", Const, 7},
+		{"StatusUnsupportedMediaType", Const, 0},
+		{"StatusUpgradeRequired", Const, 7},
+		{"StatusUseProxy", Const, 0},
+		{"StatusVariantAlsoNegotiates", Const, 7},
+		{"StripPrefix", Func, 0},
+		{"TimeFormat", Const, 0},
+		{"TimeoutHandler", Func, 0},
+		{"TrailerPrefix", Const, 8},
+		{"Transport", Type, 0},
+		{"Transport.Dial", Field, 0},
+		{"Transport.DialContext", Field, 7},
+		{"Transport.DialTLS", Field, 4},
+		{"Transport.DialTLSContext", Field, 14},
+		{"Transport.DisableCompression", Field, 0},
+		{"Transport.DisableKeepAlives", Field, 0},
+		{"Transport.ExpectContinueTimeout", Field, 6},
+		{"Transport.ForceAttemptHTTP2", Field, 13},
+		{"Transport.GetProxyConnectHeader", Field, 16},
+		{"Transport.IdleConnTimeout", Field, 7},
+		{"Transport.MaxConnsPerHost", Field, 11},
+		{"Transport.MaxIdleConns", Field, 7},
+		{"Transport.MaxIdleConnsPerHost", Field, 0},
+		{"Transport.MaxResponseHeaderBytes", Field, 7},
+		{"Transport.OnProxyConnectResponse", Field, 20},
+		{"Transport.Proxy", Field, 0},
+		{"Transport.ProxyConnectHeader", Field, 8},
+		{"Transport.ReadBufferSize", Field, 13},
+		{"Transport.ResponseHeaderTimeout", Field, 1},
+		{"Transport.TLSClientConfig", Field, 0},
+		{"Transport.TLSHandshakeTimeout", Field, 3},
+		{"Transport.TLSNextProto", Field, 6},
+		{"Transport.WriteBufferSize", Field, 13},
+	},
+	"net/http/cgi": {
+		{"(*Handler).ServeHTTP", Method, 0},
+		{"Handler", Type, 0},
+		{"Handler.Args", Field, 0},
+		{"Handler.Dir", Field, 0},
+		{"Handler.Env", Field, 0},
+		{"Handler.InheritEnv", Field, 0},
+		{"Handler.Logger", Field, 0},
+		{"Handler.Path", Field, 0},
+		{"Handler.PathLocationHandler", Field, 0},
+		{"Handler.Root", Field, 0},
+		{"Handler.Stderr", Field, 7},
+		{"Request", Func, 0},
+		{"RequestFromMap", Func, 0},
+		{"Serve", Func, 0},
+	},
+	"net/http/cookiejar": {
+		{"(*Jar).Cookies", Method, 1},
+		{"(*Jar).SetCookies", Method, 1},
+		{"Jar", Type, 1},
+		{"New", Func, 1},
+		{"Options", Type, 1},
+		{"Options.PublicSuffixList", Field, 1},
+		{"PublicSuffixList", Type, 1},
+	},
+	"net/http/fcgi": {
+		{"ErrConnClosed", Var, 5},
+		{"ErrRequestAborted", Var, 5},
+		{"ProcessEnv", Func, 9},
+		{"Serve", Func, 0},
+	},
+	"net/http/httptest": {
+		{"(*ResponseRecorder).Flush", Method, 0},
+		{"(*ResponseRecorder).Header", Method, 0},
+		{"(*ResponseRecorder).Result", Method, 7},
+		{"(*ResponseRecorder).Write", Method, 0},
+		{"(*ResponseRecorder).WriteHeader", Method, 0},
+		{"(*ResponseRecorder).WriteString", Method, 6},
+		{"(*Server).Certificate", Method, 9},
+		{"(*Server).Client", Method, 9},
+		{"(*Server).Close", Method, 0},
+		{"(*Server).CloseClientConnections", Method, 0},
+		{"(*Server).Start", Method, 0},
+		{"(*Server).StartTLS", Method, 0},
+		{"DefaultRemoteAddr", Const, 0},
+		{"NewRecorder", Func, 0},
+		{"NewRequest", Func, 7},
+		{"NewServer", Func, 0},
+		{"NewTLSServer", Func, 0},
+		{"NewUnstartedServer", Func, 0},
+		{"ResponseRecorder", Type, 0},
+		{"ResponseRecorder.Body", Field, 0},
+		{"ResponseRecorder.Code", Field, 0},
+		{"ResponseRecorder.Flushed", Field, 0},
+		{"ResponseRecorder.HeaderMap", Field, 0},
+		{"Server", Type, 0},
+		{"Server.Config", Field, 0},
+		{"Server.EnableHTTP2", Field, 14},
+		{"Server.Listener", Field, 0},
+		{"Server.TLS", Field, 0},
+		{"Server.URL", Field, 0},
+	},
+	"net/http/httptrace": {
+		{"ClientTrace", Type, 7},
+		{"ClientTrace.ConnectDone", Field, 7},
+		{"ClientTrace.ConnectStart", Field, 7},
+		{"ClientTrace.DNSDone", Field, 7},
+		{"ClientTrace.DNSStart", Field, 7},
+		{"ClientTrace.GetConn", Field, 7},
+		{"ClientTrace.Got100Continue", Field, 7},
+		{"ClientTrace.Got1xxResponse", Field, 11},
+		{"ClientTrace.GotConn", Field, 7},
+		{"ClientTrace.GotFirstResponseByte", Field, 7},
+		{"ClientTrace.PutIdleConn", Field, 7},
+		{"ClientTrace.TLSHandshakeDone", Field, 8},
+		{"ClientTrace.TLSHandshakeStart", Field, 8},
+		{"ClientTrace.Wait100Continue", Field, 7},
+		{"ClientTrace.WroteHeaderField", Field, 11},
+		{"ClientTrace.WroteHeaders", Field, 7},
+		{"ClientTrace.WroteRequest", Field, 7},
+		{"ContextClientTrace", Func, 7},
+		{"DNSDoneInfo", Type, 7},
+		{"DNSDoneInfo.Addrs", Field, 7},
+		{"DNSDoneInfo.Coalesced", Field, 7},
+		{"DNSDoneInfo.Err", Field, 7},
+		{"DNSStartInfo", Type, 7},
+		{"DNSStartInfo.Host", Field, 7},
+		{"GotConnInfo", Type, 7},
+		{"GotConnInfo.Conn", Field, 7},
+		{"GotConnInfo.IdleTime", Field, 7},
+		{"GotConnInfo.Reused", Field, 7},
+		{"GotConnInfo.WasIdle", Field, 7},
+		{"WithClientTrace", Func, 7},
+		{"WroteRequestInfo", Type, 7},
+		{"WroteRequestInfo.Err", Field, 7},
+	},
+	"net/http/httputil": {
+		{"(*ClientConn).Close", Method, 0},
+		{"(*ClientConn).Do", Method, 0},
+		{"(*ClientConn).Hijack", Method, 0},
+		{"(*ClientConn).Pending", Method, 0},
+		{"(*ClientConn).Read", Method, 0},
+		{"(*ClientConn).Write", Method, 0},
+		{"(*ProxyRequest).SetURL", Method, 20},
+		{"(*ProxyRequest).SetXForwarded", Method, 20},
+		{"(*ReverseProxy).ServeHTTP", Method, 0},
+		{"(*ServerConn).Close", Method, 0},
+		{"(*ServerConn).Hijack", Method, 0},
+		{"(*ServerConn).Pending", Method, 0},
+		{"(*ServerConn).Read", Method, 0},
+		{"(*ServerConn).Write", Method, 0},
+		{"BufferPool", Type, 6},
+		{"ClientConn", Type, 0},
+		{"DumpRequest", Func, 0},
+		{"DumpRequestOut", Func, 0},
+		{"DumpResponse", Func, 0},
+		{"ErrClosed", Var, 0},
+		{"ErrLineTooLong", Var, 0},
+		{"ErrPersistEOF", Var, 0},
+		{"ErrPipeline", Var, 0},
+		{"NewChunkedReader", Func, 0},
+		{"NewChunkedWriter", Func, 0},
+		{"NewClientConn", Func, 0},
+		{"NewProxyClientConn", Func, 0},
+		{"NewServerConn", Func, 0},
+		{"NewSingleHostReverseProxy", Func, 0},
+		{"ProxyRequest", Type, 20},
+		{"ProxyRequest.In", Field, 20},
+		{"ProxyRequest.Out", Field, 20},
+		{"ReverseProxy", Type, 0},
+		{"ReverseProxy.BufferPool", Field, 6},
+		{"ReverseProxy.Director", Field, 0},
+		{"ReverseProxy.ErrorHandler", Field, 11},
+		{"ReverseProxy.ErrorLog", Field, 4},
+		{"ReverseProxy.FlushInterval", Field, 0},
+		{"ReverseProxy.ModifyResponse", Field, 8},
+		{"ReverseProxy.Rewrite", Field, 20},
+		{"ReverseProxy.Transport", Field, 0},
+		{"ServerConn", Type, 0},
+	},
+	"net/http/pprof": {
+		{"Cmdline", Func, 0},
+		{"Handler", Func, 0},
+		{"Index", Func, 0},
+		{"Profile", Func, 0},
+		{"Symbol", Func, 0},
+		{"Trace", Func, 5},
+	},
+	"net/mail": {
+		{"(*Address).String", Method, 0},
+		{"(*AddressParser).Parse", Method, 5},
+		{"(*AddressParser).ParseList", Method, 5},
+		{"(Header).AddressList", Method, 0},
+		{"(Header).Date", Method, 0},
+		{"(Header).Get", Method, 0},
+		{"Address", Type, 0},
+		{"Address.Address", Field, 0},
+		{"Address.Name", Field, 0},
+		{"AddressParser", Type, 5},
+		{"AddressParser.WordDecoder", Field, 5},
+		{"ErrHeaderNotPresent", Var, 0},
+		{"Header", Type, 0},
+		{"Message", Type, 0},
+		{"Message.Body", Field, 0},
+		{"Message.Header", Field, 0},
+		{"ParseAddress", Func, 1},
+		{"ParseAddressList", Func, 1},
+		{"ParseDate", Func, 8},
+		{"ReadMessage", Func, 0},
+	},
+	"net/netip": {
+		{"(*Addr).UnmarshalBinary", Method, 18},
+		{"(*Addr).UnmarshalText", Method, 18},
+		{"(*AddrPort).UnmarshalBinary", Method, 18},
+		{"(*AddrPort).UnmarshalText", Method, 18},
+		{"(*Prefix).UnmarshalBinary", Method, 18},
+		{"(*Prefix).UnmarshalText", Method, 18},
+		{"(Addr).AppendTo", Method, 18},
+		{"(Addr).As16", Method, 18},
+		{"(Addr).As4", Method, 18},
+		{"(Addr).AsSlice", Method, 18},
+		{"(Addr).BitLen", Method, 18},
+		{"(Addr).Compare", Method, 18},
+		{"(Addr).Is4", Method, 18},
+		{"(Addr).Is4In6", Method, 18},
+		{"(Addr).Is6", Method, 18},
+		{"(Addr).IsGlobalUnicast", Method, 18},
+		{"(Addr).IsInterfaceLocalMulticast", Method, 18},
+		{"(Addr).IsLinkLocalMulticast", Method, 18},
+		{"(Addr).IsLinkLocalUnicast", Method, 18},
+		{"(Addr).IsLoopback", Method, 18},
+		{"(Addr).IsMulticast", Method, 18},
+		{"(Addr).IsPrivate", Method, 18},
+		{"(Addr).IsUnspecified", Method, 18},
+		{"(Addr).IsValid", Method, 18},
+		{"(Addr).Less", Method, 18},
+		{"(Addr).MarshalBinary", Method, 18},
+		{"(Addr).MarshalText", Method, 18},
+		{"(Addr).Next", Method, 18},
+		{"(Addr).Prefix", Method, 18},
+		{"(Addr).Prev", Method, 18},
+		{"(Addr).String", Method, 18},
+		{"(Addr).StringExpanded", Method, 18},
+		{"(Addr).Unmap", Method, 18},
+		{"(Addr).WithZone", Method, 18},
+		{"(Addr).Zone", Method, 18},
+		{"(AddrPort).Addr", Method, 18},
+		{"(AddrPort).AppendTo", Method, 18},
+		{"(AddrPort).Compare", Method, 22},
+		{"(AddrPort).IsValid", Method, 18},
+		{"(AddrPort).MarshalBinary", Method, 18},
+		{"(AddrPort).MarshalText", Method, 18},
+		{"(AddrPort).Port", Method, 18},
+		{"(AddrPort).String", Method, 18},
+		{"(Prefix).Addr", Method, 18},
+		{"(Prefix).AppendTo", Method, 18},
+		{"(Prefix).Bits", Method, 18},
+		{"(Prefix).Contains", Method, 18},
+		{"(Prefix).IsSingleIP", Method, 18},
+		{"(Prefix).IsValid", Method, 18},
+		{"(Prefix).MarshalBinary", Method, 18},
+		{"(Prefix).MarshalText", Method, 18},
+		{"(Prefix).Masked", Method, 18},
+		{"(Prefix).Overlaps", Method, 18},
+		{"(Prefix).String", Method, 18},
+		{"Addr", Type, 18},
+		{"AddrFrom16", Func, 18},
+		{"AddrFrom4", Func, 18},
+		{"AddrFromSlice", Func, 18},
+		{"AddrPort", Type, 18},
+		{"AddrPortFrom", Func, 18},
+		{"IPv4Unspecified", Func, 18},
+		{"IPv6LinkLocalAllNodes", Func, 18},
+		{"IPv6LinkLocalAllRouters", Func, 20},
+		{"IPv6Loopback", Func, 20},
+		{"IPv6Unspecified", Func, 18},
+		{"MustParseAddr", Func, 18},
+		{"MustParseAddrPort", Func, 18},
+		{"MustParsePrefix", Func, 18},
+		{"ParseAddr", Func, 18},
+		{"ParseAddrPort", Func, 18},
+		{"ParsePrefix", Func, 18},
+		{"Prefix", Type, 18},
+		{"PrefixFrom", Func, 18},
+	},
+	"net/rpc": {
+		{"(*Client).Call", Method, 0},
+		{"(*Client).Close", Method, 0},
+		{"(*Client).Go", Method, 0},
+		{"(*Server).Accept", Method, 0},
+		{"(*Server).HandleHTTP", Method, 0},
+		{"(*Server).Register", Method, 0},
+		{"(*Server).RegisterName", Method, 0},
+		{"(*Server).ServeCodec", Method, 0},
+		{"(*Server).ServeConn", Method, 0},
+		{"(*Server).ServeHTTP", Method, 0},
+		{"(*Server).ServeRequest", Method, 0},
+		{"(ServerError).Error", Method, 0},
+		{"Accept", Func, 0},
+		{"Call", Type, 0},
+		{"Call.Args", Field, 0},
+		{"Call.Done", Field, 0},
+		{"Call.Error", Field, 0},
+		{"Call.Reply", Field, 0},
+		{"Call.ServiceMethod", Field, 0},
+		{"Client", Type, 0},
+		{"ClientCodec", Type, 0},
+		{"DefaultDebugPath", Const, 0},
+		{"DefaultRPCPath", Const, 0},
+		{"DefaultServer", Var, 0},
+		{"Dial", Func, 0},
+		{"DialHTTP", Func, 0},
+		{"DialHTTPPath", Func, 0},
+		{"ErrShutdown", Var, 0},
+		{"HandleHTTP", Func, 0},
+		{"NewClient", Func, 0},
+		{"NewClientWithCodec", Func, 0},
+		{"NewServer", Func, 0},
+		{"Register", Func, 0},
+		{"RegisterName", Func, 0},
+		{"Request", Type, 0},
+		{"Request.Seq", Field, 0},
+		{"Request.ServiceMethod", Field, 0},
+		{"Response", Type, 0},
+		{"Response.Error", Field, 0},
+		{"Response.Seq", Field, 0},
+		{"Response.ServiceMethod", Field, 0},
+		{"ServeCodec", Func, 0},
+		{"ServeConn", Func, 0},
+		{"ServeRequest", Func, 0},
+		{"Server", Type, 0},
+		{"ServerCodec", Type, 0},
+		{"ServerError", Type, 0},
+	},
+	"net/rpc/jsonrpc": {
+		{"Dial", Func, 0},
+		{"NewClient", Func, 0},
+		{"NewClientCodec", Func, 0},
+		{"NewServerCodec", Func, 0},
+		{"ServeConn", Func, 0},
+	},
+	"net/smtp": {
+		{"(*Client).Auth", Method, 0},
+		{"(*Client).Close", Method, 2},
+		{"(*Client).Data", Method, 0},
+		{"(*Client).Extension", Method, 0},
+		{"(*Client).Hello", Method, 1},
+		{"(*Client).Mail", Method, 0},
+		{"(*Client).Noop", Method, 10},
+		{"(*Client).Quit", Method, 0},
+		{"(*Client).Rcpt", Method, 0},
+		{"(*Client).Reset", Method, 0},
+		{"(*Client).StartTLS", Method, 0},
+		{"(*Client).TLSConnectionState", Method, 5},
+		{"(*Client).Verify", Method, 0},
+		{"Auth", Type, 0},
+		{"CRAMMD5Auth", Func, 0},
+		{"Client", Type, 0},
+		{"Client.Text", Field, 0},
+		{"Dial", Func, 0},
+		{"NewClient", Func, 0},
+		{"PlainAuth", Func, 0},
+		{"SendMail", Func, 0},
+		{"ServerInfo", Type, 0},
+		{"ServerInfo.Auth", Field, 0},
+		{"ServerInfo.Name", Field, 0},
+		{"ServerInfo.TLS", Field, 0},
+	},
+	"net/textproto": {
+		{"(*Conn).Close", Method, 0},
+		{"(*Conn).Cmd", Method, 0},
+		{"(*Conn).DotReader", Method, 0},
+		{"(*Conn).DotWriter", Method, 0},
+		{"(*Conn).EndRequest", Method, 0},
+		{"(*Conn).EndResponse", Method, 0},
+		{"(*Conn).Next", Method, 0},
+		{"(*Conn).PrintfLine", Method, 0},
+		{"(*Conn).ReadCodeLine", Method, 0},
+		{"(*Conn).ReadContinuedLine", Method, 0},
+		{"(*Conn).ReadContinuedLineBytes", Method, 0},
+		{"(*Conn).ReadDotBytes", Method, 0},
+		{"(*Conn).ReadDotLines", Method, 0},
+		{"(*Conn).ReadLine", Method, 0},
+		{"(*Conn).ReadLineBytes", Method, 0},
+		{"(*Conn).ReadMIMEHeader", Method, 0},
+		{"(*Conn).ReadResponse", Method, 0},
+		{"(*Conn).StartRequest", Method, 0},
+		{"(*Conn).StartResponse", Method, 0},
+		{"(*Error).Error", Method, 0},
+		{"(*Pipeline).EndRequest", Method, 0},
+		{"(*Pipeline).EndResponse", Method, 0},
+		{"(*Pipeline).Next", Method, 0},
+		{"(*Pipeline).StartRequest", Method, 0},
+		{"(*Pipeline).StartResponse", Method, 0},
+		{"(*Reader).DotReader", Method, 0},
+		{"(*Reader).ReadCodeLine", Method, 0},
+		{"(*Reader).ReadContinuedLine", Method, 0},
+		{"(*Reader).ReadContinuedLineBytes", Method, 0},
+		{"(*Reader).ReadDotBytes", Method, 0},
+		{"(*Reader).ReadDotLines", Method, 0},
+		{"(*Reader).ReadLine", Method, 0},
+		{"(*Reader).ReadLineBytes", Method, 0},
+		{"(*Reader).ReadMIMEHeader", Method, 0},
+		{"(*Reader).ReadResponse", Method, 0},
+		{"(*Writer).DotWriter", Method, 0},
+		{"(*Writer).PrintfLine", Method, 0},
+		{"(MIMEHeader).Add", Method, 0},
+		{"(MIMEHeader).Del", Method, 0},
+		{"(MIMEHeader).Get", Method, 0},
+		{"(MIMEHeader).Set", Method, 0},
+		{"(MIMEHeader).Values", Method, 14},
+		{"(ProtocolError).Error", Method, 0},
+		{"CanonicalMIMEHeaderKey", Func, 0},
+		{"Conn", Type, 0},
+		{"Conn.Pipeline", Field, 0},
+		{"Conn.Reader", Field, 0},
+		{"Conn.Writer", Field, 0},
+		{"Dial", Func, 0},
+		{"Error", Type, 0},
+		{"Error.Code", Field, 0},
+		{"Error.Msg", Field, 0},
+		{"MIMEHeader", Type, 0},
+		{"NewConn", Func, 0},
+		{"NewReader", Func, 0},
+		{"NewWriter", Func, 0},
+		{"Pipeline", Type, 0},
+		{"ProtocolError", Type, 0},
+		{"Reader", Type, 0},
+		{"Reader.R", Field, 0},
+		{"TrimBytes", Func, 1},
+		{"TrimString", Func, 1},
+		{"Writer", Type, 0},
+		{"Writer.W", Field, 0},
+	},
+	"net/url": {
+		{"(*Error).Error", Method, 0},
+		{"(*Error).Temporary", Method, 6},
+		{"(*Error).Timeout", Method, 6},
+		{"(*Error).Unwrap", Method, 13},
+		{"(*URL).EscapedFragment", Method, 15},
+		{"(*URL).EscapedPath", Method, 5},
+		{"(*URL).Hostname", Method, 8},
+		{"(*URL).IsAbs", Method, 0},
+		{"(*URL).JoinPath", Method, 19},
+		{"(*URL).MarshalBinary", Method, 8},
+		{"(*URL).Parse", Method, 0},
+		{"(*URL).Port", Method, 8},
+		{"(*URL).Query", Method, 0},
+		{"(*URL).Redacted", Method, 15},
+		{"(*URL).RequestURI", Method, 0},
+		{"(*URL).ResolveReference", Method, 0},
+		{"(*URL).String", Method, 0},
+		{"(*URL).UnmarshalBinary", Method, 8},
+		{"(*Userinfo).Password", Method, 0},
+		{"(*Userinfo).String", Method, 0},
+		{"(*Userinfo).Username", Method, 0},
+		{"(EscapeError).Error", Method, 0},
+		{"(InvalidHostError).Error", Method, 6},
+		{"(Values).Add", Method, 0},
+		{"(Values).Del", Method, 0},
+		{"(Values).Encode", Method, 0},
+		{"(Values).Get", Method, 0},
+		{"(Values).Has", Method, 17},
+		{"(Values).Set", Method, 0},
+		{"Error", Type, 0},
+		{"Error.Err", Field, 0},
+		{"Error.Op", Field, 0},
+		{"Error.URL", Field, 0},
+		{"EscapeError", Type, 0},
+		{"InvalidHostError", Type, 6},
+		{"JoinPath", Func, 19},
+		{"Parse", Func, 0},
+		{"ParseQuery", Func, 0},
+		{"ParseRequestURI", Func, 0},
+		{"PathEscape", Func, 8},
+		{"PathUnescape", Func, 8},
+		{"QueryEscape", Func, 0},
+		{"QueryUnescape", Func, 0},
+		{"URL", Type, 0},
+		{"URL.ForceQuery", Field, 7},
+		{"URL.Fragment", Field, 0},
+		{"URL.Host", Field, 0},
+		{"URL.OmitHost", Field, 19},
+		{"URL.Opaque", Field, 0},
+		{"URL.Path", Field, 0},
+		{"URL.RawFragment", Field, 15},
+		{"URL.RawPath", Field, 5},
+		{"URL.RawQuery", Field, 0},
+		{"URL.Scheme", Field, 0},
+		{"URL.User", Field, 0},
+		{"User", Func, 0},
+		{"UserPassword", Func, 0},
+		{"Userinfo", Type, 0},
+		{"Values", Type, 0},
+	},
+	"os": {
+		{"(*File).Chdir", Method, 0},
+		{"(*File).Chmod", Method, 0},
+		{"(*File).Chown", Method, 0},
+		{"(*File).Close", Method, 0},
+		{"(*File).Fd", Method, 0},
+		{"(*File).Name", Method, 0},
+		{"(*File).Read", Method, 0},
+		{"(*File).ReadAt", Method, 0},
+		{"(*File).ReadDir", Method, 16},
+		{"(*File).ReadFrom", Method, 15},
+		{"(*File).Readdir", Method, 0},
+		{"(*File).Readdirnames", Method, 0},
+		{"(*File).Seek", Method, 0},
+		{"(*File).SetDeadline", Method, 10},
+		{"(*File).SetReadDeadline", Method, 10},
+		{"(*File).SetWriteDeadline", Method, 10},
+		{"(*File).Stat", Method, 0},
+		{"(*File).Sync", Method, 0},
+		{"(*File).SyscallConn", Method, 12},
+		{"(*File).Truncate", Method, 0},
+		{"(*File).Write", Method, 0},
+		{"(*File).WriteAt", Method, 0},
+		{"(*File).WriteString", Method, 0},
+		{"(*File).WriteTo", Method, 22},
+		{"(*LinkError).Error", Method, 0},
+		{"(*LinkError).Unwrap", Method, 13},
+		{"(*PathError).Error", Method, 0},
+		{"(*PathError).Timeout", Method, 10},
+		{"(*PathError).Unwrap", Method, 13},
+		{"(*Process).Kill", Method, 0},
+		{"(*Process).Release", Method, 0},
+		{"(*Process).Signal", Method, 0},
+		{"(*Process).Wait", Method, 0},
+		{"(*ProcessState).ExitCode", Method, 12},
+		{"(*ProcessState).Exited", Method, 0},
+		{"(*ProcessState).Pid", Method, 0},
+		{"(*ProcessState).String", Method, 0},
+		{"(*ProcessState).Success", Method, 0},
+		{"(*ProcessState).Sys", Method, 0},
+		{"(*ProcessState).SysUsage", Method, 0},
+		{"(*ProcessState).SystemTime", Method, 0},
+		{"(*ProcessState).UserTime", Method, 0},
+		{"(*SyscallError).Error", Method, 0},
+		{"(*SyscallError).Timeout", Method, 10},
+		{"(*SyscallError).Unwrap", Method, 13},
+		{"(FileMode).IsDir", Method, 0},
+		{"(FileMode).IsRegular", Method, 1},
+		{"(FileMode).Perm", Method, 0},
+		{"(FileMode).String", Method, 0},
+		{"Args", Var, 0},
+		{"Chdir", Func, 0},
+		{"Chmod", Func, 0},
+		{"Chown", Func, 0},
+		{"Chtimes", Func, 0},
+		{"Clearenv", Func, 0},
+		{"Create", Func, 0},
+		{"CreateTemp", Func, 16},
+		{"DevNull", Const, 0},
+		{"DirEntry", Type, 16},
+		{"DirFS", Func, 16},
+		{"Environ", Func, 0},
+		{"ErrClosed", Var, 8},
+		{"ErrDeadlineExceeded", Var, 15},
+		{"ErrExist", Var, 0},
+		{"ErrInvalid", Var, 0},
+		{"ErrNoDeadline", Var, 10},
+		{"ErrNotExist", Var, 0},
+		{"ErrPermission", Var, 0},
+		{"ErrProcessDone", Var, 16},
+		{"Executable", Func, 8},
+		{"Exit", Func, 0},
+		{"Expand", Func, 0},
+		{"ExpandEnv", Func, 0},
+		{"File", Type, 0},
+		{"FileInfo", Type, 0},
+		{"FileMode", Type, 0},
+		{"FindProcess", Func, 0},
+		{"Getegid", Func, 0},
+		{"Getenv", Func, 0},
+		{"Geteuid", Func, 0},
+		{"Getgid", Func, 0},
+		{"Getgroups", Func, 0},
+		{"Getpagesize", Func, 0},
+		{"Getpid", Func, 0},
+		{"Getppid", Func, 0},
+		{"Getuid", Func, 0},
+		{"Getwd", Func, 0},
+		{"Hostname", Func, 0},
+		{"Interrupt", Var, 0},
+		{"IsExist", Func, 0},
+		{"IsNotExist", Func, 0},
+		{"IsPathSeparator", Func, 0},
+		{"IsPermission", Func, 0},
+		{"IsTimeout", Func, 10},
+		{"Kill", Var, 0},
+		{"Lchown", Func, 0},
+		{"Link", Func, 0},
+		{"LinkError", Type, 0},
+		{"LinkError.Err", Field, 0},
+		{"LinkError.New", Field, 0},
+		{"LinkError.Old", Field, 0},
+		{"LinkError.Op", Field, 0},
+		{"LookupEnv", Func, 5},
+		{"Lstat", Func, 0},
+		{"Mkdir", Func, 0},
+		{"MkdirAll", Func, 0},
+		{"MkdirTemp", Func, 16},
+		{"ModeAppend", Const, 0},
+		{"ModeCharDevice", Const, 0},
+		{"ModeDevice", Const, 0},
+		{"ModeDir", Const, 0},
+		{"ModeExclusive", Const, 0},
+		{"ModeIrregular", Const, 11},
+		{"ModeNamedPipe", Const, 0},
+		{"ModePerm", Const, 0},
+		{"ModeSetgid", Const, 0},
+		{"ModeSetuid", Const, 0},
+		{"ModeSocket", Const, 0},
+		{"ModeSticky", Const, 0},
+		{"ModeSymlink", Const, 0},
+		{"ModeTemporary", Const, 0},
+		{"ModeType", Const, 0},
+		{"NewFile", Func, 0},
+		{"NewSyscallError", Func, 0},
+		{"O_APPEND", Const, 0},
+		{"O_CREATE", Const, 0},
+		{"O_EXCL", Const, 0},
+		{"O_RDONLY", Const, 0},
+		{"O_RDWR", Const, 0},
+		{"O_SYNC", Const, 0},
+		{"O_TRUNC", Const, 0},
+		{"O_WRONLY", Const, 0},
+		{"Open", Func, 0},
+		{"OpenFile", Func, 0},
+		{"PathError", Type, 0},
+		{"PathError.Err", Field, 0},
+		{"PathError.Op", Field, 0},
+		{"PathError.Path", Field, 0},
+		{"PathListSeparator", Const, 0},
+		{"PathSeparator", Const, 0},
+		{"Pipe", Func, 0},
+		{"ProcAttr", Type, 0},
+		{"ProcAttr.Dir", Field, 0},
+		{"ProcAttr.Env", Field, 0},
+		{"ProcAttr.Files", Field, 0},
+		{"ProcAttr.Sys", Field, 0},
+		{"Process", Type, 0},
+		{"Process.Pid", Field, 0},
+		{"ProcessState", Type, 0},
+		{"ReadDir", Func, 16},
+		{"ReadFile", Func, 16},
+		{"Readlink", Func, 0},
+		{"Remove", Func, 0},
+		{"RemoveAll", Func, 0},
+		{"Rename", Func, 0},
+		{"SEEK_CUR", Const, 0},
+		{"SEEK_END", Const, 0},
+		{"SEEK_SET", Const, 0},
+		{"SameFile", Func, 0},
+		{"Setenv", Func, 0},
+		{"Signal", Type, 0},
+		{"StartProcess", Func, 0},
+		{"Stat", Func, 0},
+		{"Stderr", Var, 0},
+		{"Stdin", Var, 0},
+		{"Stdout", Var, 0},
+		{"Symlink", Func, 0},
+		{"SyscallError", Type, 0},
+		{"SyscallError.Err", Field, 0},
+		{"SyscallError.Syscall", Field, 0},
+		{"TempDir", Func, 0},
+		{"Truncate", Func, 0},
+		{"Unsetenv", Func, 4},
+		{"UserCacheDir", Func, 11},
+		{"UserConfigDir", Func, 13},
+		{"UserHomeDir", Func, 12},
+		{"WriteFile", Func, 16},
+	},
+	"os/exec": {
+		{"(*Cmd).CombinedOutput", Method, 0},
+		{"(*Cmd).Environ", Method, 19},
+		{"(*Cmd).Output", Method, 0},
+		{"(*Cmd).Run", Method, 0},
+		{"(*Cmd).Start", Method, 0},
+		{"(*Cmd).StderrPipe", Method, 0},
+		{"(*Cmd).StdinPipe", Method, 0},
+		{"(*Cmd).StdoutPipe", Method, 0},
+		{"(*Cmd).String", Method, 13},
+		{"(*Cmd).Wait", Method, 0},
+		{"(*Error).Error", Method, 0},
+		{"(*Error).Unwrap", Method, 13},
+		{"(*ExitError).Error", Method, 0},
+		{"(ExitError).ExitCode", Method, 12},
+		{"(ExitError).Exited", Method, 0},
+		{"(ExitError).Pid", Method, 0},
+		{"(ExitError).String", Method, 0},
+		{"(ExitError).Success", Method, 0},
+		{"(ExitError).Sys", Method, 0},
+		{"(ExitError).SysUsage", Method, 0},
+		{"(ExitError).SystemTime", Method, 0},
+		{"(ExitError).UserTime", Method, 0},
+		{"Cmd", Type, 0},
+		{"Cmd.Args", Field, 0},
+		{"Cmd.Cancel", Field, 20},
+		{"Cmd.Dir", Field, 0},
+		{"Cmd.Env", Field, 0},
+		{"Cmd.Err", Field, 19},
+		{"Cmd.ExtraFiles", Field, 0},
+		{"Cmd.Path", Field, 0},
+		{"Cmd.Process", Field, 0},
+		{"Cmd.ProcessState", Field, 0},
+		{"Cmd.Stderr", Field, 0},
+		{"Cmd.Stdin", Field, 0},
+		{"Cmd.Stdout", Field, 0},
+		{"Cmd.SysProcAttr", Field, 0},
+		{"Cmd.WaitDelay", Field, 20},
+		{"Command", Func, 0},
+		{"CommandContext", Func, 7},
+		{"ErrDot", Var, 19},
+		{"ErrNotFound", Var, 0},
+		{"ErrWaitDelay", Var, 20},
+		{"Error", Type, 0},
+		{"Error.Err", Field, 0},
+		{"Error.Name", Field, 0},
+		{"ExitError", Type, 0},
+		{"ExitError.ProcessState", Field, 0},
+		{"ExitError.Stderr", Field, 6},
+		{"LookPath", Func, 0},
+	},
+	"os/signal": {
+		{"Ignore", Func, 5},
+		{"Ignored", Func, 11},
+		{"Notify", Func, 0},
+		{"NotifyContext", Func, 16},
+		{"Reset", Func, 5},
+		{"Stop", Func, 1},
+	},
+	"os/user": {
+		{"(*User).GroupIds", Method, 7},
+		{"(UnknownGroupError).Error", Method, 7},
+		{"(UnknownGroupIdError).Error", Method, 7},
+		{"(UnknownUserError).Error", Method, 0},
+		{"(UnknownUserIdError).Error", Method, 0},
+		{"Current", Func, 0},
+		{"Group", Type, 7},
+		{"Group.Gid", Field, 7},
+		{"Group.Name", Field, 7},
+		{"Lookup", Func, 0},
+		{"LookupGroup", Func, 7},
+		{"LookupGroupId", Func, 7},
+		{"LookupId", Func, 0},
+		{"UnknownGroupError", Type, 7},
+		{"UnknownGroupIdError", Type, 7},
+		{"UnknownUserError", Type, 0},
+		{"UnknownUserIdError", Type, 0},
+		{"User", Type, 0},
+		{"User.Gid", Field, 0},
+		{"User.HomeDir", Field, 0},
+		{"User.Name", Field, 0},
+		{"User.Uid", Field, 0},
+		{"User.Username", Field, 0},
+	},
+	"path": {
+		{"Base", Func, 0},
+		{"Clean", Func, 0},
+		{"Dir", Func, 0},
+		{"ErrBadPattern", Var, 0},
+		{"Ext", Func, 0},
+		{"IsAbs", Func, 0},
+		{"Join", Func, 0},
+		{"Match", Func, 0},
+		{"Split", Func, 0},
+	},
+	"path/filepath": {
+		{"Abs", Func, 0},
+		{"Base", Func, 0},
+		{"Clean", Func, 0},
+		{"Dir", Func, 0},
+		{"ErrBadPattern", Var, 0},
+		{"EvalSymlinks", Func, 0},
+		{"Ext", Func, 0},
+		{"FromSlash", Func, 0},
+		{"Glob", Func, 0},
+		{"HasPrefix", Func, 0},
+		{"IsAbs", Func, 0},
+		{"IsLocal", Func, 20},
+		{"Join", Func, 0},
+		{"ListSeparator", Const, 0},
+		{"Match", Func, 0},
+		{"Rel", Func, 0},
+		{"Separator", Const, 0},
+		{"SkipAll", Var, 20},
+		{"SkipDir", Var, 0},
+		{"Split", Func, 0},
+		{"SplitList", Func, 0},
+		{"ToSlash", Func, 0},
+		{"VolumeName", Func, 0},
+		{"Walk", Func, 0},
+		{"WalkDir", Func, 16},
+		{"WalkFunc", Type, 0},
+	},
+	"plugin": {
+		{"(*Plugin).Lookup", Method, 8},
+		{"Open", Func, 8},
+		{"Plugin", Type, 8},
+		{"Symbol", Type, 8},
+	},
+	"reflect": {
+		{"(*MapIter).Key", Method, 12},
+		{"(*MapIter).Next", Method, 12},
+		{"(*MapIter).Reset", Method, 18},
+		{"(*MapIter).Value", Method, 12},
+		{"(*ValueError).Error", Method, 0},
+		{"(ChanDir).String", Method, 0},
+		{"(Kind).String", Method, 0},
+		{"(Method).IsExported", Method, 17},
+		{"(StructField).IsExported", Method, 17},
+		{"(StructTag).Get", Method, 0},
+		{"(StructTag).Lookup", Method, 7},
+		{"(Value).Addr", Method, 0},
+		{"(Value).Bool", Method, 0},
+		{"(Value).Bytes", Method, 0},
+		{"(Value).Call", Method, 0},
+		{"(Value).CallSlice", Method, 0},
+		{"(Value).CanAddr", Method, 0},
+		{"(Value).CanComplex", Method, 18},
+		{"(Value).CanConvert", Method, 17},
+		{"(Value).CanFloat", Method, 18},
+		{"(Value).CanInt", Method, 18},
+		{"(Value).CanInterface", Method, 0},
+		{"(Value).CanSet", Method, 0},
+		{"(Value).CanUint", Method, 18},
+		{"(Value).Cap", Method, 0},
+		{"(Value).Clear", Method, 21},
+		{"(Value).Close", Method, 0},
+		{"(Value).Comparable", Method, 20},
+		{"(Value).Complex", Method, 0},
+		{"(Value).Convert", Method, 1},
+		{"(Value).Elem", Method, 0},
+		{"(Value).Equal", Method, 20},
+		{"(Value).Field", Method, 0},
+		{"(Value).FieldByIndex", Method, 0},
+		{"(Value).FieldByIndexErr", Method, 18},
+		{"(Value).FieldByName", Method, 0},
+		{"(Value).FieldByNameFunc", Method, 0},
+		{"(Value).Float", Method, 0},
+		{"(Value).Grow", Method, 20},
+		{"(Value).Index", Method, 0},
+		{"(Value).Int", Method, 0},
+		{"(Value).Interface", Method, 0},
+		{"(Value).InterfaceData", Method, 0},
+		{"(Value).IsNil", Method, 0},
+		{"(Value).IsValid", Method, 0},
+		{"(Value).IsZero", Method, 13},
+		{"(Value).Kind", Method, 0},
+		{"(Value).Len", Method, 0},
+		{"(Value).MapIndex", Method, 0},
+		{"(Value).MapKeys", Method, 0},
+		{"(Value).MapRange", Method, 12},
+		{"(Value).Method", Method, 0},
+		{"(Value).MethodByName", Method, 0},
+		{"(Value).NumField", Method, 0},
+		{"(Value).NumMethod", Method, 0},
+		{"(Value).OverflowComplex", Method, 0},
+		{"(Value).OverflowFloat", Method, 0},
+		{"(Value).OverflowInt", Method, 0},
+		{"(Value).OverflowUint", Method, 0},
+		{"(Value).Pointer", Method, 0},
+		{"(Value).Recv", Method, 0},
+		{"(Value).Send", Method, 0},
+		{"(Value).Set", Method, 0},
+		{"(Value).SetBool", Method, 0},
+		{"(Value).SetBytes", Method, 0},
+		{"(Value).SetCap", Method, 2},
+		{"(Value).SetComplex", Method, 0},
+		{"(Value).SetFloat", Method, 0},
+		{"(Value).SetInt", Method, 0},
+		{"(Value).SetIterKey", Method, 18},
+		{"(Value).SetIterValue", Method, 18},
+		{"(Value).SetLen", Method, 0},
+		{"(Value).SetMapIndex", Method, 0},
+		{"(Value).SetPointer", Method, 0},
+		{"(Value).SetString", Method, 0},
+		{"(Value).SetUint", Method, 0},
+		{"(Value).SetZero", Method, 20},
+		{"(Value).Slice", Method, 0},
+		{"(Value).Slice3", Method, 2},
+		{"(Value).String", Method, 0},
+		{"(Value).TryRecv", Method, 0},
+		{"(Value).TrySend", Method, 0},
+		{"(Value).Type", Method, 0},
+		{"(Value).Uint", Method, 0},
+		{"(Value).UnsafeAddr", Method, 0},
+		{"(Value).UnsafePointer", Method, 18},
+		{"Append", Func, 0},
+		{"AppendSlice", Func, 0},
+		{"Array", Const, 0},
+		{"ArrayOf", Func, 5},
+		{"Bool", Const, 0},
+		{"BothDir", Const, 0},
+		{"Chan", Const, 0},
+		{"ChanDir", Type, 0},
+		{"ChanOf", Func, 1},
+		{"Complex128", Const, 0},
+		{"Complex64", Const, 0},
+		{"Copy", Func, 0},
+		{"DeepEqual", Func, 0},
+		{"Float32", Const, 0},
+		{"Float64", Const, 0},
+		{"Func", Const, 0},
+		{"FuncOf", Func, 5},
+		{"Indirect", Func, 0},
+		{"Int", Const, 0},
+		{"Int16", Const, 0},
+		{"Int32", Const, 0},
+		{"Int64", Const, 0},
+		{"Int8", Const, 0},
+		{"Interface", Const, 0},
+		{"Invalid", Const, 0},
+		{"Kind", Type, 0},
+		{"MakeChan", Func, 0},
+		{"MakeFunc", Func, 1},
+		{"MakeMap", Func, 0},
+		{"MakeMapWithSize", Func, 9},
+		{"MakeSlice", Func, 0},
+		{"Map", Const, 0},
+		{"MapIter", Type, 12},
+		{"MapOf", Func, 1},
+		{"Method", Type, 0},
+		{"Method.Func", Field, 0},
+		{"Method.Index", Field, 0},
+		{"Method.Name", Field, 0},
+		{"Method.PkgPath", Field, 0},
+		{"Method.Type", Field, 0},
+		{"New", Func, 0},
+		{"NewAt", Func, 0},
+		{"Pointer", Const, 18},
+		{"PointerTo", Func, 18},
+		{"Ptr", Const, 0},
+		{"PtrTo", Func, 0},
+		{"RecvDir", Const, 0},
+		{"Select", Func, 1},
+		{"SelectCase", Type, 1},
+		{"SelectCase.Chan", Field, 1},
+		{"SelectCase.Dir", Field, 1},
+		{"SelectCase.Send", Field, 1},
+		{"SelectDefault", Const, 1},
+		{"SelectDir", Type, 1},
+		{"SelectRecv", Const, 1},
+		{"SelectSend", Const, 1},
+		{"SendDir", Const, 0},
+		{"Slice", Const, 0},
+		{"SliceHeader", Type, 0},
+		{"SliceHeader.Cap", Field, 0},
+		{"SliceHeader.Data", Field, 0},
+		{"SliceHeader.Len", Field, 0},
+		{"SliceOf", Func, 1},
+		{"String", Const, 0},
+		{"StringHeader", Type, 0},
+		{"StringHeader.Data", Field, 0},
+		{"StringHeader.Len", Field, 0},
+		{"Struct", Const, 0},
+		{"StructField", Type, 0},
+		{"StructField.Anonymous", Field, 0},
+		{"StructField.Index", Field, 0},
+		{"StructField.Name", Field, 0},
+		{"StructField.Offset", Field, 0},
+		{"StructField.PkgPath", Field, 0},
+		{"StructField.Tag", Field, 0},
+		{"StructField.Type", Field, 0},
+		{"StructOf", Func, 7},
+		{"StructTag", Type, 0},
+		{"Swapper", Func, 8},
+		{"Type", Type, 0},
+		{"TypeFor", Func, 22},
+		{"TypeOf", Func, 0},
+		{"Uint", Const, 0},
+		{"Uint16", Const, 0},
+		{"Uint32", Const, 0},
+		{"Uint64", Const, 0},
+		{"Uint8", Const, 0},
+		{"Uintptr", Const, 0},
+		{"UnsafePointer", Const, 0},
+		{"Value", Type, 0},
+		{"ValueError", Type, 0},
+		{"ValueError.Kind", Field, 0},
+		{"ValueError.Method", Field, 0},
+		{"ValueOf", Func, 0},
+		{"VisibleFields", Func, 17},
+		{"Zero", Func, 0},
+	},
+	"regexp": {
+		{"(*Regexp).Copy", Method, 6},
+		{"(*Regexp).Expand", Method, 0},
+		{"(*Regexp).ExpandString", Method, 0},
+		{"(*Regexp).Find", Method, 0},
+		{"(*Regexp).FindAll", Method, 0},
+		{"(*Regexp).FindAllIndex", Method, 0},
+		{"(*Regexp).FindAllString", Method, 0},
+		{"(*Regexp).FindAllStringIndex", Method, 0},
+		{"(*Regexp).FindAllStringSubmatch", Method, 0},
+		{"(*Regexp).FindAllStringSubmatchIndex", Method, 0},
+		{"(*Regexp).FindAllSubmatch", Method, 0},
+		{"(*Regexp).FindAllSubmatchIndex", Method, 0},
+		{"(*Regexp).FindIndex", Method, 0},
+		{"(*Regexp).FindReaderIndex", Method, 0},
+		{"(*Regexp).FindReaderSubmatchIndex", Method, 0},
+		{"(*Regexp).FindString", Method, 0},
+		{"(*Regexp).FindStringIndex", Method, 0},
+		{"(*Regexp).FindStringSubmatch", Method, 0},
+		{"(*Regexp).FindStringSubmatchIndex", Method, 0},
+		{"(*Regexp).FindSubmatch", Method, 0},
+		{"(*Regexp).FindSubmatchIndex", Method, 0},
+		{"(*Regexp).LiteralPrefix", Method, 0},
+		{"(*Regexp).Longest", Method, 1},
+		{"(*Regexp).MarshalText", Method, 21},
+		{"(*Regexp).Match", Method, 0},
+		{"(*Regexp).MatchReader", Method, 0},
+		{"(*Regexp).MatchString", Method, 0},
+		{"(*Regexp).NumSubexp", Method, 0},
+		{"(*Regexp).ReplaceAll", Method, 0},
+		{"(*Regexp).ReplaceAllFunc", Method, 0},
+		{"(*Regexp).ReplaceAllLiteral", Method, 0},
+		{"(*Regexp).ReplaceAllLiteralString", Method, 0},
+		{"(*Regexp).ReplaceAllString", Method, 0},
+		{"(*Regexp).ReplaceAllStringFunc", Method, 0},
+		{"(*Regexp).Split", Method, 1},
+		{"(*Regexp).String", Method, 0},
+		{"(*Regexp).SubexpIndex", Method, 15},
+		{"(*Regexp).SubexpNames", Method, 0},
+		{"(*Regexp).UnmarshalText", Method, 21},
+		{"Compile", Func, 0},
+		{"CompilePOSIX", Func, 0},
+		{"Match", Func, 0},
+		{"MatchReader", Func, 0},
+		{"MatchString", Func, 0},
+		{"MustCompile", Func, 0},
+		{"MustCompilePOSIX", Func, 0},
+		{"QuoteMeta", Func, 0},
+		{"Regexp", Type, 0},
+	},
+	"regexp/syntax": {
+		{"(*Error).Error", Method, 0},
+		{"(*Inst).MatchEmptyWidth", Method, 0},
+		{"(*Inst).MatchRune", Method, 0},
+		{"(*Inst).MatchRunePos", Method, 3},
+		{"(*Inst).String", Method, 0},
+		{"(*Prog).Prefix", Method, 0},
+		{"(*Prog).StartCond", Method, 0},
+		{"(*Prog).String", Method, 0},
+		{"(*Regexp).CapNames", Method, 0},
+		{"(*Regexp).Equal", Method, 0},
+		{"(*Regexp).MaxCap", Method, 0},
+		{"(*Regexp).Simplify", Method, 0},
+		{"(*Regexp).String", Method, 0},
+		{"(ErrorCode).String", Method, 0},
+		{"(InstOp).String", Method, 3},
+		{"(Op).String", Method, 11},
+		{"ClassNL", Const, 0},
+		{"Compile", Func, 0},
+		{"DotNL", Const, 0},
+		{"EmptyBeginLine", Const, 0},
+		{"EmptyBeginText", Const, 0},
+		{"EmptyEndLine", Const, 0},
+		{"EmptyEndText", Const, 0},
+		{"EmptyNoWordBoundary", Const, 0},
+		{"EmptyOp", Type, 0},
+		{"EmptyOpContext", Func, 0},
+		{"EmptyWordBoundary", Const, 0},
+		{"ErrInternalError", Const, 0},
+		{"ErrInvalidCharClass", Const, 0},
+		{"ErrInvalidCharRange", Const, 0},
+		{"ErrInvalidEscape", Const, 0},
+		{"ErrInvalidNamedCapture", Const, 0},
+		{"ErrInvalidPerlOp", Const, 0},
+		{"ErrInvalidRepeatOp", Const, 0},
+		{"ErrInvalidRepeatSize", Const, 0},
+		{"ErrInvalidUTF8", Const, 0},
+		{"ErrLarge", Const, 20},
+		{"ErrMissingBracket", Const, 0},
+		{"ErrMissingParen", Const, 0},
+		{"ErrMissingRepeatArgument", Const, 0},
+		{"ErrNestingDepth", Const, 19},
+		{"ErrTrailingBackslash", Const, 0},
+		{"ErrUnexpectedParen", Const, 1},
+		{"Error", Type, 0},
+		{"Error.Code", Field, 0},
+		{"Error.Expr", Field, 0},
+		{"ErrorCode", Type, 0},
+		{"Flags", Type, 0},
+		{"FoldCase", Const, 0},
+		{"Inst", Type, 0},
+		{"Inst.Arg", Field, 0},
+		{"Inst.Op", Field, 0},
+		{"Inst.Out", Field, 0},
+		{"Inst.Rune", Field, 0},
+		{"InstAlt", Const, 0},
+		{"InstAltMatch", Const, 0},
+		{"InstCapture", Const, 0},
+		{"InstEmptyWidth", Const, 0},
+		{"InstFail", Const, 0},
+		{"InstMatch", Const, 0},
+		{"InstNop", Const, 0},
+		{"InstOp", Type, 0},
+		{"InstRune", Const, 0},
+		{"InstRune1", Const, 0},
+		{"InstRuneAny", Const, 0},
+		{"InstRuneAnyNotNL", Const, 0},
+		{"IsWordChar", Func, 0},
+		{"Literal", Const, 0},
+		{"MatchNL", Const, 0},
+		{"NonGreedy", Const, 0},
+		{"OneLine", Const, 0},
+		{"Op", Type, 0},
+		{"OpAlternate", Const, 0},
+		{"OpAnyChar", Const, 0},
+		{"OpAnyCharNotNL", Const, 0},
+		{"OpBeginLine", Const, 0},
+		{"OpBeginText", Const, 0},
+		{"OpCapture", Const, 0},
+		{"OpCharClass", Const, 0},
+		{"OpConcat", Const, 0},
+		{"OpEmptyMatch", Const, 0},
+		{"OpEndLine", Const, 0},
+		{"OpEndText", Const, 0},
+		{"OpLiteral", Const, 0},
+		{"OpNoMatch", Const, 0},
+		{"OpNoWordBoundary", Const, 0},
+		{"OpPlus", Const, 0},
+		{"OpQuest", Const, 0},
+		{"OpRepeat", Const, 0},
+		{"OpStar", Const, 0},
+		{"OpWordBoundary", Const, 0},
+		{"POSIX", Const, 0},
+		{"Parse", Func, 0},
+		{"Perl", Const, 0},
+		{"PerlX", Const, 0},
+		{"Prog", Type, 0},
+		{"Prog.Inst", Field, 0},
+		{"Prog.NumCap", Field, 0},
+		{"Prog.Start", Field, 0},
+		{"Regexp", Type, 0},
+		{"Regexp.Cap", Field, 0},
+		{"Regexp.Flags", Field, 0},
+		{"Regexp.Max", Field, 0},
+		{"Regexp.Min", Field, 0},
+		{"Regexp.Name", Field, 0},
+		{"Regexp.Op", Field, 0},
+		{"Regexp.Rune", Field, 0},
+		{"Regexp.Rune0", Field, 0},
+		{"Regexp.Sub", Field, 0},
+		{"Regexp.Sub0", Field, 0},
+		{"Simple", Const, 0},
+		{"UnicodeGroups", Const, 0},
+		{"WasDollar", Const, 0},
+	},
+	"runtime": {
+		{"(*BlockProfileRecord).Stack", Method, 1},
+		{"(*Frames).Next", Method, 7},
+		{"(*Func).Entry", Method, 0},
+		{"(*Func).FileLine", Method, 0},
+		{"(*Func).Name", Method, 0},
+		{"(*MemProfileRecord).InUseBytes", Method, 0},
+		{"(*MemProfileRecord).InUseObjects", Method, 0},
+		{"(*MemProfileRecord).Stack", Method, 0},
+		{"(*PanicNilError).Error", Method, 21},
+		{"(*PanicNilError).RuntimeError", Method, 21},
+		{"(*Pinner).Pin", Method, 21},
+		{"(*Pinner).Unpin", Method, 21},
+		{"(*StackRecord).Stack", Method, 0},
+		{"(*TypeAssertionError).Error", Method, 0},
+		{"(*TypeAssertionError).RuntimeError", Method, 0},
+		{"BlockProfile", Func, 1},
+		{"BlockProfileRecord", Type, 1},
+		{"BlockProfileRecord.Count", Field, 1},
+		{"BlockProfileRecord.Cycles", Field, 1},
+		{"BlockProfileRecord.StackRecord", Field, 1},
+		{"Breakpoint", Func, 0},
+		{"CPUProfile", Func, 0},
+		{"Caller", Func, 0},
+		{"Callers", Func, 0},
+		{"CallersFrames", Func, 7},
+		{"Compiler", Const, 0},
+		{"Error", Type, 0},
+		{"Frame", Type, 7},
+		{"Frame.Entry", Field, 7},
+		{"Frame.File", Field, 7},
+		{"Frame.Func", Field, 7},
+		{"Frame.Function", Field, 7},
+		{"Frame.Line", Field, 7},
+		{"Frame.PC", Field, 7},
+		{"Frames", Type, 7},
+		{"Func", Type, 0},
+		{"FuncForPC", Func, 0},
+		{"GC", Func, 0},
+		{"GOARCH", Const, 0},
+		{"GOMAXPROCS", Func, 0},
+		{"GOOS", Const, 0},
+		{"GOROOT", Func, 0},
+		{"Goexit", Func, 0},
+		{"GoroutineProfile", Func, 0},
+		{"Gosched", Func, 0},
+		{"KeepAlive", Func, 7},
+		{"LockOSThread", Func, 0},
+		{"MemProfile", Func, 0},
+		{"MemProfileRate", Var, 0},
+		{"MemProfileRecord", Type, 0},
+		{"MemProfileRecord.AllocBytes", Field, 0},
+		{"MemProfileRecord.AllocObjects", Field, 0},
+		{"MemProfileRecord.FreeBytes", Field, 0},
+		{"MemProfileRecord.FreeObjects", Field, 0},
+		{"MemProfileRecord.Stack0", Field, 0},
+		{"MemStats", Type, 0},
+		{"MemStats.Alloc", Field, 0},
+		{"MemStats.BuckHashSys", Field, 0},
+		{"MemStats.BySize", Field, 0},
+		{"MemStats.DebugGC", Field, 0},
+		{"MemStats.EnableGC", Field, 0},
+		{"MemStats.Frees", Field, 0},
+		{"MemStats.GCCPUFraction", Field, 5},
+		{"MemStats.GCSys", Field, 2},
+		{"MemStats.HeapAlloc", Field, 0},
+		{"MemStats.HeapIdle", Field, 0},
+		{"MemStats.HeapInuse", Field, 0},
+		{"MemStats.HeapObjects", Field, 0},
+		{"MemStats.HeapReleased", Field, 0},
+		{"MemStats.HeapSys", Field, 0},
+		{"MemStats.LastGC", Field, 0},
+		{"MemStats.Lookups", Field, 0},
+		{"MemStats.MCacheInuse", Field, 0},
+		{"MemStats.MCacheSys", Field, 0},
+		{"MemStats.MSpanInuse", Field, 0},
+		{"MemStats.MSpanSys", Field, 0},
+		{"MemStats.Mallocs", Field, 0},
+		{"MemStats.NextGC", Field, 0},
+		{"MemStats.NumForcedGC", Field, 8},
+		{"MemStats.NumGC", Field, 0},
+		{"MemStats.OtherSys", Field, 2},
+		{"MemStats.PauseEnd", Field, 4},
+		{"MemStats.PauseNs", Field, 0},
+		{"MemStats.PauseTotalNs", Field, 0},
+		{"MemStats.StackInuse", Field, 0},
+		{"MemStats.StackSys", Field, 0},
+		{"MemStats.Sys", Field, 0},
+		{"MemStats.TotalAlloc", Field, 0},
+		{"MutexProfile", Func, 8},
+		{"NumCPU", Func, 0},
+		{"NumCgoCall", Func, 0},
+		{"NumGoroutine", Func, 0},
+		{"PanicNilError", Type, 21},
+		{"Pinner", Type, 21},
+		{"ReadMemStats", Func, 0},
+		{"ReadTrace", Func, 5},
+		{"SetBlockProfileRate", Func, 1},
+		{"SetCPUProfileRate", Func, 0},
+		{"SetCgoTraceback", Func, 7},
+		{"SetFinalizer", Func, 0},
+		{"SetMutexProfileFraction", Func, 8},
+		{"Stack", Func, 0},
+		{"StackRecord", Type, 0},
+		{"StackRecord.Stack0", Field, 0},
+		{"StartTrace", Func, 5},
+		{"StopTrace", Func, 5},
+		{"ThreadCreateProfile", Func, 0},
+		{"TypeAssertionError", Type, 0},
+		{"UnlockOSThread", Func, 0},
+		{"Version", Func, 0},
+	},
+	"runtime/cgo": {
+		{"(Handle).Delete", Method, 17},
+		{"(Handle).Value", Method, 17},
+		{"Handle", Type, 17},
+		{"Incomplete", Type, 20},
+		{"NewHandle", Func, 17},
+	},
+	"runtime/coverage": {
+		{"ClearCounters", Func, 20},
+		{"WriteCounters", Func, 20},
+		{"WriteCountersDir", Func, 20},
+		{"WriteMeta", Func, 20},
+		{"WriteMetaDir", Func, 20},
+	},
+	"runtime/debug": {
+		{"(*BuildInfo).String", Method, 18},
+		{"BuildInfo", Type, 12},
+		{"BuildInfo.Deps", Field, 12},
+		{"BuildInfo.GoVersion", Field, 18},
+		{"BuildInfo.Main", Field, 12},
+		{"BuildInfo.Path", Field, 12},
+		{"BuildInfo.Settings", Field, 18},
+		{"BuildSetting", Type, 18},
+		{"BuildSetting.Key", Field, 18},
+		{"BuildSetting.Value", Field, 18},
+		{"FreeOSMemory", Func, 1},
+		{"GCStats", Type, 1},
+		{"GCStats.LastGC", Field, 1},
+		{"GCStats.NumGC", Field, 1},
+		{"GCStats.Pause", Field, 1},
+		{"GCStats.PauseEnd", Field, 4},
+		{"GCStats.PauseQuantiles", Field, 1},
+		{"GCStats.PauseTotal", Field, 1},
+		{"Module", Type, 12},
+		{"Module.Path", Field, 12},
+		{"Module.Replace", Field, 12},
+		{"Module.Sum", Field, 12},
+		{"Module.Version", Field, 12},
+		{"ParseBuildInfo", Func, 18},
+		{"PrintStack", Func, 0},
+		{"ReadBuildInfo", Func, 12},
+		{"ReadGCStats", Func, 1},
+		{"SetGCPercent", Func, 1},
+		{"SetMaxStack", Func, 2},
+		{"SetMaxThreads", Func, 2},
+		{"SetMemoryLimit", Func, 19},
+		{"SetPanicOnFault", Func, 3},
+		{"SetTraceback", Func, 6},
+		{"Stack", Func, 0},
+		{"WriteHeapDump", Func, 3},
+	},
+	"runtime/metrics": {
+		{"(Value).Float64", Method, 16},
+		{"(Value).Float64Histogram", Method, 16},
+		{"(Value).Kind", Method, 16},
+		{"(Value).Uint64", Method, 16},
+		{"All", Func, 16},
+		{"Description", Type, 16},
+		{"Description.Cumulative", Field, 16},
+		{"Description.Description", Field, 16},
+		{"Description.Kind", Field, 16},
+		{"Description.Name", Field, 16},
+		{"Float64Histogram", Type, 16},
+		{"Float64Histogram.Buckets", Field, 16},
+		{"Float64Histogram.Counts", Field, 16},
+		{"KindBad", Const, 16},
+		{"KindFloat64", Const, 16},
+		{"KindFloat64Histogram", Const, 16},
+		{"KindUint64", Const, 16},
+		{"Read", Func, 16},
+		{"Sample", Type, 16},
+		{"Sample.Name", Field, 16},
+		{"Sample.Value", Field, 16},
+		{"Value", Type, 16},
+		{"ValueKind", Type, 16},
+	},
+	"runtime/pprof": {
+		{"(*Profile).Add", Method, 0},
+		{"(*Profile).Count", Method, 0},
+		{"(*Profile).Name", Method, 0},
+		{"(*Profile).Remove", Method, 0},
+		{"(*Profile).WriteTo", Method, 0},
+		{"Do", Func, 9},
+		{"ForLabels", Func, 9},
+		{"Label", Func, 9},
+		{"LabelSet", Type, 9},
+		{"Labels", Func, 9},
+		{"Lookup", Func, 0},
+		{"NewProfile", Func, 0},
+		{"Profile", Type, 0},
+		{"Profiles", Func, 0},
+		{"SetGoroutineLabels", Func, 9},
+		{"StartCPUProfile", Func, 0},
+		{"StopCPUProfile", Func, 0},
+		{"WithLabels", Func, 9},
+		{"WriteHeapProfile", Func, 0},
+	},
+	"runtime/trace": {
+		{"(*Region).End", Method, 11},
+		{"(*Task).End", Method, 11},
+		{"IsEnabled", Func, 11},
+		{"Log", Func, 11},
+		{"Logf", Func, 11},
+		{"NewTask", Func, 11},
+		{"Region", Type, 11},
+		{"Start", Func, 5},
+		{"StartRegion", Func, 11},
+		{"Stop", Func, 5},
+		{"Task", Type, 11},
+		{"WithRegion", Func, 11},
+	},
+	"slices": {
+		{"BinarySearch", Func, 21},
+		{"BinarySearchFunc", Func, 21},
+		{"Clip", Func, 21},
+		{"Clone", Func, 21},
+		{"Compact", Func, 21},
+		{"CompactFunc", Func, 21},
+		{"Compare", Func, 21},
+		{"CompareFunc", Func, 21},
+		{"Concat", Func, 22},
+		{"Contains", Func, 21},
+		{"ContainsFunc", Func, 21},
+		{"Delete", Func, 21},
+		{"DeleteFunc", Func, 21},
+		{"Equal", Func, 21},
+		{"EqualFunc", Func, 21},
+		{"Grow", Func, 21},
+		{"Index", Func, 21},
+		{"IndexFunc", Func, 21},
+		{"Insert", Func, 21},
+		{"IsSorted", Func, 21},
+		{"IsSortedFunc", Func, 21},
+		{"Max", Func, 21},
+		{"MaxFunc", Func, 21},
+		{"Min", Func, 21},
+		{"MinFunc", Func, 21},
+		{"Replace", Func, 21},
+		{"Reverse", Func, 21},
+		{"Sort", Func, 21},
+		{"SortFunc", Func, 21},
+		{"SortStableFunc", Func, 21},
+	},
+	"sort": {
+		{"(Float64Slice).Len", Method, 0},
+		{"(Float64Slice).Less", Method, 0},
+		{"(Float64Slice).Search", Method, 0},
+		{"(Float64Slice).Sort", Method, 0},
+		{"(Float64Slice).Swap", Method, 0},
+		{"(IntSlice).Len", Method, 0},
+		{"(IntSlice).Less", Method, 0},
+		{"(IntSlice).Search", Method, 0},
+		{"(IntSlice).Sort", Method, 0},
+		{"(IntSlice).Swap", Method, 0},
+		{"(StringSlice).Len", Method, 0},
+		{"(StringSlice).Less", Method, 0},
+		{"(StringSlice).Search", Method, 0},
+		{"(StringSlice).Sort", Method, 0},
+		{"(StringSlice).Swap", Method, 0},
+		{"Find", Func, 19},
+		{"Float64Slice", Type, 0},
+		{"Float64s", Func, 0},
+		{"Float64sAreSorted", Func, 0},
+		{"IntSlice", Type, 0},
+		{"Interface", Type, 0},
+		{"Ints", Func, 0},
+		{"IntsAreSorted", Func, 0},
+		{"IsSorted", Func, 0},
+		{"Reverse", Func, 1},
+		{"Search", Func, 0},
+		{"SearchFloat64s", Func, 0},
+		{"SearchInts", Func, 0},
+		{"SearchStrings", Func, 0},
+		{"Slice", Func, 8},
+		{"SliceIsSorted", Func, 8},
+		{"SliceStable", Func, 8},
+		{"Sort", Func, 0},
+		{"Stable", Func, 2},
+		{"StringSlice", Type, 0},
+		{"Strings", Func, 0},
+		{"StringsAreSorted", Func, 0},
+	},
+	"strconv": {
+		{"(*NumError).Error", Method, 0},
+		{"(*NumError).Unwrap", Method, 14},
+		{"AppendBool", Func, 0},
+		{"AppendFloat", Func, 0},
+		{"AppendInt", Func, 0},
+		{"AppendQuote", Func, 0},
+		{"AppendQuoteRune", Func, 0},
+		{"AppendQuoteRuneToASCII", Func, 0},
+		{"AppendQuoteRuneToGraphic", Func, 6},
+		{"AppendQuoteToASCII", Func, 0},
+		{"AppendQuoteToGraphic", Func, 6},
+		{"AppendUint", Func, 0},
+		{"Atoi", Func, 0},
+		{"CanBackquote", Func, 0},
+		{"ErrRange", Var, 0},
+		{"ErrSyntax", Var, 0},
+		{"FormatBool", Func, 0},
+		{"FormatComplex", Func, 15},
+		{"FormatFloat", Func, 0},
+		{"FormatInt", Func, 0},
+		{"FormatUint", Func, 0},
+		{"IntSize", Const, 0},
+		{"IsGraphic", Func, 6},
+		{"IsPrint", Func, 0},
+		{"Itoa", Func, 0},
+		{"NumError", Type, 0},
+		{"NumError.Err", Field, 0},
+		{"NumError.Func", Field, 0},
+		{"NumError.Num", Field, 0},
+		{"ParseBool", Func, 0},
+		{"ParseComplex", Func, 15},
+		{"ParseFloat", Func, 0},
+		{"ParseInt", Func, 0},
+		{"ParseUint", Func, 0},
+		{"Quote", Func, 0},
+		{"QuoteRune", Func, 0},
+		{"QuoteRuneToASCII", Func, 0},
+		{"QuoteRuneToGraphic", Func, 6},
+		{"QuoteToASCII", Func, 0},
+		{"QuoteToGraphic", Func, 6},
+		{"QuotedPrefix", Func, 17},
+		{"Unquote", Func, 0},
+		{"UnquoteChar", Func, 0},
+	},
+	"strings": {
+		{"(*Builder).Cap", Method, 12},
+		{"(*Builder).Grow", Method, 10},
+		{"(*Builder).Len", Method, 10},
+		{"(*Builder).Reset", Method, 10},
+		{"(*Builder).String", Method, 10},
+		{"(*Builder).Write", Method, 10},
+		{"(*Builder).WriteByte", Method, 10},
+		{"(*Builder).WriteRune", Method, 10},
+		{"(*Builder).WriteString", Method, 10},
+		{"(*Reader).Len", Method, 0},
+		{"(*Reader).Read", Method, 0},
+		{"(*Reader).ReadAt", Method, 0},
+		{"(*Reader).ReadByte", Method, 0},
+		{"(*Reader).ReadRune", Method, 0},
+		{"(*Reader).Reset", Method, 7},
+		{"(*Reader).Seek", Method, 0},
+		{"(*Reader).Size", Method, 5},
+		{"(*Reader).UnreadByte", Method, 0},
+		{"(*Reader).UnreadRune", Method, 0},
+		{"(*Reader).WriteTo", Method, 1},
+		{"(*Replacer).Replace", Method, 0},
+		{"(*Replacer).WriteString", Method, 0},
+		{"Builder", Type, 10},
+		{"Clone", Func, 18},
+		{"Compare", Func, 5},
+		{"Contains", Func, 0},
+		{"ContainsAny", Func, 0},
+		{"ContainsFunc", Func, 21},
+		{"ContainsRune", Func, 0},
+		{"Count", Func, 0},
+		{"Cut", Func, 18},
+		{"CutPrefix", Func, 20},
+		{"CutSuffix", Func, 20},
+		{"EqualFold", Func, 0},
+		{"Fields", Func, 0},
+		{"FieldsFunc", Func, 0},
+		{"HasPrefix", Func, 0},
+		{"HasSuffix", Func, 0},
+		{"Index", Func, 0},
+		{"IndexAny", Func, 0},
+		{"IndexByte", Func, 2},
+		{"IndexFunc", Func, 0},
+		{"IndexRune", Func, 0},
+		{"Join", Func, 0},
+		{"LastIndex", Func, 0},
+		{"LastIndexAny", Func, 0},
+		{"LastIndexByte", Func, 5},
+		{"LastIndexFunc", Func, 0},
+		{"Map", Func, 0},
+		{"NewReader", Func, 0},
+		{"NewReplacer", Func, 0},
+		{"Reader", Type, 0},
+		{"Repeat", Func, 0},
+		{"Replace", Func, 0},
+		{"ReplaceAll", Func, 12},
+		{"Replacer", Type, 0},
+		{"Split", Func, 0},
+		{"SplitAfter", Func, 0},
+		{"SplitAfterN", Func, 0},
+		{"SplitN", Func, 0},
+		{"Title", Func, 0},
+		{"ToLower", Func, 0},
+		{"ToLowerSpecial", Func, 0},
+		{"ToTitle", Func, 0},
+		{"ToTitleSpecial", Func, 0},
+		{"ToUpper", Func, 0},
+		{"ToUpperSpecial", Func, 0},
+		{"ToValidUTF8", Func, 13},
+		{"Trim", Func, 0},
+		{"TrimFunc", Func, 0},
+		{"TrimLeft", Func, 0},
+		{"TrimLeftFunc", Func, 0},
+		{"TrimPrefix", Func, 1},
+		{"TrimRight", Func, 0},
+		{"TrimRightFunc", Func, 0},
+		{"TrimSpace", Func, 0},
+		{"TrimSuffix", Func, 1},
+	},
+	"sync": {
+		{"(*Cond).Broadcast", Method, 0},
+		{"(*Cond).Signal", Method, 0},
+		{"(*Cond).Wait", Method, 0},
+		{"(*Map).CompareAndDelete", Method, 20},
+		{"(*Map).CompareAndSwap", Method, 20},
+		{"(*Map).Delete", Method, 9},
+		{"(*Map).Load", Method, 9},
+		{"(*Map).LoadAndDelete", Method, 15},
+		{"(*Map).LoadOrStore", Method, 9},
+		{"(*Map).Range", Method, 9},
+		{"(*Map).Store", Method, 9},
+		{"(*Map).Swap", Method, 20},
+		{"(*Mutex).Lock", Method, 0},
+		{"(*Mutex).TryLock", Method, 18},
+		{"(*Mutex).Unlock", Method, 0},
+		{"(*Once).Do", Method, 0},
+		{"(*Pool).Get", Method, 3},
+		{"(*Pool).Put", Method, 3},
+		{"(*RWMutex).Lock", Method, 0},
+		{"(*RWMutex).RLock", Method, 0},
+		{"(*RWMutex).RLocker", Method, 0},
+		{"(*RWMutex).RUnlock", Method, 0},
+		{"(*RWMutex).TryLock", Method, 18},
+		{"(*RWMutex).TryRLock", Method, 18},
+		{"(*RWMutex).Unlock", Method, 0},
+		{"(*WaitGroup).Add", Method, 0},
+		{"(*WaitGroup).Done", Method, 0},
+		{"(*WaitGroup).Wait", Method, 0},
+		{"Cond", Type, 0},
+		{"Cond.L", Field, 0},
+		{"Locker", Type, 0},
+		{"Map", Type, 9},
+		{"Mutex", Type, 0},
+		{"NewCond", Func, 0},
+		{"Once", Type, 0},
+		{"OnceFunc", Func, 21},
+		{"OnceValue", Func, 21},
+		{"OnceValues", Func, 21},
+		{"Pool", Type, 3},
+		{"Pool.New", Field, 3},
+		{"RWMutex", Type, 0},
+		{"WaitGroup", Type, 0},
+	},
+	"sync/atomic": {
+		{"(*Bool).CompareAndSwap", Method, 19},
+		{"(*Bool).Load", Method, 19},
+		{"(*Bool).Store", Method, 19},
+		{"(*Bool).Swap", Method, 19},
+		{"(*Int32).Add", Method, 19},
+		{"(*Int32).CompareAndSwap", Method, 19},
+		{"(*Int32).Load", Method, 19},
+		{"(*Int32).Store", Method, 19},
+		{"(*Int32).Swap", Method, 19},
+		{"(*Int64).Add", Method, 19},
+		{"(*Int64).CompareAndSwap", Method, 19},
+		{"(*Int64).Load", Method, 19},
+		{"(*Int64).Store", Method, 19},
+		{"(*Int64).Swap", Method, 19},
+		{"(*Pointer).CompareAndSwap", Method, 19},
+		{"(*Pointer).Load", Method, 19},
+		{"(*Pointer).Store", Method, 19},
+		{"(*Pointer).Swap", Method, 19},
+		{"(*Uint32).Add", Method, 19},
+		{"(*Uint32).CompareAndSwap", Method, 19},
+		{"(*Uint32).Load", Method, 19},
+		{"(*Uint32).Store", Method, 19},
+		{"(*Uint32).Swap", Method, 19},
+		{"(*Uint64).Add", Method, 19},
+		{"(*Uint64).CompareAndSwap", Method, 19},
+		{"(*Uint64).Load", Method, 19},
+		{"(*Uint64).Store", Method, 19},
+		{"(*Uint64).Swap", Method, 19},
+		{"(*Uintptr).Add", Method, 19},
+		{"(*Uintptr).CompareAndSwap", Method, 19},
+		{"(*Uintptr).Load", Method, 19},
+		{"(*Uintptr).Store", Method, 19},
+		{"(*Uintptr).Swap", Method, 19},
+		{"(*Value).CompareAndSwap", Method, 17},
+		{"(*Value).Load", Method, 4},
+		{"(*Value).Store", Method, 4},
+		{"(*Value).Swap", Method, 17},
+		{"AddInt32", Func, 0},
+		{"AddInt64", Func, 0},
+		{"AddUint32", Func, 0},
+		{"AddUint64", Func, 0},
+		{"AddUintptr", Func, 0},
+		{"Bool", Type, 19},
+		{"CompareAndSwapInt32", Func, 0},
+		{"CompareAndSwapInt64", Func, 0},
+		{"CompareAndSwapPointer", Func, 0},
+		{"CompareAndSwapUint32", Func, 0},
+		{"CompareAndSwapUint64", Func, 0},
+		{"CompareAndSwapUintptr", Func, 0},
+		{"Int32", Type, 19},
+		{"Int64", Type, 19},
+		{"LoadInt32", Func, 0},
+		{"LoadInt64", Func, 0},
+		{"LoadPointer", Func, 0},
+		{"LoadUint32", Func, 0},
+		{"LoadUint64", Func, 0},
+		{"LoadUintptr", Func, 0},
+		{"Pointer", Type, 19},
+		{"StoreInt32", Func, 0},
+		{"StoreInt64", Func, 0},
+		{"StorePointer", Func, 0},
+		{"StoreUint32", Func, 0},
+		{"StoreUint64", Func, 0},
+		{"StoreUintptr", Func, 0},
+		{"SwapInt32", Func, 2},
+		{"SwapInt64", Func, 2},
+		{"SwapPointer", Func, 2},
+		{"SwapUint32", Func, 2},
+		{"SwapUint64", Func, 2},
+		{"SwapUintptr", Func, 2},
+		{"Uint32", Type, 19},
+		{"Uint64", Type, 19},
+		{"Uintptr", Type, 19},
+		{"Value", Type, 4},
+	},
+	"syscall": {
+		{"(*Cmsghdr).SetLen", Method, 0},
+		{"(*DLL).FindProc", Method, 0},
+		{"(*DLL).MustFindProc", Method, 0},
+		{"(*DLL).Release", Method, 0},
+		{"(*DLLError).Error", Method, 0},
+		{"(*DLLError).Unwrap", Method, 16},
+		{"(*Filetime).Nanoseconds", Method, 0},
+		{"(*Iovec).SetLen", Method, 0},
+		{"(*LazyDLL).Handle", Method, 0},
+		{"(*LazyDLL).Load", Method, 0},
+		{"(*LazyDLL).NewProc", Method, 0},
+		{"(*LazyProc).Addr", Method, 0},
+		{"(*LazyProc).Call", Method, 0},
+		{"(*LazyProc).Find", Method, 0},
+		{"(*Msghdr).SetControllen", Method, 0},
+		{"(*Proc).Addr", Method, 0},
+		{"(*Proc).Call", Method, 0},
+		{"(*PtraceRegs).PC", Method, 0},
+		{"(*PtraceRegs).SetPC", Method, 0},
+		{"(*RawSockaddrAny).Sockaddr", Method, 0},
+		{"(*SID).Copy", Method, 0},
+		{"(*SID).Len", Method, 0},
+		{"(*SID).LookupAccount", Method, 0},
+		{"(*SID).String", Method, 0},
+		{"(*Timespec).Nano", Method, 0},
+		{"(*Timespec).Unix", Method, 0},
+		{"(*Timeval).Nano", Method, 0},
+		{"(*Timeval).Nanoseconds", Method, 0},
+		{"(*Timeval).Unix", Method, 0},
+		{"(Errno).Error", Method, 0},
+		{"(Errno).Is", Method, 13},
+		{"(Errno).Temporary", Method, 0},
+		{"(Errno).Timeout", Method, 0},
+		{"(Signal).Signal", Method, 0},
+		{"(Signal).String", Method, 0},
+		{"(Token).Close", Method, 0},
+		{"(Token).GetTokenPrimaryGroup", Method, 0},
+		{"(Token).GetTokenUser", Method, 0},
+		{"(Token).GetUserProfileDirectory", Method, 0},
+		{"(WaitStatus).Continued", Method, 0},
+		{"(WaitStatus).CoreDump", Method, 0},
+		{"(WaitStatus).ExitStatus", Method, 0},
+		{"(WaitStatus).Exited", Method, 0},
+		{"(WaitStatus).Signal", Method, 0},
+		{"(WaitStatus).Signaled", Method, 0},
+		{"(WaitStatus).StopSignal", Method, 0},
+		{"(WaitStatus).Stopped", Method, 0},
+		{"(WaitStatus).TrapCause", Method, 0},
+		{"AF_ALG", Const, 0},
+		{"AF_APPLETALK", Const, 0},
+		{"AF_ARP", Const, 0},
+		{"AF_ASH", Const, 0},
+		{"AF_ATM", Const, 0},
+		{"AF_ATMPVC", Const, 0},
+		{"AF_ATMSVC", Const, 0},
+		{"AF_AX25", Const, 0},
+		{"AF_BLUETOOTH", Const, 0},
+		{"AF_BRIDGE", Const, 0},
+		{"AF_CAIF", Const, 0},
+		{"AF_CAN", Const, 0},
+		{"AF_CCITT", Const, 0},
+		{"AF_CHAOS", Const, 0},
+		{"AF_CNT", Const, 0},
+		{"AF_COIP", Const, 0},
+		{"AF_DATAKIT", Const, 0},
+		{"AF_DECnet", Const, 0},
+		{"AF_DLI", Const, 0},
+		{"AF_E164", Const, 0},
+		{"AF_ECMA", Const, 0},
+		{"AF_ECONET", Const, 0},
+		{"AF_ENCAP", Const, 1},
+		{"AF_FILE", Const, 0},
+		{"AF_HYLINK", Const, 0},
+		{"AF_IEEE80211", Const, 0},
+		{"AF_IEEE802154", Const, 0},
+		{"AF_IMPLINK", Const, 0},
+		{"AF_INET", Const, 0},
+		{"AF_INET6", Const, 0},
+		{"AF_INET6_SDP", Const, 3},
+		{"AF_INET_SDP", Const, 3},
+		{"AF_IPX", Const, 0},
+		{"AF_IRDA", Const, 0},
+		{"AF_ISDN", Const, 0},
+		{"AF_ISO", Const, 0},
+		{"AF_IUCV", Const, 0},
+		{"AF_KEY", Const, 0},
+		{"AF_LAT", Const, 0},
+		{"AF_LINK", Const, 0},
+		{"AF_LLC", Const, 0},
+		{"AF_LOCAL", Const, 0},
+		{"AF_MAX", Const, 0},
+		{"AF_MPLS", Const, 1},
+		{"AF_NATM", Const, 0},
+		{"AF_NDRV", Const, 0},
+		{"AF_NETBEUI", Const, 0},
+		{"AF_NETBIOS", Const, 0},
+		{"AF_NETGRAPH", Const, 0},
+		{"AF_NETLINK", Const, 0},
+		{"AF_NETROM", Const, 0},
+		{"AF_NS", Const, 0},
+		{"AF_OROUTE", Const, 1},
+		{"AF_OSI", Const, 0},
+		{"AF_PACKET", Const, 0},
+		{"AF_PHONET", Const, 0},
+		{"AF_PPP", Const, 0},
+		{"AF_PPPOX", Const, 0},
+		{"AF_PUP", Const, 0},
+		{"AF_RDS", Const, 0},
+		{"AF_RESERVED_36", Const, 0},
+		{"AF_ROSE", Const, 0},
+		{"AF_ROUTE", Const, 0},
+		{"AF_RXRPC", Const, 0},
+		{"AF_SCLUSTER", Const, 0},
+		{"AF_SECURITY", Const, 0},
+		{"AF_SIP", Const, 0},
+		{"AF_SLOW", Const, 0},
+		{"AF_SNA", Const, 0},
+		{"AF_SYSTEM", Const, 0},
+		{"AF_TIPC", Const, 0},
+		{"AF_UNIX", Const, 0},
+		{"AF_UNSPEC", Const, 0},
+		{"AF_UTUN", Const, 16},
+		{"AF_VENDOR00", Const, 0},
+		{"AF_VENDOR01", Const, 0},
+		{"AF_VENDOR02", Const, 0},
+		{"AF_VENDOR03", Const, 0},
+		{"AF_VENDOR04", Const, 0},
+		{"AF_VENDOR05", Const, 0},
+		{"AF_VENDOR06", Const, 0},
+		{"AF_VENDOR07", Const, 0},
+		{"AF_VENDOR08", Const, 0},
+		{"AF_VENDOR09", Const, 0},
+		{"AF_VENDOR10", Const, 0},
+		{"AF_VENDOR11", Const, 0},
+		{"AF_VENDOR12", Const, 0},
+		{"AF_VENDOR13", Const, 0},
+		{"AF_VENDOR14", Const, 0},
+		{"AF_VENDOR15", Const, 0},
+		{"AF_VENDOR16", Const, 0},
+		{"AF_VENDOR17", Const, 0},
+		{"AF_VENDOR18", Const, 0},
+		{"AF_VENDOR19", Const, 0},
+		{"AF_VENDOR20", Const, 0},
+		{"AF_VENDOR21", Const, 0},
+		{"AF_VENDOR22", Const, 0},
+		{"AF_VENDOR23", Const, 0},
+		{"AF_VENDOR24", Const, 0},
+		{"AF_VENDOR25", Const, 0},
+		{"AF_VENDOR26", Const, 0},
+		{"AF_VENDOR27", Const, 0},
+		{"AF_VENDOR28", Const, 0},
+		{"AF_VENDOR29", Const, 0},
+		{"AF_VENDOR30", Const, 0},
+		{"AF_VENDOR31", Const, 0},
+		{"AF_VENDOR32", Const, 0},
+		{"AF_VENDOR33", Const, 0},
+		{"AF_VENDOR34", Const, 0},
+		{"AF_VENDOR35", Const, 0},
+		{"AF_VENDOR36", Const, 0},
+		{"AF_VENDOR37", Const, 0},
+		{"AF_VENDOR38", Const, 0},
+		{"AF_VENDOR39", Const, 0},
+		{"AF_VENDOR40", Const, 0},
+		{"AF_VENDOR41", Const, 0},
+		{"AF_VENDOR42", Const, 0},
+		{"AF_VENDOR43", Const, 0},
+		{"AF_VENDOR44", Const, 0},
+		{"AF_VENDOR45", Const, 0},
+		{"AF_VENDOR46", Const, 0},
+		{"AF_VENDOR47", Const, 0},
+		{"AF_WANPIPE", Const, 0},
+		{"AF_X25", Const, 0},
+		{"AI_CANONNAME", Const, 1},
+		{"AI_NUMERICHOST", Const, 1},
+		{"AI_PASSIVE", Const, 1},
+		{"APPLICATION_ERROR", Const, 0},
+		{"ARPHRD_ADAPT", Const, 0},
+		{"ARPHRD_APPLETLK", Const, 0},
+		{"ARPHRD_ARCNET", Const, 0},
+		{"ARPHRD_ASH", Const, 0},
+		{"ARPHRD_ATM", Const, 0},
+		{"ARPHRD_AX25", Const, 0},
+		{"ARPHRD_BIF", Const, 0},
+		{"ARPHRD_CHAOS", Const, 0},
+		{"ARPHRD_CISCO", Const, 0},
+		{"ARPHRD_CSLIP", Const, 0},
+		{"ARPHRD_CSLIP6", Const, 0},
+		{"ARPHRD_DDCMP", Const, 0},
+		{"ARPHRD_DLCI", Const, 0},
+		{"ARPHRD_ECONET", Const, 0},
+		{"ARPHRD_EETHER", Const, 0},
+		{"ARPHRD_ETHER", Const, 0},
+		{"ARPHRD_EUI64", Const, 0},
+		{"ARPHRD_FCAL", Const, 0},
+		{"ARPHRD_FCFABRIC", Const, 0},
+		{"ARPHRD_FCPL", Const, 0},
+		{"ARPHRD_FCPP", Const, 0},
+		{"ARPHRD_FDDI", Const, 0},
+		{"ARPHRD_FRAD", Const, 0},
+		{"ARPHRD_FRELAY", Const, 1},
+		{"ARPHRD_HDLC", Const, 0},
+		{"ARPHRD_HIPPI", Const, 0},
+		{"ARPHRD_HWX25", Const, 0},
+		{"ARPHRD_IEEE1394", Const, 0},
+		{"ARPHRD_IEEE802", Const, 0},
+		{"ARPHRD_IEEE80211", Const, 0},
+		{"ARPHRD_IEEE80211_PRISM", Const, 0},
+		{"ARPHRD_IEEE80211_RADIOTAP", Const, 0},
+		{"ARPHRD_IEEE802154", Const, 0},
+		{"ARPHRD_IEEE802154_PHY", Const, 0},
+		{"ARPHRD_IEEE802_TR", Const, 0},
+		{"ARPHRD_INFINIBAND", Const, 0},
+		{"ARPHRD_IPDDP", Const, 0},
+		{"ARPHRD_IPGRE", Const, 0},
+		{"ARPHRD_IRDA", Const, 0},
+		{"ARPHRD_LAPB", Const, 0},
+		{"ARPHRD_LOCALTLK", Const, 0},
+		{"ARPHRD_LOOPBACK", Const, 0},
+		{"ARPHRD_METRICOM", Const, 0},
+		{"ARPHRD_NETROM", Const, 0},
+		{"ARPHRD_NONE", Const, 0},
+		{"ARPHRD_PIMREG", Const, 0},
+		{"ARPHRD_PPP", Const, 0},
+		{"ARPHRD_PRONET", Const, 0},
+		{"ARPHRD_RAWHDLC", Const, 0},
+		{"ARPHRD_ROSE", Const, 0},
+		{"ARPHRD_RSRVD", Const, 0},
+		{"ARPHRD_SIT", Const, 0},
+		{"ARPHRD_SKIP", Const, 0},
+		{"ARPHRD_SLIP", Const, 0},
+		{"ARPHRD_SLIP6", Const, 0},
+		{"ARPHRD_STRIP", Const, 1},
+		{"ARPHRD_TUNNEL", Const, 0},
+		{"ARPHRD_TUNNEL6", Const, 0},
+		{"ARPHRD_VOID", Const, 0},
+		{"ARPHRD_X25", Const, 0},
+		{"AUTHTYPE_CLIENT", Const, 0},
+		{"AUTHTYPE_SERVER", Const, 0},
+		{"Accept", Func, 0},
+		{"Accept4", Func, 1},
+		{"AcceptEx", Func, 0},
+		{"Access", Func, 0},
+		{"Acct", Func, 0},
+		{"AddrinfoW", Type, 1},
+		{"AddrinfoW.Addr", Field, 1},
+		{"AddrinfoW.Addrlen", Field, 1},
+		{"AddrinfoW.Canonname", Field, 1},
+		{"AddrinfoW.Family", Field, 1},
+		{"AddrinfoW.Flags", Field, 1},
+		{"AddrinfoW.Next", Field, 1},
+		{"AddrinfoW.Protocol", Field, 1},
+		{"AddrinfoW.Socktype", Field, 1},
+		{"Adjtime", Func, 0},
+		{"Adjtimex", Func, 0},
+		{"AllThreadsSyscall", Func, 16},
+		{"AllThreadsSyscall6", Func, 16},
+		{"AttachLsf", Func, 0},
+		{"B0", Const, 0},
+		{"B1000000", Const, 0},
+		{"B110", Const, 0},
+		{"B115200", Const, 0},
+		{"B1152000", Const, 0},
+		{"B1200", Const, 0},
+		{"B134", Const, 0},
+		{"B14400", Const, 1},
+		{"B150", Const, 0},
+		{"B1500000", Const, 0},
+		{"B1800", Const, 0},
+		{"B19200", Const, 0},
+		{"B200", Const, 0},
+		{"B2000000", Const, 0},
+		{"B230400", Const, 0},
+		{"B2400", Const, 0},
+		{"B2500000", Const, 0},
+		{"B28800", Const, 1},
+		{"B300", Const, 0},
+		{"B3000000", Const, 0},
+		{"B3500000", Const, 0},
+		{"B38400", Const, 0},
+		{"B4000000", Const, 0},
+		{"B460800", Const, 0},
+		{"B4800", Const, 0},
+		{"B50", Const, 0},
+		{"B500000", Const, 0},
+		{"B57600", Const, 0},
+		{"B576000", Const, 0},
+		{"B600", Const, 0},
+		{"B7200", Const, 1},
+		{"B75", Const, 0},
+		{"B76800", Const, 1},
+		{"B921600", Const, 0},
+		{"B9600", Const, 0},
+		{"BASE_PROTOCOL", Const, 2},
+		{"BIOCFEEDBACK", Const, 0},
+		{"BIOCFLUSH", Const, 0},
+		{"BIOCGBLEN", Const, 0},
+		{"BIOCGDIRECTION", Const, 0},
+		{"BIOCGDIRFILT", Const, 1},
+		{"BIOCGDLT", Const, 0},
+		{"BIOCGDLTLIST", Const, 0},
+		{"BIOCGETBUFMODE", Const, 0},
+		{"BIOCGETIF", Const, 0},
+		{"BIOCGETZMAX", Const, 0},
+		{"BIOCGFEEDBACK", Const, 1},
+		{"BIOCGFILDROP", Const, 1},
+		{"BIOCGHDRCMPLT", Const, 0},
+		{"BIOCGRSIG", Const, 0},
+		{"BIOCGRTIMEOUT", Const, 0},
+		{"BIOCGSEESENT", Const, 0},
+		{"BIOCGSTATS", Const, 0},
+		{"BIOCGSTATSOLD", Const, 1},
+		{"BIOCGTSTAMP", Const, 1},
+		{"BIOCIMMEDIATE", Const, 0},
+		{"BIOCLOCK", Const, 0},
+		{"BIOCPROMISC", Const, 0},
+		{"BIOCROTZBUF", Const, 0},
+		{"BIOCSBLEN", Const, 0},
+		{"BIOCSDIRECTION", Const, 0},
+		{"BIOCSDIRFILT", Const, 1},
+		{"BIOCSDLT", Const, 0},
+		{"BIOCSETBUFMODE", Const, 0},
+		{"BIOCSETF", Const, 0},
+		{"BIOCSETFNR", Const, 0},
+		{"BIOCSETIF", Const, 0},
+		{"BIOCSETWF", Const, 0},
+		{"BIOCSETZBUF", Const, 0},
+		{"BIOCSFEEDBACK", Const, 1},
+		{"BIOCSFILDROP", Const, 1},
+		{"BIOCSHDRCMPLT", Const, 0},
+		{"BIOCSRSIG", Const, 0},
+		{"BIOCSRTIMEOUT", Const, 0},
+		{"BIOCSSEESENT", Const, 0},
+		{"BIOCSTCPF", Const, 1},
+		{"BIOCSTSTAMP", Const, 1},
+		{"BIOCSUDPF", Const, 1},
+		{"BIOCVERSION", Const, 0},
+		{"BPF_A", Const, 0},
+		{"BPF_ABS", Const, 0},
+		{"BPF_ADD", Const, 0},
+		{"BPF_ALIGNMENT", Const, 0},
+		{"BPF_ALIGNMENT32", Const, 1},
+		{"BPF_ALU", Const, 0},
+		{"BPF_AND", Const, 0},
+		{"BPF_B", Const, 0},
+		{"BPF_BUFMODE_BUFFER", Const, 0},
+		{"BPF_BUFMODE_ZBUF", Const, 0},
+		{"BPF_DFLTBUFSIZE", Const, 1},
+		{"BPF_DIRECTION_IN", Const, 1},
+		{"BPF_DIRECTION_OUT", Const, 1},
+		{"BPF_DIV", Const, 0},
+		{"BPF_H", Const, 0},
+		{"BPF_IMM", Const, 0},
+		{"BPF_IND", Const, 0},
+		{"BPF_JA", Const, 0},
+		{"BPF_JEQ", Const, 0},
+		{"BPF_JGE", Const, 0},
+		{"BPF_JGT", Const, 0},
+		{"BPF_JMP", Const, 0},
+		{"BPF_JSET", Const, 0},
+		{"BPF_K", Const, 0},
+		{"BPF_LD", Const, 0},
+		{"BPF_LDX", Const, 0},
+		{"BPF_LEN", Const, 0},
+		{"BPF_LSH", Const, 0},
+		{"BPF_MAJOR_VERSION", Const, 0},
+		{"BPF_MAXBUFSIZE", Const, 0},
+		{"BPF_MAXINSNS", Const, 0},
+		{"BPF_MEM", Const, 0},
+		{"BPF_MEMWORDS", Const, 0},
+		{"BPF_MINBUFSIZE", Const, 0},
+		{"BPF_MINOR_VERSION", Const, 0},
+		{"BPF_MISC", Const, 0},
+		{"BPF_MSH", Const, 0},
+		{"BPF_MUL", Const, 0},
+		{"BPF_NEG", Const, 0},
+		{"BPF_OR", Const, 0},
+		{"BPF_RELEASE", Const, 0},
+		{"BPF_RET", Const, 0},
+		{"BPF_RSH", Const, 0},
+		{"BPF_ST", Const, 0},
+		{"BPF_STX", Const, 0},
+		{"BPF_SUB", Const, 0},
+		{"BPF_TAX", Const, 0},
+		{"BPF_TXA", Const, 0},
+		{"BPF_T_BINTIME", Const, 1},
+		{"BPF_T_BINTIME_FAST", Const, 1},
+		{"BPF_T_BINTIME_MONOTONIC", Const, 1},
+		{"BPF_T_BINTIME_MONOTONIC_FAST", Const, 1},
+		{"BPF_T_FAST", Const, 1},
+		{"BPF_T_FLAG_MASK", Const, 1},
+		{"BPF_T_FORMAT_MASK", Const, 1},
+		{"BPF_T_MICROTIME", Const, 1},
+		{"BPF_T_MICROTIME_FAST", Const, 1},
+		{"BPF_T_MICROTIME_MONOTONIC", Const, 1},
+		{"BPF_T_MICROTIME_MONOTONIC_FAST", Const, 1},
+		{"BPF_T_MONOTONIC", Const, 1},
+		{"BPF_T_MONOTONIC_FAST", Const, 1},
+		{"BPF_T_NANOTIME", Const, 1},
+		{"BPF_T_NANOTIME_FAST", Const, 1},
+		{"BPF_T_NANOTIME_MONOTONIC", Const, 1},
+		{"BPF_T_NANOTIME_MONOTONIC_FAST", Const, 1},
+		{"BPF_T_NONE", Const, 1},
+		{"BPF_T_NORMAL", Const, 1},
+		{"BPF_W", Const, 0},
+		{"BPF_X", Const, 0},
+		{"BRKINT", Const, 0},
+		{"Bind", Func, 0},
+		{"BindToDevice", Func, 0},
+		{"BpfBuflen", Func, 0},
+		{"BpfDatalink", Func, 0},
+		{"BpfHdr", Type, 0},
+		{"BpfHdr.Caplen", Field, 0},
+		{"BpfHdr.Datalen", Field, 0},
+		{"BpfHdr.Hdrlen", Field, 0},
+		{"BpfHdr.Pad_cgo_0", Field, 0},
+		{"BpfHdr.Tstamp", Field, 0},
+		{"BpfHeadercmpl", Func, 0},
+		{"BpfInsn", Type, 0},
+		{"BpfInsn.Code", Field, 0},
+		{"BpfInsn.Jf", Field, 0},
+		{"BpfInsn.Jt", Field, 0},
+		{"BpfInsn.K", Field, 0},
+		{"BpfInterface", Func, 0},
+		{"BpfJump", Func, 0},
+		{"BpfProgram", Type, 0},
+		{"BpfProgram.Insns", Field, 0},
+		{"BpfProgram.Len", Field, 0},
+		{"BpfProgram.Pad_cgo_0", Field, 0},
+		{"BpfStat", Type, 0},
+		{"BpfStat.Capt", Field, 2},
+		{"BpfStat.Drop", Field, 0},
+		{"BpfStat.Padding", Field, 2},
+		{"BpfStat.Recv", Field, 0},
+		{"BpfStats", Func, 0},
+		{"BpfStmt", Func, 0},
+		{"BpfTimeout", Func, 0},
+		{"BpfTimeval", Type, 2},
+		{"BpfTimeval.Sec", Field, 2},
+		{"BpfTimeval.Usec", Field, 2},
+		{"BpfVersion", Type, 0},
+		{"BpfVersion.Major", Field, 0},
+		{"BpfVersion.Minor", Field, 0},
+		{"BpfZbuf", Type, 0},
+		{"BpfZbuf.Bufa", Field, 0},
+		{"BpfZbuf.Bufb", Field, 0},
+		{"BpfZbuf.Buflen", Field, 0},
+		{"BpfZbufHeader", Type, 0},
+		{"BpfZbufHeader.Kernel_gen", Field, 0},
+		{"BpfZbufHeader.Kernel_len", Field, 0},
+		{"BpfZbufHeader.User_gen", Field, 0},
+		{"BpfZbufHeader.X_bzh_pad", Field, 0},
+		{"ByHandleFileInformation", Type, 0},
+		{"ByHandleFileInformation.CreationTime", Field, 0},
+		{"ByHandleFileInformation.FileAttributes", Field, 0},
+		{"ByHandleFileInformation.FileIndexHigh", Field, 0},
+		{"ByHandleFileInformation.FileIndexLow", Field, 0},
+		{"ByHandleFileInformation.FileSizeHigh", Field, 0},
+		{"ByHandleFileInformation.FileSizeLow", Field, 0},
+		{"ByHandleFileInformation.LastAccessTime", Field, 0},
+		{"ByHandleFileInformation.LastWriteTime", Field, 0},
+		{"ByHandleFileInformation.NumberOfLinks", Field, 0},
+		{"ByHandleFileInformation.VolumeSerialNumber", Field, 0},
+		{"BytePtrFromString", Func, 1},
+		{"ByteSliceFromString", Func, 1},
+		{"CCR0_FLUSH", Const, 1},
+		{"CERT_CHAIN_POLICY_AUTHENTICODE", Const, 0},
+		{"CERT_CHAIN_POLICY_AUTHENTICODE_TS", Const, 0},
+		{"CERT_CHAIN_POLICY_BASE", Const, 0},
+		{"CERT_CHAIN_POLICY_BASIC_CONSTRAINTS", Const, 0},
+		{"CERT_CHAIN_POLICY_EV", Const, 0},
+		{"CERT_CHAIN_POLICY_MICROSOFT_ROOT", Const, 0},
+		{"CERT_CHAIN_POLICY_NT_AUTH", Const, 0},
+		{"CERT_CHAIN_POLICY_SSL", Const, 0},
+		{"CERT_E_CN_NO_MATCH", Const, 0},
+		{"CERT_E_EXPIRED", Const, 0},
+		{"CERT_E_PURPOSE", Const, 0},
+		{"CERT_E_ROLE", Const, 0},
+		{"CERT_E_UNTRUSTEDROOT", Const, 0},
+		{"CERT_STORE_ADD_ALWAYS", Const, 0},
+		{"CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG", Const, 0},
+		{"CERT_STORE_PROV_MEMORY", Const, 0},
+		{"CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT", Const, 0},
+		{"CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT", Const, 0},
+		{"CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT", Const, 0},
+		{"CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT", Const, 0},
+		{"CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT", Const, 0},
+		{"CERT_TRUST_INVALID_BASIC_CONSTRAINTS", Const, 0},
+		{"CERT_TRUST_INVALID_EXTENSION", Const, 0},
+		{"CERT_TRUST_INVALID_NAME_CONSTRAINTS", Const, 0},
+		{"CERT_TRUST_INVALID_POLICY_CONSTRAINTS", Const, 0},
+		{"CERT_TRUST_IS_CYCLIC", Const, 0},
+		{"CERT_TRUST_IS_EXPLICIT_DISTRUST", Const, 0},
+		{"CERT_TRUST_IS_NOT_SIGNATURE_VALID", Const, 0},
+		{"CERT_TRUST_IS_NOT_TIME_VALID", Const, 0},
+		{"CERT_TRUST_IS_NOT_VALID_FOR_USAGE", Const, 0},
+		{"CERT_TRUST_IS_OFFLINE_REVOCATION", Const, 0},
+		{"CERT_TRUST_IS_REVOKED", Const, 0},
+		{"CERT_TRUST_IS_UNTRUSTED_ROOT", Const, 0},
+		{"CERT_TRUST_NO_ERROR", Const, 0},
+		{"CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY", Const, 0},
+		{"CERT_TRUST_REVOCATION_STATUS_UNKNOWN", Const, 0},
+		{"CFLUSH", Const, 1},
+		{"CLOCAL", Const, 0},
+		{"CLONE_CHILD_CLEARTID", Const, 2},
+		{"CLONE_CHILD_SETTID", Const, 2},
+		{"CLONE_CLEAR_SIGHAND", Const, 20},
+		{"CLONE_CSIGNAL", Const, 3},
+		{"CLONE_DETACHED", Const, 2},
+		{"CLONE_FILES", Const, 2},
+		{"CLONE_FS", Const, 2},
+		{"CLONE_INTO_CGROUP", Const, 20},
+		{"CLONE_IO", Const, 2},
+		{"CLONE_NEWCGROUP", Const, 20},
+		{"CLONE_NEWIPC", Const, 2},
+		{"CLONE_NEWNET", Const, 2},
+		{"CLONE_NEWNS", Const, 2},
+		{"CLONE_NEWPID", Const, 2},
+		{"CLONE_NEWTIME", Const, 20},
+		{"CLONE_NEWUSER", Const, 2},
+		{"CLONE_NEWUTS", Const, 2},
+		{"CLONE_PARENT", Const, 2},
+		{"CLONE_PARENT_SETTID", Const, 2},
+		{"CLONE_PID", Const, 3},
+		{"CLONE_PIDFD", Const, 20},
+		{"CLONE_PTRACE", Const, 2},
+		{"CLONE_SETTLS", Const, 2},
+		{"CLONE_SIGHAND", Const, 2},
+		{"CLONE_SYSVSEM", Const, 2},
+		{"CLONE_THREAD", Const, 2},
+		{"CLONE_UNTRACED", Const, 2},
+		{"CLONE_VFORK", Const, 2},
+		{"CLONE_VM", Const, 2},
+		{"CPUID_CFLUSH", Const, 1},
+		{"CREAD", Const, 0},
+		{"CREATE_ALWAYS", Const, 0},
+		{"CREATE_NEW", Const, 0},
+		{"CREATE_NEW_PROCESS_GROUP", Const, 1},
+		{"CREATE_UNICODE_ENVIRONMENT", Const, 0},
+		{"CRYPT_DEFAULT_CONTAINER_OPTIONAL", Const, 0},
+		{"CRYPT_DELETEKEYSET", Const, 0},
+		{"CRYPT_MACHINE_KEYSET", Const, 0},
+		{"CRYPT_NEWKEYSET", Const, 0},
+		{"CRYPT_SILENT", Const, 0},
+		{"CRYPT_VERIFYCONTEXT", Const, 0},
+		{"CS5", Const, 0},
+		{"CS6", Const, 0},
+		{"CS7", Const, 0},
+		{"CS8", Const, 0},
+		{"CSIZE", Const, 0},
+		{"CSTART", Const, 1},
+		{"CSTATUS", Const, 1},
+		{"CSTOP", Const, 1},
+		{"CSTOPB", Const, 0},
+		{"CSUSP", Const, 1},
+		{"CTL_MAXNAME", Const, 0},
+		{"CTL_NET", Const, 0},
+		{"CTL_QUERY", Const, 1},
+		{"CTRL_BREAK_EVENT", Const, 1},
+		{"CTRL_CLOSE_EVENT", Const, 14},
+		{"CTRL_C_EVENT", Const, 1},
+		{"CTRL_LOGOFF_EVENT", Const, 14},
+		{"CTRL_SHUTDOWN_EVENT", Const, 14},
+		{"CancelIo", Func, 0},
+		{"CancelIoEx", Func, 1},
+		{"CertAddCertificateContextToStore", Func, 0},
+		{"CertChainContext", Type, 0},
+		{"CertChainContext.ChainCount", Field, 0},
+		{"CertChainContext.Chains", Field, 0},
+		{"CertChainContext.HasRevocationFreshnessTime", Field, 0},
+		{"CertChainContext.LowerQualityChainCount", Field, 0},
+		{"CertChainContext.LowerQualityChains", Field, 0},
+		{"CertChainContext.RevocationFreshnessTime", Field, 0},
+		{"CertChainContext.Size", Field, 0},
+		{"CertChainContext.TrustStatus", Field, 0},
+		{"CertChainElement", Type, 0},
+		{"CertChainElement.ApplicationUsage", Field, 0},
+		{"CertChainElement.CertContext", Field, 0},
+		{"CertChainElement.ExtendedErrorInfo", Field, 0},
+		{"CertChainElement.IssuanceUsage", Field, 0},
+		{"CertChainElement.RevocationInfo", Field, 0},
+		{"CertChainElement.Size", Field, 0},
+		{"CertChainElement.TrustStatus", Field, 0},
+		{"CertChainPara", Type, 0},
+		{"CertChainPara.CacheResync", Field, 0},
+		{"CertChainPara.CheckRevocationFreshnessTime", Field, 0},
+		{"CertChainPara.RequestedUsage", Field, 0},
+		{"CertChainPara.RequstedIssuancePolicy", Field, 0},
+		{"CertChainPara.RevocationFreshnessTime", Field, 0},
+		{"CertChainPara.Size", Field, 0},
+		{"CertChainPara.URLRetrievalTimeout", Field, 0},
+		{"CertChainPolicyPara", Type, 0},
+		{"CertChainPolicyPara.ExtraPolicyPara", Field, 0},
+		{"CertChainPolicyPara.Flags", Field, 0},
+		{"CertChainPolicyPara.Size", Field, 0},
+		{"CertChainPolicyStatus", Type, 0},
+		{"CertChainPolicyStatus.ChainIndex", Field, 0},
+		{"CertChainPolicyStatus.ElementIndex", Field, 0},
+		{"CertChainPolicyStatus.Error", Field, 0},
+		{"CertChainPolicyStatus.ExtraPolicyStatus", Field, 0},
+		{"CertChainPolicyStatus.Size", Field, 0},
+		{"CertCloseStore", Func, 0},
+		{"CertContext", Type, 0},
+		{"CertContext.CertInfo", Field, 0},
+		{"CertContext.EncodedCert", Field, 0},
+		{"CertContext.EncodingType", Field, 0},
+		{"CertContext.Length", Field, 0},
+		{"CertContext.Store", Field, 0},
+		{"CertCreateCertificateContext", Func, 0},
+		{"CertEnhKeyUsage", Type, 0},
+		{"CertEnhKeyUsage.Length", Field, 0},
+		{"CertEnhKeyUsage.UsageIdentifiers", Field, 0},
+		{"CertEnumCertificatesInStore", Func, 0},
+		{"CertFreeCertificateChain", Func, 0},
+		{"CertFreeCertificateContext", Func, 0},
+		{"CertGetCertificateChain", Func, 0},
+		{"CertInfo", Type, 11},
+		{"CertOpenStore", Func, 0},
+		{"CertOpenSystemStore", Func, 0},
+		{"CertRevocationCrlInfo", Type, 11},
+		{"CertRevocationInfo", Type, 0},
+		{"CertRevocationInfo.CrlInfo", Field, 0},
+		{"CertRevocationInfo.FreshnessTime", Field, 0},
+		{"CertRevocationInfo.HasFreshnessTime", Field, 0},
+		{"CertRevocationInfo.OidSpecificInfo", Field, 0},
+		{"CertRevocationInfo.RevocationOid", Field, 0},
+		{"CertRevocationInfo.RevocationResult", Field, 0},
+		{"CertRevocationInfo.Size", Field, 0},
+		{"CertSimpleChain", Type, 0},
+		{"CertSimpleChain.Elements", Field, 0},
+		{"CertSimpleChain.HasRevocationFreshnessTime", Field, 0},
+		{"CertSimpleChain.NumElements", Field, 0},
+		{"CertSimpleChain.RevocationFreshnessTime", Field, 0},
+		{"CertSimpleChain.Size", Field, 0},
+		{"CertSimpleChain.TrustListInfo", Field, 0},
+		{"CertSimpleChain.TrustStatus", Field, 0},
+		{"CertTrustListInfo", Type, 11},
+		{"CertTrustStatus", Type, 0},
+		{"CertTrustStatus.ErrorStatus", Field, 0},
+		{"CertTrustStatus.InfoStatus", Field, 0},
+		{"CertUsageMatch", Type, 0},
+		{"CertUsageMatch.Type", Field, 0},
+		{"CertUsageMatch.Usage", Field, 0},
+		{"CertVerifyCertificateChainPolicy", Func, 0},
+		{"Chdir", Func, 0},
+		{"CheckBpfVersion", Func, 0},
+		{"Chflags", Func, 0},
+		{"Chmod", Func, 0},
+		{"Chown", Func, 0},
+		{"Chroot", Func, 0},
+		{"Clearenv", Func, 0},
+		{"Close", Func, 0},
+		{"CloseHandle", Func, 0},
+		{"CloseOnExec", Func, 0},
+		{"Closesocket", Func, 0},
+		{"CmsgLen", Func, 0},
+		{"CmsgSpace", Func, 0},
+		{"Cmsghdr", Type, 0},
+		{"Cmsghdr.Len", Field, 0},
+		{"Cmsghdr.Level", Field, 0},
+		{"Cmsghdr.Type", Field, 0},
+		{"Cmsghdr.X__cmsg_data", Field, 0},
+		{"CommandLineToArgv", Func, 0},
+		{"ComputerName", Func, 0},
+		{"Conn", Type, 9},
+		{"Connect", Func, 0},
+		{"ConnectEx", Func, 1},
+		{"ConvertSidToStringSid", Func, 0},
+		{"ConvertStringSidToSid", Func, 0},
+		{"CopySid", Func, 0},
+		{"Creat", Func, 0},
+		{"CreateDirectory", Func, 0},
+		{"CreateFile", Func, 0},
+		{"CreateFileMapping", Func, 0},
+		{"CreateHardLink", Func, 4},
+		{"CreateIoCompletionPort", Func, 0},
+		{"CreatePipe", Func, 0},
+		{"CreateProcess", Func, 0},
+		{"CreateProcessAsUser", Func, 10},
+		{"CreateSymbolicLink", Func, 4},
+		{"CreateToolhelp32Snapshot", Func, 4},
+		{"Credential", Type, 0},
+		{"Credential.Gid", Field, 0},
+		{"Credential.Groups", Field, 0},
+		{"Credential.NoSetGroups", Field, 9},
+		{"Credential.Uid", Field, 0},
+		{"CryptAcquireContext", Func, 0},
+		{"CryptGenRandom", Func, 0},
+		{"CryptReleaseContext", Func, 0},
+		{"DIOCBSFLUSH", Const, 1},
+		{"DIOCOSFPFLUSH", Const, 1},
+		{"DLL", Type, 0},
+		{"DLL.Handle", Field, 0},
+		{"DLL.Name", Field, 0},
+		{"DLLError", Type, 0},
+		{"DLLError.Err", Field, 0},
+		{"DLLError.Msg", Field, 0},
+		{"DLLError.ObjName", Field, 0},
+		{"DLT_A429", Const, 0},
+		{"DLT_A653_ICM", Const, 0},
+		{"DLT_AIRONET_HEADER", Const, 0},
+		{"DLT_AOS", Const, 1},
+		{"DLT_APPLE_IP_OVER_IEEE1394", Const, 0},
+		{"DLT_ARCNET", Const, 0},
+		{"DLT_ARCNET_LINUX", Const, 0},
+		{"DLT_ATM_CLIP", Const, 0},
+		{"DLT_ATM_RFC1483", Const, 0},
+		{"DLT_AURORA", Const, 0},
+		{"DLT_AX25", Const, 0},
+		{"DLT_AX25_KISS", Const, 0},
+		{"DLT_BACNET_MS_TP", Const, 0},
+		{"DLT_BLUETOOTH_HCI_H4", Const, 0},
+		{"DLT_BLUETOOTH_HCI_H4_WITH_PHDR", Const, 0},
+		{"DLT_CAN20B", Const, 0},
+		{"DLT_CAN_SOCKETCAN", Const, 1},
+		{"DLT_CHAOS", Const, 0},
+		{"DLT_CHDLC", Const, 0},
+		{"DLT_CISCO_IOS", Const, 0},
+		{"DLT_C_HDLC", Const, 0},
+		{"DLT_C_HDLC_WITH_DIR", Const, 0},
+		{"DLT_DBUS", Const, 1},
+		{"DLT_DECT", Const, 1},
+		{"DLT_DOCSIS", Const, 0},
+		{"DLT_DVB_CI", Const, 1},
+		{"DLT_ECONET", Const, 0},
+		{"DLT_EN10MB", Const, 0},
+		{"DLT_EN3MB", Const, 0},
+		{"DLT_ENC", Const, 0},
+		{"DLT_ERF", Const, 0},
+		{"DLT_ERF_ETH", Const, 0},
+		{"DLT_ERF_POS", Const, 0},
+		{"DLT_FC_2", Const, 1},
+		{"DLT_FC_2_WITH_FRAME_DELIMS", Const, 1},
+		{"DLT_FDDI", Const, 0},
+		{"DLT_FLEXRAY", Const, 0},
+		{"DLT_FRELAY", Const, 0},
+		{"DLT_FRELAY_WITH_DIR", Const, 0},
+		{"DLT_GCOM_SERIAL", Const, 0},
+		{"DLT_GCOM_T1E1", Const, 0},
+		{"DLT_GPF_F", Const, 0},
+		{"DLT_GPF_T", Const, 0},
+		{"DLT_GPRS_LLC", Const, 0},
+		{"DLT_GSMTAP_ABIS", Const, 1},
+		{"DLT_GSMTAP_UM", Const, 1},
+		{"DLT_HDLC", Const, 1},
+		{"DLT_HHDLC", Const, 0},
+		{"DLT_HIPPI", Const, 1},
+		{"DLT_IBM_SN", Const, 0},
+		{"DLT_IBM_SP", Const, 0},
+		{"DLT_IEEE802", Const, 0},
+		{"DLT_IEEE802_11", Const, 0},
+		{"DLT_IEEE802_11_RADIO", Const, 0},
+		{"DLT_IEEE802_11_RADIO_AVS", Const, 0},
+		{"DLT_IEEE802_15_4", Const, 0},
+		{"DLT_IEEE802_15_4_LINUX", Const, 0},
+		{"DLT_IEEE802_15_4_NOFCS", Const, 1},
+		{"DLT_IEEE802_15_4_NONASK_PHY", Const, 0},
+		{"DLT_IEEE802_16_MAC_CPS", Const, 0},
+		{"DLT_IEEE802_16_MAC_CPS_RADIO", Const, 0},
+		{"DLT_IPFILTER", Const, 0},
+		{"DLT_IPMB", Const, 0},
+		{"DLT_IPMB_LINUX", Const, 0},
+		{"DLT_IPNET", Const, 1},
+		{"DLT_IPOIB", Const, 1},
+		{"DLT_IPV4", Const, 1},
+		{"DLT_IPV6", Const, 1},
+		{"DLT_IP_OVER_FC", Const, 0},
+		{"DLT_JUNIPER_ATM1", Const, 0},
+		{"DLT_JUNIPER_ATM2", Const, 0},
+		{"DLT_JUNIPER_ATM_CEMIC", Const, 1},
+		{"DLT_JUNIPER_CHDLC", Const, 0},
+		{"DLT_JUNIPER_ES", Const, 0},
+		{"DLT_JUNIPER_ETHER", Const, 0},
+		{"DLT_JUNIPER_FIBRECHANNEL", Const, 1},
+		{"DLT_JUNIPER_FRELAY", Const, 0},
+		{"DLT_JUNIPER_GGSN", Const, 0},
+		{"DLT_JUNIPER_ISM", Const, 0},
+		{"DLT_JUNIPER_MFR", Const, 0},
+		{"DLT_JUNIPER_MLFR", Const, 0},
+		{"DLT_JUNIPER_MLPPP", Const, 0},
+		{"DLT_JUNIPER_MONITOR", Const, 0},
+		{"DLT_JUNIPER_PIC_PEER", Const, 0},
+		{"DLT_JUNIPER_PPP", Const, 0},
+		{"DLT_JUNIPER_PPPOE", Const, 0},
+		{"DLT_JUNIPER_PPPOE_ATM", Const, 0},
+		{"DLT_JUNIPER_SERVICES", Const, 0},
+		{"DLT_JUNIPER_SRX_E2E", Const, 1},
+		{"DLT_JUNIPER_ST", Const, 0},
+		{"DLT_JUNIPER_VP", Const, 0},
+		{"DLT_JUNIPER_VS", Const, 1},
+		{"DLT_LAPB_WITH_DIR", Const, 0},
+		{"DLT_LAPD", Const, 0},
+		{"DLT_LIN", Const, 0},
+		{"DLT_LINUX_EVDEV", Const, 1},
+		{"DLT_LINUX_IRDA", Const, 0},
+		{"DLT_LINUX_LAPD", Const, 0},
+		{"DLT_LINUX_PPP_WITHDIRECTION", Const, 0},
+		{"DLT_LINUX_SLL", Const, 0},
+		{"DLT_LOOP", Const, 0},
+		{"DLT_LTALK", Const, 0},
+		{"DLT_MATCHING_MAX", Const, 1},
+		{"DLT_MATCHING_MIN", Const, 1},
+		{"DLT_MFR", Const, 0},
+		{"DLT_MOST", Const, 0},
+		{"DLT_MPEG_2_TS", Const, 1},
+		{"DLT_MPLS", Const, 1},
+		{"DLT_MTP2", Const, 0},
+		{"DLT_MTP2_WITH_PHDR", Const, 0},
+		{"DLT_MTP3", Const, 0},
+		{"DLT_MUX27010", Const, 1},
+		{"DLT_NETANALYZER", Const, 1},
+		{"DLT_NETANALYZER_TRANSPARENT", Const, 1},
+		{"DLT_NFC_LLCP", Const, 1},
+		{"DLT_NFLOG", Const, 1},
+		{"DLT_NG40", Const, 1},
+		{"DLT_NULL", Const, 0},
+		{"DLT_PCI_EXP", Const, 0},
+		{"DLT_PFLOG", Const, 0},
+		{"DLT_PFSYNC", Const, 0},
+		{"DLT_PPI", Const, 0},
+		{"DLT_PPP", Const, 0},
+		{"DLT_PPP_BSDOS", Const, 0},
+		{"DLT_PPP_ETHER", Const, 0},
+		{"DLT_PPP_PPPD", Const, 0},
+		{"DLT_PPP_SERIAL", Const, 0},
+		{"DLT_PPP_WITH_DIR", Const, 0},
+		{"DLT_PPP_WITH_DIRECTION", Const, 0},
+		{"DLT_PRISM_HEADER", Const, 0},
+		{"DLT_PRONET", Const, 0},
+		{"DLT_RAIF1", Const, 0},
+		{"DLT_RAW", Const, 0},
+		{"DLT_RAWAF_MASK", Const, 1},
+		{"DLT_RIO", Const, 0},
+		{"DLT_SCCP", Const, 0},
+		{"DLT_SITA", Const, 0},
+		{"DLT_SLIP", Const, 0},
+		{"DLT_SLIP_BSDOS", Const, 0},
+		{"DLT_STANAG_5066_D_PDU", Const, 1},
+		{"DLT_SUNATM", Const, 0},
+		{"DLT_SYMANTEC_FIREWALL", Const, 0},
+		{"DLT_TZSP", Const, 0},
+		{"DLT_USB", Const, 0},
+		{"DLT_USB_LINUX", Const, 0},
+		{"DLT_USB_LINUX_MMAPPED", Const, 1},
+		{"DLT_USER0", Const, 0},
+		{"DLT_USER1", Const, 0},
+		{"DLT_USER10", Const, 0},
+		{"DLT_USER11", Const, 0},
+		{"DLT_USER12", Const, 0},
+		{"DLT_USER13", Const, 0},
+		{"DLT_USER14", Const, 0},
+		{"DLT_USER15", Const, 0},
+		{"DLT_USER2", Const, 0},
+		{"DLT_USER3", Const, 0},
+		{"DLT_USER4", Const, 0},
+		{"DLT_USER5", Const, 0},
+		{"DLT_USER6", Const, 0},
+		{"DLT_USER7", Const, 0},
+		{"DLT_USER8", Const, 0},
+		{"DLT_USER9", Const, 0},
+		{"DLT_WIHART", Const, 1},
+		{"DLT_X2E_SERIAL", Const, 0},
+		{"DLT_X2E_XORAYA", Const, 0},
+		{"DNSMXData", Type, 0},
+		{"DNSMXData.NameExchange", Field, 0},
+		{"DNSMXData.Pad", Field, 0},
+		{"DNSMXData.Preference", Field, 0},
+		{"DNSPTRData", Type, 0},
+		{"DNSPTRData.Host", Field, 0},
+		{"DNSRecord", Type, 0},
+		{"DNSRecord.Data", Field, 0},
+		{"DNSRecord.Dw", Field, 0},
+		{"DNSRecord.Length", Field, 0},
+		{"DNSRecord.Name", Field, 0},
+		{"DNSRecord.Next", Field, 0},
+		{"DNSRecord.Reserved", Field, 0},
+		{"DNSRecord.Ttl", Field, 0},
+		{"DNSRecord.Type", Field, 0},
+		{"DNSSRVData", Type, 0},
+		{"DNSSRVData.Pad", Field, 0},
+		{"DNSSRVData.Port", Field, 0},
+		{"DNSSRVData.Priority", Field, 0},
+		{"DNSSRVData.Target", Field, 0},
+		{"DNSSRVData.Weight", Field, 0},
+		{"DNSTXTData", Type, 0},
+		{"DNSTXTData.StringArray", Field, 0},
+		{"DNSTXTData.StringCount", Field, 0},
+		{"DNS_INFO_NO_RECORDS", Const, 4},
+		{"DNS_TYPE_A", Const, 0},
+		{"DNS_TYPE_A6", Const, 0},
+		{"DNS_TYPE_AAAA", Const, 0},
+		{"DNS_TYPE_ADDRS", Const, 0},
+		{"DNS_TYPE_AFSDB", Const, 0},
+		{"DNS_TYPE_ALL", Const, 0},
+		{"DNS_TYPE_ANY", Const, 0},
+		{"DNS_TYPE_ATMA", Const, 0},
+		{"DNS_TYPE_AXFR", Const, 0},
+		{"DNS_TYPE_CERT", Const, 0},
+		{"DNS_TYPE_CNAME", Const, 0},
+		{"DNS_TYPE_DHCID", Const, 0},
+		{"DNS_TYPE_DNAME", Const, 0},
+		{"DNS_TYPE_DNSKEY", Const, 0},
+		{"DNS_TYPE_DS", Const, 0},
+		{"DNS_TYPE_EID", Const, 0},
+		{"DNS_TYPE_GID", Const, 0},
+		{"DNS_TYPE_GPOS", Const, 0},
+		{"DNS_TYPE_HINFO", Const, 0},
+		{"DNS_TYPE_ISDN", Const, 0},
+		{"DNS_TYPE_IXFR", Const, 0},
+		{"DNS_TYPE_KEY", Const, 0},
+		{"DNS_TYPE_KX", Const, 0},
+		{"DNS_TYPE_LOC", Const, 0},
+		{"DNS_TYPE_MAILA", Const, 0},
+		{"DNS_TYPE_MAILB", Const, 0},
+		{"DNS_TYPE_MB", Const, 0},
+		{"DNS_TYPE_MD", Const, 0},
+		{"DNS_TYPE_MF", Const, 0},
+		{"DNS_TYPE_MG", Const, 0},
+		{"DNS_TYPE_MINFO", Const, 0},
+		{"DNS_TYPE_MR", Const, 0},
+		{"DNS_TYPE_MX", Const, 0},
+		{"DNS_TYPE_NAPTR", Const, 0},
+		{"DNS_TYPE_NBSTAT", Const, 0},
+		{"DNS_TYPE_NIMLOC", Const, 0},
+		{"DNS_TYPE_NS", Const, 0},
+		{"DNS_TYPE_NSAP", Const, 0},
+		{"DNS_TYPE_NSAPPTR", Const, 0},
+		{"DNS_TYPE_NSEC", Const, 0},
+		{"DNS_TYPE_NULL", Const, 0},
+		{"DNS_TYPE_NXT", Const, 0},
+		{"DNS_TYPE_OPT", Const, 0},
+		{"DNS_TYPE_PTR", Const, 0},
+		{"DNS_TYPE_PX", Const, 0},
+		{"DNS_TYPE_RP", Const, 0},
+		{"DNS_TYPE_RRSIG", Const, 0},
+		{"DNS_TYPE_RT", Const, 0},
+		{"DNS_TYPE_SIG", Const, 0},
+		{"DNS_TYPE_SINK", Const, 0},
+		{"DNS_TYPE_SOA", Const, 0},
+		{"DNS_TYPE_SRV", Const, 0},
+		{"DNS_TYPE_TEXT", Const, 0},
+		{"DNS_TYPE_TKEY", Const, 0},
+		{"DNS_TYPE_TSIG", Const, 0},
+		{"DNS_TYPE_UID", Const, 0},
+		{"DNS_TYPE_UINFO", Const, 0},
+		{"DNS_TYPE_UNSPEC", Const, 0},
+		{"DNS_TYPE_WINS", Const, 0},
+		{"DNS_TYPE_WINSR", Const, 0},
+		{"DNS_TYPE_WKS", Const, 0},
+		{"DNS_TYPE_X25", Const, 0},
+		{"DT_BLK", Const, 0},
+		{"DT_CHR", Const, 0},
+		{"DT_DIR", Const, 0},
+		{"DT_FIFO", Const, 0},
+		{"DT_LNK", Const, 0},
+		{"DT_REG", Const, 0},
+		{"DT_SOCK", Const, 0},
+		{"DT_UNKNOWN", Const, 0},
+		{"DT_WHT", Const, 0},
+		{"DUPLICATE_CLOSE_SOURCE", Const, 0},
+		{"DUPLICATE_SAME_ACCESS", Const, 0},
+		{"DeleteFile", Func, 0},
+		{"DetachLsf", Func, 0},
+		{"DeviceIoControl", Func, 4},
+		{"Dirent", Type, 0},
+		{"Dirent.Fileno", Field, 0},
+		{"Dirent.Ino", Field, 0},
+		{"Dirent.Name", Field, 0},
+		{"Dirent.Namlen", Field, 0},
+		{"Dirent.Off", Field, 0},
+		{"Dirent.Pad0", Field, 12},
+		{"Dirent.Pad1", Field, 12},
+		{"Dirent.Pad_cgo_0", Field, 0},
+		{"Dirent.Reclen", Field, 0},
+		{"Dirent.Seekoff", Field, 0},
+		{"Dirent.Type", Field, 0},
+		{"Dirent.X__d_padding", Field, 3},
+		{"DnsNameCompare", Func, 4},
+		{"DnsQuery", Func, 0},
+		{"DnsRecordListFree", Func, 0},
+		{"DnsSectionAdditional", Const, 4},
+		{"DnsSectionAnswer", Const, 4},
+		{"DnsSectionAuthority", Const, 4},
+		{"DnsSectionQuestion", Const, 4},
+		{"Dup", Func, 0},
+		{"Dup2", Func, 0},
+		{"Dup3", Func, 2},
+		{"DuplicateHandle", Func, 0},
+		{"E2BIG", Const, 0},
+		{"EACCES", Const, 0},
+		{"EADDRINUSE", Const, 0},
+		{"EADDRNOTAVAIL", Const, 0},
+		{"EADV", Const, 0},
+		{"EAFNOSUPPORT", Const, 0},
+		{"EAGAIN", Const, 0},
+		{"EALREADY", Const, 0},
+		{"EAUTH", Const, 0},
+		{"EBADARCH", Const, 0},
+		{"EBADE", Const, 0},
+		{"EBADEXEC", Const, 0},
+		{"EBADF", Const, 0},
+		{"EBADFD", Const, 0},
+		{"EBADMACHO", Const, 0},
+		{"EBADMSG", Const, 0},
+		{"EBADR", Const, 0},
+		{"EBADRPC", Const, 0},
+		{"EBADRQC", Const, 0},
+		{"EBADSLT", Const, 0},
+		{"EBFONT", Const, 0},
+		{"EBUSY", Const, 0},
+		{"ECANCELED", Const, 0},
+		{"ECAPMODE", Const, 1},
+		{"ECHILD", Const, 0},
+		{"ECHO", Const, 0},
+		{"ECHOCTL", Const, 0},
+		{"ECHOE", Const, 0},
+		{"ECHOK", Const, 0},
+		{"ECHOKE", Const, 0},
+		{"ECHONL", Const, 0},
+		{"ECHOPRT", Const, 0},
+		{"ECHRNG", Const, 0},
+		{"ECOMM", Const, 0},
+		{"ECONNABORTED", Const, 0},
+		{"ECONNREFUSED", Const, 0},
+		{"ECONNRESET", Const, 0},
+		{"EDEADLK", Const, 0},
+		{"EDEADLOCK", Const, 0},
+		{"EDESTADDRREQ", Const, 0},
+		{"EDEVERR", Const, 0},
+		{"EDOM", Const, 0},
+		{"EDOOFUS", Const, 0},
+		{"EDOTDOT", Const, 0},
+		{"EDQUOT", Const, 0},
+		{"EEXIST", Const, 0},
+		{"EFAULT", Const, 0},
+		{"EFBIG", Const, 0},
+		{"EFER_LMA", Const, 1},
+		{"EFER_LME", Const, 1},
+		{"EFER_NXE", Const, 1},
+		{"EFER_SCE", Const, 1},
+		{"EFTYPE", Const, 0},
+		{"EHOSTDOWN", Const, 0},
+		{"EHOSTUNREACH", Const, 0},
+		{"EHWPOISON", Const, 0},
+		{"EIDRM", Const, 0},
+		{"EILSEQ", Const, 0},
+		{"EINPROGRESS", Const, 0},
+		{"EINTR", Const, 0},
+		{"EINVAL", Const, 0},
+		{"EIO", Const, 0},
+		{"EIPSEC", Const, 1},
+		{"EISCONN", Const, 0},
+		{"EISDIR", Const, 0},
+		{"EISNAM", Const, 0},
+		{"EKEYEXPIRED", Const, 0},
+		{"EKEYREJECTED", Const, 0},
+		{"EKEYREVOKED", Const, 0},
+		{"EL2HLT", Const, 0},
+		{"EL2NSYNC", Const, 0},
+		{"EL3HLT", Const, 0},
+		{"EL3RST", Const, 0},
+		{"ELAST", Const, 0},
+		{"ELF_NGREG", Const, 0},
+		{"ELF_PRARGSZ", Const, 0},
+		{"ELIBACC", Const, 0},
+		{"ELIBBAD", Const, 0},
+		{"ELIBEXEC", Const, 0},
+		{"ELIBMAX", Const, 0},
+		{"ELIBSCN", Const, 0},
+		{"ELNRNG", Const, 0},
+		{"ELOOP", Const, 0},
+		{"EMEDIUMTYPE", Const, 0},
+		{"EMFILE", Const, 0},
+		{"EMLINK", Const, 0},
+		{"EMSGSIZE", Const, 0},
+		{"EMT_TAGOVF", Const, 1},
+		{"EMULTIHOP", Const, 0},
+		{"EMUL_ENABLED", Const, 1},
+		{"EMUL_LINUX", Const, 1},
+		{"EMUL_LINUX32", Const, 1},
+		{"EMUL_MAXID", Const, 1},
+		{"EMUL_NATIVE", Const, 1},
+		{"ENAMETOOLONG", Const, 0},
+		{"ENAVAIL", Const, 0},
+		{"ENDRUNDISC", Const, 1},
+		{"ENEEDAUTH", Const, 0},
+		{"ENETDOWN", Const, 0},
+		{"ENETRESET", Const, 0},
+		{"ENETUNREACH", Const, 0},
+		{"ENFILE", Const, 0},
+		{"ENOANO", Const, 0},
+		{"ENOATTR", Const, 0},
+		{"ENOBUFS", Const, 0},
+		{"ENOCSI", Const, 0},
+		{"ENODATA", Const, 0},
+		{"ENODEV", Const, 0},
+		{"ENOENT", Const, 0},
+		{"ENOEXEC", Const, 0},
+		{"ENOKEY", Const, 0},
+		{"ENOLCK", Const, 0},
+		{"ENOLINK", Const, 0},
+		{"ENOMEDIUM", Const, 0},
+		{"ENOMEM", Const, 0},
+		{"ENOMSG", Const, 0},
+		{"ENONET", Const, 0},
+		{"ENOPKG", Const, 0},
+		{"ENOPOLICY", Const, 0},
+		{"ENOPROTOOPT", Const, 0},
+		{"ENOSPC", Const, 0},
+		{"ENOSR", Const, 0},
+		{"ENOSTR", Const, 0},
+		{"ENOSYS", Const, 0},
+		{"ENOTBLK", Const, 0},
+		{"ENOTCAPABLE", Const, 0},
+		{"ENOTCONN", Const, 0},
+		{"ENOTDIR", Const, 0},
+		{"ENOTEMPTY", Const, 0},
+		{"ENOTNAM", Const, 0},
+		{"ENOTRECOVERABLE", Const, 0},
+		{"ENOTSOCK", Const, 0},
+		{"ENOTSUP", Const, 0},
+		{"ENOTTY", Const, 0},
+		{"ENOTUNIQ", Const, 0},
+		{"ENXIO", Const, 0},
+		{"EN_SW_CTL_INF", Const, 1},
+		{"EN_SW_CTL_PREC", Const, 1},
+		{"EN_SW_CTL_ROUND", Const, 1},
+		{"EN_SW_DATACHAIN", Const, 1},
+		{"EN_SW_DENORM", Const, 1},
+		{"EN_SW_INVOP", Const, 1},
+		{"EN_SW_OVERFLOW", Const, 1},
+		{"EN_SW_PRECLOSS", Const, 1},
+		{"EN_SW_UNDERFLOW", Const, 1},
+		{"EN_SW_ZERODIV", Const, 1},
+		{"EOPNOTSUPP", Const, 0},
+		{"EOVERFLOW", Const, 0},
+		{"EOWNERDEAD", Const, 0},
+		{"EPERM", Const, 0},
+		{"EPFNOSUPPORT", Const, 0},
+		{"EPIPE", Const, 0},
+		{"EPOLLERR", Const, 0},
+		{"EPOLLET", Const, 0},
+		{"EPOLLHUP", Const, 0},
+		{"EPOLLIN", Const, 0},
+		{"EPOLLMSG", Const, 0},
+		{"EPOLLONESHOT", Const, 0},
+		{"EPOLLOUT", Const, 0},
+		{"EPOLLPRI", Const, 0},
+		{"EPOLLRDBAND", Const, 0},
+		{"EPOLLRDHUP", Const, 0},
+		{"EPOLLRDNORM", Const, 0},
+		{"EPOLLWRBAND", Const, 0},
+		{"EPOLLWRNORM", Const, 0},
+		{"EPOLL_CLOEXEC", Const, 0},
+		{"EPOLL_CTL_ADD", Const, 0},
+		{"EPOLL_CTL_DEL", Const, 0},
+		{"EPOLL_CTL_MOD", Const, 0},
+		{"EPOLL_NONBLOCK", Const, 0},
+		{"EPROCLIM", Const, 0},
+		{"EPROCUNAVAIL", Const, 0},
+		{"EPROGMISMATCH", Const, 0},
+		{"EPROGUNAVAIL", Const, 0},
+		{"EPROTO", Const, 0},
+		{"EPROTONOSUPPORT", Const, 0},
+		{"EPROTOTYPE", Const, 0},
+		{"EPWROFF", Const, 0},
+		{"EQFULL", Const, 16},
+		{"ERANGE", Const, 0},
+		{"EREMCHG", Const, 0},
+		{"EREMOTE", Const, 0},
+		{"EREMOTEIO", Const, 0},
+		{"ERESTART", Const, 0},
+		{"ERFKILL", Const, 0},
+		{"EROFS", Const, 0},
+		{"ERPCMISMATCH", Const, 0},
+		{"ERROR_ACCESS_DENIED", Const, 0},
+		{"ERROR_ALREADY_EXISTS", Const, 0},
+		{"ERROR_BROKEN_PIPE", Const, 0},
+		{"ERROR_BUFFER_OVERFLOW", Const, 0},
+		{"ERROR_DIR_NOT_EMPTY", Const, 8},
+		{"ERROR_ENVVAR_NOT_FOUND", Const, 0},
+		{"ERROR_FILE_EXISTS", Const, 0},
+		{"ERROR_FILE_NOT_FOUND", Const, 0},
+		{"ERROR_HANDLE_EOF", Const, 2},
+		{"ERROR_INSUFFICIENT_BUFFER", Const, 0},
+		{"ERROR_IO_PENDING", Const, 0},
+		{"ERROR_MOD_NOT_FOUND", Const, 0},
+		{"ERROR_MORE_DATA", Const, 3},
+		{"ERROR_NETNAME_DELETED", Const, 3},
+		{"ERROR_NOT_FOUND", Const, 1},
+		{"ERROR_NO_MORE_FILES", Const, 0},
+		{"ERROR_OPERATION_ABORTED", Const, 0},
+		{"ERROR_PATH_NOT_FOUND", Const, 0},
+		{"ERROR_PRIVILEGE_NOT_HELD", Const, 4},
+		{"ERROR_PROC_NOT_FOUND", Const, 0},
+		{"ESHLIBVERS", Const, 0},
+		{"ESHUTDOWN", Const, 0},
+		{"ESOCKTNOSUPPORT", Const, 0},
+		{"ESPIPE", Const, 0},
+		{"ESRCH", Const, 0},
+		{"ESRMNT", Const, 0},
+		{"ESTALE", Const, 0},
+		{"ESTRPIPE", Const, 0},
+		{"ETHERCAP_JUMBO_MTU", Const, 1},
+		{"ETHERCAP_VLAN_HWTAGGING", Const, 1},
+		{"ETHERCAP_VLAN_MTU", Const, 1},
+		{"ETHERMIN", Const, 1},
+		{"ETHERMTU", Const, 1},
+		{"ETHERMTU_JUMBO", Const, 1},
+		{"ETHERTYPE_8023", Const, 1},
+		{"ETHERTYPE_AARP", Const, 1},
+		{"ETHERTYPE_ACCTON", Const, 1},
+		{"ETHERTYPE_AEONIC", Const, 1},
+		{"ETHERTYPE_ALPHA", Const, 1},
+		{"ETHERTYPE_AMBER", Const, 1},
+		{"ETHERTYPE_AMOEBA", Const, 1},
+		{"ETHERTYPE_AOE", Const, 1},
+		{"ETHERTYPE_APOLLO", Const, 1},
+		{"ETHERTYPE_APOLLODOMAIN", Const, 1},
+		{"ETHERTYPE_APPLETALK", Const, 1},
+		{"ETHERTYPE_APPLITEK", Const, 1},
+		{"ETHERTYPE_ARGONAUT", Const, 1},
+		{"ETHERTYPE_ARP", Const, 1},
+		{"ETHERTYPE_AT", Const, 1},
+		{"ETHERTYPE_ATALK", Const, 1},
+		{"ETHERTYPE_ATOMIC", Const, 1},
+		{"ETHERTYPE_ATT", Const, 1},
+		{"ETHERTYPE_ATTSTANFORD", Const, 1},
+		{"ETHERTYPE_AUTOPHON", Const, 1},
+		{"ETHERTYPE_AXIS", Const, 1},
+		{"ETHERTYPE_BCLOOP", Const, 1},
+		{"ETHERTYPE_BOFL", Const, 1},
+		{"ETHERTYPE_CABLETRON", Const, 1},
+		{"ETHERTYPE_CHAOS", Const, 1},
+		{"ETHERTYPE_COMDESIGN", Const, 1},
+		{"ETHERTYPE_COMPUGRAPHIC", Const, 1},
+		{"ETHERTYPE_COUNTERPOINT", Const, 1},
+		{"ETHERTYPE_CRONUS", Const, 1},
+		{"ETHERTYPE_CRONUSVLN", Const, 1},
+		{"ETHERTYPE_DCA", Const, 1},
+		{"ETHERTYPE_DDE", Const, 1},
+		{"ETHERTYPE_DEBNI", Const, 1},
+		{"ETHERTYPE_DECAM", Const, 1},
+		{"ETHERTYPE_DECCUST", Const, 1},
+		{"ETHERTYPE_DECDIAG", Const, 1},
+		{"ETHERTYPE_DECDNS", Const, 1},
+		{"ETHERTYPE_DECDTS", Const, 1},
+		{"ETHERTYPE_DECEXPER", Const, 1},
+		{"ETHERTYPE_DECLAST", Const, 1},
+		{"ETHERTYPE_DECLTM", Const, 1},
+		{"ETHERTYPE_DECMUMPS", Const, 1},
+		{"ETHERTYPE_DECNETBIOS", Const, 1},
+		{"ETHERTYPE_DELTACON", Const, 1},
+		{"ETHERTYPE_DIDDLE", Const, 1},
+		{"ETHERTYPE_DLOG1", Const, 1},
+		{"ETHERTYPE_DLOG2", Const, 1},
+		{"ETHERTYPE_DN", Const, 1},
+		{"ETHERTYPE_DOGFIGHT", Const, 1},
+		{"ETHERTYPE_DSMD", Const, 1},
+		{"ETHERTYPE_ECMA", Const, 1},
+		{"ETHERTYPE_ENCRYPT", Const, 1},
+		{"ETHERTYPE_ES", Const, 1},
+		{"ETHERTYPE_EXCELAN", Const, 1},
+		{"ETHERTYPE_EXPERDATA", Const, 1},
+		{"ETHERTYPE_FLIP", Const, 1},
+		{"ETHERTYPE_FLOWCONTROL", Const, 1},
+		{"ETHERTYPE_FRARP", Const, 1},
+		{"ETHERTYPE_GENDYN", Const, 1},
+		{"ETHERTYPE_HAYES", Const, 1},
+		{"ETHERTYPE_HIPPI_FP", Const, 1},
+		{"ETHERTYPE_HITACHI", Const, 1},
+		{"ETHERTYPE_HP", Const, 1},
+		{"ETHERTYPE_IEEEPUP", Const, 1},
+		{"ETHERTYPE_IEEEPUPAT", Const, 1},
+		{"ETHERTYPE_IMLBL", Const, 1},
+		{"ETHERTYPE_IMLBLDIAG", Const, 1},
+		{"ETHERTYPE_IP", Const, 1},
+		{"ETHERTYPE_IPAS", Const, 1},
+		{"ETHERTYPE_IPV6", Const, 1},
+		{"ETHERTYPE_IPX", Const, 1},
+		{"ETHERTYPE_IPXNEW", Const, 1},
+		{"ETHERTYPE_KALPANA", Const, 1},
+		{"ETHERTYPE_LANBRIDGE", Const, 1},
+		{"ETHERTYPE_LANPROBE", Const, 1},
+		{"ETHERTYPE_LAT", Const, 1},
+		{"ETHERTYPE_LBACK", Const, 1},
+		{"ETHERTYPE_LITTLE", Const, 1},
+		{"ETHERTYPE_LLDP", Const, 1},
+		{"ETHERTYPE_LOGICRAFT", Const, 1},
+		{"ETHERTYPE_LOOPBACK", Const, 1},
+		{"ETHERTYPE_MATRA", Const, 1},
+		{"ETHERTYPE_MAX", Const, 1},
+		{"ETHERTYPE_MERIT", Const, 1},
+		{"ETHERTYPE_MICP", Const, 1},
+		{"ETHERTYPE_MOPDL", Const, 1},
+		{"ETHERTYPE_MOPRC", Const, 1},
+		{"ETHERTYPE_MOTOROLA", Const, 1},
+		{"ETHERTYPE_MPLS", Const, 1},
+		{"ETHERTYPE_MPLS_MCAST", Const, 1},
+		{"ETHERTYPE_MUMPS", Const, 1},
+		{"ETHERTYPE_NBPCC", Const, 1},
+		{"ETHERTYPE_NBPCLAIM", Const, 1},
+		{"ETHERTYPE_NBPCLREQ", Const, 1},
+		{"ETHERTYPE_NBPCLRSP", Const, 1},
+		{"ETHERTYPE_NBPCREQ", Const, 1},
+		{"ETHERTYPE_NBPCRSP", Const, 1},
+		{"ETHERTYPE_NBPDG", Const, 1},
+		{"ETHERTYPE_NBPDGB", Const, 1},
+		{"ETHERTYPE_NBPDLTE", Const, 1},
+		{"ETHERTYPE_NBPRAR", Const, 1},
+		{"ETHERTYPE_NBPRAS", Const, 1},
+		{"ETHERTYPE_NBPRST", Const, 1},
+		{"ETHERTYPE_NBPSCD", Const, 1},
+		{"ETHERTYPE_NBPVCD", Const, 1},
+		{"ETHERTYPE_NBS", Const, 1},
+		{"ETHERTYPE_NCD", Const, 1},
+		{"ETHERTYPE_NESTAR", Const, 1},
+		{"ETHERTYPE_NETBEUI", Const, 1},
+		{"ETHERTYPE_NOVELL", Const, 1},
+		{"ETHERTYPE_NS", Const, 1},
+		{"ETHERTYPE_NSAT", Const, 1},
+		{"ETHERTYPE_NSCOMPAT", Const, 1},
+		{"ETHERTYPE_NTRAILER", Const, 1},
+		{"ETHERTYPE_OS9", Const, 1},
+		{"ETHERTYPE_OS9NET", Const, 1},
+		{"ETHERTYPE_PACER", Const, 1},
+		{"ETHERTYPE_PAE", Const, 1},
+		{"ETHERTYPE_PCS", Const, 1},
+		{"ETHERTYPE_PLANNING", Const, 1},
+		{"ETHERTYPE_PPP", Const, 1},
+		{"ETHERTYPE_PPPOE", Const, 1},
+		{"ETHERTYPE_PPPOEDISC", Const, 1},
+		{"ETHERTYPE_PRIMENTS", Const, 1},
+		{"ETHERTYPE_PUP", Const, 1},
+		{"ETHERTYPE_PUPAT", Const, 1},
+		{"ETHERTYPE_QINQ", Const, 1},
+		{"ETHERTYPE_RACAL", Const, 1},
+		{"ETHERTYPE_RATIONAL", Const, 1},
+		{"ETHERTYPE_RAWFR", Const, 1},
+		{"ETHERTYPE_RCL", Const, 1},
+		{"ETHERTYPE_RDP", Const, 1},
+		{"ETHERTYPE_RETIX", Const, 1},
+		{"ETHERTYPE_REVARP", Const, 1},
+		{"ETHERTYPE_SCA", Const, 1},
+		{"ETHERTYPE_SECTRA", Const, 1},
+		{"ETHERTYPE_SECUREDATA", Const, 1},
+		{"ETHERTYPE_SGITW", Const, 1},
+		{"ETHERTYPE_SG_BOUNCE", Const, 1},
+		{"ETHERTYPE_SG_DIAG", Const, 1},
+		{"ETHERTYPE_SG_NETGAMES", Const, 1},
+		{"ETHERTYPE_SG_RESV", Const, 1},
+		{"ETHERTYPE_SIMNET", Const, 1},
+		{"ETHERTYPE_SLOW", Const, 1},
+		{"ETHERTYPE_SLOWPROTOCOLS", Const, 1},
+		{"ETHERTYPE_SNA", Const, 1},
+		{"ETHERTYPE_SNMP", Const, 1},
+		{"ETHERTYPE_SONIX", Const, 1},
+		{"ETHERTYPE_SPIDER", Const, 1},
+		{"ETHERTYPE_SPRITE", Const, 1},
+		{"ETHERTYPE_STP", Const, 1},
+		{"ETHERTYPE_TALARIS", Const, 1},
+		{"ETHERTYPE_TALARISMC", Const, 1},
+		{"ETHERTYPE_TCPCOMP", Const, 1},
+		{"ETHERTYPE_TCPSM", Const, 1},
+		{"ETHERTYPE_TEC", Const, 1},
+		{"ETHERTYPE_TIGAN", Const, 1},
+		{"ETHERTYPE_TRAIL", Const, 1},
+		{"ETHERTYPE_TRANSETHER", Const, 1},
+		{"ETHERTYPE_TYMSHARE", Const, 1},
+		{"ETHERTYPE_UBBST", Const, 1},
+		{"ETHERTYPE_UBDEBUG", Const, 1},
+		{"ETHERTYPE_UBDIAGLOOP", Const, 1},
+		{"ETHERTYPE_UBDL", Const, 1},
+		{"ETHERTYPE_UBNIU", Const, 1},
+		{"ETHERTYPE_UBNMC", Const, 1},
+		{"ETHERTYPE_VALID", Const, 1},
+		{"ETHERTYPE_VARIAN", Const, 1},
+		{"ETHERTYPE_VAXELN", Const, 1},
+		{"ETHERTYPE_VEECO", Const, 1},
+		{"ETHERTYPE_VEXP", Const, 1},
+		{"ETHERTYPE_VGLAB", Const, 1},
+		{"ETHERTYPE_VINES", Const, 1},
+		{"ETHERTYPE_VINESECHO", Const, 1},
+		{"ETHERTYPE_VINESLOOP", Const, 1},
+		{"ETHERTYPE_VITAL", Const, 1},
+		{"ETHERTYPE_VLAN", Const, 1},
+		{"ETHERTYPE_VLTLMAN", Const, 1},
+		{"ETHERTYPE_VPROD", Const, 1},
+		{"ETHERTYPE_VURESERVED", Const, 1},
+		{"ETHERTYPE_WATERLOO", Const, 1},
+		{"ETHERTYPE_WELLFLEET", Const, 1},
+		{"ETHERTYPE_X25", Const, 1},
+		{"ETHERTYPE_X75", Const, 1},
+		{"ETHERTYPE_XNSSM", Const, 1},
+		{"ETHERTYPE_XTP", Const, 1},
+		{"ETHER_ADDR_LEN", Const, 1},
+		{"ETHER_ALIGN", Const, 1},
+		{"ETHER_CRC_LEN", Const, 1},
+		{"ETHER_CRC_POLY_BE", Const, 1},
+		{"ETHER_CRC_POLY_LE", Const, 1},
+		{"ETHER_HDR_LEN", Const, 1},
+		{"ETHER_MAX_DIX_LEN", Const, 1},
+		{"ETHER_MAX_LEN", Const, 1},
+		{"ETHER_MAX_LEN_JUMBO", Const, 1},
+		{"ETHER_MIN_LEN", Const, 1},
+		{"ETHER_PPPOE_ENCAP_LEN", Const, 1},
+		{"ETHER_TYPE_LEN", Const, 1},
+		{"ETHER_VLAN_ENCAP_LEN", Const, 1},
+		{"ETH_P_1588", Const, 0},
+		{"ETH_P_8021Q", Const, 0},
+		{"ETH_P_802_2", Const, 0},
+		{"ETH_P_802_3", Const, 0},
+		{"ETH_P_AARP", Const, 0},
+		{"ETH_P_ALL", Const, 0},
+		{"ETH_P_AOE", Const, 0},
+		{"ETH_P_ARCNET", Const, 0},
+		{"ETH_P_ARP", Const, 0},
+		{"ETH_P_ATALK", Const, 0},
+		{"ETH_P_ATMFATE", Const, 0},
+		{"ETH_P_ATMMPOA", Const, 0},
+		{"ETH_P_AX25", Const, 0},
+		{"ETH_P_BPQ", Const, 0},
+		{"ETH_P_CAIF", Const, 0},
+		{"ETH_P_CAN", Const, 0},
+		{"ETH_P_CONTROL", Const, 0},
+		{"ETH_P_CUST", Const, 0},
+		{"ETH_P_DDCMP", Const, 0},
+		{"ETH_P_DEC", Const, 0},
+		{"ETH_P_DIAG", Const, 0},
+		{"ETH_P_DNA_DL", Const, 0},
+		{"ETH_P_DNA_RC", Const, 0},
+		{"ETH_P_DNA_RT", Const, 0},
+		{"ETH_P_DSA", Const, 0},
+		{"ETH_P_ECONET", Const, 0},
+		{"ETH_P_EDSA", Const, 0},
+		{"ETH_P_FCOE", Const, 0},
+		{"ETH_P_FIP", Const, 0},
+		{"ETH_P_HDLC", Const, 0},
+		{"ETH_P_IEEE802154", Const, 0},
+		{"ETH_P_IEEEPUP", Const, 0},
+		{"ETH_P_IEEEPUPAT", Const, 0},
+		{"ETH_P_IP", Const, 0},
+		{"ETH_P_IPV6", Const, 0},
+		{"ETH_P_IPX", Const, 0},
+		{"ETH_P_IRDA", Const, 0},
+		{"ETH_P_LAT", Const, 0},
+		{"ETH_P_LINK_CTL", Const, 0},
+		{"ETH_P_LOCALTALK", Const, 0},
+		{"ETH_P_LOOP", Const, 0},
+		{"ETH_P_MOBITEX", Const, 0},
+		{"ETH_P_MPLS_MC", Const, 0},
+		{"ETH_P_MPLS_UC", Const, 0},
+		{"ETH_P_PAE", Const, 0},
+		{"ETH_P_PAUSE", Const, 0},
+		{"ETH_P_PHONET", Const, 0},
+		{"ETH_P_PPPTALK", Const, 0},
+		{"ETH_P_PPP_DISC", Const, 0},
+		{"ETH_P_PPP_MP", Const, 0},
+		{"ETH_P_PPP_SES", Const, 0},
+		{"ETH_P_PUP", Const, 0},
+		{"ETH_P_PUPAT", Const, 0},
+		{"ETH_P_RARP", Const, 0},
+		{"ETH_P_SCA", Const, 0},
+		{"ETH_P_SLOW", Const, 0},
+		{"ETH_P_SNAP", Const, 0},
+		{"ETH_P_TEB", Const, 0},
+		{"ETH_P_TIPC", Const, 0},
+		{"ETH_P_TRAILER", Const, 0},
+		{"ETH_P_TR_802_2", Const, 0},
+		{"ETH_P_WAN_PPP", Const, 0},
+		{"ETH_P_WCCP", Const, 0},
+		{"ETH_P_X25", Const, 0},
+		{"ETIME", Const, 0},
+		{"ETIMEDOUT", Const, 0},
+		{"ETOOMANYREFS", Const, 0},
+		{"ETXTBSY", Const, 0},
+		{"EUCLEAN", Const, 0},
+		{"EUNATCH", Const, 0},
+		{"EUSERS", Const, 0},
+		{"EVFILT_AIO", Const, 0},
+		{"EVFILT_FS", Const, 0},
+		{"EVFILT_LIO", Const, 0},
+		{"EVFILT_MACHPORT", Const, 0},
+		{"EVFILT_PROC", Const, 0},
+		{"EVFILT_READ", Const, 0},
+		{"EVFILT_SIGNAL", Const, 0},
+		{"EVFILT_SYSCOUNT", Const, 0},
+		{"EVFILT_THREADMARKER", Const, 0},
+		{"EVFILT_TIMER", Const, 0},
+		{"EVFILT_USER", Const, 0},
+		{"EVFILT_VM", Const, 0},
+		{"EVFILT_VNODE", Const, 0},
+		{"EVFILT_WRITE", Const, 0},
+		{"EV_ADD", Const, 0},
+		{"EV_CLEAR", Const, 0},
+		{"EV_DELETE", Const, 0},
+		{"EV_DISABLE", Const, 0},
+		{"EV_DISPATCH", Const, 0},
+		{"EV_DROP", Const, 3},
+		{"EV_ENABLE", Const, 0},
+		{"EV_EOF", Const, 0},
+		{"EV_ERROR", Const, 0},
+		{"EV_FLAG0", Const, 0},
+		{"EV_FLAG1", Const, 0},
+		{"EV_ONESHOT", Const, 0},
+		{"EV_OOBAND", Const, 0},
+		{"EV_POLL", Const, 0},
+		{"EV_RECEIPT", Const, 0},
+		{"EV_SYSFLAGS", Const, 0},
+		{"EWINDOWS", Const, 0},
+		{"EWOULDBLOCK", Const, 0},
+		{"EXDEV", Const, 0},
+		{"EXFULL", Const, 0},
+		{"EXTA", Const, 0},
+		{"EXTB", Const, 0},
+		{"EXTPROC", Const, 0},
+		{"Environ", Func, 0},
+		{"EpollCreate", Func, 0},
+		{"EpollCreate1", Func, 0},
+		{"EpollCtl", Func, 0},
+		{"EpollEvent", Type, 0},
+		{"EpollEvent.Events", Field, 0},
+		{"EpollEvent.Fd", Field, 0},
+		{"EpollEvent.Pad", Field, 0},
+		{"EpollEvent.PadFd", Field, 0},
+		{"EpollWait", Func, 0},
+		{"Errno", Type, 0},
+		{"EscapeArg", Func, 0},
+		{"Exchangedata", Func, 0},
+		{"Exec", Func, 0},
+		{"Exit", Func, 0},
+		{"ExitProcess", Func, 0},
+		{"FD_CLOEXEC", Const, 0},
+		{"FD_SETSIZE", Const, 0},
+		{"FILE_ACTION_ADDED", Const, 0},
+		{"FILE_ACTION_MODIFIED", Const, 0},
+		{"FILE_ACTION_REMOVED", Const, 0},
+		{"FILE_ACTION_RENAMED_NEW_NAME", Const, 0},
+		{"FILE_ACTION_RENAMED_OLD_NAME", Const, 0},
+		{"FILE_APPEND_DATA", Const, 0},
+		{"FILE_ATTRIBUTE_ARCHIVE", Const, 0},
+		{"FILE_ATTRIBUTE_DIRECTORY", Const, 0},
+		{"FILE_ATTRIBUTE_HIDDEN", Const, 0},
+		{"FILE_ATTRIBUTE_NORMAL", Const, 0},
+		{"FILE_ATTRIBUTE_READONLY", Const, 0},
+		{"FILE_ATTRIBUTE_REPARSE_POINT", Const, 4},
+		{"FILE_ATTRIBUTE_SYSTEM", Const, 0},
+		{"FILE_BEGIN", Const, 0},
+		{"FILE_CURRENT", Const, 0},
+		{"FILE_END", Const, 0},
+		{"FILE_FLAG_BACKUP_SEMANTICS", Const, 0},
+		{"FILE_FLAG_OPEN_REPARSE_POINT", Const, 4},
+		{"FILE_FLAG_OVERLAPPED", Const, 0},
+		{"FILE_LIST_DIRECTORY", Const, 0},
+		{"FILE_MAP_COPY", Const, 0},
+		{"FILE_MAP_EXECUTE", Const, 0},
+		{"FILE_MAP_READ", Const, 0},
+		{"FILE_MAP_WRITE", Const, 0},
+		{"FILE_NOTIFY_CHANGE_ATTRIBUTES", Const, 0},
+		{"FILE_NOTIFY_CHANGE_CREATION", Const, 0},
+		{"FILE_NOTIFY_CHANGE_DIR_NAME", Const, 0},
+		{"FILE_NOTIFY_CHANGE_FILE_NAME", Const, 0},
+		{"FILE_NOTIFY_CHANGE_LAST_ACCESS", Const, 0},
+		{"FILE_NOTIFY_CHANGE_LAST_WRITE", Const, 0},
+		{"FILE_NOTIFY_CHANGE_SIZE", Const, 0},
+		{"FILE_SHARE_DELETE", Const, 0},
+		{"FILE_SHARE_READ", Const, 0},
+		{"FILE_SHARE_WRITE", Const, 0},
+		{"FILE_SKIP_COMPLETION_PORT_ON_SUCCESS", Const, 2},
+		{"FILE_SKIP_SET_EVENT_ON_HANDLE", Const, 2},
+		{"FILE_TYPE_CHAR", Const, 0},
+		{"FILE_TYPE_DISK", Const, 0},
+		{"FILE_TYPE_PIPE", Const, 0},
+		{"FILE_TYPE_REMOTE", Const, 0},
+		{"FILE_TYPE_UNKNOWN", Const, 0},
+		{"FILE_WRITE_ATTRIBUTES", Const, 0},
+		{"FLUSHO", Const, 0},
+		{"FORMAT_MESSAGE_ALLOCATE_BUFFER", Const, 0},
+		{"FORMAT_MESSAGE_ARGUMENT_ARRAY", Const, 0},
+		{"FORMAT_MESSAGE_FROM_HMODULE", Const, 0},
+		{"FORMAT_MESSAGE_FROM_STRING", Const, 0},
+		{"FORMAT_MESSAGE_FROM_SYSTEM", Const, 0},
+		{"FORMAT_MESSAGE_IGNORE_INSERTS", Const, 0},
+		{"FORMAT_MESSAGE_MAX_WIDTH_MASK", Const, 0},
+		{"FSCTL_GET_REPARSE_POINT", Const, 4},
+		{"F_ADDFILESIGS", Const, 0},
+		{"F_ADDSIGS", Const, 0},
+		{"F_ALLOCATEALL", Const, 0},
+		{"F_ALLOCATECONTIG", Const, 0},
+		{"F_CANCEL", Const, 0},
+		{"F_CHKCLEAN", Const, 0},
+		{"F_CLOSEM", Const, 1},
+		{"F_DUP2FD", Const, 0},
+		{"F_DUP2FD_CLOEXEC", Const, 1},
+		{"F_DUPFD", Const, 0},
+		{"F_DUPFD_CLOEXEC", Const, 0},
+		{"F_EXLCK", Const, 0},
+		{"F_FINDSIGS", Const, 16},
+		{"F_FLUSH_DATA", Const, 0},
+		{"F_FREEZE_FS", Const, 0},
+		{"F_FSCTL", Const, 1},
+		{"F_FSDIRMASK", Const, 1},
+		{"F_FSIN", Const, 1},
+		{"F_FSINOUT", Const, 1},
+		{"F_FSOUT", Const, 1},
+		{"F_FSPRIV", Const, 1},
+		{"F_FSVOID", Const, 1},
+		{"F_FULLFSYNC", Const, 0},
+		{"F_GETCODEDIR", Const, 16},
+		{"F_GETFD", Const, 0},
+		{"F_GETFL", Const, 0},
+		{"F_GETLEASE", Const, 0},
+		{"F_GETLK", Const, 0},
+		{"F_GETLK64", Const, 0},
+		{"F_GETLKPID", Const, 0},
+		{"F_GETNOSIGPIPE", Const, 0},
+		{"F_GETOWN", Const, 0},
+		{"F_GETOWN_EX", Const, 0},
+		{"F_GETPATH", Const, 0},
+		{"F_GETPATH_MTMINFO", Const, 0},
+		{"F_GETPIPE_SZ", Const, 0},
+		{"F_GETPROTECTIONCLASS", Const, 0},
+		{"F_GETPROTECTIONLEVEL", Const, 16},
+		{"F_GETSIG", Const, 0},
+		{"F_GLOBAL_NOCACHE", Const, 0},
+		{"F_LOCK", Const, 0},
+		{"F_LOG2PHYS", Const, 0},
+		{"F_LOG2PHYS_EXT", Const, 0},
+		{"F_MARKDEPENDENCY", Const, 0},
+		{"F_MAXFD", Const, 1},
+		{"F_NOCACHE", Const, 0},
+		{"F_NODIRECT", Const, 0},
+		{"F_NOTIFY", Const, 0},
+		{"F_OGETLK", Const, 0},
+		{"F_OK", Const, 0},
+		{"F_OSETLK", Const, 0},
+		{"F_OSETLKW", Const, 0},
+		{"F_PARAM_MASK", Const, 1},
+		{"F_PARAM_MAX", Const, 1},
+		{"F_PATHPKG_CHECK", Const, 0},
+		{"F_PEOFPOSMODE", Const, 0},
+		{"F_PREALLOCATE", Const, 0},
+		{"F_RDADVISE", Const, 0},
+		{"F_RDAHEAD", Const, 0},
+		{"F_RDLCK", Const, 0},
+		{"F_READAHEAD", Const, 0},
+		{"F_READBOOTSTRAP", Const, 0},
+		{"F_SETBACKINGSTORE", Const, 0},
+		{"F_SETFD", Const, 0},
+		{"F_SETFL", Const, 0},
+		{"F_SETLEASE", Const, 0},
+		{"F_SETLK", Const, 0},
+		{"F_SETLK64", Const, 0},
+		{"F_SETLKW", Const, 0},
+		{"F_SETLKW64", Const, 0},
+		{"F_SETLKWTIMEOUT", Const, 16},
+		{"F_SETLK_REMOTE", Const, 0},
+		{"F_SETNOSIGPIPE", Const, 0},
+		{"F_SETOWN", Const, 0},
+		{"F_SETOWN_EX", Const, 0},
+		{"F_SETPIPE_SZ", Const, 0},
+		{"F_SETPROTECTIONCLASS", Const, 0},
+		{"F_SETSIG", Const, 0},
+		{"F_SETSIZE", Const, 0},
+		{"F_SHLCK", Const, 0},
+		{"F_SINGLE_WRITER", Const, 16},
+		{"F_TEST", Const, 0},
+		{"F_THAW_FS", Const, 0},
+		{"F_TLOCK", Const, 0},
+		{"F_TRANSCODEKEY", Const, 16},
+		{"F_ULOCK", Const, 0},
+		{"F_UNLCK", Const, 0},
+		{"F_UNLCKSYS", Const, 0},
+		{"F_VOLPOSMODE", Const, 0},
+		{"F_WRITEBOOTSTRAP", Const, 0},
+		{"F_WRLCK", Const, 0},
+		{"Faccessat", Func, 0},
+		{"Fallocate", Func, 0},
+		{"Fbootstraptransfer_t", Type, 0},
+		{"Fbootstraptransfer_t.Buffer", Field, 0},
+		{"Fbootstraptransfer_t.Length", Field, 0},
+		{"Fbootstraptransfer_t.Offset", Field, 0},
+		{"Fchdir", Func, 0},
+		{"Fchflags", Func, 0},
+		{"Fchmod", Func, 0},
+		{"Fchmodat", Func, 0},
+		{"Fchown", Func, 0},
+		{"Fchownat", Func, 0},
+		{"FcntlFlock", Func, 3},
+		{"FdSet", Type, 0},
+		{"FdSet.Bits", Field, 0},
+		{"FdSet.X__fds_bits", Field, 0},
+		{"Fdatasync", Func, 0},
+		{"FileNotifyInformation", Type, 0},
+		{"FileNotifyInformation.Action", Field, 0},
+		{"FileNotifyInformation.FileName", Field, 0},
+		{"FileNotifyInformation.FileNameLength", Field, 0},
+		{"FileNotifyInformation.NextEntryOffset", Field, 0},
+		{"Filetime", Type, 0},
+		{"Filetime.HighDateTime", Field, 0},
+		{"Filetime.LowDateTime", Field, 0},
+		{"FindClose", Func, 0},
+		{"FindFirstFile", Func, 0},
+		{"FindNextFile", Func, 0},
+		{"Flock", Func, 0},
+		{"Flock_t", Type, 0},
+		{"Flock_t.Len", Field, 0},
+		{"Flock_t.Pad_cgo_0", Field, 0},
+		{"Flock_t.Pad_cgo_1", Field, 3},
+		{"Flock_t.Pid", Field, 0},
+		{"Flock_t.Start", Field, 0},
+		{"Flock_t.Sysid", Field, 0},
+		{"Flock_t.Type", Field, 0},
+		{"Flock_t.Whence", Field, 0},
+		{"FlushBpf", Func, 0},
+		{"FlushFileBuffers", Func, 0},
+		{"FlushViewOfFile", Func, 0},
+		{"ForkExec", Func, 0},
+		{"ForkLock", Var, 0},
+		{"FormatMessage", Func, 0},
+		{"Fpathconf", Func, 0},
+		{"FreeAddrInfoW", Func, 1},
+		{"FreeEnvironmentStrings", Func, 0},
+		{"FreeLibrary", Func, 0},
+		{"Fsid", Type, 0},
+		{"Fsid.Val", Field, 0},
+		{"Fsid.X__fsid_val", Field, 2},
+		{"Fsid.X__val", Field, 0},
+		{"Fstat", Func, 0},
+		{"Fstatat", Func, 12},
+		{"Fstatfs", Func, 0},
+		{"Fstore_t", Type, 0},
+		{"Fstore_t.Bytesalloc", Field, 0},
+		{"Fstore_t.Flags", Field, 0},
+		{"Fstore_t.Length", Field, 0},
+		{"Fstore_t.Offset", Field, 0},
+		{"Fstore_t.Posmode", Field, 0},
+		{"Fsync", Func, 0},
+		{"Ftruncate", Func, 0},
+		{"FullPath", Func, 4},
+		{"Futimes", Func, 0},
+		{"Futimesat", Func, 0},
+		{"GENERIC_ALL", Const, 0},
+		{"GENERIC_EXECUTE", Const, 0},
+		{"GENERIC_READ", Const, 0},
+		{"GENERIC_WRITE", Const, 0},
+		{"GUID", Type, 1},
+		{"GUID.Data1", Field, 1},
+		{"GUID.Data2", Field, 1},
+		{"GUID.Data3", Field, 1},
+		{"GUID.Data4", Field, 1},
+		{"GetAcceptExSockaddrs", Func, 0},
+		{"GetAdaptersInfo", Func, 0},
+		{"GetAddrInfoW", Func, 1},
+		{"GetCommandLine", Func, 0},
+		{"GetComputerName", Func, 0},
+		{"GetConsoleMode", Func, 1},
+		{"GetCurrentDirectory", Func, 0},
+		{"GetCurrentProcess", Func, 0},
+		{"GetEnvironmentStrings", Func, 0},
+		{"GetEnvironmentVariable", Func, 0},
+		{"GetExitCodeProcess", Func, 0},
+		{"GetFileAttributes", Func, 0},
+		{"GetFileAttributesEx", Func, 0},
+		{"GetFileExInfoStandard", Const, 0},
+		{"GetFileExMaxInfoLevel", Const, 0},
+		{"GetFileInformationByHandle", Func, 0},
+		{"GetFileType", Func, 0},
+		{"GetFullPathName", Func, 0},
+		{"GetHostByName", Func, 0},
+		{"GetIfEntry", Func, 0},
+		{"GetLastError", Func, 0},
+		{"GetLengthSid", Func, 0},
+		{"GetLongPathName", Func, 0},
+		{"GetProcAddress", Func, 0},
+		{"GetProcessTimes", Func, 0},
+		{"GetProtoByName", Func, 0},
+		{"GetQueuedCompletionStatus", Func, 0},
+		{"GetServByName", Func, 0},
+		{"GetShortPathName", Func, 0},
+		{"GetStartupInfo", Func, 0},
+		{"GetStdHandle", Func, 0},
+		{"GetSystemTimeAsFileTime", Func, 0},
+		{"GetTempPath", Func, 0},
+		{"GetTimeZoneInformation", Func, 0},
+		{"GetTokenInformation", Func, 0},
+		{"GetUserNameEx", Func, 0},
+		{"GetUserProfileDirectory", Func, 0},
+		{"GetVersion", Func, 0},
+		{"Getcwd", Func, 0},
+		{"Getdents", Func, 0},
+		{"Getdirentries", Func, 0},
+		{"Getdtablesize", Func, 0},
+		{"Getegid", Func, 0},
+		{"Getenv", Func, 0},
+		{"Geteuid", Func, 0},
+		{"Getfsstat", Func, 0},
+		{"Getgid", Func, 0},
+		{"Getgroups", Func, 0},
+		{"Getpagesize", Func, 0},
+		{"Getpeername", Func, 0},
+		{"Getpgid", Func, 0},
+		{"Getpgrp", Func, 0},
+		{"Getpid", Func, 0},
+		{"Getppid", Func, 0},
+		{"Getpriority", Func, 0},
+		{"Getrlimit", Func, 0},
+		{"Getrusage", Func, 0},
+		{"Getsid", Func, 0},
+		{"Getsockname", Func, 0},
+		{"Getsockopt", Func, 1},
+		{"GetsockoptByte", Func, 0},
+		{"GetsockoptICMPv6Filter", Func, 2},
+		{"GetsockoptIPMreq", Func, 0},
+		{"GetsockoptIPMreqn", Func, 0},
+		{"GetsockoptIPv6MTUInfo", Func, 2},
+		{"GetsockoptIPv6Mreq", Func, 0},
+		{"GetsockoptInet4Addr", Func, 0},
+		{"GetsockoptInt", Func, 0},
+		{"GetsockoptUcred", Func, 1},
+		{"Gettid", Func, 0},
+		{"Gettimeofday", Func, 0},
+		{"Getuid", Func, 0},
+		{"Getwd", Func, 0},
+		{"Getxattr", Func, 1},
+		{"HANDLE_FLAG_INHERIT", Const, 0},
+		{"HKEY_CLASSES_ROOT", Const, 0},
+		{"HKEY_CURRENT_CONFIG", Const, 0},
+		{"HKEY_CURRENT_USER", Const, 0},
+		{"HKEY_DYN_DATA", Const, 0},
+		{"HKEY_LOCAL_MACHINE", Const, 0},
+		{"HKEY_PERFORMANCE_DATA", Const, 0},
+		{"HKEY_USERS", Const, 0},
+		{"HUPCL", Const, 0},
+		{"Handle", Type, 0},
+		{"Hostent", Type, 0},
+		{"Hostent.AddrList", Field, 0},
+		{"Hostent.AddrType", Field, 0},
+		{"Hostent.Aliases", Field, 0},
+		{"Hostent.Length", Field, 0},
+		{"Hostent.Name", Field, 0},
+		{"ICANON", Const, 0},
+		{"ICMP6_FILTER", Const, 2},
+		{"ICMPV6_FILTER", Const, 2},
+		{"ICMPv6Filter", Type, 2},
+		{"ICMPv6Filter.Data", Field, 2},
+		{"ICMPv6Filter.Filt", Field, 2},
+		{"ICRNL", Const, 0},
+		{"IEXTEN", Const, 0},
+		{"IFAN_ARRIVAL", Const, 1},
+		{"IFAN_DEPARTURE", Const, 1},
+		{"IFA_ADDRESS", Const, 0},
+		{"IFA_ANYCAST", Const, 0},
+		{"IFA_BROADCAST", Const, 0},
+		{"IFA_CACHEINFO", Const, 0},
+		{"IFA_F_DADFAILED", Const, 0},
+		{"IFA_F_DEPRECATED", Const, 0},
+		{"IFA_F_HOMEADDRESS", Const, 0},
+		{"IFA_F_NODAD", Const, 0},
+		{"IFA_F_OPTIMISTIC", Const, 0},
+		{"IFA_F_PERMANENT", Const, 0},
+		{"IFA_F_SECONDARY", Const, 0},
+		{"IFA_F_TEMPORARY", Const, 0},
+		{"IFA_F_TENTATIVE", Const, 0},
+		{"IFA_LABEL", Const, 0},
+		{"IFA_LOCAL", Const, 0},
+		{"IFA_MAX", Const, 0},
+		{"IFA_MULTICAST", Const, 0},
+		{"IFA_ROUTE", Const, 1},
+		{"IFA_UNSPEC", Const, 0},
+		{"IFF_ALLMULTI", Const, 0},
+		{"IFF_ALTPHYS", Const, 0},
+		{"IFF_AUTOMEDIA", Const, 0},
+		{"IFF_BROADCAST", Const, 0},
+		{"IFF_CANTCHANGE", Const, 0},
+		{"IFF_CANTCONFIG", Const, 1},
+		{"IFF_DEBUG", Const, 0},
+		{"IFF_DRV_OACTIVE", Const, 0},
+		{"IFF_DRV_RUNNING", Const, 0},
+		{"IFF_DYING", Const, 0},
+		{"IFF_DYNAMIC", Const, 0},
+		{"IFF_LINK0", Const, 0},
+		{"IFF_LINK1", Const, 0},
+		{"IFF_LINK2", Const, 0},
+		{"IFF_LOOPBACK", Const, 0},
+		{"IFF_MASTER", Const, 0},
+		{"IFF_MONITOR", Const, 0},
+		{"IFF_MULTICAST", Const, 0},
+		{"IFF_NOARP", Const, 0},
+		{"IFF_NOTRAILERS", Const, 0},
+		{"IFF_NO_PI", Const, 0},
+		{"IFF_OACTIVE", Const, 0},
+		{"IFF_ONE_QUEUE", Const, 0},
+		{"IFF_POINTOPOINT", Const, 0},
+		{"IFF_POINTTOPOINT", Const, 0},
+		{"IFF_PORTSEL", Const, 0},
+		{"IFF_PPROMISC", Const, 0},
+		{"IFF_PROMISC", Const, 0},
+		{"IFF_RENAMING", Const, 0},
+		{"IFF_RUNNING", Const, 0},
+		{"IFF_SIMPLEX", Const, 0},
+		{"IFF_SLAVE", Const, 0},
+		{"IFF_SMART", Const, 0},
+		{"IFF_STATICARP", Const, 0},
+		{"IFF_TAP", Const, 0},
+		{"IFF_TUN", Const, 0},
+		{"IFF_TUN_EXCL", Const, 0},
+		{"IFF_UP", Const, 0},
+		{"IFF_VNET_HDR", Const, 0},
+		{"IFLA_ADDRESS", Const, 0},
+		{"IFLA_BROADCAST", Const, 0},
+		{"IFLA_COST", Const, 0},
+		{"IFLA_IFALIAS", Const, 0},
+		{"IFLA_IFNAME", Const, 0},
+		{"IFLA_LINK", Const, 0},
+		{"IFLA_LINKINFO", Const, 0},
+		{"IFLA_LINKMODE", Const, 0},
+		{"IFLA_MAP", Const, 0},
+		{"IFLA_MASTER", Const, 0},
+		{"IFLA_MAX", Const, 0},
+		{"IFLA_MTU", Const, 0},
+		{"IFLA_NET_NS_PID", Const, 0},
+		{"IFLA_OPERSTATE", Const, 0},
+		{"IFLA_PRIORITY", Const, 0},
+		{"IFLA_PROTINFO", Const, 0},
+		{"IFLA_QDISC", Const, 0},
+		{"IFLA_STATS", Const, 0},
+		{"IFLA_TXQLEN", Const, 0},
+		{"IFLA_UNSPEC", Const, 0},
+		{"IFLA_WEIGHT", Const, 0},
+		{"IFLA_WIRELESS", Const, 0},
+		{"IFNAMSIZ", Const, 0},
+		{"IFT_1822", Const, 0},
+		{"IFT_A12MPPSWITCH", Const, 0},
+		{"IFT_AAL2", Const, 0},
+		{"IFT_AAL5", Const, 0},
+		{"IFT_ADSL", Const, 0},
+		{"IFT_AFLANE8023", Const, 0},
+		{"IFT_AFLANE8025", Const, 0},
+		{"IFT_ARAP", Const, 0},
+		{"IFT_ARCNET", Const, 0},
+		{"IFT_ARCNETPLUS", Const, 0},
+		{"IFT_ASYNC", Const, 0},
+		{"IFT_ATM", Const, 0},
+		{"IFT_ATMDXI", Const, 0},
+		{"IFT_ATMFUNI", Const, 0},
+		{"IFT_ATMIMA", Const, 0},
+		{"IFT_ATMLOGICAL", Const, 0},
+		{"IFT_ATMRADIO", Const, 0},
+		{"IFT_ATMSUBINTERFACE", Const, 0},
+		{"IFT_ATMVCIENDPT", Const, 0},
+		{"IFT_ATMVIRTUAL", Const, 0},
+		{"IFT_BGPPOLICYACCOUNTING", Const, 0},
+		{"IFT_BLUETOOTH", Const, 1},
+		{"IFT_BRIDGE", Const, 0},
+		{"IFT_BSC", Const, 0},
+		{"IFT_CARP", Const, 0},
+		{"IFT_CCTEMUL", Const, 0},
+		{"IFT_CELLULAR", Const, 0},
+		{"IFT_CEPT", Const, 0},
+		{"IFT_CES", Const, 0},
+		{"IFT_CHANNEL", Const, 0},
+		{"IFT_CNR", Const, 0},
+		{"IFT_COFFEE", Const, 0},
+		{"IFT_COMPOSITELINK", Const, 0},
+		{"IFT_DCN", Const, 0},
+		{"IFT_DIGITALPOWERLINE", Const, 0},
+		{"IFT_DIGITALWRAPPEROVERHEADCHANNEL", Const, 0},
+		{"IFT_DLSW", Const, 0},
+		{"IFT_DOCSCABLEDOWNSTREAM", Const, 0},
+		{"IFT_DOCSCABLEMACLAYER", Const, 0},
+		{"IFT_DOCSCABLEUPSTREAM", Const, 0},
+		{"IFT_DOCSCABLEUPSTREAMCHANNEL", Const, 1},
+		{"IFT_DS0", Const, 0},
+		{"IFT_DS0BUNDLE", Const, 0},
+		{"IFT_DS1FDL", Const, 0},
+		{"IFT_DS3", Const, 0},
+		{"IFT_DTM", Const, 0},
+		{"IFT_DUMMY", Const, 1},
+		{"IFT_DVBASILN", Const, 0},
+		{"IFT_DVBASIOUT", Const, 0},
+		{"IFT_DVBRCCDOWNSTREAM", Const, 0},
+		{"IFT_DVBRCCMACLAYER", Const, 0},
+		{"IFT_DVBRCCUPSTREAM", Const, 0},
+		{"IFT_ECONET", Const, 1},
+		{"IFT_ENC", Const, 0},
+		{"IFT_EON", Const, 0},
+		{"IFT_EPLRS", Const, 0},
+		{"IFT_ESCON", Const, 0},
+		{"IFT_ETHER", Const, 0},
+		{"IFT_FAITH", Const, 0},
+		{"IFT_FAST", Const, 0},
+		{"IFT_FASTETHER", Const, 0},
+		{"IFT_FASTETHERFX", Const, 0},
+		{"IFT_FDDI", Const, 0},
+		{"IFT_FIBRECHANNEL", Const, 0},
+		{"IFT_FRAMERELAYINTERCONNECT", Const, 0},
+		{"IFT_FRAMERELAYMPI", Const, 0},
+		{"IFT_FRDLCIENDPT", Const, 0},
+		{"IFT_FRELAY", Const, 0},
+		{"IFT_FRELAYDCE", Const, 0},
+		{"IFT_FRF16MFRBUNDLE", Const, 0},
+		{"IFT_FRFORWARD", Const, 0},
+		{"IFT_G703AT2MB", Const, 0},
+		{"IFT_G703AT64K", Const, 0},
+		{"IFT_GIF", Const, 0},
+		{"IFT_GIGABITETHERNET", Const, 0},
+		{"IFT_GR303IDT", Const, 0},
+		{"IFT_GR303RDT", Const, 0},
+		{"IFT_H323GATEKEEPER", Const, 0},
+		{"IFT_H323PROXY", Const, 0},
+		{"IFT_HDH1822", Const, 0},
+		{"IFT_HDLC", Const, 0},
+		{"IFT_HDSL2", Const, 0},
+		{"IFT_HIPERLAN2", Const, 0},
+		{"IFT_HIPPI", Const, 0},
+		{"IFT_HIPPIINTERFACE", Const, 0},
+		{"IFT_HOSTPAD", Const, 0},
+		{"IFT_HSSI", Const, 0},
+		{"IFT_HY", Const, 0},
+		{"IFT_IBM370PARCHAN", Const, 0},
+		{"IFT_IDSL", Const, 0},
+		{"IFT_IEEE1394", Const, 0},
+		{"IFT_IEEE80211", Const, 0},
+		{"IFT_IEEE80212", Const, 0},
+		{"IFT_IEEE8023ADLAG", Const, 0},
+		{"IFT_IFGSN", Const, 0},
+		{"IFT_IMT", Const, 0},
+		{"IFT_INFINIBAND", Const, 1},
+		{"IFT_INTERLEAVE", Const, 0},
+		{"IFT_IP", Const, 0},
+		{"IFT_IPFORWARD", Const, 0},
+		{"IFT_IPOVERATM", Const, 0},
+		{"IFT_IPOVERCDLC", Const, 0},
+		{"IFT_IPOVERCLAW", Const, 0},
+		{"IFT_IPSWITCH", Const, 0},
+		{"IFT_IPXIP", Const, 0},
+		{"IFT_ISDN", Const, 0},
+		{"IFT_ISDNBASIC", Const, 0},
+		{"IFT_ISDNPRIMARY", Const, 0},
+		{"IFT_ISDNS", Const, 0},
+		{"IFT_ISDNU", Const, 0},
+		{"IFT_ISO88022LLC", Const, 0},
+		{"IFT_ISO88023", Const, 0},
+		{"IFT_ISO88024", Const, 0},
+		{"IFT_ISO88025", Const, 0},
+		{"IFT_ISO88025CRFPINT", Const, 0},
+		{"IFT_ISO88025DTR", Const, 0},
+		{"IFT_ISO88025FIBER", Const, 0},
+		{"IFT_ISO88026", Const, 0},
+		{"IFT_ISUP", Const, 0},
+		{"IFT_L2VLAN", Const, 0},
+		{"IFT_L3IPVLAN", Const, 0},
+		{"IFT_L3IPXVLAN", Const, 0},
+		{"IFT_LAPB", Const, 0},
+		{"IFT_LAPD", Const, 0},
+		{"IFT_LAPF", Const, 0},
+		{"IFT_LINEGROUP", Const, 1},
+		{"IFT_LOCALTALK", Const, 0},
+		{"IFT_LOOP", Const, 0},
+		{"IFT_MEDIAMAILOVERIP", Const, 0},
+		{"IFT_MFSIGLINK", Const, 0},
+		{"IFT_MIOX25", Const, 0},
+		{"IFT_MODEM", Const, 0},
+		{"IFT_MPC", Const, 0},
+		{"IFT_MPLS", Const, 0},
+		{"IFT_MPLSTUNNEL", Const, 0},
+		{"IFT_MSDSL", Const, 0},
+		{"IFT_MVL", Const, 0},
+		{"IFT_MYRINET", Const, 0},
+		{"IFT_NFAS", Const, 0},
+		{"IFT_NSIP", Const, 0},
+		{"IFT_OPTICALCHANNEL", Const, 0},
+		{"IFT_OPTICALTRANSPORT", Const, 0},
+		{"IFT_OTHER", Const, 0},
+		{"IFT_P10", Const, 0},
+		{"IFT_P80", Const, 0},
+		{"IFT_PARA", Const, 0},
+		{"IFT_PDP", Const, 0},
+		{"IFT_PFLOG", Const, 0},
+		{"IFT_PFLOW", Const, 1},
+		{"IFT_PFSYNC", Const, 0},
+		{"IFT_PLC", Const, 0},
+		{"IFT_PON155", Const, 1},
+		{"IFT_PON622", Const, 1},
+		{"IFT_POS", Const, 0},
+		{"IFT_PPP", Const, 0},
+		{"IFT_PPPMULTILINKBUNDLE", Const, 0},
+		{"IFT_PROPATM", Const, 1},
+		{"IFT_PROPBWAP2MP", Const, 0},
+		{"IFT_PROPCNLS", Const, 0},
+		{"IFT_PROPDOCSWIRELESSDOWNSTREAM", Const, 0},
+		{"IFT_PROPDOCSWIRELESSMACLAYER", Const, 0},
+		{"IFT_PROPDOCSWIRELESSUPSTREAM", Const, 0},
+		{"IFT_PROPMUX", Const, 0},
+		{"IFT_PROPVIRTUAL", Const, 0},
+		{"IFT_PROPWIRELESSP2P", Const, 0},
+		{"IFT_PTPSERIAL", Const, 0},
+		{"IFT_PVC", Const, 0},
+		{"IFT_Q2931", Const, 1},
+		{"IFT_QLLC", Const, 0},
+		{"IFT_RADIOMAC", Const, 0},
+		{"IFT_RADSL", Const, 0},
+		{"IFT_REACHDSL", Const, 0},
+		{"IFT_RFC1483", Const, 0},
+		{"IFT_RS232", Const, 0},
+		{"IFT_RSRB", Const, 0},
+		{"IFT_SDLC", Const, 0},
+		{"IFT_SDSL", Const, 0},
+		{"IFT_SHDSL", Const, 0},
+		{"IFT_SIP", Const, 0},
+		{"IFT_SIPSIG", Const, 1},
+		{"IFT_SIPTG", Const, 1},
+		{"IFT_SLIP", Const, 0},
+		{"IFT_SMDSDXI", Const, 0},
+		{"IFT_SMDSICIP", Const, 0},
+		{"IFT_SONET", Const, 0},
+		{"IFT_SONETOVERHEADCHANNEL", Const, 0},
+		{"IFT_SONETPATH", Const, 0},
+		{"IFT_SONETVT", Const, 0},
+		{"IFT_SRP", Const, 0},
+		{"IFT_SS7SIGLINK", Const, 0},
+		{"IFT_STACKTOSTACK", Const, 0},
+		{"IFT_STARLAN", Const, 0},
+		{"IFT_STF", Const, 0},
+		{"IFT_T1", Const, 0},
+		{"IFT_TDLC", Const, 0},
+		{"IFT_TELINK", Const, 1},
+		{"IFT_TERMPAD", Const, 0},
+		{"IFT_TR008", Const, 0},
+		{"IFT_TRANSPHDLC", Const, 0},
+		{"IFT_TUNNEL", Const, 0},
+		{"IFT_ULTRA", Const, 0},
+		{"IFT_USB", Const, 0},
+		{"IFT_V11", Const, 0},
+		{"IFT_V35", Const, 0},
+		{"IFT_V36", Const, 0},
+		{"IFT_V37", Const, 0},
+		{"IFT_VDSL", Const, 0},
+		{"IFT_VIRTUALIPADDRESS", Const, 0},
+		{"IFT_VIRTUALTG", Const, 1},
+		{"IFT_VOICEDID", Const, 1},
+		{"IFT_VOICEEM", Const, 0},
+		{"IFT_VOICEEMFGD", Const, 1},
+		{"IFT_VOICEENCAP", Const, 0},
+		{"IFT_VOICEFGDEANA", Const, 1},
+		{"IFT_VOICEFXO", Const, 0},
+		{"IFT_VOICEFXS", Const, 0},
+		{"IFT_VOICEOVERATM", Const, 0},
+		{"IFT_VOICEOVERCABLE", Const, 1},
+		{"IFT_VOICEOVERFRAMERELAY", Const, 0},
+		{"IFT_VOICEOVERIP", Const, 0},
+		{"IFT_X213", Const, 0},
+		{"IFT_X25", Const, 0},
+		{"IFT_X25DDN", Const, 0},
+		{"IFT_X25HUNTGROUP", Const, 0},
+		{"IFT_X25MLP", Const, 0},
+		{"IFT_X25PLE", Const, 0},
+		{"IFT_XETHER", Const, 0},
+		{"IGNBRK", Const, 0},
+		{"IGNCR", Const, 0},
+		{"IGNORE", Const, 0},
+		{"IGNPAR", Const, 0},
+		{"IMAXBEL", Const, 0},
+		{"INFINITE", Const, 0},
+		{"INLCR", Const, 0},
+		{"INPCK", Const, 0},
+		{"INVALID_FILE_ATTRIBUTES", Const, 0},
+		{"IN_ACCESS", Const, 0},
+		{"IN_ALL_EVENTS", Const, 0},
+		{"IN_ATTRIB", Const, 0},
+		{"IN_CLASSA_HOST", Const, 0},
+		{"IN_CLASSA_MAX", Const, 0},
+		{"IN_CLASSA_NET", Const, 0},
+		{"IN_CLASSA_NSHIFT", Const, 0},
+		{"IN_CLASSB_HOST", Const, 0},
+		{"IN_CLASSB_MAX", Const, 0},
+		{"IN_CLASSB_NET", Const, 0},
+		{"IN_CLASSB_NSHIFT", Const, 0},
+		{"IN_CLASSC_HOST", Const, 0},
+		{"IN_CLASSC_NET", Const, 0},
+		{"IN_CLASSC_NSHIFT", Const, 0},
+		{"IN_CLASSD_HOST", Const, 0},
+		{"IN_CLASSD_NET", Const, 0},
+		{"IN_CLASSD_NSHIFT", Const, 0},
+		{"IN_CLOEXEC", Const, 0},
+		{"IN_CLOSE", Const, 0},
+		{"IN_CLOSE_NOWRITE", Const, 0},
+		{"IN_CLOSE_WRITE", Const, 0},
+		{"IN_CREATE", Const, 0},
+		{"IN_DELETE", Const, 0},
+		{"IN_DELETE_SELF", Const, 0},
+		{"IN_DONT_FOLLOW", Const, 0},
+		{"IN_EXCL_UNLINK", Const, 0},
+		{"IN_IGNORED", Const, 0},
+		{"IN_ISDIR", Const, 0},
+		{"IN_LINKLOCALNETNUM", Const, 0},
+		{"IN_LOOPBACKNET", Const, 0},
+		{"IN_MASK_ADD", Const, 0},
+		{"IN_MODIFY", Const, 0},
+		{"IN_MOVE", Const, 0},
+		{"IN_MOVED_FROM", Const, 0},
+		{"IN_MOVED_TO", Const, 0},
+		{"IN_MOVE_SELF", Const, 0},
+		{"IN_NONBLOCK", Const, 0},
+		{"IN_ONESHOT", Const, 0},
+		{"IN_ONLYDIR", Const, 0},
+		{"IN_OPEN", Const, 0},
+		{"IN_Q_OVERFLOW", Const, 0},
+		{"IN_RFC3021_HOST", Const, 1},
+		{"IN_RFC3021_MASK", Const, 1},
+		{"IN_RFC3021_NET", Const, 1},
+		{"IN_RFC3021_NSHIFT", Const, 1},
+		{"IN_UNMOUNT", Const, 0},
+		{"IOC_IN", Const, 1},
+		{"IOC_INOUT", Const, 1},
+		{"IOC_OUT", Const, 1},
+		{"IOC_VENDOR", Const, 3},
+		{"IOC_WS2", Const, 1},
+		{"IO_REPARSE_TAG_SYMLINK", Const, 4},
+		{"IPMreq", Type, 0},
+		{"IPMreq.Interface", Field, 0},
+		{"IPMreq.Multiaddr", Field, 0},
+		{"IPMreqn", Type, 0},
+		{"IPMreqn.Address", Field, 0},
+		{"IPMreqn.Ifindex", Field, 0},
+		{"IPMreqn.Multiaddr", Field, 0},
+		{"IPPROTO_3PC", Const, 0},
+		{"IPPROTO_ADFS", Const, 0},
+		{"IPPROTO_AH", Const, 0},
+		{"IPPROTO_AHIP", Const, 0},
+		{"IPPROTO_APES", Const, 0},
+		{"IPPROTO_ARGUS", Const, 0},
+		{"IPPROTO_AX25", Const, 0},
+		{"IPPROTO_BHA", Const, 0},
+		{"IPPROTO_BLT", Const, 0},
+		{"IPPROTO_BRSATMON", Const, 0},
+		{"IPPROTO_CARP", Const, 0},
+		{"IPPROTO_CFTP", Const, 0},
+		{"IPPROTO_CHAOS", Const, 0},
+		{"IPPROTO_CMTP", Const, 0},
+		{"IPPROTO_COMP", Const, 0},
+		{"IPPROTO_CPHB", Const, 0},
+		{"IPPROTO_CPNX", Const, 0},
+		{"IPPROTO_DCCP", Const, 0},
+		{"IPPROTO_DDP", Const, 0},
+		{"IPPROTO_DGP", Const, 0},
+		{"IPPROTO_DIVERT", Const, 0},
+		{"IPPROTO_DIVERT_INIT", Const, 3},
+		{"IPPROTO_DIVERT_RESP", Const, 3},
+		{"IPPROTO_DONE", Const, 0},
+		{"IPPROTO_DSTOPTS", Const, 0},
+		{"IPPROTO_EGP", Const, 0},
+		{"IPPROTO_EMCON", Const, 0},
+		{"IPPROTO_ENCAP", Const, 0},
+		{"IPPROTO_EON", Const, 0},
+		{"IPPROTO_ESP", Const, 0},
+		{"IPPROTO_ETHERIP", Const, 0},
+		{"IPPROTO_FRAGMENT", Const, 0},
+		{"IPPROTO_GGP", Const, 0},
+		{"IPPROTO_GMTP", Const, 0},
+		{"IPPROTO_GRE", Const, 0},
+		{"IPPROTO_HELLO", Const, 0},
+		{"IPPROTO_HMP", Const, 0},
+		{"IPPROTO_HOPOPTS", Const, 0},
+		{"IPPROTO_ICMP", Const, 0},
+		{"IPPROTO_ICMPV6", Const, 0},
+		{"IPPROTO_IDP", Const, 0},
+		{"IPPROTO_IDPR", Const, 0},
+		{"IPPROTO_IDRP", Const, 0},
+		{"IPPROTO_IGMP", Const, 0},
+		{"IPPROTO_IGP", Const, 0},
+		{"IPPROTO_IGRP", Const, 0},
+		{"IPPROTO_IL", Const, 0},
+		{"IPPROTO_INLSP", Const, 0},
+		{"IPPROTO_INP", Const, 0},
+		{"IPPROTO_IP", Const, 0},
+		{"IPPROTO_IPCOMP", Const, 0},
+		{"IPPROTO_IPCV", Const, 0},
+		{"IPPROTO_IPEIP", Const, 0},
+		{"IPPROTO_IPIP", Const, 0},
+		{"IPPROTO_IPPC", Const, 0},
+		{"IPPROTO_IPV4", Const, 0},
+		{"IPPROTO_IPV6", Const, 0},
+		{"IPPROTO_IPV6_ICMP", Const, 1},
+		{"IPPROTO_IRTP", Const, 0},
+		{"IPPROTO_KRYPTOLAN", Const, 0},
+		{"IPPROTO_LARP", Const, 0},
+		{"IPPROTO_LEAF1", Const, 0},
+		{"IPPROTO_LEAF2", Const, 0},
+		{"IPPROTO_MAX", Const, 0},
+		{"IPPROTO_MAXID", Const, 0},
+		{"IPPROTO_MEAS", Const, 0},
+		{"IPPROTO_MH", Const, 1},
+		{"IPPROTO_MHRP", Const, 0},
+		{"IPPROTO_MICP", Const, 0},
+		{"IPPROTO_MOBILE", Const, 0},
+		{"IPPROTO_MPLS", Const, 1},
+		{"IPPROTO_MTP", Const, 0},
+		{"IPPROTO_MUX", Const, 0},
+		{"IPPROTO_ND", Const, 0},
+		{"IPPROTO_NHRP", Const, 0},
+		{"IPPROTO_NONE", Const, 0},
+		{"IPPROTO_NSP", Const, 0},
+		{"IPPROTO_NVPII", Const, 0},
+		{"IPPROTO_OLD_DIVERT", Const, 0},
+		{"IPPROTO_OSPFIGP", Const, 0},
+		{"IPPROTO_PFSYNC", Const, 0},
+		{"IPPROTO_PGM", Const, 0},
+		{"IPPROTO_PIGP", Const, 0},
+		{"IPPROTO_PIM", Const, 0},
+		{"IPPROTO_PRM", Const, 0},
+		{"IPPROTO_PUP", Const, 0},
+		{"IPPROTO_PVP", Const, 0},
+		{"IPPROTO_RAW", Const, 0},
+		{"IPPROTO_RCCMON", Const, 0},
+		{"IPPROTO_RDP", Const, 0},
+		{"IPPROTO_ROUTING", Const, 0},
+		{"IPPROTO_RSVP", Const, 0},
+		{"IPPROTO_RVD", Const, 0},
+		{"IPPROTO_SATEXPAK", Const, 0},
+		{"IPPROTO_SATMON", Const, 0},
+		{"IPPROTO_SCCSP", Const, 0},
+		{"IPPROTO_SCTP", Const, 0},
+		{"IPPROTO_SDRP", Const, 0},
+		{"IPPROTO_SEND", Const, 1},
+		{"IPPROTO_SEP", Const, 0},
+		{"IPPROTO_SKIP", Const, 0},
+		{"IPPROTO_SPACER", Const, 0},
+		{"IPPROTO_SRPC", Const, 0},
+		{"IPPROTO_ST", Const, 0},
+		{"IPPROTO_SVMTP", Const, 0},
+		{"IPPROTO_SWIPE", Const, 0},
+		{"IPPROTO_TCF", Const, 0},
+		{"IPPROTO_TCP", Const, 0},
+		{"IPPROTO_TLSP", Const, 0},
+		{"IPPROTO_TP", Const, 0},
+		{"IPPROTO_TPXX", Const, 0},
+		{"IPPROTO_TRUNK1", Const, 0},
+		{"IPPROTO_TRUNK2", Const, 0},
+		{"IPPROTO_TTP", Const, 0},
+		{"IPPROTO_UDP", Const, 0},
+		{"IPPROTO_UDPLITE", Const, 0},
+		{"IPPROTO_VINES", Const, 0},
+		{"IPPROTO_VISA", Const, 0},
+		{"IPPROTO_VMTP", Const, 0},
+		{"IPPROTO_VRRP", Const, 1},
+		{"IPPROTO_WBEXPAK", Const, 0},
+		{"IPPROTO_WBMON", Const, 0},
+		{"IPPROTO_WSN", Const, 0},
+		{"IPPROTO_XNET", Const, 0},
+		{"IPPROTO_XTP", Const, 0},
+		{"IPV6_2292DSTOPTS", Const, 0},
+		{"IPV6_2292HOPLIMIT", Const, 0},
+		{"IPV6_2292HOPOPTS", Const, 0},
+		{"IPV6_2292NEXTHOP", Const, 0},
+		{"IPV6_2292PKTINFO", Const, 0},
+		{"IPV6_2292PKTOPTIONS", Const, 0},
+		{"IPV6_2292RTHDR", Const, 0},
+		{"IPV6_ADDRFORM", Const, 0},
+		{"IPV6_ADD_MEMBERSHIP", Const, 0},
+		{"IPV6_AUTHHDR", Const, 0},
+		{"IPV6_AUTH_LEVEL", Const, 1},
+		{"IPV6_AUTOFLOWLABEL", Const, 0},
+		{"IPV6_BINDANY", Const, 0},
+		{"IPV6_BINDV6ONLY", Const, 0},
+		{"IPV6_BOUND_IF", Const, 0},
+		{"IPV6_CHECKSUM", Const, 0},
+		{"IPV6_DEFAULT_MULTICAST_HOPS", Const, 0},
+		{"IPV6_DEFAULT_MULTICAST_LOOP", Const, 0},
+		{"IPV6_DEFHLIM", Const, 0},
+		{"IPV6_DONTFRAG", Const, 0},
+		{"IPV6_DROP_MEMBERSHIP", Const, 0},
+		{"IPV6_DSTOPTS", Const, 0},
+		{"IPV6_ESP_NETWORK_LEVEL", Const, 1},
+		{"IPV6_ESP_TRANS_LEVEL", Const, 1},
+		{"IPV6_FAITH", Const, 0},
+		{"IPV6_FLOWINFO_MASK", Const, 0},
+		{"IPV6_FLOWLABEL_MASK", Const, 0},
+		{"IPV6_FRAGTTL", Const, 0},
+		{"IPV6_FW_ADD", Const, 0},
+		{"IPV6_FW_DEL", Const, 0},
+		{"IPV6_FW_FLUSH", Const, 0},
+		{"IPV6_FW_GET", Const, 0},
+		{"IPV6_FW_ZERO", Const, 0},
+		{"IPV6_HLIMDEC", Const, 0},
+		{"IPV6_HOPLIMIT", Const, 0},
+		{"IPV6_HOPOPTS", Const, 0},
+		{"IPV6_IPCOMP_LEVEL", Const, 1},
+		{"IPV6_IPSEC_POLICY", Const, 0},
+		{"IPV6_JOIN_ANYCAST", Const, 0},
+		{"IPV6_JOIN_GROUP", Const, 0},
+		{"IPV6_LEAVE_ANYCAST", Const, 0},
+		{"IPV6_LEAVE_GROUP", Const, 0},
+		{"IPV6_MAXHLIM", Const, 0},
+		{"IPV6_MAXOPTHDR", Const, 0},
+		{"IPV6_MAXPACKET", Const, 0},
+		{"IPV6_MAX_GROUP_SRC_FILTER", Const, 0},
+		{"IPV6_MAX_MEMBERSHIPS", Const, 0},
+		{"IPV6_MAX_SOCK_SRC_FILTER", Const, 0},
+		{"IPV6_MIN_MEMBERSHIPS", Const, 0},
+		{"IPV6_MMTU", Const, 0},
+		{"IPV6_MSFILTER", Const, 0},
+		{"IPV6_MTU", Const, 0},
+		{"IPV6_MTU_DISCOVER", Const, 0},
+		{"IPV6_MULTICAST_HOPS", Const, 0},
+		{"IPV6_MULTICAST_IF", Const, 0},
+		{"IPV6_MULTICAST_LOOP", Const, 0},
+		{"IPV6_NEXTHOP", Const, 0},
+		{"IPV6_OPTIONS", Const, 1},
+		{"IPV6_PATHMTU", Const, 0},
+		{"IPV6_PIPEX", Const, 1},
+		{"IPV6_PKTINFO", Const, 0},
+		{"IPV6_PMTUDISC_DO", Const, 0},
+		{"IPV6_PMTUDISC_DONT", Const, 0},
+		{"IPV6_PMTUDISC_PROBE", Const, 0},
+		{"IPV6_PMTUDISC_WANT", Const, 0},
+		{"IPV6_PORTRANGE", Const, 0},
+		{"IPV6_PORTRANGE_DEFAULT", Const, 0},
+		{"IPV6_PORTRANGE_HIGH", Const, 0},
+		{"IPV6_PORTRANGE_LOW", Const, 0},
+		{"IPV6_PREFER_TEMPADDR", Const, 0},
+		{"IPV6_RECVDSTOPTS", Const, 0},
+		{"IPV6_RECVDSTPORT", Const, 3},
+		{"IPV6_RECVERR", Const, 0},
+		{"IPV6_RECVHOPLIMIT", Const, 0},
+		{"IPV6_RECVHOPOPTS", Const, 0},
+		{"IPV6_RECVPATHMTU", Const, 0},
+		{"IPV6_RECVPKTINFO", Const, 0},
+		{"IPV6_RECVRTHDR", Const, 0},
+		{"IPV6_RECVTCLASS", Const, 0},
+		{"IPV6_ROUTER_ALERT", Const, 0},
+		{"IPV6_RTABLE", Const, 1},
+		{"IPV6_RTHDR", Const, 0},
+		{"IPV6_RTHDRDSTOPTS", Const, 0},
+		{"IPV6_RTHDR_LOOSE", Const, 0},
+		{"IPV6_RTHDR_STRICT", Const, 0},
+		{"IPV6_RTHDR_TYPE_0", Const, 0},
+		{"IPV6_RXDSTOPTS", Const, 0},
+		{"IPV6_RXHOPOPTS", Const, 0},
+		{"IPV6_SOCKOPT_RESERVED1", Const, 0},
+		{"IPV6_TCLASS", Const, 0},
+		{"IPV6_UNICAST_HOPS", Const, 0},
+		{"IPV6_USE_MIN_MTU", Const, 0},
+		{"IPV6_V6ONLY", Const, 0},
+		{"IPV6_VERSION", Const, 0},
+		{"IPV6_VERSION_MASK", Const, 0},
+		{"IPV6_XFRM_POLICY", Const, 0},
+		{"IP_ADD_MEMBERSHIP", Const, 0},
+		{"IP_ADD_SOURCE_MEMBERSHIP", Const, 0},
+		{"IP_AUTH_LEVEL", Const, 1},
+		{"IP_BINDANY", Const, 0},
+		{"IP_BLOCK_SOURCE", Const, 0},
+		{"IP_BOUND_IF", Const, 0},
+		{"IP_DEFAULT_MULTICAST_LOOP", Const, 0},
+		{"IP_DEFAULT_MULTICAST_TTL", Const, 0},
+		{"IP_DF", Const, 0},
+		{"IP_DIVERTFL", Const, 3},
+		{"IP_DONTFRAG", Const, 0},
+		{"IP_DROP_MEMBERSHIP", Const, 0},
+		{"IP_DROP_SOURCE_MEMBERSHIP", Const, 0},
+		{"IP_DUMMYNET3", Const, 0},
+		{"IP_DUMMYNET_CONFIGURE", Const, 0},
+		{"IP_DUMMYNET_DEL", Const, 0},
+		{"IP_DUMMYNET_FLUSH", Const, 0},
+		{"IP_DUMMYNET_GET", Const, 0},
+		{"IP_EF", Const, 1},
+		{"IP_ERRORMTU", Const, 1},
+		{"IP_ESP_NETWORK_LEVEL", Const, 1},
+		{"IP_ESP_TRANS_LEVEL", Const, 1},
+		{"IP_FAITH", Const, 0},
+		{"IP_FREEBIND", Const, 0},
+		{"IP_FW3", Const, 0},
+		{"IP_FW_ADD", Const, 0},
+		{"IP_FW_DEL", Const, 0},
+		{"IP_FW_FLUSH", Const, 0},
+		{"IP_FW_GET", Const, 0},
+		{"IP_FW_NAT_CFG", Const, 0},
+		{"IP_FW_NAT_DEL", Const, 0},
+		{"IP_FW_NAT_GET_CONFIG", Const, 0},
+		{"IP_FW_NAT_GET_LOG", Const, 0},
+		{"IP_FW_RESETLOG", Const, 0},
+		{"IP_FW_TABLE_ADD", Const, 0},
+		{"IP_FW_TABLE_DEL", Const, 0},
+		{"IP_FW_TABLE_FLUSH", Const, 0},
+		{"IP_FW_TABLE_GETSIZE", Const, 0},
+		{"IP_FW_TABLE_LIST", Const, 0},
+		{"IP_FW_ZERO", Const, 0},
+		{"IP_HDRINCL", Const, 0},
+		{"IP_IPCOMP_LEVEL", Const, 1},
+		{"IP_IPSECFLOWINFO", Const, 1},
+		{"IP_IPSEC_LOCAL_AUTH", Const, 1},
+		{"IP_IPSEC_LOCAL_CRED", Const, 1},
+		{"IP_IPSEC_LOCAL_ID", Const, 1},
+		{"IP_IPSEC_POLICY", Const, 0},
+		{"IP_IPSEC_REMOTE_AUTH", Const, 1},
+		{"IP_IPSEC_REMOTE_CRED", Const, 1},
+		{"IP_IPSEC_REMOTE_ID", Const, 1},
+		{"IP_MAXPACKET", Const, 0},
+		{"IP_MAX_GROUP_SRC_FILTER", Const, 0},
+		{"IP_MAX_MEMBERSHIPS", Const, 0},
+		{"IP_MAX_SOCK_MUTE_FILTER", Const, 0},
+		{"IP_MAX_SOCK_SRC_FILTER", Const, 0},
+		{"IP_MAX_SOURCE_FILTER", Const, 0},
+		{"IP_MF", Const, 0},
+		{"IP_MINFRAGSIZE", Const, 1},
+		{"IP_MINTTL", Const, 0},
+		{"IP_MIN_MEMBERSHIPS", Const, 0},
+		{"IP_MSFILTER", Const, 0},
+		{"IP_MSS", Const, 0},
+		{"IP_MTU", Const, 0},
+		{"IP_MTU_DISCOVER", Const, 0},
+		{"IP_MULTICAST_IF", Const, 0},
+		{"IP_MULTICAST_IFINDEX", Const, 0},
+		{"IP_MULTICAST_LOOP", Const, 0},
+		{"IP_MULTICAST_TTL", Const, 0},
+		{"IP_MULTICAST_VIF", Const, 0},
+		{"IP_NAT__XXX", Const, 0},
+		{"IP_OFFMASK", Const, 0},
+		{"IP_OLD_FW_ADD", Const, 0},
+		{"IP_OLD_FW_DEL", Const, 0},
+		{"IP_OLD_FW_FLUSH", Const, 0},
+		{"IP_OLD_FW_GET", Const, 0},
+		{"IP_OLD_FW_RESETLOG", Const, 0},
+		{"IP_OLD_FW_ZERO", Const, 0},
+		{"IP_ONESBCAST", Const, 0},
+		{"IP_OPTIONS", Const, 0},
+		{"IP_ORIGDSTADDR", Const, 0},
+		{"IP_PASSSEC", Const, 0},
+		{"IP_PIPEX", Const, 1},
+		{"IP_PKTINFO", Const, 0},
+		{"IP_PKTOPTIONS", Const, 0},
+		{"IP_PMTUDISC", Const, 0},
+		{"IP_PMTUDISC_DO", Const, 0},
+		{"IP_PMTUDISC_DONT", Const, 0},
+		{"IP_PMTUDISC_PROBE", Const, 0},
+		{"IP_PMTUDISC_WANT", Const, 0},
+		{"IP_PORTRANGE", Const, 0},
+		{"IP_PORTRANGE_DEFAULT", Const, 0},
+		{"IP_PORTRANGE_HIGH", Const, 0},
+		{"IP_PORTRANGE_LOW", Const, 0},
+		{"IP_RECVDSTADDR", Const, 0},
+		{"IP_RECVDSTPORT", Const, 1},
+		{"IP_RECVERR", Const, 0},
+		{"IP_RECVIF", Const, 0},
+		{"IP_RECVOPTS", Const, 0},
+		{"IP_RECVORIGDSTADDR", Const, 0},
+		{"IP_RECVPKTINFO", Const, 0},
+		{"IP_RECVRETOPTS", Const, 0},
+		{"IP_RECVRTABLE", Const, 1},
+		{"IP_RECVTOS", Const, 0},
+		{"IP_RECVTTL", Const, 0},
+		{"IP_RETOPTS", Const, 0},
+		{"IP_RF", Const, 0},
+		{"IP_ROUTER_ALERT", Const, 0},
+		{"IP_RSVP_OFF", Const, 0},
+		{"IP_RSVP_ON", Const, 0},
+		{"IP_RSVP_VIF_OFF", Const, 0},
+		{"IP_RSVP_VIF_ON", Const, 0},
+		{"IP_RTABLE", Const, 1},
+		{"IP_SENDSRCADDR", Const, 0},
+		{"IP_STRIPHDR", Const, 0},
+		{"IP_TOS", Const, 0},
+		{"IP_TRAFFIC_MGT_BACKGROUND", Const, 0},
+		{"IP_TRANSPARENT", Const, 0},
+		{"IP_TTL", Const, 0},
+		{"IP_UNBLOCK_SOURCE", Const, 0},
+		{"IP_XFRM_POLICY", Const, 0},
+		{"IPv6MTUInfo", Type, 2},
+		{"IPv6MTUInfo.Addr", Field, 2},
+		{"IPv6MTUInfo.Mtu", Field, 2},
+		{"IPv6Mreq", Type, 0},
+		{"IPv6Mreq.Interface", Field, 0},
+		{"IPv6Mreq.Multiaddr", Field, 0},
+		{"ISIG", Const, 0},
+		{"ISTRIP", Const, 0},
+		{"IUCLC", Const, 0},
+		{"IUTF8", Const, 0},
+		{"IXANY", Const, 0},
+		{"IXOFF", Const, 0},
+		{"IXON", Const, 0},
+		{"IfAddrmsg", Type, 0},
+		{"IfAddrmsg.Family", Field, 0},
+		{"IfAddrmsg.Flags", Field, 0},
+		{"IfAddrmsg.Index", Field, 0},
+		{"IfAddrmsg.Prefixlen", Field, 0},
+		{"IfAddrmsg.Scope", Field, 0},
+		{"IfAnnounceMsghdr", Type, 1},
+		{"IfAnnounceMsghdr.Hdrlen", Field, 2},
+		{"IfAnnounceMsghdr.Index", Field, 1},
+		{"IfAnnounceMsghdr.Msglen", Field, 1},
+		{"IfAnnounceMsghdr.Name", Field, 1},
+		{"IfAnnounceMsghdr.Type", Field, 1},
+		{"IfAnnounceMsghdr.Version", Field, 1},
+		{"IfAnnounceMsghdr.What", Field, 1},
+		{"IfData", Type, 0},
+		{"IfData.Addrlen", Field, 0},
+		{"IfData.Baudrate", Field, 0},
+		{"IfData.Capabilities", Field, 2},
+		{"IfData.Collisions", Field, 0},
+		{"IfData.Datalen", Field, 0},
+		{"IfData.Epoch", Field, 0},
+		{"IfData.Hdrlen", Field, 0},
+		{"IfData.Hwassist", Field, 0},
+		{"IfData.Ibytes", Field, 0},
+		{"IfData.Ierrors", Field, 0},
+		{"IfData.Imcasts", Field, 0},
+		{"IfData.Ipackets", Field, 0},
+		{"IfData.Iqdrops", Field, 0},
+		{"IfData.Lastchange", Field, 0},
+		{"IfData.Link_state", Field, 0},
+		{"IfData.Mclpool", Field, 2},
+		{"IfData.Metric", Field, 0},
+		{"IfData.Mtu", Field, 0},
+		{"IfData.Noproto", Field, 0},
+		{"IfData.Obytes", Field, 0},
+		{"IfData.Oerrors", Field, 0},
+		{"IfData.Omcasts", Field, 0},
+		{"IfData.Opackets", Field, 0},
+		{"IfData.Pad", Field, 2},
+		{"IfData.Pad_cgo_0", Field, 2},
+		{"IfData.Pad_cgo_1", Field, 2},
+		{"IfData.Physical", Field, 0},
+		{"IfData.Recvquota", Field, 0},
+		{"IfData.Recvtiming", Field, 0},
+		{"IfData.Reserved1", Field, 0},
+		{"IfData.Reserved2", Field, 0},
+		{"IfData.Spare_char1", Field, 0},
+		{"IfData.Spare_char2", Field, 0},
+		{"IfData.Type", Field, 0},
+		{"IfData.Typelen", Field, 0},
+		{"IfData.Unused1", Field, 0},
+		{"IfData.Unused2", Field, 0},
+		{"IfData.Xmitquota", Field, 0},
+		{"IfData.Xmittiming", Field, 0},
+		{"IfInfomsg", Type, 0},
+		{"IfInfomsg.Change", Field, 0},
+		{"IfInfomsg.Family", Field, 0},
+		{"IfInfomsg.Flags", Field, 0},
+		{"IfInfomsg.Index", Field, 0},
+		{"IfInfomsg.Type", Field, 0},
+		{"IfInfomsg.X__ifi_pad", Field, 0},
+		{"IfMsghdr", Type, 0},
+		{"IfMsghdr.Addrs", Field, 0},
+		{"IfMsghdr.Data", Field, 0},
+		{"IfMsghdr.Flags", Field, 0},
+		{"IfMsghdr.Hdrlen", Field, 2},
+		{"IfMsghdr.Index", Field, 0},
+		{"IfMsghdr.Msglen", Field, 0},
+		{"IfMsghdr.Pad1", Field, 2},
+		{"IfMsghdr.Pad2", Field, 2},
+		{"IfMsghdr.Pad_cgo_0", Field, 0},
+		{"IfMsghdr.Pad_cgo_1", Field, 2},
+		{"IfMsghdr.Tableid", Field, 2},
+		{"IfMsghdr.Type", Field, 0},
+		{"IfMsghdr.Version", Field, 0},
+		{"IfMsghdr.Xflags", Field, 2},
+		{"IfaMsghdr", Type, 0},
+		{"IfaMsghdr.Addrs", Field, 0},
+		{"IfaMsghdr.Flags", Field, 0},
+		{"IfaMsghdr.Hdrlen", Field, 2},
+		{"IfaMsghdr.Index", Field, 0},
+		{"IfaMsghdr.Metric", Field, 0},
+		{"IfaMsghdr.Msglen", Field, 0},
+		{"IfaMsghdr.Pad1", Field, 2},
+		{"IfaMsghdr.Pad2", Field, 2},
+		{"IfaMsghdr.Pad_cgo_0", Field, 0},
+		{"IfaMsghdr.Tableid", Field, 2},
+		{"IfaMsghdr.Type", Field, 0},
+		{"IfaMsghdr.Version", Field, 0},
+		{"IfmaMsghdr", Type, 0},
+		{"IfmaMsghdr.Addrs", Field, 0},
+		{"IfmaMsghdr.Flags", Field, 0},
+		{"IfmaMsghdr.Index", Field, 0},
+		{"IfmaMsghdr.Msglen", Field, 0},
+		{"IfmaMsghdr.Pad_cgo_0", Field, 0},
+		{"IfmaMsghdr.Type", Field, 0},
+		{"IfmaMsghdr.Version", Field, 0},
+		{"IfmaMsghdr2", Type, 0},
+		{"IfmaMsghdr2.Addrs", Field, 0},
+		{"IfmaMsghdr2.Flags", Field, 0},
+		{"IfmaMsghdr2.Index", Field, 0},
+		{"IfmaMsghdr2.Msglen", Field, 0},
+		{"IfmaMsghdr2.Pad_cgo_0", Field, 0},
+		{"IfmaMsghdr2.Refcount", Field, 0},
+		{"IfmaMsghdr2.Type", Field, 0},
+		{"IfmaMsghdr2.Version", Field, 0},
+		{"ImplementsGetwd", Const, 0},
+		{"Inet4Pktinfo", Type, 0},
+		{"Inet4Pktinfo.Addr", Field, 0},
+		{"Inet4Pktinfo.Ifindex", Field, 0},
+		{"Inet4Pktinfo.Spec_dst", Field, 0},
+		{"Inet6Pktinfo", Type, 0},
+		{"Inet6Pktinfo.Addr", Field, 0},
+		{"Inet6Pktinfo.Ifindex", Field, 0},
+		{"InotifyAddWatch", Func, 0},
+		{"InotifyEvent", Type, 0},
+		{"InotifyEvent.Cookie", Field, 0},
+		{"InotifyEvent.Len", Field, 0},
+		{"InotifyEvent.Mask", Field, 0},
+		{"InotifyEvent.Name", Field, 0},
+		{"InotifyEvent.Wd", Field, 0},
+		{"InotifyInit", Func, 0},
+		{"InotifyInit1", Func, 0},
+		{"InotifyRmWatch", Func, 0},
+		{"InterfaceAddrMessage", Type, 0},
+		{"InterfaceAddrMessage.Data", Field, 0},
+		{"InterfaceAddrMessage.Header", Field, 0},
+		{"InterfaceAnnounceMessage", Type, 1},
+		{"InterfaceAnnounceMessage.Header", Field, 1},
+		{"InterfaceInfo", Type, 0},
+		{"InterfaceInfo.Address", Field, 0},
+		{"InterfaceInfo.BroadcastAddress", Field, 0},
+		{"InterfaceInfo.Flags", Field, 0},
+		{"InterfaceInfo.Netmask", Field, 0},
+		{"InterfaceMessage", Type, 0},
+		{"InterfaceMessage.Data", Field, 0},
+		{"InterfaceMessage.Header", Field, 0},
+		{"InterfaceMulticastAddrMessage", Type, 0},
+		{"InterfaceMulticastAddrMessage.Data", Field, 0},
+		{"InterfaceMulticastAddrMessage.Header", Field, 0},
+		{"InvalidHandle", Const, 0},
+		{"Ioperm", Func, 0},
+		{"Iopl", Func, 0},
+		{"Iovec", Type, 0},
+		{"Iovec.Base", Field, 0},
+		{"Iovec.Len", Field, 0},
+		{"IpAdapterInfo", Type, 0},
+		{"IpAdapterInfo.AdapterName", Field, 0},
+		{"IpAdapterInfo.Address", Field, 0},
+		{"IpAdapterInfo.AddressLength", Field, 0},
+		{"IpAdapterInfo.ComboIndex", Field, 0},
+		{"IpAdapterInfo.CurrentIpAddress", Field, 0},
+		{"IpAdapterInfo.Description", Field, 0},
+		{"IpAdapterInfo.DhcpEnabled", Field, 0},
+		{"IpAdapterInfo.DhcpServer", Field, 0},
+		{"IpAdapterInfo.GatewayList", Field, 0},
+		{"IpAdapterInfo.HaveWins", Field, 0},
+		{"IpAdapterInfo.Index", Field, 0},
+		{"IpAdapterInfo.IpAddressList", Field, 0},
+		{"IpAdapterInfo.LeaseExpires", Field, 0},
+		{"IpAdapterInfo.LeaseObtained", Field, 0},
+		{"IpAdapterInfo.Next", Field, 0},
+		{"IpAdapterInfo.PrimaryWinsServer", Field, 0},
+		{"IpAdapterInfo.SecondaryWinsServer", Field, 0},
+		{"IpAdapterInfo.Type", Field, 0},
+		{"IpAddrString", Type, 0},
+		{"IpAddrString.Context", Field, 0},
+		{"IpAddrString.IpAddress", Field, 0},
+		{"IpAddrString.IpMask", Field, 0},
+		{"IpAddrString.Next", Field, 0},
+		{"IpAddressString", Type, 0},
+		{"IpAddressString.String", Field, 0},
+		{"IpMaskString", Type, 0},
+		{"IpMaskString.String", Field, 2},
+		{"Issetugid", Func, 0},
+		{"KEY_ALL_ACCESS", Const, 0},
+		{"KEY_CREATE_LINK", Const, 0},
+		{"KEY_CREATE_SUB_KEY", Const, 0},
+		{"KEY_ENUMERATE_SUB_KEYS", Const, 0},
+		{"KEY_EXECUTE", Const, 0},
+		{"KEY_NOTIFY", Const, 0},
+		{"KEY_QUERY_VALUE", Const, 0},
+		{"KEY_READ", Const, 0},
+		{"KEY_SET_VALUE", Const, 0},
+		{"KEY_WOW64_32KEY", Const, 0},
+		{"KEY_WOW64_64KEY", Const, 0},
+		{"KEY_WRITE", Const, 0},
+		{"Kevent", Func, 0},
+		{"Kevent_t", Type, 0},
+		{"Kevent_t.Data", Field, 0},
+		{"Kevent_t.Fflags", Field, 0},
+		{"Kevent_t.Filter", Field, 0},
+		{"Kevent_t.Flags", Field, 0},
+		{"Kevent_t.Ident", Field, 0},
+		{"Kevent_t.Pad_cgo_0", Field, 2},
+		{"Kevent_t.Udata", Field, 0},
+		{"Kill", Func, 0},
+		{"Klogctl", Func, 0},
+		{"Kqueue", Func, 0},
+		{"LANG_ENGLISH", Const, 0},
+		{"LAYERED_PROTOCOL", Const, 2},
+		{"LCNT_OVERLOAD_FLUSH", Const, 1},
+		{"LINUX_REBOOT_CMD_CAD_OFF", Const, 0},
+		{"LINUX_REBOOT_CMD_CAD_ON", Const, 0},
+		{"LINUX_REBOOT_CMD_HALT", Const, 0},
+		{"LINUX_REBOOT_CMD_KEXEC", Const, 0},
+		{"LINUX_REBOOT_CMD_POWER_OFF", Const, 0},
+		{"LINUX_REBOOT_CMD_RESTART", Const, 0},
+		{"LINUX_REBOOT_CMD_RESTART2", Const, 0},
+		{"LINUX_REBOOT_CMD_SW_SUSPEND", Const, 0},
+		{"LINUX_REBOOT_MAGIC1", Const, 0},
+		{"LINUX_REBOOT_MAGIC2", Const, 0},
+		{"LOCK_EX", Const, 0},
+		{"LOCK_NB", Const, 0},
+		{"LOCK_SH", Const, 0},
+		{"LOCK_UN", Const, 0},
+		{"LazyDLL", Type, 0},
+		{"LazyDLL.Name", Field, 0},
+		{"LazyProc", Type, 0},
+		{"LazyProc.Name", Field, 0},
+		{"Lchown", Func, 0},
+		{"Linger", Type, 0},
+		{"Linger.Linger", Field, 0},
+		{"Linger.Onoff", Field, 0},
+		{"Link", Func, 0},
+		{"Listen", Func, 0},
+		{"Listxattr", Func, 1},
+		{"LoadCancelIoEx", Func, 1},
+		{"LoadConnectEx", Func, 1},
+		{"LoadCreateSymbolicLink", Func, 4},
+		{"LoadDLL", Func, 0},
+		{"LoadGetAddrInfo", Func, 1},
+		{"LoadLibrary", Func, 0},
+		{"LoadSetFileCompletionNotificationModes", Func, 2},
+		{"LocalFree", Func, 0},
+		{"Log2phys_t", Type, 0},
+		{"Log2phys_t.Contigbytes", Field, 0},
+		{"Log2phys_t.Devoffset", Field, 0},
+		{"Log2phys_t.Flags", Field, 0},
+		{"LookupAccountName", Func, 0},
+		{"LookupAccountSid", Func, 0},
+		{"LookupSID", Func, 0},
+		{"LsfJump", Func, 0},
+		{"LsfSocket", Func, 0},
+		{"LsfStmt", Func, 0},
+		{"Lstat", Func, 0},
+		{"MADV_AUTOSYNC", Const, 1},
+		{"MADV_CAN_REUSE", Const, 0},
+		{"MADV_CORE", Const, 1},
+		{"MADV_DOFORK", Const, 0},
+		{"MADV_DONTFORK", Const, 0},
+		{"MADV_DONTNEED", Const, 0},
+		{"MADV_FREE", Const, 0},
+		{"MADV_FREE_REUSABLE", Const, 0},
+		{"MADV_FREE_REUSE", Const, 0},
+		{"MADV_HUGEPAGE", Const, 0},
+		{"MADV_HWPOISON", Const, 0},
+		{"MADV_MERGEABLE", Const, 0},
+		{"MADV_NOCORE", Const, 1},
+		{"MADV_NOHUGEPAGE", Const, 0},
+		{"MADV_NORMAL", Const, 0},
+		{"MADV_NOSYNC", Const, 1},
+		{"MADV_PROTECT", Const, 1},
+		{"MADV_RANDOM", Const, 0},
+		{"MADV_REMOVE", Const, 0},
+		{"MADV_SEQUENTIAL", Const, 0},
+		{"MADV_SPACEAVAIL", Const, 3},
+		{"MADV_UNMERGEABLE", Const, 0},
+		{"MADV_WILLNEED", Const, 0},
+		{"MADV_ZERO_WIRED_PAGES", Const, 0},
+		{"MAP_32BIT", Const, 0},
+		{"MAP_ALIGNED_SUPER", Const, 3},
+		{"MAP_ALIGNMENT_16MB", Const, 3},
+		{"MAP_ALIGNMENT_1TB", Const, 3},
+		{"MAP_ALIGNMENT_256TB", Const, 3},
+		{"MAP_ALIGNMENT_4GB", Const, 3},
+		{"MAP_ALIGNMENT_64KB", Const, 3},
+		{"MAP_ALIGNMENT_64PB", Const, 3},
+		{"MAP_ALIGNMENT_MASK", Const, 3},
+		{"MAP_ALIGNMENT_SHIFT", Const, 3},
+		{"MAP_ANON", Const, 0},
+		{"MAP_ANONYMOUS", Const, 0},
+		{"MAP_COPY", Const, 0},
+		{"MAP_DENYWRITE", Const, 0},
+		{"MAP_EXECUTABLE", Const, 0},
+		{"MAP_FILE", Const, 0},
+		{"MAP_FIXED", Const, 0},
+		{"MAP_FLAGMASK", Const, 3},
+		{"MAP_GROWSDOWN", Const, 0},
+		{"MAP_HASSEMAPHORE", Const, 0},
+		{"MAP_HUGETLB", Const, 0},
+		{"MAP_INHERIT", Const, 3},
+		{"MAP_INHERIT_COPY", Const, 3},
+		{"MAP_INHERIT_DEFAULT", Const, 3},
+		{"MAP_INHERIT_DONATE_COPY", Const, 3},
+		{"MAP_INHERIT_NONE", Const, 3},
+		{"MAP_INHERIT_SHARE", Const, 3},
+		{"MAP_JIT", Const, 0},
+		{"MAP_LOCKED", Const, 0},
+		{"MAP_NOCACHE", Const, 0},
+		{"MAP_NOCORE", Const, 1},
+		{"MAP_NOEXTEND", Const, 0},
+		{"MAP_NONBLOCK", Const, 0},
+		{"MAP_NORESERVE", Const, 0},
+		{"MAP_NOSYNC", Const, 1},
+		{"MAP_POPULATE", Const, 0},
+		{"MAP_PREFAULT_READ", Const, 1},
+		{"MAP_PRIVATE", Const, 0},
+		{"MAP_RENAME", Const, 0},
+		{"MAP_RESERVED0080", Const, 0},
+		{"MAP_RESERVED0100", Const, 1},
+		{"MAP_SHARED", Const, 0},
+		{"MAP_STACK", Const, 0},
+		{"MAP_TRYFIXED", Const, 3},
+		{"MAP_TYPE", Const, 0},
+		{"MAP_WIRED", Const, 3},
+		{"MAXIMUM_REPARSE_DATA_BUFFER_SIZE", Const, 4},
+		{"MAXLEN_IFDESCR", Const, 0},
+		{"MAXLEN_PHYSADDR", Const, 0},
+		{"MAX_ADAPTER_ADDRESS_LENGTH", Const, 0},
+		{"MAX_ADAPTER_DESCRIPTION_LENGTH", Const, 0},
+		{"MAX_ADAPTER_NAME_LENGTH", Const, 0},
+		{"MAX_COMPUTERNAME_LENGTH", Const, 0},
+		{"MAX_INTERFACE_NAME_LEN", Const, 0},
+		{"MAX_LONG_PATH", Const, 0},
+		{"MAX_PATH", Const, 0},
+		{"MAX_PROTOCOL_CHAIN", Const, 2},
+		{"MCL_CURRENT", Const, 0},
+		{"MCL_FUTURE", Const, 0},
+		{"MNT_DETACH", Const, 0},
+		{"MNT_EXPIRE", Const, 0},
+		{"MNT_FORCE", Const, 0},
+		{"MSG_BCAST", Const, 1},
+		{"MSG_CMSG_CLOEXEC", Const, 0},
+		{"MSG_COMPAT", Const, 0},
+		{"MSG_CONFIRM", Const, 0},
+		{"MSG_CONTROLMBUF", Const, 1},
+		{"MSG_CTRUNC", Const, 0},
+		{"MSG_DONTROUTE", Const, 0},
+		{"MSG_DONTWAIT", Const, 0},
+		{"MSG_EOF", Const, 0},
+		{"MSG_EOR", Const, 0},
+		{"MSG_ERRQUEUE", Const, 0},
+		{"MSG_FASTOPEN", Const, 1},
+		{"MSG_FIN", Const, 0},
+		{"MSG_FLUSH", Const, 0},
+		{"MSG_HAVEMORE", Const, 0},
+		{"MSG_HOLD", Const, 0},
+		{"MSG_IOVUSRSPACE", Const, 1},
+		{"MSG_LENUSRSPACE", Const, 1},
+		{"MSG_MCAST", Const, 1},
+		{"MSG_MORE", Const, 0},
+		{"MSG_NAMEMBUF", Const, 1},
+		{"MSG_NBIO", Const, 0},
+		{"MSG_NEEDSA", Const, 0},
+		{"MSG_NOSIGNAL", Const, 0},
+		{"MSG_NOTIFICATION", Const, 0},
+		{"MSG_OOB", Const, 0},
+		{"MSG_PEEK", Const, 0},
+		{"MSG_PROXY", Const, 0},
+		{"MSG_RCVMORE", Const, 0},
+		{"MSG_RST", Const, 0},
+		{"MSG_SEND", Const, 0},
+		{"MSG_SYN", Const, 0},
+		{"MSG_TRUNC", Const, 0},
+		{"MSG_TRYHARD", Const, 0},
+		{"MSG_USERFLAGS", Const, 1},
+		{"MSG_WAITALL", Const, 0},
+		{"MSG_WAITFORONE", Const, 0},
+		{"MSG_WAITSTREAM", Const, 0},
+		{"MS_ACTIVE", Const, 0},
+		{"MS_ASYNC", Const, 0},
+		{"MS_BIND", Const, 0},
+		{"MS_DEACTIVATE", Const, 0},
+		{"MS_DIRSYNC", Const, 0},
+		{"MS_INVALIDATE", Const, 0},
+		{"MS_I_VERSION", Const, 0},
+		{"MS_KERNMOUNT", Const, 0},
+		{"MS_KILLPAGES", Const, 0},
+		{"MS_MANDLOCK", Const, 0},
+		{"MS_MGC_MSK", Const, 0},
+		{"MS_MGC_VAL", Const, 0},
+		{"MS_MOVE", Const, 0},
+		{"MS_NOATIME", Const, 0},
+		{"MS_NODEV", Const, 0},
+		{"MS_NODIRATIME", Const, 0},
+		{"MS_NOEXEC", Const, 0},
+		{"MS_NOSUID", Const, 0},
+		{"MS_NOUSER", Const, 0},
+		{"MS_POSIXACL", Const, 0},
+		{"MS_PRIVATE", Const, 0},
+		{"MS_RDONLY", Const, 0},
+		{"MS_REC", Const, 0},
+		{"MS_RELATIME", Const, 0},
+		{"MS_REMOUNT", Const, 0},
+		{"MS_RMT_MASK", Const, 0},
+		{"MS_SHARED", Const, 0},
+		{"MS_SILENT", Const, 0},
+		{"MS_SLAVE", Const, 0},
+		{"MS_STRICTATIME", Const, 0},
+		{"MS_SYNC", Const, 0},
+		{"MS_SYNCHRONOUS", Const, 0},
+		{"MS_UNBINDABLE", Const, 0},
+		{"Madvise", Func, 0},
+		{"MapViewOfFile", Func, 0},
+		{"MaxTokenInfoClass", Const, 0},
+		{"Mclpool", Type, 2},
+		{"Mclpool.Alive", Field, 2},
+		{"Mclpool.Cwm", Field, 2},
+		{"Mclpool.Grown", Field, 2},
+		{"Mclpool.Hwm", Field, 2},
+		{"Mclpool.Lwm", Field, 2},
+		{"MibIfRow", Type, 0},
+		{"MibIfRow.AdminStatus", Field, 0},
+		{"MibIfRow.Descr", Field, 0},
+		{"MibIfRow.DescrLen", Field, 0},
+		{"MibIfRow.InDiscards", Field, 0},
+		{"MibIfRow.InErrors", Field, 0},
+		{"MibIfRow.InNUcastPkts", Field, 0},
+		{"MibIfRow.InOctets", Field, 0},
+		{"MibIfRow.InUcastPkts", Field, 0},
+		{"MibIfRow.InUnknownProtos", Field, 0},
+		{"MibIfRow.Index", Field, 0},
+		{"MibIfRow.LastChange", Field, 0},
+		{"MibIfRow.Mtu", Field, 0},
+		{"MibIfRow.Name", Field, 0},
+		{"MibIfRow.OperStatus", Field, 0},
+		{"MibIfRow.OutDiscards", Field, 0},
+		{"MibIfRow.OutErrors", Field, 0},
+		{"MibIfRow.OutNUcastPkts", Field, 0},
+		{"MibIfRow.OutOctets", Field, 0},
+		{"MibIfRow.OutQLen", Field, 0},
+		{"MibIfRow.OutUcastPkts", Field, 0},
+		{"MibIfRow.PhysAddr", Field, 0},
+		{"MibIfRow.PhysAddrLen", Field, 0},
+		{"MibIfRow.Speed", Field, 0},
+		{"MibIfRow.Type", Field, 0},
+		{"Mkdir", Func, 0},
+		{"Mkdirat", Func, 0},
+		{"Mkfifo", Func, 0},
+		{"Mknod", Func, 0},
+		{"Mknodat", Func, 0},
+		{"Mlock", Func, 0},
+		{"Mlockall", Func, 0},
+		{"Mmap", Func, 0},
+		{"Mount", Func, 0},
+		{"MoveFile", Func, 0},
+		{"Mprotect", Func, 0},
+		{"Msghdr", Type, 0},
+		{"Msghdr.Control", Field, 0},
+		{"Msghdr.Controllen", Field, 0},
+		{"Msghdr.Flags", Field, 0},
+		{"Msghdr.Iov", Field, 0},
+		{"Msghdr.Iovlen", Field, 0},
+		{"Msghdr.Name", Field, 0},
+		{"Msghdr.Namelen", Field, 0},
+		{"Msghdr.Pad_cgo_0", Field, 0},
+		{"Msghdr.Pad_cgo_1", Field, 0},
+		{"Munlock", Func, 0},
+		{"Munlockall", Func, 0},
+		{"Munmap", Func, 0},
+		{"MustLoadDLL", Func, 0},
+		{"NAME_MAX", Const, 0},
+		{"NETLINK_ADD_MEMBERSHIP", Const, 0},
+		{"NETLINK_AUDIT", Const, 0},
+		{"NETLINK_BROADCAST_ERROR", Const, 0},
+		{"NETLINK_CONNECTOR", Const, 0},
+		{"NETLINK_DNRTMSG", Const, 0},
+		{"NETLINK_DROP_MEMBERSHIP", Const, 0},
+		{"NETLINK_ECRYPTFS", Const, 0},
+		{"NETLINK_FIB_LOOKUP", Const, 0},
+		{"NETLINK_FIREWALL", Const, 0},
+		{"NETLINK_GENERIC", Const, 0},
+		{"NETLINK_INET_DIAG", Const, 0},
+		{"NETLINK_IP6_FW", Const, 0},
+		{"NETLINK_ISCSI", Const, 0},
+		{"NETLINK_KOBJECT_UEVENT", Const, 0},
+		{"NETLINK_NETFILTER", Const, 0},
+		{"NETLINK_NFLOG", Const, 0},
+		{"NETLINK_NO_ENOBUFS", Const, 0},
+		{"NETLINK_PKTINFO", Const, 0},
+		{"NETLINK_RDMA", Const, 0},
+		{"NETLINK_ROUTE", Const, 0},
+		{"NETLINK_SCSITRANSPORT", Const, 0},
+		{"NETLINK_SELINUX", Const, 0},
+		{"NETLINK_UNUSED", Const, 0},
+		{"NETLINK_USERSOCK", Const, 0},
+		{"NETLINK_XFRM", Const, 0},
+		{"NET_RT_DUMP", Const, 0},
+		{"NET_RT_DUMP2", Const, 0},
+		{"NET_RT_FLAGS", Const, 0},
+		{"NET_RT_IFLIST", Const, 0},
+		{"NET_RT_IFLIST2", Const, 0},
+		{"NET_RT_IFLISTL", Const, 1},
+		{"NET_RT_IFMALIST", Const, 0},
+		{"NET_RT_MAXID", Const, 0},
+		{"NET_RT_OIFLIST", Const, 1},
+		{"NET_RT_OOIFLIST", Const, 1},
+		{"NET_RT_STAT", Const, 0},
+		{"NET_RT_STATS", Const, 1},
+		{"NET_RT_TABLE", Const, 1},
+		{"NET_RT_TRASH", Const, 0},
+		{"NLA_ALIGNTO", Const, 0},
+		{"NLA_F_NESTED", Const, 0},
+		{"NLA_F_NET_BYTEORDER", Const, 0},
+		{"NLA_HDRLEN", Const, 0},
+		{"NLMSG_ALIGNTO", Const, 0},
+		{"NLMSG_DONE", Const, 0},
+		{"NLMSG_ERROR", Const, 0},
+		{"NLMSG_HDRLEN", Const, 0},
+		{"NLMSG_MIN_TYPE", Const, 0},
+		{"NLMSG_NOOP", Const, 0},
+		{"NLMSG_OVERRUN", Const, 0},
+		{"NLM_F_ACK", Const, 0},
+		{"NLM_F_APPEND", Const, 0},
+		{"NLM_F_ATOMIC", Const, 0},
+		{"NLM_F_CREATE", Const, 0},
+		{"NLM_F_DUMP", Const, 0},
+		{"NLM_F_ECHO", Const, 0},
+		{"NLM_F_EXCL", Const, 0},
+		{"NLM_F_MATCH", Const, 0},
+		{"NLM_F_MULTI", Const, 0},
+		{"NLM_F_REPLACE", Const, 0},
+		{"NLM_F_REQUEST", Const, 0},
+		{"NLM_F_ROOT", Const, 0},
+		{"NOFLSH", Const, 0},
+		{"NOTE_ABSOLUTE", Const, 0},
+		{"NOTE_ATTRIB", Const, 0},
+		{"NOTE_BACKGROUND", Const, 16},
+		{"NOTE_CHILD", Const, 0},
+		{"NOTE_CRITICAL", Const, 16},
+		{"NOTE_DELETE", Const, 0},
+		{"NOTE_EOF", Const, 1},
+		{"NOTE_EXEC", Const, 0},
+		{"NOTE_EXIT", Const, 0},
+		{"NOTE_EXITSTATUS", Const, 0},
+		{"NOTE_EXIT_CSERROR", Const, 16},
+		{"NOTE_EXIT_DECRYPTFAIL", Const, 16},
+		{"NOTE_EXIT_DETAIL", Const, 16},
+		{"NOTE_EXIT_DETAIL_MASK", Const, 16},
+		{"NOTE_EXIT_MEMORY", Const, 16},
+		{"NOTE_EXIT_REPARENTED", Const, 16},
+		{"NOTE_EXTEND", Const, 0},
+		{"NOTE_FFAND", Const, 0},
+		{"NOTE_FFCOPY", Const, 0},
+		{"NOTE_FFCTRLMASK", Const, 0},
+		{"NOTE_FFLAGSMASK", Const, 0},
+		{"NOTE_FFNOP", Const, 0},
+		{"NOTE_FFOR", Const, 0},
+		{"NOTE_FORK", Const, 0},
+		{"NOTE_LEEWAY", Const, 16},
+		{"NOTE_LINK", Const, 0},
+		{"NOTE_LOWAT", Const, 0},
+		{"NOTE_NONE", Const, 0},
+		{"NOTE_NSECONDS", Const, 0},
+		{"NOTE_PCTRLMASK", Const, 0},
+		{"NOTE_PDATAMASK", Const, 0},
+		{"NOTE_REAP", Const, 0},
+		{"NOTE_RENAME", Const, 0},
+		{"NOTE_RESOURCEEND", Const, 0},
+		{"NOTE_REVOKE", Const, 0},
+		{"NOTE_SECONDS", Const, 0},
+		{"NOTE_SIGNAL", Const, 0},
+		{"NOTE_TRACK", Const, 0},
+		{"NOTE_TRACKERR", Const, 0},
+		{"NOTE_TRIGGER", Const, 0},
+		{"NOTE_TRUNCATE", Const, 1},
+		{"NOTE_USECONDS", Const, 0},
+		{"NOTE_VM_ERROR", Const, 0},
+		{"NOTE_VM_PRESSURE", Const, 0},
+		{"NOTE_VM_PRESSURE_SUDDEN_TERMINATE", Const, 0},
+		{"NOTE_VM_PRESSURE_TERMINATE", Const, 0},
+		{"NOTE_WRITE", Const, 0},
+		{"NameCanonical", Const, 0},
+		{"NameCanonicalEx", Const, 0},
+		{"NameDisplay", Const, 0},
+		{"NameDnsDomain", Const, 0},
+		{"NameFullyQualifiedDN", Const, 0},
+		{"NameSamCompatible", Const, 0},
+		{"NameServicePrincipal", Const, 0},
+		{"NameUniqueId", Const, 0},
+		{"NameUnknown", Const, 0},
+		{"NameUserPrincipal", Const, 0},
+		{"Nanosleep", Func, 0},
+		{"NetApiBufferFree", Func, 0},
+		{"NetGetJoinInformation", Func, 2},
+		{"NetSetupDomainName", Const, 2},
+		{"NetSetupUnjoined", Const, 2},
+		{"NetSetupUnknownStatus", Const, 2},
+		{"NetSetupWorkgroupName", Const, 2},
+		{"NetUserGetInfo", Func, 0},
+		{"NetlinkMessage", Type, 0},
+		{"NetlinkMessage.Data", Field, 0},
+		{"NetlinkMessage.Header", Field, 0},
+		{"NetlinkRIB", Func, 0},
+		{"NetlinkRouteAttr", Type, 0},
+		{"NetlinkRouteAttr.Attr", Field, 0},
+		{"NetlinkRouteAttr.Value", Field, 0},
+		{"NetlinkRouteRequest", Type, 0},
+		{"NetlinkRouteRequest.Data", Field, 0},
+		{"NetlinkRouteRequest.Header", Field, 0},
+		{"NewCallback", Func, 0},
+		{"NewCallbackCDecl", Func, 3},
+		{"NewLazyDLL", Func, 0},
+		{"NlAttr", Type, 0},
+		{"NlAttr.Len", Field, 0},
+		{"NlAttr.Type", Field, 0},
+		{"NlMsgerr", Type, 0},
+		{"NlMsgerr.Error", Field, 0},
+		{"NlMsgerr.Msg", Field, 0},
+		{"NlMsghdr", Type, 0},
+		{"NlMsghdr.Flags", Field, 0},
+		{"NlMsghdr.Len", Field, 0},
+		{"NlMsghdr.Pid", Field, 0},
+		{"NlMsghdr.Seq", Field, 0},
+		{"NlMsghdr.Type", Field, 0},
+		{"NsecToFiletime", Func, 0},
+		{"NsecToTimespec", Func, 0},
+		{"NsecToTimeval", Func, 0},
+		{"Ntohs", Func, 0},
+		{"OCRNL", Const, 0},
+		{"OFDEL", Const, 0},
+		{"OFILL", Const, 0},
+		{"OFIOGETBMAP", Const, 1},
+		{"OID_PKIX_KP_SERVER_AUTH", Var, 0},
+		{"OID_SERVER_GATED_CRYPTO", Var, 0},
+		{"OID_SGC_NETSCAPE", Var, 0},
+		{"OLCUC", Const, 0},
+		{"ONLCR", Const, 0},
+		{"ONLRET", Const, 0},
+		{"ONOCR", Const, 0},
+		{"ONOEOT", Const, 1},
+		{"OPEN_ALWAYS", Const, 0},
+		{"OPEN_EXISTING", Const, 0},
+		{"OPOST", Const, 0},
+		{"O_ACCMODE", Const, 0},
+		{"O_ALERT", Const, 0},
+		{"O_ALT_IO", Const, 1},
+		{"O_APPEND", Const, 0},
+		{"O_ASYNC", Const, 0},
+		{"O_CLOEXEC", Const, 0},
+		{"O_CREAT", Const, 0},
+		{"O_DIRECT", Const, 0},
+		{"O_DIRECTORY", Const, 0},
+		{"O_DP_GETRAWENCRYPTED", Const, 16},
+		{"O_DSYNC", Const, 0},
+		{"O_EVTONLY", Const, 0},
+		{"O_EXCL", Const, 0},
+		{"O_EXEC", Const, 0},
+		{"O_EXLOCK", Const, 0},
+		{"O_FSYNC", Const, 0},
+		{"O_LARGEFILE", Const, 0},
+		{"O_NDELAY", Const, 0},
+		{"O_NOATIME", Const, 0},
+		{"O_NOCTTY", Const, 0},
+		{"O_NOFOLLOW", Const, 0},
+		{"O_NONBLOCK", Const, 0},
+		{"O_NOSIGPIPE", Const, 1},
+		{"O_POPUP", Const, 0},
+		{"O_RDONLY", Const, 0},
+		{"O_RDWR", Const, 0},
+		{"O_RSYNC", Const, 0},
+		{"O_SHLOCK", Const, 0},
+		{"O_SYMLINK", Const, 0},
+		{"O_SYNC", Const, 0},
+		{"O_TRUNC", Const, 0},
+		{"O_TTY_INIT", Const, 0},
+		{"O_WRONLY", Const, 0},
+		{"Open", Func, 0},
+		{"OpenCurrentProcessToken", Func, 0},
+		{"OpenProcess", Func, 0},
+		{"OpenProcessToken", Func, 0},
+		{"Openat", Func, 0},
+		{"Overlapped", Type, 0},
+		{"Overlapped.HEvent", Field, 0},
+		{"Overlapped.Internal", Field, 0},
+		{"Overlapped.InternalHigh", Field, 0},
+		{"Overlapped.Offset", Field, 0},
+		{"Overlapped.OffsetHigh", Field, 0},
+		{"PACKET_ADD_MEMBERSHIP", Const, 0},
+		{"PACKET_BROADCAST", Const, 0},
+		{"PACKET_DROP_MEMBERSHIP", Const, 0},
+		{"PACKET_FASTROUTE", Const, 0},
+		{"PACKET_HOST", Const, 0},
+		{"PACKET_LOOPBACK", Const, 0},
+		{"PACKET_MR_ALLMULTI", Const, 0},
+		{"PACKET_MR_MULTICAST", Const, 0},
+		{"PACKET_MR_PROMISC", Const, 0},
+		{"PACKET_MULTICAST", Const, 0},
+		{"PACKET_OTHERHOST", Const, 0},
+		{"PACKET_OUTGOING", Const, 0},
+		{"PACKET_RECV_OUTPUT", Const, 0},
+		{"PACKET_RX_RING", Const, 0},
+		{"PACKET_STATISTICS", Const, 0},
+		{"PAGE_EXECUTE_READ", Const, 0},
+		{"PAGE_EXECUTE_READWRITE", Const, 0},
+		{"PAGE_EXECUTE_WRITECOPY", Const, 0},
+		{"PAGE_READONLY", Const, 0},
+		{"PAGE_READWRITE", Const, 0},
+		{"PAGE_WRITECOPY", Const, 0},
+		{"PARENB", Const, 0},
+		{"PARMRK", Const, 0},
+		{"PARODD", Const, 0},
+		{"PENDIN", Const, 0},
+		{"PFL_HIDDEN", Const, 2},
+		{"PFL_MATCHES_PROTOCOL_ZERO", Const, 2},
+		{"PFL_MULTIPLE_PROTO_ENTRIES", Const, 2},
+		{"PFL_NETWORKDIRECT_PROVIDER", Const, 2},
+		{"PFL_RECOMMENDED_PROTO_ENTRY", Const, 2},
+		{"PF_FLUSH", Const, 1},
+		{"PKCS_7_ASN_ENCODING", Const, 0},
+		{"PMC5_PIPELINE_FLUSH", Const, 1},
+		{"PRIO_PGRP", Const, 2},
+		{"PRIO_PROCESS", Const, 2},
+		{"PRIO_USER", Const, 2},
+		{"PRI_IOFLUSH", Const, 1},
+		{"PROCESS_QUERY_INFORMATION", Const, 0},
+		{"PROCESS_TERMINATE", Const, 2},
+		{"PROT_EXEC", Const, 0},
+		{"PROT_GROWSDOWN", Const, 0},
+		{"PROT_GROWSUP", Const, 0},
+		{"PROT_NONE", Const, 0},
+		{"PROT_READ", Const, 0},
+		{"PROT_WRITE", Const, 0},
+		{"PROV_DH_SCHANNEL", Const, 0},
+		{"PROV_DSS", Const, 0},
+		{"PROV_DSS_DH", Const, 0},
+		{"PROV_EC_ECDSA_FULL", Const, 0},
+		{"PROV_EC_ECDSA_SIG", Const, 0},
+		{"PROV_EC_ECNRA_FULL", Const, 0},
+		{"PROV_EC_ECNRA_SIG", Const, 0},
+		{"PROV_FORTEZZA", Const, 0},
+		{"PROV_INTEL_SEC", Const, 0},
+		{"PROV_MS_EXCHANGE", Const, 0},
+		{"PROV_REPLACE_OWF", Const, 0},
+		{"PROV_RNG", Const, 0},
+		{"PROV_RSA_AES", Const, 0},
+		{"PROV_RSA_FULL", Const, 0},
+		{"PROV_RSA_SCHANNEL", Const, 0},
+		{"PROV_RSA_SIG", Const, 0},
+		{"PROV_SPYRUS_LYNKS", Const, 0},
+		{"PROV_SSL", Const, 0},
+		{"PR_CAPBSET_DROP", Const, 0},
+		{"PR_CAPBSET_READ", Const, 0},
+		{"PR_CLEAR_SECCOMP_FILTER", Const, 0},
+		{"PR_ENDIAN_BIG", Const, 0},
+		{"PR_ENDIAN_LITTLE", Const, 0},
+		{"PR_ENDIAN_PPC_LITTLE", Const, 0},
+		{"PR_FPEMU_NOPRINT", Const, 0},
+		{"PR_FPEMU_SIGFPE", Const, 0},
+		{"PR_FP_EXC_ASYNC", Const, 0},
+		{"PR_FP_EXC_DISABLED", Const, 0},
+		{"PR_FP_EXC_DIV", Const, 0},
+		{"PR_FP_EXC_INV", Const, 0},
+		{"PR_FP_EXC_NONRECOV", Const, 0},
+		{"PR_FP_EXC_OVF", Const, 0},
+		{"PR_FP_EXC_PRECISE", Const, 0},
+		{"PR_FP_EXC_RES", Const, 0},
+		{"PR_FP_EXC_SW_ENABLE", Const, 0},
+		{"PR_FP_EXC_UND", Const, 0},
+		{"PR_GET_DUMPABLE", Const, 0},
+		{"PR_GET_ENDIAN", Const, 0},
+		{"PR_GET_FPEMU", Const, 0},
+		{"PR_GET_FPEXC", Const, 0},
+		{"PR_GET_KEEPCAPS", Const, 0},
+		{"PR_GET_NAME", Const, 0},
+		{"PR_GET_PDEATHSIG", Const, 0},
+		{"PR_GET_SECCOMP", Const, 0},
+		{"PR_GET_SECCOMP_FILTER", Const, 0},
+		{"PR_GET_SECUREBITS", Const, 0},
+		{"PR_GET_TIMERSLACK", Const, 0},
+		{"PR_GET_TIMING", Const, 0},
+		{"PR_GET_TSC", Const, 0},
+		{"PR_GET_UNALIGN", Const, 0},
+		{"PR_MCE_KILL", Const, 0},
+		{"PR_MCE_KILL_CLEAR", Const, 0},
+		{"PR_MCE_KILL_DEFAULT", Const, 0},
+		{"PR_MCE_KILL_EARLY", Const, 0},
+		{"PR_MCE_KILL_GET", Const, 0},
+		{"PR_MCE_KILL_LATE", Const, 0},
+		{"PR_MCE_KILL_SET", Const, 0},
+		{"PR_SECCOMP_FILTER_EVENT", Const, 0},
+		{"PR_SECCOMP_FILTER_SYSCALL", Const, 0},
+		{"PR_SET_DUMPABLE", Const, 0},
+		{"PR_SET_ENDIAN", Const, 0},
+		{"PR_SET_FPEMU", Const, 0},
+		{"PR_SET_FPEXC", Const, 0},
+		{"PR_SET_KEEPCAPS", Const, 0},
+		{"PR_SET_NAME", Const, 0},
+		{"PR_SET_PDEATHSIG", Const, 0},
+		{"PR_SET_PTRACER", Const, 0},
+		{"PR_SET_SECCOMP", Const, 0},
+		{"PR_SET_SECCOMP_FILTER", Const, 0},
+		{"PR_SET_SECUREBITS", Const, 0},
+		{"PR_SET_TIMERSLACK", Const, 0},
+		{"PR_SET_TIMING", Const, 0},
+		{"PR_SET_TSC", Const, 0},
+		{"PR_SET_UNALIGN", Const, 0},
+		{"PR_TASK_PERF_EVENTS_DISABLE", Const, 0},
+		{"PR_TASK_PERF_EVENTS_ENABLE", Const, 0},
+		{"PR_TIMING_STATISTICAL", Const, 0},
+		{"PR_TIMING_TIMESTAMP", Const, 0},
+		{"PR_TSC_ENABLE", Const, 0},
+		{"PR_TSC_SIGSEGV", Const, 0},
+		{"PR_UNALIGN_NOPRINT", Const, 0},
+		{"PR_UNALIGN_SIGBUS", Const, 0},
+		{"PTRACE_ARCH_PRCTL", Const, 0},
+		{"PTRACE_ATTACH", Const, 0},
+		{"PTRACE_CONT", Const, 0},
+		{"PTRACE_DETACH", Const, 0},
+		{"PTRACE_EVENT_CLONE", Const, 0},
+		{"PTRACE_EVENT_EXEC", Const, 0},
+		{"PTRACE_EVENT_EXIT", Const, 0},
+		{"PTRACE_EVENT_FORK", Const, 0},
+		{"PTRACE_EVENT_VFORK", Const, 0},
+		{"PTRACE_EVENT_VFORK_DONE", Const, 0},
+		{"PTRACE_GETCRUNCHREGS", Const, 0},
+		{"PTRACE_GETEVENTMSG", Const, 0},
+		{"PTRACE_GETFPREGS", Const, 0},
+		{"PTRACE_GETFPXREGS", Const, 0},
+		{"PTRACE_GETHBPREGS", Const, 0},
+		{"PTRACE_GETREGS", Const, 0},
+		{"PTRACE_GETREGSET", Const, 0},
+		{"PTRACE_GETSIGINFO", Const, 0},
+		{"PTRACE_GETVFPREGS", Const, 0},
+		{"PTRACE_GETWMMXREGS", Const, 0},
+		{"PTRACE_GET_THREAD_AREA", Const, 0},
+		{"PTRACE_KILL", Const, 0},
+		{"PTRACE_OLDSETOPTIONS", Const, 0},
+		{"PTRACE_O_MASK", Const, 0},
+		{"PTRACE_O_TRACECLONE", Const, 0},
+		{"PTRACE_O_TRACEEXEC", Const, 0},
+		{"PTRACE_O_TRACEEXIT", Const, 0},
+		{"PTRACE_O_TRACEFORK", Const, 0},
+		{"PTRACE_O_TRACESYSGOOD", Const, 0},
+		{"PTRACE_O_TRACEVFORK", Const, 0},
+		{"PTRACE_O_TRACEVFORKDONE", Const, 0},
+		{"PTRACE_PEEKDATA", Const, 0},
+		{"PTRACE_PEEKTEXT", Const, 0},
+		{"PTRACE_PEEKUSR", Const, 0},
+		{"PTRACE_POKEDATA", Const, 0},
+		{"PTRACE_POKETEXT", Const, 0},
+		{"PTRACE_POKEUSR", Const, 0},
+		{"PTRACE_SETCRUNCHREGS", Const, 0},
+		{"PTRACE_SETFPREGS", Const, 0},
+		{"PTRACE_SETFPXREGS", Const, 0},
+		{"PTRACE_SETHBPREGS", Const, 0},
+		{"PTRACE_SETOPTIONS", Const, 0},
+		{"PTRACE_SETREGS", Const, 0},
+		{"PTRACE_SETREGSET", Const, 0},
+		{"PTRACE_SETSIGINFO", Const, 0},
+		{"PTRACE_SETVFPREGS", Const, 0},
+		{"PTRACE_SETWMMXREGS", Const, 0},
+		{"PTRACE_SET_SYSCALL", Const, 0},
+		{"PTRACE_SET_THREAD_AREA", Const, 0},
+		{"PTRACE_SINGLEBLOCK", Const, 0},
+		{"PTRACE_SINGLESTEP", Const, 0},
+		{"PTRACE_SYSCALL", Const, 0},
+		{"PTRACE_SYSEMU", Const, 0},
+		{"PTRACE_SYSEMU_SINGLESTEP", Const, 0},
+		{"PTRACE_TRACEME", Const, 0},
+		{"PT_ATTACH", Const, 0},
+		{"PT_ATTACHEXC", Const, 0},
+		{"PT_CONTINUE", Const, 0},
+		{"PT_DATA_ADDR", Const, 0},
+		{"PT_DENY_ATTACH", Const, 0},
+		{"PT_DETACH", Const, 0},
+		{"PT_FIRSTMACH", Const, 0},
+		{"PT_FORCEQUOTA", Const, 0},
+		{"PT_KILL", Const, 0},
+		{"PT_MASK", Const, 1},
+		{"PT_READ_D", Const, 0},
+		{"PT_READ_I", Const, 0},
+		{"PT_READ_U", Const, 0},
+		{"PT_SIGEXC", Const, 0},
+		{"PT_STEP", Const, 0},
+		{"PT_TEXT_ADDR", Const, 0},
+		{"PT_TEXT_END_ADDR", Const, 0},
+		{"PT_THUPDATE", Const, 0},
+		{"PT_TRACE_ME", Const, 0},
+		{"PT_WRITE_D", Const, 0},
+		{"PT_WRITE_I", Const, 0},
+		{"PT_WRITE_U", Const, 0},
+		{"ParseDirent", Func, 0},
+		{"ParseNetlinkMessage", Func, 0},
+		{"ParseNetlinkRouteAttr", Func, 0},
+		{"ParseRoutingMessage", Func, 0},
+		{"ParseRoutingSockaddr", Func, 0},
+		{"ParseSocketControlMessage", Func, 0},
+		{"ParseUnixCredentials", Func, 0},
+		{"ParseUnixRights", Func, 0},
+		{"PathMax", Const, 0},
+		{"Pathconf", Func, 0},
+		{"Pause", Func, 0},
+		{"Pipe", Func, 0},
+		{"Pipe2", Func, 1},
+		{"PivotRoot", Func, 0},
+		{"Pointer", Type, 11},
+		{"PostQueuedCompletionStatus", Func, 0},
+		{"Pread", Func, 0},
+		{"Proc", Type, 0},
+		{"Proc.Dll", Field, 0},
+		{"Proc.Name", Field, 0},
+		{"ProcAttr", Type, 0},
+		{"ProcAttr.Dir", Field, 0},
+		{"ProcAttr.Env", Field, 0},
+		{"ProcAttr.Files", Field, 0},
+		{"ProcAttr.Sys", Field, 0},
+		{"Process32First", Func, 4},
+		{"Process32Next", Func, 4},
+		{"ProcessEntry32", Type, 4},
+		{"ProcessEntry32.DefaultHeapID", Field, 4},
+		{"ProcessEntry32.ExeFile", Field, 4},
+		{"ProcessEntry32.Flags", Field, 4},
+		{"ProcessEntry32.ModuleID", Field, 4},
+		{"ProcessEntry32.ParentProcessID", Field, 4},
+		{"ProcessEntry32.PriClassBase", Field, 4},
+		{"ProcessEntry32.ProcessID", Field, 4},
+		{"ProcessEntry32.Size", Field, 4},
+		{"ProcessEntry32.Threads", Field, 4},
+		{"ProcessEntry32.Usage", Field, 4},
+		{"ProcessInformation", Type, 0},
+		{"ProcessInformation.Process", Field, 0},
+		{"ProcessInformation.ProcessId", Field, 0},
+		{"ProcessInformation.Thread", Field, 0},
+		{"ProcessInformation.ThreadId", Field, 0},
+		{"Protoent", Type, 0},
+		{"Protoent.Aliases", Field, 0},
+		{"Protoent.Name", Field, 0},
+		{"Protoent.Proto", Field, 0},
+		{"PtraceAttach", Func, 0},
+		{"PtraceCont", Func, 0},
+		{"PtraceDetach", Func, 0},
+		{"PtraceGetEventMsg", Func, 0},
+		{"PtraceGetRegs", Func, 0},
+		{"PtracePeekData", Func, 0},
+		{"PtracePeekText", Func, 0},
+		{"PtracePokeData", Func, 0},
+		{"PtracePokeText", Func, 0},
+		{"PtraceRegs", Type, 0},
+		{"PtraceRegs.Cs", Field, 0},
+		{"PtraceRegs.Ds", Field, 0},
+		{"PtraceRegs.Eax", Field, 0},
+		{"PtraceRegs.Ebp", Field, 0},
+		{"PtraceRegs.Ebx", Field, 0},
+		{"PtraceRegs.Ecx", Field, 0},
+		{"PtraceRegs.Edi", Field, 0},
+		{"PtraceRegs.Edx", Field, 0},
+		{"PtraceRegs.Eflags", Field, 0},
+		{"PtraceRegs.Eip", Field, 0},
+		{"PtraceRegs.Es", Field, 0},
+		{"PtraceRegs.Esi", Field, 0},
+		{"PtraceRegs.Esp", Field, 0},
+		{"PtraceRegs.Fs", Field, 0},
+		{"PtraceRegs.Fs_base", Field, 0},
+		{"PtraceRegs.Gs", Field, 0},
+		{"PtraceRegs.Gs_base", Field, 0},
+		{"PtraceRegs.Orig_eax", Field, 0},
+		{"PtraceRegs.Orig_rax", Field, 0},
+		{"PtraceRegs.R10", Field, 0},
+		{"PtraceRegs.R11", Field, 0},
+		{"PtraceRegs.R12", Field, 0},
+		{"PtraceRegs.R13", Field, 0},
+		{"PtraceRegs.R14", Field, 0},
+		{"PtraceRegs.R15", Field, 0},
+		{"PtraceRegs.R8", Field, 0},
+		{"PtraceRegs.R9", Field, 0},
+		{"PtraceRegs.Rax", Field, 0},
+		{"PtraceRegs.Rbp", Field, 0},
+		{"PtraceRegs.Rbx", Field, 0},
+		{"PtraceRegs.Rcx", Field, 0},
+		{"PtraceRegs.Rdi", Field, 0},
+		{"PtraceRegs.Rdx", Field, 0},
+		{"PtraceRegs.Rip", Field, 0},
+		{"PtraceRegs.Rsi", Field, 0},
+		{"PtraceRegs.Rsp", Field, 0},
+		{"PtraceRegs.Ss", Field, 0},
+		{"PtraceRegs.Uregs", Field, 0},
+		{"PtraceRegs.Xcs", Field, 0},
+		{"PtraceRegs.Xds", Field, 0},
+		{"PtraceRegs.Xes", Field, 0},
+		{"PtraceRegs.Xfs", Field, 0},
+		{"PtraceRegs.Xgs", Field, 0},
+		{"PtraceRegs.Xss", Field, 0},
+		{"PtraceSetOptions", Func, 0},
+		{"PtraceSetRegs", Func, 0},
+		{"PtraceSingleStep", Func, 0},
+		{"PtraceSyscall", Func, 1},
+		{"Pwrite", Func, 0},
+		{"REG_BINARY", Const, 0},
+		{"REG_DWORD", Const, 0},
+		{"REG_DWORD_BIG_ENDIAN", Const, 0},
+		{"REG_DWORD_LITTLE_ENDIAN", Const, 0},
+		{"REG_EXPAND_SZ", Const, 0},
+		{"REG_FULL_RESOURCE_DESCRIPTOR", Const, 0},
+		{"REG_LINK", Const, 0},
+		{"REG_MULTI_SZ", Const, 0},
+		{"REG_NONE", Const, 0},
+		{"REG_QWORD", Const, 0},
+		{"REG_QWORD_LITTLE_ENDIAN", Const, 0},
+		{"REG_RESOURCE_LIST", Const, 0},
+		{"REG_RESOURCE_REQUIREMENTS_LIST", Const, 0},
+		{"REG_SZ", Const, 0},
+		{"RLIMIT_AS", Const, 0},
+		{"RLIMIT_CORE", Const, 0},
+		{"RLIMIT_CPU", Const, 0},
+		{"RLIMIT_CPU_USAGE_MONITOR", Const, 16},
+		{"RLIMIT_DATA", Const, 0},
+		{"RLIMIT_FSIZE", Const, 0},
+		{"RLIMIT_NOFILE", Const, 0},
+		{"RLIMIT_STACK", Const, 0},
+		{"RLIM_INFINITY", Const, 0},
+		{"RTAX_ADVMSS", Const, 0},
+		{"RTAX_AUTHOR", Const, 0},
+		{"RTAX_BRD", Const, 0},
+		{"RTAX_CWND", Const, 0},
+		{"RTAX_DST", Const, 0},
+		{"RTAX_FEATURES", Const, 0},
+		{"RTAX_FEATURE_ALLFRAG", Const, 0},
+		{"RTAX_FEATURE_ECN", Const, 0},
+		{"RTAX_FEATURE_SACK", Const, 0},
+		{"RTAX_FEATURE_TIMESTAMP", Const, 0},
+		{"RTAX_GATEWAY", Const, 0},
+		{"RTAX_GENMASK", Const, 0},
+		{"RTAX_HOPLIMIT", Const, 0},
+		{"RTAX_IFA", Const, 0},
+		{"RTAX_IFP", Const, 0},
+		{"RTAX_INITCWND", Const, 0},
+		{"RTAX_INITRWND", Const, 0},
+		{"RTAX_LABEL", Const, 1},
+		{"RTAX_LOCK", Const, 0},
+		{"RTAX_MAX", Const, 0},
+		{"RTAX_MTU", Const, 0},
+		{"RTAX_NETMASK", Const, 0},
+		{"RTAX_REORDERING", Const, 0},
+		{"RTAX_RTO_MIN", Const, 0},
+		{"RTAX_RTT", Const, 0},
+		{"RTAX_RTTVAR", Const, 0},
+		{"RTAX_SRC", Const, 1},
+		{"RTAX_SRCMASK", Const, 1},
+		{"RTAX_SSTHRESH", Const, 0},
+		{"RTAX_TAG", Const, 1},
+		{"RTAX_UNSPEC", Const, 0},
+		{"RTAX_WINDOW", Const, 0},
+		{"RTA_ALIGNTO", Const, 0},
+		{"RTA_AUTHOR", Const, 0},
+		{"RTA_BRD", Const, 0},
+		{"RTA_CACHEINFO", Const, 0},
+		{"RTA_DST", Const, 0},
+		{"RTA_FLOW", Const, 0},
+		{"RTA_GATEWAY", Const, 0},
+		{"RTA_GENMASK", Const, 0},
+		{"RTA_IFA", Const, 0},
+		{"RTA_IFP", Const, 0},
+		{"RTA_IIF", Const, 0},
+		{"RTA_LABEL", Const, 1},
+		{"RTA_MAX", Const, 0},
+		{"RTA_METRICS", Const, 0},
+		{"RTA_MULTIPATH", Const, 0},
+		{"RTA_NETMASK", Const, 0},
+		{"RTA_OIF", Const, 0},
+		{"RTA_PREFSRC", Const, 0},
+		{"RTA_PRIORITY", Const, 0},
+		{"RTA_SRC", Const, 0},
+		{"RTA_SRCMASK", Const, 1},
+		{"RTA_TABLE", Const, 0},
+		{"RTA_TAG", Const, 1},
+		{"RTA_UNSPEC", Const, 0},
+		{"RTCF_DIRECTSRC", Const, 0},
+		{"RTCF_DOREDIRECT", Const, 0},
+		{"RTCF_LOG", Const, 0},
+		{"RTCF_MASQ", Const, 0},
+		{"RTCF_NAT", Const, 0},
+		{"RTCF_VALVE", Const, 0},
+		{"RTF_ADDRCLASSMASK", Const, 0},
+		{"RTF_ADDRCONF", Const, 0},
+		{"RTF_ALLONLINK", Const, 0},
+		{"RTF_ANNOUNCE", Const, 1},
+		{"RTF_BLACKHOLE", Const, 0},
+		{"RTF_BROADCAST", Const, 0},
+		{"RTF_CACHE", Const, 0},
+		{"RTF_CLONED", Const, 1},
+		{"RTF_CLONING", Const, 0},
+		{"RTF_CONDEMNED", Const, 0},
+		{"RTF_DEFAULT", Const, 0},
+		{"RTF_DELCLONE", Const, 0},
+		{"RTF_DONE", Const, 0},
+		{"RTF_DYNAMIC", Const, 0},
+		{"RTF_FLOW", Const, 0},
+		{"RTF_FMASK", Const, 0},
+		{"RTF_GATEWAY", Const, 0},
+		{"RTF_GWFLAG_COMPAT", Const, 3},
+		{"RTF_HOST", Const, 0},
+		{"RTF_IFREF", Const, 0},
+		{"RTF_IFSCOPE", Const, 0},
+		{"RTF_INTERFACE", Const, 0},
+		{"RTF_IRTT", Const, 0},
+		{"RTF_LINKRT", Const, 0},
+		{"RTF_LLDATA", Const, 0},
+		{"RTF_LLINFO", Const, 0},
+		{"RTF_LOCAL", Const, 0},
+		{"RTF_MASK", Const, 1},
+		{"RTF_MODIFIED", Const, 0},
+		{"RTF_MPATH", Const, 1},
+		{"RTF_MPLS", Const, 1},
+		{"RTF_MSS", Const, 0},
+		{"RTF_MTU", Const, 0},
+		{"RTF_MULTICAST", Const, 0},
+		{"RTF_NAT", Const, 0},
+		{"RTF_NOFORWARD", Const, 0},
+		{"RTF_NONEXTHOP", Const, 0},
+		{"RTF_NOPMTUDISC", Const, 0},
+		{"RTF_PERMANENT_ARP", Const, 1},
+		{"RTF_PINNED", Const, 0},
+		{"RTF_POLICY", Const, 0},
+		{"RTF_PRCLONING", Const, 0},
+		{"RTF_PROTO1", Const, 0},
+		{"RTF_PROTO2", Const, 0},
+		{"RTF_PROTO3", Const, 0},
+		{"RTF_PROXY", Const, 16},
+		{"RTF_REINSTATE", Const, 0},
+		{"RTF_REJECT", Const, 0},
+		{"RTF_RNH_LOCKED", Const, 0},
+		{"RTF_ROUTER", Const, 16},
+		{"RTF_SOURCE", Const, 1},
+		{"RTF_SRC", Const, 1},
+		{"RTF_STATIC", Const, 0},
+		{"RTF_STICKY", Const, 0},
+		{"RTF_THROW", Const, 0},
+		{"RTF_TUNNEL", Const, 1},
+		{"RTF_UP", Const, 0},
+		{"RTF_USETRAILERS", Const, 1},
+		{"RTF_WASCLONED", Const, 0},
+		{"RTF_WINDOW", Const, 0},
+		{"RTF_XRESOLVE", Const, 0},
+		{"RTM_ADD", Const, 0},
+		{"RTM_BASE", Const, 0},
+		{"RTM_CHANGE", Const, 0},
+		{"RTM_CHGADDR", Const, 1},
+		{"RTM_DELACTION", Const, 0},
+		{"RTM_DELADDR", Const, 0},
+		{"RTM_DELADDRLABEL", Const, 0},
+		{"RTM_DELETE", Const, 0},
+		{"RTM_DELLINK", Const, 0},
+		{"RTM_DELMADDR", Const, 0},
+		{"RTM_DELNEIGH", Const, 0},
+		{"RTM_DELQDISC", Const, 0},
+		{"RTM_DELROUTE", Const, 0},
+		{"RTM_DELRULE", Const, 0},
+		{"RTM_DELTCLASS", Const, 0},
+		{"RTM_DELTFILTER", Const, 0},
+		{"RTM_DESYNC", Const, 1},
+		{"RTM_F_CLONED", Const, 0},
+		{"RTM_F_EQUALIZE", Const, 0},
+		{"RTM_F_NOTIFY", Const, 0},
+		{"RTM_F_PREFIX", Const, 0},
+		{"RTM_GET", Const, 0},
+		{"RTM_GET2", Const, 0},
+		{"RTM_GETACTION", Const, 0},
+		{"RTM_GETADDR", Const, 0},
+		{"RTM_GETADDRLABEL", Const, 0},
+		{"RTM_GETANYCAST", Const, 0},
+		{"RTM_GETDCB", Const, 0},
+		{"RTM_GETLINK", Const, 0},
+		{"RTM_GETMULTICAST", Const, 0},
+		{"RTM_GETNEIGH", Const, 0},
+		{"RTM_GETNEIGHTBL", Const, 0},
+		{"RTM_GETQDISC", Const, 0},
+		{"RTM_GETROUTE", Const, 0},
+		{"RTM_GETRULE", Const, 0},
+		{"RTM_GETTCLASS", Const, 0},
+		{"RTM_GETTFILTER", Const, 0},
+		{"RTM_IEEE80211", Const, 0},
+		{"RTM_IFANNOUNCE", Const, 0},
+		{"RTM_IFINFO", Const, 0},
+		{"RTM_IFINFO2", Const, 0},
+		{"RTM_LLINFO_UPD", Const, 1},
+		{"RTM_LOCK", Const, 0},
+		{"RTM_LOSING", Const, 0},
+		{"RTM_MAX", Const, 0},
+		{"RTM_MAXSIZE", Const, 1},
+		{"RTM_MISS", Const, 0},
+		{"RTM_NEWACTION", Const, 0},
+		{"RTM_NEWADDR", Const, 0},
+		{"RTM_NEWADDRLABEL", Const, 0},
+		{"RTM_NEWLINK", Const, 0},
+		{"RTM_NEWMADDR", Const, 0},
+		{"RTM_NEWMADDR2", Const, 0},
+		{"RTM_NEWNDUSEROPT", Const, 0},
+		{"RTM_NEWNEIGH", Const, 0},
+		{"RTM_NEWNEIGHTBL", Const, 0},
+		{"RTM_NEWPREFIX", Const, 0},
+		{"RTM_NEWQDISC", Const, 0},
+		{"RTM_NEWROUTE", Const, 0},
+		{"RTM_NEWRULE", Const, 0},
+		{"RTM_NEWTCLASS", Const, 0},
+		{"RTM_NEWTFILTER", Const, 0},
+		{"RTM_NR_FAMILIES", Const, 0},
+		{"RTM_NR_MSGTYPES", Const, 0},
+		{"RTM_OIFINFO", Const, 1},
+		{"RTM_OLDADD", Const, 0},
+		{"RTM_OLDDEL", Const, 0},
+		{"RTM_OOIFINFO", Const, 1},
+		{"RTM_REDIRECT", Const, 0},
+		{"RTM_RESOLVE", Const, 0},
+		{"RTM_RTTUNIT", Const, 0},
+		{"RTM_SETDCB", Const, 0},
+		{"RTM_SETGATE", Const, 1},
+		{"RTM_SETLINK", Const, 0},
+		{"RTM_SETNEIGHTBL", Const, 0},
+		{"RTM_VERSION", Const, 0},
+		{"RTNH_ALIGNTO", Const, 0},
+		{"RTNH_F_DEAD", Const, 0},
+		{"RTNH_F_ONLINK", Const, 0},
+		{"RTNH_F_PERVASIVE", Const, 0},
+		{"RTNLGRP_IPV4_IFADDR", Const, 1},
+		{"RTNLGRP_IPV4_MROUTE", Const, 1},
+		{"RTNLGRP_IPV4_ROUTE", Const, 1},
+		{"RTNLGRP_IPV4_RULE", Const, 1},
+		{"RTNLGRP_IPV6_IFADDR", Const, 1},
+		{"RTNLGRP_IPV6_IFINFO", Const, 1},
+		{"RTNLGRP_IPV6_MROUTE", Const, 1},
+		{"RTNLGRP_IPV6_PREFIX", Const, 1},
+		{"RTNLGRP_IPV6_ROUTE", Const, 1},
+		{"RTNLGRP_IPV6_RULE", Const, 1},
+		{"RTNLGRP_LINK", Const, 1},
+		{"RTNLGRP_ND_USEROPT", Const, 1},
+		{"RTNLGRP_NEIGH", Const, 1},
+		{"RTNLGRP_NONE", Const, 1},
+		{"RTNLGRP_NOTIFY", Const, 1},
+		{"RTNLGRP_TC", Const, 1},
+		{"RTN_ANYCAST", Const, 0},
+		{"RTN_BLACKHOLE", Const, 0},
+		{"RTN_BROADCAST", Const, 0},
+		{"RTN_LOCAL", Const, 0},
+		{"RTN_MAX", Const, 0},
+		{"RTN_MULTICAST", Const, 0},
+		{"RTN_NAT", Const, 0},
+		{"RTN_PROHIBIT", Const, 0},
+		{"RTN_THROW", Const, 0},
+		{"RTN_UNICAST", Const, 0},
+		{"RTN_UNREACHABLE", Const, 0},
+		{"RTN_UNSPEC", Const, 0},
+		{"RTN_XRESOLVE", Const, 0},
+		{"RTPROT_BIRD", Const, 0},
+		{"RTPROT_BOOT", Const, 0},
+		{"RTPROT_DHCP", Const, 0},
+		{"RTPROT_DNROUTED", Const, 0},
+		{"RTPROT_GATED", Const, 0},
+		{"RTPROT_KERNEL", Const, 0},
+		{"RTPROT_MRT", Const, 0},
+		{"RTPROT_NTK", Const, 0},
+		{"RTPROT_RA", Const, 0},
+		{"RTPROT_REDIRECT", Const, 0},
+		{"RTPROT_STATIC", Const, 0},
+		{"RTPROT_UNSPEC", Const, 0},
+		{"RTPROT_XORP", Const, 0},
+		{"RTPROT_ZEBRA", Const, 0},
+		{"RTV_EXPIRE", Const, 0},
+		{"RTV_HOPCOUNT", Const, 0},
+		{"RTV_MTU", Const, 0},
+		{"RTV_RPIPE", Const, 0},
+		{"RTV_RTT", Const, 0},
+		{"RTV_RTTVAR", Const, 0},
+		{"RTV_SPIPE", Const, 0},
+		{"RTV_SSTHRESH", Const, 0},
+		{"RTV_WEIGHT", Const, 0},
+		{"RT_CACHING_CONTEXT", Const, 1},
+		{"RT_CLASS_DEFAULT", Const, 0},
+		{"RT_CLASS_LOCAL", Const, 0},
+		{"RT_CLASS_MAIN", Const, 0},
+		{"RT_CLASS_MAX", Const, 0},
+		{"RT_CLASS_UNSPEC", Const, 0},
+		{"RT_DEFAULT_FIB", Const, 1},
+		{"RT_NORTREF", Const, 1},
+		{"RT_SCOPE_HOST", Const, 0},
+		{"RT_SCOPE_LINK", Const, 0},
+		{"RT_SCOPE_NOWHERE", Const, 0},
+		{"RT_SCOPE_SITE", Const, 0},
+		{"RT_SCOPE_UNIVERSE", Const, 0},
+		{"RT_TABLEID_MAX", Const, 1},
+		{"RT_TABLE_COMPAT", Const, 0},
+		{"RT_TABLE_DEFAULT", Const, 0},
+		{"RT_TABLE_LOCAL", Const, 0},
+		{"RT_TABLE_MAIN", Const, 0},
+		{"RT_TABLE_MAX", Const, 0},
+		{"RT_TABLE_UNSPEC", Const, 0},
+		{"RUSAGE_CHILDREN", Const, 0},
+		{"RUSAGE_SELF", Const, 0},
+		{"RUSAGE_THREAD", Const, 0},
+		{"Radvisory_t", Type, 0},
+		{"Radvisory_t.Count", Field, 0},
+		{"Radvisory_t.Offset", Field, 0},
+		{"Radvisory_t.Pad_cgo_0", Field, 0},
+		{"RawConn", Type, 9},
+		{"RawSockaddr", Type, 0},
+		{"RawSockaddr.Data", Field, 0},
+		{"RawSockaddr.Family", Field, 0},
+		{"RawSockaddr.Len", Field, 0},
+		{"RawSockaddrAny", Type, 0},
+		{"RawSockaddrAny.Addr", Field, 0},
+		{"RawSockaddrAny.Pad", Field, 0},
+		{"RawSockaddrDatalink", Type, 0},
+		{"RawSockaddrDatalink.Alen", Field, 0},
+		{"RawSockaddrDatalink.Data", Field, 0},
+		{"RawSockaddrDatalink.Family", Field, 0},
+		{"RawSockaddrDatalink.Index", Field, 0},
+		{"RawSockaddrDatalink.Len", Field, 0},
+		{"RawSockaddrDatalink.Nlen", Field, 0},
+		{"RawSockaddrDatalink.Pad_cgo_0", Field, 2},
+		{"RawSockaddrDatalink.Slen", Field, 0},
+		{"RawSockaddrDatalink.Type", Field, 0},
+		{"RawSockaddrInet4", Type, 0},
+		{"RawSockaddrInet4.Addr", Field, 0},
+		{"RawSockaddrInet4.Family", Field, 0},
+		{"RawSockaddrInet4.Len", Field, 0},
+		{"RawSockaddrInet4.Port", Field, 0},
+		{"RawSockaddrInet4.Zero", Field, 0},
+		{"RawSockaddrInet6", Type, 0},
+		{"RawSockaddrInet6.Addr", Field, 0},
+		{"RawSockaddrInet6.Family", Field, 0},
+		{"RawSockaddrInet6.Flowinfo", Field, 0},
+		{"RawSockaddrInet6.Len", Field, 0},
+		{"RawSockaddrInet6.Port", Field, 0},
+		{"RawSockaddrInet6.Scope_id", Field, 0},
+		{"RawSockaddrLinklayer", Type, 0},
+		{"RawSockaddrLinklayer.Addr", Field, 0},
+		{"RawSockaddrLinklayer.Family", Field, 0},
+		{"RawSockaddrLinklayer.Halen", Field, 0},
+		{"RawSockaddrLinklayer.Hatype", Field, 0},
+		{"RawSockaddrLinklayer.Ifindex", Field, 0},
+		{"RawSockaddrLinklayer.Pkttype", Field, 0},
+		{"RawSockaddrLinklayer.Protocol", Field, 0},
+		{"RawSockaddrNetlink", Type, 0},
+		{"RawSockaddrNetlink.Family", Field, 0},
+		{"RawSockaddrNetlink.Groups", Field, 0},
+		{"RawSockaddrNetlink.Pad", Field, 0},
+		{"RawSockaddrNetlink.Pid", Field, 0},
+		{"RawSockaddrUnix", Type, 0},
+		{"RawSockaddrUnix.Family", Field, 0},
+		{"RawSockaddrUnix.Len", Field, 0},
+		{"RawSockaddrUnix.Pad_cgo_0", Field, 2},
+		{"RawSockaddrUnix.Path", Field, 0},
+		{"RawSyscall", Func, 0},
+		{"RawSyscall6", Func, 0},
+		{"Read", Func, 0},
+		{"ReadConsole", Func, 1},
+		{"ReadDirectoryChanges", Func, 0},
+		{"ReadDirent", Func, 0},
+		{"ReadFile", Func, 0},
+		{"Readlink", Func, 0},
+		{"Reboot", Func, 0},
+		{"Recvfrom", Func, 0},
+		{"Recvmsg", Func, 0},
+		{"RegCloseKey", Func, 0},
+		{"RegEnumKeyEx", Func, 0},
+		{"RegOpenKeyEx", Func, 0},
+		{"RegQueryInfoKey", Func, 0},
+		{"RegQueryValueEx", Func, 0},
+		{"RemoveDirectory", Func, 0},
+		{"Removexattr", Func, 1},
+		{"Rename", Func, 0},
+		{"Renameat", Func, 0},
+		{"Revoke", Func, 0},
+		{"Rlimit", Type, 0},
+		{"Rlimit.Cur", Field, 0},
+		{"Rlimit.Max", Field, 0},
+		{"Rmdir", Func, 0},
+		{"RouteMessage", Type, 0},
+		{"RouteMessage.Data", Field, 0},
+		{"RouteMessage.Header", Field, 0},
+		{"RouteRIB", Func, 0},
+		{"RoutingMessage", Type, 0},
+		{"RtAttr", Type, 0},
+		{"RtAttr.Len", Field, 0},
+		{"RtAttr.Type", Field, 0},
+		{"RtGenmsg", Type, 0},
+		{"RtGenmsg.Family", Field, 0},
+		{"RtMetrics", Type, 0},
+		{"RtMetrics.Expire", Field, 0},
+		{"RtMetrics.Filler", Field, 0},
+		{"RtMetrics.Hopcount", Field, 0},
+		{"RtMetrics.Locks", Field, 0},
+		{"RtMetrics.Mtu", Field, 0},
+		{"RtMetrics.Pad", Field, 3},
+		{"RtMetrics.Pksent", Field, 0},
+		{"RtMetrics.Recvpipe", Field, 0},
+		{"RtMetrics.Refcnt", Field, 2},
+		{"RtMetrics.Rtt", Field, 0},
+		{"RtMetrics.Rttvar", Field, 0},
+		{"RtMetrics.Sendpipe", Field, 0},
+		{"RtMetrics.Ssthresh", Field, 0},
+		{"RtMetrics.Weight", Field, 0},
+		{"RtMsg", Type, 0},
+		{"RtMsg.Dst_len", Field, 0},
+		{"RtMsg.Family", Field, 0},
+		{"RtMsg.Flags", Field, 0},
+		{"RtMsg.Protocol", Field, 0},
+		{"RtMsg.Scope", Field, 0},
+		{"RtMsg.Src_len", Field, 0},
+		{"RtMsg.Table", Field, 0},
+		{"RtMsg.Tos", Field, 0},
+		{"RtMsg.Type", Field, 0},
+		{"RtMsghdr", Type, 0},
+		{"RtMsghdr.Addrs", Field, 0},
+		{"RtMsghdr.Errno", Field, 0},
+		{"RtMsghdr.Flags", Field, 0},
+		{"RtMsghdr.Fmask", Field, 0},
+		{"RtMsghdr.Hdrlen", Field, 2},
+		{"RtMsghdr.Index", Field, 0},
+		{"RtMsghdr.Inits", Field, 0},
+		{"RtMsghdr.Mpls", Field, 2},
+		{"RtMsghdr.Msglen", Field, 0},
+		{"RtMsghdr.Pad_cgo_0", Field, 0},
+		{"RtMsghdr.Pad_cgo_1", Field, 2},
+		{"RtMsghdr.Pid", Field, 0},
+		{"RtMsghdr.Priority", Field, 2},
+		{"RtMsghdr.Rmx", Field, 0},
+		{"RtMsghdr.Seq", Field, 0},
+		{"RtMsghdr.Tableid", Field, 2},
+		{"RtMsghdr.Type", Field, 0},
+		{"RtMsghdr.Use", Field, 0},
+		{"RtMsghdr.Version", Field, 0},
+		{"RtNexthop", Type, 0},
+		{"RtNexthop.Flags", Field, 0},
+		{"RtNexthop.Hops", Field, 0},
+		{"RtNexthop.Ifindex", Field, 0},
+		{"RtNexthop.Len", Field, 0},
+		{"Rusage", Type, 0},
+		{"Rusage.CreationTime", Field, 0},
+		{"Rusage.ExitTime", Field, 0},
+		{"Rusage.Idrss", Field, 0},
+		{"Rusage.Inblock", Field, 0},
+		{"Rusage.Isrss", Field, 0},
+		{"Rusage.Ixrss", Field, 0},
+		{"Rusage.KernelTime", Field, 0},
+		{"Rusage.Majflt", Field, 0},
+		{"Rusage.Maxrss", Field, 0},
+		{"Rusage.Minflt", Field, 0},
+		{"Rusage.Msgrcv", Field, 0},
+		{"Rusage.Msgsnd", Field, 0},
+		{"Rusage.Nivcsw", Field, 0},
+		{"Rusage.Nsignals", Field, 0},
+		{"Rusage.Nswap", Field, 0},
+		{"Rusage.Nvcsw", Field, 0},
+		{"Rusage.Oublock", Field, 0},
+		{"Rusage.Stime", Field, 0},
+		{"Rusage.UserTime", Field, 0},
+		{"Rusage.Utime", Field, 0},
+		{"SCM_BINTIME", Const, 0},
+		{"SCM_CREDENTIALS", Const, 0},
+		{"SCM_CREDS", Const, 0},
+		{"SCM_RIGHTS", Const, 0},
+		{"SCM_TIMESTAMP", Const, 0},
+		{"SCM_TIMESTAMPING", Const, 0},
+		{"SCM_TIMESTAMPNS", Const, 0},
+		{"SCM_TIMESTAMP_MONOTONIC", Const, 0},
+		{"SHUT_RD", Const, 0},
+		{"SHUT_RDWR", Const, 0},
+		{"SHUT_WR", Const, 0},
+		{"SID", Type, 0},
+		{"SIDAndAttributes", Type, 0},
+		{"SIDAndAttributes.Attributes", Field, 0},
+		{"SIDAndAttributes.Sid", Field, 0},
+		{"SIGABRT", Const, 0},
+		{"SIGALRM", Const, 0},
+		{"SIGBUS", Const, 0},
+		{"SIGCHLD", Const, 0},
+		{"SIGCLD", Const, 0},
+		{"SIGCONT", Const, 0},
+		{"SIGEMT", Const, 0},
+		{"SIGFPE", Const, 0},
+		{"SIGHUP", Const, 0},
+		{"SIGILL", Const, 0},
+		{"SIGINFO", Const, 0},
+		{"SIGINT", Const, 0},
+		{"SIGIO", Const, 0},
+		{"SIGIOT", Const, 0},
+		{"SIGKILL", Const, 0},
+		{"SIGLIBRT", Const, 1},
+		{"SIGLWP", Const, 0},
+		{"SIGPIPE", Const, 0},
+		{"SIGPOLL", Const, 0},
+		{"SIGPROF", Const, 0},
+		{"SIGPWR", Const, 0},
+		{"SIGQUIT", Const, 0},
+		{"SIGSEGV", Const, 0},
+		{"SIGSTKFLT", Const, 0},
+		{"SIGSTOP", Const, 0},
+		{"SIGSYS", Const, 0},
+		{"SIGTERM", Const, 0},
+		{"SIGTHR", Const, 0},
+		{"SIGTRAP", Const, 0},
+		{"SIGTSTP", Const, 0},
+		{"SIGTTIN", Const, 0},
+		{"SIGTTOU", Const, 0},
+		{"SIGUNUSED", Const, 0},
+		{"SIGURG", Const, 0},
+		{"SIGUSR1", Const, 0},
+		{"SIGUSR2", Const, 0},
+		{"SIGVTALRM", Const, 0},
+		{"SIGWINCH", Const, 0},
+		{"SIGXCPU", Const, 0},
+		{"SIGXFSZ", Const, 0},
+		{"SIOCADDDLCI", Const, 0},
+		{"SIOCADDMULTI", Const, 0},
+		{"SIOCADDRT", Const, 0},
+		{"SIOCAIFADDR", Const, 0},
+		{"SIOCAIFGROUP", Const, 0},
+		{"SIOCALIFADDR", Const, 0},
+		{"SIOCARPIPLL", Const, 0},
+		{"SIOCATMARK", Const, 0},
+		{"SIOCAUTOADDR", Const, 0},
+		{"SIOCAUTONETMASK", Const, 0},
+		{"SIOCBRDGADD", Const, 1},
+		{"SIOCBRDGADDS", Const, 1},
+		{"SIOCBRDGARL", Const, 1},
+		{"SIOCBRDGDADDR", Const, 1},
+		{"SIOCBRDGDEL", Const, 1},
+		{"SIOCBRDGDELS", Const, 1},
+		{"SIOCBRDGFLUSH", Const, 1},
+		{"SIOCBRDGFRL", Const, 1},
+		{"SIOCBRDGGCACHE", Const, 1},
+		{"SIOCBRDGGFD", Const, 1},
+		{"SIOCBRDGGHT", Const, 1},
+		{"SIOCBRDGGIFFLGS", Const, 1},
+		{"SIOCBRDGGMA", Const, 1},
+		{"SIOCBRDGGPARAM", Const, 1},
+		{"SIOCBRDGGPRI", Const, 1},
+		{"SIOCBRDGGRL", Const, 1},
+		{"SIOCBRDGGSIFS", Const, 1},
+		{"SIOCBRDGGTO", Const, 1},
+		{"SIOCBRDGIFS", Const, 1},
+		{"SIOCBRDGRTS", Const, 1},
+		{"SIOCBRDGSADDR", Const, 1},
+		{"SIOCBRDGSCACHE", Const, 1},
+		{"SIOCBRDGSFD", Const, 1},
+		{"SIOCBRDGSHT", Const, 1},
+		{"SIOCBRDGSIFCOST", Const, 1},
+		{"SIOCBRDGSIFFLGS", Const, 1},
+		{"SIOCBRDGSIFPRIO", Const, 1},
+		{"SIOCBRDGSMA", Const, 1},
+		{"SIOCBRDGSPRI", Const, 1},
+		{"SIOCBRDGSPROTO", Const, 1},
+		{"SIOCBRDGSTO", Const, 1},
+		{"SIOCBRDGSTXHC", Const, 1},
+		{"SIOCDARP", Const, 0},
+		{"SIOCDELDLCI", Const, 0},
+		{"SIOCDELMULTI", Const, 0},
+		{"SIOCDELRT", Const, 0},
+		{"SIOCDEVPRIVATE", Const, 0},
+		{"SIOCDIFADDR", Const, 0},
+		{"SIOCDIFGROUP", Const, 0},
+		{"SIOCDIFPHYADDR", Const, 0},
+		{"SIOCDLIFADDR", Const, 0},
+		{"SIOCDRARP", Const, 0},
+		{"SIOCGARP", Const, 0},
+		{"SIOCGDRVSPEC", Const, 0},
+		{"SIOCGETKALIVE", Const, 1},
+		{"SIOCGETLABEL", Const, 1},
+		{"SIOCGETPFLOW", Const, 1},
+		{"SIOCGETPFSYNC", Const, 1},
+		{"SIOCGETSGCNT", Const, 0},
+		{"SIOCGETVIFCNT", Const, 0},
+		{"SIOCGETVLAN", Const, 0},
+		{"SIOCGHIWAT", Const, 0},
+		{"SIOCGIFADDR", Const, 0},
+		{"SIOCGIFADDRPREF", Const, 1},
+		{"SIOCGIFALIAS", Const, 1},
+		{"SIOCGIFALTMTU", Const, 0},
+		{"SIOCGIFASYNCMAP", Const, 0},
+		{"SIOCGIFBOND", Const, 0},
+		{"SIOCGIFBR", Const, 0},
+		{"SIOCGIFBRDADDR", Const, 0},
+		{"SIOCGIFCAP", Const, 0},
+		{"SIOCGIFCONF", Const, 0},
+		{"SIOCGIFCOUNT", Const, 0},
+		{"SIOCGIFDATA", Const, 1},
+		{"SIOCGIFDESCR", Const, 0},
+		{"SIOCGIFDEVMTU", Const, 0},
+		{"SIOCGIFDLT", Const, 1},
+		{"SIOCGIFDSTADDR", Const, 0},
+		{"SIOCGIFENCAP", Const, 0},
+		{"SIOCGIFFIB", Const, 1},
+		{"SIOCGIFFLAGS", Const, 0},
+		{"SIOCGIFGATTR", Const, 1},
+		{"SIOCGIFGENERIC", Const, 0},
+		{"SIOCGIFGMEMB", Const, 0},
+		{"SIOCGIFGROUP", Const, 0},
+		{"SIOCGIFHARDMTU", Const, 3},
+		{"SIOCGIFHWADDR", Const, 0},
+		{"SIOCGIFINDEX", Const, 0},
+		{"SIOCGIFKPI", Const, 0},
+		{"SIOCGIFMAC", Const, 0},
+		{"SIOCGIFMAP", Const, 0},
+		{"SIOCGIFMEDIA", Const, 0},
+		{"SIOCGIFMEM", Const, 0},
+		{"SIOCGIFMETRIC", Const, 0},
+		{"SIOCGIFMTU", Const, 0},
+		{"SIOCGIFNAME", Const, 0},
+		{"SIOCGIFNETMASK", Const, 0},
+		{"SIOCGIFPDSTADDR", Const, 0},
+		{"SIOCGIFPFLAGS", Const, 0},
+		{"SIOCGIFPHYS", Const, 0},
+		{"SIOCGIFPRIORITY", Const, 1},
+		{"SIOCGIFPSRCADDR", Const, 0},
+		{"SIOCGIFRDOMAIN", Const, 1},
+		{"SIOCGIFRTLABEL", Const, 1},
+		{"SIOCGIFSLAVE", Const, 0},
+		{"SIOCGIFSTATUS", Const, 0},
+		{"SIOCGIFTIMESLOT", Const, 1},
+		{"SIOCGIFTXQLEN", Const, 0},
+		{"SIOCGIFVLAN", Const, 0},
+		{"SIOCGIFWAKEFLAGS", Const, 0},
+		{"SIOCGIFXFLAGS", Const, 1},
+		{"SIOCGLIFADDR", Const, 0},
+		{"SIOCGLIFPHYADDR", Const, 0},
+		{"SIOCGLIFPHYRTABLE", Const, 1},
+		{"SIOCGLIFPHYTTL", Const, 3},
+		{"SIOCGLINKSTR", Const, 1},
+		{"SIOCGLOWAT", Const, 0},
+		{"SIOCGPGRP", Const, 0},
+		{"SIOCGPRIVATE_0", Const, 0},
+		{"SIOCGPRIVATE_1", Const, 0},
+		{"SIOCGRARP", Const, 0},
+		{"SIOCGSPPPPARAMS", Const, 3},
+		{"SIOCGSTAMP", Const, 0},
+		{"SIOCGSTAMPNS", Const, 0},
+		{"SIOCGVH", Const, 1},
+		{"SIOCGVNETID", Const, 3},
+		{"SIOCIFCREATE", Const, 0},
+		{"SIOCIFCREATE2", Const, 0},
+		{"SIOCIFDESTROY", Const, 0},
+		{"SIOCIFGCLONERS", Const, 0},
+		{"SIOCINITIFADDR", Const, 1},
+		{"SIOCPROTOPRIVATE", Const, 0},
+		{"SIOCRSLVMULTI", Const, 0},
+		{"SIOCRTMSG", Const, 0},
+		{"SIOCSARP", Const, 0},
+		{"SIOCSDRVSPEC", Const, 0},
+		{"SIOCSETKALIVE", Const, 1},
+		{"SIOCSETLABEL", Const, 1},
+		{"SIOCSETPFLOW", Const, 1},
+		{"SIOCSETPFSYNC", Const, 1},
+		{"SIOCSETVLAN", Const, 0},
+		{"SIOCSHIWAT", Const, 0},
+		{"SIOCSIFADDR", Const, 0},
+		{"SIOCSIFADDRPREF", Const, 1},
+		{"SIOCSIFALTMTU", Const, 0},
+		{"SIOCSIFASYNCMAP", Const, 0},
+		{"SIOCSIFBOND", Const, 0},
+		{"SIOCSIFBR", Const, 0},
+		{"SIOCSIFBRDADDR", Const, 0},
+		{"SIOCSIFCAP", Const, 0},
+		{"SIOCSIFDESCR", Const, 0},
+		{"SIOCSIFDSTADDR", Const, 0},
+		{"SIOCSIFENCAP", Const, 0},
+		{"SIOCSIFFIB", Const, 1},
+		{"SIOCSIFFLAGS", Const, 0},
+		{"SIOCSIFGATTR", Const, 1},
+		{"SIOCSIFGENERIC", Const, 0},
+		{"SIOCSIFHWADDR", Const, 0},
+		{"SIOCSIFHWBROADCAST", Const, 0},
+		{"SIOCSIFKPI", Const, 0},
+		{"SIOCSIFLINK", Const, 0},
+		{"SIOCSIFLLADDR", Const, 0},
+		{"SIOCSIFMAC", Const, 0},
+		{"SIOCSIFMAP", Const, 0},
+		{"SIOCSIFMEDIA", Const, 0},
+		{"SIOCSIFMEM", Const, 0},
+		{"SIOCSIFMETRIC", Const, 0},
+		{"SIOCSIFMTU", Const, 0},
+		{"SIOCSIFNAME", Const, 0},
+		{"SIOCSIFNETMASK", Const, 0},
+		{"SIOCSIFPFLAGS", Const, 0},
+		{"SIOCSIFPHYADDR", Const, 0},
+		{"SIOCSIFPHYS", Const, 0},
+		{"SIOCSIFPRIORITY", Const, 1},
+		{"SIOCSIFRDOMAIN", Const, 1},
+		{"SIOCSIFRTLABEL", Const, 1},
+		{"SIOCSIFRVNET", Const, 0},
+		{"SIOCSIFSLAVE", Const, 0},
+		{"SIOCSIFTIMESLOT", Const, 1},
+		{"SIOCSIFTXQLEN", Const, 0},
+		{"SIOCSIFVLAN", Const, 0},
+		{"SIOCSIFVNET", Const, 0},
+		{"SIOCSIFXFLAGS", Const, 1},
+		{"SIOCSLIFPHYADDR", Const, 0},
+		{"SIOCSLIFPHYRTABLE", Const, 1},
+		{"SIOCSLIFPHYTTL", Const, 3},
+		{"SIOCSLINKSTR", Const, 1},
+		{"SIOCSLOWAT", Const, 0},
+		{"SIOCSPGRP", Const, 0},
+		{"SIOCSRARP", Const, 0},
+		{"SIOCSSPPPPARAMS", Const, 3},
+		{"SIOCSVH", Const, 1},
+		{"SIOCSVNETID", Const, 3},
+		{"SIOCZIFDATA", Const, 1},
+		{"SIO_GET_EXTENSION_FUNCTION_POINTER", Const, 1},
+		{"SIO_GET_INTERFACE_LIST", Const, 0},
+		{"SIO_KEEPALIVE_VALS", Const, 3},
+		{"SIO_UDP_CONNRESET", Const, 4},
+		{"SOCK_CLOEXEC", Const, 0},
+		{"SOCK_DCCP", Const, 0},
+		{"SOCK_DGRAM", Const, 0},
+		{"SOCK_FLAGS_MASK", Const, 1},
+		{"SOCK_MAXADDRLEN", Const, 0},
+		{"SOCK_NONBLOCK", Const, 0},
+		{"SOCK_NOSIGPIPE", Const, 1},
+		{"SOCK_PACKET", Const, 0},
+		{"SOCK_RAW", Const, 0},
+		{"SOCK_RDM", Const, 0},
+		{"SOCK_SEQPACKET", Const, 0},
+		{"SOCK_STREAM", Const, 0},
+		{"SOL_AAL", Const, 0},
+		{"SOL_ATM", Const, 0},
+		{"SOL_DECNET", Const, 0},
+		{"SOL_ICMPV6", Const, 0},
+		{"SOL_IP", Const, 0},
+		{"SOL_IPV6", Const, 0},
+		{"SOL_IRDA", Const, 0},
+		{"SOL_PACKET", Const, 0},
+		{"SOL_RAW", Const, 0},
+		{"SOL_SOCKET", Const, 0},
+		{"SOL_TCP", Const, 0},
+		{"SOL_X25", Const, 0},
+		{"SOMAXCONN", Const, 0},
+		{"SO_ACCEPTCONN", Const, 0},
+		{"SO_ACCEPTFILTER", Const, 0},
+		{"SO_ATTACH_FILTER", Const, 0},
+		{"SO_BINDANY", Const, 1},
+		{"SO_BINDTODEVICE", Const, 0},
+		{"SO_BINTIME", Const, 0},
+		{"SO_BROADCAST", Const, 0},
+		{"SO_BSDCOMPAT", Const, 0},
+		{"SO_DEBUG", Const, 0},
+		{"SO_DETACH_FILTER", Const, 0},
+		{"SO_DOMAIN", Const, 0},
+		{"SO_DONTROUTE", Const, 0},
+		{"SO_DONTTRUNC", Const, 0},
+		{"SO_ERROR", Const, 0},
+		{"SO_KEEPALIVE", Const, 0},
+		{"SO_LABEL", Const, 0},
+		{"SO_LINGER", Const, 0},
+		{"SO_LINGER_SEC", Const, 0},
+		{"SO_LISTENINCQLEN", Const, 0},
+		{"SO_LISTENQLEN", Const, 0},
+		{"SO_LISTENQLIMIT", Const, 0},
+		{"SO_MARK", Const, 0},
+		{"SO_NETPROC", Const, 1},
+		{"SO_NKE", Const, 0},
+		{"SO_NOADDRERR", Const, 0},
+		{"SO_NOHEADER", Const, 1},
+		{"SO_NOSIGPIPE", Const, 0},
+		{"SO_NOTIFYCONFLICT", Const, 0},
+		{"SO_NO_CHECK", Const, 0},
+		{"SO_NO_DDP", Const, 0},
+		{"SO_NO_OFFLOAD", Const, 0},
+		{"SO_NP_EXTENSIONS", Const, 0},
+		{"SO_NREAD", Const, 0},
+		{"SO_NUMRCVPKT", Const, 16},
+		{"SO_NWRITE", Const, 0},
+		{"SO_OOBINLINE", Const, 0},
+		{"SO_OVERFLOWED", Const, 1},
+		{"SO_PASSCRED", Const, 0},
+		{"SO_PASSSEC", Const, 0},
+		{"SO_PEERCRED", Const, 0},
+		{"SO_PEERLABEL", Const, 0},
+		{"SO_PEERNAME", Const, 0},
+		{"SO_PEERSEC", Const, 0},
+		{"SO_PRIORITY", Const, 0},
+		{"SO_PROTOCOL", Const, 0},
+		{"SO_PROTOTYPE", Const, 1},
+		{"SO_RANDOMPORT", Const, 0},
+		{"SO_RCVBUF", Const, 0},
+		{"SO_RCVBUFFORCE", Const, 0},
+		{"SO_RCVLOWAT", Const, 0},
+		{"SO_RCVTIMEO", Const, 0},
+		{"SO_RESTRICTIONS", Const, 0},
+		{"SO_RESTRICT_DENYIN", Const, 0},
+		{"SO_RESTRICT_DENYOUT", Const, 0},
+		{"SO_RESTRICT_DENYSET", Const, 0},
+		{"SO_REUSEADDR", Const, 0},
+		{"SO_REUSEPORT", Const, 0},
+		{"SO_REUSESHAREUID", Const, 0},
+		{"SO_RTABLE", Const, 1},
+		{"SO_RXQ_OVFL", Const, 0},
+		{"SO_SECURITY_AUTHENTICATION", Const, 0},
+		{"SO_SECURITY_ENCRYPTION_NETWORK", Const, 0},
+		{"SO_SECURITY_ENCRYPTION_TRANSPORT", Const, 0},
+		{"SO_SETFIB", Const, 0},
+		{"SO_SNDBUF", Const, 0},
+		{"SO_SNDBUFFORCE", Const, 0},
+		{"SO_SNDLOWAT", Const, 0},
+		{"SO_SNDTIMEO", Const, 0},
+		{"SO_SPLICE", Const, 1},
+		{"SO_TIMESTAMP", Const, 0},
+		{"SO_TIMESTAMPING", Const, 0},
+		{"SO_TIMESTAMPNS", Const, 0},
+		{"SO_TIMESTAMP_MONOTONIC", Const, 0},
+		{"SO_TYPE", Const, 0},
+		{"SO_UPCALLCLOSEWAIT", Const, 0},
+		{"SO_UPDATE_ACCEPT_CONTEXT", Const, 0},
+		{"SO_UPDATE_CONNECT_CONTEXT", Const, 1},
+		{"SO_USELOOPBACK", Const, 0},
+		{"SO_USER_COOKIE", Const, 1},
+		{"SO_VENDOR", Const, 3},
+		{"SO_WANTMORE", Const, 0},
+		{"SO_WANTOOBFLAG", Const, 0},
+		{"SSLExtraCertChainPolicyPara", Type, 0},
+		{"SSLExtraCertChainPolicyPara.AuthType", Field, 0},
+		{"SSLExtraCertChainPolicyPara.Checks", Field, 0},
+		{"SSLExtraCertChainPolicyPara.ServerName", Field, 0},
+		{"SSLExtraCertChainPolicyPara.Size", Field, 0},
+		{"STANDARD_RIGHTS_ALL", Const, 0},
+		{"STANDARD_RIGHTS_EXECUTE", Const, 0},
+		{"STANDARD_RIGHTS_READ", Const, 0},
+		{"STANDARD_RIGHTS_REQUIRED", Const, 0},
+		{"STANDARD_RIGHTS_WRITE", Const, 0},
+		{"STARTF_USESHOWWINDOW", Const, 0},
+		{"STARTF_USESTDHANDLES", Const, 0},
+		{"STD_ERROR_HANDLE", Const, 0},
+		{"STD_INPUT_HANDLE", Const, 0},
+		{"STD_OUTPUT_HANDLE", Const, 0},
+		{"SUBLANG_ENGLISH_US", Const, 0},
+		{"SW_FORCEMINIMIZE", Const, 0},
+		{"SW_HIDE", Const, 0},
+		{"SW_MAXIMIZE", Const, 0},
+		{"SW_MINIMIZE", Const, 0},
+		{"SW_NORMAL", Const, 0},
+		{"SW_RESTORE", Const, 0},
+		{"SW_SHOW", Const, 0},
+		{"SW_SHOWDEFAULT", Const, 0},
+		{"SW_SHOWMAXIMIZED", Const, 0},
+		{"SW_SHOWMINIMIZED", Const, 0},
+		{"SW_SHOWMINNOACTIVE", Const, 0},
+		{"SW_SHOWNA", Const, 0},
+		{"SW_SHOWNOACTIVATE", Const, 0},
+		{"SW_SHOWNORMAL", Const, 0},
+		{"SYMBOLIC_LINK_FLAG_DIRECTORY", Const, 4},
+		{"SYNCHRONIZE", Const, 0},
+		{"SYSCTL_VERSION", Const, 1},
+		{"SYSCTL_VERS_0", Const, 1},
+		{"SYSCTL_VERS_1", Const, 1},
+		{"SYSCTL_VERS_MASK", Const, 1},
+		{"SYS_ABORT2", Const, 0},
+		{"SYS_ACCEPT", Const, 0},
+		{"SYS_ACCEPT4", Const, 0},
+		{"SYS_ACCEPT_NOCANCEL", Const, 0},
+		{"SYS_ACCESS", Const, 0},
+		{"SYS_ACCESS_EXTENDED", Const, 0},
+		{"SYS_ACCT", Const, 0},
+		{"SYS_ADD_KEY", Const, 0},
+		{"SYS_ADD_PROFIL", Const, 0},
+		{"SYS_ADJFREQ", Const, 1},
+		{"SYS_ADJTIME", Const, 0},
+		{"SYS_ADJTIMEX", Const, 0},
+		{"SYS_AFS_SYSCALL", Const, 0},
+		{"SYS_AIO_CANCEL", Const, 0},
+		{"SYS_AIO_ERROR", Const, 0},
+		{"SYS_AIO_FSYNC", Const, 0},
+		{"SYS_AIO_MLOCK", Const, 14},
+		{"SYS_AIO_READ", Const, 0},
+		{"SYS_AIO_RETURN", Const, 0},
+		{"SYS_AIO_SUSPEND", Const, 0},
+		{"SYS_AIO_SUSPEND_NOCANCEL", Const, 0},
+		{"SYS_AIO_WAITCOMPLETE", Const, 14},
+		{"SYS_AIO_WRITE", Const, 0},
+		{"SYS_ALARM", Const, 0},
+		{"SYS_ARCH_PRCTL", Const, 0},
+		{"SYS_ARM_FADVISE64_64", Const, 0},
+		{"SYS_ARM_SYNC_FILE_RANGE", Const, 0},
+		{"SYS_ATGETMSG", Const, 0},
+		{"SYS_ATPGETREQ", Const, 0},
+		{"SYS_ATPGETRSP", Const, 0},
+		{"SYS_ATPSNDREQ", Const, 0},
+		{"SYS_ATPSNDRSP", Const, 0},
+		{"SYS_ATPUTMSG", Const, 0},
+		{"SYS_ATSOCKET", Const, 0},
+		{"SYS_AUDIT", Const, 0},
+		{"SYS_AUDITCTL", Const, 0},
+		{"SYS_AUDITON", Const, 0},
+		{"SYS_AUDIT_SESSION_JOIN", Const, 0},
+		{"SYS_AUDIT_SESSION_PORT", Const, 0},
+		{"SYS_AUDIT_SESSION_SELF", Const, 0},
+		{"SYS_BDFLUSH", Const, 0},
+		{"SYS_BIND", Const, 0},
+		{"SYS_BINDAT", Const, 3},
+		{"SYS_BREAK", Const, 0},
+		{"SYS_BRK", Const, 0},
+		{"SYS_BSDTHREAD_CREATE", Const, 0},
+		{"SYS_BSDTHREAD_REGISTER", Const, 0},
+		{"SYS_BSDTHREAD_TERMINATE", Const, 0},
+		{"SYS_CAPGET", Const, 0},
+		{"SYS_CAPSET", Const, 0},
+		{"SYS_CAP_ENTER", Const, 0},
+		{"SYS_CAP_FCNTLS_GET", Const, 1},
+		{"SYS_CAP_FCNTLS_LIMIT", Const, 1},
+		{"SYS_CAP_GETMODE", Const, 0},
+		{"SYS_CAP_GETRIGHTS", Const, 0},
+		{"SYS_CAP_IOCTLS_GET", Const, 1},
+		{"SYS_CAP_IOCTLS_LIMIT", Const, 1},
+		{"SYS_CAP_NEW", Const, 0},
+		{"SYS_CAP_RIGHTS_GET", Const, 1},
+		{"SYS_CAP_RIGHTS_LIMIT", Const, 1},
+		{"SYS_CHDIR", Const, 0},
+		{"SYS_CHFLAGS", Const, 0},
+		{"SYS_CHFLAGSAT", Const, 3},
+		{"SYS_CHMOD", Const, 0},
+		{"SYS_CHMOD_EXTENDED", Const, 0},
+		{"SYS_CHOWN", Const, 0},
+		{"SYS_CHOWN32", Const, 0},
+		{"SYS_CHROOT", Const, 0},
+		{"SYS_CHUD", Const, 0},
+		{"SYS_CLOCK_ADJTIME", Const, 0},
+		{"SYS_CLOCK_GETCPUCLOCKID2", Const, 1},
+		{"SYS_CLOCK_GETRES", Const, 0},
+		{"SYS_CLOCK_GETTIME", Const, 0},
+		{"SYS_CLOCK_NANOSLEEP", Const, 0},
+		{"SYS_CLOCK_SETTIME", Const, 0},
+		{"SYS_CLONE", Const, 0},
+		{"SYS_CLOSE", Const, 0},
+		{"SYS_CLOSEFROM", Const, 0},
+		{"SYS_CLOSE_NOCANCEL", Const, 0},
+		{"SYS_CONNECT", Const, 0},
+		{"SYS_CONNECTAT", Const, 3},
+		{"SYS_CONNECT_NOCANCEL", Const, 0},
+		{"SYS_COPYFILE", Const, 0},
+		{"SYS_CPUSET", Const, 0},
+		{"SYS_CPUSET_GETAFFINITY", Const, 0},
+		{"SYS_CPUSET_GETID", Const, 0},
+		{"SYS_CPUSET_SETAFFINITY", Const, 0},
+		{"SYS_CPUSET_SETID", Const, 0},
+		{"SYS_CREAT", Const, 0},
+		{"SYS_CREATE_MODULE", Const, 0},
+		{"SYS_CSOPS", Const, 0},
+		{"SYS_CSOPS_AUDITTOKEN", Const, 16},
+		{"SYS_DELETE", Const, 0},
+		{"SYS_DELETE_MODULE", Const, 0},
+		{"SYS_DUP", Const, 0},
+		{"SYS_DUP2", Const, 0},
+		{"SYS_DUP3", Const, 0},
+		{"SYS_EACCESS", Const, 0},
+		{"SYS_EPOLL_CREATE", Const, 0},
+		{"SYS_EPOLL_CREATE1", Const, 0},
+		{"SYS_EPOLL_CTL", Const, 0},
+		{"SYS_EPOLL_CTL_OLD", Const, 0},
+		{"SYS_EPOLL_PWAIT", Const, 0},
+		{"SYS_EPOLL_WAIT", Const, 0},
+		{"SYS_EPOLL_WAIT_OLD", Const, 0},
+		{"SYS_EVENTFD", Const, 0},
+		{"SYS_EVENTFD2", Const, 0},
+		{"SYS_EXCHANGEDATA", Const, 0},
+		{"SYS_EXECVE", Const, 0},
+		{"SYS_EXIT", Const, 0},
+		{"SYS_EXIT_GROUP", Const, 0},
+		{"SYS_EXTATTRCTL", Const, 0},
+		{"SYS_EXTATTR_DELETE_FD", Const, 0},
+		{"SYS_EXTATTR_DELETE_FILE", Const, 0},
+		{"SYS_EXTATTR_DELETE_LINK", Const, 0},
+		{"SYS_EXTATTR_GET_FD", Const, 0},
+		{"SYS_EXTATTR_GET_FILE", Const, 0},
+		{"SYS_EXTATTR_GET_LINK", Const, 0},
+		{"SYS_EXTATTR_LIST_FD", Const, 0},
+		{"SYS_EXTATTR_LIST_FILE", Const, 0},
+		{"SYS_EXTATTR_LIST_LINK", Const, 0},
+		{"SYS_EXTATTR_SET_FD", Const, 0},
+		{"SYS_EXTATTR_SET_FILE", Const, 0},
+		{"SYS_EXTATTR_SET_LINK", Const, 0},
+		{"SYS_FACCESSAT", Const, 0},
+		{"SYS_FADVISE64", Const, 0},
+		{"SYS_FADVISE64_64", Const, 0},
+		{"SYS_FALLOCATE", Const, 0},
+		{"SYS_FANOTIFY_INIT", Const, 0},
+		{"SYS_FANOTIFY_MARK", Const, 0},
+		{"SYS_FCHDIR", Const, 0},
+		{"SYS_FCHFLAGS", Const, 0},
+		{"SYS_FCHMOD", Const, 0},
+		{"SYS_FCHMODAT", Const, 0},
+		{"SYS_FCHMOD_EXTENDED", Const, 0},
+		{"SYS_FCHOWN", Const, 0},
+		{"SYS_FCHOWN32", Const, 0},
+		{"SYS_FCHOWNAT", Const, 0},
+		{"SYS_FCHROOT", Const, 1},
+		{"SYS_FCNTL", Const, 0},
+		{"SYS_FCNTL64", Const, 0},
+		{"SYS_FCNTL_NOCANCEL", Const, 0},
+		{"SYS_FDATASYNC", Const, 0},
+		{"SYS_FEXECVE", Const, 0},
+		{"SYS_FFCLOCK_GETCOUNTER", Const, 0},
+		{"SYS_FFCLOCK_GETESTIMATE", Const, 0},
+		{"SYS_FFCLOCK_SETESTIMATE", Const, 0},
+		{"SYS_FFSCTL", Const, 0},
+		{"SYS_FGETATTRLIST", Const, 0},
+		{"SYS_FGETXATTR", Const, 0},
+		{"SYS_FHOPEN", Const, 0},
+		{"SYS_FHSTAT", Const, 0},
+		{"SYS_FHSTATFS", Const, 0},
+		{"SYS_FILEPORT_MAKEFD", Const, 0},
+		{"SYS_FILEPORT_MAKEPORT", Const, 0},
+		{"SYS_FKTRACE", Const, 1},
+		{"SYS_FLISTXATTR", Const, 0},
+		{"SYS_FLOCK", Const, 0},
+		{"SYS_FORK", Const, 0},
+		{"SYS_FPATHCONF", Const, 0},
+		{"SYS_FREEBSD6_FTRUNCATE", Const, 0},
+		{"SYS_FREEBSD6_LSEEK", Const, 0},
+		{"SYS_FREEBSD6_MMAP", Const, 0},
+		{"SYS_FREEBSD6_PREAD", Const, 0},
+		{"SYS_FREEBSD6_PWRITE", Const, 0},
+		{"SYS_FREEBSD6_TRUNCATE", Const, 0},
+		{"SYS_FREMOVEXATTR", Const, 0},
+		{"SYS_FSCTL", Const, 0},
+		{"SYS_FSETATTRLIST", Const, 0},
+		{"SYS_FSETXATTR", Const, 0},
+		{"SYS_FSGETPATH", Const, 0},
+		{"SYS_FSTAT", Const, 0},
+		{"SYS_FSTAT64", Const, 0},
+		{"SYS_FSTAT64_EXTENDED", Const, 0},
+		{"SYS_FSTATAT", Const, 0},
+		{"SYS_FSTATAT64", Const, 0},
+		{"SYS_FSTATFS", Const, 0},
+		{"SYS_FSTATFS64", Const, 0},
+		{"SYS_FSTATV", Const, 0},
+		{"SYS_FSTATVFS1", Const, 1},
+		{"SYS_FSTAT_EXTENDED", Const, 0},
+		{"SYS_FSYNC", Const, 0},
+		{"SYS_FSYNC_NOCANCEL", Const, 0},
+		{"SYS_FSYNC_RANGE", Const, 1},
+		{"SYS_FTIME", Const, 0},
+		{"SYS_FTRUNCATE", Const, 0},
+		{"SYS_FTRUNCATE64", Const, 0},
+		{"SYS_FUTEX", Const, 0},
+		{"SYS_FUTIMENS", Const, 1},
+		{"SYS_FUTIMES", Const, 0},
+		{"SYS_FUTIMESAT", Const, 0},
+		{"SYS_GETATTRLIST", Const, 0},
+		{"SYS_GETAUDIT", Const, 0},
+		{"SYS_GETAUDIT_ADDR", Const, 0},
+		{"SYS_GETAUID", Const, 0},
+		{"SYS_GETCONTEXT", Const, 0},
+		{"SYS_GETCPU", Const, 0},
+		{"SYS_GETCWD", Const, 0},
+		{"SYS_GETDENTS", Const, 0},
+		{"SYS_GETDENTS64", Const, 0},
+		{"SYS_GETDIRENTRIES", Const, 0},
+		{"SYS_GETDIRENTRIES64", Const, 0},
+		{"SYS_GETDIRENTRIESATTR", Const, 0},
+		{"SYS_GETDTABLECOUNT", Const, 1},
+		{"SYS_GETDTABLESIZE", Const, 0},
+		{"SYS_GETEGID", Const, 0},
+		{"SYS_GETEGID32", Const, 0},
+		{"SYS_GETEUID", Const, 0},
+		{"SYS_GETEUID32", Const, 0},
+		{"SYS_GETFH", Const, 0},
+		{"SYS_GETFSSTAT", Const, 0},
+		{"SYS_GETFSSTAT64", Const, 0},
+		{"SYS_GETGID", Const, 0},
+		{"SYS_GETGID32", Const, 0},
+		{"SYS_GETGROUPS", Const, 0},
+		{"SYS_GETGROUPS32", Const, 0},
+		{"SYS_GETHOSTUUID", Const, 0},
+		{"SYS_GETITIMER", Const, 0},
+		{"SYS_GETLCID", Const, 0},
+		{"SYS_GETLOGIN", Const, 0},
+		{"SYS_GETLOGINCLASS", Const, 0},
+		{"SYS_GETPEERNAME", Const, 0},
+		{"SYS_GETPGID", Const, 0},
+		{"SYS_GETPGRP", Const, 0},
+		{"SYS_GETPID", Const, 0},
+		{"SYS_GETPMSG", Const, 0},
+		{"SYS_GETPPID", Const, 0},
+		{"SYS_GETPRIORITY", Const, 0},
+		{"SYS_GETRESGID", Const, 0},
+		{"SYS_GETRESGID32", Const, 0},
+		{"SYS_GETRESUID", Const, 0},
+		{"SYS_GETRESUID32", Const, 0},
+		{"SYS_GETRLIMIT", Const, 0},
+		{"SYS_GETRTABLE", Const, 1},
+		{"SYS_GETRUSAGE", Const, 0},
+		{"SYS_GETSGROUPS", Const, 0},
+		{"SYS_GETSID", Const, 0},
+		{"SYS_GETSOCKNAME", Const, 0},
+		{"SYS_GETSOCKOPT", Const, 0},
+		{"SYS_GETTHRID", Const, 1},
+		{"SYS_GETTID", Const, 0},
+		{"SYS_GETTIMEOFDAY", Const, 0},
+		{"SYS_GETUID", Const, 0},
+		{"SYS_GETUID32", Const, 0},
+		{"SYS_GETVFSSTAT", Const, 1},
+		{"SYS_GETWGROUPS", Const, 0},
+		{"SYS_GETXATTR", Const, 0},
+		{"SYS_GET_KERNEL_SYMS", Const, 0},
+		{"SYS_GET_MEMPOLICY", Const, 0},
+		{"SYS_GET_ROBUST_LIST", Const, 0},
+		{"SYS_GET_THREAD_AREA", Const, 0},
+		{"SYS_GSSD_SYSCALL", Const, 14},
+		{"SYS_GTTY", Const, 0},
+		{"SYS_IDENTITYSVC", Const, 0},
+		{"SYS_IDLE", Const, 0},
+		{"SYS_INITGROUPS", Const, 0},
+		{"SYS_INIT_MODULE", Const, 0},
+		{"SYS_INOTIFY_ADD_WATCH", Const, 0},
+		{"SYS_INOTIFY_INIT", Const, 0},
+		{"SYS_INOTIFY_INIT1", Const, 0},
+		{"SYS_INOTIFY_RM_WATCH", Const, 0},
+		{"SYS_IOCTL", Const, 0},
+		{"SYS_IOPERM", Const, 0},
+		{"SYS_IOPL", Const, 0},
+		{"SYS_IOPOLICYSYS", Const, 0},
+		{"SYS_IOPRIO_GET", Const, 0},
+		{"SYS_IOPRIO_SET", Const, 0},
+		{"SYS_IO_CANCEL", Const, 0},
+		{"SYS_IO_DESTROY", Const, 0},
+		{"SYS_IO_GETEVENTS", Const, 0},
+		{"SYS_IO_SETUP", Const, 0},
+		{"SYS_IO_SUBMIT", Const, 0},
+		{"SYS_IPC", Const, 0},
+		{"SYS_ISSETUGID", Const, 0},
+		{"SYS_JAIL", Const, 0},
+		{"SYS_JAIL_ATTACH", Const, 0},
+		{"SYS_JAIL_GET", Const, 0},
+		{"SYS_JAIL_REMOVE", Const, 0},
+		{"SYS_JAIL_SET", Const, 0},
+		{"SYS_KAS_INFO", Const, 16},
+		{"SYS_KDEBUG_TRACE", Const, 0},
+		{"SYS_KENV", Const, 0},
+		{"SYS_KEVENT", Const, 0},
+		{"SYS_KEVENT64", Const, 0},
+		{"SYS_KEXEC_LOAD", Const, 0},
+		{"SYS_KEYCTL", Const, 0},
+		{"SYS_KILL", Const, 0},
+		{"SYS_KLDFIND", Const, 0},
+		{"SYS_KLDFIRSTMOD", Const, 0},
+		{"SYS_KLDLOAD", Const, 0},
+		{"SYS_KLDNEXT", Const, 0},
+		{"SYS_KLDSTAT", Const, 0},
+		{"SYS_KLDSYM", Const, 0},
+		{"SYS_KLDUNLOAD", Const, 0},
+		{"SYS_KLDUNLOADF", Const, 0},
+		{"SYS_KMQ_NOTIFY", Const, 14},
+		{"SYS_KMQ_OPEN", Const, 14},
+		{"SYS_KMQ_SETATTR", Const, 14},
+		{"SYS_KMQ_TIMEDRECEIVE", Const, 14},
+		{"SYS_KMQ_TIMEDSEND", Const, 14},
+		{"SYS_KMQ_UNLINK", Const, 14},
+		{"SYS_KQUEUE", Const, 0},
+		{"SYS_KQUEUE1", Const, 1},
+		{"SYS_KSEM_CLOSE", Const, 14},
+		{"SYS_KSEM_DESTROY", Const, 14},
+		{"SYS_KSEM_GETVALUE", Const, 14},
+		{"SYS_KSEM_INIT", Const, 14},
+		{"SYS_KSEM_OPEN", Const, 14},
+		{"SYS_KSEM_POST", Const, 14},
+		{"SYS_KSEM_TIMEDWAIT", Const, 14},
+		{"SYS_KSEM_TRYWAIT", Const, 14},
+		{"SYS_KSEM_UNLINK", Const, 14},
+		{"SYS_KSEM_WAIT", Const, 14},
+		{"SYS_KTIMER_CREATE", Const, 0},
+		{"SYS_KTIMER_DELETE", Const, 0},
+		{"SYS_KTIMER_GETOVERRUN", Const, 0},
+		{"SYS_KTIMER_GETTIME", Const, 0},
+		{"SYS_KTIMER_SETTIME", Const, 0},
+		{"SYS_KTRACE", Const, 0},
+		{"SYS_LCHFLAGS", Const, 0},
+		{"SYS_LCHMOD", Const, 0},
+		{"SYS_LCHOWN", Const, 0},
+		{"SYS_LCHOWN32", Const, 0},
+		{"SYS_LEDGER", Const, 16},
+		{"SYS_LGETFH", Const, 0},
+		{"SYS_LGETXATTR", Const, 0},
+		{"SYS_LINK", Const, 0},
+		{"SYS_LINKAT", Const, 0},
+		{"SYS_LIO_LISTIO", Const, 0},
+		{"SYS_LISTEN", Const, 0},
+		{"SYS_LISTXATTR", Const, 0},
+		{"SYS_LLISTXATTR", Const, 0},
+		{"SYS_LOCK", Const, 0},
+		{"SYS_LOOKUP_DCOOKIE", Const, 0},
+		{"SYS_LPATHCONF", Const, 0},
+		{"SYS_LREMOVEXATTR", Const, 0},
+		{"SYS_LSEEK", Const, 0},
+		{"SYS_LSETXATTR", Const, 0},
+		{"SYS_LSTAT", Const, 0},
+		{"SYS_LSTAT64", Const, 0},
+		{"SYS_LSTAT64_EXTENDED", Const, 0},
+		{"SYS_LSTATV", Const, 0},
+		{"SYS_LSTAT_EXTENDED", Const, 0},
+		{"SYS_LUTIMES", Const, 0},
+		{"SYS_MAC_SYSCALL", Const, 0},
+		{"SYS_MADVISE", Const, 0},
+		{"SYS_MADVISE1", Const, 0},
+		{"SYS_MAXSYSCALL", Const, 0},
+		{"SYS_MBIND", Const, 0},
+		{"SYS_MIGRATE_PAGES", Const, 0},
+		{"SYS_MINCORE", Const, 0},
+		{"SYS_MINHERIT", Const, 0},
+		{"SYS_MKCOMPLEX", Const, 0},
+		{"SYS_MKDIR", Const, 0},
+		{"SYS_MKDIRAT", Const, 0},
+		{"SYS_MKDIR_EXTENDED", Const, 0},
+		{"SYS_MKFIFO", Const, 0},
+		{"SYS_MKFIFOAT", Const, 0},
+		{"SYS_MKFIFO_EXTENDED", Const, 0},
+		{"SYS_MKNOD", Const, 0},
+		{"SYS_MKNODAT", Const, 0},
+		{"SYS_MLOCK", Const, 0},
+		{"SYS_MLOCKALL", Const, 0},
+		{"SYS_MMAP", Const, 0},
+		{"SYS_MMAP2", Const, 0},
+		{"SYS_MODCTL", Const, 1},
+		{"SYS_MODFIND", Const, 0},
+		{"SYS_MODFNEXT", Const, 0},
+		{"SYS_MODIFY_LDT", Const, 0},
+		{"SYS_MODNEXT", Const, 0},
+		{"SYS_MODSTAT", Const, 0},
+		{"SYS_MODWATCH", Const, 0},
+		{"SYS_MOUNT", Const, 0},
+		{"SYS_MOVE_PAGES", Const, 0},
+		{"SYS_MPROTECT", Const, 0},
+		{"SYS_MPX", Const, 0},
+		{"SYS_MQUERY", Const, 1},
+		{"SYS_MQ_GETSETATTR", Const, 0},
+		{"SYS_MQ_NOTIFY", Const, 0},
+		{"SYS_MQ_OPEN", Const, 0},
+		{"SYS_MQ_TIMEDRECEIVE", Const, 0},
+		{"SYS_MQ_TIMEDSEND", Const, 0},
+		{"SYS_MQ_UNLINK", Const, 0},
+		{"SYS_MREMAP", Const, 0},
+		{"SYS_MSGCTL", Const, 0},
+		{"SYS_MSGGET", Const, 0},
+		{"SYS_MSGRCV", Const, 0},
+		{"SYS_MSGRCV_NOCANCEL", Const, 0},
+		{"SYS_MSGSND", Const, 0},
+		{"SYS_MSGSND_NOCANCEL", Const, 0},
+		{"SYS_MSGSYS", Const, 0},
+		{"SYS_MSYNC", Const, 0},
+		{"SYS_MSYNC_NOCANCEL", Const, 0},
+		{"SYS_MUNLOCK", Const, 0},
+		{"SYS_MUNLOCKALL", Const, 0},
+		{"SYS_MUNMAP", Const, 0},
+		{"SYS_NAME_TO_HANDLE_AT", Const, 0},
+		{"SYS_NANOSLEEP", Const, 0},
+		{"SYS_NEWFSTATAT", Const, 0},
+		{"SYS_NFSCLNT", Const, 0},
+		{"SYS_NFSSERVCTL", Const, 0},
+		{"SYS_NFSSVC", Const, 0},
+		{"SYS_NFSTAT", Const, 0},
+		{"SYS_NICE", Const, 0},
+		{"SYS_NLM_SYSCALL", Const, 14},
+		{"SYS_NLSTAT", Const, 0},
+		{"SYS_NMOUNT", Const, 0},
+		{"SYS_NSTAT", Const, 0},
+		{"SYS_NTP_ADJTIME", Const, 0},
+		{"SYS_NTP_GETTIME", Const, 0},
+		{"SYS_NUMA_GETAFFINITY", Const, 14},
+		{"SYS_NUMA_SETAFFINITY", Const, 14},
+		{"SYS_OABI_SYSCALL_BASE", Const, 0},
+		{"SYS_OBREAK", Const, 0},
+		{"SYS_OLDFSTAT", Const, 0},
+		{"SYS_OLDLSTAT", Const, 0},
+		{"SYS_OLDOLDUNAME", Const, 0},
+		{"SYS_OLDSTAT", Const, 0},
+		{"SYS_OLDUNAME", Const, 0},
+		{"SYS_OPEN", Const, 0},
+		{"SYS_OPENAT", Const, 0},
+		{"SYS_OPENBSD_POLL", Const, 0},
+		{"SYS_OPEN_BY_HANDLE_AT", Const, 0},
+		{"SYS_OPEN_DPROTECTED_NP", Const, 16},
+		{"SYS_OPEN_EXTENDED", Const, 0},
+		{"SYS_OPEN_NOCANCEL", Const, 0},
+		{"SYS_OVADVISE", Const, 0},
+		{"SYS_PACCEPT", Const, 1},
+		{"SYS_PATHCONF", Const, 0},
+		{"SYS_PAUSE", Const, 0},
+		{"SYS_PCICONFIG_IOBASE", Const, 0},
+		{"SYS_PCICONFIG_READ", Const, 0},
+		{"SYS_PCICONFIG_WRITE", Const, 0},
+		{"SYS_PDFORK", Const, 0},
+		{"SYS_PDGETPID", Const, 0},
+		{"SYS_PDKILL", Const, 0},
+		{"SYS_PERF_EVENT_OPEN", Const, 0},
+		{"SYS_PERSONALITY", Const, 0},
+		{"SYS_PID_HIBERNATE", Const, 0},
+		{"SYS_PID_RESUME", Const, 0},
+		{"SYS_PID_SHUTDOWN_SOCKETS", Const, 0},
+		{"SYS_PID_SUSPEND", Const, 0},
+		{"SYS_PIPE", Const, 0},
+		{"SYS_PIPE2", Const, 0},
+		{"SYS_PIVOT_ROOT", Const, 0},
+		{"SYS_PMC_CONTROL", Const, 1},
+		{"SYS_PMC_GET_INFO", Const, 1},
+		{"SYS_POLL", Const, 0},
+		{"SYS_POLLTS", Const, 1},
+		{"SYS_POLL_NOCANCEL", Const, 0},
+		{"SYS_POSIX_FADVISE", Const, 0},
+		{"SYS_POSIX_FALLOCATE", Const, 0},
+		{"SYS_POSIX_OPENPT", Const, 0},
+		{"SYS_POSIX_SPAWN", Const, 0},
+		{"SYS_PPOLL", Const, 0},
+		{"SYS_PRCTL", Const, 0},
+		{"SYS_PREAD", Const, 0},
+		{"SYS_PREAD64", Const, 0},
+		{"SYS_PREADV", Const, 0},
+		{"SYS_PREAD_NOCANCEL", Const, 0},
+		{"SYS_PRLIMIT64", Const, 0},
+		{"SYS_PROCCTL", Const, 3},
+		{"SYS_PROCESS_POLICY", Const, 0},
+		{"SYS_PROCESS_VM_READV", Const, 0},
+		{"SYS_PROCESS_VM_WRITEV", Const, 0},
+		{"SYS_PROC_INFO", Const, 0},
+		{"SYS_PROF", Const, 0},
+		{"SYS_PROFIL", Const, 0},
+		{"SYS_PSELECT", Const, 0},
+		{"SYS_PSELECT6", Const, 0},
+		{"SYS_PSET_ASSIGN", Const, 1},
+		{"SYS_PSET_CREATE", Const, 1},
+		{"SYS_PSET_DESTROY", Const, 1},
+		{"SYS_PSYNCH_CVBROAD", Const, 0},
+		{"SYS_PSYNCH_CVCLRPREPOST", Const, 0},
+		{"SYS_PSYNCH_CVSIGNAL", Const, 0},
+		{"SYS_PSYNCH_CVWAIT", Const, 0},
+		{"SYS_PSYNCH_MUTEXDROP", Const, 0},
+		{"SYS_PSYNCH_MUTEXWAIT", Const, 0},
+		{"SYS_PSYNCH_RW_DOWNGRADE", Const, 0},
+		{"SYS_PSYNCH_RW_LONGRDLOCK", Const, 0},
+		{"SYS_PSYNCH_RW_RDLOCK", Const, 0},
+		{"SYS_PSYNCH_RW_UNLOCK", Const, 0},
+		{"SYS_PSYNCH_RW_UNLOCK2", Const, 0},
+		{"SYS_PSYNCH_RW_UPGRADE", Const, 0},
+		{"SYS_PSYNCH_RW_WRLOCK", Const, 0},
+		{"SYS_PSYNCH_RW_YIELDWRLOCK", Const, 0},
+		{"SYS_PTRACE", Const, 0},
+		{"SYS_PUTPMSG", Const, 0},
+		{"SYS_PWRITE", Const, 0},
+		{"SYS_PWRITE64", Const, 0},
+		{"SYS_PWRITEV", Const, 0},
+		{"SYS_PWRITE_NOCANCEL", Const, 0},
+		{"SYS_QUERY_MODULE", Const, 0},
+		{"SYS_QUOTACTL", Const, 0},
+		{"SYS_RASCTL", Const, 1},
+		{"SYS_RCTL_ADD_RULE", Const, 0},
+		{"SYS_RCTL_GET_LIMITS", Const, 0},
+		{"SYS_RCTL_GET_RACCT", Const, 0},
+		{"SYS_RCTL_GET_RULES", Const, 0},
+		{"SYS_RCTL_REMOVE_RULE", Const, 0},
+		{"SYS_READ", Const, 0},
+		{"SYS_READAHEAD", Const, 0},
+		{"SYS_READDIR", Const, 0},
+		{"SYS_READLINK", Const, 0},
+		{"SYS_READLINKAT", Const, 0},
+		{"SYS_READV", Const, 0},
+		{"SYS_READV_NOCANCEL", Const, 0},
+		{"SYS_READ_NOCANCEL", Const, 0},
+		{"SYS_REBOOT", Const, 0},
+		{"SYS_RECV", Const, 0},
+		{"SYS_RECVFROM", Const, 0},
+		{"SYS_RECVFROM_NOCANCEL", Const, 0},
+		{"SYS_RECVMMSG", Const, 0},
+		{"SYS_RECVMSG", Const, 0},
+		{"SYS_RECVMSG_NOCANCEL", Const, 0},
+		{"SYS_REMAP_FILE_PAGES", Const, 0},
+		{"SYS_REMOVEXATTR", Const, 0},
+		{"SYS_RENAME", Const, 0},
+		{"SYS_RENAMEAT", Const, 0},
+		{"SYS_REQUEST_KEY", Const, 0},
+		{"SYS_RESTART_SYSCALL", Const, 0},
+		{"SYS_REVOKE", Const, 0},
+		{"SYS_RFORK", Const, 0},
+		{"SYS_RMDIR", Const, 0},
+		{"SYS_RTPRIO", Const, 0},
+		{"SYS_RTPRIO_THREAD", Const, 0},
+		{"SYS_RT_SIGACTION", Const, 0},
+		{"SYS_RT_SIGPENDING", Const, 0},
+		{"SYS_RT_SIGPROCMASK", Const, 0},
+		{"SYS_RT_SIGQUEUEINFO", Const, 0},
+		{"SYS_RT_SIGRETURN", Const, 0},
+		{"SYS_RT_SIGSUSPEND", Const, 0},
+		{"SYS_RT_SIGTIMEDWAIT", Const, 0},
+		{"SYS_RT_TGSIGQUEUEINFO", Const, 0},
+		{"SYS_SBRK", Const, 0},
+		{"SYS_SCHED_GETAFFINITY", Const, 0},
+		{"SYS_SCHED_GETPARAM", Const, 0},
+		{"SYS_SCHED_GETSCHEDULER", Const, 0},
+		{"SYS_SCHED_GET_PRIORITY_MAX", Const, 0},
+		{"SYS_SCHED_GET_PRIORITY_MIN", Const, 0},
+		{"SYS_SCHED_RR_GET_INTERVAL", Const, 0},
+		{"SYS_SCHED_SETAFFINITY", Const, 0},
+		{"SYS_SCHED_SETPARAM", Const, 0},
+		{"SYS_SCHED_SETSCHEDULER", Const, 0},
+		{"SYS_SCHED_YIELD", Const, 0},
+		{"SYS_SCTP_GENERIC_RECVMSG", Const, 0},
+		{"SYS_SCTP_GENERIC_SENDMSG", Const, 0},
+		{"SYS_SCTP_GENERIC_SENDMSG_IOV", Const, 0},
+		{"SYS_SCTP_PEELOFF", Const, 0},
+		{"SYS_SEARCHFS", Const, 0},
+		{"SYS_SECURITY", Const, 0},
+		{"SYS_SELECT", Const, 0},
+		{"SYS_SELECT_NOCANCEL", Const, 0},
+		{"SYS_SEMCONFIG", Const, 1},
+		{"SYS_SEMCTL", Const, 0},
+		{"SYS_SEMGET", Const, 0},
+		{"SYS_SEMOP", Const, 0},
+		{"SYS_SEMSYS", Const, 0},
+		{"SYS_SEMTIMEDOP", Const, 0},
+		{"SYS_SEM_CLOSE", Const, 0},
+		{"SYS_SEM_DESTROY", Const, 0},
+		{"SYS_SEM_GETVALUE", Const, 0},
+		{"SYS_SEM_INIT", Const, 0},
+		{"SYS_SEM_OPEN", Const, 0},
+		{"SYS_SEM_POST", Const, 0},
+		{"SYS_SEM_TRYWAIT", Const, 0},
+		{"SYS_SEM_UNLINK", Const, 0},
+		{"SYS_SEM_WAIT", Const, 0},
+		{"SYS_SEM_WAIT_NOCANCEL", Const, 0},
+		{"SYS_SEND", Const, 0},
+		{"SYS_SENDFILE", Const, 0},
+		{"SYS_SENDFILE64", Const, 0},
+		{"SYS_SENDMMSG", Const, 0},
+		{"SYS_SENDMSG", Const, 0},
+		{"SYS_SENDMSG_NOCANCEL", Const, 0},
+		{"SYS_SENDTO", Const, 0},
+		{"SYS_SENDTO_NOCANCEL", Const, 0},
+		{"SYS_SETATTRLIST", Const, 0},
+		{"SYS_SETAUDIT", Const, 0},
+		{"SYS_SETAUDIT_ADDR", Const, 0},
+		{"SYS_SETAUID", Const, 0},
+		{"SYS_SETCONTEXT", Const, 0},
+		{"SYS_SETDOMAINNAME", Const, 0},
+		{"SYS_SETEGID", Const, 0},
+		{"SYS_SETEUID", Const, 0},
+		{"SYS_SETFIB", Const, 0},
+		{"SYS_SETFSGID", Const, 0},
+		{"SYS_SETFSGID32", Const, 0},
+		{"SYS_SETFSUID", Const, 0},
+		{"SYS_SETFSUID32", Const, 0},
+		{"SYS_SETGID", Const, 0},
+		{"SYS_SETGID32", Const, 0},
+		{"SYS_SETGROUPS", Const, 0},
+		{"SYS_SETGROUPS32", Const, 0},
+		{"SYS_SETHOSTNAME", Const, 0},
+		{"SYS_SETITIMER", Const, 0},
+		{"SYS_SETLCID", Const, 0},
+		{"SYS_SETLOGIN", Const, 0},
+		{"SYS_SETLOGINCLASS", Const, 0},
+		{"SYS_SETNS", Const, 0},
+		{"SYS_SETPGID", Const, 0},
+		{"SYS_SETPRIORITY", Const, 0},
+		{"SYS_SETPRIVEXEC", Const, 0},
+		{"SYS_SETREGID", Const, 0},
+		{"SYS_SETREGID32", Const, 0},
+		{"SYS_SETRESGID", Const, 0},
+		{"SYS_SETRESGID32", Const, 0},
+		{"SYS_SETRESUID", Const, 0},
+		{"SYS_SETRESUID32", Const, 0},
+		{"SYS_SETREUID", Const, 0},
+		{"SYS_SETREUID32", Const, 0},
+		{"SYS_SETRLIMIT", Const, 0},
+		{"SYS_SETRTABLE", Const, 1},
+		{"SYS_SETSGROUPS", Const, 0},
+		{"SYS_SETSID", Const, 0},
+		{"SYS_SETSOCKOPT", Const, 0},
+		{"SYS_SETTID", Const, 0},
+		{"SYS_SETTID_WITH_PID", Const, 0},
+		{"SYS_SETTIMEOFDAY", Const, 0},
+		{"SYS_SETUID", Const, 0},
+		{"SYS_SETUID32", Const, 0},
+		{"SYS_SETWGROUPS", Const, 0},
+		{"SYS_SETXATTR", Const, 0},
+		{"SYS_SET_MEMPOLICY", Const, 0},
+		{"SYS_SET_ROBUST_LIST", Const, 0},
+		{"SYS_SET_THREAD_AREA", Const, 0},
+		{"SYS_SET_TID_ADDRESS", Const, 0},
+		{"SYS_SGETMASK", Const, 0},
+		{"SYS_SHARED_REGION_CHECK_NP", Const, 0},
+		{"SYS_SHARED_REGION_MAP_AND_SLIDE_NP", Const, 0},
+		{"SYS_SHMAT", Const, 0},
+		{"SYS_SHMCTL", Const, 0},
+		{"SYS_SHMDT", Const, 0},
+		{"SYS_SHMGET", Const, 0},
+		{"SYS_SHMSYS", Const, 0},
+		{"SYS_SHM_OPEN", Const, 0},
+		{"SYS_SHM_UNLINK", Const, 0},
+		{"SYS_SHUTDOWN", Const, 0},
+		{"SYS_SIGACTION", Const, 0},
+		{"SYS_SIGALTSTACK", Const, 0},
+		{"SYS_SIGNAL", Const, 0},
+		{"SYS_SIGNALFD", Const, 0},
+		{"SYS_SIGNALFD4", Const, 0},
+		{"SYS_SIGPENDING", Const, 0},
+		{"SYS_SIGPROCMASK", Const, 0},
+		{"SYS_SIGQUEUE", Const, 0},
+		{"SYS_SIGQUEUEINFO", Const, 1},
+		{"SYS_SIGRETURN", Const, 0},
+		{"SYS_SIGSUSPEND", Const, 0},
+		{"SYS_SIGSUSPEND_NOCANCEL", Const, 0},
+		{"SYS_SIGTIMEDWAIT", Const, 0},
+		{"SYS_SIGWAIT", Const, 0},
+		{"SYS_SIGWAITINFO", Const, 0},
+		{"SYS_SOCKET", Const, 0},
+		{"SYS_SOCKETCALL", Const, 0},
+		{"SYS_SOCKETPAIR", Const, 0},
+		{"SYS_SPLICE", Const, 0},
+		{"SYS_SSETMASK", Const, 0},
+		{"SYS_SSTK", Const, 0},
+		{"SYS_STACK_SNAPSHOT", Const, 0},
+		{"SYS_STAT", Const, 0},
+		{"SYS_STAT64", Const, 0},
+		{"SYS_STAT64_EXTENDED", Const, 0},
+		{"SYS_STATFS", Const, 0},
+		{"SYS_STATFS64", Const, 0},
+		{"SYS_STATV", Const, 0},
+		{"SYS_STATVFS1", Const, 1},
+		{"SYS_STAT_EXTENDED", Const, 0},
+		{"SYS_STIME", Const, 0},
+		{"SYS_STTY", Const, 0},
+		{"SYS_SWAPCONTEXT", Const, 0},
+		{"SYS_SWAPCTL", Const, 1},
+		{"SYS_SWAPOFF", Const, 0},
+		{"SYS_SWAPON", Const, 0},
+		{"SYS_SYMLINK", Const, 0},
+		{"SYS_SYMLINKAT", Const, 0},
+		{"SYS_SYNC", Const, 0},
+		{"SYS_SYNCFS", Const, 0},
+		{"SYS_SYNC_FILE_RANGE", Const, 0},
+		{"SYS_SYSARCH", Const, 0},
+		{"SYS_SYSCALL", Const, 0},
+		{"SYS_SYSCALL_BASE", Const, 0},
+		{"SYS_SYSFS", Const, 0},
+		{"SYS_SYSINFO", Const, 0},
+		{"SYS_SYSLOG", Const, 0},
+		{"SYS_TEE", Const, 0},
+		{"SYS_TGKILL", Const, 0},
+		{"SYS_THREAD_SELFID", Const, 0},
+		{"SYS_THR_CREATE", Const, 0},
+		{"SYS_THR_EXIT", Const, 0},
+		{"SYS_THR_KILL", Const, 0},
+		{"SYS_THR_KILL2", Const, 0},
+		{"SYS_THR_NEW", Const, 0},
+		{"SYS_THR_SELF", Const, 0},
+		{"SYS_THR_SET_NAME", Const, 0},
+		{"SYS_THR_SUSPEND", Const, 0},
+		{"SYS_THR_WAKE", Const, 0},
+		{"SYS_TIME", Const, 0},
+		{"SYS_TIMERFD_CREATE", Const, 0},
+		{"SYS_TIMERFD_GETTIME", Const, 0},
+		{"SYS_TIMERFD_SETTIME", Const, 0},
+		{"SYS_TIMER_CREATE", Const, 0},
+		{"SYS_TIMER_DELETE", Const, 0},
+		{"SYS_TIMER_GETOVERRUN", Const, 0},
+		{"SYS_TIMER_GETTIME", Const, 0},
+		{"SYS_TIMER_SETTIME", Const, 0},
+		{"SYS_TIMES", Const, 0},
+		{"SYS_TKILL", Const, 0},
+		{"SYS_TRUNCATE", Const, 0},
+		{"SYS_TRUNCATE64", Const, 0},
+		{"SYS_TUXCALL", Const, 0},
+		{"SYS_UGETRLIMIT", Const, 0},
+		{"SYS_ULIMIT", Const, 0},
+		{"SYS_UMASK", Const, 0},
+		{"SYS_UMASK_EXTENDED", Const, 0},
+		{"SYS_UMOUNT", Const, 0},
+		{"SYS_UMOUNT2", Const, 0},
+		{"SYS_UNAME", Const, 0},
+		{"SYS_UNDELETE", Const, 0},
+		{"SYS_UNLINK", Const, 0},
+		{"SYS_UNLINKAT", Const, 0},
+		{"SYS_UNMOUNT", Const, 0},
+		{"SYS_UNSHARE", Const, 0},
+		{"SYS_USELIB", Const, 0},
+		{"SYS_USTAT", Const, 0},
+		{"SYS_UTIME", Const, 0},
+		{"SYS_UTIMENSAT", Const, 0},
+		{"SYS_UTIMES", Const, 0},
+		{"SYS_UTRACE", Const, 0},
+		{"SYS_UUIDGEN", Const, 0},
+		{"SYS_VADVISE", Const, 1},
+		{"SYS_VFORK", Const, 0},
+		{"SYS_VHANGUP", Const, 0},
+		{"SYS_VM86", Const, 0},
+		{"SYS_VM86OLD", Const, 0},
+		{"SYS_VMSPLICE", Const, 0},
+		{"SYS_VM_PRESSURE_MONITOR", Const, 0},
+		{"SYS_VSERVER", Const, 0},
+		{"SYS_WAIT4", Const, 0},
+		{"SYS_WAIT4_NOCANCEL", Const, 0},
+		{"SYS_WAIT6", Const, 1},
+		{"SYS_WAITEVENT", Const, 0},
+		{"SYS_WAITID", Const, 0},
+		{"SYS_WAITID_NOCANCEL", Const, 0},
+		{"SYS_WAITPID", Const, 0},
+		{"SYS_WATCHEVENT", Const, 0},
+		{"SYS_WORKQ_KERNRETURN", Const, 0},
+		{"SYS_WORKQ_OPEN", Const, 0},
+		{"SYS_WRITE", Const, 0},
+		{"SYS_WRITEV", Const, 0},
+		{"SYS_WRITEV_NOCANCEL", Const, 0},
+		{"SYS_WRITE_NOCANCEL", Const, 0},
+		{"SYS_YIELD", Const, 0},
+		{"SYS__LLSEEK", Const, 0},
+		{"SYS__LWP_CONTINUE", Const, 1},
+		{"SYS__LWP_CREATE", Const, 1},
+		{"SYS__LWP_CTL", Const, 1},
+		{"SYS__LWP_DETACH", Const, 1},
+		{"SYS__LWP_EXIT", Const, 1},
+		{"SYS__LWP_GETNAME", Const, 1},
+		{"SYS__LWP_GETPRIVATE", Const, 1},
+		{"SYS__LWP_KILL", Const, 1},
+		{"SYS__LWP_PARK", Const, 1},
+		{"SYS__LWP_SELF", Const, 1},
+		{"SYS__LWP_SETNAME", Const, 1},
+		{"SYS__LWP_SETPRIVATE", Const, 1},
+		{"SYS__LWP_SUSPEND", Const, 1},
+		{"SYS__LWP_UNPARK", Const, 1},
+		{"SYS__LWP_UNPARK_ALL", Const, 1},
+		{"SYS__LWP_WAIT", Const, 1},
+		{"SYS__LWP_WAKEUP", Const, 1},
+		{"SYS__NEWSELECT", Const, 0},
+		{"SYS__PSET_BIND", Const, 1},
+		{"SYS__SCHED_GETAFFINITY", Const, 1},
+		{"SYS__SCHED_GETPARAM", Const, 1},
+		{"SYS__SCHED_SETAFFINITY", Const, 1},
+		{"SYS__SCHED_SETPARAM", Const, 1},
+		{"SYS__SYSCTL", Const, 0},
+		{"SYS__UMTX_LOCK", Const, 0},
+		{"SYS__UMTX_OP", Const, 0},
+		{"SYS__UMTX_UNLOCK", Const, 0},
+		{"SYS___ACL_ACLCHECK_FD", Const, 0},
+		{"SYS___ACL_ACLCHECK_FILE", Const, 0},
+		{"SYS___ACL_ACLCHECK_LINK", Const, 0},
+		{"SYS___ACL_DELETE_FD", Const, 0},
+		{"SYS___ACL_DELETE_FILE", Const, 0},
+		{"SYS___ACL_DELETE_LINK", Const, 0},
+		{"SYS___ACL_GET_FD", Const, 0},
+		{"SYS___ACL_GET_FILE", Const, 0},
+		{"SYS___ACL_GET_LINK", Const, 0},
+		{"SYS___ACL_SET_FD", Const, 0},
+		{"SYS___ACL_SET_FILE", Const, 0},
+		{"SYS___ACL_SET_LINK", Const, 0},
+		{"SYS___CAP_RIGHTS_GET", Const, 14},
+		{"SYS___CLONE", Const, 1},
+		{"SYS___DISABLE_THREADSIGNAL", Const, 0},
+		{"SYS___GETCWD", Const, 0},
+		{"SYS___GETLOGIN", Const, 1},
+		{"SYS___GET_TCB", Const, 1},
+		{"SYS___MAC_EXECVE", Const, 0},
+		{"SYS___MAC_GETFSSTAT", Const, 0},
+		{"SYS___MAC_GET_FD", Const, 0},
+		{"SYS___MAC_GET_FILE", Const, 0},
+		{"SYS___MAC_GET_LCID", Const, 0},
+		{"SYS___MAC_GET_LCTX", Const, 0},
+		{"SYS___MAC_GET_LINK", Const, 0},
+		{"SYS___MAC_GET_MOUNT", Const, 0},
+		{"SYS___MAC_GET_PID", Const, 0},
+		{"SYS___MAC_GET_PROC", Const, 0},
+		{"SYS___MAC_MOUNT", Const, 0},
+		{"SYS___MAC_SET_FD", Const, 0},
+		{"SYS___MAC_SET_FILE", Const, 0},
+		{"SYS___MAC_SET_LCTX", Const, 0},
+		{"SYS___MAC_SET_LINK", Const, 0},
+		{"SYS___MAC_SET_PROC", Const, 0},
+		{"SYS___MAC_SYSCALL", Const, 0},
+		{"SYS___OLD_SEMWAIT_SIGNAL", Const, 0},
+		{"SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL", Const, 0},
+		{"SYS___POSIX_CHOWN", Const, 1},
+		{"SYS___POSIX_FCHOWN", Const, 1},
+		{"SYS___POSIX_LCHOWN", Const, 1},
+		{"SYS___POSIX_RENAME", Const, 1},
+		{"SYS___PTHREAD_CANCELED", Const, 0},
+		{"SYS___PTHREAD_CHDIR", Const, 0},
+		{"SYS___PTHREAD_FCHDIR", Const, 0},
+		{"SYS___PTHREAD_KILL", Const, 0},
+		{"SYS___PTHREAD_MARKCANCEL", Const, 0},
+		{"SYS___PTHREAD_SIGMASK", Const, 0},
+		{"SYS___QUOTACTL", Const, 1},
+		{"SYS___SEMCTL", Const, 1},
+		{"SYS___SEMWAIT_SIGNAL", Const, 0},
+		{"SYS___SEMWAIT_SIGNAL_NOCANCEL", Const, 0},
+		{"SYS___SETLOGIN", Const, 1},
+		{"SYS___SETUGID", Const, 0},
+		{"SYS___SET_TCB", Const, 1},
+		{"SYS___SIGACTION_SIGTRAMP", Const, 1},
+		{"SYS___SIGTIMEDWAIT", Const, 1},
+		{"SYS___SIGWAIT", Const, 0},
+		{"SYS___SIGWAIT_NOCANCEL", Const, 0},
+		{"SYS___SYSCTL", Const, 0},
+		{"SYS___TFORK", Const, 1},
+		{"SYS___THREXIT", Const, 1},
+		{"SYS___THRSIGDIVERT", Const, 1},
+		{"SYS___THRSLEEP", Const, 1},
+		{"SYS___THRWAKEUP", Const, 1},
+		{"S_ARCH1", Const, 1},
+		{"S_ARCH2", Const, 1},
+		{"S_BLKSIZE", Const, 0},
+		{"S_IEXEC", Const, 0},
+		{"S_IFBLK", Const, 0},
+		{"S_IFCHR", Const, 0},
+		{"S_IFDIR", Const, 0},
+		{"S_IFIFO", Const, 0},
+		{"S_IFLNK", Const, 0},
+		{"S_IFMT", Const, 0},
+		{"S_IFREG", Const, 0},
+		{"S_IFSOCK", Const, 0},
+		{"S_IFWHT", Const, 0},
+		{"S_IREAD", Const, 0},
+		{"S_IRGRP", Const, 0},
+		{"S_IROTH", Const, 0},
+		{"S_IRUSR", Const, 0},
+		{"S_IRWXG", Const, 0},
+		{"S_IRWXO", Const, 0},
+		{"S_IRWXU", Const, 0},
+		{"S_ISGID", Const, 0},
+		{"S_ISTXT", Const, 0},
+		{"S_ISUID", Const, 0},
+		{"S_ISVTX", Const, 0},
+		{"S_IWGRP", Const, 0},
+		{"S_IWOTH", Const, 0},
+		{"S_IWRITE", Const, 0},
+		{"S_IWUSR", Const, 0},
+		{"S_IXGRP", Const, 0},
+		{"S_IXOTH", Const, 0},
+		{"S_IXUSR", Const, 0},
+		{"S_LOGIN_SET", Const, 1},
+		{"SecurityAttributes", Type, 0},
+		{"SecurityAttributes.InheritHandle", Field, 0},
+		{"SecurityAttributes.Length", Field, 0},
+		{"SecurityAttributes.SecurityDescriptor", Field, 0},
+		{"Seek", Func, 0},
+		{"Select", Func, 0},
+		{"Sendfile", Func, 0},
+		{"Sendmsg", Func, 0},
+		{"SendmsgN", Func, 3},
+		{"Sendto", Func, 0},
+		{"Servent", Type, 0},
+		{"Servent.Aliases", Field, 0},
+		{"Servent.Name", Field, 0},
+		{"Servent.Port", Field, 0},
+		{"Servent.Proto", Field, 0},
+		{"SetBpf", Func, 0},
+		{"SetBpfBuflen", Func, 0},
+		{"SetBpfDatalink", Func, 0},
+		{"SetBpfHeadercmpl", Func, 0},
+		{"SetBpfImmediate", Func, 0},
+		{"SetBpfInterface", Func, 0},
+		{"SetBpfPromisc", Func, 0},
+		{"SetBpfTimeout", Func, 0},
+		{"SetCurrentDirectory", Func, 0},
+		{"SetEndOfFile", Func, 0},
+		{"SetEnvironmentVariable", Func, 0},
+		{"SetFileAttributes", Func, 0},
+		{"SetFileCompletionNotificationModes", Func, 2},
+		{"SetFilePointer", Func, 0},
+		{"SetFileTime", Func, 0},
+		{"SetHandleInformation", Func, 0},
+		{"SetKevent", Func, 0},
+		{"SetLsfPromisc", Func, 0},
+		{"SetNonblock", Func, 0},
+		{"Setdomainname", Func, 0},
+		{"Setegid", Func, 0},
+		{"Setenv", Func, 0},
+		{"Seteuid", Func, 0},
+		{"Setfsgid", Func, 0},
+		{"Setfsuid", Func, 0},
+		{"Setgid", Func, 0},
+		{"Setgroups", Func, 0},
+		{"Sethostname", Func, 0},
+		{"Setlogin", Func, 0},
+		{"Setpgid", Func, 0},
+		{"Setpriority", Func, 0},
+		{"Setprivexec", Func, 0},
+		{"Setregid", Func, 0},
+		{"Setresgid", Func, 0},
+		{"Setresuid", Func, 0},
+		{"Setreuid", Func, 0},
+		{"Setrlimit", Func, 0},
+		{"Setsid", Func, 0},
+		{"Setsockopt", Func, 0},
+		{"SetsockoptByte", Func, 0},
+		{"SetsockoptICMPv6Filter", Func, 2},
+		{"SetsockoptIPMreq", Func, 0},
+		{"SetsockoptIPMreqn", Func, 0},
+		{"SetsockoptIPv6Mreq", Func, 0},
+		{"SetsockoptInet4Addr", Func, 0},
+		{"SetsockoptInt", Func, 0},
+		{"SetsockoptLinger", Func, 0},
+		{"SetsockoptString", Func, 0},
+		{"SetsockoptTimeval", Func, 0},
+		{"Settimeofday", Func, 0},
+		{"Setuid", Func, 0},
+		{"Setxattr", Func, 1},
+		{"Shutdown", Func, 0},
+		{"SidTypeAlias", Const, 0},
+		{"SidTypeComputer", Const, 0},
+		{"SidTypeDeletedAccount", Const, 0},
+		{"SidTypeDomain", Const, 0},
+		{"SidTypeGroup", Const, 0},
+		{"SidTypeInvalid", Const, 0},
+		{"SidTypeLabel", Const, 0},
+		{"SidTypeUnknown", Const, 0},
+		{"SidTypeUser", Const, 0},
+		{"SidTypeWellKnownGroup", Const, 0},
+		{"Signal", Type, 0},
+		{"SizeofBpfHdr", Const, 0},
+		{"SizeofBpfInsn", Const, 0},
+		{"SizeofBpfProgram", Const, 0},
+		{"SizeofBpfStat", Const, 0},
+		{"SizeofBpfVersion", Const, 0},
+		{"SizeofBpfZbuf", Const, 0},
+		{"SizeofBpfZbufHeader", Const, 0},
+		{"SizeofCmsghdr", Const, 0},
+		{"SizeofICMPv6Filter", Const, 2},
+		{"SizeofIPMreq", Const, 0},
+		{"SizeofIPMreqn", Const, 0},
+		{"SizeofIPv6MTUInfo", Const, 2},
+		{"SizeofIPv6Mreq", Const, 0},
+		{"SizeofIfAddrmsg", Const, 0},
+		{"SizeofIfAnnounceMsghdr", Const, 1},
+		{"SizeofIfData", Const, 0},
+		{"SizeofIfInfomsg", Const, 0},
+		{"SizeofIfMsghdr", Const, 0},
+		{"SizeofIfaMsghdr", Const, 0},
+		{"SizeofIfmaMsghdr", Const, 0},
+		{"SizeofIfmaMsghdr2", Const, 0},
+		{"SizeofInet4Pktinfo", Const, 0},
+		{"SizeofInet6Pktinfo", Const, 0},
+		{"SizeofInotifyEvent", Const, 0},
+		{"SizeofLinger", Const, 0},
+		{"SizeofMsghdr", Const, 0},
+		{"SizeofNlAttr", Const, 0},
+		{"SizeofNlMsgerr", Const, 0},
+		{"SizeofNlMsghdr", Const, 0},
+		{"SizeofRtAttr", Const, 0},
+		{"SizeofRtGenmsg", Const, 0},
+		{"SizeofRtMetrics", Const, 0},
+		{"SizeofRtMsg", Const, 0},
+		{"SizeofRtMsghdr", Const, 0},
+		{"SizeofRtNexthop", Const, 0},
+		{"SizeofSockFilter", Const, 0},
+		{"SizeofSockFprog", Const, 0},
+		{"SizeofSockaddrAny", Const, 0},
+		{"SizeofSockaddrDatalink", Const, 0},
+		{"SizeofSockaddrInet4", Const, 0},
+		{"SizeofSockaddrInet6", Const, 0},
+		{"SizeofSockaddrLinklayer", Const, 0},
+		{"SizeofSockaddrNetlink", Const, 0},
+		{"SizeofSockaddrUnix", Const, 0},
+		{"SizeofTCPInfo", Const, 1},
+		{"SizeofUcred", Const, 0},
+		{"SlicePtrFromStrings", Func, 1},
+		{"SockFilter", Type, 0},
+		{"SockFilter.Code", Field, 0},
+		{"SockFilter.Jf", Field, 0},
+		{"SockFilter.Jt", Field, 0},
+		{"SockFilter.K", Field, 0},
+		{"SockFprog", Type, 0},
+		{"SockFprog.Filter", Field, 0},
+		{"SockFprog.Len", Field, 0},
+		{"SockFprog.Pad_cgo_0", Field, 0},
+		{"Sockaddr", Type, 0},
+		{"SockaddrDatalink", Type, 0},
+		{"SockaddrDatalink.Alen", Field, 0},
+		{"SockaddrDatalink.Data", Field, 0},
+		{"SockaddrDatalink.Family", Field, 0},
+		{"SockaddrDatalink.Index", Field, 0},
+		{"SockaddrDatalink.Len", Field, 0},
+		{"SockaddrDatalink.Nlen", Field, 0},
+		{"SockaddrDatalink.Slen", Field, 0},
+		{"SockaddrDatalink.Type", Field, 0},
+		{"SockaddrGen", Type, 0},
+		{"SockaddrInet4", Type, 0},
+		{"SockaddrInet4.Addr", Field, 0},
+		{"SockaddrInet4.Port", Field, 0},
+		{"SockaddrInet6", Type, 0},
+		{"SockaddrInet6.Addr", Field, 0},
+		{"SockaddrInet6.Port", Field, 0},
+		{"SockaddrInet6.ZoneId", Field, 0},
+		{"SockaddrLinklayer", Type, 0},
+		{"SockaddrLinklayer.Addr", Field, 0},
+		{"SockaddrLinklayer.Halen", Field, 0},
+		{"SockaddrLinklayer.Hatype", Field, 0},
+		{"SockaddrLinklayer.Ifindex", Field, 0},
+		{"SockaddrLinklayer.Pkttype", Field, 0},
+		{"SockaddrLinklayer.Protocol", Field, 0},
+		{"SockaddrNetlink", Type, 0},
+		{"SockaddrNetlink.Family", Field, 0},
+		{"SockaddrNetlink.Groups", Field, 0},
+		{"SockaddrNetlink.Pad", Field, 0},
+		{"SockaddrNetlink.Pid", Field, 0},
+		{"SockaddrUnix", Type, 0},
+		{"SockaddrUnix.Name", Field, 0},
+		{"Socket", Func, 0},
+		{"SocketControlMessage", Type, 0},
+		{"SocketControlMessage.Data", Field, 0},
+		{"SocketControlMessage.Header", Field, 0},
+		{"SocketDisableIPv6", Var, 0},
+		{"Socketpair", Func, 0},
+		{"Splice", Func, 0},
+		{"StartProcess", Func, 0},
+		{"StartupInfo", Type, 0},
+		{"StartupInfo.Cb", Field, 0},
+		{"StartupInfo.Desktop", Field, 0},
+		{"StartupInfo.FillAttribute", Field, 0},
+		{"StartupInfo.Flags", Field, 0},
+		{"StartupInfo.ShowWindow", Field, 0},
+		{"StartupInfo.StdErr", Field, 0},
+		{"StartupInfo.StdInput", Field, 0},
+		{"StartupInfo.StdOutput", Field, 0},
+		{"StartupInfo.Title", Field, 0},
+		{"StartupInfo.X", Field, 0},
+		{"StartupInfo.XCountChars", Field, 0},
+		{"StartupInfo.XSize", Field, 0},
+		{"StartupInfo.Y", Field, 0},
+		{"StartupInfo.YCountChars", Field, 0},
+		{"StartupInfo.YSize", Field, 0},
+		{"Stat", Func, 0},
+		{"Stat_t", Type, 0},
+		{"Stat_t.Atim", Field, 0},
+		{"Stat_t.Atim_ext", Field, 12},
+		{"Stat_t.Atimespec", Field, 0},
+		{"Stat_t.Birthtimespec", Field, 0},
+		{"Stat_t.Blksize", Field, 0},
+		{"Stat_t.Blocks", Field, 0},
+		{"Stat_t.Btim_ext", Field, 12},
+		{"Stat_t.Ctim", Field, 0},
+		{"Stat_t.Ctim_ext", Field, 12},
+		{"Stat_t.Ctimespec", Field, 0},
+		{"Stat_t.Dev", Field, 0},
+		{"Stat_t.Flags", Field, 0},
+		{"Stat_t.Gen", Field, 0},
+		{"Stat_t.Gid", Field, 0},
+		{"Stat_t.Ino", Field, 0},
+		{"Stat_t.Lspare", Field, 0},
+		{"Stat_t.Lspare0", Field, 2},
+		{"Stat_t.Lspare1", Field, 2},
+		{"Stat_t.Mode", Field, 0},
+		{"Stat_t.Mtim", Field, 0},
+		{"Stat_t.Mtim_ext", Field, 12},
+		{"Stat_t.Mtimespec", Field, 0},
+		{"Stat_t.Nlink", Field, 0},
+		{"Stat_t.Pad_cgo_0", Field, 0},
+		{"Stat_t.Pad_cgo_1", Field, 0},
+		{"Stat_t.Pad_cgo_2", Field, 0},
+		{"Stat_t.Padding0", Field, 12},
+		{"Stat_t.Padding1", Field, 12},
+		{"Stat_t.Qspare", Field, 0},
+		{"Stat_t.Rdev", Field, 0},
+		{"Stat_t.Size", Field, 0},
+		{"Stat_t.Spare", Field, 2},
+		{"Stat_t.Uid", Field, 0},
+		{"Stat_t.X__pad0", Field, 0},
+		{"Stat_t.X__pad1", Field, 0},
+		{"Stat_t.X__pad2", Field, 0},
+		{"Stat_t.X__st_birthtim", Field, 2},
+		{"Stat_t.X__st_ino", Field, 0},
+		{"Stat_t.X__unused", Field, 0},
+		{"Statfs", Func, 0},
+		{"Statfs_t", Type, 0},
+		{"Statfs_t.Asyncreads", Field, 0},
+		{"Statfs_t.Asyncwrites", Field, 0},
+		{"Statfs_t.Bavail", Field, 0},
+		{"Statfs_t.Bfree", Field, 0},
+		{"Statfs_t.Blocks", Field, 0},
+		{"Statfs_t.Bsize", Field, 0},
+		{"Statfs_t.Charspare", Field, 0},
+		{"Statfs_t.F_asyncreads", Field, 2},
+		{"Statfs_t.F_asyncwrites", Field, 2},
+		{"Statfs_t.F_bavail", Field, 2},
+		{"Statfs_t.F_bfree", Field, 2},
+		{"Statfs_t.F_blocks", Field, 2},
+		{"Statfs_t.F_bsize", Field, 2},
+		{"Statfs_t.F_ctime", Field, 2},
+		{"Statfs_t.F_favail", Field, 2},
+		{"Statfs_t.F_ffree", Field, 2},
+		{"Statfs_t.F_files", Field, 2},
+		{"Statfs_t.F_flags", Field, 2},
+		{"Statfs_t.F_fsid", Field, 2},
+		{"Statfs_t.F_fstypename", Field, 2},
+		{"Statfs_t.F_iosize", Field, 2},
+		{"Statfs_t.F_mntfromname", Field, 2},
+		{"Statfs_t.F_mntfromspec", Field, 3},
+		{"Statfs_t.F_mntonname", Field, 2},
+		{"Statfs_t.F_namemax", Field, 2},
+		{"Statfs_t.F_owner", Field, 2},
+		{"Statfs_t.F_spare", Field, 2},
+		{"Statfs_t.F_syncreads", Field, 2},
+		{"Statfs_t.F_syncwrites", Field, 2},
+		{"Statfs_t.Ffree", Field, 0},
+		{"Statfs_t.Files", Field, 0},
+		{"Statfs_t.Flags", Field, 0},
+		{"Statfs_t.Frsize", Field, 0},
+		{"Statfs_t.Fsid", Field, 0},
+		{"Statfs_t.Fssubtype", Field, 0},
+		{"Statfs_t.Fstypename", Field, 0},
+		{"Statfs_t.Iosize", Field, 0},
+		{"Statfs_t.Mntfromname", Field, 0},
+		{"Statfs_t.Mntonname", Field, 0},
+		{"Statfs_t.Mount_info", Field, 2},
+		{"Statfs_t.Namelen", Field, 0},
+		{"Statfs_t.Namemax", Field, 0},
+		{"Statfs_t.Owner", Field, 0},
+		{"Statfs_t.Pad_cgo_0", Field, 0},
+		{"Statfs_t.Pad_cgo_1", Field, 2},
+		{"Statfs_t.Reserved", Field, 0},
+		{"Statfs_t.Spare", Field, 0},
+		{"Statfs_t.Syncreads", Field, 0},
+		{"Statfs_t.Syncwrites", Field, 0},
+		{"Statfs_t.Type", Field, 0},
+		{"Statfs_t.Version", Field, 0},
+		{"Stderr", Var, 0},
+		{"Stdin", Var, 0},
+		{"Stdout", Var, 0},
+		{"StringBytePtr", Func, 0},
+		{"StringByteSlice", Func, 0},
+		{"StringSlicePtr", Func, 0},
+		{"StringToSid", Func, 0},
+		{"StringToUTF16", Func, 0},
+		{"StringToUTF16Ptr", Func, 0},
+		{"Symlink", Func, 0},
+		{"Sync", Func, 0},
+		{"SyncFileRange", Func, 0},
+		{"SysProcAttr", Type, 0},
+		{"SysProcAttr.AdditionalInheritedHandles", Field, 17},
+		{"SysProcAttr.AmbientCaps", Field, 9},
+		{"SysProcAttr.CgroupFD", Field, 20},
+		{"SysProcAttr.Chroot", Field, 0},
+		{"SysProcAttr.Cloneflags", Field, 2},
+		{"SysProcAttr.CmdLine", Field, 0},
+		{"SysProcAttr.CreationFlags", Field, 1},
+		{"SysProcAttr.Credential", Field, 0},
+		{"SysProcAttr.Ctty", Field, 1},
+		{"SysProcAttr.Foreground", Field, 5},
+		{"SysProcAttr.GidMappings", Field, 4},
+		{"SysProcAttr.GidMappingsEnableSetgroups", Field, 5},
+		{"SysProcAttr.HideWindow", Field, 0},
+		{"SysProcAttr.Jail", Field, 21},
+		{"SysProcAttr.NoInheritHandles", Field, 16},
+		{"SysProcAttr.Noctty", Field, 0},
+		{"SysProcAttr.ParentProcess", Field, 17},
+		{"SysProcAttr.Pdeathsig", Field, 0},
+		{"SysProcAttr.Pgid", Field, 5},
+		{"SysProcAttr.PidFD", Field, 22},
+		{"SysProcAttr.ProcessAttributes", Field, 13},
+		{"SysProcAttr.Ptrace", Field, 0},
+		{"SysProcAttr.Setctty", Field, 0},
+		{"SysProcAttr.Setpgid", Field, 0},
+		{"SysProcAttr.Setsid", Field, 0},
+		{"SysProcAttr.ThreadAttributes", Field, 13},
+		{"SysProcAttr.Token", Field, 10},
+		{"SysProcAttr.UidMappings", Field, 4},
+		{"SysProcAttr.Unshareflags", Field, 7},
+		{"SysProcAttr.UseCgroupFD", Field, 20},
+		{"SysProcIDMap", Type, 4},
+		{"SysProcIDMap.ContainerID", Field, 4},
+		{"SysProcIDMap.HostID", Field, 4},
+		{"SysProcIDMap.Size", Field, 4},
+		{"Syscall", Func, 0},
+		{"Syscall12", Func, 0},
+		{"Syscall15", Func, 0},
+		{"Syscall18", Func, 12},
+		{"Syscall6", Func, 0},
+		{"Syscall9", Func, 0},
+		{"SyscallN", Func, 18},
+		{"Sysctl", Func, 0},
+		{"SysctlUint32", Func, 0},
+		{"Sysctlnode", Type, 2},
+		{"Sysctlnode.Flags", Field, 2},
+		{"Sysctlnode.Name", Field, 2},
+		{"Sysctlnode.Num", Field, 2},
+		{"Sysctlnode.Un", Field, 2},
+		{"Sysctlnode.Ver", Field, 2},
+		{"Sysctlnode.X__rsvd", Field, 2},
+		{"Sysctlnode.X_sysctl_desc", Field, 2},
+		{"Sysctlnode.X_sysctl_func", Field, 2},
+		{"Sysctlnode.X_sysctl_parent", Field, 2},
+		{"Sysctlnode.X_sysctl_size", Field, 2},
+		{"Sysinfo", Func, 0},
+		{"Sysinfo_t", Type, 0},
+		{"Sysinfo_t.Bufferram", Field, 0},
+		{"Sysinfo_t.Freehigh", Field, 0},
+		{"Sysinfo_t.Freeram", Field, 0},
+		{"Sysinfo_t.Freeswap", Field, 0},
+		{"Sysinfo_t.Loads", Field, 0},
+		{"Sysinfo_t.Pad", Field, 0},
+		{"Sysinfo_t.Pad_cgo_0", Field, 0},
+		{"Sysinfo_t.Pad_cgo_1", Field, 0},
+		{"Sysinfo_t.Procs", Field, 0},
+		{"Sysinfo_t.Sharedram", Field, 0},
+		{"Sysinfo_t.Totalhigh", Field, 0},
+		{"Sysinfo_t.Totalram", Field, 0},
+		{"Sysinfo_t.Totalswap", Field, 0},
+		{"Sysinfo_t.Unit", Field, 0},
+		{"Sysinfo_t.Uptime", Field, 0},
+		{"Sysinfo_t.X_f", Field, 0},
+		{"Systemtime", Type, 0},
+		{"Systemtime.Day", Field, 0},
+		{"Systemtime.DayOfWeek", Field, 0},
+		{"Systemtime.Hour", Field, 0},
+		{"Systemtime.Milliseconds", Field, 0},
+		{"Systemtime.Minute", Field, 0},
+		{"Systemtime.Month", Field, 0},
+		{"Systemtime.Second", Field, 0},
+		{"Systemtime.Year", Field, 0},
+		{"TCGETS", Const, 0},
+		{"TCIFLUSH", Const, 1},
+		{"TCIOFLUSH", Const, 1},
+		{"TCOFLUSH", Const, 1},
+		{"TCPInfo", Type, 1},
+		{"TCPInfo.Advmss", Field, 1},
+		{"TCPInfo.Ato", Field, 1},
+		{"TCPInfo.Backoff", Field, 1},
+		{"TCPInfo.Ca_state", Field, 1},
+		{"TCPInfo.Fackets", Field, 1},
+		{"TCPInfo.Last_ack_recv", Field, 1},
+		{"TCPInfo.Last_ack_sent", Field, 1},
+		{"TCPInfo.Last_data_recv", Field, 1},
+		{"TCPInfo.Last_data_sent", Field, 1},
+		{"TCPInfo.Lost", Field, 1},
+		{"TCPInfo.Options", Field, 1},
+		{"TCPInfo.Pad_cgo_0", Field, 1},
+		{"TCPInfo.Pmtu", Field, 1},
+		{"TCPInfo.Probes", Field, 1},
+		{"TCPInfo.Rcv_mss", Field, 1},
+		{"TCPInfo.Rcv_rtt", Field, 1},
+		{"TCPInfo.Rcv_space", Field, 1},
+		{"TCPInfo.Rcv_ssthresh", Field, 1},
+		{"TCPInfo.Reordering", Field, 1},
+		{"TCPInfo.Retrans", Field, 1},
+		{"TCPInfo.Retransmits", Field, 1},
+		{"TCPInfo.Rto", Field, 1},
+		{"TCPInfo.Rtt", Field, 1},
+		{"TCPInfo.Rttvar", Field, 1},
+		{"TCPInfo.Sacked", Field, 1},
+		{"TCPInfo.Snd_cwnd", Field, 1},
+		{"TCPInfo.Snd_mss", Field, 1},
+		{"TCPInfo.Snd_ssthresh", Field, 1},
+		{"TCPInfo.State", Field, 1},
+		{"TCPInfo.Total_retrans", Field, 1},
+		{"TCPInfo.Unacked", Field, 1},
+		{"TCPKeepalive", Type, 3},
+		{"TCPKeepalive.Interval", Field, 3},
+		{"TCPKeepalive.OnOff", Field, 3},
+		{"TCPKeepalive.Time", Field, 3},
+		{"TCP_CA_NAME_MAX", Const, 0},
+		{"TCP_CONGCTL", Const, 1},
+		{"TCP_CONGESTION", Const, 0},
+		{"TCP_CONNECTIONTIMEOUT", Const, 0},
+		{"TCP_CORK", Const, 0},
+		{"TCP_DEFER_ACCEPT", Const, 0},
+		{"TCP_ENABLE_ECN", Const, 16},
+		{"TCP_INFO", Const, 0},
+		{"TCP_KEEPALIVE", Const, 0},
+		{"TCP_KEEPCNT", Const, 0},
+		{"TCP_KEEPIDLE", Const, 0},
+		{"TCP_KEEPINIT", Const, 1},
+		{"TCP_KEEPINTVL", Const, 0},
+		{"TCP_LINGER2", Const, 0},
+		{"TCP_MAXBURST", Const, 0},
+		{"TCP_MAXHLEN", Const, 0},
+		{"TCP_MAXOLEN", Const, 0},
+		{"TCP_MAXSEG", Const, 0},
+		{"TCP_MAXWIN", Const, 0},
+		{"TCP_MAX_SACK", Const, 0},
+		{"TCP_MAX_WINSHIFT", Const, 0},
+		{"TCP_MD5SIG", Const, 0},
+		{"TCP_MD5SIG_MAXKEYLEN", Const, 0},
+		{"TCP_MINMSS", Const, 0},
+		{"TCP_MINMSSOVERLOAD", Const, 0},
+		{"TCP_MSS", Const, 0},
+		{"TCP_NODELAY", Const, 0},
+		{"TCP_NOOPT", Const, 0},
+		{"TCP_NOPUSH", Const, 0},
+		{"TCP_NOTSENT_LOWAT", Const, 16},
+		{"TCP_NSTATES", Const, 1},
+		{"TCP_QUICKACK", Const, 0},
+		{"TCP_RXT_CONNDROPTIME", Const, 0},
+		{"TCP_RXT_FINDROP", Const, 0},
+		{"TCP_SACK_ENABLE", Const, 1},
+		{"TCP_SENDMOREACKS", Const, 16},
+		{"TCP_SYNCNT", Const, 0},
+		{"TCP_VENDOR", Const, 3},
+		{"TCP_WINDOW_CLAMP", Const, 0},
+		{"TCSAFLUSH", Const, 1},
+		{"TCSETS", Const, 0},
+		{"TF_DISCONNECT", Const, 0},
+		{"TF_REUSE_SOCKET", Const, 0},
+		{"TF_USE_DEFAULT_WORKER", Const, 0},
+		{"TF_USE_KERNEL_APC", Const, 0},
+		{"TF_USE_SYSTEM_THREAD", Const, 0},
+		{"TF_WRITE_BEHIND", Const, 0},
+		{"TH32CS_INHERIT", Const, 4},
+		{"TH32CS_SNAPALL", Const, 4},
+		{"TH32CS_SNAPHEAPLIST", Const, 4},
+		{"TH32CS_SNAPMODULE", Const, 4},
+		{"TH32CS_SNAPMODULE32", Const, 4},
+		{"TH32CS_SNAPPROCESS", Const, 4},
+		{"TH32CS_SNAPTHREAD", Const, 4},
+		{"TIME_ZONE_ID_DAYLIGHT", Const, 0},
+		{"TIME_ZONE_ID_STANDARD", Const, 0},
+		{"TIME_ZONE_ID_UNKNOWN", Const, 0},
+		{"TIOCCBRK", Const, 0},
+		{"TIOCCDTR", Const, 0},
+		{"TIOCCONS", Const, 0},
+		{"TIOCDCDTIMESTAMP", Const, 0},
+		{"TIOCDRAIN", Const, 0},
+		{"TIOCDSIMICROCODE", Const, 0},
+		{"TIOCEXCL", Const, 0},
+		{"TIOCEXT", Const, 0},
+		{"TIOCFLAG_CDTRCTS", Const, 1},
+		{"TIOCFLAG_CLOCAL", Const, 1},
+		{"TIOCFLAG_CRTSCTS", Const, 1},
+		{"TIOCFLAG_MDMBUF", Const, 1},
+		{"TIOCFLAG_PPS", Const, 1},
+		{"TIOCFLAG_SOFTCAR", Const, 1},
+		{"TIOCFLUSH", Const, 0},
+		{"TIOCGDEV", Const, 0},
+		{"TIOCGDRAINWAIT", Const, 0},
+		{"TIOCGETA", Const, 0},
+		{"TIOCGETD", Const, 0},
+		{"TIOCGFLAGS", Const, 1},
+		{"TIOCGICOUNT", Const, 0},
+		{"TIOCGLCKTRMIOS", Const, 0},
+		{"TIOCGLINED", Const, 1},
+		{"TIOCGPGRP", Const, 0},
+		{"TIOCGPTN", Const, 0},
+		{"TIOCGQSIZE", Const, 1},
+		{"TIOCGRANTPT", Const, 1},
+		{"TIOCGRS485", Const, 0},
+		{"TIOCGSERIAL", Const, 0},
+		{"TIOCGSID", Const, 0},
+		{"TIOCGSIZE", Const, 1},
+		{"TIOCGSOFTCAR", Const, 0},
+		{"TIOCGTSTAMP", Const, 1},
+		{"TIOCGWINSZ", Const, 0},
+		{"TIOCINQ", Const, 0},
+		{"TIOCIXOFF", Const, 0},
+		{"TIOCIXON", Const, 0},
+		{"TIOCLINUX", Const, 0},
+		{"TIOCMBIC", Const, 0},
+		{"TIOCMBIS", Const, 0},
+		{"TIOCMGDTRWAIT", Const, 0},
+		{"TIOCMGET", Const, 0},
+		{"TIOCMIWAIT", Const, 0},
+		{"TIOCMODG", Const, 0},
+		{"TIOCMODS", Const, 0},
+		{"TIOCMSDTRWAIT", Const, 0},
+		{"TIOCMSET", Const, 0},
+		{"TIOCM_CAR", Const, 0},
+		{"TIOCM_CD", Const, 0},
+		{"TIOCM_CTS", Const, 0},
+		{"TIOCM_DCD", Const, 0},
+		{"TIOCM_DSR", Const, 0},
+		{"TIOCM_DTR", Const, 0},
+		{"TIOCM_LE", Const, 0},
+		{"TIOCM_RI", Const, 0},
+		{"TIOCM_RNG", Const, 0},
+		{"TIOCM_RTS", Const, 0},
+		{"TIOCM_SR", Const, 0},
+		{"TIOCM_ST", Const, 0},
+		{"TIOCNOTTY", Const, 0},
+		{"TIOCNXCL", Const, 0},
+		{"TIOCOUTQ", Const, 0},
+		{"TIOCPKT", Const, 0},
+		{"TIOCPKT_DATA", Const, 0},
+		{"TIOCPKT_DOSTOP", Const, 0},
+		{"TIOCPKT_FLUSHREAD", Const, 0},
+		{"TIOCPKT_FLUSHWRITE", Const, 0},
+		{"TIOCPKT_IOCTL", Const, 0},
+		{"TIOCPKT_NOSTOP", Const, 0},
+		{"TIOCPKT_START", Const, 0},
+		{"TIOCPKT_STOP", Const, 0},
+		{"TIOCPTMASTER", Const, 0},
+		{"TIOCPTMGET", Const, 1},
+		{"TIOCPTSNAME", Const, 1},
+		{"TIOCPTYGNAME", Const, 0},
+		{"TIOCPTYGRANT", Const, 0},
+		{"TIOCPTYUNLK", Const, 0},
+		{"TIOCRCVFRAME", Const, 1},
+		{"TIOCREMOTE", Const, 0},
+		{"TIOCSBRK", Const, 0},
+		{"TIOCSCONS", Const, 0},
+		{"TIOCSCTTY", Const, 0},
+		{"TIOCSDRAINWAIT", Const, 0},
+		{"TIOCSDTR", Const, 0},
+		{"TIOCSERCONFIG", Const, 0},
+		{"TIOCSERGETLSR", Const, 0},
+		{"TIOCSERGETMULTI", Const, 0},
+		{"TIOCSERGSTRUCT", Const, 0},
+		{"TIOCSERGWILD", Const, 0},
+		{"TIOCSERSETMULTI", Const, 0},
+		{"TIOCSERSWILD", Const, 0},
+		{"TIOCSER_TEMT", Const, 0},
+		{"TIOCSETA", Const, 0},
+		{"TIOCSETAF", Const, 0},
+		{"TIOCSETAW", Const, 0},
+		{"TIOCSETD", Const, 0},
+		{"TIOCSFLAGS", Const, 1},
+		{"TIOCSIG", Const, 0},
+		{"TIOCSLCKTRMIOS", Const, 0},
+		{"TIOCSLINED", Const, 1},
+		{"TIOCSPGRP", Const, 0},
+		{"TIOCSPTLCK", Const, 0},
+		{"TIOCSQSIZE", Const, 1},
+		{"TIOCSRS485", Const, 0},
+		{"TIOCSSERIAL", Const, 0},
+		{"TIOCSSIZE", Const, 1},
+		{"TIOCSSOFTCAR", Const, 0},
+		{"TIOCSTART", Const, 0},
+		{"TIOCSTAT", Const, 0},
+		{"TIOCSTI", Const, 0},
+		{"TIOCSTOP", Const, 0},
+		{"TIOCSTSTAMP", Const, 1},
+		{"TIOCSWINSZ", Const, 0},
+		{"TIOCTIMESTAMP", Const, 0},
+		{"TIOCUCNTL", Const, 0},
+		{"TIOCVHANGUP", Const, 0},
+		{"TIOCXMTFRAME", Const, 1},
+		{"TOKEN_ADJUST_DEFAULT", Const, 0},
+		{"TOKEN_ADJUST_GROUPS", Const, 0},
+		{"TOKEN_ADJUST_PRIVILEGES", Const, 0},
+		{"TOKEN_ADJUST_SESSIONID", Const, 11},
+		{"TOKEN_ALL_ACCESS", Const, 0},
+		{"TOKEN_ASSIGN_PRIMARY", Const, 0},
+		{"TOKEN_DUPLICATE", Const, 0},
+		{"TOKEN_EXECUTE", Const, 0},
+		{"TOKEN_IMPERSONATE", Const, 0},
+		{"TOKEN_QUERY", Const, 0},
+		{"TOKEN_QUERY_SOURCE", Const, 0},
+		{"TOKEN_READ", Const, 0},
+		{"TOKEN_WRITE", Const, 0},
+		{"TOSTOP", Const, 0},
+		{"TRUNCATE_EXISTING", Const, 0},
+		{"TUNATTACHFILTER", Const, 0},
+		{"TUNDETACHFILTER", Const, 0},
+		{"TUNGETFEATURES", Const, 0},
+		{"TUNGETIFF", Const, 0},
+		{"TUNGETSNDBUF", Const, 0},
+		{"TUNGETVNETHDRSZ", Const, 0},
+		{"TUNSETDEBUG", Const, 0},
+		{"TUNSETGROUP", Const, 0},
+		{"TUNSETIFF", Const, 0},
+		{"TUNSETLINK", Const, 0},
+		{"TUNSETNOCSUM", Const, 0},
+		{"TUNSETOFFLOAD", Const, 0},
+		{"TUNSETOWNER", Const, 0},
+		{"TUNSETPERSIST", Const, 0},
+		{"TUNSETSNDBUF", Const, 0},
+		{"TUNSETTXFILTER", Const, 0},
+		{"TUNSETVNETHDRSZ", Const, 0},
+		{"Tee", Func, 0},
+		{"TerminateProcess", Func, 0},
+		{"Termios", Type, 0},
+		{"Termios.Cc", Field, 0},
+		{"Termios.Cflag", Field, 0},
+		{"Termios.Iflag", Field, 0},
+		{"Termios.Ispeed", Field, 0},
+		{"Termios.Lflag", Field, 0},
+		{"Termios.Line", Field, 0},
+		{"Termios.Oflag", Field, 0},
+		{"Termios.Ospeed", Field, 0},
+		{"Termios.Pad_cgo_0", Field, 0},
+		{"Tgkill", Func, 0},
+		{"Time", Func, 0},
+		{"Time_t", Type, 0},
+		{"Times", Func, 0},
+		{"Timespec", Type, 0},
+		{"Timespec.Nsec", Field, 0},
+		{"Timespec.Pad_cgo_0", Field, 2},
+		{"Timespec.Sec", Field, 0},
+		{"TimespecToNsec", Func, 0},
+		{"Timeval", Type, 0},
+		{"Timeval.Pad_cgo_0", Field, 0},
+		{"Timeval.Sec", Field, 0},
+		{"Timeval.Usec", Field, 0},
+		{"Timeval32", Type, 0},
+		{"Timeval32.Sec", Field, 0},
+		{"Timeval32.Usec", Field, 0},
+		{"TimevalToNsec", Func, 0},
+		{"Timex", Type, 0},
+		{"Timex.Calcnt", Field, 0},
+		{"Timex.Constant", Field, 0},
+		{"Timex.Errcnt", Field, 0},
+		{"Timex.Esterror", Field, 0},
+		{"Timex.Freq", Field, 0},
+		{"Timex.Jitcnt", Field, 0},
+		{"Timex.Jitter", Field, 0},
+		{"Timex.Maxerror", Field, 0},
+		{"Timex.Modes", Field, 0},
+		{"Timex.Offset", Field, 0},
+		{"Timex.Pad_cgo_0", Field, 0},
+		{"Timex.Pad_cgo_1", Field, 0},
+		{"Timex.Pad_cgo_2", Field, 0},
+		{"Timex.Pad_cgo_3", Field, 0},
+		{"Timex.Ppsfreq", Field, 0},
+		{"Timex.Precision", Field, 0},
+		{"Timex.Shift", Field, 0},
+		{"Timex.Stabil", Field, 0},
+		{"Timex.Status", Field, 0},
+		{"Timex.Stbcnt", Field, 0},
+		{"Timex.Tai", Field, 0},
+		{"Timex.Tick", Field, 0},
+		{"Timex.Time", Field, 0},
+		{"Timex.Tolerance", Field, 0},
+		{"Timezoneinformation", Type, 0},
+		{"Timezoneinformation.Bias", Field, 0},
+		{"Timezoneinformation.DaylightBias", Field, 0},
+		{"Timezoneinformation.DaylightDate", Field, 0},
+		{"Timezoneinformation.DaylightName", Field, 0},
+		{"Timezoneinformation.StandardBias", Field, 0},
+		{"Timezoneinformation.StandardDate", Field, 0},
+		{"Timezoneinformation.StandardName", Field, 0},
+		{"Tms", Type, 0},
+		{"Tms.Cstime", Field, 0},
+		{"Tms.Cutime", Field, 0},
+		{"Tms.Stime", Field, 0},
+		{"Tms.Utime", Field, 0},
+		{"Token", Type, 0},
+		{"TokenAccessInformation", Const, 0},
+		{"TokenAuditPolicy", Const, 0},
+		{"TokenDefaultDacl", Const, 0},
+		{"TokenElevation", Const, 0},
+		{"TokenElevationType", Const, 0},
+		{"TokenGroups", Const, 0},
+		{"TokenGroupsAndPrivileges", Const, 0},
+		{"TokenHasRestrictions", Const, 0},
+		{"TokenImpersonationLevel", Const, 0},
+		{"TokenIntegrityLevel", Const, 0},
+		{"TokenLinkedToken", Const, 0},
+		{"TokenLogonSid", Const, 0},
+		{"TokenMandatoryPolicy", Const, 0},
+		{"TokenOrigin", Const, 0},
+		{"TokenOwner", Const, 0},
+		{"TokenPrimaryGroup", Const, 0},
+		{"TokenPrivileges", Const, 0},
+		{"TokenRestrictedSids", Const, 0},
+		{"TokenSandBoxInert", Const, 0},
+		{"TokenSessionId", Const, 0},
+		{"TokenSessionReference", Const, 0},
+		{"TokenSource", Const, 0},
+		{"TokenStatistics", Const, 0},
+		{"TokenType", Const, 0},
+		{"TokenUIAccess", Const, 0},
+		{"TokenUser", Const, 0},
+		{"TokenVirtualizationAllowed", Const, 0},
+		{"TokenVirtualizationEnabled", Const, 0},
+		{"Tokenprimarygroup", Type, 0},
+		{"Tokenprimarygroup.PrimaryGroup", Field, 0},
+		{"Tokenuser", Type, 0},
+		{"Tokenuser.User", Field, 0},
+		{"TranslateAccountName", Func, 0},
+		{"TranslateName", Func, 0},
+		{"TransmitFile", Func, 0},
+		{"TransmitFileBuffers", Type, 0},
+		{"TransmitFileBuffers.Head", Field, 0},
+		{"TransmitFileBuffers.HeadLength", Field, 0},
+		{"TransmitFileBuffers.Tail", Field, 0},
+		{"TransmitFileBuffers.TailLength", Field, 0},
+		{"Truncate", Func, 0},
+		{"UNIX_PATH_MAX", Const, 12},
+		{"USAGE_MATCH_TYPE_AND", Const, 0},
+		{"USAGE_MATCH_TYPE_OR", Const, 0},
+		{"UTF16FromString", Func, 1},
+		{"UTF16PtrFromString", Func, 1},
+		{"UTF16ToString", Func, 0},
+		{"Ucred", Type, 0},
+		{"Ucred.Gid", Field, 0},
+		{"Ucred.Pid", Field, 0},
+		{"Ucred.Uid", Field, 0},
+		{"Umask", Func, 0},
+		{"Uname", Func, 0},
+		{"Undelete", Func, 0},
+		{"UnixCredentials", Func, 0},
+		{"UnixRights", Func, 0},
+		{"Unlink", Func, 0},
+		{"Unlinkat", Func, 0},
+		{"UnmapViewOfFile", Func, 0},
+		{"Unmount", Func, 0},
+		{"Unsetenv", Func, 4},
+		{"Unshare", Func, 0},
+		{"UserInfo10", Type, 0},
+		{"UserInfo10.Comment", Field, 0},
+		{"UserInfo10.FullName", Field, 0},
+		{"UserInfo10.Name", Field, 0},
+		{"UserInfo10.UsrComment", Field, 0},
+		{"Ustat", Func, 0},
+		{"Ustat_t", Type, 0},
+		{"Ustat_t.Fname", Field, 0},
+		{"Ustat_t.Fpack", Field, 0},
+		{"Ustat_t.Pad_cgo_0", Field, 0},
+		{"Ustat_t.Pad_cgo_1", Field, 0},
+		{"Ustat_t.Tfree", Field, 0},
+		{"Ustat_t.Tinode", Field, 0},
+		{"Utimbuf", Type, 0},
+		{"Utimbuf.Actime", Field, 0},
+		{"Utimbuf.Modtime", Field, 0},
+		{"Utime", Func, 0},
+		{"Utimes", Func, 0},
+		{"UtimesNano", Func, 1},
+		{"Utsname", Type, 0},
+		{"Utsname.Domainname", Field, 0},
+		{"Utsname.Machine", Field, 0},
+		{"Utsname.Nodename", Field, 0},
+		{"Utsname.Release", Field, 0},
+		{"Utsname.Sysname", Field, 0},
+		{"Utsname.Version", Field, 0},
+		{"VDISCARD", Const, 0},
+		{"VDSUSP", Const, 1},
+		{"VEOF", Const, 0},
+		{"VEOL", Const, 0},
+		{"VEOL2", Const, 0},
+		{"VERASE", Const, 0},
+		{"VERASE2", Const, 1},
+		{"VINTR", Const, 0},
+		{"VKILL", Const, 0},
+		{"VLNEXT", Const, 0},
+		{"VMIN", Const, 0},
+		{"VQUIT", Const, 0},
+		{"VREPRINT", Const, 0},
+		{"VSTART", Const, 0},
+		{"VSTATUS", Const, 1},
+		{"VSTOP", Const, 0},
+		{"VSUSP", Const, 0},
+		{"VSWTC", Const, 0},
+		{"VT0", Const, 1},
+		{"VT1", Const, 1},
+		{"VTDLY", Const, 1},
+		{"VTIME", Const, 0},
+		{"VWERASE", Const, 0},
+		{"VirtualLock", Func, 0},
+		{"VirtualUnlock", Func, 0},
+		{"WAIT_ABANDONED", Const, 0},
+		{"WAIT_FAILED", Const, 0},
+		{"WAIT_OBJECT_0", Const, 0},
+		{"WAIT_TIMEOUT", Const, 0},
+		{"WALL", Const, 0},
+		{"WALLSIG", Const, 1},
+		{"WALTSIG", Const, 1},
+		{"WCLONE", Const, 0},
+		{"WCONTINUED", Const, 0},
+		{"WCOREFLAG", Const, 0},
+		{"WEXITED", Const, 0},
+		{"WLINUXCLONE", Const, 0},
+		{"WNOHANG", Const, 0},
+		{"WNOTHREAD", Const, 0},
+		{"WNOWAIT", Const, 0},
+		{"WNOZOMBIE", Const, 1},
+		{"WOPTSCHECKED", Const, 1},
+		{"WORDSIZE", Const, 0},
+		{"WSABuf", Type, 0},
+		{"WSABuf.Buf", Field, 0},
+		{"WSABuf.Len", Field, 0},
+		{"WSACleanup", Func, 0},
+		{"WSADESCRIPTION_LEN", Const, 0},
+		{"WSAData", Type, 0},
+		{"WSAData.Description", Field, 0},
+		{"WSAData.HighVersion", Field, 0},
+		{"WSAData.MaxSockets", Field, 0},
+		{"WSAData.MaxUdpDg", Field, 0},
+		{"WSAData.SystemStatus", Field, 0},
+		{"WSAData.VendorInfo", Field, 0},
+		{"WSAData.Version", Field, 0},
+		{"WSAEACCES", Const, 2},
+		{"WSAECONNABORTED", Const, 9},
+		{"WSAECONNRESET", Const, 3},
+		{"WSAEnumProtocols", Func, 2},
+		{"WSAID_CONNECTEX", Var, 1},
+		{"WSAIoctl", Func, 0},
+		{"WSAPROTOCOL_LEN", Const, 2},
+		{"WSAProtocolChain", Type, 2},
+		{"WSAProtocolChain.ChainEntries", Field, 2},
+		{"WSAProtocolChain.ChainLen", Field, 2},
+		{"WSAProtocolInfo", Type, 2},
+		{"WSAProtocolInfo.AddressFamily", Field, 2},
+		{"WSAProtocolInfo.CatalogEntryId", Field, 2},
+		{"WSAProtocolInfo.MaxSockAddr", Field, 2},
+		{"WSAProtocolInfo.MessageSize", Field, 2},
+		{"WSAProtocolInfo.MinSockAddr", Field, 2},
+		{"WSAProtocolInfo.NetworkByteOrder", Field, 2},
+		{"WSAProtocolInfo.Protocol", Field, 2},
+		{"WSAProtocolInfo.ProtocolChain", Field, 2},
+		{"WSAProtocolInfo.ProtocolMaxOffset", Field, 2},
+		{"WSAProtocolInfo.ProtocolName", Field, 2},
+		{"WSAProtocolInfo.ProviderFlags", Field, 2},
+		{"WSAProtocolInfo.ProviderId", Field, 2},
+		{"WSAProtocolInfo.ProviderReserved", Field, 2},
+		{"WSAProtocolInfo.SecurityScheme", Field, 2},
+		{"WSAProtocolInfo.ServiceFlags1", Field, 2},
+		{"WSAProtocolInfo.ServiceFlags2", Field, 2},
+		{"WSAProtocolInfo.ServiceFlags3", Field, 2},
+		{"WSAProtocolInfo.ServiceFlags4", Field, 2},
+		{"WSAProtocolInfo.SocketType", Field, 2},
+		{"WSAProtocolInfo.Version", Field, 2},
+		{"WSARecv", Func, 0},
+		{"WSARecvFrom", Func, 0},
+		{"WSASYS_STATUS_LEN", Const, 0},
+		{"WSASend", Func, 0},
+		{"WSASendTo", Func, 0},
+		{"WSASendto", Func, 0},
+		{"WSAStartup", Func, 0},
+		{"WSTOPPED", Const, 0},
+		{"WTRAPPED", Const, 1},
+		{"WUNTRACED", Const, 0},
+		{"Wait4", Func, 0},
+		{"WaitForSingleObject", Func, 0},
+		{"WaitStatus", Type, 0},
+		{"WaitStatus.ExitCode", Field, 0},
+		{"Win32FileAttributeData", Type, 0},
+		{"Win32FileAttributeData.CreationTime", Field, 0},
+		{"Win32FileAttributeData.FileAttributes", Field, 0},
+		{"Win32FileAttributeData.FileSizeHigh", Field, 0},
+		{"Win32FileAttributeData.FileSizeLow", Field, 0},
+		{"Win32FileAttributeData.LastAccessTime", Field, 0},
+		{"Win32FileAttributeData.LastWriteTime", Field, 0},
+		{"Win32finddata", Type, 0},
+		{"Win32finddata.AlternateFileName", Field, 0},
+		{"Win32finddata.CreationTime", Field, 0},
+		{"Win32finddata.FileAttributes", Field, 0},
+		{"Win32finddata.FileName", Field, 0},
+		{"Win32finddata.FileSizeHigh", Field, 0},
+		{"Win32finddata.FileSizeLow", Field, 0},
+		{"Win32finddata.LastAccessTime", Field, 0},
+		{"Win32finddata.LastWriteTime", Field, 0},
+		{"Win32finddata.Reserved0", Field, 0},
+		{"Win32finddata.Reserved1", Field, 0},
+		{"Write", Func, 0},
+		{"WriteConsole", Func, 1},
+		{"WriteFile", Func, 0},
+		{"X509_ASN_ENCODING", Const, 0},
+		{"XCASE", Const, 0},
+		{"XP1_CONNECTIONLESS", Const, 2},
+		{"XP1_CONNECT_DATA", Const, 2},
+		{"XP1_DISCONNECT_DATA", Const, 2},
+		{"XP1_EXPEDITED_DATA", Const, 2},
+		{"XP1_GRACEFUL_CLOSE", Const, 2},
+		{"XP1_GUARANTEED_DELIVERY", Const, 2},
+		{"XP1_GUARANTEED_ORDER", Const, 2},
+		{"XP1_IFS_HANDLES", Const, 2},
+		{"XP1_MESSAGE_ORIENTED", Const, 2},
+		{"XP1_MULTIPOINT_CONTROL_PLANE", Const, 2},
+		{"XP1_MULTIPOINT_DATA_PLANE", Const, 2},
+		{"XP1_PARTIAL_MESSAGE", Const, 2},
+		{"XP1_PSEUDO_STREAM", Const, 2},
+		{"XP1_QOS_SUPPORTED", Const, 2},
+		{"XP1_SAN_SUPPORT_SDP", Const, 2},
+		{"XP1_SUPPORT_BROADCAST", Const, 2},
+		{"XP1_SUPPORT_MULTIPOINT", Const, 2},
+		{"XP1_UNI_RECV", Const, 2},
+		{"XP1_UNI_SEND", Const, 2},
+	},
+	"syscall/js": {
+		{"CopyBytesToGo", Func, 0},
+		{"CopyBytesToJS", Func, 0},
+		{"Error", Type, 0},
+		{"Func", Type, 0},
+		{"FuncOf", Func, 0},
+		{"Global", Func, 0},
+		{"Null", Func, 0},
+		{"Type", Type, 0},
+		{"TypeBoolean", Const, 0},
+		{"TypeFunction", Const, 0},
+		{"TypeNull", Const, 0},
+		{"TypeNumber", Const, 0},
+		{"TypeObject", Const, 0},
+		{"TypeString", Const, 0},
+		{"TypeSymbol", Const, 0},
+		{"TypeUndefined", Const, 0},
+		{"Undefined", Func, 0},
+		{"Value", Type, 0},
+		{"ValueError", Type, 0},
+		{"ValueOf", Func, 0},
+	},
+	"testing": {
+		{"(*B).Cleanup", Method, 14},
+		{"(*B).Elapsed", Method, 20},
+		{"(*B).Error", Method, 0},
+		{"(*B).Errorf", Method, 0},
+		{"(*B).Fail", Method, 0},
+		{"(*B).FailNow", Method, 0},
+		{"(*B).Failed", Method, 0},
+		{"(*B).Fatal", Method, 0},
+		{"(*B).Fatalf", Method, 0},
+		{"(*B).Helper", Method, 9},
+		{"(*B).Log", Method, 0},
+		{"(*B).Logf", Method, 0},
+		{"(*B).Name", Method, 8},
+		{"(*B).ReportAllocs", Method, 1},
+		{"(*B).ReportMetric", Method, 13},
+		{"(*B).ResetTimer", Method, 0},
+		{"(*B).Run", Method, 7},
+		{"(*B).RunParallel", Method, 3},
+		{"(*B).SetBytes", Method, 0},
+		{"(*B).SetParallelism", Method, 3},
+		{"(*B).Setenv", Method, 17},
+		{"(*B).Skip", Method, 1},
+		{"(*B).SkipNow", Method, 1},
+		{"(*B).Skipf", Method, 1},
+		{"(*B).Skipped", Method, 1},
+		{"(*B).StartTimer", Method, 0},
+		{"(*B).StopTimer", Method, 0},
+		{"(*B).TempDir", Method, 15},
+		{"(*F).Add", Method, 18},
+		{"(*F).Cleanup", Method, 18},
+		{"(*F).Error", Method, 18},
+		{"(*F).Errorf", Method, 18},
+		{"(*F).Fail", Method, 18},
+		{"(*F).FailNow", Method, 18},
+		{"(*F).Failed", Method, 18},
+		{"(*F).Fatal", Method, 18},
+		{"(*F).Fatalf", Method, 18},
+		{"(*F).Fuzz", Method, 18},
+		{"(*F).Helper", Method, 18},
+		{"(*F).Log", Method, 18},
+		{"(*F).Logf", Method, 18},
+		{"(*F).Name", Method, 18},
+		{"(*F).Setenv", Method, 18},
+		{"(*F).Skip", Method, 18},
+		{"(*F).SkipNow", Method, 18},
+		{"(*F).Skipf", Method, 18},
+		{"(*F).Skipped", Method, 18},
+		{"(*F).TempDir", Method, 18},
+		{"(*M).Run", Method, 4},
+		{"(*PB).Next", Method, 3},
+		{"(*T).Cleanup", Method, 14},
+		{"(*T).Deadline", Method, 15},
+		{"(*T).Error", Method, 0},
+		{"(*T).Errorf", Method, 0},
+		{"(*T).Fail", Method, 0},
+		{"(*T).FailNow", Method, 0},
+		{"(*T).Failed", Method, 0},
+		{"(*T).Fatal", Method, 0},
+		{"(*T).Fatalf", Method, 0},
+		{"(*T).Helper", Method, 9},
+		{"(*T).Log", Method, 0},
+		{"(*T).Logf", Method, 0},
+		{"(*T).Name", Method, 8},
+		{"(*T).Parallel", Method, 0},
+		{"(*T).Run", Method, 7},
+		{"(*T).Setenv", Method, 17},
+		{"(*T).Skip", Method, 1},
+		{"(*T).SkipNow", Method, 1},
+		{"(*T).Skipf", Method, 1},
+		{"(*T).Skipped", Method, 1},
+		{"(*T).TempDir", Method, 15},
+		{"(BenchmarkResult).AllocedBytesPerOp", Method, 1},
+		{"(BenchmarkResult).AllocsPerOp", Method, 1},
+		{"(BenchmarkResult).MemString", Method, 1},
+		{"(BenchmarkResult).NsPerOp", Method, 0},
+		{"(BenchmarkResult).String", Method, 0},
+		{"AllocsPerRun", Func, 1},
+		{"B", Type, 0},
+		{"B.N", Field, 0},
+		{"Benchmark", Func, 0},
+		{"BenchmarkResult", Type, 0},
+		{"BenchmarkResult.Bytes", Field, 0},
+		{"BenchmarkResult.Extra", Field, 13},
+		{"BenchmarkResult.MemAllocs", Field, 1},
+		{"BenchmarkResult.MemBytes", Field, 1},
+		{"BenchmarkResult.N", Field, 0},
+		{"BenchmarkResult.T", Field, 0},
+		{"Cover", Type, 2},
+		{"Cover.Blocks", Field, 2},
+		{"Cover.Counters", Field, 2},
+		{"Cover.CoveredPackages", Field, 2},
+		{"Cover.Mode", Field, 2},
+		{"CoverBlock", Type, 2},
+		{"CoverBlock.Col0", Field, 2},
+		{"CoverBlock.Col1", Field, 2},
+		{"CoverBlock.Line0", Field, 2},
+		{"CoverBlock.Line1", Field, 2},
+		{"CoverBlock.Stmts", Field, 2},
+		{"CoverMode", Func, 8},
+		{"Coverage", Func, 4},
+		{"F", Type, 18},
+		{"Init", Func, 13},
+		{"InternalBenchmark", Type, 0},
+		{"InternalBenchmark.F", Field, 0},
+		{"InternalBenchmark.Name", Field, 0},
+		{"InternalExample", Type, 0},
+		{"InternalExample.F", Field, 0},
+		{"InternalExample.Name", Field, 0},
+		{"InternalExample.Output", Field, 0},
+		{"InternalExample.Unordered", Field, 7},
+		{"InternalFuzzTarget", Type, 18},
+		{"InternalFuzzTarget.Fn", Field, 18},
+		{"InternalFuzzTarget.Name", Field, 18},
+		{"InternalTest", Type, 0},
+		{"InternalTest.F", Field, 0},
+		{"InternalTest.Name", Field, 0},
+		{"M", Type, 4},
+		{"Main", Func, 0},
+		{"MainStart", Func, 4},
+		{"PB", Type, 3},
+		{"RegisterCover", Func, 2},
+		{"RunBenchmarks", Func, 0},
+		{"RunExamples", Func, 0},
+		{"RunTests", Func, 0},
+		{"Short", Func, 0},
+		{"T", Type, 0},
+		{"TB", Type, 2},
+		{"Testing", Func, 21},
+		{"Verbose", Func, 1},
+	},
+	"testing/fstest": {
+		{"(MapFS).Glob", Method, 16},
+		{"(MapFS).Open", Method, 16},
+		{"(MapFS).ReadDir", Method, 16},
+		{"(MapFS).ReadFile", Method, 16},
+		{"(MapFS).Stat", Method, 16},
+		{"(MapFS).Sub", Method, 16},
+		{"MapFS", Type, 16},
+		{"MapFile", Type, 16},
+		{"MapFile.Data", Field, 16},
+		{"MapFile.ModTime", Field, 16},
+		{"MapFile.Mode", Field, 16},
+		{"MapFile.Sys", Field, 16},
+		{"TestFS", Func, 16},
+	},
+	"testing/iotest": {
+		{"DataErrReader", Func, 0},
+		{"ErrReader", Func, 16},
+		{"ErrTimeout", Var, 0},
+		{"HalfReader", Func, 0},
+		{"NewReadLogger", Func, 0},
+		{"NewWriteLogger", Func, 0},
+		{"OneByteReader", Func, 0},
+		{"TestReader", Func, 16},
+		{"TimeoutReader", Func, 0},
+		{"TruncateWriter", Func, 0},
+	},
+	"testing/quick": {
+		{"(*CheckEqualError).Error", Method, 0},
+		{"(*CheckError).Error", Method, 0},
+		{"(SetupError).Error", Method, 0},
+		{"Check", Func, 0},
+		{"CheckEqual", Func, 0},
+		{"CheckEqualError", Type, 0},
+		{"CheckEqualError.CheckError", Field, 0},
+		{"CheckEqualError.Out1", Field, 0},
+		{"CheckEqualError.Out2", Field, 0},
+		{"CheckError", Type, 0},
+		{"CheckError.Count", Field, 0},
+		{"CheckError.In", Field, 0},
+		{"Config", Type, 0},
+		{"Config.MaxCount", Field, 0},
+		{"Config.MaxCountScale", Field, 0},
+		{"Config.Rand", Field, 0},
+		{"Config.Values", Field, 0},
+		{"Generator", Type, 0},
+		{"SetupError", Type, 0},
+		{"Value", Func, 0},
+	},
+	"testing/slogtest": {
+		{"Run", Func, 22},
+		{"TestHandler", Func, 21},
+	},
+	"text/scanner": {
+		{"(*Position).IsValid", Method, 0},
+		{"(*Scanner).Init", Method, 0},
+		{"(*Scanner).IsValid", Method, 0},
+		{"(*Scanner).Next", Method, 0},
+		{"(*Scanner).Peek", Method, 0},
+		{"(*Scanner).Pos", Method, 0},
+		{"(*Scanner).Scan", Method, 0},
+		{"(*Scanner).TokenText", Method, 0},
+		{"(Position).String", Method, 0},
+		{"(Scanner).String", Method, 0},
+		{"Char", Const, 0},
+		{"Comment", Const, 0},
+		{"EOF", Const, 0},
+		{"Float", Const, 0},
+		{"GoTokens", Const, 0},
+		{"GoWhitespace", Const, 0},
+		{"Ident", Const, 0},
+		{"Int", Const, 0},
+		{"Position", Type, 0},
+		{"Position.Column", Field, 0},
+		{"Position.Filename", Field, 0},
+		{"Position.Line", Field, 0},
+		{"Position.Offset", Field, 0},
+		{"RawString", Const, 0},
+		{"ScanChars", Const, 0},
+		{"ScanComments", Const, 0},
+		{"ScanFloats", Const, 0},
+		{"ScanIdents", Const, 0},
+		{"ScanInts", Const, 0},
+		{"ScanRawStrings", Const, 0},
+		{"ScanStrings", Const, 0},
+		{"Scanner", Type, 0},
+		{"Scanner.Error", Field, 0},
+		{"Scanner.ErrorCount", Field, 0},
+		{"Scanner.IsIdentRune", Field, 4},
+		{"Scanner.Mode", Field, 0},
+		{"Scanner.Position", Field, 0},
+		{"Scanner.Whitespace", Field, 0},
+		{"SkipComments", Const, 0},
+		{"String", Const, 0},
+		{"TokenString", Func, 0},
+	},
+	"text/tabwriter": {
+		{"(*Writer).Flush", Method, 0},
+		{"(*Writer).Init", Method, 0},
+		{"(*Writer).Write", Method, 0},
+		{"AlignRight", Const, 0},
+		{"Debug", Const, 0},
+		{"DiscardEmptyColumns", Const, 0},
+		{"Escape", Const, 0},
+		{"FilterHTML", Const, 0},
+		{"NewWriter", Func, 0},
+		{"StripEscape", Const, 0},
+		{"TabIndent", Const, 0},
+		{"Writer", Type, 0},
+	},
+	"text/template": {
+		{"(*Template).AddParseTree", Method, 0},
+		{"(*Template).Clone", Method, 0},
+		{"(*Template).DefinedTemplates", Method, 5},
+		{"(*Template).Delims", Method, 0},
+		{"(*Template).Execute", Method, 0},
+		{"(*Template).ExecuteTemplate", Method, 0},
+		{"(*Template).Funcs", Method, 0},
+		{"(*Template).Lookup", Method, 0},
+		{"(*Template).Name", Method, 0},
+		{"(*Template).New", Method, 0},
+		{"(*Template).Option", Method, 5},
+		{"(*Template).Parse", Method, 0},
+		{"(*Template).ParseFS", Method, 16},
+		{"(*Template).ParseFiles", Method, 0},
+		{"(*Template).ParseGlob", Method, 0},
+		{"(*Template).Templates", Method, 0},
+		{"(ExecError).Error", Method, 6},
+		{"(ExecError).Unwrap", Method, 13},
+		{"(Template).Copy", Method, 2},
+		{"(Template).ErrorContext", Method, 1},
+		{"ExecError", Type, 6},
+		{"ExecError.Err", Field, 6},
+		{"ExecError.Name", Field, 6},
+		{"FuncMap", Type, 0},
+		{"HTMLEscape", Func, 0},
+		{"HTMLEscapeString", Func, 0},
+		{"HTMLEscaper", Func, 0},
+		{"IsTrue", Func, 6},
+		{"JSEscape", Func, 0},
+		{"JSEscapeString", Func, 0},
+		{"JSEscaper", Func, 0},
+		{"Must", Func, 0},
+		{"New", Func, 0},
+		{"ParseFS", Func, 16},
+		{"ParseFiles", Func, 0},
+		{"ParseGlob", Func, 0},
+		{"Template", Type, 0},
+		{"Template.Tree", Field, 0},
+		{"URLQueryEscaper", Func, 0},
+	},
+	"text/template/parse": {
+		{"(*ActionNode).Copy", Method, 0},
+		{"(*ActionNode).String", Method, 0},
+		{"(*BoolNode).Copy", Method, 0},
+		{"(*BoolNode).String", Method, 0},
+		{"(*BranchNode).Copy", Method, 4},
+		{"(*BranchNode).String", Method, 0},
+		{"(*BreakNode).Copy", Method, 18},
+		{"(*BreakNode).String", Method, 18},
+		{"(*ChainNode).Add", Method, 1},
+		{"(*ChainNode).Copy", Method, 1},
+		{"(*ChainNode).String", Method, 1},
+		{"(*CommandNode).Copy", Method, 0},
+		{"(*CommandNode).String", Method, 0},
+		{"(*CommentNode).Copy", Method, 16},
+		{"(*CommentNode).String", Method, 16},
+		{"(*ContinueNode).Copy", Method, 18},
+		{"(*ContinueNode).String", Method, 18},
+		{"(*DotNode).Copy", Method, 0},
+		{"(*DotNode).String", Method, 0},
+		{"(*DotNode).Type", Method, 0},
+		{"(*FieldNode).Copy", Method, 0},
+		{"(*FieldNode).String", Method, 0},
+		{"(*IdentifierNode).Copy", Method, 0},
+		{"(*IdentifierNode).SetPos", Method, 1},
+		{"(*IdentifierNode).SetTree", Method, 4},
+		{"(*IdentifierNode).String", Method, 0},
+		{"(*IfNode).Copy", Method, 0},
+		{"(*IfNode).String", Method, 0},
+		{"(*ListNode).Copy", Method, 0},
+		{"(*ListNode).CopyList", Method, 0},
+		{"(*ListNode).String", Method, 0},
+		{"(*NilNode).Copy", Method, 1},
+		{"(*NilNode).String", Method, 1},
+		{"(*NilNode).Type", Method, 1},
+		{"(*NumberNode).Copy", Method, 0},
+		{"(*NumberNode).String", Method, 0},
+		{"(*PipeNode).Copy", Method, 0},
+		{"(*PipeNode).CopyPipe", Method, 0},
+		{"(*PipeNode).String", Method, 0},
+		{"(*RangeNode).Copy", Method, 0},
+		{"(*RangeNode).String", Method, 0},
+		{"(*StringNode).Copy", Method, 0},
+		{"(*StringNode).String", Method, 0},
+		{"(*TemplateNode).Copy", Method, 0},
+		{"(*TemplateNode).String", Method, 0},
+		{"(*TextNode).Copy", Method, 0},
+		{"(*TextNode).String", Method, 0},
+		{"(*Tree).Copy", Method, 2},
+		{"(*Tree).ErrorContext", Method, 1},
+		{"(*Tree).Parse", Method, 0},
+		{"(*VariableNode).Copy", Method, 0},
+		{"(*VariableNode).String", Method, 0},
+		{"(*WithNode).Copy", Method, 0},
+		{"(*WithNode).String", Method, 0},
+		{"(ActionNode).Position", Method, 1},
+		{"(ActionNode).Type", Method, 0},
+		{"(BoolNode).Position", Method, 1},
+		{"(BoolNode).Type", Method, 0},
+		{"(BranchNode).Position", Method, 1},
+		{"(BranchNode).Type", Method, 0},
+		{"(BreakNode).Position", Method, 18},
+		{"(BreakNode).Type", Method, 18},
+		{"(ChainNode).Position", Method, 1},
+		{"(ChainNode).Type", Method, 1},
+		{"(CommandNode).Position", Method, 1},
+		{"(CommandNode).Type", Method, 0},
+		{"(CommentNode).Position", Method, 16},
+		{"(CommentNode).Type", Method, 16},
+		{"(ContinueNode).Position", Method, 18},
+		{"(ContinueNode).Type", Method, 18},
+		{"(DotNode).Position", Method, 1},
+		{"(FieldNode).Position", Method, 1},
+		{"(FieldNode).Type", Method, 0},
+		{"(IdentifierNode).Position", Method, 1},
+		{"(IdentifierNode).Type", Method, 0},
+		{"(IfNode).Position", Method, 1},
+		{"(IfNode).Type", Method, 0},
+		{"(ListNode).Position", Method, 1},
+		{"(ListNode).Type", Method, 0},
+		{"(NilNode).Position", Method, 1},
+		{"(NodeType).Type", Method, 0},
+		{"(NumberNode).Position", Method, 1},
+		{"(NumberNode).Type", Method, 0},
+		{"(PipeNode).Position", Method, 1},
+		{"(PipeNode).Type", Method, 0},
+		{"(Pos).Position", Method, 1},
+		{"(RangeNode).Position", Method, 1},
+		{"(RangeNode).Type", Method, 0},
+		{"(StringNode).Position", Method, 1},
+		{"(StringNode).Type", Method, 0},
+		{"(TemplateNode).Position", Method, 1},
+		{"(TemplateNode).Type", Method, 0},
+		{"(TextNode).Position", Method, 1},
+		{"(TextNode).Type", Method, 0},
+		{"(VariableNode).Position", Method, 1},
+		{"(VariableNode).Type", Method, 0},
+		{"(WithNode).Position", Method, 1},
+		{"(WithNode).Type", Method, 0},
+		{"ActionNode", Type, 0},
+		{"ActionNode.Line", Field, 0},
+		{"ActionNode.NodeType", Field, 0},
+		{"ActionNode.Pipe", Field, 0},
+		{"ActionNode.Pos", Field, 1},
+		{"BoolNode", Type, 0},
+		{"BoolNode.NodeType", Field, 0},
+		{"BoolNode.Pos", Field, 1},
+		{"BoolNode.True", Field, 0},
+		{"BranchNode", Type, 0},
+		{"BranchNode.ElseList", Field, 0},
+		{"BranchNode.Line", Field, 0},
+		{"BranchNode.List", Field, 0},
+		{"BranchNode.NodeType", Field, 0},
+		{"BranchNode.Pipe", Field, 0},
+		{"BranchNode.Pos", Field, 1},
+		{"BreakNode", Type, 18},
+		{"BreakNode.Line", Field, 18},
+		{"BreakNode.NodeType", Field, 18},
+		{"BreakNode.Pos", Field, 18},
+		{"ChainNode", Type, 1},
+		{"ChainNode.Field", Field, 1},
+		{"ChainNode.Node", Field, 1},
+		{"ChainNode.NodeType", Field, 1},
+		{"ChainNode.Pos", Field, 1},
+		{"CommandNode", Type, 0},
+		{"CommandNode.Args", Field, 0},
+		{"CommandNode.NodeType", Field, 0},
+		{"CommandNode.Pos", Field, 1},
+		{"CommentNode", Type, 16},
+		{"CommentNode.NodeType", Field, 16},
+		{"CommentNode.Pos", Field, 16},
+		{"CommentNode.Text", Field, 16},
+		{"ContinueNode", Type, 18},
+		{"ContinueNode.Line", Field, 18},
+		{"ContinueNode.NodeType", Field, 18},
+		{"ContinueNode.Pos", Field, 18},
+		{"DotNode", Type, 0},
+		{"DotNode.NodeType", Field, 4},
+		{"DotNode.Pos", Field, 1},
+		{"FieldNode", Type, 0},
+		{"FieldNode.Ident", Field, 0},
+		{"FieldNode.NodeType", Field, 0},
+		{"FieldNode.Pos", Field, 1},
+		{"IdentifierNode", Type, 0},
+		{"IdentifierNode.Ident", Field, 0},
+		{"IdentifierNode.NodeType", Field, 0},
+		{"IdentifierNode.Pos", Field, 1},
+		{"IfNode", Type, 0},
+		{"IfNode.BranchNode", Field, 0},
+		{"IsEmptyTree", Func, 0},
+		{"ListNode", Type, 0},
+		{"ListNode.NodeType", Field, 0},
+		{"ListNode.Nodes", Field, 0},
+		{"ListNode.Pos", Field, 1},
+		{"Mode", Type, 16},
+		{"New", Func, 0},
+		{"NewIdentifier", Func, 0},
+		{"NilNode", Type, 1},
+		{"NilNode.NodeType", Field, 4},
+		{"NilNode.Pos", Field, 1},
+		{"Node", Type, 0},
+		{"NodeAction", Const, 0},
+		{"NodeBool", Const, 0},
+		{"NodeBreak", Const, 18},
+		{"NodeChain", Const, 1},
+		{"NodeCommand", Const, 0},
+		{"NodeComment", Const, 16},
+		{"NodeContinue", Const, 18},
+		{"NodeDot", Const, 0},
+		{"NodeField", Const, 0},
+		{"NodeIdentifier", Const, 0},
+		{"NodeIf", Const, 0},
+		{"NodeList", Const, 0},
+		{"NodeNil", Const, 1},
+		{"NodeNumber", Const, 0},
+		{"NodePipe", Const, 0},
+		{"NodeRange", Const, 0},
+		{"NodeString", Const, 0},
+		{"NodeTemplate", Const, 0},
+		{"NodeText", Const, 0},
+		{"NodeType", Type, 0},
+		{"NodeVariable", Const, 0},
+		{"NodeWith", Const, 0},
+		{"NumberNode", Type, 0},
+		{"NumberNode.Complex128", Field, 0},
+		{"NumberNode.Float64", Field, 0},
+		{"NumberNode.Int64", Field, 0},
+		{"NumberNode.IsComplex", Field, 0},
+		{"NumberNode.IsFloat", Field, 0},
+		{"NumberNode.IsInt", Field, 0},
+		{"NumberNode.IsUint", Field, 0},
+		{"NumberNode.NodeType", Field, 0},
+		{"NumberNode.Pos", Field, 1},
+		{"NumberNode.Text", Field, 0},
+		{"NumberNode.Uint64", Field, 0},
+		{"Parse", Func, 0},
+		{"ParseComments", Const, 16},
+		{"PipeNode", Type, 0},
+		{"PipeNode.Cmds", Field, 0},
+		{"PipeNode.Decl", Field, 0},
+		{"PipeNode.IsAssign", Field, 11},
+		{"PipeNode.Line", Field, 0},
+		{"PipeNode.NodeType", Field, 0},
+		{"PipeNode.Pos", Field, 1},
+		{"Pos", Type, 1},
+		{"RangeNode", Type, 0},
+		{"RangeNode.BranchNode", Field, 0},
+		{"SkipFuncCheck", Const, 17},
+		{"StringNode", Type, 0},
+		{"StringNode.NodeType", Field, 0},
+		{"StringNode.Pos", Field, 1},
+		{"StringNode.Quoted", Field, 0},
+		{"StringNode.Text", Field, 0},
+		{"TemplateNode", Type, 0},
+		{"TemplateNode.Line", Field, 0},
+		{"TemplateNode.Name", Field, 0},
+		{"TemplateNode.NodeType", Field, 0},
+		{"TemplateNode.Pipe", Field, 0},
+		{"TemplateNode.Pos", Field, 1},
+		{"TextNode", Type, 0},
+		{"TextNode.NodeType", Field, 0},
+		{"TextNode.Pos", Field, 1},
+		{"TextNode.Text", Field, 0},
+		{"Tree", Type, 0},
+		{"Tree.Mode", Field, 16},
+		{"Tree.Name", Field, 0},
+		{"Tree.ParseName", Field, 1},
+		{"Tree.Root", Field, 0},
+		{"VariableNode", Type, 0},
+		{"VariableNode.Ident", Field, 0},
+		{"VariableNode.NodeType", Field, 0},
+		{"VariableNode.Pos", Field, 1},
+		{"WithNode", Type, 0},
+		{"WithNode.BranchNode", Field, 0},
+	},
+	"time": {
+		{"(*Location).String", Method, 0},
+		{"(*ParseError).Error", Method, 0},
+		{"(*Ticker).Reset", Method, 15},
+		{"(*Ticker).Stop", Method, 0},
+		{"(*Time).GobDecode", Method, 0},
+		{"(*Time).UnmarshalBinary", Method, 2},
+		{"(*Time).UnmarshalJSON", Method, 0},
+		{"(*Time).UnmarshalText", Method, 2},
+		{"(*Timer).Reset", Method, 1},
+		{"(*Timer).Stop", Method, 0},
+		{"(Duration).Abs", Method, 19},
+		{"(Duration).Hours", Method, 0},
+		{"(Duration).Microseconds", Method, 13},
+		{"(Duration).Milliseconds", Method, 13},
+		{"(Duration).Minutes", Method, 0},
+		{"(Duration).Nanoseconds", Method, 0},
+		{"(Duration).Round", Method, 9},
+		{"(Duration).Seconds", Method, 0},
+		{"(Duration).String", Method, 0},
+		{"(Duration).Truncate", Method, 9},
+		{"(Month).String", Method, 0},
+		{"(Time).Add", Method, 0},
+		{"(Time).AddDate", Method, 0},
+		{"(Time).After", Method, 0},
+		{"(Time).AppendFormat", Method, 5},
+		{"(Time).Before", Method, 0},
+		{"(Time).Clock", Method, 0},
+		{"(Time).Compare", Method, 20},
+		{"(Time).Date", Method, 0},
+		{"(Time).Day", Method, 0},
+		{"(Time).Equal", Method, 0},
+		{"(Time).Format", Method, 0},
+		{"(Time).GoString", Method, 17},
+		{"(Time).GobEncode", Method, 0},
+		{"(Time).Hour", Method, 0},
+		{"(Time).ISOWeek", Method, 0},
+		{"(Time).In", Method, 0},
+		{"(Time).IsDST", Method, 17},
+		{"(Time).IsZero", Method, 0},
+		{"(Time).Local", Method, 0},
+		{"(Time).Location", Method, 0},
+		{"(Time).MarshalBinary", Method, 2},
+		{"(Time).MarshalJSON", Method, 0},
+		{"(Time).MarshalText", Method, 2},
+		{"(Time).Minute", Method, 0},
+		{"(Time).Month", Method, 0},
+		{"(Time).Nanosecond", Method, 0},
+		{"(Time).Round", Method, 1},
+		{"(Time).Second", Method, 0},
+		{"(Time).String", Method, 0},
+		{"(Time).Sub", Method, 0},
+		{"(Time).Truncate", Method, 1},
+		{"(Time).UTC", Method, 0},
+		{"(Time).Unix", Method, 0},
+		{"(Time).UnixMicro", Method, 17},
+		{"(Time).UnixMilli", Method, 17},
+		{"(Time).UnixNano", Method, 0},
+		{"(Time).Weekday", Method, 0},
+		{"(Time).Year", Method, 0},
+		{"(Time).YearDay", Method, 1},
+		{"(Time).Zone", Method, 0},
+		{"(Time).ZoneBounds", Method, 19},
+		{"(Weekday).String", Method, 0},
+		{"ANSIC", Const, 0},
+		{"After", Func, 0},
+		{"AfterFunc", Func, 0},
+		{"April", Const, 0},
+		{"August", Const, 0},
+		{"Date", Func, 0},
+		{"DateOnly", Const, 20},
+		{"DateTime", Const, 20},
+		{"December", Const, 0},
+		{"Duration", Type, 0},
+		{"February", Const, 0},
+		{"FixedZone", Func, 0},
+		{"Friday", Const, 0},
+		{"Hour", Const, 0},
+		{"January", Const, 0},
+		{"July", Const, 0},
+		{"June", Const, 0},
+		{"Kitchen", Const, 0},
+		{"Layout", Const, 17},
+		{"LoadLocation", Func, 0},
+		{"LoadLocationFromTZData", Func, 10},
+		{"Local", Var, 0},
+		{"Location", Type, 0},
+		{"March", Const, 0},
+		{"May", Const, 0},
+		{"Microsecond", Const, 0},
+		{"Millisecond", Const, 0},
+		{"Minute", Const, 0},
+		{"Monday", Const, 0},
+		{"Month", Type, 0},
+		{"Nanosecond", Const, 0},
+		{"NewTicker", Func, 0},
+		{"NewTimer", Func, 0},
+		{"November", Const, 0},
+		{"Now", Func, 0},
+		{"October", Const, 0},
+		{"Parse", Func, 0},
+		{"ParseDuration", Func, 0},
+		{"ParseError", Type, 0},
+		{"ParseError.Layout", Field, 0},
+		{"ParseError.LayoutElem", Field, 0},
+		{"ParseError.Message", Field, 0},
+		{"ParseError.Value", Field, 0},
+		{"ParseError.ValueElem", Field, 0},
+		{"ParseInLocation", Func, 1},
+		{"RFC1123", Const, 0},
+		{"RFC1123Z", Const, 0},
+		{"RFC3339", Const, 0},
+		{"RFC3339Nano", Const, 0},
+		{"RFC822", Const, 0},
+		{"RFC822Z", Const, 0},
+		{"RFC850", Const, 0},
+		{"RubyDate", Const, 0},
+		{"Saturday", Const, 0},
+		{"Second", Const, 0},
+		{"September", Const, 0},
+		{"Since", Func, 0},
+		{"Sleep", Func, 0},
+		{"Stamp", Const, 0},
+		{"StampMicro", Const, 0},
+		{"StampMilli", Const, 0},
+		{"StampNano", Const, 0},
+		{"Sunday", Const, 0},
+		{"Thursday", Const, 0},
+		{"Tick", Func, 0},
+		{"Ticker", Type, 0},
+		{"Ticker.C", Field, 0},
+		{"Time", Type, 0},
+		{"TimeOnly", Const, 20},
+		{"Timer", Type, 0},
+		{"Timer.C", Field, 0},
+		{"Tuesday", Const, 0},
+		{"UTC", Var, 0},
+		{"Unix", Func, 0},
+		{"UnixDate", Const, 0},
+		{"UnixMicro", Func, 17},
+		{"UnixMilli", Func, 17},
+		{"Until", Func, 8},
+		{"Wednesday", Const, 0},
+		{"Weekday", Type, 0},
+	},
+	"unicode": {
+		{"(SpecialCase).ToLower", Method, 0},
+		{"(SpecialCase).ToTitle", Method, 0},
+		{"(SpecialCase).ToUpper", Method, 0},
+		{"ASCII_Hex_Digit", Var, 0},
+		{"Adlam", Var, 7},
+		{"Ahom", Var, 5},
+		{"Anatolian_Hieroglyphs", Var, 5},
+		{"Arabic", Var, 0},
+		{"Armenian", Var, 0},
+		{"Avestan", Var, 0},
+		{"AzeriCase", Var, 0},
+		{"Balinese", Var, 0},
+		{"Bamum", Var, 0},
+		{"Bassa_Vah", Var, 4},
+		{"Batak", Var, 0},
+		{"Bengali", Var, 0},
+		{"Bhaiksuki", Var, 7},
+		{"Bidi_Control", Var, 0},
+		{"Bopomofo", Var, 0},
+		{"Brahmi", Var, 0},
+		{"Braille", Var, 0},
+		{"Buginese", Var, 0},
+		{"Buhid", Var, 0},
+		{"C", Var, 0},
+		{"Canadian_Aboriginal", Var, 0},
+		{"Carian", Var, 0},
+		{"CaseRange", Type, 0},
+		{"CaseRange.Delta", Field, 0},
+		{"CaseRange.Hi", Field, 0},
+		{"CaseRange.Lo", Field, 0},
+		{"CaseRanges", Var, 0},
+		{"Categories", Var, 0},
+		{"Caucasian_Albanian", Var, 4},
+		{"Cc", Var, 0},
+		{"Cf", Var, 0},
+		{"Chakma", Var, 1},
+		{"Cham", Var, 0},
+		{"Cherokee", Var, 0},
+		{"Chorasmian", Var, 16},
+		{"Co", Var, 0},
+		{"Common", Var, 0},
+		{"Coptic", Var, 0},
+		{"Cs", Var, 0},
+		{"Cuneiform", Var, 0},
+		{"Cypriot", Var, 0},
+		{"Cypro_Minoan", Var, 21},
+		{"Cyrillic", Var, 0},
+		{"Dash", Var, 0},
+		{"Deprecated", Var, 0},
+		{"Deseret", Var, 0},
+		{"Devanagari", Var, 0},
+		{"Diacritic", Var, 0},
+		{"Digit", Var, 0},
+		{"Dives_Akuru", Var, 16},
+		{"Dogra", Var, 13},
+		{"Duployan", Var, 4},
+		{"Egyptian_Hieroglyphs", Var, 0},
+		{"Elbasan", Var, 4},
+		{"Elymaic", Var, 14},
+		{"Ethiopic", Var, 0},
+		{"Extender", Var, 0},
+		{"FoldCategory", Var, 0},
+		{"FoldScript", Var, 0},
+		{"Georgian", Var, 0},
+		{"Glagolitic", Var, 0},
+		{"Gothic", Var, 0},
+		{"Grantha", Var, 4},
+		{"GraphicRanges", Var, 0},
+		{"Greek", Var, 0},
+		{"Gujarati", Var, 0},
+		{"Gunjala_Gondi", Var, 13},
+		{"Gurmukhi", Var, 0},
+		{"Han", Var, 0},
+		{"Hangul", Var, 0},
+		{"Hanifi_Rohingya", Var, 13},
+		{"Hanunoo", Var, 0},
+		{"Hatran", Var, 5},
+		{"Hebrew", Var, 0},
+		{"Hex_Digit", Var, 0},
+		{"Hiragana", Var, 0},
+		{"Hyphen", Var, 0},
+		{"IDS_Binary_Operator", Var, 0},
+		{"IDS_Trinary_Operator", Var, 0},
+		{"Ideographic", Var, 0},
+		{"Imperial_Aramaic", Var, 0},
+		{"In", Func, 2},
+		{"Inherited", Var, 0},
+		{"Inscriptional_Pahlavi", Var, 0},
+		{"Inscriptional_Parthian", Var, 0},
+		{"Is", Func, 0},
+		{"IsControl", Func, 0},
+		{"IsDigit", Func, 0},
+		{"IsGraphic", Func, 0},
+		{"IsLetter", Func, 0},
+		{"IsLower", Func, 0},
+		{"IsMark", Func, 0},
+		{"IsNumber", Func, 0},
+		{"IsOneOf", Func, 0},
+		{"IsPrint", Func, 0},
+		{"IsPunct", Func, 0},
+		{"IsSpace", Func, 0},
+		{"IsSymbol", Func, 0},
+		{"IsTitle", Func, 0},
+		{"IsUpper", Func, 0},
+		{"Javanese", Var, 0},
+		{"Join_Control", Var, 0},
+		{"Kaithi", Var, 0},
+		{"Kannada", Var, 0},
+		{"Katakana", Var, 0},
+		{"Kawi", Var, 21},
+		{"Kayah_Li", Var, 0},
+		{"Kharoshthi", Var, 0},
+		{"Khitan_Small_Script", Var, 16},
+		{"Khmer", Var, 0},
+		{"Khojki", Var, 4},
+		{"Khudawadi", Var, 4},
+		{"L", Var, 0},
+		{"Lao", Var, 0},
+		{"Latin", Var, 0},
+		{"Lepcha", Var, 0},
+		{"Letter", Var, 0},
+		{"Limbu", Var, 0},
+		{"Linear_A", Var, 4},
+		{"Linear_B", Var, 0},
+		{"Lisu", Var, 0},
+		{"Ll", Var, 0},
+		{"Lm", Var, 0},
+		{"Lo", Var, 0},
+		{"Logical_Order_Exception", Var, 0},
+		{"Lower", Var, 0},
+		{"LowerCase", Const, 0},
+		{"Lt", Var, 0},
+		{"Lu", Var, 0},
+		{"Lycian", Var, 0},
+		{"Lydian", Var, 0},
+		{"M", Var, 0},
+		{"Mahajani", Var, 4},
+		{"Makasar", Var, 13},
+		{"Malayalam", Var, 0},
+		{"Mandaic", Var, 0},
+		{"Manichaean", Var, 4},
+		{"Marchen", Var, 7},
+		{"Mark", Var, 0},
+		{"Masaram_Gondi", Var, 10},
+		{"MaxASCII", Const, 0},
+		{"MaxCase", Const, 0},
+		{"MaxLatin1", Const, 0},
+		{"MaxRune", Const, 0},
+		{"Mc", Var, 0},
+		{"Me", Var, 0},
+		{"Medefaidrin", Var, 13},
+		{"Meetei_Mayek", Var, 0},
+		{"Mende_Kikakui", Var, 4},
+		{"Meroitic_Cursive", Var, 1},
+		{"Meroitic_Hieroglyphs", Var, 1},
+		{"Miao", Var, 1},
+		{"Mn", Var, 0},
+		{"Modi", Var, 4},
+		{"Mongolian", Var, 0},
+		{"Mro", Var, 4},
+		{"Multani", Var, 5},
+		{"Myanmar", Var, 0},
+		{"N", Var, 0},
+		{"Nabataean", Var, 4},
+		{"Nag_Mundari", Var, 21},
+		{"Nandinagari", Var, 14},
+		{"Nd", Var, 0},
+		{"New_Tai_Lue", Var, 0},
+		{"Newa", Var, 7},
+		{"Nko", Var, 0},
+		{"Nl", Var, 0},
+		{"No", Var, 0},
+		{"Noncharacter_Code_Point", Var, 0},
+		{"Number", Var, 0},
+		{"Nushu", Var, 10},
+		{"Nyiakeng_Puachue_Hmong", Var, 14},
+		{"Ogham", Var, 0},
+		{"Ol_Chiki", Var, 0},
+		{"Old_Hungarian", Var, 5},
+		{"Old_Italic", Var, 0},
+		{"Old_North_Arabian", Var, 4},
+		{"Old_Permic", Var, 4},
+		{"Old_Persian", Var, 0},
+		{"Old_Sogdian", Var, 13},
+		{"Old_South_Arabian", Var, 0},
+		{"Old_Turkic", Var, 0},
+		{"Old_Uyghur", Var, 21},
+		{"Oriya", Var, 0},
+		{"Osage", Var, 7},
+		{"Osmanya", Var, 0},
+		{"Other", Var, 0},
+		{"Other_Alphabetic", Var, 0},
+		{"Other_Default_Ignorable_Code_Point", Var, 0},
+		{"Other_Grapheme_Extend", Var, 0},
+		{"Other_ID_Continue", Var, 0},
+		{"Other_ID_Start", Var, 0},
+		{"Other_Lowercase", Var, 0},
+		{"Other_Math", Var, 0},
+		{"Other_Uppercase", Var, 0},
+		{"P", Var, 0},
+		{"Pahawh_Hmong", Var, 4},
+		{"Palmyrene", Var, 4},
+		{"Pattern_Syntax", Var, 0},
+		{"Pattern_White_Space", Var, 0},
+		{"Pau_Cin_Hau", Var, 4},
+		{"Pc", Var, 0},
+		{"Pd", Var, 0},
+		{"Pe", Var, 0},
+		{"Pf", Var, 0},
+		{"Phags_Pa", Var, 0},
+		{"Phoenician", Var, 0},
+		{"Pi", Var, 0},
+		{"Po", Var, 0},
+		{"Prepended_Concatenation_Mark", Var, 7},
+		{"PrintRanges", Var, 0},
+		{"Properties", Var, 0},
+		{"Ps", Var, 0},
+		{"Psalter_Pahlavi", Var, 4},
+		{"Punct", Var, 0},
+		{"Quotation_Mark", Var, 0},
+		{"Radical", Var, 0},
+		{"Range16", Type, 0},
+		{"Range16.Hi", Field, 0},
+		{"Range16.Lo", Field, 0},
+		{"Range16.Stride", Field, 0},
+		{"Range32", Type, 0},
+		{"Range32.Hi", Field, 0},
+		{"Range32.Lo", Field, 0},
+		{"Range32.Stride", Field, 0},
+		{"RangeTable", Type, 0},
+		{"RangeTable.LatinOffset", Field, 1},
+		{"RangeTable.R16", Field, 0},
+		{"RangeTable.R32", Field, 0},
+		{"Regional_Indicator", Var, 10},
+		{"Rejang", Var, 0},
+		{"ReplacementChar", Const, 0},
+		{"Runic", Var, 0},
+		{"S", Var, 0},
+		{"STerm", Var, 0},
+		{"Samaritan", Var, 0},
+		{"Saurashtra", Var, 0},
+		{"Sc", Var, 0},
+		{"Scripts", Var, 0},
+		{"Sentence_Terminal", Var, 7},
+		{"Sharada", Var, 1},
+		{"Shavian", Var, 0},
+		{"Siddham", Var, 4},
+		{"SignWriting", Var, 5},
+		{"SimpleFold", Func, 0},
+		{"Sinhala", Var, 0},
+		{"Sk", Var, 0},
+		{"Sm", Var, 0},
+		{"So", Var, 0},
+		{"Soft_Dotted", Var, 0},
+		{"Sogdian", Var, 13},
+		{"Sora_Sompeng", Var, 1},
+		{"Soyombo", Var, 10},
+		{"Space", Var, 0},
+		{"SpecialCase", Type, 0},
+		{"Sundanese", Var, 0},
+		{"Syloti_Nagri", Var, 0},
+		{"Symbol", Var, 0},
+		{"Syriac", Var, 0},
+		{"Tagalog", Var, 0},
+		{"Tagbanwa", Var, 0},
+		{"Tai_Le", Var, 0},
+		{"Tai_Tham", Var, 0},
+		{"Tai_Viet", Var, 0},
+		{"Takri", Var, 1},
+		{"Tamil", Var, 0},
+		{"Tangsa", Var, 21},
+		{"Tangut", Var, 7},
+		{"Telugu", Var, 0},
+		{"Terminal_Punctuation", Var, 0},
+		{"Thaana", Var, 0},
+		{"Thai", Var, 0},
+		{"Tibetan", Var, 0},
+		{"Tifinagh", Var, 0},
+		{"Tirhuta", Var, 4},
+		{"Title", Var, 0},
+		{"TitleCase", Const, 0},
+		{"To", Func, 0},
+		{"ToLower", Func, 0},
+		{"ToTitle", Func, 0},
+		{"ToUpper", Func, 0},
+		{"Toto", Var, 21},
+		{"TurkishCase", Var, 0},
+		{"Ugaritic", Var, 0},
+		{"Unified_Ideograph", Var, 0},
+		{"Upper", Var, 0},
+		{"UpperCase", Const, 0},
+		{"UpperLower", Const, 0},
+		{"Vai", Var, 0},
+		{"Variation_Selector", Var, 0},
+		{"Version", Const, 0},
+		{"Vithkuqi", Var, 21},
+		{"Wancho", Var, 14},
+		{"Warang_Citi", Var, 4},
+		{"White_Space", Var, 0},
+		{"Yezidi", Var, 16},
+		{"Yi", Var, 0},
+		{"Z", Var, 0},
+		{"Zanabazar_Square", Var, 10},
+		{"Zl", Var, 0},
+		{"Zp", Var, 0},
+		{"Zs", Var, 0},
+	},
+	"unicode/utf16": {
+		{"AppendRune", Func, 20},
+		{"Decode", Func, 0},
+		{"DecodeRune", Func, 0},
+		{"Encode", Func, 0},
+		{"EncodeRune", Func, 0},
+		{"IsSurrogate", Func, 0},
+	},
+	"unicode/utf8": {
+		{"AppendRune", Func, 18},
+		{"DecodeLastRune", Func, 0},
+		{"DecodeLastRuneInString", Func, 0},
+		{"DecodeRune", Func, 0},
+		{"DecodeRuneInString", Func, 0},
+		{"EncodeRune", Func, 0},
+		{"FullRune", Func, 0},
+		{"FullRuneInString", Func, 0},
+		{"MaxRune", Const, 0},
+		{"RuneCount", Func, 0},
+		{"RuneCountInString", Func, 0},
+		{"RuneError", Const, 0},
+		{"RuneLen", Func, 0},
+		{"RuneSelf", Const, 0},
+		{"RuneStart", Func, 0},
+		{"UTFMax", Const, 0},
+		{"Valid", Func, 0},
+		{"ValidRune", Func, 1},
+		{"ValidString", Func, 0},
+	},
+	"unsafe": {
+		{"Add", Func, 0},
+		{"Alignof", Func, 0},
+		{"Offsetof", Func, 0},
+		{"Pointer", Type, 0},
+		{"Sizeof", Func, 0},
+		{"Slice", Func, 0},
+		{"SliceData", Func, 0},
+		{"String", Func, 0},
+		{"StringData", Func, 0},
+	},
+}
diff --git a/vendor/golang.org/x/tools/internal/stdlib/stdlib.go b/vendor/golang.org/x/tools/internal/stdlib/stdlib.go
new file mode 100644
index 0000000000000000000000000000000000000000..98904017f2ca7aedfdab8cfe214dbde6c5778ff3
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/stdlib/stdlib.go
@@ -0,0 +1,97 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:generate go run generate.go
+
+// Package stdlib provides a table of all exported symbols in the
+// standard library, along with the version at which they first
+// appeared.
+package stdlib
+
+import (
+	"fmt"
+	"strings"
+)
+
+type Symbol struct {
+	Name    string
+	Kind    Kind
+	Version Version // Go version that first included the symbol
+}
+
+// A Kind indicates the kind of a symbol:
+// function, variable, constant, type, and so on.
+type Kind int8
+
+const (
+	Invalid Kind = iota // Example name:
+	Type                // "Buffer"
+	Func                // "Println"
+	Var                 // "EOF"
+	Const               // "Pi"
+	Field               // "Point.X"
+	Method              // "(*Buffer).Grow"
+)
+
+func (kind Kind) String() string {
+	return [...]string{
+		Invalid: "invalid",
+		Type:    "type",
+		Func:    "func",
+		Var:     "var",
+		Const:   "const",
+		Field:   "field",
+		Method:  "method",
+	}[kind]
+}
+
+// A Version represents a version of Go of the form "go1.%d".
+type Version int8
+
+// String returns a version string of the form "go1.23", without allocating.
+func (v Version) String() string { return versions[v] }
+
+var versions [30]string // (increase constant as needed)
+
+func init() {
+	for i := range versions {
+		versions[i] = fmt.Sprintf("go1.%d", i)
+	}
+}
+
+// HasPackage reports whether the specified package path is part of
+// the standard library's public API.
+func HasPackage(path string) bool {
+	_, ok := PackageSymbols[path]
+	return ok
+}
+
+// SplitField splits the field symbol name into type and field
+// components. It must be called only on Field symbols.
+//
+// Example: "File.Package" -> ("File", "Package")
+func (sym *Symbol) SplitField() (typename, name string) {
+	if sym.Kind != Field {
+		panic("not a field")
+	}
+	typename, name, _ = strings.Cut(sym.Name, ".")
+	return
+}
+
+// SplitMethod splits the method symbol name into pointer, receiver,
+// and method components. It must be called only on Method symbols.
+//
+// Example: "(*Buffer).Grow" -> (true, "Buffer", "Grow")
+func (sym *Symbol) SplitMethod() (ptr bool, recv, name string) {
+	if sym.Kind != Method {
+		panic("not a method")
+	}
+	recv, name, _ = strings.Cut(sym.Name, ".")
+	recv = recv[len("(") : len(recv)-len(")")]
+	ptr = recv[0] == '*'
+	if ptr {
+		recv = recv[len("*"):]
+	}
+	return
+}
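
A minimal usage sketch (editorial illustration, not part of this change) of the symbol table and the SplitMethod helper defined above. It assumes the generated PackageSymbols map[string][]Symbol shown earlier in this diff; note the package sits under internal/, so it is only importable from within x/tools itself.

package main

import (
	"fmt"

	"golang.org/x/tools/internal/stdlib" // internal to x/tools; shown for illustration
)

func main() {
	if !stdlib.HasPackage("time") {
		return
	}
	for _, sym := range stdlib.PackageSymbols["time"] {
		if sym.Kind != stdlib.Method {
			continue
		}
		ptr, recv, name := sym.SplitMethod()
		// e.g. "(*Timer).Reset" -> ptr=true recv=Timer name=Reset since go1.1
		fmt.Printf("%s -> ptr=%v recv=%s method=%s since %s\n",
			sym.Name, ptr, recv, name, sym.Version)
	}
}
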
diff --git a/vendor/golang.org/x/tools/internal/typeparams/common.go b/vendor/golang.org/x/tools/internal/typeparams/common.go
index 8c3a42dc3118ab6dba625626d6c4a40749ebae12..89bd256dc67b3351059347eff11822fdf8c63d9c 100644
--- a/vendor/golang.org/x/tools/internal/typeparams/common.go
+++ b/vendor/golang.org/x/tools/internal/typeparams/common.go
@@ -13,13 +13,11 @@
 package typeparams
 
 import (
-	"fmt"
 	"go/ast"
 	"go/token"
 	"go/types"
 
 	"golang.org/x/tools/internal/aliases"
-	"golang.org/x/tools/internal/typesinternal"
 )
 
 // UnpackIndexExpr extracts data from AST nodes that represent index
@@ -71,62 +69,11 @@ func IsTypeParam(t types.Type) bool {
 	return ok
 }
 
-// OriginMethod returns the origin method associated with the method fn.
-// For methods on a non-generic receiver base type, this is just
-// fn. However, for methods with a generic receiver, OriginMethod returns the
-// corresponding method in the method set of the origin type.
-//
-// As a special case, if fn is not a method (has no receiver), OriginMethod
-// returns fn.
-func OriginMethod(fn *types.Func) *types.Func {
-	recv := fn.Type().(*types.Signature).Recv()
-	if recv == nil {
-		return fn
-	}
-	_, named := typesinternal.ReceiverNamed(recv)
-	if named == nil {
-		// Receiver is a *types.Interface.
-		return fn
-	}
-	if named.TypeParams().Len() == 0 {
-		// Receiver base has no type parameters, so we can avoid the lookup below.
-		return fn
-	}
-	orig := named.Origin()
-	gfn, _, _ := types.LookupFieldOrMethod(orig, true, fn.Pkg(), fn.Name())
-
-	// This is a fix for a gopls crash (#60628) due to a go/types bug (#60634). In:
-	// 	package p
-	//      type T *int
-	//      func (*T) f() {}
-	// LookupFieldOrMethod(T, true, p, f)=nil, but NewMethodSet(*T)={(*T).f}.
-	// Here we make them consistent by force.
-	// (The go/types bug is general, but this workaround is reached only
-	// for generic T thanks to the early return above.)
-	if gfn == nil {
-		mset := types.NewMethodSet(types.NewPointer(orig))
-		for i := 0; i < mset.Len(); i++ {
-			m := mset.At(i)
-			if m.Obj().Id() == fn.Id() {
-				gfn = m.Obj()
-				break
-			}
-		}
-	}
-
-	// In golang/go#61196, we observe another crash, this time inexplicable.
-	if gfn == nil {
-		panic(fmt.Sprintf("missing origin method for %s.%s; named == origin: %t, named.NumMethods(): %d, origin.NumMethods(): %d", named, fn, named == orig, named.NumMethods(), orig.NumMethods()))
-	}
-
-	return gfn.(*types.Func)
-}
-
 // GenericAssignableTo is a generalization of types.AssignableTo that
 // implements the following rule for uninstantiated generic types:
 //
 // If V and T are generic named types, then V is considered assignable to T if,
-// for every possible instantation of V[A_1, ..., A_N], the instantiation
+// for every possible instantiation of V[A_1, ..., A_N], the instantiation
 // T[A_1, ..., A_N] is valid and V[A_1, ..., A_N] implements T[A_1, ..., A_N].
 //
 // If T has structural constraints, they must be satisfied by V.
diff --git a/vendor/golang.org/x/tools/internal/typeparams/coretype.go b/vendor/golang.org/x/tools/internal/typeparams/coretype.go
index e66e9d0f48c9bb394d74ae96ef63eaa6cce1ba54..24933e43dac8c2627c42f15e0eeaf929c6502220 100644
--- a/vendor/golang.org/x/tools/internal/typeparams/coretype.go
+++ b/vendor/golang.org/x/tools/internal/typeparams/coretype.go
@@ -124,6 +124,21 @@ func _NormalTerms(typ types.Type) ([]*types.Term, error) {
 	}
 }
 
+// Deref returns the type of the variable pointed to by t,
+// if t's core type is a pointer; otherwise it returns t.
+//
+// Do not assume that Deref(T)==T implies T is not a pointer:
+// consider "type T *T", for example.
+//
+// TODO(adonovan): ideally this would live in typesinternal, but that
+// creates an import cycle. Move there when we melt this package down.
+func Deref(t types.Type) types.Type {
+	if ptr, ok := CoreType(t).(*types.Pointer); ok {
+		return ptr.Elem()
+	}
+	return t
+}
+
 // MustDeref returns the type of the variable pointed to by t.
 // It panics if t's core type is not a pointer.
 //
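
A brief sketch (not part of this change) of the behavior the new Deref documents, using go/types values; it assumes the internal typeparams package were importable from the caller's location.

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/internal/typeparams" // internal to x/tools; shown for illustration
)

func main() {
	p := types.NewPointer(types.Typ[types.Int])
	fmt.Println(typeparams.Deref(p))                    // "int": core type is *int, one level unwrapped
	fmt.Println(typeparams.Deref(types.Typ[types.Int])) // "int": not a pointer, returned unchanged
}
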
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go b/vendor/golang.org/x/tools/internal/typeparams/free.go
similarity index 54%
rename from vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go
rename to vendor/golang.org/x/tools/internal/typeparams/free.go
index a077d440246e86d0dc8077e727ec7e195c2d9567..de3496d10b35dad19cac691befe7be59ecf1f4c7 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go
+++ b/vendor/golang.org/x/tools/internal/typeparams/free.go
@@ -1,35 +1,34 @@
-// Copyright 2022 The Go Authors. All rights reserved.
+// Copyright 2024 The Go Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-package ifaceassert
+package typeparams
 
 import (
 	"go/types"
 
 	"golang.org/x/tools/internal/aliases"
-	"golang.org/x/tools/internal/typeparams"
 )
 
-// isParameterized reports whether typ contains any of the type parameters of tparams.
+// Free is a memoization of the set of free type parameters within a
+// type. It makes a sequence of calls to [Free.Has] for overlapping
+// types more efficient. The zero value is ready for use.
 //
-// NOTE: Adapted from go/types/infer.go. If that is exported in a future release remove this copy.
-func isParameterized(typ types.Type) bool {
-	w := tpWalker{
-		seen: make(map[types.Type]bool),
-	}
-	return w.isParameterized(typ)
-}
-
-type tpWalker struct {
+// NOTE: Adapted from go/types/infer.go. If it is later exported, factor.
+type Free struct {
 	seen map[types.Type]bool
 }
 
-func (w *tpWalker) isParameterized(typ types.Type) (res bool) {
+// Has reports whether the specified type has a free type parameter.
+func (w *Free) Has(typ types.Type) (res bool) {
+
 	// detect cycles
 	if x, ok := w.seen[typ]; ok {
 		return x
 	}
+	if w.seen == nil {
+		w.seen = make(map[types.Type]bool)
+	}
 	w.seen[typ] = false
 	defer func() {
 		w.seen[typ] = res
@@ -39,26 +38,29 @@ func (w *tpWalker) isParameterized(typ types.Type) (res bool) {
 	case nil, *types.Basic: // TODO(gri) should nil be handled here?
 		break
 
+	case *aliases.Alias:
+		return w.Has(aliases.Unalias(t))
+
 	case *types.Array:
-		return w.isParameterized(t.Elem())
+		return w.Has(t.Elem())
 
 	case *types.Slice:
-		return w.isParameterized(t.Elem())
+		return w.Has(t.Elem())
 
 	case *types.Struct:
 		for i, n := 0, t.NumFields(); i < n; i++ {
-			if w.isParameterized(t.Field(i).Type()) {
+			if w.Has(t.Field(i).Type()) {
 				return true
 			}
 		}
 
 	case *types.Pointer:
-		return w.isParameterized(t.Elem())
+		return w.Has(t.Elem())
 
 	case *types.Tuple:
 		n := t.Len()
 		for i := 0; i < n; i++ {
-			if w.isParameterized(t.At(i).Type()) {
+			if w.Has(t.At(i).Type()) {
 				return true
 			}
 		}
@@ -71,41 +73,42 @@ func (w *tpWalker) isParameterized(typ types.Type) (res bool) {
 		// Similarly, the receiver of a method may declare (rather than
 		// use) type parameters, we don't care about those either.
 		// Thus, we only need to look at the input and result parameters.
-		return w.isParameterized(t.Params()) || w.isParameterized(t.Results())
+		return w.Has(t.Params()) || w.Has(t.Results())
 
 	case *types.Interface:
 		for i, n := 0, t.NumMethods(); i < n; i++ {
-			if w.isParameterized(t.Method(i).Type()) {
+			if w.Has(t.Method(i).Type()) {
 				return true
 			}
 		}
-		terms, err := typeparams.InterfaceTermSet(t)
+		terms, err := InterfaceTermSet(t)
 		if err != nil {
 			panic(err)
 		}
 		for _, term := range terms {
-			if w.isParameterized(term.Type()) {
+			if w.Has(term.Type()) {
 				return true
 			}
 		}
 
 	case *types.Map:
-		return w.isParameterized(t.Key()) || w.isParameterized(t.Elem())
+		return w.Has(t.Key()) || w.Has(t.Elem())
 
 	case *types.Chan:
-		return w.isParameterized(t.Elem())
-
-	case *aliases.Alias:
-		// TODO(adonovan): think about generic aliases.
-		return w.isParameterized(aliases.Unalias(t))
+		return w.Has(t.Elem())
 
 	case *types.Named:
-		list := t.TypeArgs()
-		for i, n := 0, list.Len(); i < n; i++ {
-			if w.isParameterized(list.At(i)) {
+		args := t.TypeArgs()
+		// TODO(taking): this does not match go/types/infer.go. Check with rfindley.
+		if params := t.TypeParams(); params.Len() > args.Len() {
+			return true
+		}
+		for i, n := 0, args.Len(); i < n; i++ {
+			if w.Has(args.At(i)) {
 				return true
 			}
 		}
+		return w.Has(t.Underlying()) // recurse for types local to parameterized functions
 
 	case *types.TypeParam:
 		return true
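
A small sketch (not part of this change) of the new Free API: the zero value is usable directly, and the memo is reused across Has calls. The type parameter T below is constructed ad hoc purely for illustration.

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/internal/typeparams" // internal to x/tools; shown for illustration
)

func main() {
	var free typeparams.Free // zero value is ready for use; memo shared across calls

	// Build a standalone type parameter T (0 is token.NoPos; constraint left nil).
	tparam := types.NewTypeParam(types.NewTypeName(0, nil, "T", nil), nil)

	fmt.Println(free.Has(types.NewSlice(tparam)))                // true: []T mentions the type parameter T
	fmt.Println(free.Has(types.NewSlice(types.Typ[types.Int]))) // false: []int has no free type parameters
}
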
diff --git a/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go b/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go
index 07484073a57d16215f1832820eefd843752b1061..834e05381ceaaf48db27dee9e2211a40ab81a7d2 100644
--- a/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go
+++ b/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go
@@ -167,7 +167,7 @@ const (
 	UntypedNilUse
 
 	// WrongAssignCount occurs when the number of values on the right-hand side
-	// of an assignment or or initialization expression does not match the number
+	// of an assignment or initialization expression does not match the number
 	// of variables on the left-hand side.
 	//
 	// Example:
@@ -1449,10 +1449,10 @@ const (
 	NotAGenericType
 
 	// WrongTypeArgCount occurs when a type or function is instantiated with an
-	// incorrent number of type arguments, including when a generic type or
+	// incorrect number of type arguments, including when a generic type or
 	// function is used without instantiation.
 	//
-	// Errors inolving failed type inference are assigned other error codes.
+	// Errors involving failed type inference are assigned other error codes.
 	//
 	// Example:
 	//  type T[p any] int
diff --git a/vendor/golang.org/x/tools/internal/typesinternal/toonew.go b/vendor/golang.org/x/tools/internal/typesinternal/toonew.go
new file mode 100644
index 0000000000000000000000000000000000000000..cc86487eaa0a0750f838db039c0a4ae2fc997e34
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typesinternal/toonew.go
@@ -0,0 +1,89 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typesinternal
+
+import (
+	"go/types"
+
+	"golang.org/x/tools/internal/stdlib"
+	"golang.org/x/tools/internal/versions"
+)
+
+// TooNewStdSymbols computes the set of package-level symbols
+// exported by pkg that are not available at the specified version.
+// The result maps each symbol to its minimum version.
+//
+// The pkg is allowed to contain type errors.
+func TooNewStdSymbols(pkg *types.Package, version string) map[types.Object]string {
+	disallowed := make(map[types.Object]string)
+
+	// Pass 1: package-level symbols.
+	symbols := stdlib.PackageSymbols[pkg.Path()]
+	for _, sym := range symbols {
+		symver := sym.Version.String()
+		if versions.Before(version, symver) {
+			switch sym.Kind {
+			case stdlib.Func, stdlib.Var, stdlib.Const, stdlib.Type:
+				disallowed[pkg.Scope().Lookup(sym.Name)] = symver
+			}
+		}
+	}
+
+	// Pass 2: fields and methods.
+	//
+	// We allow fields and methods if their associated type is
+	// disallowed, as otherwise we would report false positives
+	// for compatibility shims. Consider:
+	//
+	//   //go:build go1.22
+	//   type T struct { F std.Real } // correct new API
+	//
+	//   //go:build !go1.22
+	//   type T struct { F fake } // shim
+	//   type fake struct { ... }
+	//   func (fake) M () {}
+	//
+	// These alternative declarations of T use either the std.Real
+	// type, introduced in go1.22, or a fake type, for the field
+	// F. (The fakery could be arbitrarily deep, involving more
+	// nested fields and methods than are shown here.) Clients
+	// that use the compatibility shim T will compile with any
+	// version of go, whether older or newer than go1.22, but only
+	// the newer version will use the std.Real implementation.
+	//
+	// Now consider a reference to method M in new(T).F.M() in a
+	// module that requires a minimum of go1.21. The analysis may
+	// occur using a version of Go higher than 1.21, selecting the
+	// first version of T, so the method M is Real.M. This would
+	// spuriously cause the analyzer to report a reference to a
+	// too-new symbol even though this expression compiles just
+	// fine (with the fake implementation) using go1.21.
+	for _, sym := range symbols {
+		symVersion := sym.Version.String()
+		if !versions.Before(version, symVersion) {
+			continue // allowed
+		}
+
+		var obj types.Object
+		switch sym.Kind {
+		case stdlib.Field:
+			typename, name := sym.SplitField()
+			if t := pkg.Scope().Lookup(typename); t != nil && disallowed[t] == "" {
+				obj, _, _ = types.LookupFieldOrMethod(t.Type(), false, pkg, name)
+			}
+
+		case stdlib.Method:
+			ptr, recvname, name := sym.SplitMethod()
+			if t := pkg.Scope().Lookup(recvname); t != nil && disallowed[t] == "" {
+				obj, _, _ = types.LookupFieldOrMethod(t.Type(), ptr, pkg, name)
+			}
+		}
+		if obj != nil {
+			disallowed[obj] = symVersion
+		}
+	}
+
+	return disallowed
+}
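
A hedged sketch (not part of this change) of how the new TooNewStdSymbols might be called; the ReportTooNew helper and the "go1.20" version string are illustrative placeholders, not anything defined in this diff.

package example // illustrative sketch only

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/internal/typesinternal" // internal to x/tools; shown for illustration
)

// ReportTooNew prints every symbol exported by the type-checked
// standard-library package pkg (e.g. "time") that first appeared after the
// given effective Go version, such as "go1.20".
func ReportTooNew(pkg *types.Package, version string) {
	for obj, minVersion := range typesinternal.TooNewStdSymbols(pkg, version) {
		fmt.Printf("%s requires %s or newer\n", obj.Name(), minVersion)
	}
}
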
diff --git a/vendor/golang.org/x/tools/internal/typesinternal/types.go b/vendor/golang.org/x/tools/internal/typesinternal/types.go
index ce7d4351b2203af122e6fc05506203736807736a..7c77c2fbc038f8a7112df69c3c4092ca3913a368 100644
--- a/vendor/golang.org/x/tools/internal/typesinternal/types.go
+++ b/vendor/golang.org/x/tools/internal/typesinternal/types.go
@@ -48,5 +48,3 @@ func ReadGo116ErrorData(err types.Error) (code ErrorCode, start, end token.Pos,
 	}
 	return ErrorCode(data[0]), token.Pos(data[1]), token.Pos(data[2]), true
 }
-
-var SetGoVersion = func(conf *types.Config, version string) bool { return false }
diff --git a/vendor/golang.org/x/tools/internal/typesinternal/types_118.go b/vendor/golang.org/x/tools/internal/typesinternal/types_118.go
deleted file mode 100644
index ef7ea290c0bdedf405fa8da85b59e02bc295dbd6..0000000000000000000000000000000000000000
--- a/vendor/golang.org/x/tools/internal/typesinternal/types_118.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package typesinternal
-
-import (
-	"go/types"
-)
-
-func init() {
-	SetGoVersion = func(conf *types.Config, version string) bool {
-		conf.GoVersion = version
-		return true
-	}
-}
diff --git a/vendor/honnef.co/go/tools/go/ir/UPSTREAM b/vendor/honnef.co/go/tools/go/ir/UPSTREAM
index 757ebfd599df9e1168f285c6ee16ae027c3f14a7..e92b016b39a8d6aa3b9263ac4685f75415bf1091 100644
--- a/vendor/honnef.co/go/tools/go/ir/UPSTREAM
+++ b/vendor/honnef.co/go/tools/go/ir/UPSTREAM
@@ -5,5 +5,5 @@ The changes are too many to list here, and it is best to consider this package i
 Upstream changes still get applied when they address bugs in portions of code we have inherited.
 
 The last upstream commit we've looked at was:
-915f6209478fe61eb90dbe155a8a1c58655b931f
+e854e0228e2ef1cc6e42bbfde1951925096a1272
 
diff --git a/vendor/honnef.co/go/tools/go/ir/builder.go b/vendor/honnef.co/go/tools/go/ir/builder.go
index 1a77ed0429c0c57770cceb0534522eb16ffe88b3..82ca94ba14282e06fe83695c11ef42158d022da6 100644
--- a/vendor/honnef.co/go/tools/go/ir/builder.go
+++ b/vendor/honnef.co/go/tools/go/ir/builder.go
@@ -353,11 +353,16 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) (RET lvalue) {
 		}
 		wantAddr := true
 		v := b.receiver(fn, e.X, wantAddr, escaping, sel, e)
-		last := len(sel.Index()) - 1
-		return &address{
-			addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel),
-			expr: e.Sel,
+		index := sel.Index()[len(sel.Index())-1]
+		vut := typeutil.CoreType(deref(v.Type())).Underlying().(*types.Struct)
+		fld := vut.Field(index)
+		// Due to the two phases of resolving AssignStmt, a panic from x.f = p()
+		// when x is nil is required to come after the side-effects of
+		// evaluating x and p().
+		emit := func(fn *Function) Value {
+			return emitFieldSelection(fn, v, index, true, e.Sel)
 		}
+		return &lazyAddress{addr: emit, t: fld.Type(), expr: e.Sel}
 
 	case *ast.IndexExpr:
 		var x Value
@@ -411,12 +416,19 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) (RET lvalue) {
 			panic("unexpected container type in IndexExpr: " + t.String())
 		}
 
-		v := &IndexAddr{
-			X:     x,
-			Index: b.expr(fn, e.Index),
+		// Due to the two phases of resolving AssignStmt, a panic from x[i] = p()
+		// when x is nil or i is out-of-bounds is required to come after the
+		// side-effects of evaluating x, i and p().
+		index := b.expr(fn, e.Index)
+		emit := func(fn *Function) Value {
+			v := &IndexAddr{
+				X:     x,
+				Index: index,
+			}
+			v.setType(et)
+			return fn.emit(v, e)
 		}
-		v.setType(et)
-		return &address{addr: fn.emit(v, e), expr: e}
+		return &lazyAddress{addr: emit, t: deref(et), expr: e}
 
 	case *ast.StarExpr:
 		return &address{addr: b.expr(fn, e.X), expr: e}
@@ -680,12 +692,12 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
 		}
 
 		var low, high, max Value
-		if e.High != nil {
-			high = b.expr(fn, e.High)
-		}
 		if e.Low != nil {
 			low = b.expr(fn, e.Low)
 		}
+		if e.High != nil {
+			high = b.expr(fn, e.High)
+		}
 		if e.Slice3 {
 			max = b.expr(fn, e.Max)
 		}
@@ -1027,8 +1039,7 @@ func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
 
 // assignOp emits to fn code to perform loc <op>= val.
 func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, source ast.Node) {
-	oldv := loc.load(fn, source)
-	loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type(), source), loc.typ(), source), source)
+	loc.store(fn, emitArith(fn, op, loc.load(fn, source), val, loc.typ(), source), source)
 }
 
 // localValueSpec emits to fn code to define all of the vars in the
diff --git a/vendor/honnef.co/go/tools/go/ir/lift.go b/vendor/honnef.co/go/tools/go/ir/lift.go
index 8ac8330dc2a62edd84f4da83a2025860b6ab9618..1a4cd3026d7a758b30ac058ab924bfc09c2e582b 100644
--- a/vendor/honnef.co/go/tools/go/ir/lift.go
+++ b/vendor/honnef.co/go/tools/go/ir/lift.go
@@ -970,7 +970,24 @@ func liftable(alloc *Alloc, instructions BlockMap[liftInstructions]) bool {
 	for i := range blocks {
 		// Update firstUnliftable to be one after lastLiftable. We do this to include the unliftable's preceding
 		// DebugRefs in the renaming.
-		blocks[i].firstUnliftable = blocks[i].lastLiftable + 1
+		if blocks[i].lastLiftable == -1 && !blocks[i].storeInPreds {
+			// There are no liftable instructions (for this alloc) in this block. Set firstUnliftable to the
+			// first non-head instruction to avoid inserting the store before phi instructions, which would
+			// fail validation.
+			first := -1
+		instrLoop:
+			for i, instr := range fn.Blocks[i].Instrs {
+				switch instr.(type) {
+				case *Phi, *Sigma:
+				default:
+					first = i
+					break instrLoop
+				}
+			}
+			blocks[i].firstUnliftable = first
+		} else {
+			blocks[i].firstUnliftable = blocks[i].lastLiftable + 1
+		}
 	}
 
 	// If a block is reachable by a (partially) unliftable block, then the entirety of the block is unliftable. In that
diff --git a/vendor/honnef.co/go/tools/go/ir/lvalue.go b/vendor/honnef.co/go/tools/go/ir/lvalue.go
index 119eed6c3b23d5b7140422f2b506a6df92d4c945..86eb4a5d126a9ffe7adc1750ab6dae86925325b2 100644
--- a/vendor/honnef.co/go/tools/go/ir/lvalue.go
+++ b/vendor/honnef.co/go/tools/go/ir/lvalue.go
@@ -114,6 +114,40 @@ func (e *element) typ() types.Type {
 	return e.t
 }
 
+// A lazyAddress is an lvalue whose address is the result of an instruction.
+// These work like an *address except a new address.address() Value
+// is created on each load, store and address call.
+// A lazyAddress can be used to control when a side effect (nil pointer
+// dereference, index out of bounds) of using a location happens.
+type lazyAddress struct {
+	addr func(fn *Function) Value // emit to fn the computation of the address
+	t    types.Type               // type of the location
+	expr ast.Expr                 // source syntax of the value (not address) [debug mode]
+}
+
+func (l *lazyAddress) load(fn *Function, source ast.Node) Value {
+	load := emitLoad(fn, l.addr(fn), source)
+	return load
+}
+
+func (l *lazyAddress) store(fn *Function, v Value, source ast.Node) {
+	store := emitStore(fn, l.addr(fn), v, source)
+	if l.expr != nil {
+		// store.Val is v, converted for assignability.
+		emitDebugRef(fn, l.expr, store.Val, false)
+	}
+}
+
+func (l *lazyAddress) address(fn *Function) Value {
+	addr := l.addr(fn)
+	if l.expr != nil {
+		emitDebugRef(fn, l.expr, addr, true)
+	}
+	return addr
+}
+
+func (l *lazyAddress) typ() types.Type { return l.t }
+
 // A blank is a dummy variable whose name is "_".
 // It is not reified: loads are illegal and stores are ignored.
 type blank struct{}
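
For context on why the builder above now defers address computation through lazyAddress: the Go assignment rule it models is observable in ordinary code, where the right-hand side's side effects happen before the out-of-range (or nil-dereference) panic on the left-hand side. A minimal, runnable demonstration (not part of this change):

package main

import "fmt"

func p() int {
	fmt.Println("p() ran")
	return 42
}

func main() {
	defer func() { fmt.Println("recovered:", recover()) }()
	var xs []int // nil slice: xs[0] is out of range
	xs[0] = p()  // prints "p() ran" first, then panics with index out of range
}
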
diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/util.go b/vendor/honnef.co/go/tools/go/types/typeutil/util.go
index b0aca16bdb0ee3806e7f803fdc9e1c357720efac..3a2ad973bbcbf4239af654aa26ab1d14d63d1fec 100644
--- a/vendor/honnef.co/go/tools/go/types/typeutil/util.go
+++ b/vendor/honnef.co/go/tools/go/types/typeutil/util.go
@@ -122,6 +122,8 @@ func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Fiel
 		if field.Anonymous() {
 			if s, ok := Dereference(field.Type()).Underlying().(*types.Struct); ok {
 				out = append(out, flattenFields(s, np, seen)...)
+			} else {
+				out = append(out, Field{field, tag, np})
 			}
 		} else {
 			out = append(out, Field{field, tag, np})
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 3cacf5217317a37779513d10ea35c63663a4ef17..616201649cfef616cf9e6e3a32cabf6e3d6c9d50 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -4,18 +4,25 @@
 # 4d63.com/gochecknoglobals v0.2.1
 ## explicit; go 1.15
 4d63.com/gochecknoglobals/checknoglobals
-# github.com/4meepo/tagalign v1.3.2
+# github.com/4meepo/tagalign v1.3.4
 ## explicit; go 1.19
 github.com/4meepo/tagalign
-# github.com/Abirdcfly/dupword v0.0.12
+# github.com/Abirdcfly/dupword v0.0.14
 ## explicit; go 1.20
 github.com/Abirdcfly/dupword
-# github.com/Antonboom/errname v0.1.12
+# github.com/Antonboom/errname v0.1.13
 ## explicit; go 1.20
 github.com/Antonboom/errname/pkg/analyzer
-# github.com/Antonboom/nilnil v0.1.7
+# github.com/Antonboom/nilnil v0.1.9
 ## explicit; go 1.20
 github.com/Antonboom/nilnil/pkg/analyzer
+# github.com/Antonboom/testifylint v1.2.0
+## explicit; go 1.20
+github.com/Antonboom/testifylint/analyzer
+github.com/Antonboom/testifylint/internal/analysisutil
+github.com/Antonboom/testifylint/internal/checkers
+github.com/Antonboom/testifylint/internal/config
+github.com/Antonboom/testifylint/internal/testify
 # github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161
 ## explicit; go 1.16
 github.com/Azure/go-ansiterm
@@ -24,26 +31,33 @@ github.com/Azure/go-ansiterm/winterm
 ## explicit; go 1.16
 github.com/BurntSushi/toml
 github.com/BurntSushi/toml/internal
+# github.com/Crocmagnon/fatcontext v0.2.2
+## explicit; go 1.21
+github.com/Crocmagnon/fatcontext/pkg/analyzer
 # github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24
 ## explicit; go 1.13
 github.com/Djarvur/go-err113
-# github.com/GaijinEntertainment/go-exhaustruct/v3 v3.1.0
-## explicit; go 1.20
+# github.com/GaijinEntertainment/go-exhaustruct/v3 v3.2.0
+## explicit; go 1.21
 github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer
-github.com/GaijinEntertainment/go-exhaustruct/v3/internal/fields
+github.com/GaijinEntertainment/go-exhaustruct/v3/internal/comment
 github.com/GaijinEntertainment/go-exhaustruct/v3/internal/pattern
+github.com/GaijinEntertainment/go-exhaustruct/v3/internal/structure
 # github.com/MakeNowJust/heredoc v1.0.0
 ## explicit; go 1.12
 github.com/MakeNowJust/heredoc
-# github.com/Masterminds/semver v1.5.0
-## explicit
-github.com/Masterminds/semver
-# github.com/OpenPeeDeeP/depguard/v2 v2.1.0
+# github.com/Masterminds/semver/v3 v3.2.1
+## explicit; go 1.18
+github.com/Masterminds/semver/v3
+# github.com/OpenPeeDeeP/depguard/v2 v2.2.0
 ## explicit; go 1.20
 github.com/OpenPeeDeeP/depguard/v2
 github.com/OpenPeeDeeP/depguard/v2/internal/utils
-# github.com/alexkohler/nakedret/v2 v2.0.2
+# github.com/alecthomas/go-check-sumtype v0.1.4
 ## explicit; go 1.18
+github.com/alecthomas/go-check-sumtype
+# github.com/alexkohler/nakedret/v2 v2.0.4
+## explicit; go 1.21
 github.com/alexkohler/nakedret/v2
 # github.com/alexkohler/prealloc v1.0.0
 ## explicit; go 1.15
@@ -72,25 +86,28 @@ github.com/blang/semver/v4
 # github.com/blizzy78/varnamelen v0.8.0
 ## explicit; go 1.16
 github.com/blizzy78/varnamelen
-# github.com/bombsimon/wsl/v3 v3.4.0
-## explicit; go 1.19
-github.com/bombsimon/wsl/v3
-# github.com/breml/bidichk v0.2.4
-## explicit; go 1.19
+# github.com/bombsimon/wsl/v4 v4.2.1
+## explicit; go 1.21
+github.com/bombsimon/wsl/v4
+# github.com/breml/bidichk v0.2.7
+## explicit; go 1.20
 github.com/breml/bidichk/pkg/bidichk
-# github.com/breml/errchkjson v0.3.1
-## explicit; go 1.17
+# github.com/breml/errchkjson v0.3.6
+## explicit; go 1.20
 github.com/breml/errchkjson
-# github.com/butuzov/ireturn v0.2.0
-## explicit; go 1.15
+# github.com/butuzov/ireturn v0.3.0
+## explicit; go 1.18
 github.com/butuzov/ireturn/analyzer
 github.com/butuzov/ireturn/analyzer/internal/config
 github.com/butuzov/ireturn/analyzer/internal/types
-# github.com/butuzov/mirror v1.1.0
+# github.com/butuzov/mirror v1.2.0
 ## explicit; go 1.19
 github.com/butuzov/mirror
 github.com/butuzov/mirror/internal/checker
-# github.com/ccojocar/zxcvbn-go v1.0.1
+# github.com/catenacyber/perfsprint v0.7.1
+## explicit; go 1.20
+github.com/catenacyber/perfsprint/analyzer
+# github.com/ccojocar/zxcvbn-go v1.0.2
 ## explicit; go 1.20
 github.com/ccojocar/zxcvbn-go
 github.com/ccojocar/zxcvbn-go/adjacency
@@ -113,15 +130,18 @@ github.com/chai2010/gettext-go/po
 # github.com/charithe/durationcheck v0.0.10
 ## explicit; go 1.14
 github.com/charithe/durationcheck
-# github.com/chavacava/garif v0.0.0-20230227094218-b8c73b2037b8
+# github.com/chavacava/garif v0.1.0
 ## explicit; go 1.16
 github.com/chavacava/garif
+# github.com/ckaznocha/intrange v0.1.2
+## explicit; go 1.21
+github.com/ckaznocha/intrange
 # github.com/curioswitch/go-reassign v0.2.0
 ## explicit; go 1.18
 github.com/curioswitch/go-reassign
 github.com/curioswitch/go-reassign/internal/analyzer
-# github.com/daixiang0/gci v0.11.0
-## explicit; go 1.18
+# github.com/daixiang0/gci v0.13.4
+## explicit; go 1.21
 github.com/daixiang0/gci/pkg/config
 github.com/daixiang0/gci/pkg/format
 github.com/daixiang0/gci/pkg/gci
@@ -134,17 +154,14 @@ github.com/daixiang0/gci/pkg/utils
 # github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
 ## explicit
 github.com/davecgh/go-spew/spew
-# github.com/denis-tingaikin/go-header v0.4.3
-## explicit; go 1.17
+# github.com/denis-tingaikin/go-header v0.5.0
+## explicit; go 1.21
 github.com/denis-tingaikin/go-header
 # github.com/emicklei/go-restful/v3 v3.12.0
 ## explicit; go 1.13
 github.com/emicklei/go-restful/v3
 github.com/emicklei/go-restful/v3/log
-# github.com/esimonov/ifshort v1.0.4
-## explicit; go 1.17
-github.com/esimonov/ifshort/pkg/analyzer
-# github.com/ettle/strcase v0.1.1
+# github.com/ettle/strcase v0.2.0
 ## explicit; go 1.12
 github.com/ettle/strcase
 # github.com/evanphx/json-patch v5.6.0+incompatible
@@ -157,13 +174,13 @@ github.com/evanphx/json-patch/v5/internal/json
 # github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d
 ## explicit
 github.com/exponent-io/jsonpath
-# github.com/fatih/color v1.16.0
+# github.com/fatih/color v1.17.0
 ## explicit; go 1.17
 github.com/fatih/color
 # github.com/fatih/structtag v1.2.0
 ## explicit; go 1.12
 github.com/fatih/structtag
-# github.com/firefart/nonamedreturns v1.0.4
+# github.com/firefart/nonamedreturns v1.0.5
 ## explicit; go 1.18
 github.com/firefart/nonamedreturns/analyzer
 # github.com/fsnotify/fsnotify v1.7.0
@@ -172,7 +189,10 @@ github.com/fsnotify/fsnotify
 # github.com/fzipp/gocyclo v0.6.0
 ## explicit; go 1.18
 github.com/fzipp/gocyclo
-# github.com/go-critic/go-critic v0.9.0
+# github.com/ghostiam/protogetter v0.3.6
+## explicit; go 1.19
+github.com/ghostiam/protogetter
+# github.com/go-critic/go-critic v0.11.3
 ## explicit; go 1.18
 github.com/go-critic/go-critic/checkers
 github.com/go-critic/go-critic/checkers/internal/astwalk
@@ -200,17 +220,20 @@ github.com/go-openapi/jsonreference/internal
 # github.com/go-openapi/swag v0.23.0
 ## explicit; go 1.20
 github.com/go-openapi/swag
-# github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572
-## explicit; go 1.13
-github.com/go-task/slim-sprig
+# github.com/go-task/slim-sprig/v3 v3.0.0
+## explicit; go 1.20
+github.com/go-task/slim-sprig/v3
+# github.com/go-test/deep v1.1.0
+## explicit; go 1.16
+github.com/go-test/deep
 # github.com/go-toolsmith/astcast v1.1.0
 ## explicit; go 1.16
 github.com/go-toolsmith/astcast
 # github.com/go-toolsmith/astcopy v1.1.0
 ## explicit; go 1.16
 github.com/go-toolsmith/astcopy
-# github.com/go-toolsmith/astequal v1.1.0
-## explicit; go 1.16
+# github.com/go-toolsmith/astequal v1.2.0
+## explicit; go 1.18
 github.com/go-toolsmith/astequal
 # github.com/go-toolsmith/astfmt v1.1.0
 ## explicit; go 1.16
@@ -224,6 +247,9 @@ github.com/go-toolsmith/strparse
 # github.com/go-toolsmith/typep v1.1.0
 ## explicit; go 1.16
 github.com/go-toolsmith/typep
+# github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1
+## explicit; go 1.18
+github.com/go-viper/mapstructure/v2
 # github.com/go-xmlfmt/xmlfmt v1.1.2
 ## explicit
 github.com/go-xmlfmt/xmlfmt
@@ -254,10 +280,6 @@ github.com/golang/protobuf/ptypes
 github.com/golang/protobuf/ptypes/any
 github.com/golang/protobuf/ptypes/duration
 github.com/golang/protobuf/ptypes/timestamp
-# github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2
-## explicit
-github.com/golangci/check/cmd/structcheck
-github.com/golangci/check/cmd/varcheck
 # github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a
 ## explicit
 github.com/golangci/dupl
@@ -266,17 +288,13 @@ github.com/golangci/dupl/printer
 github.com/golangci/dupl/suffixtree
 github.com/golangci/dupl/syntax
 github.com/golangci/dupl/syntax/golang
-# github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe
-## explicit; go 1.17
-github.com/golangci/go-misc/deadcode
-# github.com/golangci/gofmt v0.0.0-20220901101216-f2edd75033f2
-## explicit; go 1.18
+# github.com/golangci/gofmt v0.0.0-20231018234816-f50ced29576e
+## explicit; go 1.20
 github.com/golangci/gofmt/gofmt
 github.com/golangci/gofmt/gofmt/internal/diff
-github.com/golangci/gofmt/gofmt/internal/execabs
 github.com/golangci/gofmt/goimports
-# github.com/golangci/golangci-lint v1.54.2
-## explicit; go 1.20
+# github.com/golangci/golangci-lint v1.58.2
+## explicit; go 1.21
 github.com/golangci/golangci-lint/cmd/golangci-lint
 github.com/golangci/golangci-lint/internal/cache
 github.com/golangci/golangci-lint/internal/errorutil
@@ -284,38 +302,147 @@ github.com/golangci/golangci-lint/internal/pkgcache
 github.com/golangci/golangci-lint/internal/renameio
 github.com/golangci/golangci-lint/internal/robustio
 github.com/golangci/golangci-lint/pkg/commands
+github.com/golangci/golangci-lint/pkg/commands/internal
 github.com/golangci/golangci-lint/pkg/config
 github.com/golangci/golangci-lint/pkg/exitcodes
 github.com/golangci/golangci-lint/pkg/fsutils
+github.com/golangci/golangci-lint/pkg/goanalysis
+github.com/golangci/golangci-lint/pkg/goanalysis/load
+github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors
 github.com/golangci/golangci-lint/pkg/golinters
-github.com/golangci/golangci-lint/pkg/golinters/goanalysis
-github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load
+github.com/golangci/golangci-lint/pkg/golinters/asasalint
+github.com/golangci/golangci-lint/pkg/golinters/asciicheck
+github.com/golangci/golangci-lint/pkg/golinters/bidichk
+github.com/golangci/golangci-lint/pkg/golinters/bodyclose
+github.com/golangci/golangci-lint/pkg/golinters/canonicalheader
+github.com/golangci/golangci-lint/pkg/golinters/containedctx
+github.com/golangci/golangci-lint/pkg/golinters/contextcheck
+github.com/golangci/golangci-lint/pkg/golinters/copyloopvar
+github.com/golangci/golangci-lint/pkg/golinters/cyclop
+github.com/golangci/golangci-lint/pkg/golinters/decorder
+github.com/golangci/golangci-lint/pkg/golinters/depguard
+github.com/golangci/golangci-lint/pkg/golinters/dogsled
+github.com/golangci/golangci-lint/pkg/golinters/dupl
+github.com/golangci/golangci-lint/pkg/golinters/dupword
+github.com/golangci/golangci-lint/pkg/golinters/durationcheck
+github.com/golangci/golangci-lint/pkg/golinters/err113
+github.com/golangci/golangci-lint/pkg/golinters/errcheck
+github.com/golangci/golangci-lint/pkg/golinters/errchkjson
+github.com/golangci/golangci-lint/pkg/golinters/errname
+github.com/golangci/golangci-lint/pkg/golinters/errorlint
+github.com/golangci/golangci-lint/pkg/golinters/execinquery
+github.com/golangci/golangci-lint/pkg/golinters/exhaustive
+github.com/golangci/golangci-lint/pkg/golinters/exhaustruct
+github.com/golangci/golangci-lint/pkg/golinters/exportloopref
+github.com/golangci/golangci-lint/pkg/golinters/fatcontext
+github.com/golangci/golangci-lint/pkg/golinters/forbidigo
+github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert
+github.com/golangci/golangci-lint/pkg/golinters/funlen
+github.com/golangci/golangci-lint/pkg/golinters/gci
+github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter
+github.com/golangci/golangci-lint/pkg/golinters/gocheckcompilerdirectives
+github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals
+github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits
+github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype
+github.com/golangci/golangci-lint/pkg/golinters/gocognit
+github.com/golangci/golangci-lint/pkg/golinters/goconst
+github.com/golangci/golangci-lint/pkg/golinters/gocritic
+github.com/golangci/golangci-lint/pkg/golinters/gocyclo
+github.com/golangci/golangci-lint/pkg/golinters/godot
+github.com/golangci/golangci-lint/pkg/golinters/godox
+github.com/golangci/golangci-lint/pkg/golinters/gofmt
+github.com/golangci/golangci-lint/pkg/golinters/gofumpt
+github.com/golangci/golangci-lint/pkg/golinters/goheader
+github.com/golangci/golangci-lint/pkg/golinters/goimports
+github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives
+github.com/golangci/golangci-lint/pkg/golinters/gomodguard
+github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname
+github.com/golangci/golangci-lint/pkg/golinters/gosec
+github.com/golangci/golangci-lint/pkg/golinters/gosimple
+github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan
+github.com/golangci/golangci-lint/pkg/golinters/govet
+github.com/golangci/golangci-lint/pkg/golinters/grouper
+github.com/golangci/golangci-lint/pkg/golinters/importas
+github.com/golangci/golangci-lint/pkg/golinters/inamedparam
+github.com/golangci/golangci-lint/pkg/golinters/ineffassign
+github.com/golangci/golangci-lint/pkg/golinters/interfacebloat
+github.com/golangci/golangci-lint/pkg/golinters/internal
+github.com/golangci/golangci-lint/pkg/golinters/intrange
+github.com/golangci/golangci-lint/pkg/golinters/ireturn
+github.com/golangci/golangci-lint/pkg/golinters/lll
+github.com/golangci/golangci-lint/pkg/golinters/loggercheck
+github.com/golangci/golangci-lint/pkg/golinters/maintidx
+github.com/golangci/golangci-lint/pkg/golinters/makezero
+github.com/golangci/golangci-lint/pkg/golinters/mirror
+github.com/golangci/golangci-lint/pkg/golinters/misspell
+github.com/golangci/golangci-lint/pkg/golinters/mnd
+github.com/golangci/golangci-lint/pkg/golinters/musttag
+github.com/golangci/golangci-lint/pkg/golinters/nakedret
+github.com/golangci/golangci-lint/pkg/golinters/nestif
+github.com/golangci/golangci-lint/pkg/golinters/nilerr
+github.com/golangci/golangci-lint/pkg/golinters/nilnil
+github.com/golangci/golangci-lint/pkg/golinters/nlreturn
+github.com/golangci/golangci-lint/pkg/golinters/noctx
 github.com/golangci/golangci-lint/pkg/golinters/nolintlint
+github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal
+github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns
+github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport
+github.com/golangci/golangci-lint/pkg/golinters/paralleltest
+github.com/golangci/golangci-lint/pkg/golinters/perfsprint
+github.com/golangci/golangci-lint/pkg/golinters/prealloc
+github.com/golangci/golangci-lint/pkg/golinters/predeclared
+github.com/golangci/golangci-lint/pkg/golinters/promlinter
+github.com/golangci/golangci-lint/pkg/golinters/protogetter
+github.com/golangci/golangci-lint/pkg/golinters/reassign
+github.com/golangci/golangci-lint/pkg/golinters/revive
+github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck
+github.com/golangci/golangci-lint/pkg/golinters/sloglint
+github.com/golangci/golangci-lint/pkg/golinters/spancheck
+github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck
+github.com/golangci/golangci-lint/pkg/golinters/staticcheck
+github.com/golangci/golangci-lint/pkg/golinters/stylecheck
+github.com/golangci/golangci-lint/pkg/golinters/tagalign
+github.com/golangci/golangci-lint/pkg/golinters/tagliatelle
+github.com/golangci/golangci-lint/pkg/golinters/tenv
+github.com/golangci/golangci-lint/pkg/golinters/testableexamples
+github.com/golangci/golangci-lint/pkg/golinters/testifylint
+github.com/golangci/golangci-lint/pkg/golinters/testpackage
+github.com/golangci/golangci-lint/pkg/golinters/thelper
+github.com/golangci/golangci-lint/pkg/golinters/tparallel
+github.com/golangci/golangci-lint/pkg/golinters/unconvert
+github.com/golangci/golangci-lint/pkg/golinters/unparam
+github.com/golangci/golangci-lint/pkg/golinters/unused
+github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars
+github.com/golangci/golangci-lint/pkg/golinters/varnamelen
+github.com/golangci/golangci-lint/pkg/golinters/wastedassign
+github.com/golangci/golangci-lint/pkg/golinters/whitespace
+github.com/golangci/golangci-lint/pkg/golinters/wrapcheck
+github.com/golangci/golangci-lint/pkg/golinters/wsl
+github.com/golangci/golangci-lint/pkg/golinters/zerologlint
 github.com/golangci/golangci-lint/pkg/goutil
 github.com/golangci/golangci-lint/pkg/lint
 github.com/golangci/golangci-lint/pkg/lint/linter
 github.com/golangci/golangci-lint/pkg/lint/lintersdb
 github.com/golangci/golangci-lint/pkg/logutils
-github.com/golangci/golangci-lint/pkg/packages
 github.com/golangci/golangci-lint/pkg/printers
 github.com/golangci/golangci-lint/pkg/report
 github.com/golangci/golangci-lint/pkg/result
 github.com/golangci/golangci-lint/pkg/result/processors
 github.com/golangci/golangci-lint/pkg/timeutils
-# github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0
-## explicit
-github.com/golangci/lint-1
-# github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca
-## explicit
-github.com/golangci/maligned
-# github.com/golangci/misspell v0.4.1
-## explicit; go 1.19
+# github.com/golangci/misspell v0.5.1
+## explicit; go 1.21
 github.com/golangci/misspell
-# github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6
-## explicit; go 1.17
+# github.com/golangci/modinfo v0.3.4
+## explicit; go 1.21
+github.com/golangci/modinfo
+# github.com/golangci/plugin-module-register v0.1.1
+## explicit; go 1.21
+github.com/golangci/plugin-module-register/register
+# github.com/golangci/revgrep v0.5.3
+## explicit; go 1.21
 github.com/golangci/revgrep
-# github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4
-## explicit
+# github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed
+## explicit; go 1.20
 github.com/golangci/unconvert
 # github.com/google/btree v1.0.1
 ## explicit; go 1.12
@@ -338,8 +465,8 @@ github.com/google/go-cmp/cmp/internal/value
 ## explicit; go 1.12
 github.com/google/gofuzz
 github.com/google/gofuzz/bytesource
-# github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1
-## explicit; go 1.14
+# github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6
+## explicit; go 1.19
 github.com/google/pprof/profile
 # github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510
 ## explicit; go 1.13
@@ -347,7 +474,7 @@ github.com/google/shlex
 # github.com/google/uuid v1.6.0
 ## explicit
 github.com/google/uuid
-# github.com/gordonklaus/ineffassign v0.0.0-20230610083614-0e73809eb601
+# github.com/gordonklaus/ineffassign v0.1.0
 ## explicit; go 1.14
 github.com/gordonklaus/ineffassign/pkg/ineffassign
 # github.com/gorilla/websocket v1.5.0
@@ -369,12 +496,6 @@ github.com/gostaticanalysis/nilerr
 # github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7
 ## explicit
 github.com/gregjones/httpcache
-# github.com/hashicorp/errwrap v1.0.0
-## explicit
-github.com/hashicorp/errwrap
-# github.com/hashicorp/go-multierror v1.1.1
-## explicit; go 1.13
-github.com/hashicorp/go-multierror
 # github.com/hashicorp/go-version v1.6.0
 ## explicit
 github.com/hashicorp/go-version
@@ -401,7 +522,7 @@ github.com/imdario/mergo
 # github.com/inconshreveable/mousetrap v1.1.0
 ## explicit; go 1.18
 github.com/inconshreveable/mousetrap
-# github.com/jgautheron/goconst v1.5.1
+# github.com/jgautheron/goconst v1.7.1
 ## explicit; go 1.13
 github.com/jgautheron/goconst
 # github.com/jingyugao/rowserrcheck v1.1.1
@@ -410,6 +531,9 @@ github.com/jingyugao/rowserrcheck/passes/rowserr
 # github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af
 ## explicit; go 1.13
 github.com/jirfag/go-printf-func-name/pkg/analyzer
+# github.com/jjti/go-spancheck v0.6.1
+## explicit; go 1.20
+github.com/jjti/go-spancheck
 # github.com/josharian/intern v1.0.0
 ## explicit; go 1.5
 github.com/josharian/intern
@@ -419,33 +543,35 @@ github.com/json-iterator/go
 # github.com/julz/importas v0.1.0
 ## explicit; go 1.15
 github.com/julz/importas
-# github.com/kisielk/errcheck v1.6.3
-## explicit; go 1.14
+# github.com/karamaru-alpha/copyloopvar v1.1.0
+## explicit; go 1.21
+github.com/karamaru-alpha/copyloopvar
+# github.com/kisielk/errcheck v1.7.0
+## explicit; go 1.18
 github.com/kisielk/errcheck/errcheck
-# github.com/kisielk/gotool v1.0.0
-## explicit
-github.com/kisielk/gotool
-github.com/kisielk/gotool/internal/load
-# github.com/kkHAIKE/contextcheck v1.1.4
+# github.com/kkHAIKE/contextcheck v1.1.5
 ## explicit; go 1.20
 github.com/kkHAIKE/contextcheck
 # github.com/kulti/thelper v0.6.3
 ## explicit; go 1.18
 github.com/kulti/thelper/pkg/analyzer
-# github.com/kunwardeep/paralleltest v1.0.8
+# github.com/kunwardeep/paralleltest v1.0.10
 ## explicit; go 1.17
 github.com/kunwardeep/paralleltest/pkg/paralleltest
 # github.com/kyoh86/exportloopref v0.1.11
 ## explicit; go 1.18
 github.com/kyoh86/exportloopref
-# github.com/ldez/gomoddirectives v0.2.3
-## explicit; go 1.16
+# github.com/lasiar/canonicalheader v1.1.1
+## explicit; go 1.21
+github.com/lasiar/canonicalheader
+# github.com/ldez/gomoddirectives v0.2.4
+## explicit; go 1.21
 github.com/ldez/gomoddirectives
 # github.com/ldez/tagliatelle v0.5.0
 ## explicit; go 1.19
 github.com/ldez/tagliatelle
-# github.com/leonklingele/grouper v1.1.1
-## explicit; go 1.17
+# github.com/leonklingele/grouper v1.1.2
+## explicit; go 1.18
 github.com/leonklingele/grouper/pkg/analyzer
 github.com/leonklingele/grouper/pkg/analyzer/consts
 github.com/leonklingele/grouper/pkg/analyzer/globals
@@ -458,6 +584,9 @@ github.com/liggitt/tabwriter
 # github.com/lufeee/execinquery v1.2.1
 ## explicit; go 1.17
 github.com/lufeee/execinquery
+# github.com/macabu/inamedparam v0.1.3
+## explicit; go 1.20
+github.com/macabu/inamedparam
 # github.com/magiconair/properties v1.8.7
 ## explicit; go 1.19
 github.com/magiconair/properties
@@ -487,13 +616,11 @@ github.com/mattn/go-runewidth
 # github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0
 ## explicit; go 1.19
 github.com/matttproud/golang_protobuf_extensions/v2/pbutil
-# github.com/mbilski/exhaustivestruct v1.2.0
-## explicit; go 1.15
-github.com/mbilski/exhaustivestruct/pkg/analyzer
-# github.com/mgechev/revive v1.3.2
-## explicit; go 1.19
+# github.com/mgechev/revive v1.3.7
+## explicit; go 1.20
 github.com/mgechev/revive/config
 github.com/mgechev/revive/formatter
+github.com/mgechev/revive/internal/ifelse
 github.com/mgechev/revive/internal/typeparams
 github.com/mgechev/revive/lint
 github.com/mgechev/revive/rule
@@ -537,24 +664,28 @@ github.com/mxk/go-flowrate/flowrate
 # github.com/nakabonne/nestif v0.3.1
 ## explicit; go 1.15
 github.com/nakabonne/nestif
-# github.com/nishanths/exhaustive v0.11.0
+# github.com/nishanths/exhaustive v0.12.0
 ## explicit; go 1.18
 github.com/nishanths/exhaustive
 # github.com/nishanths/predeclared v0.2.2
 ## explicit; go 1.14
 github.com/nishanths/predeclared/passes/predeclared
-# github.com/nunnatsa/ginkgolinter v0.13.5
-## explicit; go 1.20
+# github.com/nunnatsa/ginkgolinter v0.16.2
+## explicit; go 1.21
 github.com/nunnatsa/ginkgolinter
-github.com/nunnatsa/ginkgolinter/ginkgohandler
-github.com/nunnatsa/ginkgolinter/gomegahandler
-github.com/nunnatsa/ginkgolinter/reverseassertion
+github.com/nunnatsa/ginkgolinter/internal/ginkgohandler
+github.com/nunnatsa/ginkgolinter/internal/gomegahandler
+github.com/nunnatsa/ginkgolinter/internal/interfaces
+github.com/nunnatsa/ginkgolinter/internal/intervals
+github.com/nunnatsa/ginkgolinter/internal/reports
+github.com/nunnatsa/ginkgolinter/internal/reverseassertion
+github.com/nunnatsa/ginkgolinter/linter
 github.com/nunnatsa/ginkgolinter/types
 github.com/nunnatsa/ginkgolinter/version
 # github.com/olekukonko/tablewriter v0.0.5
 ## explicit; go 1.12
 github.com/olekukonko/tablewriter
-# github.com/onsi/ginkgo/v2 v2.17.1
+# github.com/onsi/ginkgo/v2 v2.17.2
 ## explicit; go 1.20
 github.com/onsi/ginkgo/v2
 github.com/onsi/ginkgo/v2/config
@@ -576,7 +707,7 @@ github.com/onsi/ginkgo/v2/internal/parallel_support
 github.com/onsi/ginkgo/v2/internal/testingtproxy
 github.com/onsi/ginkgo/v2/reporters
 github.com/onsi/ginkgo/v2/types
-# github.com/onsi/gomega v1.32.0
+# github.com/onsi/gomega v1.33.1
 ## explicit; go 1.20
 github.com/onsi/gomega
 github.com/onsi/gomega/format
@@ -703,6 +834,11 @@ github.com/openshift/client-go/machine/listers/machine/v1beta1
 github.com/openshift/cluster-api-actuator-pkg/testutils/resourcebuilder
 github.com/openshift/cluster-api-actuator-pkg/testutils/resourcebuilder/config/v1
 github.com/openshift/cluster-api-actuator-pkg/testutils/resourcebuilder/machine/v1beta1
+# github.com/openshift/cluster-control-plane-machine-set-operator v0.0.0-20240909043600-373ac49835bf
+## explicit; go 1.22.0
+github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/failuredomain
+github.com/openshift/cluster-control-plane-machine-set-operator/pkg/machineproviders/providers/openshift/machine/v1beta1/providerconfig
+github.com/openshift/cluster-control-plane-machine-set-operator/test/e2e/framework
 # github.com/openshift/library-go v0.0.0-20240903143724-7c5c5d305ac1
 ## explicit; go 1.22.0
 github.com/openshift/library-go/pkg/certs
@@ -725,7 +861,7 @@ github.com/openshift/library-go/pkg/operator/resource/resourcemerge
 github.com/openshift/library-go/pkg/operator/resource/resourceread
 github.com/openshift/library-go/pkg/operator/resourcesynccontroller
 github.com/openshift/library-go/pkg/operator/v1helpers
-# github.com/pelletier/go-toml/v2 v2.1.0
+# github.com/pelletier/go-toml/v2 v2.2.2
 ## explicit; go 1.16
 github.com/pelletier/go-toml/v2
 github.com/pelletier/go-toml/v2/internal/characters
@@ -741,7 +877,7 @@ github.com/pkg/errors
 # github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2
 ## explicit
 github.com/pmezard/go-difflib/difflib
-# github.com/polyfloyd/go-errorlint v1.4.4
+# github.com/polyfloyd/go-errorlint v1.5.1
 ## explicit; go 1.20
 github.com/polyfloyd/go-errorlint/errorlint
 # github.com/prometheus/client_golang v1.18.0
@@ -765,7 +901,7 @@ github.com/prometheus/common/model
 github.com/prometheus/procfs
 github.com/prometheus/procfs/internal/fs
 github.com/prometheus/procfs/internal/util
-# github.com/quasilyte/go-ruleguard v0.4.0
+# github.com/quasilyte/go-ruleguard v0.4.2
 ## explicit; go 1.19
 github.com/quasilyte/go-ruleguard/internal/goenv
 github.com/quasilyte/go-ruleguard/internal/golist
@@ -782,6 +918,10 @@ github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrconv
 github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrings
 github.com/quasilyte/go-ruleguard/ruleguard/textmatch
 github.com/quasilyte/go-ruleguard/ruleguard/typematch
+# github.com/quasilyte/go-ruleguard/dsl v0.3.22
+## explicit; go 1.15
+github.com/quasilyte/go-ruleguard/dsl
+github.com/quasilyte/go-ruleguard/dsl/types
 # github.com/quasilyte/gogrep v0.5.0
 ## explicit; go 1.16
 github.com/quasilyte/gogrep
@@ -802,11 +942,11 @@ github.com/robfig/cron
 # github.com/russross/blackfriday/v2 v2.1.0
 ## explicit
 github.com/russross/blackfriday/v2
-# github.com/ryancurrah/gomodguard v1.3.0
-## explicit; go 1.19
+# github.com/ryancurrah/gomodguard v1.3.2
+## explicit; go 1.21
 github.com/ryancurrah/gomodguard
-# github.com/ryanrolds/sqlclosecheck v0.4.0
-## explicit; go 1.19
+# github.com/ryanrolds/sqlclosecheck v0.5.1
+## explicit; go 1.20
 github.com/ryanrolds/sqlclosecheck/pkg/analyzer
 # github.com/sagikazarmark/locafero v0.3.0
 ## explicit; go 1.20
@@ -817,14 +957,18 @@ github.com/sagikazarmark/slog-shim
 # github.com/sanposhiho/wastedassign/v2 v2.0.7
 ## explicit; go 1.14
 github.com/sanposhiho/wastedassign/v2
+# github.com/santhosh-tekuri/jsonschema/v5 v5.3.1
+## explicit; go 1.19
+github.com/santhosh-tekuri/jsonschema/v5
+github.com/santhosh-tekuri/jsonschema/v5/httploader
 # github.com/sashamelentyev/interfacebloat v1.1.0
 ## explicit; go 1.18
 github.com/sashamelentyev/interfacebloat/pkg/analyzer
-# github.com/sashamelentyev/usestdlibvars v1.24.0
+# github.com/sashamelentyev/usestdlibvars v1.25.0
 ## explicit; go 1.20
 github.com/sashamelentyev/usestdlibvars/pkg/analyzer
 github.com/sashamelentyev/usestdlibvars/pkg/analyzer/internal/mapping
-# github.com/securego/gosec/v2 v2.17.0
+# github.com/securego/gosec/v2 v2.20.0
 ## explicit; go 1.20
 github.com/securego/gosec/v2
 github.com/securego/gosec/v2/analyzers
@@ -840,9 +984,6 @@ github.com/sirupsen/logrus
 # github.com/sivchari/containedctx v1.0.3
 ## explicit; go 1.17
 github.com/sivchari/containedctx
-# github.com/sivchari/nosnakecase v1.7.0
-## explicit; go 1.18
-github.com/sivchari/nosnakecase
 # github.com/sivchari/tenv v1.7.1
 ## explicit; go 1.18
 github.com/sivchari/tenv
@@ -860,8 +1001,8 @@ github.com/sourcegraph/conc/panics
 # github.com/sourcegraph/go-diff v0.7.0
 ## explicit; go 1.14
 github.com/sourcegraph/go-diff/diff
-# github.com/spf13/afero v1.10.0
-## explicit; go 1.16
+# github.com/spf13/afero v1.11.0
+## explicit; go 1.19
 github.com/spf13/afero
 github.com/spf13/afero/internal/common
 github.com/spf13/afero/mem
@@ -907,7 +1048,7 @@ github.com/t-yuki/gocover-cobertura
 # github.com/tdakkota/asciicheck v0.2.0
 ## explicit; go 1.18
 github.com/tdakkota/asciicheck
-# github.com/tetafro/godot v1.4.14
+# github.com/tetafro/godot v1.4.16
 ## explicit; go 1.20
 github.com/tetafro/godot
 # github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966
@@ -922,8 +1063,8 @@ github.com/timonwong/loggercheck/internal/checkers/printf
 github.com/timonwong/loggercheck/internal/rules
 github.com/timonwong/loggercheck/internal/sets
 github.com/timonwong/loggercheck/internal/stringutil
-# github.com/tomarrell/wrapcheck/v2 v2.8.1
-## explicit; go 1.18
+# github.com/tomarrell/wrapcheck/v2 v2.8.3
+## explicit; go 1.21
 github.com/tomarrell/wrapcheck/v2/wrapcheck
 # github.com/tommy-muehle/go-mnd/v2 v2.5.1
 ## explicit; go 1.12
@@ -933,10 +1074,10 @@ github.com/tommy-muehle/go-mnd/v2/config
 # github.com/ultraware/funlen v0.1.0
 ## explicit; go 1.20
 github.com/ultraware/funlen
-# github.com/ultraware/whitespace v0.0.5
-## explicit
+# github.com/ultraware/whitespace v0.1.1
+## explicit; go 1.20
 github.com/ultraware/whitespace
-# github.com/uudashr/gocognit v1.0.7
+# github.com/uudashr/gocognit v1.1.2
 ## explicit; go 1.16
 github.com/uudashr/gocognit
 # github.com/vmware/govmomi v0.37.2
@@ -981,7 +1122,7 @@ github.com/vmware/govmomi/vim25/progress
 github.com/vmware/govmomi/vim25/soap
 github.com/vmware/govmomi/vim25/types
 github.com/vmware/govmomi/vim25/xml
-# github.com/xen0n/gosmopolitan v1.2.1
+# github.com/xen0n/gosmopolitan v1.2.2
 ## explicit; go 1.19
 github.com/xen0n/gosmopolitan
 # github.com/xlab/treeprint v1.2.0
@@ -992,15 +1133,21 @@ github.com/xlab/treeprint
 github.com/yagipy/maintidx
 github.com/yagipy/maintidx/pkg/cyc
 github.com/yagipy/maintidx/pkg/halstvol
-# github.com/yeya24/promlinter v0.2.0
-## explicit; go 1.16
+# github.com/yeya24/promlinter v0.3.0
+## explicit; go 1.20
 github.com/yeya24/promlinter
-# github.com/ykadowak/zerologlint v0.1.3
+# github.com/ykadowak/zerologlint v0.1.5
 ## explicit; go 1.19
 github.com/ykadowak/zerologlint
-# gitlab.com/bosi/decorder v0.4.0
-## explicit; go 1.17
+# gitlab.com/bosi/decorder v0.4.2
+## explicit; go 1.20
 gitlab.com/bosi/decorder
+# go-simpler.org/musttag v0.12.2
+## explicit; go 1.20
+go-simpler.org/musttag
+# go-simpler.org/sloglint v0.6.0
+## explicit; go 1.20
+go-simpler.org/sloglint
 # go.starlark.net v0.0.0-20230525235612-a134d8f9ddca
 ## explicit; go 1.16
 go.starlark.net/internal/compile
@@ -1009,9 +1156,11 @@ go.starlark.net/resolve
 go.starlark.net/starlark
 go.starlark.net/starlarkstruct
 go.starlark.net/syntax
-# go.tmz.dev/musttag v0.7.2
-## explicit; go 1.19
-go.tmz.dev/musttag
+# go.uber.org/automaxprocs v1.5.3
+## explicit; go 1.18
+go.uber.org/automaxprocs/internal/cgroups
+go.uber.org/automaxprocs/internal/runtime
+go.uber.org/automaxprocs/maxprocs
 # go.uber.org/multierr v1.11.0
 ## explicit; go 1.19
 go.uber.org/multierr
@@ -1026,7 +1175,7 @@ go.uber.org/zap/internal/exit
 go.uber.org/zap/internal/pool
 go.uber.org/zap/internal/stacktrace
 go.uber.org/zap/zapcore
-# golang.org/x/exp v0.0.0-20230905200255-921286631fa9
+# golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc
 ## explicit; go 1.20
 golang.org/x/exp/constraints
 golang.org/x/exp/maps
@@ -1034,10 +1183,10 @@ golang.org/x/exp/slices
 golang.org/x/exp/slog
 golang.org/x/exp/slog/internal
 golang.org/x/exp/slog/internal/buffer
-# golang.org/x/exp/typeparams v0.0.0-20230307190834-24139beb5833
+# golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f
 ## explicit; go 1.18
 golang.org/x/exp/typeparams
-# golang.org/x/mod v0.16.0
+# golang.org/x/mod v0.17.0
 ## explicit; go 1.18
 golang.org/x/mod/internal/lazyregexp
 golang.org/x/mod/modfile
@@ -1060,7 +1209,7 @@ golang.org/x/net/websocket
 ## explicit; go 1.18
 golang.org/x/oauth2
 golang.org/x/oauth2/internal
-# golang.org/x/sync v0.6.0
+# golang.org/x/sync v0.7.0
 ## explicit; go 1.18
 golang.org/x/sync/errgroup
 golang.org/x/sync/semaphore
@@ -1098,10 +1247,11 @@ golang.org/x/text/width
 # golang.org/x/time v0.5.0
 ## explicit; go 1.18
 golang.org/x/time/rate
-# golang.org/x/tools v0.19.0
+# golang.org/x/tools v0.21.0
 ## explicit; go 1.19
 golang.org/x/tools/cover
 golang.org/x/tools/go/analysis
+golang.org/x/tools/go/analysis/passes/appends
 golang.org/x/tools/go/analysis/passes/asmdecl
 golang.org/x/tools/go/analysis/passes/assign
 golang.org/x/tools/go/analysis/passes/atomic
@@ -1167,13 +1317,13 @@ golang.org/x/tools/internal/event
 golang.org/x/tools/internal/event/core
 golang.org/x/tools/internal/event/keys
 golang.org/x/tools/internal/event/label
-golang.org/x/tools/internal/event/tag
 golang.org/x/tools/internal/gcimporter
 golang.org/x/tools/internal/gocommand
 golang.org/x/tools/internal/gopathwalk
 golang.org/x/tools/internal/imports
 golang.org/x/tools/internal/packagesinternal
 golang.org/x/tools/internal/pkgbits
+golang.org/x/tools/internal/stdlib
 golang.org/x/tools/internal/tokeninternal
 golang.org/x/tools/internal/typeparams
 golang.org/x/tools/internal/typesinternal
@@ -1245,7 +1395,7 @@ gopkg.in/yaml.v2
 # gopkg.in/yaml.v3 v3.0.1
 ## explicit
 gopkg.in/yaml.v3
-# honnef.co/go/tools v0.4.5
+# honnef.co/go/tools v0.4.7
 ## explicit; go 1.19
 honnef.co/go/tools/analysis/code
 honnef.co/go/tools/analysis/edit
@@ -1817,21 +1967,15 @@ k8s.io/utils/pointer
 k8s.io/utils/ptr
 k8s.io/utils/strings/slices
 k8s.io/utils/trace
-# mvdan.cc/gofumpt v0.5.0
-## explicit; go 1.19
+# mvdan.cc/gofumpt v0.6.0
+## explicit; go 1.20
 mvdan.cc/gofumpt/format
 mvdan.cc/gofumpt/internal/govendor/go/doc/comment
 mvdan.cc/gofumpt/internal/govendor/go/format
 mvdan.cc/gofumpt/internal/govendor/go/printer
 mvdan.cc/gofumpt/internal/version
-# mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed
-## explicit
-mvdan.cc/interfacer/check
-# mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b
-## explicit
-mvdan.cc/lint
-# mvdan.cc/unparam v0.0.0-20221223090309-7455f1af531d
-## explicit; go 1.18
+# mvdan.cc/unparam v0.0.0-20240427195214-063aff900ca1
+## explicit; go 1.20
 mvdan.cc/unparam/check
 # sigs.k8s.io/cluster-api v1.6.1
 ## explicit; go 1.20
diff --git a/vendor/mvdan.cc/gofumpt/format/format.go b/vendor/mvdan.cc/gofumpt/format/format.go
index 787561781e806da711e3ba40273356e203326f12..7316eb78b8982efbfadaa500a45ab9bfefe9fd74 100644
--- a/vendor/mvdan.cc/gofumpt/format/format.go
+++ b/vendor/mvdan.cc/gofumpt/format/format.go
@@ -31,17 +31,20 @@ import (
 
 // Options is the set of formatting options which affect gofumpt.
 type Options struct {
-	// LangVersion corresponds to the Go language version a piece of code is
-	// written in. The version is used to decide whether to apply formatting
-	// rules which require new language features. When inside a Go module,
-	// LangVersion should be:
-	//
-	//     go mod edit -json | jq -r '.Go'
+	// TODO: link to the go/version docs once Go 1.22 is out.
+	// The old semver docs said:
 	//
 	// LangVersion is treated as a semantic version, which may start with a "v"
 	// prefix. Like Go versions, it may also be incomplete; "1.14" is equivalent
 	// to "1.14.0". When empty, it is equivalent to "v1", to not use language
 	// features which could break programs.
+
+	// LangVersion is the Go version a piece of code is written in.
+	// The version is used to decide whether to apply formatting
+	// rules which require new language features.
+	// When inside a Go module, LangVersion should typically be:
+	//
+	//     go mod edit -json | jq -r '.Go'
 	LangVersion string
 
 	// ModulePath corresponds to the Go module path which contains the source
@@ -82,22 +85,28 @@ func Source(src []byte, opts Options) ([]byte, error) {
 	return buf.Bytes(), nil
 }
 
+var rxGoVersionMajorMinor = regexp.MustCompile(`^(v|go)?([1-9]+)\.([0-9]+)`)
+
 // File modifies a file and fset in place to follow gofumpt's format. The
 // changes might include manipulating adding or removing newlines in fset,
 // modifying the position of nodes, or modifying literal values.
 func File(fset *token.FileSet, file *ast.File, opts Options) {
 	simplify(file)
 
+	// TODO: replace this hacky mess with go/version once we can rely on Go 1.22,
+	// as well as replacing our uses of the semver package.
+	// In particular, we likely want to allow any of 1.21, 1.21.2, or go1.21rc3,
+	// but we can rely on go/version.Lang to validate and normalize.
 	if opts.LangVersion == "" {
-		opts.LangVersion = "v1"
-	} else if opts.LangVersion[0] != 'v' {
-		opts.LangVersion = "v" + opts.LangVersion
+		opts.LangVersion = "v1.0"
 	}
-	if !semver.IsValid(opts.LangVersion) {
-		panic(fmt.Sprintf("invalid semver string: %q", opts.LangVersion))
+	m := rxGoVersionMajorMinor.FindStringSubmatch(opts.LangVersion)
+	if m == nil {
+		panic(fmt.Sprintf("invalid Go version: %q", opts.LangVersion))
 	}
+	opts.LangVersion = "v" + m[2] + "." + m[3]
 	f := &fumpter{
-		File:    fset.File(file.Pos()),
+		file:    fset.File(file.Pos()),
 		fset:    fset,
 		astFile: file,
 		Options: opts,
@@ -170,7 +179,7 @@ var rxOctalInteger = regexp.MustCompile(`\A0[0-7_]+\z`)
 type fumpter struct {
 	Options
 
-	*token.File
+	file *token.File
 	fset *token.FileSet
 
 	astFile *ast.File
@@ -217,7 +226,8 @@ func (f *fumpter) inlineComment(pos token.Pos) *ast.Comment {
 func (f *fumpter) addNewline(at token.Pos) {
 	offset := f.Offset(at)
 
-	field := reflect.ValueOf(f.File).Elem().FieldByName("lines")
+	// TODO: replace with the new Lines method once we require Go 1.21 or later
+	field := reflect.ValueOf(f.file).Elem().FieldByName("lines")
 	n := field.Len()
 	lines := make([]int, 0, n+1)
 	for i := 0; i < n; i++ {
@@ -236,7 +246,7 @@ func (f *fumpter) addNewline(at token.Pos) {
 	if offset >= 0 {
 		lines = append(lines, offset)
 	}
-	if !f.SetLines(lines) {
+	if !f.file.SetLines(lines) {
 		panic(fmt.Sprintf("could not set lines to %v", lines))
 	}
 }
@@ -245,7 +255,7 @@ func (f *fumpter) addNewline(at token.Pos) {
 // up on the same line.
 func (f *fumpter) removeLines(fromLine, toLine int) {
 	for fromLine < toLine {
-		f.MergeLine(fromLine)
+		f.file.MergeLine(fromLine)
 		toLine--
 	}
 }
@@ -256,6 +266,18 @@ func (f *fumpter) removeLinesBetween(from, to token.Pos) {
 	f.removeLines(f.Line(from)+1, f.Line(to))
 }
 
+func (f *fumpter) Position(p token.Pos) token.Position {
+	return f.file.PositionFor(p, false)
+}
+
+func (f *fumpter) Line(p token.Pos) int {
+	return f.Position(p).Line
+}
+
+func (f *fumpter) Offset(p token.Pos) int {
+	return f.file.Offset(p)
+}
+
 type byteCounter int
 
 func (b *byteCounter) Write(p []byte) (n int, err error) {
@@ -285,14 +307,14 @@ func (f *fumpter) lineEnd(line int) token.Pos {
 	if line < 1 {
 		panic("illegal line number")
 	}
-	total := f.LineCount()
+	total := f.file.LineCount()
 	if line > total {
 		panic("illegal line number")
 	}
 	if line == total {
 		return f.astFile.End()
 	}
-	return f.LineStart(line+1) - 1
+	return f.file.LineStart(line+1) - 1
 }
 
 // rxCommentDirective covers all common Go comment directives:
@@ -305,10 +327,11 @@ func (f *fumpter) lineEnd(line int) token.Pos {
 //	//sys(nb)?     | syscall function wrapper prototypes
 //	//nolint       | nolint directive for golangci
 //	//noinspection | noinspection directive for GoLand and friends
+//	//NOSONAR      | NOSONAR directive for SonarQube
 //
 // Note that the "some-words:" matching expects a letter afterward, such as
 // "go:generate", to prevent matching false positives like "https://site".
-var rxCommentDirective = regexp.MustCompile(`^([a-z-]+:[a-z]+|line\b|export\b|extern\b|sys(nb)?\b|no(lint|inspection)\b)`)
+var rxCommentDirective = regexp.MustCompile(`^([a-z-]+:[a-z]+|line\b|export\b|extern\b|sys(nb)?\b|no(lint|inspection)\b)|NOSONAR\b`)
 
 func (f *fumpter) applyPre(c *astutil.Cursor) {
 	f.splitLongLine(c)
@@ -865,9 +888,9 @@ func (f *fumpter) stmts(list []ast.Stmt) {
 			continue // not an if following another statement
 		}
 		as, ok := list[i-1].(*ast.AssignStmt)
-		if !ok || as.Tok != token.DEFINE ||
+		if !ok || (as.Tok != token.DEFINE && as.Tok != token.ASSIGN) ||
 			!identEqual(as.Lhs[len(as.Lhs)-1], "err") {
-			continue // not "..., err := ..."
+			continue // not ", err :=" nor ", err ="
 		}
 		be, ok := ifs.Cond.(*ast.BinaryExpr)
 		if !ok || ifs.Init != nil || ifs.Else != nil {
diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/print.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/print.go
index 4e9da3d1e85fe4bcc92dcc2306a48336e516425f..e1c070d5a53e8f6fcad003380df434e06829291b 100644
--- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/print.go
+++ b/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/print.go
@@ -148,8 +148,6 @@ func (h *Heading) DefaultID() string {
 
 type commentPrinter struct {
 	*Printer
-	headingPrefix string
-	needDoc       map[string]bool
 }
 
 // Comment returns the standard Go formatting of the Doc,
diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/std.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/std.go
index 6786e4d7a1c680c5d5b58fe79f66493074404efa..d128eda8c5ff73db012d1333a02c0b4349eb8133 100644
--- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/std.go
+++ b/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/std.go
@@ -10,6 +10,7 @@ package comment
 var stdPkgs = []string{
 	"bufio",
 	"bytes",
+	"cmp",
 	"context",
 	"crypto",
 	"embed",
@@ -23,6 +24,7 @@ var stdPkgs = []string{
 	"image",
 	"io",
 	"log",
+	"maps",
 	"math",
 	"mime",
 	"net",
@@ -32,6 +34,7 @@ var stdPkgs = []string{
 	"reflect",
 	"regexp",
 	"runtime",
+	"slices",
 	"sort",
 	"strconv",
 	"strings",
diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/comment.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/comment.go
index 5b9775dc8481f0574267a9fd0b70b83ea49757a0..1f0e7df9dd091a4083b702a0e1383abb3ff8349d 100644
--- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/comment.go
+++ b/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/comment.go
@@ -37,15 +37,16 @@ func formatDocComment(list []*ast.Comment) []*ast.Comment {
 		kind = "//"
 		var b strings.Builder
 		for _, c := range list {
-			if !strings.HasPrefix(c.Text, "//") {
+			after, found := strings.CutPrefix(c.Text, "//")
+			if !found {
 				return list
 			}
 			// Accumulate //go:build etc lines separately.
-			if isDirective(c.Text[2:]) {
+			if isDirective(after) {
 				directives = append(directives, c)
 				continue
 			}
-			b.WriteString(strings.TrimPrefix(c.Text[2:], " "))
+			b.WriteString(strings.TrimPrefix(after, " "))
 			b.WriteString("\n")
 		}
 		text = b.String()
diff --git a/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/printer.go b/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/printer.go
index 576f9690d7319a0e78c24be2c0b70fccc1edda87..2ab0278b0894fdaf1ea2967cd1be329d14af943f 100644
--- a/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/printer.go
+++ b/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/printer.go
@@ -75,10 +75,11 @@ type printer struct {
 	// white space). If there's a difference and SourcePos is set in
 	// ConfigMode, //line directives are used in the output to restore
 	// original source positions for a reader.
-	pos     token.Position // current position in AST (source) space
-	out     token.Position // current position in output space
-	last    token.Position // value of pos after calling writeString
-	linePtr *int           // if set, record out.Line for the next token in *linePtr
+	pos          token.Position // current position in AST (source) space
+	out          token.Position // current position in output space
+	last         token.Position // value of pos after calling writeString
+	linePtr      *int           // if set, record out.Line for the next token in *linePtr
+	sourcePosErr error          // if non-nil, the first error emitting a //line directive
 
 	// The list of all source comments, in order of appearance.
 	comments        []*ast.CommentGroup // may be nil
@@ -196,6 +197,13 @@ func (p *printer) lineFor(pos token.Pos) int {
 // writeLineDirective writes a //line directive if necessary.
 func (p *printer) writeLineDirective(pos token.Position) {
 	if pos.IsValid() && (p.out.Line != pos.Line || p.out.Filename != pos.Filename) {
+		if strings.ContainsAny(pos.Filename, "\r\n") {
+			if p.sourcePosErr == nil {
+				p.sourcePosErr = fmt.Errorf("mvdan.cc/gofumpt/internal/govendor/go/printer: source filename contains unexpected newline character: %q", pos.Filename)
+			}
+			return
+		}
+
 		p.output = append(p.output, tabwriter.Escape) // protect '\n' in //line from tabwriter interpretation
 		p.output = append(p.output, fmt.Sprintf("//line %s:%d\n", pos.Filename, pos.Line)...)
 		p.output = append(p.output, tabwriter.Escape)
@@ -804,7 +812,7 @@ func (p *printer) intersperseComments(next token.Position, tok token.Token) (wro
 	return
 }
 
-// whiteWhitespace writes the first n whitespace entries.
+// writeWhitespace writes the first n whitespace entries.
 func (p *printer) writeWhitespace(n int) {
 	// write entries
 	for i := 0; i < n; i++ {
@@ -1168,7 +1176,7 @@ func (p *printer) printNode(node any) error {
 		goto unsupported
 	}
 
-	return nil
+	return p.sourcePosErr
 
 unsupported:
 	return fmt.Errorf("mvdan.cc/gofumpt/internal/govendor/go/printer: unsupported node type %T", node)
diff --git a/vendor/mvdan.cc/interfacer/LICENSE b/vendor/mvdan.cc/interfacer/LICENSE
deleted file mode 100644
index 7d71d51a5eb32d5c24fd20a5f5db3705e5840ad4..0000000000000000000000000000000000000000
--- a/vendor/mvdan.cc/interfacer/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright (c) 2015, Daniel Martí. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-   * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-   * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-   * Neither the name of the copyright holder nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/mvdan.cc/interfacer/check/cache.go b/vendor/mvdan.cc/interfacer/check/cache.go
deleted file mode 100644
index 757eca55e136a232fc858dfe6074ac3c46d75b52..0000000000000000000000000000000000000000
--- a/vendor/mvdan.cc/interfacer/check/cache.go
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
-// See LICENSE for licensing information
-
-package check
-
-import (
-	"go/ast"
-	"go/types"
-)
-
-type pkgTypes struct {
-	ifaces    map[string]string
-	funcSigns map[string]bool
-}
-
-func (p *pkgTypes) getTypes(pkg *types.Package) {
-	p.ifaces = make(map[string]string)
-	p.funcSigns = make(map[string]bool)
-	done := make(map[*types.Package]bool)
-	addTypes := func(pkg *types.Package, top bool) {
-		if done[pkg] {
-			return
-		}
-		done[pkg] = true
-		ifs, funs := fromScope(pkg.Scope())
-		fullName := func(name string) string {
-			if !top {
-				return pkg.Path() + "." + name
-			}
-			return name
-		}
-		for iftype, name := range ifs {
-			// only suggest exported interfaces
-			if ast.IsExported(name) {
-				p.ifaces[iftype] = fullName(name)
-			}
-		}
-		for ftype := range funs {
-			// ignore non-exported func signatures too
-			p.funcSigns[ftype] = true
-		}
-	}
-	for _, imp := range pkg.Imports() {
-		addTypes(imp, false)
-		for _, imp2 := range imp.Imports() {
-			addTypes(imp2, false)
-		}
-	}
-	addTypes(pkg, true)
-}
diff --git a/vendor/mvdan.cc/interfacer/check/check.go b/vendor/mvdan.cc/interfacer/check/check.go
deleted file mode 100644
index f4d3b4037b08728d5ccaebb5bb4e180946fd30a9..0000000000000000000000000000000000000000
--- a/vendor/mvdan.cc/interfacer/check/check.go
+++ /dev/null
@@ -1,462 +0,0 @@
-// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
-// See LICENSE for licensing information
-
-package check // import "mvdan.cc/interfacer/check"
-
-import (
-	"fmt"
-	"go/ast"
-	"go/token"
-	"go/types"
-	"os"
-	"strings"
-
-	"golang.org/x/tools/go/loader"
-	"golang.org/x/tools/go/ssa"
-	"golang.org/x/tools/go/ssa/ssautil"
-
-	"github.com/kisielk/gotool"
-	"mvdan.cc/lint"
-)
-
-func toDiscard(usage *varUsage) bool {
-	if usage.discard {
-		return true
-	}
-	for to := range usage.assigned {
-		if toDiscard(to) {
-			return true
-		}
-	}
-	return false
-}
-
-func allCalls(usage *varUsage, all, ftypes map[string]string) {
-	for fname := range usage.calls {
-		all[fname] = ftypes[fname]
-	}
-	for to := range usage.assigned {
-		allCalls(to, all, ftypes)
-	}
-}
-
-func (c *Checker) interfaceMatching(param *types.Var, usage *varUsage) (string, string) {
-	if toDiscard(usage) {
-		return "", ""
-	}
-	ftypes := typeFuncMap(param.Type())
-	called := make(map[string]string, len(usage.calls))
-	allCalls(usage, called, ftypes)
-	s := funcMapString(called)
-	return c.ifaces[s], s
-}
-
-type varUsage struct {
-	calls   map[string]struct{}
-	discard bool
-
-	assigned map[*varUsage]struct{}
-}
-
-type funcDecl struct {
-	astDecl *ast.FuncDecl
-	ssaFn   *ssa.Function
-}
-
-// CheckArgs checks the packages specified by their import paths in
-// args.
-func CheckArgs(args []string) ([]string, error) {
-	paths := gotool.ImportPaths(args)
-	conf := loader.Config{}
-	conf.AllowErrors = true
-	rest, err := conf.FromArgs(paths, false)
-	if err != nil {
-		return nil, err
-	}
-	if len(rest) > 0 {
-		return nil, fmt.Errorf("unwanted extra args: %v", rest)
-	}
-	lprog, err := conf.Load()
-	if err != nil {
-		return nil, err
-	}
-	prog := ssautil.CreateProgram(lprog, 0)
-	prog.Build()
-	c := new(Checker)
-	c.Program(lprog)
-	c.ProgramSSA(prog)
-	issues, err := c.Check()
-	if err != nil {
-		return nil, err
-	}
-	wd, err := os.Getwd()
-	if err != nil {
-		return nil, err
-	}
-	lines := make([]string, len(issues))
-	for i, issue := range issues {
-		fpos := prog.Fset.Position(issue.Pos()).String()
-		if strings.HasPrefix(fpos, wd) {
-			fpos = fpos[len(wd)+1:]
-		}
-		lines[i] = fmt.Sprintf("%s: %s", fpos, issue.Message())
-	}
-	return lines, nil
-}
-
-type Checker struct {
-	lprog *loader.Program
-	prog  *ssa.Program
-
-	pkgTypes
-	*loader.PackageInfo
-
-	funcs []*funcDecl
-
-	ssaByPos map[token.Pos]*ssa.Function
-
-	discardFuncs map[*types.Signature]struct{}
-
-	vars map[*types.Var]*varUsage
-}
-
-var (
-	_ lint.Checker = (*Checker)(nil)
-	_ lint.WithSSA = (*Checker)(nil)
-)
-
-func (c *Checker) Program(lprog *loader.Program) {
-	c.lprog = lprog
-}
-
-func (c *Checker) ProgramSSA(prog *ssa.Program) {
-	c.prog = prog
-}
-
-func (c *Checker) Check() ([]lint.Issue, error) {
-	var total []lint.Issue
-	c.ssaByPos = make(map[token.Pos]*ssa.Function)
-	wantPkg := make(map[*types.Package]bool)
-	for _, pinfo := range c.lprog.InitialPackages() {
-		wantPkg[pinfo.Pkg] = true
-	}
-	for fn := range ssautil.AllFunctions(c.prog) {
-		if fn.Pkg == nil { // builtin?
-			continue
-		}
-		if len(fn.Blocks) == 0 { // stub
-			continue
-		}
-		if !wantPkg[fn.Pkg.Pkg] { // not part of given pkgs
-			continue
-		}
-		c.ssaByPos[fn.Pos()] = fn
-	}
-	for _, pinfo := range c.lprog.InitialPackages() {
-		pkg := pinfo.Pkg
-		c.getTypes(pkg)
-		c.PackageInfo = c.lprog.AllPackages[pkg]
-		total = append(total, c.checkPkg()...)
-	}
-	return total, nil
-}
-
-func (c *Checker) checkPkg() []lint.Issue {
-	c.discardFuncs = make(map[*types.Signature]struct{})
-	c.vars = make(map[*types.Var]*varUsage)
-	c.funcs = c.funcs[:0]
-	findFuncs := func(node ast.Node) bool {
-		decl, ok := node.(*ast.FuncDecl)
-		if !ok {
-			return true
-		}
-		ssaFn := c.ssaByPos[decl.Name.Pos()]
-		if ssaFn == nil {
-			return true
-		}
-		fd := &funcDecl{
-			astDecl: decl,
-			ssaFn:   ssaFn,
-		}
-		if c.funcSigns[signString(fd.ssaFn.Signature)] {
-			// implements interface
-			return true
-		}
-		c.funcs = append(c.funcs, fd)
-		ast.Walk(c, decl.Body)
-		return true
-	}
-	for _, f := range c.Files {
-		ast.Inspect(f, findFuncs)
-	}
-	return c.packageIssues()
-}
-
-func paramVarAndType(sign *types.Signature, i int) (*types.Var, types.Type) {
-	params := sign.Params()
-	extra := sign.Variadic() && i >= params.Len()-1
-	if !extra {
-		if i >= params.Len() {
-			// builtins with multiple signatures
-			return nil, nil
-		}
-		vr := params.At(i)
-		return vr, vr.Type()
-	}
-	last := params.At(params.Len() - 1)
-	switch x := last.Type().(type) {
-	case *types.Slice:
-		return nil, x.Elem()
-	default:
-		return nil, x
-	}
-}
-
-func (c *Checker) varUsage(e ast.Expr) *varUsage {
-	id, ok := e.(*ast.Ident)
-	if !ok {
-		return nil
-	}
-	param, ok := c.ObjectOf(id).(*types.Var)
-	if !ok {
-		// not a variable
-		return nil
-	}
-	if usage, e := c.vars[param]; e {
-		return usage
-	}
-	if !interesting(param.Type()) {
-		return nil
-	}
-	usage := &varUsage{
-		calls:    make(map[string]struct{}),
-		assigned: make(map[*varUsage]struct{}),
-	}
-	c.vars[param] = usage
-	return usage
-}
-
-func (c *Checker) addUsed(e ast.Expr, as types.Type) {
-	if as == nil {
-		return
-	}
-	if usage := c.varUsage(e); usage != nil {
-		// using variable
-		iface, ok := as.Underlying().(*types.Interface)
-		if !ok {
-			usage.discard = true
-			return
-		}
-		for i := 0; i < iface.NumMethods(); i++ {
-			m := iface.Method(i)
-			usage.calls[m.Name()] = struct{}{}
-		}
-	} else if t, ok := c.TypeOf(e).(*types.Signature); ok {
-		// using func
-		c.discardFuncs[t] = struct{}{}
-	}
-}
-
-func (c *Checker) addAssign(to, from ast.Expr) {
-	pto := c.varUsage(to)
-	pfrom := c.varUsage(from)
-	if pto == nil || pfrom == nil {
-		// either isn't interesting
-		return
-	}
-	pfrom.assigned[pto] = struct{}{}
-}
-
-func (c *Checker) discard(e ast.Expr) {
-	if usage := c.varUsage(e); usage != nil {
-		usage.discard = true
-	}
-}
-
-func (c *Checker) comparedWith(e, with ast.Expr) {
-	if _, ok := with.(*ast.BasicLit); ok {
-		c.discard(e)
-	}
-}
-
-func (c *Checker) Visit(node ast.Node) ast.Visitor {
-	switch x := node.(type) {
-	case *ast.SelectorExpr:
-		if _, ok := c.TypeOf(x.Sel).(*types.Signature); !ok {
-			c.discard(x.X)
-		}
-	case *ast.StarExpr:
-		c.discard(x.X)
-	case *ast.UnaryExpr:
-		c.discard(x.X)
-	case *ast.IndexExpr:
-		c.discard(x.X)
-	case *ast.IncDecStmt:
-		c.discard(x.X)
-	case *ast.BinaryExpr:
-		switch x.Op {
-		case token.EQL, token.NEQ:
-			c.comparedWith(x.X, x.Y)
-			c.comparedWith(x.Y, x.X)
-		default:
-			c.discard(x.X)
-			c.discard(x.Y)
-		}
-	case *ast.ValueSpec:
-		for _, val := range x.Values {
-			c.addUsed(val, c.TypeOf(x.Type))
-		}
-	case *ast.AssignStmt:
-		for i, val := range x.Rhs {
-			left := x.Lhs[i]
-			if x.Tok == token.ASSIGN {
-				c.addUsed(val, c.TypeOf(left))
-			}
-			c.addAssign(left, val)
-		}
-	case *ast.CompositeLit:
-		for i, e := range x.Elts {
-			switch y := e.(type) {
-			case *ast.KeyValueExpr:
-				c.addUsed(y.Key, c.TypeOf(y.Value))
-				c.addUsed(y.Value, c.TypeOf(y.Key))
-			case *ast.Ident:
-				c.addUsed(y, compositeIdentType(c.TypeOf(x), i))
-			}
-		}
-	case *ast.CallExpr:
-		switch y := c.TypeOf(x.Fun).Underlying().(type) {
-		case *types.Signature:
-			c.onMethodCall(x, y)
-		default:
-			// type conversion
-			if len(x.Args) == 1 {
-				c.addUsed(x.Args[0], y)
-			}
-		}
-	}
-	return c
-}
-
-func compositeIdentType(t types.Type, i int) types.Type {
-	switch x := t.(type) {
-	case *types.Named:
-		return compositeIdentType(x.Underlying(), i)
-	case *types.Struct:
-		return x.Field(i).Type()
-	case *types.Array:
-		return x.Elem()
-	case *types.Slice:
-		return x.Elem()
-	}
-	return nil
-}
-
-func (c *Checker) onMethodCall(ce *ast.CallExpr, sign *types.Signature) {
-	for i, e := range ce.Args {
-		paramObj, t := paramVarAndType(sign, i)
-		// Don't if this is a parameter being re-used as itself
-		// in a recursive call
-		if id, ok := e.(*ast.Ident); ok {
-			if paramObj == c.ObjectOf(id) {
-				continue
-			}
-		}
-		c.addUsed(e, t)
-	}
-	sel, ok := ce.Fun.(*ast.SelectorExpr)
-	if !ok {
-		return
-	}
-	// receiver func call on the left side
-	if usage := c.varUsage(sel.X); usage != nil {
-		usage.calls[sel.Sel.Name] = struct{}{}
-	}
-}
-
-func (fd *funcDecl) paramGroups() [][]*types.Var {
-	astList := fd.astDecl.Type.Params.List
-	groups := make([][]*types.Var, len(astList))
-	signIndex := 0
-	for i, field := range astList {
-		group := make([]*types.Var, len(field.Names))
-		for j := range field.Names {
-			group[j] = fd.ssaFn.Signature.Params().At(signIndex)
-			signIndex++
-		}
-		groups[i] = group
-	}
-	return groups
-}
-
-func (c *Checker) packageIssues() []lint.Issue {
-	var issues []lint.Issue
-	for _, fd := range c.funcs {
-		if _, e := c.discardFuncs[fd.ssaFn.Signature]; e {
-			continue
-		}
-		for _, group := range fd.paramGroups() {
-			issues = append(issues, c.groupIssues(fd, group)...)
-		}
-	}
-	return issues
-}
-
-type Issue struct {
-	pos token.Pos
-	msg string
-}
-
-func (i Issue) Pos() token.Pos  { return i.pos }
-func (i Issue) Message() string { return i.msg }
-
-func (c *Checker) groupIssues(fd *funcDecl, group []*types.Var) []lint.Issue {
-	var issues []lint.Issue
-	for _, param := range group {
-		usage := c.vars[param]
-		if usage == nil {
-			return nil
-		}
-		newType := c.paramNewType(fd.astDecl.Name.Name, param, usage)
-		if newType == "" {
-			return nil
-		}
-		issues = append(issues, Issue{
-			pos: param.Pos(),
-			msg: fmt.Sprintf("%s can be %s", param.Name(), newType),
-		})
-	}
-	return issues
-}
-
-func willAddAllocation(t types.Type) bool {
-	switch t.Underlying().(type) {
-	case *types.Pointer, *types.Interface:
-		return false
-	}
-	return true
-}
-
-func (c *Checker) paramNewType(funcName string, param *types.Var, usage *varUsage) string {
-	t := param.Type()
-	if !ast.IsExported(funcName) && willAddAllocation(t) {
-		return ""
-	}
-	if named := typeNamed(t); named != nil {
-		tname := named.Obj().Name()
-		vname := param.Name()
-		if mentionsName(funcName, tname) || mentionsName(funcName, vname) {
-			return ""
-		}
-	}
-	ifname, iftype := c.interfaceMatching(param, usage)
-	if ifname == "" {
-		return ""
-	}
-	if types.IsInterface(t.Underlying()) {
-		if have := funcMapString(typeFuncMap(t)); have == iftype {
-			return ""
-		}
-	}
-	return ifname
-}
diff --git a/vendor/mvdan.cc/interfacer/check/types.go b/vendor/mvdan.cc/interfacer/check/types.go
deleted file mode 100644
index 393bb0b9fabef4c0e759713d40469c825ca41852..0000000000000000000000000000000000000000
--- a/vendor/mvdan.cc/interfacer/check/types.go
+++ /dev/null
@@ -1,170 +0,0 @@
-// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
-// See LICENSE for licensing information
-
-package check
-
-import (
-	"bytes"
-	"fmt"
-	"go/types"
-	"sort"
-	"strings"
-)
-
-type methoder interface {
-	NumMethods() int
-	Method(int) *types.Func
-}
-
-func methoderFuncMap(m methoder, skip bool) map[string]string {
-	ifuncs := make(map[string]string, m.NumMethods())
-	for i := 0; i < m.NumMethods(); i++ {
-		f := m.Method(i)
-		if !f.Exported() {
-			if skip {
-				continue
-			}
-			return nil
-		}
-		sign := f.Type().(*types.Signature)
-		ifuncs[f.Name()] = signString(sign)
-	}
-	return ifuncs
-}
-
-func typeFuncMap(t types.Type) map[string]string {
-	switch x := t.(type) {
-	case *types.Pointer:
-		return typeFuncMap(x.Elem())
-	case *types.Named:
-		u := x.Underlying()
-		if types.IsInterface(u) {
-			return typeFuncMap(u)
-		}
-		return methoderFuncMap(x, true)
-	case *types.Interface:
-		return methoderFuncMap(x, false)
-	default:
-		return nil
-	}
-}
-
-func funcMapString(iface map[string]string) string {
-	fnames := make([]string, 0, len(iface))
-	for fname := range iface {
-		fnames = append(fnames, fname)
-	}
-	sort.Strings(fnames)
-	var b bytes.Buffer
-	for i, fname := range fnames {
-		if i > 0 {
-			fmt.Fprint(&b, "; ")
-		}
-		fmt.Fprint(&b, fname, iface[fname])
-	}
-	return b.String()
-}
-
-func tupleJoin(buf *bytes.Buffer, t *types.Tuple) {
-	buf.WriteByte('(')
-	for i := 0; i < t.Len(); i++ {
-		if i > 0 {
-			buf.WriteString(", ")
-		}
-		buf.WriteString(t.At(i).Type().String())
-	}
-	buf.WriteByte(')')
-}
-
-// signString is similar to Signature.String(), but it ignores
-// param/result names.
-func signString(sign *types.Signature) string {
-	var buf bytes.Buffer
-	tupleJoin(&buf, sign.Params())
-	tupleJoin(&buf, sign.Results())
-	return buf.String()
-}
-
-func interesting(t types.Type) bool {
-	switch x := t.(type) {
-	case *types.Interface:
-		return x.NumMethods() > 1
-	case *types.Named:
-		if u := x.Underlying(); types.IsInterface(u) {
-			return interesting(u)
-		}
-		return x.NumMethods() >= 1
-	case *types.Pointer:
-		return interesting(x.Elem())
-	default:
-		return false
-	}
-}
-
-func anyInteresting(params *types.Tuple) bool {
-	for i := 0; i < params.Len(); i++ {
-		t := params.At(i).Type()
-		if interesting(t) {
-			return true
-		}
-	}
-	return false
-}
-
-func fromScope(scope *types.Scope) (ifaces map[string]string, funcs map[string]bool) {
-	ifaces = make(map[string]string)
-	funcs = make(map[string]bool)
-	for _, name := range scope.Names() {
-		tn, ok := scope.Lookup(name).(*types.TypeName)
-		if !ok {
-			continue
-		}
-		switch x := tn.Type().Underlying().(type) {
-		case *types.Interface:
-			iface := methoderFuncMap(x, false)
-			if len(iface) == 0 {
-				continue
-			}
-			for i := 0; i < x.NumMethods(); i++ {
-				f := x.Method(i)
-				sign := f.Type().(*types.Signature)
-				if !anyInteresting(sign.Params()) {
-					continue
-				}
-				funcs[signString(sign)] = true
-			}
-			s := funcMapString(iface)
-			if _, e := ifaces[s]; !e {
-				ifaces[s] = tn.Name()
-			}
-		case *types.Signature:
-			if !anyInteresting(x.Params()) {
-				continue
-			}
-			funcs[signString(x)] = true
-		}
-	}
-	return ifaces, funcs
-}
-
-func mentionsName(fname, name string) bool {
-	if len(name) < 2 {
-		return false
-	}
-	capit := strings.ToUpper(name[:1]) + name[1:]
-	lower := strings.ToLower(name)
-	return strings.Contains(fname, capit) || strings.HasPrefix(fname, lower)
-}
-
-func typeNamed(t types.Type) *types.Named {
-	for {
-		switch x := t.(type) {
-		case *types.Named:
-			return x
-		case *types.Pointer:
-			t = x.Elem()
-		default:
-			return nil
-		}
-	}
-}
diff --git a/vendor/mvdan.cc/lint/.travis.yml b/vendor/mvdan.cc/lint/.travis.yml
deleted file mode 100644
index 2ccdeab9adfceadd49cb047c2e503ab1c5e53edf..0000000000000000000000000000000000000000
--- a/vendor/mvdan.cc/lint/.travis.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-language: go
-
-go:
-  - 1.8.x
-  - 1.9.x
-
-go_import_path: mvdan.cc/lint
diff --git a/vendor/mvdan.cc/lint/LICENSE b/vendor/mvdan.cc/lint/LICENSE
deleted file mode 100644
index a06c5ebfc882995f3223fddfb8c70bbce3b94b3e..0000000000000000000000000000000000000000
--- a/vendor/mvdan.cc/lint/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright (c) 2017, Daniel Martí. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-   * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-   * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-   * Neither the name of the copyright holder nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/mvdan.cc/lint/README.md b/vendor/mvdan.cc/lint/README.md
deleted file mode 100644
index 8a9c8b51c3c4cf280f9952b1767104a8b0fc8c80..0000000000000000000000000000000000000000
--- a/vendor/mvdan.cc/lint/README.md
+++ /dev/null
@@ -1,27 +0,0 @@
-# lint
-
-[![GoDoc](https://godoc.org/mvdan.cc/lint?status.svg)](https://godoc.org/mvdan.cc/lint)
-[![Build Status](https://travis-ci.org/mvdan/lint.svg?branch=master)](https://travis-ci.org/mvdan/lint)
-
-Work in progress. Its API might change before the 1.0 release.
-
-This package intends to define simple interfaces that Go code checkers
-can implement. This would simplify calling them from Go code, as well as
-running multiple linters while sharing initial loading work.
-
-### metalint
-
-	go get -u mvdan.cc/lint/cmd/metalint
-
-The start of a linter that runs many linters leveraging the common
-interface. Not stable yet.
-
-Linters included:
-
-* [unparam](https://mvdan.cc/unparam)
-* [interfacer](https://github.com/mvdan/interfacer)
-
-### Related projects
-
-* [golinters](https://github.com/thomasheller/golinters) - Report on
-  linter support
diff --git a/vendor/mvdan.cc/lint/lint.go b/vendor/mvdan.cc/lint/lint.go
deleted file mode 100644
index a16789fad5688acdb91aaa966a98f74ac8222240..0000000000000000000000000000000000000000
--- a/vendor/mvdan.cc/lint/lint.go
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright (c) 2017, Daniel Martí <mvdan@mvdan.cc>
-// See LICENSE for licensing information
-
-// Package lint defines common interfaces for Go code checkers.
-package lint // import "mvdan.cc/lint"
-
-import (
-	"go/token"
-
-	"golang.org/x/tools/go/loader"
-	"golang.org/x/tools/go/ssa"
-)
-
-// A Checker points out issues in a program.
-type Checker interface {
-	Program(*loader.Program)
-	Check() ([]Issue, error)
-}
-
-type WithSSA interface {
-	ProgramSSA(*ssa.Program)
-}
-
-// Issue represents an issue somewhere in a source code file.
-type Issue interface {
-	Pos() token.Pos
-	Message() string
-}
diff --git a/vendor/mvdan.cc/unparam/check/check.go b/vendor/mvdan.cc/unparam/check/check.go
index 8f9a3cb0ae8eeadfbaaa7847b48820e42b1db558..b1704e454c9eb10571915e7de5cd559e2d5205e7 100644
--- a/vendor/mvdan.cc/unparam/check/check.go
+++ b/vendor/mvdan.cc/unparam/check/check.go
@@ -913,10 +913,12 @@ func recvPrefix(recv *ast.FieldList) string {
 		return expr.Name + "."
 	case *ast.IndexExpr:
 		return expr.X.(*ast.Ident).Name + "."
+	case *ast.ParenExpr:
+		return expr.X.(*ast.Ident).Name + "."
 	case *ast.IndexListExpr:
 		return expr.X.(*ast.Ident).Name + "."
 	default:
-		panic(fmt.Sprintf("unexepected receiver AST node: %T", expr))
+		panic(fmt.Sprintf("unexpected receiver AST node: %T", expr))
 	}
 }