diff --git a/go.mod b/go.mod
index 51c07bc60a..26aaf187cc 100644
--- a/go.mod
+++ b/go.mod
@@ -62,7 +62,7 @@ require (
github.com/onsi/ginkgo v1.16.5
github.com/onsi/ginkgo/v2 v2.23.0
github.com/onsi/gomega v1.36.2
- github.com/open-policy-agent/opa v1.1.0
+ github.com/open-policy-agent/opa v1.2.0
github.com/opencloud-eu/reva/v2 v2.28.0
github.com/orcaman/concurrent-map v1.0.0
github.com/owncloud/libre-graph-api-go v1.0.5-0.20240829135935-80dc00d6f5ea
@@ -77,7 +77,7 @@ require (
github.com/shamaton/msgpack/v2 v2.2.2
github.com/sirupsen/logrus v1.9.3
github.com/spf13/afero v1.12.0
- github.com/spf13/cobra v1.8.1
+ github.com/spf13/cobra v1.9.1
github.com/stretchr/testify v1.10.0
github.com/test-go/testify v1.1.4
github.com/thejerf/suture/v4 v4.0.6
@@ -120,10 +120,9 @@ require (
github.com/Masterminds/goutils v1.1.1 // indirect
github.com/Masterminds/sprig v2.22.0+incompatible // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
- github.com/OneOfOne/xxhash v1.2.8 // indirect
github.com/ProtonMail/go-crypto v1.1.5 // indirect
github.com/RoaringBitmap/roaring v1.9.3 // indirect
- github.com/agnivade/levenshtein v1.2.0 // indirect
+ github.com/agnivade/levenshtein v1.2.1 // indirect
github.com/ajg/form v1.5.1 // indirect
github.com/alexedwards/argon2id v1.0.0 // indirect
github.com/amoghe/go-crypt v0.0.0-20220222110647-20eada5f5964 // indirect
@@ -160,7 +159,7 @@ require (
github.com/coreos/go-semver v0.3.0 // indirect
github.com/coreos/go-systemd/v22 v22.5.0 // indirect
github.com/cornelk/hashmap v1.0.8 // indirect
- github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect
+ github.com/cpuguy83/go-md2man/v2 v2.0.6 // indirect
github.com/crewjam/httperr v0.2.0 // indirect
github.com/crewjam/saml v0.4.14 // indirect
github.com/cyphar/filepath-securejoin v0.3.6 // indirect
diff --git a/go.sum b/go.sum
index c3ad4f5ea6..5dcf156108 100644
--- a/go.sum
+++ b/go.sum
@@ -84,8 +84,6 @@ github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA
github.com/Nerzal/gocloak/v13 v13.9.0 h1:YWsJsdM5b0yhM2Ba3MLydiOlujkBry4TtdzfIzSVZhw=
github.com/Nerzal/gocloak/v13 v13.9.0/go.mod h1:YYuDcXZ7K2zKECyVP7pPqjKxx2AzYSpKDj8d6GuyM10=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
-github.com/OneOfOne/xxhash v1.2.8 h1:31czK/TI9sNkxIKfaUfGlU47BAxQ0ztGgd9vPyqimf8=
-github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q=
github.com/OpenDNS/vegadns2client v0.0.0-20180418235048-a3fa4a771d87/go.mod h1:iGLljf5n9GjT6kc0HBvyI1nOKnGQbNB66VzSNbK5iks=
github.com/ProtonMail/go-crypto v1.1.5 h1:eoAQfK2dwL+tFSFpr7TbOaPNUbPiJj4fLYwwGE1FQO4=
github.com/ProtonMail/go-crypto v1.1.5/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
@@ -93,8 +91,8 @@ github.com/RoaringBitmap/roaring v1.9.3 h1:t4EbC5qQwnisr5PrP9nt0IRhRTb9gMUgQF4t4
github.com/RoaringBitmap/roaring v1.9.3/go.mod h1:6AXUsoIEzDTFFQCe1RbGA6uFONMhvejWj5rqITANK90=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
-github.com/agnivade/levenshtein v1.2.0 h1:U9L4IOT0Y3i0TIlUIDJ7rVUziKi/zPbrJGaFrtYH3SY=
-github.com/agnivade/levenshtein v1.2.0/go.mod h1:QVVI16kDrtSuwcpd0p1+xMC6Z/VfhtCyDIjcwga4/DU=
+github.com/agnivade/levenshtein v1.2.1 h1:EHBY3UOn1gwdy/VbFwgo4cxecRznFk7fKWN1KOX7eoM=
+github.com/agnivade/levenshtein v1.2.1/go.mod h1:QVVI16kDrtSuwcpd0p1+xMC6Z/VfhtCyDIjcwga4/DU=
github.com/ajg/form v1.5.1 h1:t9c7v8JUKu/XxOGBU0yjNpaMloxGEJhUkqFRq0ibGeU=
github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
github.com/akamai/AkamaiOPEN-edgegrid-golang v1.1.0/go.mod h1:kX6YddBkXqqywAe8c9LyvgTCyFuZCTMF4cRPQhc3Fy8=
@@ -235,9 +233,8 @@ github.com/cornelk/hashmap v1.0.8/go.mod h1:RfZb7JO3RviW/rT6emczVuC/oxpdz4UsSB2L
github.com/cpu/goacmedns v0.1.1/go.mod h1:MuaouqEhPAHxsbqjgnck5zeghuwBP1dLnPoobeGqugQ=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
-github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
-github.com/cpuguy83/go-md2man/v2 v2.0.5 h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc=
-github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/cpuguy83/go-md2man/v2 v2.0.6 h1:XJtiaUW6dEEqVuZiMTn1ldk455QWwEIsMIJlo5vtkx0=
+github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/crewjam/httperr v0.2.0 h1:b2BfXR8U3AlIHwNeFFvZ+BV1LFvKLlzMjzaTnZMybNo=
github.com/crewjam/httperr v0.2.0/go.mod h1:Jlz+Sg/XqBQhyMjdDiC+GNNRzZTD7x39Gu3pglZ5oH4=
@@ -862,8 +859,8 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY=
-github.com/open-policy-agent/opa v1.1.0 h1:HMz2evdEMTyNqtdLjmu3Vyx06BmhNYAx67Yz3Ll9q2s=
-github.com/open-policy-agent/opa v1.1.0/go.mod h1:T1pASQ1/vwfTa+e2fYcfpLCvWgYtqtiUv+IuA/dLPQs=
+github.com/open-policy-agent/opa v1.2.0 h1:88NDVCM0of1eO6Z4AFeL3utTEtMuwloFmWWU7dRV1z0=
+github.com/open-policy-agent/opa v1.2.0/go.mod h1:30euUmOvuBoebRCcJ7DMF42bRBOPznvt0ACUMYDUGVY=
github.com/opencloud-eu/reva/v2 v2.28.0 h1:ai7PRIESdw2SiM/MmK8Tc+C/GDHBwzlQp4MwCnqTl5Y=
github.com/opencloud-eu/reva/v2 v2.28.0/go.mod h1:hbCaf73/SzHtbVlmVCU1Eheadds029am/X0Bff5k514=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
@@ -1040,8 +1037,8 @@ github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cobra v1.1.1/go.mod h1:WnodtKOvamDL/PwE2M4iKs8aMDBZ5Q5klgD3qfVJQMI=
-github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
-github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
+github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
+github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
diff --git a/vendor/github.com/OneOfOne/xxhash/.gitignore b/vendor/github.com/OneOfOne/xxhash/.gitignore
deleted file mode 100644
index f4faa7f8f1..0000000000
--- a/vendor/github.com/OneOfOne/xxhash/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-*.txt
-*.pprof
-cmap2/
-cache/
diff --git a/vendor/github.com/OneOfOne/xxhash/.travis.yml b/vendor/github.com/OneOfOne/xxhash/.travis.yml
deleted file mode 100644
index 1c6dc55bc7..0000000000
--- a/vendor/github.com/OneOfOne/xxhash/.travis.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-language: go
-sudo: false
-
-go:
- - "1.10"
- - "1.11"
- - "1.12"
- - master
-
-script:
- - go test -tags safe ./...
- - go test ./...
- -
diff --git a/vendor/github.com/OneOfOne/xxhash/LICENSE b/vendor/github.com/OneOfOne/xxhash/LICENSE
deleted file mode 100644
index 9e30b4f342..0000000000
--- a/vendor/github.com/OneOfOne/xxhash/LICENSE
+++ /dev/null
@@ -1,187 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "{}"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
diff --git a/vendor/github.com/OneOfOne/xxhash/README.md b/vendor/github.com/OneOfOne/xxhash/README.md
deleted file mode 100644
index 8eea28c394..0000000000
--- a/vendor/github.com/OneOfOne/xxhash/README.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# xxhash [](https://godoc.org/github.com/OneOfOne/xxhash) [](https://travis-ci.org/OneOfOne/xxhash) [](https://gocover.io/github.com/OneOfOne/xxhash)
-
-This is a native Go implementation of the excellent [xxhash](https://github.com/Cyan4973/xxHash)* algorithm, an extremely fast non-cryptographic Hash algorithm, working at speeds close to RAM limits.
-
-* The C implementation is ([Copyright](https://github.com/Cyan4973/xxHash/blob/master/LICENSE) (c) 2012-2014, Yann Collet)
-
-## Install
-
- go get github.com/OneOfOne/xxhash
-
-## Features
-
-* On Go 1.7+ the pure go version is faster than CGO for all inputs.
-* Supports ChecksumString{32,64} xxhash{32,64}.WriteString, which uses no copies when it can, falls back to copy on appengine.
-* The native version falls back to a less optimized version on appengine due to the lack of unsafe.
-* Almost as fast as the mostly pure assembly version written by the brilliant [cespare](https://github.com/cespare/xxhash), while also supporting seeds.
-* To manually toggle the appengine version build with `-tags safe`.
-
-## Benchmark
-
-### Core i7-4790 @ 3.60GHz, Linux 4.12.6-1-ARCH (64bit), Go tip (+ff90f4af66 2017-08-19)
-
-```bash
-➤ go test -bench '64' -count 5 -tags cespare | benchstat /dev/stdin
-name time/op
-
-# https://github.com/cespare/xxhash
-XXSum64Cespare/Func-8 160ns ± 2%
-XXSum64Cespare/Struct-8 173ns ± 1%
-XXSum64ShortCespare/Func-8 6.78ns ± 1%
-XXSum64ShortCespare/Struct-8 19.6ns ± 2%
-
-# this package (default mode, using unsafe)
-XXSum64/Func-8 170ns ± 1%
-XXSum64/Struct-8 182ns ± 1%
-XXSum64Short/Func-8 13.5ns ± 3%
-XXSum64Short/Struct-8 20.4ns ± 0%
-
-# this package (appengine, *not* using unsafe)
-XXSum64/Func-8 241ns ± 5%
-XXSum64/Struct-8 243ns ± 6%
-XXSum64Short/Func-8 15.2ns ± 2%
-XXSum64Short/Struct-8 23.7ns ± 5%
-
-CRC64ISO-8 1.23µs ± 1%
-CRC64ISOString-8 2.71µs ± 4%
-CRC64ISOShort-8 22.2ns ± 3%
-
-Fnv64-8 2.34µs ± 1%
-Fnv64Short-8 74.7ns ± 8%
-```
-
-## Usage
-
-```go
- h := xxhash.New64()
- // r, err := os.Open("......")
- // defer f.Close()
- r := strings.NewReader(F)
- io.Copy(h, r)
- fmt.Println("xxhash.Backend:", xxhash.Backend)
- fmt.Println("File checksum:", h.Sum64())
-```
-
-[playground](https://play.golang.org/p/wHKBwfu6CPV)
-
-## TODO
-
-* Rewrite the 32bit version to be more optimized.
-* General cleanup as the Go inliner gets smarter.
-
-## License
-
-This project is released under the Apache v2. license. See [LICENSE](LICENSE) for more details.
diff --git a/vendor/github.com/OneOfOne/xxhash/xxhash.go b/vendor/github.com/OneOfOne/xxhash/xxhash.go
deleted file mode 100644
index af2496b77f..0000000000
--- a/vendor/github.com/OneOfOne/xxhash/xxhash.go
+++ /dev/null
@@ -1,294 +0,0 @@
-package xxhash
-
-import (
- "encoding/binary"
- "errors"
- "hash"
-)
-
-const (
- prime32x1 uint32 = 2654435761
- prime32x2 uint32 = 2246822519
- prime32x3 uint32 = 3266489917
- prime32x4 uint32 = 668265263
- prime32x5 uint32 = 374761393
-
- prime64x1 uint64 = 11400714785074694791
- prime64x2 uint64 = 14029467366897019727
- prime64x3 uint64 = 1609587929392839161
- prime64x4 uint64 = 9650029242287828579
- prime64x5 uint64 = 2870177450012600261
-
- maxInt32 int32 = (1<<31 - 1)
-
- // precomputed zero Vs for seed 0
- zero64x1 = 0x60ea27eeadc0b5d6
- zero64x2 = 0xc2b2ae3d27d4eb4f
- zero64x3 = 0x0
- zero64x4 = 0x61c8864e7a143579
-)
-
-const (
- magic32 = "xxh\x07"
- magic64 = "xxh\x08"
- marshaled32Size = len(magic32) + 4*7 + 16
- marshaled64Size = len(magic64) + 8*6 + 32 + 1
-)
-
-func NewHash32() hash.Hash { return New32() }
-func NewHash64() hash.Hash { return New64() }
-
-// Checksum32 returns the checksum of the input data with the seed set to 0.
-func Checksum32(in []byte) uint32 {
- return Checksum32S(in, 0)
-}
-
-// ChecksumString32 returns the checksum of the input data, without creating a copy, with the seed set to 0.
-func ChecksumString32(s string) uint32 {
- return ChecksumString32S(s, 0)
-}
-
-type XXHash32 struct {
- mem [16]byte
- ln, memIdx int32
- v1, v2, v3, v4 uint32
- seed uint32
-}
-
-// Size returns the number of bytes Sum will return.
-func (xx *XXHash32) Size() int {
- return 4
-}
-
-// BlockSize returns the hash's underlying block size.
-// The Write method must be able to accept any amount
-// of data, but it may operate more efficiently if all writes
-// are a multiple of the block size.
-func (xx *XXHash32) BlockSize() int {
- return 16
-}
-
-// NewS32 creates a new hash.Hash32 computing the 32bit xxHash checksum starting with the specific seed.
-func NewS32(seed uint32) (xx *XXHash32) {
- xx = &XXHash32{
- seed: seed,
- }
- xx.Reset()
- return
-}
-
-// New32 creates a new hash.Hash32 computing the 32bit xxHash checksum starting with the seed set to 0.
-func New32() *XXHash32 {
- return NewS32(0)
-}
-
-func (xx *XXHash32) Reset() {
- xx.v1 = xx.seed + prime32x1 + prime32x2
- xx.v2 = xx.seed + prime32x2
- xx.v3 = xx.seed
- xx.v4 = xx.seed - prime32x1
- xx.ln, xx.memIdx = 0, 0
-}
-
-// Sum appends the current hash to b and returns the resulting slice.
-// It does not change the underlying hash state.
-func (xx *XXHash32) Sum(in []byte) []byte {
- s := xx.Sum32()
- return append(in, byte(s>>24), byte(s>>16), byte(s>>8), byte(s))
-}
-
-// MarshalBinary implements the encoding.BinaryMarshaler interface.
-func (xx *XXHash32) MarshalBinary() ([]byte, error) {
- b := make([]byte, 0, marshaled32Size)
- b = append(b, magic32...)
- b = appendUint32(b, xx.v1)
- b = appendUint32(b, xx.v2)
- b = appendUint32(b, xx.v3)
- b = appendUint32(b, xx.v4)
- b = appendUint32(b, xx.seed)
- b = appendInt32(b, xx.ln)
- b = appendInt32(b, xx.memIdx)
- b = append(b, xx.mem[:]...)
- return b, nil
-}
-
-// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface.
-func (xx *XXHash32) UnmarshalBinary(b []byte) error {
- if len(b) < len(magic32) || string(b[:len(magic32)]) != magic32 {
- return errors.New("xxhash: invalid hash state identifier")
- }
- if len(b) != marshaled32Size {
- return errors.New("xxhash: invalid hash state size")
- }
- b = b[len(magic32):]
- b, xx.v1 = consumeUint32(b)
- b, xx.v2 = consumeUint32(b)
- b, xx.v3 = consumeUint32(b)
- b, xx.v4 = consumeUint32(b)
- b, xx.seed = consumeUint32(b)
- b, xx.ln = consumeInt32(b)
- b, xx.memIdx = consumeInt32(b)
- copy(xx.mem[:], b)
- return nil
-}
-
-// Checksum64 an alias for Checksum64S(in, 0)
-func Checksum64(in []byte) uint64 {
- return Checksum64S(in, 0)
-}
-
-// ChecksumString64 returns the checksum of the input data, without creating a copy, with the seed set to 0.
-func ChecksumString64(s string) uint64 {
- return ChecksumString64S(s, 0)
-}
-
-type XXHash64 struct {
- v1, v2, v3, v4 uint64
- seed uint64
- ln uint64
- mem [32]byte
- memIdx int8
-}
-
-// Size returns the number of bytes Sum will return.
-func (xx *XXHash64) Size() int {
- return 8
-}
-
-// BlockSize returns the hash's underlying block size.
-// The Write method must be able to accept any amount
-// of data, but it may operate more efficiently if all writes
-// are a multiple of the block size.
-func (xx *XXHash64) BlockSize() int {
- return 32
-}
-
-// NewS64 creates a new hash.Hash64 computing the 64bit xxHash checksum starting with the specific seed.
-func NewS64(seed uint64) (xx *XXHash64) {
- xx = &XXHash64{
- seed: seed,
- }
- xx.Reset()
- return
-}
-
-// New64 creates a new hash.Hash64 computing the 64bit xxHash checksum starting with the seed set to 0x0.
-func New64() *XXHash64 {
- return NewS64(0)
-}
-
-func (xx *XXHash64) Reset() {
- xx.ln, xx.memIdx = 0, 0
- xx.v1, xx.v2, xx.v3, xx.v4 = resetVs64(xx.seed)
-}
-
-// Sum appends the current hash to b and returns the resulting slice.
-// It does not change the underlying hash state.
-func (xx *XXHash64) Sum(in []byte) []byte {
- s := xx.Sum64()
- return append(in, byte(s>>56), byte(s>>48), byte(s>>40), byte(s>>32), byte(s>>24), byte(s>>16), byte(s>>8), byte(s))
-}
-
-// MarshalBinary implements the encoding.BinaryMarshaler interface.
-func (xx *XXHash64) MarshalBinary() ([]byte, error) {
- b := make([]byte, 0, marshaled64Size)
- b = append(b, magic64...)
- b = appendUint64(b, xx.v1)
- b = appendUint64(b, xx.v2)
- b = appendUint64(b, xx.v3)
- b = appendUint64(b, xx.v4)
- b = appendUint64(b, xx.seed)
- b = appendUint64(b, xx.ln)
- b = append(b, byte(xx.memIdx))
- b = append(b, xx.mem[:]...)
- return b, nil
-}
-
-// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface.
-func (xx *XXHash64) UnmarshalBinary(b []byte) error {
- if len(b) < len(magic64) || string(b[:len(magic64)]) != magic64 {
- return errors.New("xxhash: invalid hash state identifier")
- }
- if len(b) != marshaled64Size {
- return errors.New("xxhash: invalid hash state size")
- }
- b = b[len(magic64):]
- b, xx.v1 = consumeUint64(b)
- b, xx.v2 = consumeUint64(b)
- b, xx.v3 = consumeUint64(b)
- b, xx.v4 = consumeUint64(b)
- b, xx.seed = consumeUint64(b)
- b, xx.ln = consumeUint64(b)
- xx.memIdx = int8(b[0])
- b = b[1:]
- copy(xx.mem[:], b)
- return nil
-}
-
-func appendInt32(b []byte, x int32) []byte { return appendUint32(b, uint32(x)) }
-
-func appendUint32(b []byte, x uint32) []byte {
- var a [4]byte
- binary.LittleEndian.PutUint32(a[:], x)
- return append(b, a[:]...)
-}
-
-func appendUint64(b []byte, x uint64) []byte {
- var a [8]byte
- binary.LittleEndian.PutUint64(a[:], x)
- return append(b, a[:]...)
-}
-
-func consumeInt32(b []byte) ([]byte, int32) { bn, x := consumeUint32(b); return bn, int32(x) }
-func consumeUint32(b []byte) ([]byte, uint32) { x := u32(b); return b[4:], x }
-func consumeUint64(b []byte) ([]byte, uint64) { x := u64(b); return b[8:], x }
-
-// force the compiler to use ROTL instructions
-
-func rotl32_1(x uint32) uint32 { return (x << 1) | (x >> (32 - 1)) }
-func rotl32_7(x uint32) uint32 { return (x << 7) | (x >> (32 - 7)) }
-func rotl32_11(x uint32) uint32 { return (x << 11) | (x >> (32 - 11)) }
-func rotl32_12(x uint32) uint32 { return (x << 12) | (x >> (32 - 12)) }
-func rotl32_13(x uint32) uint32 { return (x << 13) | (x >> (32 - 13)) }
-func rotl32_17(x uint32) uint32 { return (x << 17) | (x >> (32 - 17)) }
-func rotl32_18(x uint32) uint32 { return (x << 18) | (x >> (32 - 18)) }
-
-func rotl64_1(x uint64) uint64 { return (x << 1) | (x >> (64 - 1)) }
-func rotl64_7(x uint64) uint64 { return (x << 7) | (x >> (64 - 7)) }
-func rotl64_11(x uint64) uint64 { return (x << 11) | (x >> (64 - 11)) }
-func rotl64_12(x uint64) uint64 { return (x << 12) | (x >> (64 - 12)) }
-func rotl64_18(x uint64) uint64 { return (x << 18) | (x >> (64 - 18)) }
-func rotl64_23(x uint64) uint64 { return (x << 23) | (x >> (64 - 23)) }
-func rotl64_27(x uint64) uint64 { return (x << 27) | (x >> (64 - 27)) }
-func rotl64_31(x uint64) uint64 { return (x << 31) | (x >> (64 - 31)) }
-
-func mix64(h uint64) uint64 {
- h ^= h >> 33
- h *= prime64x2
- h ^= h >> 29
- h *= prime64x3
- h ^= h >> 32
- return h
-}
-
-func resetVs64(seed uint64) (v1, v2, v3, v4 uint64) {
- if seed == 0 {
- return zero64x1, zero64x2, zero64x3, zero64x4
- }
- return (seed + prime64x1 + prime64x2), (seed + prime64x2), (seed), (seed - prime64x1)
-}
-
-// borrowed from cespare
-func round64(h, v uint64) uint64 {
- h += v * prime64x2
- h = rotl64_31(h)
- h *= prime64x1
- return h
-}
-
-func mergeRound64(h, v uint64) uint64 {
- v = round64(0, v)
- h ^= v
- h = h*prime64x1 + prime64x4
- return h
-}
diff --git a/vendor/github.com/OneOfOne/xxhash/xxhash_go17.go b/vendor/github.com/OneOfOne/xxhash/xxhash_go17.go
deleted file mode 100644
index ae48e0c5ca..0000000000
--- a/vendor/github.com/OneOfOne/xxhash/xxhash_go17.go
+++ /dev/null
@@ -1,161 +0,0 @@
-package xxhash
-
-func u32(in []byte) uint32 {
- return uint32(in[0]) | uint32(in[1])<<8 | uint32(in[2])<<16 | uint32(in[3])<<24
-}
-
-func u64(in []byte) uint64 {
- return uint64(in[0]) | uint64(in[1])<<8 | uint64(in[2])<<16 | uint64(in[3])<<24 | uint64(in[4])<<32 | uint64(in[5])<<40 | uint64(in[6])<<48 | uint64(in[7])<<56
-}
-
-// Checksum32S returns the checksum of the input bytes with the specific seed.
-func Checksum32S(in []byte, seed uint32) (h uint32) {
- var i int
-
- if len(in) > 15 {
- var (
- v1 = seed + prime32x1 + prime32x2
- v2 = seed + prime32x2
- v3 = seed + 0
- v4 = seed - prime32x1
- )
- for ; i < len(in)-15; i += 16 {
- in := in[i : i+16 : len(in)]
- v1 += u32(in[0:4:len(in)]) * prime32x2
- v1 = rotl32_13(v1) * prime32x1
-
- v2 += u32(in[4:8:len(in)]) * prime32x2
- v2 = rotl32_13(v2) * prime32x1
-
- v3 += u32(in[8:12:len(in)]) * prime32x2
- v3 = rotl32_13(v3) * prime32x1
-
- v4 += u32(in[12:16:len(in)]) * prime32x2
- v4 = rotl32_13(v4) * prime32x1
- }
-
- h = rotl32_1(v1) + rotl32_7(v2) + rotl32_12(v3) + rotl32_18(v4)
-
- } else {
- h = seed + prime32x5
- }
-
- h += uint32(len(in))
- for ; i <= len(in)-4; i += 4 {
- in := in[i : i+4 : len(in)]
- h += u32(in[0:4:len(in)]) * prime32x3
- h = rotl32_17(h) * prime32x4
- }
-
- for ; i < len(in); i++ {
- h += uint32(in[i]) * prime32x5
- h = rotl32_11(h) * prime32x1
- }
-
- h ^= h >> 15
- h *= prime32x2
- h ^= h >> 13
- h *= prime32x3
- h ^= h >> 16
-
- return
-}
-
-func (xx *XXHash32) Write(in []byte) (n int, err error) {
- i, ml := 0, int(xx.memIdx)
- n = len(in)
- xx.ln += int32(n)
-
- if d := 16 - ml; ml > 0 && ml+len(in) > 16 {
- xx.memIdx += int32(copy(xx.mem[xx.memIdx:], in[:d]))
- ml, in = 16, in[d:len(in):len(in)]
- } else if ml+len(in) < 16 {
- xx.memIdx += int32(copy(xx.mem[xx.memIdx:], in))
- return
- }
-
- if ml > 0 {
- i += 16 - ml
- xx.memIdx += int32(copy(xx.mem[xx.memIdx:len(xx.mem):len(xx.mem)], in))
- in := xx.mem[:16:len(xx.mem)]
-
- xx.v1 += u32(in[0:4:len(in)]) * prime32x2
- xx.v1 = rotl32_13(xx.v1) * prime32x1
-
- xx.v2 += u32(in[4:8:len(in)]) * prime32x2
- xx.v2 = rotl32_13(xx.v2) * prime32x1
-
- xx.v3 += u32(in[8:12:len(in)]) * prime32x2
- xx.v3 = rotl32_13(xx.v3) * prime32x1
-
- xx.v4 += u32(in[12:16:len(in)]) * prime32x2
- xx.v4 = rotl32_13(xx.v4) * prime32x1
-
- xx.memIdx = 0
- }
-
- for ; i <= len(in)-16; i += 16 {
- in := in[i : i+16 : len(in)]
- xx.v1 += u32(in[0:4:len(in)]) * prime32x2
- xx.v1 = rotl32_13(xx.v1) * prime32x1
-
- xx.v2 += u32(in[4:8:len(in)]) * prime32x2
- xx.v2 = rotl32_13(xx.v2) * prime32x1
-
- xx.v3 += u32(in[8:12:len(in)]) * prime32x2
- xx.v3 = rotl32_13(xx.v3) * prime32x1
-
- xx.v4 += u32(in[12:16:len(in)]) * prime32x2
- xx.v4 = rotl32_13(xx.v4) * prime32x1
- }
-
- if len(in)-i != 0 {
- xx.memIdx += int32(copy(xx.mem[xx.memIdx:], in[i:len(in):len(in)]))
- }
-
- return
-}
-
-func (xx *XXHash32) Sum32() (h uint32) {
- var i int32
- if xx.ln > 15 {
- h = rotl32_1(xx.v1) + rotl32_7(xx.v2) + rotl32_12(xx.v3) + rotl32_18(xx.v4)
- } else {
- h = xx.seed + prime32x5
- }
-
- h += uint32(xx.ln)
-
- if xx.memIdx > 0 {
- for ; i < xx.memIdx-3; i += 4 {
- in := xx.mem[i : i+4 : len(xx.mem)]
- h += u32(in[0:4:len(in)]) * prime32x3
- h = rotl32_17(h) * prime32x4
- }
-
- for ; i < xx.memIdx; i++ {
- h += uint32(xx.mem[i]) * prime32x5
- h = rotl32_11(h) * prime32x1
- }
- }
- h ^= h >> 15
- h *= prime32x2
- h ^= h >> 13
- h *= prime32x3
- h ^= h >> 16
-
- return
-}
-
-// Checksum64S returns the 64bit xxhash checksum for a single input
-func Checksum64S(in []byte, seed uint64) uint64 {
- if len(in) == 0 && seed == 0 {
- return 0xef46db3751d8e999
- }
-
- if len(in) > 31 {
- return checksum64(in, seed)
- }
-
- return checksum64Short(in, seed)
-}
diff --git a/vendor/github.com/OneOfOne/xxhash/xxhash_safe.go b/vendor/github.com/OneOfOne/xxhash/xxhash_safe.go
deleted file mode 100644
index e92ec29e02..0000000000
--- a/vendor/github.com/OneOfOne/xxhash/xxhash_safe.go
+++ /dev/null
@@ -1,183 +0,0 @@
-// +build appengine safe ppc64le ppc64be mipsle mips s390x
-
-package xxhash
-
-// Backend returns the current version of xxhash being used.
-const Backend = "GoSafe"
-
-func ChecksumString32S(s string, seed uint32) uint32 {
- return Checksum32S([]byte(s), seed)
-}
-
-func (xx *XXHash32) WriteString(s string) (int, error) {
- if len(s) == 0 {
- return 0, nil
- }
- return xx.Write([]byte(s))
-}
-
-func ChecksumString64S(s string, seed uint64) uint64 {
- return Checksum64S([]byte(s), seed)
-}
-
-func (xx *XXHash64) WriteString(s string) (int, error) {
- if len(s) == 0 {
- return 0, nil
- }
- return xx.Write([]byte(s))
-}
-
-func checksum64(in []byte, seed uint64) (h uint64) {
- var (
- v1, v2, v3, v4 = resetVs64(seed)
-
- i int
- )
-
- for ; i < len(in)-31; i += 32 {
- in := in[i : i+32 : len(in)]
- v1 = round64(v1, u64(in[0:8:len(in)]))
- v2 = round64(v2, u64(in[8:16:len(in)]))
- v3 = round64(v3, u64(in[16:24:len(in)]))
- v4 = round64(v4, u64(in[24:32:len(in)]))
- }
-
- h = rotl64_1(v1) + rotl64_7(v2) + rotl64_12(v3) + rotl64_18(v4)
-
- h = mergeRound64(h, v1)
- h = mergeRound64(h, v2)
- h = mergeRound64(h, v3)
- h = mergeRound64(h, v4)
-
- h += uint64(len(in))
-
- for ; i < len(in)-7; i += 8 {
- h ^= round64(0, u64(in[i:len(in):len(in)]))
- h = rotl64_27(h)*prime64x1 + prime64x4
- }
-
- for ; i < len(in)-3; i += 4 {
- h ^= uint64(u32(in[i:len(in):len(in)])) * prime64x1
- h = rotl64_23(h)*prime64x2 + prime64x3
- }
-
- for ; i < len(in); i++ {
- h ^= uint64(in[i]) * prime64x5
- h = rotl64_11(h) * prime64x1
- }
-
- return mix64(h)
-}
-
-func checksum64Short(in []byte, seed uint64) uint64 {
- var (
- h = seed + prime64x5 + uint64(len(in))
- i int
- )
-
- for ; i < len(in)-7; i += 8 {
- k := u64(in[i : i+8 : len(in)])
- h ^= round64(0, k)
- h = rotl64_27(h)*prime64x1 + prime64x4
- }
-
- for ; i < len(in)-3; i += 4 {
- h ^= uint64(u32(in[i:i+4:len(in)])) * prime64x1
- h = rotl64_23(h)*prime64x2 + prime64x3
- }
-
- for ; i < len(in); i++ {
- h ^= uint64(in[i]) * prime64x5
- h = rotl64_11(h) * prime64x1
- }
-
- return mix64(h)
-}
-
-func (xx *XXHash64) Write(in []byte) (n int, err error) {
- var (
- ml = int(xx.memIdx)
- d = 32 - ml
- )
-
- n = len(in)
- xx.ln += uint64(n)
-
- if ml+len(in) < 32 {
- xx.memIdx += int8(copy(xx.mem[xx.memIdx:len(xx.mem):len(xx.mem)], in))
- return
- }
-
- i, v1, v2, v3, v4 := 0, xx.v1, xx.v2, xx.v3, xx.v4
- if ml > 0 && ml+len(in) > 32 {
- xx.memIdx += int8(copy(xx.mem[xx.memIdx:len(xx.mem):len(xx.mem)], in[:d:len(in)]))
- in = in[d:len(in):len(in)]
-
- in := xx.mem[0:32:len(xx.mem)]
-
- v1 = round64(v1, u64(in[0:8:len(in)]))
- v2 = round64(v2, u64(in[8:16:len(in)]))
- v3 = round64(v3, u64(in[16:24:len(in)]))
- v4 = round64(v4, u64(in[24:32:len(in)]))
-
- xx.memIdx = 0
- }
-
- for ; i < len(in)-31; i += 32 {
- in := in[i : i+32 : len(in)]
- v1 = round64(v1, u64(in[0:8:len(in)]))
- v2 = round64(v2, u64(in[8:16:len(in)]))
- v3 = round64(v3, u64(in[16:24:len(in)]))
- v4 = round64(v4, u64(in[24:32:len(in)]))
- }
-
- if len(in)-i != 0 {
- xx.memIdx += int8(copy(xx.mem[xx.memIdx:], in[i:len(in):len(in)]))
- }
-
- xx.v1, xx.v2, xx.v3, xx.v4 = v1, v2, v3, v4
-
- return
-}
-
-func (xx *XXHash64) Sum64() (h uint64) {
- var i int
- if xx.ln > 31 {
- v1, v2, v3, v4 := xx.v1, xx.v2, xx.v3, xx.v4
- h = rotl64_1(v1) + rotl64_7(v2) + rotl64_12(v3) + rotl64_18(v4)
-
- h = mergeRound64(h, v1)
- h = mergeRound64(h, v2)
- h = mergeRound64(h, v3)
- h = mergeRound64(h, v4)
- } else {
- h = xx.seed + prime64x5
- }
-
- h += uint64(xx.ln)
- if xx.memIdx > 0 {
- in := xx.mem[:xx.memIdx]
- for ; i < int(xx.memIdx)-7; i += 8 {
- in := in[i : i+8 : len(in)]
- k := u64(in[0:8:len(in)])
- k *= prime64x2
- k = rotl64_31(k)
- k *= prime64x1
- h ^= k
- h = rotl64_27(h)*prime64x1 + prime64x4
- }
-
- for ; i < int(xx.memIdx)-3; i += 4 {
- in := in[i : i+4 : len(in)]
- h ^= uint64(u32(in[0:4:len(in)])) * prime64x1
- h = rotl64_23(h)*prime64x2 + prime64x3
- }
-
- for ; i < int(xx.memIdx); i++ {
- h ^= uint64(in[i]) * prime64x5
- h = rotl64_11(h) * prime64x1
- }
- }
-
- return mix64(h)
-}
diff --git a/vendor/github.com/OneOfOne/xxhash/xxhash_unsafe.go b/vendor/github.com/OneOfOne/xxhash/xxhash_unsafe.go
deleted file mode 100644
index 1e2b5e8f1f..0000000000
--- a/vendor/github.com/OneOfOne/xxhash/xxhash_unsafe.go
+++ /dev/null
@@ -1,240 +0,0 @@
-// +build !safe
-// +build !appengine
-// +build !ppc64le
-// +build !mipsle
-// +build !ppc64be
-// +build !mips
-// +build !s390x
-
-package xxhash
-
-import (
- "reflect"
- "unsafe"
-)
-
-// Backend returns the current version of xxhash being used.
-const Backend = "GoUnsafe"
-
-// ChecksumString32S returns the checksum of the input data, without creating a copy, with the specific seed.
-func ChecksumString32S(s string, seed uint32) uint32 {
- if len(s) == 0 {
- return Checksum32S(nil, seed)
- }
- ss := (*reflect.StringHeader)(unsafe.Pointer(&s))
- return Checksum32S((*[maxInt32]byte)(unsafe.Pointer(ss.Data))[:len(s):len(s)], seed)
-}
-
-func (xx *XXHash32) WriteString(s string) (int, error) {
- if len(s) == 0 {
- return 0, nil
- }
-
- ss := (*reflect.StringHeader)(unsafe.Pointer(&s))
- return xx.Write((*[maxInt32]byte)(unsafe.Pointer(ss.Data))[:len(s):len(s)])
-}
-
-// ChecksumString64S returns the checksum of the input data, without creating a copy, with the specific seed.
-func ChecksumString64S(s string, seed uint64) uint64 {
- if len(s) == 0 {
- return Checksum64S(nil, seed)
- }
-
- ss := (*reflect.StringHeader)(unsafe.Pointer(&s))
- return Checksum64S((*[maxInt32]byte)(unsafe.Pointer(ss.Data))[:len(s):len(s)], seed)
-}
-
-func (xx *XXHash64) WriteString(s string) (int, error) {
- if len(s) == 0 {
- return 0, nil
- }
- ss := (*reflect.StringHeader)(unsafe.Pointer(&s))
- return xx.Write((*[maxInt32]byte)(unsafe.Pointer(ss.Data))[:len(s):len(s)])
-}
-
-//go:nocheckptr
-func checksum64(in []byte, seed uint64) uint64 {
- var (
- wordsLen = len(in) >> 3
- words = ((*[maxInt32 / 8]uint64)(unsafe.Pointer(&in[0])))[:wordsLen:wordsLen]
-
- v1, v2, v3, v4 = resetVs64(seed)
-
- h uint64
- i int
- )
-
- for ; i < len(words)-3; i += 4 {
- words := (*[4]uint64)(unsafe.Pointer(&words[i]))
-
- v1 = round64(v1, words[0])
- v2 = round64(v2, words[1])
- v3 = round64(v3, words[2])
- v4 = round64(v4, words[3])
- }
-
- h = rotl64_1(v1) + rotl64_7(v2) + rotl64_12(v3) + rotl64_18(v4)
-
- h = mergeRound64(h, v1)
- h = mergeRound64(h, v2)
- h = mergeRound64(h, v3)
- h = mergeRound64(h, v4)
-
- h += uint64(len(in))
-
- for _, k := range words[i:] {
- h ^= round64(0, k)
- h = rotl64_27(h)*prime64x1 + prime64x4
- }
-
- if in = in[wordsLen<<3 : len(in) : len(in)]; len(in) > 3 {
- words := (*[1]uint32)(unsafe.Pointer(&in[0]))
- h ^= uint64(words[0]) * prime64x1
- h = rotl64_23(h)*prime64x2 + prime64x3
-
- in = in[4:len(in):len(in)]
- }
-
- for _, b := range in {
- h ^= uint64(b) * prime64x5
- h = rotl64_11(h) * prime64x1
- }
-
- return mix64(h)
-}
-
-//go:nocheckptr
-func checksum64Short(in []byte, seed uint64) uint64 {
- var (
- h = seed + prime64x5 + uint64(len(in))
- i int
- )
-
- if len(in) > 7 {
- var (
- wordsLen = len(in) >> 3
- words = ((*[maxInt32 / 8]uint64)(unsafe.Pointer(&in[0])))[:wordsLen:wordsLen]
- )
-
- for i := range words {
- h ^= round64(0, words[i])
- h = rotl64_27(h)*prime64x1 + prime64x4
- }
-
- i = wordsLen << 3
- }
-
- if in = in[i:len(in):len(in)]; len(in) > 3 {
- words := (*[1]uint32)(unsafe.Pointer(&in[0]))
- h ^= uint64(words[0]) * prime64x1
- h = rotl64_23(h)*prime64x2 + prime64x3
-
- in = in[4:len(in):len(in)]
- }
-
- for _, b := range in {
- h ^= uint64(b) * prime64x5
- h = rotl64_11(h) * prime64x1
- }
-
- return mix64(h)
-}
-
-func (xx *XXHash64) Write(in []byte) (n int, err error) {
- mem, idx := xx.mem[:], int(xx.memIdx)
-
- xx.ln, n = xx.ln+uint64(len(in)), len(in)
-
- if idx+len(in) < 32 {
- xx.memIdx += int8(copy(mem[idx:len(mem):len(mem)], in))
- return
- }
-
- var (
- v1, v2, v3, v4 = xx.v1, xx.v2, xx.v3, xx.v4
-
- i int
- )
-
- if d := 32 - int(idx); d > 0 && int(idx)+len(in) > 31 {
- copy(mem[idx:len(mem):len(mem)], in[:len(in):len(in)])
-
- words := (*[4]uint64)(unsafe.Pointer(&mem[0]))
-
- v1 = round64(v1, words[0])
- v2 = round64(v2, words[1])
- v3 = round64(v3, words[2])
- v4 = round64(v4, words[3])
-
- if in, xx.memIdx = in[d:len(in):len(in)], 0; len(in) == 0 {
- goto RET
- }
- }
-
- for ; i < len(in)-31; i += 32 {
- words := (*[4]uint64)(unsafe.Pointer(&in[i]))
-
- v1 = round64(v1, words[0])
- v2 = round64(v2, words[1])
- v3 = round64(v3, words[2])
- v4 = round64(v4, words[3])
- }
-
- if len(in)-i != 0 {
- xx.memIdx += int8(copy(mem[xx.memIdx:len(mem):len(mem)], in[i:len(in):len(in)]))
- }
-
-RET:
- xx.v1, xx.v2, xx.v3, xx.v4 = v1, v2, v3, v4
-
- return
-}
-
-func (xx *XXHash64) Sum64() (h uint64) {
- if seed := xx.seed; xx.ln > 31 {
- v1, v2, v3, v4 := xx.v1, xx.v2, xx.v3, xx.v4
- h = rotl64_1(v1) + rotl64_7(v2) + rotl64_12(v3) + rotl64_18(v4)
-
- h = mergeRound64(h, v1)
- h = mergeRound64(h, v2)
- h = mergeRound64(h, v3)
- h = mergeRound64(h, v4)
- } else if seed == 0 {
- h = prime64x5
- } else {
- h = seed + prime64x5
- }
-
- h += uint64(xx.ln)
-
- if xx.memIdx == 0 {
- return mix64(h)
- }
-
- var (
- in = xx.mem[:xx.memIdx:xx.memIdx]
- wordsLen = len(in) >> 3
- words = ((*[maxInt32 / 8]uint64)(unsafe.Pointer(&in[0])))[:wordsLen:wordsLen]
- )
-
- for _, k := range words {
- h ^= round64(0, k)
- h = rotl64_27(h)*prime64x1 + prime64x4
- }
-
- if in = in[wordsLen<<3 : len(in) : len(in)]; len(in) > 3 {
- words := (*[1]uint32)(unsafe.Pointer(&in[0]))
-
- h ^= uint64(words[0]) * prime64x1
- h = rotl64_23(h)*prime64x2 + prime64x3
-
- in = in[4:len(in):len(in)]
- }
-
- for _, b := range in {
- h ^= uint64(b) * prime64x5
- h = rotl64_11(h) * prime64x1
- }
-
- return mix64(h)
-}
diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go
index 9d6c473fdc..96a80c99b8 100644
--- a/vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go
+++ b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go
@@ -104,7 +104,7 @@ func (r *roffRenderer) RenderNode(w io.Writer, node *blackfriday.Node, entering
node.Parent.Prev.Type == blackfriday.Heading &&
node.Parent.Prev.FirstChild != nil &&
bytes.EqualFold(node.Parent.Prev.FirstChild.Literal, []byte("NAME")) {
- before, after, found := bytes.Cut(node.Literal, []byte(" - "))
+ before, after, found := bytesCut(node.Literal, []byte(" - "))
escapeSpecialChars(w, before)
if found {
out(w, ` \- `)
@@ -406,3 +406,12 @@ func escapeSpecialCharsLine(w io.Writer, text []byte) {
w.Write([]byte{'\\', text[i]}) // nolint: errcheck
}
}
+
+// bytesCut is a copy of [bytes.Cut] to provide compatibility with go1.17
+// and older. We can remove this once we drop support for go1.17 and older.
+func bytesCut(s, sep []byte) (before, after []byte, found bool) {
+ if i := bytes.Index(s, sep); i >= 0 {
+ return s[:i], s[i+len(sep):], true
+ }
+ return s, nil, false
+}
diff --git a/vendor/github.com/open-policy-agent/opa/ast/annotations.go b/vendor/github.com/open-policy-agent/opa/ast/annotations.go
index 533290d323..3bc5fb36a5 100644
--- a/vendor/github.com/open-policy-agent/opa/ast/annotations.go
+++ b/vendor/github.com/open-policy-agent/opa/ast/annotations.go
@@ -31,3 +31,7 @@ type (
func NewAnnotationsRef(a *Annotations) *AnnotationsRef {
return v1.NewAnnotationsRef(a)
}
+
+func BuildAnnotationSet(modules []*Module) (*AnnotationSet, Errors) {
+ return v1.BuildAnnotationSet(modules)
+}
diff --git a/vendor/github.com/open-policy-agent/opa/ast/parser_ext.go b/vendor/github.com/open-policy-agent/opa/ast/parser_ext.go
index 3b8b406825..2d59616932 100644
--- a/vendor/github.com/open-policy-agent/opa/ast/parser_ext.go
+++ b/vendor/github.com/open-policy-agent/opa/ast/parser_ext.go
@@ -5,6 +5,7 @@
package ast
import (
+ "errors"
"fmt"
v1 "github.com/open-policy-agent/opa/v1/ast"
@@ -279,7 +280,7 @@ func ParseStatement(input string) (Statement, error) {
return nil, err
}
if len(stmts) != 1 {
- return nil, fmt.Errorf("expected exactly one statement")
+ return nil, errors.New("expected exactly one statement")
}
return stmts[0], nil
}
diff --git a/vendor/github.com/open-policy-agent/opa/ast/policy.go b/vendor/github.com/open-policy-agent/opa/ast/policy.go
index a29f0dcc75..3da7fdd636 100644
--- a/vendor/github.com/open-policy-agent/opa/ast/policy.go
+++ b/vendor/github.com/open-policy-agent/opa/ast/policy.go
@@ -184,7 +184,7 @@ func RefHead(ref Ref, args ...*Term) *Head {
}
// DocKind represents the collection of document types that can be produced by rules.
-type DocKind int
+type DocKind = v1.DocKind
const (
// CompleteDoc represents a document that is completely defined by the rule.
diff --git a/vendor/github.com/open-policy-agent/opa/capabilities/v1.2.0.json b/vendor/github.com/open-policy-agent/opa/capabilities/v1.2.0.json
new file mode 100644
index 0000000000..1253c88b30
--- /dev/null
+++ b/vendor/github.com/open-policy-agent/opa/capabilities/v1.2.0.json
@@ -0,0 +1,4849 @@
+{
+ "builtins": [
+ {
+ "name": "abs",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "all",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "and",
+ "decl": {
+ "args": [
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "result": {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ "type": "function"
+ },
+ "infix": "\u0026"
+ },
+ {
+ "name": "any",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "array.concat",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "array.reverse",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "array.slice",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "assign",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ },
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ },
+ "infix": ":="
+ },
+ {
+ "name": "base64.decode",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "base64.encode",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "base64.is_valid",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "base64url.decode",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "base64url.encode",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "base64url.encode_no_pad",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "bits.and",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "bits.lsh",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "bits.negate",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "bits.or",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "bits.rsh",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "bits.xor",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "cast_array",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "cast_boolean",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "cast_null",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "null"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "cast_object",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "cast_set",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "cast_string",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "ceil",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "concat",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "string"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "contains",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "count",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.hmac.equal",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.hmac.md5",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.hmac.sha1",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.hmac.sha256",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.hmac.sha512",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.md5",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.parse_private_keys",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.sha1",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.sha256",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.x509.parse_and_verify_certificates",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "static": [
+ {
+ "type": "boolean"
+ },
+ {
+ "dynamic": {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.x509.parse_and_verify_certificates_with_options",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "result": {
+ "static": [
+ {
+ "type": "boolean"
+ },
+ {
+ "dynamic": {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.x509.parse_certificate_request",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.x509.parse_certificates",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.x509.parse_keypair",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "crypto.x509.parse_rsa_private_key",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "div",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ },
+ "infix": "/"
+ },
+ {
+ "name": "endswith",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "eq",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ },
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ },
+ "infix": "="
+ },
+ {
+ "name": "equal",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ },
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ },
+ "infix": "=="
+ },
+ {
+ "name": "floor",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "format_int",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "glob.match",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "of": [
+ {
+ "type": "null"
+ },
+ {
+ "dynamic": {
+ "type": "string"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "glob.quote_meta",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "graph.reachable",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "of": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "graph.reachable_paths",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "of": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "of": {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ "type": "set"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "graphql.is_valid",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "type": "any"
+ },
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "graphql.parse",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "type": "any"
+ },
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "static": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "graphql.parse_and_verify",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "type": "any"
+ },
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "static": [
+ {
+ "type": "boolean"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "graphql.parse_query",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "graphql.parse_schema",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "graphql.schema_is_valid",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "gt",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ },
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ },
+ "infix": "\u003e"
+ },
+ {
+ "name": "gte",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ },
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ },
+ "infix": "\u003e="
+ },
+ {
+ "name": "hex.decode",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "hex.encode",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "http.send",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "function"
+ },
+ "nondeterministic": true
+ },
+ {
+ "name": "indexof",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "indexof_n",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "type": "number"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "internal.member_2",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ },
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ },
+ "infix": "in"
+ },
+ {
+ "name": "internal.member_3",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ },
+ {
+ "type": "any"
+ },
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ },
+ "infix": "in"
+ },
+ {
+ "name": "internal.print",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "function"
+ }
+ },
+ {
+ "name": "internal.test_case",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "function"
+ }
+ },
+ {
+ "name": "intersection",
+ "decl": {
+ "args": [
+ {
+ "of": {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ "type": "set"
+ }
+ ],
+ "result": {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "io.jwt.decode",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "static": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "io.jwt.decode_verify",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "result": {
+ "static": [
+ {
+ "type": "boolean"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "type": "array"
+ },
+ "type": "function"
+ },
+ "nondeterministic": true
+ },
+ {
+ "name": "io.jwt.encode_sign",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ },
+ "nondeterministic": true
+ },
+ {
+ "name": "io.jwt.encode_sign_raw",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ },
+ "nondeterministic": true
+ },
+ {
+ "name": "io.jwt.verify_es256",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "io.jwt.verify_es384",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "io.jwt.verify_es512",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "io.jwt.verify_hs256",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "io.jwt.verify_hs384",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "io.jwt.verify_hs512",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "io.jwt.verify_ps256",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "io.jwt.verify_ps384",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "io.jwt.verify_ps512",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "io.jwt.verify_rs256",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "io.jwt.verify_rs384",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "io.jwt.verify_rs512",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "is_array",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "is_boolean",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "is_null",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "is_number",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "is_object",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "is_set",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "is_string",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "json.filter",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "of": [
+ {
+ "dynamic": {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "json.is_valid",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "json.marshal",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "json.marshal_with_options",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "static": [
+ {
+ "key": "indent",
+ "value": {
+ "type": "string"
+ }
+ },
+ {
+ "key": "prefix",
+ "value": {
+ "type": "string"
+ }
+ },
+ {
+ "key": "pretty",
+ "value": {
+ "type": "boolean"
+ }
+ }
+ ],
+ "type": "object"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "json.match_schema",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "type": "any"
+ },
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "static": [
+ {
+ "type": "boolean"
+ },
+ {
+ "dynamic": {
+ "static": [
+ {
+ "key": "desc",
+ "value": {
+ "type": "string"
+ }
+ },
+ {
+ "key": "error",
+ "value": {
+ "type": "string"
+ }
+ },
+ {
+ "key": "field",
+ "value": {
+ "type": "string"
+ }
+ },
+ {
+ "key": "type",
+ "value": {
+ "type": "string"
+ }
+ }
+ ],
+ "type": "object"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "json.patch",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ },
+ {
+ "dynamic": {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "static": [
+ {
+ "key": "op",
+ "value": {
+ "type": "string"
+ }
+ },
+ {
+ "key": "path",
+ "value": {
+ "type": "any"
+ }
+ }
+ ],
+ "type": "object"
+ },
+ "type": "array"
+ }
+ ],
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "json.remove",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "of": [
+ {
+ "dynamic": {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "json.unmarshal",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "json.verify_schema",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "static": [
+ {
+ "type": "boolean"
+ },
+ {
+ "of": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "lower",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "lt",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ },
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ },
+ "infix": "\u003c"
+ },
+ {
+ "name": "lte",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ },
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ },
+ "infix": "\u003c="
+ },
+ {
+ "name": "max",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "min",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "minus",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "number"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ },
+ {
+ "of": [
+ {
+ "type": "number"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "of": [
+ {
+ "type": "number"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ },
+ "type": "function"
+ },
+ "infix": "-"
+ },
+ {
+ "name": "mul",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ },
+ "infix": "*"
+ },
+ {
+ "name": "neq",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ },
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ },
+ "infix": "!="
+ },
+ {
+ "name": "net.cidr_contains",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "net.cidr_contains_matches",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "of": {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ },
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "of": {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "of": {
+ "static": [
+ {
+ "type": "any"
+ },
+ {
+ "type": "any"
+ }
+ ],
+ "type": "array"
+ },
+ "type": "set"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "net.cidr_expand",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "of": {
+ "type": "string"
+ },
+ "type": "set"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "net.cidr_intersects",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "net.cidr_is_valid",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "net.cidr_merge",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "dynamic": {
+ "of": [
+ {
+ "type": "string"
+ }
+ ],
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "string"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "of": {
+ "type": "string"
+ },
+ "type": "set"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "net.cidr_overlap",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "net.lookup_ip_addr",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "of": {
+ "type": "string"
+ },
+ "type": "set"
+ },
+ "type": "function"
+ },
+ "nondeterministic": true
+ },
+ {
+ "name": "numbers.range",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "type": "number"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "numbers.range_step",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "type": "number"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "object.filter",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "object.get",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "type": "any"
+ },
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "object.keys",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "result": {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "object.remove",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "object.subset",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ },
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "object.union",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "object.union_n",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "array"
+ }
+ ],
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "opa.runtime",
+ "decl": {
+ "result": {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "function"
+ },
+ "nondeterministic": true
+ },
+ {
+ "name": "or",
+ "decl": {
+ "args": [
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "result": {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ "type": "function"
+ },
+ "infix": "|"
+ },
+ {
+ "name": "plus",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ },
+ "infix": "+"
+ },
+ {
+ "name": "print",
+ "decl": {
+ "type": "function",
+ "variadic": {
+ "type": "any"
+ }
+ }
+ },
+ {
+ "name": "product",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "number"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "number"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "providers.aws.sign_req",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "key": {
+ "type": "any"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "rand.intn",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ },
+ "nondeterministic": true
+ },
+ {
+ "name": "re_match",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "regex.find_all_string_submatch_n",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "dynamic": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "regex.find_n",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "regex.globs_match",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "regex.is_valid",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "regex.match",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "regex.replace",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "regex.split",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "regex.template_match",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "rego.metadata.chain",
+ "decl": {
+ "result": {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "rego.metadata.rule",
+ "decl": {
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "rego.parse_module",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "rem",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ },
+ "infix": "%"
+ },
+ {
+ "name": "replace",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "round",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "semver.compare",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "semver.is_valid",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "set_diff",
+ "decl": {
+ "args": [
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "result": {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "sort",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "split",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "sprintf",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "startswith",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "strings.any_prefix_match",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "string"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ },
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "string"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "strings.any_suffix_match",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "string"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ },
+ {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "string"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "strings.count",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "strings.render_template",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "strings.replace_n",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "string"
+ }
+ },
+ "type": "object"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "strings.reverse",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "substring",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "sum",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "dynamic": {
+ "type": "number"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "number"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "time.add_date",
+ "decl": {
+ "args": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "time.clock",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "number"
+ },
+ {
+ "static": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "static": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "time.date",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "number"
+ },
+ {
+ "static": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "static": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "time.diff",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "number"
+ },
+ {
+ "static": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ },
+ {
+ "of": [
+ {
+ "type": "number"
+ },
+ {
+ "static": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "static": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "number"
+ }
+ ],
+ "type": "array"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "time.format",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "number"
+ },
+ {
+ "static": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "type": "array"
+ },
+ {
+ "static": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "time.now_ns",
+ "decl": {
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ },
+ "nondeterministic": true
+ },
+ {
+ "name": "time.parse_duration_ns",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "time.parse_ns",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "time.parse_rfc3339_ns",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "time.weekday",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "number"
+ },
+ {
+ "static": [
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "type": "array"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "to_number",
+ "decl": {
+ "args": [
+ {
+ "of": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "trace",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "trim",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "trim_left",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "trim_prefix",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "trim_right",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "trim_space",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "trim_suffix",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "type_name",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "union",
+ "decl": {
+ "args": [
+ {
+ "of": {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ "type": "set"
+ }
+ ],
+ "result": {
+ "of": {
+ "type": "any"
+ },
+ "type": "set"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "units.parse",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "units.parse_bytes",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "number"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "upper",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "urlquery.decode",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "urlquery.decode_object",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "dynamic": {
+ "type": "string"
+ },
+ "type": "array"
+ }
+ },
+ "type": "object"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "urlquery.encode",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "urlquery.encode_object",
+ "decl": {
+ "args": [
+ {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "of": [
+ {
+ "type": "string"
+ },
+ {
+ "dynamic": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ {
+ "of": {
+ "type": "string"
+ },
+ "type": "set"
+ }
+ ],
+ "type": "any"
+ }
+ },
+ "type": "object"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "uuid.parse",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "dynamic": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "any"
+ }
+ },
+ "type": "object"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "uuid.rfc4122",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ },
+ "nondeterministic": true
+ },
+ {
+ "name": "walk",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "static": [
+ {
+ "dynamic": {
+ "type": "any"
+ },
+ "type": "array"
+ },
+ {
+ "type": "any"
+ }
+ ],
+ "type": "array"
+ },
+ "type": "function"
+ },
+ "relation": true
+ },
+ {
+ "name": "yaml.is_valid",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "boolean"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "yaml.marshal",
+ "decl": {
+ "args": [
+ {
+ "type": "any"
+ }
+ ],
+ "result": {
+ "type": "string"
+ },
+ "type": "function"
+ }
+ },
+ {
+ "name": "yaml.unmarshal",
+ "decl": {
+ "args": [
+ {
+ "type": "string"
+ }
+ ],
+ "result": {
+ "type": "any"
+ },
+ "type": "function"
+ }
+ }
+ ],
+ "wasm_abi_versions": [
+ {
+ "version": 1,
+ "minor_version": 1
+ },
+ {
+ "version": 1,
+ "minor_version": 2
+ }
+ ],
+ "features": [
+ "rego_v1"
+ ]
+}
diff --git a/vendor/github.com/open-policy-agent/opa/internal/bundle/utils.go b/vendor/github.com/open-policy-agent/opa/internal/bundle/utils.go
index 3d67d56929..836aa586b9 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/bundle/utils.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/bundle/utils.go
@@ -6,6 +6,7 @@ package bundle
import (
"context"
+ "errors"
"fmt"
"io"
"os"
@@ -97,7 +98,7 @@ func LoadBundleFromDiskForRegoVersion(regoVersion ast.RegoVersion, path, name st
_, err := os.Stat(bundlePath)
if err == nil {
- f, err := os.Open(filepath.Join(bundlePath))
+ f, err := os.Open(bundlePath)
if err != nil {
return nil, err
}
@@ -132,7 +133,7 @@ func SaveBundleToDisk(path string, raw io.Reader) (string, error) {
}
if raw == nil {
- return "", fmt.Errorf("no raw bundle bytes to persist to disk")
+ return "", errors.New("no raw bundle bytes to persist to disk")
}
dest, err := os.CreateTemp(path, ".bundle.tar.gz.*.tmp")
diff --git a/vendor/github.com/open-policy-agent/opa/internal/cidr/merge/merge.go b/vendor/github.com/open-policy-agent/opa/internal/cidr/merge/merge.go
index a019cde128..c2392b6775 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/cidr/merge/merge.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/cidr/merge/merge.go
@@ -114,7 +114,7 @@ func GetAddressRange(ipNet net.IPNet) (net.IP, net.IP) {
copy(lastIPMask, ipNet.Mask)
for i := range lastIPMask {
lastIPMask[len(lastIPMask)-i-1] = ^lastIPMask[len(lastIPMask)-i-1]
- lastIP[net.IPv6len-i-1] = lastIP[net.IPv6len-i-1] | lastIPMask[len(lastIPMask)-i-1]
+ lastIP[net.IPv6len-i-1] |= lastIPMask[len(lastIPMask)-i-1]
}
return firstIP, lastIP
diff --git a/vendor/github.com/open-policy-agent/opa/internal/compiler/utils.go b/vendor/github.com/open-policy-agent/opa/internal/compiler/utils.go
index dfb781e19b..5d2e778b13 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/compiler/utils.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/compiler/utils.go
@@ -5,6 +5,9 @@
package compiler
import (
+ "errors"
+ "sync"
+
"github.com/open-policy-agent/opa/v1/ast"
"github.com/open-policy-agent/opa/v1/schemas"
"github.com/open-policy-agent/opa/v1/util"
@@ -16,12 +19,35 @@ const (
AuthorizationPolicySchema SchemaFile = "authorizationPolicy.json"
)
-var schemaDefinitions = map[SchemaFile]interface{}{}
+var schemaDefinitions = map[SchemaFile]any{}
+
+var loadOnce = sync.OnceValue(func() error {
+ cont, err := schemas.FS.ReadFile(string(AuthorizationPolicySchema))
+ if err != nil {
+ return err
+ }
+
+ if len(cont) == 0 {
+ return errors.New("expected authorization policy schema file to be present")
+ }
+
+ var schema any
+ if err := util.Unmarshal(cont, &schema); err != nil {
+ return err
+ }
+
+ schemaDefinitions[AuthorizationPolicySchema] = schema
+
+ return nil
+})
// VerifyAuthorizationPolicySchema performs type checking on rules against the schema for the Authorization Policy
// Input document.
// NOTE: The provided compiler should have already run the compilation process on the input modules
func VerifyAuthorizationPolicySchema(compiler *ast.Compiler, ref ast.Ref) error {
+ if err := loadOnce(); err != nil {
+ panic(err)
+ }
rules := getRulesWithDependencies(compiler, ref)
@@ -67,26 +93,3 @@ func transitiveDependencies(compiler *ast.Compiler, rule *ast.Rule, deps map[*as
transitiveDependencies(compiler, other, deps)
}
}
-
-func loadAuthorizationPolicySchema() {
-
- cont, err := schemas.FS.ReadFile(string(AuthorizationPolicySchema))
- if err != nil {
- panic(err)
- }
-
- if len(cont) == 0 {
- panic("expected authorization policy schema file to be present")
- }
-
- var schema interface{}
- if err := util.Unmarshal(cont, &schema); err != nil {
- panic(err)
- }
-
- schemaDefinitions[AuthorizationPolicySchema] = schema
-}
-
-func init() {
- loadAuthorizationPolicySchema()
-}
diff --git a/vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go b/vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go
index 08dfe44862..25cbc13b47 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go
@@ -340,7 +340,7 @@ func (c *Compiler) initModule() error {
// two times. But let's deal with that when it happens.
if _, ok := c.funcs[name]; ok { // already seen
c.debug.Printf("function name duplicate: %s (%d)", name, fn.Index)
- name = name + ".1"
+ name += ".1"
}
c.funcs[name] = fn.Index
}
@@ -348,7 +348,7 @@ func (c *Compiler) initModule() error {
for _, fn := range c.policy.Funcs.Funcs {
params := make([]types.ValueType, len(fn.Params))
- for i := 0; i < len(params); i++ {
+ for i := range params {
params[i] = types.I32
}
@@ -827,7 +827,7 @@ func (c *Compiler) compileFunc(fn *ir.Func) error {
memoize := len(fn.Params) == 2
if len(fn.Params) == 0 {
- return fmt.Errorf("illegal function: zero args")
+ return errors.New("illegal function: zero args")
}
c.nextLocal = 0
@@ -996,12 +996,16 @@ func (c *Compiler) compileBlock(block *ir.Block) ([]instruction.Instruction, err
for _, stmt := range block.Stmts {
switch stmt := stmt.(type) {
case *ir.ResultSetAddStmt:
- instrs = append(instrs, instruction.GetLocal{Index: c.lrs})
- instrs = append(instrs, instruction.GetLocal{Index: c.local(stmt.Value)})
- instrs = append(instrs, instruction.Call{Index: c.function(opaSetAdd)})
+ instrs = append(instrs,
+ instruction.GetLocal{Index: c.lrs},
+ instruction.GetLocal{Index: c.local(stmt.Value)},
+ instruction.Call{Index: c.function(opaSetAdd)},
+ )
case *ir.ReturnLocalStmt:
- instrs = append(instrs, instruction.GetLocal{Index: c.local(stmt.Source)})
- instrs = append(instrs, instruction.Return{})
+ instrs = append(instrs,
+ instruction.GetLocal{Index: c.local(stmt.Source)},
+ instruction.Return{},
+ )
case *ir.BlockStmt:
for i := range stmt.Blocks {
block, err := c.compileBlock(stmt.Blocks[i])
@@ -1029,8 +1033,10 @@ func (c *Compiler) compileBlock(block *ir.Block) ([]instruction.Instruction, err
return instrs, err
}
case *ir.AssignVarStmt:
- instrs = append(instrs, c.instrRead(stmt.Source))
- instrs = append(instrs, instruction.SetLocal{Index: c.local(stmt.Target)})
+ instrs = append(instrs,
+ c.instrRead(stmt.Source),
+ instruction.SetLocal{Index: c.local(stmt.Target)},
+ )
case *ir.AssignVarOnceStmt:
instrs = append(instrs, instruction.Block{
Instrs: []instruction.Instruction{
@@ -1360,7 +1366,7 @@ func (c *Compiler) compileUpsert(local ir.Local, path []int, value ir.Operand, _
// Initialize the locals that specify the path of the upsert operation.
lpath := make(map[int]uint32, len(path))
- for i := 0; i < len(path); i++ {
+ for i := range path {
lpath[i] = c.genLocal()
instrs = append(instrs, instruction.I32Const{Value: c.opaStringAddr(path[i])})
instrs = append(instrs, instruction.SetLocal{Index: lpath[i]})
@@ -1369,10 +1375,10 @@ func (c *Compiler) compileUpsert(local ir.Local, path []int, value ir.Operand, _
// Generate a block that traverses the path of the upsert operation,
// shallowing copying values at each step as needed. Stop before the final
// segment that will only be inserted.
- var inner []instruction.Instruction
+ inner := make([]instruction.Instruction, 0, len(path)*21+1)
ltemp := c.genLocal()
- for i := 0; i < len(path)-1; i++ {
+ for i := range len(path) - 1 {
// Lookup the next part of the path.
inner = append(inner, instruction.GetLocal{Index: lcopy})
@@ -1408,10 +1414,10 @@ func (c *Compiler) compileUpsert(local ir.Local, path []int, value ir.Operand, _
inner = append(inner, instruction.Br{Index: uint32(len(path) - 1)})
// Generate blocks that handle missing nodes during traversal.
- var block []instruction.Instruction
+ block := make([]instruction.Instruction, 0, len(path)*10)
lval := c.genLocal()
- for i := 0; i < len(path)-1; i++ {
+ for i := range len(path) - 1 {
block = append(block, instruction.Block{Instrs: inner})
block = append(block, instruction.Call{Index: c.function(opaObject)})
block = append(block, instruction.SetLocal{Index: lval})
@@ -1535,8 +1541,7 @@ func (c *Compiler) compileExternalCall(stmt *ir.CallStmt, ef externalFunc, resul
}
instrs := *result
- instrs = append(instrs, instruction.I32Const{Value: ef.ID})
- instrs = append(instrs, instruction.I32Const{Value: 0}) // unused context parameter
+ instrs = append(instrs, instruction.I32Const{Value: ef.ID}, instruction.I32Const{Value: 0}) // unused context parameter
for _, arg := range stmt.Args {
instrs = append(instrs, c.instrRead(arg))
@@ -1545,9 +1550,11 @@ func (c *Compiler) compileExternalCall(stmt *ir.CallStmt, ef externalFunc, resul
instrs = append(instrs, instruction.Call{Index: c.function(builtinDispatchers[len(stmt.Args)])})
if ef.Decl.Result() != nil {
- instrs = append(instrs, instruction.TeeLocal{Index: c.local(stmt.Result)})
- instrs = append(instrs, instruction.I32Eqz{})
- instrs = append(instrs, instruction.BrIf{Index: 0})
+ instrs = append(instrs,
+ instruction.TeeLocal{Index: c.local(stmt.Result)},
+ instruction.I32Eqz{},
+ instruction.BrIf{Index: 0},
+ )
} else {
instrs = append(instrs, instruction.Drop{})
}
@@ -1678,7 +1685,7 @@ func (c *Compiler) genLocal() uint32 {
func (c *Compiler) function(name string) uint32 {
fidx, ok := c.funcs[name]
if !ok {
- panic(fmt.Sprintf("function not found: %s", name))
+ panic("function not found: " + name)
}
return fidx
}
diff --git a/vendor/github.com/open-policy-agent/opa/internal/edittree/bitvector/bitvector.go b/vendor/github.com/open-policy-agent/opa/internal/edittree/bitvector/bitvector.go
index 89e7e137b7..8e4d65ed3f 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/edittree/bitvector/bitvector.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/edittree/bitvector/bitvector.go
@@ -36,7 +36,7 @@ func (vector *BitVector) Length() int {
// position of the last byte in the slice.
// This returns the bit that was shifted off of the last byte.
func shiftLower(bit byte, b []byte) byte {
- bit = bit << 7
+ bit <<= 7
for i := len(b) - 1; i >= 0; i-- {
newByte := b[i] >> 1
newByte |= bit
diff --git a/vendor/github.com/open-policy-agent/opa/internal/edittree/edittree.go b/vendor/github.com/open-policy-agent/opa/internal/edittree/edittree.go
index 4a4f8101f8..378fe99a32 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/edittree/edittree.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/edittree/edittree.go
@@ -146,6 +146,7 @@
package edittree
import (
+ "errors"
"fmt"
"math/big"
"sort"
@@ -335,13 +336,13 @@ func (e *EditTree) deleteChildValue(hash int) {
// Insert creates a new child of e, and returns the new child EditTree node.
func (e *EditTree) Insert(key, value *ast.Term) (*EditTree, error) {
if e.value == nil {
- return nil, fmt.Errorf("deleted node encountered during insert operation")
+ return nil, errors.New("deleted node encountered during insert operation")
}
if key == nil {
- return nil, fmt.Errorf("nil key provided for insert operation")
+ return nil, errors.New("nil key provided for insert operation")
}
if value == nil {
- return nil, fmt.Errorf("nil value provided for insert operation")
+ return nil, errors.New("nil value provided for insert operation")
}
switch x := e.value.Value.(type) {
@@ -367,7 +368,7 @@ func (e *EditTree) Insert(key, value *ast.Term) (*EditTree, error) {
return nil, err
}
if idx < 0 || idx > e.insertions.Length() {
- return nil, fmt.Errorf("index for array insertion out of bounds")
+ return nil, errors.New("index for array insertion out of bounds")
}
return e.unsafeInsertArray(idx, value), nil
default:
@@ -457,10 +458,10 @@ func (e *EditTree) unsafeInsertArray(idx int, value *ast.Term) *EditTree {
// already present in e. It then returns the deleted child EditTree node.
func (e *EditTree) Delete(key *ast.Term) (*EditTree, error) {
if e.value == nil {
- return nil, fmt.Errorf("deleted node encountered during delete operation")
+ return nil, errors.New("deleted node encountered during delete operation")
}
if key == nil {
- return nil, fmt.Errorf("nil key provided for delete operation")
+ return nil, errors.New("nil key provided for delete operation")
}
switch e.value.Value.(type) {
@@ -531,7 +532,7 @@ func (e *EditTree) Delete(key *ast.Term) (*EditTree, error) {
return nil, err
}
if idx < 0 || idx > e.insertions.Length()-1 {
- return nil, fmt.Errorf("index for array delete out of bounds")
+ return nil, errors.New("index for array delete out of bounds")
}
// Collect insertion indexes above the delete site for rewriting.
@@ -552,14 +553,14 @@ func (e *EditTree) Delete(key *ast.Term) (*EditTree, error) {
}
// Do rewrites to clear out the newly-removed element.
e.deleteChildValue(idx)
- for i := 0; i < len(rewritesScalars); i++ {
+ for i := range rewritesScalars {
originalIdx := rewritesScalars[i]
rewriteIdx := rewritesScalars[i] - 1
v := e.childScalarValues[originalIdx]
e.deleteChildValue(originalIdx)
e.setChildScalarValue(rewriteIdx, v)
}
- for i := 0; i < len(rewritesComposites); i++ {
+ for i := range rewritesComposites {
originalIdx := rewritesComposites[i]
rewriteIdx := rewritesComposites[i] - 1
v := e.childCompositeValues[originalIdx]
@@ -591,7 +592,7 @@ func (e *EditTree) Delete(key *ast.Term) (*EditTree, error) {
//gcassert:inline
func sumZeroesBelowIndex(index int, bv *bitvector.BitVector) int {
zeroesSeen := 0
- for i := 0; i < index; i++ {
+ for i := range index {
if bv.Element(i) == 0 {
zeroesSeen++
}
@@ -601,7 +602,7 @@ func sumZeroesBelowIndex(index int, bv *bitvector.BitVector) int {
func findIndexOfNthZero(n int, bv *bitvector.BitVector) (int, bool) {
zeroesSeen := 0
- for i := 0; i < bv.Length(); i++ {
+ for i := range bv.Length() {
if bv.Element(i) == 0 {
zeroesSeen++
}
@@ -637,7 +638,7 @@ func (e *EditTree) Unfold(path ast.Ref) (*EditTree, error) {
}
// 1+ path segment case.
if e.value == nil {
- return nil, fmt.Errorf("nil value encountered where composite value was expected")
+ return nil, errors.New("nil value encountered where composite value was expected")
}
// Switch behavior based on types.
@@ -831,7 +832,7 @@ func (e *EditTree) Render() *ast.Term {
// original array. We build a new Array with modified/deleted keys.
out := make([]*ast.Term, 0, e.insertions.Length())
eIdx := 0
- for i := 0; i < e.insertions.Length(); i++ {
+ for i := range e.insertions.Length() {
// If the index == 0, that indicates we should look up the next
// surviving original element.
// If the index == 1, that indicates we should look up that
@@ -879,7 +880,7 @@ func (e *EditTree) Render() *ast.Term {
// Returns the inserted EditTree node.
func (e *EditTree) InsertAtPath(path ast.Ref, value *ast.Term) (*EditTree, error) {
if value == nil {
- return nil, fmt.Errorf("cannot insert nil value into EditTree")
+ return nil, errors.New("cannot insert nil value into EditTree")
}
if len(path) == 0 {
@@ -910,7 +911,7 @@ func (e *EditTree) DeleteAtPath(path ast.Ref) (*EditTree, error) {
// Root document case:
if len(path) == 0 {
if e.value == nil {
- return nil, fmt.Errorf("deleted node encountered during delete operation")
+ return nil, errors.New("deleted node encountered during delete operation")
}
e.value = nil
e.childKeys = nil
@@ -1046,7 +1047,7 @@ func toIndex(arrayLength int, term *ast.Term) (int, error) {
switch v := term.Value.(type) {
case ast.Number:
if i, ok = v.Int(); !ok {
- return 0, fmt.Errorf("invalid number type for indexing")
+ return 0, errors.New("invalid number type for indexing")
}
case ast.String:
if v == "-" {
@@ -1054,13 +1055,13 @@ func toIndex(arrayLength int, term *ast.Term) (int, error) {
}
num := ast.Number(v)
if i, ok = num.Int(); !ok {
- return 0, fmt.Errorf("invalid string for indexing")
+ return 0, errors.New("invalid string for indexing")
}
if v != "0" && strings.HasPrefix(string(v), "0") {
- return 0, fmt.Errorf("leading zeros are not allowed in JSON paths")
+ return 0, errors.New("leading zeros are not allowed in JSON paths")
}
default:
- return 0, fmt.Errorf("invalid type for indexing")
+ return 0, errors.New("invalid type for indexing")
}
return i, nil
@@ -1179,5 +1180,5 @@ func (e *EditTree) Filter(paths []ast.Ref) *ast.Term {
type termSlice []*ast.Term
func (s termSlice) Less(i, j int) bool { return ast.Compare(s[i].Value, s[j].Value) < 0 }
-func (s termSlice) Swap(i, j int) { x := s[i]; s[i] = s[j]; s[j] = x }
+func (s termSlice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s termSlice) Len() int { return len(s) }
diff --git a/vendor/github.com/open-policy-agent/opa/internal/future/filter_imports.go b/vendor/github.com/open-policy-agent/opa/internal/future/filter_imports.go
index eb6091cc6e..f8037ed63e 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/future/filter_imports.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/future/filter_imports.go
@@ -19,12 +19,14 @@ func FilterFutureImports(imps []*ast.Import) []*ast.Import {
return ret
}
+var keywordsTerm = ast.StringTerm("keywords")
+
// IsAllFutureKeywords returns true if the passed *ast.Import is `future.keywords`
func IsAllFutureKeywords(imp *ast.Import) bool {
path := imp.Path.Value.(ast.Ref)
return len(path) == 2 &&
ast.FutureRootDocument.Equal(path[0]) &&
- path[1].Equal(ast.StringTerm("keywords"))
+ path[1].Equal(keywordsTerm)
}
// IsFutureKeyword returns true if the passed *ast.Import is `future.keywords.{kw}`
@@ -32,7 +34,7 @@ func IsFutureKeyword(imp *ast.Import, kw string) bool {
path := imp.Path.Value.(ast.Ref)
return len(path) == 3 &&
ast.FutureRootDocument.Equal(path[0]) &&
- path[1].Equal(ast.StringTerm("keywords")) &&
+ path[1].Equal(keywordsTerm) &&
path[2].Equal(ast.StringTerm(kw))
}
@@ -40,7 +42,7 @@ func WhichFutureKeyword(imp *ast.Import) (string, bool) {
path := imp.Path.Value.(ast.Ref)
if len(path) == 3 &&
ast.FutureRootDocument.Equal(path[0]) &&
- path[1].Equal(ast.StringTerm("keywords")) {
+ path[1].Equal(keywordsTerm) {
if str, ok := path[2].Value.(ast.String); ok {
return string(str), true
}
diff --git a/vendor/github.com/open-policy-agent/opa/internal/future/parser_opts.go b/vendor/github.com/open-policy-agent/opa/internal/future/parser_opts.go
index 84a5292870..eaeb87e296 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/future/parser_opts.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/future/parser_opts.go
@@ -5,6 +5,7 @@
package future
import (
+ "errors"
"fmt"
"github.com/open-policy-agent/opa/v1/ast"
@@ -33,7 +34,7 @@ func ParserOptionsFromFutureImports(imports []*ast.Import) (ast.ParserOptions, e
}
if len(path) == 3 {
if imp.Alias != "" {
- return popts, fmt.Errorf("alias not supported")
+ return popts, errors.New("alias not supported")
}
popts.FutureKeywords = append(popts.FutureKeywords, string(path[2].Value.(ast.String)))
}
diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/dumper.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/dumper.go
index dbb7a7efaf..84266a618f 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/dumper.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/dumper.go
@@ -40,10 +40,10 @@ func (d *dumper) dump(v reflect.Value) {
d.WriteString("false")
}
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- d.WriteString(fmt.Sprintf("%d", v.Int()))
+ d.WriteString(strconv.FormatInt(v.Int(), 10))
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
- d.WriteString(fmt.Sprintf("%d", v.Uint()))
+ d.WriteString(strconv.FormatUint(v.Uint(), 10))
case reflect.Float32, reflect.Float64:
d.WriteString(fmt.Sprintf("%.2f", v.Float()))
@@ -88,7 +88,7 @@ func typeName(t reflect.Type) string {
func (d *dumper) dumpArray(v reflect.Value) {
d.WriteString("[" + typeName(v.Type().Elem()) + "]")
- for i := 0; i < v.Len(); i++ {
+ for i := range v.Len() {
d.nl()
d.WriteString("- ")
d.indent++
@@ -102,7 +102,7 @@ func (d *dumper) dumpStruct(v reflect.Value) {
d.indent++
typ := v.Type()
- for i := 0; i < v.NumField(); i++ {
+ for i := range v.NumField() {
f := v.Field(i)
if typ.Field(i).Tag.Get("dump") == "-" {
continue
@@ -132,13 +132,13 @@ func isZero(v reflect.Value) bool {
return true
}
z := true
- for i := 0; i < v.Len(); i++ {
+ for i := range v.Len() {
z = z && isZero(v.Index(i))
}
return z
case reflect.Struct:
z := true
- for i := 0; i < v.NumField(); i++ {
+ for i := range v.NumField() {
z = z && isZero(v.Field(i))
}
return z
diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_fragment_cycles.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_fragment_cycles.go
index a7de611f17..edc562ddd4 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_fragment_cycles.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_fragment_cycles.go
@@ -51,7 +51,7 @@ func init() {
}
var via string
if len(fragmentNames) != 0 {
- via = fmt.Sprintf(" via %s", strings.Join(fragmentNames, ", "))
+ via = " via " + strings.Join(fragmentNames, ", ")
}
addError(
Message(`Cannot spread fragment "%s" within itself%s.`, spreadName, via),
diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/values_of_correct_type.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/values_of_correct_type.go
index 8858023d4e..afd9f54f10 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/values_of_correct_type.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/values_of_correct_type.go
@@ -159,8 +159,6 @@ func unexpectedTypeMessageOnly(v *ast.Value) ErrorOption {
return Message(`Float cannot represent non numeric value: %s`, v.String())
case "ID", "ID!":
return Message(`ID cannot represent a non-string and non-integer value: %s`, v.String())
- //case "Enum":
- // return Message(`Enum "%s" cannot represent non-enum value: %s`, v.ExpectedType.String(), v.String())
default:
if v.Definition.Kind == ast.Enum {
return Message(`Enum "%s" cannot represent non-enum value: %s.`, v.ExpectedType.String(), v.String())
diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/vars.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/vars.go
index 86be7c4df2..66924148ba 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/vars.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/vars.go
@@ -2,6 +2,7 @@ package validator
import (
"encoding/json"
+ "errors"
"fmt"
"reflect"
"strconv"
@@ -11,7 +12,7 @@ import (
"github.com/open-policy-agent/opa/internal/gqlparser/gqlerror"
)
-var ErrUnexpectedType = fmt.Errorf("Unexpected Type")
+var ErrUnexpectedType = errors.New("Unexpected Type")
// VariableValues coerces and validates variable values
func VariableValues(schema *ast.Schema, op *ast.OperationDefinition, variables map[string]interface{}) (map[string]interface{}, error) {
@@ -106,7 +107,7 @@ func (v *varValidator) validateVarType(typ *ast.Type, val reflect.Value) (reflec
slc = reflect.Append(slc, val)
val = slc
}
- for i := 0; i < val.Len(); i++ {
+ for i := range val.Len() {
resetPath()
v.path = append(v.path, ast.PathIndex(i))
field := val.Index(i)
@@ -222,7 +223,7 @@ func (v *varValidator) validateVarType(typ *ast.Type, val reflect.Value) (reflec
if fieldDef.Type.NonNull && field.IsNil() {
return val, gqlerror.ErrorPathf(v.path, "cannot be null")
}
- //allow null object field and skip it
+ // allow null object field and skip it
if !fieldDef.Type.NonNull && field.IsNil() {
continue
}
diff --git a/vendor/github.com/open-policy-agent/opa/internal/json/patch/patch.go b/vendor/github.com/open-policy-agent/opa/internal/json/patch/patch.go
index 5506180799..9ddb93506e 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/json/patch/patch.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/json/patch/patch.go
@@ -37,8 +37,8 @@ func ParsePatchPathEscaped(str string) (path storage.Path, ok bool) {
// the substitutions in this order, an implementation avoids the error of
// turning '~01' first into '~1' and then into '/', which would be
// incorrect (the string '~01' correctly becomes '~1' after transformation)."
- path[i] = strings.Replace(path[i], "~1", "/", -1)
- path[i] = strings.Replace(path[i], "~0", "~", -1)
+ path[i] = strings.ReplaceAll(path[i], "~1", "/")
+ path[i] = strings.ReplaceAll(path[i], "~0", "~")
}
return
diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/jwk.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/jwk.go
index aa22a3830f..7de27d4e4e 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/jwk.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/jwk.go
@@ -114,7 +114,7 @@ func parse(jwkSrc string) (*Set, error) {
// ParseBytes parses JWK from the incoming byte buffer.
func ParseBytes(buf []byte) (*Set, error) {
- return parse(string(buf[:]))
+ return parse(string(buf))
}
// ParseString parses JWK from the incoming string.
diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/key_ops.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/key_ops.go
index e8fe4cd854..c02b0b9990 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/key_ops.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/key_ops.go
@@ -2,6 +2,7 @@ package jwk
import (
"encoding/json"
+ "errors"
"fmt"
)
@@ -53,12 +54,12 @@ func (keyOperationList *KeyOperationList) UnmarshalJSON(data []byte) error {
var tempKeyOperationList []string
err := json.Unmarshal(data, &tempKeyOperationList)
if err != nil {
- return fmt.Errorf("invalid key operation")
+ return errors.New("invalid key operation")
}
for _, value := range tempKeyOperationList {
_, ok := keyOps[value]
if !ok {
- return fmt.Errorf("unknown key operation")
+ return errors.New("unknown key operation")
}
*keyOperationList = append(*keyOperationList, KeyOperation(value))
}
diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/jws.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/jws.go
index 2a5fe3c173..20fb957d3e 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/jws.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/jws.go
@@ -111,7 +111,7 @@ func Verify(buf []byte, alg jwa.SignatureAlgorithm, key interface{}) (ret []byte
return nil, errors.New(`attempt to verify empty buffer`)
}
- parts, err := SplitCompact(string(buf[:]))
+ parts, err := SplitCompact(string(buf))
if err != nil {
return nil, fmt.Errorf("failed extract from compact serialization format: %w", err)
}
@@ -164,7 +164,7 @@ func VerifyWithJWKSet(buf []byte, keyset *jwk.Set) (payload []byte, err error) {
// ParseByte parses a JWS value serialized via compact serialization and provided as []byte.
func ParseByte(jwsCompact []byte) (m *Message, err error) {
- return parseCompact(string(jwsCompact[:]))
+ return parseCompact(string(jwsCompact))
}
// ParseString parses a JWS value serialized via compact serialization and provided as string.
diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/sign.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/sign.go
index 7db7bbd69c..fd123eb759 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/sign.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/sign.go
@@ -3,6 +3,7 @@ package sign
import (
"crypto/x509"
"encoding/pem"
+ "errors"
"fmt"
"github.com/open-policy-agent/opa/internal/jwx/jwa"
@@ -30,7 +31,7 @@ func GetSigningKey(key string, alg jwa.SignatureAlgorithm) (interface{}, error)
case jwa.RS256, jwa.RS384, jwa.RS512, jwa.PS256, jwa.PS384, jwa.PS512:
block, _ := pem.Decode([]byte(key))
if block == nil {
- return nil, fmt.Errorf("failed to parse PEM block containing the key")
+ return nil, errors.New("failed to parse PEM block containing the key")
}
priv, err := x509.ParsePKCS1PrivateKey(block.Bytes)
@@ -45,7 +46,7 @@ func GetSigningKey(key string, alg jwa.SignatureAlgorithm) (interface{}, error)
case jwa.ES256, jwa.ES384, jwa.ES512:
block, _ := pem.Decode([]byte(key))
if block == nil {
- return nil, fmt.Errorf("failed to parse PEM block containing the key")
+ return nil, errors.New("failed to parse PEM block containing the key")
}
priv, err := x509.ParseECPrivateKey(block.Bytes)
diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/verify.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/verify.go
index 05720a64e0..04ee9141e9 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/verify.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/verify.go
@@ -5,6 +5,7 @@ import (
"crypto/rsa"
"crypto/x509"
"encoding/pem"
+ "errors"
"fmt"
"github.com/open-policy-agent/opa/internal/jwx/jwa"
@@ -33,7 +34,7 @@ func GetSigningKey(key string, alg jwa.SignatureAlgorithm) (interface{}, error)
case jwa.RS256, jwa.RS384, jwa.RS512, jwa.PS256, jwa.PS384, jwa.PS512, jwa.ES256, jwa.ES384, jwa.ES512:
block, _ := pem.Decode([]byte(key))
if block == nil {
- return nil, fmt.Errorf("failed to parse PEM block containing the key")
+ return nil, errors.New("failed to parse PEM block containing the key")
}
pub, err := x509.ParsePKIXPublicKey(block.Bytes)
diff --git a/vendor/github.com/open-policy-agent/opa/internal/planner/planner.go b/vendor/github.com/open-policy-agent/opa/internal/planner/planner.go
index 160775c0e9..2abde17216 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/planner/planner.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/planner/planner.go
@@ -223,7 +223,7 @@ func (p *Planner) planRules(rules []*ast.Rule) (string, error) {
}
// Initialize parameters for functions.
- for i := 0; i < len(rules[0].Head.Args); i++ {
+ for range len(rules[0].Head.Args) {
fn.Params = append(fn.Params, p.newLocal())
}
@@ -385,7 +385,7 @@ func (p *Planner) planRules(rules []*ast.Rule) (string, error) {
return nil
})
default:
- return fmt.Errorf("illegal rule kind")
+ return errors.New("illegal rule kind")
}
})
})
@@ -497,7 +497,6 @@ func (p *Planner) planDotOr(obj ir.Local, key ir.Operand, or stmtFactory, iter p
func (p *Planner) planNestedObjects(obj ir.Local, ref ast.Ref, iter planLocalIter) error {
if len(ref) == 0 {
- //return fmt.Errorf("nested object construction didn't create object")
return iter(obj)
}
@@ -991,8 +990,7 @@ func (p *Planner) planExprCall(e *ast.Expr, iter planiter) error {
op := e.Operator()
if replacement := p.mocks.Lookup(operator); replacement != nil {
- switch r := replacement.Value.(type) {
- case ast.Ref:
+ if r, ok := replacement.Value.(ast.Ref); ok {
if !r.HasPrefix(ast.DefaultRootRef) && !r.HasPrefix(ast.InputRootRef) {
// replacement is builtin
operator = r.String()
@@ -1147,7 +1145,7 @@ func (p *Planner) planExprCallFunc(name string, arity int, void bool, operands [
})
default:
- return fmt.Errorf("impossible replacement, arity mismatch")
+ return errors.New("impossible replacement, arity mismatch")
}
}
@@ -1173,7 +1171,7 @@ func (p *Planner) planExprCallValue(value *ast.Term, arity int, operands []*ast.
})
})
default:
- return fmt.Errorf("impossible replacement, arity mismatch")
+ return errors.New("impossible replacement, arity mismatch")
}
}
@@ -1750,7 +1748,7 @@ func (p *Planner) planRef(ref ast.Ref, iter planiter) error {
head, ok := ref[0].Value.(ast.Var)
if !ok {
- return fmt.Errorf("illegal ref: non-var head")
+ return errors.New("illegal ref: non-var head")
}
if head.Compare(ast.DefaultRootDocument.Value) == 0 {
@@ -1767,7 +1765,7 @@ func (p *Planner) planRef(ref ast.Ref, iter planiter) error {
p.ltarget, ok = p.vars.GetOp(head)
if !ok {
- return fmt.Errorf("illegal ref: unsafe head")
+ return errors.New("illegal ref: unsafe head")
}
return p.planRefRec(ref, 1, iter)
diff --git a/vendor/github.com/open-policy-agent/opa/internal/planner/rules.go b/vendor/github.com/open-policy-agent/opa/internal/planner/rules.go
index 2f424da526..cc7f12bd2b 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/planner/rules.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/planner/rules.go
@@ -111,7 +111,7 @@ func (t *ruletrie) Rules() []*ast.Rule {
func (t *ruletrie) Push(key ast.Ref) {
node := t
- for i := 0; i < len(key)-1; i++ {
+ for i := range len(key) - 1 {
node = node.Get(key[i].Value)
if node == nil {
return
@@ -123,7 +123,7 @@ func (t *ruletrie) Push(key ast.Ref) {
func (t *ruletrie) Pop(key ast.Ref) {
node := t
- for i := 0; i < len(key)-1; i++ {
+ for i := range len(key) - 1 {
node = node.Get(key[i].Value)
if node == nil {
return
diff --git a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/crypto/compare.go b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/crypto/compare.go
index 1d0f25f8c2..103dc77667 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/crypto/compare.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/crypto/compare.go
@@ -1,6 +1,6 @@
package crypto
-import "fmt"
+import "errors"
// ConstantTimeByteCompare is a constant-time byte comparison of x and y. This function performs an absolute comparison
// if the two byte slices assuming they represent a big-endian number.
@@ -11,7 +11,7 @@ import "fmt"
// +1 if x > y
func ConstantTimeByteCompare(x, y []byte) (int, error) {
if len(x) != len(y) {
- return 0, fmt.Errorf("slice lengths do not match")
+ return 0, errors.New("slice lengths do not match")
}
xLarger, yLarger := 0, 0
diff --git a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/crypto/ecc.go b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/crypto/ecc.go
index 758c73fcb3..12679a15be 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/crypto/ecc.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/crypto/ecc.go
@@ -7,6 +7,7 @@ import (
"crypto/hmac"
"encoding/asn1"
"encoding/binary"
+ "errors"
"fmt"
"hash"
"math"
@@ -82,7 +83,7 @@ func HMACKeyDerivation(hash func() hash.Hash, bitLen int, key []byte, label, con
// verify the requested bit length is not larger then the length encoding size
if int64(bitLen) > 0x7FFFFFFF {
- return nil, fmt.Errorf("bitLen is greater than 32-bits")
+ return nil, errors.New("bitLen is greater than 32-bits")
}
fixedInput := bytes.NewBuffer(nil)
diff --git a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4.go b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4.go
index 1e50d01f92..07aa568fa2 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4.go
@@ -8,6 +8,7 @@ import (
"bytes"
"crypto/hmac"
"crypto/sha256"
+ "encoding/hex"
"errors"
"fmt"
"io"
@@ -189,7 +190,7 @@ func SignV4(headers map[string][]string, method string, theURL *url.URL, body []
authHeader := "AWS4-HMAC-SHA256 Credential=" + awsCreds.AccessKey + "/" + dateNow
authHeader += "/" + awsCreds.RegionName + "/" + service + "/aws4_request,"
authHeader += "SignedHeaders=" + headerList + ","
- authHeader += "Signature=" + fmt.Sprintf("%x", signature)
+ authHeader += "Signature=" + hex.EncodeToString(signature)
return authHeader, awsHeaders
}
diff --git a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4a.go b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4a.go
index 929f2006e7..59e49c1f30 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4a.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4a.go
@@ -9,7 +9,7 @@ import (
"crypto/rand"
"crypto/sha256"
"encoding/hex"
- "fmt"
+ "errors"
"hash"
"io"
"math/big"
@@ -107,7 +107,7 @@ func deriveKeyFromAccessKeyPair(accessKey, secretKey string) (*ecdsa.PrivateKey,
counter++
if counter > 0xFF {
- return nil, fmt.Errorf("exhausted single byte external counter")
+ return nil, errors.New("exhausted single byte external counter")
}
}
d = d.Add(d, one)
@@ -146,7 +146,7 @@ func retrievePrivateKey(symmetric Credentials) (v4aCredentials, error) {
privateKey, err := deriveKeyFromAccessKeyPair(symmetric.AccessKey, symmetric.SecretKey)
if err != nil {
- return v4aCredentials{}, fmt.Errorf("failed to derive asymmetric key from credentials")
+ return v4aCredentials{}, errors.New("failed to derive asymmetric key from credentials")
}
creds := v4aCredentials{
@@ -216,7 +216,7 @@ func (s *httpSigner) Build() (signedRequest, error) {
signedHeaders, signedHeadersStr, canonicalHeaderStr := s.buildCanonicalHeaders(host, v4Internal.IgnoredHeaders, unsignedHeaders, s.Request.ContentLength)
- rawQuery := strings.Replace(query.Encode(), "+", "%20", -1)
+ rawQuery := strings.ReplaceAll(query.Encode(), "+", "%20")
canonicalURI := v4Internal.GetURIPath(req.URL)
@@ -314,7 +314,7 @@ func (s *httpSigner) buildCanonicalHeaders(host string, rule v4Internal.Rule, he
var canonicalHeaders strings.Builder
n := len(headers)
const colon = ':'
- for i := 0; i < n; i++ {
+ for i := range n {
if headers[i] == hostHeader {
canonicalHeaders.WriteString(hostHeader)
canonicalHeaders.WriteRune(colon)
diff --git a/vendor/github.com/open-policy-agent/opa/internal/strings/strings.go b/vendor/github.com/open-policy-agent/opa/internal/strings/strings.go
index 08f3bf9182..f2838ac36a 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/strings/strings.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/strings/strings.go
@@ -57,7 +57,7 @@ func TruncateFilePaths(maxIdealWidth, maxWidth int, path ...string) (map[string]
}
// Drop the overall length down to match our substitution
- longestLocation = longestLocation - (len(lcs) - 3)
+ longestLocation -= (len(lcs) - 3)
}
return result, longestLocation
diff --git a/vendor/github.com/open-policy-agent/opa/internal/strvals/parser.go b/vendor/github.com/open-policy-agent/opa/internal/strvals/parser.go
index 1eceb83df9..3b12d9526b 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/strvals/parser.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/strvals/parser.go
@@ -148,8 +148,6 @@ func (t *parser) key(data map[string]interface{}) error {
return err
}
return fmt.Errorf("key %q has no value", string(k))
- //set(data, string(k), "")
- //return err
case last == '[':
// We are in a list index context, so we need to set an index.
i, err := t.keyIndex()
@@ -168,7 +166,7 @@ func (t *parser) key(data map[string]interface{}) error {
set(data, kk, list)
return err
case last == '=':
- //End of key. Consume =, Get value.
+ // End of key. Consume =, Get value.
// FIXME: Get value list first
vl, e := t.valList()
switch e {
diff --git a/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go b/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go
index 7120392ce2..0695ce94fe 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go
@@ -7,6 +7,7 @@ package encoding
import (
"bytes"
"encoding/binary"
+ "errors"
"fmt"
"io"
@@ -105,7 +106,7 @@ func readMagic(r io.Reader) error {
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
return err
} else if v != constant.Magic {
- return fmt.Errorf("illegal magic value")
+ return errors.New("illegal magic value")
}
return nil
}
@@ -115,7 +116,7 @@ func readVersion(r io.Reader) error {
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
return err
} else if v != constant.Version {
- return fmt.Errorf("illegal wasm version")
+ return errors.New("illegal wasm version")
}
return nil
}
@@ -199,7 +200,7 @@ func readSections(r io.Reader, m *module.Module) error {
return fmt.Errorf("code section: %w", err)
}
default:
- return fmt.Errorf("illegal section id")
+ return errors.New("illegal section id")
}
}
}
@@ -269,7 +270,7 @@ func readNameMap(r io.Reader) ([]module.NameMap, error) {
return nil, err
}
nm := make([]module.NameMap, n)
- for i := uint32(0); i < n; i++ {
+ for i := range n {
var name string
id, err := leb128.ReadVarUint32(r)
if err != nil {
@@ -289,7 +290,7 @@ func readNameSectionLocals(r io.Reader, s *module.NameSection) error {
if err != nil {
return err
}
- for i := uint32(0); i < n; i++ {
+ for range n {
id, err := leb128.ReadVarUint32(r) // func index
if err != nil {
return err
@@ -326,7 +327,7 @@ func readTypeSection(r io.Reader, s *module.TypeSection) error {
return err
}
- for i := uint32(0); i < n; i++ {
+ for range n {
var ftype module.FunctionType
if err := readFunctionType(r, &ftype); err != nil {
@@ -346,7 +347,7 @@ func readImportSection(r io.Reader, s *module.ImportSection) error {
return err
}
- for i := uint32(0); i < n; i++ {
+ for range n {
var imp module.Import
@@ -367,14 +368,14 @@ func readTableSection(r io.Reader, s *module.TableSection) error {
return err
}
- for i := uint32(0); i < n; i++ {
+ for range n {
var table module.Table
if elem, err := readByte(r); err != nil {
return err
} else if elem != constant.ElementTypeAnyFunc {
- return fmt.Errorf("illegal element type")
+ return errors.New("illegal element type")
}
table.Type = types.Anyfunc
@@ -396,7 +397,7 @@ func readMemorySection(r io.Reader, s *module.MemorySection) error {
return err
}
- for i := uint32(0); i < n; i++ {
+ for range n {
var mem module.Memory
@@ -417,7 +418,7 @@ func readGlobalSection(r io.Reader, s *module.GlobalSection) error {
return err
}
- for i := uint32(0); i < n; i++ {
+ for range n {
var global module.Global
@@ -442,7 +443,7 @@ func readExportSection(r io.Reader, s *module.ExportSection) error {
return err
}
- for i := uint32(0); i < n; i++ {
+ for range n {
var exp module.Export
@@ -463,7 +464,7 @@ func readElementSection(r io.Reader, s *module.ElementSection) error {
return err
}
- for i := uint32(0); i < n; i++ {
+ for range n {
var seg module.ElementSegment
@@ -484,7 +485,7 @@ func readDataSection(r io.Reader, s *module.DataSection) error {
return err
}
- for i := uint32(0); i < n; i++ {
+ for range n {
var seg module.DataSegment
@@ -505,7 +506,7 @@ func readRawCodeSection(r io.Reader, s *module.RawCodeSection) error {
return err
}
- for i := uint32(0); i < n; i++ {
+ for range n {
var seg module.RawCodeSegment
if err := readRawCodeSegment(r, &seg); err != nil {
@@ -547,7 +548,7 @@ func readGlobal(r io.Reader, global *module.Global) error {
if b == 1 {
global.Mutable = true
} else if b != 0 {
- return fmt.Errorf("illegal mutability flag")
+ return errors.New("illegal mutability flag")
}
return readConstantExpr(r, &global.Init)
@@ -584,7 +585,7 @@ func readImport(r io.Reader, imp *module.Import) error {
if elem, err := readByte(r); err != nil {
return err
} else if elem != constant.ElementTypeAnyFunc {
- return fmt.Errorf("illegal element type")
+ return errors.New("illegal element type")
}
desc := module.TableImport{
Type: types.Anyfunc,
@@ -617,12 +618,12 @@ func readImport(r io.Reader, imp *module.Import) error {
if b == 1 {
desc.Mutable = true
} else if b != 0 {
- return fmt.Errorf("illegal mutability flag")
+ return errors.New("illegal mutability flag")
}
return nil
}
- return fmt.Errorf("illegal import descriptor type")
+ return errors.New("illegal import descriptor type")
}
func readExport(r io.Reader, exp *module.Export) error {
@@ -646,7 +647,7 @@ func readExport(r io.Reader, exp *module.Export) error {
case constant.ExportDescGlobal:
exp.Descriptor.Type = module.GlobalExportType
default:
- return fmt.Errorf("illegal export descriptor type")
+ return errors.New("illegal export descriptor type")
}
exp.Descriptor.Index, err = leb128.ReadVarUint32(r)
@@ -727,7 +728,7 @@ func readExpr(r io.Reader, expr *module.Expr) (err error) {
case error:
err = r
default:
- err = fmt.Errorf("unknown panic")
+ err = errors.New("unknown panic")
}
}
}()
@@ -823,7 +824,7 @@ func readLimits(r io.Reader, l *module.Limit) error {
}
l.Max = &maxLim
} else if b != 0 {
- return fmt.Errorf("illegal limit flag")
+ return errors.New("illegal limit flag")
}
return nil
@@ -838,7 +839,7 @@ func readLocals(r io.Reader, locals *[]module.LocalDeclaration) error {
ret := make([]module.LocalDeclaration, n)
- for i := uint32(0); i < n; i++ {
+ for i := range n {
if err := readVarUint32(r, &ret[i].Count); err != nil {
return err
}
@@ -888,7 +889,7 @@ func readVarUint32Vector(r io.Reader, v *[]uint32) error {
ret := make([]uint32, n)
- for i := uint32(0); i < n; i++ {
+ for i := range n {
if err := readVarUint32(r, &ret[i]); err != nil {
return err
}
@@ -907,7 +908,7 @@ func readValueTypeVector(r io.Reader, v *[]types.ValueType) error {
ret := make([]types.ValueType, n)
- for i := uint32(0); i < n; i++ {
+ for i := range n {
if err := readValueType(r, &ret[i]); err != nil {
return err
}
diff --git a/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/writer.go b/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/writer.go
index 6917b8d1d1..19df3bd6e6 100644
--- a/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/writer.go
+++ b/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/writer.go
@@ -7,6 +7,7 @@ package encoding
import (
"bytes"
"encoding/binary"
+ "errors"
"fmt"
"io"
"math"
@@ -260,7 +261,7 @@ func writeTableSection(w io.Writer, s module.TableSection) error {
return err
}
default:
- return fmt.Errorf("illegal table element type")
+ return errors.New("illegal table element type")
}
if err := writeLimits(&buf, table.Lim); err != nil {
return err
@@ -588,7 +589,7 @@ func writeImport(w io.Writer, imp module.Import) error {
}
return writeByte(w, constant.Const)
default:
- return fmt.Errorf("illegal import descriptor type")
+ return errors.New("illegal import descriptor type")
}
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/annotations.go b/vendor/github.com/open-policy-agent/opa/v1/ast/annotations.go
index 297c6907cd..def7604edf 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/annotations.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/annotations.go
@@ -8,7 +8,7 @@ import (
"encoding/json"
"fmt"
"net/url"
- "sort"
+ "slices"
"strings"
"github.com/open-policy-agent/opa/internal/deepcopy"
@@ -18,12 +18,32 @@ import (
const (
annotationScopePackage = "package"
- annotationScopeImport = "import"
annotationScopeRule = "rule"
annotationScopeDocument = "document"
annotationScopeSubpackages = "subpackages"
)
+var (
+ scopeTerm = StringTerm("scope")
+ titleTerm = StringTerm("title")
+ entrypointTerm = StringTerm("entrypoint")
+ descriptionTerm = StringTerm("description")
+ organizationsTerm = StringTerm("organizations")
+ authorsTerm = StringTerm("authors")
+ relatedResourcesTerm = StringTerm("related_resources")
+ schemasTerm = StringTerm("schemas")
+ customTerm = StringTerm("custom")
+ refTerm = StringTerm("ref")
+ nameTerm = StringTerm("name")
+ emailTerm = StringTerm("email")
+ schemaTerm = StringTerm("schema")
+ definitionTerm = StringTerm("definition")
+ documentTerm = StringTerm(annotationScopeDocument)
+ packageTerm = StringTerm(annotationScopePackage)
+ ruleTerm = StringTerm(annotationScopeRule)
+ subpackagesTerm = StringTerm(annotationScopeSubpackages)
+)
+
type (
// Annotations represents metadata attached to other AST nodes such as rules.
Annotations struct {
@@ -291,7 +311,6 @@ func (ar *AnnotationsRef) MarshalJSON() ([]byte, error) {
}
func scopeCompare(s1, s2 string) int {
-
o1 := scopeOrder(s1)
o2 := scopeOrder(s2)
@@ -311,8 +330,7 @@ func scopeCompare(s1, s2 string) int {
}
func scopeOrder(s string) int {
- switch s {
- case annotationScopeRule:
+ if s == annotationScopeRule {
return 1
}
return 0
@@ -325,7 +343,7 @@ func compareAuthors(a, b []*AuthorAnnotation) int {
return -1
}
- for i := 0; i < len(a); i++ {
+ for i := range a {
if cmp := a[i].Compare(b[i]); cmp != 0 {
return cmp
}
@@ -341,8 +359,8 @@ func compareRelatedResources(a, b []*RelatedResourceAnnotation) int {
return -1
}
- for i := 0; i < len(a); i++ {
- if cmp := strings.Compare(a[i].String(), b[i].String()); cmp != 0 {
+ for i := range a {
+ if cmp := a[i].Compare(b[i]); cmp != 0 {
return cmp
}
}
@@ -356,7 +374,7 @@ func compareSchemas(a, b []*SchemaAnnotation) int {
maxLen = len(b)
}
- for i := 0; i < maxLen; i++ {
+ for i := range maxLen {
if cmp := a[i].Compare(b[i]); cmp != 0 {
return cmp
}
@@ -378,7 +396,7 @@ func compareStringLists(a, b []string) int {
return -1
}
- for i := 0; i < len(a); i++ {
+ for i := range a {
if cmp := strings.Compare(a[i], b[i]); cmp != 0 {
return cmp
}
@@ -409,7 +427,9 @@ func (a *Annotations) Copy(node Node) *Annotations {
cpy.Schemas[i] = a.Schemas[i].Copy()
}
- cpy.Custom = deepcopy.Map(a.Custom)
+ if a.Custom != nil {
+ cpy.Custom = deepcopy.Map(a.Custom)
+ }
cpy.node = node
@@ -425,19 +445,30 @@ func (a *Annotations) toObject() (*Object, *Error) {
}
if len(a.Scope) > 0 {
- obj.Insert(StringTerm("scope"), StringTerm(a.Scope))
+ switch a.Scope {
+ case annotationScopeDocument:
+ obj.Insert(scopeTerm, documentTerm)
+ case annotationScopePackage:
+ obj.Insert(scopeTerm, packageTerm)
+ case annotationScopeRule:
+ obj.Insert(scopeTerm, ruleTerm)
+ case annotationScopeSubpackages:
+ obj.Insert(scopeTerm, subpackagesTerm)
+ default:
+ obj.Insert(scopeTerm, StringTerm(a.Scope))
+ }
}
if len(a.Title) > 0 {
- obj.Insert(StringTerm("title"), StringTerm(a.Title))
+ obj.Insert(titleTerm, StringTerm(a.Title))
}
if a.Entrypoint {
- obj.Insert(StringTerm("entrypoint"), BooleanTerm(true))
+ obj.Insert(entrypointTerm, InternedBooleanTerm(true))
}
if len(a.Description) > 0 {
- obj.Insert(StringTerm("description"), StringTerm(a.Description))
+ obj.Insert(descriptionTerm, StringTerm(a.Description))
}
if len(a.Organizations) > 0 {
@@ -445,19 +476,19 @@ func (a *Annotations) toObject() (*Object, *Error) {
for _, org := range a.Organizations {
orgs = append(orgs, StringTerm(org))
}
- obj.Insert(StringTerm("organizations"), ArrayTerm(orgs...))
+ obj.Insert(organizationsTerm, ArrayTerm(orgs...))
}
if len(a.RelatedResources) > 0 {
rrs := make([]*Term, 0, len(a.RelatedResources))
for _, rr := range a.RelatedResources {
- rrObj := NewObject(Item(StringTerm("ref"), StringTerm(rr.Ref.String())))
+ rrObj := NewObject(Item(refTerm, StringTerm(rr.Ref.String())))
if len(rr.Description) > 0 {
- rrObj.Insert(StringTerm("description"), StringTerm(rr.Description))
+ rrObj.Insert(descriptionTerm, StringTerm(rr.Description))
}
rrs = append(rrs, NewTerm(rrObj))
}
- obj.Insert(StringTerm("related_resources"), ArrayTerm(rrs...))
+ obj.Insert(relatedResourcesTerm, ArrayTerm(rrs...))
}
if len(a.Authors) > 0 {
@@ -465,14 +496,14 @@ func (a *Annotations) toObject() (*Object, *Error) {
for _, author := range a.Authors {
aObj := NewObject()
if len(author.Name) > 0 {
- aObj.Insert(StringTerm("name"), StringTerm(author.Name))
+ aObj.Insert(nameTerm, StringTerm(author.Name))
}
if len(author.Email) > 0 {
- aObj.Insert(StringTerm("email"), StringTerm(author.Email))
+ aObj.Insert(emailTerm, StringTerm(author.Email))
}
as = append(as, NewTerm(aObj))
}
- obj.Insert(StringTerm("authors"), ArrayTerm(as...))
+ obj.Insert(authorsTerm, ArrayTerm(as...))
}
if len(a.Schemas) > 0 {
@@ -480,21 +511,21 @@ func (a *Annotations) toObject() (*Object, *Error) {
for _, s := range a.Schemas {
sObj := NewObject()
if len(s.Path) > 0 {
- sObj.Insert(StringTerm("path"), NewTerm(s.Path.toArray()))
+ sObj.Insert(pathTerm, NewTerm(s.Path.toArray()))
}
if len(s.Schema) > 0 {
- sObj.Insert(StringTerm("schema"), NewTerm(s.Schema.toArray()))
+ sObj.Insert(schemaTerm, NewTerm(s.Schema.toArray()))
}
if s.Definition != nil {
def, err := InterfaceToValue(s.Definition)
if err != nil {
return nil, NewError(CompileErr, a.Location, "invalid definition in schema annotation: %s", err.Error())
}
- sObj.Insert(StringTerm("definition"), NewTerm(def))
+ sObj.Insert(definitionTerm, NewTerm(def))
}
ss = append(ss, NewTerm(sObj))
}
- obj.Insert(StringTerm("schemas"), ArrayTerm(ss...))
+ obj.Insert(schemasTerm, ArrayTerm(ss...))
}
if len(a.Custom) > 0 {
@@ -502,7 +533,7 @@ func (a *Annotations) toObject() (*Object, *Error) {
if err != nil {
return nil, NewError(CompileErr, a.Location, "invalid custom annotation %s", err.Error())
}
- obj.Insert(StringTerm("custom"), NewTerm(c))
+ obj.Insert(customTerm, NewTerm(c))
}
return &obj, nil
@@ -563,7 +594,11 @@ func attachAnnotationsNodes(mod *Module) Errors {
case *Package:
a.Scope = annotationScopePackage
case *Import:
- a.Scope = annotationScopeImport
+ // Note that this isn't a valid scope, but set here so that the
+ // validate function called below can print an error message with
+ // a context that makes sense ("invalid scope: 'import'" instead of
+ // "invalid scope: '')
+ a.Scope = "import"
}
}
@@ -681,7 +716,6 @@ func (s *SchemaAnnotation) Copy() *SchemaAnnotation {
// Compare returns an integer indicating if s is less than, equal to, or greater
// than other.
func (s *SchemaAnnotation) Compare(other *SchemaAnnotation) int {
-
if cmp := s.Path.Compare(other.Path); cmp != 0 {
return cmp
}
@@ -819,9 +853,7 @@ func (as *AnnotationSet) Flatten() FlatAnnotationsRefSet {
}
// Sort by path, then annotation location, for stable output
- sort.SliceStable(refs, func(i, j int) bool {
- return refs[i].Compare(refs[j]) < 0
- })
+ slices.SortStableFunc(refs, (*AnnotationsRef).Compare)
return refs
}
@@ -853,8 +885,8 @@ func (as *AnnotationSet) Chain(rule *Rule) AnnotationsRefSet {
if len(refs) > 1 {
// Sort by annotation location; chain must start with annotations declared closest to rule, then going outward
- sort.SliceStable(refs, func(i, j int) bool {
- return refs[i].Annotations.Location.Compare(refs[j].Annotations.Location) > 0
+ slices.SortStableFunc(refs, func(a, b *AnnotationsRef) int {
+ return -a.Annotations.Location.Compare(b.Annotations.Location)
})
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/builtins.go b/vendor/github.com/open-policy-agent/opa/v1/ast/builtins.go
index 9585620dca..32ab2d153f 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/builtins.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/builtins.go
@@ -299,6 +299,9 @@ var DefaultBuiltins = [...]*Builtin{
// Printing
Print,
InternalPrint,
+
+ // Testing
+ InternalTestCase,
}
// BuiltinMap provides a convenient mapping of built-in names to
@@ -486,10 +489,10 @@ var Minus = &Builtin{
Description: "Minus subtracts the second number from the first number or computes the difference between two sets.",
Decl: types.NewFunction(
types.Args(
- types.Named("x", types.NewAny(types.N, types.NewSet(types.A))),
- types.Named("y", types.NewAny(types.N, types.NewSet(types.A))),
+ types.Named("x", types.NewAny(types.N, types.SetOfAny)),
+ types.Named("y", types.NewAny(types.N, types.SetOfAny)),
),
- types.Named("z", types.NewAny(types.N, types.NewSet(types.A))).Description("the difference of `x` and `y`"),
+ types.Named("z", types.NewAny(types.N, types.SetOfAny)).Description("the difference of `x` and `y`"),
),
Categories: category("sets", "numbers"),
}
@@ -671,10 +674,10 @@ var And = &Builtin{
Description: "Returns the intersection of two sets.",
Decl: types.NewFunction(
types.Args(
- types.Named("x", types.NewSet(types.A)).Description("the first set"),
- types.Named("y", types.NewSet(types.A)).Description("the second set"),
+ types.Named("x", types.SetOfAny).Description("the first set"),
+ types.Named("y", types.SetOfAny).Description("the second set"),
),
- types.Named("z", types.NewSet(types.A)).Description("the intersection of `x` and `y`"),
+ types.Named("z", types.SetOfAny).Description("the intersection of `x` and `y`"),
),
Categories: sets,
}
@@ -686,10 +689,10 @@ var Or = &Builtin{
Description: "Returns the union of two sets.",
Decl: types.NewFunction(
types.Args(
- types.Named("x", types.NewSet(types.A)),
- types.Named("y", types.NewSet(types.A)),
+ types.Named("x", types.SetOfAny),
+ types.Named("y", types.SetOfAny),
),
- types.Named("z", types.NewSet(types.A)).Description("the union of `x` and `y`"),
+ types.Named("z", types.SetOfAny).Description("the union of `x` and `y`"),
),
Categories: sets,
}
@@ -699,9 +702,9 @@ var Intersection = &Builtin{
Description: "Returns the intersection of the given input sets.",
Decl: types.NewFunction(
types.Args(
- types.Named("xs", types.NewSet(types.NewSet(types.A))).Description("set of sets to intersect"),
+ types.Named("xs", types.NewSet(types.SetOfAny)).Description("set of sets to intersect"),
),
- types.Named("y", types.NewSet(types.A)).Description("the intersection of all `xs` sets"),
+ types.Named("y", types.SetOfAny).Description("the intersection of all `xs` sets"),
),
Categories: sets,
}
@@ -711,9 +714,9 @@ var Union = &Builtin{
Description: "Returns the union of the given input sets.",
Decl: types.NewFunction(
types.Args(
- types.Named("xs", types.NewSet(types.NewSet(types.A))).Description("set of sets to merge"),
+ types.Named("xs", types.NewSet(types.SetOfAny)).Description("set of sets to merge"),
),
- types.Named("y", types.NewSet(types.A)).Description("the union of all `xs` sets"),
+ types.Named("y", types.SetOfAny).Description("the union of all `xs` sets"),
),
Categories: sets,
}
@@ -730,7 +733,7 @@ var Count = &Builtin{
Decl: types.NewFunction(
types.Args(
types.Named("collection", types.NewAny(
- types.NewSet(types.A),
+ types.SetOfAny,
types.NewArray(nil, types.A),
types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)),
types.S,
@@ -747,7 +750,7 @@ var Sum = &Builtin{
Decl: types.NewFunction(
types.Args(
types.Named("collection", types.NewAny(
- types.NewSet(types.N),
+ types.SetOfNum,
types.NewArray(nil, types.N),
)).Description("the set or array of numbers to sum"),
),
@@ -762,7 +765,7 @@ var Product = &Builtin{
Decl: types.NewFunction(
types.Args(
types.Named("collection", types.NewAny(
- types.NewSet(types.N),
+ types.SetOfNum,
types.NewArray(nil, types.N),
)).Description("the set or array of numbers to multiply"),
),
@@ -777,7 +780,7 @@ var Max = &Builtin{
Decl: types.NewFunction(
types.Args(
types.Named("collection", types.NewAny(
- types.NewSet(types.A),
+ types.SetOfAny,
types.NewArray(nil, types.A),
)).Description("the set or array to be searched"),
),
@@ -792,7 +795,7 @@ var Min = &Builtin{
Decl: types.NewFunction(
types.Args(
types.Named("collection", types.NewAny(
- types.NewSet(types.A),
+ types.SetOfAny,
types.NewArray(nil, types.A),
)).Description("the set or array to be searched"),
),
@@ -812,7 +815,7 @@ var Sort = &Builtin{
types.Args(
types.Named("collection", types.NewAny(
types.NewArray(nil, types.A),
- types.NewSet(types.A),
+ types.SetOfAny,
)).Description("the array or set to be sorted"),
),
types.Named("n", types.NewArray(nil, types.A)).Description("the sorted array"),
@@ -842,8 +845,8 @@ var ArraySlice = &Builtin{
Decl: types.NewFunction(
types.Args(
types.Named("arr", types.NewArray(nil, types.A)).Description("the array to be sliced"),
- types.Named("start", types.NewNumber()).Description("the start index of the returned slice; if less than zero, it's clamped to 0"),
- types.Named("stop", types.NewNumber()).Description("the stop index of the returned slice; if larger than `count(arr)`, it's clamped to `count(arr)`"),
+ types.Named("start", types.N).Description("the start index of the returned slice; if less than zero, it's clamped to 0"),
+ types.Named("stop", types.N).Description("the stop index of the returned slice; if larger than `count(arr)`, it's clamped to `count(arr)`"),
),
types.Named("slice", types.NewArray(nil, types.A)).Description("the subslice of `array`, from `start` to `end`, including `arr[start]`, but excluding `arr[end]`"),
),
@@ -993,12 +996,12 @@ var AnyPrefixMatch = &Builtin{
types.Args(
types.Named("search", types.NewAny(
types.S,
- types.NewSet(types.S),
+ types.SetOfStr,
types.NewArray(nil, types.S),
)).Description("search string(s)"),
types.Named("base", types.NewAny(
types.S,
- types.NewSet(types.S),
+ types.SetOfStr,
types.NewArray(nil, types.S),
)).Description("base string(s)"),
),
@@ -1014,12 +1017,12 @@ var AnySuffixMatch = &Builtin{
types.Args(
types.Named("search", types.NewAny(
types.S,
- types.NewSet(types.S),
+ types.SetOfStr,
types.NewArray(nil, types.S),
)).Description("search string(s)"),
types.Named("base", types.NewAny(
types.S,
- types.NewSet(types.S),
+ types.SetOfStr,
types.NewArray(nil, types.S),
)).Description("base string(s)"),
),
@@ -1035,7 +1038,7 @@ var Concat = &Builtin{
types.Args(
types.Named("delimiter", types.S).Description("string to use as a delimiter"),
types.Named("collection", types.NewAny(
- types.NewSet(types.S),
+ types.SetOfStr,
types.NewArray(nil, types.S),
)).Description("strings to join"),
),
@@ -1597,13 +1600,13 @@ var ObjectSubset = &Builtin{
types.Named("super", types.NewAny(types.NewObject(
nil,
types.NewDynamicProperty(types.A, types.A),
- ), types.NewSet(types.A),
+ ), types.SetOfAny,
types.NewArray(nil, types.A),
)).Description("object to test if sub is a subset of"),
types.Named("sub", types.NewAny(types.NewObject(
nil,
types.NewDynamicProperty(types.A, types.A),
- ), types.NewSet(types.A),
+ ), types.SetOfAny,
types.NewArray(nil, types.A),
)).Description("object to test if super is a superset of"),
),
@@ -1656,7 +1659,7 @@ var ObjectRemove = &Builtin{
)).Description("object to remove keys from"),
types.Named("keys", types.NewAny(
types.NewArray(nil, types.A),
- types.NewSet(types.A),
+ types.SetOfAny,
types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)),
)).Description("keys to remove from x"),
),
@@ -1676,7 +1679,7 @@ var ObjectFilter = &Builtin{
)).Description("object to filter keys"),
types.Named("keys", types.NewAny(
types.NewArray(nil, types.A),
- types.NewSet(types.A),
+ types.SetOfAny,
types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)),
)).Description("keys to keep in `object`"),
),
@@ -1707,7 +1710,7 @@ var ObjectKeys = &Builtin{
types.Args(
types.Named("object", types.NewObject(nil, types.NewDynamicProperty(types.A, types.A))).Description("object to get keys from"),
),
- types.Named("value", types.NewSet(types.A)).Description("set of `object`'s keys"),
+ types.Named("value", types.SetOfAny).Description("set of `object`'s keys"),
),
}
@@ -1881,7 +1884,8 @@ var URLQueryEncodeObject = &Builtin{
types.NewAny(
types.S,
types.NewArray(nil, types.S),
- types.NewSet(types.S)),
+ types.SetOfStr,
+ ),
),
),
).Description("the object to encode"),
@@ -2572,13 +2576,13 @@ var ReachableBuiltin = &Builtin{
types.NewDynamicProperty(
types.A,
types.NewAny(
- types.NewSet(types.A),
+ types.SetOfAny,
types.NewArray(nil, types.A)),
)),
).Description("object containing a set or array of neighboring vertices"),
- types.Named("initial", types.NewAny(types.NewSet(types.A), types.NewArray(nil, types.A))).Description("set or array of root vertices"),
+ types.Named("initial", types.NewAny(types.SetOfAny, types.NewArray(nil, types.A))).Description("set or array of root vertices"),
),
- types.Named("output", types.NewSet(types.A)).Description("set of vertices reachable from the `initial` vertices in the directed `graph`"),
+ types.Named("output", types.SetOfAny).Description("set of vertices reachable from the `initial` vertices in the directed `graph`"),
),
}
@@ -2592,11 +2596,11 @@ var ReachablePathsBuiltin = &Builtin{
types.NewDynamicProperty(
types.A,
types.NewAny(
- types.NewSet(types.A),
+ types.SetOfAny,
types.NewArray(nil, types.A)),
)),
).Description("object containing a set or array of root vertices"),
- types.Named("initial", types.NewAny(types.NewSet(types.A), types.NewArray(nil, types.A))).Description("initial paths"), // TODO(sr): copied. is that correct?
+ types.Named("initial", types.NewAny(types.SetOfAny, types.NewArray(nil, types.A))).Description("initial paths"), // TODO(sr): copied. is that correct?
),
types.Named("output", types.NewSet(types.NewArray(nil, types.A))).Description("paths reachable from the `initial` vertices in the directed `graph`"),
),
@@ -3027,7 +3031,7 @@ var NetCIDRExpand = &Builtin{
types.Args(
types.Named("cidr", types.S).Description("CIDR to expand"),
),
- types.Named("hosts", types.NewSet(types.S)).Description("set of IP addresses the CIDR `cidr` expands to"),
+ types.Named("hosts", types.SetOfStr).Description("set of IP addresses the CIDR `cidr` expands to"),
),
}
@@ -3065,10 +3069,10 @@ Supports both IPv4 and IPv6 notations. IPv6 inputs need a prefix length (e.g. "/
types.Args(
types.Named("addrs", types.NewAny(
types.NewArray(nil, types.NewAny(types.S)),
- types.NewSet(types.S),
+ types.SetOfStr,
)).Description("CIDRs or IP addresses"),
),
- types.Named("output", types.NewSet(types.S)).Description("smallest possible set of CIDRs obtained after merging the provided list of IP addresses and subnets in `addrs`"),
+ types.Named("output", types.SetOfStr).Description("smallest possible set of CIDRs obtained after merging the provided list of IP addresses and subnets in `addrs`"),
),
}
@@ -3110,7 +3114,7 @@ var NetLookupIPAddr = &Builtin{
types.Args(
types.Named("name", types.S).Description("domain name to resolve"),
),
- types.Named("addrs", types.NewSet(types.S)).Description("IP addresses (v4 and v6) that `name` resolves to"),
+ types.Named("addrs", types.SetOfStr).Description("IP addresses (v4 and v6) that `name` resolves to"),
),
Nondeterministic: true,
}
@@ -3160,7 +3164,12 @@ var Print = &Builtin{
// The compiler rewrites print() calls to refer to the internal implementation.
var InternalPrint = &Builtin{
Name: "internal.print",
- Decl: types.NewFunction([]types.Type{types.NewArray(nil, types.NewSet(types.A))}, nil),
+ Decl: types.NewFunction([]types.Type{types.NewArray(nil, types.SetOfAny)}, nil),
+}
+
+var InternalTestCase = &Builtin{
+ Name: "internal.test_case",
+ Decl: types.NewFunction([]types.Type{types.NewArray(nil, types.A)}, nil),
}
/**
@@ -3172,10 +3181,10 @@ var SetDiff = &Builtin{
Name: "set_diff",
Decl: types.NewFunction(
types.Args(
- types.NewSet(types.A),
- types.NewSet(types.A),
+ types.SetOfAny,
+ types.SetOfAny,
),
- types.NewSet(types.A),
+ types.SetOfAny,
),
deprecated: true,
}
@@ -3212,7 +3221,7 @@ var CastSet = &Builtin{
Name: "cast_set",
Decl: types.NewFunction(
types.Args(types.A),
- types.NewSet(types.A),
+ types.SetOfAny,
),
deprecated: true,
}
@@ -3278,7 +3287,7 @@ var All = &Builtin{
Decl: types.NewFunction(
types.Args(
types.NewAny(
- types.NewSet(types.A),
+ types.SetOfAny,
types.NewArray(nil, types.A),
),
),
@@ -3294,7 +3303,7 @@ var Any = &Builtin{
Decl: types.NewFunction(
types.Args(
types.NewAny(
- types.NewSet(types.A),
+ types.SetOfAny,
types.NewArray(nil, types.A),
),
),
@@ -3392,7 +3401,7 @@ func (b *Builtin) IsTargetPos(i int) bool {
func init() {
BuiltinMap = map[string]*Builtin{}
- for _, b := range DefaultBuiltins {
+ for _, b := range &DefaultBuiltins {
RegisterBuiltin(b)
}
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/capabilities.go b/vendor/github.com/open-policy-agent/opa/v1/ast/capabilities.go
index e7d561d9e8..3dfc9f5c8a 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/capabilities.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/capabilities.go
@@ -11,6 +11,7 @@ import (
"fmt"
"io"
"os"
+ "slices"
"sort"
"strings"
@@ -116,8 +117,9 @@ func CapabilitiesForThisVersion(opts ...CapabilitiesOption) *Capabilities {
f.Builtins = make([]*Builtin, len(Builtins))
copy(f.Builtins, Builtins)
- sort.Slice(f.Builtins, func(i, j int) bool {
- return f.Builtins[i].Name < f.Builtins[j].Name
+
+ slices.SortFunc(f.Builtins, func(a, b *Builtin) int {
+ return strings.Compare(a.Name, b.Name)
})
if co.regoVersion == RegoV0 || co.regoVersion == RegoV0CompatV1 {
@@ -243,12 +245,7 @@ func (c *Capabilities) MinimumCompatibleVersion() (string, bool) {
}
func (c *Capabilities) ContainsFeature(feature string) bool {
- for _, f := range c.Features {
- if f == feature {
- return true
- }
- }
- return false
+ return slices.Contains(c.Features, feature)
}
// addBuiltinSorted inserts a built-in into c in sorted order. An existing built-in with the same name
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/check.go b/vendor/github.com/open-policy-agent/opa/v1/ast/check.go
index 57c2fa5d75..ecfb320649 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/check.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/check.go
@@ -276,7 +276,7 @@ func (tc *typeChecker) checkRule(env *TypeEnv, as *AnnotationSet, rule *Rule) {
if len(rule.Head.Args) > 0 {
// If args are not referred to in body, infer as any.
WalkVars(rule.Head.Args, func(v Var) bool {
- if cpy.Get(v) == nil {
+ if cpy.GetByValue(v) == nil {
cpy.tree.PutOne(v, types.A)
}
return false
@@ -284,8 +284,8 @@ func (tc *typeChecker) checkRule(env *TypeEnv, as *AnnotationSet, rule *Rule) {
// Construct function type.
args := make([]types.Type, len(rule.Head.Args))
- for i := 0; i < len(rule.Head.Args); i++ {
- args[i] = cpy.Get(rule.Head.Args[i])
+ for i := range len(rule.Head.Args) {
+ args[i] = cpy.GetByValue(rule.Head.Args[i].Value)
}
f := types.NewFunction(args, cpy.Get(rule.Head.Value))
@@ -294,7 +294,7 @@ func (tc *typeChecker) checkRule(env *TypeEnv, as *AnnotationSet, rule *Rule) {
} else {
switch rule.Head.RuleKind() {
case SingleValue:
- typeV := cpy.Get(rule.Head.Value)
+ typeV := cpy.GetByValue(rule.Head.Value.Value)
if !path.IsGround() {
// e.g. store object[string: whatever] at data.p.q.r, not data.p.q.r[x] or data.p.q.r[x].y[z]
objPath := path.DynamicSuffix()
@@ -306,13 +306,11 @@ func (tc *typeChecker) checkRule(env *TypeEnv, as *AnnotationSet, rule *Rule) {
tc.err([]*Error{NewError(TypeErr, rule.Head.Location, err.Error())}) //nolint:govet
tpe = nil
}
- } else {
- if typeV != nil {
- tpe = typeV
- }
+ } else if typeV != nil {
+ tpe = typeV
}
case MultiValue:
- typeK := cpy.Get(rule.Head.Key)
+ typeK := cpy.GetByValue(rule.Head.Key.Value)
if typeK != nil {
tpe = types.NewSet(typeK)
}
@@ -341,7 +339,7 @@ func nestedObject(env *TypeEnv, path Ref, tpe types.Type) (types.Type, error) {
}
var dynamicProperty *types.DynamicProperty
- typeK := env.Get(k)
+ typeK := env.GetByValue(k.Value)
if typeK == nil {
return nil, nil
}
@@ -391,7 +389,7 @@ func (tc *typeChecker) checkExprBuiltin(env *TypeEnv, expr *Expr) *Error {
// type checker relies on reordering (in particular for references to local
// vars).
name := expr.Operator()
- tpe := env.Get(name)
+ tpe := env.GetByRef(name)
if tpe == nil {
if tc.allowUndefinedFuncs {
@@ -431,7 +429,7 @@ func (tc *typeChecker) checkExprBuiltin(env *TypeEnv, expr *Expr) *Error {
if !unify1(env, args[i], fargs.Arg(i), false) {
post := make([]types.Type, len(args))
for i := range args {
- post[i] = env.Get(args[i])
+ post[i] = env.GetByValue(args[i].Value)
}
return newArgError(expr.Location, name, "invalid argument(s)", post, namedFargs)
}
@@ -453,7 +451,7 @@ func checkExprEq(env *TypeEnv, expr *Expr) *Error {
}
a, b := expr.Operand(0), expr.Operand(1)
- typeA, typeB := env.Get(a), env.Get(b)
+ typeA, typeB := env.GetByValue(a.Value), env.GetByValue(b.Value)
if !unify2(env, a, typeA, b, typeB) {
err := NewError(TypeErr, expr.Location, "match error")
@@ -473,7 +471,7 @@ func (tc *typeChecker) checkExprWith(env *TypeEnv, expr *Expr, i int) *Error {
}
target, value := expr.With[i].Target, expr.With[i].Value
- targetType, valueType := env.Get(target), env.Get(value)
+ targetType, valueType := env.GetByValue(target.Value), env.GetByValue(value.Value)
if t, ok := targetType.(*types.Function); ok { // built-in function replacement
switch v := valueType.(type) {
@@ -509,7 +507,7 @@ func unify2(env *TypeEnv, a *Term, typeA types.Type, b *Term, typeB types.Type)
case Var:
switch b.Value.(type) {
case Var:
- return unify1(env, a, types.A, false) && unify1(env, b, env.Get(a), false)
+ return unify1(env, a, types.A, false) && unify1(env, b, env.GetByValue(a.Value), false)
case *Array:
return unify2Array(env, b, a)
case *object:
@@ -525,15 +523,15 @@ func unify2Array(env *TypeEnv, a *Term, b *Term) bool {
switch bv := b.Value.(type) {
case *Array:
if arr.Len() == bv.Len() {
- for i := 0; i < arr.Len(); i++ {
- if !unify2(env, arr.Elem(i), env.Get(arr.Elem(i)), bv.Elem(i), env.Get(bv.Elem(i))) {
+ for i := range arr.Len() {
+ if !unify2(env, arr.Elem(i), env.GetByValue(arr.Elem(i).Value), bv.Elem(i), env.GetByValue(bv.Elem(i).Value)) {
return false
}
}
return true
}
case Var:
- return unify1(env, a, types.A, false) && unify1(env, b, env.Get(a), false)
+ return unify1(env, a, types.A, false) && unify1(env, b, env.GetByValue(a.Value), false)
}
return false
}
@@ -545,14 +543,14 @@ func unify2Object(env *TypeEnv, a *Term, b *Term) bool {
cv := obj.Intersect(bv)
if obj.Len() == bv.Len() && bv.Len() == len(cv) {
for i := range cv {
- if !unify2(env, cv[i][1], env.Get(cv[i][1]), cv[i][2], env.Get(cv[i][2])) {
+ if !unify2(env, cv[i][1], env.GetByValue(cv[i][1].Value), cv[i][2], env.GetByValue(cv[i][2].Value)) {
return false
}
}
return true
}
case Var:
- return unify1(env, a, types.A, false) && unify1(env, b, env.Get(a), false)
+ return unify1(env, a, types.A, false) && unify1(env, b, env.GetByValue(a.Value), false)
}
return false
}
@@ -565,7 +563,7 @@ func unify1(env *TypeEnv, term *Term, tpe types.Type, union bool) bool {
return unify1Array(env, v, tpe, union)
case types.Any:
if types.Compare(tpe, types.A) == 0 {
- for i := 0; i < v.Len(); i++ {
+ for i := range v.Len() {
unify1(env, v.Elem(i), types.A, true)
}
return true
@@ -615,22 +613,22 @@ func unify1(env *TypeEnv, term *Term, tpe types.Type, union bool) bool {
}
return false
case Ref, *ArrayComprehension, *ObjectComprehension, *SetComprehension:
- return unifies(env.Get(v), tpe)
+ return unifies(env.GetByValue(v), tpe)
case Var:
if !union {
- if exist := env.Get(v); exist != nil {
+ if exist := env.GetByValue(v); exist != nil {
return unifies(exist, tpe)
}
env.tree.PutOne(term.Value, tpe)
} else {
- env.tree.PutOne(term.Value, types.Or(env.Get(v), tpe))
+ env.tree.PutOne(term.Value, types.Or(env.GetByValue(v), tpe))
}
return true
default:
if !IsConstant(v) {
panic("unreachable")
}
- return unifies(env.Get(term), tpe)
+ return unifies(env.GetByValue(term.Value), tpe)
}
}
@@ -638,7 +636,7 @@ func unify1Array(env *TypeEnv, val *Array, tpe *types.Array, union bool) bool {
if val.Len() != tpe.Len() && tpe.Dynamic() == nil {
return false
}
- for i := 0; i < val.Len(); i++ {
+ for i := range val.Len() {
if !unify1(env, val.Elem(i), tpe.Select(i), union) {
return false
}
@@ -732,8 +730,8 @@ func (rc *refChecker) Visit(x interface{}) bool {
}
func (rc *refChecker) checkApply(curr *TypeEnv, ref Ref) *Error {
- switch tpe := curr.Get(ref).(type) {
- case *types.Function: // NOTE(sr): We don't support first-class functions, except for `with`.
+ if tpe, ok := curr.GetByRef(ref).(*types.Function); ok {
+ // NOTE(sr): We don't support first-class functions, except for `with`.
return newRefErrUnsupported(ref[0].Location, rc.varRewriter(ref), len(ref)-1, tpe)
}
@@ -755,19 +753,19 @@ func (rc *refChecker) checkRef(curr *TypeEnv, node *typeTreeNode, ref Ref, idx i
switch head.Value.(type) {
case Var, String: // OK
default:
- have := rc.env.Get(head.Value)
+ have := rc.env.GetByValue(head.Value)
return newRefErrInvalid(ref[0].Location, rc.varRewriter(ref), idx, have, types.S, getOneOfForNode(node))
}
}
- if v, ok := head.Value.(Var); ok && idx != 0 {
+ if _, ok := head.Value.(Var); ok && idx != 0 {
tpe := types.Keys(rc.env.getRefRecExtent(node))
- if exist := rc.env.Get(v); exist != nil {
+ if exist := rc.env.GetByValue(head.Value); exist != nil {
if !unifies(tpe, exist) {
return newRefErrInvalid(ref[0].Location, rc.varRewriter(ref), idx, exist, tpe, getOneOfForNode(node))
}
} else {
- rc.env.tree.PutOne(v, tpe)
+ rc.env.tree.PutOne(head.Value, tpe)
}
}
@@ -781,8 +779,8 @@ func (rc *refChecker) checkRef(curr *TypeEnv, node *typeTreeNode, ref Ref, idx i
case RootDocumentNames.Contains(ref[0]):
if idx != 0 {
- node.Children().Iter(func(_, child util.T) bool {
- _ = rc.checkRef(curr, child.(*typeTreeNode), ref, idx+1) // ignore error
+ node.Children().Iter(func(_ Value, child *typeTreeNode) bool {
+ _ = rc.checkRef(curr, child, ref, idx+1) // ignore error
return false
})
return nil
@@ -817,7 +815,7 @@ func (rc *refChecker) checkRefLeaf(tpe types.Type, ref Ref, idx int) *Error {
switch value := head.Value.(type) {
case Var:
- if exist := rc.env.Get(value); exist != nil {
+ if exist := rc.env.GetByValue(value); exist != nil {
if !unifies(exist, keys) {
return newRefErrInvalid(ref[0].Location, rc.varRewriter(ref), idx, exist, keys, getOneOfForType(tpe))
}
@@ -948,7 +946,7 @@ func unifiesArrays(a, b *types.Array) bool {
func unifiesArraysStatic(a, b *types.Array) bool {
if a.Len() != 0 {
- for i := 0; i < a.Len(); i++ {
+ for i := range a.Len() {
if !unifies(a.Select(i), b.Select(i)) {
return false
}
@@ -1003,7 +1001,7 @@ type ArgErrDetail struct {
func (d *ArgErrDetail) Lines() []string {
lines := make([]string, 2)
lines[0] = "have: " + formatArgs(d.Have)
- lines[1] = "want: " + fmt.Sprint(d.Want)
+ lines[1] = "want: " + d.Want.String()
return lines
}
@@ -1069,7 +1067,7 @@ func (r *RefErrInvalidDetail) Lines() []string {
lines := []string{r.Ref.String()}
offset := len(r.Ref[:r.Pos].String()) + 1
pad := strings.Repeat(" ", offset)
- lines = append(lines, fmt.Sprintf("%s^", pad))
+ lines = append(lines, pad+"^")
if r.Have != nil {
lines = append(lines, fmt.Sprintf("%shave (type): %v", pad, r.Have))
} else {
@@ -1127,8 +1125,8 @@ func newArgError(loc *Location, builtinName Ref, msg string, have []types.Type,
}
func getOneOfForNode(node *typeTreeNode) (result []Value) {
- node.Children().Iter(func(k, _ util.T) bool {
- result = append(result, k.(Value))
+ node.Children().Iter(func(k Value, _ *typeTreeNode) bool {
+ result = append(result, k)
return false
})
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/compare.go b/vendor/github.com/open-policy-agent/opa/v1/ast/compare.go
index 24e61712e7..452c6365a3 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/compare.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/compare.go
@@ -236,7 +236,7 @@ func Compare(a, b interface{}) int {
type termSlice []*Term
func (s termSlice) Less(i, j int) bool { return Compare(s[i].Value, s[j].Value) < 0 }
-func (s termSlice) Swap(i, j int) { x := s[i]; s[i] = s[j]; s[j] = x }
+func (s termSlice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s termSlice) Len() int { return len(s) }
func sortOrder(x interface{}) int {
@@ -300,7 +300,7 @@ func importsCompare(a, b []*Import) int {
if len(b) < minLen {
minLen = len(b)
}
- for i := 0; i < minLen; i++ {
+ for i := range minLen {
if cmp := a[i].Compare(b[i]); cmp != 0 {
return cmp
}
@@ -319,7 +319,7 @@ func annotationsCompare(a, b []*Annotations) int {
if len(b) < minLen {
minLen = len(b)
}
- for i := 0; i < minLen; i++ {
+ for i := range minLen {
if cmp := a[i].Compare(b[i]); cmp != 0 {
return cmp
}
@@ -338,7 +338,7 @@ func rulesCompare(a, b []*Rule) int {
if len(b) < minLen {
minLen = len(b)
}
- for i := 0; i < minLen; i++ {
+ for i := range minLen {
if cmp := a[i].Compare(b[i]); cmp != 0 {
return cmp
}
@@ -357,7 +357,7 @@ func termSliceCompare(a, b []*Term) int {
if len(b) < minLen {
minLen = len(b)
}
- for i := 0; i < minLen; i++ {
+ for i := range minLen {
if cmp := Compare(a[i], b[i]); cmp != 0 {
return cmp
}
@@ -375,7 +375,7 @@ func withSliceCompare(a, b []*With) int {
if len(b) < minLen {
minLen = len(b)
}
- for i := 0; i < minLen; i++ {
+ for i := range minLen {
if cmp := Compare(a[i], b[i]); cmp != 0 {
return cmp
}
@@ -402,6 +402,10 @@ func TermValueCompare(a, b *Term) int {
return a.Value.Compare(b.Value)
}
+func TermValueEqual(a, b *Term) bool {
+ return ValueEqual(a.Value, b.Value)
+}
+
func ValueEqual(a, b Value) bool {
// TODO(ae): why doesn't this work the same?
//
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/compile.go b/vendor/github.com/open-policy-agent/opa/v1/ast/compile.go
index 9b0302474e..2092708af6 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/compile.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/compile.go
@@ -124,7 +124,7 @@ type Compiler struct {
localvargen *localVarGenerator
moduleLoader ModuleLoader
- ruleIndices *util.HashMap
+ ruleIndices *util.HasherMap[Ref, RuleIndex]
stages []stage
maxErrs int
sorted []string // list of sorted module names
@@ -303,15 +303,10 @@ type stage struct {
func NewCompiler() *Compiler {
c := &Compiler{
- Modules: map[string]*Module{},
- RewrittenVars: map[Var]Var{},
- Required: &Capabilities{},
- ruleIndices: util.NewHashMap(func(a, b util.T) bool {
- r1, r2 := a.(Ref), b.(Ref)
- return r1.Equal(r2)
- }, func(x util.T) int {
- return x.(Ref).Hash()
- }),
+ Modules: map[string]*Module{},
+ RewrittenVars: map[Var]Var{},
+ Required: &Capabilities{},
+ ruleIndices: util.NewHasherMap[Ref, RuleIndex](RefEqual),
maxErrs: CompileErrorLimitDefault,
after: map[string][]CompilerStageDefinition{},
unsafeBuiltinsMap: map[string]struct{}{},
@@ -825,7 +820,7 @@ func (c *Compiler) RuleIndex(path Ref) RuleIndex {
if !ok {
return nil
}
- return r.(RuleIndex)
+ return r
}
// PassesTypeCheck determines whether the given body passes type checking
@@ -1114,7 +1109,7 @@ func (c *Compiler) checkRuleConflicts() {
for _, rule := range node.Values {
r := rule.(*Rule)
ref := r.Ref()
- name = rw(ref.Copy()).String() // varRewriter operates in-place
+ name = rw(ref.CopyNonGround()).String() // varRewriter operates in-place
kinds[r.Head.RuleKind()] = struct{}{}
arities[len(r.Head.Args)] = struct{}{}
if r.Default {
@@ -1156,7 +1151,7 @@ func (c *Compiler) checkRuleConflicts() {
// data.p.q[r][s] { r := input.r; s := input.s }
// data.p[q].r.s { q := input.q }
- if r.Ref().IsGround() && len(node.Children) > 0 {
+ if ref.IsGround() && len(node.Children) > 0 {
conflicts = node.flattenChildren()
}
@@ -1351,7 +1346,7 @@ func compileSchema(goSchema interface{}, allowNet []string) (*gojsonschema.Schem
if goSchema != nil {
refLoader = gojsonschema.NewGoLoader(goSchema)
} else {
- return nil, fmt.Errorf("no schema as input to compile")
+ return nil, errors.New("no schema as input to compile")
}
schemasCompiled, err := sl.Compile(refLoader)
if err != nil {
@@ -1370,13 +1365,13 @@ func mergeSchemas(schemas ...*gojsonschema.SubSchema) (*gojsonschema.SubSchema,
if len(schemas[i].PropertiesChildren) > 0 {
if !schemas[i].Types.Contains("object") {
if err := schemas[i].Types.Add("object"); err != nil {
- return nil, fmt.Errorf("unable to set the type in schemas")
+ return nil, errors.New("unable to set the type in schemas")
}
}
} else if len(schemas[i].ItemsChildren) > 0 {
if !schemas[i].Types.Contains("array") {
if err := schemas[i].Types.Add("array"); err != nil {
- return nil, fmt.Errorf("unable to set the type in schemas")
+ return nil, errors.New("unable to set the type in schemas")
}
}
}
@@ -1388,12 +1383,12 @@ func mergeSchemas(schemas ...*gojsonschema.SubSchema) (*gojsonschema.SubSchema,
} else if result.Types.Contains("object") && len(result.PropertiesChildren) > 0 && schemas[i].Types.Contains("object") && len(schemas[i].PropertiesChildren) > 0 {
result.PropertiesChildren = append(result.PropertiesChildren, schemas[i].PropertiesChildren...)
} else if result.Types.Contains("array") && len(result.ItemsChildren) > 0 && schemas[i].Types.Contains("array") && len(schemas[i].ItemsChildren) > 0 {
- for j := 0; j < len(schemas[i].ItemsChildren); j++ {
+ for j := range len(schemas[i].ItemsChildren) {
if len(result.ItemsChildren)-1 < j && !(len(schemas[i].ItemsChildren)-1 < j) {
result.ItemsChildren = append(result.ItemsChildren, schemas[i].ItemsChildren[j])
}
if result.ItemsChildren[j].Types.String() != schemas[i].ItemsChildren[j].Types.String() {
- return nil, fmt.Errorf("unable to merge these schemas")
+ return nil, errors.New("unable to merge these schemas")
}
}
}
@@ -1482,7 +1477,7 @@ func (parser *schemaParser) parseSchemaWithPropertyKey(schema interface{}, prope
}
return parser.parseSchema(objectOrArrayResult)
} else if subSchema.Types.String() != allOfResult.Types.String() {
- return nil, fmt.Errorf("unable to merge these schemas")
+ return nil, errors.New("unable to merge these schemas")
}
}
return parser.parseSchema(allOfResult)
@@ -1738,13 +1733,9 @@ func (c *Compiler) err(err *Error) {
c.Errors = append(c.Errors, err)
}
-func (c *Compiler) getExports() *util.HashMap {
+func (c *Compiler) getExports() *util.HasherMap[Ref, []Ref] {
- rules := util.NewHashMap(func(a, b util.T) bool {
- return a.(Ref).Equal(b.(Ref))
- }, func(v util.T) int {
- return v.(Ref).Hash()
- })
+ rules := util.NewHasherMap[Ref, []Ref](RefEqual)
for _, name := range c.sorted {
mod := c.Modules[name]
@@ -1757,18 +1748,30 @@ func (c *Compiler) getExports() *util.HashMap {
return rules
}
-func hashMapAdd(rules *util.HashMap, pkg, rule Ref) {
+func refSliceEqual(a, b []Ref) bool {
+ if len(a) != len(b) {
+ return false
+ }
+ for i := range a {
+ if !a[i].Equal(b[i]) {
+ return false
+ }
+ }
+ return true
+}
+
+func hashMapAdd(rules *util.HasherMap[Ref, []Ref], pkg, rule Ref) {
prev, ok := rules.Get(pkg)
if !ok {
rules.Put(pkg, []Ref{rule})
return
}
- for _, p := range prev.([]Ref) {
+ for _, p := range prev {
if p.Equal(rule) {
return
}
}
- rules.Put(pkg, append(prev.([]Ref), rule))
+ rules.Put(pkg, append(prev, rule))
}
func (c *Compiler) GetAnnotationSet() *AnnotationSet {
@@ -1867,7 +1870,7 @@ func (c *Compiler) resolveAllRefs() {
var ruleExports []Ref
if x, ok := rules.Get(mod.Package.Path); ok {
- ruleExports = x.([]Ref)
+ ruleExports = x
}
globals := getGlobals(mod.Package, ruleExports, mod.Imports)
@@ -3014,7 +3017,7 @@ func (qc *queryCompiler) resolveRefs(qctx *QueryContext, body Body) (Body, error
var ruleExports []Ref
rules := qc.compiler.getExports()
if exist, ok := rules.Get(pkg.Path); ok {
- ruleExports = exist.([]Ref)
+ ruleExports = exist
}
globals = getGlobals(qctx.Package, ruleExports, qctx.Imports)
@@ -3542,10 +3545,8 @@ func (n *TreeNode) add(path Ref, rule *Rule) {
}
node.Children[sub.Key] = sub
node.Sorted = append(node.Sorted, sub.Key)
- } else {
- if rule != nil {
- node.Values = append(node.Values, rule)
- }
+ } else if rule != nil {
+ node.Values = append(node.Values, rule)
}
}
@@ -4231,6 +4232,9 @@ func (f *equalityFactory) Generate(other *Term) *Expr {
return expr
}
+// TODO: Move to internal package?
+const LocalVarPrefix = "__local"
+
type localVarGenerator struct {
exclude VarSet
suffix string
@@ -4255,7 +4259,7 @@ func newLocalVarGenerator(suffix string, node interface{}) *localVarGenerator {
func (l *localVarGenerator) Generate() Var {
for {
- result := Var("__local" + l.suffix + strconv.Itoa(l.next) + "__")
+ result := Var(LocalVarPrefix + l.suffix + strconv.Itoa(l.next) + "__")
l.next++
if !l.exclude.Contains(result) {
return result
@@ -4411,7 +4415,7 @@ func resolveRefsInExpr(globals map[Var]*usedRef, ignore *declaredVarStack, expr
cpy.Terms = resolveRefsInTerm(globals, ignore, ts)
case []*Term:
buf := make([]*Term, len(ts))
- for i := 0; i < len(ts); i++ {
+ for i := range ts {
buf[i] = resolveRefsInTerm(globals, ignore, ts[i])
}
cpy.Terms = buf
@@ -4516,7 +4520,7 @@ func resolveRefsInTerm(globals map[Var]*usedRef, ignore *declaredVarStack, term
func resolveRefsInTermArray(globals map[Var]*usedRef, ignore *declaredVarStack, terms *Array) []*Term {
cpy := make([]*Term, terms.Len())
- for i := 0; i < terms.Len(); i++ {
+ for i := range terms.Len() {
cpy[i] = resolveRefsInTerm(globals, ignore, terms.Elem(i))
}
return cpy
@@ -4524,7 +4528,7 @@ func resolveRefsInTermArray(globals map[Var]*usedRef, ignore *declaredVarStack,
func resolveRefsInTermSlice(globals map[Var]*usedRef, ignore *declaredVarStack, terms []*Term) []*Term {
cpy := make([]*Term, len(terms))
- for i := 0; i < len(terms); i++ {
+ for i := range terms {
cpy[i] = resolveRefsInTerm(globals, ignore, terms[i])
}
return cpy
@@ -4798,7 +4802,7 @@ func rewriteDynamicsOne(original *Expr, f *equalityFactory, term *Term, result B
connectGeneratedExprs(original, generated)
return result, result[len(result)-1].Operand(0)
case *Array:
- for i := 0; i < v.Len(); i++ {
+ for i := range v.Len() {
var t *Term
result, t = rewriteDynamicsOne(original, f, v.Elem(i), result)
v.set(i, t)
@@ -4875,7 +4879,7 @@ func rewriteExprTermsInHead(gen *localVarGenerator, rule *Rule) {
func rewriteExprTermsInBody(gen *localVarGenerator, body Body) Body {
cpy := make(Body, 0, len(body))
- for i := 0; i < len(body); i++ {
+ for i := range body {
for _, expr := range expandExpr(gen, body[i]) {
cpy.Append(expr)
}
@@ -5028,7 +5032,7 @@ func expandExprRef(gen *localVarGenerator, v []*Term) (support []*Expr) {
}
func expandExprTermArray(gen *localVarGenerator, arr *Array) (support []*Expr) {
- for i := 0; i < arr.Len(); i++ {
+ for i := range arr.Len() {
extras, v := expandExprTerm(gen, arr.Elem(i))
arr.set(i, v)
support = append(support, extras...)
@@ -5710,7 +5714,7 @@ func validateWith(c *Compiler, unsafeBuiltinsMap map[string]struct{}, expr *Expr
case isDataRef(target):
ref := target.Value.(Ref)
targetNode := c.RuleTree
- for i := 0; i < len(ref)-1; i++ {
+ for i := range len(ref) - 1 {
child := targetNode.Child(ref[i].Value)
if child == nil {
break
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/env.go b/vendor/github.com/open-policy-agent/opa/v1/ast/env.go
index fb374b1739..9bffd03e0a 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/env.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/env.go
@@ -29,29 +29,38 @@ func newTypeEnv(f func() *typeChecker) *TypeEnv {
}
// Get returns the type of x.
+// Deprecated: Use GetByValue or GetByRef instead, as they are more efficient.
func (env *TypeEnv) Get(x interface{}) types.Type {
-
if term, ok := x.(*Term); ok {
x = term.Value
}
- switch x := x.(type) {
+ if v, ok := x.(Value); ok {
+ return env.GetByValue(v)
+ }
+
+ panic("unreachable")
+}
+
+// GetByValue returns the type of v.
+func (env *TypeEnv) GetByValue(v Value) types.Type {
+ switch x := v.(type) {
// Scalars.
case Null:
- return types.NewNull()
+ return types.Nl
case Boolean:
- return types.NewBoolean()
+ return types.B
case Number:
- return types.NewNumber()
+ return types.N
case String:
- return types.NewString()
+ return types.S
// Composites.
case *Array:
static := make([]types.Type, x.Len())
for i := range static {
- tpe := env.Get(x.Elem(i).Value)
+ tpe := env.GetByValue(x.Elem(i).Value)
static[i] = tpe
}
@@ -63,7 +72,7 @@ func (env *TypeEnv) Get(x interface{}) types.Type {
return types.NewArray(static, dynamic)
case *lazyObj:
- return env.Get(x.force())
+ return env.GetByValue(x.force())
case *object:
static := []*types.StaticProperty{}
var dynamic *types.DynamicProperty
@@ -72,14 +81,14 @@ func (env *TypeEnv) Get(x interface{}) types.Type {
if IsConstant(k.Value) {
kjson, err := JSON(k.Value)
if err == nil {
- tpe := env.Get(v)
+ tpe := env.GetByValue(v.Value)
static = append(static, types.NewStaticProperty(kjson, tpe))
return
}
}
// Can't handle it as a static property, fallback to dynamic
- typeK := env.Get(k.Value)
- typeV := env.Get(v.Value)
+ typeK := env.GetByValue(k.Value)
+ typeV := env.GetByValue(v.Value)
dynamic = types.NewDynamicProperty(typeK, typeV)
})
@@ -92,8 +101,7 @@ func (env *TypeEnv) Get(x interface{}) types.Type {
case Set:
var tpe types.Type
x.Foreach(func(elem *Term) {
- other := env.Get(elem.Value)
- tpe = types.Or(tpe, other)
+ tpe = types.Or(tpe, env.GetByValue(elem.Value))
})
if tpe == nil {
tpe = types.A
@@ -104,47 +112,46 @@ func (env *TypeEnv) Get(x interface{}) types.Type {
case *ArrayComprehension:
cpy, errs := env.newChecker().CheckBody(env, x.Body)
if len(errs) == 0 {
- return types.NewArray(nil, cpy.Get(x.Term))
+ return types.NewArray(nil, cpy.GetByValue(x.Term.Value))
}
return nil
case *ObjectComprehension:
cpy, errs := env.newChecker().CheckBody(env, x.Body)
if len(errs) == 0 {
- return types.NewObject(nil, types.NewDynamicProperty(cpy.Get(x.Key), cpy.Get(x.Value)))
+ return types.NewObject(nil, types.NewDynamicProperty(cpy.GetByValue(x.Key.Value), cpy.GetByValue(x.Value.Value)))
}
return nil
case *SetComprehension:
cpy, errs := env.newChecker().CheckBody(env, x.Body)
if len(errs) == 0 {
- return types.NewSet(cpy.Get(x.Term))
+ return types.NewSet(cpy.GetByValue(x.Term.Value))
}
return nil
// Refs.
case Ref:
- return env.getRef(x)
+ return env.GetByRef(x)
// Vars.
case Var:
- if node := env.tree.Child(x); node != nil {
+ if node := env.tree.Child(v); node != nil {
return node.Value()
}
if env.next != nil {
- return env.next.Get(x)
+ return env.next.GetByValue(v)
}
return nil
// Calls.
case Call:
return nil
-
- default:
- panic("unreachable")
}
+
+ return env.Get(v)
}
-func (env *TypeEnv) getRef(ref Ref) types.Type {
-
+// GetByRef returns the type of the value referred to by ref.
+func (env *TypeEnv) GetByRef(ref Ref) types.Type {
node := env.tree.Child(ref[0].Value)
if node == nil {
return env.getRefFallback(ref)
@@ -156,7 +163,7 @@ func (env *TypeEnv) getRef(ref Ref) types.Type {
func (env *TypeEnv) getRefFallback(ref Ref) types.Type {
if env.next != nil {
- return env.next.Get(ref)
+ return env.next.GetByRef(ref)
}
if RootDocumentNames.Contains(ref[0]) {
@@ -200,10 +207,7 @@ func (env *TypeEnv) getRefRecExtent(node *typeTreeNode) types.Type {
children := []*types.StaticProperty{}
- node.Children().Iter(func(k, v util.T) bool {
- key := k.(Value)
- child := v.(*typeTreeNode)
-
+ node.Children().Iter(func(key Value, child *typeTreeNode) bool {
tpe := env.getRefRecExtent(child)
// NOTE(sr): Converting to Golang-native types here is an extension of what we did
@@ -237,14 +241,14 @@ func (env *TypeEnv) wrap() *TypeEnv {
type typeTreeNode struct {
key Value
value types.Type
- children *util.HashMap
+ children *util.HasherMap[Value, *typeTreeNode]
}
func newTypeTree() *typeTreeNode {
return &typeTreeNode{
key: nil,
value: nil,
- children: util.NewHashMap(valueEq, valueHash),
+ children: util.NewHasherMap[Value, *typeTreeNode](ValueEqual),
}
}
@@ -253,10 +257,10 @@ func (n *typeTreeNode) Child(key Value) *typeTreeNode {
if !ok {
return nil
}
- return value.(*typeTreeNode)
+ return value
}
-func (n *typeTreeNode) Children() *util.HashMap {
+func (n *typeTreeNode) Children() *util.HasherMap[Value, *typeTreeNode] {
return n.children
}
@@ -267,7 +271,7 @@ func (n *typeTreeNode) Get(path Ref) types.Type {
if !ok {
return nil
}
- curr = child.(*typeTreeNode)
+ curr = child
}
return curr.Value()
}
@@ -285,7 +289,7 @@ func (n *typeTreeNode) PutOne(key Value, tpe types.Type) {
child.key = key
n.children.Put(key, child)
} else {
- child = c.(*typeTreeNode)
+ child = c
}
child.value = tpe
@@ -302,7 +306,7 @@ func (n *typeTreeNode) Put(path Ref, tpe types.Type) {
child.key = term.Value
curr.children.Put(child.key, child)
} else {
- child = c.(*typeTreeNode)
+ child = c
}
curr = child
@@ -324,8 +328,7 @@ func (n *typeTreeNode) Insert(path Ref, tpe types.Type, env *TypeEnv) {
child.key = term.Value
curr.children.Put(child.key, child)
} else {
- child = c.(*typeTreeNode)
-
+ child = c
if child.value != nil && i+1 < len(path) {
// If child has an object value, merge the new value into it.
if o, ok := child.value.(*types.Object); ok {
@@ -426,13 +429,12 @@ func (n *typeTreeNode) String() string {
b.WriteString(v.String())
}
- n.children.Iter(func(_, v util.T) bool {
- if child, ok := v.(*typeTreeNode); ok {
- b.WriteString("\n\t+ ")
- s := child.String()
- s = strings.ReplaceAll(s, "\n", "\n\t")
- b.WriteString(s)
- }
+ n.children.Iter(func(_ Value, child *typeTreeNode) bool {
+ b.WriteString("\n\t+ ")
+ s := child.String()
+ s = strings.ReplaceAll(s, "\n", "\n\t")
+ b.WriteString(s)
+
return false
})
@@ -444,7 +446,7 @@ func insertIntoObject(o *types.Object, path Ref, tpe types.Type, env *TypeEnv) (
return o, nil
}
- key := env.Get(path[0].Value)
+ key := env.GetByValue(path[0].Value)
if len(path) == 1 {
var dynamicProps *types.DynamicProperty
@@ -472,8 +474,8 @@ func insertIntoObject(o *types.Object, path Ref, tpe types.Type, env *TypeEnv) (
func (n *typeTreeNode) Leafs() map[*Ref]types.Type {
leafs := map[*Ref]types.Type{}
- n.children.Iter(func(_, v util.T) bool {
- collectLeafs(v.(*typeTreeNode), nil, leafs)
+ n.children.Iter(func(_ Value, v *typeTreeNode) bool {
+ collectLeafs(v, nil, leafs)
return false
})
return leafs
@@ -485,8 +487,8 @@ func collectLeafs(n *typeTreeNode, path Ref, leafs map[*Ref]types.Type) {
leafs[&nPath] = n.Value()
return
}
- n.children.Iter(func(_, v util.T) bool {
- collectLeafs(v.(*typeTreeNode), nPath, leafs)
+ n.children.Iter(func(_ Value, v *typeTreeNode) bool {
+ collectLeafs(v, nPath, leafs)
return false
})
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/errors.go b/vendor/github.com/open-policy-agent/opa/v1/ast/errors.go
index 066dfcdd68..ff4088edd2 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/errors.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/errors.go
@@ -6,7 +6,8 @@ package ast
import (
"fmt"
- "sort"
+ "slices"
+ "strconv"
"strings"
)
@@ -35,15 +36,12 @@ func (e Errors) Error() string {
// Sort sorts the error slice by location. If the locations are equal then the
// error message is compared.
func (e Errors) Sort() {
- sort.Slice(e, func(i, j int) bool {
- a := e[i]
- b := e[j]
-
+ slices.SortFunc(e, func(a, b *Error) int {
if cmp := a.Location.Compare(b.Location); cmp != 0 {
- return cmp < 0
+ return cmp
}
- return a.Error() < b.Error()
+ return strings.Compare(a.Error(), b.Error())
})
}
@@ -92,9 +90,9 @@ func (e *Error) Error() string {
if e.Location != nil {
if len(e.Location.File) > 0 {
- prefix += e.Location.File + ":" + fmt.Sprint(e.Location.Row)
+ prefix += e.Location.File + ":" + strconv.Itoa(e.Location.Row)
} else {
- prefix += fmt.Sprint(e.Location.Row) + ":" + fmt.Sprint(e.Location.Col)
+ prefix += strconv.Itoa(e.Location.Row) + ":" + strconv.Itoa(e.Location.Col)
}
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/index.go b/vendor/github.com/open-policy-agent/opa/v1/ast/index.go
index 63cd480d13..722b70e57e 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/index.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/index.go
@@ -6,6 +6,7 @@ package ast
import (
"fmt"
+ "slices"
"sort"
"strings"
"sync"
@@ -33,10 +34,10 @@ type RuleIndex interface {
// IndexResult contains the result of an index lookup.
type IndexResult struct {
- Kind RuleKind
Rules []*Rule
Else map[*Rule][]*Rule
Default *Rule
+ Kind RuleKind
EarlyExit bool
OnlyGroundRefs bool
}
@@ -45,7 +46,6 @@ type IndexResult struct {
func NewIndexResult(kind RuleKind) *IndexResult {
return &IndexResult{
Kind: kind,
- Else: map[*Rule][]*Rule{},
}
}
@@ -55,7 +55,6 @@ func (ir *IndexResult) Empty() bool {
}
type baseDocEqIndex struct {
- skipIndexing Set
isVirtual func(Ref) bool
root *trieNode
defaultRule *Rule
@@ -64,15 +63,17 @@ type baseDocEqIndex struct {
}
var (
- equalityRef = Equality.Ref()
- equalRef = Equal.Ref()
- globMatchRef = GlobMatch.Ref()
- internalPrintRef = InternalPrint.Ref()
+ equalityRef = Equality.Ref()
+ equalRef = Equal.Ref()
+ globMatchRef = GlobMatch.Ref()
+ internalPrintRef = InternalPrint.Ref()
+ internalTestCaseRef = InternalTestCase.Ref()
+
+ skipIndexing = NewSet(NewTerm(internalPrintRef), NewTerm(internalTestCaseRef))
)
func newBaseDocEqIndex(isVirtual func(Ref) bool) *baseDocEqIndex {
return &baseDocEqIndex{
- skipIndexing: NewSet(NewTerm(internalPrintRef)),
isVirtual: isVirtual,
root: newTrieNodeImpl(),
onlyGroundRefs: true,
@@ -98,15 +99,15 @@ func (i *baseDocEqIndex) Build(rules []*Rule) bool {
i.onlyGroundRefs = rule.Head.Reference.IsGround()
}
var skip bool
- for _, expr := range rule.Body {
- if op := expr.OperatorTerm(); op != nil && i.skipIndexing.Contains(op) {
+ for i := range rule.Body {
+ if op := rule.Body[i].OperatorTerm(); op != nil && skipIndexing.Contains(op) {
skip = true
break
}
}
if !skip {
- for _, expr := range rule.Body {
- indices.Update(rule, expr)
+ for i := range rule.Body {
+ indices.Update(rule, rule.Body[i])
}
}
return false
@@ -143,7 +144,8 @@ func (i *baseDocEqIndex) Lookup(resolver ValueResolver) (*IndexResult, error) {
defer func() {
clear(tr.unordered)
tr.ordering = tr.ordering[:0]
- tr.values.clear()
+ tr.multiple = false
+ tr.exist = nil
ttrPool.Put(tr)
}()
@@ -153,20 +155,33 @@ func (i *baseDocEqIndex) Lookup(resolver ValueResolver) (*IndexResult, error) {
return nil, err
}
- result := NewIndexResult(i.kind)
+ result := IndexResultPool.Get()
+
+ result.Kind = i.kind
result.Default = i.defaultRule
result.OnlyGroundRefs = i.onlyGroundRefs
- result.Rules = make([]*Rule, 0, len(tr.ordering))
+
+ if result.Rules == nil {
+ result.Rules = make([]*Rule, 0, len(tr.ordering))
+ } else {
+ result.Rules = result.Rules[:0]
+ }
+
+ clear(result.Else)
for _, pos := range tr.ordering {
- sort.Slice(tr.unordered[pos], func(i, j int) bool {
- return tr.unordered[pos][i].prio[1] < tr.unordered[pos][j].prio[1]
+ slices.SortFunc(tr.unordered[pos], func(a, b *ruleNode) int {
+ return a.prio[1] - b.prio[1]
})
nodes := tr.unordered[pos]
root := nodes[0].rule
result.Rules = append(result.Rules, root)
if len(nodes) > 1 {
+ if result.Else == nil {
+ result.Else = map[*Rule][]*Rule{}
+ }
+
result.Else[root] = make([]*Rule, len(nodes)-1)
for i := 1; i < len(nodes); i++ {
result.Else[root][i-1] = nodes[i].rule
@@ -174,7 +189,26 @@ func (i *baseDocEqIndex) Lookup(resolver ValueResolver) (*IndexResult, error) {
}
}
- result.EarlyExit = tr.values.Len() == 1 && tr.values.Slice()[0].IsGround()
+ if !tr.multiple {
+ // even when the indexer hasn't seen multiple values, the rule itself could be one
+ // where early exit shouldn't be applied.
+ var lastValue Value
+ for i := range result.Rules {
+ if result.Rules[i].Head.DocKind() != CompleteDoc {
+ tr.multiple = true
+ break
+ }
+ if result.Rules[i].Head.Value != nil {
+ if lastValue != nil && !ValueEqual(lastValue, result.Rules[i].Head.Value.Value) {
+ tr.multiple = true
+ break
+ }
+ lastValue = result.Rules[i].Head.Value.Value
+ }
+ }
+ }
+
+ result.EarlyExit = !tr.multiple
return result, nil
}
@@ -192,13 +226,17 @@ func (i *baseDocEqIndex) AllRules(_ ValueResolver) (*IndexResult, error) {
result.Rules = make([]*Rule, 0, len(tr.ordering))
for _, pos := range tr.ordering {
- sort.Slice(tr.unordered[pos], func(i, j int) bool {
- return tr.unordered[pos][i].prio[1] < tr.unordered[pos][j].prio[1]
+ slices.SortFunc(tr.unordered[pos], func(a, b *ruleNode) int {
+ return a.prio[1] - b.prio[1]
})
nodes := tr.unordered[pos]
root := nodes[0].rule
result.Rules = append(result.Rules, root)
if len(nodes) > 1 {
+ if result.Else == nil {
+ result.Else = map[*Rule][]*Rule{}
+ }
+
result.Else[root] = make([]*Rule, len(nodes)-1)
for i := 1; i < len(nodes); i++ {
result.Else[root][i-1] = nodes[i].rule
@@ -206,7 +244,7 @@ func (i *baseDocEqIndex) AllRules(_ ValueResolver) (*IndexResult, error) {
}
}
- result.EarlyExit = tr.values.Len() == 1 && tr.values.Slice()[0].IsGround()
+ result.EarlyExit = !tr.multiple
return result, nil
}
@@ -235,7 +273,7 @@ type refindex struct {
type refindices struct {
isVirtual func(Ref) bool
rules map[*Rule][]*refindex
- frequency *util.HashMap
+ frequency *util.HasherMap[Ref, int]
sorted []Ref
}
@@ -243,12 +281,7 @@ func newrefindices(isVirtual func(Ref) bool) *refindices {
return &refindices{
isVirtual: isVirtual,
rules: map[*Rule][]*refindex{},
- frequency: util.NewHashMap(func(a, b util.T) bool {
- r1, r2 := a.(Ref), b.(Ref)
- return r1.Equal(r2)
- }, func(x util.T) int {
- return x.(Ref).Hash()
- }),
+ frequency: util.NewHasherMap[Ref, int](RefEqual),
}
}
@@ -296,9 +329,9 @@ func (i *refindices) Sorted() []Ref {
counts := make([]int, 0, i.frequency.Len())
i.sorted = make([]Ref, 0, i.frequency.Len())
- i.frequency.Iter(func(k, v util.T) bool {
- counts = append(counts, v.(int))
- i.sorted = append(i.sorted, k.(Ref))
+ i.frequency.Iter(func(k Ref, v int) bool {
+ counts = append(counts, v)
+ i.sorted = append(i.sorted, k)
return false
})
@@ -399,7 +432,7 @@ func (i *refindices) insert(rule *Rule, index *refindex) {
count = 0
}
- i.frequency.Put(index.Ref, count.(int)+1)
+ i.frequency.Put(index.Ref, count+1)
for pos, other := range i.rules[rule] {
if other.Ref.Equal(index.Ref) {
@@ -427,7 +460,8 @@ type trieWalker interface {
type trieTraversalResult struct {
unordered map[int][]*ruleNode
ordering []int
- values *set
+ exist *Term
+ multiple bool
}
var ttrPool = sync.Pool{
@@ -439,10 +473,6 @@ var ttrPool = sync.Pool{
func newTrieTraversalResult() *trieTraversalResult {
return &trieTraversalResult{
unordered: map[int][]*ruleNode{},
- // Number 3 is arbitrary, but seemed to be the most common number of values
- // stored when benchmarking the trie traversal against a large policy library
- // (Regal).
- values: newset(3),
}
}
@@ -455,21 +485,30 @@ func (tr *trieTraversalResult) Add(t *trieNode) {
}
tr.unordered[root] = append(nodes, node)
}
- if t.values != nil {
- t.values.Foreach(tr.values.insertNoGuard)
+ if t.multiple {
+ tr.multiple = true
}
+ if tr.multiple || t.value == nil {
+ return
+ }
+ if t.value.IsGround() && tr.exist == nil || tr.exist.Equal(t.value) {
+ tr.exist = t.value
+ return
+ }
+ tr.multiple = true
}
type trieNode struct {
ref Ref
- values Set
mappers []*valueMapper
next *trieNode
any *trieNode
undefined *trieNode
- scalars *util.HashMap
+ scalars *util.HasherMap[Value, *trieNode]
array *trieNode
rules []*ruleNode
+ value *Term
+ multiple bool
}
func (node *trieNode) String() string {
@@ -492,9 +531,7 @@ func (node *trieNode) String() string {
}
if node.scalars.Len() > 0 {
buf := make([]string, 0, node.scalars.Len())
- node.scalars.Iter(func(k, v util.T) bool {
- key := k.(Value)
- val := v.(*trieNode)
+ node.scalars.Iter(func(key Value, val *trieNode) bool {
buf = append(buf, fmt.Sprintf("scalar(%v):%p", key, val))
return false
})
@@ -507,10 +544,8 @@ func (node *trieNode) String() string {
if len(node.mappers) > 0 {
flags = append(flags, fmt.Sprintf("%d mapper(s)", len(node.mappers)))
}
- if node.values != nil {
- if l := node.values.Len(); l > 0 {
- flags = append(flags, fmt.Sprintf("%d value(s)", l))
- }
+ if node.value != nil {
+ flags = append(flags, "value exists")
}
return strings.Join(flags, " ")
}
@@ -518,13 +553,12 @@ func (node *trieNode) String() string {
func (node *trieNode) append(prio [2]int, rule *Rule) {
node.rules = append(node.rules, &ruleNode{prio, rule})
- if node.values != nil && rule.Head.Value != nil {
- node.values.Add(rule.Head.Value)
- return
+ if node.value != nil && rule.Head.Value != nil && !node.value.Equal(rule.Head.Value) {
+ node.multiple = true
}
- if node.values == nil && rule.Head.DocKind() == CompleteDoc {
- node.values = NewSet(rule.Head.Value)
+ if node.value == nil && rule.Head.DocKind() == CompleteDoc {
+ node.value = rule.Head.Value
}
}
@@ -535,7 +569,7 @@ type ruleNode struct {
func newTrieNodeImpl() *trieNode {
return &trieNode{
- scalars: util.NewHashMap(valueEq, valueHash),
+ scalars: util.NewHasherMap[Value, *trieNode](ValueEqual),
}
}
@@ -551,8 +585,7 @@ func (node *trieNode) Do(walker trieWalker) {
node.undefined.Do(next)
}
- node.scalars.Iter(func(_, v util.T) bool {
- child := v.(*trieNode)
+ node.scalars.Iter(func(_ Value, child *trieNode) bool {
child.Do(next)
return false
})
@@ -618,7 +651,7 @@ func (node *trieNode) insertValue(value Value) *trieNode {
child = newTrieNodeImpl()
node.scalars.Put(value, child)
}
- return child.(*trieNode)
+ return child
case *Array:
if node.array == nil {
node.array = newTrieNodeImpl()
@@ -647,7 +680,7 @@ func (node *trieNode) insertArray(arr *Array) *trieNode {
child = newTrieNodeImpl()
node.scalars.Put(head, child)
}
- return child.(*trieNode).insertArray(arr.Slice(1, -1))
+ return child.insertArray(arr.Slice(1, -1))
}
panic("illegal value")
@@ -712,7 +745,7 @@ func (node *trieNode) traverseValue(resolver ValueResolver, tr *trieTraversalRes
if !ok {
return nil
}
- return child.(*trieNode).Traverse(resolver, tr)
+ return child.Traverse(resolver, tr)
}
return nil
@@ -737,11 +770,16 @@ func (node *trieNode) traverseArray(resolver ValueResolver, tr *trieTraversalRes
return nil
}
- child, ok := node.scalars.Get(head)
- if !ok {
- return nil
+ switch head := head.(type) {
+ case Null, Boolean, Number, String:
+ child, ok := node.scalars.Get(head)
+ if !ok {
+ return nil
+ }
+ return child.traverseArray(resolver, tr, arr.Slice(1, -1))
}
- return child.(*trieNode).traverseArray(resolver, tr, arr.Slice(1, -1))
+
+ panic("illegal value")
}
func (node *trieNode) traverseUnknown(resolver ValueResolver, tr *trieTraversalResult) error {
@@ -767,12 +805,8 @@ func (node *trieNode) traverseUnknown(resolver ValueResolver, tr *trieTraversalR
}
var iterErr error
- node.scalars.Iter(func(_, v util.T) bool {
- child := v.(*trieNode)
- if iterErr = child.traverseUnknown(resolver, tr); iterErr != nil {
- return true
- }
- return false
+ node.scalars.Iter(func(_ Value, child *trieNode) bool {
+ return child.traverseUnknown(resolver, tr) != nil
})
return iterErr
@@ -786,7 +820,7 @@ func eqOperandsToRefAndValue(isVirtual func(Ref) bool, args []*Term, a, b *Term)
switch v := a.Value.(type) {
case Var:
for i, arg := range args {
- if arg.Value.Compare(v) == 0 {
+ if arg.Value.Compare(a.Value) == 0 {
if bval, ok := indexValue(b); ok {
return &refindex{Ref: Ref{FunctionArgRootDocument, InternedIntNumberTerm(i)}, Value: bval}, true
}
@@ -849,7 +883,7 @@ func globDelimiterToString(delim *Term) (string, bool) {
if arr.Len() == 0 {
result = "."
} else {
- for i := 0; i < arr.Len(); i++ {
+ for i := range arr.Len() {
term := arr.Elem(i)
s, ok := term.Value.(String)
if !ok {
@@ -862,6 +896,8 @@ func globDelimiterToString(delim *Term) (string, bool) {
return result, true
}
+var globwildcard = VarTerm("$globwildcard")
+
func globPatternToArray(pattern *Term, delim string) *Term {
s, ok := pattern.Value.(String)
@@ -874,7 +910,7 @@ func globPatternToArray(pattern *Term, delim string) *Term {
for i := range parts {
if parts[i] == "*" {
- arr[i] = VarTerm("$globwildcard")
+ arr[i] = globwildcard
} else {
var escaped bool
for _, c := range parts[i] {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/internal/scanner/scanner.go b/vendor/github.com/open-policy-agent/opa/v1/ast/internal/scanner/scanner.go
index 4558f91415..d70253bc5c 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/internal/scanner/scanner.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/internal/scanner/scanner.go
@@ -9,9 +9,9 @@ import (
"io"
"unicode"
"unicode/utf8"
- "unsafe"
"github.com/open-policy-agent/opa/v1/ast/internal/tokens"
+ "github.com/open-policy-agent/opa/v1/util"
)
const bom = 0xFEFF
@@ -101,8 +101,8 @@ func (s *Scanner) Keyword(lit string) tokens.Token {
func (s *Scanner) AddKeyword(kw string, tok tokens.Token) {
s.keywords[kw] = tok
- switch tok {
- case tokens.Every: // importing 'every' means also importing 'in'
+ if tok == tokens.Every {
+ // importing 'every' means also importing 'in'
s.keywords["in"] = tokens.In
}
}
@@ -165,7 +165,21 @@ func (s *Scanner) Scan() (tokens.Token, Position, string, []Error) {
var lit string
if s.isWhitespace() {
- lit = string(s.curr)
+ // string(rune) is an unnecessary heap allocation in this case as we know all
+ // the possible whitespace values, and can simply translate to string ourselves
+ switch s.curr {
+ case ' ':
+ lit = " "
+ case '\t':
+ lit = "\t"
+ case '\n':
+ lit = "\n"
+ case '\r':
+ lit = "\r"
+ default:
+ // unreachable unless isWhitespace changes
+ lit = string(s.curr)
+ }
s.next()
tok = tokens.Whitespace
} else if isLetter(s.curr) {
@@ -272,7 +286,7 @@ func (s *Scanner) scanIdentifier() string {
s.next()
}
- return byteSliceToString(s.bs[start : s.offset-1])
+ return util.ByteSliceToString(s.bs[start : s.offset-1])
}
func (s *Scanner) scanNumber() string {
@@ -323,7 +337,7 @@ func (s *Scanner) scanNumber() string {
}
}
- return byteSliceToString(s.bs[start : s.offset-1])
+ return util.ByteSliceToString(s.bs[start : s.offset-1])
}
func (s *Scanner) scanString() string {
@@ -357,7 +371,7 @@ func (s *Scanner) scanString() string {
}
}
- return byteSliceToString(s.bs[start : s.offset-1])
+ return util.ByteSliceToString(s.bs[start : s.offset-1])
}
func (s *Scanner) scanRawString() string {
@@ -373,7 +387,7 @@ func (s *Scanner) scanRawString() string {
}
}
- return byteSliceToString(s.bs[start : s.offset-1])
+ return util.ByteSliceToString(s.bs[start : s.offset-1])
}
func (s *Scanner) scanComment() string {
@@ -384,10 +398,10 @@ func (s *Scanner) scanComment() string {
end := s.offset - 1
// Trim carriage returns that precede the newline
if s.offset > 1 && s.bs[s.offset-2] == '\r' {
- end = end - 1
+ end -= 1
}
- return byteSliceToString(s.bs[start:end])
+ return util.ByteSliceToString(s.bs[start:end])
}
func (s *Scanner) next() {
@@ -457,7 +471,3 @@ func (s *Scanner) error(reason string) {
Col: s.col,
}, Message: reason})
}
-
-func byteSliceToString(bs []byte) string {
- return unsafe.String(unsafe.SliceData(bs), len(bs))
-}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/internal/tokens/tokens.go b/vendor/github.com/open-policy-agent/opa/v1/ast/internal/tokens/tokens.go
index 623ed7ed21..4033ba81ae 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/internal/tokens/tokens.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/internal/tokens/tokens.go
@@ -4,12 +4,14 @@
package tokens
+import "maps"
+
// Token represents a single Rego source code token
// for use by the Parser.
-type Token int
+type Token uint8
func (t Token) String() string {
- if t < 0 || int(t) >= len(strings) {
+ if int(t) >= len(strings) {
return "unknown"
}
return strings[t]
@@ -137,11 +139,7 @@ var keywords = map[string]Token{
// Keywords returns a copy of the default string -> Token keyword map.
func Keywords() map[string]Token {
- cpy := make(map[string]Token, len(keywords))
- for k, v := range keywords {
- cpy[k] = v
- }
- return cpy
+ return maps.Clone(keywords)
}
// IsKeyword returns if a token is a keyword
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/interning.go b/vendor/github.com/open-policy-agent/opa/v1/ast/interning.go
index 17b10231b7..012cffb9a9 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/interning.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/interning.go
@@ -17,6 +17,9 @@ var (
minusOneTerm = &Term{Value: Number("-1")}
InternedNullTerm = &Term{Value: Null{}}
+
+ InternedEmptyString = StringTerm("")
+ InternedEmptyObject = ObjectTerm()
)
// InternedBooleanTerm returns an interned term with the given boolean value.
@@ -60,6 +63,29 @@ func HasInternedIntNumberTerm(i int) bool {
return i >= -1 && i < len(intNumberTerms)
}
+func InternedStringTerm(s string) *Term {
+ if term, ok := internedStringTerms[s]; ok {
+ return term
+ }
+
+ return StringTerm(s)
+}
+
+var internedStringTerms = map[string]*Term{
+ "": InternedEmptyString,
+ "0": StringTerm("0"),
+ "1": StringTerm("1"),
+ "2": StringTerm("2"),
+ "3": StringTerm("3"),
+ "4": StringTerm("4"),
+ "5": StringTerm("5"),
+ "6": StringTerm("6"),
+ "7": StringTerm("7"),
+ "8": StringTerm("8"),
+ "9": StringTerm("9"),
+ "10": StringTerm("10"),
+}
+
var stringToIntNumberTermMap = map[string]*Term{
"-1": minusOneTerm,
"0": intNumberTerms[0],
@@ -1092,7 +1118,3 @@ var intNumberTerms = [...]*Term{
{Value: Number("511")},
{Value: Number("512")},
}
-
-var InternedEmptyString = StringTerm("")
-
-var InternedEmptyObject = ObjectTerm()
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/map.go b/vendor/github.com/open-policy-agent/opa/v1/ast/map.go
index 5a64f32505..d0aa43755f 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/map.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/map.go
@@ -13,15 +13,14 @@ import (
// ValueMap represents a key/value map between AST term values. Any type of term
// can be used as a key in the map.
type ValueMap struct {
- hashMap *util.HashMap
+ hashMap *util.TypedHashMap[Value, Value]
}
// NewValueMap returns a new ValueMap.
func NewValueMap() *ValueMap {
- vs := &ValueMap{
- hashMap: util.NewHashMap(valueEq, valueHash),
+ return &ValueMap{
+ hashMap: util.NewTypedHashMap(ValueEqual, ValueEqual, Value.Hash, Value.Hash, nil),
}
- return vs
}
// MarshalJSON provides a custom marshaller for the ValueMap which
@@ -39,16 +38,6 @@ func (vs *ValueMap) MarshalJSON() ([]byte, error) {
return json.Marshal(tmp)
}
-// Copy returns a shallow copy of the ValueMap.
-func (vs *ValueMap) Copy() *ValueMap {
- if vs == nil {
- return nil
- }
- cpy := NewValueMap()
- cpy.hashMap = vs.hashMap.Copy()
- return cpy
-}
-
// Equal returns true if this ValueMap equals the other.
func (vs *ValueMap) Equal(other *ValueMap) bool {
if vs == nil {
@@ -72,7 +61,7 @@ func (vs *ValueMap) Len() int {
func (vs *ValueMap) Get(k Value) Value {
if vs != nil {
if v, ok := vs.hashMap.Get(k); ok {
- return v.(Value)
+ return v
}
}
return nil
@@ -92,11 +81,7 @@ func (vs *ValueMap) Iter(iter func(Value, Value) bool) bool {
if vs == nil {
return false
}
- return vs.hashMap.Iter(func(kt, vt util.T) bool {
- k := kt.(Value)
- v := vt.(Value)
- return iter(k, v)
- })
+ return vs.hashMap.Iter(iter)
}
// Put inserts a key k into the map with value v.
@@ -121,13 +106,3 @@ func (vs *ValueMap) String() string {
}
return vs.hashMap.String()
}
-
-func valueHash(v util.T) int {
- return v.(Value).Hash()
-}
-
-func valueEq(a, b util.T) bool {
- av := a.(Value)
- bv := b.(Value)
- return av.Compare(bv) == 0
-}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/parser.go b/vendor/github.com/open-policy-agent/opa/v1/ast/parser.go
index 2054141d30..66779b8d75 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/parser.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/parser.go
@@ -7,6 +7,7 @@ package ast
import (
"bytes"
"encoding/json"
+ "errors"
"fmt"
"io"
"math/big"
@@ -133,7 +134,7 @@ func (c parsedTermCache) String() string {
s.WriteRune('{')
var e *parsedTermCacheItem
for e = c.m; e != nil; e = e.next {
- s.WriteString(fmt.Sprintf("%v", e))
+ s.WriteString(e.String())
}
s.WriteRune('}')
return s.String()
@@ -517,7 +518,7 @@ func parseAnnotations(comments []*Comment) ([]*Annotations, Errors) {
var curr *metadataParser
var blocks []*metadataParser
- for i := 0; i < len(comments); i++ {
+ for i := range comments {
if curr != nil {
if comments[i].Location.Row == comments[i-1].Location.Row+1 && comments[i].Location.Col == 1 {
curr.Append(comments[i])
@@ -725,7 +726,9 @@ func (p *Parser) parseRules() []*Rule {
// p[x] if ... becomes a single-value rule p[x]
if hasIf && !usesContains && len(rule.Head.Ref()) == 2 {
- if !rule.Head.Ref()[1].IsGround() && len(rule.Head.Args) == 0 {
+ v := rule.Head.Ref()[1]
+ _, isRef := v.Value.(Ref)
+ if (!v.IsGround() || isRef) && len(rule.Head.Args) == 0 {
rule.Head.Key = rule.Head.Ref()[1]
}
@@ -1638,6 +1641,10 @@ func (p *Parser) parseNumber() *Term {
func (p *Parser) parseString() *Term {
if p.s.lit[0] == '"' {
+ if p.s.lit == "\"\"" {
+ return NewTerm(InternedEmptyString.Value).SetLocation(p.s.Loc())
+ }
+
var s string
err := json.Unmarshal([]byte(p.s.lit), &s)
if err != nil {
@@ -2060,7 +2067,7 @@ func (p *Parser) parseTermPairList(end tokens.Token, r [][2]*Term) [][2]*Term {
func (p *Parser) parseTermOp(values ...tokens.Token) *Term {
for i := range values {
if p.s.tok == values[i] {
- r := RefTerm(VarTerm(fmt.Sprint(p.s.tok)).SetLocation(p.s.Loc())).SetLocation(p.s.Loc())
+ r := RefTerm(VarTerm(p.s.tok.String()).SetLocation(p.s.Loc())).SetLocation(p.s.Loc())
p.scan()
return r
}
@@ -2354,7 +2361,7 @@ func (b *metadataParser) Parse() (*Annotations, error) {
var raw rawAnnotation
if len(bytes.TrimSpace(b.buf.Bytes())) == 0 {
- return nil, fmt.Errorf("expected METADATA block, found whitespace")
+ return nil, errors.New("expected METADATA block, found whitespace")
}
if err := yaml.Unmarshal(b.buf.Bytes(), &raw); err != nil {
@@ -2403,7 +2410,7 @@ func (b *metadataParser) Parse() (*Annotations, error) {
a.Path, err = ParseRef(k)
if err != nil {
- return nil, fmt.Errorf("invalid document reference")
+ return nil, errors.New("invalid document reference")
}
switch v := v.(type) {
@@ -2503,7 +2510,7 @@ func unwrapPair(pair map[string]interface{}) (string, interface{}) {
return "", nil
}
-var errInvalidSchemaRef = fmt.Errorf("invalid schema reference")
+var errInvalidSchemaRef = errors.New("invalid schema reference")
// NOTE(tsandall): 'schema' is not registered as a root because it's not
// supported by the compiler or evaluator today. Once we fix that, we can remove
@@ -2542,7 +2549,7 @@ func parseRelatedResource(rr interface{}) (*RelatedResourceAnnotation, error) {
}
return &RelatedResourceAnnotation{Ref: *u}, nil
}
- return nil, fmt.Errorf("ref URL may not be empty string")
+ return nil, errors.New("ref URL may not be empty string")
case map[string]interface{}:
description := strings.TrimSpace(getSafeString(rr, "description"))
ref := strings.TrimSpace(getSafeString(rr, "ref"))
@@ -2553,10 +2560,10 @@ func parseRelatedResource(rr interface{}) (*RelatedResourceAnnotation, error) {
}
return &RelatedResourceAnnotation{Description: description, Ref: *u}, nil
}
- return nil, fmt.Errorf("'ref' value required in object")
+ return nil, errors.New("'ref' value required in object")
}
- return nil, fmt.Errorf("invalid value type, must be string or map")
+ return nil, errors.New("invalid value type, must be string or map")
}
func parseAuthor(a interface{}) (*AuthorAnnotation, error) {
@@ -2574,10 +2581,10 @@ func parseAuthor(a interface{}) (*AuthorAnnotation, error) {
if len(name) > 0 || len(email) > 0 {
return &AuthorAnnotation{name, email}, nil
}
- return nil, fmt.Errorf("'name' and/or 'email' values required in object")
+ return nil, errors.New("'name' and/or 'email' values required in object")
}
- return nil, fmt.Errorf("invalid value type, must be string or map")
+ return nil, errors.New("invalid value type, must be string or map")
}
func getSafeString(m map[string]interface{}, k string) string {
@@ -2599,7 +2606,7 @@ func parseAuthorString(s string) (*AuthorAnnotation, error) {
parts := strings.Fields(s)
if len(parts) == 0 {
- return nil, fmt.Errorf("author is an empty string")
+ return nil, errors.New("author is an empty string")
}
namePartCount := len(parts)
@@ -2609,7 +2616,7 @@ func parseAuthorString(s string) (*AuthorAnnotation, error) {
strings.HasSuffix(trailing, emailSuffix) {
email = trailing[len(emailPrefix):]
email = email[0 : len(email)-len(emailSuffix)]
- namePartCount = namePartCount - 1
+ namePartCount -= 1
}
name := strings.Join(parts[0:namePartCount], " ")
@@ -2635,7 +2642,7 @@ func convertYAMLMapKeyTypes(x any, path []string) (any, error) {
return result, nil
case []any:
for i := range x {
- x[i], err = convertYAMLMapKeyTypes(x[i], append(path, fmt.Sprintf("%d", i)))
+ x[i], err = convertYAMLMapKeyTypes(x[i], append(path, strconv.Itoa(i)))
if err != nil {
return nil, err
}
@@ -2681,7 +2688,7 @@ func IsFutureKeywordForRegoVersion(s string, v RegoVersion) bool {
func (p *Parser) futureImport(imp *Import, allowedFutureKeywords map[string]tokens.Token) {
path := imp.Path.Value.(Ref)
- if len(path) == 1 || !path[1].Equal(StringTerm("keywords")) {
+ if len(path) == 1 || !path[1].Equal(keywordsTerm) {
p.errorf(imp.Path.Location, "invalid import, must be `future.keywords`")
return
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/parser_ext.go b/vendor/github.com/open-policy-agent/opa/v1/ast/parser_ext.go
index 9712cb611a..dec06f1969 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/parser_ext.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/parser_ext.go
@@ -155,7 +155,7 @@ func MustParseTerm(input string) *Term {
func ParseRuleFromBody(module *Module, body Body) (*Rule, error) {
if len(body) != 1 {
- return nil, fmt.Errorf("multiple expressions cannot be used for rule head")
+ return nil, errors.New("multiple expressions cannot be used for rule head")
}
return ParseRuleFromExpr(module, body[0])
@@ -166,11 +166,11 @@ func ParseRuleFromBody(module *Module, body Body) (*Rule, error) {
func ParseRuleFromExpr(module *Module, expr *Expr) (*Rule, error) {
if len(expr.With) > 0 {
- return nil, fmt.Errorf("expressions using with keyword cannot be used for rule head")
+ return nil, errors.New("expressions using with keyword cannot be used for rule head")
}
if expr.Negated {
- return nil, fmt.Errorf("negated expressions cannot be used for rule head")
+ return nil, errors.New("negated expressions cannot be used for rule head")
}
if _, ok := expr.Terms.(*SomeDecl); ok {
@@ -207,7 +207,7 @@ func ParseRuleFromExpr(module *Module, expr *Expr) (*Rule, error) {
}
if _, ok := BuiltinMap[expr.Operator().String()]; ok {
- return nil, fmt.Errorf("rule name conflicts with built-in function")
+ return nil, errors.New("rule name conflicts with built-in function")
}
return ParseRuleFromCallExpr(module, expr.Terms.([]*Term))
@@ -272,7 +272,7 @@ func ParseCompleteDocRuleFromEqExpr(module *Module, lhs, rhs *Term) (*Rule, erro
}
head = RefHead(r)
if len(r) > 1 && !r[len(r)-1].IsGround() {
- return nil, fmt.Errorf("ref not ground")
+ return nil, errors.New("ref not ground")
}
} else {
return nil, fmt.Errorf("%v cannot be used for rule name", ValueName(lhs.Value))
@@ -387,7 +387,7 @@ func ParseRuleFromCallEqExpr(module *Module, lhs, rhs *Term) (*Rule, error) {
call, ok := lhs.Value.(Call)
if !ok {
- return nil, fmt.Errorf("must be call")
+ return nil, errors.New("must be call")
}
ref, ok := call[0].Value.(Ref)
@@ -419,7 +419,7 @@ func ParseRuleFromCallEqExpr(module *Module, lhs, rhs *Term) (*Rule, error) {
func ParseRuleFromCallExpr(module *Module, terms []*Term) (*Rule, error) {
if len(terms) <= 1 {
- return nil, fmt.Errorf("rule argument list must take at least one argument")
+ return nil, errors.New("rule argument list must take at least one argument")
}
loc := terms[0].Location
@@ -600,7 +600,7 @@ func ParseStatement(input string) (Statement, error) {
return nil, err
}
if len(stmts) != 1 {
- return nil, fmt.Errorf("expected exactly one statement")
+ return nil, errors.New("expected exactly one statement")
}
return stmts[0], nil
}
@@ -611,7 +611,7 @@ func ParseStatementWithOpts(input string, popts ParserOptions) (Statement, error
return nil, err
}
if len(stmts) != 1 {
- return nil, fmt.Errorf("expected exactly one statement")
+ return nil, errors.New("expected exactly one statement")
}
return stmts[0], nil
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/policy.go b/vendor/github.com/open-policy-agent/opa/v1/ast/policy.go
index 94dc25244b..978de9441b 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/policy.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/policy.go
@@ -8,21 +8,14 @@ import (
"bytes"
"encoding/json"
"fmt"
- "math/rand"
+ "slices"
"strings"
- "time"
"github.com/open-policy-agent/opa/v1/ast/internal/tokens"
astJSON "github.com/open-policy-agent/opa/v1/ast/json"
"github.com/open-policy-agent/opa/v1/util"
)
-// Initialize seed for term hashing. This is intentionally placed before the
-// root document sets are constructed to ensure they use the same hash seed as
-// subsequent lookups. If the hash seeds are out of sync, lookups will fail.
-var hashSeed = rand.New(rand.NewSource(time.Now().UnixNano()))
-var hashSeed0 = (uint64(hashSeed.Uint32()) << 32) | uint64(hashSeed.Uint32())
-
// DefaultRootDocument is the default root document.
//
// All package directives inside source files are implicitly prefixed with the
@@ -502,7 +495,7 @@ func (c *Comment) Equal(other *Comment) bool {
// Compare returns an integer indicating whether pkg is less than, equal to,
// or greater than other.
func (pkg *Package) Compare(other *Package) int {
- return Compare(pkg.Path, other.Path)
+ return termSliceCompare(pkg.Path, other.Path)
}
// Copy returns a deep copy of pkg.
@@ -594,7 +587,8 @@ func (imp *Import) Compare(other *Import) int {
if cmp := Compare(imp.Path, other.Path); cmp != 0 {
return cmp
}
- return Compare(imp.Alias, other.Alias)
+
+ return VarCompare(imp.Alias, other.Alias)
}
// Copy returns a deep copy of imp.
@@ -644,7 +638,7 @@ func (imp *Import) Name() Var {
func (imp *Import) String() string {
buf := []string{"import", imp.Path.String()}
if len(imp.Alias) > 0 {
- buf = append(buf, "as "+imp.Alias.String())
+ buf = append(buf, "as", imp.Alias.String())
}
return strings.Join(buf, " ")
}
@@ -681,8 +675,11 @@ func (rule *Rule) Compare(other *Rule) int {
if cmp := rule.Head.Compare(other.Head); cmp != 0 {
return cmp
}
- if cmp := util.Compare(rule.Default, other.Default); cmp != 0 {
- return cmp
+ if rule.Default != other.Default {
+ if !rule.Default {
+ return -1
+ }
+ return 1
}
if cmp := rule.Body.Compare(other.Body); cmp != 0 {
return cmp
@@ -701,9 +698,11 @@ func (rule *Rule) Copy() *Rule {
cpy.Head = rule.Head.Copy()
cpy.Body = rule.Body.Copy()
- cpy.Annotations = make([]*Annotations, len(rule.Annotations))
- for i, a := range rule.Annotations {
- cpy.Annotations[i] = a.Copy(&cpy)
+ if len(cpy.Annotations) > 0 {
+ cpy.Annotations = make([]*Annotations, len(rule.Annotations))
+ for i, a := range rule.Annotations {
+ cpy.Annotations[i] = a.Copy(&cpy)
+ }
}
if cpy.Else != nil {
@@ -780,9 +779,7 @@ func (rule *Rule) stringWithOpts(opts toStringOpts) string {
case RegoV1, RegoV0CompatV1:
buf = append(buf, "if")
}
- buf = append(buf, "{")
- buf = append(buf, rule.Body.String())
- buf = append(buf, "}")
+ buf = append(buf, "{", rule.Body.String(), "}")
}
if rule.Else != nil {
buf = append(buf, rule.Else.elseString(opts))
@@ -828,8 +825,7 @@ func (rule *Rule) elseString(opts toStringOpts) string {
value := rule.Head.Value
if value != nil {
- buf = append(buf, "=")
- buf = append(buf, value.String())
+ buf = append(buf, "=", value.String())
}
switch opts.RegoVersion() {
@@ -837,9 +833,7 @@ func (rule *Rule) elseString(opts toStringOpts) string {
buf = append(buf, "if")
}
- buf = append(buf, "{")
- buf = append(buf, rule.Body.String())
- buf = append(buf, "}")
+ buf = append(buf, "{", rule.Body.String(), "}")
if rule.Else != nil {
buf = append(buf, rule.Else.elseString(opts))
@@ -892,7 +886,7 @@ func RefHead(ref Ref, args ...*Term) *Head {
}
// DocKind represents the collection of document types that can be produced by rules.
-type DocKind int
+type DocKind byte
const (
// CompleteDoc represents a document that is completely defined by the rule.
@@ -912,11 +906,13 @@ func (head *Head) DocKind() DocKind {
return PartialObjectDoc
}
return PartialSetDoc
+ } else if head.HasDynamicRef() {
+ return PartialObjectDoc
}
return CompleteDoc
}
-type RuleKind int
+type RuleKind byte
const (
SingleValue = iota
@@ -973,7 +969,7 @@ func (head *Head) Compare(other *Head) int {
if cmp := Compare(head.Reference, other.Reference); cmp != 0 {
return cmp
}
- if cmp := Compare(head.Name, other.Name); cmp != 0 {
+ if cmp := VarCompare(head.Name, other.Name); cmp != 0 {
return cmp
}
if cmp := Compare(head.Key, other.Key); cmp != 0 {
@@ -1091,8 +1087,7 @@ func (head *Head) SetLoc(loc *Location) {
func (head *Head) HasDynamicRef() bool {
pos := head.Reference.Dynamic()
- // Ref is dynamic if it has one non-constant term that isn't the first or last term or if it's a partial set rule.
- return pos > 0 && (pos < len(head.Reference)-1 || head.RuleKind() == MultiValue)
+ return pos > 0 && (pos < len(head.Reference))
}
// Copy returns a deep copy of a.
@@ -1177,7 +1172,7 @@ func (body Body) Compare(other Body) int {
if len(other) < minLen {
minLen = len(other)
}
- for i := 0; i < minLen; i++ {
+ for i := range minLen {
if cmp := body[i].Compare(other[i]); cmp != 0 {
return cmp
}
@@ -1202,12 +1197,7 @@ func (body Body) Copy() Body {
// Contains returns true if this body contains the given expression.
func (body Body) Contains(x *Expr) bool {
- for _, e := range body {
- if e.Equal(x) {
- return true
- }
- }
- return false
+ return slices.ContainsFunc(body, x.Equal)
}
// Equal returns true if this Body is equal to the other Body.
@@ -1406,11 +1396,7 @@ func (expr *Expr) Copy() *Expr {
case *SomeDecl:
cpy.Terms = ts.Copy()
case []*Term:
- cpyTs := make([]*Term, len(ts))
- for i := range ts {
- cpyTs[i] = ts[i].Copy()
- }
- cpy.Terms = cpyTs
+ cpy.Terms = termSliceCopy(ts)
case *Term:
cpy.Terms = ts.Copy()
case *Every:
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/schema.go b/vendor/github.com/open-policy-agent/opa/v1/ast/schema.go
index e84a147a4a..3f9e2001d5 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/schema.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/schema.go
@@ -13,41 +13,32 @@ import (
// SchemaSet holds a map from a path to a schema.
type SchemaSet struct {
- m *util.HashMap
+ m *util.HasherMap[Ref, any]
}
// NewSchemaSet returns an empty SchemaSet.
func NewSchemaSet() *SchemaSet {
-
- eqFunc := func(a, b util.T) bool {
- return a.(Ref).Equal(b.(Ref))
- }
-
- hashFunc := func(x util.T) int { return x.(Ref).Hash() }
-
return &SchemaSet{
- m: util.NewHashMap(eqFunc, hashFunc),
+ m: util.NewHasherMap[Ref, any](RefEqual),
}
}
// Put inserts a raw schema into the set.
-func (ss *SchemaSet) Put(path Ref, raw interface{}) {
+func (ss *SchemaSet) Put(path Ref, raw any) {
ss.m.Put(path, raw)
}
// Get returns the raw schema identified by the path.
-func (ss *SchemaSet) Get(path Ref) interface{} {
- if ss == nil {
- return nil
+func (ss *SchemaSet) Get(path Ref) any {
+ if ss != nil {
+ if x, ok := ss.m.Get(path); ok {
+ return x
+ }
}
- x, ok := ss.m.Get(path)
- if !ok {
- return nil
- }
- return x
+ return nil
}
-func loadSchema(raw interface{}, allowNet []string) (types.Type, error) {
+func loadSchema(raw any, allowNet []string) (types.Type, error) {
jsonSchema, err := compileSchema(raw, allowNet)
if err != nil {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/syncpools.go b/vendor/github.com/open-policy-agent/opa/v1/ast/syncpools.go
new file mode 100644
index 0000000000..cb150d39b5
--- /dev/null
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/syncpools.go
@@ -0,0 +1,69 @@
+package ast
+
+import (
+ "strings"
+ "sync"
+)
+
+type termPtrPool struct {
+ pool sync.Pool
+}
+
+type stringBuilderPool struct {
+ pool sync.Pool
+}
+
+type indexResultPool struct {
+ pool sync.Pool
+}
+
+func (p *termPtrPool) Get() *Term {
+ return p.pool.Get().(*Term)
+}
+
+func (p *termPtrPool) Put(t *Term) {
+ p.pool.Put(t)
+}
+
+func (p *stringBuilderPool) Get() *strings.Builder {
+ return p.pool.Get().(*strings.Builder)
+}
+
+func (p *stringBuilderPool) Put(sb *strings.Builder) {
+ sb.Reset()
+ p.pool.Put(sb)
+}
+
+func (p *indexResultPool) Get() *IndexResult {
+ return p.pool.Get().(*IndexResult)
+}
+
+func (p *indexResultPool) Put(x *IndexResult) {
+ if x != nil {
+ p.pool.Put(x)
+ }
+}
+
+var TermPtrPool = &termPtrPool{
+ pool: sync.Pool{
+ New: func() any {
+ return &Term{}
+ },
+ },
+}
+
+var sbPool = &stringBuilderPool{
+ pool: sync.Pool{
+ New: func() any {
+ return &strings.Builder{}
+ },
+ },
+}
+
+var IndexResultPool = &indexResultPool{
+ pool: sync.Pool{
+ New: func() any {
+ return &IndexResult{}
+ },
+ },
+}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/term.go b/vendor/github.com/open-policy-agent/opa/v1/ast/term.go
index 9abc29346a..866fc4ddb6 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/term.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/term.go
@@ -8,6 +8,7 @@ package ast
import (
"bytes"
"encoding/json"
+ "errors"
"fmt"
"io"
"math"
@@ -19,14 +20,14 @@ import (
"strings"
"sync"
- "github.com/OneOfOne/xxhash"
+ "github.com/cespare/xxhash/v2"
astJSON "github.com/open-policy-agent/opa/v1/ast/json"
"github.com/open-policy-agent/opa/v1/ast/location"
"github.com/open-policy-agent/opa/v1/util"
)
-var errFindNotFound = fmt.Errorf("find: not found")
+var errFindNotFound = errors.New("find: not found")
// Location records a position in source code.
type Location = location.Location
@@ -55,13 +56,12 @@ type Value interface {
// InterfaceToValue converts a native Go value x to a Value.
func InterfaceToValue(x interface{}) (Value, error) {
switch x := x.(type) {
+ case Value:
+ return x, nil
case nil:
return NullValue, nil
case bool:
- if x {
- return InternedBooleanTerm(true).Value, nil
- }
- return InternedBooleanTerm(false).Value, nil
+ return InternedBooleanTerm(x).Value, nil
case json.Number:
if interned := InternedIntNumberTermFromString(string(x)); interned != nil {
return interned.Value, nil
@@ -87,6 +87,12 @@ func InterfaceToValue(x interface{}) (Value, error) {
r[i].Value = e
}
return NewArray(r...), nil
+ case []string:
+ r := util.NewPtrSlice[Term](len(x))
+ for i, e := range x {
+ r[i].Value = String(e)
+ }
+ return NewArray(r...), nil
case map[string]any:
kvs := util.NewPtrSlice[Term](len(x) * 2)
idx := 0
@@ -182,7 +188,7 @@ func valueToInterface(v Value, resolver Resolver, opt JSONOpt) (interface{}, err
return string(v), nil
case *Array:
buf := []interface{}{}
- for i := 0; i < v.Len(); i++ {
+ for i := range v.Len() {
x1, err := valueToInterface(v.Elem(i).Value, resolver, opt)
if err != nil {
return nil, err
@@ -618,10 +624,7 @@ func (bol Boolean) Compare(other Value) int {
// Find returns the current value or a not found error.
func (bol Boolean) Find(path Ref) (Value, error) {
if len(path) == 0 {
- if bol {
- return InternedBooleanTerm(true).Value, nil
- }
- return InternedBooleanTerm(false).Value, nil
+ return InternedBooleanTerm(bool(bol)).Value, nil
}
return nil, errFindNotFound
}
@@ -718,7 +721,7 @@ func (num Number) Hash() int {
f, err := json.Number(num).Float64()
if err != nil {
bs := []byte(num)
- h := xxhash.Checksum64(bs)
+ h := xxhash.Sum64(bs)
return int(h)
}
return int(f)
@@ -834,8 +837,7 @@ func (str String) String() string {
// Hash returns the hash code for the Value.
func (str String) Hash() int {
- h := xxhash.ChecksumString64S(string(str), hashSeed0)
- return int(h)
+ return int(xxhash.Sum64String(string(str)))
}
// Var represents a variable as defined by the language.
@@ -876,8 +878,7 @@ func (v Var) Find(path Ref) (Value, error) {
// Hash returns the hash code for the Value.
func (v Var) Hash() int {
- h := xxhash.ChecksumString64S(string(v), hashSeed0)
- return int(h)
+ return int(xxhash.Sum64String(string(v)))
}
// IsGround always returns false.
@@ -1014,6 +1015,25 @@ func (ref Ref) Copy() Ref {
return termSliceCopy(ref)
}
+// CopyNonGround returns a new ref with deep copies of the non-ground parts and shallow
+// copies of the ground parts. This is a *much* cheaper operation than Copy for operations
+// that only intend to modify (e.g. plug) the non-ground parts. The head element of the ref
+// is always shallow copied.
+func (ref Ref) CopyNonGround() Ref {
+ cpy := make(Ref, len(ref))
+ cpy[0] = ref[0]
+
+ for i := 1; i < len(ref); i++ {
+ if ref[i].Value.IsGround() {
+ cpy[i] = ref[i]
+ } else {
+ cpy[i] = ref[i].Copy()
+ }
+ }
+
+ return cpy
+}
+
// Equal returns true if ref is equal to other.
func (ref Ref) Equal(other Value) bool {
switch o := other.(type) {
@@ -1143,7 +1163,7 @@ func (ref Ref) Ptr() (string, error) {
if str, ok := term.Value.(String); ok {
parts = append(parts, url.PathEscape(string(str)))
} else {
- return "", fmt.Errorf("invalid path value type")
+ return "", errors.New("invalid path value type")
}
}
return strings.Join(parts, "/"), nil
@@ -1155,20 +1175,12 @@ func IsVarCompatibleString(s string) bool {
return varRegexp.MatchString(s)
}
-var sbPool = sync.Pool{
- New: func() any {
- return &strings.Builder{}
- },
-}
-
func (ref Ref) String() string {
if len(ref) == 0 {
return ""
}
- sb := sbPool.Get().(*strings.Builder)
- sb.Reset()
-
+ sb := sbPool.Get()
defer sbPool.Put(sb)
sb.Grow(10 * len(ref))
@@ -1311,7 +1323,15 @@ func (arr *Array) Find(path Ref) (Value, error) {
if i < 0 || i >= arr.Len() {
return nil, errFindNotFound
}
- return arr.Elem(i).Value.Find(path[1:])
+
+ term := arr.Elem(i)
+ // Using Find on scalar values costs an allocation (type -> Value conversion)
+ // and since we already have the Value here, we can avoid that.
+ if len(path) == 1 && IsScalar(term.Value) {
+ return term.Value, nil
+ }
+
+ return term.Value.Find(path[1:])
}
// Get returns the element at pos or nil if not possible.
@@ -1366,20 +1386,19 @@ func (arr *Array) MarshalJSON() ([]byte, error) {
}
func (arr *Array) String() string {
- sb := sbPool.Get().(*strings.Builder)
- sb.Reset()
+ sb := sbPool.Get()
sb.Grow(len(arr.elems) * 16)
defer sbPool.Put(sb)
- sb.WriteRune('[')
+ sb.WriteByte('[')
for i, e := range arr.elems {
if i > 0 {
sb.WriteString(", ")
}
sb.WriteString(e.String())
}
- sb.WriteRune(']')
+ sb.WriteByte(']')
return sb.String()
}
@@ -1565,20 +1584,19 @@ func (s *set) String() string {
return "set()"
}
- sb := sbPool.Get().(*strings.Builder)
- sb.Reset()
+ sb := sbPool.Get()
sb.Grow(s.Len() * 16)
defer sbPool.Put(sb)
- sb.WriteRune('{')
+ sb.WriteByte('{')
for i := range s.sortedKeys() {
if i > 0 {
sb.WriteString(", ")
}
sb.WriteString(s.keys[i].Value.String())
}
- sb.WriteRune('}')
+ sb.WriteByte('}')
return sb.String()
}
@@ -1748,20 +1766,6 @@ func (s *set) Slice() []*Term {
return s.sortedKeys()
}
-// Internal method to use for cases where a set may be reused in favor
-// of creating a new one (with the associated allocations).
-func (s *set) clear() {
- clear(s.elems)
- s.keys = s.keys[:0]
- s.hash = 0
- s.ground = true
- s.sortGuard = sync.Once{}
-}
-
-func (s *set) insertNoGuard(x *Term) {
- s.insert(x, false)
-}
-
// NOTE(philipc): We assume a many-readers, single-writer model here.
// This method should NOT be used concurrently, or else we risk data races.
func (s *set) insert(x *Term, resetSortGuard bool) {
@@ -2213,7 +2217,7 @@ type objectElem struct {
type objectElemSlice []*objectElem
func (s objectElemSlice) Less(i, j int) bool { return Compare(s[i].key.Value, s[j].key.Value) < 0 }
-func (s objectElemSlice) Swap(i, j int) { x := s[i]; s[i] = s[j]; s[j] = x }
+func (s objectElemSlice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s objectElemSlice) Len() int { return len(s) }
// Item is a helper for constructing an tuple containing two Terms
@@ -2253,7 +2257,7 @@ func (obj *object) Compare(other Value) int {
if len(b.keys) < len(akeys) {
minLen = len(bkeys)
}
- for i := 0; i < minLen; i++ {
+ for i := range minLen {
keysCmp := Compare(akeys[i].key, bkeys[i].key)
if keysCmp < 0 {
return -1
@@ -2282,11 +2286,17 @@ func (obj *object) Find(path Ref) (Value, error) {
if len(path) == 0 {
return obj, nil
}
- value := obj.Get(path[0])
- if value == nil {
+ term := obj.Get(path[0])
+ if term == nil {
return nil, errFindNotFound
}
- return value.Value.Find(path[1:])
+ // Using Find on scalar values costs an allocation (type -> Value conversion)
+ // and since we already have the Value here, we can avoid that.
+ if len(path) == 1 && IsScalar(term.Value) {
+ return term.Value, nil
+ }
+
+ return term.Value.Find(path[1:])
}
func (obj *object) Insert(k, v *Term) {
@@ -2375,7 +2385,8 @@ func (obj *object) Foreach(f func(*Term, *Term)) {
}
// Map returns a new Object constructed by mapping each element in the object
-// using the function f.
+// using the function f. If f returns an error, the error is returned by Map.
+// If f returns a nil key, the element is skipped.
func (obj *object) Map(f func(*Term, *Term) (*Term, *Term, error)) (Object, error) {
cpy := newobject(obj.Len())
for _, node := range obj.sortedKeys() {
@@ -2383,7 +2394,9 @@ func (obj *object) Map(f func(*Term, *Term) (*Term, *Term, error)) (Object, erro
if err != nil {
return nil, err
}
- cpy.insert(k, v, false)
+ if k != nil {
+ cpy.insert(k, v, false)
+ }
}
return cpy, nil
}
@@ -2484,13 +2497,12 @@ func (obj *object) Len() int {
}
func (obj *object) String() string {
- sb := sbPool.Get().(*strings.Builder)
- sb.Reset()
+ sb := sbPool.Get()
sb.Grow(obj.Len() * 32)
defer sbPool.Put(sb)
- sb.WriteRune('{')
+ sb.WriteByte('{')
for i, elem := range obj.sortedKeys() {
if i > 0 {
@@ -2500,7 +2512,7 @@ func (obj *object) String() string {
sb.WriteString(": ")
sb.WriteString(elem.value.String())
}
- sb.WriteRune('}')
+ sb.WriteByte('}')
return sb.String()
}
@@ -2750,7 +2762,7 @@ func filterObject(o Value, filter Value) (Value, error) {
return o, nil
case *Array:
values := NewArray()
- for i := 0; i < v.Len(); i++ {
+ for i := range v.Len() {
subFilter := filteredObj.Get(StringTerm(strconv.Itoa(i)))
if subFilter != nil {
filteredValue, err := filterObject(v.Elem(i).Value, subFilter.Value)
@@ -3054,14 +3066,10 @@ func (c Call) String() string {
func termSliceCopy(a []*Term) []*Term {
cpy := make([]*Term, len(a))
- termSliceCopyTo(a, cpy)
- return cpy
-}
-
-func termSliceCopyTo(src, dst []*Term) {
- for i := range src {
- dst[i] = src[i].Copy()
+ for i := range a {
+ cpy[i] = a[i].Copy()
}
+ return cpy
}
func termSliceEqual(a, b []*Term) bool {
@@ -3115,7 +3123,7 @@ func unmarshalBody(b []interface{}) (Body, error) {
}
return buf, nil
unmarshal_error:
- return nil, fmt.Errorf("ast: unable to unmarshal body")
+ return nil, errors.New("ast: unable to unmarshal body")
}
func unmarshalExpr(expr *Expr, v map[string]interface{}) error {
@@ -3252,7 +3260,7 @@ func unmarshalTermSlice(s []interface{}) ([]*Term, error) {
}
return nil, err
}
- return nil, fmt.Errorf("ast: unable to unmarshal term")
+ return nil, errors.New("ast: unable to unmarshal term")
}
return buf, nil
}
@@ -3261,7 +3269,7 @@ func unmarshalTermSliceValue(d map[string]interface{}) ([]*Term, error) {
if s, ok := d["value"].([]interface{}); ok {
return unmarshalTermSlice(s)
}
- return nil, fmt.Errorf(`ast: unable to unmarshal term (expected {"value": [...], "type": ...} where type is one of: ref, array, or set)`)
+ return nil, errors.New(`ast: unable to unmarshal term (expected {"value": [...], "type": ...} where type is one of: ref, array, or set)`)
}
func unmarshalWith(i interface{}) (*With, error) {
@@ -3281,7 +3289,7 @@ func unmarshalWith(i interface{}) (*With, error) {
}
return nil, err
}
- return nil, fmt.Errorf(`ast: unable to unmarshal with modifier (expected {"target": {...}, "value": {...}})`)
+ return nil, errors.New(`ast: unable to unmarshal with modifier (expected {"target": {...}, "value": {...}})`)
}
func unmarshalValue(d map[string]interface{}) (Value, error) {
@@ -3399,5 +3407,5 @@ func unmarshalValue(d map[string]interface{}) (Value, error) {
}
}
unmarshal_error:
- return nil, fmt.Errorf("ast: unable to unmarshal term")
+ return nil, errors.New("ast: unable to unmarshal term")
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/transform.go b/vendor/github.com/open-policy-agent/opa/v1/ast/transform.go
index 391a164860..e8c9ddcab1 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/transform.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/transform.go
@@ -234,7 +234,7 @@ func Transform(t Transformer, x interface{}) (interface{}, error) {
return k, v, nil
})
case *Array:
- for i := 0; i < y.Len(); i++ {
+ for i := range y.Len() {
v, err := transformTerm(t, y.Elem(i))
if err != nil {
return nil, err
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/unify.go b/vendor/github.com/open-policy-agent/opa/v1/ast/unify.go
index 60244974a9..182aae090b 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/unify.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/unify.go
@@ -135,7 +135,7 @@ func (u *unifier) unify(a *Term, b *Term) {
}
case *Array:
if a.Len() == b.Len() {
- for i := 0; i < a.Len(); i++ {
+ for i := range a.Len() {
u.unify(a.Elem(i), b.Elem(i))
}
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/varset.go b/vendor/github.com/open-policy-agent/opa/v1/ast/varset.go
index d51abbdae6..bccb035e30 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/varset.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/varset.go
@@ -16,13 +16,18 @@ type VarSet map[Var]struct{}
// NewVarSet returns a new VarSet containing the specified variables.
func NewVarSet(vs ...Var) VarSet {
- s := VarSet{}
+ s := make(VarSet, len(vs))
for _, v := range vs {
s.Add(v)
}
return s
}
+// NewVarSetOfSize returns a new empty VarSet with capacity for the specified number of variables.
+func NewVarSetOfSize(size int) VarSet {
+ return make(VarSet, size)
+}
+
// Add updates the set to include the variable "v".
func (s VarSet) Add(v Var) {
s[v] = struct{}{}
@@ -36,7 +41,7 @@ func (s VarSet) Contains(v Var) bool {
// Copy returns a shallow copy of the VarSet.
func (s VarSet) Copy() VarSet {
- cpy := VarSet{}
+ cpy := NewVarSetOfSize(len(s))
for v := range s {
cpy.Add(v)
}
@@ -45,7 +50,13 @@ func (s VarSet) Copy() VarSet {
// Diff returns a VarSet containing variables in s that are not in vs.
func (s VarSet) Diff(vs VarSet) VarSet {
- r := VarSet{}
+ i := 0
+ for v := range s {
+ if !vs.Contains(v) {
+ i++
+ }
+ }
+ r := NewVarSetOfSize(i)
for v := range s {
if !vs.Contains(v) {
r.Add(v)
@@ -56,15 +67,26 @@ func (s VarSet) Diff(vs VarSet) VarSet {
// Equal returns true if s contains exactly the same elements as vs.
func (s VarSet) Equal(vs VarSet) bool {
- if len(s.Diff(vs)) > 0 {
+ if len(s) != len(vs) {
return false
}
- return len(vs.Diff(s)) == 0
+ for v := range s {
+ if !vs.Contains(v) {
+ return false
+ }
+ }
+ return true
}
// Intersect returns a VarSet containing variables in s that are in vs.
func (s VarSet) Intersect(vs VarSet) VarSet {
- r := VarSet{}
+ i := 0
+ for v := range s {
+ if vs.Contains(v) {
+ i++
+ }
+ }
+ r := NewVarSetOfSize(i)
for v := range s {
if vs.Contains(v) {
r.Add(v)
@@ -73,7 +95,7 @@ func (s VarSet) Intersect(vs VarSet) VarSet {
return r
}
-// Sorted returns a sorted slice of vars from s.
+// Sorted returns a new sorted slice of vars from s.
func (s VarSet) Sorted() []Var {
sorted := make([]Var, 0, len(s))
for v := range s {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/version_index.json b/vendor/github.com/open-policy-agent/opa/v1/ast/version_index.json
index b888b3e028..eecb68c772 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/version_index.json
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/version_index.json
@@ -497,6 +497,13 @@
"PreRelease": "",
"Metadata": ""
},
+ "internal.test_case": {
+ "Major": 1,
+ "Minor": 2,
+ "Patch": 0,
+ "PreRelease": "",
+ "Metadata": ""
+ },
"intersection": {
"Major": 0,
"Minor": 17,
diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/visit.go b/vendor/github.com/open-policy-agent/opa/v1/ast/visit.go
index 91cfa208e2..0115c4f455 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/ast/visit.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/ast/visit.go
@@ -362,7 +362,7 @@ func (vis *GenericVisitor) Walk(x interface{}) {
vis.Walk(x.Get(k))
}
case *Array:
- for i := 0; i < x.Len(); i++ {
+ for i := range x.Len() {
vis.Walk(x.Elem(i))
}
case Set:
diff --git a/vendor/github.com/open-policy-agent/opa/v1/bundle/bundle.go b/vendor/github.com/open-policy-agent/opa/v1/bundle/bundle.go
index 12f8bfb32c..7d7277de08 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/bundle/bundle.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/bundle/bundle.go
@@ -267,7 +267,7 @@ func (m Manifest) equalWasmResolversAndRoots(other Manifest) bool {
return false
}
- for i := 0; i < len(m.WasmResolvers); i++ {
+ for i := range len(m.WasmResolvers) {
if !m.WasmResolvers[i].Equal(&other.WasmResolvers[i]) {
return false
}
@@ -298,7 +298,7 @@ func (wr *WasmResolver) Equal(other *WasmResolver) bool {
return false
}
- for i := 0; i < annotLen; i++ {
+ for i := range annotLen {
if wr.Annotations[i].Compare(other.Annotations[i]) != 0 {
return false
}
@@ -333,7 +333,7 @@ func (m *Manifest) validateAndInjectDefaults(b Bundle) error {
roots[i] = strings.Trim(roots[i], "/")
}
- for i := 0; i < len(roots)-1; i++ {
+ for i := range len(roots) - 1 {
for j := i + 1; j < len(roots); j++ {
if RootPathsOverlap(roots[i], roots[j]) {
return fmt.Errorf("manifest has overlapped roots: '%v' and '%v'", roots[i], roots[j])
@@ -715,8 +715,11 @@ func (r *Reader) Read() (Bundle, error) {
popts.RegoVersion = bundle.RegoVersion(popts.EffectiveRegoVersion())
for _, mf := range modules {
modulePopts := popts
- if modulePopts.RegoVersion, err = bundle.RegoVersionForFile(mf.RelativePath, popts.EffectiveRegoVersion()); err != nil {
+ if regoVersion, err := bundle.RegoVersionForFile(mf.RelativePath, popts.EffectiveRegoVersion()); err != nil {
return bundle, err
+ } else if regoVersion != ast.RegoUndefined {
+ // We don't expect ast.RegoUndefined here, but if we do get it, don't override the configured rego-version, just to be extra protective
+ modulePopts.RegoVersion = regoVersion
}
r.metrics.Timer(metrics.RegoModuleParse).Start()
mf.Parsed, err = ast.ParseModuleWithOpts(mf.Path, string(mf.Raw), modulePopts)
@@ -729,19 +732,19 @@ func (r *Reader) Read() (Bundle, error) {
if bundle.Type() == DeltaBundleType {
if len(bundle.Data) != 0 {
- return bundle, fmt.Errorf("delta bundle expected to contain only patch file but data files found")
+ return bundle, errors.New("delta bundle expected to contain only patch file but data files found")
}
if len(bundle.Modules) != 0 {
- return bundle, fmt.Errorf("delta bundle expected to contain only patch file but policy files found")
+ return bundle, errors.New("delta bundle expected to contain only patch file but policy files found")
}
if len(bundle.WasmModules) != 0 {
- return bundle, fmt.Errorf("delta bundle expected to contain only patch file but wasm files found")
+ return bundle, errors.New("delta bundle expected to contain only patch file but wasm files found")
}
if r.persist {
- return bundle, fmt.Errorf("'persist' property is true in config. persisting delta bundle to disk is not supported")
+ return bundle, errors.New("'persist' property is true in config. persisting delta bundle to disk is not supported")
}
}
@@ -763,7 +766,7 @@ func (r *Reader) Read() (Bundle, error) {
for _, r := range bundle.Manifest.WasmResolvers {
epMap[r.Module] = append(epMap[r.Module], r.Entrypoint)
}
- for i := 0; i < len(bundle.WasmModules); i++ {
+ for i := range len(bundle.WasmModules) {
entrypoints := epMap[bundle.WasmModules[i].Path]
for _, entrypoint := range entrypoints {
ref, err := ast.PtrRef(ast.DefaultRootDocument, entrypoint)
@@ -816,12 +819,12 @@ func (r *Reader) checkSignaturesAndDescriptors(signatures SignaturesConfig) erro
}
if signatures.isEmpty() && r.verificationConfig != nil && r.verificationConfig.KeyID != "" {
- return fmt.Errorf("bundle missing .signatures.json file")
+ return errors.New("bundle missing .signatures.json file")
}
if !signatures.isEmpty() {
if r.verificationConfig == nil {
- return fmt.Errorf("verification key not provided")
+ return errors.New("verification key not provided")
}
// verify the JWT signatures included in the `.signatures.json` file
@@ -1204,10 +1207,6 @@ func (b *Bundle) SetRegoVersion(v ast.RegoVersion) {
// If there is no defined version for the given path, the default version def is returned.
// If the version does not correspond to ast.RegoV0 or ast.RegoV1, an error is returned.
func (b *Bundle) RegoVersionForFile(path string, def ast.RegoVersion) (ast.RegoVersion, error) {
- if def == ast.RegoUndefined {
- def = ast.DefaultRegoVersion
- }
-
version, err := b.Manifest.numericRegoVersionForFile(path)
if err != nil {
return def, err
@@ -1354,7 +1353,7 @@ func (b *Bundle) readData(key []string) *interface{} {
node := b.Data
- for i := 0; i < len(key)-1; i++ {
+ for i := range len(key) - 1 {
child, ok := node[key[i]]
if !ok {
@@ -1390,7 +1389,7 @@ func mktree(path []string, value interface{}) (map[string]interface{}, error) {
// For 0 length path the value is the full tree.
obj, ok := value.(map[string]interface{})
if !ok {
- return nil, fmt.Errorf("root value must be object")
+ return nil, errors.New("root value must be object")
}
return obj, nil
}
@@ -1513,7 +1512,7 @@ func bundleRegoVersions(bundle *Bundle, regoVersion ast.RegoVersion, usePath boo
return nil, err
}
// only record the rego version if it's different from one applied globally to the result bundle
- if v != regoVersion {
+ if regoVersion != ast.RegoUndefined && v != regoVersion {
// We store the rego version by the absolute path to the bundle root, as this will be the - possibly new - path
// to the module inside the merged bundle.
fileRegoVersions[bundleAbsolutePath(m, usePath)] = v.Int()
diff --git a/vendor/github.com/open-policy-agent/opa/v1/bundle/sign.go b/vendor/github.com/open-policy-agent/opa/v1/bundle/sign.go
index cf9a3e183a..710e296860 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/bundle/sign.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/bundle/sign.go
@@ -101,11 +101,9 @@ func generatePayload(files []FileInfo, sc *SigningConfig, keyID string) ([]byte,
for claim, value := range claims {
payload[claim] = value
}
- } else {
- if keyID != "" {
- // keyid claim is deprecated but include it for backwards compatibility.
- payload["keyid"] = keyID
- }
+ } else if keyID != "" {
+ // keyid claim is deprecated but include it for backwards compatibility.
+ payload["keyid"] = keyID
}
return json.Marshal(payload)
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/bundle/store.go b/vendor/github.com/open-policy-agent/opa/v1/bundle/store.go
index e77c052d9b..363f7664d7 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/bundle/store.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/bundle/store.go
@@ -8,6 +8,7 @@ import (
"context"
"encoding/base64"
"encoding/json"
+ "errors"
"fmt"
"path/filepath"
"strings"
@@ -94,7 +95,7 @@ func ReadBundleNamesFromStore(ctx context.Context, store storage.Store, txn stor
bundleMap, ok := value.(map[string]interface{})
if !ok {
- return nil, fmt.Errorf("corrupt manifest roots")
+ return nil, errors.New("corrupt manifest roots")
}
bundles := make([]string, len(bundleMap))
@@ -196,14 +197,14 @@ func ReadWasmMetadataFromStore(ctx context.Context, store storage.Store, txn sto
bs, err := json.Marshal(value)
if err != nil {
- return nil, fmt.Errorf("corrupt wasm manifest data")
+ return nil, errors.New("corrupt wasm manifest data")
}
var wasmMetadata []WasmResolver
err = util.UnmarshalJSON(bs, &wasmMetadata)
if err != nil {
- return nil, fmt.Errorf("corrupt wasm manifest data")
+ return nil, errors.New("corrupt wasm manifest data")
}
return wasmMetadata, nil
@@ -219,14 +220,14 @@ func ReadWasmModulesFromStore(ctx context.Context, store storage.Store, txn stor
encodedModules, ok := value.(map[string]interface{})
if !ok {
- return nil, fmt.Errorf("corrupt wasm modules")
+ return nil, errors.New("corrupt wasm modules")
}
rawModules := map[string][]byte{}
for path, enc := range encodedModules {
encStr, ok := enc.(string)
if !ok {
- return nil, fmt.Errorf("corrupt wasm modules")
+ return nil, errors.New("corrupt wasm modules")
}
bs, err := base64.StdEncoding.DecodeString(encStr)
if err != nil {
@@ -248,7 +249,7 @@ func ReadBundleRootsFromStore(ctx context.Context, store storage.Store, txn stor
sl, ok := value.([]interface{})
if !ok {
- return nil, fmt.Errorf("corrupt manifest roots")
+ return nil, errors.New("corrupt manifest roots")
}
roots := make([]string, len(sl))
@@ -256,7 +257,7 @@ func ReadBundleRootsFromStore(ctx context.Context, store storage.Store, txn stor
for i := range sl {
roots[i], ok = sl[i].(string)
if !ok {
- return nil, fmt.Errorf("corrupt manifest root")
+ return nil, errors.New("corrupt manifest root")
}
}
@@ -278,7 +279,7 @@ func readRevisionFromStore(ctx context.Context, store storage.Store, txn storage
str, ok := value.(string)
if !ok {
- return "", fmt.Errorf("corrupt manifest revision")
+ return "", errors.New("corrupt manifest revision")
}
return str, nil
@@ -299,7 +300,7 @@ func readMetadataFromStore(ctx context.Context, store storage.Store, txn storage
data, ok := value.(map[string]interface{})
if !ok {
- return nil, fmt.Errorf("corrupt manifest metadata")
+ return nil, errors.New("corrupt manifest metadata")
}
return data, nil
@@ -320,7 +321,7 @@ func readEtagFromStore(ctx context.Context, store storage.Store, txn storage.Tra
str, ok := value.(string)
if !ok {
- return "", fmt.Errorf("corrupt bundle etag")
+ return "", errors.New("corrupt bundle etag")
}
return str, nil
@@ -446,7 +447,7 @@ func activateBundles(opts *ActivateOpts) error {
p := getNormalizedPath(path)
if len(p) == 0 {
- return fmt.Errorf("root value must be object")
+ return errors.New("root value must be object")
}
// verify valid YAML or JSON value
@@ -716,7 +717,7 @@ func readModuleInfoFromStore(ctx context.Context, store storage.Store, txn stora
if vs, ok := ver.(json.Number); ok {
i, err := vs.Int64()
if err != nil {
- return nil, fmt.Errorf("corrupt rego version")
+ return nil, errors.New("corrupt rego version")
}
versions[k] = moduleInfo{RegoVersion: ast.RegoVersionFromInt(int(i))}
}
@@ -726,7 +727,7 @@ func readModuleInfoFromStore(ctx context.Context, store storage.Store, txn stora
return versions, nil
}
- return nil, fmt.Errorf("corrupt rego version")
+ return nil, errors.New("corrupt rego version")
}
func erasePolicies(ctx context.Context, store storage.Store, txn storage.Transaction, parserOpts ast.ParserOptions, roots map[string]struct{}) (map[string]*ast.Module, []string, error) {
@@ -826,7 +827,7 @@ func writeModuleRegoVersionToStore(ctx context.Context, store storage.Store, txn
if regoVersion == ast.RegoUndefined {
var err error
- regoVersion, err = b.RegoVersionForFile(mf.Path, ast.RegoUndefined)
+ regoVersion, err = b.RegoVersionForFile(mf.Path, runtimeRegoVersion)
if err != nil {
return fmt.Errorf("failed to get rego version for module '%s' in bundle: %w", mf.Path, err)
}
@@ -1019,7 +1020,7 @@ func lookup(path storage.Path, data map[string]interface{}) (interface{}, bool)
if len(path) == 0 {
return data, true
}
- for i := 0; i < len(path)-1; i++ {
+ for i := range len(path) - 1 {
value, ok := data[path[i]]
if !ok {
return nil, false
@@ -1093,7 +1094,7 @@ func applyPatches(ctx context.Context, store storage.Store, txn storage.Transact
// construct patch path
path, ok := patch.ParsePatchPathEscaped("/" + strings.Trim(pat.Path, "/"))
if !ok {
- return fmt.Errorf("error parsing patch path")
+ return errors.New("error parsing patch path")
}
var op storage.PatchOp
diff --git a/vendor/github.com/open-policy-agent/opa/v1/bundle/verify.go b/vendor/github.com/open-policy-agent/opa/v1/bundle/verify.go
index 2a4bb02c05..0645d3aafb 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/bundle/verify.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/bundle/verify.go
@@ -10,6 +10,7 @@ import (
"encoding/base64"
"encoding/hex"
"encoding/json"
+ "errors"
"fmt"
"github.com/open-policy-agent/opa/internal/jwx/jwa"
@@ -60,11 +61,11 @@ func (*DefaultVerifier) VerifyBundleSignature(sc SignaturesConfig, bvc *Verifica
files := make(map[string]FileInfo)
if len(sc.Signatures) == 0 {
- return files, fmt.Errorf(".signatures.json: missing JWT (expected exactly one)")
+ return files, errors.New(".signatures.json: missing JWT (expected exactly one)")
}
if len(sc.Signatures) > 1 {
- return files, fmt.Errorf(".signatures.json: multiple JWTs not supported (expected exactly one)")
+ return files, errors.New(".signatures.json: multiple JWTs not supported (expected exactly one)")
}
for _, token := range sc.Signatures {
@@ -120,7 +121,7 @@ func verifyJWTSignature(token string, bvc *VerificationConfig) (*DecodedSignatur
}
if keyID == "" {
- return nil, fmt.Errorf("verification key ID is empty")
+ return nil, errors.New("verification key ID is empty")
}
// now that we have the keyID, fetch the actual key
@@ -148,7 +149,7 @@ func verifyJWTSignature(token string, bvc *VerificationConfig) (*DecodedSignatur
}
if ds.Scope != scope {
- return nil, fmt.Errorf("scope mismatch")
+ return nil, errors.New("scope mismatch")
}
return &ds, nil
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/config/config.go b/vendor/github.com/open-policy-agent/opa/v1/config/config.go
index 09adb556f8..490f90b905 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/config/config.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/config/config.go
@@ -7,6 +7,7 @@ package config
import (
"encoding/json"
+ "errors"
"fmt"
"os"
"path/filepath"
@@ -98,7 +99,7 @@ func (c Config) PluginNames() (result []string) {
// PluginsEnabled returns true if one or more plugin features are enabled.
//
-// Deprecated. Use PluginNames instead.
+// Deprecated: Use PluginNames instead.
func (c Config) PluginsEnabled() bool {
return c.Bundle != nil || c.Bundles != nil || c.DecisionLogs != nil || c.Status != nil || len(c.Plugins) > 0
}
@@ -243,7 +244,7 @@ func removeCryptoKeys(x interface{}) error {
func removeKey(x interface{}, keys ...string) error {
val, ok := x.(map[string]interface{})
if !ok {
- return fmt.Errorf("type assertion error")
+ return errors.New("type assertion error")
}
for _, key := range keys {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/format/format.go b/vendor/github.com/open-policy-agent/opa/v1/format/format.go
index e86964d1b4..ed5770d83b 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/format/format.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/format/format.go
@@ -9,6 +9,7 @@ import (
"bytes"
"fmt"
"regexp"
+ "slices"
"sort"
"strings"
"unicode"
@@ -62,12 +63,10 @@ func SourceWithOpts(filename string, src []byte, opts Opts) ([]byte, error) {
var parserOpts ast.ParserOptions
if opts.ParserOptions != nil {
parserOpts = *opts.ParserOptions
- } else {
- if regoVersion == ast.RegoV1 {
- // If the rego version is V1, we need to parse it as such, to allow for future keywords not being imported.
- // Otherwise, we'll default to the default rego-version.
- parserOpts.RegoVersion = ast.RegoV1
- }
+ } else if regoVersion == ast.RegoV1 {
+ // If the rego version is V1, we need to parse it as such, to allow for future keywords not being imported.
+ // Otherwise, we'll default to the default rego-version.
+ parserOpts.RegoVersion = ast.RegoV1
}
if parserOpts.RegoVersion == ast.RegoUndefined {
@@ -179,6 +178,9 @@ func AstWithOpts(x interface{}, opts Opts) ([]byte, error) {
o.contains = true
}
+ memberRef := ast.Member.Ref()
+ memberWithKeyRef := ast.MemberWithKey.Ref()
+
// Preprocess the AST. Set any required defaults and calculate
// values required for printing the formatted output.
ast.WalkNodes(x, func(x ast.Node) bool {
@@ -192,7 +194,7 @@ func AstWithOpts(x interface{}, opts Opts) ([]byte, error) {
case *ast.Expr:
switch {
- case n.IsCall() && ast.Member.Ref().Equal(n.Operator()) || ast.MemberWithKey.Ref().Equal(n.Operator()):
+ case n.IsCall() && memberRef.Equal(n.Operator()) || memberWithKeyRef.Equal(n.Operator()):
extraFutureKeywordImports["in"] = struct{}{}
case n.IsEvery():
extraFutureKeywordImports["every"] = struct{}{}
@@ -421,7 +423,7 @@ func (w *writer) writePackage(pkg *ast.Package, comments []*ast.Comment) []*ast.
}
func (w *writer) writeComments(comments []*ast.Comment) {
- for i := 0; i < len(comments); i++ {
+ for i := range comments {
if i > 0 && locCmp(comments[i], comments[i-1]) > 1 {
w.blankLine()
}
@@ -438,6 +440,8 @@ func (w *writer) writeRules(rules []*ast.Rule, comments []*ast.Comment) []*ast.C
return comments
}
+var expandedConst = ast.NewBody(ast.NewExpr(ast.InternedBooleanTerm(true)))
+
func (w *writer) writeRule(rule *ast.Rule, isElse bool, comments []*ast.Comment) []*ast.Comment {
if rule == nil {
return comments
@@ -455,7 +459,7 @@ func (w *writer) writeRule(rule *ast.Rule, isElse bool, comments []*ast.Comment)
// `foo = {"a": "b"} { true }` in the AST. We want to preserve that notation
// in the formatted code instead of expanding the bodies into rules, so we
// pretend that the rule has no body in this case.
- isExpandedConst := rule.Body.Equal(ast.NewBody(ast.NewExpr(ast.BooleanTerm(true)))) && rule.Else == nil
+ isExpandedConst := rule.Body.Equal(expandedConst) && rule.Else == nil
comments = w.writeHead(rule.Head, rule.Default, isExpandedConst, comments)
@@ -508,6 +512,8 @@ func (w *writer) writeRule(rule *ast.Rule, isElse bool, comments []*ast.Comment)
return comments
}
+var elseVar ast.Value = ast.Var("else")
+
func (w *writer) writeElse(rule *ast.Rule, comments []*ast.Comment) []*ast.Comment {
// If there was nothing else on the line before the "else" starts
// then preserve this style of else block, otherwise it will be
@@ -554,7 +560,7 @@ func (w *writer) writeElse(rule *ast.Rule, comments []*ast.Comment) []*ast.Comme
rule.Else.Head.Name = "else" // NOTE(sr): whaaat
- elseHeadReference := ast.VarTerm("else") // construct a reference for the term
+ elseHeadReference := ast.NewTerm(elseVar) // construct a reference for the term
elseHeadReference.Location = rule.Else.Head.Location // and set the location to match the rule location
rule.Else.Head.Reference = ast.Ref{elseHeadReference}
@@ -612,7 +618,7 @@ func (w *writer) writeHead(head *ast.Head, isDefault, isExpandedConst bool, comm
}
if head.Value != nil &&
- (head.Key != nil || ast.Compare(head.Value, ast.BooleanTerm(true)) != 0 || isExpandedConst || isDefault) {
+ (head.Key != nil || !ast.InternedBooleanTerm(true).Equal(head.Value) || isExpandedConst || isDefault) {
// in rego v1, explicitly print value for ref-head constants that aren't partial set assignments, e.g.:
// * a -> parser error, won't reach here
@@ -623,7 +629,7 @@ func (w *writer) writeHead(head *ast.Head, isDefault, isExpandedConst bool, comm
if head.Location == head.Value.Location &&
head.Name != "else" &&
- ast.Compare(head.Value, ast.BooleanTerm(true)) == 0 &&
+ ast.InternedBooleanTerm(true).Equal(head.Value) &&
!isRegoV1RefConst {
// If the value location is the same as the location of the head,
// we know that the value is generated, i.e. f(1)
@@ -1115,11 +1121,7 @@ func (w *writer) writeImports(imports []*ast.Import, comments []*ast.Comment) []
comments = w.insertComments(comments, group[0].Loc())
// Sort imports within a newline grouping.
- sort.Slice(group, func(i, j int) bool {
- a := group[i]
- b := group[j]
- return a.Compare(b) < 0
- })
+ slices.SortFunc(group, (*ast.Import).Compare)
for _, i := range group {
w.startLine()
w.writeImport(i)
@@ -1277,9 +1279,8 @@ func groupIterable(elements []interface{}, last *ast.Location) [][]interface{} {
return [][]interface{}{elements}
}
}
- sort.Slice(elements, func(i, j int) bool {
- return locLess(elements[i], elements[j])
- })
+
+ slices.SortFunc(elements, locCmp)
var lines [][]interface{}
cur := make([]interface{}, 0, len(elements))
@@ -1351,7 +1352,30 @@ func groupImports(imports []*ast.Import) [][]*ast.Import {
return groups
}
-func partitionComments(comments []*ast.Comment, l *ast.Location) (before []*ast.Comment, at *ast.Comment, after []*ast.Comment) {
+func partitionComments(comments []*ast.Comment, l *ast.Location) ([]*ast.Comment, *ast.Comment, []*ast.Comment) {
+ if len(comments) == 0 {
+ return nil, nil, nil
+ }
+
+ numBefore, numAfter := 0, 0
+ for _, c := range comments {
+ switch cmp := c.Location.Row - l.Row; {
+ case cmp < 0:
+ numBefore++
+ case cmp > 0:
+ numAfter++
+ }
+ }
+
+ if numAfter == len(comments) {
+ return nil, nil, comments
+ }
+
+ var at *ast.Comment
+
+ before := make([]*ast.Comment, 0, numBefore)
+ after := comments[0 : 0 : len(comments)-numBefore]
+
for _, c := range comments {
switch cmp := c.Location.Row - l.Row; {
case cmp < 0:
@@ -1430,6 +1454,8 @@ func getLoc(x interface{}) *ast.Location {
}
}
+var negativeRow = &ast.Location{Row: -1}
+
func closingLoc(skipOpen, skipClose, openChar, closeChar byte, loc *ast.Location) *ast.Location {
i, offset := 0, 0
@@ -1445,14 +1471,14 @@ func closingLoc(skipOpen, skipClose, openChar, closeChar byte, loc *ast.Location
}
if i >= len(loc.Text) {
- return &ast.Location{Row: -1}
+ return negativeRow
}
state := 1
for state > 0 {
i++
if i >= len(loc.Text) {
- return &ast.Location{Row: -1}
+ return negativeRow
}
switch loc.Text[i] {
@@ -1500,7 +1526,7 @@ func skipPast(openChar, closeChar byte, loc *ast.Location) (int, int) {
// startLine begins a line with the current indentation level.
func (w *writer) startLine() {
w.inline = true
- for i := 0; i < w.level; i++ {
+ for range w.level {
w.write(w.indent)
}
}
@@ -1636,7 +1662,7 @@ func ArityFormatMismatchError(operands []*ast.Term, operator string, loc *ast.Lo
}
have := make([]string, len(operands))
- for i := 0; i < len(operands); i++ {
+ for i := range operands {
have[i] = ast.ValueName(operands[i].Value)
}
err := ast.NewError(ast.TypeErr, loc, "%s: %s", operator, "arity mismatch")
@@ -1650,8 +1676,8 @@ func ArityFormatMismatchError(operands []*ast.Term, operator string, loc *ast.Lo
// Lines returns the string representation of the detail.
func (d *ArityFormatErrDetail) Lines() []string {
return []string{
- "have: " + "(" + strings.Join(d.Have, ",") + ")",
- "want: " + "(" + strings.Join(d.Want, ",") + ")",
+ "have: (" + strings.Join(d.Have, ",") + ")",
+ "want: (" + strings.Join(d.Want, ",") + ")",
}
}
@@ -1664,10 +1690,12 @@ func moduleIsRegoV1Compatible(m *ast.Module) bool {
return false
}
+var v1StringTerm = ast.StringTerm("v1")
+
// isRegoV1Compatible returns true if the passed *ast.Import is `rego.v1`
func isRegoV1Compatible(imp *ast.Import) bool {
path := imp.Path.Value.(ast.Ref)
return len(path) == 2 &&
ast.RegoRootDocument.Equal(path[0]) &&
- path[1].Equal(ast.StringTerm("v1"))
+ path[1].Equal(v1StringTerm)
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/metrics/metrics.go b/vendor/github.com/open-policy-agent/opa/v1/metrics/metrics.go
index 53cd606a36..eaf0d99593 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/metrics/metrics.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/metrics/metrics.go
@@ -8,7 +8,7 @@ package metrics
import (
"encoding/json"
"fmt"
- "sort"
+ "slices"
"strings"
"sync"
"sync/atomic"
@@ -94,8 +94,8 @@ func (m *metrics) String() string {
})
}
- sort.Slice(sorted, func(i, j int) bool {
- return sorted[i].Key < sorted[j].Key
+ slices.SortFunc(sorted, func(a, b metric) int {
+ return strings.Compare(a.Key, b.Key)
})
buf := make([]string, len(sorted))
diff --git a/vendor/github.com/open-policy-agent/opa/v1/plugins/plugins.go b/vendor/github.com/open-policy-agent/opa/v1/plugins/plugins.go
index c9b99ab28b..7e8b900bfc 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/plugins/plugins.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/plugins/plugins.go
@@ -448,6 +448,11 @@ func New(raw []byte, id string, store storage.Store, opts ...func(*Manager)) (*M
f(m)
}
+ if m.parserOptions.RegoVersion == ast.RegoUndefined {
+ // Default to v1 if rego-version is not set through options
+ m.parserOptions.RegoVersion = ast.DefaultRegoVersion
+ }
+
if m.logger == nil {
m.logger = logging.Get()
}
@@ -480,13 +485,7 @@ func New(raw []byte, id string, store storage.Store, opts ...func(*Manager)) (*M
return nil, err
}
- serviceOpts := cfg.ServiceOptions{
- Raw: parsedConfig.Services,
- AuthPlugin: m.AuthPlugin,
- Keys: m.keys,
- Logger: m.logger,
- DistributedTacingOpts: m.distributedTacingOpts,
- }
+ serviceOpts := m.DefaultServiceOpts(parsedConfig)
m.services, err = cfg.ParseServicesConfig(serviceOpts)
if err != nil {
@@ -502,8 +501,8 @@ func New(raw []byte, id string, store storage.Store, opts ...func(*Manager)) (*M
m.reporter.RegisterGatherer("min_compatible_version", func(_ context.Context) (any, error) {
var minimumCompatibleVersion string
- if m.compiler != nil && m.compiler.Required != nil {
- minimumCompatibleVersion, _ = m.compiler.Required.MinimumCompatibleVersion()
+ if c := m.GetCompiler(); c != nil && c.Required != nil {
+ minimumCompatibleVersion, _ = c.Required.MinimumCompatibleVersion()
}
return minimumCompatibleVersion, nil
})
@@ -755,14 +754,19 @@ func (m *Manager) Stop(ctx context.Context) {
}
}
-// Reconfigure updates the configuration on the manager.
-func (m *Manager) Reconfigure(config *config.Config) error {
- opts := cfg.ServiceOptions{
+func (m *Manager) DefaultServiceOpts(config *config.Config) cfg.ServiceOptions {
+ return cfg.ServiceOptions{
Raw: config.Services,
AuthPlugin: m.AuthPlugin,
Logger: m.logger,
+ Keys: m.keys,
DistributedTacingOpts: m.distributedTacingOpts,
}
+}
+
+// Reconfigure updates the configuration on the manager.
+func (m *Manager) Reconfigure(config *config.Config) error {
+ opts := m.DefaultServiceOpts(config)
keys, err := keys.ParseKeysConfig(config.Keys)
if err != nil {
@@ -799,7 +803,7 @@ func (m *Manager) Reconfigure(config *config.Config) error {
m.Config = config
m.interQueryBuiltinCacheConfig = interQueryBuiltinCacheConfig
- for name, client := range services {
+ for name, client := range services { //nolint:gocritic
m.services[name] = client
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/auth.go b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/auth.go
index 964630fa2f..abd391f015 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/auth.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/auth.go
@@ -126,10 +126,14 @@ type bearerAuthPlugin struct {
// encode is set to true for the OCIDownloader because
// it expects tokens in plain text but needs them in base64.
encode bool
+ logger logging.Logger
}
func (ap *bearerAuthPlugin) NewClient(c Config) (*http.Client, error) {
t, err := DefaultTLSConfig(c)
+
+ ap.logger = c.logger
+
if err != nil {
return nil, err
}
@@ -153,6 +157,9 @@ func (ap *bearerAuthPlugin) NewClient(c Config) (*http.Client, error) {
func (ap *bearerAuthPlugin) Prepare(req *http.Request) error {
token := ap.Token
+ if ap.logger == nil {
+ ap.logger = logging.Get()
+ }
if ap.TokenPath != "" {
bytes, err := os.ReadFile(ap.TokenPath)
@@ -166,7 +173,12 @@ func (ap *bearerAuthPlugin) Prepare(req *http.Request) error {
token = base64.StdEncoding.EncodeToString([]byte(token))
}
- req.Header.Add("Authorization", fmt.Sprintf("%v %v", ap.Scheme, token))
+ if req.Response != nil && (req.Response.StatusCode == http.StatusPermanentRedirect || req.Response.StatusCode == http.StatusTemporaryRedirect) {
+ ap.logger.Debug("not attaching authorization header as the response contains a redirect")
+ } else {
+ ap.logger.Debug("attaching authorization header")
+ req.Header.Add("Authorization", fmt.Sprintf("%v %v", ap.Scheme, token))
+ }
return nil
}
@@ -194,7 +206,7 @@ func convertSignatureToBase64(alg string, der []byte) (string, error) {
return signatureData, nil
}
-func pointsFromDER(der []byte) (R, S *big.Int, err error) {
+func pointsFromDER(der []byte) (R, S *big.Int, err error) { //nolint:gocritic
R, S = &big.Int{}, &big.Int{}
data := asn1.RawValue{}
if _, err := asn1.Unmarshal(der, &data); err != nil {
@@ -382,12 +394,7 @@ func (ap *oauth2ClientCredentialsAuthPlugin) SignWithKMS(ctx context.Context, pa
encodedHdr := base64.RawURLEncoding.EncodeToString(hdrBuf)
encodedPayload := base64.RawURLEncoding.EncodeToString(payload)
- input := strings.Join(
- []string{
- encodedHdr,
- encodedPayload,
- }, ".",
- )
+ input := encodedHdr + "." + encodedPayload
digest, err := messageDigest([]byte(input), ap.AWSKmsKey.Algorithm)
if err != nil {
return nil, err
@@ -616,7 +623,7 @@ func (ap *oauth2ClientCredentialsAuthPlugin) requestToken(ctx context.Context) (
return nil, err
}
- if strings.ToLower(tokenResponse.TokenType) != "bearer" {
+ if !strings.EqualFold(tokenResponse.TokenType, "bearer") {
return nil, errors.New("unknown token type returned from token endpoint")
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/aws.go b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/aws.go
index 133df80996..defae62be0 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/aws.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/aws.go
@@ -678,7 +678,7 @@ func (ap *ecrAuthPlugin) Prepare(r *http.Request) error {
ap.logger.Debug("Signing request with ECR authorization token")
- r.Header.Set("Authorization", fmt.Sprintf("Basic %s", ap.token.AuthorizationToken))
+ r.Header.Set("Authorization", "Basic "+ap.token.AuthorizationToken)
return nil
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/rest.go b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/rest.go
index fea351557b..e5d8e0f0d6 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/rest.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/rest.go
@@ -12,6 +12,7 @@ import (
"errors"
"fmt"
"io"
+ "maps"
"net/http"
"net/http/httputil"
"reflect"
@@ -94,7 +95,7 @@ func (c *Config) AuthPlugin(lookup AuthPluginLookupFunc) (HTTPAuthPlugin, error)
}
// reflection avoids need for this code to change as auth plugins are added
s := reflect.ValueOf(c.Credentials)
- for i := 0; i < s.NumField(); i++ {
+ for i := range s.NumField() {
if s.Field(i).IsNil() {
continue
}
@@ -293,7 +294,7 @@ func (c Client) Do(ctx context.Context, method, path string) (*http.Response, er
}
url := c.config.URL + "/" + path
- req, err := http.NewRequest(method, url, body)
+ req, err := http.NewRequestWithContext(ctx, method, url, body)
if err != nil {
return nil, err
}
@@ -303,23 +304,16 @@ func (c Client) Do(ctx context.Context, method, path string) (*http.Response, er
}
// Copy custom headers from config.
- for key, value := range c.config.Headers {
- headers[key] = value
- }
+ maps.Copy(headers, c.config.Headers)
// Overwrite with headers set directly on client.
- for key, value := range c.headers {
- headers[key] = value
- }
+ maps.Copy(headers, c.headers)
for key, value := range headers {
req.Header.Add(key, value)
}
- req = req.WithContext(ctx)
-
- err = c.config.authPrepare(req, c.authPluginLookup)
- if err != nil {
+ if err = c.config.authPrepare(req, c.authPluginLookup); err != nil {
return nil, err
}
@@ -347,7 +341,7 @@ func (c Client) Do(ctx context.Context, method, path string) (*http.Response, er
return nil, err
}
- if len(string(dump)) < defaultResponseSizeLimitBytes {
+ if len(dump) < defaultResponseSizeLimitBytes {
c.loggerFields["response"] = string(dump)
} else {
c.loggerFields["response"] = fmt.Sprintf("%v...", string(dump[:defaultResponseSizeLimitBytes]))
diff --git a/vendor/github.com/open-policy-agent/opa/v1/rego/rego.go b/vendor/github.com/open-policy-agent/opa/v1/rego/rego.go
index ef930a2cf7..64b3ef5963 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/rego/rego.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/rego/rego.go
@@ -11,6 +11,7 @@ import (
"errors"
"fmt"
"io"
+ "maps"
"strings"
"time"
@@ -78,12 +79,8 @@ func (pr PartialResult) Rego(options ...func(*Rego)) *Rego {
r := New(options...)
// Propagate any custom builtins.
- for k, v := range pr.builtinDecls {
- r.builtinDecls[k] = v
- }
- for k, v := range pr.builtinFuncs {
- r.builtinFuncs[k] = v
- }
+ maps.Copy(r.builtinDecls, pr.builtinDecls)
+ maps.Copy(r.builtinFuncs, pr.builtinFuncs)
return r
}
@@ -128,6 +125,7 @@ type EvalContext struct {
capabilities *ast.Capabilities
strictBuiltinErrors bool
virtualCache topdown.VirtualCache
+ baseCache topdown.BaseCache
}
func (e *EvalContext) RawInput() *interface{} {
@@ -365,14 +363,22 @@ func EvalPrintHook(ph print.Hook) EvalOption {
}
}
-// EvalVirtualCache sets the topdown.VirtualCache to use for evaluation. This is
-// optional, and if not set, the default cache is used.
+// EvalVirtualCache sets the topdown.VirtualCache to use for evaluation.
+// This is optional, and if not set, the default cache is used.
func EvalVirtualCache(vc topdown.VirtualCache) EvalOption {
return func(e *EvalContext) {
e.virtualCache = vc
}
}
+// EvalBaseCache sets the topdown.BaseCache to use for evaluation.
+// This is optional, and if not set, the default cache is used.
+func EvalBaseCache(bc topdown.BaseCache) EvalOption {
+ return func(e *EvalContext) {
+ e.baseCache = bc
+ }
+}
+
// EvalNondeterministicBuiltins causes non-deterministic builtins to be evalued
// during partial evaluation. This is needed to pull in external data, or validate
// a JWT, during PE, so that the result informs what queries are returned.
@@ -825,7 +831,7 @@ func memoize(decl *Function, bctx BuiltinContext, terms []*ast.Term, ifEmpty fun
// The term slice _may_ include an output term depending on how the caller
// referred to the built-in function. Only use the arguments as the cache
// key. Unification ensures we don't get false positive matches.
- for i := 0; i < decl.Decl.Arity(); i++ {
+ for i := range decl.Decl.Arity() {
if _, err := b.WriteString(terms[i].String()); err != nil {
return nil, err
}
@@ -1570,7 +1576,7 @@ func (r *Rego) Compile(ctx context.Context, opts ...CompileOption) (*CompileResu
}
if tgt := r.targetPlugin(r.target); tgt != nil {
- return nil, fmt.Errorf("unsupported for rego target plugins")
+ return nil, errors.New("unsupported for rego target plugins")
}
return r.compileWasm(modules, queries, compileQueryType) // TODO(sr) control flow is funky here
@@ -1630,10 +1636,9 @@ func WithNoInline(paths []string) PrepareOption {
func WithBuiltinFuncs(bis map[string]*topdown.Builtin) PrepareOption {
return func(p *PrepareConfig) {
if p.builtinFuncs == nil {
- p.builtinFuncs = make(map[string]*topdown.Builtin, len(bis))
- }
- for k, v := range bis {
- p.builtinFuncs[k] = v
+ p.builtinFuncs = maps.Clone(bis)
+ } else {
+ maps.Copy(p.builtinFuncs, bis)
}
}
}
@@ -1648,7 +1653,7 @@ func (p *PrepareConfig) BuiltinFuncs() map[string]*topdown.Builtin {
// of evaluating them.
func (r *Rego) PrepareForEval(ctx context.Context, opts ...PrepareOption) (PreparedEvalQuery, error) {
if !r.hasQuery() {
- return PreparedEvalQuery{}, fmt.Errorf("cannot evaluate empty query")
+ return PreparedEvalQuery{}, errors.New("cannot evaluate empty query")
}
pCfg := &PrepareConfig{}
@@ -1702,7 +1707,7 @@ func (r *Rego) PrepareForEval(ctx context.Context, opts ...PrepareOption) (Prepa
if r.hasWasmModule() {
_ = txnClose(ctx, err) // Ignore error
- return PreparedEvalQuery{}, fmt.Errorf("wasm target not supported")
+ return PreparedEvalQuery{}, errors.New("wasm target not supported")
}
var modules []*ast.Module
@@ -1767,7 +1772,7 @@ func (r *Rego) PrepareForEval(ctx context.Context, opts ...PrepareOption) (Prepa
// of partially evaluating them.
func (r *Rego) PrepareForPartial(ctx context.Context, opts ...PrepareOption) (PreparedPartialQuery, error) {
if !r.hasQuery() {
- return PreparedPartialQuery{}, fmt.Errorf("cannot evaluate empty query")
+ return PreparedPartialQuery{}, errors.New("cannot evaluate empty query")
}
pCfg := &PrepareConfig{}
@@ -2183,7 +2188,8 @@ func (r *Rego) eval(ctx context.Context, ectx *EvalContext) (ResultSet, error) {
WithSeed(ectx.seed).
WithPrintHook(ectx.printHook).
WithDistributedTracingOpts(r.distributedTacingOpts).
- WithVirtualCache(ectx.virtualCache)
+ WithVirtualCache(ectx.virtualCache).
+ WithBaseCache(ectx.baseCache)
if !ectx.time.IsZero() {
q = q.WithTime(ectx.time)
@@ -2270,7 +2276,7 @@ func (r *Rego) evalWasm(ctx context.Context, ectx *EvalContext) (ResultSet, erro
func (r *Rego) valueToQueryResult(res ast.Value, ectx *EvalContext) (ResultSet, error) {
resultSet, ok := res.(ast.Set)
if !ok {
- return nil, fmt.Errorf("illegal result type")
+ return nil, errors.New("illegal result type")
}
if resultSet.Len() == 0 {
@@ -2281,7 +2287,7 @@ func (r *Rego) valueToQueryResult(res ast.Value, ectx *EvalContext) (ResultSet,
err := resultSet.Iter(func(term *ast.Term) error {
obj, ok := term.Value.(ast.Object)
if !ok {
- return fmt.Errorf("illegal result type")
+ return errors.New("illegal result type")
}
qr := topdown.QueryResult{}
obj.Foreach(func(k, v *ast.Term) {
@@ -2391,7 +2397,7 @@ func (r *Rego) partialResult(ctx context.Context, pCfg *PrepareConfig) (PartialR
module, err := ast.ParseModuleWithOpts(id, "package "+ectx.partialNamespace,
ast.ParserOptions{RegoVersion: r.regoVersion})
if err != nil {
- return PartialResult{}, fmt.Errorf("bad partial namespace")
+ return PartialResult{}, errors.New("bad partial namespace")
}
module.Rules = make([]*ast.Rule, len(pq.Queries))
@@ -2611,12 +2617,12 @@ func (r *Rego) rewriteQueryToCaptureValue(_ ast.QueryCompiler, query ast.Body) (
func (r *Rego) rewriteQueryForPartialEval(_ ast.QueryCompiler, query ast.Body) (ast.Body, error) {
if len(query) != 1 {
- return nil, fmt.Errorf("partial evaluation requires single ref (not multiple expressions)")
+ return nil, errors.New("partial evaluation requires single ref (not multiple expressions)")
}
term, ok := query[0].Terms.(*ast.Term)
if !ok {
- return nil, fmt.Errorf("partial evaluation requires ref (not expression)")
+ return nil, errors.New("partial evaluation requires ref (not expression)")
}
ref, ok := term.Value.(ast.Ref)
@@ -2625,7 +2631,7 @@ func (r *Rego) rewriteQueryForPartialEval(_ ast.QueryCompiler, query ast.Body) (
}
if !ref.IsGround() {
- return nil, fmt.Errorf("partial evaluation requires ground ref")
+ return nil, errors.New("partial evaluation requires ground ref")
}
return ast.NewBody(ast.Equality.Expr(ast.Wildcard, term)), nil
@@ -2844,17 +2850,26 @@ func parseStringsToRefs(s []string) ([]ast.Ref, error) {
func finishFunction(name string, bctx topdown.BuiltinContext, result *ast.Term, err error, iter func(*ast.Term) error) error {
if err != nil {
var e *HaltError
+ sb := strings.Builder{}
if errors.As(err, &e) {
+ sb.Grow(len(name) + len(e.Error()) + 2)
+ sb.WriteString(name)
+ sb.WriteString(": ")
+ sb.WriteString(e.Error())
tdErr := &topdown.Error{
Code: topdown.BuiltinErr,
- Message: fmt.Sprintf("%v: %v", name, e.Error()),
+ Message: sb.String(),
Location: bctx.Location,
}
return topdown.Halt{Err: tdErr.Wrap(e)}
}
+ sb.Grow(len(name) + len(err.Error()) + 2)
+ sb.WriteString(name)
+ sb.WriteString(": ")
+ sb.WriteString(err.Error())
tdErr := &topdown.Error{
Code: topdown.BuiltinErr,
- Message: fmt.Sprintf("%v: %v", name, err.Error()),
+ Message: sb.String(),
Location: bctx.Location,
}
return tdErr.Wrap(err)
@@ -2895,14 +2910,8 @@ func (r *Rego) planQuery(queries []ast.Body, evalQueryType queryType) (*ir.Polic
}
decls := make(map[string]*ast.Builtin, len(r.builtinDecls)+len(ast.BuiltinMap))
-
- for k, v := range ast.BuiltinMap {
- decls[k] = v
- }
-
- for k, v := range r.builtinDecls {
- decls[k] = v
- }
+ maps.Copy(decls, ast.BuiltinMap)
+ maps.Copy(decls, r.builtinDecls)
const queryName = "eval" // NOTE(tsandall): the query name is arbitrary
diff --git a/vendor/github.com/open-policy-agent/opa/v1/resolver/wasm/wasm.go b/vendor/github.com/open-policy-agent/opa/v1/resolver/wasm/wasm.go
index 4f57b3ef82..c70daa8db6 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/resolver/wasm/wasm.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/resolver/wasm/wasm.go
@@ -6,6 +6,7 @@ package wasm
import (
"context"
+ "errors"
"fmt"
"strconv"
@@ -144,7 +145,7 @@ func getResult(evalResult *opa.Result) (ast.Value, error) {
resultSet, ok := parsed.Value.(ast.Set)
if !ok {
- return nil, fmt.Errorf("illegal result type")
+ return nil, errors.New("illegal result type")
}
if resultSet.Len() == 0 {
@@ -152,14 +153,14 @@ func getResult(evalResult *opa.Result) (ast.Value, error) {
}
if resultSet.Len() > 1 {
- return nil, fmt.Errorf("illegal result type")
+ return nil, errors.New("illegal result type")
}
var obj ast.Object
err = resultSet.Iter(func(term *ast.Term) error {
obj, ok = term.Value.(ast.Object)
if !ok || obj.Len() != 1 {
- return fmt.Errorf("illegal result type")
+ return errors.New("illegal result type")
}
return nil
})
diff --git a/vendor/github.com/open-policy-agent/opa/v1/storage/errors.go b/vendor/github.com/open-policy-agent/opa/v1/storage/errors.go
index 8c789052ed..a3d1c00737 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/storage/errors.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/storage/errors.go
@@ -56,8 +56,7 @@ func (err *Error) Error() string {
// IsNotFound returns true if this error is a NotFoundErr.
func IsNotFound(err error) bool {
- switch err := err.(type) {
- case *Error:
+ if err, ok := err.(*Error); ok {
return err.Code == NotFoundErr
}
return false
diff --git a/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/ast.go b/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/ast.go
index 667ca608e0..9f14df0e5b 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/ast.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/ast.go
@@ -101,8 +101,7 @@ func newUpdateArrayAST(data *ast.Array, op storage.PatchOp, path storage.Path, i
return nil, invalidPatchError("%v: invalid patch path", path)
}
- cpy := data.Copy()
- cpy = cpy.Append(ast.NewTerm(value))
+ cpy := data.Append(ast.NewTerm(value))
return &updateAST{path[:len(path)-1], false, cpy}, nil
}
@@ -114,7 +113,7 @@ func newUpdateArrayAST(data *ast.Array, op storage.PatchOp, path storage.Path, i
switch op {
case storage.AddOp:
var results []*ast.Term
- for i := 0; i < data.Len(); i++ {
+ for i := range data.Len() {
if i == pos {
results = append(results, ast.NewTerm(value))
}
@@ -125,7 +124,7 @@ func newUpdateArrayAST(data *ast.Array, op storage.PatchOp, path storage.Path, i
case storage.RemoveOp:
var results []*ast.Term
- for i := 0; i < data.Len(); i++ {
+ for i := range data.Len() {
if i != pos {
results = append(results, data.Elem(i))
}
@@ -134,7 +133,7 @@ func newUpdateArrayAST(data *ast.Array, op storage.PatchOp, path storage.Path, i
default:
var results []*ast.Term
- for i := 0; i < data.Len(); i++ {
+ for i := range data.Len() {
if i == pos {
results = append(results, ast.NewTerm(value))
} else {
@@ -296,7 +295,7 @@ func removeInAstArray(arr *ast.Array, path storage.Path) (ast.Value, error) {
if len(path) == 1 {
var elems []*ast.Term
// Note: possibly expensive operation for large data.
- for i := 0; i < arr.Len(); i++ {
+ for i := range arr.Len() {
if i == idx {
continue
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/inmem.go b/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/inmem.go
index 7c5116b527..c70d234d74 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/inmem.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/inmem.go
@@ -185,7 +185,9 @@ func (db *store) Truncate(ctx context.Context, txn storage.Transaction, params s
}
}
- if err != nil && err != io.EOF {
+ // err is known not to be nil at this point, as it getting assigned
+ // a non-nil value is the only way the loop above can exit.
+ if err != io.EOF {
return err
}
@@ -442,7 +444,7 @@ func lookup(path storage.Path, data map[string]interface{}) (interface{}, bool)
if len(path) == 0 {
return data, true
}
- for i := 0; i < len(path)-1; i++ {
+ for i := range len(path) - 1 {
value, ok := data[path[i]]
if !ok {
return nil, false
diff --git a/vendor/github.com/open-policy-agent/opa/v1/storage/internal/errors/errors.go b/vendor/github.com/open-policy-agent/opa/v1/storage/internal/errors/errors.go
index 06063b4c77..778f30d1f4 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/storage/internal/errors/errors.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/storage/internal/errors/errors.go
@@ -20,7 +20,11 @@ func NewNotFoundError(path storage.Path) *storage.Error {
}
func NewNotFoundErrorWithHint(path storage.Path, hint string) *storage.Error {
- return NewNotFoundErrorf("%v: %v", path.String(), hint)
+ message := path.String() + ": " + hint
+ return &storage.Error{
+ Code: storage.NotFoundErr,
+ Message: message,
+ }
}
func NewNotFoundErrorf(f string, a ...interface{}) *storage.Error {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/storage/internal/ptr/ptr.go b/vendor/github.com/open-policy-agent/opa/v1/storage/internal/ptr/ptr.go
index d1c36a15a0..902e73546e 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/storage/internal/ptr/ptr.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/storage/internal/ptr/ptr.go
@@ -43,8 +43,15 @@ func ValuePtr(data ast.Value, path storage.Path) (ast.Value, error) {
key := path[i]
switch curr := node.(type) {
case ast.Object:
- keyTerm := ast.StringTerm(key)
+ // This term is only created for the lookup, which is not.. ideal.
+ // By using the pool, we can at least avoid allocating the term itself,
+ // while still having to pay 1 allocation for the value. A better solution
+ // would be dynamically interned string terms.
+ keyTerm := ast.TermPtrPool.Get()
+ keyTerm.Value = ast.String(key)
+
val := curr.Get(keyTerm)
+ ast.TermPtrPool.Put(keyTerm)
if val == nil {
return nil, errors.NewNotFoundError(path)
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/storage/path.go b/vendor/github.com/open-policy-agent/opa/v1/storage/path.go
index 7f90c666b0..f774d2eeda 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/storage/path.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/storage/path.go
@@ -5,6 +5,7 @@
package storage
import (
+ "errors"
"fmt"
"net/url"
"strconv"
@@ -50,7 +51,7 @@ func ParsePathEscaped(str string) (path Path, ok bool) {
func NewPathForRef(ref ast.Ref) (path Path, err error) {
if len(ref) == 0 {
- return nil, fmt.Errorf("empty reference (indicates error in caller)")
+ return nil, errors.New("empty reference (indicates error in caller)")
}
if len(ref) == 1 {
@@ -84,7 +85,7 @@ func NewPathForRef(ref ast.Ref) (path Path, err error) {
// is less than other, 0 if p is equal to other, or 1 if p is greater than
// other.
func (p Path) Compare(other Path) (cmp int) {
- for i := 0; i < min(len(p), len(other)); i++ {
+ for i := range min(len(p), len(other)) {
if cmp := strings.Compare(p[i], other[i]); cmp != 0 {
return cmp
}
@@ -132,11 +133,22 @@ func (p Path) Ref(head *ast.Term) (ref ast.Ref) {
}
func (p Path) String() string {
- buf := make([]string, len(p))
- for i := range buf {
- buf[i] = url.PathEscape(p[i])
+ if len(p) == 0 {
+ return "/"
}
- return "/" + strings.Join(buf, "/")
+
+ l := 0
+ for i := range p {
+ l += len(p[i]) + 1
+ }
+
+ sb := strings.Builder{}
+ sb.Grow(l)
+ for i := range p {
+ sb.WriteByte('/')
+ sb.WriteString(url.PathEscape(p[i]))
+ }
+ return sb.String()
}
// MustParsePath returns a new Path for s. If s cannot be parsed, this function
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/aggregates.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/aggregates.go
index 02425d2411..fb59fd07f0 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/aggregates.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/aggregates.go
@@ -230,7 +230,7 @@ func builtinMember(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
case ast.Set:
return iter(ast.InternedBooleanTerm(c.Contains(containee)))
case *ast.Array:
- for i := 0; i < c.Len(); i++ {
+ for i := range c.Len() {
if c.Elem(i).Value.Compare(containee.Value) == 0 {
return iter(ast.InternedBooleanTerm(true))
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/arithmetic.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/arithmetic.go
index 68c3b496e2..acfbba3c74 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/arithmetic.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/arithmetic.go
@@ -5,7 +5,7 @@
package topdown
import (
- "fmt"
+ "errors"
"math/big"
"github.com/open-policy-agent/opa/v1/ast"
@@ -116,14 +116,14 @@ func arithMultiply(a, b *big.Float) (*big.Float, error) {
func arithDivide(a, b *big.Float) (*big.Float, error) {
i, acc := b.Int64()
if acc == big.Exact && i == 0 {
- return nil, fmt.Errorf("divide by zero")
+ return nil, errors.New("divide by zero")
}
return new(big.Float).Quo(a, b), nil
}
func arithRem(a, b *big.Int) (*big.Int, error) {
if b.Int64() == 0 {
- return nil, fmt.Errorf("modulo by zero")
+ return nil, errors.New("modulo by zero")
}
return new(big.Int).Rem(a, b), nil
}
@@ -210,7 +210,7 @@ func builtinRem(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) err
if okx && oky && inSmallIntRange(x) && inSmallIntRange(y) {
if y == 0 {
- return fmt.Errorf("modulo by zero")
+ return errors.New("modulo by zero")
}
return iter(ast.InternedIntNumberTerm(x % y))
@@ -220,7 +220,7 @@ func builtinRem(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) err
op2, err2 := builtins.NumberToInt(n2)
if err1 != nil || err2 != nil {
- return fmt.Errorf("modulo on floating-point number")
+ return errors.New("modulo on floating-point number")
}
i, err := arithRem(op1, op2)
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/array.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/array.go
index 4a2a2ed148..526e3ed26d 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/array.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/array.go
@@ -91,7 +91,7 @@ func builtinArrayReverse(_ BuiltinContext, operands []*ast.Term, iter func(*ast.
length := arr.Len()
reversedArr := make([]*ast.Term, length)
- for index := 0; index < length; index++ {
+ for index := range length {
reversedArr[index] = arr.Elem(length - index - 1)
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/bindings.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/bindings.go
index ae6ca15daa..8c7bfbd178 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/bindings.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/bindings.go
@@ -6,6 +6,7 @@ package topdown
import (
"fmt"
+ "strconv"
"strings"
"github.com/open-policy-agent/opa/v1/ast"
@@ -184,7 +185,7 @@ func (u *bindings) namespaceVar(v *ast.Term, caller *bindings) *ast.Term {
// Root documents (i.e., data, input) should never be namespaced because they
// are globally unique.
if !ast.RootDocumentNames.Contains(v) {
- return ast.NewTerm(ast.Var(string(name) + fmt.Sprint(u.id)))
+ return ast.NewTerm(ast.Var(string(name) + strconv.FormatUint(u.id, 10)))
}
}
return v
@@ -313,12 +314,12 @@ func (b *bindingsArrayHashmap) Put(key *ast.Term, value value) {
if b.a == nil {
b.a = new([maxLinearScan]bindingArrayKeyValue)
} else if i := b.find(key); i >= 0 {
- (*b.a)[i].value = value
+ b.a[i].value = value
return
}
if b.n < maxLinearScan {
- (*b.a)[b.n] = bindingArrayKeyValue{key, value}
+ b.a[b.n] = bindingArrayKeyValue{key, value}
b.n++
return
}
@@ -341,7 +342,7 @@ func (b *bindingsArrayHashmap) Put(key *ast.Term, value value) {
func (b *bindingsArrayHashmap) Get(key *ast.Term) (value, bool) {
if b.m == nil {
if i := b.find(key); i >= 0 {
- return (*b.a)[i].value, true
+ return b.a[i].value, true
}
return value{}, false
@@ -360,7 +361,7 @@ func (b *bindingsArrayHashmap) Delete(key *ast.Term) {
if i := b.find(key); i >= 0 {
n := b.n - 1
if i < n {
- (*b.a)[i] = (*b.a)[n]
+ b.a[i] = b.a[n]
}
b.n = n
@@ -373,8 +374,8 @@ func (b *bindingsArrayHashmap) Delete(key *ast.Term) {
func (b *bindingsArrayHashmap) Iter(f func(k *ast.Term, v value) bool) {
if b.m == nil {
- for i := 0; i < b.n; i++ {
- if f((*b.a)[i].key, (*b.a)[i].value) {
+ for i := range b.n {
+ if f(b.a[i].key, b.a[i].value) {
return
}
}
@@ -390,8 +391,8 @@ func (b *bindingsArrayHashmap) Iter(f func(k *ast.Term, v value) bool) {
func (b *bindingsArrayHashmap) find(key *ast.Term) int {
v := key.Value.(ast.Var)
- for i := 0; i < b.n; i++ {
- if (*b.a)[i].key.Value.(ast.Var) == v {
+ for i := range b.n {
+ if b.a[i].key.Value.(ast.Var) == v {
return i
}
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/builtins/builtins.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/builtins/builtins.go
index 45a0b88408..9fcaea4a23 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/builtins/builtins.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/builtins/builtins.go
@@ -7,6 +7,7 @@ package builtins
import (
"encoding/json"
+ "errors"
"fmt"
"math/big"
"strings"
@@ -97,7 +98,7 @@ func (c *NDBCache) UnmarshalJSON(data []byte) error {
out[string(k.Value.(ast.String))] = obj
return nil
}
- return fmt.Errorf("expected Object, got other Value type in conversion")
+ return errors.New("expected Object, got other Value type in conversion")
})
if err != nil {
return err
@@ -262,7 +263,7 @@ func NumberToInt(n ast.Number) (*big.Int, error) {
f := NumberToFloat(n)
r, accuracy := f.Int(nil)
if accuracy != big.Exact {
- return nil, fmt.Errorf("illegal value")
+ return nil, errors.New("illegal value")
}
return r, nil
}
@@ -309,7 +310,7 @@ func RuneSliceOperand(x ast.Value, pos int) ([]rune, error) {
}
var f = make([]rune, a.Len())
- for k := 0; k < a.Len(); k++ {
+ for k := range a.Len() {
b := a.Elem(k)
c, ok := b.Value.(ast.String)
if !ok {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/cache.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/cache.go
index 607abf46e7..42fb6ad3f5 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/cache.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/cache.go
@@ -32,13 +32,19 @@ type VirtualCache interface {
Keys() []ast.Ref
}
+// BaseCache defines the interface for a cache that stores cached base documents, i.e. data.
+type BaseCache interface {
+ Get(ast.Ref) ast.Value
+ Put(ast.Ref, ast.Value)
+}
+
type virtualCache struct {
stack []*virtualCacheElem
}
type virtualCacheElem struct {
value *ast.Term
- children *util.HashMap
+ children *util.HasherMap[*ast.Term, *virtualCacheElem]
undefined bool
}
@@ -65,12 +71,12 @@ func (c *virtualCache) Pop() {
// ast.Term, true is impossible
func (c *virtualCache) Get(ref ast.Ref) (*ast.Term, bool) {
node := c.stack[len(c.stack)-1]
- for i := 0; i < len(ref); i++ {
+ for i := range ref {
x, ok := node.children.Get(ref[i])
if !ok {
return nil, false
}
- node = x.(*virtualCacheElem)
+ node = x
}
if node.undefined {
return nil, true
@@ -83,10 +89,10 @@ func (c *virtualCache) Get(ref ast.Ref) (*ast.Term, bool) {
// indicate that the Ref has resolved to undefined.
func (c *virtualCache) Put(ref ast.Ref, value *ast.Term) {
node := c.stack[len(c.stack)-1]
- for i := 0; i < len(ref); i++ {
+ for i := range ref {
x, ok := node.children.Get(ref[i])
if ok {
- node = x.(*virtualCacheElem)
+ node = x
} else {
next := newVirtualCacheElem()
node.children.Put(ref[i], next)
@@ -107,13 +113,13 @@ func (c *virtualCache) Keys() []ast.Ref {
func keysRecursive(root ast.Ref, node *virtualCacheElem) []ast.Ref {
var keys []ast.Ref
- node.children.Iter(func(k, v util.T) bool {
- ref := root.Append(k.(*ast.Term))
- if v.(*virtualCacheElem).value != nil {
+ node.children.Iter(func(k *ast.Term, v *virtualCacheElem) bool {
+ ref := root.Append(k)
+ if v.value != nil {
keys = append(keys, ref)
}
- if v.(*virtualCacheElem).children.Len() > 0 {
- keys = append(keys, keysRecursive(ref, v.(*virtualCacheElem))...)
+ if v.children.Len() > 0 {
+ keys = append(keys, keysRecursive(ref, v)...)
}
return false
})
@@ -124,12 +130,8 @@ func newVirtualCacheElem() *virtualCacheElem {
return &virtualCacheElem{children: newVirtualCacheHashMap()}
}
-func newVirtualCacheHashMap() *util.HashMap {
- return util.NewHashMap(func(a, b util.T) bool {
- return a.(*ast.Term).Equal(b.(*ast.Term))
- }, func(x util.T) int {
- return x.(*ast.Term).Hash()
- })
+func newVirtualCacheHashMap() *util.HasherMap[*ast.Term, *virtualCacheElem] {
+ return util.NewHasherMap[*ast.Term, *virtualCacheElem](ast.TermValueEqual)
}
// baseCache implements a trie structure to cache base documents read out of
@@ -148,11 +150,17 @@ func newBaseCache() *baseCache {
func (c *baseCache) Get(ref ast.Ref) ast.Value {
node := c.root
- for i := 0; i < len(ref); i++ {
+ for i := range ref {
node = node.children[ref[i].Value]
if node == nil {
return nil
} else if node.value != nil {
+ if len(ref) == 1 && ast.IsScalar(node.value) {
+ // If the node is a scalar, return the value directly
+ // and avoid an allocation when calling Find.
+ return node.value
+ }
+
result, err := node.value.Find(ref[i+1:])
if err != nil {
return nil
@@ -165,7 +173,7 @@ func (c *baseCache) Get(ref ast.Ref) ast.Value {
func (c *baseCache) Put(ref ast.Ref, value ast.Value) {
node := c.root
- for i := 0; i < len(ref); i++ {
+ for i := range ref {
if child, ok := node.children[ref[i].Value]; ok {
node = child
} else {
@@ -232,7 +240,7 @@ type comprehensionCache struct {
type comprehensionCacheElem struct {
value *ast.Term
- children *util.HashMap
+ children *util.HasherMap[*ast.Term, *comprehensionCacheElem]
}
func newComprehensionCache() *comprehensionCache {
@@ -264,22 +272,22 @@ func newComprehensionCacheElem() *comprehensionCacheElem {
func (c *comprehensionCacheElem) Get(key []*ast.Term) *ast.Term {
node := c
- for i := 0; i < len(key); i++ {
+ for i := range key {
x, ok := node.children.Get(key[i])
if !ok {
return nil
}
- node = x.(*comprehensionCacheElem)
+ node = x
}
return node.value
}
func (c *comprehensionCacheElem) Put(key []*ast.Term, value *ast.Term) {
node := c
- for i := 0; i < len(key); i++ {
+ for i := range key {
x, ok := node.children.Get(key[i])
if ok {
- node = x.(*comprehensionCacheElem)
+ node = x
} else {
next := newComprehensionCacheElem()
node.children.Put(key[i], next)
@@ -289,12 +297,8 @@ func (c *comprehensionCacheElem) Put(key []*ast.Term, value *ast.Term) {
node.value = value
}
-func newComprehensionCacheHashMap() *util.HashMap {
- return util.NewHashMap(func(a, b util.T) bool {
- return a.(*ast.Term).Equal(b.(*ast.Term))
- }, func(x util.T) int {
- return x.(*ast.Term).Hash()
- })
+func newComprehensionCacheHashMap() *util.HasherMap[*ast.Term, *comprehensionCacheElem] {
+ return util.NewHasherMap[*ast.Term, *comprehensionCacheElem](ast.TermValueEqual)
}
type functionMocksStack struct {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/cache/cache.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/cache/cache.go
index a2b80c0a77..064e9c4adc 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/cache/cache.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/cache/cache.go
@@ -372,19 +372,13 @@ type InterQueryValueCacheBucket interface {
}
type interQueryValueCacheBucket struct {
- items util.TypedHashMap[ast.Value, any]
+ items util.HasherMap[ast.Value, any]
config *NamedValueCacheConfig
mtx sync.RWMutex
}
-func newItemsMap() *util.TypedHashMap[ast.Value, any] {
- return util.NewTypedHashMap[ast.Value, any](
- func(a, b ast.Value) bool { return a.Compare(b) == 0 },
- func(any, any) bool { return false }, // map equality not supported
- func(a ast.Value) int { return a.Hash() },
- func(any) int { return 0 }, // map equality not supported
- nil,
- )
+func newItemsMap() *util.HasherMap[ast.Value, any] {
+ return util.NewHasherMap[ast.Value, any](ast.ValueEqual)
}
func (c *interQueryValueCacheBucket) Get(k ast.Value) (any, bool) {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/casts.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/casts.go
index 9be7271c44..f395324841 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/casts.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/casts.go
@@ -22,7 +22,7 @@ func builtinToNumber(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term
}
return iter(ast.InternedIntNumberTerm(0))
case ast.Number:
- return iter(ast.NewTerm(a))
+ return iter(operands[0])
case ast.String:
strValue := string(a)
@@ -46,7 +46,7 @@ func builtinToNumber(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term
return builtins.NewOperandTypeErr(1, operands[0].Value, "null", "boolean", "number", "string")
}
-// Deprecated in v0.13.0.
+// Deprecated: deprecated in v0.13.0.
func builtinToArray(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
switch val := operands[0].Value.(type) {
case *ast.Array:
@@ -64,7 +64,7 @@ func builtinToArray(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
}
}
-// Deprecated in v0.13.0.
+// Deprecated: deprecated in v0.13.0.
func builtinToSet(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
switch val := operands[0].Value.(type) {
case *ast.Array:
@@ -80,7 +80,7 @@ func builtinToSet(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) e
}
}
-// Deprecated in v0.13.0.
+// Deprecated: deprecated in v0.13.0.
func builtinToString(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
switch val := operands[0].Value.(type) {
case ast.String:
@@ -90,7 +90,7 @@ func builtinToString(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term
}
}
-// Deprecated in v0.13.0.
+// Deprecated: deprecated in v0.13.0.
func builtinToBoolean(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
switch val := operands[0].Value.(type) {
case ast.Boolean:
@@ -100,7 +100,7 @@ func builtinToBoolean(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter
}
}
-// Deprecated in v0.13.0.
+// Deprecated: deprecated in v0.13.0.
func builtinToNull(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
switch val := operands[0].Value.(type) {
case ast.Null:
@@ -110,7 +110,7 @@ func builtinToNull(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
}
}
-// Deprecated in v0.13.0.
+// Deprecated: deprecated in v0.13.0.
func builtinToObject(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
switch val := operands[0].Value.(type) {
case ast.Object:
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/cidr.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/cidr.go
index 113bd2f372..00c034656b 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/cidr.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/cidr.go
@@ -31,7 +31,7 @@ func getLastIP(cidr *net.IPNet) (net.IP, error) {
prefixLen, bits := cidr.Mask.Size()
if prefixLen == 0 && bits == 0 {
// non-standard mask, see https://golang.org/pkg/net/#IPMask.Size
- return nil, fmt.Errorf("CIDR mask is in non-standard format")
+ return nil, errors.New("CIDR mask is in non-standard format")
}
var lastIP []byte
if prefixLen == bits {
@@ -137,7 +137,7 @@ func evalNetCIDRContainsMatchesOperand(operand int, a *ast.Term, iter func(cidr,
case ast.String:
return iter(a, a)
case *ast.Array:
- for i := 0; i < v.Len(); i++ {
+ for i := range v.Len() {
cidr, err := getCIDRMatchTerm(v.Elem(i))
if err != nil {
return fmt.Errorf("operand %v: %v", operand, err)
@@ -255,7 +255,7 @@ func (c cidrBlockRanges) Less(i, j int) bool {
}
// Then compare first IP.
- cmp = bytes.Compare(*c[i].First, *c[i].First)
+ cmp = bytes.Compare(*c[i].First, *c[j].First)
if cmp < 0 {
return true
} else if cmp > 0 {
@@ -274,7 +274,7 @@ func builtinNetCIDRMerge(_ BuiltinContext, operands []*ast.Term, iter func(*ast.
switch v := operands[0].Value.(type) {
case *ast.Array:
- for i := 0; i < v.Len(); i++ {
+ for i := range v.Len() {
network, err := generateIPNet(v.Elem(i))
if err != nil {
return err
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/copypropagation/unionfind.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/copypropagation/unionfind.go
index 679464250e..528c83a0f4 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/copypropagation/unionfind.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/copypropagation/unionfind.go
@@ -14,18 +14,14 @@ import (
type rankFunc func(*unionFindRoot, *unionFindRoot) (*unionFindRoot, *unionFindRoot)
type unionFind struct {
- roots *util.HashMap
+ roots *util.HasherMap[ast.Value, *unionFindRoot]
parents *ast.ValueMap
rank rankFunc
}
func newUnionFind(rank rankFunc) *unionFind {
return &unionFind{
- roots: util.NewHashMap(func(a util.T, b util.T) bool {
- return a.(ast.Value).Compare(b.(ast.Value)) == 0
- }, func(v util.T) int {
- return v.(ast.Value).Hash()
- }),
+ roots: util.NewHasherMap[ast.Value, *unionFindRoot](ast.ValueEqual),
parents: ast.NewValueMap(),
rank: rank,
}
@@ -53,7 +49,7 @@ func (uf *unionFind) Find(v ast.Value) (*unionFindRoot, bool) {
if parent.Compare(v) == 0 {
r, ok := uf.roots.Get(v)
- return r.(*unionFindRoot), ok
+ return r, ok
}
return uf.Find(parent)
@@ -93,13 +89,13 @@ func (uf *unionFind) String() string {
map[string]ast.Value{},
}
- uf.roots.Iter(func(k util.T, v util.T) bool {
- o.Roots[k.(ast.Value).String()] = struct {
+ uf.roots.Iter(func(k ast.Value, v *unionFindRoot) bool {
+ o.Roots[k.String()] = struct {
Constant *ast.Term
Key ast.Value
}{
- v.(*unionFindRoot).constant,
- v.(*unionFindRoot).key,
+ v.constant,
+ v.key,
}
return true
})
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/crypto.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/crypto.go
index ab499e3e8f..dafbac7850 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/crypto.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/crypto.go
@@ -18,6 +18,7 @@ import (
"encoding/hex"
"encoding/json"
"encoding/pem"
+ "errors"
"fmt"
"hash"
"os"
@@ -204,7 +205,7 @@ func extractVerifyOpts(options ast.Object) (verifyOpt x509.VerifyOptions, err er
if ok {
verifyOpt.DNSName = strings.Trim(string(dns), "\"")
} else {
- return verifyOpt, fmt.Errorf("'DNSName' should be a string")
+ return verifyOpt, errors.New("'DNSName' should be a string")
}
case "CurrentTime":
c, ok := options.Get(key).Value.(ast.Number)
@@ -213,10 +214,10 @@ func extractVerifyOpts(options ast.Object) (verifyOpt x509.VerifyOptions, err er
if ok {
verifyOpt.CurrentTime = time.Unix(0, nanosecs)
} else {
- return verifyOpt, fmt.Errorf("'CurrentTime' should be a valid int64 number")
+ return verifyOpt, errors.New("'CurrentTime' should be a valid int64 number")
}
} else {
- return verifyOpt, fmt.Errorf("'CurrentTime' should be a number")
+ return verifyOpt, errors.New("'CurrentTime' should be a number")
}
case "MaxConstraintComparisons":
c, ok := options.Get(key).Value.(ast.Number)
@@ -225,23 +226,23 @@ func extractVerifyOpts(options ast.Object) (verifyOpt x509.VerifyOptions, err er
if ok {
verifyOpt.MaxConstraintComparisions = maxComparisons
} else {
- return verifyOpt, fmt.Errorf("'MaxConstraintComparisons' should be a valid number")
+ return verifyOpt, errors.New("'MaxConstraintComparisons' should be a valid number")
}
} else {
- return verifyOpt, fmt.Errorf("'MaxConstraintComparisons' should be a number")
+ return verifyOpt, errors.New("'MaxConstraintComparisons' should be a number")
}
case "KeyUsages":
type forEach interface {
Foreach(func(*ast.Term))
}
var ks forEach
- switch options.Get(key).Value.(type) {
+ switch v := options.Get(key).Value.(type) {
case *ast.Array:
- ks = options.Get(key).Value.(*ast.Array)
+ ks = v
case ast.Set:
- ks = options.Get(key).Value.(ast.Set)
+ ks = v
default:
- return verifyOpt, fmt.Errorf("'KeyUsages' should be an Array or Set")
+ return verifyOpt, errors.New("'KeyUsages' should be an Array or Set")
}
// Collect the x509.ExtKeyUsage values by looking up the
@@ -262,7 +263,7 @@ func extractVerifyOpts(options ast.Object) (verifyOpt x509.VerifyOptions, err er
return x509.VerifyOptions{}, fmt.Errorf("invalid entries for 'KeyUsages' found: %s", invalidKUsgs)
}
default:
- return verifyOpt, fmt.Errorf("invalid key option")
+ return verifyOpt, errors.New("invalid key option")
}
}
@@ -312,7 +313,7 @@ func builtinCryptoX509ParseCertificateRequest(_ BuiltinContext, operands []*ast.
p, _ := pem.Decode(bytes)
if p != nil && p.Type != blockTypeCertificateRequest {
- return fmt.Errorf("invalid PEM-encoded certificate signing request")
+ return errors.New("invalid PEM-encoded certificate signing request")
}
if p != nil {
bytes = p.Bytes
@@ -354,7 +355,7 @@ func builtinCryptoJWKFromPrivateKey(_ BuiltinContext, operands []*ast.Term, iter
pemDataString := string(input)
if pemDataString == "" {
- return fmt.Errorf("input PEM data was empty")
+ return errors.New("input PEM data was empty")
}
// This built in must be supplied a valid PEM or base64 encoded string.
@@ -495,7 +496,7 @@ func hmacHelper(operands []*ast.Term, iter func(*ast.Term) error, h func() hash.
mac.Write([]byte(message))
messageDigest := mac.Sum(nil)
- return iter(ast.StringTerm(fmt.Sprintf("%x", messageDigest)))
+ return iter(ast.StringTerm(hex.EncodeToString(messageDigest)))
}
func builtinCryptoHmacMd5(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
@@ -704,7 +705,7 @@ func addCACertsFromBytes(pool *x509.CertPool, pemBytes []byte) (*x509.CertPool,
}
if ok := pool.AppendCertsFromPEM(pemBytes); !ok {
- return nil, fmt.Errorf("could not append certificates")
+ return nil, errors.New("could not append certificates")
}
return pool, nil
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/eval.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/eval.go
index 4758759e71..635ea38451 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/eval.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/eval.go
@@ -72,6 +72,7 @@ type eval struct {
store storage.Store
txn storage.Transaction
virtualCache VirtualCache
+ baseCache BaseCache
interQueryBuiltinCache cache.InterQueryCache
interQueryBuiltinValueCache cache.InterQueryValueCache
printHook print.Hook
@@ -80,7 +81,6 @@ type eval struct {
parent *eval
caller *eval
bindings *bindings
- baseCache *baseCache
compiler *ast.Compiler
input *ast.Term
data *ast.Term
@@ -162,10 +162,10 @@ func (e *eval) String() string {
func (e *eval) string(s *strings.Builder) {
fmt.Fprintf(s, "')
+ s.WriteByte('>')
}
func (e *eval) builtinFunc(name string) (*ast.Builtin, BuiltinFunc, bool) {
@@ -380,9 +380,7 @@ func (e *eval) evalExpr(iter evalIterator) error {
}
if e.index >= len(e.query) {
- err := iter(e)
-
- if err != nil {
+ if err := iter(e); err != nil {
switch err := err.(type) {
case *deferredEarlyExitError, *earlyExitError:
return wrapErr(err)
@@ -592,16 +590,18 @@ func (e *eval) evalWith(iter evalIterator) error {
expr := e.query[e.index]
- // Disable inlining on all references in the expression so the result of
- // partial evaluation has the same semantics w/ the with statements
- // preserved.
var disable []ast.Ref
- disableRef := func(x ast.Ref) bool {
- disable = append(disable, x.GroundPrefix())
- return false
- }
if e.partial() {
+ // Avoid the `disable` var to escape to heap unless partial evaluation is enabled.
+ var disablePartial []ast.Ref
+ // Disable inlining on all references in the expression so the result of
+ // partial evaluation has the same semantics w/ the with statements
+ // preserved.
+ disableRef := func(x ast.Ref) bool {
+ disablePartial = append(disablePartial, x.GroundPrefix())
+ return false
+ }
// If the value is unknown the with statement cannot be evaluated and so
// the entire expression should be saved to be safe. In the future this
@@ -626,12 +626,15 @@ func (e *eval) evalWith(iter evalIterator) error {
}
ast.WalkRefs(expr.NoWith(), disableRef)
+
+ disable = disablePartial
}
pairsInput := [][2]*ast.Term{}
pairsData := [][2]*ast.Term{}
- functionMocks := [][2]*ast.Term{}
- targets := []ast.Ref{}
+ targets := make([]ast.Ref, 0, len(expr.With))
+
+ var functionMocks [][2]*ast.Term
for i := range expr.With {
target := expr.With[i].Target
@@ -858,7 +861,6 @@ func (e *eval) evalCall(terms []*ast.Term, iter unifyIterator) error {
ref := terms[0].Value.(ast.Ref)
- var mocked bool
mock, mocked := e.functionMocks.Get(ref)
if mocked {
if m, ok := mock.Value.(ast.Ref); ok && isFunction(e.compiler.TypeEnv, m) { // builtin or data function
@@ -1185,7 +1187,7 @@ func (e *eval) biunifyRef(a, b *ast.Term, b1, b2 *bindings, iter unifyIterator)
e: e,
ref: ref,
pos: 1,
- plugged: ref.Copy(),
+ plugged: ref.CopyNonGround(),
bindings: b1,
rterm: b,
rbindings: b2,
@@ -1504,7 +1506,7 @@ func (e *eval) saveExprMarkUnknowns(expr *ast.Expr, b *bindings, iter unifyItera
e.traceSave(expr)
err = iter()
e.saveStack.Pop()
- for i := 0; i < pops; i++ {
+ for range pops {
e.saveSet.Pop()
}
return err
@@ -1534,7 +1536,7 @@ func (e *eval) saveUnify(a, b *ast.Term, b1, b2 *bindings, iter unifyIterator) e
err := iter()
e.saveStack.Pop()
- for i := 0; i < pops; i++ {
+ for range pops {
e.saveSet.Pop()
}
@@ -1561,7 +1563,7 @@ func (e *eval) saveCall(declArgsLen int, terms []*ast.Term, iter unifyIterator)
err := iter()
e.saveStack.Pop()
- for i := 0; i < pops; i++ {
+ for range pops {
e.saveSet.Pop()
}
return err
@@ -1583,7 +1585,7 @@ func (e *eval) saveInlinedNegatedExprs(exprs []*ast.Expr, iter unifyIterator) er
e.traceSave(expr)
}
err := iter()
- for i := 0; i < len(exprs); i++ {
+ for range exprs {
e.saveStack.Pop()
}
return err
@@ -1745,7 +1747,7 @@ func (e *evalResolver) Resolve(ref ast.Ref) (ast.Value, error) {
return merged, err
}
e.e.instr.stopTimer(evalOpResolve)
- return nil, fmt.Errorf("illegal ref")
+ return nil, errors.New("illegal ref")
}
func (e *eval) resolveReadFromStorage(ref ast.Ref, a ast.Value) (ast.Value, error) {
@@ -1788,16 +1790,7 @@ func (e *eval) resolveReadFromStorage(ref ast.Ref, a ast.Value) (ast.Value, erro
}
case ast.Object:
if obj.Len() > 0 {
- cpy := ast.NewObject()
- if err := obj.Iter(func(k *ast.Term, v *ast.Term) error {
- if !ast.SystemDocumentKey.Equal(k.Value) {
- cpy.Insert(k, v)
- }
- return nil
- }); err != nil {
- return nil, err
- }
- blob = cpy
+ blob, _ = obj.Map(systemDocumentKeyRemoveMapper)
}
}
}
@@ -1830,6 +1823,13 @@ func (e *eval) resolveReadFromStorage(ref ast.Ref, a ast.Value) (ast.Value, erro
return merged, nil
}
+func systemDocumentKeyRemoveMapper(k, v *ast.Term) (*ast.Term, *ast.Term, error) {
+ if ast.SystemDocumentKey.Equal(k.Value) {
+ return nil, nil, nil
+ }
+ return k, v, nil
+}
+
func (e *eval) generateVar(suffix string) *ast.Term {
buf := make([]byte, 0, len(e.genvarprefix)+len(suffix)+1)
@@ -1910,7 +1910,6 @@ func (e *evalBuiltin) eval(iter unifyIterator) error {
numDeclArgs := e.bi.Decl.Arity()
e.e.instr.startTimer(evalOpBuiltinCall)
- var err error
// NOTE(philipc): We sometimes have to drop the very last term off
// the args list for cases where a builtin's result is used/assigned,
@@ -1946,7 +1945,7 @@ func (e *evalBuiltin) eval(iter unifyIterator) error {
}
// Normal unification flow for builtins:
- err = e.f(e.bctx, operands, func(output *ast.Term) error {
+ err := e.f(e.bctx, operands, func(output *ast.Term) error {
e.e.instr.stopTimer(evalOpBuiltinCall)
@@ -2048,12 +2047,23 @@ func (e evalFunc) evalValue(iter unifyIterator, argCount int, findOne bool) erro
}
}
+ // NOTE(anders): While it makes the code a bit more complex, reusing the
+ // args slice across each function increment saves a lot of resources
+ // compared to creating a new one inside each call to evalOneRule... so
+ // think twice before simplifying this :)
+ args := make([]*ast.Term, len(e.terms)-1)
+
var prev *ast.Term
return withSuppressEarlyExit(func() error {
var outerEe *deferredEarlyExitError
for _, rule := range e.ir.Rules {
- next, err := e.evalOneRule(iter, rule, cacheKey, prev, findOne)
+ copy(args, rule.Head.Args)
+ if len(args) == len(rule.Head.Args)+1 {
+ args[len(args)-1] = rule.Head.Value
+ }
+
+ next, err := e.evalOneRule(iter, rule, args, cacheKey, prev, findOne)
if err != nil {
if oee, ok := err.(*deferredEarlyExitError); ok {
if outerEe == nil {
@@ -2065,7 +2075,12 @@ func (e evalFunc) evalValue(iter unifyIterator, argCount int, findOne bool) erro
}
if next == nil {
for _, erule := range e.ir.Else[rule] {
- next, err = e.evalOneRule(iter, erule, cacheKey, prev, findOne)
+ copy(args, erule.Head.Args)
+ if len(args) == len(erule.Head.Args)+1 {
+ args[len(args)-1] = erule.Head.Value
+ }
+
+ next, err = e.evalOneRule(iter, erule, args, cacheKey, prev, findOne)
if err != nil {
if oee, ok := err.(*deferredEarlyExitError); ok {
if outerEe == nil {
@@ -2086,7 +2101,13 @@ func (e evalFunc) evalValue(iter unifyIterator, argCount int, findOne bool) erro
}
if e.ir.Default != nil && prev == nil {
- _, err := e.evalOneRule(iter, e.ir.Default, cacheKey, prev, findOne)
+ copy(args, e.ir.Default.Head.Args)
+ if len(args) == len(e.ir.Default.Head.Args)+1 {
+ args[len(args)-1] = e.ir.Default.Head.Value
+ }
+
+ _, err := e.evalOneRule(iter, e.ir.Default, args, cacheKey, prev, findOne)
+
return err
}
@@ -2107,7 +2128,7 @@ func (e evalFunc) evalCache(argCount int, iter unifyIterator) (ast.Ref, bool, er
}
cacheKey := make([]*ast.Term, plen)
- for i := 0; i < plen; i++ {
+ for i := range plen {
if e.terms[i].IsGround() {
// Avoid expensive copying of ref if it is ground.
cacheKey[i] = e.terms[i]
@@ -2132,20 +2153,13 @@ func (e evalFunc) evalCache(argCount int, iter unifyIterator) (ast.Ref, bool, er
return cacheKey, false, nil
}
-func (e evalFunc) evalOneRule(iter unifyIterator, rule *ast.Rule, cacheKey ast.Ref, prev *ast.Term, findOne bool) (*ast.Term, error) {
+func (e evalFunc) evalOneRule(iter unifyIterator, rule *ast.Rule, args []*ast.Term, cacheKey ast.Ref, prev *ast.Term, findOne bool) (*ast.Term, error) {
child := evalPool.Get()
defer evalPool.Put(child)
e.e.child(rule.Body, child)
child.findOne = findOne
- args := make([]*ast.Term, len(e.terms)-1)
- copy(args, rule.Head.Args)
-
- if len(args) == len(rule.Head.Args)+1 {
- args[len(args)-1] = rule.Head.Value
- }
-
var result *ast.Term
child.traceEnter(rule)
@@ -2167,28 +2181,24 @@ func (e evalFunc) evalOneRule(iter unifyIterator, rule *ast.Rule, cacheKey ast.R
e.e.virtualCache.Put(cacheKey, result) // the redos confirm this, or the evaluation is aborted
}
- if len(rule.Head.Args) == len(e.terms)-1 {
- if ast.Boolean(false).Equal(result.Value) {
- if prev != nil && !prev.Equal(result) {
- return functionConflictErr(rule.Location)
- }
- prev = result
- return nil
+ if len(rule.Head.Args) == len(e.terms)-1 && ast.Boolean(false).Equal(result.Value) {
+ if prev != nil && !prev.Equal(result) {
+ return functionConflictErr(rule.Location)
}
+ prev = result
+ return nil
}
// Partial evaluation should explore all rules and may not produce
// a ground result so we do not perform conflict detection or
// deduplication. See "ignore conflicts: functions" test case for
// an example.
- if !e.e.partial() {
- if prev != nil {
- if !prev.Equal(result) {
- return functionConflictErr(rule.Location)
- }
- child.traceRedo(rule)
- return nil
+ if !e.e.partial() && prev != nil {
+ if !prev.Equal(result) {
+ return functionConflictErr(rule.Location)
}
+ child.traceRedo(rule)
+ return nil
}
prev = result
@@ -2278,6 +2288,39 @@ func (e evalFunc) partialEvalSupportRule(rule *ast.Rule, path ast.Ref) error {
return err
}
+type deferredEarlyExitContainer struct {
+ deferred *deferredEarlyExitError
+}
+
+func (dc *deferredEarlyExitContainer) handleErr(err error) error {
+ if err == nil {
+ return nil
+ }
+
+ if dc.deferred == nil && errors.As(err, &dc.deferred) && dc.deferred != nil {
+ return nil
+ }
+
+ return err
+}
+
+// copyError returns a copy of the deferred early exit error if one is present.
+// This exists only to allow the container to be reused.
+func (dc *deferredEarlyExitContainer) copyError() *deferredEarlyExitError {
+ if dc.deferred == nil {
+ return nil
+ }
+
+ cpy := *dc.deferred
+ return &cpy
+}
+
+var deecPool = sync.Pool{
+ New: func() any {
+ return &deferredEarlyExitContainer{}
+ },
+}
+
type evalTree struct {
e *eval
bindings *bindings
@@ -2363,28 +2406,20 @@ func (e evalTree) enumerate(iter unifyIterator) error {
return err
}
- var deferredEe *deferredEarlyExitError
- handleErr := func(err error) error {
- var dee *deferredEarlyExitError
- if errors.As(err, &dee) {
- if deferredEe == nil {
- deferredEe = dee
- }
- return nil
- }
- return err
- }
+ dc := deecPool.Get().(*deferredEarlyExitContainer)
+ dc.deferred = nil
+ defer deecPool.Put(dc)
if doc != nil {
switch doc := doc.(type) {
case *ast.Array:
- for i := 0; i < doc.Len(); i++ {
+ for i := range doc.Len() {
k := ast.InternedIntNumberTerm(i)
err := e.e.biunify(k, e.ref[e.pos], e.bindings, e.bindings, func() error {
return e.next(iter, k)
})
- if err := handleErr(err); err != nil {
+ if err := dc.handleErr(err); err != nil {
return err
}
}
@@ -2394,7 +2429,7 @@ func (e evalTree) enumerate(iter unifyIterator) error {
err := e.e.biunify(k, e.ref[e.pos], e.bindings, e.bindings, func() error {
return e.next(iter, k)
})
- if err := handleErr(err); err != nil {
+ if err := dc.handleErr(err); err != nil {
return err
}
}
@@ -2403,15 +2438,15 @@ func (e evalTree) enumerate(iter unifyIterator) error {
err := e.e.biunify(elem, e.ref[e.pos], e.bindings, e.bindings, func() error {
return e.next(iter, elem)
})
- return handleErr(err)
+ return dc.handleErr(err)
}); err != nil {
return err
}
}
}
- if deferredEe != nil {
- return deferredEe
+ if dc.deferred != nil {
+ return dc.copyError()
}
if e.node == nil {
@@ -2634,7 +2669,7 @@ func maxRefLength(rules []*ast.Rule, ceil int) int {
for _, r := range rules {
rl := len(r.Ref())
if r.Head.RuleKind() == ast.MultiValue {
- rl = rl + 1
+ rl++
}
if rl >= ceil {
return ceil
@@ -3650,28 +3685,55 @@ func (e evalTerm) enumerate(iter unifyIterator) error {
switch v := e.term.Value.(type) {
case *ast.Array:
- for i := 0; i < v.Len(); i++ {
- k := ast.InternedIntNumberTerm(i)
- if err := handleErr(e.e.biunify(k, e.ref[e.pos], e.bindings, e.bindings, func() error {
- return e.next(iter, k)
- })); err != nil {
- return err
+ // Note(anders):
+ // For this case (e.g. input.foo[_]), we can avoid the (quite expensive) overhead of a callback
+ // function literal escaping to the heap in each iteration by inlining the biunification logic,
+ // meaning a 10x reduction in both the number of allocations made as well as the memory consumed.
+ // It is possible that such inlining could be done for the set/object cases as well, and that's
+ // worth looking into later, as I imagine set iteration in particular would be an even greater
+ // win across most policies. Those cases are however much more complex, as we need to deal with
+ // any type on either side, not just int/var as is the case here.
+ for i := range v.Len() {
+ a := ast.InternedIntNumberTerm(i)
+ b := e.ref[e.pos]
+
+ if _, ok := b.Value.(ast.Var); ok {
+ if e.e.traceEnabled {
+ e.e.traceUnify(a, b)
+ }
+ var undo undo
+ b, e.bindings = e.bindings.apply(b)
+ e.bindings.bind(b, a, e.bindings, &undo)
+
+ err := e.next(iter, a)
+ undo.Undo()
+ if err != nil {
+ if err := handleErr(err); err != nil {
+ return err
+ }
+ }
}
}
case ast.Object:
for _, k := range v.Keys() {
- if err := handleErr(e.e.biunify(k, e.ref[e.pos], e.termbindings, e.bindings, func() error {
+ err := e.e.biunify(k, e.ref[e.pos], e.termbindings, e.bindings, func() error {
return e.next(iter, e.termbindings.Plug(k))
- })); err != nil {
- return err
+ })
+ if err != nil {
+ if err := handleErr(err); err != nil {
+ return err
+ }
}
}
case ast.Set:
for _, elem := range v.Slice() {
- if err := handleErr(e.e.biunify(elem, e.ref[e.pos], e.termbindings, e.bindings, func() error {
+ err := e.e.biunify(elem, e.ref[e.pos], e.termbindings, e.bindings, func() error {
return e.next(iter, e.termbindings.Plug(elem))
- })); err != nil {
- return err
+ })
+ if err != nil {
+ if err := handleErr(err); err != nil {
+ return err
+ }
}
}
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/graphql.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/graphql.go
index 0ad1cfdb5f..a3f30c5e4e 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/graphql.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/graphql.go
@@ -160,7 +160,7 @@ func pruneIrrelevantGraphQLASTNodes(value ast.Value) ast.Value {
// Iterate over the array's elements, and do the following:
// - Drop any Nulls
// - Drop any empty object/array value (after running the pruner)
- for i := 0; i < x.Len(); i++ {
+ for i := range x.Len() {
vTerm := x.Elem(i)
switch v := vTerm.Value.(type) {
case ast.Null:
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/http.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/http.go
index 71c7c7d9eb..2d29c12f4d 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/http.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/http.go
@@ -10,6 +10,7 @@ import (
"crypto/tls"
"crypto/x509"
"encoding/json"
+ "errors"
"fmt"
"io"
"math"
@@ -519,7 +520,7 @@ func createHTTPRequest(bctx BuiltinContext, obj ast.Object) (*http.Request, *htt
var ok bool
customHeaders, ok = headersValInterface.(map[string]interface{})
if !ok {
- return nil, nil, fmt.Errorf("invalid type for headers key")
+ return nil, nil, errors.New("invalid type for headers key")
}
case "tls_insecure_skip_verify":
tlsInsecureSkipVerify, err = strconv.ParseBool(obj.Get(val).String())
@@ -606,7 +607,7 @@ func createHTTPRequest(bctx BuiltinContext, obj ast.Object) (*http.Request, *htt
}
if len(tlsCaCert) != 0 {
- tlsCaCert = bytes.Replace(tlsCaCert, []byte("\\n"), []byte("\n"), -1)
+ tlsCaCert = bytes.ReplaceAll(tlsCaCert, []byte("\\n"), []byte("\n"))
pool, err := addCACertsFromBytes(tlsConfig.RootCAs, tlsCaCert)
if err != nil {
return nil, nil, err
@@ -780,28 +781,17 @@ type httpSendCacheEntry struct {
// The httpSendCache is used for intra-query caching of http.send results.
type httpSendCache struct {
- entries *util.HashMap
+ entries *util.HasherMap[ast.Value, httpSendCacheEntry]
}
func newHTTPSendCache() *httpSendCache {
return &httpSendCache{
- entries: util.NewHashMap(valueEq, valueHash),
+ entries: util.NewHasherMap[ast.Value, httpSendCacheEntry](ast.ValueEqual),
}
}
-func valueHash(v util.T) int {
- return ast.StringTerm(v.(ast.Value).String()).Hash()
-}
-
-func valueEq(a, b util.T) bool {
- av := a.(ast.Value)
- bv := b.(ast.Value)
- return av.String() == bv.String()
-}
-
func (cache *httpSendCache) get(k ast.Value) *httpSendCacheEntry {
if v, ok := cache.entries.Get(k); ok {
- v := v.(httpSendCacheEntry)
return &v
}
return nil
@@ -990,7 +980,7 @@ func insertIntoHTTPSendInterQueryCache(bctx BuiltinContext, key ast.Value, resp
obj, ok := key.(ast.Object)
if !ok {
- return fmt.Errorf("interface conversion error")
+ return errors.New("interface conversion error")
}
cachingMode, err := getCachingMode(obj)
@@ -1336,7 +1326,7 @@ func parseCacheControlHeader(headers http.Header) map[string]string {
func getResponseHeaderDate(headers http.Header) (date time.Time, err error) {
dateHeader := headers.Get("date")
if dateHeader == "" {
- err = fmt.Errorf("no date header")
+ err = errors.New("no date header")
return
}
return http.ParseTime(dateHeader)
@@ -1614,7 +1604,7 @@ type forceCacheParams struct {
func newForceCacheParams(req ast.Object) (*forceCacheParams, error) {
term := req.Get(keyCache["force_cache_duration_seconds"])
if term == nil {
- return nil, fmt.Errorf("'force_cache' set but 'force_cache_duration_seconds' parameter is missing")
+ return nil, errors.New("'force_cache' set but 'force_cache_duration_seconds' parameter is missing")
}
forceCacheDurationSeconds := term.String()
@@ -1632,7 +1622,7 @@ func getRaiseErrorValue(req ast.Object) (bool, error) {
var ok bool
if v := req.Get(keyCache["raise_error"]); v != nil {
if result, ok = v.Value.(ast.Boolean); !ok {
- return false, fmt.Errorf("invalid value for raise_error field")
+ return false, errors.New("invalid value for raise_error field")
}
}
return bool(result), nil
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/input.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/input.go
index dccf94d89a..ec37b36451 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/input.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/input.go
@@ -5,12 +5,12 @@
package topdown
import (
- "fmt"
+ "errors"
"github.com/open-policy-agent/opa/v1/ast"
)
-var errBadPath = fmt.Errorf("bad document path")
+var errBadPath = errors.New("bad document path")
func mergeTermWithValues(exist *ast.Term, pairs [][2]*ast.Term) (*ast.Term, error) {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/json.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/json.go
index 5b7c414e40..aa1023d377 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/json.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/json.go
@@ -5,6 +5,7 @@
package topdown
import (
+ "errors"
"fmt"
"strconv"
"strings"
@@ -98,7 +99,7 @@ func jsonRemove(a *ast.Term, b *ast.Term) (*ast.Term, error) {
// When indexes are removed we shift left to close empty spots in the array
// as per the JSON patch spec.
newArray := ast.NewArray()
- for i := 0; i < aValue.Len(); i++ {
+ for i := range aValue.Len() {
v := aValue.Elem(i)
// recurse and add the diff of sub objects as needed
// Note: Keys in b will be strings for the index, eg path /a/1/b => {"a": {"1": {"b": null}}}
@@ -144,7 +145,7 @@ func getJSONPaths(operand ast.Value) ([]ast.Ref, error) {
switch v := operand.(type) {
case *ast.Array:
- for i := 0; i < v.Len(); i++ {
+ for i := range v.Len() {
filter, err := parsePath(v.Elem(i))
if err != nil {
return nil, err
@@ -263,7 +264,7 @@ func getPatch(o ast.Object) (jsonPatch, error) {
}
op, ok := opTerm.Value.(ast.String)
if !ok {
- return out, fmt.Errorf("attribute 'op' must be a string")
+ return out, errors.New("attribute 'op' must be a string")
}
out.op = string(op)
if _, found := validOps[out.op]; !found {
@@ -302,10 +303,10 @@ func getPatch(o ast.Object) (jsonPatch, error) {
func applyPatches(source *ast.Term, operations *ast.Array) (*ast.Term, error) {
et := edittree.NewEditTree(source)
- for i := 0; i < operations.Len(); i++ {
+ for i := range operations.Len() {
object, ok := operations.Elem(i).Value.(ast.Object)
if !ok {
- return nil, fmt.Errorf("must be an array of JSON-Patch objects, but at least one element is not an object")
+ return nil, errors.New("must be an array of JSON-Patch objects, but at least one element is not an object")
}
patch, err := getPatch(object)
if err != nil {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/numbers.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/numbers.go
index 855aef04b3..398040d7ae 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/numbers.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/numbers.go
@@ -5,6 +5,7 @@
package topdown
import (
+ "errors"
"fmt"
"math/big"
@@ -58,7 +59,7 @@ func builtinNumbersRangeStep(bctx BuiltinContext, operands []*ast.Term, iter fun
}
if step.Cmp(zero) <= 0 {
- return fmt.Errorf("numbers.range_step: step must be a positive number above zero")
+ return errors.New("numbers.range_step: step must be a positive number above zero")
}
ast, err := generateRange(bctx, x, y, step, "numbers.range_step")
@@ -104,7 +105,7 @@ func generateCheapRange(operands []*ast.Term, iter func(*ast.Term) error) error
}
if step <= 0 {
- return fmt.Errorf("numbers.range_step: step must be a positive number above zero")
+ return errors.New("numbers.range_step: step must be a positive number above zero")
}
terms := make([]*ast.Term, 0, y+1)
@@ -138,7 +139,7 @@ func generateRange(bctx BuiltinContext, x *big.Int, y *big.Int, step *big.Int, f
haltErr := Halt{
Err: &Error{
Code: CancelErr,
- Message: fmt.Sprintf("%s: timed out before generating all numbers in range", funcName),
+ Message: funcName + ": timed out before generating all numbers in range",
},
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/object.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/object.go
index 4db8fa8272..56313b5b56 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/object.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/object.go
@@ -21,6 +21,16 @@ func builtinObjectUnion(_ BuiltinContext, operands []*ast.Term, iter func(*ast.T
return err
}
+ if objA.Len() == 0 {
+ return iter(operands[1])
+ }
+ if objB.Len() == 0 {
+ return iter(operands[0])
+ }
+ if objA.Compare(objB) == 0 {
+ return iter(operands[0])
+ }
+
r := mergeWithOverwrite(objA, objB)
return iter(ast.NewTerm(r))
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/parse_bytes.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/parse_bytes.go
index dcc8e21997..cd36b87b17 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/parse_bytes.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/parse_bytes.go
@@ -109,7 +109,7 @@ func builtinNumBytes(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term
func formatString(s ast.String) string {
str := string(s)
lower := strings.ToLower(str)
- return strings.Replace(lower, "\"", "", -1)
+ return strings.ReplaceAll(lower, "\"", "")
}
// Splits the string into a number string à la "10" or "10.2" and a unit
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/parse_units.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/parse_units.go
index 47e459510a..44aec86299 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/parse_units.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/parse_units.go
@@ -50,7 +50,7 @@ func builtinUnits(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) e
// We remove escaped quotes from strings here to retain parity with units.parse_bytes.
s := string(raw)
- s = strings.Replace(s, "\"", "", -1)
+ s = strings.ReplaceAll(s, "\"", "")
if strings.Contains(s, " ") {
return errIncludesSpaces
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/query.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/query.go
index a008517cca..28af1e4bd4 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/query.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/query.go
@@ -62,6 +62,7 @@ type Query struct {
printHook print.Hook
tracingOpts tracing.Options
virtualCache VirtualCache
+ baseCache BaseCache
}
// Builtin represents a built-in function that queries can call.
@@ -314,6 +315,13 @@ func (q *Query) WithVirtualCache(vc VirtualCache) *Query {
return q
}
+// WithBaseCache sets the BaseCache to use during evaluation. This is
+// optional, and if not set, the default cache is used.
+func (q *Query) WithBaseCache(bc BaseCache) *Query {
+ q.baseCache = bc
+ return q
+}
+
// WithNondeterministicBuiltins causes non-deterministic builtins to be evaluated
// during partial evaluation. This is needed to pull in external data, or validate
// a JWT, during PE, so that the result informs what queries are returned.
@@ -353,6 +361,13 @@ func (q *Query) PartialRun(ctx context.Context) (partials []ast.Body, support []
vc = NewVirtualCache()
}
+ var bc BaseCache
+ if q.baseCache != nil {
+ bc = q.baseCache
+ } else {
+ bc = newBaseCache()
+ }
+
e := &eval{
ctx: ctx,
metrics: q.metrics,
@@ -366,7 +381,7 @@ func (q *Query) PartialRun(ctx context.Context) (partials []ast.Body, support []
bindings: b,
compiler: q.compiler,
store: q.store,
- baseCache: newBaseCache(),
+ baseCache: bc,
targetStack: newRefStack(),
txn: q.txn,
input: q.input,
@@ -544,6 +559,13 @@ func (q *Query) Iter(ctx context.Context, iter func(QueryResult) error) error {
vc = NewVirtualCache()
}
+ var bc BaseCache
+ if q.baseCache != nil {
+ bc = q.baseCache
+ } else {
+ bc = newBaseCache()
+ }
+
e := &eval{
ctx: ctx,
metrics: q.metrics,
@@ -557,7 +579,7 @@ func (q *Query) Iter(ctx context.Context, iter func(QueryResult) error) error {
bindings: newBindings(0, q.instr),
compiler: q.compiler,
store: q.store,
- baseCache: newBaseCache(),
+ baseCache: bc,
targetStack: newRefStack(),
txn: q.txn,
input: q.input,
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/regex.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/regex.go
index 6c1f6794cc..2c434dda87 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/regex.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/regex.go
@@ -260,6 +260,9 @@ func builtinRegexReplace(bctx BuiltinContext, operands []*ast.Term, iter func(*a
}
res := re.ReplaceAllString(string(base), string(value))
+ if res == string(base) {
+ return iter(operands[0])
+ }
return iter(ast.StringTerm(res))
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/regex_template.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/regex_template.go
index 4bcddc060b..a1d946fd59 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/regex_template.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/regex_template.go
@@ -45,7 +45,7 @@ import (
func delimiterIndices(s string, delimiterStart, delimiterEnd byte) ([]int, error) {
var level, idx int
idxs := make([]int, 0)
- for i := 0; i < len(s); i++ {
+ for i := range len(s) {
switch s[i] {
case delimiterStart:
if level++; level == 1 {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/resolver.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/resolver.go
index 170e6e6402..3620168874 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/resolver.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/resolver.go
@@ -48,7 +48,11 @@ func (t *resolverTrie) Resolve(e *eval, ref ast.Ref) (ast.Value, error) {
Input: e.input,
Metrics: e.metrics,
}
- e.traceWasm(e.query[e.index], &in.Ref)
+ if e.traceEnabled {
+ // avoid leaking pointer if trace is disabled
+ cpy := in.Ref
+ e.traceWasm(e.query[e.index], &cpy)
+ }
if e.data != nil {
return nil, errInScopeWithStmt
}
@@ -75,7 +79,10 @@ func (t *resolverTrie) Resolve(e *eval, ref ast.Ref) (ast.Value, error) {
func (t *resolverTrie) mktree(e *eval, in resolver.Input) (ast.Value, error) {
if t.r != nil {
- e.traceWasm(e.query[e.index], &in.Ref)
+ if e.traceEnabled {
+ cpy := in.Ref
+ e.traceWasm(e.query[e.index], &cpy)
+ }
if e.data != nil {
return nil, errInScopeWithStmt
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/runtime.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/runtime.go
index 9323225832..dc72fc5818 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/runtime.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/runtime.go
@@ -5,6 +5,7 @@
package topdown
import (
+ "errors"
"fmt"
"github.com/open-policy-agent/opa/v1/ast"
@@ -114,7 +115,7 @@ func removeCryptoKeys(x interface{}) error {
func removeKey(x interface{}, keys ...string) error {
val, ok := x.(map[string]interface{})
if !ok {
- return fmt.Errorf("type assertion error")
+ return errors.New("type assertion error")
}
for _, key := range keys {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/sets.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/sets.go
index b7566b8e6e..9df2d328a0 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/sets.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/sets.go
@@ -9,7 +9,7 @@ import (
"github.com/open-policy-agent/opa/v1/topdown/builtins"
)
-// Deprecated in v0.4.2 in favour of minus/infix "-" operation.
+// Deprecated: deprecated in v0.4.2 in favour of minus/infix "-" operation.
func builtinSetDiff(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
s1, err := builtins.SetOperand(operands[0].Value, 1)
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/strings.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/strings.go
index 929a18ea0a..654428c19a 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/strings.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/strings.go
@@ -5,6 +5,7 @@
package topdown
import (
+ "errors"
"fmt"
"math/big"
"sort"
@@ -103,11 +104,11 @@ func anyStartsWithAny(strs []string, prefixes []string) bool {
}
trie := patricia.NewTrie()
- for i := 0; i < len(strs); i++ {
+ for i := range strs {
trie.Insert([]byte(strs[i]), true)
}
- for i := 0; i < len(prefixes); i++ {
+ for i := range prefixes {
if trie.MatchSubtree([]byte(prefixes[i])) {
return true
}
@@ -160,7 +161,7 @@ func builtinConcat(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
switch b := operands[1].Value.(type) {
case *ast.Array:
var l int
- for i := 0; i < b.Len(); i++ {
+ for i := range b.Len() {
s, ok := b.Elem(i).Value.(ast.String)
if !ok {
return builtins.NewOperandElementErr(2, operands[1].Value, b.Elem(i).Value, "string")
@@ -173,14 +174,14 @@ func builtinConcat(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
}
strs = make([]string, 0, l)
- for i := 0; i < b.Len(); i++ {
+ for i := range b.Len() {
strs = append(strs, string(b.Elem(i).Value.(ast.String)))
}
case ast.Set:
var l int
terms := b.Slice()
- for i := 0; i < len(terms); i++ {
+ for i := range terms {
s, ok := terms[i].Value.(ast.String)
if !ok {
return builtins.NewOperandElementErr(2, operands[1].Value, terms[i].Value, "string")
@@ -193,7 +194,7 @@ func builtinConcat(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
}
strs = make([]string, 0, l)
- for i := 0; i < b.Len(); i++ {
+ for i := range b.Len() {
strs = append(strs, string(terms[i].Value.(ast.String)))
}
@@ -227,10 +228,13 @@ func builtinIndexOf(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
return err
}
if len(string(search)) == 0 {
- return fmt.Errorf("empty search character")
+ return errors.New("empty search character")
}
if isASCII(string(base)) && isASCII(string(search)) {
+ // this is a false positive in the indexAlloc rule that thinks
+ // we're converting byte arrays to strings
+ //nolint:gocritic
return iter(ast.InternedIntNumberTerm(strings.Index(string(base), string(search))))
}
@@ -262,7 +266,7 @@ func builtinIndexOfN(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term
return err
}
if len(string(search)) == 0 {
- return fmt.Errorf("empty search character")
+ return errors.New("empty search character")
}
baseRunes := []rune(string(base))
@@ -301,7 +305,7 @@ func builtinSubstring(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter
}
if startIndex < 0 {
- return fmt.Errorf("negative offset")
+ return errors.New("negative offset")
}
sbase := string(base)
@@ -320,6 +324,10 @@ func builtinSubstring(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter
return iter(ast.StringTerm(sbase[startIndex:]))
}
+ if startIndex == 0 && length >= len(sbase) {
+ return iter(operands[0])
+ }
+
upto := startIndex + length
if len(sbase) < upto {
upto = len(sbase)
@@ -327,6 +335,10 @@ func builtinSubstring(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter
return iter(ast.StringTerm(sbase[startIndex:upto]))
}
+ if startIndex == 0 && length >= utf8.RuneCountInString(sbase) {
+ return iter(operands[0])
+ }
+
runes := []rune(base)
if startIndex >= len(runes) {
@@ -348,7 +360,7 @@ func builtinSubstring(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter
}
func isASCII(s string) bool {
- for i := 0; i < len(s); i++ {
+ for i := range len(s) {
if s[i] > unicode.MaxASCII {
return false
}
@@ -474,7 +486,7 @@ func builtinReplace(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
return err
}
- replaced := strings.Replace(string(s), string(old), string(n), -1)
+ replaced := strings.ReplaceAll(string(s), string(old), string(n))
if replaced == string(s) {
return iter(operands[0])
}
@@ -637,7 +649,7 @@ func builtinSprintf(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
if s == "%d" && astArr.Len() == 1 {
if n, ok := astArr.Elem(0).Value.(ast.Number); ok {
if i, ok := n.Int(); ok {
- return iter(ast.StringTerm(strconv.Itoa(i)))
+ return iter(ast.InternedStringTerm(strconv.Itoa(i)))
}
}
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/test.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/test.go
new file mode 100644
index 0000000000..02958d2264
--- /dev/null
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/test.go
@@ -0,0 +1,30 @@
+// Copyright 2025 The OPA Authors. All rights reserved.
+// Use of this source code is governed by an Apache2
+// license that can be found in the LICENSE file.
+
+package topdown
+
+import "github.com/open-policy-agent/opa/v1/ast"
+
+const TestCaseOp Op = "TestCase"
+
+func builtinTestCase(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
+ e := &Event{
+ Op: TestCaseOp,
+ QueryID: bctx.QueryID,
+ Node: ast.NewExpr([]*ast.Term{
+ ast.NewTerm(ast.InternalTestCase.Ref()),
+ ast.NewTerm(operands[0].Value),
+ }),
+ }
+
+ for _, tracer := range bctx.QueryTracers {
+ tracer.TraceEvent(*e)
+ }
+
+ return iter(ast.BooleanTerm(true))
+}
+
+func init() {
+ RegisterBuiltinFunc(ast.InternalTestCase.Name, builtinTestCase)
+}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/time.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/time.go
index 1c5ddaa6f4..8d2d9b27a2 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/time.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/time.go
@@ -6,7 +6,7 @@ package topdown
import (
"encoding/json"
- "fmt"
+ "errors"
"math"
"math/big"
"strconv"
@@ -29,7 +29,7 @@ var maxDateAllowedForNsConversion = time.Unix(0, math.MaxInt64)
func toSafeUnixNano(t time.Time, iter func(*ast.Term) error) error {
if t.Before(minDateAllowedForNsConversion) || t.After(maxDateAllowedForNsConversion) {
- return fmt.Errorf("time outside of valid range")
+ return errors.New("time outside of valid range")
}
return iter(ast.NewTerm(ast.Number(int64ToJSONNumber(t.UnixNano()))))
@@ -313,7 +313,7 @@ func tzTime(a ast.Value) (t time.Time, lay string, err error) {
f := builtins.NumberToFloat(value)
i64, acc := f.Int64()
if acc != big.Exact {
- return time.Time{}, layout, fmt.Errorf("timestamp too big")
+ return time.Time{}, layout, errors.New("timestamp too big")
}
t = time.Unix(0, i64).In(loc)
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/tokens.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/tokens.go
index b44c5a253d..2050e82d63 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/tokens.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/tokens.go
@@ -227,7 +227,7 @@ func builtinJWTVerifyRSA(bctx BuiltinContext, jwt ast.Value, keyStr ast.Value, h
return builtinJWTVerify(bctx, jwt, keyStr, hasher, func(publicKey interface{}, digest []byte, signature []byte) error {
publicKeyRsa, ok := publicKey.(*rsa.PublicKey)
if !ok {
- return fmt.Errorf("incorrect public key type")
+ return errors.New("incorrect public key type")
}
return verify(publicKeyRsa, digest, signature)
})
@@ -268,7 +268,7 @@ func verifyES(publicKey interface{}, digest []byte, signature []byte) (err error
}()
publicKeyEcdsa, ok := publicKey.(*ecdsa.PublicKey)
if !ok {
- return fmt.Errorf("incorrect public key type")
+ return errors.New("incorrect public key type")
}
r, s := &big.Int{}, &big.Int{}
n := len(signature) / 2
@@ -277,7 +277,7 @@ func verifyES(publicKey interface{}, digest []byte, signature []byte) (err error
if ecdsa.Verify(publicKeyEcdsa, digest, r, s) {
return nil
}
- return fmt.Errorf("ECDSA signature verification error")
+ return errors.New("ECDSA signature verification error")
}
type verificationKey struct {
@@ -292,7 +292,7 @@ type verificationKey struct {
func getKeysFromCertOrJWK(certificate string) ([]verificationKey, error) {
if block, rest := pem.Decode([]byte(certificate)); block != nil {
if len(rest) > 0 {
- return nil, fmt.Errorf("extra data after a PEM certificate block")
+ return nil, errors.New("extra data after a PEM certificate block")
}
if block.Type == blockTypeCertificate {
@@ -312,7 +312,7 @@ func getKeysFromCertOrJWK(certificate string) ([]verificationKey, error) {
return []verificationKey{{key: key}}, nil
}
- return nil, fmt.Errorf("failed to extract a Key from the PEM certificate")
+ return nil, errors.New("failed to extract a Key from the PEM certificate")
}
jwks, err := jwk.ParseString(certificate)
@@ -533,7 +533,7 @@ var tokenConstraintTypes = map[string]tokenConstraintHandler{
func tokenConstraintCert(value ast.Value, constraints *tokenConstraints) error {
s, ok := value.(ast.String)
if !ok {
- return fmt.Errorf("cert constraint: must be a string")
+ return errors.New("cert constraint: must be a string")
}
keys, err := getKeysFromCertOrJWK(string(s))
@@ -558,14 +558,14 @@ func tokenConstraintTime(value ast.Value, constraints *tokenConstraints) error {
func timeFromValue(value ast.Value) (float64, error) {
time, ok := value.(ast.Number)
if !ok {
- return 0, fmt.Errorf("token time constraint: must be a number")
+ return 0, errors.New("token time constraint: must be a number")
}
timeFloat, ok := time.Float64()
if !ok {
- return 0, fmt.Errorf("token time constraint: unvalid float64")
+ return 0, errors.New("token time constraint: unvalid float64")
}
if timeFloat < 0 {
- return 0, fmt.Errorf("token time constraint: must not be negative")
+ return 0, errors.New("token time constraint: must not be negative")
}
return timeFloat, nil
}
@@ -616,10 +616,10 @@ func (constraints *tokenConstraints) validate() error {
keys++
}
if keys > 1 {
- return fmt.Errorf("duplicate key constraints")
+ return errors.New("duplicate key constraints")
}
if keys < 1 {
- return fmt.Errorf("no key constraint")
+ return errors.New("no key constraint")
}
return nil
}
@@ -733,7 +733,7 @@ var errSignatureNotVerified = errors.New("signature not verified")
func verifyHMAC(key interface{}, hash crypto.Hash, payload []byte, signature []byte) error {
macKey, ok := key.([]byte)
if !ok {
- return fmt.Errorf("incorrect symmetric key type")
+ return errors.New("incorrect symmetric key type")
}
mac := hmac.New(hash.New, macKey)
if _, err := mac.Write(payload); err != nil {
@@ -756,7 +756,7 @@ func verifyAsymmetric(verify tokenVerifyAsymmetricFunction) tokenVerifyFunction
func verifyRSAPKCS(key interface{}, hash crypto.Hash, digest []byte, signature []byte) error {
publicKeyRsa, ok := key.(*rsa.PublicKey)
if !ok {
- return fmt.Errorf("incorrect public key type")
+ return errors.New("incorrect public key type")
}
if err := rsa.VerifyPKCS1v15(publicKeyRsa, hash, digest, signature); err != nil {
return errSignatureNotVerified
@@ -767,7 +767,7 @@ func verifyRSAPKCS(key interface{}, hash crypto.Hash, digest []byte, signature [
func verifyRSAPSS(key interface{}, hash crypto.Hash, digest []byte, signature []byte) error {
publicKeyRsa, ok := key.(*rsa.PublicKey)
if !ok {
- return fmt.Errorf("incorrect public key type")
+ return errors.New("incorrect public key type")
}
if err := rsa.VerifyPSS(publicKeyRsa, hash, digest, signature, nil); err != nil {
return errSignatureNotVerified
@@ -783,7 +783,7 @@ func verifyECDSA(key interface{}, _ crypto.Hash, digest []byte, signature []byte
}()
publicKeyEcdsa, ok := key.(*ecdsa.PublicKey)
if !ok {
- return fmt.Errorf("incorrect public key type")
+ return errors.New("incorrect public key type")
}
r, s := &big.Int{}, &big.Int{}
n := len(signature) / 2
@@ -832,19 +832,19 @@ var tokenHeaderTypes = map[string]tokenHeaderHandler{
func tokenHeaderCrit(header *tokenHeader, value ast.Value) error {
v, ok := value.(*ast.Array)
if !ok {
- return fmt.Errorf("crit: must be a list")
+ return errors.New("crit: must be a list")
}
header.crit = map[string]bool{}
_ = v.Iter(func(elem *ast.Term) error {
tv, ok := elem.Value.(ast.String)
if !ok {
- return fmt.Errorf("crit: must be a list of strings")
+ return errors.New("crit: must be a list of strings")
}
header.crit[string(tv)] = true
return nil
})
if len(header.crit) == 0 {
- return fmt.Errorf("crit: must be a nonempty list") // 'MUST NOT' use the empty list
+ return errors.New("crit: must be a nonempty list") // 'MUST NOT' use the empty list
}
return nil
}
@@ -903,7 +903,7 @@ func commonBuiltinJWTEncodeSign(bctx BuiltinContext, inputHeaders, jwsPayload, j
return err
}
if jwk.GetKeyTypeFromKey(key) != keys.Keys[0].GetKeyType() {
- return fmt.Errorf("JWK derived key type and keyType parameter do not match")
+ return errors.New("JWK derived key type and keyType parameter do not match")
}
standardHeaders := &jws.StandardHeaders{}
@@ -914,11 +914,11 @@ func commonBuiltinJWTEncodeSign(bctx BuiltinContext, inputHeaders, jwsPayload, j
}
alg := standardHeaders.GetAlgorithm()
if alg == jwa.Unsupported {
- return fmt.Errorf("unknown signature algorithm")
+ return errors.New("unknown signature algorithm")
}
if (standardHeaders.Type == "" || standardHeaders.Type == headerJwt) && !json.Valid([]byte(jwsPayload)) {
- return fmt.Errorf("type is JWT but payload is not JSON")
+ return errors.New("type is JWT but payload is not JSON")
}
// process payload and sign
@@ -1105,7 +1105,7 @@ func builtinJWTDecodeVerify(bctx BuiltinContext, operands []*ast.Term, iter func
}
// RFC7159 7.2 #8 and 5.2 cty
- if strings.ToUpper(header.cty) == headerJwt {
+ if strings.EqualFold(header.cty, headerJwt) {
// Nested JWT, go round again with payload as first argument
a = p.Value
continue
@@ -1149,28 +1149,28 @@ func builtinJWTDecodeVerify(bctx BuiltinContext, operands []*ast.Term, iter func
}
// RFC7159 4.1.4 exp
if exp := payload.Get(jwtExpKey); exp != nil {
- switch exp.Value.(type) {
+ switch v := exp.Value.(type) {
case ast.Number:
// constraints.time is in nanoseconds but exp Value is in seconds
compareTime := ast.FloatNumberTerm(constraints.time / 1000000000)
- if ast.Compare(compareTime, exp.Value.(ast.Number)) != -1 {
+ if ast.Compare(compareTime, v) != -1 {
return iter(unverified)
}
default:
- return fmt.Errorf("exp value must be a number")
+ return errors.New("exp value must be a number")
}
}
// RFC7159 4.1.5 nbf
if nbf := payload.Get(jwtNbfKey); nbf != nil {
- switch nbf.Value.(type) {
+ switch v := nbf.Value.(type) {
case ast.Number:
// constraints.time is in nanoseconds but nbf Value is in seconds
compareTime := ast.FloatNumberTerm(constraints.time / 1000000000)
- if ast.Compare(compareTime, nbf.Value.(ast.Number)) == -1 {
+ if ast.Compare(compareTime, v) == -1 {
return iter(unverified)
}
default:
- return fmt.Errorf("nbf value must be a number")
+ return errors.New("nbf value must be a number")
}
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/trace.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/trace.go
index 1c45ef23ba..85143bf711 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/trace.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/trace.go
@@ -373,10 +373,6 @@ func exprLocalVars(e *Event) *ast.ValueMap {
vars := ast.NewValueMap()
findVars := func(term *ast.Term) bool {
- //if r, ok := term.Value.(ast.Ref); ok {
- // fmt.Printf("ref: %v\n", r)
- // //return true
- //}
if name, ok := term.Value.(ast.Var); ok {
if meta, ok := e.LocalMetadata[name]; ok {
if val := e.Locals.Get(name); val != nil {
@@ -867,7 +863,7 @@ func printArrows(w *bytes.Buffer, l []varInfo, printValueAt int) {
spaces = (col - prevCol) - 1
}
- for j := 0; j < spaces; j++ {
+ for j := range spaces {
tab := false
for _, t := range info.exprLoc.Tabs {
if t == j+prevCol+1 {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/type_name.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/type_name.go
index fc3de48793..a611e8f30e 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/type_name.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/type_name.go
@@ -5,7 +5,7 @@
package topdown
import (
- "fmt"
+ "errors"
"github.com/open-policy-agent/opa/v1/ast"
)
@@ -38,7 +38,7 @@ func builtinTypeName(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term
return iter(setStringTerm)
}
- return fmt.Errorf("illegal value")
+ return errors.New("illegal value")
}
func init() {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/walk.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/walk.go
index f5dcf5c9f1..43aa29c97a 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/topdown/walk.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/walk.go
@@ -50,7 +50,7 @@ func walk(filter, path *ast.Array, input *ast.Term, iter func(*ast.Term) error)
switch v := input.Value.(type) {
case *ast.Array:
- for i := 0; i < v.Len(); i++ {
+ for i := range v.Len() {
if err := walk(filter, pathAppend(path, ast.InternedIntNumberTerm(i)), v.Elem(i), iter); err != nil {
return err
}
@@ -94,7 +94,7 @@ func walkNoPath(input *ast.Term, iter func(*ast.Term) error) error {
}
}
case *ast.Array:
- for i := 0; i < v.Len(); i++ {
+ for i := range v.Len() {
inputArray.Set(1, v.Elem(i))
if err := walkNoPath(input, iter); err != nil {
return err
diff --git a/vendor/github.com/open-policy-agent/opa/v1/types/decode.go b/vendor/github.com/open-policy-agent/opa/v1/types/decode.go
index 3fcc01664c..e3e1e98370 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/types/decode.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/types/decode.go
@@ -31,13 +31,13 @@ func Unmarshal(bs []byte) (result Type, err error) {
if err = util.UnmarshalJSON(bs, &hint); err == nil {
switch hint.Type {
case typeNull:
- result = NewNull()
+ result = Nl
case typeBoolean:
- result = NewBoolean()
+ result = B
case typeNumber:
- result = NewNumber()
+ result = N
case typeString:
- result = NewString()
+ result = S
case typeArray:
var arr rawarray
if err = util.UnmarshalJSON(bs, &arr); err == nil {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/types/types.go b/vendor/github.com/open-policy-agent/opa/v1/types/types.go
index 0705210875..1bf4d6aed0 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/types/types.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/types/types.go
@@ -8,7 +8,9 @@ package types
import (
"encoding/json"
+ "errors"
"fmt"
+ "slices"
"sort"
"strings"
@@ -48,6 +50,8 @@ func NewNull() Null {
return Null{}
}
+var Nl Type = NewNull()
+
// NamedType represents a type alias with an arbitrary name and description.
// This is useful for generating documentation for built-in functions.
type NamedType struct {
@@ -113,7 +117,7 @@ func (t Null) String() string {
type Boolean struct{}
// B represents an instance of the boolean type.
-var B = NewBoolean()
+var B Type = NewBoolean()
// NewBoolean returns a new Boolean type.
func NewBoolean() Boolean {
@@ -136,7 +140,7 @@ func (t Boolean) String() string {
type String struct{}
// S represents an instance of the string type.
-var S = NewString()
+var S Type = NewString()
// NewString returns a new String type.
func NewString() String {
@@ -158,7 +162,7 @@ func (String) String() string {
type Number struct{}
// N represents an instance of the number type.
-var N = NewNumber()
+var N Type = NewNumber()
// NewNumber returns a new Number type.
func NewNumber() Number {
@@ -252,6 +256,13 @@ type Set struct {
of Type
}
+// Boxed set types.
+var (
+ SetOfAny Type = NewSet(A)
+ SetOfStr Type = NewSet(S)
+ SetOfNum Type = NewSet(N)
+)
+
// NewSet returns a new Set type.
func NewSet(of Type) *Set {
return &Set{
@@ -339,9 +350,8 @@ type Object struct {
// NewObject returns a new Object type.
func NewObject(static []*StaticProperty, dynamic *DynamicProperty) *Object {
- sort.Slice(static, func(i, j int) bool {
- cmp := util.Compare(static[i].Key, static[j].Key)
- return cmp == -1
+ slices.SortFunc(static, func(a, b *StaticProperty) int {
+ return util.Compare(a.Key, b.Key)
})
return &Object{
static: static,
@@ -504,7 +514,7 @@ func mergeObjects(a, b *Object) *Object {
type Any []Type
// A represents the superset of all types.
-var A = NewAny()
+var A Type = NewAny()
// NewAny returns a new Any type.
func NewAny(of ...Type) Any {
@@ -768,7 +778,7 @@ func (t *Function) UnmarshalJSON(bs []byte) error {
f, ok := tpe.(*Function)
if !ok {
- return fmt.Errorf("invalid type")
+ return errors.New("invalid type")
}
*t = *f
@@ -850,7 +860,7 @@ func Compare(a, b Type) int {
} else if x < y {
return -1
}
- switch a.(type) {
+ switch a.(type) { //nolint:gocritic
case nil, Null, Boolean, Number, String:
return 0
case *Array:
@@ -892,7 +902,7 @@ func Compare(a, b Type) int {
minLen = lenStaticB
}
- for i := 0; i < minLen; i++ {
+ for i := range minLen {
if cmp := util.Compare(objA.static[i].Key, objB.static[i].Key); cmp != 0 {
return cmp
}
@@ -931,7 +941,7 @@ func Compare(a, b Type) int {
} else if len(fA.args) > len(fB.args) {
return 1
}
- for i := 0; i < len(fA.args); i++ {
+ for i := range len(fA.args) {
if cmp := Compare(fA.args[i], fB.args[i]); cmp != 0 {
return cmp
}
@@ -1129,7 +1139,7 @@ func Nil(a Type) bool {
func TypeOf(x interface{}) Type {
switch x := x.(type) {
case nil:
- return NewNull()
+ return Nl
case bool:
return B
case string:
@@ -1164,7 +1174,7 @@ func TypeOf(x interface{}) Type {
type typeSlice []Type
func (s typeSlice) Less(i, j int) bool { return Compare(s[i], s[j]) < 0 }
-func (s typeSlice) Swap(i, j int) { x := s[i]; s[i] = s[j]; s[j] = x }
+func (s typeSlice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s typeSlice) Len() int { return len(s) }
func typeSliceCompare(a, b []Type) int {
@@ -1172,7 +1182,7 @@ func typeSliceCompare(a, b []Type) int {
if len(b) < minLen {
minLen = len(b)
}
- for i := 0; i < minLen; i++ {
+ for i := range minLen {
if cmp := Compare(a[i], b[i]); cmp != 0 {
return cmp
}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/util/backoff.go b/vendor/github.com/open-policy-agent/opa/v1/util/backoff.go
index 36d57f14e2..1558f0cff8 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/util/backoff.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/util/backoff.go
@@ -9,17 +9,6 @@ import (
"time"
)
-func init() {
- // NOTE(sr): We don't need good random numbers here; it's used for jittering
- // the backup timing a bit. But anyways, let's make it random enough; without
- // a call to rand.Seed() we'd get the same stream of numbers for each program
- // run. (Or not, if some other packages happens to seed the global randomness
- // source.)
- // Note(philipc): rand.Seed() was deprecated in Go 1.20, so we've switched to
- // using the recommended rand.New(rand.NewSource(seed)) style.
- rand.New(rand.NewSource(time.Now().UnixNano()))
-}
-
// DefaultBackoff returns a delay with an exponential backoff based on the
// number of retries.
func DefaultBackoff(base, maxNS float64, retries int) time.Duration {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/util/compare.go b/vendor/github.com/open-policy-agent/opa/v1/util/compare.go
index 8775a603dd..2569375b19 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/util/compare.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/util/compare.go
@@ -82,7 +82,7 @@ func Compare(a, b interface{}) int {
if bLen < minLen {
minLen = bLen
}
- for i := 0; i < minLen; i++ {
+ for i := range minLen {
cmp := Compare(a[i], b[i])
if cmp != 0 {
return cmp
@@ -106,7 +106,7 @@ func Compare(a, b interface{}) int {
if bLen < minLen {
minLen = bLen
}
- for i := 0; i < minLen; i++ {
+ for i := range minLen {
if aKeys[i] < bKeys[i] {
return -1
} else if bKeys[i] < aKeys[i] {
diff --git a/vendor/github.com/open-policy-agent/opa/v1/util/hashmap.go b/vendor/github.com/open-policy-agent/opa/v1/util/hashmap.go
index a6c584c570..cf6a385f41 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/util/hashmap.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/util/hashmap.go
@@ -12,6 +12,10 @@ import (
// T is a concise way to refer to T.
type T interface{}
+type Hasher interface {
+ Hash() int
+}
+
type hashEntry[K any, V any] struct {
k K
v V
@@ -177,3 +181,91 @@ func (h *TypedHashMap[K, V]) Update(other *TypedHashMap[K, V]) *TypedHashMap[K,
})
return updated
}
+
+type hasherEntry[K Hasher, V any] struct {
+ k K
+ v V
+ next *hasherEntry[K, V]
+}
+
+// HasherMap represents a simpler version of TypedHashMap that uses Hasher's
+// for keys, and requires only an equality function for keys. Ideally we'd have
+// and Equal method for all key types too, and we could get rid of that requirement.
+type HasherMap[K Hasher, V any] struct {
+ keq func(K, K) bool
+ table map[int]*hasherEntry[K, V]
+ size int
+}
+
+// NewHasherMap returns a new empty HasherMap.
+func NewHasherMap[K Hasher, V any](keq func(K, K) bool) *HasherMap[K, V] {
+ return &HasherMap[K, V]{
+ keq: keq,
+ table: make(map[int]*hasherEntry[K, V]),
+ size: 0,
+ }
+}
+
+// Get returns the value for k.
+func (h *HasherMap[K, V]) Get(k K) (V, bool) {
+ for entry := h.table[k.Hash()]; entry != nil; entry = entry.next {
+ if h.keq(entry.k, k) {
+ return entry.v, true
+ }
+ }
+ var zero V
+ return zero, false
+}
+
+// Put inserts a key/value pair into this HashMap. If the key is already present, the existing
+// value is overwritten.
+func (h *HasherMap[K, V]) Put(k K, v V) {
+ hash := k.Hash()
+ head := h.table[hash]
+ for entry := head; entry != nil; entry = entry.next {
+ if h.keq(entry.k, k) {
+ entry.v = v
+ return
+ }
+ }
+ h.table[hash] = &hasherEntry[K, V]{k: k, v: v, next: head}
+ h.size++
+}
+
+// Delete removes the key k.
+func (h *HasherMap[K, V]) Delete(k K) {
+ hash := k.Hash()
+ var prev *hasherEntry[K, V]
+ for entry := h.table[hash]; entry != nil; entry = entry.next {
+ if h.keq(entry.k, k) {
+ if prev != nil {
+ prev.next = entry.next
+ } else {
+ h.table[hash] = entry.next
+ }
+ h.size--
+ return
+ }
+ prev = entry
+ }
+}
+
+// Iter invokes the iter function for each element in the HasherMap.
+// If the iter function returns true, iteration stops and the return value is true.
+// If the iter function never returns true, iteration proceeds through all elements
+// and the return value is false.
+func (h *HasherMap[K, V]) Iter(iter func(K, V) bool) bool {
+ for _, entry := range h.table {
+ for ; entry != nil; entry = entry.next {
+ if iter(entry.k, entry.v) {
+ return true
+ }
+ }
+ }
+ return false
+}
+
+// Len returns the current size of this HashMap.
+func (h *HasherMap[K, V]) Len() int {
+ return h.size
+}
diff --git a/vendor/github.com/open-policy-agent/opa/v1/util/performance.go b/vendor/github.com/open-policy-agent/opa/v1/util/performance.go
index b24b49a1d7..467fe766bb 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/util/performance.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/util/performance.go
@@ -21,7 +21,7 @@ func NewPtrSlice[T any](n int) []*T {
func GrowPtrSlice[T any](s []*T, n int) []*T {
s = slices.Grow(s, n)
p := make([]T, n)
- for i := 0; i < n; i++ {
+ for i := range n {
s = append(s, &p[i])
}
return s
diff --git a/vendor/github.com/open-policy-agent/opa/v1/util/read_gzip_body.go b/vendor/github.com/open-policy-agent/opa/v1/util/read_gzip_body.go
index 74bca7263a..b979d0bd0f 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/util/read_gzip_body.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/util/read_gzip_body.go
@@ -4,7 +4,7 @@ import (
"bytes"
"compress/gzip"
"encoding/binary"
- "fmt"
+ "errors"
"io"
"net/http"
"strings"
@@ -58,7 +58,7 @@ func ReadMaybeCompressedBody(r *http.Request) ([]byte, error) {
// earlier in DecodingLimitHandler.
sizeTrailerField := binary.LittleEndian.Uint32(content.Bytes()[content.Len()-4:])
if sizeTrailerField > uint32(gzipMaxLength) {
- return content.Bytes(), fmt.Errorf("gzip payload too large")
+ return content.Bytes(), errors.New("gzip payload too large")
}
// Pull a gzip decompressor from the pool, and assign it to the current
// buffer, using Reset(). Later, return it back to the pool for another
diff --git a/vendor/github.com/open-policy-agent/opa/v1/util/wait.go b/vendor/github.com/open-policy-agent/opa/v1/util/wait.go
index b70ab6fcf9..b1ea84fd53 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/util/wait.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/util/wait.go
@@ -5,7 +5,7 @@
package util
import (
- "fmt"
+ "errors"
"time"
)
@@ -24,7 +24,7 @@ func WaitFunc(fun func() bool, interval, timeout time.Duration) error {
for {
select {
case <-timer.C:
- return fmt.Errorf("timeout")
+ return errors.New("timeout")
case <-ticker.C:
if fun() {
return nil
diff --git a/vendor/github.com/open-policy-agent/opa/v1/version/version.go b/vendor/github.com/open-policy-agent/opa/v1/version/version.go
index 0af828f888..e6d052d3be 100644
--- a/vendor/github.com/open-policy-agent/opa/v1/version/version.go
+++ b/vendor/github.com/open-policy-agent/opa/v1/version/version.go
@@ -11,7 +11,7 @@ import (
)
// Version is the canonical version of OPA.
-var Version = "1.1.0"
+var Version = "1.2.0"
// GoVersion is the version of Go this was built with
var GoVersion = runtime.Version()
@@ -44,6 +44,6 @@ func init() {
}
}
if dirty {
- Vcs = Vcs + "-dirty"
+ Vcs += "-dirty"
}
}
diff --git a/vendor/github.com/spf13/cobra/README.md b/vendor/github.com/spf13/cobra/README.md
index 6444f4b7f6..71757151c3 100644
--- a/vendor/github.com/spf13/cobra/README.md
+++ b/vendor/github.com/spf13/cobra/README.md
@@ -1,4 +1,5 @@
-
+
+
Cobra is a library for creating powerful modern CLI applications.
@@ -105,7 +106,7 @@ go install github.com/spf13/cobra-cli@latest
For complete details on using the Cobra-CLI generator, please read [The Cobra Generator README](https://github.com/spf13/cobra-cli/blob/main/README.md)
-For complete details on using the Cobra library, please read the [The Cobra User Guide](site/content/user_guide.md).
+For complete details on using the Cobra library, please read [The Cobra User Guide](site/content/user_guide.md).
# License
diff --git a/vendor/github.com/spf13/cobra/active_help.go b/vendor/github.com/spf13/cobra/active_help.go
index 25c30e3ccc..b3e2dadfed 100644
--- a/vendor/github.com/spf13/cobra/active_help.go
+++ b/vendor/github.com/spf13/cobra/active_help.go
@@ -35,7 +35,7 @@ const (
// This function can be called multiple times before and/or after completions are added to
// the array. Each time this function is called with the same array, the new
// ActiveHelp line will be shown below the previous ones when completion is triggered.
-func AppendActiveHelp(compArray []string, activeHelpStr string) []string {
+func AppendActiveHelp(compArray []Completion, activeHelpStr string) []Completion {
return append(compArray, fmt.Sprintf("%s%s", activeHelpMarker, activeHelpStr))
}
diff --git a/vendor/github.com/spf13/cobra/bash_completionsV2.go b/vendor/github.com/spf13/cobra/bash_completionsV2.go
index 1cce5c329c..d2397aa366 100644
--- a/vendor/github.com/spf13/cobra/bash_completionsV2.go
+++ b/vendor/github.com/spf13/cobra/bash_completionsV2.go
@@ -146,7 +146,7 @@ __%[1]s_process_completion_results() {
if (((directive & shellCompDirectiveFilterFileExt) != 0)); then
# File extension filtering
- local fullFilter filter filteringCmd
+ local fullFilter="" filter filteringCmd
# Do not use quotes around the $completions variable or else newline
# characters will be kept.
@@ -177,20 +177,71 @@ __%[1]s_process_completion_results() {
__%[1]s_handle_special_char "$cur" =
# Print the activeHelp statements before we finish
- if ((${#activeHelp[*]} != 0)); then
- printf "\n";
- printf "%%s\n" "${activeHelp[@]}"
- printf "\n"
+ __%[1]s_handle_activeHelp
+}
- # The prompt format is only available from bash 4.4.
- # We test if it is available before using it.
- if (x=${PS1@P}) 2> /dev/null; then
- printf "%%s" "${PS1@P}${COMP_LINE[@]}"
- else
- # Can't print the prompt. Just print the
- # text the user had typed, it is workable enough.
- printf "%%s" "${COMP_LINE[@]}"
+__%[1]s_handle_activeHelp() {
+ # Print the activeHelp statements
+ if ((${#activeHelp[*]} != 0)); then
+ if [ -z $COMP_TYPE ]; then
+ # Bash v3 does not set the COMP_TYPE variable.
+ printf "\n";
+ printf "%%s\n" "${activeHelp[@]}"
+ printf "\n"
+ __%[1]s_reprint_commandLine
+ return
fi
+
+ # Only print ActiveHelp on the second TAB press
+ if [ $COMP_TYPE -eq 63 ]; then
+ printf "\n"
+ printf "%%s\n" "${activeHelp[@]}"
+
+ if ((${#COMPREPLY[*]} == 0)); then
+ # When there are no completion choices from the program, file completion
+ # may kick in if the program has not disabled it; in such a case, we want
+ # to know if any files will match what the user typed, so that we know if
+ # there will be completions presented, so that we know how to handle ActiveHelp.
+ # To find out, we actually trigger the file completion ourselves;
+ # the call to _filedir will fill COMPREPLY if files match.
+ if (((directive & shellCompDirectiveNoFileComp) == 0)); then
+ __%[1]s_debug "Listing files"
+ _filedir
+ fi
+ fi
+
+ if ((${#COMPREPLY[*]} != 0)); then
+ # If there are completion choices to be shown, print a delimiter.
+ # Re-printing the command-line will automatically be done
+ # by the shell when it prints the completion choices.
+ printf -- "--"
+ else
+ # When there are no completion choices at all, we need
+ # to re-print the command-line since the shell will
+ # not be doing it itself.
+ __%[1]s_reprint_commandLine
+ fi
+ elif [ $COMP_TYPE -eq 37 ] || [ $COMP_TYPE -eq 42 ]; then
+ # For completion type: menu-complete/menu-complete-backward and insert-completions
+ # the completions are immediately inserted into the command-line, so we first
+ # print the activeHelp message and reprint the command-line since the shell won't.
+ printf "\n"
+ printf "%%s\n" "${activeHelp[@]}"
+
+ __%[1]s_reprint_commandLine
+ fi
+ fi
+}
+
+__%[1]s_reprint_commandLine() {
+ # The prompt format is only available from bash 4.4.
+ # We test if it is available before using it.
+ if (x=${PS1@P}) 2> /dev/null; then
+ printf "%%s" "${PS1@P}${COMP_LINE[@]}"
+ else
+ # Can't print the prompt. Just print the
+ # text the user had typed, it is workable enough.
+ printf "%%s" "${COMP_LINE[@]}"
fi
}
@@ -201,6 +252,8 @@ __%[1]s_extract_activeHelp() {
local endIndex=${#activeHelpMarker}
while IFS='' read -r comp; do
+ [[ -z $comp ]] && continue
+
if [[ ${comp:0:endIndex} == $activeHelpMarker ]]; then
comp=${comp:endIndex}
__%[1]s_debug "ActiveHelp found: $comp"
@@ -223,16 +276,21 @@ __%[1]s_handle_completion_types() {
# If the user requested inserting one completion at a time, or all
# completions at once on the command-line we must remove the descriptions.
# https://github.com/spf13/cobra/issues/1508
- local tab=$'\t' comp
- while IFS='' read -r comp; do
- [[ -z $comp ]] && continue
- # Strip any description
- comp=${comp%%%%$tab*}
- # Only consider the completions that match
- if [[ $comp == "$cur"* ]]; then
- COMPREPLY+=("$comp")
- fi
- done < <(printf "%%s\n" "${completions[@]}")
+
+ # If there are no completions, we don't need to do anything
+ (( ${#completions[@]} == 0 )) && return 0
+
+ local tab=$'\t'
+
+ # Strip any description and escape the completion to handled special characters
+ IFS=$'\n' read -ra completions -d '' < <(printf "%%q\n" "${completions[@]%%%%$tab*}")
+
+ # Only consider the completions that match
+ IFS=$'\n' read -ra COMPREPLY -d '' < <(IFS=$'\n'; compgen -W "${completions[*]}" -- "${cur}")
+
+ # compgen looses the escaping so we need to escape all completions again since they will
+ # all be inserted on the command-line.
+ IFS=$'\n' read -ra COMPREPLY -d '' < <(printf "%%q\n" "${COMPREPLY[@]}")
;;
*)
@@ -243,11 +301,25 @@ __%[1]s_handle_completion_types() {
}
__%[1]s_handle_standard_completion_case() {
- local tab=$'\t' comp
+ local tab=$'\t'
+
+ # If there are no completions, we don't need to do anything
+ (( ${#completions[@]} == 0 )) && return 0
# Short circuit to optimize if we don't have descriptions
if [[ "${completions[*]}" != *$tab* ]]; then
- IFS=$'\n' read -ra COMPREPLY -d '' < <(compgen -W "${completions[*]}" -- "$cur")
+ # First, escape the completions to handle special characters
+ IFS=$'\n' read -ra completions -d '' < <(printf "%%q\n" "${completions[@]}")
+ # Only consider the completions that match what the user typed
+ IFS=$'\n' read -ra COMPREPLY -d '' < <(IFS=$'\n'; compgen -W "${completions[*]}" -- "${cur}")
+
+ # compgen looses the escaping so, if there is only a single completion, we need to
+ # escape it again because it will be inserted on the command-line. If there are multiple
+ # completions, we don't want to escape them because they will be printed in a list
+ # and we don't want to show escape characters in that list.
+ if (( ${#COMPREPLY[@]} == 1 )); then
+ COMPREPLY[0]=$(printf "%%q" "${COMPREPLY[0]}")
+ fi
return 0
fi
@@ -256,23 +328,39 @@ __%[1]s_handle_standard_completion_case() {
# Look for the longest completion so that we can format things nicely
while IFS='' read -r compline; do
[[ -z $compline ]] && continue
- # Strip any description before checking the length
- comp=${compline%%%%$tab*}
+
+ # Before checking if the completion matches what the user typed,
+ # we need to strip any description and escape the completion to handle special
+ # characters because those escape characters are part of what the user typed.
+ # Don't call "printf" in a sub-shell because it will be much slower
+ # since we are in a loop.
+ printf -v comp "%%q" "${compline%%%%$tab*}" &>/dev/null || comp=$(printf "%%q" "${compline%%%%$tab*}")
+
# Only consider the completions that match
[[ $comp == "$cur"* ]] || continue
+
+ # The completions matches. Add it to the list of full completions including
+ # its description. We don't escape the completion because it may get printed
+ # in a list if there are more than one and we don't want show escape characters
+ # in that list.
COMPREPLY+=("$compline")
+
+ # Strip any description before checking the length, and again, don't escape
+ # the completion because this length is only used when printing the completions
+ # in a list and we don't want show escape characters in that list.
+ comp=${compline%%%%$tab*}
if ((${#comp}>longest)); then
longest=${#comp}
fi
done < <(printf "%%s\n" "${completions[@]}")
- # If there is a single completion left, remove the description text
+ # If there is a single completion left, remove the description text and escape any special characters
if ((${#COMPREPLY[*]} == 1)); then
__%[1]s_debug "COMPREPLY[0]: ${COMPREPLY[0]}"
- comp="${COMPREPLY[0]%%%%$tab*}"
- __%[1]s_debug "Removed description from single completion, which is now: ${comp}"
- COMPREPLY[0]=$comp
- else # Format the descriptions
+ COMPREPLY[0]=$(printf "%%q" "${COMPREPLY[0]%%%%$tab*}")
+ __%[1]s_debug "Removed description from single completion, which is now: ${COMPREPLY[0]}"
+ else
+ # Format the descriptions
__%[1]s_format_comp_descriptions $longest
fi
}
diff --git a/vendor/github.com/spf13/cobra/cobra.go b/vendor/github.com/spf13/cobra/cobra.go
index e0b0947b04..d9cd2414e2 100644
--- a/vendor/github.com/spf13/cobra/cobra.go
+++ b/vendor/github.com/spf13/cobra/cobra.go
@@ -176,12 +176,16 @@ func rpad(s string, padding int) string {
return fmt.Sprintf(formattedString, s)
}
-// tmpl executes the given template text on data, writing the result to w.
-func tmpl(w io.Writer, text string, data interface{}) error {
- t := template.New("top")
- t.Funcs(templateFuncs)
- template.Must(t.Parse(text))
- return t.Execute(w, data)
+func tmpl(text string) *tmplFunc {
+ return &tmplFunc{
+ tmpl: text,
+ fn: func(w io.Writer, data interface{}) error {
+ t := template.New("top")
+ t.Funcs(templateFuncs)
+ template.Must(t.Parse(text))
+ return t.Execute(w, data)
+ },
+ }
}
// ld compares two strings and returns the levenshtein distance between them.
diff --git a/vendor/github.com/spf13/cobra/command.go b/vendor/github.com/spf13/cobra/command.go
index 54748fc67e..dbb2c298ba 100644
--- a/vendor/github.com/spf13/cobra/command.go
+++ b/vendor/github.com/spf13/cobra/command.go
@@ -33,6 +33,9 @@ import (
const (
FlagSetByCobraAnnotation = "cobra_annotation_flag_set_by_cobra"
CommandDisplayNameAnnotation = "cobra_annotation_command_display_name"
+
+ helpFlagName = "help"
+ helpCommandName = "help"
)
// FParseErrWhitelist configures Flag parse errors to be ignored
@@ -80,11 +83,11 @@ type Command struct {
Example string
// ValidArgs is list of all valid non-flag arguments that are accepted in shell completions
- ValidArgs []string
+ ValidArgs []Completion
// ValidArgsFunction is an optional function that provides valid non-flag arguments for shell completion.
// It is a dynamic version of using ValidArgs.
// Only one of ValidArgs and ValidArgsFunction can be used for a command.
- ValidArgsFunction func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective)
+ ValidArgsFunction CompletionFunc
// Expected arguments
Args PositionalArgs
@@ -168,12 +171,12 @@ type Command struct {
// usageFunc is usage func defined by user.
usageFunc func(*Command) error
// usageTemplate is usage template defined by user.
- usageTemplate string
+ usageTemplate *tmplFunc
// flagErrorFunc is func defined by user and it's called when the parsing of
// flags returns an error.
flagErrorFunc func(*Command, error) error
// helpTemplate is help template defined by user.
- helpTemplate string
+ helpTemplate *tmplFunc
// helpFunc is help func defined by user.
helpFunc func(*Command, []string)
// helpCommand is command with usage 'help'. If it's not defined by user,
@@ -186,7 +189,7 @@ type Command struct {
completionCommandGroupID string
// versionTemplate is the version template defined by user.
- versionTemplate string
+ versionTemplate *tmplFunc
// errPrefix is the error message prefix defined by user.
errPrefix string
@@ -281,6 +284,7 @@ func (c *Command) SetArgs(a []string) {
// SetOutput sets the destination for usage and error messages.
// If output is nil, os.Stderr is used.
+//
// Deprecated: Use SetOut and/or SetErr instead
func (c *Command) SetOutput(output io.Writer) {
c.outWriter = output
@@ -312,7 +316,11 @@ func (c *Command) SetUsageFunc(f func(*Command) error) {
// SetUsageTemplate sets usage template. Can be defined by Application.
func (c *Command) SetUsageTemplate(s string) {
- c.usageTemplate = s
+ if s == "" {
+ c.usageTemplate = nil
+ return
+ }
+ c.usageTemplate = tmpl(s)
}
// SetFlagErrorFunc sets a function to generate an error when flag parsing
@@ -348,12 +356,20 @@ func (c *Command) SetCompletionCommandGroupID(groupID string) {
// SetHelpTemplate sets help template to be used. Application can use it to set custom template.
func (c *Command) SetHelpTemplate(s string) {
- c.helpTemplate = s
+ if s == "" {
+ c.helpTemplate = nil
+ return
+ }
+ c.helpTemplate = tmpl(s)
}
// SetVersionTemplate sets version template to be used. Application can use it to set custom template.
func (c *Command) SetVersionTemplate(s string) {
- c.versionTemplate = s
+ if s == "" {
+ c.versionTemplate = nil
+ return
+ }
+ c.versionTemplate = tmpl(s)
}
// SetErrPrefix sets error message prefix to be used. Application can use it to set custom prefix.
@@ -434,7 +450,8 @@ func (c *Command) UsageFunc() (f func(*Command) error) {
}
return func(c *Command) error {
c.mergePersistentFlags()
- err := tmpl(c.OutOrStderr(), c.UsageTemplate(), c)
+ fn := c.getUsageTemplateFunc()
+ err := fn(c.OutOrStderr(), c)
if err != nil {
c.PrintErrln(err)
}
@@ -442,6 +459,19 @@ func (c *Command) UsageFunc() (f func(*Command) error) {
}
}
+// getUsageTemplateFunc returns the usage template function for the command
+// going up the command tree if necessary.
+func (c *Command) getUsageTemplateFunc() func(w io.Writer, data interface{}) error {
+ if c.usageTemplate != nil {
+ return c.usageTemplate.fn
+ }
+
+ if c.HasParent() {
+ return c.parent.getUsageTemplateFunc()
+ }
+ return defaultUsageFunc
+}
+
// Usage puts out the usage for the command.
// Used when a user provides invalid input.
// Can be defined by user by overriding UsageFunc.
@@ -460,15 +490,30 @@ func (c *Command) HelpFunc() func(*Command, []string) {
}
return func(c *Command, a []string) {
c.mergePersistentFlags()
+ fn := c.getHelpTemplateFunc()
// The help should be sent to stdout
// See https://github.com/spf13/cobra/issues/1002
- err := tmpl(c.OutOrStdout(), c.HelpTemplate(), c)
+ err := fn(c.OutOrStdout(), c)
if err != nil {
c.PrintErrln(err)
}
}
}
+// getHelpTemplateFunc returns the help template function for the command
+// going up the command tree if necessary.
+func (c *Command) getHelpTemplateFunc() func(w io.Writer, data interface{}) error {
+ if c.helpTemplate != nil {
+ return c.helpTemplate.fn
+ }
+
+ if c.HasParent() {
+ return c.parent.getHelpTemplateFunc()
+ }
+
+ return defaultHelpFunc
+}
+
// Help puts out the help for the command.
// Used when a user calls help [command].
// Can be defined by user by overriding HelpFunc.
@@ -543,71 +588,55 @@ func (c *Command) NamePadding() int {
}
// UsageTemplate returns usage template for the command.
+// This function is kept for backwards-compatibility reasons.
func (c *Command) UsageTemplate() string {
- if c.usageTemplate != "" {
- return c.usageTemplate
+ if c.usageTemplate != nil {
+ return c.usageTemplate.tmpl
}
if c.HasParent() {
return c.parent.UsageTemplate()
}
- return `Usage:{{if .Runnable}}
- {{.UseLine}}{{end}}{{if .HasAvailableSubCommands}}
- {{.CommandPath}} [command]{{end}}{{if gt (len .Aliases) 0}}
-
-Aliases:
- {{.NameAndAliases}}{{end}}{{if .HasExample}}
-
-Examples:
-{{.Example}}{{end}}{{if .HasAvailableSubCommands}}{{$cmds := .Commands}}{{if eq (len .Groups) 0}}
-
-Available Commands:{{range $cmds}}{{if (or .IsAvailableCommand (eq .Name "help"))}}
- {{rpad .Name .NamePadding }} {{.Short}}{{end}}{{end}}{{else}}{{range $group := .Groups}}
-
-{{.Title}}{{range $cmds}}{{if (and (eq .GroupID $group.ID) (or .IsAvailableCommand (eq .Name "help")))}}
- {{rpad .Name .NamePadding }} {{.Short}}{{end}}{{end}}{{end}}{{if not .AllChildCommandsHaveGroup}}
-
-Additional Commands:{{range $cmds}}{{if (and (eq .GroupID "") (or .IsAvailableCommand (eq .Name "help")))}}
- {{rpad .Name .NamePadding }} {{.Short}}{{end}}{{end}}{{end}}{{end}}{{end}}{{if .HasAvailableLocalFlags}}
-
-Flags:
-{{.LocalFlags.FlagUsages | trimTrailingWhitespaces}}{{end}}{{if .HasAvailableInheritedFlags}}
-
-Global Flags:
-{{.InheritedFlags.FlagUsages | trimTrailingWhitespaces}}{{end}}{{if .HasHelpSubCommands}}
-
-Additional help topics:{{range .Commands}}{{if .IsAdditionalHelpTopicCommand}}
- {{rpad .CommandPath .CommandPathPadding}} {{.Short}}{{end}}{{end}}{{end}}{{if .HasAvailableSubCommands}}
-
-Use "{{.CommandPath}} [command] --help" for more information about a command.{{end}}
-`
+ return defaultUsageTemplate
}
// HelpTemplate return help template for the command.
+// This function is kept for backwards-compatibility reasons.
func (c *Command) HelpTemplate() string {
- if c.helpTemplate != "" {
- return c.helpTemplate
+ if c.helpTemplate != nil {
+ return c.helpTemplate.tmpl
}
if c.HasParent() {
return c.parent.HelpTemplate()
}
- return `{{with (or .Long .Short)}}{{. | trimTrailingWhitespaces}}
-
-{{end}}{{if or .Runnable .HasSubCommands}}{{.UsageString}}{{end}}`
+ return defaultHelpTemplate
}
// VersionTemplate return version template for the command.
+// This function is kept for backwards-compatibility reasons.
func (c *Command) VersionTemplate() string {
- if c.versionTemplate != "" {
- return c.versionTemplate
+ if c.versionTemplate != nil {
+ return c.versionTemplate.tmpl
}
if c.HasParent() {
return c.parent.VersionTemplate()
}
- return `{{with .Name}}{{printf "%s " .}}{{end}}{{printf "version %s" .Version}}
-`
+ return defaultVersionTemplate
+}
+
+// getVersionTemplateFunc returns the version template function for the command
+// going up the command tree if necessary.
+func (c *Command) getVersionTemplateFunc() func(w io.Writer, data interface{}) error {
+ if c.versionTemplate != nil {
+ return c.versionTemplate.fn
+ }
+
+ if c.HasParent() {
+ return c.parent.getVersionTemplateFunc()
+ }
+ return defaultVersionFunc
}
// ErrPrefix return error message prefix for the command
@@ -894,7 +923,7 @@ func (c *Command) execute(a []string) (err error) {
// If help is called, regardless of other flags, return we want help.
// Also say we need help if the command isn't runnable.
- helpVal, err := c.Flags().GetBool("help")
+ helpVal, err := c.Flags().GetBool(helpFlagName)
if err != nil {
// should be impossible to get here as we always declare a help
// flag in InitDefaultHelpFlag()
@@ -914,7 +943,8 @@ func (c *Command) execute(a []string) (err error) {
return err
}
if versionVal {
- err := tmpl(c.OutOrStdout(), c.VersionTemplate(), c)
+ fn := c.getVersionTemplateFunc()
+ err := fn(c.OutOrStdout(), c)
if err != nil {
c.Println(err)
}
@@ -1068,12 +1098,6 @@ func (c *Command) ExecuteC() (cmd *Command, err error) {
// initialize help at the last point to allow for user overriding
c.InitDefaultHelpCmd()
- // initialize completion at the last point to allow for user overriding
- c.InitDefaultCompletionCmd()
-
- // Now that all commands have been created, let's make sure all groups
- // are properly created also
- c.checkCommandGroups()
args := c.args
@@ -1082,9 +1106,16 @@ func (c *Command) ExecuteC() (cmd *Command, err error) {
args = os.Args[1:]
}
- // initialize the hidden command to be used for shell completion
+ // initialize the __complete command to be used for shell completion
c.initCompleteCmd(args)
+ // initialize the default completion command
+ c.InitDefaultCompletionCmd(args...)
+
+ // Now that all commands have been created, let's make sure all groups
+ // are properly created also
+ c.checkCommandGroups()
+
var flags []string
if c.TraverseChildren {
cmd, flags, err = c.Traverse(args)
@@ -1187,16 +1218,16 @@ func (c *Command) checkCommandGroups() {
// If c already has help flag, it will do nothing.
func (c *Command) InitDefaultHelpFlag() {
c.mergePersistentFlags()
- if c.Flags().Lookup("help") == nil {
+ if c.Flags().Lookup(helpFlagName) == nil {
usage := "help for "
- name := c.displayName()
+ name := c.DisplayName()
if name == "" {
usage += "this command"
} else {
usage += name
}
- c.Flags().BoolP("help", "h", false, usage)
- _ = c.Flags().SetAnnotation("help", FlagSetByCobraAnnotation, []string{"true"})
+ c.Flags().BoolP(helpFlagName, "h", false, usage)
+ _ = c.Flags().SetAnnotation(helpFlagName, FlagSetByCobraAnnotation, []string{"true"})
}
}
@@ -1215,7 +1246,7 @@ func (c *Command) InitDefaultVersionFlag() {
if c.Name() == "" {
usage += "this command"
} else {
- usage += c.Name()
+ usage += c.DisplayName()
}
if c.Flags().ShorthandLookup("v") == nil {
c.Flags().BoolP("version", "v", false, usage)
@@ -1239,9 +1270,9 @@ func (c *Command) InitDefaultHelpCmd() {
Use: "help [command]",
Short: "Help about any command",
Long: `Help provides help for any command in the application.
-Simply type ` + c.displayName() + ` help [path to command] for full details.`,
- ValidArgsFunction: func(c *Command, args []string, toComplete string) ([]string, ShellCompDirective) {
- var completions []string
+Simply type ` + c.DisplayName() + ` help [path to command] for full details.`,
+ ValidArgsFunction: func(c *Command, args []string, toComplete string) ([]Completion, ShellCompDirective) {
+ var completions []Completion
cmd, _, e := c.Root().Find(args)
if e != nil {
return nil, ShellCompDirectiveNoFileComp
@@ -1253,7 +1284,7 @@ Simply type ` + c.displayName() + ` help [path to command] for full details.`,
for _, subCmd := range cmd.Commands() {
if subCmd.IsAvailableCommand() || subCmd == cmd.helpCommand {
if strings.HasPrefix(subCmd.Name(), toComplete) {
- completions = append(completions, fmt.Sprintf("%s\t%s", subCmd.Name(), subCmd.Short))
+ completions = append(completions, CompletionWithDesc(subCmd.Name(), subCmd.Short))
}
}
}
@@ -1430,10 +1461,12 @@ func (c *Command) CommandPath() string {
if c.HasParent() {
return c.Parent().CommandPath() + " " + c.Name()
}
- return c.displayName()
+ return c.DisplayName()
}
-func (c *Command) displayName() string {
+// DisplayName returns the name to display in help text. Returns command Name()
+// If CommandDisplayNameAnnoation is not set
+func (c *Command) DisplayName() string {
if displayName, ok := c.Annotations[CommandDisplayNameAnnotation]; ok {
return displayName
}
@@ -1443,7 +1476,7 @@ func (c *Command) displayName() string {
// UseLine puts out the full usage for a given command (including parents).
func (c *Command) UseLine() string {
var useline string
- use := strings.Replace(c.Use, c.Name(), c.displayName(), 1)
+ use := strings.Replace(c.Use, c.Name(), c.DisplayName(), 1)
if c.HasParent() {
useline = c.parent.CommandPath() + " " + use
} else {
@@ -1649,7 +1682,7 @@ func (c *Command) GlobalNormalizationFunc() func(f *flag.FlagSet, name string) f
// to this command (local and persistent declared here and by all parents).
func (c *Command) Flags() *flag.FlagSet {
if c.flags == nil {
- c.flags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
+ c.flags = flag.NewFlagSet(c.DisplayName(), flag.ContinueOnError)
if c.flagErrorBuf == nil {
c.flagErrorBuf = new(bytes.Buffer)
}
@@ -1664,7 +1697,7 @@ func (c *Command) Flags() *flag.FlagSet {
func (c *Command) LocalNonPersistentFlags() *flag.FlagSet {
persistentFlags := c.PersistentFlags()
- out := flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
+ out := flag.NewFlagSet(c.DisplayName(), flag.ContinueOnError)
c.LocalFlags().VisitAll(func(f *flag.Flag) {
if persistentFlags.Lookup(f.Name) == nil {
out.AddFlag(f)
@@ -1679,7 +1712,7 @@ func (c *Command) LocalFlags() *flag.FlagSet {
c.mergePersistentFlags()
if c.lflags == nil {
- c.lflags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
+ c.lflags = flag.NewFlagSet(c.DisplayName(), flag.ContinueOnError)
if c.flagErrorBuf == nil {
c.flagErrorBuf = new(bytes.Buffer)
}
@@ -1707,7 +1740,7 @@ func (c *Command) InheritedFlags() *flag.FlagSet {
c.mergePersistentFlags()
if c.iflags == nil {
- c.iflags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
+ c.iflags = flag.NewFlagSet(c.DisplayName(), flag.ContinueOnError)
if c.flagErrorBuf == nil {
c.flagErrorBuf = new(bytes.Buffer)
}
@@ -1736,7 +1769,7 @@ func (c *Command) NonInheritedFlags() *flag.FlagSet {
// PersistentFlags returns the persistent FlagSet specifically set in the current command.
func (c *Command) PersistentFlags() *flag.FlagSet {
if c.pflags == nil {
- c.pflags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
+ c.pflags = flag.NewFlagSet(c.DisplayName(), flag.ContinueOnError)
if c.flagErrorBuf == nil {
c.flagErrorBuf = new(bytes.Buffer)
}
@@ -1749,9 +1782,9 @@ func (c *Command) PersistentFlags() *flag.FlagSet {
func (c *Command) ResetFlags() {
c.flagErrorBuf = new(bytes.Buffer)
c.flagErrorBuf.Reset()
- c.flags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
+ c.flags = flag.NewFlagSet(c.DisplayName(), flag.ContinueOnError)
c.flags.SetOutput(c.flagErrorBuf)
- c.pflags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
+ c.pflags = flag.NewFlagSet(c.DisplayName(), flag.ContinueOnError)
c.pflags.SetOutput(c.flagErrorBuf)
c.lflags = nil
@@ -1868,7 +1901,7 @@ func (c *Command) mergePersistentFlags() {
// If c.parentsPflags == nil, it makes new.
func (c *Command) updateParentsPflags() {
if c.parentsPflags == nil {
- c.parentsPflags = flag.NewFlagSet(c.displayName(), flag.ContinueOnError)
+ c.parentsPflags = flag.NewFlagSet(c.DisplayName(), flag.ContinueOnError)
c.parentsPflags.SetOutput(c.flagErrorBuf)
c.parentsPflags.SortFlags = false
}
@@ -1894,3 +1927,141 @@ func commandNameMatches(s string, t string) bool {
return s == t
}
+
+// tmplFunc holds a template and a function that will execute said template.
+type tmplFunc struct {
+ tmpl string
+ fn func(io.Writer, interface{}) error
+}
+
+var defaultUsageTemplate = `Usage:{{if .Runnable}}
+ {{.UseLine}}{{end}}{{if .HasAvailableSubCommands}}
+ {{.CommandPath}} [command]{{end}}{{if gt (len .Aliases) 0}}
+
+Aliases:
+ {{.NameAndAliases}}{{end}}{{if .HasExample}}
+
+Examples:
+{{.Example}}{{end}}{{if .HasAvailableSubCommands}}{{$cmds := .Commands}}{{if eq (len .Groups) 0}}
+
+Available Commands:{{range $cmds}}{{if (or .IsAvailableCommand (eq .Name "help"))}}
+ {{rpad .Name .NamePadding }} {{.Short}}{{end}}{{end}}{{else}}{{range $group := .Groups}}
+
+{{.Title}}{{range $cmds}}{{if (and (eq .GroupID $group.ID) (or .IsAvailableCommand (eq .Name "help")))}}
+ {{rpad .Name .NamePadding }} {{.Short}}{{end}}{{end}}{{end}}{{if not .AllChildCommandsHaveGroup}}
+
+Additional Commands:{{range $cmds}}{{if (and (eq .GroupID "") (or .IsAvailableCommand (eq .Name "help")))}}
+ {{rpad .Name .NamePadding }} {{.Short}}{{end}}{{end}}{{end}}{{end}}{{end}}{{if .HasAvailableLocalFlags}}
+
+Flags:
+{{.LocalFlags.FlagUsages | trimTrailingWhitespaces}}{{end}}{{if .HasAvailableInheritedFlags}}
+
+Global Flags:
+{{.InheritedFlags.FlagUsages | trimTrailingWhitespaces}}{{end}}{{if .HasHelpSubCommands}}
+
+Additional help topics:{{range .Commands}}{{if .IsAdditionalHelpTopicCommand}}
+ {{rpad .CommandPath .CommandPathPadding}} {{.Short}}{{end}}{{end}}{{end}}{{if .HasAvailableSubCommands}}
+
+Use "{{.CommandPath}} [command] --help" for more information about a command.{{end}}
+`
+
+// defaultUsageFunc is equivalent to executing defaultUsageTemplate. The two should be changed in sync.
+func defaultUsageFunc(w io.Writer, in interface{}) error {
+ c := in.(*Command)
+ fmt.Fprint(w, "Usage:")
+ if c.Runnable() {
+ fmt.Fprintf(w, "\n %s", c.UseLine())
+ }
+ if c.HasAvailableSubCommands() {
+ fmt.Fprintf(w, "\n %s [command]", c.CommandPath())
+ }
+ if len(c.Aliases) > 0 {
+ fmt.Fprintf(w, "\n\nAliases:\n")
+ fmt.Fprintf(w, " %s", c.NameAndAliases())
+ }
+ if c.HasExample() {
+ fmt.Fprintf(w, "\n\nExamples:\n")
+ fmt.Fprintf(w, "%s", c.Example)
+ }
+ if c.HasAvailableSubCommands() {
+ cmds := c.Commands()
+ if len(c.Groups()) == 0 {
+ fmt.Fprintf(w, "\n\nAvailable Commands:")
+ for _, subcmd := range cmds {
+ if subcmd.IsAvailableCommand() || subcmd.Name() == helpCommandName {
+ fmt.Fprintf(w, "\n %s %s", rpad(subcmd.Name(), subcmd.NamePadding()), subcmd.Short)
+ }
+ }
+ } else {
+ for _, group := range c.Groups() {
+ fmt.Fprintf(w, "\n\n%s", group.Title)
+ for _, subcmd := range cmds {
+ if subcmd.GroupID == group.ID && (subcmd.IsAvailableCommand() || subcmd.Name() == helpCommandName) {
+ fmt.Fprintf(w, "\n %s %s", rpad(subcmd.Name(), subcmd.NamePadding()), subcmd.Short)
+ }
+ }
+ }
+ if !c.AllChildCommandsHaveGroup() {
+ fmt.Fprintf(w, "\n\nAdditional Commands:")
+ for _, subcmd := range cmds {
+ if subcmd.GroupID == "" && (subcmd.IsAvailableCommand() || subcmd.Name() == helpCommandName) {
+ fmt.Fprintf(w, "\n %s %s", rpad(subcmd.Name(), subcmd.NamePadding()), subcmd.Short)
+ }
+ }
+ }
+ }
+ }
+ if c.HasAvailableLocalFlags() {
+ fmt.Fprintf(w, "\n\nFlags:\n")
+ fmt.Fprint(w, trimRightSpace(c.LocalFlags().FlagUsages()))
+ }
+ if c.HasAvailableInheritedFlags() {
+ fmt.Fprintf(w, "\n\nGlobal Flags:\n")
+ fmt.Fprint(w, trimRightSpace(c.InheritedFlags().FlagUsages()))
+ }
+ if c.HasHelpSubCommands() {
+ fmt.Fprintf(w, "\n\nAdditional help topcis:")
+ for _, subcmd := range c.Commands() {
+ if subcmd.IsAdditionalHelpTopicCommand() {
+ fmt.Fprintf(w, "\n %s %s", rpad(subcmd.CommandPath(), subcmd.CommandPathPadding()), subcmd.Short)
+ }
+ }
+ }
+ if c.HasAvailableSubCommands() {
+ fmt.Fprintf(w, "\n\nUse \"%s [command] --help\" for more information about a command.", c.CommandPath())
+ }
+ fmt.Fprintln(w)
+ return nil
+}
+
+var defaultHelpTemplate = `{{with (or .Long .Short)}}{{. | trimTrailingWhitespaces}}
+
+{{end}}{{if or .Runnable .HasSubCommands}}{{.UsageString}}{{end}}`
+
+// defaultHelpFunc is equivalent to executing defaultHelpTemplate. The two should be changed in sync.
+func defaultHelpFunc(w io.Writer, in interface{}) error {
+ c := in.(*Command)
+ usage := c.Long
+ if usage == "" {
+ usage = c.Short
+ }
+ usage = trimRightSpace(usage)
+ if usage != "" {
+ fmt.Fprintln(w, usage)
+ fmt.Fprintln(w)
+ }
+ if c.Runnable() || c.HasSubCommands() {
+ fmt.Fprint(w, c.UsageString())
+ }
+ return nil
+}
+
+var defaultVersionTemplate = `{{with .DisplayName}}{{printf "%s " .}}{{end}}{{printf "version %s" .Version}}
+`
+
+// defaultVersionFunc is equivalent to executing defaultVersionTemplate. The two should be changed in sync.
+func defaultVersionFunc(w io.Writer, in interface{}) error {
+ c := in.(*Command)
+ _, err := fmt.Fprintf(w, "%s version %s\n", c.DisplayName(), c.Version)
+ return err
+}
diff --git a/vendor/github.com/spf13/cobra/completions.go b/vendor/github.com/spf13/cobra/completions.go
index c0c08b0572..a1752f7631 100644
--- a/vendor/github.com/spf13/cobra/completions.go
+++ b/vendor/github.com/spf13/cobra/completions.go
@@ -35,7 +35,7 @@ const (
)
// Global map of flag completion functions. Make sure to use flagCompletionMutex before you try to read and write from it.
-var flagCompletionFunctions = map[*pflag.Flag]func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective){}
+var flagCompletionFunctions = map[*pflag.Flag]CompletionFunc{}
// lock for reading and writing from flagCompletionFunctions
var flagCompletionMutex = &sync.RWMutex{}
@@ -117,22 +117,50 @@ type CompletionOptions struct {
HiddenDefaultCmd bool
}
+// Completion is a string that can be used for completions
+//
+// two formats are supported:
+// - the completion choice
+// - the completion choice with a textual description (separated by a TAB).
+//
+// [CompletionWithDesc] can be used to create a completion string with a textual description.
+//
+// Note: Go type alias is used to provide a more descriptive name in the documentation, but any string can be used.
+type Completion = string
+
+// CompletionFunc is a function that provides completion results.
+type CompletionFunc = func(cmd *Command, args []string, toComplete string) ([]Completion, ShellCompDirective)
+
+// CompletionWithDesc returns a [Completion] with a description by using the TAB delimited format.
+func CompletionWithDesc(choice string, description string) Completion {
+ return choice + "\t" + description
+}
+
// NoFileCompletions can be used to disable file completion for commands that should
// not trigger file completions.
-func NoFileCompletions(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective) {
+//
+// This method satisfies [CompletionFunc].
+// It can be used with [Command.RegisterFlagCompletionFunc] and for [Command.ValidArgsFunction].
+func NoFileCompletions(cmd *Command, args []string, toComplete string) ([]Completion, ShellCompDirective) {
return nil, ShellCompDirectiveNoFileComp
}
// FixedCompletions can be used to create a completion function which always
// returns the same results.
-func FixedCompletions(choices []string, directive ShellCompDirective) func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective) {
- return func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective) {
+//
+// This method returns a function that satisfies [CompletionFunc]
+// It can be used with [Command.RegisterFlagCompletionFunc] and for [Command.ValidArgsFunction].
+func FixedCompletions(choices []Completion, directive ShellCompDirective) CompletionFunc {
+ return func(cmd *Command, args []string, toComplete string) ([]Completion, ShellCompDirective) {
return choices, directive
}
}
// RegisterFlagCompletionFunc should be called to register a function to provide completion for a flag.
-func (c *Command) RegisterFlagCompletionFunc(flagName string, f func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective)) error {
+//
+// You can use pre-defined completion functions such as [FixedCompletions] or [NoFileCompletions],
+// or you can define your own.
+func (c *Command) RegisterFlagCompletionFunc(flagName string, f CompletionFunc) error {
flag := c.Flag(flagName)
if flag == nil {
return fmt.Errorf("RegisterFlagCompletionFunc: flag '%s' does not exist", flagName)
@@ -148,7 +176,7 @@ func (c *Command) RegisterFlagCompletionFunc(flagName string, f func(cmd *Comman
}
// GetFlagCompletionFunc returns the completion function for the given flag of the command, if available.
-func (c *Command) GetFlagCompletionFunc(flagName string) (func(*Command, []string, string) ([]string, ShellCompDirective), bool) {
+func (c *Command) GetFlagCompletionFunc(flagName string) (CompletionFunc, bool) {
flag := c.Flag(flagName)
if flag == nil {
return nil, false
@@ -270,7 +298,15 @@ func (c *Command) initCompleteCmd(args []string) {
}
}
-func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDirective, error) {
+// SliceValue is a reduced version of [pflag.SliceValue]. It is used to detect
+// flags that accept multiple values and therefore can provide completion
+// multiple times.
+type SliceValue interface {
+ // GetSlice returns the flag value list as an array of strings.
+ GetSlice() []string
+}
+
+func (c *Command) getCompletions(args []string) (*Command, []Completion, ShellCompDirective, error) {
// The last argument, which is not completely typed by the user,
// should not be part of the list of arguments
toComplete := args[len(args)-1]
@@ -298,7 +334,7 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi
}
if err != nil {
// Unable to find the real command. E.g., someInvalidCmd
- return c, []string{}, ShellCompDirectiveDefault, fmt.Errorf("unable to find a command for arguments: %v", trimmedArgs)
+ return c, []Completion{}, ShellCompDirectiveDefault, fmt.Errorf("unable to find a command for arguments: %v", trimmedArgs)
}
finalCmd.ctx = c.ctx
@@ -328,7 +364,7 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi
// Parse the flags early so we can check if required flags are set
if err = finalCmd.ParseFlags(finalArgs); err != nil {
- return finalCmd, []string{}, ShellCompDirectiveDefault, fmt.Errorf("Error while parsing flags from args %v: %s", finalArgs, err.Error())
+ return finalCmd, []Completion{}, ShellCompDirectiveDefault, fmt.Errorf("Error while parsing flags from args %v: %s", finalArgs, err.Error())
}
realArgCount := finalCmd.Flags().NArg()
@@ -340,14 +376,14 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi
if flagErr != nil {
// If error type is flagCompError and we don't want flagCompletion we should ignore the error
if _, ok := flagErr.(*flagCompError); !(ok && !flagCompletion) {
- return finalCmd, []string{}, ShellCompDirectiveDefault, flagErr
+ return finalCmd, []Completion{}, ShellCompDirectiveDefault, flagErr
}
}
// Look for the --help or --version flags. If they are present,
// there should be no further completions.
if helpOrVersionFlagPresent(finalCmd) {
- return finalCmd, []string{}, ShellCompDirectiveNoFileComp, nil
+ return finalCmd, []Completion{}, ShellCompDirectiveNoFileComp, nil
}
// We only remove the flags from the arguments if DisableFlagParsing is not set.
@@ -376,11 +412,11 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi
return finalCmd, subDir, ShellCompDirectiveFilterDirs, nil
}
// Directory completion
- return finalCmd, []string{}, ShellCompDirectiveFilterDirs, nil
+ return finalCmd, []Completion{}, ShellCompDirectiveFilterDirs, nil
}
}
- var completions []string
+ var completions []Completion
var directive ShellCompDirective
// Enforce flag groups before doing flag completions
@@ -399,10 +435,14 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi
// If we have not found any required flags, only then can we show regular flags
if len(completions) == 0 {
doCompleteFlags := func(flag *pflag.Flag) {
- if !flag.Changed ||
+ _, acceptsMultiple := flag.Value.(SliceValue)
+ acceptsMultiple = acceptsMultiple ||
strings.Contains(flag.Value.Type(), "Slice") ||
- strings.Contains(flag.Value.Type(), "Array") {
- // If the flag is not already present, or if it can be specified multiple times (Array or Slice)
+ strings.Contains(flag.Value.Type(), "Array") ||
+ strings.HasPrefix(flag.Value.Type(), "stringTo")
+
+ if !flag.Changed || acceptsMultiple {
+ // If the flag is not already present, or if it can be specified multiple times (Array, Slice, or stringTo)
// we suggest it as a completion
completions = append(completions, getFlagNameCompletions(flag, toComplete)...)
}
@@ -462,7 +502,7 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi
for _, subCmd := range finalCmd.Commands() {
if subCmd.IsAvailableCommand() || subCmd == finalCmd.helpCommand {
if strings.HasPrefix(subCmd.Name(), toComplete) {
- completions = append(completions, fmt.Sprintf("%s\t%s", subCmd.Name(), subCmd.Short))
+ completions = append(completions, CompletionWithDesc(subCmd.Name(), subCmd.Short))
}
directive = ShellCompDirectiveNoFileComp
}
@@ -507,7 +547,7 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi
}
// Find the completion function for the flag or command
- var completionFn func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective)
+ var completionFn CompletionFunc
if flag != nil && flagCompletion {
flagCompletionMutex.RLock()
completionFn = flagCompletionFunctions[flag]
@@ -518,7 +558,7 @@ func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDi
if completionFn != nil {
// Go custom completion defined for this flag or command.
// Call the registered completion function to get the completions.
- var comps []string
+ var comps []Completion
comps, directive = completionFn(finalCmd, finalArgs, toComplete)
completions = append(completions, comps...)
}
@@ -531,23 +571,23 @@ func helpOrVersionFlagPresent(cmd *Command) bool {
len(versionFlag.Annotations[FlagSetByCobraAnnotation]) > 0 && versionFlag.Changed {
return true
}
- if helpFlag := cmd.Flags().Lookup("help"); helpFlag != nil &&
+ if helpFlag := cmd.Flags().Lookup(helpFlagName); helpFlag != nil &&
len(helpFlag.Annotations[FlagSetByCobraAnnotation]) > 0 && helpFlag.Changed {
return true
}
return false
}
-func getFlagNameCompletions(flag *pflag.Flag, toComplete string) []string {
+func getFlagNameCompletions(flag *pflag.Flag, toComplete string) []Completion {
if nonCompletableFlag(flag) {
- return []string{}
+ return []Completion{}
}
- var completions []string
+ var completions []Completion
flagName := "--" + flag.Name
if strings.HasPrefix(flagName, toComplete) {
// Flag without the =
- completions = append(completions, fmt.Sprintf("%s\t%s", flagName, flag.Usage))
+ completions = append(completions, CompletionWithDesc(flagName, flag.Usage))
// Why suggest both long forms: --flag and --flag= ?
// This forces the user to *always* have to type either an = or a space after the flag name.
@@ -559,20 +599,20 @@ func getFlagNameCompletions(flag *pflag.Flag, toComplete string) []string {
// if len(flag.NoOptDefVal) == 0 {
// // Flag requires a value, so it can be suffixed with =
// flagName += "="
- // completions = append(completions, fmt.Sprintf("%s\t%s", flagName, flag.Usage))
+ // completions = append(completions, CompletionWithDesc(flagName, flag.Usage))
// }
}
flagName = "-" + flag.Shorthand
if len(flag.Shorthand) > 0 && strings.HasPrefix(flagName, toComplete) {
- completions = append(completions, fmt.Sprintf("%s\t%s", flagName, flag.Usage))
+ completions = append(completions, CompletionWithDesc(flagName, flag.Usage))
}
return completions
}
-func completeRequireFlags(finalCmd *Command, toComplete string) []string {
- var completions []string
+func completeRequireFlags(finalCmd *Command, toComplete string) []Completion {
+ var completions []Completion
doCompleteRequiredFlags := func(flag *pflag.Flag) {
if _, present := flag.Annotations[BashCompOneRequiredFlag]; present {
@@ -687,8 +727,8 @@ func checkIfFlagCompletion(finalCmd *Command, args []string, lastArg string) (*p
// 1- the feature has been explicitly disabled by the program,
// 2- c has no subcommands (to avoid creating one),
// 3- c already has a 'completion' command provided by the program.
-func (c *Command) InitDefaultCompletionCmd() {
- if c.CompletionOptions.DisableDefaultCmd || !c.HasSubCommands() {
+func (c *Command) InitDefaultCompletionCmd(args ...string) {
+ if c.CompletionOptions.DisableDefaultCmd {
return
}
@@ -701,6 +741,16 @@ func (c *Command) InitDefaultCompletionCmd() {
haveNoDescFlag := !c.CompletionOptions.DisableNoDescFlag && !c.CompletionOptions.DisableDescriptions
+ // Special case to know if there are sub-commands or not.
+ hasSubCommands := false
+ for _, cmd := range c.commands {
+ if cmd.Name() != ShellCompRequestCmd && cmd.Name() != helpCommandName {
+ // We found a real sub-command (not 'help' or '__complete')
+ hasSubCommands = true
+ break
+ }
+ }
+
completionCmd := &Command{
Use: compCmdName,
Short: "Generate the autocompletion script for the specified shell",
@@ -714,6 +764,22 @@ See each sub-command's help for details on how to use the generated script.
}
c.AddCommand(completionCmd)
+ if !hasSubCommands {
+ // If the 'completion' command will be the only sub-command,
+ // we only create it if it is actually being called.
+ // This avoids breaking programs that would suddenly find themselves with
+ // a subcommand, which would prevent them from accepting arguments.
+ // We also create the 'completion' command if the user is triggering
+ // shell completion for it (prog __complete completion '')
+ subCmd, cmdArgs, err := c.Find(args)
+ if err != nil || subCmd.Name() != compCmdName &&
+ !(subCmd.Name() == ShellCompRequestCmd && len(cmdArgs) > 1 && cmdArgs[0] == compCmdName) {
+ // The completion command is not being called or being completed so we remove it.
+ c.RemoveCommand(completionCmd)
+ return
+ }
+ }
+
out := c.OutOrStdout()
noDesc := c.CompletionOptions.DisableDescriptions
shortDesc := "Generate the autocompletion script for %s"
diff --git a/vendor/github.com/spf13/cobra/powershell_completions.go b/vendor/github.com/spf13/cobra/powershell_completions.go
index a830b7bcad..746dcb92e3 100644
--- a/vendor/github.com/spf13/cobra/powershell_completions.go
+++ b/vendor/github.com/spf13/cobra/powershell_completions.go
@@ -162,7 +162,10 @@ filter __%[1]s_escapeStringWithSpecialChars {
if (-Not $Description) {
$Description = " "
}
- @{Name="$Name";Description="$Description"}
+ New-Object -TypeName PSCustomObject -Property @{
+ Name = "$Name"
+ Description = "$Description"
+ }
}
@@ -240,7 +243,12 @@ filter __%[1]s_escapeStringWithSpecialChars {
__%[1]s_debug "Only one completion left"
# insert space after value
- [System.Management.Automation.CompletionResult]::new($($comp.Name | __%[1]s_escapeStringWithSpecialChars) + $Space, "$($comp.Name)", 'ParameterValue', "$($comp.Description)")
+ $CompletionText = $($comp.Name | __%[1]s_escapeStringWithSpecialChars) + $Space
+ if ($ExecutionContext.SessionState.LanguageMode -eq "FullLanguage"){
+ [System.Management.Automation.CompletionResult]::new($CompletionText, "$($comp.Name)", 'ParameterValue', "$($comp.Description)")
+ } else {
+ $CompletionText
+ }
} else {
# Add the proper number of spaces to align the descriptions
@@ -255,7 +263,12 @@ filter __%[1]s_escapeStringWithSpecialChars {
$Description = " ($($comp.Description))"
}
- [System.Management.Automation.CompletionResult]::new("$($comp.Name)$Description", "$($comp.Name)$Description", 'ParameterValue', "$($comp.Description)")
+ $CompletionText = "$($comp.Name)$Description"
+ if ($ExecutionContext.SessionState.LanguageMode -eq "FullLanguage"){
+ [System.Management.Automation.CompletionResult]::new($CompletionText, "$($comp.Name)$Description", 'ParameterValue', "$($comp.Description)")
+ } else {
+ $CompletionText
+ }
}
}
@@ -264,7 +277,13 @@ filter __%[1]s_escapeStringWithSpecialChars {
# insert space after value
# MenuComplete will automatically show the ToolTip of
# the highlighted value at the bottom of the suggestions.
- [System.Management.Automation.CompletionResult]::new($($comp.Name | __%[1]s_escapeStringWithSpecialChars) + $Space, "$($comp.Name)", 'ParameterValue', "$($comp.Description)")
+
+ $CompletionText = $($comp.Name | __%[1]s_escapeStringWithSpecialChars) + $Space
+ if ($ExecutionContext.SessionState.LanguageMode -eq "FullLanguage"){
+ [System.Management.Automation.CompletionResult]::new($CompletionText, "$($comp.Name)", 'ParameterValue', "$($comp.Description)")
+ } else {
+ $CompletionText
+ }
}
# TabCompleteNext and in case we get something unknown
@@ -272,7 +291,13 @@ filter __%[1]s_escapeStringWithSpecialChars {
# Like MenuComplete but we don't want to add a space here because
# the user need to press space anyway to get the completion.
# Description will not be shown because that's not possible with TabCompleteNext
- [System.Management.Automation.CompletionResult]::new($($comp.Name | __%[1]s_escapeStringWithSpecialChars), "$($comp.Name)", 'ParameterValue', "$($comp.Description)")
+
+ $CompletionText = $($comp.Name | __%[1]s_escapeStringWithSpecialChars)
+ if ($ExecutionContext.SessionState.LanguageMode -eq "FullLanguage"){
+ [System.Management.Automation.CompletionResult]::new($CompletionText, "$($comp.Name)", 'ParameterValue', "$($comp.Description)")
+ } else {
+ $CompletionText
+ }
}
}
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 20f32aff52..929caba3a3 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -44,9 +44,6 @@ github.com/Microsoft/go-winio/pkg/guid
## explicit; go 1.18
github.com/Nerzal/gocloak/v13
github.com/Nerzal/gocloak/v13/pkg/jwx
-# github.com/OneOfOne/xxhash v1.2.8
-## explicit; go 1.11
-github.com/OneOfOne/xxhash
# github.com/ProtonMail/go-crypto v1.1.5
## explicit; go 1.17
github.com/ProtonMail/go-crypto/bitcurves
@@ -76,7 +73,7 @@ github.com/ProtonMail/go-crypto/openpgp/x448
github.com/RoaringBitmap/roaring
github.com/RoaringBitmap/roaring/internal
github.com/RoaringBitmap/roaring/roaring64
-# github.com/agnivade/levenshtein v1.2.0
+# github.com/agnivade/levenshtein v1.2.1
## explicit; go 1.21
github.com/agnivade/levenshtein
# github.com/ajg/form v1.5.1
@@ -319,8 +316,8 @@ github.com/coreos/go-systemd/v22/journal
# github.com/cornelk/hashmap v1.0.8
## explicit; go 1.19
github.com/cornelk/hashmap
-# github.com/cpuguy83/go-md2man/v2 v2.0.5
-## explicit; go 1.11
+# github.com/cpuguy83/go-md2man/v2 v2.0.6
+## explicit; go 1.12
github.com/cpuguy83/go-md2man/v2/md2man
# github.com/crewjam/httperr v0.2.0
## explicit; go 1.13
@@ -1095,8 +1092,8 @@ github.com/onsi/gomega/matchers/support/goraph/edge
github.com/onsi/gomega/matchers/support/goraph/node
github.com/onsi/gomega/matchers/support/goraph/util
github.com/onsi/gomega/types
-# github.com/open-policy-agent/opa v1.1.0
-## explicit; go 1.22.7
+# github.com/open-policy-agent/opa v1.2.0
+## explicit; go 1.23.6
github.com/open-policy-agent/opa/ast
github.com/open-policy-agent/opa/ast/json
github.com/open-policy-agent/opa/bundle
@@ -1802,7 +1799,7 @@ github.com/spacewander/go-suffix-tree
github.com/spf13/afero
github.com/spf13/afero/internal/common
github.com/spf13/afero/mem
-# github.com/spf13/cobra v1.8.1
+# github.com/spf13/cobra v1.9.1
## explicit; go 1.15
github.com/spf13/cobra
# github.com/spf13/pflag v1.0.6