chore(deps): bump github.com/open-policy-agent/opa from 0.65.0 to 0.67.1

Bumps [github.com/open-policy-agent/opa](https://github.com/open-policy-agent/opa) from 0.65.0 to 0.67.1.
- [Release notes](https://github.com/open-policy-agent/opa/releases)
- [Changelog](https://github.com/open-policy-agent/opa/blob/main/CHANGELOG.md)
- [Commits](https://github.com/open-policy-agent/opa/compare/v0.65.0...v0.67.1)

---
updated-dependencies:
- dependency-name: github.com/open-policy-agent/opa
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
This commit is contained in:
dependabot[bot]
2024-08-12 06:52:35 +00:00
committed by Ralf Haferkamp
parent a381a3a0e8
commit 4d155475fe
80 changed files with 15909 additions and 267 deletions

8
go.mod
View File

@@ -69,7 +69,7 @@ require (
github.com/onsi/ginkgo v1.16.5
github.com/onsi/ginkgo/v2 v2.19.0
github.com/onsi/gomega v1.33.1
github.com/open-policy-agent/opa v0.65.0
github.com/open-policy-agent/opa v0.67.1
github.com/orcaman/concurrent-map v1.0.0
github.com/owncloud/libre-graph-api-go v1.0.5-0.20240618162722-2298241331d1
github.com/pkg/errors v0.9.1
@@ -98,7 +98,7 @@ require (
go.opentelemetry.io/contrib/zpages v0.53.0
go.opentelemetry.io/otel v1.28.0
go.opentelemetry.io/otel/exporters/jaeger v1.17.0
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.27.0
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0
go.opentelemetry.io/otel/sdk v1.28.0
go.opentelemetry.io/otel/trace v1.28.0
golang.org/x/crypto v0.25.0
@@ -335,9 +335,9 @@ require (
go.etcd.io/etcd/client/v3 v3.5.12 // indirect
go.opencensus.io v0.24.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.48.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.27.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0 // indirect
go.opentelemetry.io/otel/metric v1.28.0 // indirect
go.opentelemetry.io/proto/otlp v1.2.0 // indirect
go.opentelemetry.io/proto/otlp v1.3.1 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/multierr v1.9.0 // indirect
go.uber.org/zap v1.23.0 // indirect

16
go.sum
View File

@@ -933,8 +933,8 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
github.com/onsi/gomega v1.33.1 h1:dsYjIxxSR755MDmKVsaFQTE22ChNBcuuTWgkUDSubOk=
github.com/onsi/gomega v1.33.1/go.mod h1:U4R44UsT+9eLIaYRB2a5qajjtQYn0hauxvRm16AVYg0=
github.com/open-policy-agent/opa v0.65.0 h1:wnEU0pEk80YjFi3yoDbFTMluyNssgPI4VJNJetD9a4U=
github.com/open-policy-agent/opa v0.65.0/go.mod h1:CNoLL44LuCH1Yot/zoeZXRKFylQtCJV+oGFiP2TeeEc=
github.com/open-policy-agent/opa v0.67.1 h1:rzy26J6g1X+CKknAcx0Vfbt41KqjuSzx4E0A8DAZf3E=
github.com/open-policy-agent/opa v0.67.1/go.mod h1:aqKlHc8E2VAAylYE9x09zJYr/fYzGX+JKne89UGqFzk=
github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg=
github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
@@ -1251,18 +1251,18 @@ go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo=
go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4=
go.opentelemetry.io/otel/exporters/jaeger v1.17.0 h1:D7UpUy2Xc2wsi1Ras6V40q806WM07rqoCWzXu7Sqy+4=
go.opentelemetry.io/otel/exporters/jaeger v1.17.0/go.mod h1:nPCqOnEH9rNLKqH/+rrUjiMzHJdV1BlpKcTwRTyKkKI=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.27.0 h1:R9DE4kQ4k+YtfLI2ULwX82VtNQ2J8yZmA7ZIF/D+7Mc=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.27.0/go.mod h1:OQFyQVrDlbe+R7xrEyDr/2Wr67Ol0hRUgsfA+V5A95s=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.27.0 h1:qFffATk0X+HD+f1Z8lswGiOQYKHRlzfmdJm0wEaVrFA=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.27.0/go.mod h1:MOiCmryaYtc+V0Ei+Tx9o5S1ZjA7kzLucuVuyzBZloQ=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0 h1:3Q/xZUyC1BBkualc9ROb4G8qkH90LXEIICcs5zv1OYY=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0/go.mod h1:s75jGIWA9OfCMzF0xr+ZgfrB5FEbbV7UuYo32ahUiFI=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0 h1:R3X6ZXmNPRR8ul6i3WgFURCHzaXjHdm0karRG/+dj3s=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0/go.mod h1:QWFXnDavXWwMx2EEcZsf3yxgEKAqsxQ+Syjp+seyInw=
go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q=
go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s=
go.opentelemetry.io/otel/sdk v1.28.0 h1:b9d7hIry8yZsgtbmM0DKyPWMMUMlK9NEKuIG4aBqWyE=
go.opentelemetry.io/otel/sdk v1.28.0/go.mod h1:oYj7ClPUA7Iw3m+r7GeEjz0qckQRJK2B8zjcZEfu7Pg=
go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g=
go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI=
go.opentelemetry.io/proto/otlp v1.2.0 h1:pVeZGk7nXDC9O2hncA6nHldxEjm6LByfA2aN8IOkz94=
go.opentelemetry.io/proto/otlp v1.2.0/go.mod h1:gGpR8txAl5M03pDhMC79G6SdqNV26naRm/KDsgaHD8A=
go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0=
go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=

View File

@@ -627,9 +627,8 @@ func (a *AuthorAnnotation) String() string {
return a.Name
} else if len(a.Name) == 0 {
return fmt.Sprintf("<%s>", a.Email)
} else {
return fmt.Sprintf("%s <%s>", a.Name, a.Email)
}
return fmt.Sprintf("%s <%s>", a.Name, a.Email)
}
// Copy returns a deep copy of rr.

View File

@@ -120,6 +120,7 @@ var DefaultBuiltins = [...]*Builtin{
Lower,
Upper,
Contains,
StringCount,
StartsWith,
EndsWith,
Split,
@@ -1109,6 +1110,19 @@ var Contains = &Builtin{
Categories: stringsCat,
}
var StringCount = &Builtin{
Name: "strings.count",
Description: "Returns the number of non-overlapping instances of a substring in a string.",
Decl: types.NewFunction(
types.Args(
types.Named("search", types.S).Description("string to search in"),
types.Named("substring", types.S).Description("substring to look for"),
),
types.Named("output", types.N).Description("count of occurrences, `0` if not found"),
),
Categories: stringsCat,
}
var StartsWith = &Builtin{
Name: "startswith",
Description: "Returns true if the search string begins with the base string.",

View File

@@ -120,34 +120,35 @@ type Compiler struct {
// Capabilities required by the modules that were compiled.
Required *Capabilities
localvargen *localVarGenerator
moduleLoader ModuleLoader
ruleIndices *util.HashMap
stages []stage
maxErrs int
sorted []string // list of sorted module names
pathExists func([]string) (bool, error)
after map[string][]CompilerStageDefinition
metrics metrics.Metrics
capabilities *Capabilities // user-supplied capabilities
imports map[string][]*Import // saved imports from stripping
builtins map[string]*Builtin // universe of built-in functions
customBuiltins map[string]*Builtin // user-supplied custom built-in functions (deprecated: use capabilities)
unsafeBuiltinsMap map[string]struct{} // user-supplied set of unsafe built-ins functions to block (deprecated: use capabilities)
deprecatedBuiltinsMap map[string]struct{} // set of deprecated, but not removed, built-in functions
enablePrintStatements bool // indicates if print statements should be elided (default)
comprehensionIndices map[*Term]*ComprehensionIndex // comprehension key index
initialized bool // indicates if init() has been called
debug debug.Debug // emits debug information produced during compilation
schemaSet *SchemaSet // user-supplied schemas for input and data documents
inputType types.Type // global input type retrieved from schema set
annotationSet *AnnotationSet // hierarchical set of annotations
strict bool // enforce strict compilation checks
keepModules bool // whether to keep the unprocessed, parsed modules (below)
parsedModules map[string]*Module // parsed, but otherwise unprocessed modules, kept track of when keepModules is true
useTypeCheckAnnotations bool // whether to provide annotated information (schemas) to the type checker
allowUndefinedFuncCalls bool // don't error on calls to unknown functions.
evalMode CompilerEvalMode
localvargen *localVarGenerator
moduleLoader ModuleLoader
ruleIndices *util.HashMap
stages []stage
maxErrs int
sorted []string // list of sorted module names
pathExists func([]string) (bool, error)
after map[string][]CompilerStageDefinition
metrics metrics.Metrics
capabilities *Capabilities // user-supplied capabilities
imports map[string][]*Import // saved imports from stripping
builtins map[string]*Builtin // universe of built-in functions
customBuiltins map[string]*Builtin // user-supplied custom built-in functions (deprecated: use capabilities)
unsafeBuiltinsMap map[string]struct{} // user-supplied set of unsafe built-ins functions to block (deprecated: use capabilities)
deprecatedBuiltinsMap map[string]struct{} // set of deprecated, but not removed, built-in functions
enablePrintStatements bool // indicates if print statements should be elided (default)
comprehensionIndices map[*Term]*ComprehensionIndex // comprehension key index
initialized bool // indicates if init() has been called
debug debug.Debug // emits debug information produced during compilation
schemaSet *SchemaSet // user-supplied schemas for input and data documents
inputType types.Type // global input type retrieved from schema set
annotationSet *AnnotationSet // hierarchical set of annotations
strict bool // enforce strict compilation checks
keepModules bool // whether to keep the unprocessed, parsed modules (below)
parsedModules map[string]*Module // parsed, but otherwise unprocessed modules, kept track of when keepModules is true
useTypeCheckAnnotations bool // whether to provide annotated information (schemas) to the type checker
allowUndefinedFuncCalls bool // don't error on calls to unknown functions.
evalMode CompilerEvalMode //
rewriteTestRulesForTracing bool // rewrite test rules to capture dynamic values for tracing.
}
// CompilerStage defines the interface for stages in the compiler.
@@ -346,6 +347,7 @@ func NewCompiler() *Compiler {
{"CheckSafetyRuleBodies", "compile_stage_check_safety_rule_bodies", c.checkSafetyRuleBodies},
{"RewriteEquals", "compile_stage_rewrite_equals", c.rewriteEquals},
{"RewriteDynamicTerms", "compile_stage_rewrite_dynamic_terms", c.rewriteDynamicTerms},
{"RewriteTestRulesForTracing", "compile_stage_rewrite_test_rules_for_tracing", c.rewriteTestRuleEqualities}, // must run after RewriteDynamicTerms
{"CheckRecursion", "compile_stage_check_recursion", c.checkRecursion},
{"CheckTypes", "compile_stage_check_types", c.checkTypes}, // must be run after CheckRecursion
{"CheckUnsafeBuiltins", "compile_state_check_unsafe_builtins", c.checkUnsafeBuiltins},
@@ -469,6 +471,13 @@ func (c *Compiler) WithEvalMode(e CompilerEvalMode) *Compiler {
return c
}
// WithRewriteTestRules enables rewriting test rules to capture dynamic values in local variables,
// so they can be accessed by tracing.
func (c *Compiler) WithRewriteTestRules(rewrite bool) *Compiler {
c.rewriteTestRulesForTracing = rewrite
return c
}
// ParsedModules returns the parsed, unprocessed modules from the compiler.
// It is `nil` if keeping modules wasn't enabled via `WithKeepModules(true)`.
// The map includes all modules loaded via the ModuleLoader, if one was used.
@@ -2167,6 +2176,43 @@ func (c *Compiler) rewriteDynamicTerms() {
}
}
// rewriteTestRuleEqualities rewrites equality expressions in test rule bodies to create local vars for statements that would otherwise
// not have their values captured through tracing, such as refs and comprehensions not unified/assigned to a local var.
// For example, given the following module:
//
// package test
//
// p.q contains v if {
// some v in numbers.range(1, 3)
// }
//
// p.r := "foo"
//
// test_rule {
// p == {
// "q": {4, 5, 6}
// }
// }
//
// `p` in `test_rule` resolves to `data.test.p`, which won't be an entry in the virtual-cache and must therefore be calculated after-the-fact.
// If `p` isn't captured in a local var, there is no trivial way to retrieve its value for test reporting.
func (c *Compiler) rewriteTestRuleEqualities() {
if !c.rewriteTestRulesForTracing {
return
}
f := newEqualityFactory(c.localvargen)
for _, name := range c.sorted {
mod := c.Modules[name]
WalkRules(mod, func(rule *Rule) bool {
if strings.HasPrefix(string(rule.Head.Name), "test_") {
rule.Body = rewriteTestEqualities(f, rule.Body)
}
return false
})
}
}
func (c *Compiler) parseMetadataBlocks() {
// Only parse annotations if rego.metadata built-ins are called
regoMetadataCalled := false
@@ -4194,7 +4240,7 @@ func resolveRefsInRule(globals map[Var]*usedRef, rule *Rule) error {
// Object keys cannot be pattern matched so only walk values.
case *object:
x.Foreach(func(k, v *Term) {
x.Foreach(func(_, v *Term) {
vis.Walk(v)
})
@@ -4517,6 +4563,41 @@ func rewriteEquals(x interface{}) (modified bool) {
return modified
}
func rewriteTestEqualities(f *equalityFactory, body Body) Body {
result := make(Body, 0, len(body))
for _, expr := range body {
// We can't rewrite negated expressions; if the extracted term is undefined, evaluation would fail before
// reaching the negation check.
if !expr.Negated && !expr.Generated {
switch {
case expr.IsEquality():
terms := expr.Terms.([]*Term)
result, terms[1] = rewriteDynamicsShallow(expr, f, terms[1], result)
result, terms[2] = rewriteDynamicsShallow(expr, f, terms[2], result)
case expr.IsEvery():
// We rewrite equalities inside of every-bodies as a fail here will be the cause of the test-rule fail.
// Failures inside other expressions with closures, such as comprehensions, won't cause the test-rule to fail, so we skip those.
every := expr.Terms.(*Every)
every.Body = rewriteTestEqualities(f, every.Body)
}
}
result = appendExpr(result, expr)
}
return result
}
func rewriteDynamicsShallow(original *Expr, f *equalityFactory, term *Term, result Body) (Body, *Term) {
switch term.Value.(type) {
case Ref, *ArrayComprehension, *SetComprehension, *ObjectComprehension:
generated := f.Generate(term)
generated.With = original.With
result.Append(generated)
connectGeneratedExprs(original, generated)
return result, result[len(result)-1].Operand(0)
}
return result, term
}
// rewriteDynamics will rewrite the body so that dynamic terms (i.e., refs and
// comprehensions) are bound to vars earlier in the query. This translation
// results in eager evaluation.
@@ -4608,6 +4689,7 @@ func rewriteDynamicsOne(original *Expr, f *equalityFactory, term *Term, result B
generated := f.Generate(term)
generated.With = original.With
result.Append(generated)
connectGeneratedExprs(original, generated)
return result, result[len(result)-1].Operand(0)
case *Array:
for i := 0; i < v.Len(); i++ {
@@ -4636,16 +4718,19 @@ func rewriteDynamicsOne(original *Expr, f *equalityFactory, term *Term, result B
var extra *Expr
v.Body, extra = rewriteDynamicsComprehensionBody(original, f, v.Body, term)
result.Append(extra)
connectGeneratedExprs(original, extra)
return result, result[len(result)-1].Operand(0)
case *SetComprehension:
var extra *Expr
v.Body, extra = rewriteDynamicsComprehensionBody(original, f, v.Body, term)
result.Append(extra)
connectGeneratedExprs(original, extra)
return result, result[len(result)-1].Operand(0)
case *ObjectComprehension:
var extra *Expr
v.Body, extra = rewriteDynamicsComprehensionBody(original, f, v.Body, term)
result.Append(extra)
connectGeneratedExprs(original, extra)
return result, result[len(result)-1].Operand(0)
}
return result, term
@@ -4713,6 +4798,7 @@ func expandExpr(gen *localVarGenerator, expr *Expr) (result []*Expr) {
for i := 1; i < len(terms); i++ {
var extras []*Expr
extras, terms[i] = expandExprTerm(gen, terms[i])
connectGeneratedExprs(expr, extras...)
if len(expr.With) > 0 {
for i := range extras {
extras[i].With = expr.With
@@ -4723,16 +4809,14 @@ func expandExpr(gen *localVarGenerator, expr *Expr) (result []*Expr) {
result = append(result, expr)
case *Every:
var extras []*Expr
if _, ok := terms.Domain.Value.(Call); ok {
extras, terms.Domain = expandExprTerm(gen, terms.Domain)
} else {
term := NewTerm(gen.Generate()).SetLocation(terms.Domain.Location)
eq := Equality.Expr(term, terms.Domain).SetLocation(terms.Domain.Location)
eq.Generated = true
eq.With = expr.With
extras = append(extras, eq)
terms.Domain = term
}
term := NewTerm(gen.Generate()).SetLocation(terms.Domain.Location)
eq := Equality.Expr(term, terms.Domain).SetLocation(terms.Domain.Location)
eq.Generated = true
eq.With = expr.With
extras = expandExpr(gen, eq)
terms.Domain = term
terms.Body = rewriteExprTermsInBody(gen, terms.Body)
result = append(result, extras...)
result = append(result, expr)
@@ -4740,6 +4824,13 @@ func expandExpr(gen *localVarGenerator, expr *Expr) (result []*Expr) {
return
}
func connectGeneratedExprs(parent *Expr, children ...*Expr) {
for _, child := range children {
child.generatedFrom = parent
parent.generates = append(parent.generates, child)
}
}
func expandExprTerm(gen *localVarGenerator, term *Term) (support []*Expr, output *Term) {
output = term
switch v := term.Value.(type) {
@@ -5494,26 +5585,34 @@ func validateWith(c *Compiler, unsafeBuiltinsMap map[string]struct{}, expr *Expr
return false, err
}
isAllowedUnknownFuncCall := false
if c.allowUndefinedFuncCalls {
switch target.Value.(type) {
case Ref, Var:
isAllowedUnknownFuncCall = true
}
}
switch {
case isDataRef(target):
ref := target.Value.(Ref)
node := c.RuleTree
targetNode := c.RuleTree
for i := 0; i < len(ref)-1; i++ {
child := node.Child(ref[i].Value)
child := targetNode.Child(ref[i].Value)
if child == nil {
break
} else if len(child.Values) > 0 {
return false, NewError(CompileErr, target.Loc(), "with keyword cannot partially replace virtual document(s)")
}
node = child
targetNode = child
}
if node != nil {
if targetNode != nil {
// NOTE(sr): at this point in the compiler stages, we don't have a fully-populated
// TypeEnv yet -- so we have to make do with this check to see if the replacement
// target is a function. It's probably wrong for arity-0 functions, but those are
// an edge case anyway.
if child := node.Child(ref[len(ref)-1].Value); child != nil {
if child := targetNode.Child(ref[len(ref)-1].Value); child != nil {
for _, v := range child.Values {
if len(v.(*Rule).Head.Args) > 0 {
if ok, err := validateWithFunctionValue(c.builtins, unsafeBuiltinsMap, c.RuleTree, value); err != nil || ok {
@@ -5523,6 +5622,18 @@ func validateWith(c *Compiler, unsafeBuiltinsMap map[string]struct{}, expr *Expr
}
}
}
// If the with-value is a ref to a function, but not a call, we can't rewrite it
if r, ok := value.Value.(Ref); ok {
// TODO: check that target ref doesn't exist?
if valueNode := c.RuleTree.Find(r); valueNode != nil {
for _, v := range valueNode.Values {
if len(v.(*Rule).Head.Args) > 0 {
return false, nil
}
}
}
}
case isInputRef(target): // ok, valid
case isBuiltinRefOrVar:
@@ -5541,6 +5652,9 @@ func validateWith(c *Compiler, unsafeBuiltinsMap map[string]struct{}, expr *Expr
if ok, err := validateWithFunctionValue(c.builtins, unsafeBuiltinsMap, c.RuleTree, value); err != nil || ok {
return false, err // err may be nil
}
case isAllowedUnknownFuncCall:
// The target isn't a ref to the input doc, data doc, or a known built-in, but it might be a ref to an unknown built-in.
return false, nil
default:
return false, NewError(TypeErr, target.Location, "with keyword target must reference existing %v, %v, or a function", InputRootDocument, DefaultRootDocument)
}

View File

@@ -472,7 +472,7 @@ func insertIntoObject(o *types.Object, path Ref, tpe types.Type, env *TypeEnv) (
func (n *typeTreeNode) Leafs() map[*Ref]types.Type {
leafs := map[*Ref]types.Type{}
n.children.Iter(func(k, v util.T) bool {
n.children.Iter(func(_, v util.T) bool {
collectLeafs(v.(*typeTreeNode), nil, leafs)
return false
})
@@ -485,7 +485,7 @@ func collectLeafs(n *typeTreeNode, path Ref, leafs map[*Ref]types.Type) {
leafs[&nPath] = n.Value()
return
}
n.children.Iter(func(k, v util.T) bool {
n.children.Iter(func(_, v util.T) bool {
collectLeafs(v.(*typeTreeNode), nPath, leafs)
return false
})

View File

@@ -164,7 +164,7 @@ func (i *baseDocEqIndex) Lookup(resolver ValueResolver) (*IndexResult, error) {
return result, nil
}
func (i *baseDocEqIndex) AllRules(resolver ValueResolver) (*IndexResult, error) {
func (i *baseDocEqIndex) AllRules(_ ValueResolver) (*IndexResult, error) {
tr := newTrieTraversalResult()
// Walk over the rule trie and accumulate _all_ rules

View File

@@ -26,6 +26,7 @@ type Scanner struct {
width int
errors []Error
keywords map[string]tokens.Token
tabs []int
regoV1Compatible bool
}
@@ -37,10 +38,11 @@ type Error struct {
// Position represents a point in the scanned source code.
type Position struct {
Offset int // start offset in bytes
End int // end offset in bytes
Row int // line number computed in bytes
Col int // column number computed in bytes
Offset int // start offset in bytes
End int // end offset in bytes
Row int // line number computed in bytes
Col int // column number computed in bytes
Tabs []int // positions of any tabs preceding Col
}
// New returns an initialized scanner that will scan
@@ -60,6 +62,7 @@ func New(r io.Reader) (*Scanner, error) {
curr: -1,
width: 0,
keywords: tokens.Keywords(),
tabs: []int{},
}
s.next()
@@ -156,7 +159,7 @@ func (s *Scanner) WithoutKeywords(kws map[string]tokens.Token) (*Scanner, map[st
// for any errors before using the other values.
func (s *Scanner) Scan() (tokens.Token, Position, string, []Error) {
pos := Position{Offset: s.offset - s.width, Row: s.row, Col: s.col}
pos := Position{Offset: s.offset - s.width, Row: s.row, Col: s.col, Tabs: s.tabs}
var tok tokens.Token
var lit string
@@ -410,8 +413,12 @@ func (s *Scanner) next() {
if s.curr == '\n' {
s.row++
s.col = 0
s.tabs = []int{}
} else {
s.col++
if s.curr == '\t' {
s.tabs = append(s.tabs, s.col)
}
}
}

View File

@@ -20,6 +20,8 @@ type Location struct {
// JSONOptions specifies options for marshaling and unmarshalling of locations
JSONOptions astJSON.Options
Tabs []int `json:"-"` // The column offsets of tabs in the source.
}
// NewLocation returns a new Location object.

View File

@@ -943,12 +943,10 @@ func (p *Parser) parseHead(defaultRule bool) (*Head, bool) {
p.illegal("expected rule value term (e.g., %s[%s] = <VALUE> { ... })", name, head.Key)
}
case tokens.Assign:
s := p.save()
p.scan()
head.Assign = true
head.Value = p.parseTermInfixCall()
if head.Value == nil {
p.restore(s)
switch {
case len(head.Args) > 0:
p.illegal("expected function value term (e.g., %s(...) := <VALUE> { ... })", name)
@@ -2132,6 +2130,7 @@ func (p *Parser) doScan(skipws bool) {
p.s.loc.Col = pos.Col
p.s.loc.Offset = pos.Offset
p.s.loc.Text = p.s.Text(pos.Offset, pos.End)
p.s.loc.Tabs = pos.Tabs
for _, err := range errs {
p.error(p.s.Loc(), err.Message)
@@ -2308,6 +2307,11 @@ func (b *metadataParser) Parse() (*Annotations, error) {
b.loc = comment.Location
}
}
if match == nil && len(b.comments) > 0 {
b.loc = b.comments[0].Location
}
return nil, augmentYamlError(err, b.comments)
}
@@ -2375,6 +2379,21 @@ func (b *metadataParser) Parse() (*Annotations, error) {
}
result.Location = b.loc
// recreate original text of entire metadata block for location text attribute
sb := strings.Builder{}
sb.WriteString("# METADATA\n")
lines := bytes.Split(b.buf.Bytes(), []byte{'\n'})
for _, line := range lines[:len(lines)-1] {
sb.WriteString("# ")
sb.Write(line)
sb.WriteByte('\n')
}
result.Location.Text = []byte(strings.TrimSuffix(sb.String(), "\n"))
return &result, nil
}
@@ -2412,10 +2431,11 @@ func augmentYamlError(err error, comments []*Comment) error {
return err
}
func unwrapPair(pair map[string]interface{}) (k string, v interface{}) {
for k, v = range pair {
func unwrapPair(pair map[string]interface{}) (string, interface{}) {
for k, v := range pair {
return k, v
}
return
return "", nil
}
var errInvalidSchemaRef = fmt.Errorf("invalid schema reference")

View File

@@ -100,7 +100,9 @@ var Wildcard = &Term{Value: Var("_")}
var WildcardPrefix = "$"
// Keywords contains strings that map to language keywords.
var Keywords = [...]string{
var Keywords = KeywordsV0
var KeywordsV0 = [...]string{
"not",
"package",
"import",
@@ -114,6 +116,24 @@ var Keywords = [...]string{
"some",
}
var KeywordsV1 = [...]string{
"not",
"package",
"import",
"as",
"default",
"else",
"with",
"null",
"true",
"false",
"some",
"if",
"contains",
"in",
"every",
}
// IsKeyword returns true if s is a language keyword.
func IsKeyword(s string) bool {
for _, x := range Keywords {
@@ -124,6 +144,26 @@ func IsKeyword(s string) bool {
return false
}
// IsKeywordInRegoVersion returns true if s is a language keyword.
func IsKeywordInRegoVersion(s string, regoVersion RegoVersion) bool {
switch regoVersion {
case RegoV0:
for _, x := range KeywordsV0 {
if x == s {
return true
}
}
case RegoV1, RegoV0CompatV1:
for _, x := range KeywordsV1 {
if x == s {
return true
}
}
}
return false
}
type (
// Node represents a node in an AST. Nodes may be statements in a policy module
// or elements of an ad-hoc query, expression, etc.
@@ -233,7 +273,9 @@ type (
Negated bool `json:"negated,omitempty"`
Location *Location `json:"location,omitempty"`
jsonOptions astJSON.Options
jsonOptions astJSON.Options
generatedFrom *Expr
generates []*Expr
}
// SomeDecl represents a variable declaration statement. The symbols are variables.
@@ -1593,6 +1635,46 @@ func NewBuiltinExpr(terms ...*Term) *Expr {
return &Expr{Terms: terms}
}
func (expr *Expr) CogeneratedExprs() []*Expr {
visited := map[*Expr]struct{}{}
visitCogeneratedExprs(expr, func(e *Expr) bool {
if expr.Equal(e) {
return true
}
if _, ok := visited[e]; ok {
return true
}
visited[e] = struct{}{}
return false
})
result := make([]*Expr, 0, len(visited))
for e := range visited {
result = append(result, e)
}
return result
}
func (expr *Expr) BaseCogeneratedExpr() *Expr {
if expr.generatedFrom == nil {
return expr
}
return expr.generatedFrom.BaseCogeneratedExpr()
}
func visitCogeneratedExprs(expr *Expr, f func(*Expr) bool) {
if parent := expr.generatedFrom; parent != nil {
if stop := f(parent); !stop {
visitCogeneratedExprs(parent, f)
}
}
for _, child := range expr.generates {
if stop := f(child); !stop {
visitCogeneratedExprs(child, f)
}
}
}
func (d *SomeDecl) String() string {
if call, ok := d.Symbols[0].Value.(Call); ok {
if len(call) == 4 {

View File

@@ -122,27 +122,65 @@ func checkDeprecatedBuiltinsForCurrentVersion(node interface{}) Errors {
return checkDeprecatedBuiltins(deprecatedBuiltins, node)
}
type RegoCheckOptions struct {
NoDuplicateImports bool
NoRootDocumentOverrides bool
NoDeprecatedBuiltins bool
NoKeywordsAsRuleNames bool
RequireIfKeyword bool
RequireContainsKeyword bool
RequireRuleBodyOrValue bool
}
func NewRegoCheckOptions() RegoCheckOptions {
// all options are enabled by default
return RegoCheckOptions{
NoDuplicateImports: true,
NoRootDocumentOverrides: true,
NoDeprecatedBuiltins: true,
NoKeywordsAsRuleNames: true,
RequireIfKeyword: true,
RequireContainsKeyword: true,
RequireRuleBodyOrValue: true,
}
}
// CheckRegoV1 checks the given module or rule for errors that are specific to Rego v1.
// Passing something other than an *ast.Rule or *ast.Module is considered a programming error, and will cause a panic.
func CheckRegoV1(x interface{}) Errors {
return CheckRegoV1WithOptions(x, NewRegoCheckOptions())
}
func CheckRegoV1WithOptions(x interface{}, opts RegoCheckOptions) Errors {
switch x := x.(type) {
case *Module:
return checkRegoV1Module(x)
return checkRegoV1Module(x, opts)
case *Rule:
return checkRegoV1Rule(x)
return checkRegoV1Rule(x, opts)
}
panic(fmt.Sprintf("cannot check rego-v1 compatibility on type %T", x))
}
func checkRegoV1Module(module *Module) Errors {
func checkRegoV1Module(module *Module, opts RegoCheckOptions) Errors {
var errors Errors
errors = append(errors, checkDuplicateImports([]*Module{module})...)
errors = append(errors, checkRootDocumentOverrides(module)...)
errors = append(errors, checkDeprecatedBuiltinsForCurrentVersion(module)...)
if opts.NoDuplicateImports {
errors = append(errors, checkDuplicateImports([]*Module{module})...)
}
if opts.NoRootDocumentOverrides {
errors = append(errors, checkRootDocumentOverrides(module)...)
}
if opts.NoDeprecatedBuiltins {
errors = append(errors, checkDeprecatedBuiltinsForCurrentVersion(module)...)
}
for _, rule := range module.Rules {
errors = append(errors, checkRegoV1Rule(rule, opts)...)
}
return errors
}
func checkRegoV1Rule(rule *Rule) Errors {
func checkRegoV1Rule(rule *Rule, opts RegoCheckOptions) Errors {
t := "rule"
if rule.isFunction() {
t = "function"
@@ -150,13 +188,16 @@ func checkRegoV1Rule(rule *Rule) Errors {
var errs Errors
if rule.generatedBody && rule.Head.generatedValue {
if opts.NoKeywordsAsRuleNames && IsKeywordInRegoVersion(rule.Head.Name.String(), RegoV1) {
errs = append(errs, NewError(ParseErr, rule.Location, fmt.Sprintf("%s keyword cannot be used for rule name", rule.Head.Name.String())))
}
if opts.RequireRuleBodyOrValue && rule.generatedBody && rule.Head.generatedValue {
errs = append(errs, NewError(ParseErr, rule.Location, "%s must have value assignment and/or body declaration", t))
}
if rule.Body != nil && !rule.generatedBody && !ruleDeclarationHasKeyword(rule, tokens.If) && !rule.Default {
if opts.RequireIfKeyword && rule.Body != nil && !rule.generatedBody && !ruleDeclarationHasKeyword(rule, tokens.If) && !rule.Default {
errs = append(errs, NewError(ParseErr, rule.Location, "`if` keyword is required before %s body", t))
}
if rule.Head.RuleKind() == MultiValue && !ruleDeclarationHasKeyword(rule, tokens.Contains) {
if opts.RequireContainsKeyword && rule.Head.RuleKind() == MultiValue && !ruleDeclarationHasKeyword(rule, tokens.Contains) {
errs = append(errs, NewError(ParseErr, rule.Location, "`contains` keyword is required for partial set rules"))
}

View File

@@ -2633,7 +2633,7 @@ func filterObject(o Value, filter Value) (Value, error) {
other = v
}
err := iterObj.Iter(func(key *Term, value *Term) error {
err := iterObj.Iter(func(key *Term, _ *Term) error {
if other.Get(key) != nil {
filteredValue, err := filterObject(v.Get(key).Value, filteredObj.Get(key).Value)
if err != nil {
@@ -3091,12 +3091,12 @@ func unmarshalTermSlice(s []interface{}) ([]*Term, error) {
buf := []*Term{}
for _, x := range s {
if m, ok := x.(map[string]interface{}); ok {
if t, err := unmarshalTerm(m); err == nil {
t, err := unmarshalTerm(m)
if err == nil {
buf = append(buf, t)
continue
} else {
return nil, err
}
return nil, err
}
return nil, fmt.Errorf("ast: unable to unmarshal term")
}

View File

@@ -1120,6 +1120,13 @@
"PreRelease": "",
"Metadata": ""
},
"strings.count": {
"Major": 0,
"Minor": 67,
"Patch": 0,
"PreRelease": "",
"Metadata": ""
},
"strings.render_template": {
"Major": 0,
"Minor": 59,

View File

@@ -352,12 +352,12 @@ func (vis *GenericVisitor) Walk(x interface{}) {
vis.Walk(x[i])
}
case *object:
x.Foreach(func(k, v *Term) {
x.Foreach(func(k, _ *Term) {
vis.Walk(k)
vis.Walk(x.Get(k))
})
case Object:
x.Foreach(func(k, v *Term) {
x.Foreach(func(k, _ *Term) {
vis.Walk(k)
vis.Walk(x.Get(k))
})
@@ -492,12 +492,12 @@ func (vis *BeforeAfterVisitor) Walk(x interface{}) {
vis.Walk(x[i])
}
case *object:
x.Foreach(func(k, v *Term) {
x.Foreach(func(k, _ *Term) {
vis.Walk(k)
vis.Walk(x.Get(k))
})
case Object:
x.Foreach(func(k, v *Term) {
x.Foreach(func(k, _ *Term) {
vis.Walk(k)
vis.Walk(x.Get(k))
})
@@ -579,7 +579,7 @@ func (vis *VarVisitor) Vars() VarSet {
func (vis *VarVisitor) visit(v interface{}) bool {
if vis.params.SkipObjectKeys {
if o, ok := v.(Object); ok {
o.Foreach(func(k, v *Term) {
o.Foreach(func(_, v *Term) {
vis.Walk(v)
})
return true
@@ -741,7 +741,7 @@ func (vis *VarVisitor) Walk(x interface{}) {
vis.Walk(x[i])
}
case *object:
x.Foreach(func(k, v *Term) {
x.Foreach(func(k, _ *Term) {
vis.Walk(k)
vis.Walk(x.Get(k))
})

View File

@@ -15,6 +15,7 @@ import (
"fmt"
"io"
"net/url"
"os"
"path"
"path/filepath"
"reflect"
@@ -449,6 +450,7 @@ type Reader struct {
name string
persist bool
regoVersion ast.RegoVersion
followSymlinks bool
}
// NewReader is deprecated. Use NewCustomReader instead.
@@ -537,6 +539,11 @@ func (r *Reader) WithBundleName(name string) *Reader {
return r
}
// WithFollowSymlinks sets whether the reader follows symlinks when reading
// bundle files from disk. The option is forwarded to the underlying
// DirectoryLoader; it has no effect on tarball-backed readers.
func (r *Reader) WithFollowSymlinks(yes bool) *Reader {
	r.followSymlinks = yes
	return r
}
// WithLazyLoadingMode sets the bundle loading mode. If true,
// bundles will be read in lazy mode. In this mode, data files in the bundle will not be
// deserialized and the check to validate that the bundle data does not contain paths
@@ -1190,7 +1197,8 @@ func (b *Bundle) SetRegoVersion(v ast.RegoVersion) {
// If there is no defined version for the given path, the default version def is returned.
// If the version does not correspond to ast.RegoV0 or ast.RegoV1, an error is returned.
func (b *Bundle) RegoVersionForFile(path string, def ast.RegoVersion) (ast.RegoVersion, error) {
if version, err := b.Manifest.numericRegoVersionForFile(path); err != nil {
version, err := b.Manifest.numericRegoVersionForFile(path)
if err != nil {
return def, err
} else if version == nil {
return def, nil
@@ -1198,9 +1206,8 @@ func (b *Bundle) RegoVersionForFile(path string, def ast.RegoVersion) (ast.RegoV
return ast.RegoV0, nil
} else if *version == 1 {
return ast.RegoV1, nil
} else {
return def, fmt.Errorf("unknown bundle rego-version %d for file '%s'", *version, path)
}
return def, fmt.Errorf("unknown bundle rego-version %d for file '%s'", *version, path)
}
func (m *Manifest) numericRegoVersionForFile(path string) (*int, error) {
@@ -1667,6 +1674,7 @@ func preProcessBundle(loader DirectoryLoader, skipVerify bool, sizeLimitBytes in
}
func readFile(f *Descriptor, sizeLimitBytes int64) (bytes.Buffer, error) {
// Case for pre-loaded byte buffers, like those from the tarballLoader.
if bb, ok := f.reader.(*bytes.Buffer); ok {
_ = f.Close() // always close, even on error
@@ -1678,6 +1686,37 @@ func readFile(f *Descriptor, sizeLimitBytes int64) (bytes.Buffer, error) {
return *bb, nil
}
// Case for *lazyFile readers:
if lf, ok := f.reader.(*lazyFile); ok {
var buf bytes.Buffer
if lf.file == nil {
var err error
if lf.file, err = os.Open(lf.path); err != nil {
return buf, fmt.Errorf("failed to open file %s: %w", f.path, err)
}
}
// Bail out if we can't read the whole file-- there's nothing useful we can do at that point!
fileSize, _ := fstatFileSize(lf.file)
if fileSize > sizeLimitBytes {
return buf, fmt.Errorf(maxSizeLimitBytesErrMsg, strings.TrimPrefix(f.Path(), "/"), fileSize, sizeLimitBytes-1)
}
// Prealloc the buffer for the file read.
buffer := make([]byte, fileSize)
_, err := io.ReadFull(lf.file, buffer)
if err != nil {
return buf, err
}
_ = lf.file.Close() // always close, even on error
// Note(philipc): Replace the lazyFile reader in the *Descriptor with a
// pointer to the wrapping bytes.Buffer, so that we don't re-read the
// file on disk again by accident.
buf = *bytes.NewBuffer(buffer)
f.reader = &buf
return buf, nil
}
// Fallback case:
var buf bytes.Buffer
n, err := f.Read(&buf, sizeLimitBytes)
_ = f.Close() // always close, even on error
@@ -1691,6 +1730,17 @@ func readFile(f *Descriptor, sizeLimitBytes int64) (bytes.Buffer, error) {
return buf, nil
}
// Takes an already open file handle and invokes the os.Stat system call on it
// to determine the file's size. Passes any errors from *File.Stat on up to the
// caller.
func fstatFileSize(f *os.File) (int64, error) {
fileInfo, err := f.Stat()
if err != nil {
return 0, err
}
return fileInfo.Size(), nil
}
// normalizePath converts any OS-specific path separators in p to forward
// slashes, so bundle paths compare consistently across platforms.
func normalizePath(p string) string {
	return filepath.ToSlash(p)
}

View File

@@ -6,6 +6,7 @@ import (
"compress/gzip"
"fmt"
"io"
"io/fs"
"os"
"path/filepath"
"sort"
@@ -126,6 +127,7 @@ type DirectoryLoader interface {
WithFilter(filter filter.LoaderFilter) DirectoryLoader
WithPathFormat(PathFormat) DirectoryLoader
WithSizeLimitBytes(sizeLimitBytes int64) DirectoryLoader
WithFollowSymlinks(followSymlinks bool) DirectoryLoader
}
type dirLoader struct {
@@ -135,6 +137,7 @@ type dirLoader struct {
filter filter.LoaderFilter
pathFormat PathFormat
maxSizeLimitBytes int64
followSymlinks bool
}
// Normalize root directory, ex "./src/bundle" -> "src/bundle"
@@ -181,6 +184,12 @@ func (d *dirLoader) WithSizeLimitBytes(sizeLimitBytes int64) DirectoryLoader {
return d
}
// WithFollowSymlinks controls whether symbolic links encountered under the
// directory root are followed and loaded as regular files.
func (d *dirLoader) WithFollowSymlinks(follow bool) DirectoryLoader {
	d.followSymlinks = follow
	return d
}
func formatPath(fileName string, root string, pathFormat PathFormat) string {
switch pathFormat {
case SlashRooted:
@@ -211,8 +220,12 @@ func (d *dirLoader) NextFile() (*Descriptor, error) {
// build a list of all files we will iterate over and read, but only one time
if d.files == nil {
d.files = []string{}
err := filepath.Walk(d.root, func(path string, info os.FileInfo, err error) error {
if info != nil && info.Mode().IsRegular() {
err := filepath.Walk(d.root, func(path string, info os.FileInfo, _ error) error {
if info == nil {
return nil
}
if info.Mode().IsRegular() {
if d.filter != nil && d.filter(filepath.ToSlash(path), info, getdepth(path, false)) {
return nil
}
@@ -220,7 +233,15 @@ func (d *dirLoader) NextFile() (*Descriptor, error) {
return fmt.Errorf(maxSizeLimitBytesErrMsg, strings.TrimPrefix(path, "/"), info.Size(), d.maxSizeLimitBytes)
}
d.files = append(d.files, path)
} else if info != nil && info.Mode().IsDir() {
} else if d.followSymlinks && info.Mode().Type()&fs.ModeSymlink == fs.ModeSymlink {
if d.filter != nil && d.filter(filepath.ToSlash(path), info, getdepth(path, false)) {
return nil
}
if d.maxSizeLimitBytes > 0 && info.Size() > d.maxSizeLimitBytes {
return fmt.Errorf(maxSizeLimitBytesErrMsg, strings.TrimPrefix(path, "/"), info.Size(), d.maxSizeLimitBytes)
}
d.files = append(d.files, path)
} else if info.Mode().IsDir() {
if d.filter != nil && d.filter(filepath.ToSlash(path), info, getdepth(path, true)) {
return filepath.SkipDir
}
@@ -305,6 +326,11 @@ func (t *tarballLoader) WithSizeLimitBytes(sizeLimitBytes int64) DirectoryLoader
return t
}
// WithFollowSymlinks is a no-op for tarballLoader: entries inside a tarball
// are read as stored, so there are no filesystem symlinks to resolve.
func (t *tarballLoader) WithFollowSymlinks(_ bool) DirectoryLoader {
	return t
}
// NextFile iterates to the next file in the directory tree
// and returns a file Descriptor for the file.
func (t *tarballLoader) NextFile() (*Descriptor, error) {
@@ -370,12 +396,13 @@ func (t *tarballLoader) NextFile() (*Descriptor, error) {
f := file{name: header.Name}
var buf bytes.Buffer
if _, err := io.Copy(&buf, t.tr); err != nil {
// Note(philipc): We rely on the previous size check in this loop for safety.
buf := bytes.NewBuffer(make([]byte, 0, header.Size))
if _, err := io.Copy(buf, t.tr); err != nil {
return nil, fmt.Errorf("failed to copy file %s: %w", header.Name, err)
}
f.reader = &buf
f.reader = buf
t.files = append(t.files, f)
} else if header.Typeflag == tar.TypeDir {

View File

@@ -26,6 +26,7 @@ type dirLoaderFS struct {
root string
pathFormat PathFormat
maxSizeLimitBytes int64
followSymlinks bool
}
// NewFSLoader returns a basic DirectoryLoader implementation
@@ -66,6 +67,16 @@ func (d *dirLoaderFS) walkDir(path string, dirEntry fs.DirEntry, err error) erro
return fmt.Errorf("file %s size %d exceeds limit of %d", path, info.Size(), d.maxSizeLimitBytes)
}
d.files = append(d.files, path)
} else if dirEntry.Type()&fs.ModeSymlink != 0 && d.followSymlinks {
if d.filter != nil && d.filter(filepath.ToSlash(path), info, getdepth(path, false)) {
return nil
}
if d.maxSizeLimitBytes > 0 && info.Size() > d.maxSizeLimitBytes {
return fmt.Errorf("file %s size %d exceeds limit of %d", path, info.Size(), d.maxSizeLimitBytes)
}
d.files = append(d.files, path)
} else if dirEntry.Type().IsDir() {
if d.filter != nil && d.filter(filepath.ToSlash(path), info, getdepth(path, true)) {
@@ -94,6 +105,11 @@ func (d *dirLoaderFS) WithSizeLimitBytes(sizeLimitBytes int64) DirectoryLoader {
return d
}
// WithFollowSymlinks specifies whether to follow symlinks when loading files
// from the fs.FS-backed directory tree.
func (d *dirLoaderFS) WithFollowSymlinks(followSymlinks bool) DirectoryLoader {
	d.followSymlinks = followSymlinks
	return d
}
// NextFile iterates to the next file in the directory tree
// and returns a file Descriptor for the file.
func (d *dirLoaderFS) NextFile() (*Descriptor, error) {

View File

File diff suppressed because it is too large Load Diff

View File

File diff suppressed because it is too large Load Diff

View File

File diff suppressed because it is too large Load Diff

View File

@@ -39,6 +39,7 @@ type Config struct {
DistributedTracing json.RawMessage `json:"distributed_tracing,omitempty"`
Server *struct {
Encoding json.RawMessage `json:"encoding,omitempty"`
Decoding json.RawMessage `json:"decoding,omitempty"`
Metrics json.RawMessage `json:"metrics,omitempty"`
} `json:"server,omitempty"`
Storage *struct {

View File

@@ -56,7 +56,12 @@ func SourceWithOpts(filename string, src []byte, opts Opts) ([]byte, error) {
}
if opts.RegoVersion == ast.RegoV0CompatV1 || opts.RegoVersion == ast.RegoV1 {
errors := ast.CheckRegoV1(module)
checkOpts := ast.NewRegoCheckOptions()
// The module is parsed as v0, so we need to disable checks that will be automatically amended by the AstWithOpts call anyways.
checkOpts.RequireIfKeyword = false
checkOpts.RequireContainsKeyword = false
checkOpts.RequireRuleBodyOrValue = false
errors := ast.CheckRegoV1WithOptions(module, checkOpts)
if len(errors) > 0 {
return nil, errors
}
@@ -1340,7 +1345,10 @@ func closingLoc(skipOpen, skipClose, open, close byte, loc *ast.Location) *ast.L
i, offset = skipPast(skipOpen, skipClose, loc)
}
for ; i < len(loc.Text) && loc.Text[i] != open; i++ {
for ; i < len(loc.Text); i++ {
if loc.Text[i] == open {
break
}
}
if i >= len(loc.Text) {
@@ -1369,7 +1377,10 @@ func closingLoc(skipOpen, skipClose, open, close byte, loc *ast.Location) *ast.L
func skipPast(open, close byte, loc *ast.Location) (int, int) {
i := 0
for ; i < len(loc.Text) && loc.Text[i] != open; i++ {
for ; i < len(loc.Text); i++ {
if loc.Text[i] == open {
break
}
}
state := 1

View File

@@ -95,7 +95,8 @@ func LoadBundleFromDisk(path, name string, bvc *bundle.VerificationConfig) (*bun
func LoadBundleFromDiskForRegoVersion(regoVersion ast.RegoVersion, path, name string, bvc *bundle.VerificationConfig) (*bundle.Bundle, error) {
bundlePath := filepath.Join(path, name, "bundle.tar.gz")
if _, err := os.Stat(bundlePath); err == nil {
_, err := os.Stat(bundlePath)
if err == nil {
f, err := os.Open(filepath.Join(bundlePath))
if err != nil {
return nil, err
@@ -116,9 +117,9 @@ func LoadBundleFromDiskForRegoVersion(regoVersion ast.RegoVersion, path, name st
return &b, nil
} else if os.IsNotExist(err) {
return nil, nil
} else {
return nil, err
}
return nil, err
}
// SaveBundleToDisk saves the given raw bytes representing the bundle's content to disk

View File

@@ -1000,6 +1000,7 @@ opa_glob_match,std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\2
opa_glob_match,std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::find<cache_key>\28cache_key\20const&\29
opa_glob_match,std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>::operator=\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20const&\29
opa_glob_match,glob_translate\28char\20const*\2c\20unsigned\20long\2c\20std::__1::vector<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::allocator<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20const&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>*\29
opa_glob_match,std::__1::unordered_map<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::allocator<std::__1::pair<cache_key\20const\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::erase\28std::__1::__hash_map_iterator<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\20>\29
opa_glob_match,std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>::pair<cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\2c\20false>\28cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\29
opa_glob_match,std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__emplace_unique_key_args<cache_key\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\28cache_key\20const&\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>&&\29
opa_glob_match,opa_string
@@ -1010,6 +1011,8 @@ void\20std::__1::vector<std::__1::basic_string<char\2c\20std::__1::char_traits<c
void\20std::__1::vector<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::allocator<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>::__push_back_slow_path<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&&\29,abort
std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::find<cache_key>\28cache_key\20const&\29,std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>::operator\28\29\28cache_key\20const&\29\20const
std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::find<cache_key>\28cache_key\20const&\29,std::__1::equal_to<cache_key>::operator\28\29\28cache_key\20const&\2c\20cache_key\20const&\29\20const
std::__1::unordered_map<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::allocator<std::__1::pair<cache_key\20const\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::erase\28std::__1::__hash_map_iterator<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\20>\29,std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::remove\28std::__1::__hash_const_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\29
std::__1::unordered_map<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::allocator<std::__1::pair<cache_key\20const\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::erase\28std::__1::__hash_map_iterator<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\20>\29,std::__1::unique_ptr<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>\2c\20std::__1::__hash_node_destructor<std::__1::allocator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>\20>\20>\20>::~unique_ptr\28\29
std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>::pair<cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\2c\20false>\28cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\29,std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>::basic_string\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20const&\29
std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>::pair<cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\2c\20false>\28cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\29,operator\20new\28unsigned\20long\29
std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>::pair<cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\2c\20false>\28cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\29,abort
@@ -1018,6 +1021,8 @@ std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_
std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__emplace_unique_key_args<cache_key\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\28cache_key\20const&\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>&&\29,operator\20new\28unsigned\20long\29
std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__emplace_unique_key_args<cache_key\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\28cache_key\20const&\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>&&\29,std::__1::__next_prime\28unsigned\20long\29
std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__emplace_unique_key_args<cache_key\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\28cache_key\20const&\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>&&\29,std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1:
:basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__rehash\28unsigned\20long\29
std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__emplace_unique_key_args<cache_key\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\28cache_key\20const&\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>&&\29,std::__1::unique_ptr<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>\2c\20std::__1::__hash_node_destructor<std::__1::allocator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>\20>\20>\20>::~unique_ptr\28\29
std::__1::unique_ptr<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>\2c\20std::__1::__hash_node_destructor<std::__1::allocator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>\20>\20>\20>::~unique_ptr\28\29,operator\20delete\28void*\29
std::__1::equal_to<cache_key>::operator\28\29\28cache_key\20const&\2c\20cache_key\20const&\29\20const,memcmp
std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__rehash\28unsigned\20long\29,operator\20new\28unsigned\20long\29
std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__rehash\28unsigned\20long\29,operator\20delete\28void*\29
@@ -1053,9 +1058,11 @@ compile\28char\20const*\29,abort
reuse\28re2::RE2*\29,opa_builtin_cache_get
reuse\28re2::RE2*\29,operator\20new\28unsigned\20long\29
reuse\28re2::RE2*\29,opa_builtin_cache_set
reuse\28re2::RE2*\29,re2::RE2::~RE2\28\29
reuse\28re2::RE2*\29,operator\20delete\28void*\29
reuse\28re2::RE2*\29,std::__1::__hash_table<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::__unordered_map_hasher<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::__unordered_map_equal<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\20>::remove\28std::__1::__hash_const_iterator<std::__1::__hash_node<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20void*>*>\29
reuse\28re2::RE2*\29,std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>::basic_string\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20const&\29
reuse\28re2::RE2*\29,std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::__unordered_map_hasher<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::__unordered_map_equal<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\20>::__emplace_unique_key_args<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::pair<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator
<char>\20>\20const&\2c\20std::__1::pair<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>&&\29
reuse\28re2::RE2*\29,operator\20delete\28void*\29
std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20void*>*>\20std::__1::__hash_table<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::__unordered_map_hasher<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::__unordered_map_equal<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\20>::find<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20const&\29,memcmp
std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::__unordered_map_hasher<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::__unordered_map_equal<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\20>::__emplace_unique_key_args<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::pair<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20const&\2
c\20std::__1::pair<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>&&\29,memcmp
std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::__unordered_map_hasher<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::__unordered_map_equal<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\20>::__emplace_unique_key_args<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::pair<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20const&\2
c\20std::__1::pair<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>&&\29,operator\20new\28unsigned\20long\29
1 opa_agg_count opa_value_type
1000 opa_glob_match std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::find<cache_key>\28cache_key\20const&\29
1001 opa_glob_match std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>::operator=\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20const&\29
1002 opa_glob_match glob_translate\28char\20const*\2c\20unsigned\20long\2c\20std::__1::vector<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::allocator<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20const&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>*\29
1003 opa_glob_match std::__1::unordered_map<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::allocator<std::__1::pair<cache_key\20const\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::erase\28std::__1::__hash_map_iterator<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\20>\29
1004 opa_glob_match std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>::pair<cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\2c\20false>\28cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\29
1005 opa_glob_match std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__emplace_unique_key_args<cache_key\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\28cache_key\20const&\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>&&\29
1006 opa_glob_match opa_string
1011 void\20std::__1::vector<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::allocator<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>::__push_back_slow_path<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&&\29 abort
1012 std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::find<cache_key>\28cache_key\20const&\29 std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>::operator\28\29\28cache_key\20const&\29\20const
1013 std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::find<cache_key>\28cache_key\20const&\29 std::__1::equal_to<cache_key>::operator\28\29\28cache_key\20const&\2c\20cache_key\20const&\29\20const
1014 std::__1::unordered_map<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::allocator<std::__1::pair<cache_key\20const\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::erase\28std::__1::__hash_map_iterator<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\20>\29 std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::remove\28std::__1::__hash_const_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\29
1015 std::__1::unordered_map<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::allocator<std::__1::pair<cache_key\20const\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::erase\28std::__1::__hash_map_iterator<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\20>\29 std::__1::unique_ptr<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>\2c\20std::__1::__hash_node_destructor<std::__1::allocator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>\20>\20>\20>::~unique_ptr\28\29
1016 std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>::pair<cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\2c\20false>\28cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\29 std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>::basic_string\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20const&\29
1017 std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>::pair<cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\2c\20false>\28cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\29 operator\20new\28unsigned\20long\29
1018 std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>::pair<cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\2c\20false>\28cache_key&\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>&\29 abort
1021 std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__emplace_unique_key_args<cache_key\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\28cache_key\20const&\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>&&\29 operator\20new\28unsigned\20long\29
1022 std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__emplace_unique_key_args<cache_key\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\28cache_key\20const&\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>&&\29 std::__1::__next_prime\28unsigned\20long\29
1023 std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__emplace_unique_key_args<cache_key\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\28cache_key\20const&\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>&&\29 
std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__rehash\28unsigned\20long\29
1024 std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__emplace_unique_key_args<cache_key\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\28cache_key\20const&\2c\20std::__1::pair<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>&&\29 std::__1::unique_ptr<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>\2c\20std::__1::__hash_node_destructor<std::__1::allocator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>\20>\20>\20>::~unique_ptr\28\29
1025 std::__1::unique_ptr<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>\2c\20std::__1::__hash_node_destructor<std::__1::allocator<std::__1::__hash_node<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20void*>\20>\20>\20>::~unique_ptr\28\29 operator\20delete\28void*\29
1026 std::__1::equal_to<cache_key>::operator\28\29\28cache_key\20const&\2c\20cache_key\20const&\29\20const memcmp
1027 std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__rehash\28unsigned\20long\29 operator\20new\28unsigned\20long\29
1028 std::__1::__hash_table<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::__unordered_map_hasher<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<cache_key>\2c\20std::__1::equal_to<cache_key>\2c\20true>\2c\20std::__1::__unordered_map_equal<cache_key\2c\20std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<cache_key>\2c\20std::__1::hash<cache_key>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<cache_key\2c\20std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\20>\20>::__rehash\28unsigned\20long\29 operator\20delete\28void*\29
1058 reuse\28re2::RE2*\29 opa_builtin_cache_get
1059 reuse\28re2::RE2*\29 operator\20new\28unsigned\20long\29
1060 reuse\28re2::RE2*\29 opa_builtin_cache_set
1061 reuse\28re2::RE2*\29 re2::RE2::~RE2\28\29
1062 reuse\28re2::RE2*\29 operator\20delete\28void*\29
1063 reuse\28re2::RE2*\29 std::__1::__hash_table<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::__unordered_map_hasher<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::__unordered_map_equal<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\20>::remove\28std::__1::__hash_const_iterator<std::__1::__hash_node<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20void*>*>\29
1064 reuse\28re2::RE2*\29 std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>::basic_string\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20const&\29
1065 reuse\28re2::RE2*\29 std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::__unordered_map_hasher<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::__unordered_map_equal<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\20>::__emplace_unique_key_args<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::pair<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allo
cator<char>\20>\20const&\2c\20std::__1::pair<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>&&\29
reuse\28re2::RE2*\29 operator\20delete\28void*\29
1066 std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20void*>*>\20std::__1::__hash_table<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::__unordered_map_hasher<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::__unordered_map_equal<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\20>::find<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20const&\29 memcmp
1067 std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::__unordered_map_hasher<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::__unordered_map_equal<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\20>::__emplace_unique_key_args<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::pair<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20con
st&\2c\20std::__1::pair<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>&&\29 memcmp
1068 std::__1::pair<std::__1::__hash_iterator<std::__1::__hash_node<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20void*>*>\2c\20bool>\20std::__1::__hash_table<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::__unordered_map_hasher<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::__unordered_map_equal<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\2c\20std::__1::equal_to<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20std::__1::hash<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20>\2c\20true>\2c\20std::__1::allocator<std::__1::__hash_value_type<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\20>::__emplace_unique_key_args<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20std::__1::pair<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>\20>\28std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\20con
st&\2c\20std::__1::pair<std::__1::basic_string<char\2c\20std::__1::char_traits<char>\2c\20std::__1::allocator<char>\20>\2c\20re2::RE2*>&&\29 operator\20new\28unsigned\20long\29

View File

Binary file not shown.

View File

@@ -1332,7 +1332,7 @@ func (c *Compiler) compileWithStmt(with *ir.WithStmt, result *[]instruction.Inst
return nil
}
func (c *Compiler) compileUpsert(local ir.Local, path []int, value ir.Operand, loc ir.Location, instrs []instruction.Instruction) []instruction.Instruction {
func (c *Compiler) compileUpsert(local ir.Local, path []int, value ir.Operand, _ ir.Location, instrs []instruction.Instruction) []instruction.Instruction {
lcopy := c.genLocal() // holds copy of local
instrs = append(instrs, instruction.GetLocal{Index: c.local(local)})

View File

@@ -788,7 +788,7 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema)
return nil
}
func (d *Schema) parseReference(documentNode interface{}, currentSchema *SubSchema) error {
func (d *Schema) parseReference(_ interface{}, currentSchema *SubSchema) error {
var (
refdDocumentNode interface{}
dsp *schemaPoolDocument

View File

@@ -556,10 +556,10 @@ func (v *SubSchema) validateArray(currentSubSchema *SubSchema, value []interface
if validationResult.Valid() {
validatedOne = true
break
} else {
if bestValidationResult == nil || validationResult.score > bestValidationResult.score {
bestValidationResult = validationResult
}
}
if bestValidationResult == nil || validationResult.score > bestValidationResult.score {
bestValidationResult = validationResult
}
}
if !validatedOne {

View File

@@ -25,7 +25,7 @@ func init() {
)
})
observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
observers.OnFragment(func(_ *Walker, fragment *ast.FragmentDefinition) {
if fragment.Definition == nil || fragment.TypeCondition == "" || fragment.Definition.IsCompositeType() {
return
}

View File

@@ -10,7 +10,7 @@ import (
func init() {
AddRule("KnownArgumentNames", func(observers *Events, addError AddErrFunc) {
// A GraphQL field is only valid if all supplied arguments are defined by that field.
observers.OnField(func(walker *Walker, field *ast.Field) {
observers.OnField(func(_ *Walker, field *ast.Field) {
if field.Definition == nil || field.ObjectDefinition == nil {
return
}
@@ -33,7 +33,7 @@ func init() {
}
})
observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
observers.OnDirective(func(_ *Walker, directive *ast.Directive) {
if directive.Definition == nil {
return
}

View File

@@ -15,7 +15,7 @@ func init() {
Column int
}
var seen = map[mayNotBeUsedDirective]bool{}
observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
observers.OnDirective(func(_ *Walker, directive *ast.Directive) {
if directive.Definition == nil {
addError(
Message(`Unknown directive "@%s".`, directive.Name),

View File

@@ -9,7 +9,7 @@ import (
func init() {
AddRule("KnownFragmentNames", func(observers *Events, addError AddErrFunc) {
observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
observers.OnFragmentSpread(func(_ *Walker, fragmentSpread *ast.FragmentSpread) {
if fragmentSpread.Definition == nil {
addError(
Message(`Unknown fragment "%s".`, fragmentSpread.Name),

View File

@@ -13,13 +13,13 @@ func init() {
inFragmentDefinition := false
fragmentNameUsed := make(map[string]bool)
observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) {
observers.OnFragmentSpread(func(_ *Walker, fragmentSpread *ast.FragmentSpread) {
if !inFragmentDefinition {
fragmentNameUsed[fragmentSpread.Name] = true
}
})
observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
observers.OnFragment(func(_ *Walker, fragment *ast.FragmentDefinition) {
inFragmentDefinition = true
if !fragmentNameUsed[fragment.Name] {
addError(

View File

@@ -9,7 +9,7 @@ import (
func init() {
AddRule("NoUnusedVariables", func(observers *Events, addError AddErrFunc) {
observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
observers.OnOperation(func(_ *Walker, operation *ast.OperationDefinition) {
for _, varDef := range operation.VariableDefinitions {
if varDef.Used {
continue

View File

@@ -9,7 +9,7 @@ import (
func init() {
AddRule("ProvidedRequiredArguments", func(observers *Events, addError AddErrFunc) {
observers.OnField(func(walker *Walker, field *ast.Field) {
observers.OnField(func(_ *Walker, field *ast.Field) {
if field.Definition == nil {
return
}
@@ -35,7 +35,7 @@ func init() {
}
})
observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
observers.OnDirective(func(_ *Walker, directive *ast.Directive) {
if directive.Definition == nil {
return
}

View File

@@ -9,11 +9,11 @@ import (
func init() {
AddRule("UniqueArgumentNames", func(observers *Events, addError AddErrFunc) {
observers.OnField(func(walker *Walker, field *ast.Field) {
observers.OnField(func(_ *Walker, field *ast.Field) {
checkUniqueArgs(field.Arguments, addError)
})
observers.OnDirective(func(walker *Walker, directive *ast.Directive) {
observers.OnDirective(func(_ *Walker, directive *ast.Directive) {
checkUniqueArgs(directive.Arguments, addError)
})
})

View File

@@ -9,7 +9,7 @@ import (
func init() {
AddRule("UniqueDirectivesPerLocation", func(observers *Events, addError AddErrFunc) {
observers.OnDirectiveList(func(walker *Walker, directives []*ast.Directive) {
observers.OnDirectiveList(func(_ *Walker, directives []*ast.Directive) {
seen := map[string]bool{}
for _, dir := range directives {

View File

@@ -11,7 +11,7 @@ func init() {
AddRule("UniqueFragmentNames", func(observers *Events, addError AddErrFunc) {
seenFragments := map[string]bool{}
observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
observers.OnFragment(func(_ *Walker, fragment *ast.FragmentDefinition) {
if seenFragments[fragment.Name] {
addError(
Message(`There can be only one fragment named "%s".`, fragment.Name),

View File

@@ -9,7 +9,7 @@ import (
func init() {
AddRule("UniqueInputFieldNames", func(observers *Events, addError AddErrFunc) {
observers.OnValue(func(walker *Walker, value *ast.Value) {
observers.OnValue(func(_ *Walker, value *ast.Value) {
if value.Kind != ast.ObjectValue {
return
}

View File

@@ -11,7 +11,7 @@ func init() {
AddRule("UniqueOperationNames", func(observers *Events, addError AddErrFunc) {
seen := map[string]bool{}
observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
observers.OnOperation(func(_ *Walker, operation *ast.OperationDefinition) {
if seen[operation.Name] {
addError(
Message(`There can be only one operation named "%s".`, operation.Name),

View File

@@ -9,7 +9,7 @@ import (
func init() {
AddRule("UniqueVariableNames", func(observers *Events, addError AddErrFunc) {
observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
observers.OnOperation(func(_ *Walker, operation *ast.OperationDefinition) {
seen := map[string]int{}
for _, def := range operation.VariableDefinitions {
// add the same error only once per a variable.

View File

@@ -13,7 +13,7 @@ import (
func init() {
AddRule("ValuesOfCorrectType", func(observers *Events, addError AddErrFunc) {
observers.OnValue(func(walker *Walker, value *ast.Value) {
observers.OnValue(func(_ *Walker, value *ast.Value) {
if value.Definition == nil || value.ExpectedType == nil {
return
}

View File

@@ -9,7 +9,7 @@ import (
func init() {
AddRule("VariablesAreInputTypes", func(observers *Events, addError AddErrFunc) {
observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
observers.OnOperation(func(_ *Walker, operation *ast.OperationDefinition) {
for _, def := range operation.VariableDefinitions {
if def.Definition == nil {
continue

View File

@@ -1523,7 +1523,7 @@ func (p *Planner) planValue(t ast.Value, loc *ast.Location, iter planiter) error
}
}
func (p *Planner) planNull(null ast.Null, iter planiter) error {
func (p *Planner) planNull(_ ast.Null, iter planiter) error {
target := p.newLocal()

View File

@@ -174,7 +174,7 @@ type httpSigner struct {
PayloadHash string
}
func (s *httpSigner) setRequiredSigningFields(headers http.Header, query url.Values) {
func (s *httpSigner) setRequiredSigningFields(headers http.Header, _ url.Values) {
amzDate := s.Time.Format(timeFormat)
headers.Set(AmzRegionSetKey, strings.Join(s.RegionSet, ","))

View File

@@ -8,7 +8,7 @@ import (
"github.com/open-policy-agent/opa/logging"
)
// DoRequestWithClient is a convenience function to get the body of a http response with
// DoRequestWithClient is a convenience function to get the body of an HTTP response with
// appropriate error-handling boilerplate and logging.
func DoRequestWithClient(req *http.Request, client *http.Client, desc string, logger logging.Logger) ([]byte, error) {
resp, err := client.Do(req)

View File

@@ -11,14 +11,9 @@ const doubleSpace = " "
// contain multiple side-by-side spaces.
func StripExcessSpaces(str string) string {
var j, k, l, m, spaces int
// Trim trailing spaces
for j = len(str) - 1; j >= 0 && str[j] == ' '; j-- {
}
// Trim leading spaces
for k = 0; k < j && str[k] == ' '; k++ {
}
str = str[k : j+1]
// Trim leading and trailing spaces
str = strings.Trim(str, " ")
// Strip multiple spaces.
j = strings.Index(str, doubleSpace)

View File

@@ -124,7 +124,7 @@ func LoadPaths(paths []string,
processAnnotations bool,
caps *ast.Capabilities,
fsys fs.FS) (*LoadPathsResult, error) {
return LoadPathsForRegoVersion(ast.RegoV0, paths, filter, asBundle, bvc, skipVerify, processAnnotations, caps, fsys)
return LoadPathsForRegoVersion(ast.RegoV0, paths, filter, asBundle, bvc, skipVerify, processAnnotations, false, caps, fsys)
}
func LoadPathsForRegoVersion(regoVersion ast.RegoVersion,
@@ -134,6 +134,7 @@ func LoadPathsForRegoVersion(regoVersion ast.RegoVersion,
bvc *bundle.VerificationConfig,
skipVerify bool,
processAnnotations bool,
followSymlinks bool,
caps *ast.Capabilities,
fsys fs.FS) (*LoadPathsResult, error) {
@@ -161,6 +162,7 @@ func LoadPathsForRegoVersion(regoVersion ast.RegoVersion,
WithProcessAnnotation(processAnnotations).
WithCapabilities(caps).
WithRegoVersion(regoVersion).
WithFollowSymlinks(followSymlinks).
AsBundle(path)
if err != nil {
return nil, err

View File

@@ -63,6 +63,14 @@ func TruncateFilePaths(maxIdealWidth, maxWidth int, path ...string) (map[string]
return result, longestLocation
}
// Truncate shortens str so the result is at most maxWidth characters,
// replacing the removed tail with "..." when truncation occurs. Strings
// that already fit within maxWidth are returned unchanged.
//
// The ellipsis needs three characters of the budget; the original
// str[:maxWidth-3] would panic with a negative slice index for
// maxWidth < 3, so degenerate widths now return the bare ellipsis.
// Note: slicing is byte-based, so a multi-byte rune at the cut point
// may be split (same as the original behavior).
func Truncate(str string, maxWidth int) string {
	if len(str) <= maxWidth {
		return str
	}
	// Reserve three characters for the "..." marker; widths that cannot
	// fit any payload alongside it collapse to just the marker.
	if maxWidth <= 3 {
		return "..."
	}
	return str[:maxWidth-3] + "..."
}
func getPathFromFirstSeparator(path string) string {
s := filepath.Dir(path)
s = strings.TrimPrefix(s, string(filepath.Separator))

View File

@@ -375,10 +375,10 @@ func readTableSection(r io.Reader, s *module.TableSection) error {
return err
} else if elem != constant.ElementTypeAnyFunc {
return fmt.Errorf("illegal element type")
} else {
table.Type = types.Anyfunc
}
table.Type = types.Anyfunc
if err := readLimits(r, &table.Lim); err != nil {
return err
}

View File

@@ -25,11 +25,11 @@ type prettyPrinter struct {
w io.Writer
}
func (pp *prettyPrinter) Before(x interface{}) {
func (pp *prettyPrinter) Before(_ interface{}) {
pp.depth++
}
func (pp *prettyPrinter) After(x interface{}) {
func (pp *prettyPrinter) After(_ interface{}) {
pp.depth--
}

View File

@@ -41,7 +41,7 @@ func (e *Errors) add(err error) {
// unsupportedDocumentType records the path of a document whose
// top-level value is not an object and therefore cannot be loaded.
type unsupportedDocumentType string

// Error implements the error interface, reporting the offending path
// together with the reason the document was rejected.
func (path unsupportedDocumentType) Error() string {
	return string(path) + ": document must be of type object"
}
type unrecognizedFile string

View File

@@ -81,7 +81,7 @@ type Filter = filter.LoaderFilter
// GlobExcludeName excludes files and directories whose names do not match the
// shell style pattern at minDepth or greater.
func GlobExcludeName(pattern string, minDepth int) Filter {
return func(abspath string, info fs.FileInfo, depth int) bool {
return func(_ string, info fs.FileInfo, depth int) bool {
match, _ := filepath.Match(pattern, info.Name())
return match && depth >= minDepth
}
@@ -103,6 +103,7 @@ type FileLoader interface {
WithCapabilities(*ast.Capabilities) FileLoader
WithJSONOptions(*astJSON.Options) FileLoader
WithRegoVersion(ast.RegoVersion) FileLoader
WithFollowSymlinks(bool) FileLoader
}
// NewFileLoader returns a new FileLoader instance.
@@ -114,14 +115,15 @@ func NewFileLoader() FileLoader {
}
type fileLoader struct {
metrics metrics.Metrics
filter Filter
bvc *bundle.VerificationConfig
skipVerify bool
files map[string]bundle.FileInfo
opts ast.ParserOptions
fsys fs.FS
reader io.Reader
metrics metrics.Metrics
filter Filter
bvc *bundle.VerificationConfig
skipVerify bool
files map[string]bundle.FileInfo
opts ast.ParserOptions
fsys fs.FS
reader io.Reader
followSymlinks bool
}
// WithFS provides an fs.FS to use for loading files. You can pass nil to
@@ -188,6 +190,12 @@ func (fl *fileLoader) WithRegoVersion(version ast.RegoVersion) FileLoader {
return fl
}
// WithFollowSymlinks enables or disables following symlinks when loading files
func (fl *fileLoader) WithFollowSymlinks(followSymlinks bool) FileLoader {
fl.followSymlinks = followSymlinks
return fl
}
// All returns a Result object loaded (recursively) from the specified paths.
func (fl fileLoader) All(paths []string) (*Result, error) {
return fl.Filtered(paths, nil)
@@ -249,6 +257,7 @@ func (fl fileLoader) AsBundle(path string) (*bundle.Bundle, error) {
if err != nil {
return nil, err
}
bundleLoader = bundleLoader.WithFollowSymlinks(fl.followSymlinks)
br := bundle.NewCustomReader(bundleLoader).
WithMetrics(fl.metrics).
@@ -257,6 +266,7 @@ func (fl fileLoader) AsBundle(path string) (*bundle.Bundle, error) {
WithProcessAnnotations(fl.opts.ProcessAnnotation).
WithCapabilities(fl.opts.Capabilities).
WithJSONOptions(fl.opts.JSONOptions).
WithFollowSymlinks(fl.followSymlinks).
WithRegoVersion(fl.opts.RegoVersion)
// For bundle directories add the full path in front of module file names
@@ -486,7 +496,7 @@ func AsBundle(path string) (*bundle.Bundle, error) {
// AllRegos returns a Result object loaded (recursively) with all Rego source
// files from the specified paths.
func AllRegos(paths []string) (*Result, error) {
return NewFileLoader().Filtered(paths, func(_ string, info os.FileInfo, depth int) bool {
return NewFileLoader().Filtered(paths, func(_ string, info os.FileInfo, _ int) bool {
return !info.IsDir() && !strings.HasSuffix(info.Name(), bundle.RegoExt)
})
}
@@ -522,7 +532,7 @@ func Paths(path string, recurse bool) (paths []string, err error) {
if err != nil {
return nil, err
}
err = filepath.Walk(path, func(f string, info os.FileInfo, err error) error {
err = filepath.Walk(path, func(f string, _ os.FileInfo, _ error) error {
if !recurse {
if path != f && path != filepath.Dir(f) {
return filepath.SkipDir

View File

@@ -286,11 +286,10 @@ func ValidateAndInjectDefaultsForTriggerMode(a, b *TriggerMode) (*TriggerMode, e
return nil, err
}
return a, nil
} else {
t := DefaultTriggerMode
return &t, nil
}
t := DefaultTriggerMode
return &t, nil
}
type namedplugin struct {

View File

@@ -40,6 +40,8 @@ import (
const (
// Default to s3 when the service for sigv4 signing is not specified for backwards compatibility
awsSigv4SigningDefaultService = "s3"
// Default to urn:ietf:params:oauth:client-assertion-type:jwt-bearer for ClientAssertionType when not specified
defaultClientAssertionType = "urn:ietf:params:oauth:client-assertion-type:jwt-bearer"
)
// DefaultTLSConfig defines standard TLS configurations based on the Config
@@ -281,6 +283,9 @@ type oauth2ClientCredentialsAuthPlugin struct {
AdditionalParameters map[string]string `json:"additional_parameters,omitempty"`
AWSKmsKey *awsKmsKeyConfig `json:"aws_kms,omitempty"`
AWSSigningPlugin *awsSigningAuthPlugin `json:"aws_signing,omitempty"`
ClientAssertionType string `json:"client_assertion_type"`
ClientAssertion string `json:"client_assertion"`
ClientAssertionPath string `json:"client_assertion_path"`
signingKey *keys.Config
signingKeyParsed interface{}
@@ -294,16 +299,13 @@ type oauth2Token struct {
ExpiresAt time.Time
}
func (ap *oauth2ClientCredentialsAuthPlugin) createAuthJWT(ctx context.Context, claims map[string]interface{}, signingKey interface{}) (*string, error) {
func (ap *oauth2ClientCredentialsAuthPlugin) createAuthJWT(ctx context.Context, extClaims map[string]interface{}, signingKey interface{}) (*string, error) {
now := time.Now()
baseClaims := map[string]interface{}{
claims := map[string]interface{}{
"iat": now.Unix(),
"exp": now.Add(10 * time.Minute).Unix(),
}
if claims == nil {
claims = make(map[string]interface{})
}
for k, v := range baseClaims {
for k, v := range extClaims {
claims[k] = v
}
@@ -462,14 +464,30 @@ func (ap *oauth2ClientCredentialsAuthPlugin) NewClient(c Config) (*http.Client,
return nil, errors.New("token_url required to use https scheme")
}
if ap.GrantType == grantTypeClientCredentials {
if ap.AWSKmsKey != nil && (ap.ClientSecret != "" || ap.SigningKeyID != "") ||
(ap.ClientSecret != "" && ap.SigningKeyID != "") {
return nil, errors.New("can only use one of client_secret, signing_key or signing_kms_key for client_credentials")
clientCredentialExists := make(map[string]bool)
clientCredentialExists["client_secret"] = ap.ClientSecret != ""
clientCredentialExists["signing_key"] = ap.SigningKeyID != ""
clientCredentialExists["aws_kms"] = ap.AWSKmsKey != nil
clientCredentialExists["client_assertion"] = ap.ClientAssertion != ""
clientCredentialExists["client_assertion_path"] = ap.ClientAssertionPath != ""
var notEmptyVarCount int
for _, credentialSet := range clientCredentialExists {
if credentialSet {
notEmptyVarCount++
}
}
if ap.SigningKeyID == "" && ap.AWSKmsKey == nil && (ap.ClientID == "" || ap.ClientSecret == "") {
return nil, errors.New("client_id and client_secret required")
if notEmptyVarCount == 0 {
return nil, errors.New("please provide one of client_secret, signing_key, aws_kms, client_assertion, or client_assertion_path required")
}
if ap.AWSKmsKey != nil {
if notEmptyVarCount > 1 {
return nil, errors.New("can only use one of client_secret, signing_key, aws_kms, client_assertion, or client_assertion_path")
}
if clientCredentialExists["aws_kms"] {
if ap.AWSSigningPlugin == nil {
return nil, errors.New("aws_kms and aws_signing required")
}
@@ -478,6 +496,24 @@ func (ap *oauth2ClientCredentialsAuthPlugin) NewClient(c Config) (*http.Client,
if err != nil {
return nil, err
}
} else if clientCredentialExists["client_assertion"] {
if ap.ClientAssertionType == "" {
ap.ClientAssertionType = defaultClientAssertionType
}
if ap.ClientID == "" {
return nil, errors.New("client_id and client_assertion required")
}
} else if clientCredentialExists["client_assertion_path"] {
if ap.ClientAssertionType == "" {
ap.ClientAssertionType = defaultClientAssertionType
}
if ap.ClientID == "" {
return nil, errors.New("client_id and client_assertion_path required")
}
} else if clientCredentialExists["client_secret"] {
if ap.ClientID == "" {
return nil, errors.New("client_id and client_secret required")
}
}
}
@@ -505,12 +541,34 @@ func (ap *oauth2ClientCredentialsAuthPlugin) requestToken(ctx context.Context) (
if err != nil {
return nil, err
}
body.Add("client_assertion_type", "urn:ietf:params:oauth:client-assertion-type:jwt-bearer")
body.Add("client_assertion_type", defaultClientAssertionType)
body.Add("client_assertion", *authJwt)
if ap.ClientID != "" {
body.Add("client_id", ap.ClientID)
}
} else if ap.ClientAssertion != "" {
if ap.ClientAssertionType == "" {
ap.ClientAssertionType = defaultClientAssertionType
}
if ap.ClientID != "" {
body.Add("client_id", ap.ClientID)
}
body.Add("client_assertion_type", ap.ClientAssertionType)
body.Add("client_assertion", ap.ClientAssertion)
} else if ap.ClientAssertionPath != "" {
if ap.ClientAssertionType == "" {
ap.ClientAssertionType = defaultClientAssertionType
}
bytes, err := os.ReadFile(ap.ClientAssertionPath)
if err != nil {
return nil, err
}
if ap.ClientID != "" {
body.Add("client_id", ap.ClientID)
}
body.Add("client_assertion_type", ap.ClientAssertionType)
body.Add("client_assertion", strings.TrimSpace(string(bytes)))
}
}
@@ -693,7 +751,7 @@ func (ap *clientTLSAuthPlugin) NewClient(c Config) (*http.Client, error) {
return client, nil
}
func (ap *clientTLSAuthPlugin) Prepare(req *http.Request) error {
func (ap *clientTLSAuthPlugin) Prepare(_ *http.Request) error {
return nil
}

View File

@@ -1496,7 +1496,7 @@ func (r *Rego) Compile(ctx context.Context, opts ...CompileOption) (*CompileResu
return r.compileWasm(modules, queries, compileQueryType) // TODO(sr) control flow is funky here
}
func (r *Rego) compileWasm(modules []*ast.Module, queries []ast.Body, qType queryType) (*CompileResult, error) {
func (r *Rego) compileWasm(_ []*ast.Module, queries []ast.Body, qType queryType) (*CompileResult, error) {
policy, err := r.planQuery(queries, qType)
if err != nil {
return nil, err
@@ -1871,7 +1871,7 @@ func (r *Rego) loadFiles(ctx context.Context, txn storage.Transaction, m metrics
return nil
}
func (r *Rego) loadBundles(ctx context.Context, txn storage.Transaction, m metrics.Metrics) error {
func (r *Rego) loadBundles(_ context.Context, _ storage.Transaction, m metrics.Metrics) error {
if len(r.bundlePaths) == 0 {
return nil
}
@@ -2035,7 +2035,7 @@ func (r *Rego) prepareImports() ([]*ast.Import, error) {
return imports, nil
}
func (r *Rego) compileQuery(query ast.Body, imports []*ast.Import, m metrics.Metrics, extras []extraStage) (ast.QueryCompiler, ast.Body, error) {
func (r *Rego) compileQuery(query ast.Body, imports []*ast.Import, _ metrics.Metrics, extras []extraStage) (ast.QueryCompiler, ast.Body, error) {
var pkg *ast.Package
if r.pkg != "" {
@@ -2476,7 +2476,7 @@ func (r *Rego) partial(ctx context.Context, ectx *EvalContext) (*PartialQueries,
return pq, nil
}
func (r *Rego) rewriteQueryToCaptureValue(qc ast.QueryCompiler, query ast.Body) (ast.Body, error) {
func (r *Rego) rewriteQueryToCaptureValue(_ ast.QueryCompiler, query ast.Body) (ast.Body, error) {
checkCapture := iteration(query) || len(query) > 1
@@ -2593,7 +2593,7 @@ type transactionCloser func(ctx context.Context, err error) error
// regardless of status.
func (r *Rego) getTxn(ctx context.Context) (storage.Transaction, transactionCloser, error) {
noopCloser := func(ctx context.Context, err error) error {
noopCloser := func(_ context.Context, _ error) error {
return nil // no-op default
}

View File

@@ -43,7 +43,7 @@ func (u *bindings) Iter(caller *bindings, iter func(*ast.Term, *ast.Term) error)
var err error
u.values.Iter(func(k *ast.Term, v value) bool {
u.values.Iter(func(k *ast.Term, _ value) bool {
if err != nil {
return true
}

View File

@@ -222,11 +222,11 @@ func (p *CopyPropagator) plugBindings(pctx *plugContext, expr *ast.Expr) *ast.Ex
// errors unreachable.
x, err := ast.Transform(xform, expr.Copy())
if expr, ok := x.(*ast.Expr); !ok || err != nil {
expr, ok := x.(*ast.Expr)
if !ok || err != nil {
panic("unreachable")
} else {
return expr
}
return expr
}
type bindingPlugTransform struct {

View File

@@ -237,6 +237,10 @@ func (e *eval) traceWasm(x ast.Node, target *ast.Ref) {
e.traceEvent(WasmOp, x, "", target)
}
func (e *eval) traceUnify(a, b *ast.Term) {
e.traceEvent(UnifyOp, ast.Equality.Expr(a, b), "", nil)
}
func (e *eval) traceEvent(op Op, x ast.Node, msg string, target *ast.Ref) {
if !e.traceEnabled {
@@ -275,6 +279,7 @@ func (e *eval) traceEvent(op Op, x ast.Node, msg string, target *ast.Ref) {
evt.Locals = ast.NewValueMap()
evt.LocalMetadata = map[ast.Var]VarMetadata{}
evt.localVirtualCacheSnapshot = ast.NewValueMap()
_ = e.bindings.Iter(nil, func(k, v *ast.Term) error {
original := k.Value.(ast.Var)
@@ -290,15 +295,21 @@ func (e *eval) traceEvent(op Op, x ast.Node, msg string, target *ast.Ref) {
}) // cannot return error
ast.WalkTerms(x, func(term *ast.Term) bool {
if v, ok := term.Value.(ast.Var); ok {
if _, ok := evt.LocalMetadata[v]; !ok {
if rewritten, ok := e.rewrittenVar(v); ok {
evt.LocalMetadata[v] = VarMetadata{
switch x := term.Value.(type) {
case ast.Var:
if _, ok := evt.LocalMetadata[x]; !ok {
if rewritten, ok := e.rewrittenVar(x); ok {
evt.LocalMetadata[x] = VarMetadata{
Name: rewritten,
Location: term.Loc(),
}
}
}
case ast.Ref:
groundRef := x.GroundPrefix()
if v, _ := e.virtualCache.Get(groundRef); v != nil {
evt.localVirtualCacheSnapshot.Put(groundRef, v.Value)
}
}
return false
})
@@ -858,7 +869,7 @@ func (e *eval) biunify(a, b *ast.Term, b1, b2 *bindings, iter unifyIterator) err
a, b1 = b1.apply(a)
b, b2 = b2.apply(b)
if e.traceEnabled {
e.traceEvent(UnifyOp, ast.Equality.Expr(a, b), "", nil)
e.traceUnify(a, b)
}
switch vA := a.Value.(type) {
case ast.Var, ast.Ref, *ast.ArrayComprehension, *ast.SetComprehension, *ast.ObjectComprehension:
@@ -1096,10 +1107,10 @@ func (e *eval) biunifyComprehension(a, b *ast.Term, b1, b2 *bindings, swap bool,
return err
} else if value != nil {
return e.biunify(value, b, b1, b2, iter)
} else {
e.instr.counterIncr(evalOpComprehensionCacheMiss)
}
e.instr.counterIncr(evalOpComprehensionCacheMiss)
switch a := a.Value.(type) {
case *ast.ArrayComprehension:
return e.biunifyComprehensionArray(a, b, b1, b2, iter)
@@ -2560,7 +2571,7 @@ func (e evalVirtualPartial) evalOneRulePreUnify(iter unifyIterator, rule *ast.Ru
}
// Walk the dynamic portion of rule ref and key to unify vars
err := child.biunifyRuleHead(e.pos+1, e.ref, rule, e.bindings, child.bindings, func(pos int) error {
err := child.biunifyRuleHead(e.pos+1, e.ref, rule, e.bindings, child.bindings, func(_ int) error {
defined = true
return child.eval(func(child *eval) error {
@@ -2648,7 +2659,7 @@ func (e evalVirtualPartial) evalOneRulePostUnify(iter unifyIterator, rule *ast.R
err := child.eval(func(child *eval) error {
defined = true
return e.e.biunifyRuleHead(e.pos+1, e.ref, rule, e.bindings, child.bindings, func(pos int) error {
return e.e.biunifyRuleHead(e.pos+1, e.ref, rule, e.bindings, child.bindings, func(_ int) error {
return e.evalOneRuleContinue(iter, rule, child)
})
})
@@ -2724,7 +2735,7 @@ func (e evalVirtualPartial) partialEvalSupport(iter unifyIterator) error {
return e.e.saveUnify(term, e.rterm, e.bindings, e.rbindings, iter)
}
func (e evalVirtualPartial) partialEvalSupportRule(rule *ast.Rule, path ast.Ref) (bool, error) {
func (e evalVirtualPartial) partialEvalSupportRule(rule *ast.Rule, _ ast.Ref) (bool, error) {
child := e.e.child(rule.Body)
child.traceEnter(rule)

View File

@@ -10,6 +10,8 @@ import (
"github.com/open-policy-agent/opa/topdown/builtins"
)
const globCacheMaxSize = 100
var globCacheLock = sync.Mutex{}
var globCache map[string]glob.Glob
@@ -64,6 +66,13 @@ func globCompileAndMatch(id, pattern, match string, delimiters []rune) (bool, er
if p, err = glob.Compile(pattern, delimiters...); err != nil {
return false, err
}
if len(globCache) >= globCacheMaxSize {
// Delete a (semi-)random key to make room for the new one.
for k := range globCache {
delete(globCache, k)
break
}
}
globCache[id] = p
}
out := p.Match(match)

View File

@@ -68,6 +68,7 @@ var allowedKeyNames = [...]string{
"raise_error",
"caching_mode",
"max_retry_attempts",
"cache_ignored_headers",
}
// ref: https://www.rfc-editor.org/rfc/rfc7231#section-6.1
@@ -168,12 +169,17 @@ func getHTTPResponse(bctx BuiltinContext, req ast.Object) (*ast.Term, error) {
bctx.Metrics.Timer(httpSendLatencyMetricKey).Start()
reqExecutor, err := newHTTPRequestExecutor(bctx, req)
key, err := getKeyFromRequest(req)
if err != nil {
return nil, err
}
reqExecutor, err := newHTTPRequestExecutor(bctx, req, key)
if err != nil {
return nil, err
}
// Check if cache already has a response for this query
// set headers to exclude cache_ignored_headers
resp, err := reqExecutor.CheckCache()
if err != nil {
return nil, err
@@ -198,6 +204,43 @@ func getHTTPResponse(bctx BuiltinContext, req ast.Object) (*ast.Term, error) {
return ast.NewTerm(resp), nil
}
// getKeyFromRequest derives the cache key for an http.send request object.
// The key is a deep copy of the request with any headers listed under
// "cache_ignored_headers" removed, so that requests differing only in the
// ignored headers share a cache entry. The "cache_ignored_headers" entry
// itself is normalized to null in the key, because otherwise equivalent
// requests might carry different exclusion lists.
func getKeyFromRequest(req ast.Object) (ast.Object, error) {
	// Deep copy so mutations of the key never leak into the request object.
	key := req.Copy()

	ignoredTerm := req.Get(ast.StringTerm("cache_ignored_headers"))
	headersTerm := req.Get(ast.StringTerm("headers"))
	if ignoredTerm == nil || headersTerm == nil {
		// Nothing to strip, but still normalize cache_ignored_headers to
		// null so keys compare equal regardless of the exclusion list.
		key.Insert(ast.StringTerm("cache_ignored_headers"), ast.NullTerm())
		return key, nil
	}

	var ignored []string
	if err := ast.As(ignoredTerm.Value, &ignored); err != nil {
		return nil, err
	}
	var headers map[string]interface{}
	if err := ast.As(headersTerm.Value, &headers); err != nil {
		return nil, err
	}

	for _, name := range ignored {
		delete(headers, name)
	}

	headersVal, err := ast.InterfaceToValue(headers)
	if err != nil {
		return nil, err
	}

	key.Insert(ast.StringTerm("headers"), ast.NewTerm(headersVal))
	// Remove the cache_ignored_headers key from the key's identity.
	key.Insert(ast.StringTerm("cache_ignored_headers"), ast.NullTerm())
	return key, nil
}
func init() {
createAllowedKeys()
createCacheableHTTPStatusCodes()
@@ -303,7 +346,7 @@ func useSocket(rawURL string, tlsConfig *tls.Config) (bool, string, *http.Transp
u.RawQuery = v.Encode()
tr := http.DefaultTransport.(*http.Transport).Clone()
tr.DialContext = func(ctx context.Context, network, addr string) (net.Conn, error) {
tr.DialContext = func(ctx context.Context, _, _ string) (net.Conn, error) {
return http.DefaultTransport.(*http.Transport).DialContext(ctx, "unix", socket)
}
tr.TLSClientConfig = tlsConfig
@@ -482,7 +525,7 @@ func createHTTPRequest(bctx BuiltinContext, obj ast.Object) (*http.Request, *htt
case "cache", "caching_mode",
"force_cache", "force_cache_duration_seconds",
"force_json_decode", "force_yaml_decode",
"raise_error", "max_retry_attempts": // no-op
"raise_error", "max_retry_attempts", "cache_ignored_headers": // no-op
default:
return nil, nil, fmt.Errorf("invalid parameter %q", key)
}
@@ -729,13 +772,13 @@ func newHTTPSendCache() *httpSendCache {
}
func valueHash(v util.T) int {
return v.(ast.Value).Hash()
return ast.StringTerm(v.(ast.Value).String()).Hash()
}
func valueEq(a, b util.T) bool {
av := a.(ast.Value)
bv := b.(ast.Value)
return av.Compare(bv) == 0
return av.String() == bv.String()
}
func (cache *httpSendCache) get(k ast.Value) *httpSendCacheEntry {
@@ -1382,20 +1425,21 @@ type httpRequestExecutor interface {
// newHTTPRequestExecutor returns a new HTTP request executor that wraps either an inter-query or
// intra-query cache implementation
func newHTTPRequestExecutor(bctx BuiltinContext, key ast.Object) (httpRequestExecutor, error) {
useInterQueryCache, forceCacheParams, err := useInterQueryCache(key)
func newHTTPRequestExecutor(bctx BuiltinContext, req ast.Object, key ast.Object) (httpRequestExecutor, error) {
useInterQueryCache, forceCacheParams, err := useInterQueryCache(req)
if err != nil {
return nil, handleHTTPSendErr(bctx, err)
}
if useInterQueryCache && bctx.InterQueryBuiltinCache != nil {
return newInterQueryCache(bctx, key, forceCacheParams)
return newInterQueryCache(bctx, req, key, forceCacheParams)
}
return newIntraQueryCache(bctx, key)
return newIntraQueryCache(bctx, req, key)
}
type interQueryCache struct {
bctx BuiltinContext
req ast.Object
key ast.Object
httpReq *http.Request
httpClient *http.Client
@@ -1404,8 +1448,8 @@ type interQueryCache struct {
forceCacheParams *forceCacheParams
}
func newInterQueryCache(bctx BuiltinContext, key ast.Object, forceCacheParams *forceCacheParams) (*interQueryCache, error) {
return &interQueryCache{bctx: bctx, key: key, forceCacheParams: forceCacheParams}, nil
func newInterQueryCache(bctx BuiltinContext, req ast.Object, key ast.Object, forceCacheParams *forceCacheParams) (*interQueryCache, error) {
return &interQueryCache{bctx: bctx, req: req, key: key, forceCacheParams: forceCacheParams}, nil
}
// CheckCache checks the cache for the value of the key set on this object
@@ -1464,21 +1508,22 @@ func (c *interQueryCache) InsertErrorIntoCache(err error) {
// ExecuteHTTPRequest executes a HTTP request
func (c *interQueryCache) ExecuteHTTPRequest() (*http.Response, error) {
var err error
c.httpReq, c.httpClient, err = createHTTPRequest(c.bctx, c.key)
c.httpReq, c.httpClient, err = createHTTPRequest(c.bctx, c.req)
if err != nil {
return nil, handleHTTPSendErr(c.bctx, err)
}
return executeHTTPRequest(c.httpReq, c.httpClient, c.key)
return executeHTTPRequest(c.httpReq, c.httpClient, c.req)
}
type intraQueryCache struct {
bctx BuiltinContext
req ast.Object
key ast.Object
}
func newIntraQueryCache(bctx BuiltinContext, key ast.Object) (*intraQueryCache, error) {
return &intraQueryCache{bctx: bctx, key: key}, nil
func newIntraQueryCache(bctx BuiltinContext, req ast.Object, key ast.Object) (*intraQueryCache, error) {
return &intraQueryCache{bctx: bctx, req: req, key: key}, nil
}
// CheckCache checks the cache for the value of the key set on this object
@@ -1515,11 +1560,11 @@ func (c *intraQueryCache) InsertErrorIntoCache(err error) {
// ExecuteHTTPRequest executes a HTTP request
func (c *intraQueryCache) ExecuteHTTPRequest() (*http.Response, error) {
httpReq, httpClient, err := createHTTPRequest(c.bctx, c.key)
httpReq, httpClient, err := createHTTPRequest(c.bctx, c.req)
if err != nil {
return nil, handleHTTPSendErr(c.bctx, err)
}
return executeHTTPRequest(httpReq, httpClient, c.key)
return executeHTTPRequest(httpReq, httpClient, c.req)
}
func useInterQueryCache(req ast.Object) (bool, *forceCacheParams, error) {

View File

@@ -45,7 +45,7 @@ var (
errBytesValueIncludesSpaces = parseNumBytesError("spaces not allowed in resource strings")
)
func builtinNumBytes(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinNumBytes(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
var m big.Float
raw, err := builtins.StringOperand(operands[0].Value, 1)

View File

@@ -86,7 +86,7 @@ func validateAWSAuthParameters(o ast.Object) error {
return nil
}
func builtinAWSSigV4SignReq(ctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinAWSSigV4SignReq(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
// Request object.
reqObj, err := builtins.ObjectOperand(operands[0].Value, 1)
if err != nil {

View File

@@ -31,7 +31,7 @@ func numberOfEdges(collection *ast.Term) int {
return 0
}
func builtinReachable(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinReachable(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
// Error on wrong types for args.
graph, err := builtins.ObjectOperand(operands[0].Value, 1)
if err != nil {
@@ -109,7 +109,7 @@ func pathBuilder(graph ast.Object, root *ast.Term, path []*ast.Term, edgeRslt as
}
func builtinReachablePaths(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinReachablePaths(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
var traceResult = ast.NewSet()
// Error on wrong types for args.
graph, err := builtins.ObjectOperand(operands[0].Value, 1)

View File

@@ -15,6 +15,8 @@ import (
"github.com/open-policy-agent/opa/topdown/builtins"
)
const regexCacheMaxSize = 100
var regexpCacheLock = sync.Mutex{}
var regexpCache map[string]*regexp.Regexp
@@ -111,6 +113,13 @@ func getRegexp(pat string) (*regexp.Regexp, error) {
if err != nil {
return nil, err
}
if len(regexpCache) >= regexCacheMaxSize {
// Delete a (semi-)random key to make room for the new one.
for k := range regexpCache {
delete(regexpCache, k)
break
}
}
regexpCache[pat] = re
}
return re, nil

View File

@@ -12,7 +12,7 @@ import (
"github.com/open-policy-agent/opa/topdown/builtins"
)
func builtinSemVerCompare(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinSemVerCompare(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
versionStringA, err := builtins.StringOperand(operands[0].Value, 1)
if err != nil {
return err
@@ -37,7 +37,7 @@ func builtinSemVerCompare(bctx BuiltinContext, operands []*ast.Term, iter func(*
return iter(ast.IntNumberTerm(result))
}
func builtinSemVerIsValid(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinSemVerIsValid(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
versionString, err := builtins.StringOperand(operands[0].Value, 1)
if err != nil {
return iter(ast.BooleanTerm(false))

View File

@@ -16,7 +16,7 @@ import (
"github.com/open-policy-agent/opa/topdown/builtins"
)
func builtinAnyPrefixMatch(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinAnyPrefixMatch(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
a, b := operands[0].Value, operands[1].Value
var strs []string
@@ -50,7 +50,7 @@ func builtinAnyPrefixMatch(bctx BuiltinContext, operands []*ast.Term, iter func(
return iter(ast.BooleanTerm(anyStartsWithAny(strs, prefixes)))
}
func builtinAnySuffixMatch(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinAnySuffixMatch(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
a, b := operands[0].Value, operands[1].Value
var strsReversed []string
@@ -310,6 +310,25 @@ func builtinContains(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term
return iter(ast.BooleanTerm(strings.Contains(string(s), string(substr))))
}
// builtinStringCount implements the strings.count built-in: it yields the
// number of non-overlapping occurrences of the second operand (the search
// string) within the first (the base string).
func builtinStringCount(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
	base, err := builtins.StringOperand(operands[0].Value, 1)
	if err != nil {
		return err
	}

	search, err := builtins.StringOperand(operands[1].Value, 2)
	if err != nil {
		return err
	}

	return iter(ast.IntNumberTerm(strings.Count(string(base), string(search))))
}
func builtinStartsWith(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
s, err := builtins.StringOperand(operands[0].Value, 1)
if err != nil {
@@ -384,12 +403,12 @@ func builtinReplace(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
return err
}
new, err := builtins.StringOperand(operands[2].Value, 3)
n, err := builtins.StringOperand(operands[2].Value, 3)
if err != nil {
return err
}
return iter(ast.StringTerm(strings.Replace(string(s), string(old), string(new), -1)))
return iter(ast.StringTerm(strings.Replace(string(s), string(old), string(n), -1)))
}
func builtinReplaceN(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
@@ -570,6 +589,7 @@ func init() {
RegisterBuiltinFunc(ast.IndexOfN.Name, builtinIndexOfN)
RegisterBuiltinFunc(ast.Substring.Name, builtinSubstring)
RegisterBuiltinFunc(ast.Contains.Name, builtinContains)
RegisterBuiltinFunc(ast.StringCount.Name, builtinStringCount)
RegisterBuiltinFunc(ast.StartsWith.Name, builtinStartsWith)
RegisterBuiltinFunc(ast.EndsWith.Name, builtinEndsWith)
RegisterBuiltinFunc(ast.Upper.Name, builtinUpper)

View File

@@ -233,7 +233,7 @@ func builtinJWTVerifyRSA(a ast.Value, b ast.Value, hasher func() hash.Hash, veri
}
// Implements ES256 JWT signature verification.
func builtinJWTVerifyES256(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinJWTVerifyES256(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
result, err := builtinJWTVerify(operands[0].Value, operands[1].Value, sha256.New, verifyES)
if err == nil {
return iter(ast.NewTerm(result))
@@ -242,7 +242,7 @@ func builtinJWTVerifyES256(bctx BuiltinContext, operands []*ast.Term, iter func(
}
// Implements ES384 JWT signature verification
func builtinJWTVerifyES384(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinJWTVerifyES384(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
result, err := builtinJWTVerify(operands[0].Value, operands[1].Value, sha512.New384, verifyES)
if err == nil {
return iter(ast.NewTerm(result))
@@ -251,7 +251,7 @@ func builtinJWTVerifyES384(bctx BuiltinContext, operands []*ast.Term, iter func(
}
// Implements ES512 JWT signature verification
func builtinJWTVerifyES512(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinJWTVerifyES512(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
result, err := builtinJWTVerify(operands[0].Value, operands[1].Value, sha512.New, verifyES)
if err == nil {
return iter(ast.NewTerm(result))
@@ -413,7 +413,7 @@ func builtinJWTVerify(a ast.Value, b ast.Value, hasher func() hash.Hash, verify
}
// Implements HS256 (secret) JWT signature verification
func builtinJWTVerifyHS256(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinJWTVerifyHS256(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
// Decode the JSON Web Token
token, err := decodeJWT(operands[0].Value)
if err != nil {
@@ -442,7 +442,7 @@ func builtinJWTVerifyHS256(bctx BuiltinContext, operands []*ast.Term, iter func(
}
// Implements HS384 JWT signature verification
func builtinJWTVerifyHS384(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinJWTVerifyHS384(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
// Decode the JSON Web Token
token, err := decodeJWT(operands[0].Value)
if err != nil {
@@ -471,7 +471,7 @@ func builtinJWTVerifyHS384(bctx BuiltinContext, operands []*ast.Term, iter func(
}
// Implements HS512 JWT signature verification
func builtinJWTVerifyHS512(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
func builtinJWTVerifyHS512(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
// Decode the JSON Web Token
token, err := decodeJWT(operands[0].Value)
if err != nil {
@@ -793,7 +793,7 @@ func verifyRSAPSS(key interface{}, hash crypto.Hash, digest []byte, signature []
return nil
}
func verifyECDSA(key interface{}, hash crypto.Hash, digest []byte, signature []byte) (err error) {
func verifyECDSA(key interface{}, _ crypto.Hash, digest []byte, signature []byte) (err error) {
defer func() {
if r := recover(); r != nil {
err = fmt.Errorf("ECDSA signature verification error: %v", r)
@@ -1048,10 +1048,9 @@ func builtinJWTDecodeVerify(bctx BuiltinContext, operands []*ast.Term, iter func
// Nested JWT, go round again with payload as first argument
a = p.Value
continue
} else {
// Non-nested JWT (or we've reached the bottom of the nesting).
break
}
// Non-nested JWT (or we've reached the bottom of the nesting).
break
}
payload, err := extractJSONObject(string(p.Value.(ast.String)))
if err != nil {

View File

@@ -5,8 +5,10 @@
package topdown
import (
"bytes"
"fmt"
"io"
"slices"
"strings"
iStrs "github.com/open-policy-agent/opa/internal/strings"
@@ -18,7 +20,9 @@ import (
const (
minLocationWidth = 5 // len("query")
maxIdealLocationWidth = 64
locationPadding = 4
columnPadding = 4
maxExprVarWidth = 32
maxPrettyExprVarWidth = 64
)
// Op defines the types of tracing events.
@@ -62,7 +66,8 @@ const (
// UnifyOp is emitted when two terms are unified. Node will be set to an
// equality expression with the two terms. This Node will not have location
// info.
UnifyOp Op = "Unify"
UnifyOp Op = "Unify"
FailedAssertionOp Op = "FailedAssertion"
)
// VarMetadata provides some user facing information about
@@ -84,8 +89,9 @@ type Event struct {
Message string // Contains message for Note events.
Ref *ast.Ref // Identifies the subject ref for the event. Only applies to Index and Wasm operations.
input *ast.Term
bindings *bindings
input *ast.Term
bindings *bindings
localVirtualCacheSnapshot *ast.ValueMap
}
// HasRule returns true if the Event contains an ast.Rule.
@@ -236,31 +242,162 @@ func (b *BufferTracer) Config() TraceConfig {
// PrettyTrace pretty prints the trace to the writer.
func PrettyTrace(w io.Writer, trace []*Event) {
prettyTraceWith(w, trace, false)
PrettyTraceWithOpts(w, trace, PrettyTraceOptions{})
}
// PrettyTraceWithLocation prints the trace to the writer and includes location information
func PrettyTraceWithLocation(w io.Writer, trace []*Event) {
prettyTraceWith(w, trace, true)
PrettyTraceWithOpts(w, trace, PrettyTraceOptions{Locations: true})
}
func prettyTraceWith(w io.Writer, trace []*Event, locations bool) {
type PrettyTraceOptions struct {
Locations bool // Include location information
ExprVariables bool // Include variables found in the expression
LocalVariables bool // Include all local variables
}
type traceRow []string
func (r *traceRow) add(s string) {
*r = append(*r, s)
}
type traceTable struct {
rows []traceRow
maxWidths []int
}
func (t *traceTable) add(row traceRow) {
t.rows = append(t.rows, row)
for i := range row {
if i >= len(t.maxWidths) {
t.maxWidths = append(t.maxWidths, len(row[i]))
} else if len(row[i]) > t.maxWidths[i] {
t.maxWidths[i] = len(row[i])
}
}
}
func (t *traceTable) write(w io.Writer, padding int) {
for _, row := range t.rows {
for i, cell := range row {
width := t.maxWidths[i] + padding
if i < len(row)-1 {
_, _ = fmt.Fprintf(w, "%-*s ", width, cell)
} else {
_, _ = fmt.Fprintf(w, "%s", cell)
}
}
_, _ = fmt.Fprintln(w)
}
}
// PrettyTraceWithOpts pretty prints the trace to the writer as an aligned
// table, one row per event, indented by query depth. Depending on opts, each
// row may additionally carry the event's source location, the variables
// appearing in the event's expression, and/or all local variables in scope.
func PrettyTraceWithOpts(w io.Writer, trace []*Event, opts PrettyTraceOptions) {
	depths := depths{}

	// FIXME: Can we shorten each location as we process each trace event instead of beforehand?
	filePathAliases, _ := getShortenedFileNames(trace)

	table := traceTable{}

	for _, event := range trace {
		depth := depths.GetOrSet(event.QueryID, event.ParentID)
		row := traceRow{}

		if opts.Locations {
			location := formatLocation(event, filePathAliases)
			row.add(location)
		}

		row.add(formatEvent(event, depth))

		if opts.ExprVariables {
			// Render expression variables as a "{name: value, ...}" map,
			// truncating each value to keep rows readable.
			vars := exprLocalVars(event)
			keys := sortedKeys(vars)

			buf := new(bytes.Buffer)
			buf.WriteString("{")
			for i, k := range keys {
				if i > 0 {
					buf.WriteString(", ")
				}
				_, _ = fmt.Fprintf(buf, "%v: %s", k, iStrs.Truncate(vars.Get(k).String(), maxExprVarWidth))
			}
			buf.WriteString("}")
			row.add(buf.String())
		}

		if opts.LocalVariables {
			// Same rendering for the full set of locals; an event without
			// locals still contributes a "{}" cell so columns stay aligned.
			if locals := event.Locals; locals != nil {
				keys := sortedKeys(locals)

				buf := new(bytes.Buffer)
				buf.WriteString("{")
				for i, k := range keys {
					if i > 0 {
						buf.WriteString(", ")
					}
					_, _ = fmt.Fprintf(buf, "%v: %s", k, iStrs.Truncate(locals.Get(k).String(), maxExprVarWidth))
				}
				buf.WriteString("}")
				row.add(buf.String())
			} else {
				row.add("{}")
			}
		}

		table.add(row)
	}

	table.write(w, columnPadding)
}
// sortedKeys returns the keys of vm ordered lexically by their string form,
// giving deterministic iteration order for trace output.
func sortedKeys(vm *ast.ValueMap) []ast.Value {
	out := make([]ast.Value, 0, vm.Len())
	vm.Iter(func(key, _ ast.Value) bool {
		out = append(out, key)
		return false
	})
	slices.SortFunc(out, func(a, b ast.Value) int {
		return strings.Compare(a.String(), b.String())
	})
	return out
}
// exprLocalVars collects the values of all local variables referenced by the
// event's node, keyed by their rewritten (user-facing) names. Values cached
// in the event's virtual-document snapshot are included as well. For rule
// events only the head is inspected.
func exprLocalVars(e *Event) *ast.ValueMap {
	vars := ast.NewValueMap()

	findVars := func(term *ast.Term) bool {
		if name, ok := term.Value.(ast.Var); ok {
			if meta, ok := e.LocalMetadata[name]; ok {
				if val := e.Locals.Get(name); val != nil {
					vars.Put(meta.Name, val)
				}
			}
		}
		return false
	}

	if rule, ok := e.Node.(*ast.Rule); ok {
		// We're only interested in vars in the head, not the body.
		ast.WalkTerms(rule.Head, findVars)
		return vars
	}

	// The local cache snapshot only contains a snapshot for those refs present
	// in the event node, so they can all be added to the vars map.
	e.localVirtualCacheSnapshot.Iter(func(k, v ast.Value) bool {
		vars.Put(k, v)
		return false
	})

	ast.WalkTerms(e.Node, findVars)
	return vars
}
func formatEvent(event *Event, depth int) string {
@@ -451,6 +588,310 @@ func rewrite(event *Event) *Event {
return &cpy
}
// varInfo describes one variable occurrence within a traced expression: its
// (rewritten) metadata, its value when known, the source location of the
// occurrence, and the column used for aligned printing.
type varInfo struct {
	VarMetadata
	val     ast.Value
	exprLoc *ast.Location
	col     int // 0-indexed column
}

// Value renders the variable's value, or "undefined" when no value is known.
func (v varInfo) Value() string {
	if v.val == nil {
		return "undefined"
	}
	return v.val.String()
}

// Title is the display name for the variable: the source text at its
// location when available, otherwise the variable's name.
func (v varInfo) Title() string {
	if v.exprLoc == nil || v.exprLoc.Text == nil {
		return string(v.Name)
	}
	return string(v.exprLoc.Text)
}
// padLocationText returns the source text at loc, preceded by the same mix
// of tabs and spaces that led up to it on its original line, so that column
// positions line up when the text is printed on its own.
func padLocationText(loc *ast.Location) string {
	if loc == nil {
		return ""
	}

	text := string(loc.Text)
	if loc.Col == 0 {
		return text
	}

	buf := new(bytes.Buffer)
	next := 0
	for col := 1; col < loc.Col; col++ {
		// Reproduce a tab wherever the original line had one (loc.Tabs lists
		// the tab columns in order); pad with a space everywhere else.
		if next < len(loc.Tabs) && loc.Tabs[next] == col {
			buf.WriteString("\t")
			next++
		} else {
			buf.WriteString(" ")
		}
	}
	buf.WriteString(text)
	return buf.String()
}
// PrettyEventOpts controls the output of PrettyEvent.
type PrettyEventOpts struct {
	// PrettyVars, when true, annotates the printed expression with the
	// values of the variables appearing in it.
	PrettyVars bool
}
// walkTestTerms walks the AST below x, invoking f on each term. Call
// operators are deliberately skipped — for ast.Call only the arguments
// (x[1:]) are visited, and for call expressions only the operands and
// with-modifiers — so that built-in/function names are not reported as
// variables of interest.
func walkTestTerms(x interface{}, f func(*ast.Term) bool) {
	// vis is declared before assignment so the closure can recurse through it.
	var vis *ast.GenericVisitor
	vis = ast.NewGenericVisitor(func(x interface{}) bool {
		switch x := x.(type) {
		case ast.Call:
			// Skip the operator term; visit only the call arguments.
			for _, t := range x[1:] {
				vis.Walk(t)
			}
			return true
		case *ast.Expr:
			if x.IsCall() {
				for _, o := range x.Operands() {
					vis.Walk(o)
				}
				for i := range x.With {
					vis.Walk(x.With[i])
				}
				return true
			}
		case *ast.Term:
			return f(x)
		case *ast.With:
			vis.Walk(x.Value)
			return true
		}
		return false
	})
	vis.Walk(x)
}
// PrettyEvent prints a single trace event's expression to w. When
// opts.PrettyVars is set, the expression is followed by an annotated view of
// the variables appearing in it (values, or "undefined" where the evaluator
// established that no value exists). Only events whose node is an *ast.Expr
// produce variable output; other nodes yield no output in pretty-vars mode.
func PrettyEvent(w io.Writer, e *Event, opts PrettyEventOpts) error {
	if !opts.PrettyVars {
		_, _ = fmt.Fprintln(w, padLocationText(e.Location))
		return nil
	}

	buf := new(bytes.Buffer)
	exprVars := map[string]varInfo{}

	// findVars builds a term visitor that records each variable occurrence in
	// exprVars. unknownAreUndefined controls whether a var with no local
	// metadata may be reported as "undefined" (no value) rather than skipped.
	findVars := func(unknownAreUndefined bool) func(term *ast.Term) bool {
		return func(term *ast.Term) bool {
			if term.Location == nil {
				return false
			}

			switch v := term.Value.(type) {
			case *ast.ArrayComprehension, *ast.SetComprehension, *ast.ObjectComprehension:
				// we don't report on the internals of a comprehension, as it's already evaluated, and we won't have the local vars.
				return true
			case ast.Var:
				var info *varInfo
				if meta, ok := e.LocalMetadata[v]; ok {
					info = &varInfo{
						VarMetadata: meta,
						val:         e.Locals.Get(v),
						exprLoc:     term.Location,
					}
				} else if unknownAreUndefined {
					info = &varInfo{
						VarMetadata: VarMetadata{Name: v},
						exprLoc:     term.Location,
						col:         term.Location.Col,
					}
				}

				if info != nil {
					// Prefer an occurrence with a known value over one without.
					if v, exists := exprVars[info.Title()]; !exists || v.val == nil {
						if term.Location != nil {
							info.col = term.Location.Col
						}
						exprVars[info.Title()] = *info
					}
				}
			}
			return false
		}
	}

	expr, ok := e.Node.(*ast.Expr)
	if !ok || expr == nil {
		return nil
	}

	// Print the original (base) expression the compiler generated this one from.
	base := expr.BaseCogeneratedExpr()
	exprText := padLocationText(base.Location)
	buf.WriteString(exprText)

	// Fold snapshotted virtual-cache values (refs and comprehensions) into
	// the variable map so cached document values are displayed too.
	e.localVirtualCacheSnapshot.Iter(func(k, v ast.Value) bool {
		var info *varInfo
		switch k := k.(type) {
		case ast.Ref:
			info = &varInfo{
				VarMetadata: VarMetadata{Name: ast.Var(k.String())},
				val:         v,
				exprLoc:     k[0].Location,
				col:         k[0].Location.Col,
			}
		case *ast.ArrayComprehension:
			info = &varInfo{
				VarMetadata: VarMetadata{Name: ast.Var(k.String())},
				val:         v,
				exprLoc:     k.Term.Location,
				col:         k.Term.Location.Col,
			}
		case *ast.SetComprehension:
			info = &varInfo{
				VarMetadata: VarMetadata{Name: ast.Var(k.String())},
				val:         v,
				exprLoc:     k.Term.Location,
				col:         k.Term.Location.Col,
			}
		case *ast.ObjectComprehension:
			info = &varInfo{
				VarMetadata: VarMetadata{Name: ast.Var(k.String())},
				val:         v,
				exprLoc:     k.Key.Location,
				col:         k.Key.Location.Col,
			}
		}

		if info != nil {
			exprVars[info.Title()] = *info
		}

		return false
	})

	// If the expression is negated, we can't confidently assert that vars with unknown values are 'undefined',
	// since the compiler might have opted out of the necessary rewrite.
	walkTestTerms(expr, findVars(!expr.Negated))

	coExprs := expr.CogeneratedExprs()
	for _, coExpr := range coExprs {
		// Only the current "co-expr" can have undefined vars, if we don't know the value for a var in any other co-expr,
		// it's unknown, not undefined. A var can be unknown if it hasn't been assigned a value yet, because the co-expr
		// hasn't been evaluated yet (the fail happened before it).
		walkTestTerms(coExpr, findVars(false))
	}

	printPrettyVars(buf, exprVars)
	_, _ = fmt.Fprint(w, buf.String())
	return nil
}
// printPrettyVars appends a visualization of exprVars to w. In the normal
// case it draws arrow columns ("|") under each variable's source column,
// then prints one line per variable with its value. When variables span
// multiple rows that contain tabs, column alignment cannot be trusted, so a
// flat alphabetical "Where:" listing is emitted instead.
func printPrettyVars(w *bytes.Buffer, exprVars map[string]varInfo) {
	containsTabs := false
	varRows := make(map[int]interface{})
	for _, info := range exprVars {
		if len(info.exprLoc.Tabs) > 0 {
			containsTabs = true
		}
		varRows[info.exprLoc.Row] = nil
	}

	if containsTabs && len(varRows) > 1 {
		// We can't (currently) reliably point to var locations when they are on different rows that contain tabs.
		// So we'll just print them in alphabetical order instead.
		byName := make([]varInfo, 0, len(exprVars))
		for _, info := range exprVars {
			byName = append(byName, info)
		}
		slices.SortStableFunc(byName, func(a, b varInfo) int {
			return strings.Compare(a.Title(), b.Title())
		})

		w.WriteString("\n\nWhere:\n")
		for _, info := range byName {
			w.WriteString(fmt.Sprintf("\n%s: %s", info.Title(), iStrs.Truncate(info.Value(), maxPrettyExprVarWidth)))
		}

		return
	}

	byCol := make([]varInfo, 0, len(exprVars))
	for _, info := range exprVars {
		byCol = append(byCol, info)
	}
	slices.SortFunc(byCol, func(a, b varInfo) int {
		// sort first by column, then by reverse row (to present vars in the same order they appear in the expr)
		if a.col == b.col {
			if a.exprLoc.Row == b.exprLoc.Row {
				return strings.Compare(a.Title(), b.Title())
			}
			return b.exprLoc.Row - a.exprLoc.Row
		}
		return a.col - b.col
	})

	if len(byCol) == 0 {
		return
	}

	// First row: arrow markers only; then one row per variable (in reverse
	// sort order) where that variable's value is printed at its column.
	w.WriteString("\n")
	printArrows(w, byCol, -1)
	for i := len(byCol) - 1; i >= 0; i-- {
		w.WriteString("\n")
		printArrows(w, byCol, i)
	}
}
// printArrows writes one line of the variable visualization to w. For every
// entry in l up to printValueAt it emits a "|" marker at the entry's column;
// the entry at index printValueAt (the last one drawn) gets its value printed
// instead of a marker. Pass printValueAt = -1 to draw markers only. Columns
// are reached by emitting spaces (or tabs, mirroring the source line's tab
// stops) between markers.
func printArrows(w *bytes.Buffer, l []varInfo, printValueAt int) {
	prevCol := 0
	var slice []varInfo
	if printValueAt >= 0 {
		slice = l[:printValueAt+1]
	} else {
		slice = l
	}
	isFirst := true
	for i, info := range slice {
		isLast := i >= len(slice)-1
		col := info.col

		if !isLast && col == l[i+1].col {
			// We're sharing the same column with another, subsequent var
			continue
		}

		// Distance from the previously drawn marker (or line start) to this column.
		spaces := col - 1
		if i > 0 && !isFirst {
			spaces = (col - prevCol) - 1
		}

		for j := 0; j < spaces; j++ {
			tab := false
			// Emit a tab where the original source line had one so the
			// markers stay aligned with the printed expression text.
			for _, t := range info.exprLoc.Tabs {
				if t == j+prevCol+1 {
					w.WriteString("\t")
					tab = true
					break
				}
			}
			if !tab {
				w.WriteString(" ")
			}
		}

		if isLast && printValueAt >= 0 {
			valueStr := iStrs.Truncate(info.Value(), maxPrettyExprVarWidth)
			if (i > 0 && col == l[i-1].col) || (i < len(l)-1 && col == l[i+1].col) {
				// There is another var on this column, so we need to include the name to differentiate them.
				w.WriteString(fmt.Sprintf("%s: %s", info.Title(), valueStr))
			} else {
				w.WriteString(valueStr)
			}
		} else {
			w.WriteString("|")
		}

		prevCol = col
		isFirst = false
	}
}
// init registers the trace built-in so it is available to the topdown evaluator.
func init() {
	RegisterBuiltinFunc(ast.Trace.Name, builtinTrace)
}

View File

@@ -77,10 +77,10 @@ func Unmarshal(bs []byte) (result Type, err error) {
}
}
case typeAny:
var any rawunion
if err = util.UnmarshalJSON(bs, &any); err == nil {
var union rawunion
if err = util.UnmarshalJSON(bs, &union); err == nil {
var of []Type
if of, err = unmarshalSlice(any.Of); err == nil {
if of, err = unmarshalSlice(union.Of); err == nil {
result = NewAny(of...)
}
}

View File

@@ -938,8 +938,8 @@ func Compare(a, b Type) int {
// Contains returns true if a is a superset or equal to b.
func Contains(a, b Type) bool {
if any, ok := unwrap(a).(Any); ok {
return any.Contains(b)
if x, ok := unwrap(a).(Any); ok {
return x.Contains(b)
}
return Compare(a, b) == 0
}
@@ -994,8 +994,8 @@ func Select(a Type, x interface{}) Type {
if Compare(a.of, tpe) == 0 {
return a.of
}
if any, ok := a.of.(Any); ok {
if any.Contains(tpe) {
if x, ok := a.of.(Any); ok {
if x.Contains(tpe) {
return tpe
}
}

View File

@@ -0,0 +1,31 @@
package decoding
import "context"
type requestContextKey string
// Note(philipc): We can add functions later to add the max request body length
// to contexts, if we ever need to.
const (
reqCtxKeyMaxLen = requestContextKey("server-decoding-plugin-context-max-length")
reqCtxKeyGzipMaxLen = requestContextKey("server-decoding-plugin-context-gzip-max-length")
)
func AddServerDecodingMaxLen(ctx context.Context, maxLen int64) context.Context {
return context.WithValue(ctx, reqCtxKeyMaxLen, maxLen)
}
func AddServerDecodingGzipMaxLen(ctx context.Context, maxLen int64) context.Context {
return context.WithValue(ctx, reqCtxKeyGzipMaxLen, maxLen)
}
// Used for enforcing max body content limits when dealing with chunked requests.
func GetServerDecodingMaxLen(ctx context.Context) (int64, bool) {
maxLength, ok := ctx.Value(reqCtxKeyMaxLen).(int64)
return maxLength, ok
}
func GetServerDecodingGzipMaxLen(ctx context.Context) (int64, bool) {
gzipMaxLength, ok := ctx.Value(reqCtxKeyGzipMaxLen).(int64)
return gzipMaxLength, ok
}

10
vendor/github.com/open-policy-agent/opa/util/maps.go generated vendored Normal file
View File

@@ -0,0 +1,10 @@
package util
// Values returns a slice of values from any map. Copied from golang.org/x/exp/maps.
func Values[M ~map[K]V, K comparable, V any](m M) []V {
	out := make([]V, len(m))
	i := 0
	for _, v := range m {
		out[i] = v
		i++
	}
	return out
}

View File

@@ -0,0 +1,81 @@
package util
import (
"bytes"
"compress/gzip"
"encoding/binary"
"fmt"
"io"
"net/http"
"strings"
"sync"
"github.com/open-policy-agent/opa/util/decoding"
)
// gzipReaderPool recycles gzip.Reader instances across requests so the
// decompressor state is not re-allocated for every compressed payload.
// Readers are re-bound to a new input via Reset before each use.
var gzipReaderPool = sync.Pool{
	New: func() interface{} {
		reader := new(gzip.Reader)
		return reader
	},
}
// Note(philipc): Originally taken from server/server.go
// The DecodingLimitHandler handles validating that the gzip payload is within the
// allowed max size limit. Thus, in the event of a forged payload size trailer,
// the worst that can happen is that we waste memory up to the allowed max gzip
// payload size, but not an unbounded amount of memory, as was potentially
// possible before.
//
// ReadMaybeCompressedBody reads the body of r, transparently gzip-decompressing
// it when the Content-Encoding header contains "gzip". Size limits are pulled
// from the request context (set by the decoding package helpers). Returns the
// (possibly decompressed) body bytes, or an error if reading fails or the
// declared decompressed size exceeds the configured gzip limit.
func ReadMaybeCompressedBody(r *http.Request) ([]byte, error) {
	var content *bytes.Buffer
	// Note(philipc): If the request body is of unknown length (such as what
	// happens when 'Transfer-Encoding: chunked' is set), we have to do an
	// incremental read of the body. In this case, we can't be too clever, we
	// just do the best we can with whatever is streamed over to us.
	// Fetch gzip payload size limit from request context.
	if maxLength, ok := decoding.GetServerDecodingMaxLen(r.Context()); ok {
		// Cap the incremental read at maxLength; a truncated body is returned
		// silently if the stream exceeds the limit.
		bs, err := io.ReadAll(io.LimitReader(r.Body, maxLength))
		if err != nil {
			return bs, err
		}
		content = bytes.NewBuffer(bs)
	} else {
		// Read content from the request body into a buffer of known size.
		// NOTE(review): assumes r.ContentLength is non-negative here —
		// presumably guaranteed by the caller; confirm against DecodingLimitHandler.
		content = bytes.NewBuffer(make([]byte, 0, r.ContentLength))
		if _, err := io.CopyN(content, r.Body, r.ContentLength); err != nil {
			return content.Bytes(), err
		}
	}
	// Decompress gzip content by reading from the buffer.
	if strings.Contains(r.Header.Get("Content-Encoding"), "gzip") {
		// Fetch gzip payload size limit from request context.
		// NOTE(review): the ok flag is discarded; if the limit was never set,
		// gzipMaxLength is 0 and any nonzero trailer is rejected below —
		// presumably the handler always sets it; verify.
		gzipMaxLength, _ := decoding.GetServerDecodingGzipMaxLen(r.Context())

		// Note(philipc): The last 4 bytes of a well-formed gzip blob will
		// always be a little-endian uint32, representing the decompressed
		// content size, modulo 2^32. We validate that the size is safe,
		// earlier in DecodingLimitHandler.
		// NOTE(review): assumes content holds at least 4 bytes when the gzip
		// header is present; otherwise the slice below panics — confirm the
		// handler rejects shorter bodies.
		sizeTrailerField := binary.LittleEndian.Uint32(content.Bytes()[content.Len()-4:])
		if sizeTrailerField > uint32(gzipMaxLength) {
			return content.Bytes(), fmt.Errorf("gzip payload too large")
		}
		// Pull a gzip decompressor from the pool, and assign it to the current
		// buffer, using Reset(). Later, return it back to the pool for another
		// request to use.
		gzReader := gzipReaderPool.Get().(*gzip.Reader)
		if err := gzReader.Reset(content); err != nil {
			return nil, err
		}
		defer gzReader.Close()
		defer gzipReaderPool.Put(gzReader)
		// Copy exactly the declared decompressed size; anything beyond the
		// trailer-declared length is not read.
		decompressedContent := bytes.NewBuffer(make([]byte, 0, sizeTrailerField))
		if _, err := io.CopyN(decompressedContent, gzReader, int64(sizeTrailerField)); err != nil {
			return decompressedContent.Bytes(), err
		}
		return decompressedContent.Bytes(), nil
	}

	// Request was not compressed; return the content bytes.
	return content.Bytes(), nil
}

View File

@@ -11,7 +11,7 @@ import (
)
// Version is the canonical version of OPA.
var Version = "0.65.0"
var Version = "0.67.1"
// GoVersion is the version of Go this was built with
var GoVersion = runtime.Version()

View File

@@ -112,7 +112,7 @@ func (c Config) RequestFunc(evaluate EvaluateFunc) RequestFunc {
}
if ctxErr := waitFunc(ctx, delay); ctxErr != nil {
return fmt.Errorf("%w: %s", ctxErr, err)
return fmt.Errorf("%w: %w", ctxErr, err)
}
}
}

View File

@@ -59,8 +59,9 @@ func WithInsecure() Option {
//
// If the OTEL_EXPORTER_OTLP_ENDPOINT or OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
// environment variable is set, and this option is not passed, that variable
// value will be used. If both are set, OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
// will take precedence.
// value will be used. If both environment variables are set,
// OTEL_EXPORTER_OTLP_TRACES_ENDPOINT will take precedence. If an environment
// variable is set, and this option is passed, this option will take precedence.
//
// If both this option and WithEndpointURL are used, the last used option will
// take precedence.
@@ -79,8 +80,9 @@ func WithEndpoint(endpoint string) Option {
//
// If the OTEL_EXPORTER_OTLP_ENDPOINT or OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
// environment variable is set, and this option is not passed, that variable
// value will be used. If both are set, OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
// will take precedence.
// value will be used. If both environment variables are set,
// OTEL_EXPORTER_OTLP_TRACES_ENDPOINT will take precedence. If an environment
// variable is set, and this option is passed, this option will take precedence.
//
// If both this option and WithEndpoint are used, the last used option will
// take precedence.

View File

@@ -5,5 +5,5 @@ package otlptrace // import "go.opentelemetry.io/otel/exporters/otlp/otlptrace"
// Version is the current release version of the OpenTelemetry OTLP trace exporter in use.
func Version() string {
return "1.27.0"
return "1.28.0"
}

9
vendor/modules.txt vendored
View File

@@ -1525,7 +1525,7 @@ github.com/onsi/gomega/matchers/support/goraph/edge
github.com/onsi/gomega/matchers/support/goraph/node
github.com/onsi/gomega/matchers/support/goraph/util
github.com/onsi/gomega/types
# github.com/open-policy-agent/opa v0.65.0
# github.com/open-policy-agent/opa v0.67.1
## explicit; go 1.21
github.com/open-policy-agent/opa/ast
github.com/open-policy-agent/opa/ast/internal/scanner
@@ -1613,6 +1613,7 @@ github.com/open-policy-agent/opa/topdown/print
github.com/open-policy-agent/opa/tracing
github.com/open-policy-agent/opa/types
github.com/open-policy-agent/opa/util
github.com/open-policy-agent/opa/util/decoding
github.com/open-policy-agent/opa/version
# github.com/opencontainers/runtime-spec v1.1.0
## explicit
@@ -2071,11 +2072,11 @@ go.opentelemetry.io/otel/exporters/jaeger/internal/gen-go/agent
go.opentelemetry.io/otel/exporters/jaeger/internal/gen-go/jaeger
go.opentelemetry.io/otel/exporters/jaeger/internal/gen-go/zipkincore
go.opentelemetry.io/otel/exporters/jaeger/internal/third_party/thrift/lib/go/thrift
# go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.27.0
# go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0
## explicit; go 1.21
go.opentelemetry.io/otel/exporters/otlp/otlptrace
go.opentelemetry.io/otel/exporters/otlp/otlptrace/internal/tracetransform
# go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.27.0
# go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0
## explicit; go 1.21
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc/internal
@@ -2100,7 +2101,7 @@ go.opentelemetry.io/otel/sdk/trace
go.opentelemetry.io/otel/trace
go.opentelemetry.io/otel/trace/embedded
go.opentelemetry.io/otel/trace/noop
# go.opentelemetry.io/proto/otlp v1.2.0
# go.opentelemetry.io/proto/otlp v1.3.1
## explicit; go 1.17
go.opentelemetry.io/proto/otlp/collector/trace/v1
go.opentelemetry.io/proto/otlp/common/v1