diff --git a/.github/workflows/pull-request.yaml b/.github/workflows/pull-request.yaml
index d7c4d19ce9..94d249feba 100644
--- a/.github/workflows/pull-request.yaml
+++ b/.github/workflows/pull-request.yaml
@@ -352,23 +352,23 @@ jobs:
           version: edge
 
       - name: Test policies
-        run: opa test build/policy
+        run: opa test --v0-compatible build/policy
 
       - name: Ensure proper formatting
-        run: opa fmt --list --fail build/policy
+        run: opa fmt --v0-compatible --list --fail build/policy
 
       - name: Run file policy checks on changed files
        run: |
           curl --silent --fail --header 'Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' -o files.json \
             https://api.github.com/repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/files
 
-          opa eval -d build/policy/files.rego -d build/policy/helpers.rego --format values --input files.json \
+          opa eval --v0-compatible -d build/policy/files.rego -d build/policy/helpers.rego --format values --input files.json \
             --fail-defined 'data.files.deny[message]'
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Show input on failure
-        run: opa eval --input files.json --format pretty input
+        run: opa eval --v0-compatible --input files.json --format pretty input
         if: ${{ failure() }}
 
       - name: Setup Hugo
@@ -386,4 +386,4 @@ jobs:
           cd docs
           make dev-generate hugo-production-build
           cd -
-          opa eval 'data.integrations.deny[message]' -i docs/website/public/index.json -d build/policy/integrations.rego --format=values --fail-defined
+          opa eval --v0-compatible 'data.integrations.deny[message]' -i docs/website/public/index.json -d build/policy/integrations.rego --format=values --fail-defined
diff --git a/ast/capabilities.go b/ast/capabilities.go
index 7c82377ab0..bc7278a885 100644
--- a/ast/capabilities.go
+++ b/ast/capabilities.go
@@ -21,6 +21,7 @@ type VersionIndex = v1.VersionIndex
 // heads, they wouldn't be able to parse them.
 const FeatureRefHeadStringPrefixes = v1.FeatureRefHeadStringPrefixes
 const FeatureRefHeads = v1.FeatureRefHeads
+const FeatureRegoV1 = v1.FeatureRegoV1
 const FeatureRegoV1Import = v1.FeatureRegoV1Import
 
 // Capabilities defines a structure containing data that describes the capabilities
@@ -33,7 +34,7 @@ type WasmABIVersion = v1.WasmABIVersion
 
 // CapabilitiesForThisVersion returns the capabilities of this version of OPA.
 func CapabilitiesForThisVersion() *Capabilities {
-	return v1.CapabilitiesForThisVersion()
+	return v1.CapabilitiesForThisVersion(v1.CapabilitiesRegoVersion(DefaultRegoVersion))
 }
 
 // LoadCapabilitiesJSON loads a JSON serialized capabilities structure from the reader r.
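The FeatureRegoV1 constant and the CapabilitiesRegoVersion option surfaced above are what the cmd/build.go changes and the new cmd/build_test.go cases further down rely on. A minimal sketch of that pattern — building a v0 capabilities document that still opts in to parsing v1 Rego — assuming the v1 alias resolves to the same package imported by ast/capabilities.go (the import path below is an assumption; it is not shown in this diff):

package main

import (
	"encoding/json"
	"fmt"

	// Assumed import path; mirrors the v1 alias used in ast/capabilities.go.
	v1 "github.com/open-policy-agent/opa/v1/ast"
)

func main() {
	// Capabilities for the v0 Rego dialect, the same shape cmd/build.go now
	// derives from --v0-compatible via CapabilitiesRegoVersion.
	caps := v1.CapabilitiesForThisVersion(v1.CapabilitiesRegoVersion(v1.RegoV0))

	// Opt the v0 capabilities back in to v1 parsing, as the new build tests
	// do by appending FeatureRegoV1 to Features.
	caps.Features = append(caps.Features, v1.FeatureRegoV1)

	bs, err := json.Marshal(caps)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(bs))
}

A capabilities file produced this way can be passed to `opa build --capabilities <file> --v0-compatible`, which is the combination the "rego_v1 capabilities feature" test cases below exercise.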
diff --git a/ast/parser.go b/ast/parser.go
index 8954618a05..45cd4da06e 100644
--- a/ast/parser.go
+++ b/ast/parser.go
@@ -43,3 +43,7 @@ type ParserOptions = v1.ParserOptions
 func NewParser() *Parser {
 	return v1.NewParser().WithRegoVersion(DefaultRegoVersion)
 }
+
+func IsFutureKeyword(s string) bool {
+	return v1.IsFutureKeywordForRegoVersion(s, RegoV0)
+}
diff --git a/build/policy/files.rego b/build/policy/files.rego
index 5a4bdb6126..ed22bd8f2d 100644
--- a/build/policy/files.rego
+++ b/build/policy/files.rego
@@ -7,9 +7,7 @@
 
 package files
 
-import future.keywords.contains
-import future.keywords.if
-import future.keywords.in
+import rego.v1
 
 import data.helpers.basename
 import data.helpers.directory
diff --git a/build/policy/files_test.rego b/build/policy/files_test.rego
index 03e9f1e2d3..e92b9d1e37 100644
--- a/build/policy/files_test.rego
+++ b/build/policy/files_test.rego
@@ -1,27 +1,27 @@
 package files_test
 
-import future.keywords.in
+import rego.v1
 
 import data.files.deny
 
-test_deny_invalid_yaml_file {
+test_deny_invalid_yaml_file if {
 	expected := "invalid.yaml is an invalid YAML file: {null{}}"
 	expected in deny with data.files.yaml_file_contents as {"invalid.yaml": "{null{}}"}
 		with data.files.changes as {"invalid.yaml": {"status": "modified"}}
 }
 
-test_allow_valid_yaml_file {
+test_allow_valid_yaml_file if {
 	count(deny) == 0 with data.files.yaml_file_contents as {"valid.yaml": "foo: bar"}
 		with data.files.changes as {"valid.yaml": {"status": "modified"}}
 }
 
-test_deny_invalid_json_file {
+test_deny_invalid_json_file if {
 	expected := "invalid.json is an invalid JSON file: }}}"
 	expected in deny with data.files.json_file_contents as {"invalid.json": "}}}"}
 		with data.files.changes as {"invalid.json": {"status": "modified"}}
 }
 
-test_allow_valid_json_file {
+test_allow_valid_json_file if {
 	count(deny) == 0 with data.files.json_file_contents as {"valid.json": "{\"foo\": \"bar\"}"}
 		with data.files.changes as {"valid.json": {"status": "modified"}}
 }
diff --git a/build/policy/helpers.rego b/build/policy/helpers.rego
index 0236ef5a4c..cc302ab8bd 100644
--- a/build/policy/helpers.rego
+++ b/build/policy/helpers.rego
@@ -1,7 +1,6 @@
 package helpers
 
-import future.keywords.if
-import future.keywords.in
+import rego.v1
 
 last_indexof(string, search) := i if {
 	all := [i | chars := split(string, ""); chars[i] == search]
diff --git a/build/policy/integrations.rego b/build/policy/integrations.rego
index ff0e70e485..66112b813c 100644
--- a/build/policy/integrations.rego
+++ b/build/policy/integrations.rego
@@ -1,8 +1,6 @@
 package integrations
 
-import future.keywords.contains
-import future.keywords.if
-import future.keywords.in
+import rego.v1
 
 allowed_image_extensions := ["png", "svg"]
 
diff --git a/build/policy/integrations_test.rego b/build/policy/integrations_test.rego
index d21b331d53..f6b1c5eacf 100644
--- a/build/policy/integrations_test.rego
+++ b/build/policy/integrations_test.rego
@@ -1,8 +1,8 @@
 package integrations_test
 
-import future.keywords.in
+import rego.v1
 
-messages_for_key(key, output) = messages {
+messages_for_key(key, output) = messages if {
 	messages := {m |
 		some e
 		output[e]
@@ -15,18 +15,18 @@ messages_for_key(key, output) = messages {
 
 print_if(true, _, _, _) = true
 
-print_if(false, key, false, output) := false {
+print_if(false, key, false, output) := false if {
 	print("Exp:", {})
 	print("Got: ", messages_for_key(key, output))
 }
 
-print_if(false, key, expected, output) := false {
+print_if(false, key, expected, output) := false if {
 	is_string(expected)
 	print("Exp:", expected)
 	print("Got:",
messages_for_key(key, output)) } -test_integration_has_valid_key { +test_integration_has_valid_key if { output := data.integrations.deny with input as {"integrations": {"/integrations/in.valid/": {"link": "https://example.com/", "title": "Example"}}} key := "key" @@ -40,7 +40,7 @@ test_integration_has_valid_key { print_if(result, key, message, output) } -test_integration_has_required_fields_missing { +test_integration_has_required_fields_missing if { output := data.integrations.deny with input as {"integrations": {"/integrations/regal/": {}}} key := "fields" @@ -53,7 +53,7 @@ test_integration_has_required_fields_missing { print_if(result, key, message, output) } -test_integration_has_required_fields_present { +test_integration_has_required_fields_present if { output := data.integrations.deny with input as {"integrations": {"/integrations/regal/": {"title": "Regal"}}} key := "fields" @@ -66,7 +66,7 @@ test_integration_has_required_fields_present { print_if(result, key, false, output) } -test_integration_has_content_missing { +test_integration_has_content_missing if { output := data.integrations.deny with input as {"integrations": {"/integrations/regal/": {}}} key := "content" @@ -79,7 +79,7 @@ test_integration_has_content_missing { print_if(result, key, message, output) } -test_integration_has_content_blank { +test_integration_has_content_blank if { output := data.integrations.deny with input as {"integrations": {"/integrations/regal/": {"content": "\t\t\n "}}} key := "content" @@ -92,7 +92,7 @@ test_integration_has_content_blank { print_if(result, key, message, output) } -test_integration_has_content_present { +test_integration_has_content_present if { output := data.integrations.deny with input as {"integrations": {"/integrations/regal/": {"content": "foobar"}}} key := "content" @@ -103,7 +103,7 @@ test_integration_has_content_present { print_if(result, key, false, output) } -test_every_integration_has_image_missing { +test_every_integration_has_image_missing if { output := data.integrations.deny with input as { "images": ["reegal.png"], "integrations": {"/integrations/regal/": {}}, @@ -119,7 +119,7 @@ test_every_integration_has_image_missing { print_if(result, key, message, output) } -test_every_integration_has_image_present { +test_every_integration_has_image_present if { output := data.integrations.deny with input as { "images": ["regal.png"], "integrations": {"regal": {}}, @@ -133,7 +133,7 @@ test_every_integration_has_image_present { print_if(result, key, false, output) } -test_every_integration_has_image_missing_but_permitted { +test_every_integration_has_image_missing_but_permitted if { output := data.integrations.deny with input as { "images": ["reegal.png"], "integrations": {"regal": {"allow_missing_image": true}}, @@ -148,7 +148,7 @@ test_every_integration_has_image_missing_but_permitted { print_if(result, key, false, output) } -test_every_image_has_integration_missing { +test_every_image_has_integration_missing if { output := data.integrations.deny with input as { "images": ["regal.png"], "integrations": {"foobar": {}}, @@ -164,7 +164,7 @@ test_every_image_has_integration_missing { print_if(result, key, message, output) } -test_every_image_has_integration_present { +test_every_image_has_integration_present if { output := data.integrations.deny with input as { "images": ["regal.png"], "integrations": {"/integrations/regal/": {}}, @@ -178,7 +178,7 @@ test_every_image_has_integration_present { print_if(result, key, false, output) } -test_integration_organizations_missing { 
+test_integration_organizations_missing if { output := data.integrations.deny with input as { "organizations": {"/organizations/stira/": {}}, "integrations": {"/integrations/regal/": {"inventors": ["styra"]}}, @@ -194,7 +194,7 @@ test_integration_organizations_missing { print_if(result, key, message, output) } -test_integration_organizations_present { +test_integration_organizations_present if { output := data.integrations.deny with input as { "organizations": {"/organizations/styra/": {}}, "integrations": {"/integrations/regal/": {"inventors": ["styra"]}}, @@ -208,7 +208,7 @@ test_integration_organizations_present { print_if(result, key, false, output) } -test_integration_softwares_missing { +test_integration_softwares_missing if { output := data.integrations.deny with input as { "softwares": {"/softwares/mars/": {}}, "integrations": {"/integrations/regal/": {"software": ["terraform"]}}, @@ -224,7 +224,7 @@ test_integration_softwares_missing { print_if(result, key, message, output) } -test_integration_softwares_present { +test_integration_softwares_present if { output := data.integrations.deny with input as { "softwares": {"/softwares/terraform/": {}}, "integrations": {"/integrations/regal/": {"software": ["terraform"]}}, @@ -238,7 +238,7 @@ test_integration_softwares_present { print_if(result, key, false, output) } -test_software_has_required_fields_missing { +test_software_has_required_fields_missing if { output := data.integrations.deny with input as {"softwares": {"/softwares/terraform/": {}}} key := "fields" @@ -251,7 +251,7 @@ test_software_has_required_fields_missing { print_if(result, key, message, output) } -test_software_has_required_fields_present { +test_software_has_required_fields_present if { output := data.integrations.deny with input as {"softwares": {"terraform": {"link": "https://www.terraform.io/", "title": "Terraform"}}} key := "fields" @@ -263,7 +263,7 @@ test_software_has_required_fields_present { print_if(result, key, false, output) } -test_organization_has_required_labels { +test_organization_has_required_labels if { output := data.integrations.deny with input as {"organizations": {"/organizations/styra/": {}}} key := "fields" @@ -276,7 +276,7 @@ test_organization_has_required_labels { print_if(result, key, message, output) } -test_organization_has_required_fields_present { +test_organization_has_required_fields_present if { output := data.integrations.deny with input as {"organizations": {"styra": {"link": "https://styra.com/", "title": "Styra"}}} key := "fields" @@ -288,7 +288,7 @@ test_organization_has_required_fields_present { print_if(result, key, false, output) } -test_organization_has_valid_key { +test_organization_has_valid_key if { output := data.integrations.deny with input as {"organizations": {"/organizations/sty.ra/": {"link": "https://styra.com/", "title": "Styra"}}} key := "key" @@ -302,7 +302,7 @@ test_organization_has_valid_key { print_if(result, key, message, output) } -test_organization_has_one_or_more_integrations_none { +test_organization_has_one_or_more_integrations_none if { output := data.integrations.deny with input as {"organizations": {"/organizations/foobar/": {}}, "integrations": {}} key := "orphaned_org" @@ -315,7 +315,7 @@ test_organization_has_one_or_more_integrations_none { print_if(result, key, message, output) } -test_organization_has_one_or_more_integrations_one { +test_organization_has_one_or_more_integrations_one if { output := data.integrations.deny with input as {"organizations": {"/organizations/foobaz/": {}}, 
"integrations": {"/integrations/foobar/": {"inventors": ["foobaz"]}}} key := "orphaned_org" @@ -326,7 +326,7 @@ test_organization_has_one_or_more_integrations_one { print_if(result, key, false, output) } -test_organization_has_one_or_more_integrations_speaker { +test_organization_has_one_or_more_integrations_speaker if { output := data.integrations.deny with input as {"organizations": {"foobaz": {}}, "integrations": {"foobar": {"videos": [{"speakers": [{"organization": "foobaz"}]}]}}} key := "orphaned_org" @@ -337,7 +337,7 @@ test_organization_has_one_or_more_integrations_speaker { print_if(result, key, false, output) } -test_software_has_one_or_more_integrations_none { +test_software_has_one_or_more_integrations_none if { output := data.integrations.deny with input as {"softwares": {"/softwares/foobar/": {}}, "integrations": {}} key := "orphaned_software" @@ -350,7 +350,7 @@ test_software_has_one_or_more_integrations_none { print_if(result, key, message, output) } -test_software_has_one_or_more_integrations_one { +test_software_has_one_or_more_integrations_one if { output := data.integrations.deny with input as {"softwares": {"foobaz": {}}, "integrations": {"foobar": {"software": ["foobaz"]}}} key := "orphaned_software" @@ -361,7 +361,7 @@ test_software_has_one_or_more_integrations_one { print_if(result, key, false, output) } -test_software_has_valid_key { +test_software_has_valid_key if { output := data.integrations.deny with input as {"softwares": {"/softwares/in.valid/": {"link": "https://example.com/", "title": "Example"}}} key := "key" diff --git a/builtin_metadata.json b/builtin_metadata.json index 4e35f13087..dc1236cb32 100644 --- a/builtin_metadata.json +++ b/builtin_metadata.json @@ -362,6 +362,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the number without its sign.", @@ -488,6 +489,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "deprecated": true, @@ -619,6 +621,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the intersection of two sets.", @@ -746,6 +749,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "deprecated": true, @@ -877,6 +881,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Concatenates two arrays.", @@ -955,6 +960,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the reverse of a given array.", @@ -1093,6 +1099,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a slice of a given array. 
If `start` is greater or equal than `stop`, `slice` is `[]`.", @@ -1222,6 +1229,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "infix": ":=", @@ -1348,6 +1356,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Deserializes the base64 encoded input string.", @@ -1476,6 +1485,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Serializes the input string into base64 encoding.", @@ -1583,6 +1593,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies the input string is base64 encoded.", @@ -1711,6 +1722,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Deserializes the base64url encoded input string.", @@ -1839,6 +1851,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Serializes the input string into base64url encoding.", @@ -1944,6 +1957,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Serializes the input string into base64url encoding without padding.", @@ -2073,6 +2087,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the bitwise \"AND\" of two integers.", @@ -2202,6 +2217,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a new integer with its bits shifted `s` bits to the left.", @@ -2326,6 +2342,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the bitwise negation (flip) of an integer.", @@ -2455,6 +2472,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the bitwise \"OR\" of two integers.", @@ -2584,6 +2602,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a new integer with its bits shifted `s` bits to the right.", @@ -2713,6 +2732,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the bitwise \"XOR\" (exclusive-or) of two integers.", @@ -2839,6 +2859,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "deprecated": true, @@ -2963,6 +2984,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "deprecated": true, @@ -3087,6 +3109,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "deprecated": true, @@ -3211,6 +3234,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "deprecated": true, @@ -3335,6 +3359,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "deprecated": true, @@ -3459,6 +3484,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "deprecated": true, @@ -3556,6 +3582,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Rounds the number _up_ to the nearest integer.", @@ -3689,6 +3716,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Joins a set or array of strings with a delimiter.", @@ -3822,6 +3850,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns `true` if the search string is included in the base string", @@ -3950,6 +3979,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": " Count takes a collection or string and returns the number of elements (or characters) in it.", @@ -3999,6 +4029,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a boolean representing the result of comparing two MACs for equality without leaking timing information.", @@ -4082,6 +4113,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a string representing the MD5 HMAC of the input message using the input key.", @@ 
-4165,6 +4197,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a string representing the SHA1 HMAC of the input message using the input key.", @@ -4248,6 +4281,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a string representing the SHA256 HMAC of the input message using the input key.", @@ -4331,6 +4365,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a string representing the SHA512 HMAC of the input message using the input key.", @@ -4459,6 +4494,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a string representing the input string hashed with the MD5 function", @@ -4499,6 +4535,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns zero or more private keys from the given encoded string containing DER certificate data.\n\nIf the input is empty, the function will return null. The input string should be a list of one or more concatenated PEM blocks. The whole input of concatenated PEM blocks can optionally be Base64 encoded.", @@ -4627,6 +4664,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a string representing the input string hashed with the SHA1 function", @@ -4755,6 +4793,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a string representing the input string hashed with the SHA256 function", @@ -4842,6 +4881,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns one or more certificates from the given string containing PEM\nor base64 encoded DER certificates after verifying the supplied certificates form a complete\ncertificate chain back to a trusted root.\n\nThe first certificate is treated as the root and the last is treated as the leaf,\nwith all others being treated as intermediates.", @@ -4877,6 +4917,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns one or more certificates from the given string containing PEM\nor base64 encoded DER certificates after verifying the supplied certificates form a complete\ncertificate chain back to a trusted root. A config option passed as the second argument can\nbe used to configure the validation options used.\n\nThe first certificate is treated as the root and the last is treated as the leaf,\nwith all others being treated as intermediates.", @@ -4990,6 +5031,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a PKCS #10 certificate signing request from the given PEM-encoded PKCS#10 certificate signing request.", @@ -5118,6 +5160,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns zero or more certificates from the given encoded string containing\nDER certificate data.\n\nIf the input is empty, the function will return null. The input string should be a list of one or more\nconcatenated PEM blocks. 
The whole input of concatenated PEM blocks can optionally be Base64 encoded.", @@ -5166,6 +5209,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a valid key pair", @@ -5250,6 +5294,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a JWK for signing a JWT from the given PEM-encoded RSA private key.", @@ -5383,6 +5428,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Divides the first number by the second number.", @@ -5517,6 +5563,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns true if the search string ends with the base string.", @@ -5646,6 +5693,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "infix": "=", @@ -5775,6 +5823,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "infix": "==", @@ -5874,6 +5923,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Rounds the number _down_ to the nearest integer.", @@ -6007,6 +6057,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the string representation of the number in the given base after rounding it down to an integer value.", @@ -6145,6 +6196,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Parses and matches strings against the glob notation. Not to be confused with `regex.globs_match`.", @@ -6273,6 +6325,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a string which represents a version of the pattern where all asterisks have been escaped.", @@ -6397,6 +6450,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Computes the set of reachable nodes in the graph from a set of starting nodes.", @@ -6478,6 +6532,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Computes the set of reachable paths in the graph from a set of starting nodes.", @@ -6552,6 +6607,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Checks that a GraphQL query is valid against a given schema. The query and/or schema can be either GraphQL strings or AST objects from the other GraphQL builtin functions.", @@ -6626,6 +6682,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns AST objects for a given GraphQL query and schema after validating the query against the schema. Returns undefined if errors were encountered during parsing or validation. The query and/or schema can be either GraphQL strings or AST objects from the other GraphQL builtin functions.", @@ -6700,6 +6757,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a boolean indicating success or failure alongside the parsed ASTs for a given GraphQL query and schema after validating the query against the schema. The query and/or schema can be either GraphQL strings or AST objects from the other GraphQL builtin functions.", @@ -6769,6 +6827,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns an AST object for a GraphQL query.", @@ -6838,6 +6897,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns an AST object for a GraphQL schema.", @@ -6899,6 +6959,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Checks that the input is a valid GraphQL schema. 
The schema can be either a GraphQL string or an AST object from the other GraphQL builtin functions.", @@ -7030,6 +7091,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "infix": "\u003e", @@ -7161,6 +7223,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "infix": "\u003e=", @@ -7266,6 +7329,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Deserializes the hex-encoded input string.", @@ -7371,6 +7435,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Serializes the input string using hex-encoding.", @@ -7499,6 +7564,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a HTTP response to the given HTTP request.", @@ -7632,6 +7698,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the index of a substring contained inside a string.", @@ -7713,6 +7780,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a list of all the indexes of a substring contained inside a string.", @@ -7796,6 +7864,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "infix": "in", @@ -7880,6 +7949,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "infix": "in", @@ -7958,6 +8028,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "introduced": "v0.34.0", @@ -8081,6 +8152,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the intersection of the given input sets.", @@ -8209,6 +8281,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Decodes a JSON Web Token and outputs it as an object.", @@ -8342,6 +8415,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies a JWT signature under parameterized constraints and decodes the claims if it is valid.\nSupports the following algorithms: HS256, HS384, HS512, RS256, RS384, RS512, ES256, ES384, ES512, PS256, PS384 and PS512.", @@ -8480,6 +8554,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Encodes and optionally signs a JSON Web Token. 
Inputs are taken as objects, not encoded strings (see `io.jwt.encode_sign_raw`).", @@ -8618,6 +8693,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Encodes and optionally signs a JSON Web Token.", @@ -8751,6 +8827,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies if a ES256 JWT signature is valid.", @@ -8875,6 +8952,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies if a ES384 JWT signature is valid.", @@ -8999,6 +9077,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies if a ES512 JWT signature is valid.", @@ -9132,6 +9211,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies if a HS256 (secret) JWT signature is valid.", @@ -9256,6 +9336,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies if a HS384 (secret) JWT signature is valid.", @@ -9380,6 +9461,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies if a HS512 (secret) JWT signature is valid.", @@ -9513,6 +9595,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies if a PS256 JWT signature is valid.", @@ -9637,6 +9720,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies if a PS384 JWT signature is valid.", @@ -9761,6 +9845,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies if a PS512 JWT signature is valid.", @@ -9894,6 +9979,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies if a RS256 JWT signature is valid.", @@ -10018,6 +10104,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies if a RS384 JWT signature is valid.", @@ -10142,6 +10229,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies if a RS512 JWT signature is valid.", @@ -10270,6 +10358,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns `true` if the input value is an array.", @@ -10398,6 +10487,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns `true` if the input value is a boolean.", @@ -10526,6 +10616,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns `true` if the input value is null.", @@ -10654,6 +10745,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns `true` if the input value is a number.", @@ -10782,6 +10874,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns true if the input value is an object", @@ -10910,6 +11003,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns `true` if the input value is a set.", @@ -11038,6 +11132,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns `true` if the input value is a string.", @@ -11171,6 +11266,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Filters the object. For example: `json.filter({\"a\": {\"b\": \"x\", \"c\": \"y\"}}, [\"a/b\"])` will result in `{\"a\": {\"b\": \"x\"}}`). 
Paths are not filtered in-order and are deduplicated before being evaluated.", @@ -11277,6 +11373,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies the input string is a valid JSON document.", @@ -11405,6 +11502,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Serializes the input term to JSON.", @@ -11439,6 +11537,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Serializes the input term JSON, with additional formatting options via the `opts` parameter. `opts` accepts keys `pretty` (enable multi-line/formatted JSON), `prefix` (string to prefix lines with, default empty string) and `indent` (string to indent with, default `\\t`).", @@ -11492,6 +11591,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Checks that the document matches the JSON schema.", @@ -11599,6 +11699,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Patches an object according to RFC6902. For example: `json.patch({\"a\": {\"foo\": 1}}, [{\"op\": \"add\", \"path\": \"/a/bar\", \"value\": 2}])` results in `{\"a\": {\"foo\": 1, \"bar\": 2}`. The patches are applied atomically: if any of them fails, the result will be undefined. Additionally works on sets, where a value contained in the set is considered to be its path.", @@ -11728,6 +11829,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Removes paths from an object. For example: `json.remove({\"a\": {\"b\": \"x\", \"c\": \"y\"}}, [\"a/b\"])` will result in `{\"a\": {\"c\": \"y\"}}`. Paths are not removed in-order and are deduplicated before being evaluated.", @@ -11856,6 +11958,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Deserializes the input string.", @@ -11904,6 +12007,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Checks that the input is a valid JSON schema object. The schema can be either a JSON string or an JSON object.", @@ -12032,6 +12136,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the input string but with all characters in lower-case.", @@ -12163,6 +12268,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "infix": "\u003c", @@ -12294,6 +12400,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "infix": "\u003c=", @@ -12422,6 +12529,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the maximum value in a collection.", @@ -12550,6 +12658,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the minimum value in a collection.", @@ -12681,6 +12790,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Minus subtracts the second number from the first number or computes the difference between two sets.", @@ -12813,6 +12923,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Multiplies two numbers.", @@ -12945,6 +13056,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "infix": "!=", @@ -13078,6 +13190,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Checks if a CIDR or IP is contained within another CIDR. `output` is `true` if `cidr_or_ip` (e.g. `127.0.0.64/26` or `127.0.0.1`) is contained within `cidr` (e.g. `127.0.0.1/24`) and `false` otherwise. 
Supports both IPv4 and IPv6 notations.", @@ -13206,6 +13319,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Checks if collections of cidrs or ips are contained within another collection of cidrs and returns matches. This function is similar to `net.cidr_contains` except it allows callers to pass collections of CIDRs or IPs as arguments and returns the matches (as opposed to a boolean result indicating a match between two CIDRs/IPs).", @@ -13334,6 +13448,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Expands CIDR to set of hosts (e.g., `net.cidr_expand(\"192.168.0.0/30\")` generates 4 hosts: `{\"192.168.0.0\", \"192.168.0.1\", \"192.168.0.2\", \"192.168.0.3\"}`).", @@ -13467,6 +13582,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Checks if a CIDR intersects with another CIDR (e.g. `192.168.0.0/16` overlaps with `192.168.1.0/24`). Supports both IPv4 and IPv6 notations.", @@ -13528,6 +13644,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Parses an IPv4/IPv6 CIDR and returns a boolean indicating if the provided CIDR is valid.", @@ -13635,6 +13752,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Merges IP addresses and subnets into the smallest possible list of CIDRs (e.g., `net.cidr_merge([\"192.0.128.0/24\", \"192.0.129.0/24\"])` generates `{\"192.0.128.0/23\"}`.This function merges adjacent subnets where possible, those contained within others and also removes any duplicates.\nSupports both IPv4 and IPv6 notations. IPv6 inputs need a prefix length (e.g. \"/128\").", @@ -13764,6 +13882,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "deprecated": true, @@ -13841,6 +13960,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the set of IP addresses (both v4 and v6) that the passed-in `name` resolves to using the standard name resolution mechanisms available.", @@ -13957,6 +14077,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns an array of numbers in the given (inclusive) range. If `a==b`, then `range == [a]`; if `a \u003e b`, then `range` is in descending order.", @@ -14006,6 +14127,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns an array of numbers in the given (inclusive) range incremented by a positive step.\n\tIf \"a==b\", then \"range == [a]\"; if \"a \u003e b\", then \"range\" is in descending order.\n\tIf the provided \"step\" is less then 1, an error will be thrown.\n\tIf \"b\" is not in the range of the provided \"step\", \"b\" won't be included in the result.\n\t", @@ -14137,6 +14259,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Filters the object by keeping only specified keys. For example: `object.filter({\"a\": {\"b\": \"x\", \"c\": \"y\"}, \"d\": \"z\"}, [\"a\"])` will result in `{\"a\": {\"b\": \"x\", \"c\": \"y\"}}`).", @@ -14275,6 +14398,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns value of an object's key if present, otherwise a default. If the supplied `key` is an `array`, then `object.get` will search through a nested object or array using each key in turn. For example: `object.get({\"a\": [{ \"b\": true }]}, [\"a\", 0, \"b\"], false)` results in `true`.", @@ -14332,6 +14456,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a set of an object's keys. 
For example: `object.keys({\"a\": 1, \"b\": true, \"c\": \"d\")` results in `{\"a\", \"b\", \"c\"}`.", @@ -14463,6 +14588,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Removes specified keys from an object.", @@ -14536,6 +14662,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Determines if an object `sub` is a subset of another object `super`.Object `sub` is a subset of object `super` if and only if every key in `sub` is also in `super`, **and** for all keys which `sub` and `super` share, they have the same value. This function works with objects, sets, arrays and a set of array and set.If both arguments are objects, then the operation is recursive, e.g. `{\"c\": {\"x\": {10, 15, 20}}` is a subset of `{\"a\": \"b\", \"c\": {\"x\": {10, 15, 20, 25}, \"y\": \"z\"}`. If both arguments are sets, then this function checks if every element of `sub` is a member of `super`, but does not attempt to recurse. If both arguments are arrays, then this function checks if `sub` appears contiguously in order within `super`, and also does not attempt to recurse. If `super` is array and `sub` is set, then this function checks if `super` contains every element of `sub` with no consideration of ordering, and also does not attempt to recurse.", @@ -14667,6 +14794,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Creates a new object of the asymmetric union of two objects. For example: `object.union({\"a\": 1, \"b\": 2, \"c\": {\"d\": 3}}, {\"a\": 7, \"c\": {\"d\": 4, \"e\": 5}})` will result in `{\"a\": 7, \"b\": 2, \"c\": {\"d\": 4, \"e\": 5}}`.", @@ -14743,6 +14871,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Creates a new object that is the asymmetric union of all objects merged from left to right. For example: `object.union_n([{\"a\": 1}, {\"b\": 2}, {\"a\": 3}])` will result in `{\"b\": 2, \"a\": 3}`.", @@ -14865,6 +14994,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns an object that describes the runtime environment where OPA is deployed.", @@ -14996,6 +15126,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the union of two sets.", @@ -15128,6 +15259,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Plus adds two numbers together.", @@ -15205,6 +15337,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "introduced": "v0.34.0", @@ -15328,6 +15461,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Multiplies elements of an array or set of numbers", @@ -15395,6 +15529,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Signs an HTTP request object for Amazon Web Services. Currently implements [AWS Signature Version 4 request signing](https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html) by the `Authorization` header method.", @@ -15487,6 +15622,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a random integer between `0` and `n` (`n` exclusive). If `n` is `0`, then `y` is always `0`. 
For any given argument pair (`str`, `n`), the output will be consistent throughout a query evaluation.", @@ -15616,6 +15752,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "deprecated": true, @@ -15752,6 +15889,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns all successive matches of the expression.", @@ -15890,6 +16028,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the specified number of matches when matching the input against the pattern.", @@ -16023,6 +16162,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Checks if the intersection of two glob-style regular expressions matches a non-empty set of non-empty strings.\nThe set of regex symbols is limited for this builtin: only `.`, `*`, `+`, `[`, `-`, `]` and `\\` are treated as special symbols.", @@ -16133,6 +16273,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Checks if a string is a valid regular expression: the detailed syntax for patterns is defined by https://github.com/google/re2/wiki/Syntax.", @@ -16248,6 +16389,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Matches a string against a regular expression.", @@ -16320,6 +16462,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Find and replaces the text using the regular expression pattern.", @@ -16453,6 +16596,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Splits the input string by the occurrences of the given pattern.", @@ -16596,6 +16740,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Matches a string against a pattern, where there pattern may be glob-like", @@ -16660,6 +16805,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the chain of metadata for the active rule.\nOrdered starting at the active rule, going outward to the most distant node in its package ancestry.\nA chain entry is a JSON document with two members: \"path\", an array representing the path of the node; and \"annotations\", a JSON document containing the annotations declared for the node.\nThe first entry in the chain always points to the active rule, even if it has no declared annotations (in which case the \"annotations\" member is not present).", @@ -16724,6 +16870,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns annotations declared for the active rule and using the _rule_ scope.", @@ -16857,6 +17004,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Parses the input Rego string and returns an object representation of the AST.", @@ -16988,6 +17136,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the remainder for of `x` divided by `y`, for `y != 0`.", @@ -17127,6 +17276,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Replace replaces all instances of a sub-string.", @@ -17255,6 +17405,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Rounds the number to the nearest integer.", @@ -17371,6 +17522,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Compares valid SemVer formatted version strings.", @@ -17482,6 +17634,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Validates that the input is a valid SemVer string.", @@ -17611,6 +17764,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "deprecated": true, @@ -17737,6 
+17891,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a sorted array.", @@ -17870,6 +18025,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Split returns an array containing elements of the input string split on a delimiter.", @@ -18003,6 +18159,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the given string, formatted.", @@ -18136,6 +18293,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns true if the search string begins with the base string.", @@ -18204,6 +18362,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns true if any of the search strings begins with any of the base strings.", @@ -18272,6 +18431,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns true if any of the search strings ends with any of the base strings.", @@ -18302,6 +18462,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the number of non-overlapping instances of a substring in a string.", @@ -18342,6 +18503,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Renders a templated string with given template variables injected. For a given templated string and key/value mapping, values will be injected into the template where they are referenced by key.\n\tFor examples of templating syntax, see https://pkg.go.dev/text/template", @@ -18475,6 +18637,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Replaces a string from a list of old, new string pairs.\nReplacements are performed in the order they appear in the target string, without overlapping matches.\nThe old string comparisons are done in argument order.", @@ -18553,6 +18716,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Reverses a given string.", @@ -18691,6 +18855,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the portion of a string for a given `offset` and a `length`. If `length \u003c 0`, `output` is the remainder of the string.", @@ -18819,6 +18984,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Sums elements of an array or set of numbers.", @@ -18956,6 +19122,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the nanoseconds since epoch after adding years, months and days to nanoseconds. Month \u0026 day values outside their usual ranges after the operation and will be normalized - for example, October 32 would become November 1. 
`undefined` if the result would be outside the valid time range that can fit within an `int64`.", @@ -19084,6 +19251,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the `[hour, minute, second]` of the day for the nanoseconds since epoch.", @@ -19212,6 +19380,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the `[year, month, day]` for the nanoseconds since epoch.", @@ -19313,6 +19482,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the difference between two unix timestamps in nanoseconds (with optional timezone strings).", @@ -19365,6 +19535,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the formatted timestamp for the nanoseconds since epoch.", @@ -19487,6 +19658,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the current time since epoch in nanoseconds.", @@ -19615,6 +19787,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the duration in nanoseconds represented by a string.", @@ -19748,6 +19921,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the time in nanoseconds parsed from the string in the given format. `undefined` if the result would be outside the valid time range that can fit within an `int64`.", @@ -19876,6 +20050,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the time in nanoseconds parsed from the string in RFC3339 format. `undefined` if the result would be outside the valid time range that can fit within an `int64`.", @@ -20004,6 +20179,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the day of the week (Monday, Tuesday, ...) for the nanoseconds since epoch.", @@ -20132,6 +20308,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Converts a string, bool, or number value to a number: Strings are converted to numbers using `strconv.Atoi`, Boolean `false` is converted to 0 and `true` is converted to 1.", @@ -20260,6 +20437,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Emits `note` as a `Note` event in the query explanation. Query explanations show the exact expressions evaluated by OPA during policy execution. For example, `trace(\"Hello There!\")` includes `Note \"Hello There!\"` in the query explanation. To include variables in the message, use `sprintf`. For example, `person := \"Bob\"; trace(sprintf(\"Hello There! %v\", [person]))` will emit `Note \"Hello There! Bob\"` inside of the explanation.", @@ -20393,6 +20571,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns `value` with all leading or trailing instances of the `cutset` characters removed.", @@ -20526,6 +20705,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns `value` with all leading instances of the `cutset` characters removed.", @@ -20659,6 +20839,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns `value` without the prefix. 
If `value` doesn't start with `prefix`, it is returned unchanged.", @@ -20792,6 +20973,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns `value` with all trailing instances of the `cutset` characters removed.", @@ -20920,6 +21102,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Return the given string with all leading and trailing white space removed.", @@ -21053,6 +21236,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns `value` without the suffix. If `value` doesn't end with `suffix`, it is returned unchanged.", @@ -21181,6 +21365,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the type of its input value.", @@ -21309,6 +21494,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the union of the given input sets.", @@ -21378,6 +21564,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Converts strings like \"10G\", \"5K\", \"4M\", \"1500m\", and the like into a number.\nThis number can be a non-integer, such as 1.5, 0.22, etc. Scientific notation is supported,\nallowing values such as \"1e-3K\" (1) or \"2.5e6M\" (2.5 million M).\n\nSupports standard metric decimal and binary SI units (e.g., K, Ki, M, Mi, G, Gi, etc.) where\nm, K, M, G, T, P, and E are treated as decimal units and Ki, Mi, Gi, Ti, Pi, and Ei are treated as\nbinary units.\n\nNote that 'm' and 'M' are case-sensitive to allow distinguishing between \"milli\" and \"mega\" units\nrespectively. Other units are case-insensitive.", @@ -21506,6 +21693,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Converts strings like \"10GB\", \"5K\", \"4mb\", or \"1e6KB\" into an integer number of bytes.\n\nSupports standard byte units (e.g., KB, KiB, etc.) where KB, MB, GB, and TB are treated as decimal\nunits, and KiB, MiB, GiB, and TiB are treated as binary units. Scientific notation is supported,\nenabling values like \"1.5e3MB\" (1500MB) or \"2e6GiB\" (2 million GiB).\n\nThe bytes symbol (b/B) in the unit is optional; omitting it will yield the same result (e.g., \"Mi\"\nand \"MiB\" are equivalent).", @@ -21634,6 +21822,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns the input string but with all characters in upper-case.", @@ -21762,6 +21951,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Decodes a URL-encoded input string.", @@ -21869,6 +22059,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Decodes the given URL query string into an object.", @@ -21997,6 +22188,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Encodes the input string into a URL-encoded string.", @@ -22125,6 +22317,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Encodes the given object into a URL encoded query string.", @@ -22163,6 +22356,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Parses the string value as an UUID and returns an object with the well-defined fields of the UUID if valid.", @@ -22282,6 +22476,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Returns a new UUIDv4.", @@ -22410,6 +22605,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Generates `[path, value]` tuples for all nested documents of `x` (recursively). 
Queries can use `walk` to traverse documents nested under `x`.", @@ -22517,6 +22713,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Verifies the input string is a valid YAML document.", @@ -22645,6 +22842,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Serializes the input term to YAML.", @@ -22773,6 +22971,7 @@ "v0.68.0", "v0.69.0", "v0.70.0", + "v1.0.0", "edge" ], "description": "Deserializes the input string.", diff --git a/capabilities.json b/capabilities.json index 862a4555f9..48a87b0c35 100644 --- a/capabilities.json +++ b/capabilities.json @@ -4819,12 +4819,6 @@ } } ], - "future_keywords": [ - "contains", - "every", - "if", - "in" - ], "wasm_abi_versions": [ { "version": 1, @@ -4836,8 +4830,6 @@ } ], "features": [ - "rule_head_ref_string_prefixes", - "rule_head_refs", - "rego_v1_import" + "rego_v1" ] } diff --git a/cmd/bench.go b/cmd/bench.go index a2e484ec44..2ec1e9a125 100644 --- a/cmd/bench.go +++ b/cmd/bench.go @@ -62,8 +62,9 @@ func newBenchmarkEvalParams() benchmarkCommandParams { evalPrettyOutput, benchmarkGoBenchOutput, }), - target: util.NewEnumFlag(compile.TargetRego, []string{compile.TargetRego, compile.TargetWasm}), - schema: &schemaFlags{}, + target: util.NewEnumFlag(compile.TargetRego, []string{compile.TargetRego, compile.TargetWasm}), + schema: &schemaFlags{}, + capabilities: newcapabilitiesFlag(), }, gracefulShutdownPeriod: 10, } diff --git a/cmd/build.go b/cmd/build.go index 1e3c6944cb..f6655c37ab 100644 --- a/cmd/build.go +++ b/cmd/build.go @@ -57,6 +57,17 @@ func newBuildParams() buildParams { } } +func (p *buildParams) regoVersion() ast.RegoVersion { + if p.v0Compatible { + // v0 takes precedence over v1 + return ast.RegoV0 + } + if p.v1Compatible { + return ast.RegoV1 + } + return ast.DefaultRegoVersion +} + func init() { buildParams := newBuildParams() @@ -292,7 +303,7 @@ func dobuild(params buildParams, args []string) error { if params.capabilities.C != nil { capabilities = params.capabilities.C } else { - capabilities = ast.CapabilitiesForThisVersion() + capabilities = ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(params.regoVersion())) } compiler := compile.New(). @@ -311,14 +322,7 @@ func dobuild(params buildParams, args []string) error { WithPartialNamespace(params.ns). 
WithFollowSymlinks(params.followSymlinks) - regoVersion := ast.DefaultRegoVersion - if params.v0Compatible { - // v0 takes precedence over v1 - regoVersion = ast.RegoV0 - } else if params.v1Compatible { - regoVersion = ast.RegoV1 - } - compiler = compiler.WithRegoVersion(regoVersion) + compiler = compiler.WithRegoVersion(params.regoVersion()) if params.revision.isSet { compiler = compiler.WithRevision(*params.revision.v) diff --git a/cmd/build_test.go b/cmd/build_test.go index f6361a1f95..8f99ba21ef 100644 --- a/cmd/build_test.go +++ b/cmd/build_test.go @@ -108,6 +108,7 @@ p { is_foo("bar") }`, caps: func() string { c := ast.CapabilitiesForThisVersion() c.FutureKeywords = []string{"in"} + c.Features = []string{} j, err := json.Marshal(c) if err != nil { panic(err) @@ -120,6 +121,23 @@ import future.keywords.in p if "opa" in input.tools`, err: "rego_parse_error: unexpected keyword, must be one of [in]", }, + { + note: "future kw NOT defined in caps, rego-v1 feature", + caps: func() string { + c := ast.CapabilitiesForThisVersion() + c.FutureKeywords = []string{"in"} + c.Features = []string{ast.FeatureRegoV1} + j, err := json.Marshal(c) + if err != nil { + panic(err) + } + return string(j) + }(), + policy: `package test +import future.keywords.if +import future.keywords.in +p if "opa" in input.tools`, + }, { note: "future kw are defined in caps", caps: func() string { @@ -136,6 +154,34 @@ import future.keywords.if import future.keywords.in p if "opa" in input.tools`, }, + { + note: "rego.v1 imported AND defined in capabilities", + caps: func() string { + c := ast.CapabilitiesForThisVersion() + c.Features = []string{ast.FeatureRegoV1Import} + j, err := json.Marshal(c) + if err != nil { + panic(err) + } + return string(j) + }(), + policy: `package test +import rego.v1`, + }, + { + note: "rego.v1 imported AND rego-v1 in capabilities", + caps: func() string { + c := ast.CapabilitiesForThisVersion() + c.Features = []string{ast.FeatureRegoV1} + j, err := json.Marshal(c) + if err != nil { + panic(err) + } + return string(j) + }(), + policy: `package test +import rego.v1`, + }, } // add same tests for bundle-mode == true: @@ -1098,7 +1144,48 @@ p contains 2 if { }, }, { - note: "multiple bundles with different rego-versions (v0-compatible)", + note: "multiple bundles with different rego-versions, v0-compatible", + v0Compatible: true, + roots: []string{"bundle1", "bundle2"}, + files: map[string]string{ + "bundle1/.manifest": `{ + "roots": ["test1"], + "rego_version": 0, + "file_rego_versions": { + "*/test2.rego": 1 + } +}`, + "bundle1/test1.rego": `package test1 +p[1] { + input.x == 1 +}`, + "bundle1/test2.rego": `package test1 +p contains 2 if { + input.x == 1 +}`, + "bundle2/.manifest": `{ + "roots": ["test2"], + "rego_version": 1, + "file_rego_versions": { + "*/test4.rego": 0 + } +}`, + "bundle2/test3.rego": `package test2 +p contains 3 if { + input.x == 1 +}`, + "bundle2/test4.rego": `package test2 +p[4] { + input.x == 1 +}`, + }, + expErrs: []string{ + // capabilities inferred from --v0-compatible doesn't include rego_v1 feature, which must be respected + "rego_parse_error: illegal capabilities: rego_v1 feature required for parsing v1 Rego", + }, + }, + { + note: "multiple bundles with different rego-versions, v0-compatible, rego_v1 capabilities feature", v0Compatible: true, roots: []string{"bundle1", "bundle2"}, files: map[string]string{ @@ -1132,11 +1219,20 @@ p contains 3 if { p[4] { input.x == 1 }`, + "capabilities.json": func() string { + caps := 
ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)) + caps.Features = append(caps.Features, ast.FeatureRegoV1) + bs, err := json.Marshal(caps) + if err != nil { + t.Fatal(err) + } + return string(bs) + }(), }, expManifest: `{"revision":"","roots":["test1","test2"],"rego_version":0,"file_rego_versions":{"%ROOT%/bundle1/test2.rego":1,"%ROOT%/bundle2/test3.rego":1}}`, }, { - note: "multiple bundles with different rego-versions (v1-compatible)", + note: "multiple bundles with different rego-versions, v1-compatible", v1Compatible: true, roots: []string{"bundle1", "bundle2"}, files: map[string]string{ @@ -1184,6 +1280,10 @@ p[4] { params.v0Compatible = tc.v0Compatible params.v1Compatible = tc.v1Compatible + if _, ok := tc.files["capabilities.json"]; ok { + _ = params.capabilities.Set(path.Join(root, "capabilities.json")) + } + var roots []string if len(tc.roots) == 0 { roots = []string{root} @@ -1259,6 +1359,7 @@ func TestBuildBundleFromOtherBundles(t *testing.T) { note string v0Compatible bool v1Compatible bool + capabilities *ast.Capabilities bundles map[string]bundleInfo expBundle bundleInfo expErrs []string @@ -1520,6 +1621,37 @@ p { "policy_1.rego": `package test q contains 1 if { input.x == 1 +}`, + }, + }, + expErrs: []string{ + // capabilities inferred from --v0-compatible doesn't include rego_v1 feature, which must be respected + "rego_parse_error: illegal capabilities: rego_v1 feature required for parsing v1 Rego", + }, + }, + { + note: "single v1 bundle, v0 per-file override, --v0-compatible, rego_v1 capabilities feature", + v0Compatible: true, + capabilities: func() *ast.Capabilities { + caps := ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)) + caps.Features = append(caps.Features, ast.FeatureRegoV1) + return caps + }(), + bundles: map[string]bundleInfo{ + "bundle.tar.gz": { + ".manifest": `{ + "rego_version": 1, + "file_rego_versions": { + "/policy_0.rego": 0 + } +}`, + "policy_0.rego": `package test +p { + input.x == 1 +}`, + "policy_1.rego": `package test +q contains 1 if { + input.x == 1 }`, }, }, @@ -1558,6 +1690,35 @@ p { "policy.rego": `package test2 q contains 1 if { input.x == 1 +}`, + }, + }, + expErrs: []string{ + // capabilities inferred from --v0-compatible doesn't include rego_v1 feature, which must be respected + "rego_parse_error: illegal capabilities: rego_v1 feature required for parsing v1 Rego", + }, + }, + { + note: "v0 bundle + v1 bundle, --v0-compatible, rego_v1 capabilities feature", + v0Compatible: true, + capabilities: func() *ast.Capabilities { + caps := ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)) + caps.Features = append(caps.Features, ast.FeatureRegoV1) + return caps + }(), + bundles: map[string]bundleInfo{ + "bundle_v0.tar.gz": { + ".manifest": `{"roots": ["test1"], "rego_version": 0}`, + "policy.rego": `package test1 +p { + input.x == 1 +}`, + }, + "bundle_v1.tar.gz": { + ".manifest": `{"roots": ["test2"], "rego_version": 1}`, + "policy.rego": `package test2 +q contains 1 if { + input.x == 1 }`, }, }, @@ -1649,6 +1810,10 @@ q contains 1 if { params.v0Compatible = tc.v0Compatible params.v1Compatible = tc.v1Compatible + if tc.capabilities != nil { + params.capabilities.C = tc.capabilities + } + err := dobuild(params, roots) if tc.expErrs != nil { if err == nil { @@ -1837,6 +2002,298 @@ p contains x if { } } +func TestBuildWithRegoV1Capability(t *testing.T) { + tests := []struct { + note string + v0Compatible bool + capabilities *ast.Capabilities + files map[string]string + expFiles 
map[string]string + expErrs []string + }{ + { + note: "v0 module, v0-compatible, no capabilities", + v0Compatible: true, + files: map[string]string{ + "test.rego": `package test + p[x] { + x := 42 + }`, + }, + expFiles: map[string]string{ + ".manifest": `{"revision":"","roots":[""],"rego_version":0} +`, + "test.rego": `package test + +p[x] { + x := 42 +} +`, + }, + }, + { + note: "v0 module, v0-compatible, v0 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + files: map[string]string{ + "test.rego": `package test + p[x] { + x := 42 + }`, + }, + expFiles: map[string]string{ + ".manifest": `{"revision":"","roots":[""],"rego_version":0} +`, + "test.rego": `package test + +p[x] { + x := 42 +} +`, + }, + }, + { + note: "v0 module, v0-compatible, v1 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + files: map[string]string{ + "test.rego": `package test + p[x] { + x := 42 + }`, + }, + expFiles: map[string]string{ + ".manifest": `{"revision":"","roots":[""],"rego_version":0} +`, + "test.rego": `package test + +p[x] { + x := 42 +} +`, + }, + }, + + { + note: "v0 module, not v0-compatible, no capabilities", + files: map[string]string{ + "test.rego": `package test + p[x] { + x := 42 + }`, + }, + expErrs: []string{ + "test.rego:2: rego_parse_error: `if` keyword is required before rule body", + "test.rego:2: rego_parse_error: `contains` keyword is required for partial set rules", + }, + }, + { + note: "v0 module, not v0-compatible, v0 capabilities", + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + files: map[string]string{ + "test.rego": `package test + p[x] { + x := 42 + }`, + }, + expErrs: []string{ + "rego_parse_error: illegal capabilities: rego_v1 feature required for parsing v1 Rego", + }, + }, + { + note: "v0 module, not v0-compatible, v1 capabilities", + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + files: map[string]string{ + "test.rego": `package test + p[x] { + x := 42 + }`, + }, + expErrs: []string{ + "test.rego:2: rego_parse_error: `if` keyword is required before rule body", + "test.rego:2: rego_parse_error: `contains` keyword is required for partial set rules", + }, + }, + + { + note: "v1 module, v0-compatible, no capabilities", + v0Compatible: true, + files: map[string]string{ + "test.rego": `package test + + p contains x if { + x := 42 + }`, + }, + expErrs: []string{ + "test.rego:3: rego_parse_error: var cannot be used for rule name", + }, + }, + { + note: "v1 module, v0-compatible, v0 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + files: map[string]string{ + "test.rego": `package test + + p contains x if { + x := 42 + }`, + }, + expErrs: []string{ + "test.rego:3: rego_parse_error: var cannot be used for rule name", + }, + }, + { + note: "v1 module, v0-compatible, v1 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + files: map[string]string{ + "test.rego": `package test + + p contains x if { + x := 42 + }`, + }, + expErrs: []string{ + "test.rego:3: rego_parse_error: var cannot be used for rule name", + }, + }, + + { + note: "v1 module, not v0-compatible, no capabilities", + files: map[string]string{ + "test.rego": `package test + + p contains x if { + x := 42 + }`, + }, + expFiles: 
map[string]string{ + ".manifest": `{"revision":"","roots":[""],"rego_version":1} +`, + "test.rego": `package test + +p contains x if { + x := 42 +} +`, + }, + }, + { + note: "v1 module, not v0-compatible, v0 capabilities", + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + files: map[string]string{ + "test.rego": `package test + + p contains x if { + x := 42 + }`, + }, + expErrs: []string{ + "rego_parse_error: illegal capabilities: rego_v1 feature required for parsing v1 Rego", + }, + }, + { + note: "v1 module, not v0-compatible, v1 capabilities", + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + files: map[string]string{ + "test.rego": `package test + + p contains x if { + x := 42 + }`, + }, + expFiles: map[string]string{ + ".manifest": `{"revision":"","roots":[""],"rego_version":1} +`, + "test.rego": `package test + +p contains x if { + x := 42 +} +`, + }, + }, + } + + for _, tc := range tests { + t.Run(tc.note, func(t *testing.T) { + test.WithTempFS(tc.files, func(root string) { + params := newBuildParams() + params.outputFile = path.Join(root, "bundle.tar.gz") + params.v0Compatible = tc.v0Compatible + params.capabilities.C = tc.capabilities + + err := dobuild(params, []string{root}) + + if len(tc.expErrs) > 0 { + if err == nil { + t.Fatal("expected error but got nil") + } + for _, expErr := range tc.expErrs { + if !strings.Contains(err.Error(), expErr) { + t.Fatalf("expected error:\n\n%v\n\ngot:\n\n%v", expErr, err) + } + } + } else { + if err != nil { + t.Fatal(err) + } + + fl := loader.NewFileLoader() + _, err = fl.AsBundle(params.outputFile) + if err != nil { + t.Fatal(err) + } + + // Check that manifest is not written given no input manifest and no other flags + f, err := os.Open(params.outputFile) + if err != nil { + t.Fatal(err) + } + defer f.Close() + + gr, err := gzip.NewReader(f) + if err != nil { + t.Fatal(err) + } + + tr := tar.NewReader(gr) + + foundFiles := map[string]struct{}{} + for { + f, err := tr.Next() + if err == io.EOF { + break + } else if err != nil { + t.Fatal(err) + } + foundFiles[path.Base(f.Name)] = struct{}{} + expectedFile := tc.expFiles[path.Base(f.Name)] + if expectedFile != "" { + data, err := io.ReadAll(tr) + if err != nil { + t.Fatal(err) + } + actualFile := string(data) + if actualFile != expectedFile { + t.Fatalf("expected file %s to be:\n\n%v\n\ngot:\n\n%v", f.Name, expectedFile, actualFile) + } + } + } + + for expectedFile := range tc.expFiles { + if _, ok := foundFiles[expectedFile]; !ok { + t.Fatalf("expected file %s not found in bundle, got: %v", expectedFile, foundFiles) + } + } + } + }) + }) + } +} + func TestBuildWithCompatibleFlags(t *testing.T) { tests := []struct { note string diff --git a/cmd/capabilities.go b/cmd/capabilities.go index bbcf4fc6ce..e031671a1c 100644 --- a/cmd/capabilities.go +++ b/cmd/capabilities.go @@ -15,9 +15,17 @@ import ( ) type capabilitiesParams struct { - showCurrent bool - version string - file string + showCurrent bool + version string + file string + v0Compatible bool +} + +func (p *capabilitiesParams) regoVersion() ast.RegoVersion { + if p.v0Compatible { + return ast.RegoV0 + } + return ast.DefaultRegoVersion } func init() { @@ -84,7 +92,8 @@ Print the capabilities of a capabilities file } capabilitiesCommand.Flags().BoolVar(&capabilitiesParams.showCurrent, "current", false, "print current capabilities") capabilitiesCommand.Flags().StringVar(&capabilitiesParams.version, "version", "", "print capabilities of a specific version") - 
capabilitiesCommand.Flags().StringVar(&capabilitiesParams.file, "file", "", "print current capabilities") + capabilitiesCommand.Flags().StringVar(&capabilitiesParams.file, "file", "", "print capabilities defined by a file") + addV0CompatibleFlag(capabilitiesCommand.Flags(), &capabilitiesParams.v0Compatible, false) RootCommand.AddCommand(capabilitiesCommand) } @@ -100,7 +109,7 @@ func doCapabilities(params capabilitiesParams) (string, error) { } else if len(params.file) > 0 { c, err = ast.LoadCapabilitiesFile(params.file) } else if params.showCurrent { - c = ast.CapabilitiesForThisVersion() + c = ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(params.regoVersion())) } else { return showVersions() } diff --git a/cmd/capabilities_test.go b/cmd/capabilities_test.go index 39f8a4a1f0..18162abdfc 100644 --- a/cmd/capabilities_test.go +++ b/cmd/capabilities_test.go @@ -5,9 +5,13 @@ package cmd import ( + "bytes" "path" + "reflect" + "sort" "testing" + "github.com/open-policy-agent/opa/v1/ast" "github.com/open-policy-agent/opa/v1/util/test" ) @@ -74,3 +78,64 @@ func TestCapabilitiesFile(t *testing.T) { }) } + +func TestCapabilitiesCurrent(t *testing.T) { + tests := []struct { + note string + v0Compatible bool + expFeatures []string + expFutureKeywords []string + }{ + { + note: "current", + expFeatures: []string{ + ast.FeatureRegoV1, + }, + }, + { + note: "current --v0-compatible", + v0Compatible: true, + expFeatures: []string{ + ast.FeatureRefHeadStringPrefixes, + ast.FeatureRefHeads, + ast.FeatureRegoV1Import, + }, + expFutureKeywords: []string{ + "in", + "every", + "contains", + "if", + }, + }, + } + + for _, tc := range tests { + t.Run(tc.note, func(t *testing.T) { + // These are sorted in the output + sort.Strings(tc.expFutureKeywords) + sort.Strings(tc.expFeatures) + + params := capabilitiesParams{ + showCurrent: true, + v0Compatible: tc.v0Compatible, + } + capsStr, err := doCapabilities(params) + if err != nil { + t.Fatal("expected success", err) + } + + caps, err := ast.LoadCapabilitiesJSON(bytes.NewReader([]byte(capsStr))) + if err != nil { + t.Fatal("expected success", err) + } + + if !reflect.DeepEqual(caps.Features, tc.expFeatures) { + t.Errorf("expected features:\n\n%v\n\nbut got:\n\n%v", tc.expFeatures, caps.Features) + } + + if !reflect.DeepEqual(caps.FutureKeywords, tc.expFutureKeywords) { + t.Errorf("expected future keywords:\n\n%v\n\nbut got:\n\n%v", tc.expFutureKeywords, caps.FutureKeywords) + } + }) + } +} diff --git a/cmd/check.go b/cmd/check.go index ebd2b5e768..2db3233999 100644 --- a/cmd/check.go +++ b/cmd/check.go @@ -73,7 +73,7 @@ func checkModules(params checkParams, args []string) error { if params.capabilities.C != nil { capabilities = params.capabilities.C } else { - capabilities = ast.CapabilitiesForThisVersion() + capabilities = ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(params.regoVersion())) } ss, err := loader.Schemas(params.schema.path) @@ -194,6 +194,7 @@ func init() { addCapabilitiesFlag(checkCommand.Flags(), checkParams.capabilities) addSchemaFlags(checkCommand.Flags(), checkParams.schema) addStrictFlag(checkCommand.Flags(), &checkParams.strict, false) + // FIXME: Rename or add new flag with same effect? '--rego-v1' will become even more confusing in 1.0, as what it actually means is check that module is compatible with BOTH v0 and v1. 
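For reference, a minimal Go sketch (not part of the patch) of what the per-version capability sets introduced here are expected to look like, based on the `TestCapabilitiesCurrent` expectations above. The `main` wrapper and the printed values in the comments are illustrative assumptions.

```go
package main

import (
	"fmt"

	"github.com/open-policy-agent/opa/v1/ast"
)

func main() {
	// v1 capabilities: a single "rego_v1" feature and no future keywords,
	// since if/in/contains/every are plain keywords in v1 Rego.
	v1caps := ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1))
	fmt.Println(v1caps.Features)       // [rego_v1]
	fmt.Println(v1caps.FutureKeywords) // []

	// v0 capabilities: the legacy features plus the v0 future keywords,
	// matching what `opa capabilities --current --v0-compatible` should report.
	v0caps := ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0))
	fmt.Println(v0caps.Features)       // [rego_v1_import rule_head_ref_string_prefixes rule_head_refs]
	fmt.Println(v0caps.FutureKeywords) // [contains every if in]
}
```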
addRegoV1FlagWithDescription(checkCommand.Flags(), &checkParams.regoV1, false, "check for Rego v1 compatibility (policies must also be compatible with current OPA version)") addV0CompatibleFlag(checkCommand.Flags(), &checkParams.v0Compatible, false) diff --git a/cmd/check_test.go b/cmd/check_test.go index d851e93240..64f5b7e48d 100644 --- a/cmd/check_test.go +++ b/cmd/check_test.go @@ -54,6 +54,7 @@ p { is_foo("bar") }`, caps: func() string { c := ast.CapabilitiesForThisVersion() c.FutureKeywords = []string{"in"} + c.Features = []string{} j, err := json.Marshal(c) if err != nil { panic(err) @@ -66,6 +67,23 @@ import future.keywords.in p if "opa" in input.tools`, err: "rego_parse_error: unexpected keyword, must be one of [in]", }, + { + note: "future kw NOT defined in caps, rego-v1 feature", + caps: func() string { + c := ast.CapabilitiesForThisVersion() + c.FutureKeywords = []string{"in"} + c.Features = []string{ast.FeatureRegoV1} + j, err := json.Marshal(c) + if err != nil { + panic(err) + } + return string(j) + }(), + policy: `package test +import future.keywords.if +import future.keywords.in +p if "opa" in input.tools`, + }, { note: "future kw are defined in caps", caps: func() string { @@ -109,6 +127,20 @@ import rego.v1`, return string(j) }(), policy: `package test +import rego.v1`, + }, + { + note: "rego.v1 imported AND rego-v1 in capabilities", + caps: func() string { + c := ast.CapabilitiesForThisVersion() + c.Features = []string{ast.FeatureRegoV1} + j, err := json.Marshal(c) + if err != nil { + panic(err) + } + return string(j) + }(), + policy: `package test import rego.v1`, }, } @@ -461,6 +493,170 @@ a contains x if { } } +func TestCheckWithRegoV1Capability(t *testing.T) { + cases := []struct { + note string + v0Compatible bool + capabilities *ast.Capabilities + policy string + expErrs []string + }{ + { + note: "v0 module, v0-compatible, no capabilities", + v0Compatible: true, + policy: `package test +a[x] { + x := 42 +}`, + }, + { + note: "v0 module, v0-compatible, v0 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + policy: `package test +a[x] { + x := 42 +}`, + }, + { + note: "v0 module, v0-compatible, v1 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + policy: `package test +a[x] { + x := 42 +}`, + }, + + { + note: "v0 module, not v0-compatible, no capabilities", + policy: `package test +a[x] { + x := 42 +}`, + expErrs: []string{ + "test.rego:2: rego_parse_error: `if` keyword is required before rule body", + "test.rego:2: rego_parse_error: `contains` keyword is required for partial set rules", + }, + }, + { + note: "v0 module, not v0-compatible, v0 capabilities", + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + policy: `package test +a[x] { + x := 42 +}`, + expErrs: []string{ + "rego_parse_error: illegal capabilities: rego_v1 feature required for parsing v1 Rego", + }, + }, + { + note: "v0 module, not v0-compatible, v1 capabilities", + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + policy: `package test +a[x] { + x := 42 +}`, + expErrs: []string{ + "test.rego:2: rego_parse_error: `if` keyword is required before rule body", + "test.rego:2: rego_parse_error: `contains` keyword is required for partial set rules", + }, + }, + + { + note: "v1 module, v0-compatible, no capabilities", + v0Compatible: true, + policy: `package test +a contains x if 
{ + x := 42 +}`, + expErrs: []string{ + "test.rego:2: rego_parse_error: var cannot be used for rule name", + }, + }, + { + note: "v1 module, v0-compatible, v0 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + policy: `package test +a contains x if { + x := 42 +}`, + expErrs: []string{ + "test.rego:2: rego_parse_error: var cannot be used for rule name", + }, + }, + { + note: "v1 module, v0-compatible, v1 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + policy: `package test +a contains x if { + x := 42 +}`, + expErrs: []string{ + "test.rego:2: rego_parse_error: var cannot be used for rule name", + }, + }, + + { + note: "v1 module, not v0-compatible, no capabilities", + policy: `package test +a contains x if { + x := 42 +}`, + }, + { + note: "v1 module, not v0-compatible, v0 capabilities", + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + policy: `package test +a contains x if { + x := 42 +}`, + expErrs: []string{ + "rego_parse_error: illegal capabilities: rego_v1 feature required for parsing v1 Rego", + }, + }, + { + note: "v1 module, not v0-compatible, v1 capabilities", + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + policy: `package test +a contains x if { + x := 42 +}`, + }, + } + + for _, tc := range cases { + t.Run(tc.note, func(t *testing.T) { + files := map[string]string{ + "test.rego": tc.policy, + } + + test.WithTempFS(files, func(root string) { + params := newCheckParams() + params.v0Compatible = tc.v0Compatible + params.capabilities.C = tc.capabilities + + err := checkModules(params, []string{root}) + switch { + case err != nil && len(tc.expErrs) > 0: + for _, expErr := range tc.expErrs { + if !strings.Contains(err.Error(), expErr) { + t.Fatalf("expected err:\n\n%v\n\ngot:\n\n%v", expErr, err) + } + } + return // don't read back bundle below + case err != nil && len(tc.expErrs) == 0: + t.Fatalf("unexpected error: %v", err) + case err == nil && len(tc.expErrs) > 0: + t.Fatalf("expected error:\n\n%v\n\ngot: none", tc.expErrs) + } + }) + }) + } +} + func TestCheckCompatibleFlags(t *testing.T) { cases := []struct { note string diff --git a/cmd/eval.go b/cmd/eval.go index d6ddff6045..be9261b2c6 100644 --- a/cmd/eval.go +++ b/cmd/eval.go @@ -705,8 +705,10 @@ func setupEval(args []string, params evalCommandParams) (*evalContext, error) { regoArgs = append(regoArgs, rego.BuiltinErrorList(&builtInErrors)) } - if params.capabilities != nil { + if params.capabilities.C != nil { regoArgs = append(regoArgs, rego.Capabilities(params.capabilities.C)) + } else { + regoArgs = append(regoArgs, rego.Capabilities(ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(params.regoVersion())))) } if params.strict { @@ -878,7 +880,7 @@ func generateOptimizedBundle(params evalCommandParams, asBundle bool, filter loa if params.capabilities.C != nil { capabilities = params.capabilities.C } else { - capabilities = ast.CapabilitiesForThisVersion() + capabilities = ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(params.regoVersion())) } compiler := compile.New(). 
diff --git a/cmd/eval_test.go b/cmd/eval_test.go index 86826fb3cc..21cbfb02a9 100755 --- a/cmd/eval_test.go +++ b/cmd/eval_test.go @@ -2907,6 +2907,224 @@ func TestEvalPolicyWithCompatibleFlags(t *testing.T) { } } +func TestEvalPolicyWithRegoV1Capability(t *testing.T) { + tests := []struct { + note string + v0Compatible bool + capabilities *ast.Capabilities + modules map[string]string + expErrs []string + }{ + { + note: "v0 module, v0-compatible, no capabilities", + v0Compatible: true, + modules: map[string]string{ + "test.rego": `package test + allow { + 1 < 2 + }`, + }, + }, + { + note: "v0 module, v0-compatible, v0 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + modules: map[string]string{ + "test.rego": `package test + allow { + 1 < 2 + }`, + }, + }, + { + note: "v0 module, v0-compatible, v1 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + modules: map[string]string{ + "test.rego": `package test + allow { + 1 < 2 + }`, + }, + }, + { + note: "v0 module, not v0-compatible, no capabilities", + v0Compatible: false, + modules: map[string]string{ + "test.rego": `package test + allow { + 1 < 2 + }`, + }, + expErrs: []string{ + "test.rego:2: rego_parse_error: `if` keyword is required before rule body", + }, + }, + { + note: "v0 module, not v0-compatible, v0 capabilities", + v0Compatible: false, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + modules: map[string]string{ + "test.rego": `package test + allow { + 1 < 2 + }`, + }, + expErrs: []string{ + "rego_parse_error: illegal capabilities: rego_v1 feature required for parsing v1 Rego", + }, + }, + { + note: "v0 module, not v0-compatible, v1 capabilities", + v0Compatible: false, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + modules: map[string]string{ + "test.rego": `package test + allow { + 1 < 2 + }`, + }, + expErrs: []string{ + "test.rego:2: rego_parse_error: `if` keyword is required before rule body", + }, + }, + + { + note: "v1 module, v0-compatible, no capabilities", + v0Compatible: true, + modules: map[string]string{ + "test.rego": `package test + allow if { + 1 < 2 + }`, + }, + expErrs: []string{ + "test.rego:2: rego_parse_error: var cannot be used for rule name", + }, + }, + { + note: "v1 module, v0-compatible, v0 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + modules: map[string]string{ + "test.rego": `package test + allow if { + 1 < 2 + }`, + }, + expErrs: []string{ + "test.rego:2: rego_parse_error: var cannot be used for rule name", + }, + }, + { + note: "v1 module, v0-compatible, v1 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + modules: map[string]string{ + "test.rego": `package test + allow if { + 1 < 2 + }`, + }, + expErrs: []string{ + "test.rego:2: rego_parse_error: var cannot be used for rule name", + }, + }, + { + note: "v1 module, not v0-compatible, no capabilities", + v0Compatible: false, + modules: map[string]string{ + "test.rego": `package test + allow if { + 1 < 2 + }`, + }, + }, + { + note: "v1 module, not v0-compatible, v0 capabilities", + v0Compatible: false, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + modules: map[string]string{ + "test.rego": `package test + allow if { 
+ 1 < 2 + }`, + }, + expErrs: []string{ + "rego_parse_error: illegal capabilities: rego_v1 feature required for parsing v1 Rego", + }, + }, + { + note: "v1 module, not v0-compatible, v1 capabilities", + v0Compatible: false, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + modules: map[string]string{ + "test.rego": `package test + allow if { + 1 < 2 + }`, + }, + }, + } + + setup := []struct { + name string + commandParams func(params *evalCommandParams, path string) + }{ + { + name: "Files", + commandParams: func(params *evalCommandParams, path string) { + params.dataPaths = newrepeatedStringFlag([]string{path}) + }, + }, + { + name: "Bundle", + commandParams: func(params *evalCommandParams, path string) { + if err := params.bundlePaths.Set(path); err != nil { + t.Fatal(err) + } + }, + }, + } + + for _, s := range setup { + for _, tc := range tests { + t.Run(fmt.Sprintf("%s: %s", s.name, tc.note), func(t *testing.T) { + test.WithTempFS(tc.modules, func(path string) { + params := newEvalCommandParams() + s.commandParams(¶ms, path) + _ = params.outputFormat.Set(evalPrettyOutput) + params.v0Compatible = tc.v0Compatible + params.capabilities.C = tc.capabilities + + var buf bytes.Buffer + + defined, err := eval([]string{"data.test.allow"}, params, &buf) + + if len(tc.expErrs) > 0 { + if err == nil { + t.Fatal("expected error, got none") + } + + actual := buf.String() + for _, expErr := range tc.expErrs { + if !strings.Contains(actual, expErr) { + t.Fatalf("expected error:\n\n%v\n\ngot\n\n%v", expErr, actual) + } + } + } else { + if err != nil { + t.Fatalf("Unexpected error: %v, buf: %s", err, buf.String()) + } else if !defined { + t.Fatal("expected result to be defined") + } + } + }) + }) + } + } +} + func TestEvalPolicyWithBundleRegoVersion(t *testing.T) { tests := []struct { note string diff --git a/cmd/inspect_test.go b/cmd/inspect_test.go index 4c23f0d5a9..1601acc299 100644 --- a/cmd/inspect_test.go +++ b/cmd/inspect_test.go @@ -54,7 +54,7 @@ func TestDoInspect(t *testing.T) { } res := `{ - "capabilities": {}, + "capabilities": {"features": ["rego_v1"]}, "manifest": {"revision": "rev", "roots": ["foo", "bar", "fuz", "baz", "a", "x"]}, "signatures_config": {}, "namespaces": {"data": ["/data.json"], "data.foo": ["/example/foo.rego"]} @@ -63,7 +63,7 @@ func TestDoInspect(t *testing.T) { exp := util.MustUnmarshalJSON([]byte(res)) result := util.MustUnmarshalJSON(out.Bytes()) if !reflect.DeepEqual(exp, result) { - t.Fatalf("expected inspect output to be %v, got %v", exp, result) + t.Fatalf("expected inspect output to be:\n\n%v\n\ngot:\n\n%v", exp, result) } }) } @@ -1075,6 +1075,9 @@ p if { "type": "function" } } + ], + "features": [ + "rego_v1" ] } }`, @@ -1084,7 +1087,6 @@ p if { note: "known ref replaced inside 'with' stmt", files: [][2]string{ {"/policy.rego", `package test -import rego.v1 foo.bar(_) := false @@ -1113,7 +1115,7 @@ test_p if { }, "capabilities": { "features": [ - "rego_v1_import" + "rego_v1" ] } }`, @@ -1122,7 +1124,6 @@ test_p if { note: "unknown ref replaced inside 'with' stmt", files: [][2]string{ {"/policy.rego", `package test -import rego.v1 p if { data.foo.bar(42) @@ -1149,7 +1150,7 @@ test_p if { }, "capabilities": { "features": [ - "rego_v1_import" + "rego_v1" ] } }`, @@ -1158,7 +1159,6 @@ test_p if { note: "unknown built-in (var) replaced inside 'with' stmt", files: [][2]string{ {"/policy.rego", `package test -import rego.v1 p if { foo(42) @@ -1185,7 +1185,7 @@ test_p if { }, "capabilities": { "features": [ - "rego_v1_import" 
+ "rego_v1" ] } }`, @@ -1194,7 +1194,6 @@ test_p if { note: "unknown built-in (ref) replaced inside 'with' stmt", files: [][2]string{ {"/policy.rego", `package test -import rego.v1 p if { foo.bar(42) @@ -1221,7 +1220,7 @@ test_p if { }, "capabilities": { "features": [ - "rego_v1_import" + "rego_v1" ] } }`, @@ -1230,7 +1229,6 @@ test_p if { note: "call replaced by unknown data ref inside 'with' stmt", files: [][2]string{ {"/policy.rego", `package test -import rego.v1 p if { foo(42) @@ -1277,7 +1275,7 @@ test_p if { } ], "features": [ - "rego_v1_import" + "rego_v1" ] } }`, @@ -1286,7 +1284,6 @@ test_p if { note: "call replaced by unknown built-in (var) inside 'with' stmt", files: [][2]string{ {"/policy.rego", `package test -import rego.v1 p if { foo(42) @@ -1314,7 +1311,7 @@ test_p if { }, "capabilities": { "features": [ - "rego_v1_import" + "rego_v1" ] } }`, @@ -1323,7 +1320,6 @@ test_p if { note: "call replaced by unknown built-in (ref) inside 'with' stmt", files: [][2]string{ {"/policy.rego", `package test -import rego.v1 p if { foo(42) @@ -1371,7 +1367,7 @@ test_p if { } ], "features": [ - "rego_v1_import" + "rego_v1" ] } }`, @@ -1509,6 +1505,9 @@ p if { }, "infix": "==" } + ], + "features": [ + "rego_v1" ] } }`) diff --git a/cmd/test.go b/cmd/test.go index c7db7eae6e..f271395c6c 100644 --- a/cmd/test.go +++ b/cmd/test.go @@ -116,11 +116,17 @@ func opaTest(args []string, testParams testCommandParams) (int, error) { var bundles map[string]*bundle.Bundle var store storage.Store + popts := ast.ParserOptions{ + RegoVersion: testParams.RegoVersion(), + Capabilities: testParams.capabilities.C, + ProcessAnnotation: true, + } + if testParams.bundleMode { - bundles, err = tester.LoadBundlesWithRegoVersion(args, filter.Apply, testParams.RegoVersion()) + bundles, err = tester.LoadBundlesWithParserOptions(args, filter.Apply, popts) store = inmem.NewWithOpts(inmem.OptRoundTripOnWrite(false)) } else { - modules, store, err = tester.LoadWithRegoVersion(args, filter.Apply, testParams.RegoVersion()) + modules, store, err = tester.LoadWithParserOptions(args, filter.Apply, popts) } if err != nil { diff --git a/cmd/test_test.go b/cmd/test_test.go index ead9b48521..b8f438f796 100644 --- a/cmd/test_test.go +++ b/cmd/test_test.go @@ -2578,6 +2578,318 @@ test_l if { } } +func TestRunWithRegoV1Capability(t *testing.T) { + tests := []struct { + note string + v0Compatible bool + capabilities *ast.Capabilities + files map[string]string + expErrs []string + }{ + { + note: "v0 module, v0-compatible, no capabilities", + v0Compatible: true, + files: map[string]string{ + "/test.rego": `package test + +l1 := {1, 3, 5} +l2[v] { + v := l1[_] +} + +test_l { + l1 == l2 +}`, + }, + }, + { + note: "v0 module, v0-compatible, v0 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + files: map[string]string{ + "/test.rego": `package test + +l1 := {1, 3, 5} +l2[v] { + v := l1[_] +} + +test_l { + l1 == l2 +}`, + }, + }, + { + note: "v0 module, v0-compatible, v1 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + files: map[string]string{ + "/test.rego": `package test + +l1 := {1, 3, 5} +l2[v] { + v := l1[_] +} + +test_l { + l1 == l2 +}`, + }, + }, + + { + note: "v0 module, not v0-compatible, no capabilities", + files: map[string]string{ + "/test.rego": `package test + +l1 := {1, 3, 5} +l2[v] { + v := l1[_] +} + +test_l { + l1 == l2 +}`, + }, + expErrs: []string{ + "test.rego:4: 
rego_parse_error: `if` keyword is required before rule body", + "test.rego:4: rego_parse_error: `contains` keyword is required for partial set rules", + "test.rego:8: rego_parse_error: `if` keyword is required before rule body", + }, + }, + { + note: "v0 module, not v0-compatible, v0 capabilities", + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + files: map[string]string{ + "/test.rego": `package test + +l1 := {1, 3, 5} +l2[v] { + v := l1[_] +} + +test_l { + l1 == l2 +}`, + }, + expErrs: []string{ + "rego_parse_error: illegal capabilities: rego_v1 feature required for parsing v1 Rego", + }, + }, + { + note: "v0 module, not v0-compatible, v1 capabilities", + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + files: map[string]string{ + "/test.rego": `package test + +l1 := {1, 3, 5} +l2[v] { + v := l1[_] +} + +test_l { + l1 == l2 +}`, + }, + expErrs: []string{ + "test.rego:4: rego_parse_error: `if` keyword is required before rule body", + "test.rego:4: rego_parse_error: `contains` keyword is required for partial set rules", + "test.rego:8: rego_parse_error: `if` keyword is required before rule body", + }, + }, + + { + note: "v1 module, v0-compatible, no capabilities", + v0Compatible: true, + files: map[string]string{ + "/test.rego": `package test + +l1 := {1, 3, 5} +l2 contains v if { + v := l1[_] +} + +test_l if { + l1 == l2 +}`, + }, + expErrs: []string{ + "test.rego:4: rego_parse_error: var cannot be used for rule name", + }, + }, + { + note: "v1 module, v0-compatible, v0 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + files: map[string]string{ + "/test.rego": `package test + +l1 := {1, 3, 5} +l2 contains v if { + v := l1[_] +} + +test_l if { + l1 == l2 +}`, + }, + expErrs: []string{ + "test.rego:4: rego_parse_error: var cannot be used for rule name", + }, + }, + { + note: "v1 module, v0-compatible, v1 capabilities", + v0Compatible: true, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + files: map[string]string{ + "/test.rego": `package test + +l1 := {1, 3, 5} +l2 contains v if { + v := l1[_] +} + +test_l if { + l1 == l2 +}`, + }, + expErrs: []string{ + "test.rego:4: rego_parse_error: var cannot be used for rule name", + }, + }, + + { + note: "v1 module, not v0-compatible, no capabilities", + files: map[string]string{ + "/test.rego": `package test + +l1 := {1, 3, 5} +l2 contains v if { + v := l1[_] +} + +test_l if { + l1 == l2 +}`, + }, + }, + { + note: "v1 module, not v0-compatible, v0 capabilities", + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + files: map[string]string{ + "/test.rego": `package test + +l1 := {1, 3, 5} +l2 contains v if { + v := l1[_] +} + +test_l if { + l1 == l2 +}`, + }, + expErrs: []string{ + "rego_parse_error: illegal capabilities: rego_v1 feature required for parsing v1 Rego", + }, + }, + { + note: "v1 module, not v0-compatible, v1 capabilities", + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + files: map[string]string{ + "/test.rego": `package test + +l1 := {1, 3, 5} +l2 contains v if { + v := l1[_] +} + +test_l if { + l1 == l2 +}`, + }, + }, + } + + loadTypes := []loadType{loadFile, loadBundle, loadTarball} + + for _, tc := range tests { + for _, loadType := range loadTypes { + t.Run(fmt.Sprintf("%s (%s)", tc.note, loadType), func(t *testing.T) { + var files map[string]string + if 
loadType != loadTarball { + files = tc.files + } + test.WithTempFS(files, func(root string) { + if loadType == loadTarball { + f, err := os.Create(filepath.Join(root, "bundle.tar.gz")) + if err != nil { + t.Fatal(err) + } + + testBundle := bundle.Bundle{ + Data: map[string]interface{}{}, + } + for k, v := range tc.files { + testBundle.Modules = append(testBundle.Modules, bundle.ModuleFile{ + Path: k, + Raw: []byte(v), + }) + } + + if err := bundle.Write(f, testBundle); err != nil { + t.Fatal(err) + } + } + + var buf bytes.Buffer + var errBuf bytes.Buffer + + testParams := newTestCommandParams() + testParams.bundleMode = loadType == loadBundle + testParams.count = 1 + testParams.output = &buf + testParams.errOutput = &errBuf + testParams.v0Compatible = tc.v0Compatible + testParams.capabilities.C = tc.capabilities + + var paths []string + if loadType == loadTarball { + paths = []string{filepath.Join(root, "bundle.tar.gz")} + } else { + paths = []string{root} + } + + exitCode, _ := opaTest(paths, testParams) + if len(tc.expErrs) > 0 { + if exitCode == 0 { + t.Fatalf("expected non-zero exit code") + } + + for _, expErr := range tc.expErrs { + if actual := errBuf.String(); !strings.Contains(actual, expErr) { + t.Fatalf("expected error output to contain:\n\n%q\n\nbut got:\n\n%q", expErr, actual) + } + } + } else { + if exitCode != 0 { + t.Fatalf("unexpected exit code: %d", exitCode) + } + + if errBuf.Len() > 0 { + t.Fatalf("expected no error output but got:\n\n%q", buf.String()) + } + + expected := "PASS: 1/1" + if actual := buf.String(); !strings.Contains(actual, expected) { + t.Fatalf("expected output to contain:\n\n%s\n\nbut got:\n\n%q", expected, actual) + } + } + }) + }) + } + } +} + func TestRun_CompatibleFlags(t *testing.T) { tests := []struct { note string diff --git a/v1/ast/capabilities.go b/v1/ast/capabilities.go index b1491d7df4..e7d561d9e8 100644 --- a/v1/ast/capabilities.go +++ b/v1/ast/capabilities.go @@ -52,6 +52,7 @@ var minVersionIndex = func() VersionIndex { // heads, they wouldn't be able to parse them. const FeatureRefHeadStringPrefixes = "rule_head_ref_string_prefixes" const FeatureRefHeads = "rule_head_refs" +const FeatureRegoV1 = "rego_v1" const FeatureRegoV1Import = "rego_v1_import" // Capabilities defines a structure containing data that describes the capabilities @@ -83,8 +84,30 @@ type WasmABIVersion struct { Minor int `json:"minor_version"` } +type CapabilitiesOptions struct { + regoVersion RegoVersion +} + +func newCapabilitiesOptions(opts []CapabilitiesOption) CapabilitiesOptions { + co := CapabilitiesOptions{} + for _, opt := range opts { + opt(&co) + } + return co +} + +type CapabilitiesOption func(*CapabilitiesOptions) + +func CapabilitiesRegoVersion(regoVersion RegoVersion) CapabilitiesOption { + return func(o *CapabilitiesOptions) { + o.regoVersion = regoVersion + } +} + // CapabilitiesForThisVersion returns the capabilities of this version of OPA. 
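As a companion to the build tests earlier in this diff, a sketch (assumed file name and usage, not part of the patch) of how the `CapabilitiesRegoVersion` option and the new `FeatureRegoV1` constant can be combined into a capabilities file that keeps v0 as the default parse target while still permitting v1 modules:

```go
package main

import (
	"encoding/json"
	"os"

	"github.com/open-policy-agent/opa/v1/ast"
)

func main() {
	// Start from v0 capabilities (v0 future keywords, legacy features) and
	// additionally declare the rego_v1 feature so v1 modules can be parsed too.
	caps := ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0))
	caps.Features = append(caps.Features, ast.FeatureRegoV1)

	bs, err := json.MarshalIndent(caps, "", "  ")
	if err != nil {
		panic(err)
	}

	// The resulting file can be passed via --capabilities, e.g. to
	// `opa build --v0-compatible`, when bundles mix rego versions.
	if err := os.WriteFile("capabilities.json", bs, 0o644); err != nil {
		panic(err)
	}
}
```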
-func CapabilitiesForThisVersion() *Capabilities { +func CapabilitiesForThisVersion(opts ...CapabilitiesOption) *Capabilities { + co := newCapabilitiesOptions(opts) + f := &Capabilities{} for _, vers := range capabilities.ABIVersions() { @@ -97,17 +120,29 @@ func CapabilitiesForThisVersion() *Capabilities { return f.Builtins[i].Name < f.Builtins[j].Name }) - for kw := range futureKeywords { - f.FutureKeywords = append(f.FutureKeywords, kw) - } - sort.Strings(f.FutureKeywords) + if co.regoVersion == RegoV0 || co.regoVersion == RegoV0CompatV1 { + for kw := range allFutureKeywords { + f.FutureKeywords = append(f.FutureKeywords, kw) + } - f.Features = []string{ - FeatureRefHeadStringPrefixes, - FeatureRefHeads, - FeatureRegoV1Import, + f.Features = []string{ + FeatureRefHeadStringPrefixes, + FeatureRefHeads, + FeatureRegoV1Import, + } + } else { + for kw := range futureKeywords { + f.FutureKeywords = append(f.FutureKeywords, kw) + } + + f.Features = []string{ + FeatureRegoV1, + } } + sort.Strings(f.FutureKeywords) + sort.Strings(f.Features) + return f } diff --git a/v1/ast/capabilities_test.go b/v1/ast/capabilities_test.go index 4ac499fa0a..29fab8df9f 100644 --- a/v1/ast/capabilities_test.go +++ b/v1/ast/capabilities_test.go @@ -9,25 +9,40 @@ import ( func TestParserCatchesIllegalCapabilities(t *testing.T) { tests := []struct { - note string - regoVersion RegoVersion + note string + regoVersion RegoVersion + capabilities Capabilities + expErr string }{ { - note: "v0", + note: "v0, bad future keyword", regoVersion: RegoV0, + capabilities: Capabilities{ + FutureKeywords: []string{"deadbeef"}, + }, + expErr: "illegal capabilities: unknown keyword: deadbeef", }, { - note: "v1", + note: "v1, bad future keyword", regoVersion: RegoV1, + capabilities: Capabilities{ + Features: []string{FeatureRegoV1}, + FutureKeywords: []string{"deadbeef"}, + }, + expErr: "illegal capabilities: unknown keyword: deadbeef", + }, + { + note: "v1, no rego_v1 feature", + regoVersion: RegoV1, + capabilities: Capabilities{}, + expErr: "illegal capabilities: rego_v1 feature required for parsing v1 Rego", }, } for _, tc := range tests { t.Run(tc.note, func(t *testing.T) { var opts ParserOptions - opts.Capabilities = &Capabilities{ - FutureKeywords: []string{"deadbeef"}, - } + opts.Capabilities = &tc.capabilities opts.RegoVersion = tc.regoVersion @@ -36,7 +51,7 @@ func TestParserCatchesIllegalCapabilities(t *testing.T) { t.Fatal("expected error") } else if errs, ok := err.(Errors); !ok || len(errs) != 1 { t.Fatal("expected exactly one error but got:", err) - } else if errs[0].Code != ParseErr || errs[0].Message != "illegal capabilities: unknown keyword: deadbeef" { + } else if errs[0].Code != ParseErr || errs[0].Message != tc.expErr { t.Fatal("unexpected error:", err) } }) diff --git a/v1/ast/compile.go b/v1/ast/compile.go index 91876a55e3..5f78b0da1f 100644 --- a/v1/ast/compile.go +++ b/v1/ast/compile.go @@ -117,7 +117,7 @@ type Compiler struct { // with the key being the generated name and value being the original. RewrittenVars map[Var]Var - // Capabliities required by the modules that were compiled. + // Capabilities required by the modules that were compiled. 
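The `Required` field documented here is what the reworked `buildRequiredCapabilities` below populates; a short sketch (illustrative file name and policy) of how a caller would observe the new aggregate feature for a v1 module:

```go
package main

import (
	"fmt"

	"github.com/open-policy-agent/opa/v1/ast"
)

func main() {
	module, err := ast.ParseModuleWithOpts("example.rego", `package example

p contains x if { x := 42 }
`, ast.ParserOptions{RegoVersion: ast.RegoV1})
	if err != nil {
		panic(err)
	}

	compiler := ast.NewCompiler()
	compiler.Compile(map[string]*ast.Module{"example.rego": module})
	if compiler.Failed() {
		panic(compiler.Errors)
	}

	// With this change, a v1 module is expected to report the aggregate
	// "rego_v1" feature rather than the individual ref-head features.
	fmt.Println(compiler.Required.Features) // [rego_v1]
}
```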
Required *Capabilities localvargen *localVarGenerator @@ -332,7 +332,7 @@ func NewCompiler() *Compiler { {"InitLocalVarGen", "compile_stage_init_local_var_gen", c.initLocalVarGen}, {"RewriteRuleHeadRefs", "compile_stage_rewrite_rule_head_refs", c.rewriteRuleHeadRefs}, {"CheckKeywordOverrides", "compile_stage_check_keyword_overrides", c.checkKeywordOverrides}, - {"CheckDuplicateImports", "compile_stage_check_duplicate_imports", c.checkDuplicateImports}, + {"CheckDuplicateImports", "compile_stage_check_imports", c.checkImports}, {"RemoveImports", "compile_stage_remove_imports", c.removeImports}, {"SetModuleTree", "compile_stage_set_module_tree", c.setModuleTree}, {"SetRuleTree", "compile_stage_set_rule_tree", c.setRuleTree}, // depends on RewriteRuleHeadRefs @@ -971,21 +971,46 @@ func (c *Compiler) buildRequiredCapabilities() { features := map[string]struct{}{} // extract required keywords from modules + keywords := map[string]struct{}{} futureKeywordsPrefix := Ref{FutureRootDocument, StringTerm("keywords")} for _, name := range c.sorted { for _, imp := range c.imports[name] { + mod := c.Modules[name] path := imp.Path.Value.(Ref) switch { case path.Equal(RegoV1CompatibleRef): - features[FeatureRegoV1Import] = struct{}{} + if !c.moduleIsRegoV1(mod) { + features[FeatureRegoV1Import] = struct{}{} + } case path.HasPrefix(futureKeywordsPrefix): if len(path) == 2 { - for kw := range futureKeywords { - keywords[kw] = struct{}{} + if c.moduleIsRegoV1(mod) { + for kw := range futureKeywords { + keywords[kw] = struct{}{} + } + } else { + for kw := range allFutureKeywords { + keywords[kw] = struct{}{} + } } } else { - keywords[string(path[2].Value.(String))] = struct{}{} + kw := string(path[2].Value.(String)) + if c.moduleIsRegoV1(mod) { + for allowedKw := range futureKeywords { + if kw == allowedKw { + keywords[kw] = struct{}{} + break + } + } + } else { + for allowedKw := range allFutureKeywords { + if kw == allowedKw { + keywords[kw] = struct{}{} + break + } + } + } } } } @@ -996,13 +1021,19 @@ func (c *Compiler) buildRequiredCapabilities() { // extract required features from modules for _, name := range c.sorted { - for _, rule := range c.Modules[name].Rules { - refLen := len(rule.Head.Reference) - if refLen >= 3 { - if refLen > len(rule.Head.Reference.ConstantPrefix()) { - features[FeatureRefHeads] = struct{}{} - } else { - features[FeatureRefHeadStringPrefixes] = struct{}{} + mod := c.Modules[name] + + if c.moduleIsRegoV1(mod) { + features[FeatureRegoV1] = struct{}{} + } else { + for _, rule := range mod.Rules { + refLen := len(rule.Head.Reference) + if refLen >= 3 { + if refLen > len(rule.Head.Reference.ConstantPrefix()) { + features[FeatureRefHeads] = struct{}{} + } else { + features[FeatureRefHeadStringPrefixes] = struct{}{} + } } } } @@ -1725,12 +1756,22 @@ func (c *Compiler) GetAnnotationSet() *AnnotationSet { return c.annotationSet } -func (c *Compiler) checkDuplicateImports() { +func (c *Compiler) checkImports() { modules := make([]*Module, 0, len(c.Modules)) + supportsRegoV1Import := c.capabilities.ContainsFeature(FeatureRegoV1Import) || + c.capabilities.ContainsFeature(FeatureRegoV1) + for _, name := range c.sorted { mod := c.Modules[name] - if c.strict || c.moduleIsRegoV1(mod) { + + for _, imp := range mod.Imports { + if !supportsRegoV1Import && Compare(imp.Path, RegoV1CompatibleRef) == 0 { + c.err(NewError(CompileErr, imp.Loc(), "rego.v1 import is not supported")) + } + } + + if c.strict || c.moduleIsRegoV1Compatible(mod) { modules = append(modules, mod) } } @@ -1744,7 +1785,7 @@ func 
(c *Compiler) checkDuplicateImports() { func (c *Compiler) checkKeywordOverrides() { for _, name := range c.sorted { mod := c.Modules[name] - if c.strict || c.moduleIsRegoV1(mod) { + if c.strict || c.moduleIsRegoV1Compatible(mod) { errs := checkRootDocumentOverrides(mod) for _, err := range errs { c.err(err) @@ -1756,6 +1797,23 @@ func (c *Compiler) checkKeywordOverrides() { func (c *Compiler) moduleIsRegoV1(mod *Module) bool { if mod.regoVersion == RegoUndefined { switch c.defaultRegoVersion { + case RegoUndefined: + c.err(NewError(CompileErr, mod.Package.Loc(), "cannot determine rego version for module")) + return false + case RegoV1: + return true + } + return false + } + return mod.regoVersion == RegoV1 +} + +func (c *Compiler) moduleIsRegoV1Compatible(mod *Module) bool { + if mod.regoVersion == RegoUndefined { + switch c.defaultRegoVersion { + case RegoUndefined: + c.err(NewError(CompileErr, mod.Package.Loc(), "cannot determine rego version for module")) + return false case RegoV1, RegoV0CompatV1: return true } @@ -1896,6 +1954,9 @@ func (c *Compiler) rewriteRuleHeadRefs() { cannotSpeakStringPrefixRefs = false case FeatureRefHeads: cannotSpeakGeneralRefs = false + case FeatureRegoV1: + cannotSpeakStringPrefixRefs = false + cannotSpeakGeneralRefs = false } } @@ -5800,7 +5861,7 @@ func safetyErrorSlice(unsafe unsafeVars, rewritten map[Var]Var) (result Errors) v = w } if !v.IsGenerated() { - if _, ok := futureKeywords[string(v)]; ok { + if _, ok := allFutureKeywords[string(v)]; ok { result = append(result, NewError(UnsafeVarErr, pair.Loc, "var %[1]v is unsafe (hint: `import future.keywords.%[1]v` to import a future keyword)", v)) continue diff --git a/v1/ast/compile_test.go b/v1/ast/compile_test.go index a76bd277d9..1d52df8d8c 100644 --- a/v1/ast/compile_test.go +++ b/v1/ast/compile_test.go @@ -9463,9 +9463,10 @@ func TestCompilerBuildRequiredCapabilities(t *testing.T) { `, opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV1}}, builtins: []string{"eq", "gt"}, + features: []string{"rego_v1"}, }, { - note: "rego.v1 import", + note: "rego.v1 import, v0 module", module: ` package x @@ -9473,19 +9474,64 @@ func TestCompilerBuildRequiredCapabilities(t *testing.T) { p if { true } `, + opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV0}}, features: []string{"rego_v1_import"}, }, { - note: "future.keywords wildcard", + note: "rego.v1 import, v1 module", + module: ` + package x + + import rego.v1 + + p if { true } + `, + opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV1}}, + features: []string{"rego_v1"}, + }, + { + note: "rego.v1 import, default rego-version module (v1)", + module: ` + package x + + import rego.v1 + + p if { true } + `, + opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV1}}, + features: []string{"rego_v1"}, + }, + { + note: "future.keywords wildcard, v0 module", module: ` package x import future.keywords `, + opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV0}}, keywords: []string{"contains", "every", "if", "in"}, }, { - note: "future.keywords specific", + note: "future.keywords wildcard, v1 module", + module: ` + package x + + import future.keywords + `, + opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV1}}, + features: []string{"rego_v1"}, + }, + { + note: "future.keywords wildcard, default rego-version module (v1)", + module: ` + package x + + import future.keywords + `, + features: []string{"rego_v1"}, + }, + { + note: "future.keywords specific, v0 module", module: ` package x @@ 
-9494,8 +9540,34 @@ func TestCompilerBuildRequiredCapabilities(t *testing.T) { import future.keywords.contains import future.keywords.every `, + opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV0}}, keywords: []string{"contains", "every", "if", "in"}, }, + { + note: "future.keywords specific, v1 module", + module: ` + package x + + import future.keywords.in + import future.keywords.if + import future.keywords.contains + import future.keywords.every + `, + opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV1}}, + features: []string{"rego_v1"}, + }, + { + note: "future.keywords specific, default rego-version module (v1)", + module: ` + package x + + import future.keywords.in + import future.keywords.if + import future.keywords.contains + import future.keywords.every + `, + features: []string{"rego_v1"}, + }, { note: "rewriting erases assignment", module: ` @@ -9505,6 +9577,7 @@ func TestCompilerBuildRequiredCapabilities(t *testing.T) { `, opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV1}}, builtins: []string{"assign", "eq"}, + features: []string{"rego_v1"}, }, { note: "rewriting erases equals", @@ -9515,6 +9588,7 @@ func TestCompilerBuildRequiredCapabilities(t *testing.T) { `, opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV1}}, builtins: []string{"eq", "equal"}, + features: []string{"rego_v1"}, }, { note: "rewriting erases print", @@ -9525,6 +9599,7 @@ func TestCompilerBuildRequiredCapabilities(t *testing.T) { `, opts: CompileOpts{EnablePrintStatements: true, ParserOptions: ParserOptions{RegoVersion: RegoV1}}, builtins: []string{"eq", "internal.print", "print"}, + features: []string{"rego_v1"}, }, { @@ -9536,16 +9611,69 @@ func TestCompilerBuildRequiredCapabilities(t *testing.T) { `, opts: CompileOpts{EnablePrintStatements: false, ParserOptions: ParserOptions{RegoVersion: RegoV1}}, builtins: []string{"print"}, // only print required because compiler will replace with true + features: []string{"rego_v1"}, }, { - note: "dots in the head", + note: "dots in the head, v0 module", module: ` package x a.b.c := 7 `, + opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV0}}, features: []string{"rule_head_ref_string_prefixes"}, }, + { + note: "dots in the head, v1 module", + module: ` + package x + + a.b.c := 7 + `, + opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV1}}, + features: []string{"rego_v1"}, // rego_v1 includes rule_head_ref_string_prefixes + }, + { + note: "dots in the head, default rego-version module (v1)", + module: ` + package x + + a.b.c := 7 + `, + features: []string{"rego_v1"}, // rego_v1 includes rule_head_ref_string_prefixes + }, + { + note: "dynamic dots in the head, v0 module", + module: ` + package x + + a[x].c[y] := z { x := "b"; y := "c"; z := "d" } + `, + opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV0}}, + builtins: []string{"assign", "eq"}, + features: []string{"rule_head_refs"}, + }, + { + note: "dynamic dots in the head, v1 module", + module: ` + package x + + a[x].c[y] := z if { x := "b"; y := "c"; z := "d" } + `, + opts: CompileOpts{ParserOptions: ParserOptions{RegoVersion: RegoV1}}, + builtins: []string{"assign", "eq"}, + features: []string{"rego_v1"}, // rego_v1 includes rule_head_refs + }, + { + note: "dynamic dots in the head, default rego-version module (v1)", + module: ` + package x + + a[x].c[y] := z if { x := "b"; y := "c"; z := "d" } + `, + builtins: []string{"assign", "eq"}, + features: []string{"rego_v1"}, // rego_v1 includes rule_head_refs + }, 
} for _, tc := range tests { @@ -10273,6 +10401,14 @@ func TestCompilerCapabilitiesFeatures(t *testing.T) { module: `package test p.q.r := 42`, }, + { + note: "rego-v1 feature, ref-head rule", + features: []string{ + FeatureRegoV1, + }, + module: `package test + p.q.r := 42`, + }, { note: "string-prefix-ref-head feature, general-ref-head rule", features: []string{ @@ -10290,6 +10426,14 @@ func TestCompilerCapabilitiesFeatures(t *testing.T) { module: `package test p[q].r[s] := 42 if { q := "foo"; s := "bar" }`, }, + { + note: "rego-v1 feature, general-ref-head rule", + features: []string{ + FeatureRegoV1, + }, + module: `package test + p[q].r[s] := 42 if { q := "foo"; s := "bar" }`, + }, { note: "string-prefix-ref-head & ref-head features, general-ref-head rule", features: []string{ @@ -10299,6 +10443,16 @@ func TestCompilerCapabilitiesFeatures(t *testing.T) { module: `package test p[q].r[s] := 42 if { q := "foo"; s := "bar" }`, }, + { + note: "string-prefix-ref-head & ref-head & rego-v1 features, general-ref-head rule", + features: []string{ + FeatureRefHeadStringPrefixes, + FeatureRefHeads, + FeatureRegoV1, + }, + module: `package test + p[q].r[s] := 42 if { q := "foo"; s := "bar" }`, + }, { note: "string-prefix-ref-head & ref-head features, ref-head rule", features: []string{ @@ -10308,6 +10462,16 @@ func TestCompilerCapabilitiesFeatures(t *testing.T) { module: `package test p.q.r := 42`, }, + { + note: "string-prefix-ref-head & ref-head & rego-v1 features, ref-head rule", + features: []string{ + FeatureRefHeadStringPrefixes, + FeatureRefHeads, + FeatureRegoV1, + }, + module: `package test + p.q.r := 42`, + }, { note: "no features, string-prefix-ref-head with contains kw", features: []string{}, @@ -10334,6 +10498,15 @@ func TestCompilerCapabilitiesFeatures(t *testing.T) { import future.keywords.contains p.x contains 1`, }, + { + note: "rego-v1 feature, string-prefix-ref-head with contains kw", + features: []string{ + FeatureRegoV1, + }, + module: `package test + import future.keywords.contains + p.x contains 1`, + }, { note: "no features, general-ref-head with contains kw", @@ -10362,6 +10535,41 @@ func TestCompilerCapabilitiesFeatures(t *testing.T) { import future.keywords p[x] contains 1 if x = "foo"`, }, + { + note: "rego-v1 feature, general-ref-head with contains kw", + features: []string{ + FeatureRegoV1, + }, + module: `package test + import future.keywords + p[x] contains 1 if x = "foo"`, + }, + + { + note: "no features, rego.v1 import", + module: `package test + import rego.v1 + p if { true }`, + expectedErr: "rego_compile_error: rego.v1 import is not supported", + }, + { + note: "rego-v1-import feature, rego.v1 import", + module: `package test + import rego.v1 + p if { true }`, + features: []string{ + FeatureRegoV1Import, + }, + }, + { + note: "rego-v1-import feature, rego.v1 import", + module: `package test + import rego.v1 + p if { true }`, + features: []string{ + FeatureRegoV1, + }, + }, } for _, tc := range cases { @@ -10369,8 +10577,11 @@ func TestCompilerCapabilitiesFeatures(t *testing.T) { capabilities := CapabilitiesForThisVersion() capabilities.Features = tc.features + // Modules are parsed with full set of capabilities + mod := module(tc.module) + compiler := NewCompiler().WithCapabilities(capabilities) - compiler.Compile(map[string]*Module{"test": module(tc.module)}) + compiler.Compile(map[string]*Module{"test": mod}) if tc.expectedErr != "" { if !compiler.Failed() { t.Fatal("expected error but got success") diff --git a/v1/ast/parser.go b/v1/ast/parser.go index 
a537d8b67d..6639ca990b 100644 --- a/v1/ast/parser.go +++ b/v1/ast/parser.go @@ -283,7 +283,7 @@ func (p *Parser) parsedTermCachePush(t *Term, s0 *state) { func (p *Parser) futureParser() *Parser { q := *p q.s = p.save() - q.s.s = p.s.s.WithKeywords(futureKeywords) + q.s.s = p.s.s.WithKeywords(allFutureKeywords) q.cache = parsedTermCache{} return &q } @@ -301,7 +301,7 @@ func (p *Parser) presentParser() (*Parser, map[string]tokens.Token) { var cpy map[string]tokens.Token q := *p q.s = p.save() - q.s.s, cpy = p.s.s.WithoutKeywords(futureKeywords) + q.s.s, cpy = p.s.s.WithoutKeywords(allFutureKeywords) q.cache = parsedTermCache{} return &q, cpy } @@ -312,30 +312,45 @@ func (p *Parser) presentParser() (*Parser, map[string]tokens.Token) { func (p *Parser) Parse() ([]Statement, []*Comment, Errors) { if p.po.Capabilities == nil { - p.po.Capabilities = CapabilitiesForThisVersion() + p.po.Capabilities = CapabilitiesForThisVersion(CapabilitiesRegoVersion(p.po.RegoVersion)) } allowedFutureKeywords := map[string]tokens.Token{} if p.po.EffectiveRegoVersion() == RegoV1 { - // RegoV1 includes all future keywords in the default language definition - for k, v := range futureKeywords { + if !p.po.Capabilities.ContainsFeature(FeatureRegoV1) { + return nil, nil, Errors{ + &Error{ + Code: ParseErr, + Message: "illegal capabilities: rego_v1 feature required for parsing v1 Rego", + Location: nil, + }, + } + } + + // rego-v1 includes all v0 future keywords in the default language definition + for k, v := range futureKeywordsV0 { allowedFutureKeywords[k] = v } - // For sake of error reporting, we still need to check that keywords in capabilities are known, for _, kw := range p.po.Capabilities.FutureKeywords { - if _, ok := futureKeywords[kw]; !ok { - return nil, nil, Errors{ - &Error{ - Code: ParseErr, - Message: fmt.Sprintf("illegal capabilities: unknown keyword: %v", kw), - Location: nil, - }, + if tok, ok := futureKeywords[kw]; ok { + allowedFutureKeywords[kw] = tok + } else { + // For sake of error reporting, we still need to check that keywords in capabilities are known in v0 + if _, ok := futureKeywordsV0[kw]; !ok { + return nil, nil, Errors{ + &Error{ + Code: ParseErr, + Message: fmt.Sprintf("illegal capabilities: unknown keyword: %v", kw), + Location: nil, + }, + } } } } - // and that explicitly requested future keywords are known. + + // Check that explicitly requested future keywords are known. 
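The guard above is what produces the new error asserted throughout the CLI tests; a minimal sketch (module content and `main` wrapper are assumptions) of triggering it directly via the parser:

```go
package main

import (
	"fmt"

	"github.com/open-policy-agent/opa/v1/ast"
)

func main() {
	// Parsing v1 Rego now requires capabilities that declare the rego_v1 feature.
	_, err := ast.ParseModuleWithOpts("policy.rego", "package p", ast.ParserOptions{
		RegoVersion:  ast.RegoV1,
		Capabilities: &ast.Capabilities{}, // no features declared
	})
	fmt.Println(err) // ... illegal capabilities: rego_v1 feature required for parsing v1 Rego

	// Declaring the feature (or using version-matched capabilities) succeeds.
	_, err = ast.ParseModuleWithOpts("policy.rego", "package p", ast.ParserOptions{
		RegoVersion:  ast.RegoV1,
		Capabilities: &ast.Capabilities{Features: []string{ast.FeatureRegoV1}},
	})
	fmt.Println(err) // <nil>
}
```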
for _, kw := range p.po.FutureKeywords { if _, ok := allowedFutureKeywords[kw]; !ok { return nil, nil, Errors{ @@ -350,7 +365,7 @@ func (p *Parser) Parse() ([]Statement, []*Comment, Errors) { } else { for _, kw := range p.po.Capabilities.FutureKeywords { var ok bool - allowedFutureKeywords[kw], ok = futureKeywords[kw] + allowedFutureKeywords[kw], ok = allFutureKeywords[kw] if !ok { return nil, nil, Errors{ &Error{ @@ -361,6 +376,13 @@ func (p *Parser) Parse() ([]Statement, []*Comment, Errors) { } } } + + if p.po.Capabilities.ContainsFeature(FeatureRegoV1) { + // rego-v1 includes all v0 future keywords in the default language definition + for k, v := range futureKeywordsV0 { + allowedFutureKeywords[k] = v + } + } } var err error @@ -2145,8 +2167,7 @@ func (p *Parser) illegal(note string, a ...interface{}) { tokType := "token" if tokens.IsKeyword(p.s.tok) { tokType = "keyword" - } - if _, ok := futureKeywords[p.s.tok.String()]; ok { + } else if _, ok := allFutureKeywords[p.s.tok.String()]; ok { tokType = "keyword" } @@ -2641,16 +2662,34 @@ func convertYAMLMapKeyTypes(x any, path []string) (any, error) { // futureKeywords is the source of truth for future keywords that will // eventually become standard keywords inside of Rego. -var futureKeywords = map[string]tokens.Token{ +var futureKeywords = map[string]tokens.Token{} + +// futureKeywordsV0 is the source of truth for future keywords that were +// not yet a standard part of Rego in v0, and required importing. +var futureKeywordsV0 = map[string]tokens.Token{ "in": tokens.In, "every": tokens.Every, "contains": tokens.Contains, "if": tokens.If, } +var allFutureKeywords map[string]tokens.Token + func IsFutureKeyword(s string) bool { - _, ok := futureKeywords[s] - return ok + return IsFutureKeywordForRegoVersion(s, RegoV1) +} + +func IsFutureKeywordForRegoVersion(s string, v RegoVersion) bool { + var yes bool + + switch v { + case RegoV0, RegoV0CompatV1: + _, yes = futureKeywordsV0[s] + case RegoV1: + _, yes = futureKeywords[s] + } + + return yes } func (p *Parser) futureImport(imp *Import, allowedFutureKeywords map[string]tokens.Token) { @@ -2666,11 +2705,6 @@ func (p *Parser) futureImport(imp *Import, allowedFutureKeywords map[string]toke return } - if p.s.s.RegoV1Compatible() { - p.errorf(imp.Path.Location, "the `%s` import implies `future.keywords`, these are therefore mutually exclusive", RegoV1CompatibleRef) - return - } - kwds := make([]string, 0, len(allowedFutureKeywords)) for k := range allowedFutureKeywords { kwds = append(kwds, k) @@ -2700,7 +2734,7 @@ func (p *Parser) futureImport(imp *Import, allowedFutureKeywords map[string]toke } func (p *Parser) regoV1Import(imp *Import) { - if !p.po.Capabilities.ContainsFeature(FeatureRegoV1Import) { + if !p.po.Capabilities.ContainsFeature(FeatureRegoV1Import) && !p.po.Capabilities.ContainsFeature(FeatureRegoV1) { p.errorf(imp.Path.Location, "invalid import, `%s` is not supported by current capabilities", RegoV1CompatibleRef) return } @@ -2724,19 +2758,23 @@ func (p *Parser) regoV1Import(imp *Import) { } // import all future keywords with the rego.v1 import - kwds := make([]string, 0, len(futureKeywords)) - for k := range futureKeywords { + kwds := make([]string, 0, len(futureKeywordsV0)) + for k := range futureKeywordsV0 { kwds = append(kwds, k) } - if p.s.s.HasKeyword(futureKeywords) && !p.s.s.RegoV1Compatible() { - // We have imported future keywords, but they didn't come from another `rego.v1` import. 
- p.errorf(imp.Path.Location, "the `%s` import implies `future.keywords`, these are therefore mutually exclusive", RegoV1CompatibleRef) - return - } - p.s.s.SetRegoV1Compatible() for _, kw := range kwds { - p.s.s.AddKeyword(kw, futureKeywords[kw]) + p.s.s.AddKeyword(kw, futureKeywordsV0[kw]) + } +} + +func init() { + allFutureKeywords = map[string]tokens.Token{} + for k, v := range futureKeywords { + allFutureKeywords[k] = v + } + for k, v := range futureKeywordsV0 { + allFutureKeywords[k] = v } } diff --git a/v1/ast/parser_test.go b/v1/ast/parser_test.go index ad5e67d7a9..00d74f9f9c 100644 --- a/v1/ast/parser_test.go +++ b/v1/ast/parser_test.go @@ -1266,26 +1266,6 @@ func TestFutureImports(t *testing.T) { } assertParseModule(t, "multiple imports, all kw in options", mod, &parsed, ParserOptions{AllFutureKeywords: true}) assertParseModule(t, "multiple imports, single in options", mod, &parsed, ParserOptions{FutureKeywords: []string{"in"}}) - - mod = ` - package p - import rego.v1 - import future.keywords.in - ` - // Only applies to v0, as the 'rego.v1' import is a no-op in v1 - assertParseModuleErrorMatch(t, "rego.v1 and future.keywords.in imported", mod, - "rego_parse_error: the `rego.v1` import implies `future.keywords`, these are therefore mutually exclusive", - ParserOptions{RegoVersion: RegoV0}) - - mod = ` - package p - import future.keywords - import rego.v1 - ` - // Only applies to v0, as the 'rego.v1' import is a no-op in v1 - assertParseModuleErrorMatch(t, "rego.v1 and future.keywords imported", mod, - "rego_parse_error: the `rego.v1` import implies `future.keywords`, these are therefore mutually exclusive", - ParserOptions{RegoVersion: RegoV0}) } func TestFutureAndRegoV1ImportsExtraction(t *testing.T) { @@ -1388,7 +1368,6 @@ import future.keywords p contains 1 if { input.x == 1 }`, - expectedErrors: []string{"rego_parse_error: the `rego.v1` import implies `future.keywords`, these are therefore mutually exclusive"}, }, { note: "`if` keyword used on rule", @@ -6612,23 +6591,6 @@ func assertParseModuleError(t *testing.T, msg, input string) { } } -func assertParseModuleErrorMatch(t *testing.T, msg, input string, expected string, opts ...ParserOptions) { - t.Helper() - - opt := ParserOptions{} - if len(opts) == 1 { - opt = opts[0] - } - - m, err := ParseModuleWithOpts("", input, opt) - if err == nil { - t.Errorf("Error on test \"%s\": expected parse error: %v (parsed)", msg, m) - } - if !strings.Contains(err.Error(), expected) { - t.Errorf("Error on test \"%s\"; expected:\n\n%v\n\ngot:\n\n%v", msg, expected, err) - } -} - func assertParsePackage(t *testing.T, msg string, input string, correct *Package) { assertParseOne(t, msg, input, func(parsed interface{}) { pkg := parsed.(*Package) diff --git a/v1/ast/version_index.json b/v1/ast/version_index.json index 718df220f9..b888b3e028 100644 --- a/v1/ast/version_index.json +++ b/v1/ast/version_index.json @@ -1395,6 +1395,13 @@ } }, "features": { + "rego_v1": { + "Major": 1, + "Minor": 0, + "Patch": 0, + "PreRelease": "", + "Metadata": "" + }, "rego_v1_import": { "Major": 0, "Minor": 59, diff --git a/v1/bundle/bundle.go b/v1/bundle/bundle.go index e4f42b8ded..be320f6a73 100644 --- a/v1/bundle/bundle.go +++ b/v1/bundle/bundle.go @@ -1082,9 +1082,9 @@ func hashBundleFiles(hash SignatureHasher, b *Bundle) ([]FileInfo, error) { } // FormatModules formats Rego modules -// Modules will be formatted to comply with rego-v0, but Rego compatibility of individual parsed modules will be respected (e.g. if 'rego.v1' is imported). 
+// Modules will be formatted to comply with [ast.DefaultRegoVersion], but Rego compatibility of individual parsed modules will be respected (e.g. if 'rego.v1' is imported). func (b *Bundle) FormatModules(useModulePath bool) error { - return b.FormatModulesForRegoVersion(ast.RegoV0, true, useModulePath) + return b.FormatModulesForRegoVersion(ast.DefaultRegoVersion, true, useModulePath) } // FormatModulesForRegoVersion formats Rego modules to comply with a given Rego version diff --git a/v1/capabilities/v1.0.0.json b/v1/capabilities/v1.0.0.json new file mode 100644 index 0000000000..c1b4b8a006 --- /dev/null +++ b/v1/capabilities/v1.0.0.json @@ -0,0 +1,4844 @@ +{ + "builtins": [ + { + "name": "abs", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "all", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "and", + "decl": { + "args": [ + { + "of": { + "type": "any" + }, + "type": "set" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + }, + "infix": "\u0026" + }, + { + "name": "any", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "array.concat", + "decl": { + "args": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "array.reverse", + "decl": { + "args": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "array.slice", + "decl": { + "args": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "assign", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": ":=" + }, + { + "name": "base64.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "base64url.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64url.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64url.encode_no_pad", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" 
+ }, + "type": "function" + } + }, + { + "name": "bits.and", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.lsh", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.negate", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.or", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.rsh", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.xor", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "cast_array", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "cast_boolean", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "cast_null", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "null" + }, + "type": "function" + } + }, + { + "name": "cast_object", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "cast_set", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "cast_string", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "ceil", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "concat", + "decl": { + "args": [ + { + "type": "string" + }, + { + "of": [ + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "contains", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "count", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.equal", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.md5", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + 
"name": "crypto.hmac.sha1", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.sha256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.sha512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.md5", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.parse_private_keys", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.sha1", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.sha256", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_and_verify_certificates", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_and_verify_certificates_with_options", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_certificate_request", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_certificates", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_keypair", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_rsa_private_key", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "div", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "/" + }, + { + "name": "endswith", + "decl": { + "args": [ 
+ { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "eq", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "=" + }, + { + "name": "equal", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "==" + }, + { + "name": "floor", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "format_int", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "glob.match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "of": [ + { + "type": "null" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + } + ], + "type": "any" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "glob.quote_meta", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "graph.reachable", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "graph.reachable_paths", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "graphql.is_valid", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "graphql.parse", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": 
"any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "graphql.parse_and_verify", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "graphql.parse_query", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "graphql.parse_schema", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "graphql.schema_is_valid", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "gt", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003e" + }, + { + "name": "gte", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003e=" + }, + { + "name": "hex.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "hex.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "http.send", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "indexof", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "indexof_n", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "internal.member_2", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "in" + }, + { + "name": "internal.member_3", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": 
"in" + }, + { + "name": "internal.print", + "decl": { + "args": [ + { + "dynamic": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "array" + } + ], + "type": "function" + } + }, + { + "name": "intersection", + "decl": { + "args": [ + { + "of": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "io.jwt.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "static": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "type": "string" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "io.jwt.decode_verify", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "io.jwt.encode_sign", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "io.jwt.encode_sign_raw", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "io.jwt.verify_es256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_es384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_es512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_hs256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_hs384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_hs512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_ps256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": 
"io.jwt.verify_ps384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_ps512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_rs256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_rs384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_rs512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_array", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_boolean", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_null", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_number", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_object", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_set", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_string", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "json.filter", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "json.marshal", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "json.marshal_with_options", + "decl": { + "args": [ + { + "type": "any" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "static": [ + { + "key": "indent", + "value": { + "type": "string" + } + }, + { + "key": "prefix", + "value": { + "type": "string" + } + }, + { + "key": "pretty", + "value": { + "type": "boolean" + } + } + ], + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "json.match_schema", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": 
"any" + } + }, + "type": "object" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "static": [ + { + "key": "desc", + "value": { + "type": "string" + } + }, + { + "key": "error", + "value": { + "type": "string" + } + }, + { + "key": "field", + "value": { + "type": "string" + } + }, + { + "key": "type", + "value": { + "type": "string" + } + } + ], + "type": "object" + }, + "type": "array" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "json.patch", + "decl": { + "args": [ + { + "type": "any" + }, + { + "dynamic": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "static": [ + { + "key": "op", + "value": { + "type": "string" + } + }, + { + "key": "path", + "value": { + "type": "any" + } + } + ], + "type": "object" + }, + "type": "array" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.remove", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.unmarshal", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.verify_schema", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "of": [ + { + "type": "null" + }, + { + "type": "string" + } + ], + "type": "any" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "lower", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "lt", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003c" + }, + { + "name": "lte", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003c=" + }, + { + "name": "max", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "min", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "minus", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], 
+ "type": "any" + }, + { + "of": [ + { + "type": "number" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": [ + { + "type": "number" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + "type": "function" + }, + "infix": "-" + }, + { + "name": "mul", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "*" + }, + { + "name": "neq", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "!=" + }, + { + "name": "net.cidr_contains", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.cidr_contains_matches", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "static": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "type": "array" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "net.cidr_expand", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "of": { + "type": "string" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "net.cidr_intersects", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.cidr_is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.cidr_merge", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "of": [ + { + "type": "string" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "type": "string" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "net.cidr_overlap", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.lookup_ip_addr", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + 
"of": { + "type": "string" + }, + "type": "set" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "numbers.range", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "numbers.range_step", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "object.filter", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.get", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.keys", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "object.remove", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.subset", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.union", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.union_n", + "decl": { + "args": [ + { + "dynamic": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "opa.runtime", + "decl": { + "result": { + "dynamic": { + "key": { + "type": "string" + }, + 
"value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "or", + "decl": { + "args": [ + { + "of": { + "type": "any" + }, + "type": "set" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + }, + "infix": "|" + }, + { + "name": "plus", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "+" + }, + { + "name": "print", + "decl": { + "type": "function", + "variadic": { + "type": "any" + } + } + }, + { + "name": "product", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + { + "of": { + "type": "number" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "providers.aws.sign_req", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "rand.intn", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "re_match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.find_all_string_submatch_n", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "regex.find_n", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "regex.globs_match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.replace", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "regex.split", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "regex.template_match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": 
"boolean" + }, + "type": "function" + } + }, + { + "name": "rego.metadata.chain", + "decl": { + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "rego.metadata.rule", + "decl": { + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "rego.parse_module", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "rem", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "%" + }, + { + "name": "replace", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "round", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "semver.compare", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "semver.is_valid", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "set_diff", + "decl": { + "args": [ + { + "of": { + "type": "any" + }, + "type": "set" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "sort", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "split", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "sprintf", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "startswith", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "strings.any_prefix_match", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "strings.any_suffix_match", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + 
"type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "strings.count", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "strings.render_template", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "strings.replace_n", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "strings.reverse", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "substring", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "sum", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + { + "of": { + "type": "number" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.add_date", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.clock", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "time.date", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "time.diff", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "time.format", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + 
"result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "time.now_ns", + "decl": { + "result": { + "type": "number" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "time.parse_duration_ns", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.parse_ns", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.parse_rfc3339_ns", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.weekday", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "to_number", + "decl": { + "args": [ + { + "of": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "trace", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "trim", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_left", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_prefix", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_right", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_space", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_suffix", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "type_name", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "union", + "decl": { + "args": [ + { + "of": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "units.parse", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "units.parse_bytes", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "upper", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "urlquery.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "urlquery.decode_object", + "decl": { + "args": [ + { + "type": 
"string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "dynamic": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "urlquery.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "urlquery.encode_object", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "uuid.parse", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "uuid.rfc4122", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "walk", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "static": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "type": "any" + } + ], + "type": "array" + }, + "type": "function" + }, + "relation": true + }, + { + "name": "yaml.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "yaml.marshal", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "yaml.unmarshal", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + } + ], + "future_keywords": [ + "contains", + "every", + "if", + "in" + ], + "wasm_abi_versions": [ + { + "version": 1, + "minor_version": 1 + }, + { + "version": 1, + "minor_version": 2 + } + ], + "features": [ + "rule_head_ref_string_prefixes", + "rule_head_refs", + "rego_v1", + "rego_v1_import" + ] +} diff --git a/v1/compile/compile_test.go b/v1/compile/compile_test.go index dc6bcb0fc7..3454ad310d 100644 --- a/v1/compile/compile_test.go +++ b/v1/compile/compile_test.go @@ -1663,7 +1663,7 @@ update { for _, useMemoryFS := range []bool{false, true} { test.WithTestFS(tc.files, useMemoryFS, func(root string, fsys fs.FS) { - caps := ast.CapabilitiesForThisVersion() + caps := ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)) caps.Features = []string{ ast.FeatureRefHeadStringPrefixes, ast.FeatureRefHeads, @@ -1891,6 +1891,9 @@ p if { ast.FeatureRefHeadStringPrefixes, ast.FeatureRefHeads, } + if tc.modulesRegoVersion == ast.RegoV1 { + capabilities.Features = append(capabilities.Features, ast.FeatureRegoV1) + } if tc.regoV1ImportCapable { capabilities.Features = append(capabilities.Features, ast.FeatureRegoV1Import) } diff --git a/v1/loader/loader.go b/v1/loader/loader.go index c20500970b..8daf22458b 100644 --- a/v1/loader/loader.go +++ b/v1/loader/loader.go @@ -762,6 +762,7 @@ func loadBundleFile(path string, bs []byte, m metrics.Metrics, opts ast.ParserOp tl := bundle.NewTarballLoaderWithBaseURL(bytes.NewBuffer(bs), path) br := bundle.NewCustomReader(tl). WithRegoVersion(opts.RegoVersion). + WithCapabilities(opts.Capabilities). WithJSONOptions(opts.JSONOptions). 
WithProcessAnnotations(opts.ProcessAnnotation). WithMetrics(m). diff --git a/v1/plugins/plugins_test.go b/v1/plugins/plugins_test.go index e878022a19..ae587f078d 100644 --- a/v1/plugins/plugins_test.go +++ b/v1/plugins/plugins_test.go @@ -18,6 +18,7 @@ import ( internal_tracing "github.com/open-policy-agent/opa/internal/distributedtracing" "github.com/open-policy-agent/opa/internal/file/archive" "github.com/open-policy-agent/opa/internal/storage/mock" + "github.com/open-policy-agent/opa/v1/ast" "github.com/open-policy-agent/opa/v1/bundle" "github.com/open-policy-agent/opa/v1/logging" "github.com/open-policy-agent/opa/v1/logging/test" @@ -192,116 +193,143 @@ func TestPluginStatusUpdateOnStartAndStop(t *testing.T) { } func TestManagerWithOPATelemetryUpdateLoop(t *testing.T) { - // test server - mux := http.NewServeMux() - ts := httptest.NewServer(mux) - - versions := []string{} - mux.HandleFunc("/v1/version", func(w http.ResponseWriter, req *http.Request) { - var data map[string]string - - body, err := io.ReadAll(req.Body) - if err != nil { - t.Fatal(err) - } - - err = json.Unmarshal(body, &data) - if err != nil { - t.Fatal(err) - } - - versions = append(versions, data["min_compatible_version"]) - - w.WriteHeader(http.StatusOK) - bs, _ := json.Marshal(map[string]string{"foo": "bar"}) // dummy data - w.Header().Set("Content-Type", "application/json") - _, _ = w.Write(bs) // ignore error - }) - defer ts.Close() - - t.Setenv("OPA_TELEMETRY_SERVICE_URL", ts.URL) - - ctx := context.Background() - - m, err := New([]byte{}, "test", inmem.New(), WithEnableTelemetry(true)) - if err != nil { - t.Fatalf("Unexpected error: %s", err) - } - - defaultUploadIntervalSec = int64(1) - - err = m.Start(context.Background()) - if err != nil { - t.Fatalf("Unexpected error: %s", err) - } - - // add a policy to the store to trigger a telemetry update (v0.36.0) - module := `package x - p := array.reverse([1,2,3])` - - err = storage.Txn(ctx, m.Store, storage.WriteParams, func(txn storage.Transaction) error { - return m.Store.UpsertPolicy(ctx, txn, "policy.rego", []byte(module)) - }) - if err != nil { - t.Fatalf("unexpected error: %v", err) + tests := []struct { + note string + regoVersion ast.RegoVersion + exp []string + }{ + { + note: "v0 manager", + regoVersion: ast.RegoV0, + exp: []string{"0.36.0", "0.46.0"}, + }, + { + note: "v1 manager", + regoVersion: ast.RegoV1, + exp: []string{"1.0.0", "1.0.0"}, + }, } - time.Sleep(2 * time.Second) + for _, tc := range tests { + t.Run(tc.note, func(t *testing.T) { + // test server + mux := http.NewServeMux() + ts := httptest.NewServer(mux) - // add data to the store and verify there is no trigger for a telemetry update - err = storage.Txn(ctx, m.Store, storage.WriteParams, func(txn storage.Transaction) error { - return m.Store.Write(ctx, txn, storage.AddOp, storage.MustParsePath("/a"), `[2,1,3]`) - }) - if err != nil { - t.Fatalf("unexpected error: %v", err) - } + versions := []string{} + mux.HandleFunc("/v1/version", func(w http.ResponseWriter, req *http.Request) { + var data map[string]string - // add a bundle with some policy to trigger a telemetry update (v0.46.0) - txn := storage.NewTransactionOrDie(ctx, m.Store, storage.WriteParams) - - var archiveFiles = map[string]string{ - "/a/b/c/data.json": "[1,2,3]", - "/policy.rego": "package foo\n import future.keywords.every", - "/roles/policy.rego": "package bar\n import future.keywords.if\n p.a.b.c.d if { true }", - } + body, err := io.ReadAll(req.Body) + if err != nil { + t.Fatal(err) + } - files := make([][2]string, 0, 
len(archiveFiles)) - for name, content := range archiveFiles { - files = append(files, [2]string{name, content}) - } + err = json.Unmarshal(body, &data) + if err != nil { + t.Fatal(err) + } - buf := archive.MustWriteTarGz(files) - b, err := bundle.NewReader(buf).WithLazyLoadingMode(true).Read() - if err != nil { - t.Fatal(err) - } + versions = append(versions, data["min_compatible_version"]) - iterator := bundle.NewIterator(b.Raw) + w.WriteHeader(http.StatusOK) + bs, _ := json.Marshal(map[string]string{"foo": "bar"}) // dummy data + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write(bs) // ignore error + }) + defer ts.Close() - params := storage.WriteParams - params.BasePaths = []string{""} + t.Setenv("OPA_TELEMETRY_SERVICE_URL", ts.URL) - err = m.Store.Truncate(ctx, txn, params, iterator) - if err != nil { - t.Fatalf("Unexpected truncate error: %v", err) - } + ctx := context.Background() - if err := m.Store.Commit(ctx, txn); err != nil { - t.Fatalf("Unexpected commit error: %v", err) - } + m, err := New([]byte{}, "test", inmem.New(), + WithEnableTelemetry(true), + WithParserOptions(ast.ParserOptions{RegoVersion: tc.regoVersion})) + if err != nil { + t.Fatalf("Unexpected error: %s", err) + } - time.Sleep(2 * time.Second) + defaultUploadIntervalSec = int64(1) - m.Stop(ctx) + err = m.Start(context.Background()) + if err != nil { + t.Fatalf("Unexpected error: %s", err) + } - exp := 2 - if len(versions) != exp { - t.Fatalf("Expected number of server calls: %+v but got: %+v", exp, len(versions)) - } + // add a policy to the store to trigger a telemetry update + // (v0.36.0 with v0 manager) + // (v1.0.0 with v1 manager) + module := `package x + p := array.reverse([1,2,3])` - expVers := []string{"0.36.0", "0.46.0"} - if !reflect.DeepEqual(expVers, versions) { - t.Fatalf("Expected OPA versions: %+v but got: %+v", expVers, versions) + err = storage.Txn(ctx, m.Store, storage.WriteParams, func(txn storage.Transaction) error { + return m.Store.UpsertPolicy(ctx, txn, "policy.rego", []byte(module)) + }) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + time.Sleep(2 * time.Second) + + // add data to the store and verify there is no trigger for a telemetry update + err = storage.Txn(ctx, m.Store, storage.WriteParams, func(txn storage.Transaction) error { + return m.Store.Write(ctx, txn, storage.AddOp, storage.MustParsePath("/a"), `[2,1,3]`) + }) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + // add a bundle with some policy to trigger a telemetry update + // (v0.46.0 with v0 manager) + // (v1.0.0 with v1 manager) + txn := storage.NewTransactionOrDie(ctx, m.Store, storage.WriteParams) + + var archiveFiles = map[string]string{ + ".manifest": `{"rego_version": 0}`, + "/a/b/c/data.json": "[1,2,3]", + "/policy.rego": "package foo\n import future.keywords.every", + "/roles/policy.rego": "package bar\n import future.keywords.if\n p.a.b.c.d if { true }", + } + + files := make([][2]string, 0, len(archiveFiles)) + for name, content := range archiveFiles { + files = append(files, [2]string{name, content}) + } + + buf := archive.MustWriteTarGz(files) + b, err := bundle.NewReader(buf).WithLazyLoadingMode(true).Read() + if err != nil { + t.Fatal(err) + } + + iterator := bundle.NewIterator(b.Raw) + + params := storage.WriteParams + params.BasePaths = []string{""} + + err = m.Store.Truncate(ctx, txn, params, iterator) + if err != nil { + t.Fatalf("Unexpected truncate error: %v", err) + } + + if err := m.Store.Commit(ctx, txn); err != nil { + t.Fatalf("Unexpected commit 
error: %v", err) + } + + time.Sleep(2 * time.Second) + + m.Stop(ctx) + + exp := 2 + if len(versions) != exp { + t.Fatalf("Expected number of server calls: %+v but got: %+v", exp, len(versions)) + } + + if !reflect.DeepEqual(tc.exp, versions) { + t.Fatalf("Expected OPA versions: %+v but got: %+v", tc.exp, versions) + } + }) } } diff --git a/v1/rego/rego.go b/v1/rego/rego.go index caa21dec56..9499a213ff 100644 --- a/v1/rego/rego.go +++ b/v1/rego/rego.go @@ -1913,6 +1913,7 @@ func (r *Rego) loadFiles(ctx context.Context, txn storage.Transaction, m metrics WithMetrics(m). WithProcessAnnotation(true). WithRegoVersion(r.regoVersion). + WithCapabilities(r.capabilities). Filtered(r.loadPaths.paths, r.loadPaths.filter) if err != nil { return err @@ -1944,6 +1945,7 @@ func (r *Rego) loadBundles(_ context.Context, _ storage.Transaction, m metrics.M WithProcessAnnotation(true). WithSkipBundleVerification(r.skipBundleVerification). WithRegoVersion(r.regoVersion). + WithCapabilities(r.capabilities). AsBundle(path) if err != nil { return fmt.Errorf("loading error: %s", err) @@ -2489,7 +2491,10 @@ func (r *Rego) partial(ctx context.Context, ectx *EvalContext) (*PartialQueries, } // If the target rego-version is v0, and the rego.v1 import is available, then we attempt to apply it to support modules. - if r.regoVersion == ast.RegoV0 && (r.capabilities == nil || r.capabilities.ContainsFeature(ast.FeatureRegoV1Import)) { + if r.regoVersion == ast.RegoV0 && + (r.capabilities == nil || + r.capabilities.ContainsFeature(ast.FeatureRegoV1Import) || + r.capabilities.ContainsFeature(ast.FeatureRegoV1)) { for i, mod := range support { // We can't apply the RegoV0CompatV1 version to the support module if it contains rules or vars that @@ -2501,7 +2506,7 @@ func (r *Rego) partial(ctx context.Context, ectx *EvalContext) (*PartialQueries, if name == "" && len(r.Head.Reference) > 0 { name = r.Head.Reference[0].Value.(ast.Var) } - if ast.IsFutureKeyword(name.String()) { + if ast.IsFutureKeywordForRegoVersion(name.String(), ast.RegoV0) { applyRegoVersion = false return true } @@ -2510,7 +2515,7 @@ func (r *Rego) partial(ctx context.Context, ectx *EvalContext) (*PartialQueries, if applyRegoVersion { ast.WalkVars(mod, func(v ast.Var) bool { - if ast.IsFutureKeyword(v.String()) { + if ast.IsFutureKeywordForRegoVersion(v.String(), ast.RegoV0) { applyRegoVersion = false return true } diff --git a/v1/rego/rego_test.go b/v1/rego/rego_test.go index 9bab2ed917..c798851b3f 100644 --- a/v1/rego/rego_test.go +++ b/v1/rego/rego_test.go @@ -137,6 +137,213 @@ p contains x if { } } +func TestRegoEval_Capabilities(t *testing.T) { + tests := []struct { + note string + regoVersion ast.RegoVersion + capabilities *ast.Capabilities + module string + expResult interface{} + expErrs []string + }{ + { + note: "v0 module, rego-v0, no capabilities", + regoVersion: ast.RegoV0, + module: `package test + +p[x] { + x = ["a", "b", "c"][_] +}`, + expResult: []string{"a", "b", "c"}, + }, + { + note: "v0 module, rego-v0, v0 capabilities", + regoVersion: ast.RegoV0, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + module: `package test + +p[x] { + x = ["a", "b", "c"][_] +}`, + expResult: []string{"a", "b", "c"}, + }, + { + note: "v0 module, rego-v0, v1 capabilities", + regoVersion: ast.RegoV0, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + module: `package test + +p[x] { + x = ["a", "b", "c"][_] +}`, + expResult: []string{"a", "b", "c"}, + }, + + { + note: "v0 module, 
rego-v1, no capabilities", + regoVersion: ast.RegoV1, + module: `package test + +p[x] { + x = ["a", "b", "c"][_] +}`, + expErrs: []string{ + "test.rego:3: rego_parse_error: `if` keyword is required before rule body", + "test.rego:3: rego_parse_error: `contains` keyword is required for partial set rules", + }, + }, + { + note: "v0 module, rego-v1, v0 capabilities", + regoVersion: ast.RegoV1, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + module: `package test + +p[x] { + x = ["a", "b", "c"][_] +}`, + expErrs: []string{ + "rego_parse_error: illegal capabilities: rego_v1 feature required for parsing v1 Rego", + }, + }, + { + note: "v0 module, rego-v1, v1 capabilities", + regoVersion: ast.RegoV1, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + module: `package test + +p[x] { + x = ["a", "b", "c"][_] +}`, + expErrs: []string{ + "test.rego:3: rego_parse_error: `if` keyword is required before rule body", + "test.rego:3: rego_parse_error: `contains` keyword is required for partial set rules", + }, + }, + + { + note: "v1 module, rego-v0, no capabilities", + regoVersion: ast.RegoV0, + module: `package test + +p contains x if { + some x in ["a", "b", "c"] +}`, + expErrs: []string{ + "test.rego:4: rego_parse_error: unexpected identifier token", + }, + }, + { + note: "v1 module, rego-v0, v0 capabilities", + regoVersion: ast.RegoV0, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + module: `package test + +p contains x if { + some x in ["a", "b", "c"] +}`, + expErrs: []string{ + "test.rego:4: rego_parse_error: unexpected identifier token", + }, + }, + { + note: "v1 module, rego-v0, v1 capabilities", + regoVersion: ast.RegoV0, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + module: `package test + +p contains x if { + some x in ["a", "b", "c"] +}`, + expErrs: []string{ + "test.rego:4: rego_parse_error: unexpected identifier token", + }, + }, + + { + note: "v1 module, rego-v1, no capabilities", + regoVersion: ast.RegoV1, + module: `package test + +p contains x if { + some x in ["a", "b", "c"] +}`, + expResult: []string{"a", "b", "c"}, + }, + { + note: "v1 module, rego-v1, v0 capabilities", + regoVersion: ast.RegoV1, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV0)), + module: `package test + +p contains x if { + some x in ["a", "b", "c"] +}`, + expErrs: []string{ + "rego_parse_error: illegal capabilities: rego_v1 feature required for parsing v1 Rego", + }, + }, + { + note: "v1 module, rego-v1, v1 capabilities", + regoVersion: ast.RegoV1, + capabilities: ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1)), + module: `package test + +p contains x if { + some x in ["a", "b", "c"] +}`, + expResult: []string{"a", "b", "c"}, + }, + } + + for _, tc := range tests { + t.Run(tc.note, func(t *testing.T) { + files := map[string]string{ + "test.rego": tc.module, + } + + test.WithTempFS(files, func(root string) { + ctx := context.Background() + + pq, err := New( + SetRegoVersion(tc.regoVersion), + Capabilities(tc.capabilities), + Load([]string{root}, nil), + Query("data.test.p"), + ).PrepareForEval(ctx) + + if tc.expErrs != nil { + if err == nil { + t.Fatalf("Expected error but got nil") + } + + for _, expErr := range tc.expErrs { + if !strings.Contains(err.Error(), expErr) { + t.Fatalf("Expected error to contain:\n\n%q\n\nbut got:\n\n%v", expErr, err) + } + } + } else { + if err != nil { + 
					t.Fatalf("Unexpected error: %v", err)
+				}
+
+				rs, err := pq.Eval(ctx)
+				if err != nil {
+					t.Fatalf("Unexpected error: %v", err)
+				}
+
+				if len(rs) != 1 {
+					t.Fatalf("Expected exactly one result but got:\n\n%v", rs)
+				}
+
+				gotValues, ok := rs[0].Expressions[0].Value.([]interface{})
+				if !ok {
+					t.Fatalf("Expected %v but got: %v", tc.expResult, rs[0].Expressions[0].Value)
+				}
+
+				got := make([]string, len(gotValues))
+				for i, v := range gotValues {
+					got[i], _ = v.(string)
+				}
+
+				if !reflect.DeepEqual(got, tc.expResult) {
+					t.Fatalf("Expected %v but got: %v", tc.expResult, got)
+				}
+			}
+		})
+		})
+	}
+}
+
 func assertEval(t *testing.T, r *Rego, expected string) {
 	t.Helper()
 	rs, err := r.Eval(context.Background())
diff --git a/v1/repl/repl.go b/v1/repl/repl.go
index f4d8274b2d..4147a86281 100644
--- a/v1/repl/repl.go
+++ b/v1/repl/repl.go
@@ -929,12 +929,7 @@ func (r *REPL) parserOptions() (ast.ParserOptions, error) {
 	if err == nil {
 		for _, i := range r.modules[r.currentModuleID].Imports {
 			if ast.Compare(i.Path.Value, ast.RegoV1CompatibleRef) == 0 {
-				opts.RegoVersion = ast.RegoV0CompatV1
-
-				// ast.RegoV0CompatV1 sets parsing requirements, but doesn't imply allowed future keywords
-				if r.capabilities != nil {
-					opts.FutureKeywords = r.capabilities.FutureKeywords
-				}
+				opts.RegoVersion = ast.RegoV1
 			}
 		}
 	}
diff --git a/v1/tester/runner.go b/v1/tester/runner.go
index 5df0a2d5ca..4522ffceff 100644
--- a/v1/tester/runner.go
+++ b/v1/tester/runner.go
@@ -622,6 +622,10 @@ func Load(args []string, filter loader.Filter) (map[string]*ast.Module, storage.
 // LoadWithRegoVersion returns modules and an in-memory store for running tests.
 // Modules are parsed in accordance with the given RegoVersion.
 func LoadWithRegoVersion(args []string, filter loader.Filter, regoVersion ast.RegoVersion) (map[string]*ast.Module, storage.Store, error) {
+	if regoVersion == ast.RegoUndefined {
+		regoVersion = ast.DefaultRegoVersion
+	}
+
 	loaded, err := loader.NewFileLoader().
 		WithRegoVersion(regoVersion).
 		WithProcessAnnotation(true).
@@ -649,6 +653,38 @@ func LoadWithRegoVersion(args []string, filter loader.Filter, regoVersion ast.Re
 	return modules, store, err
 }
+
+// LoadWithParserOptions returns modules and an in-memory store for running tests.
+// Modules are parsed in accordance with the given [ast.ParserOptions].
+func LoadWithParserOptions(args []string, filter loader.Filter, popts ast.ParserOptions) (map[string]*ast.Module, storage.Store, error) {
+	loaded, err := loader.NewFileLoader().
+		WithRegoVersion(popts.RegoVersion).
+		WithCapabilities(popts.Capabilities).
+		WithProcessAnnotation(popts.ProcessAnnotation).
+		WithJSONOptions(popts.JSONOptions).
+		Filtered(args, filter)
+	if err != nil {
+		return nil, nil, err
+	}
+	store := inmem.NewFromObject(loaded.Documents)
+	modules := map[string]*ast.Module{}
+	ctx := context.Background()
+	err = storage.Txn(ctx, store, storage.WriteParams, func(txn storage.Transaction) error {
+		for _, loadedModule := range loaded.Modules {
+			modules[loadedModule.Name] = loadedModule.Parsed
+
+			// Add the policies to the store to ensure that any future bundle
+			// activations will preserve them and re-compile the module with
+			// the bundle modules.
+			err := store.UpsertPolicy(ctx, txn, loadedModule.Name, loadedModule.Raw)
+			if err != nil {
+				return err
+			}
+		}
+		return nil
+	})
+	return modules, store, err
+}
+
+// LoadBundles will load the given args as bundles, either tarball or directory is OK.
 func LoadBundles(args []string, filter loader.Filter) (map[string]*bundle.Bundle, error) {
 	return LoadBundlesWithRegoVersion(args, filter, ast.RegoV0)
@@ -677,3 +713,29 @@ func LoadBundlesWithRegoVersion(args []string, filter loader.Filter, regoVersion
 	return bundles, nil
 }
+
+// LoadBundlesWithParserOptions will load the given args as bundles, either tarball or directory is OK.
+// Bundles are parsed in accordance with the given [ast.ParserOptions].
+func LoadBundlesWithParserOptions(args []string, filter loader.Filter, popts ast.ParserOptions) (map[string]*bundle.Bundle, error) {
+	if popts.RegoVersion == ast.RegoUndefined {
+		popts.RegoVersion = ast.DefaultRegoVersion
+	}
+
+	bundles := map[string]*bundle.Bundle{}
+	for _, bundleDir := range args {
+		b, err := loader.NewFileLoader().
+			WithRegoVersion(popts.RegoVersion).
+			WithCapabilities(popts.Capabilities).
+			WithProcessAnnotation(popts.ProcessAnnotation).
+			WithJSONOptions(popts.JSONOptions).
+			WithSkipBundleVerification(true).
+			WithFilter(filter).
+			AsBundle(bundleDir)
+		if err != nil {
+			return nil, fmt.Errorf("unable to load bundle %s: %s", bundleDir, err)
+		}
+		bundles[bundleDir] = b
+	}
+
+	return bundles, nil
+}
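
Usage note: the sketch below shows how the capability pinning exercised in this change is expected to be used from the v1 rego package. It is a minimal illustration, not code from the patch; the import paths and the inline policy are assumptions, while the option and constructor names (ast.CapabilitiesForThisVersion, ast.CapabilitiesRegoVersion, rego.SetRegoVersion, rego.Capabilities) are the ones appearing in the diff above.

package main

import (
	"context"
	"fmt"

	"github.com/open-policy-agent/opa/v1/ast"
	"github.com/open-policy-agent/opa/v1/rego"
)

func main() {
	ctx := context.Background()

	// Capabilities pinned to Rego v1; these include the rego_v1 feature,
	// which the parser now requires before it accepts v1 modules.
	caps := ast.CapabilitiesForThisVersion(ast.CapabilitiesRegoVersion(ast.RegoV1))

	r := rego.New(
		rego.Query("data.test.p"),
		rego.SetRegoVersion(ast.RegoV1),
		rego.Capabilities(caps),
		// Inline module for illustration; rego.Load and bundle loading go
		// through the same capability-aware loader paths changed above.
		rego.Module("test.rego", `package test

p contains x if {
	some x in ["a", "b", "c"]
}`),
	)

	pq, err := r.PrepareForEval(ctx)
	if err != nil {
		// With v0-pinned capabilities this is where the
		// "rego_v1 feature required for parsing v1 Rego" error surfaces.
		panic(err)
	}

	rs, err := pq.Eval(ctx)
	if err != nil {
		panic(err)
	}

	fmt.Println(rs[0].Expressions[0].Value) // [a b c]
}

The new tester.LoadWithParserOptions and tester.LoadBundlesWithParserOptions accept the same settings through ast.ParserOptions (RegoVersion, Capabilities, ProcessAnnotation, JSONOptions), so test and bundle loading can be pinned in the same way.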