Mirror of https://github.com/opencloud-eu/opencloud.git (synced 2026-01-21 20:39:58 -05:00)

Compare commits: 5 commits (dependabot)
| Author | SHA1 | Date |
|---|---|---|
| | 2c75042f52 | |
| | d49123b36b | |
| | b40c3f068c | |
| | 8d3b2e3eeb | |
| | e8b1834706 | |
@@ -201,13 +201,6 @@ config = {
        ],
        "skip": False,
    },
    "accountsHashDifficulty": {
        "skip": False,
        "suites": [
            "apiAccountsHashDifficulty",
        ],
        "accounts_hash_difficulty": "default",
    },
    "notification": {
        "suites": [
            "apiNotification",
@@ -234,7 +227,6 @@ config = {
        ],
        "skip": False,
        "antivirusNeeded": True,
        "generateVirusFiles": True,
        "extraServerEnvironment": {
            "ANTIVIRUS_SCANNER_TYPE": "clamav",
            "ANTIVIRUS_CLAMAV_SOCKET": "tcp://clamav:3310",
@@ -301,7 +293,6 @@ config = {
        "skip": False,
        "withRemotePhp": [True],
        "antivirusNeeded": True,
        "generateVirusFiles": True,
        "extraServerEnvironment": {
            "ANTIVIRUS_SCANNER_TYPE": "clamav",
            "ANTIVIRUS_CLAMAV_SOCKET": "tcp://clamav:3310",
@@ -443,6 +434,7 @@ MINIO_MC_ENV = {
        "from_secret": "cache_s3_secret_key",
    },
    "PUBLIC_BUCKET": "public",
    "MC_PUBLIC_HOST": "https://s3.ci.opencloud.eu",
}

CI_HTTP_PROXY_ENV = {
@@ -667,10 +659,10 @@ def testPipelines(ctx):
        storage = "decomposed"

    if "skip" not in config["cs3ApiTests"] or not config["cs3ApiTests"]["skip"]:
        pipelines += cs3ApiTests(ctx, storage, "default")
        pipelines += cs3ApiTests(ctx, storage)
    if "skip" not in config["wopiValidatorTests"] or not config["wopiValidatorTests"]["skip"]:
        pipelines += wopiValidatorTests(ctx, storage, "builtin", "default")
        pipelines += wopiValidatorTests(ctx, storage, "cs3", "default")
        pipelines += wopiValidatorTests(ctx, storage, "builtin")
        pipelines += wopiValidatorTests(ctx, storage, "cs3")

    pipelines += localApiTestPipeline(ctx)
    pipelines += coreApiTestPipeline(ctx)
@@ -1059,12 +1051,12 @@ def codestyle(ctx):

    return pipelines

def cs3ApiTests(ctx, storage, accounts_hash_difficulty = 4):
def cs3ApiTests(ctx, storage):
    pipeline = {
        "name": "test-cs3-API-%s" % storage,
        "steps": evaluateWorkflowStep() +
            restoreBuildArtifactCache(ctx, dirs["opencloudBinArtifact"], dirs["opencloudBinPath"]) +
            opencloudServer(storage, accounts_hash_difficulty, deploy_type = "cs3api_validator") +
            opencloudServer(storage, deploy_type = "cs3api_validator") +
            [
                {
                    "name": "cs3ApiTests",
@@ -1095,7 +1087,7 @@ def cs3ApiTests(ctx, storage, accounts_hash_difficulty = 4):
    ])
    return [pipeline]

def wopiValidatorTests(ctx, storage, wopiServerType, accounts_hash_difficulty = 4):
def wopiValidatorTests(ctx, storage, wopiServerType):
    testgroups = [
        "BaseWopiViewing",
        "CheckFileInfoSchema",
@@ -1173,7 +1165,7 @@ def wopiValidatorTests(ctx, storage, wopiServerType, accounts_hash_difficulty =
        "steps": evaluateWorkflowStep() +
            restoreBuildArtifactCache(ctx, dirs["opencloudBinArtifact"], dirs["opencloudBinPath"]) +
            waitForServices("fake-office", ["fakeoffice:8080"]) +
            opencloudServer(storage, accounts_hash_difficulty, deploy_type = "wopi_validator", extra_server_environment = extra_server_environment) +
            opencloudServer(storage, deploy_type = "wopi_validator", extra_server_environment = extra_server_environment) +
            wopiServer +
            waitForServices("wopi-fakeoffice", ["wopi-fakeoffice:9300"]) +
            [
@@ -1218,29 +1210,21 @@
def localApiTestPipeline(ctx):
    pipelines = []

    with_remote_php = [True]
    enable_watch_fs = [False]
    if ctx.build.event == "cron":
        with_remote_php.append(False)
        enable_watch_fs.append(True)

    defaults = {
        "suites": {},
        "skip": False,
        "extraTestEnvironment": {},
        "extraServerEnvironment": {},
        "storages": ["posix"],
        "accounts_hash_difficulty": 4,
        "emailNeeded": False,
        "antivirusNeeded": False,
        "tikaNeeded": False,
        "federationServer": False,
        "collaborationServiceNeeded": False,
        "extraCollaborationEnvironment": {},
        "withRemotePhp": with_remote_php,
        "enableWatchFs": enable_watch_fs,
        "withRemotePhp": [True],
        "enableWatchFs": [False],
        "ldapNeeded": False,
        "generateVirusFiles": False,
    }

    if "localApiTests" in config:
@@ -1255,6 +1239,14 @@ def localApiTestPipeline(ctx):
        if "[decomposed]" in ctx.build.title.lower() or name.startswith("cli"):
            params["storages"] = ["decomposed"]

        if ctx.build.event == "cron":
            params["withRemotePhp"] = [True, False]
            params["enableWatchFs"] = [True, False]

        # override withRemotePhp if specified in the suite config
        if "withRemotePhp" in matrix:
            params["withRemotePhp"] = matrix["withRemotePhp"]

        for storage in params["storages"]:
            for run_with_remote_php in params["withRemotePhp"]:
                for run_with_watch_fs_enabled in params["enableWatchFs"]:
@@ -1279,16 +1271,15 @@ def localApiTestPipeline(ctx):
                        (waitForLdapService() if params["ldapNeeded"] else []) +
                        opencloudServer(
                            storage,
                            params["accounts_hash_difficulty"],
                            extra_server_environment = params["extraServerEnvironment"],
                            with_wrapper = True,
                            tika_enabled = params["tikaNeeded"],
                            watch_fs_enabled = run_with_watch_fs_enabled,
                        ) +
                        (opencloudServer(storage, params["accounts_hash_difficulty"], deploy_type = "federation", extra_server_environment = params["extraServerEnvironment"], watch_fs_enabled = run_with_watch_fs_enabled) if params["federationServer"] else []) +
                        (opencloudServer(storage, deploy_type = "federation", extra_server_environment = params["extraServerEnvironment"], watch_fs_enabled = run_with_watch_fs_enabled) if params["federationServer"] else []) +
                        ((wopiCollaborationService("fakeoffice") + wopiCollaborationService("collabora") + wopiCollaborationService("onlyoffice")) if params["collaborationServiceNeeded"] else []) +
                        (openCloudHealthCheck("wopi", ["wopi-collabora:9304", "wopi-onlyoffice:9304", "wopi-fakeoffice:9304"]) if params["collaborationServiceNeeded"] else []) +
                        localApiTest(params["suites"], storage, params["extraTestEnvironment"], run_with_remote_php, params["generateVirusFiles"]) +
                        localApiTest(params["suites"], storage, params["extraTestEnvironment"], run_with_remote_php) +
                        logRequests(),
                    "services": (emailService() if params["emailNeeded"] else []) +
                        (clamavService() if params["antivirusNeeded"] else []) +
@@ -1312,7 +1303,7 @@ def localApiTestPipeline(ctx):
                    pipelines.append(pipeline)
    return pipelines

def localApiTest(suites, storage = "decomposed", extra_environment = {}, with_remote_php = False, generate_virus_files = False):
def localApiTest(suites, storage = "decomposed", extra_environment = {}, with_remote_php = False):
    test_dir = "%s/tests/acceptance" % dirs["base"]
    expected_failures_file = "%s/expected-failures-%s-storage.md" % (test_dir, storage)

@@ -1337,11 +1328,6 @@ def localApiTest(suites, storage = "decomposed", extra_environment = {}, with_re

    commands = []

    # Generate EICAR virus test files if needed
    if generate_virus_files:
        commands.append("chmod +x %s/tests/acceptance/scripts/generate-virus-files.sh" % dirs["base"])
        commands.append("bash %s/tests/acceptance/scripts/generate-virus-files.sh" % dirs["base"])

    # Merge expected failures
    if not with_remote_php:
        commands.append("cat %s/expected-failures-without-remotephp.md >> %s" % (test_dir, expected_failures_file))
@@ -1364,7 +1350,6 @@ def coreApiTestPipeline(ctx):
        "numberOfParts": 7,
        "skipExceptParts": [],
        "skip": False,
        "accounts_hash_difficulty": 4,
    }

    pipelines = []
@@ -1385,6 +1370,10 @@ def coreApiTestPipeline(ctx):
        params["withRemotePhp"] = [True, False]
        params["enableWatchFs"] = [True, False]

        # override withRemotePhp if specified in the suite config
        if "withRemotePhp" in matrix:
            params["withRemotePhp"] = matrix["withRemotePhp"]

    debugParts = params["skipExceptParts"]
    debugPartsEnabled = (len(debugParts) != 0)

@@ -1406,7 +1395,6 @@ def coreApiTestPipeline(ctx):
            restoreBuildArtifactCache(ctx, dirs["opencloudBinArtifact"], dirs["opencloudBinPath"]) +
            opencloudServer(
                storage,
                params["accounts_hash_difficulty"],
                with_wrapper = True,
                watch_fs_enabled = run_with_watch_fs_enabled,
            ) +
@@ -1771,7 +1759,7 @@ def uploadTracingResult(ctx):
            "mc cp -a %s/reports/e2e/playwright/tracing/* s3/$PUBLIC_BUCKET/web/tracing/$CI_REPO_NAME/$CI_PIPELINE_NUMBER/" % dirs["web"],
            "cd %s/reports/e2e/playwright/tracing/" % dirs["web"],
            'echo "To see the trace, please open the following link in the console"',
            'for f in *.zip; do echo "npx playwright show-trace $MC_HOST/$PUBLIC_BUCKET/web/tracing/$CI_REPO_NAME/$CI_PIPELINE_NUMBER/$f \n"; done',
            'for f in *.zip; do echo "npx playwright show-trace $MC_PUBLIC_HOST/$PUBLIC_BUCKET/web/tracing/$CI_REPO_NAME/$CI_PIPELINE_NUMBER/$f \n"; done',
        ],
        "when": {
            "status": status,
@@ -2324,7 +2312,7 @@ def notifyMatrix(ctx):

    return result

def opencloudServer(storage = "decomposed", accounts_hash_difficulty = 4, depends_on = [], deploy_type = "", extra_server_environment = {}, with_wrapper = False, tika_enabled = False, watch_fs_enabled = False):
def opencloudServer(storage = "decomposed", depends_on = [], deploy_type = "", extra_server_environment = {}, with_wrapper = False, tika_enabled = False, watch_fs_enabled = False):
    user = "0:0"
    container_name = OC_SERVER_NAME
    environment = {
@@ -2420,13 +2408,6 @@ def opencloudServer(storage = "decomposed", accounts_hash_difficulty = 4, depend
    if watch_fs_enabled:
        environment["STORAGE_USERS_POSIX_WATCH_FS"] = True

    # Pass in "default" accounts_hash_difficulty to not set this environment variable.
    # That will allow OpenCloud to use whatever its built-in default is.
    # Otherwise pass in a value from 4 to about 11 or 12 (default 4, for making regular tests fast)
    # The high values cause lots of CPU to be used when hashing passwords, and really slow down the tests.
    if accounts_hash_difficulty != "default":
        environment["ACCOUNTS_HASH_DIFFICULTY"] = accounts_hash_difficulty

    for item in extra_server_environment:
        environment[item] = extra_server_environment[item]

go.mod (8 lines changed)
@@ -57,11 +57,11 @@ require (
	github.com/nats-io/nats-server/v2 v2.12.3
	github.com/nats-io/nats.go v1.48.0
	github.com/oklog/run v1.2.0
	github.com/olekukonko/tablewriter v1.1.3
	github.com/olekukonko/tablewriter v1.1.2
	github.com/onsi/ginkgo v1.16.5
	github.com/onsi/ginkgo/v2 v2.27.5
	github.com/onsi/gomega v1.39.0
	github.com/open-policy-agent/opa v1.11.1
	github.com/open-policy-agent/opa v1.12.3
	github.com/opencloud-eu/icap-client v0.0.0-20250930132611-28a2afe62d89
	github.com/opencloud-eu/libre-graph-api-go v1.0.8-0.20250724122329-41ba6b191e76
	github.com/opencloud-eu/reva/v2 v2.41.1-0.20260107152322-93760b632993
@@ -165,7 +165,7 @@ require (
	github.com/ceph/go-ceph v0.37.0 // indirect
	github.com/cespare/xxhash/v2 v2.3.0 // indirect
	github.com/cevaris/ordered_map v0.0.0-20190319150403-3adeae072e73 // indirect
	github.com/clipperhouse/displaywidth v0.6.2 // indirect
	github.com/clipperhouse/displaywidth v0.6.0 // indirect
	github.com/clipperhouse/stringish v0.1.1 // indirect
	github.com/clipperhouse/uax29/v2 v2.3.0 // indirect
	github.com/cloudflare/circl v1.6.1 // indirect
@@ -312,7 +312,7 @@ require (
	github.com/nxadm/tail v1.4.8 // indirect
	github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 // indirect
	github.com/olekukonko/errors v1.1.0 // indirect
	github.com/olekukonko/ll v0.1.4-0.20260115111900-9e59c2286df0 // indirect
	github.com/olekukonko/ll v0.1.3 // indirect
	github.com/opencontainers/go-digest v1.0.0 // indirect
	github.com/opencontainers/image-spec v1.1.1 // indirect
	github.com/opentracing/opentracing-go v1.2.0 // indirect

go.sum (16 lines changed)
@@ -223,8 +223,8 @@ github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWR
|
||||
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
|
||||
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/clipperhouse/displaywidth v0.6.2 h1:ZDpTkFfpHOKte4RG5O/BOyf3ysnvFswpyYrV7z2uAKo=
|
||||
github.com/clipperhouse/displaywidth v0.6.2/go.mod h1:R+kHuzaYWFkTm7xoMmK1lFydbci4X2CicfbGstSGg0o=
|
||||
github.com/clipperhouse/displaywidth v0.6.0 h1:k32vueaksef9WIKCNcoqRNyKbyvkvkysNYnAWz2fN4s=
|
||||
github.com/clipperhouse/displaywidth v0.6.0/go.mod h1:R+kHuzaYWFkTm7xoMmK1lFydbci4X2CicfbGstSGg0o=
|
||||
github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs=
|
||||
github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA=
|
||||
github.com/clipperhouse/uax29/v2 v2.3.0 h1:SNdx9DVUqMoBuBoW3iLOj4FQv3dN5mDtuqwuhIGpJy4=
|
||||
@@ -940,11 +940,11 @@ github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 h1:zrbMGy9YXpIeTnGj
|
||||
github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6/go.mod h1:rEKTHC9roVVicUIfZK7DYrdIoM0EOr8mK1Hj5s3JjH0=
|
||||
github.com/olekukonko/errors v1.1.0 h1:RNuGIh15QdDenh+hNvKrJkmxxjV4hcS50Db478Ou5sM=
|
||||
github.com/olekukonko/errors v1.1.0/go.mod h1:ppzxA5jBKcO1vIpCXQ9ZqgDh8iwODz6OXIGKU8r5m4Y=
|
||||
github.com/olekukonko/ll v0.1.4-0.20260115111900-9e59c2286df0 h1:jrYnow5+hy3WRDCBypUFvVKNSPPCdqgSXIE9eJDD8LM=
|
||||
github.com/olekukonko/ll v0.1.4-0.20260115111900-9e59c2286df0/go.mod h1:b52bVQRRPObe+yyBl0TxNfhesL0nedD4Cht0/zx55Ew=
|
||||
github.com/olekukonko/ll v0.1.3 h1:sV2jrhQGq5B3W0nENUISCR6azIPf7UBUpVq0x/y70Fg=
|
||||
github.com/olekukonko/ll v0.1.3/go.mod h1:b52bVQRRPObe+yyBl0TxNfhesL0nedD4Cht0/zx55Ew=
|
||||
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
|
||||
github.com/olekukonko/tablewriter v1.1.3 h1:VSHhghXxrP0JHl+0NnKid7WoEmd9/urKRJLysb70nnA=
|
||||
github.com/olekukonko/tablewriter v1.1.3/go.mod h1:9VU0knjhmMkXjnMKrZ3+L2JhhtsQ/L38BbL3CRNE8tM=
|
||||
github.com/olekukonko/tablewriter v1.1.2 h1:L2kI1Y5tZBct/O/TyZK1zIE9GlBj/TVs+AY5tZDCDSc=
|
||||
github.com/olekukonko/tablewriter v1.1.2/go.mod h1:z7SYPugVqGVavWoA2sGsFIoOVNmEHxUAAMrhXONtfkg=
|
||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
|
||||
@@ -957,8 +957,8 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J
|
||||
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
|
||||
github.com/onsi/gomega v1.39.0 h1:y2ROC3hKFmQZJNFeGAMeHZKkjBL65mIZcvrLQBF9k6Q=
|
||||
github.com/onsi/gomega v1.39.0/go.mod h1:ZCU1pkQcXDO5Sl9/VVEGlDyp+zm0m1cmeG5TOzLgdh4=
|
||||
github.com/open-policy-agent/opa v1.11.1 h1:4bMlG6DjRZTRAswRyF+KUCgxHu1Gsk0h9EbZ4W9REvM=
|
||||
github.com/open-policy-agent/opa v1.11.1/go.mod h1:QimuJO4T3KYxWzrmAymqlFvsIanCjKrGjmmC8GgAdgE=
|
||||
github.com/open-policy-agent/opa v1.12.3 h1:qe3m/w52baKC/HJtippw+hYBUKCzuBCPjB+D5P9knfc=
|
||||
github.com/open-policy-agent/opa v1.12.3/go.mod h1:RnDgm04GA1RjEXJvrsG9uNT/+FyBNmozcPvA2qz60M4=
|
||||
github.com/opencloud-eu/go-micro-plugins/v4/store/nats-js-kv v0.0.0-20250512152754-23325793059a h1:Sakl76blJAaM6NxylVkgSzktjo2dS504iDotEFJsh3M=
|
||||
github.com/opencloud-eu/go-micro-plugins/v4/store/nats-js-kv v0.0.0-20250512152754-23325793059a/go.mod h1:pjcozWijkNPbEtX5SIQaxEW/h8VAVZYTLx+70bmB3LY=
|
||||
github.com/opencloud-eu/icap-client v0.0.0-20250930132611-28a2afe62d89 h1:W1ms+lP5lUUIzjRGDg93WrQfZJZCaV1ZP3KeyXi8bzY=
|
||||
|
||||
@@ -466,15 +466,6 @@ For antivirus running locally on Linux OS, use `ANTIVIRUS_CLAMAV_SOCKET= "/var/ru
For antivirus running locally on Mac OS, use `ANTIVIRUS_CLAMAV_SOCKET= "/tmp/clamd.sock"`.
For antivirus running with docker, use `ANTIVIRUS_CLAMAV_SOCKET= "tcp://host.docker.internal:3310"`.
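
As a rough sketch (not part of the original docs), the same settings that the CI config above passes via `extraServerEnvironment` can be exported before starting a local server; pick whichever socket value matches your setup:

```bash
# Example only: enable clamav scanning for a locally started OpenCloud server.
# ANTIVIRUS_CLAMAV_SOCKET must match how clamd is actually reachable on your machine.
export ANTIVIRUS_SCANNER_TYPE="clamav"
export ANTIVIRUS_CLAMAV_SOCKET="tcp://host.docker.internal:3310"
```
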
#### Create Virus Files

The antivirus tests require EICAR test files, which are not stored in the repository.
They are generated dynamically when needed for testing.

```bash
tests/acceptance/scripts/generate-virus-files.sh
```
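
In essence, the generation step only fetches the standard EICAR samples into the upload fixtures; a minimal sketch of what such a script does (mirroring the `generate-virus-files.sh` shown further down in this diff, with the target path assumed to be relative to the script):

```bash
# Sketch: download the EICAR test files into tests/acceptance/filesForUpload/filesWithVirus
set -e
TARGET_DIR="$(dirname "$0")/../filesForUpload/filesWithVirus"
mkdir -p "$TARGET_DIR"
curl -s -o "$TARGET_DIR/eicar.com" https://secure.eicar.org/eicar.com
curl -s -o "$TARGET_DIR/eicar_com.zip" https://secure.eicar.org/eicar_com.zip
```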

#### Run the Acceptance Test

Run the acceptance test with the following command:

@@ -62,7 +62,7 @@ class HttpRequestHelper {

    /**
     *
     * @param string|null $url
     * @param string $url
     * @param string|null $xRequestId
     * @param string|null $method
     * @param string|null $user
@@ -80,8 +80,8 @@ class HttpRequestHelper {
     * @throws GuzzleException
     */
    public static function sendRequestOnce(
        ?string $url,
        ?string $xRequestId,
        string $url,
        ?string $xRequestId = null,
        ?string $method = 'GET',
        ?string $user = null,
        ?string $password = null,

@@ -195,13 +195,20 @@ class UploadHelper extends Assert {
    }

    /**
     * get the path of a file from FilesForUpload directory
     *
     * @param string|null $name name of the file to upload
     * get the path of the acceptance tests directory
     *
     * @return string
     */
    public static function getUploadFilesDir(?string $name): string {
        return \getenv("FILES_FOR_UPLOAD") . $name;
    public static function getAcceptanceTestsDir(): string {
        return \dirname(__FILE__) . "/../";
    }

    /**
     * get the path of the filesForUpload directory
     *
     * @return string
     */
    public static function getFilesForUploadDir(): string {
        return \dirname(__FILE__) . "/../filesForUpload/";
    }
}

@@ -25,6 +25,7 @@ use PHPUnit\Framework\Assert;
use Psr\Http\Message\ResponseInterface;
use TestHelpers\WebDavHelper;
use TestHelpers\BehatHelper;
use TestHelpers\UploadHelper;

require_once 'bootstrap.php';

@@ -49,7 +50,7 @@ class ChecksumContext implements Context {
        string $checksum
    ): ResponseInterface {
        $file = \file_get_contents(
            $this->featureContext->acceptanceTestsDirLocation() . $source
            UploadHelper::getAcceptanceTestsDir() . $source
        );
        return $this->featureContext->makeDavRequest(
            $user,

@@ -43,6 +43,7 @@ use TestHelpers\WebDavHelper;
use TestHelpers\SettingsHelper;
use TestHelpers\OcConfigHelper;
use TestHelpers\BehatHelper;
use TestHelpers\UploadHelper;
use Swaggest\JsonSchema\InvalidValue as JsonSchemaException;
use Swaggest\JsonSchema\Exception\ArrayException;
use Swaggest\JsonSchema\Exception\ConstException;
@@ -562,6 +563,38 @@ class FeatureContext extends BehatVariablesContext {
        }
    }

    /**
     * @BeforeScenario @antivirus
     *
     * @return void
     * @throws Exception
     */
    public function createTestVirusFiles(): void {
        $uploadDir = UploadHelper::getFilesForUploadDir() . 'filesWithVirus/';
        $virusFile = $uploadDir . 'eicar.com';
        $virusZipFile = $uploadDir . 'eicar_com.zip';

        if (file_exists($virusFile) && file_exists($virusZipFile)) {
            return;
        }

        if (!is_dir($uploadDir)) {
            mkdir($uploadDir, 0755);
        }

        $res1 = HttpRequestHelper::sendRequestOnce('https://secure.eicar.org/eicar.com');
        if ($res1->getStatusCode() !== 200) {
            throw new Exception("Could not download eicar.com test virus file");
        }
        file_put_contents($virusFile, $res1->getBody()->getContents());

        $res2 = HttpRequestHelper::sendRequestOnce('https://secure.eicar.org/eicar_com.zip');
        file_put_contents($virusZipFile, $res2->getBody()->getContents());
        if ($res2->getStatusCode() !== 200) {
            throw new Exception("Could not download eicar_com.zip test virus file");
        }
    }

    /**
     *
     * @BeforeScenario
@@ -2596,18 +2629,11 @@ class FeatureContext extends BehatVariablesContext {
        return "work_tmp";
    }

    /**
     * @return string
     */
    public function acceptanceTestsDirLocation(): string {
        return \dirname(__FILE__) . "/../";
    }

    /**
     * @return string
     */
    public function workStorageDirLocation(): string {
        return $this->acceptanceTestsDirLocation() . $this->temporaryStorageSubfolderName() . "/";
        return UploadHelper::getAcceptanceTestsDir() . $this->temporaryStorageSubfolderName() . "/";
    }

    /**
@@ -26,6 +26,7 @@ use Psr\Http\Message\ResponseInterface;
|
||||
use TestHelpers\HttpRequestHelper;
|
||||
use TestHelpers\WebDavHelper;
|
||||
use TestHelpers\BehatHelper;
|
||||
use TestHelpers\UploadHelper;
|
||||
|
||||
require_once 'bootstrap.php';
|
||||
|
||||
@@ -862,7 +863,7 @@ class PublicWebDavContext implements Context {
|
||||
string $destination,
|
||||
): void {
|
||||
$content = \file_get_contents(
|
||||
$this->featureContext->acceptanceTestsDirLocation() . $source
|
||||
UploadHelper::getAcceptanceTestsDir() . $source
|
||||
);
|
||||
$response = $this->publicUploadContent(
|
||||
$destination,
|
||||
@@ -888,7 +889,7 @@ class PublicWebDavContext implements Context {
|
||||
string $password
|
||||
): void {
|
||||
$content = \file_get_contents(
|
||||
$this->featureContext->acceptanceTestsDirLocation() . $source
|
||||
UploadHelper::getAcceptanceTestsDir() . $source
|
||||
);
|
||||
$response = $this->publicUploadContent(
|
||||
$destination,
|
||||
|
||||
@@ -32,6 +32,7 @@ use Psr\Http\Message\ResponseInterface;
|
||||
use TestHelpers\HttpRequestHelper;
|
||||
use TestHelpers\WebDavHelper;
|
||||
use TestHelpers\BehatHelper;
|
||||
use TestHelpers\UploadHelper;
|
||||
|
||||
require_once 'bootstrap.php';
|
||||
|
||||
@@ -364,7 +365,7 @@ class TUSContext implements Context {
|
||||
$client->setChecksumAlgorithm('sha1');
|
||||
$client->setApiPath(WebDavHelper::getDavPath($davPathVersion, $suffixPath));
|
||||
$client->setMetadata($uploadMetadata);
|
||||
$sourceFile = $this->featureContext->acceptanceTestsDirLocation() . $source;
|
||||
$sourceFile = UploadHelper::getAcceptanceTestsDir() . $source;
|
||||
$client->setKey((string)rand())->file($sourceFile, $destination);
|
||||
$this->featureContext->pauseUploadDelete();
|
||||
|
||||
@@ -518,7 +519,7 @@ class TUSContext implements Context {
|
||||
*/
|
||||
public function writeDataToTempFile(string $content): string {
|
||||
$temporaryFileName = \tempnam(
|
||||
$this->featureContext->acceptanceTestsDirLocation(),
|
||||
UploadHelper::getAcceptanceTestsDir(),
|
||||
"tus-upload-test-"
|
||||
);
|
||||
if ($temporaryFileName === false) {
|
||||
|
||||
@@ -1648,7 +1648,7 @@ trait WebDav {
|
||||
?bool $isGivenStep = false
|
||||
): ResponseInterface {
|
||||
$user = $this->getActualUsername($user);
|
||||
$file = \fopen($this->acceptanceTestsDirLocation() . $source, 'r');
|
||||
$file = \fopen(UploadHelper::getAcceptanceTestsDir() . $source, 'r');
|
||||
$this->pauseUploadDelete();
|
||||
$response = $this->makeDavRequest(
|
||||
$user,
|
||||
@@ -1781,7 +1781,7 @@ trait WebDav {
|
||||
}
|
||||
return $this->uploadFileWithHeaders(
|
||||
$user,
|
||||
$this->acceptanceTestsDirLocation() . $source,
|
||||
UploadHelper::getAcceptanceTestsDir() . $source,
|
||||
$destination,
|
||||
$headers,
|
||||
$noOfChunks
|
||||
@@ -2222,7 +2222,7 @@ trait WebDav {
|
||||
$this->getBaseUrl(),
|
||||
$user,
|
||||
$this->getPasswordForUser($user),
|
||||
$this->acceptanceTestsDirLocation() . $source,
|
||||
UploadHelper::getAcceptanceTestsDir() . $source,
|
||||
$destination,
|
||||
$this->getStepLineRef(),
|
||||
["X-OC-Mtime" => $mtime],
|
||||
@@ -2257,7 +2257,7 @@ trait WebDav {
|
||||
$this->getBaseUrl(),
|
||||
$user,
|
||||
$this->getPasswordForUser($user),
|
||||
$this->acceptanceTestsDirLocation() . $source,
|
||||
UploadHelper::getAcceptanceTestsDir() . $source,
|
||||
$destination,
|
||||
$this->getStepLineRef(),
|
||||
["X-OC-Mtime" => $mtime],
|
||||
|
||||
@@ -3,9 +3,9 @@ default:
|
||||
"": "%paths.base%/../bootstrap"
|
||||
|
||||
suites:
|
||||
apiAccountsHashDifficulty:
|
||||
apiSpaces:
|
||||
paths:
|
||||
- "%paths.base%/../features/apiAccountsHashDifficulty"
|
||||
- "%paths.base%/../features/apiSpaces"
|
||||
context: &common_ldap_suite_context
|
||||
parameters:
|
||||
ldapAdminPassword: admin
|
||||
@@ -18,21 +18,6 @@ default:
|
||||
adminPassword: admin
|
||||
regularUserPassword: 123456
|
||||
- SettingsContext:
|
||||
- GraphContext:
|
||||
- SpacesContext:
|
||||
- CapabilitiesContext:
|
||||
- FilesVersionsContext:
|
||||
- NotificationContext:
|
||||
- OCSContext:
|
||||
- PublicWebDavContext:
|
||||
|
||||
apiSpaces:
|
||||
paths:
|
||||
- "%paths.base%/../features/apiSpaces"
|
||||
context: *common_ldap_suite_context
|
||||
contexts:
|
||||
- FeatureContext: *common_feature_context_params
|
||||
- SettingsContext:
|
||||
- SpacesContext:
|
||||
- CapabilitiesContext:
|
||||
- FilesVersionsContext:
|
||||
@@ -442,7 +427,7 @@ default:
|
||||
- AuthAppContext:
|
||||
- CliContext:
|
||||
- OcConfigContext:
|
||||
|
||||
|
||||
apiTenancy:
|
||||
paths:
|
||||
- "%paths.base%/../features/apiTenancy"
|
||||
|
||||
@@ -14,7 +14,6 @@ services:
|
||||
SHARING_USER_JSON_FILE: /srv/app/tmp/opencloud/shares.json
|
||||
PROXY_ENABLE_BASIC_AUTH: "true"
|
||||
WEB_UI_CONFIG_FILE: /woodpecker/src/github.com/opencloud-eu/opencloud/tests/config/woodpecker/opencloud-config.json
|
||||
ACCOUNTS_HASH_DIFFICULTY: 4
|
||||
OC_INSECURE: "true"
|
||||
IDM_CREATE_DEMO_USERS: "true"
|
||||
IDM_ADMIN_PASSWORD: "admin"
|
||||
|
||||
@@ -19,8 +19,6 @@
|
||||
|
||||
#### [Settings service user can list other peoples assignments](https://github.com/owncloud/ocis/issues/5032)
|
||||
|
||||
- [apiAccountsHashDifficulty/assignRole.feature:27](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiAccountsHashDifficulty/assignRole.feature#L27)
|
||||
- [apiAccountsHashDifficulty/assignRole.feature:28](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiAccountsHashDifficulty/assignRole.feature#L28)
|
||||
- [apiGraph/getAssignedRole.feature:31](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiGraph/getAssignedRole.feature#L31)
|
||||
- [apiGraph/getAssignedRole.feature:32](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiGraph/getAssignedRole.feature#L32)
|
||||
- [apiGraph/getAssignedRole.feature:33](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiGraph/getAssignedRole.feature#L33)
|
||||
|
||||
@@ -19,8 +19,6 @@
|
||||
|
||||
#### [Settings service user can list other peoples assignments](https://github.com/owncloud/ocis/issues/5032)
|
||||
|
||||
- [apiAccountsHashDifficulty/assignRole.feature:27](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiAccountsHashDifficulty/assignRole.feature#L27)
|
||||
- [apiAccountsHashDifficulty/assignRole.feature:28](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiAccountsHashDifficulty/assignRole.feature#L28)
|
||||
- [apiGraph/getAssignedRole.feature:31](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiGraph/getAssignedRole.feature#L31)
|
||||
- [apiGraph/getAssignedRole.feature:32](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiGraph/getAssignedRole.feature#L32)
|
||||
- [apiGraph/getAssignedRole.feature:33](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiGraph/getAssignedRole.feature#L33)
|
||||
|
||||
@@ -201,9 +201,9 @@
|
||||
- [apiAntivirus/antivirus.feature:143](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiAntivirus/antivirus.feature#L143)
|
||||
- [apiAntivirus/antivirus.feature:144](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiAntivirus/antivirus.feature#L144)
|
||||
- [apiAntivirus/antivirus.feature:145](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiAntivirus/antivirus.feature#L145)
|
||||
- [apiAntivirus/antivirus.feature:356](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiAntivirus/antivirus.feature#L356)
|
||||
- [apiAntivirus/antivirus.feature:357](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiAntivirus/antivirus.feature#L357)
|
||||
- [apiAntivirus/antivirus.feature:358](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiAntivirus/antivirus.feature#L358)
|
||||
- [apiAntivirus/antivirus.feature:359](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiAntivirus/antivirus.feature#L359)
|
||||
- [apiCollaboration/wopi.feature:956](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiCollaboration/wopi.feature#L956)
|
||||
- [apiCollaboration/wopi.feature:957](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiCollaboration/wopi.feature#L957)
|
||||
- [apiCollaboration/wopi.feature:958](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/apiCollaboration/wopi.feature#L958)
|
||||
@@ -320,7 +320,6 @@
|
||||
- [coreApiWebdavUploadTUS/uploadFile.feature:122](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/coreApiWebdavUploadTUS/uploadFile.feature#L122)
|
||||
- [coreApiWebdavUploadTUS/uploadFile.feature:133](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/coreApiWebdavUploadTUS/uploadFile.feature#L133)
|
||||
- [coreApiWebdavUploadTUS/uploadFile.feature:146](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/coreApiWebdavUploadTUS/uploadFile.feature#L146)
|
||||
- [coreApiWebdavUploadTUS/uploadFile.feature:168](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/coreApiWebdavUploadTUS/uploadFile.feature#L168)
|
||||
- [coreApiWebdavUploadTUS/uploadFile.feature:187](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/coreApiWebdavUploadTUS/uploadFile.feature#L187)
|
||||
- [coreApiWebdavUploadTUS/uploadFile.feature:199](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/coreApiWebdavUploadTUS/uploadFile.feature#L199)
|
||||
- [coreApiWebdavUploadTUS/uploadFile.feature:212](https://github.com/opencloud-eu/opencloud/blob/main/tests/acceptance/features/coreApiWebdavUploadTUS/uploadFile.feature#L212)
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
@skipOnReva
|
||||
Feature: add user
|
||||
As an admin
|
||||
I want to be able to add users and store their password with the full hash difficulty
|
||||
So that I can give people controlled individual access to resources on the OpenCloud server
|
||||
|
||||
|
||||
Scenario: admin creates a user
|
||||
When the user "Admin" creates a new user with the following attributes using the Graph API:
|
||||
| userName | brand-new-user |
|
||||
| displayName | Brand New User |
|
||||
| email | new@example.org |
|
||||
| password | %alt1% |
|
||||
Then the HTTP status code should be "201"
|
||||
And user "brand-new-user" should exist
|
||||
And user "brand-new-user" should be able to upload file "filesForUpload/lorem.txt" to "lorem.txt"
|
||||
@@ -1,64 +0,0 @@
|
||||
Feature: assign role
|
||||
As an admin,
|
||||
I want to assign roles to users
|
||||
So that I can provide them different authority
|
||||
|
||||
|
||||
Scenario Outline: only admin user can see all existing roles
|
||||
Given user "Alice" has been created with default attributes
|
||||
And the administrator has given "Alice" the role "<user-role>" using the settings api
|
||||
When user "Alice" tries to get all existing roles using the settings API
|
||||
Then the HTTP status code should be "<http-status-code>"
|
||||
Examples:
|
||||
| user-role | http-status-code |
|
||||
| Admin | 201 |
|
||||
| Space Admin | 201 |
|
||||
| User | 201 |
|
||||
|
||||
@issue-5032
|
||||
Scenario Outline: only admin user can see assignments list
|
||||
Given user "Alice" has been created with default attributes
|
||||
And the administrator has given "Alice" the role "<user-role>" using the settings api
|
||||
When user "Alice" tries to get list of assignment using the settings API
|
||||
Then the HTTP status code should be "<http-status-code>"
|
||||
Examples:
|
||||
| user-role | http-status-code |
|
||||
| Admin | 201 |
|
||||
| Space Admin | 401 |
|
||||
| User | 401 |
|
||||
|
||||
|
||||
Scenario Outline: a user cannot change own role
|
||||
Given user "Alice" has been created with default attributes
|
||||
And the administrator has given "Alice" the role "<user-role>" using the settings api
|
||||
When user "Alice" changes his own role to "<desired-role>"
|
||||
Then the HTTP status code should be "400"
|
||||
And user "Alice" should have the role "<user-role>"
|
||||
Examples:
|
||||
| user-role | desired-role |
|
||||
| Admin | User |
|
||||
| Admin | Space Admin |
|
||||
| Space Admin | Admin |
|
||||
| Space Admin | Space Admin |
|
||||
| User | Admin |
|
||||
| User | Space Admin |
|
||||
|
||||
|
||||
Scenario Outline: only admin user can change the role for another user
|
||||
Given these users have been created with default attributes:
|
||||
| username |
|
||||
| Alice |
|
||||
| Brian |
|
||||
And the administrator has given "Alice" the role "<user-role>" using the settings api
|
||||
When user "Alice" changes the role "<desired-role>" for user "Brian"
|
||||
Then the HTTP status code should be "<http-status-code>"
|
||||
And user "Brian" should have the role "<expected-role>"
|
||||
Examples:
|
||||
| user-role | desired-role | http-status-code | expected-role |
|
||||
| Admin | User | 201 | User |
|
||||
| Admin | Space Admin | 201 | Space Admin |
|
||||
| Admin | Admin | 201 | Admin |
|
||||
| Space Admin | Admin | 400 | User |
|
||||
| Space Admin | Space Admin | 400 | User |
|
||||
| User | Admin | 400 | User |
|
||||
| User | Space Admin | 400 | User |
|
||||
@@ -1,18 +0,0 @@
|
||||
@skipOnReva
|
||||
Feature: sharing
|
||||
As a user
|
||||
I want to be able to share files when passwords are stored with the full hash difficulty
|
||||
So that I can give people secure controlled access to my data
|
||||
|
||||
|
||||
Scenario Outline: creating a share of a file with a user
|
||||
Given using OCS API version "<ocs-api-version>"
|
||||
And user "Alice" has been created with default attributes
|
||||
And user "Alice" has uploaded file with content "OpenCloud test text file 0" to "/textfile0.txt"
|
||||
And user "Brian" has been created with default attributes
|
||||
When user "Alice" shares file "textfile0.txt" with user "Brian" using the sharing API
|
||||
And the content of file "/Shares/textfile0.txt" for user "Brian" should be "OpenCloud test text file 0"
|
||||
Examples:
|
||||
| ocs-api-version |
|
||||
| 1 |
|
||||
| 2 |
|
||||
@@ -1,21 +0,0 @@
|
||||
@skipOnReva
|
||||
Feature: upload file
|
||||
As a user
|
||||
I want to be able to upload files when passwords are stored with the full hash difficulty
|
||||
So that I can store and share files securely between multiple client systems
|
||||
|
||||
|
||||
Scenario Outline: upload a file and check download content
|
||||
Given using OCS API version "<ocs-api-version>"
|
||||
And user "Alice" has been created with default attributes
|
||||
And using <dav-path-version> DAV path
|
||||
When user "Alice" uploads file with content "uploaded content" to "/upload.txt" using the WebDAV API
|
||||
Then the content of file "/upload.txt" for user "Alice" should be "uploaded content"
|
||||
Examples:
|
||||
| ocs-api-version | dav-path-version |
|
||||
| 1 | old |
|
||||
| 1 | new |
|
||||
| 1 | spaces |
|
||||
| 2 | old |
|
||||
| 2 | new |
|
||||
| 2 | spaces |
|
||||
@@ -1,31 +0,0 @@
|
||||
@skipOnReva
|
||||
Feature: attempt to PUT files with invalid password
|
||||
As an admin
|
||||
I want the system to be secure when passwords are stored with the full hash difficulty
|
||||
So that unauthorised users do not have access to data
|
||||
|
||||
Background:
|
||||
Given user "Alice" has been created with default attributes
|
||||
And user "Alice" has created folder "/PARENT"
|
||||
|
||||
|
||||
Scenario: send PUT requests to webDav endpoints as normal user with wrong password
|
||||
When user "Alice" requests these endpoints with "PUT" including body "doesnotmatter" using password "invalid" about user "Alice"
|
||||
| endpoint |
|
||||
| /webdav/textfile0.txt |
|
||||
| /dav/files/%username%/textfile0.txt |
|
||||
| /webdav/PARENT |
|
||||
| /dav/files/%username%/PARENT |
|
||||
| /dav/files/%username%/PARENT/parent.txt |
|
||||
Then the HTTP status code of responses on all endpoints should be "401"
|
||||
|
||||
|
||||
Scenario: send PUT requests to webDav endpoints as normal user with no password
|
||||
When user "Alice" requests these endpoints with "PUT" including body "doesnotmatter" using password "" about user "Alice"
|
||||
| endpoint |
|
||||
| /webdav/textfile0.txt |
|
||||
| /dav/files/%username%/textfile0.txt |
|
||||
| /webdav/PARENT |
|
||||
| /dav/files/%username%/PARENT |
|
||||
| /dav/files/%username%/PARENT/parent.txt |
|
||||
Then the HTTP status code of responses on all endpoints should be "401"
|
||||
@@ -21,7 +21,7 @@ Feature: List upload sessions via CLI command
|
||||
And the CLI response should not contain these entries:
|
||||
| file0.txt |
|
||||
|
||||
|
||||
@antivirus
|
||||
Scenario: list all upload sessions that are currently in postprocessing
|
||||
Given the following configs have been set:
|
||||
| config | value |
|
||||
@@ -39,7 +39,7 @@ Feature: List upload sessions via CLI command
|
||||
And the CLI response should not contain these entries:
|
||||
| virusFile.txt |
|
||||
|
||||
|
||||
@antivirus
|
||||
Scenario: list all upload sessions that are infected by virus
|
||||
Given the following configs have been set:
|
||||
| config | value |
|
||||
@@ -109,7 +109,7 @@ Feature: List upload sessions via CLI command
|
||||
And the CLI response should not contain these entries:
|
||||
| file2.txt |
|
||||
|
||||
|
||||
@antivirus
|
||||
Scenario: clean all upload sessions that are not in post-processing
|
||||
Given the following configs have been set:
|
||||
| config | value |
|
||||
@@ -126,7 +126,7 @@ Feature: List upload sessions via CLI command
|
||||
And the CLI response should not contain these entries:
|
||||
| file1.txt |
|
||||
|
||||
|
||||
@antivirus
|
||||
Scenario: clean upload sessions that are not in post-processing and is not virus infected
|
||||
Given the following configs have been set:
|
||||
| config | value |
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
#!/bin/bash
|
||||
# tests/acceptance/scripts/generate-virus-files.sh
|
||||
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
TARGET_DIR="$SCRIPT_DIR/../filesForUpload/filesWithVirus"
|
||||
|
||||
echo "Generating EICAR test files..."
|
||||
|
||||
mkdir -p "$TARGET_DIR"
|
||||
|
||||
cd "$TARGET_DIR"
|
||||
|
||||
echo "Downloading eicar.com..."
|
||||
curl -s -o eicar.com https://secure.eicar.org/eicar.com
|
||||
|
||||
echo "Downloading eicar_com.zip..."
|
||||
curl -s -o eicar_com.zip https://secure.eicar.org/eicar_com.zip
|
||||
@@ -547,7 +547,6 @@ fi
|
||||
|
||||
export IPV4_URL
|
||||
export IPV6_URL
|
||||
export FILES_FOR_UPLOAD="${SCRIPT_PATH}/filesForUpload/"
|
||||
|
||||
TEST_LOG_FILE=$(mktemp)
|
||||
SCENARIOS_THAT_PASSED=0
|
||||
|
||||
@@ -64,7 +64,6 @@ SUITES=(
|
||||
"apiSharingNgShareInvitation"
|
||||
"apiSharingNgLinkSharePermission"
|
||||
"apiSharingNgLinkShareRoot"
|
||||
"apiAccountsHashDifficulty"
|
||||
"apiSearchContent"
|
||||
"apiNotification"
|
||||
)
|
||||
|
||||
vendor/github.com/clipperhouse/displaywidth/CHANGELOG.md (generated, vendored, 10 lines changed)
@@ -1,15 +1,5 @@
# Changelog

## [0.6.1]

[Compare](https://github.com/clipperhouse/displaywidth/compare/v0.6.0...v0.6.1)

### Changed
- Perf improvements: replaced the ASCII lookup table with a simple
  function. A bit more cache-friendly. More inlining.
- Bug fix: single regional indicators are now treated as width 2, since that
  is what actual terminals do.

## [0.6.0]

[Compare](https://github.com/clipperhouse/displaywidth/compare/v0.5.0...v0.6.0)

vendor/github.com/clipperhouse/displaywidth/README.md (generated, vendored, 114 lines changed)
@@ -33,82 +33,42 @@ func main() {
}
```

For most purposes, you should use the `String` or `Bytes` methods. They sum
the widths of grapheme clusters in the string or byte slice.
For most purposes, you should use the `String` or `Bytes` methods.

> Note: in your application, iterating over runes to measure width is likely incorrect;
the smallest unit of display is a grapheme, not a rune.

### Iterating over graphemes

If you need the individual graphemes:

```go
import (
	"fmt"
	"github.com/clipperhouse/displaywidth"
)

func main() {
	g := displaywidth.StringGraphemes("Hello, 世界!")
	for g.Next() {
		width := g.Width()
		value := g.Value()
		// do something with the width or value
	}
}
```

### Options

There is one option, `displaywidth.Options.EastAsianWidth`, which defines
how [East Asian Ambiguous characters](https://www.unicode.org/reports/tr11/#Ambiguous)
are treated.

When `false` (default), East Asian Ambiguous characters are treated as width 1.
When `true`, they are treated as width 2.

You may wish to configure this based on environment variables or locale.
`go-runewidth`, for example, does so
[during package initialization](https://github.com/mattn/go-runewidth/blob/master/runewidth.go#L26C1-L45C2).

`displaywidth` does not do this automatically, we prefer to leave it to you.
You might do something like:
You can specify East Asian Width settings. When false (default),
[East Asian Ambiguous characters](https://www.unicode.org/reports/tr11/#Ambiguous)
are treated as width 1. When true, East Asian Ambiguous characters are treated
as width 2.

```go
var width displaywidth.Options // zero value is default

func init() {
	if os.Getenv("EAST_ASIAN_WIDTH") == "true" {
		width = displaywidth.Options{EastAsianWidth: true}
	}
	// or check locale, or any other logic you want
myOptions := displaywidth.Options{
	EastAsianWidth: true,
}

// use it in your logic
func myApp() {
	fmt.Println(width.String("Hello, 世界!"))
}
width := myOptions.String("Hello, 世界!")
fmt.Println(width)
```

## Technical standards and compatibility
## Technical details

This package implements the Unicode East Asian Width standard
([UAX #11](https://www.unicode.org/reports/tr11/tr11-43.html)), and handles
([UAX #11](https://www.unicode.org/reports/tr11/)), and handles
[version selectors](https://en.wikipedia.org/wiki/Variation_Selectors_(Unicode_block)),
and [regional indicator pairs](https://en.wikipedia.org/wiki/Regional_indicator_symbol)
(flags). We implement [Unicode TR51](https://www.unicode.org/reports/tr51/tr51-27.html). We are keeping
an eye on [emerging standards](https://www.jeffquast.com/post/state-of-terminal-emulation-2025/).

(flags). We implement [Unicode TR51](https://unicode.org/reports/tr51/).

`clipperhouse/displaywidth`, `mattn/go-runewidth`, and `rivo/uniseg` will
give the same outputs for most real-world text. Extensive details are in the
give the same outputs for most real-world text. See extensive details in the
[compatibility analysis](comparison/COMPATIBILITY_ANALYSIS.md).

If you wish to investigate the core logic, see the `lookupProperties` and `width`
functions in [width.go](width.go#L139). The essential trie generation logic is in
`buildPropertyBitmap` in [unicode.go](internal/gen/unicode.go#L316).
functions in [width.go](width.go#L135). The essential trie generation logic is in
`buildPropertyBitmap` in [unicode.go](internal/gen/unicode.go#L317).

I (@clipperhouse) am keeping an eye on [emerging standards and test suites](https://www.jeffquast.com/post/state-of-terminal-emulation-2025/).

## Prior Art

@@ -133,33 +93,31 @@ goarch: arm64
|
||||
pkg: github.com/clipperhouse/displaywidth/comparison
|
||||
cpu: Apple M2
|
||||
|
||||
BenchmarkString_Mixed/clipperhouse/displaywidth-8 10326 ns/op 163.37 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_Mixed/mattn/go-runewidth-8 14415 ns/op 117.03 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_Mixed/rivo/uniseg-8 19343 ns/op 87.21 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_Mixed/clipperhouse/displaywidth-8 10469 ns/op 161.15 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_Mixed/mattn/go-runewidth-8 14250 ns/op 118.39 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_Mixed/rivo/uniseg-8 19258 ns/op 87.60 MB/s 0 B/op 0 allocs/op
|
||||
|
||||
BenchmarkString_EastAsian/clipperhouse/displaywidth-8 10561 ns/op 159.74 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_EastAsian/mattn/go-runewidth-8 23790 ns/op 70.91 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_EastAsian/rivo/uniseg-8 19322 ns/op 87.31 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_EastAsian/clipperhouse/displaywidth-8 10518 ns/op 160.39 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_EastAsian/mattn/go-runewidth-8 23827 ns/op 70.80 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_EastAsian/rivo/uniseg-8 19537 ns/op 86.35 MB/s 0 B/op 0 allocs/op
|
||||
|
||||
BenchmarkString_ASCII/clipperhouse/displaywidth-8 1033 ns/op 123.88 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_ASCII/mattn/go-runewidth-8 1168 ns/op 109.59 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_ASCII/rivo/uniseg-8 1585 ns/op 80.74 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_ASCII/clipperhouse/displaywidth-8 1027 ns/op 124.61 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_ASCII/mattn/go-runewidth-8 1166 ns/op 109.78 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_ASCII/rivo/uniseg-8 1551 ns/op 82.52 MB/s 0 B/op 0 allocs/op
|
||||
|
||||
BenchmarkString_Emoji/clipperhouse/displaywidth-8 3034 ns/op 238.61 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_Emoji/mattn/go-runewidth-8 4797 ns/op 150.94 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_Emoji/rivo/uniseg-8 6612 ns/op 109.50 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_Emoji/clipperhouse/displaywidth-8 3164 ns/op 228.84 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_Emoji/mattn/go-runewidth-8 4728 ns/op 153.13 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkString_Emoji/rivo/uniseg-8 6489 ns/op 111.57 MB/s 0 B/op 0 allocs/op
|
||||
|
||||
BenchmarkRune_Mixed/clipperhouse/displaywidth-8 3343 ns/op 504.67 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkRune_Mixed/mattn/go-runewidth-8 5414 ns/op 311.62 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkRune_Mixed/clipperhouse/displaywidth-8 3429 ns/op 491.96 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkRune_Mixed/mattn/go-runewidth-8 5308 ns/op 317.81 MB/s 0 B/op 0 allocs/op
|
||||
|
||||
BenchmarkRune_EastAsian/clipperhouse/displaywidth-8 3393 ns/op 497.17 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkRune_EastAsian/mattn/go-runewidth-8 15312 ns/op 110.17 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkRune_EastAsian/clipperhouse/displaywidth-8 3419 ns/op 493.49 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkRune_EastAsian/mattn/go-runewidth-8 15321 ns/op 110.11 MB/s 0 B/op 0 allocs/op
|
||||
|
||||
BenchmarkRune_ASCII/clipperhouse/displaywidth-8 256.9 ns/op 498.32 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkRune_ASCII/mattn/go-runewidth-8 265.7 ns/op 481.75 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkRune_ASCII/clipperhouse/displaywidth-8 254.4 ns/op 503.19 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkRune_ASCII/mattn/go-runewidth-8 264.3 ns/op 484.31 MB/s 0 B/op 0 allocs/op
|
||||
|
||||
BenchmarkRune_Emoji/clipperhouse/displaywidth-8 1336 ns/op 541.96 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkRune_Emoji/mattn/go-runewidth-8 2304 ns/op 314.23 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkRune_Emoji/clipperhouse/displaywidth-8 1374 ns/op 527.02 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkRune_Emoji/mattn/go-runewidth-8 2210 ns/op 327.66 MB/s 0 B/op 0 allocs/op
|
||||
```
|
||||
|
||||
Here are some notes on [how to make Unicode things fast](https://clipperhouse.com/go-unicode/).
|
||||
|
||||
vendor/github.com/clipperhouse/displaywidth/tables.go (generated, vendored, new file, 91 lines)
@@ -0,0 +1,91 @@
|
||||
package displaywidth
|
||||
|
||||
// propertyWidths is a jump table of sorts, instead of a switch
|
||||
var propertyWidths = [5]int{
|
||||
_Default: 1,
|
||||
_Zero_Width: 0,
|
||||
_East_Asian_Wide: 2,
|
||||
_East_Asian_Ambiguous: 1,
|
||||
_Emoji: 2,
|
||||
}
|
||||
|
||||
// asciiWidths is a lookup table for single-byte character widths. Printable
|
||||
// ASCII characters have width 1, control characters have width 0.
|
||||
//
|
||||
// It is intended for valid single-byte UTF-8, which means <128.
|
||||
//
|
||||
// If you look up an index >= 128, that is either:
|
||||
// - invalid UTF-8, or
|
||||
// - a multi-byte UTF-8 sequence, in which case you should be operating on
|
||||
// the grapheme cluster, and not using this table
|
||||
//
|
||||
// We will return a default value of 1 in those cases, so as not to panic.
|
||||
var asciiWidths = [256]int8{
|
||||
// Control characters (0x00-0x1F): width 0
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
// Printable ASCII (0x20-0x7E): width 1
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
// DEL (0x7F): width 0
|
||||
0,
|
||||
// >= 128
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
}
|
||||
|
||||
// asciiProperties is a lookup table for single-byte character properties.
|
||||
// It is intended for valid single-byte UTF-8, which means <128.
|
||||
//
|
||||
// If you look up an index >= 128, that is either:
|
||||
// - invalid UTF-8, or
|
||||
// - a multi-byte UTF-8 sequence, in which case you should be operating on
|
||||
// the grapheme cluster, and not using this table
|
||||
//
|
||||
// We will return a default value of _Default in those cases, so as not to
|
||||
// panic.
|
||||
var asciiProperties = [256]property{
|
||||
// Control characters (0x00-0x1F): _Zero_Width
|
||||
_Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width,
|
||||
_Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width,
|
||||
_Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width,
|
||||
_Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width, _Zero_Width,
|
||||
// Printable ASCII (0x20-0x7E): _Default
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
// DEL (0x7F): _Zero_Width
|
||||
_Zero_Width,
|
||||
// >= 128
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
_Default, _Default, _Default, _Default, _Default, _Default, _Default, _Default,
|
||||
}
|
||||
vendor/github.com/clipperhouse/displaywidth/trie.go (generated, vendored, 502 lines changed)
@@ -10,10 +10,12 @@ type property uint8
|
||||
const (
|
||||
// Always 0 width, includes combining marks, control characters, non-printable, etc
|
||||
_Zero_Width property = iota + 1
|
||||
// Always 2 wide (East Asian Wide F/W, Emoji, Regional Indicator)
|
||||
_Wide
|
||||
// Always 2 wide (East Asian Wide F/W)
|
||||
_East_Asian_Wide
|
||||
// Width depends on EastAsianWidth option
|
||||
_East_Asian_Ambiguous
|
||||
// Extended_Pictographic + Emoji_Presentation
|
||||
_Emoji
|
||||
)
|
||||
|
||||
// lookup returns the trie value for the first UTF-8 encoding in s and
|
||||
@@ -79,7 +81,7 @@ func lookup[T stringish.Interface](s T) (v uint8, sz int) {
return 0, 1
}

// stringWidthTrie. Total size: 17664 bytes (17.25 KiB). Checksum: c77d82ff2d69f0d2.
// stringWidthTrie. Total size: 17728 bytes (17.31 KiB). Checksum: b4b51ae347944fdb.
// type stringWidthTrie struct { }

// func newStringWidthTrie(i int) *stringWidthTrie {
@@ -94,9 +96,9 @@ func lookupValue(n uint32, b byte) uint8 {
}
}

// stringWidthValues: 246 blocks, 15744 entries, 15744 bytes
// stringWidthValues: 247 blocks, 15808 entries, 15808 bytes
// The third block is the zero block.
var stringWidthValues = [15744]uint8{
var stringWidthValues = [15808]uint8{
// Block 0x0, offset 0x0
// Block 0x1, offset 0x40
// Block 0x2, offset 0x80
@@ -575,13 +577,13 @@ var stringWidthValues = [15744]uint8{
|
||||
0x167f: 0x0003,
|
||||
// Block 0x5a, offset 0x1680
|
||||
0x1692: 0x0003,
|
||||
0x169a: 0x0002, 0x169b: 0x0002,
|
||||
0x169a: 0x0004, 0x169b: 0x0004,
|
||||
0x16a9: 0x0002,
|
||||
0x16aa: 0x0002,
|
||||
// Block 0x5b, offset 0x16c0
|
||||
0x16e9: 0x0002,
|
||||
0x16ea: 0x0002, 0x16eb: 0x0002, 0x16ec: 0x0002,
|
||||
0x16f0: 0x0002, 0x16f3: 0x0002,
|
||||
0x16e9: 0x0004,
|
||||
0x16ea: 0x0004, 0x16eb: 0x0004, 0x16ec: 0x0004,
|
||||
0x16f0: 0x0004, 0x16f3: 0x0004,
|
||||
// Block 0x5c, offset 0x1700
|
||||
0x1720: 0x0003, 0x1721: 0x0003, 0x1722: 0x0003, 0x1723: 0x0003,
|
||||
0x1724: 0x0003, 0x1725: 0x0003, 0x1726: 0x0003, 0x1727: 0x0003, 0x1728: 0x0003, 0x1729: 0x0003,
|
||||
@@ -640,63 +642,63 @@ var stringWidthValues = [15744]uint8{
|
||||
0x1862: 0x0003, 0x1863: 0x0003,
|
||||
0x1864: 0x0003, 0x1865: 0x0003,
|
||||
0x186f: 0x0003,
|
||||
0x187d: 0x0002, 0x187e: 0x0002,
|
||||
0x187d: 0x0004, 0x187e: 0x0004,
|
||||
// Block 0x62, offset 0x1880
|
||||
0x1885: 0x0003,
|
||||
0x1886: 0x0003, 0x1889: 0x0003,
|
||||
0x188e: 0x0003, 0x188f: 0x0003,
|
||||
0x1894: 0x0002, 0x1895: 0x0002,
|
||||
0x1894: 0x0004, 0x1895: 0x0004,
|
||||
0x189c: 0x0003,
|
||||
0x189e: 0x0003,
|
||||
0x18b0: 0x0002, 0x18b1: 0x0002, 0x18b2: 0x0002, 0x18b3: 0x0002, 0x18b4: 0x0002, 0x18b5: 0x0002,
|
||||
0x18b6: 0x0002, 0x18b7: 0x0002,
|
||||
// Block 0x63, offset 0x18c0
|
||||
0x18c0: 0x0003, 0x18c2: 0x0003,
|
||||
0x18c8: 0x0002, 0x18c9: 0x0002, 0x18ca: 0x0002, 0x18cb: 0x0002,
|
||||
0x18cc: 0x0002, 0x18cd: 0x0002, 0x18ce: 0x0002, 0x18cf: 0x0002, 0x18d0: 0x0002, 0x18d1: 0x0002,
|
||||
0x18d2: 0x0002, 0x18d3: 0x0002,
|
||||
0x18c8: 0x0004, 0x18c9: 0x0004, 0x18ca: 0x0004, 0x18cb: 0x0004,
|
||||
0x18cc: 0x0004, 0x18cd: 0x0004, 0x18ce: 0x0004, 0x18cf: 0x0004, 0x18d0: 0x0004, 0x18d1: 0x0004,
|
||||
0x18d2: 0x0004, 0x18d3: 0x0004,
|
||||
0x18e0: 0x0003, 0x18e1: 0x0003, 0x18e3: 0x0003,
|
||||
0x18e4: 0x0003, 0x18e5: 0x0003, 0x18e7: 0x0003, 0x18e8: 0x0003, 0x18e9: 0x0003,
|
||||
0x18ea: 0x0003, 0x18ec: 0x0003, 0x18ed: 0x0003, 0x18ef: 0x0003,
|
||||
0x18ff: 0x0002,
|
||||
0x18ff: 0x0004,
|
||||
// Block 0x64, offset 0x1900
|
||||
0x190a: 0x0002, 0x190b: 0x0002,
|
||||
0x190c: 0x0002, 0x190d: 0x0002, 0x190e: 0x0002, 0x190f: 0x0002,
|
||||
0x1913: 0x0002,
|
||||
0x191e: 0x0003, 0x191f: 0x0003, 0x1921: 0x0002,
|
||||
0x192a: 0x0002, 0x192b: 0x0002,
|
||||
0x193d: 0x0002, 0x193e: 0x0002, 0x193f: 0x0003,
|
||||
0x1913: 0x0004,
|
||||
0x191e: 0x0003, 0x191f: 0x0003, 0x1921: 0x0004,
|
||||
0x192a: 0x0004, 0x192b: 0x0004,
|
||||
0x193d: 0x0004, 0x193e: 0x0004, 0x193f: 0x0003,
|
||||
// Block 0x65, offset 0x1940
|
||||
0x1944: 0x0002, 0x1945: 0x0002,
|
||||
0x1944: 0x0004, 0x1945: 0x0004,
|
||||
0x1946: 0x0003, 0x1947: 0x0003, 0x1948: 0x0003, 0x1949: 0x0003, 0x194a: 0x0003, 0x194b: 0x0003,
|
||||
0x194c: 0x0003, 0x194d: 0x0003, 0x194e: 0x0002, 0x194f: 0x0003, 0x1950: 0x0003, 0x1951: 0x0003,
|
||||
0x1952: 0x0003, 0x1953: 0x0003, 0x1954: 0x0002, 0x1955: 0x0003, 0x1956: 0x0003, 0x1957: 0x0003,
|
||||
0x194c: 0x0003, 0x194d: 0x0003, 0x194e: 0x0004, 0x194f: 0x0003, 0x1950: 0x0003, 0x1951: 0x0003,
|
||||
0x1952: 0x0003, 0x1953: 0x0003, 0x1954: 0x0004, 0x1955: 0x0003, 0x1956: 0x0003, 0x1957: 0x0003,
|
||||
0x1958: 0x0003, 0x1959: 0x0003, 0x195a: 0x0003, 0x195b: 0x0003, 0x195c: 0x0003, 0x195d: 0x0003,
|
||||
0x195e: 0x0003, 0x195f: 0x0003, 0x1960: 0x0003, 0x1961: 0x0003, 0x1963: 0x0003,
|
||||
0x1968: 0x0003, 0x1969: 0x0003,
|
||||
0x196a: 0x0002, 0x196b: 0x0003, 0x196c: 0x0003, 0x196d: 0x0003, 0x196e: 0x0003, 0x196f: 0x0003,
|
||||
0x1970: 0x0003, 0x1971: 0x0003, 0x1972: 0x0002, 0x1973: 0x0002, 0x1974: 0x0003, 0x1975: 0x0002,
|
||||
0x1976: 0x0003, 0x1977: 0x0003, 0x1978: 0x0003, 0x1979: 0x0003, 0x197a: 0x0002, 0x197b: 0x0003,
|
||||
0x197c: 0x0003, 0x197d: 0x0002, 0x197e: 0x0003, 0x197f: 0x0003,
|
||||
0x196a: 0x0004, 0x196b: 0x0003, 0x196c: 0x0003, 0x196d: 0x0003, 0x196e: 0x0003, 0x196f: 0x0003,
|
||||
0x1970: 0x0003, 0x1971: 0x0003, 0x1972: 0x0004, 0x1973: 0x0004, 0x1974: 0x0003, 0x1975: 0x0004,
|
||||
0x1976: 0x0003, 0x1977: 0x0003, 0x1978: 0x0003, 0x1979: 0x0003, 0x197a: 0x0004, 0x197b: 0x0003,
|
||||
0x197c: 0x0003, 0x197d: 0x0004, 0x197e: 0x0003, 0x197f: 0x0003,
|
||||
// Block 0x66, offset 0x1980
|
||||
0x1985: 0x0002,
|
||||
0x198a: 0x0002, 0x198b: 0x0002,
|
||||
0x19a8: 0x0002,
|
||||
0x1985: 0x0004,
|
||||
0x198a: 0x0004, 0x198b: 0x0004,
|
||||
0x19a8: 0x0004,
|
||||
0x19bd: 0x0003,
|
||||
// Block 0x67, offset 0x19c0
|
||||
0x19cc: 0x0002, 0x19ce: 0x0002,
|
||||
0x19d3: 0x0002, 0x19d4: 0x0002, 0x19d5: 0x0002, 0x19d7: 0x0002,
|
||||
0x19cc: 0x0004, 0x19ce: 0x0004,
|
||||
0x19d3: 0x0004, 0x19d4: 0x0004, 0x19d5: 0x0004, 0x19d7: 0x0004,
|
||||
0x19f6: 0x0003, 0x19f7: 0x0003, 0x19f8: 0x0003, 0x19f9: 0x0003, 0x19fa: 0x0003, 0x19fb: 0x0003,
|
||||
0x19fc: 0x0003, 0x19fd: 0x0003, 0x19fe: 0x0003, 0x19ff: 0x0003,
|
||||
// Block 0x68, offset 0x1a00
|
||||
0x1a15: 0x0002, 0x1a16: 0x0002, 0x1a17: 0x0002,
|
||||
0x1a30: 0x0002,
|
||||
0x1a3f: 0x0002,
|
||||
0x1a15: 0x0004, 0x1a16: 0x0004, 0x1a17: 0x0004,
|
||||
0x1a30: 0x0004,
|
||||
0x1a3f: 0x0004,
|
||||
// Block 0x69, offset 0x1a40
|
||||
0x1a5b: 0x0002, 0x1a5c: 0x0002,
|
||||
0x1a5b: 0x0004, 0x1a5c: 0x0004,
|
||||
// Block 0x6a, offset 0x1a80
|
||||
0x1a90: 0x0002,
|
||||
0x1a95: 0x0002, 0x1a96: 0x0003, 0x1a97: 0x0003,
|
||||
0x1a90: 0x0004,
|
||||
0x1a95: 0x0004, 0x1a96: 0x0003, 0x1a97: 0x0003,
|
||||
0x1a98: 0x0003, 0x1a99: 0x0003,
|
||||
// Block 0x6b, offset 0x1ac0
|
||||
0x1aef: 0x0001,
|
||||
@@ -1271,9 +1273,9 @@ var stringWidthValues = [15744]uint8{
|
||||
0x3604: 0x0001, 0x3605: 0x0001,
|
||||
0x3606: 0x0001, 0x3607: 0x0001, 0x3608: 0x0001, 0x3609: 0x0001, 0x360a: 0x0001,
|
||||
// Block 0xd9, offset 0x3640
|
||||
0x3644: 0x0002,
|
||||
0x3644: 0x0004,
|
||||
// Block 0xda, offset 0x3680
|
||||
0x368f: 0x0002,
|
||||
0x368f: 0x0004,
|
||||
// Block 0xdb, offset 0x36c0
|
||||
0x36c0: 0x0003, 0x36c1: 0x0003, 0x36c2: 0x0003, 0x36c3: 0x0003, 0x36c4: 0x0003, 0x36c5: 0x0003,
|
||||
0x36c6: 0x0003, 0x36c7: 0x0003, 0x36c8: 0x0003, 0x36c9: 0x0003, 0x36ca: 0x0003,
|
||||
@@ -1300,228 +1302,246 @@ var stringWidthValues = [15744]uint8{
|
||||
// Block 0xdd, offset 0x3740
|
||||
0x3740: 0x0003, 0x3741: 0x0003, 0x3742: 0x0003, 0x3743: 0x0003, 0x3744: 0x0003, 0x3745: 0x0003,
|
||||
0x3746: 0x0003, 0x3747: 0x0003, 0x3748: 0x0003, 0x3749: 0x0003, 0x374a: 0x0003, 0x374b: 0x0003,
|
||||
0x374c: 0x0003, 0x374d: 0x0003, 0x374e: 0x0002, 0x374f: 0x0003, 0x3750: 0x0003, 0x3751: 0x0002,
|
||||
0x3752: 0x0002, 0x3753: 0x0002, 0x3754: 0x0002, 0x3755: 0x0002, 0x3756: 0x0002, 0x3757: 0x0002,
|
||||
0x3758: 0x0002, 0x3759: 0x0002, 0x375a: 0x0002, 0x375b: 0x0003, 0x375c: 0x0003, 0x375d: 0x0003,
|
||||
0x374c: 0x0003, 0x374d: 0x0003, 0x374e: 0x0004, 0x374f: 0x0003, 0x3750: 0x0003, 0x3751: 0x0004,
|
||||
0x3752: 0x0004, 0x3753: 0x0004, 0x3754: 0x0004, 0x3755: 0x0004, 0x3756: 0x0004, 0x3757: 0x0004,
|
||||
0x3758: 0x0004, 0x3759: 0x0004, 0x375a: 0x0004, 0x375b: 0x0003, 0x375c: 0x0003, 0x375d: 0x0003,
|
||||
0x375e: 0x0003, 0x375f: 0x0003, 0x3760: 0x0003, 0x3761: 0x0003, 0x3762: 0x0003, 0x3763: 0x0003,
|
||||
0x3764: 0x0003, 0x3765: 0x0003, 0x3766: 0x0003, 0x3767: 0x0003, 0x3768: 0x0003, 0x3769: 0x0003,
|
||||
0x376a: 0x0003, 0x376b: 0x0003, 0x376c: 0x0003,
|
||||
// Block 0xde, offset 0x3780
|
||||
0x37a6: 0x0002, 0x37a7: 0x0002, 0x37a8: 0x0002, 0x37a9: 0x0002,
|
||||
0x37aa: 0x0002, 0x37ab: 0x0002, 0x37ac: 0x0002, 0x37ad: 0x0002, 0x37ae: 0x0002, 0x37af: 0x0002,
|
||||
0x37b0: 0x0002, 0x37b1: 0x0002, 0x37b2: 0x0002, 0x37b3: 0x0002, 0x37b4: 0x0002, 0x37b5: 0x0002,
|
||||
0x37b6: 0x0002, 0x37b7: 0x0002, 0x37b8: 0x0002, 0x37b9: 0x0002, 0x37ba: 0x0002, 0x37bb: 0x0002,
|
||||
0x37bc: 0x0002, 0x37bd: 0x0002, 0x37be: 0x0002, 0x37bf: 0x0002,
|
||||
0x3780: 0x0002, 0x3781: 0x0004, 0x3782: 0x0002,
|
||||
0x3790: 0x0002, 0x3791: 0x0002,
|
||||
0x3792: 0x0002, 0x3793: 0x0002, 0x3794: 0x0002, 0x3795: 0x0002, 0x3796: 0x0002, 0x3797: 0x0002,
|
||||
0x3798: 0x0002, 0x3799: 0x0002, 0x379a: 0x0004, 0x379b: 0x0002, 0x379c: 0x0002, 0x379d: 0x0002,
|
||||
0x379e: 0x0002, 0x379f: 0x0002, 0x37a0: 0x0002, 0x37a1: 0x0002, 0x37a2: 0x0002, 0x37a3: 0x0002,
|
||||
0x37a4: 0x0002, 0x37a5: 0x0002, 0x37a6: 0x0002, 0x37a7: 0x0002, 0x37a8: 0x0002, 0x37a9: 0x0002,
|
||||
0x37aa: 0x0002, 0x37ab: 0x0002, 0x37ac: 0x0002, 0x37ad: 0x0002, 0x37ae: 0x0002, 0x37af: 0x0004,
|
||||
0x37b0: 0x0002, 0x37b1: 0x0002, 0x37b2: 0x0004, 0x37b3: 0x0004, 0x37b4: 0x0004, 0x37b5: 0x0004,
|
||||
0x37b6: 0x0004, 0x37b7: 0x0002, 0x37b8: 0x0004, 0x37b9: 0x0004, 0x37ba: 0x0004, 0x37bb: 0x0002,
|
||||
// Block 0xdf, offset 0x37c0
|
||||
0x37c0: 0x0002, 0x37c1: 0x0002, 0x37c2: 0x0002,
|
||||
0x37d0: 0x0002, 0x37d1: 0x0002,
|
||||
0x37d2: 0x0002, 0x37d3: 0x0002, 0x37d4: 0x0002, 0x37d5: 0x0002, 0x37d6: 0x0002, 0x37d7: 0x0002,
|
||||
0x37d8: 0x0002, 0x37d9: 0x0002, 0x37da: 0x0002, 0x37db: 0x0002, 0x37dc: 0x0002, 0x37dd: 0x0002,
|
||||
0x37de: 0x0002, 0x37df: 0x0002, 0x37e0: 0x0002, 0x37e1: 0x0002, 0x37e2: 0x0002, 0x37e3: 0x0002,
|
||||
0x37e4: 0x0002, 0x37e5: 0x0002, 0x37e6: 0x0002, 0x37e7: 0x0002, 0x37e8: 0x0002, 0x37e9: 0x0002,
|
||||
0x37ea: 0x0002, 0x37eb: 0x0002, 0x37ec: 0x0002, 0x37ed: 0x0002, 0x37ee: 0x0002, 0x37ef: 0x0002,
|
||||
0x37f0: 0x0002, 0x37f1: 0x0002, 0x37f2: 0x0002, 0x37f3: 0x0002, 0x37f4: 0x0002, 0x37f5: 0x0002,
|
||||
0x37f6: 0x0002, 0x37f7: 0x0002, 0x37f8: 0x0002, 0x37f9: 0x0002, 0x37fa: 0x0002, 0x37fb: 0x0002,
|
||||
0x37c0: 0x0002, 0x37c1: 0x0002, 0x37c2: 0x0002, 0x37c3: 0x0002, 0x37c4: 0x0002, 0x37c5: 0x0002,
|
||||
0x37c6: 0x0002, 0x37c7: 0x0002, 0x37c8: 0x0002,
|
||||
0x37d0: 0x0004, 0x37d1: 0x0004,
|
||||
0x37e0: 0x0002, 0x37e1: 0x0002, 0x37e2: 0x0002, 0x37e3: 0x0002,
|
||||
0x37e4: 0x0002, 0x37e5: 0x0002,
|
||||
// Block 0xe0, offset 0x3800
|
||||
0x3800: 0x0002, 0x3801: 0x0002, 0x3802: 0x0002, 0x3803: 0x0002, 0x3804: 0x0002, 0x3805: 0x0002,
|
||||
0x3806: 0x0002, 0x3807: 0x0002, 0x3808: 0x0002,
|
||||
0x3810: 0x0002, 0x3811: 0x0002,
|
||||
0x3820: 0x0002, 0x3821: 0x0002, 0x3822: 0x0002, 0x3823: 0x0002,
|
||||
0x3824: 0x0002, 0x3825: 0x0002,
|
||||
0x3800: 0x0004, 0x3801: 0x0004, 0x3802: 0x0004, 0x3803: 0x0004, 0x3804: 0x0004, 0x3805: 0x0004,
|
||||
0x3806: 0x0004, 0x3807: 0x0004, 0x3808: 0x0004, 0x3809: 0x0004, 0x380a: 0x0004, 0x380b: 0x0004,
|
||||
0x380c: 0x0004, 0x380d: 0x0004, 0x380e: 0x0004, 0x380f: 0x0004, 0x3810: 0x0004, 0x3811: 0x0004,
|
||||
0x3812: 0x0004, 0x3813: 0x0004, 0x3814: 0x0004, 0x3815: 0x0004, 0x3816: 0x0004, 0x3817: 0x0004,
|
||||
0x3818: 0x0004, 0x3819: 0x0004, 0x381a: 0x0004, 0x381b: 0x0004, 0x381c: 0x0004, 0x381d: 0x0004,
|
||||
0x381e: 0x0004, 0x381f: 0x0004, 0x3820: 0x0004,
|
||||
0x382d: 0x0004, 0x382e: 0x0004, 0x382f: 0x0004,
|
||||
0x3830: 0x0004, 0x3831: 0x0004, 0x3832: 0x0004, 0x3833: 0x0004, 0x3834: 0x0004, 0x3835: 0x0004,
|
||||
0x3837: 0x0004, 0x3838: 0x0004, 0x3839: 0x0004, 0x383a: 0x0004, 0x383b: 0x0004,
|
||||
0x383c: 0x0004, 0x383d: 0x0004, 0x383e: 0x0004, 0x383f: 0x0004,
|
||||
// Block 0xe1, offset 0x3840
|
||||
0x3840: 0x0002, 0x3841: 0x0002, 0x3842: 0x0002, 0x3843: 0x0002, 0x3844: 0x0002, 0x3845: 0x0002,
|
||||
0x3846: 0x0002, 0x3847: 0x0002, 0x3848: 0x0002, 0x3849: 0x0002, 0x384a: 0x0002, 0x384b: 0x0002,
|
||||
0x384c: 0x0002, 0x384d: 0x0002, 0x384e: 0x0002, 0x384f: 0x0002, 0x3850: 0x0002, 0x3851: 0x0002,
|
||||
0x3852: 0x0002, 0x3853: 0x0002, 0x3854: 0x0002, 0x3855: 0x0002, 0x3856: 0x0002, 0x3857: 0x0002,
|
||||
0x3858: 0x0002, 0x3859: 0x0002, 0x385a: 0x0002, 0x385b: 0x0002, 0x385c: 0x0002, 0x385d: 0x0002,
|
||||
0x385e: 0x0002, 0x385f: 0x0002, 0x3860: 0x0002,
|
||||
0x386d: 0x0002, 0x386e: 0x0002, 0x386f: 0x0002,
|
||||
0x3870: 0x0002, 0x3871: 0x0002, 0x3872: 0x0002, 0x3873: 0x0002, 0x3874: 0x0002, 0x3875: 0x0002,
|
||||
0x3877: 0x0002, 0x3878: 0x0002, 0x3879: 0x0002, 0x387a: 0x0002, 0x387b: 0x0002,
|
||||
0x387c: 0x0002, 0x387d: 0x0002, 0x387e: 0x0002, 0x387f: 0x0002,
|
||||
0x3840: 0x0004, 0x3841: 0x0004, 0x3842: 0x0004, 0x3843: 0x0004, 0x3844: 0x0004, 0x3845: 0x0004,
|
||||
0x3846: 0x0004, 0x3847: 0x0004, 0x3848: 0x0004, 0x3849: 0x0004, 0x384a: 0x0004, 0x384b: 0x0004,
|
||||
0x384c: 0x0004, 0x384d: 0x0004, 0x384e: 0x0004, 0x384f: 0x0004, 0x3850: 0x0004, 0x3851: 0x0004,
|
||||
0x3852: 0x0004, 0x3853: 0x0004, 0x3854: 0x0004, 0x3855: 0x0004, 0x3856: 0x0004, 0x3857: 0x0004,
|
||||
0x3858: 0x0004, 0x3859: 0x0004, 0x385a: 0x0004, 0x385b: 0x0004, 0x385c: 0x0004, 0x385d: 0x0004,
|
||||
0x385e: 0x0004, 0x385f: 0x0004, 0x3860: 0x0004, 0x3861: 0x0004, 0x3862: 0x0004, 0x3863: 0x0004,
|
||||
0x3864: 0x0004, 0x3865: 0x0004, 0x3866: 0x0004, 0x3867: 0x0004, 0x3868: 0x0004, 0x3869: 0x0004,
|
||||
0x386a: 0x0004, 0x386b: 0x0004, 0x386c: 0x0004, 0x386d: 0x0004, 0x386e: 0x0004, 0x386f: 0x0004,
|
||||
0x3870: 0x0004, 0x3871: 0x0004, 0x3872: 0x0004, 0x3873: 0x0004, 0x3874: 0x0004, 0x3875: 0x0004,
|
||||
0x3876: 0x0004, 0x3877: 0x0004, 0x3878: 0x0004, 0x3879: 0x0004, 0x387a: 0x0004, 0x387b: 0x0004,
|
||||
0x387c: 0x0004, 0x387e: 0x0004, 0x387f: 0x0004,
|
||||
// Block 0xe2, offset 0x3880
|
||||
0x3880: 0x0002, 0x3881: 0x0002, 0x3882: 0x0002, 0x3883: 0x0002, 0x3884: 0x0002, 0x3885: 0x0002,
|
||||
0x3886: 0x0002, 0x3887: 0x0002, 0x3888: 0x0002, 0x3889: 0x0002, 0x388a: 0x0002, 0x388b: 0x0002,
|
||||
0x388c: 0x0002, 0x388d: 0x0002, 0x388e: 0x0002, 0x388f: 0x0002, 0x3890: 0x0002, 0x3891: 0x0002,
|
||||
0x3892: 0x0002, 0x3893: 0x0002, 0x3894: 0x0002, 0x3895: 0x0002, 0x3896: 0x0002, 0x3897: 0x0002,
|
||||
0x3898: 0x0002, 0x3899: 0x0002, 0x389a: 0x0002, 0x389b: 0x0002, 0x389c: 0x0002, 0x389d: 0x0002,
|
||||
0x389e: 0x0002, 0x389f: 0x0002, 0x38a0: 0x0002, 0x38a1: 0x0002, 0x38a2: 0x0002, 0x38a3: 0x0002,
|
||||
0x38a4: 0x0002, 0x38a5: 0x0002, 0x38a6: 0x0002, 0x38a7: 0x0002, 0x38a8: 0x0002, 0x38a9: 0x0002,
|
||||
0x38aa: 0x0002, 0x38ab: 0x0002, 0x38ac: 0x0002, 0x38ad: 0x0002, 0x38ae: 0x0002, 0x38af: 0x0002,
|
||||
0x38b0: 0x0002, 0x38b1: 0x0002, 0x38b2: 0x0002, 0x38b3: 0x0002, 0x38b4: 0x0002, 0x38b5: 0x0002,
|
||||
0x38b6: 0x0002, 0x38b7: 0x0002, 0x38b8: 0x0002, 0x38b9: 0x0002, 0x38ba: 0x0002, 0x38bb: 0x0002,
|
||||
0x38bc: 0x0002, 0x38be: 0x0002, 0x38bf: 0x0002,
|
||||
0x3880: 0x0004, 0x3881: 0x0004, 0x3882: 0x0004, 0x3883: 0x0004, 0x3884: 0x0004, 0x3885: 0x0004,
|
||||
0x3886: 0x0004, 0x3887: 0x0004, 0x3888: 0x0004, 0x3889: 0x0004, 0x388a: 0x0004, 0x388b: 0x0004,
|
||||
0x388c: 0x0004, 0x388d: 0x0004, 0x388e: 0x0004, 0x388f: 0x0004, 0x3890: 0x0004, 0x3891: 0x0004,
|
||||
0x3892: 0x0004, 0x3893: 0x0004,
|
||||
0x38a0: 0x0004, 0x38a1: 0x0004, 0x38a2: 0x0004, 0x38a3: 0x0004,
|
||||
0x38a4: 0x0004, 0x38a5: 0x0004, 0x38a6: 0x0004, 0x38a7: 0x0004, 0x38a8: 0x0004, 0x38a9: 0x0004,
|
||||
0x38aa: 0x0004, 0x38ab: 0x0004, 0x38ac: 0x0004, 0x38ad: 0x0004, 0x38ae: 0x0004, 0x38af: 0x0004,
|
||||
0x38b0: 0x0004, 0x38b1: 0x0004, 0x38b2: 0x0004, 0x38b3: 0x0004, 0x38b4: 0x0004, 0x38b5: 0x0004,
|
||||
0x38b6: 0x0004, 0x38b7: 0x0004, 0x38b8: 0x0004, 0x38b9: 0x0004, 0x38ba: 0x0004, 0x38bb: 0x0004,
|
||||
0x38bc: 0x0004, 0x38bd: 0x0004, 0x38be: 0x0004, 0x38bf: 0x0004,
|
||||
// Block 0xe3, offset 0x38c0
|
||||
0x38c0: 0x0002, 0x38c1: 0x0002, 0x38c2: 0x0002, 0x38c3: 0x0002, 0x38c4: 0x0002, 0x38c5: 0x0002,
|
||||
0x38c6: 0x0002, 0x38c7: 0x0002, 0x38c8: 0x0002, 0x38c9: 0x0002, 0x38ca: 0x0002, 0x38cb: 0x0002,
|
||||
0x38cc: 0x0002, 0x38cd: 0x0002, 0x38ce: 0x0002, 0x38cf: 0x0002, 0x38d0: 0x0002, 0x38d1: 0x0002,
|
||||
0x38d2: 0x0002, 0x38d3: 0x0002,
|
||||
0x38e0: 0x0002, 0x38e1: 0x0002, 0x38e2: 0x0002, 0x38e3: 0x0002,
|
||||
0x38e4: 0x0002, 0x38e5: 0x0002, 0x38e6: 0x0002, 0x38e7: 0x0002, 0x38e8: 0x0002, 0x38e9: 0x0002,
|
||||
0x38ea: 0x0002, 0x38eb: 0x0002, 0x38ec: 0x0002, 0x38ed: 0x0002, 0x38ee: 0x0002, 0x38ef: 0x0002,
|
||||
0x38f0: 0x0002, 0x38f1: 0x0002, 0x38f2: 0x0002, 0x38f3: 0x0002, 0x38f4: 0x0002, 0x38f5: 0x0002,
|
||||
0x38f6: 0x0002, 0x38f7: 0x0002, 0x38f8: 0x0002, 0x38f9: 0x0002, 0x38fa: 0x0002, 0x38fb: 0x0002,
|
||||
0x38c0: 0x0004, 0x38c1: 0x0004, 0x38c2: 0x0004, 0x38c3: 0x0004, 0x38c4: 0x0004, 0x38c5: 0x0004,
|
||||
0x38c6: 0x0004, 0x38c7: 0x0004, 0x38c8: 0x0004, 0x38c9: 0x0004, 0x38ca: 0x0004,
|
||||
0x38cf: 0x0004, 0x38d0: 0x0004, 0x38d1: 0x0004,
|
||||
0x38d2: 0x0004, 0x38d3: 0x0004,
|
||||
0x38e0: 0x0004, 0x38e1: 0x0004, 0x38e2: 0x0004, 0x38e3: 0x0004,
|
||||
0x38e4: 0x0004, 0x38e5: 0x0004, 0x38e6: 0x0004, 0x38e7: 0x0004, 0x38e8: 0x0004, 0x38e9: 0x0004,
|
||||
0x38ea: 0x0004, 0x38eb: 0x0004, 0x38ec: 0x0004, 0x38ed: 0x0004, 0x38ee: 0x0004, 0x38ef: 0x0004,
|
||||
0x38f0: 0x0004, 0x38f4: 0x0004,
|
||||
0x38f8: 0x0004, 0x38f9: 0x0004, 0x38fa: 0x0004, 0x38fb: 0x0002,
|
||||
0x38fc: 0x0002, 0x38fd: 0x0002, 0x38fe: 0x0002, 0x38ff: 0x0002,
|
||||
// Block 0xe4, offset 0x3900
|
||||
0x3900: 0x0002, 0x3901: 0x0002, 0x3902: 0x0002, 0x3903: 0x0002, 0x3904: 0x0002, 0x3905: 0x0002,
|
||||
0x3906: 0x0002, 0x3907: 0x0002, 0x3908: 0x0002, 0x3909: 0x0002, 0x390a: 0x0002,
|
||||
0x390f: 0x0002, 0x3910: 0x0002, 0x3911: 0x0002,
|
||||
0x3912: 0x0002, 0x3913: 0x0002,
|
||||
0x3920: 0x0002, 0x3921: 0x0002, 0x3922: 0x0002, 0x3923: 0x0002,
|
||||
0x3924: 0x0002, 0x3925: 0x0002, 0x3926: 0x0002, 0x3927: 0x0002, 0x3928: 0x0002, 0x3929: 0x0002,
|
||||
0x392a: 0x0002, 0x392b: 0x0002, 0x392c: 0x0002, 0x392d: 0x0002, 0x392e: 0x0002, 0x392f: 0x0002,
|
||||
0x3930: 0x0002, 0x3934: 0x0002,
|
||||
0x3938: 0x0002, 0x3939: 0x0002, 0x393a: 0x0002, 0x393b: 0x0002,
|
||||
0x393c: 0x0002, 0x393d: 0x0002, 0x393e: 0x0002, 0x393f: 0x0002,
|
||||
0x3900: 0x0004, 0x3901: 0x0004, 0x3902: 0x0004, 0x3903: 0x0004, 0x3904: 0x0004, 0x3905: 0x0004,
|
||||
0x3906: 0x0004, 0x3907: 0x0004, 0x3908: 0x0004, 0x3909: 0x0004, 0x390a: 0x0004, 0x390b: 0x0004,
|
||||
0x390c: 0x0004, 0x390d: 0x0004, 0x390e: 0x0004, 0x390f: 0x0004, 0x3910: 0x0004, 0x3911: 0x0004,
|
||||
0x3912: 0x0004, 0x3913: 0x0004, 0x3914: 0x0004, 0x3915: 0x0004, 0x3916: 0x0004, 0x3917: 0x0004,
|
||||
0x3918: 0x0004, 0x3919: 0x0004, 0x391a: 0x0004, 0x391b: 0x0004, 0x391c: 0x0004, 0x391d: 0x0004,
|
||||
0x391e: 0x0004, 0x391f: 0x0004, 0x3920: 0x0004, 0x3921: 0x0004, 0x3922: 0x0004, 0x3923: 0x0004,
|
||||
0x3924: 0x0004, 0x3925: 0x0004, 0x3926: 0x0004, 0x3927: 0x0004, 0x3928: 0x0004, 0x3929: 0x0004,
|
||||
0x392a: 0x0004, 0x392b: 0x0004, 0x392c: 0x0004, 0x392d: 0x0004, 0x392e: 0x0004, 0x392f: 0x0004,
|
||||
0x3930: 0x0004, 0x3931: 0x0004, 0x3932: 0x0004, 0x3933: 0x0004, 0x3934: 0x0004, 0x3935: 0x0004,
|
||||
0x3936: 0x0004, 0x3937: 0x0004, 0x3938: 0x0004, 0x3939: 0x0004, 0x393a: 0x0004, 0x393b: 0x0004,
|
||||
0x393c: 0x0004, 0x393d: 0x0004, 0x393e: 0x0004,
|
||||
// Block 0xe5, offset 0x3940
|
||||
0x3940: 0x0002, 0x3941: 0x0002, 0x3942: 0x0002, 0x3943: 0x0002, 0x3944: 0x0002, 0x3945: 0x0002,
|
||||
0x3946: 0x0002, 0x3947: 0x0002, 0x3948: 0x0002, 0x3949: 0x0002, 0x394a: 0x0002, 0x394b: 0x0002,
|
||||
0x394c: 0x0002, 0x394d: 0x0002, 0x394e: 0x0002, 0x394f: 0x0002, 0x3950: 0x0002, 0x3951: 0x0002,
|
||||
0x3952: 0x0002, 0x3953: 0x0002, 0x3954: 0x0002, 0x3955: 0x0002, 0x3956: 0x0002, 0x3957: 0x0002,
|
||||
0x3958: 0x0002, 0x3959: 0x0002, 0x395a: 0x0002, 0x395b: 0x0002, 0x395c: 0x0002, 0x395d: 0x0002,
|
||||
0x395e: 0x0002, 0x395f: 0x0002, 0x3960: 0x0002, 0x3961: 0x0002, 0x3962: 0x0002, 0x3963: 0x0002,
|
||||
0x3964: 0x0002, 0x3965: 0x0002, 0x3966: 0x0002, 0x3967: 0x0002, 0x3968: 0x0002, 0x3969: 0x0002,
|
||||
0x396a: 0x0002, 0x396b: 0x0002, 0x396c: 0x0002, 0x396d: 0x0002, 0x396e: 0x0002, 0x396f: 0x0002,
|
||||
0x3970: 0x0002, 0x3971: 0x0002, 0x3972: 0x0002, 0x3973: 0x0002, 0x3974: 0x0002, 0x3975: 0x0002,
|
||||
0x3976: 0x0002, 0x3977: 0x0002, 0x3978: 0x0002, 0x3979: 0x0002, 0x397a: 0x0002, 0x397b: 0x0002,
|
||||
0x397c: 0x0002, 0x397d: 0x0002, 0x397e: 0x0002,
|
||||
0x3940: 0x0004, 0x3942: 0x0004, 0x3943: 0x0004, 0x3944: 0x0004, 0x3945: 0x0004,
|
||||
0x3946: 0x0004, 0x3947: 0x0004, 0x3948: 0x0004, 0x3949: 0x0004, 0x394a: 0x0004, 0x394b: 0x0004,
|
||||
0x394c: 0x0004, 0x394d: 0x0004, 0x394e: 0x0004, 0x394f: 0x0004, 0x3950: 0x0004, 0x3951: 0x0004,
|
||||
0x3952: 0x0004, 0x3953: 0x0004, 0x3954: 0x0004, 0x3955: 0x0004, 0x3956: 0x0004, 0x3957: 0x0004,
|
||||
0x3958: 0x0004, 0x3959: 0x0004, 0x395a: 0x0004, 0x395b: 0x0004, 0x395c: 0x0004, 0x395d: 0x0004,
|
||||
0x395e: 0x0004, 0x395f: 0x0004, 0x3960: 0x0004, 0x3961: 0x0004, 0x3962: 0x0004, 0x3963: 0x0004,
|
||||
0x3964: 0x0004, 0x3965: 0x0004, 0x3966: 0x0004, 0x3967: 0x0004, 0x3968: 0x0004, 0x3969: 0x0004,
|
||||
0x396a: 0x0004, 0x396b: 0x0004, 0x396c: 0x0004, 0x396d: 0x0004, 0x396e: 0x0004, 0x396f: 0x0004,
|
||||
0x3970: 0x0004, 0x3971: 0x0004, 0x3972: 0x0004, 0x3973: 0x0004, 0x3974: 0x0004, 0x3975: 0x0004,
|
||||
0x3976: 0x0004, 0x3977: 0x0004, 0x3978: 0x0004, 0x3979: 0x0004, 0x397a: 0x0004, 0x397b: 0x0004,
|
||||
0x397c: 0x0004, 0x397d: 0x0004, 0x397e: 0x0004, 0x397f: 0x0004,
|
||||
// Block 0xe6, offset 0x3980
|
||||
0x3980: 0x0002, 0x3982: 0x0002, 0x3983: 0x0002, 0x3984: 0x0002, 0x3985: 0x0002,
|
||||
0x3986: 0x0002, 0x3987: 0x0002, 0x3988: 0x0002, 0x3989: 0x0002, 0x398a: 0x0002, 0x398b: 0x0002,
|
||||
0x398c: 0x0002, 0x398d: 0x0002, 0x398e: 0x0002, 0x398f: 0x0002, 0x3990: 0x0002, 0x3991: 0x0002,
|
||||
0x3992: 0x0002, 0x3993: 0x0002, 0x3994: 0x0002, 0x3995: 0x0002, 0x3996: 0x0002, 0x3997: 0x0002,
|
||||
0x3998: 0x0002, 0x3999: 0x0002, 0x399a: 0x0002, 0x399b: 0x0002, 0x399c: 0x0002, 0x399d: 0x0002,
|
||||
0x399e: 0x0002, 0x399f: 0x0002, 0x39a0: 0x0002, 0x39a1: 0x0002, 0x39a2: 0x0002, 0x39a3: 0x0002,
|
||||
0x39a4: 0x0002, 0x39a5: 0x0002, 0x39a6: 0x0002, 0x39a7: 0x0002, 0x39a8: 0x0002, 0x39a9: 0x0002,
|
||||
0x39aa: 0x0002, 0x39ab: 0x0002, 0x39ac: 0x0002, 0x39ad: 0x0002, 0x39ae: 0x0002, 0x39af: 0x0002,
|
||||
0x39b0: 0x0002, 0x39b1: 0x0002, 0x39b2: 0x0002, 0x39b3: 0x0002, 0x39b4: 0x0002, 0x39b5: 0x0002,
|
||||
0x39b6: 0x0002, 0x39b7: 0x0002, 0x39b8: 0x0002, 0x39b9: 0x0002, 0x39ba: 0x0002, 0x39bb: 0x0002,
|
||||
0x39bc: 0x0002, 0x39bd: 0x0002, 0x39be: 0x0002, 0x39bf: 0x0002,
|
||||
0x3980: 0x0004, 0x3981: 0x0004, 0x3982: 0x0004, 0x3983: 0x0004, 0x3984: 0x0004, 0x3985: 0x0004,
|
||||
0x3986: 0x0004, 0x3987: 0x0004, 0x3988: 0x0004, 0x3989: 0x0004, 0x398a: 0x0004, 0x398b: 0x0004,
|
||||
0x398c: 0x0004, 0x398d: 0x0004, 0x398e: 0x0004, 0x398f: 0x0004, 0x3990: 0x0004, 0x3991: 0x0004,
|
||||
0x3992: 0x0004, 0x3993: 0x0004, 0x3994: 0x0004, 0x3995: 0x0004, 0x3996: 0x0004, 0x3997: 0x0004,
|
||||
0x3998: 0x0004, 0x3999: 0x0004, 0x399a: 0x0004, 0x399b: 0x0004, 0x399c: 0x0004, 0x399d: 0x0004,
|
||||
0x399e: 0x0004, 0x399f: 0x0004, 0x39a0: 0x0004, 0x39a1: 0x0004, 0x39a2: 0x0004, 0x39a3: 0x0004,
|
||||
0x39a4: 0x0004, 0x39a5: 0x0004, 0x39a6: 0x0004, 0x39a7: 0x0004, 0x39a8: 0x0004, 0x39a9: 0x0004,
|
||||
0x39aa: 0x0004, 0x39ab: 0x0004, 0x39ac: 0x0004, 0x39ad: 0x0004, 0x39ae: 0x0004, 0x39af: 0x0004,
|
||||
0x39b0: 0x0004, 0x39b1: 0x0004, 0x39b2: 0x0004, 0x39b3: 0x0004, 0x39b4: 0x0004, 0x39b5: 0x0004,
|
||||
0x39b6: 0x0004, 0x39b7: 0x0004, 0x39b8: 0x0004, 0x39b9: 0x0004, 0x39ba: 0x0004, 0x39bb: 0x0004,
|
||||
0x39bc: 0x0004, 0x39bd: 0x0004, 0x39be: 0x0004, 0x39bf: 0x0004,
|
||||
// Block 0xe7, offset 0x39c0
|
||||
0x39c0: 0x0002, 0x39c1: 0x0002, 0x39c2: 0x0002, 0x39c3: 0x0002, 0x39c4: 0x0002, 0x39c5: 0x0002,
|
||||
0x39c6: 0x0002, 0x39c7: 0x0002, 0x39c8: 0x0002, 0x39c9: 0x0002, 0x39ca: 0x0002, 0x39cb: 0x0002,
|
||||
0x39cc: 0x0002, 0x39cd: 0x0002, 0x39ce: 0x0002, 0x39cf: 0x0002, 0x39d0: 0x0002, 0x39d1: 0x0002,
|
||||
0x39d2: 0x0002, 0x39d3: 0x0002, 0x39d4: 0x0002, 0x39d5: 0x0002, 0x39d6: 0x0002, 0x39d7: 0x0002,
|
||||
0x39d8: 0x0002, 0x39d9: 0x0002, 0x39da: 0x0002, 0x39db: 0x0002, 0x39dc: 0x0002, 0x39dd: 0x0002,
|
||||
0x39de: 0x0002, 0x39df: 0x0002, 0x39e0: 0x0002, 0x39e1: 0x0002, 0x39e2: 0x0002, 0x39e3: 0x0002,
|
||||
0x39e4: 0x0002, 0x39e5: 0x0002, 0x39e6: 0x0002, 0x39e7: 0x0002, 0x39e8: 0x0002, 0x39e9: 0x0002,
|
||||
0x39ea: 0x0002, 0x39eb: 0x0002, 0x39ec: 0x0002, 0x39ed: 0x0002, 0x39ee: 0x0002, 0x39ef: 0x0002,
|
||||
0x39f0: 0x0002, 0x39f1: 0x0002, 0x39f2: 0x0002, 0x39f3: 0x0002, 0x39f4: 0x0002, 0x39f5: 0x0002,
|
||||
0x39f6: 0x0002, 0x39f7: 0x0002, 0x39f8: 0x0002, 0x39f9: 0x0002, 0x39fa: 0x0002, 0x39fb: 0x0002,
|
||||
0x39fc: 0x0002, 0x39ff: 0x0002,
|
||||
0x39c0: 0x0004, 0x39c1: 0x0004, 0x39c2: 0x0004, 0x39c3: 0x0004, 0x39c4: 0x0004, 0x39c5: 0x0004,
|
||||
0x39c6: 0x0004, 0x39c7: 0x0004, 0x39c8: 0x0004, 0x39c9: 0x0004, 0x39ca: 0x0004, 0x39cb: 0x0004,
|
||||
0x39cc: 0x0004, 0x39cd: 0x0004, 0x39ce: 0x0004, 0x39cf: 0x0004, 0x39d0: 0x0004, 0x39d1: 0x0004,
|
||||
0x39d2: 0x0004, 0x39d3: 0x0004, 0x39d4: 0x0004, 0x39d5: 0x0004, 0x39d6: 0x0004, 0x39d7: 0x0004,
|
||||
0x39d8: 0x0004, 0x39d9: 0x0004, 0x39da: 0x0004, 0x39db: 0x0004, 0x39dc: 0x0004, 0x39dd: 0x0004,
|
||||
0x39de: 0x0004, 0x39df: 0x0004, 0x39e0: 0x0004, 0x39e1: 0x0004, 0x39e2: 0x0004, 0x39e3: 0x0004,
|
||||
0x39e4: 0x0004, 0x39e5: 0x0004, 0x39e6: 0x0004, 0x39e7: 0x0004, 0x39e8: 0x0004, 0x39e9: 0x0004,
|
||||
0x39ea: 0x0004, 0x39eb: 0x0004, 0x39ec: 0x0004, 0x39ed: 0x0004, 0x39ee: 0x0004, 0x39ef: 0x0004,
|
||||
0x39f0: 0x0004, 0x39f1: 0x0004, 0x39f2: 0x0004, 0x39f3: 0x0004, 0x39f4: 0x0004, 0x39f5: 0x0004,
|
||||
0x39f6: 0x0004, 0x39f7: 0x0004, 0x39f8: 0x0004, 0x39f9: 0x0004, 0x39fa: 0x0004, 0x39fb: 0x0004,
|
||||
0x39fc: 0x0004, 0x39ff: 0x0004,
|
||||
// Block 0xe8, offset 0x3a00
|
||||
0x3a00: 0x0002, 0x3a01: 0x0002, 0x3a02: 0x0002, 0x3a03: 0x0002, 0x3a04: 0x0002, 0x3a05: 0x0002,
|
||||
0x3a06: 0x0002, 0x3a07: 0x0002, 0x3a08: 0x0002, 0x3a09: 0x0002, 0x3a0a: 0x0002, 0x3a0b: 0x0002,
|
||||
0x3a0c: 0x0002, 0x3a0d: 0x0002, 0x3a0e: 0x0002, 0x3a0f: 0x0002, 0x3a10: 0x0002, 0x3a11: 0x0002,
|
||||
0x3a12: 0x0002, 0x3a13: 0x0002, 0x3a14: 0x0002, 0x3a15: 0x0002, 0x3a16: 0x0002, 0x3a17: 0x0002,
|
||||
0x3a18: 0x0002, 0x3a19: 0x0002, 0x3a1a: 0x0002, 0x3a1b: 0x0002, 0x3a1c: 0x0002, 0x3a1d: 0x0002,
|
||||
0x3a1e: 0x0002, 0x3a1f: 0x0002, 0x3a20: 0x0002, 0x3a21: 0x0002, 0x3a22: 0x0002, 0x3a23: 0x0002,
|
||||
0x3a24: 0x0002, 0x3a25: 0x0002, 0x3a26: 0x0002, 0x3a27: 0x0002, 0x3a28: 0x0002, 0x3a29: 0x0002,
|
||||
0x3a2a: 0x0002, 0x3a2b: 0x0002, 0x3a2c: 0x0002, 0x3a2d: 0x0002, 0x3a2e: 0x0002, 0x3a2f: 0x0002,
|
||||
0x3a30: 0x0002, 0x3a31: 0x0002, 0x3a32: 0x0002, 0x3a33: 0x0002, 0x3a34: 0x0002, 0x3a35: 0x0002,
|
||||
0x3a36: 0x0002, 0x3a37: 0x0002, 0x3a38: 0x0002, 0x3a39: 0x0002, 0x3a3a: 0x0002, 0x3a3b: 0x0002,
|
||||
0x3a3c: 0x0002, 0x3a3d: 0x0002,
|
||||
0x3a00: 0x0004, 0x3a01: 0x0004, 0x3a02: 0x0004, 0x3a03: 0x0004, 0x3a04: 0x0004, 0x3a05: 0x0004,
|
||||
0x3a06: 0x0004, 0x3a07: 0x0004, 0x3a08: 0x0004, 0x3a09: 0x0004, 0x3a0a: 0x0004, 0x3a0b: 0x0004,
|
||||
0x3a0c: 0x0004, 0x3a0d: 0x0004, 0x3a0e: 0x0004, 0x3a0f: 0x0004, 0x3a10: 0x0004, 0x3a11: 0x0004,
|
||||
0x3a12: 0x0004, 0x3a13: 0x0004, 0x3a14: 0x0004, 0x3a15: 0x0004, 0x3a16: 0x0004, 0x3a17: 0x0004,
|
||||
0x3a18: 0x0004, 0x3a19: 0x0004, 0x3a1a: 0x0004, 0x3a1b: 0x0004, 0x3a1c: 0x0004, 0x3a1d: 0x0004,
|
||||
0x3a1e: 0x0004, 0x3a1f: 0x0004, 0x3a20: 0x0004, 0x3a21: 0x0004, 0x3a22: 0x0004, 0x3a23: 0x0004,
|
||||
0x3a24: 0x0004, 0x3a25: 0x0004, 0x3a26: 0x0004, 0x3a27: 0x0004, 0x3a28: 0x0004, 0x3a29: 0x0004,
|
||||
0x3a2a: 0x0004, 0x3a2b: 0x0004, 0x3a2c: 0x0004, 0x3a2d: 0x0004, 0x3a2e: 0x0004, 0x3a2f: 0x0004,
|
||||
0x3a30: 0x0004, 0x3a31: 0x0004, 0x3a32: 0x0004, 0x3a33: 0x0004, 0x3a34: 0x0004, 0x3a35: 0x0004,
|
||||
0x3a36: 0x0004, 0x3a37: 0x0004, 0x3a38: 0x0004, 0x3a39: 0x0004, 0x3a3a: 0x0004, 0x3a3b: 0x0004,
|
||||
0x3a3c: 0x0004, 0x3a3d: 0x0004,
|
||||
// Block 0xe9, offset 0x3a40
|
||||
0x3a4b: 0x0002,
|
||||
0x3a4c: 0x0002, 0x3a4d: 0x0002, 0x3a4e: 0x0002, 0x3a50: 0x0002, 0x3a51: 0x0002,
|
||||
0x3a52: 0x0002, 0x3a53: 0x0002, 0x3a54: 0x0002, 0x3a55: 0x0002, 0x3a56: 0x0002, 0x3a57: 0x0002,
|
||||
0x3a58: 0x0002, 0x3a59: 0x0002, 0x3a5a: 0x0002, 0x3a5b: 0x0002, 0x3a5c: 0x0002, 0x3a5d: 0x0002,
|
||||
0x3a5e: 0x0002, 0x3a5f: 0x0002, 0x3a60: 0x0002, 0x3a61: 0x0002, 0x3a62: 0x0002, 0x3a63: 0x0002,
|
||||
0x3a64: 0x0002, 0x3a65: 0x0002, 0x3a66: 0x0002, 0x3a67: 0x0002,
|
||||
0x3a7a: 0x0002,
|
||||
0x3a4b: 0x0004,
|
||||
0x3a4c: 0x0004, 0x3a4d: 0x0004, 0x3a4e: 0x0004, 0x3a50: 0x0004, 0x3a51: 0x0004,
|
||||
0x3a52: 0x0004, 0x3a53: 0x0004, 0x3a54: 0x0004, 0x3a55: 0x0004, 0x3a56: 0x0004, 0x3a57: 0x0004,
|
||||
0x3a58: 0x0004, 0x3a59: 0x0004, 0x3a5a: 0x0004, 0x3a5b: 0x0004, 0x3a5c: 0x0004, 0x3a5d: 0x0004,
|
||||
0x3a5e: 0x0004, 0x3a5f: 0x0004, 0x3a60: 0x0004, 0x3a61: 0x0004, 0x3a62: 0x0004, 0x3a63: 0x0004,
|
||||
0x3a64: 0x0004, 0x3a65: 0x0004, 0x3a66: 0x0004, 0x3a67: 0x0004,
|
||||
0x3a7a: 0x0004,
|
||||
// Block 0xea, offset 0x3a80
|
||||
0x3a95: 0x0002, 0x3a96: 0x0002,
|
||||
0x3aa4: 0x0002,
|
||||
0x3a95: 0x0004, 0x3a96: 0x0004,
|
||||
0x3aa4: 0x0004,
|
||||
// Block 0xeb, offset 0x3ac0
|
||||
0x3afb: 0x0002,
|
||||
0x3afc: 0x0002, 0x3afd: 0x0002, 0x3afe: 0x0002, 0x3aff: 0x0002,
|
||||
0x3afb: 0x0004,
|
||||
0x3afc: 0x0004, 0x3afd: 0x0004, 0x3afe: 0x0004, 0x3aff: 0x0004,
|
||||
// Block 0xec, offset 0x3b00
|
||||
0x3b00: 0x0002, 0x3b01: 0x0002, 0x3b02: 0x0002, 0x3b03: 0x0002, 0x3b04: 0x0002, 0x3b05: 0x0002,
|
||||
0x3b06: 0x0002, 0x3b07: 0x0002, 0x3b08: 0x0002, 0x3b09: 0x0002, 0x3b0a: 0x0002, 0x3b0b: 0x0002,
|
||||
0x3b0c: 0x0002, 0x3b0d: 0x0002, 0x3b0e: 0x0002, 0x3b0f: 0x0002,
|
||||
0x3b00: 0x0004, 0x3b01: 0x0004, 0x3b02: 0x0004, 0x3b03: 0x0004, 0x3b04: 0x0004, 0x3b05: 0x0004,
|
||||
0x3b06: 0x0004, 0x3b07: 0x0004, 0x3b08: 0x0004, 0x3b09: 0x0004, 0x3b0a: 0x0004, 0x3b0b: 0x0004,
|
||||
0x3b0c: 0x0004, 0x3b0d: 0x0004, 0x3b0e: 0x0004, 0x3b0f: 0x0004,
|
||||
// Block 0xed, offset 0x3b40
|
||||
0x3b40: 0x0002, 0x3b41: 0x0002, 0x3b42: 0x0002, 0x3b43: 0x0002, 0x3b44: 0x0002, 0x3b45: 0x0002,
|
||||
0x3b4c: 0x0002, 0x3b50: 0x0002, 0x3b51: 0x0002,
|
||||
0x3b52: 0x0002, 0x3b55: 0x0002, 0x3b56: 0x0002, 0x3b57: 0x0002,
|
||||
0x3b5c: 0x0002, 0x3b5d: 0x0002,
|
||||
0x3b5e: 0x0002, 0x3b5f: 0x0002,
|
||||
0x3b6b: 0x0002, 0x3b6c: 0x0002,
|
||||
0x3b74: 0x0002, 0x3b75: 0x0002,
|
||||
0x3b76: 0x0002, 0x3b77: 0x0002, 0x3b78: 0x0002, 0x3b79: 0x0002, 0x3b7a: 0x0002, 0x3b7b: 0x0002,
|
||||
0x3b7c: 0x0002,
|
||||
0x3b40: 0x0004, 0x3b41: 0x0004, 0x3b42: 0x0004, 0x3b43: 0x0004, 0x3b44: 0x0004, 0x3b45: 0x0004,
|
||||
0x3b4c: 0x0004, 0x3b50: 0x0004, 0x3b51: 0x0004,
|
||||
0x3b52: 0x0004, 0x3b55: 0x0004, 0x3b56: 0x0004, 0x3b57: 0x0004,
|
||||
0x3b5c: 0x0004, 0x3b5d: 0x0004,
|
||||
0x3b5e: 0x0004, 0x3b5f: 0x0004,
|
||||
0x3b6b: 0x0004, 0x3b6c: 0x0004,
|
||||
0x3b74: 0x0004, 0x3b75: 0x0004,
|
||||
0x3b76: 0x0004, 0x3b77: 0x0004, 0x3b78: 0x0004, 0x3b79: 0x0004, 0x3b7a: 0x0004, 0x3b7b: 0x0004,
|
||||
0x3b7c: 0x0004,
|
||||
// Block 0xee, offset 0x3b80
|
||||
0x3ba0: 0x0002, 0x3ba1: 0x0002, 0x3ba2: 0x0002, 0x3ba3: 0x0002,
|
||||
0x3ba4: 0x0002, 0x3ba5: 0x0002, 0x3ba6: 0x0002, 0x3ba7: 0x0002, 0x3ba8: 0x0002, 0x3ba9: 0x0002,
|
||||
0x3baa: 0x0002, 0x3bab: 0x0002,
|
||||
0x3bb0: 0x0002,
|
||||
0x3ba0: 0x0004, 0x3ba1: 0x0004, 0x3ba2: 0x0004, 0x3ba3: 0x0004,
|
||||
0x3ba4: 0x0004, 0x3ba5: 0x0004, 0x3ba6: 0x0004, 0x3ba7: 0x0004, 0x3ba8: 0x0004, 0x3ba9: 0x0004,
|
||||
0x3baa: 0x0004, 0x3bab: 0x0004,
|
||||
0x3bb0: 0x0004,
|
||||
// Block 0xef, offset 0x3bc0
|
||||
0x3bcc: 0x0002, 0x3bcd: 0x0002, 0x3bce: 0x0002, 0x3bcf: 0x0002, 0x3bd0: 0x0002, 0x3bd1: 0x0002,
|
||||
0x3bd2: 0x0002, 0x3bd3: 0x0002, 0x3bd4: 0x0002, 0x3bd5: 0x0002, 0x3bd6: 0x0002, 0x3bd7: 0x0002,
|
||||
0x3bd8: 0x0002, 0x3bd9: 0x0002, 0x3bda: 0x0002, 0x3bdb: 0x0002, 0x3bdc: 0x0002, 0x3bdd: 0x0002,
|
||||
0x3bde: 0x0002, 0x3bdf: 0x0002, 0x3be0: 0x0002, 0x3be1: 0x0002, 0x3be2: 0x0002, 0x3be3: 0x0002,
|
||||
0x3be4: 0x0002, 0x3be5: 0x0002, 0x3be6: 0x0002, 0x3be7: 0x0002, 0x3be8: 0x0002, 0x3be9: 0x0002,
|
||||
0x3bea: 0x0002, 0x3beb: 0x0002, 0x3bec: 0x0002, 0x3bed: 0x0002, 0x3bee: 0x0002, 0x3bef: 0x0002,
|
||||
0x3bf0: 0x0002, 0x3bf1: 0x0002, 0x3bf2: 0x0002, 0x3bf3: 0x0002, 0x3bf4: 0x0002, 0x3bf5: 0x0002,
|
||||
0x3bf6: 0x0002, 0x3bf7: 0x0002, 0x3bf8: 0x0002, 0x3bf9: 0x0002, 0x3bfa: 0x0002,
|
||||
0x3bfc: 0x0002, 0x3bfd: 0x0002, 0x3bfe: 0x0002, 0x3bff: 0x0002,
|
||||
0x3bcc: 0x0004, 0x3bcd: 0x0004, 0x3bce: 0x0004, 0x3bcf: 0x0004, 0x3bd0: 0x0004, 0x3bd1: 0x0004,
|
||||
0x3bd2: 0x0004, 0x3bd3: 0x0004, 0x3bd4: 0x0004, 0x3bd5: 0x0004, 0x3bd6: 0x0004, 0x3bd7: 0x0004,
|
||||
0x3bd8: 0x0004, 0x3bd9: 0x0004, 0x3bda: 0x0004, 0x3bdb: 0x0004, 0x3bdc: 0x0004, 0x3bdd: 0x0004,
|
||||
0x3bde: 0x0004, 0x3bdf: 0x0004, 0x3be0: 0x0004, 0x3be1: 0x0004, 0x3be2: 0x0004, 0x3be3: 0x0004,
|
||||
0x3be4: 0x0004, 0x3be5: 0x0004, 0x3be6: 0x0004, 0x3be7: 0x0004, 0x3be8: 0x0004, 0x3be9: 0x0004,
|
||||
0x3bea: 0x0004, 0x3beb: 0x0004, 0x3bec: 0x0004, 0x3bed: 0x0004, 0x3bee: 0x0004, 0x3bef: 0x0004,
|
||||
0x3bf0: 0x0004, 0x3bf1: 0x0004, 0x3bf2: 0x0004, 0x3bf3: 0x0004, 0x3bf4: 0x0004, 0x3bf5: 0x0004,
|
||||
0x3bf6: 0x0004, 0x3bf7: 0x0004, 0x3bf8: 0x0004, 0x3bf9: 0x0004, 0x3bfa: 0x0004,
|
||||
0x3bfc: 0x0004, 0x3bfd: 0x0004, 0x3bfe: 0x0004, 0x3bff: 0x0004,
|
||||
// Block 0xf0, offset 0x3c00
|
||||
0x3c00: 0x0002, 0x3c01: 0x0002, 0x3c02: 0x0002, 0x3c03: 0x0002, 0x3c04: 0x0002, 0x3c05: 0x0002,
|
||||
0x3c07: 0x0002, 0x3c08: 0x0002, 0x3c09: 0x0002, 0x3c0a: 0x0002, 0x3c0b: 0x0002,
|
||||
0x3c0c: 0x0002, 0x3c0d: 0x0002, 0x3c0e: 0x0002, 0x3c0f: 0x0002, 0x3c10: 0x0002, 0x3c11: 0x0002,
|
||||
0x3c12: 0x0002, 0x3c13: 0x0002, 0x3c14: 0x0002, 0x3c15: 0x0002, 0x3c16: 0x0002, 0x3c17: 0x0002,
|
||||
0x3c18: 0x0002, 0x3c19: 0x0002, 0x3c1a: 0x0002, 0x3c1b: 0x0002, 0x3c1c: 0x0002, 0x3c1d: 0x0002,
|
||||
0x3c1e: 0x0002, 0x3c1f: 0x0002, 0x3c20: 0x0002, 0x3c21: 0x0002, 0x3c22: 0x0002, 0x3c23: 0x0002,
|
||||
0x3c24: 0x0002, 0x3c25: 0x0002, 0x3c26: 0x0002, 0x3c27: 0x0002, 0x3c28: 0x0002, 0x3c29: 0x0002,
|
||||
0x3c2a: 0x0002, 0x3c2b: 0x0002, 0x3c2c: 0x0002, 0x3c2d: 0x0002, 0x3c2e: 0x0002, 0x3c2f: 0x0002,
|
||||
0x3c30: 0x0002, 0x3c31: 0x0002, 0x3c32: 0x0002, 0x3c33: 0x0002, 0x3c34: 0x0002, 0x3c35: 0x0002,
|
||||
0x3c36: 0x0002, 0x3c37: 0x0002, 0x3c38: 0x0002, 0x3c39: 0x0002, 0x3c3a: 0x0002, 0x3c3b: 0x0002,
|
||||
0x3c3c: 0x0002, 0x3c3d: 0x0002, 0x3c3e: 0x0002, 0x3c3f: 0x0002,
|
||||
0x3c00: 0x0004, 0x3c01: 0x0004, 0x3c02: 0x0004, 0x3c03: 0x0004, 0x3c04: 0x0004, 0x3c05: 0x0004,
|
||||
0x3c07: 0x0004, 0x3c08: 0x0004, 0x3c09: 0x0004, 0x3c0a: 0x0004, 0x3c0b: 0x0004,
|
||||
0x3c0c: 0x0004, 0x3c0d: 0x0004, 0x3c0e: 0x0004, 0x3c0f: 0x0004, 0x3c10: 0x0004, 0x3c11: 0x0004,
|
||||
0x3c12: 0x0004, 0x3c13: 0x0004, 0x3c14: 0x0004, 0x3c15: 0x0004, 0x3c16: 0x0004, 0x3c17: 0x0004,
|
||||
0x3c18: 0x0004, 0x3c19: 0x0004, 0x3c1a: 0x0004, 0x3c1b: 0x0004, 0x3c1c: 0x0004, 0x3c1d: 0x0004,
|
||||
0x3c1e: 0x0004, 0x3c1f: 0x0004, 0x3c20: 0x0004, 0x3c21: 0x0004, 0x3c22: 0x0004, 0x3c23: 0x0004,
|
||||
0x3c24: 0x0004, 0x3c25: 0x0004, 0x3c26: 0x0004, 0x3c27: 0x0004, 0x3c28: 0x0004, 0x3c29: 0x0004,
|
||||
0x3c2a: 0x0004, 0x3c2b: 0x0004, 0x3c2c: 0x0004, 0x3c2d: 0x0004, 0x3c2e: 0x0004, 0x3c2f: 0x0004,
|
||||
0x3c30: 0x0004, 0x3c31: 0x0004, 0x3c32: 0x0004, 0x3c33: 0x0004, 0x3c34: 0x0004, 0x3c35: 0x0004,
|
||||
0x3c36: 0x0004, 0x3c37: 0x0004, 0x3c38: 0x0004, 0x3c39: 0x0004, 0x3c3a: 0x0004, 0x3c3b: 0x0004,
|
||||
0x3c3c: 0x0004, 0x3c3d: 0x0004, 0x3c3e: 0x0004, 0x3c3f: 0x0004,
|
||||
// Block 0xf1, offset 0x3c40
|
||||
0x3c70: 0x0002, 0x3c71: 0x0002, 0x3c72: 0x0002, 0x3c73: 0x0002, 0x3c74: 0x0002, 0x3c75: 0x0002,
|
||||
0x3c76: 0x0002, 0x3c77: 0x0002, 0x3c78: 0x0002, 0x3c79: 0x0002, 0x3c7a: 0x0002, 0x3c7b: 0x0002,
|
||||
0x3c7c: 0x0002,
|
||||
0x3c70: 0x0004, 0x3c71: 0x0004, 0x3c72: 0x0004, 0x3c73: 0x0004, 0x3c74: 0x0004, 0x3c75: 0x0004,
|
||||
0x3c76: 0x0004, 0x3c77: 0x0004, 0x3c78: 0x0004, 0x3c79: 0x0004, 0x3c7a: 0x0004, 0x3c7b: 0x0004,
|
||||
0x3c7c: 0x0004,
|
||||
// Block 0xf2, offset 0x3c80
|
||||
0x3c80: 0x0002, 0x3c81: 0x0002, 0x3c82: 0x0002, 0x3c83: 0x0002, 0x3c84: 0x0002, 0x3c85: 0x0002,
|
||||
0x3c86: 0x0002, 0x3c87: 0x0002, 0x3c88: 0x0002, 0x3c89: 0x0002,
|
||||
0x3c8f: 0x0002, 0x3c90: 0x0002, 0x3c91: 0x0002,
|
||||
0x3c92: 0x0002, 0x3c93: 0x0002, 0x3c94: 0x0002, 0x3c95: 0x0002, 0x3c96: 0x0002, 0x3c97: 0x0002,
|
||||
0x3c98: 0x0002, 0x3c99: 0x0002, 0x3c9a: 0x0002, 0x3c9b: 0x0002, 0x3c9c: 0x0002, 0x3c9d: 0x0002,
|
||||
0x3c9e: 0x0002, 0x3c9f: 0x0002, 0x3ca0: 0x0002, 0x3ca1: 0x0002, 0x3ca2: 0x0002, 0x3ca3: 0x0002,
|
||||
0x3ca4: 0x0002, 0x3ca5: 0x0002, 0x3ca6: 0x0002, 0x3ca7: 0x0002, 0x3ca8: 0x0002, 0x3ca9: 0x0002,
|
||||
0x3caa: 0x0002, 0x3cab: 0x0002, 0x3cac: 0x0002, 0x3cad: 0x0002, 0x3cae: 0x0002, 0x3caf: 0x0002,
|
||||
0x3cb0: 0x0002, 0x3cb1: 0x0002, 0x3cb2: 0x0002, 0x3cb3: 0x0002, 0x3cb4: 0x0002, 0x3cb5: 0x0002,
|
||||
0x3cb6: 0x0002, 0x3cb7: 0x0002, 0x3cb8: 0x0002, 0x3cb9: 0x0002, 0x3cba: 0x0002, 0x3cbb: 0x0002,
|
||||
0x3cbc: 0x0002, 0x3cbd: 0x0002, 0x3cbe: 0x0002, 0x3cbf: 0x0002,
|
||||
0x3c80: 0x0004, 0x3c81: 0x0004, 0x3c82: 0x0004, 0x3c83: 0x0004, 0x3c84: 0x0004, 0x3c85: 0x0004,
|
||||
0x3c86: 0x0004, 0x3c87: 0x0004, 0x3c88: 0x0004, 0x3c89: 0x0004,
|
||||
0x3c8f: 0x0004, 0x3c90: 0x0004, 0x3c91: 0x0004,
|
||||
0x3c92: 0x0004, 0x3c93: 0x0004, 0x3c94: 0x0004, 0x3c95: 0x0004, 0x3c96: 0x0004, 0x3c97: 0x0004,
|
||||
0x3c98: 0x0004, 0x3c99: 0x0004, 0x3c9a: 0x0004, 0x3c9b: 0x0004, 0x3c9c: 0x0004, 0x3c9d: 0x0004,
|
||||
0x3c9e: 0x0004, 0x3c9f: 0x0004, 0x3ca0: 0x0004, 0x3ca1: 0x0004, 0x3ca2: 0x0004, 0x3ca3: 0x0004,
|
||||
0x3ca4: 0x0004, 0x3ca5: 0x0004, 0x3ca6: 0x0004, 0x3ca7: 0x0004, 0x3ca8: 0x0004, 0x3ca9: 0x0004,
|
||||
0x3caa: 0x0004, 0x3cab: 0x0004, 0x3cac: 0x0004, 0x3cad: 0x0004, 0x3cae: 0x0004, 0x3caf: 0x0004,
|
||||
0x3cb0: 0x0004, 0x3cb1: 0x0004, 0x3cb2: 0x0004, 0x3cb3: 0x0004, 0x3cb4: 0x0004, 0x3cb5: 0x0004,
|
||||
0x3cb6: 0x0004, 0x3cb7: 0x0004, 0x3cb8: 0x0004, 0x3cb9: 0x0004, 0x3cba: 0x0004, 0x3cbb: 0x0004,
|
||||
0x3cbc: 0x0004, 0x3cbd: 0x0004, 0x3cbe: 0x0004, 0x3cbf: 0x0004,
|
||||
// Block 0xf3, offset 0x3cc0
|
||||
0x3cc0: 0x0002, 0x3cc1: 0x0002, 0x3cc2: 0x0002, 0x3cc3: 0x0002, 0x3cc4: 0x0002, 0x3cc5: 0x0002,
|
||||
0x3cc6: 0x0002,
|
||||
0x3cce: 0x0002, 0x3ccf: 0x0002, 0x3cd0: 0x0002, 0x3cd1: 0x0002,
|
||||
0x3cd2: 0x0002, 0x3cd3: 0x0002, 0x3cd4: 0x0002, 0x3cd5: 0x0002, 0x3cd6: 0x0002, 0x3cd7: 0x0002,
|
||||
0x3cd8: 0x0002, 0x3cd9: 0x0002, 0x3cda: 0x0002, 0x3cdb: 0x0002, 0x3cdc: 0x0002,
|
||||
0x3cdf: 0x0002, 0x3ce0: 0x0002, 0x3ce1: 0x0002, 0x3ce2: 0x0002, 0x3ce3: 0x0002,
|
||||
0x3ce4: 0x0002, 0x3ce5: 0x0002, 0x3ce6: 0x0002, 0x3ce7: 0x0002, 0x3ce8: 0x0002, 0x3ce9: 0x0002,
|
||||
0x3cf0: 0x0002, 0x3cf1: 0x0002, 0x3cf2: 0x0002, 0x3cf3: 0x0002, 0x3cf4: 0x0002, 0x3cf5: 0x0002,
|
||||
0x3cf6: 0x0002, 0x3cf7: 0x0002, 0x3cf8: 0x0002,
|
||||
0x3cc0: 0x0004, 0x3cc1: 0x0004, 0x3cc2: 0x0004, 0x3cc3: 0x0004, 0x3cc4: 0x0004, 0x3cc5: 0x0004,
|
||||
0x3cc6: 0x0004,
|
||||
0x3cce: 0x0004, 0x3ccf: 0x0004, 0x3cd0: 0x0004, 0x3cd1: 0x0004,
|
||||
0x3cd2: 0x0004, 0x3cd3: 0x0004, 0x3cd4: 0x0004, 0x3cd5: 0x0004, 0x3cd6: 0x0004, 0x3cd7: 0x0004,
|
||||
0x3cd8: 0x0004, 0x3cd9: 0x0004, 0x3cda: 0x0004, 0x3cdb: 0x0004, 0x3cdc: 0x0004,
|
||||
0x3cdf: 0x0004, 0x3ce0: 0x0004, 0x3ce1: 0x0004, 0x3ce2: 0x0004, 0x3ce3: 0x0004,
|
||||
0x3ce4: 0x0004, 0x3ce5: 0x0004, 0x3ce6: 0x0004, 0x3ce7: 0x0004, 0x3ce8: 0x0004, 0x3ce9: 0x0004,
|
||||
0x3cf0: 0x0004, 0x3cf1: 0x0004, 0x3cf2: 0x0004, 0x3cf3: 0x0004, 0x3cf4: 0x0004, 0x3cf5: 0x0004,
|
||||
0x3cf6: 0x0004, 0x3cf7: 0x0004, 0x3cf8: 0x0004,
|
||||
// Block 0xf4, offset 0x3d00
|
||||
0x3d01: 0x0001,
|
||||
0x3d20: 0x0001, 0x3d21: 0x0001, 0x3d22: 0x0001, 0x3d23: 0x0001,
|
||||
0x3d24: 0x0001, 0x3d25: 0x0001, 0x3d26: 0x0001, 0x3d27: 0x0001, 0x3d28: 0x0001, 0x3d29: 0x0001,
|
||||
0x3d2a: 0x0001, 0x3d2b: 0x0001, 0x3d2c: 0x0001, 0x3d2d: 0x0001, 0x3d2e: 0x0001, 0x3d2f: 0x0001,
|
||||
0x3d30: 0x0001, 0x3d31: 0x0001, 0x3d32: 0x0001, 0x3d33: 0x0001, 0x3d34: 0x0001, 0x3d35: 0x0001,
|
||||
0x3d36: 0x0001, 0x3d37: 0x0001, 0x3d38: 0x0001, 0x3d39: 0x0001, 0x3d3a: 0x0001, 0x3d3b: 0x0001,
|
||||
0x3d3c: 0x0001, 0x3d3d: 0x0001, 0x3d3e: 0x0001, 0x3d3f: 0x0001,
|
||||
0x3d00: 0x0002, 0x3d01: 0x0002, 0x3d02: 0x0002, 0x3d03: 0x0002, 0x3d04: 0x0002, 0x3d05: 0x0002,
|
||||
0x3d06: 0x0002, 0x3d07: 0x0002, 0x3d08: 0x0002, 0x3d09: 0x0002, 0x3d0a: 0x0002, 0x3d0b: 0x0002,
|
||||
0x3d0c: 0x0002, 0x3d0d: 0x0002, 0x3d0e: 0x0002, 0x3d0f: 0x0002, 0x3d10: 0x0002, 0x3d11: 0x0002,
|
||||
0x3d12: 0x0002, 0x3d13: 0x0002, 0x3d14: 0x0002, 0x3d15: 0x0002, 0x3d16: 0x0002, 0x3d17: 0x0002,
|
||||
0x3d18: 0x0002, 0x3d19: 0x0002, 0x3d1a: 0x0002, 0x3d1b: 0x0002, 0x3d1c: 0x0002, 0x3d1d: 0x0002,
|
||||
0x3d1e: 0x0002, 0x3d1f: 0x0002, 0x3d20: 0x0002, 0x3d21: 0x0002, 0x3d22: 0x0002, 0x3d23: 0x0002,
|
||||
0x3d24: 0x0002, 0x3d25: 0x0002, 0x3d26: 0x0002, 0x3d27: 0x0002, 0x3d28: 0x0002, 0x3d29: 0x0002,
|
||||
0x3d2a: 0x0002, 0x3d2b: 0x0002, 0x3d2c: 0x0002, 0x3d2d: 0x0002, 0x3d2e: 0x0002, 0x3d2f: 0x0002,
|
||||
0x3d30: 0x0002, 0x3d31: 0x0002, 0x3d32: 0x0002, 0x3d33: 0x0002, 0x3d34: 0x0002, 0x3d35: 0x0002,
|
||||
0x3d36: 0x0002, 0x3d37: 0x0002, 0x3d38: 0x0002, 0x3d39: 0x0002, 0x3d3a: 0x0002, 0x3d3b: 0x0002,
|
||||
0x3d3c: 0x0002, 0x3d3d: 0x0002,
|
||||
// Block 0xf5, offset 0x3d40
|
||||
0x3d40: 0x0003, 0x3d41: 0x0003, 0x3d42: 0x0003, 0x3d43: 0x0003, 0x3d44: 0x0003, 0x3d45: 0x0003,
|
||||
0x3d46: 0x0003, 0x3d47: 0x0003, 0x3d48: 0x0003, 0x3d49: 0x0003, 0x3d4a: 0x0003, 0x3d4b: 0x0003,
|
||||
0x3d4c: 0x0003, 0x3d4d: 0x0003, 0x3d4e: 0x0003, 0x3d4f: 0x0003, 0x3d50: 0x0003, 0x3d51: 0x0003,
|
||||
0x3d52: 0x0003, 0x3d53: 0x0003, 0x3d54: 0x0003, 0x3d55: 0x0003, 0x3d56: 0x0003, 0x3d57: 0x0003,
|
||||
0x3d58: 0x0003, 0x3d59: 0x0003, 0x3d5a: 0x0003, 0x3d5b: 0x0003, 0x3d5c: 0x0003, 0x3d5d: 0x0003,
|
||||
0x3d5e: 0x0003, 0x3d5f: 0x0003, 0x3d60: 0x0003, 0x3d61: 0x0003, 0x3d62: 0x0003, 0x3d63: 0x0003,
|
||||
0x3d64: 0x0003, 0x3d65: 0x0003, 0x3d66: 0x0003, 0x3d67: 0x0003, 0x3d68: 0x0003, 0x3d69: 0x0003,
|
||||
0x3d6a: 0x0003, 0x3d6b: 0x0003, 0x3d6c: 0x0003, 0x3d6d: 0x0003, 0x3d6e: 0x0003, 0x3d6f: 0x0003,
|
||||
0x3d70: 0x0003, 0x3d71: 0x0003, 0x3d72: 0x0003, 0x3d73: 0x0003, 0x3d74: 0x0003, 0x3d75: 0x0003,
|
||||
0x3d76: 0x0003, 0x3d77: 0x0003, 0x3d78: 0x0003, 0x3d79: 0x0003, 0x3d7a: 0x0003, 0x3d7b: 0x0003,
|
||||
0x3d7c: 0x0003, 0x3d7d: 0x0003,
|
||||
0x3d41: 0x0001,
|
||||
0x3d60: 0x0001, 0x3d61: 0x0001, 0x3d62: 0x0001, 0x3d63: 0x0001,
|
||||
0x3d64: 0x0001, 0x3d65: 0x0001, 0x3d66: 0x0001, 0x3d67: 0x0001, 0x3d68: 0x0001, 0x3d69: 0x0001,
|
||||
0x3d6a: 0x0001, 0x3d6b: 0x0001, 0x3d6c: 0x0001, 0x3d6d: 0x0001, 0x3d6e: 0x0001, 0x3d6f: 0x0001,
|
||||
0x3d70: 0x0001, 0x3d71: 0x0001, 0x3d72: 0x0001, 0x3d73: 0x0001, 0x3d74: 0x0001, 0x3d75: 0x0001,
|
||||
0x3d76: 0x0001, 0x3d77: 0x0001, 0x3d78: 0x0001, 0x3d79: 0x0001, 0x3d7a: 0x0001, 0x3d7b: 0x0001,
|
||||
0x3d7c: 0x0001, 0x3d7d: 0x0001, 0x3d7e: 0x0001, 0x3d7f: 0x0001,
|
||||
// Block 0xf6, offset 0x3d80
|
||||
0x3d80: 0x0003, 0x3d81: 0x0003, 0x3d82: 0x0003, 0x3d83: 0x0003, 0x3d84: 0x0003, 0x3d85: 0x0003,
|
||||
0x3d86: 0x0003, 0x3d87: 0x0003, 0x3d88: 0x0003, 0x3d89: 0x0003, 0x3d8a: 0x0003, 0x3d8b: 0x0003,
|
||||
0x3d8c: 0x0003, 0x3d8d: 0x0003, 0x3d8e: 0x0003, 0x3d8f: 0x0003, 0x3d90: 0x0003, 0x3d91: 0x0003,
|
||||
0x3d92: 0x0003, 0x3d93: 0x0003, 0x3d94: 0x0003, 0x3d95: 0x0003, 0x3d96: 0x0003, 0x3d97: 0x0003,
|
||||
0x3d98: 0x0003, 0x3d99: 0x0003, 0x3d9a: 0x0003, 0x3d9b: 0x0003, 0x3d9c: 0x0003, 0x3d9d: 0x0003,
|
||||
0x3d9e: 0x0003, 0x3d9f: 0x0003, 0x3da0: 0x0003, 0x3da1: 0x0003, 0x3da2: 0x0003, 0x3da3: 0x0003,
|
||||
0x3da4: 0x0003, 0x3da5: 0x0003, 0x3da6: 0x0003, 0x3da7: 0x0003, 0x3da8: 0x0003, 0x3da9: 0x0003,
|
||||
0x3daa: 0x0003, 0x3dab: 0x0003, 0x3dac: 0x0003, 0x3dad: 0x0003, 0x3dae: 0x0003, 0x3daf: 0x0003,
|
||||
0x3db0: 0x0003, 0x3db1: 0x0003, 0x3db2: 0x0003, 0x3db3: 0x0003, 0x3db4: 0x0003, 0x3db5: 0x0003,
|
||||
0x3db6: 0x0003, 0x3db7: 0x0003, 0x3db8: 0x0003, 0x3db9: 0x0003, 0x3dba: 0x0003, 0x3dbb: 0x0003,
|
||||
0x3dbc: 0x0003, 0x3dbd: 0x0003,
|
||||
}
|
||||
|
||||
// stringWidthIndex: 30 blocks, 1920 entries, 1920 bytes
|
||||
@@ -1653,11 +1673,11 @@ var stringWidthIndex = [1920]uint8{
|
||||
0x593: 0xd4,
|
||||
0x5a3: 0xd5, 0x5a5: 0xd6,
|
||||
// Block 0x17, offset 0x5c0
|
||||
0x5c0: 0xd7, 0x5c3: 0xd8, 0x5c4: 0xd9, 0x5c5: 0xda, 0x5c6: 0xdb, 0x5c7: 0xdc,
|
||||
0x5c8: 0xdd, 0x5c9: 0xde, 0x5cc: 0xdf, 0x5cd: 0xe0, 0x5ce: 0xe1, 0x5cf: 0xe2,
|
||||
0x5d0: 0xe3, 0x5d1: 0xe4, 0x5d2: 0x39, 0x5d3: 0xe5, 0x5d4: 0xe6, 0x5d5: 0xe7, 0x5d6: 0xe8, 0x5d7: 0xe9,
|
||||
0x5d8: 0x39, 0x5d9: 0xea, 0x5da: 0x39, 0x5db: 0xeb, 0x5df: 0xec,
|
||||
0x5e4: 0xed, 0x5e5: 0xee, 0x5e6: 0x39, 0x5e7: 0x39,
|
||||
0x5c0: 0xd7, 0x5c3: 0xd8, 0x5c4: 0xd9, 0x5c5: 0xda, 0x5c6: 0xdb,
|
||||
0x5c8: 0xdc, 0x5c9: 0xdd, 0x5cc: 0xde, 0x5cd: 0xdf, 0x5ce: 0xe0, 0x5cf: 0xe1,
|
||||
0x5d0: 0xe2, 0x5d1: 0xe3, 0x5d2: 0xe4, 0x5d3: 0xe5, 0x5d4: 0xe6, 0x5d5: 0xe7, 0x5d6: 0xe8, 0x5d7: 0xe9,
|
||||
0x5d8: 0xe4, 0x5d9: 0xea, 0x5da: 0xe4, 0x5db: 0xeb, 0x5df: 0xec,
|
||||
0x5e4: 0xed, 0x5e5: 0xee, 0x5e6: 0xe4, 0x5e7: 0xe4,
|
||||
0x5e9: 0xef, 0x5ea: 0xf0, 0x5eb: 0xf1,
|
||||
// Block 0x18, offset 0x600
|
||||
0x600: 0x39, 0x601: 0x39, 0x602: 0x39, 0x603: 0x39, 0x604: 0x39, 0x605: 0x39, 0x606: 0x39, 0x607: 0x39,
|
||||
@@ -1667,7 +1687,7 @@ var stringWidthIndex = [1920]uint8{
|
||||
0x620: 0x39, 0x621: 0x39, 0x622: 0x39, 0x623: 0x39, 0x624: 0x39, 0x625: 0x39, 0x626: 0x39, 0x627: 0x39,
|
||||
0x628: 0x39, 0x629: 0x39, 0x62a: 0x39, 0x62b: 0x39, 0x62c: 0x39, 0x62d: 0x39, 0x62e: 0x39, 0x62f: 0x39,
|
||||
0x630: 0x39, 0x631: 0x39, 0x632: 0x39, 0x633: 0x39, 0x634: 0x39, 0x635: 0x39, 0x636: 0x39, 0x637: 0x39,
|
||||
0x638: 0x39, 0x639: 0x39, 0x63a: 0x39, 0x63b: 0x39, 0x63c: 0x39, 0x63d: 0x39, 0x63e: 0x39, 0x63f: 0xe6,
|
||||
0x638: 0x39, 0x639: 0x39, 0x63a: 0x39, 0x63b: 0x39, 0x63c: 0x39, 0x63d: 0x39, 0x63e: 0x39, 0x63f: 0xf2,
|
||||
// Block 0x19, offset 0x640
|
||||
0x650: 0x0b, 0x651: 0x0c, 0x653: 0x0d, 0x656: 0x0e, 0x657: 0x06,
|
||||
0x658: 0x0f, 0x65a: 0x10, 0x65b: 0x11, 0x65c: 0x12, 0x65d: 0x13, 0x65e: 0x14, 0x65f: 0x15,
|
||||
@@ -1676,7 +1696,7 @@ var stringWidthIndex = [1920]uint8{
|
||||
0x670: 0x06, 0x671: 0x06, 0x672: 0x06, 0x673: 0x06, 0x674: 0x06, 0x675: 0x06, 0x676: 0x06, 0x677: 0x06,
|
||||
0x678: 0x06, 0x679: 0x06, 0x67a: 0x06, 0x67b: 0x06, 0x67c: 0x06, 0x67d: 0x06, 0x67e: 0x06, 0x67f: 0x16,
|
||||
// Block 0x1a, offset 0x680
|
||||
0x680: 0xf2, 0x681: 0x08, 0x684: 0x08, 0x685: 0x08, 0x686: 0x08, 0x687: 0x09,
|
||||
0x680: 0xf3, 0x681: 0x08, 0x684: 0x08, 0x685: 0x08, 0x686: 0x08, 0x687: 0x09,
|
||||
// Block 0x1b, offset 0x6c0
|
||||
0x6c0: 0x5b, 0x6c1: 0x5b, 0x6c2: 0x5b, 0x6c3: 0x5b, 0x6c4: 0x5b, 0x6c5: 0x5b, 0x6c6: 0x5b, 0x6c7: 0x5b,
|
||||
0x6c8: 0x5b, 0x6c9: 0x5b, 0x6ca: 0x5b, 0x6cb: 0x5b, 0x6cc: 0x5b, 0x6cd: 0x5b, 0x6ce: 0x5b, 0x6cf: 0x5b,
|
||||
@@ -1685,7 +1705,7 @@ var stringWidthIndex = [1920]uint8{
|
||||
0x6e0: 0x5b, 0x6e1: 0x5b, 0x6e2: 0x5b, 0x6e3: 0x5b, 0x6e4: 0x5b, 0x6e5: 0x5b, 0x6e6: 0x5b, 0x6e7: 0x5b,
|
||||
0x6e8: 0x5b, 0x6e9: 0x5b, 0x6ea: 0x5b, 0x6eb: 0x5b, 0x6ec: 0x5b, 0x6ed: 0x5b, 0x6ee: 0x5b, 0x6ef: 0x5b,
|
||||
0x6f0: 0x5b, 0x6f1: 0x5b, 0x6f2: 0x5b, 0x6f3: 0x5b, 0x6f4: 0x5b, 0x6f5: 0x5b, 0x6f6: 0x5b, 0x6f7: 0x5b,
|
||||
0x6f8: 0x5b, 0x6f9: 0x5b, 0x6fa: 0x5b, 0x6fb: 0x5b, 0x6fc: 0x5b, 0x6fd: 0x5b, 0x6fe: 0x5b, 0x6ff: 0xf3,
|
||||
0x6f8: 0x5b, 0x6f9: 0x5b, 0x6fa: 0x5b, 0x6fb: 0x5b, 0x6fc: 0x5b, 0x6fd: 0x5b, 0x6fe: 0x5b, 0x6ff: 0xf4,
|
||||
// Block 0x1c, offset 0x700
|
||||
0x720: 0x18,
|
||||
0x730: 0x09, 0x731: 0x09, 0x732: 0x09, 0x733: 0x09, 0x734: 0x09, 0x735: 0x09, 0x736: 0x09, 0x737: 0x09,
|
||||
|
||||
128
vendor/github.com/clipperhouse/displaywidth/width.go
generated
vendored
@@ -34,7 +34,7 @@ func (options Options) String(s string) int {
case 0:
return 0
case 1:
return asciiWidth(s[0])
return int(asciiWidths[s[0]])
}

width := 0
@@ -60,7 +60,7 @@ func (options Options) Bytes(s []byte) int {
case 0:
return 0
case 1:
return asciiWidth(s[0])
return int(asciiWidths[s[0]])
}

width := 0
@@ -90,7 +90,7 @@ func Rune(r rune) int {
// Iterating over runes to measure width is incorrect in many cases.
func (options Options) Rune(r rune) int {
if r < utf8.RuneSelf {
return asciiWidth(byte(r))
return int(asciiWidths[byte(r)])
}

// Surrogates (U+D800-U+DFFF) are invalid UTF-8.
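The asciiWidths table that replaces the asciiWidth call is not included in this diff; the sketch below shows one plausible shape that matches the old branch (zero for control characters and DEL, one otherwise). It is an assumption for illustration, not the vendored definition:

// asciiWidths (assumed layout): a 256-entry lookup so the single-byte fast
// path becomes one array read instead of a branch. Entries 0x00-0x1F and
// 0x7F are 0; everything else defaults to 1.
var asciiWidths = func() (t [256]uint8) {
	for i := range t {
		t[i] = 1
	}
	for b := 0; b <= 0x1F; b++ {
		t[b] = 0
	}
	t[0x7F] = 0
	return t
}()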
@@ -102,11 +102,9 @@ func (options Options) Rune(r rune) int {
n := utf8.EncodeRune(buf[:], r)

// Skip the grapheme iterator
return graphemeWidth(buf[:n], options)
return lookupProperties(buf[:n]).width(options)
}

const _Default property = 0

// graphemeWidth returns the display width of a grapheme cluster.
// The passed string must be a single grapheme cluster.
func graphemeWidth[T stringish.Interface](s T, options Options) int {
@@ -115,39 +113,16 @@ func graphemeWidth[T stringish.Interface](s T, options Options) int {
case 0:
return 0
case 1:
return asciiWidth(s[0])
return int(asciiWidths[s[0]])
}

p, sz := lookup(s)
prop := property(p)

// Variation Selector 16 (VS16) requests emoji presentation
if prop != _Wide && sz > 0 && len(s) >= sz+3 {
vs := s[sz : sz+3]
if isVS16(vs) {
prop = _Wide
}
// VS15 (0x8E) requests text presentation but does not affect width,
// in my reading of Unicode TR51. Falls through to return the base
// character's property.
}

if options.EastAsianWidth && prop == _East_Asian_Ambiguous {
prop = _Wide
}

if prop > upperBound {
prop = _Default
}

return propertyWidths[prop]
return lookupProperties(s).width(options)
}

func asciiWidth(b byte) int {
if b <= 0x1F || b == 0x7F {
return 0
}
return 1
// isRIPrefix checks if the slice matches the Regional Indicator prefix
// (F0 9F 87). It assumes len(s) >= 3.
func isRIPrefix[T stringish.Interface](s T) bool {
return s[0] == 0xF0 && s[1] == 0x9F && s[2] == 0x87
}

// isVS16 checks if the slice matches VS16 (U+FE0F) UTF-8 encoding
@@ -156,12 +131,81 @@ func isVS16[T stringish.Interface](s T) bool {
return s[0] == 0xEF && s[1] == 0xB8 && s[2] == 0x8F
}

// propertyWidths is a jump table of sorts, instead of a switch
var propertyWidths = [4]int{
_Default: 1,
_Zero_Width: 0,
_Wide: 2,
_East_Asian_Ambiguous: 1,
// lookupProperties returns the properties for a grapheme.
// The passed string must be at least one byte long.
//
// Callers must handle zero and single-byte strings upstream, both as an
// optimization, and to reduce the scope of this function.
func lookupProperties[T stringish.Interface](s T) property {
l := len(s)

if s[0] < utf8.RuneSelf {
// Check for variation selector after ASCII (e.g., keycap sequences like 1️⃣)
if l >= 4 {
// Subslice may help eliminate bounds checks
vs := s[1:4]
if isVS16(vs) {
// VS16 requests emoji presentation (width 2)
return _Emoji
}
// VS15 (0x8E) requests text presentation but does not affect width,
// in my reading of Unicode TR51. Falls through to _Default.
}
return asciiProperties[s[0]]
}

// Regional indicator pair (flag)
if l >= 8 {
// Subslice may help eliminate bounds checks
ri := s[:8]
// First rune
if isRIPrefix(ri[0:3]) {
b3 := ri[3]
if b3 >= 0xA6 && b3 <= 0xBF {
// Second rune
if isRIPrefix(ri[4:7]) {
b7 := ri[7]
if b7 >= 0xA6 && b7 <= 0xBF {
return _Emoji
}
}
}
}
}

p, sz := lookup(s)

// Variation Selectors
if sz > 0 && l >= sz+3 {
// Subslice may help eliminate bounds checks
vs := s[sz : sz+3]
if isVS16(vs) {
// VS16 requests emoji presentation (width 2)
return _Emoji
}
// VS15 (0x8E) requests text presentation but does not affect width,
// in my reading of Unicode TR51. Falls through to return the base
// character's property.
}

return property(p)
}
const upperBound = property(len(propertyWidths) - 1)
const _Default property = 0
const boundsCheck = property(len(propertyWidths) - 1)

// width determines the display width of a character based on its properties,
// and configuration options
func (p property) width(options Options) int {
if options.EastAsianWidth && p == _East_Asian_Ambiguous {
return 2
}

// Bounds check may help the compiler eliminate its bounds check,
// and safety of course.
if p > boundsCheck {
return 1 // default width
}

return propertyWidths[p]
}
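A usage sketch of the API surface visible in this file (Options.String, Options.Rune, the package-level Rune, and the EastAsianWidth option). The package name is inferred from the import path, and the sample characters are illustrative:

package main

import (
	"fmt"

	"github.com/clipperhouse/displaywidth"
)

func main() {
	fmt.Println(displaywidth.Rune('A')) // 1: ASCII fast path

	narrow := displaywidth.Options{}
	wide := displaywidth.Options{EastAsianWidth: true}

	// U+00A7 SECTION SIGN is East Asian Ambiguous: its width follows the option.
	fmt.Println(narrow.String("§")) // 1
	fmt.Println(wide.String("§"))   // 2
}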
5
vendor/github.com/olekukonko/ll/global.go
generated
vendored
@@ -667,8 +667,3 @@ func Inspect(values ...interface{}) {
o := NewInspector(defaultLogger)
o.Log(2, values...)
}

func Apply(opts ...Option) *Logger {
return defaultLogger.Apply(opts...)

}
20
vendor/github.com/olekukonko/ll/lh/colorized.go
generated
vendored
@@ -29,7 +29,6 @@ type Palette struct {
Info string // Color for Info level messages
Warn string // Color for Warn level messages
Error string // Color for Error level messages
Fatal string // Color for Fatal level messages
Title string // Color for dump titles (BEGIN/END separators)
}

@@ -48,11 +47,10 @@ var darkPalette = Palette{
Hex: "\033[38;5;156m", // Light green for hex values
Ascii: "\033[38;5;224m", // Light pink for ASCII values

Debug: "\033[36m", // Cyan for Debug level
Info: "\033[32m", // Green for Info level
Warn: "\033[33m", // Yellow for Warn level
Error: "\033[31m", // Standard red
Fatal: "\033[1;31m", // Bold red - stands out more
Debug: "\033[36m", // Cyan for Debug level
Info: "\033[32m", // Green for Info level
Warn: "\033[33m", // Yellow for Warn level
Error: "\033[31m", // Red for Error level
}

// lightPalette defines colors optimized for light terminal backgrounds.
@@ -70,11 +68,10 @@ var lightPalette = Palette{
Hex: "\033[38;5;156m", // Light green for hex values
Ascii: "\033[38;5;224m", // Light pink for ASCII values

Debug: "\033[36m", // Cyan for Debug level
Info: "\033[32m", // Green for Info level
Warn: "\033[33m", // Yellow for Warn level
Error: "\033[31m", // Standard red
Fatal: "\033[1;31m", // Bold red - stands out more
Debug: "\033[36m", // Cyan for Debug level
Info: "\033[32m", // Green for Info level
Warn: "\033[33m", // Yellow for Warn level
Error: "\033[31m", // Red for Error level
}

// ColorizedHandler is a handler that outputs log entries with ANSI color codes.
@@ -253,7 +250,6 @@ func (h *ColorizedHandler) formatLevel(b *strings.Builder, e *lx.Entry) {
lx.LevelInfo: h.palette.Info, // Green
lx.LevelWarn: h.palette.Warn, // Yellow
lx.LevelError: h.palette.Error, // Red
lx.LevelFatal: h.palette.Fatal, // Bold Red
}[e.Level]

b.WriteString(color)
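Illustrative only, not the library's API: the level-to-color lookup in formatLevel above comes down to a map of ANSI escape codes; a standalone sketch using the same codes the palettes define:

package main

import "fmt"

func main() {
	// Same escape codes as the palettes above, keyed by level name.
	colors := map[string]string{
		"DEBUG": "\033[36m", // cyan
		"INFO":  "\033[32m", // green
		"WARN":  "\033[33m", // yellow
		"ERROR": "\033[31m", // red
	}
	const reset = "\033[0m"
	for _, lvl := range []string{"DEBUG", "INFO", "WARN", "ERROR"} {
		fmt.Printf("%s%s%s\n", colors[lvl], lvl, reset)
	}
}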
13
vendor/github.com/olekukonko/ll/lh/multi.go
generated
vendored
@@ -3,7 +3,6 @@ package lh
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/olekukonko/ll/lx"
|
||||
)
|
||||
|
||||
@@ -31,18 +30,6 @@ func NewMultiHandler(h ...lx.Handler) *MultiHandler {
|
||||
}
|
||||
}
|
||||
|
||||
// Len returns the number of handlers in the MultiHandler.
|
||||
func (h *MultiHandler) Len() int {
|
||||
return len(h.Handlers)
|
||||
}
|
||||
|
||||
// Append adds one or more lx.Handler instances to the MultiHandler's list of handlers.
|
||||
func (h *MultiHandler) Append(handlers ...lx.Handler) {
|
||||
for _, e := range handlers {
|
||||
h.Handlers = append(h.Handlers, e)
|
||||
}
|
||||
}
|
||||
|
||||
// Handle implements the Handler interface, calling Handle on each handler in sequence.
|
||||
// It collects any errors from handlers and combines them into a single error using errors.Join.
|
||||
// If no errors occur, it returns nil. Thread-safe if the underlying handlers are thread-safe.
|
||||
|
||||
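The MultiHandler hunk above documents fan-out with errors.Join. A minimal, hedged usage sketch — the JSON handler constructor is taken from the WithHandler doc example later in this diff, and the imports/writer targets are assumptions:

```go
// Fan every entry out to two handlers; errors from Handle are combined via errors.Join.
multi := lh.NewMultiHandler(
	lh.NewJSONHandler(os.Stdout), // constructor name as in the WithHandler doc example
	lh.NewJSONHandler(os.Stderr),
)
logger := ll.New("app", ll.WithHandler(multi)).Enable()
logger.Info("delivered to every attached handler") // multi.Len() reports how many
```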
5 vendor/github.com/olekukonko/ll/lh/slog.go generated vendored
@@ -2,9 +2,8 @@ package lh

import (
"context"
"log/slog"

"github.com/olekukonko/ll/lx"
"log/slog"
)

// SlogHandler adapts a slog.Handler to implement lx.Handler.
@@ -82,7 +81,7 @@ func toSlogLevel(level lx.LevelType) slog.Level {
return slog.LevelInfo
case lx.LevelWarn:
return slog.LevelWarn
case lx.LevelError, lx.LevelFatal:
case lx.LevelError:
return slog.LevelError
default:
return slog.LevelInfo // Default for unknown levels
128 vendor/github.com/olekukonko/ll/ll.go generated vendored
@@ -39,8 +39,6 @@ type Logger struct {
stackBufferSize int // Buffer size for capturing stack traces
separator string // Separator for namespace paths (e.g., "/")
entries atomic.Int64 // Tracks total log entries sent to handler
fatalExits bool
fatalStack bool
}

// New creates a new Logger with the given namespace and optional configurations.
@@ -73,71 +71,22 @@ func New(namespace string, opts ...Option) *Logger {
return logger
}

// Apply applies one or more functional options to the default/global logger.
// Useful for late configuration (e.g., after migration, attach VictoriaLogs handler,
// set level, add middleware, etc.) without changing existing New() calls.
//
// AddContext adds a key-value pair to the logger's context, modifying it directly.
// Unlike Context, it mutates the existing context. It is thread-safe using a write lock.
// Example:
//
// // In main() or init(), after setting up handler
// ll.Apply(
// ll.Handler(vlBatched),
// ll.Level(ll.LevelInfo),
// ll.Use(rateLimiterMiddleware),
// )
//
// Returns the default logger for chaining (if needed).
func (l *Logger) Apply(opts ...Option) *Logger {
l.mu.Lock()
defer l.mu.Unlock()
for _, opt := range opts {
if opt != nil {
opt(l)
}
}
return l
}

// AddContext adds one or more key-value pairs to the logger's persistent context.
// These fields will be included in **every** subsequent log message from this logger
// (and its child namespace loggers).
//
// It supports variadic key-value pairs (string key, any value).
// Non-string keys or uneven number of arguments will be safely ignored/logged.
//
// Returns the logger for chaining.
//
// Examples:
//
// logger.AddContext("user", "alice", "env", "prod")
// logger.AddContext("request_id", reqID, "trace_id", traceID)
// logger.AddContext("service", "payment") // single pair
func (l *Logger) AddContext(pairs ...any) *Logger {
// logger := New("app").Enable()
// logger.AddContext("user", "alice")
// logger.Info("Action") // Output: [app] INFO: Action [user=alice]
func (l *Logger) AddContext(key string, value interface{}) *Logger {
l.mu.Lock()
defer l.mu.Unlock()

// Lazy initialization of context map
// Initialize context map if nil
if l.context == nil {
l.context = make(map[string]interface{})
}

// Process key-value pairs
for i := 0; i < len(pairs)-1; i += 2 {
key, ok := pairs[i].(string)
if !ok {
l.Warnf("AddContext: non-string key at index %d: %v", i, pairs[i])
continue
}

value := pairs[i+1]
l.context[key] = value
}

// Optional: warn about uneven number of arguments
if len(pairs)%2 != 0 {
l.Warn("AddContext: uneven number of arguments, last value ignored")
}

l.context[key] = value
return l
}

@@ -408,7 +357,6 @@ func (l *Logger) Output(values ...interface{}) {
l.output(2, values...)
}

// mark logs the caller's file and line number along with an optional custom name label for tracing execution flow.
func (l *Logger) output(skip int, values ...interface{}) {
if !l.shouldLog(lx.LevelInfo) {
return
@@ -588,10 +536,8 @@ func (l *Logger) Fatal(args ...any) {
os.Exit(1)
}

l.log(lx.LevelFatal, lx.ClassText, cat.Space(args...), nil, l.fatalStack)
if l.fatalExits {
os.Exit(1)
}
l.log(lx.LevelError, lx.ClassText, cat.Space(args...), nil, false)
os.Exit(1)
}

// Fatalf logs a formatted message at Error level with a stack trace and exits the program.
@@ -849,7 +795,6 @@ func (l *Logger) Mark(name ...string) {
l.mark(2, name...)
}

// mark logs the caller's file and line number along with an optional custom name label for tracing execution flow.
func (l *Logger) mark(skip int, names ...string) {
// Skip logging if Info level is not enabled
if !l.shouldLog(lx.LevelInfo) {
@@ -1033,7 +978,7 @@ func (l *Logger) Panic(args ...any) {
panic(msg)
}

l.log(lx.LevelFatal, lx.ClassText, msg, nil, true)
l.log(lx.LevelError, lx.ClassText, msg, nil, true)
panic(msg)
}

@@ -1514,3 +1459,54 @@ func (l *Logger) shouldLog(level lx.LevelType) bool {

return true
}

// WithHandler sets the handler for the logger as a functional option for configuring
// a new logger instance.
// Example:
//
// logger := New("app", WithHandler(lh.NewJSONHandler(os.Stdout)))
func WithHandler(handler lx.Handler) Option {
return func(l *Logger) {
l.handler = handler
}
}

// WithTimestamped returns an Option that configures timestamp settings for the logger's existing handler.
// It enables or disables timestamp logging and optionally sets the timestamp format if the handler
// supports the lx.Timestamper interface. If no handler is set, the function has no effect.
// Parameters:
//
// enable: Boolean to enable or disable timestamp logging
// format: Optional string(s) to specify the timestamp format
func WithTimestamped(enable bool, format ...string) Option {
return func(l *Logger) {
if l.handler != nil { // Check if a handler is set
// Verify if the handler supports the lx.Timestamper interface
if h, ok := l.handler.(lx.Timestamper); ok {
h.Timestamped(enable, format...) // Apply timestamp settings to the handler
}
}
}
}

// WithLevel sets the minimum log level for the logger as a functional option for
// configuring a new logger instance.
// Example:
//
// logger := New("app", WithLevel(lx.LevelWarn))
func WithLevel(level lx.LevelType) Option {
return func(l *Logger) {
l.level = level
}
}

// WithStyle sets the namespace formatting style for the logger as a functional option
// for configuring a new logger instance.
// Example:
//
// logger := New("app", WithStyle(lx.NestedPath))
func WithStyle(style lx.StyleType) Option {
return func(l *Logger) {
l.style = style
}
}
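One hunk above swaps between a variadic AddContext(pairs ...any) and a two-argument AddContext(key, value); a single key/value call compiles against either signature. A hedged construction sketch that uses only names visible in this diff (imports for ll, lx, lh, and os are assumed):

```go
logger := ll.New("app",
	ll.WithLevel(lx.LevelInfo),                   // functional options defined in this file
	ll.WithHandler(lh.NewJSONHandler(os.Stdout)), // constructor as in the WithHandler doc example
).Enable()

logger.AddContext("user", "alice") // valid under either AddContext signature
logger.Info("Action")              // e.g. [app] INFO: Action [user=alice]
```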
7 vendor/github.com/olekukonko/ll/lx/lx.go generated vendored
@@ -36,7 +36,6 @@ const (
LevelInfo // Info level for general operational messages
LevelWarn // Warn level for warning conditions
LevelError // Error level for error conditions requiring attention
LevelFatal // Fatal level for critical error conditions
LevelDebug // None level for logs without a specific severity (e.g., raw output)
LevelUnknown // None level for logs without a specific severity (e.g., raw output)
)
@@ -46,9 +45,7 @@ const (
DebugString = "DEBUG"
InfoString = "INFO"
WarnString = "WARN"
WarningString = "WARNING"
ErrorString = "ERROR"
FatalString = "FATAL"
NoneString = "NONE"
UnknownString = "UNKNOWN"

@@ -101,8 +98,6 @@ func (l LevelType) String() string {
return WarnString
case LevelError:
return ErrorString
case LevelFatal:
return FatalString
case LevelNone:
return NoneString
default:
@@ -119,7 +114,7 @@ func LevelParse(s string) LevelType {
return LevelDebug
case InfoString:
return LevelInfo
case WarnString, WarningString: // Allow both "WARN" and "WARNING"
case WarnString, "WARNING": // Allow both "WARN" and "WARNING"
return LevelWarn
case ErrorString:
return LevelError
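A small sketch of the parsing behavior touched by the last hunk: on both sides of the change, "WARN" and "WARNING" parse to LevelWarn, and the round trip prints the canonical name (the fmt and lx imports are assumed):

```go
for _, s := range []string{"WARN", "WARNING", "ERROR"} {
	lvl := lx.LevelParse(s)
	fmt.Printf("%s -> %s\n", s, lvl.String()) // WARN -> WARN, WARNING -> WARN, ERROR -> ERROR
}
```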
67 vendor/github.com/olekukonko/ll/options.go generated vendored
@@ -1,67 +0,0 @@
package ll

import "github.com/olekukonko/ll/lx"

// WithHandler sets the handler for the logger as a functional option for configuring
// a new logger instance.
// Example:
//
// logger := New("app", WithHandler(lh.NewJSONHandler(os.Stdout)))
func WithHandler(handler lx.Handler) Option {
return func(l *Logger) {
l.handler = handler
}
}

// WithTimestamped returns an Option that configures timestamp settings for the logger's existing handler.
// It enables or disables timestamp logging and optionally sets the timestamp format if the handler
// supports the lx.Timestamper interface. If no handler is set, the function has no effect.
// Parameters:
//
// enable: Boolean to enable or disable timestamp logging
// format: Optional string(s) to specify the timestamp format
func WithTimestamped(enable bool, format ...string) Option {
return func(l *Logger) {
if l.handler != nil { // Check if a handler is set
// Verify if the handler supports the lx.Timestamper interface
if h, ok := l.handler.(lx.Timestamper); ok {
h.Timestamped(enable, format...) // Apply timestamp settings to the handler
}
}
}
}

// WithLevel sets the minimum log level for the logger as a functional option for
// configuring a new logger instance.
// Example:
//
// logger := New("app", WithLevel(lx.LevelWarn))
func WithLevel(level lx.LevelType) Option {
return func(l *Logger) {
l.level = level
}
}

// WithStyle sets the namespace formatting style for the logger as a functional option
// for configuring a new logger instance.
// Example:
//
// logger := New("app", WithStyle(lx.NestedPath))
func WithStyle(style lx.StyleType) Option {
return func(l *Logger) {
l.style = style
}
}

// Functional options (can be passed to New() or applied later)
func WithFatalExits(enabled bool) Option {
return func(l *Logger) {
l.fatalExits = enabled
}
}

func WithFatalStack(enabled bool) Option {
return func(l *Logger) {
l.fatalStack = enabled
}
}
6 vendor/github.com/olekukonko/tablewriter/README.md generated vendored
@@ -28,7 +28,7 @@ go get github.com/olekukonko/tablewriter@v0.0.5
#### Latest Version
The latest stable version
```bash
go get github.com/olekukonko/tablewriter@v1.1.3
go get github.com/olekukonko/tablewriter@v1.1.2
```

**Warning:** Version `v1.0.0` contains missing functionality and should not be used.
@@ -62,7 +62,7 @@ func main() {
data := [][]string{
{"Package", "Version", "Status"},
{"tablewriter", "v0.0.5", "legacy"},
{"tablewriter", "v1.1.3", "latest"},
{"tablewriter", "v1.1.2", "latest"},
}

table := tablewriter.NewWriter(os.Stdout)
@@ -77,7 +77,7 @@ func main() {
│ PACKAGE │ VERSION │ STATUS │
├─────────────┼─────────┼────────┤
│ tablewriter │ v0.0.5 │ legacy │
│ tablewriter │ v1.1.3 │ latest │
│ tablewriter │ v1.1.2 │ latest │
└─────────────┴─────────┴────────┘
```
424 vendor/github.com/olekukonko/tablewriter/pkg/twwidth/ea.go generated vendored
@@ -1,424 +0,0 @@
|
||||
/*
|
||||
Package twwidth provides intelligent East Asian width detection.
|
||||
|
||||
In 2025/2026, most modern terminal emulators (VSCode, Windows Terminal, iTerm2,
|
||||
Alacritty) and modern monospace fonts (Hack, Fira Code, Cascadia Code) treat
|
||||
box-drawing characters as Single Width, regardless of the underlying OS Locale.
|
||||
|
||||
Detection Logic (in order of priority):
|
||||
- RUNEWIDTH_EASTASIAN environment variable (explicit user override)
|
||||
- Force Legacy Mode (programmatic override for backward compatibility)
|
||||
- Modern environment detection (VSCode, Windows Terminal, etc. -> Narrow)
|
||||
- Locale-based detection (CJK locales in traditional terminals -> Wide)
|
||||
|
||||
This prioritization ensures that:
|
||||
- Users can always override behavior using RUNEWIDTH_EASTASIAN
|
||||
- Modern development environments work correctly by default
|
||||
- Traditional CJK terminals maintain compatibility via locale checks
|
||||
|
||||
Examples:
|
||||
|
||||
// Force narrow borders (for Hack font in zh_CN)
|
||||
RUNEWIDTH_EASTASIAN=0 go run .
|
||||
|
||||
// Force wide borders (for legacy CJK terminals)
|
||||
RUNEWIDTH_EASTASIAN=1 go run .
|
||||
*/
|
||||
package twwidth
|
||||
|
||||
import (
|
||||
"os"
|
||||
"runtime"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// Environment Variable Constants
|
||||
const (
|
||||
EnvLCAll = "LC_ALL"
|
||||
EnvLCCtype = "LC_CTYPE"
|
||||
EnvLang = "LANG"
|
||||
EnvRuneWidthEastAsian = "RUNEWIDTH_EASTASIAN"
|
||||
EnvTerm = "TERM"
|
||||
EnvTermProgram = "TERM_PROGRAM"
|
||||
EnvTermProgramWsl = "TERM_PROGRAM_WSL"
|
||||
EnvWTProfile = "WT_PROFILE_ID" // Windows Terminal
|
||||
EnvConEmuANSI = "ConEmuANSI" // ConEmu
|
||||
EnvAlacritty = "ALACRITTY_LOG" // Alacritty
|
||||
EnvVTEVersion = "VTE_VERSION" // GNOME/VTE
|
||||
)
|
||||
|
||||
const (
|
||||
overwriteOn = "override_on"
|
||||
overwriteOff = "override_off"
|
||||
|
||||
envModern = "modern_env"
|
||||
envCjk = "locale_cjk"
|
||||
envAscii = "default_ascii"
|
||||
)
|
||||
|
||||
// CJK Language Codes (Prefixes)
|
||||
// Covers ISO 639-1 (2-letter) and common full names used in some systems.
|
||||
var cjkPrefixes = []string{
|
||||
"zh", "ja", "ko", // Standard: Chinese, Japanese, Korean
|
||||
"chi", "zho", // ISO 639-2/B and T for Chinese
|
||||
"jpn", "kor", // ISO 639-2 for Japanese, Korean
|
||||
"chinese", "japanese", "korean", // Full names (rare but possible in some legacy systems)
|
||||
}
|
||||
|
||||
// CJK Region Codes
|
||||
// Checks for specific regions that imply CJK font usage (e.g., en_HK).
|
||||
var cjkRegions = map[string]bool{
|
||||
"cn": true, // China
|
||||
"tw": true, // Taiwan
|
||||
"hk": true, // Hong Kong
|
||||
"mo": true, // Macau
|
||||
"jp": true, // Japan
|
||||
"kr": true, // South Korea
|
||||
"kp": true, // North Korea
|
||||
"sg": true, // Singapore (Often uses CJK fonts)
|
||||
}
|
||||
|
||||
// Modern environments that should use narrow borders (1-width box chars)
|
||||
var modernEnvironments = map[string]bool{
|
||||
// Terminal programs
|
||||
"vscode": true, "visual studio code": true,
|
||||
"iterm.app": true, "iterm2": true,
|
||||
"windows terminal": true, "windowsterminal": true,
|
||||
"alacritty": true, "kitty": true,
|
||||
"hyper": true, "tabby": true, "terminus": true, "fluentterminal": true,
|
||||
"warp": true, "ghostty": true, "rio": true,
|
||||
"jetbrains-jediterm": true,
|
||||
|
||||
// Terminal types (TERM signatures)
|
||||
"xterm-kitty": true, "xterm-ghostty": true, "wezterm": true,
|
||||
}
|
||||
|
||||
var (
|
||||
eastAsianOnce sync.Once
|
||||
eastAsianVal bool
|
||||
|
||||
// Legacy override control
|
||||
// Renamed to cfgMu to avoid conflict with width.go's mu
|
||||
cfgMu sync.RWMutex
|
||||
forceLegacyEastAsian = false
|
||||
)
|
||||
|
||||
type Enviroment struct {
|
||||
GOOS string `json:"goos"`
|
||||
LC_ALL string `json:"lc_all"`
|
||||
LC_CTYPE string `json:"lc_ctype"`
|
||||
LANG string `json:"lang"`
|
||||
RUNEWIDTH_EASTASIAN string `json:"runewidth_eastasian"`
|
||||
TERM string `json:"term"`
|
||||
TERM_PROGRAM string `json:"term_program"`
|
||||
}
|
||||
|
||||
// State captures the calculated internal state.
|
||||
type State struct {
|
||||
NormalizedLocale string `json:"normalized_locale"`
|
||||
IsCJKLocale bool `json:"is_cjk_locale"`
|
||||
IsModernEnv bool `json:"is_modern_env"`
|
||||
LegacyOverrideMode bool `json:"legacy_override_mode"`
|
||||
}
|
||||
|
||||
// Detection aggregates all debug information regarding East Asian width detection.
|
||||
type Detection struct {
|
||||
AutoUseEastAsian bool `json:"auto_use_east_asian"`
|
||||
DetectionMode string `json:"detection_mode"`
|
||||
Raw Enviroment `json:"raw"`
|
||||
Derived State `json:"derived"`
|
||||
}
|
||||
|
||||
// EastAsianForceLegacy forces the detection logic to ignore modern environment checks.
|
||||
// It relies solely on Locale detection. This is useful for applications that need
|
||||
// strict backward compatibility.
|
||||
//
|
||||
// Note: This does NOT override RUNEWIDTH_EASTASIAN. User environment variables take precedence.
|
||||
// This should be called before the first table render.
|
||||
func EastAsianForceLegacy(force bool) {
|
||||
cfgMu.Lock()
|
||||
defer cfgMu.Unlock()
|
||||
forceLegacyEastAsian = force
|
||||
}
|
||||
|
||||
// EastAsianDetect checks the environment variables to determine if
|
||||
// East Asian width calculations should be enabled.
|
||||
func EastAsianDetect() bool {
|
||||
eastAsianOnce.Do(func() {
|
||||
eastAsianVal = detectEastAsian()
|
||||
})
|
||||
return eastAsianVal
|
||||
}
|
||||
|
||||
// EastAsianConservative is a stricter version that only defaults to Narrow
|
||||
// if the terminal is definitely known to be modern (e.g. VSCode, iTerm2).
|
||||
// It avoids heuristics like checking "xterm" in the TERM variable.
|
||||
func EastAsianConservative() bool {
|
||||
// Check overrides first
|
||||
if val, found := checkOverrides(); found {
|
||||
return val
|
||||
}
|
||||
|
||||
// Stricter modern environment detection
|
||||
if isConservativeModernEnvironment() {
|
||||
return false
|
||||
}
|
||||
|
||||
// Fall back to locale
|
||||
return checkLocale()
|
||||
}
|
||||
|
||||
// EastAsianMode returns the decision path used for the current environment.
|
||||
// Useful for debugging why a specific width was chosen.
|
||||
func EastAsianMode() string {
|
||||
// Check override
|
||||
if val, found := checkOverrides(); found {
|
||||
if val {
|
||||
return overwriteOn
|
||||
}
|
||||
return overwriteOff
|
||||
}
|
||||
|
||||
cfgMu.RLock()
|
||||
legacy := forceLegacyEastAsian
|
||||
cfgMu.RUnlock()
|
||||
|
||||
if legacy {
|
||||
if checkLocale() {
|
||||
return envCjk
|
||||
}
|
||||
return envAscii
|
||||
}
|
||||
|
||||
if isModernEnvironment() {
|
||||
return envModern
|
||||
}
|
||||
|
||||
if checkLocale() {
|
||||
return envCjk
|
||||
}
|
||||
|
||||
return envAscii
|
||||
}
|
||||
|
||||
// Debugging returns detailed information about the detection decision.
|
||||
// Useful for users to include in Github issues.
|
||||
func Debugging() Detection {
|
||||
locale := getNormalizedLocale()
|
||||
|
||||
cfgMu.RLock()
|
||||
legacy := forceLegacyEastAsian
|
||||
cfgMu.RUnlock()
|
||||
|
||||
return Detection{
|
||||
AutoUseEastAsian: EastAsianDetect(),
|
||||
DetectionMode: EastAsianMode(),
|
||||
Raw: Enviroment{
|
||||
GOOS: runtime.GOOS,
|
||||
LC_ALL: os.Getenv(EnvLCAll),
|
||||
LC_CTYPE: os.Getenv(EnvLCCtype),
|
||||
LANG: os.Getenv(EnvLang),
|
||||
RUNEWIDTH_EASTASIAN: os.Getenv(EnvRuneWidthEastAsian),
|
||||
TERM: os.Getenv(EnvTerm),
|
||||
TERM_PROGRAM: os.Getenv(EnvTermProgram),
|
||||
},
|
||||
Derived: State{
|
||||
NormalizedLocale: locale,
|
||||
IsCJKLocale: isCJKLocale(locale),
|
||||
IsModernEnv: isModernEnvironment(),
|
||||
LegacyOverrideMode: legacy,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// detectEastAsian evaluates the environment and locale settings to determine if East Asian width rules should apply.
|
||||
func detectEastAsian() bool {
|
||||
// User Override check (Highest Priority)
|
||||
if val, found := checkOverrides(); found {
|
||||
return val
|
||||
}
|
||||
|
||||
// Force Legacy Mode check
|
||||
cfgMu.RLock()
|
||||
isLegacy := forceLegacyEastAsian
|
||||
cfgMu.RUnlock()
|
||||
|
||||
if isLegacy {
|
||||
// Legacy mode ignores modern environment checks,
|
||||
// relying solely on locale.
|
||||
return checkLocale()
|
||||
}
|
||||
|
||||
// Modern Environment Detection
|
||||
// If modern, we assume Single Width (return false)
|
||||
if isModernEnvironment() {
|
||||
return false
|
||||
}
|
||||
|
||||
// 4. Locale Fallback
|
||||
return checkLocale()
|
||||
}
|
||||
|
||||
// checkOverrides looks for RUNEWIDTH_EASTASIAN
|
||||
func checkOverrides() (bool, bool) {
|
||||
if rw := os.Getenv(EnvRuneWidthEastAsian); rw != "" {
|
||||
rw = strings.ToLower(rw)
|
||||
if rw == "0" || rw == "off" || rw == "false" || rw == "no" {
|
||||
return false, true
|
||||
}
|
||||
if rw == "1" || rw == "on" || rw == "true" || rw == "yes" {
|
||||
return true, true
|
||||
}
|
||||
}
|
||||
return false, false
|
||||
}
|
||||
|
||||
// checkLocale performs the string analysis on LANG/LC_ALL
|
||||
func checkLocale() bool {
|
||||
locale := getNormalizedLocale()
|
||||
if locale == "" {
|
||||
return false
|
||||
}
|
||||
return isCJKLocale(locale)
|
||||
}
|
||||
|
||||
// isModernEnvironment performs comprehensive checks for modern terminal capabilities.
|
||||
func isModernEnvironment() bool {
|
||||
// Check TERM_PROGRAM (Most reliable)
|
||||
if termProg := os.Getenv(EnvTermProgram); termProg != "" {
|
||||
termProgLower := strings.ToLower(termProg)
|
||||
if modernEnvironments[termProgLower] {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
// Check WSL specific variable
|
||||
if os.Getenv(EnvTermProgramWsl) != "" {
|
||||
return true
|
||||
}
|
||||
|
||||
// Windows Specifics
|
||||
if runtime.GOOS == "windows" {
|
||||
// Windows Terminal
|
||||
if os.Getenv(EnvWTProfile) != "" {
|
||||
return true
|
||||
}
|
||||
// ConEmu/Cmder
|
||||
if os.Getenv(EnvConEmuANSI) == "ON" {
|
||||
return true
|
||||
}
|
||||
// Modern Windows console (Windows 10+) check via TERM
|
||||
if term := os.Getenv(EnvTerm); term != "" {
|
||||
termLower := strings.ToLower(term)
|
||||
if strings.Contains(termLower, "xterm") ||
|
||||
strings.Contains(termLower, "vt") {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// VTE-based terminals (GNOME Terminal, Tilix, etc.)
|
||||
if os.Getenv(EnvVTEVersion) != "" {
|
||||
return true
|
||||
}
|
||||
|
||||
// Check for Alacritty specifically
|
||||
if os.Getenv(EnvAlacritty) != "" {
|
||||
return true
|
||||
}
|
||||
|
||||
// Check TERM for modern terminal signatures
|
||||
if term := os.Getenv(EnvTerm); term != "" {
|
||||
termLower := strings.ToLower(term)
|
||||
// Specific modern terminals often put their name in TERM
|
||||
if modernEnvironments[termLower] {
|
||||
return true
|
||||
}
|
||||
// Heuristics for standard modern-capable descriptors
|
||||
if strings.Contains(termLower, "xterm") && !strings.Contains(termLower, "xterm-mono") {
|
||||
return true
|
||||
}
|
||||
if strings.Contains(termLower, "screen") ||
|
||||
strings.Contains(termLower, "tmux") {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// isConservativeModernEnvironment performs strict checks only for known modern terminals.
|
||||
func isConservativeModernEnvironment() bool {
|
||||
termProg := strings.ToLower(os.Getenv(EnvTermProgram))
|
||||
|
||||
// Allow-list of definitely modern terminals
|
||||
switch termProg {
|
||||
case "vscode", "visual studio code":
|
||||
return true
|
||||
case "iterm.app", "iterm2":
|
||||
return true
|
||||
case "windows terminal", "windowsterminal":
|
||||
return true
|
||||
case "alacritty", "wezterm", "kitty", "ghostty":
|
||||
return true
|
||||
case "warp", "tabby", "hyper":
|
||||
return true
|
||||
}
|
||||
|
||||
// Windows Terminal via specific Env
|
||||
if os.Getenv(EnvWTProfile) != "" {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// isCJKLocale determines if a given locale string corresponds to a CJK (Chinese, Japanese, Korean) language or region.
|
||||
func isCJKLocale(locale string) bool {
|
||||
// Check Language Prefix
|
||||
for _, prefix := range cjkPrefixes {
|
||||
if strings.HasPrefix(locale, prefix) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
// Check Regions
|
||||
parts := strings.Split(locale, "_")
|
||||
if len(parts) > 1 {
|
||||
for _, part := range parts[1:] {
|
||||
if cjkRegions[part] {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// getNormalizedLocale returns the normalized locale by inspecting environment variables LC_ALL, LC_CTYPE, and LANG.
|
||||
func getNormalizedLocale() string {
|
||||
var locale string
|
||||
if loc := os.Getenv(EnvLCAll); loc != "" {
|
||||
locale = loc
|
||||
} else if loc := os.Getenv(EnvLCCtype); loc != "" {
|
||||
locale = loc
|
||||
} else if loc := os.Getenv(EnvLang); loc != "" {
|
||||
locale = loc
|
||||
}
|
||||
|
||||
// Fast fail for empty or standard C/POSIX locales
|
||||
if locale == "" || locale == "C" || locale == "POSIX" {
|
||||
return ""
|
||||
}
|
||||
|
||||
// Strip encoding and modifiers
|
||||
if idx := strings.IndexByte(locale, '.'); idx != -1 {
|
||||
locale = locale[:idx]
|
||||
}
|
||||
if idx := strings.IndexByte(locale, '@'); idx != -1 {
|
||||
locale = locale[:idx]
|
||||
}
|
||||
|
||||
return strings.ToLower(locale)
|
||||
}
|
||||
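The deleted ea.go documents a fixed priority order: explicit RUNEWIDTH_EASTASIAN override, then forced legacy mode (locale only), then modern-terminal detection, then CJK locale. A standalone sketch of that decision order, independent of the vendored API (the strings import is assumed):

```go
// decideEastAsian mirrors the precedence described in the ea.go doc comment:
// user override > forced legacy (locale only) > modern terminal > CJK locale.
func decideEastAsian(override string, forceLegacy, modernTerm, cjkLocale bool) bool {
	switch strings.ToLower(override) {
	case "1", "on", "true", "yes":
		return true
	case "0", "off", "false", "no":
		return false
	}
	if forceLegacy {
		return cjkLocale // legacy mode ignores modern-environment checks
	}
	if modernTerm {
		return false // modern emulators render box-drawing chars single width
	}
	return cjkLocale
}
```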
229 vendor/github.com/olekukonko/tablewriter/pkg/twwidth/width.go generated vendored
@@ -1,4 +1,3 @@
|
||||
// width.go
|
||||
package twwidth
|
||||
|
||||
import (
|
||||
@@ -22,10 +21,6 @@ const (
|
||||
// Options allows for configuring width calculation on a per-call basis.
|
||||
type Options struct {
|
||||
EastAsianWidth bool
|
||||
|
||||
// Explicitly force box drawing chars to be narrow
|
||||
// regardless of EastAsianWidth setting.
|
||||
ForceNarrowBorders bool
|
||||
}
|
||||
|
||||
// globalOptions holds the global displaywidth configuration, including East Asian width settings.
|
||||
@@ -41,25 +36,12 @@ var widthCache *twcache.LRU[string, int]
|
||||
var ansi = Filter()
|
||||
|
||||
func init() {
|
||||
isEastAsian := EastAsianDetect()
|
||||
|
||||
// Initialize global options by detecting from the environment,
|
||||
// which is the one key feature we get from go-runewidth.
|
||||
cond := runewidth.NewCondition()
|
||||
cond.EastAsianWidth = isEastAsian
|
||||
|
||||
globalOptions = Options{
|
||||
EastAsianWidth: isEastAsian,
|
||||
|
||||
// Auto-enable ForceNarrowBorders for edge cases.
|
||||
// If EastAsianWidth is ON (e.g. forced via Env Var), but we detect
|
||||
// a modern environment, we might technically want to narrow borders
|
||||
// while keeping text wide.
|
||||
//
|
||||
// Note: In the standard EastAsian logic, isEastAsian will
|
||||
// ALREADY be false for modern environments, so this boolean implies
|
||||
// a specific "Forced On" scenario.
|
||||
ForceNarrowBorders: isEastAsian && isModernEnvironment(),
|
||||
EastAsianWidth: cond.EastAsianWidth,
|
||||
}
|
||||
|
||||
widthCache = twcache.NewLRU[string, int](cacheCapacity)
|
||||
}
|
||||
|
||||
@@ -73,14 +55,6 @@ func makeCacheKey(str string, eastAsianWidth bool) string {
|
||||
return cachePrefix + str
|
||||
}
|
||||
|
||||
// Display calculates the visual width of a string using a specific runewidth.Condition.
|
||||
// Deprecated: use WidthWithOptions with the new twwidth.Options struct instead.
|
||||
// This function is kept for backward compatibility.
|
||||
func Display(cond *runewidth.Condition, str string) int {
|
||||
opts := Options{EastAsianWidth: cond.EastAsianWidth}
|
||||
return WidthWithOptions(str, opts)
|
||||
}
|
||||
|
||||
// Filter compiles and returns a regular expression for matching ANSI escape sequences,
|
||||
// including CSI (Control Sequence Introducer) and OSC (Operating System Command) sequences.
|
||||
// The returned regex can be used to strip ANSI codes from strings.
|
||||
@@ -99,15 +73,25 @@ func Filter() *regexp.Regexp {
|
||||
return regexp.MustCompile("(" + regCSI + "|" + regOSC + ")")
|
||||
}
|
||||
|
||||
// GetCacheStats returns current cache statistics
|
||||
func GetCacheStats() (size, capacity int, hitRate float64) {
|
||||
// SetOptions sets the global options for width calculation.
|
||||
// This function is thread-safe.
|
||||
func SetOptions(opts Options) {
|
||||
mu.Lock()
|
||||
defer mu.Unlock()
|
||||
|
||||
if widthCache == nil {
|
||||
return 0, 0, 0
|
||||
if globalOptions.EastAsianWidth != opts.EastAsianWidth {
|
||||
globalOptions = opts
|
||||
widthCache.Purge()
|
||||
}
|
||||
return widthCache.Len(), widthCache.Cap(), widthCache.HitRate()
|
||||
}
|
||||
|
||||
// SetEastAsian enables or disables East Asian width handling globally.
|
||||
// This function is thread-safe.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// twdw.SetEastAsian(true) // Enable East Asian width handling
|
||||
func SetEastAsian(enable bool) {
|
||||
SetOptions(Options{EastAsianWidth: enable})
|
||||
}
|
||||
|
||||
// IsEastAsian returns the current East Asian width setting.
|
||||
@@ -124,22 +108,6 @@ func IsEastAsian() bool {
|
||||
return globalOptions.EastAsianWidth
|
||||
}
|
||||
|
||||
// SetCacheCapacity changes the cache size dynamically
|
||||
// If capacity <= 0, disables caching entirely
|
||||
func SetCacheCapacity(capacity int) {
|
||||
mu.Lock()
|
||||
defer mu.Unlock()
|
||||
|
||||
if capacity <= 0 {
|
||||
widthCache = nil // nil = fully disabled
|
||||
return
|
||||
}
|
||||
|
||||
newCache := twcache.NewLRU[string, int](capacity)
|
||||
widthCache = newCache
|
||||
}
|
||||
|
||||
// SetCondition sets the global East Asian width setting based on a runewidth.Condition.
|
||||
// Deprecated: use SetOptions with the new twwidth.Options struct instead.
|
||||
// This function is kept for backward compatibility.
|
||||
func SetCondition(cond *runewidth.Condition) {
|
||||
@@ -152,33 +120,55 @@ func SetCondition(cond *runewidth.Condition) {
|
||||
}
|
||||
}
|
||||
|
||||
// SetEastAsian enables or disables East Asian width handling globally.
|
||||
// Width calculates the visual width of a string using the global cache for performance.
|
||||
// It excludes ANSI escape sequences and accounts for the global East Asian width setting.
|
||||
// This function is thread-safe.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// twdw.SetEastAsian(true) // Enable East Asian width handling
|
||||
func SetEastAsian(enable bool) {
|
||||
SetOptions(Options{EastAsianWidth: enable})
|
||||
}
|
||||
// width := twdw.Width("Hello\x1b[31mWorld") // Returns 10
|
||||
func Width(str string) int {
|
||||
currentEA := IsEastAsian()
|
||||
key := makeCacheKey(str, currentEA)
|
||||
|
||||
// SetForceNarrow to preserve the new flag, or create a new setter
|
||||
func SetForceNarrow(enable bool) {
|
||||
mu.Lock()
|
||||
defer mu.Unlock()
|
||||
globalOptions.ForceNarrowBorders = enable
|
||||
widthCache.Purge() // Clear cache because widths might change
|
||||
}
|
||||
|
||||
// SetOptions sets the global options for width calculation.
|
||||
// This function is thread-safe.
|
||||
func SetOptions(opts Options) {
|
||||
mu.Lock()
|
||||
defer mu.Unlock()
|
||||
if globalOptions.EastAsianWidth != opts.EastAsianWidth || globalOptions.ForceNarrowBorders != opts.ForceNarrowBorders {
|
||||
globalOptions = opts
|
||||
widthCache.Purge()
|
||||
if w, found := widthCache.Get(key); found {
|
||||
return w
|
||||
}
|
||||
|
||||
opts := displaywidth.Options{EastAsianWidth: currentEA}
|
||||
stripped := ansi.ReplaceAllLiteralString(str, "")
|
||||
calculatedWidth := opts.String(stripped)
|
||||
|
||||
widthCache.Add(key, calculatedWidth)
|
||||
return calculatedWidth
|
||||
}
|
||||
|
||||
// WidthWithOptions calculates the visual width of a string with specific options,
|
||||
// bypassing the global settings and cache. This is useful for one-shot calculations
|
||||
// where global state is not desired.
|
||||
func WidthWithOptions(str string, opts Options) int {
|
||||
dwOpts := displaywidth.Options{EastAsianWidth: opts.EastAsianWidth}
|
||||
stripped := ansi.ReplaceAllLiteralString(str, "")
|
||||
return dwOpts.String(stripped)
|
||||
}
|
||||
|
||||
// WidthNoCache calculates the visual width of a string without using the global cache.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// width := twdw.WidthNoCache("Hello\x1b[31mWorld") // Returns 10
|
||||
func WidthNoCache(str string) int {
|
||||
// This function's behavior is equivalent to a one-shot calculation
|
||||
// using the current global options. The WidthWithOptions function
|
||||
// does not interact with the cache, thus fulfilling the requirement.
|
||||
return WidthWithOptions(str, Options{EastAsianWidth: IsEastAsian()})
|
||||
}
|
||||
|
||||
// Deprecated: use WidthWithOptions with the new twwidth.Options struct instead.
|
||||
// This function is kept for backward compatibility.
|
||||
func Display(cond *runewidth.Condition, str string) int {
|
||||
opts := Options{EastAsianWidth: cond.EastAsianWidth}
|
||||
return WidthWithOptions(str, opts)
|
||||
}
|
||||
|
||||
// Truncate shortens a string to fit within a specified visual width, optionally
|
||||
@@ -245,13 +235,11 @@ func Truncate(s string, maxWidth int, suffix ...string) string {
|
||||
|
||||
// Case 4: String needs truncation (sDisplayWidth > maxWidth).
|
||||
// maxWidth is the total budget for the final string (content + suffix).
|
||||
mu.Lock()
|
||||
currentOpts := globalOptions
|
||||
mu.Unlock()
|
||||
currentGlobalEastAsianWidth := IsEastAsian()
|
||||
|
||||
// Special case for EastAsianDetect true: if only suffix fits, return suffix.
|
||||
// Special case for EastAsian true: if only suffix fits, return suffix.
|
||||
// This was derived from previous test behavior.
|
||||
if len(suffixStr) > 0 && currentOpts.EastAsianWidth {
|
||||
if len(suffixStr) > 0 && currentGlobalEastAsianWidth {
|
||||
provisionalContentWidth := maxWidth - suffixDisplayWidth
|
||||
if provisionalContentWidth == 0 { // Exactly enough space for suffix only
|
||||
return suffixStr
|
||||
@@ -283,6 +271,8 @@ func Truncate(s string, maxWidth int, suffix ...string) string {
|
||||
inAnsiSequence := false
|
||||
ansiWrittenToContent := false
|
||||
|
||||
dwOpts := displaywidth.Options{EastAsianWidth: currentGlobalEastAsianWidth}
|
||||
|
||||
for _, r := range s {
|
||||
if r == '\x1b' {
|
||||
inAnsiSequence = true
|
||||
@@ -315,7 +305,7 @@ func Truncate(s string, maxWidth int, suffix ...string) string {
|
||||
ansiSeqBuf.Reset()
|
||||
}
|
||||
} else { // Normal character
|
||||
runeDisplayWidth := calculateRunewidth(r, currentOpts)
|
||||
runeDisplayWidth := dwOpts.Rune(r)
|
||||
if targetContentForIteration == 0 { // No budget for content at all
|
||||
break
|
||||
}
|
||||
@@ -352,81 +342,28 @@ func Truncate(s string, maxWidth int, suffix ...string) string {
|
||||
return result
|
||||
}
|
||||
|
||||
// Width calculates the visual width of a string using the global cache for performance.
|
||||
// It excludes ANSI escape sequences and accounts for the global East Asian width setting.
|
||||
// This function is thread-safe.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// width := twdw.Width("Hello\x1b[31mWorld") // Returns 10
|
||||
func Width(str string) int {
|
||||
// Fast path ASCII (Optimization)
|
||||
if len(str) == 1 && str[0] < 0x80 {
|
||||
return 1
|
||||
}
|
||||
|
||||
// SetCacheCapacity changes the cache size dynamically
|
||||
// If capacity <= 0, disables caching entirely
|
||||
func SetCacheCapacity(capacity int) {
|
||||
mu.Lock()
|
||||
currentOpts := globalOptions
|
||||
mu.Unlock()
|
||||
defer mu.Unlock()
|
||||
|
||||
key := makeCacheKey(str, currentOpts.EastAsianWidth)
|
||||
|
||||
// Check Cache (Optimization)
|
||||
if w, found := widthCache.Get(key); found {
|
||||
return w
|
||||
if capacity <= 0 {
|
||||
widthCache = nil // nil = fully disabled
|
||||
return
|
||||
}
|
||||
|
||||
stripped := ansi.ReplaceAllLiteralString(str, "")
|
||||
calculatedWidth := 0
|
||||
|
||||
for _, r := range stripped {
|
||||
calculatedWidth += calculateRunewidth(r, currentOpts)
|
||||
}
|
||||
|
||||
// Store in Cache
|
||||
widthCache.Add(key, calculatedWidth)
|
||||
return calculatedWidth
|
||||
newCache := twcache.NewLRU[string, int](capacity)
|
||||
widthCache = newCache
|
||||
}
|
||||
|
||||
// WidthNoCache calculates the visual width of a string without using the global cache.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// width := twdw.WidthNoCache("Hello\x1b[31mWorld") // Returns 10
|
||||
func WidthNoCache(str string) int {
|
||||
// This function's behavior is equivalent to a one-shot calculation
|
||||
// using the current global options. The WidthWithOptions function
|
||||
// does not interact with the cache, thus fulfilling the requirement.
|
||||
// GetCacheStats returns current cache statistics
|
||||
func GetCacheStats() (size, capacity int, hitRate float64) {
|
||||
mu.Lock()
|
||||
opts := globalOptions
|
||||
mu.Unlock()
|
||||
return WidthWithOptions(str, opts)
|
||||
}
|
||||
defer mu.Unlock()
|
||||
|
||||
// WidthWithOptions calculates the visual width of a string with specific options,
|
||||
// bypassing the global settings and cache. This is useful for one-shot calculations
|
||||
// where global state is not desired.
|
||||
func WidthWithOptions(str string, opts Options) int {
|
||||
stripped := ansi.ReplaceAllLiteralString(str, "")
|
||||
calculatedWidth := 0
|
||||
for _, r := range stripped {
|
||||
calculatedWidth += calculateRunewidth(r, opts)
|
||||
if widthCache == nil {
|
||||
return 0, 0, 0
|
||||
}
|
||||
return calculatedWidth
|
||||
}
|
||||
|
||||
// calculateRunewidth calculates the width of a single rune based on the provided options.
|
||||
// It applies narrow overrides for box drawing characters if configured.
|
||||
func calculateRunewidth(r rune, opts Options) int {
|
||||
if opts.ForceNarrowBorders && isBoxDrawingChar(r) {
|
||||
return 1
|
||||
}
|
||||
|
||||
dwOpts := displaywidth.Options{EastAsianWidth: opts.EastAsianWidth}
|
||||
return dwOpts.Rune(r)
|
||||
}
|
||||
|
||||
// isBoxDrawingChar checks if a rune is within the Unicode Box Drawing range.
|
||||
func isBoxDrawingChar(r rune) bool {
|
||||
return r >= 0x2500 && r <= 0x257F
|
||||
return widthCache.Len(), widthCache.Cap(), widthCache.HitRate()
|
||||
}
|
||||
|
||||
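A hedged usage sketch of the twwidth entry points that appear in the width.go hunks above (the exact widths are not asserted here; the fmt import is assumed):

```go
twwidth.SetEastAsian(true) // global toggle; changing it purges the width cache
fmt.Println(twwidth.Width("Hello\x1b[31m世界")) // ANSI escapes stripped before measuring
fmt.Println(twwidth.WidthWithOptions("世界", twwidth.Options{EastAsianWidth: false})) // one-shot, bypasses the cache
size, capacity, hitRate := twwidth.GetCacheStats()
fmt.Println(size, capacity, hitRate)
```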
23 vendor/github.com/olekukonko/tablewriter/renderer/colorized.go generated vendored
@@ -4,6 +4,7 @@ import (
"io"
"strings"

"github.com/fatih/color"
"github.com/olekukonko/ll"
"github.com/olekukonko/ll/lh"
"github.com/olekukonko/tablewriter/pkg/twwidth"
@@ -23,6 +24,28 @@ type ColorizedConfig struct {
Symbols tw.Symbols // Symbols for table drawing (e.g., corners, lines)
}

// Colors is a slice of color attributes for use with fatih/color, such as color.FgWhite or color.Bold.
type Colors []color.Attribute

// Tint defines foreground and background color settings for table elements, with optional per-column overrides.
type Tint struct {
FG Colors // Foreground color attributes
BG Colors // Background color attributes
Columns []Tint // Per-column color settings
}

// Apply applies the Tint's foreground and background colors to the given text, returning the text unchanged if no colors are set.
func (t Tint) Apply(text string) string {
if len(t.FG) == 0 && len(t.BG) == 0 {
return text
}
// Combine foreground and background colors
combinedColors := append(t.FG, t.BG...)
// Create a color function and apply it to the text
c := color.New(combinedColors...).SprintFunc()
return c(text)
}

// Colorized renders colored ASCII tables with customizable borders, colors, and alignments.
type Colorized struct {
config ColorizedConfig // Renderer configuration
25 vendor/github.com/olekukonko/tablewriter/renderer/tint.go generated vendored
@@ -1,25 +0,0 @@
package renderer

import "github.com/fatih/color"

// Colors is a slice of color attributes for use with fatih/color, such as color.FgWhite or color.Bold.
type Colors []color.Attribute

// Tint defines foreground and background color settings for table elements, with optional per-column overrides.
type Tint struct {
FG Colors // Foreground color attributes
BG Colors // Background color attributes
Columns []Tint // Per-column color settings
}

// Apply applies the Tint's foreground and background colors to the given text, returning the text unchanged if no colors are set.
func (t Tint) Apply(text string) string {
if len(t.FG) == 0 && len(t.BG) == 0 {
return text
}
// Combine foreground and background colors
combinedColors := append(t.FG, t.BG...)
// Create a color function and apply it to the text
c := color.New(combinedColors...).SprintFunc()
return c(text)
}
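The Tint type removed here from renderer/tint.go appears verbatim in renderer/colorized.go above, so its use is the same either way. A short sketch (color attributes come from fatih/color; the renderer and fmt imports are assumed):

```go
header := renderer.Tint{
	FG: renderer.Colors{color.FgHiWhite, color.Bold},
	BG: renderer.Colors{color.BgBlue},
}
// Text is wrapped in the combined ANSI attributes, or returned unchanged if both slices are empty.
fmt.Println(header.Apply("STATUS"))
```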
25 vendor/github.com/olekukonko/tablewriter/tablewriter.go generated vendored
@@ -4,6 +4,7 @@ import (
"bytes"
"io"
"math"
"os"
"reflect"
"runtime"
"strings"
@@ -418,6 +419,7 @@ func (t *Table) Options(opts ...Option) *Table {
}

// force debugging mode if set
// This should be move away form WithDebug
if t.config.Debug {
t.logger.Enable()
t.logger.Resume()
@@ -432,28 +434,11 @@ func (t *Table) Options(opts ...Option) *Table {
goArch := runtime.GOARCH
numCPU := runtime.NumCPU()

// Use the new struct-based info.
// No type assertions or magic strings needed.
info := twwidth.Debugging()

t.logger.Infof("Go Runtime: Version=%s, OS=%s, Arch=%s, CPUs=%d",
goVersion, goOS, goArch, numCPU)

t.logger.Infof("Environment: LC_CTYPE=%s, LANG=%s, TERM=%s, TERM_PROGRAM=%s",
info.Raw.LC_CTYPE,
info.Raw.LANG,
info.Raw.TERM,
info.Raw.TERM_PROGRAM,
)

t.logger.Infof("East Asian Detection: Auto=%v, Mode=%s, ModernEnv=%v, CJKLocale=%v",
info.AutoUseEastAsian,
info.DetectionMode,
info.Derived.IsModernEnv,
info.Derived.IsCJKLocale,
)
t.logger.Infof("Environment: LC_CTYPE=%s, LANG=%s, TERM=%s", os.Getenv("LC_CTYPE"), os.Getenv("LANG"), os.Getenv("TERM"))
t.logger.Infof("Go Runtime: Version=%s, OS=%s, Arch=%s, CPUs=%d", goVersion, goOS, goArch, numCPU)

// send logger to renderer
// this will overwrite the default logger
t.renderer.Logger(t.logger)
return t
}
22 vendor/github.com/olekukonko/tablewriter/zoo.go generated vendored
@@ -991,7 +991,7 @@ func (t *Table) calculateContentMaxWidth(colIdx int, config tw.CellConfig, padLe
constraintTotalCellWidth := 0
hasConstraint := false

// Check new Widths.PerColumn (highest priority)
// 1. Check new Widths.PerColumn (highest priority)
if t.config.Widths.Constrained() {

if colWidth, ok := t.config.Widths.PerColumn.OK(colIdx); ok && colWidth > 0 {
@@ -1001,7 +1001,7 @@ func (t *Table) calculateContentMaxWidth(colIdx int, config tw.CellConfig, padLe
colIdx, constraintTotalCellWidth)
}

// Check new Widths.Global
// 2. Check new Widths.Global
if !hasConstraint && t.config.Widths.Global > 0 {
constraintTotalCellWidth = t.config.Widths.Global
hasConstraint = true
@@ -1009,7 +1009,7 @@ func (t *Table) calculateContentMaxWidth(colIdx int, config tw.CellConfig, padLe
}
}

// Fall back to legacy ColMaxWidths.PerColumn (backward compatibility)
// 3. Fall back to legacy ColMaxWidths.PerColumn (backward compatibility)
if !hasConstraint && config.ColMaxWidths.PerColumn != nil {
if colMax, ok := config.ColMaxWidths.PerColumn.OK(colIdx); ok && colMax > 0 {
constraintTotalCellWidth = colMax
@@ -1019,7 +1019,7 @@ func (t *Table) calculateContentMaxWidth(colIdx int, config tw.CellConfig, padLe
}
}

// Fall back to legacy ColMaxWidths.Global
// 4. Fall back to legacy ColMaxWidths.Global
if !hasConstraint && config.ColMaxWidths.Global > 0 {
constraintTotalCellWidth = config.ColMaxWidths.Global
hasConstraint = true
@@ -1027,7 +1027,7 @@ func (t *Table) calculateContentMaxWidth(colIdx int, config tw.CellConfig, padLe
constraintTotalCellWidth)
}

// Fall back to table MaxWidth if auto-wrapping
// 5. Fall back to table MaxWidth if auto-wrapping
if !hasConstraint && t.config.MaxWidth > 0 && config.Formatting.AutoWrap != tw.WrapNone {
constraintTotalCellWidth = t.config.MaxWidth
hasConstraint = true
@@ -1217,10 +1217,14 @@ func (t *Table) convertToString(value interface{}) string {
// convertItemToCells is responsible for converting a single input item (which could be
// a struct, a basic type, or an item implementing Stringer/Formatter) into a slice
// of strings, where each string represents a cell for the table row.
// zoo.go

// convertItemToCells is responsible for converting a single input item into a slice of strings.
// It now uses the unified struct parser for structs.
func (t *Table) convertItemToCells(item interface{}) ([]string, error) {
t.logger.Debugf("convertItemToCells: Converting item of type %T", item)

// User-defined table-wide stringer (t.stringer) takes highest precedence.
// 1. User-defined table-wide stringer (t.stringer) takes highest precedence.
if t.stringer != nil {
res, err := t.convertToStringer(item)
if err == nil {
@@ -1230,13 +1234,13 @@ func (t *Table) convertItemToCells(item interface{}) ([]string, error) {
t.logger.Warnf("convertItemToCells: Custom table stringer was set but incompatible for type %T: %v. Will attempt other methods.", item, err)
}

// Handle untyped nil directly.
// 2. Handle untyped nil directly.
if item == nil {
t.logger.Debugf("convertItemToCells: Item is untyped nil. Returning single empty cell.")
return []string{""}, nil
}

// Use the new unified struct parser. It handles pointers and embedding.
// 3. Use the new unified struct parser. It handles pointers and embedding.
// We only care about the values it returns.
_, values := t.extractFieldsAndValuesFromStruct(item)
if values != nil {
@@ -1244,7 +1248,7 @@ func (t *Table) convertItemToCells(item interface{}) ([]string, error) {
return values, nil
}

// Fallback for any other single item (e.g., basic types, or types that implement Stringer/Formatter).
// 4. Fallback for any other single item (e.g., basic types, or types that implement Stringer/Formatter).
// This code path is now for non-struct types.
if formatter, ok := item.(tw.Formatter); ok {
t.logger.Debugf("convertItemToCells: Item (non-struct, type %T) is tw.Formatter. Using Format().", item)
4896 vendor/github.com/open-policy-agent/opa/capabilities/v1.12.0.json generated vendored Normal file
File diff suppressed because it is too large
4896 vendor/github.com/open-policy-agent/opa/capabilities/v1.12.1.json generated vendored Normal file
File diff suppressed because it is too large
4896 vendor/github.com/open-policy-agent/opa/capabilities/v1.12.2.json generated vendored Normal file
File diff suppressed because it is too large
4896 vendor/github.com/open-policy-agent/opa/capabilities/v1.12.3.json generated vendored Normal file
File diff suppressed because it is too large
12 vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/opa/callgraph.csv generated vendored
@@ -719,6 +719,18 @@ opa_strings_upper,opa_abort
opa_strings_upper,opa_unicode_to_upper
opa_strings_upper,opa_realloc
opa_strings_upper,opa_unicode_encode_utf8
to_string,opa_value_type
to_string,opa_string_terminated
to_string,opa_value_dump
to_string,opa_strlen
to_string,opa_string_allocated
opa_template_string,opa_value_type
opa_template_string,opa_array_with_cap
opa_template_string,to_string
opa_template_string,opa_array_append
opa_template_string,opa_malloc
opa_template_string,memcpy
opa_template_string,opa_string_allocated
opa_types_is_number,opa_value_type
opa_types_is_number,opa_boolean
opa_types_is_string,opa_value_type
|
BIN vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/opa/opa.wasm generated vendored
Binary file not shown.
1 vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go generated vendored
@@ -162,6 +162,7 @@ var builtinsFunctions = map[string]string{
ast.TrimRight.Name: "opa_strings_trim_right",
ast.TrimSuffix.Name: "opa_strings_trim_suffix",
ast.TrimSpace.Name: "opa_strings_trim_space",
ast.InternalTemplateString.Name: "opa_template_string",
ast.NumbersRange.Name: "opa_numbers_range",
ast.ToNumber.Name: "opa_to_number",
ast.WalkBuiltin.Name: "opa_value_transitive_closure",
2 vendor/github.com/open-policy-agent/opa/internal/planner/planner.go generated vendored
@@ -1768,7 +1768,7 @@ func (p *Planner) planRef(ref ast.Ref, iter planiter) error {
return errors.New("illegal ref: non-var head")
}

if head.Compare(ast.DefaultRootDocument.Value) == 0 {
if head.Equal(ast.DefaultRootDocument.Value) {
virtual := p.rules.Get(ref[0].Value)
base := &baseptr{local: p.vars.GetOrEmpty(ast.DefaultRootDocument.Value.(ast.Var))}
return p.planRefData(virtual, base, ref, 1, iter)
2 vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go generated vendored
@@ -83,7 +83,7 @@ func readModule(r io.Reader) (*module.Module, error) {

var m module.Module

if err := readSections(r, &m); err != nil && err != io.EOF {
if err := readSections(r, &m); err != io.EOF {
return nil, err
}
13 vendor/github.com/open-policy-agent/opa/v1/ast/annotations.go generated vendored
@@ -433,18 +433,7 @@ func (a *Annotations) toObject() (*Object, *Error) {
}

if len(a.Scope) > 0 {
switch a.Scope {
case annotationScopeDocument:
obj.Insert(InternedTerm("scope"), InternedTerm("document"))
case annotationScopePackage:
obj.Insert(InternedTerm("scope"), InternedTerm("package"))
case annotationScopeRule:
obj.Insert(InternedTerm("scope"), InternedTerm("rule"))
case annotationScopeSubpackages:
obj.Insert(InternedTerm("scope"), InternedTerm("subpackages"))
default:
obj.Insert(InternedTerm("scope"), StringTerm(a.Scope))
}
obj.Insert(InternedTerm("scope"), InternedTerm(a.Scope))
}

if len(a.Title) > 0 {
12 vendor/github.com/open-policy-agent/opa/v1/ast/builtins.go generated vendored
@@ -151,6 +151,7 @@ var DefaultBuiltins = [...]*Builtin{
Sprintf,
StringReverse,
RenderTemplate,
InternalTemplateString,

// Numbers
NumbersRange,
@@ -1109,7 +1110,7 @@ var Concat = &Builtin{
types.Named("output", types.S).Description("the joined string"),
),
Categories: stringsCat,
CanSkipBctx: true,
CanSkipBctx: false,
}

var FormatInt = &Builtin{
@@ -1277,7 +1278,7 @@ var Replace = &Builtin{
types.Named("y", types.S).Description("string with replaced substrings"),
),
Categories: stringsCat,
CanSkipBctx: true,
CanSkipBctx: false,
}

var ReplaceN = &Builtin{
@@ -1297,7 +1298,7 @@ The old string comparisons are done in argument order.`,
),
types.Named("output", types.S).Description("string with replaced substrings"),
),
CanSkipBctx: true,
CanSkipBctx: false,
}

var RegexReplace = &Builtin{
@@ -3388,6 +3389,11 @@ var InternalTestCase = &Builtin{
Decl: types.NewFunction([]types.Type{types.NewArray(nil, types.A)}, nil),
}

var InternalTemplateString = &Builtin{
Name: "internal.template_string",
Decl: types.NewFunction([]types.Type{types.NewArray(nil, types.A)}, types.S),
}

/**
* Deprecated built-ins.
*/
8 vendor/github.com/open-policy-agent/opa/v1/ast/capabilities.go generated vendored
@@ -58,12 +58,14 @@ const FeatureRefHeads = "rule_head_refs"
const FeatureRegoV1 = "rego_v1"
const FeatureRegoV1Import = "rego_v1_import"
const FeatureKeywordsInRefs = "keywords_in_refs"
const FeatureTemplateStrings = "template_strings"

// Features carries the default features supported by this version of OPA.
// Use RegisterFeatures to add to them.
var Features = []string{
FeatureRegoV1,
FeatureKeywordsInRefs,
FeatureTemplateStrings,
}

// RegisterFeatures lets applications wrapping OPA register features, to be
@@ -269,6 +271,12 @@ func (c *Capabilities) ContainsFeature(feature string) bool {
return slices.Contains(c.Features, feature)
}

func (c *Capabilities) ContainsBuiltin(name string) bool {
return slices.ContainsFunc(c.Builtins, func(builtin *Builtin) bool {
return builtin.Name == name
})
}

// addBuiltinSorted inserts a built-in into c in sorted order. An existing built-in with the same name
// will be overwritten.
func (c *Capabilities) addBuiltinSorted(bi *Builtin) {
137
vendor/github.com/open-policy-agent/opa/v1/ast/check.go
generated
vendored
@@ -7,7 +7,6 @@ package ast
|
||||
import (
|
||||
"fmt"
|
||||
"slices"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/open-policy-agent/opa/v1/types"
|
||||
@@ -16,11 +15,6 @@ import (
|
||||
|
||||
type varRewriter func(Ref) Ref
|
||||
|
||||
// exprChecker defines the interface for executing type checking on a single
|
||||
// expression. The exprChecker must update the provided TypeEnv with inferred
|
||||
// types of vars.
|
||||
type exprChecker func(*TypeEnv, *Expr) *Error
|
||||
|
||||
// typeChecker implements type checking on queries and rules. Errors are
|
||||
// accumulated on the typeChecker so that a single run can report multiple
|
||||
// issues.
|
||||
@@ -28,7 +22,6 @@ type typeChecker struct {
|
||||
builtins map[string]*Builtin
|
||||
required *Capabilities
|
||||
errs Errors
|
||||
exprCheckers map[string]exprChecker
|
||||
varRewriter varRewriter
|
||||
ss *SchemaSet
|
||||
allowNet []string
|
||||
@@ -39,11 +32,7 @@ type typeChecker struct {
|
||||
|
||||
// newTypeChecker returns a new typeChecker object that has no errors.
|
||||
func newTypeChecker() *typeChecker {
|
||||
return &typeChecker{
|
||||
exprCheckers: map[string]exprChecker{
|
||||
"eq": checkExprEq,
|
||||
},
|
||||
}
|
||||
return &typeChecker{}
|
||||
}
|
||||
|
||||
func (tc *typeChecker) newEnv(exist *TypeEnv) *TypeEnv {
|
||||
@@ -126,43 +115,39 @@ func (tc *typeChecker) Env(builtins map[string]*Builtin) *TypeEnv {
|
||||
// are found. The resulting TypeEnv wraps the provided one. The resulting
|
||||
// TypeEnv will be able to resolve types of vars contained in the body.
|
||||
func (tc *typeChecker) CheckBody(env *TypeEnv, body Body) (*TypeEnv, Errors) {
|
||||
var errors []*Error
|
||||
|
||||
errors := []*Error{}
|
||||
env = tc.newEnv(env)
|
||||
vis := newRefChecker(env, tc.varRewriter)
|
||||
gv := NewGenericVisitor(vis.Visit)
|
||||
|
||||
WalkExprs(body, func(expr *Expr) bool {
|
||||
for _, bexpr := range body {
|
||||
WalkExprs(bexpr, func(expr *Expr) bool {
|
||||
closureErrs := tc.checkClosures(env, expr)
|
||||
errors = append(errors, closureErrs...)
|
||||
|
||||
closureErrs := tc.checkClosures(env, expr)
|
||||
for _, err := range closureErrs {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
// reset errors from previous iteration
|
||||
vis.errs = nil
|
||||
gv.Walk(expr)
|
||||
errors = append(errors, vis.errs...)
|
||||
|
||||
hasClosureErrors := len(closureErrs) > 0
|
||||
|
||||
// reset errors from previous iteration
|
||||
vis.errs = nil
|
||||
NewGenericVisitor(vis.Visit).Walk(expr)
|
||||
for _, err := range vis.errs {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
|
||||
hasRefErrors := len(vis.errs) > 0
|
||||
|
||||
if err := tc.checkExpr(env, expr); err != nil {
|
||||
// Suppress this error if a more actionable one has occurred. In
|
||||
// this case, if an error occurred in a ref or closure contained in
|
||||
// this expression, and the error is due to a nil type, then it's
|
||||
// likely to be the result of the more specific error.
|
||||
skip := (hasClosureErrors || hasRefErrors) && causedByNilType(err)
|
||||
if !skip {
|
||||
errors = append(errors, err)
|
||||
if err := tc.checkExpr(env, expr); err != nil {
|
||||
hasClosureErrors := len(closureErrs) > 0
|
||||
hasRefErrors := len(vis.errs) > 0
|
||||
// Suppress this error if a more actionable one has occurred. In
|
||||
// this case, if an error occurred in a ref or closure contained in
|
||||
// this expression, and the error is due to a nil type, then it's
|
||||
// likely to be the result of the more specific error.
|
||||
skip := (hasClosureErrors || hasRefErrors) && causedByNilType(err)
|
||||
if !skip {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
tc.err(errors)
|
||||
tc.err(errors...)
|
||||
return env, errors
|
||||
}
|
||||
|
||||
@@ -243,7 +228,7 @@ func (tc *typeChecker) checkRule(env *TypeEnv, as *AnnotationSet, rule *Rule) {
|
||||
for _, schemaAnnot := range schemaAnnots {
|
||||
refType, err := tc.getSchemaType(schemaAnnot, rule)
|
||||
if err != nil {
|
||||
tc.err([]*Error{err})
|
||||
tc.err(err)
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -259,7 +244,7 @@ func (tc *typeChecker) checkRule(env *TypeEnv, as *AnnotationSet, rule *Rule) {
|
||||
} else {
|
||||
newType, err := override(ref[len(prefixRef):], t, refType, rule)
|
||||
if err != nil {
|
||||
tc.err([]*Error{err})
|
||||
tc.err(err)
|
||||
continue
|
||||
}
|
||||
env.tree.Put(prefixRef, newType)
|
||||
@@ -281,23 +266,25 @@ func (tc *typeChecker) checkRule(env *TypeEnv, as *AnnotationSet, rule *Rule) {
|
||||
var tpe types.Type
|
||||
|
||||
if len(rule.Head.Args) > 0 {
|
||||
// If args are not referred to in body, infer as any.
|
||||
WalkVars(rule.Head.Args, func(v Var) bool {
|
||||
if cpy.GetByValue(v) == nil {
|
||||
cpy.tree.PutOne(v, types.A)
|
||||
}
|
||||
return false
|
||||
})
|
||||
for _, arg := range rule.Head.Args {
|
||||
// If args are not referred to in body, infer as any.
|
||||
WalkTerms(arg, func(t *Term) bool {
|
||||
if _, ok := t.Value.(Var); ok {
|
||||
if cpy.GetByValue(t.Value) == nil {
|
||||
cpy.tree.PutOne(t.Value, types.A)
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
}
|
||||
|
||||
// Construct function type.
|
||||
args := make([]types.Type, len(rule.Head.Args))
|
||||
for i := range len(rule.Head.Args) {
|
||||
for i := range rule.Head.Args {
|
||||
args[i] = cpy.GetByValue(rule.Head.Args[i].Value)
|
||||
}
|
||||
|
||||
f := types.NewFunction(args, cpy.Get(rule.Head.Value))
|
||||
|
||||
tpe = f
|
||||
tpe = types.NewFunction(args, cpy.GetByValue(rule.Head.Value.Value))
|
||||
} else {
|
||||
switch rule.Head.RuleKind() {
|
||||
case SingleValue:
|
||||
@@ -310,7 +297,7 @@ func (tc *typeChecker) checkRule(env *TypeEnv, as *AnnotationSet, rule *Rule) {
|
||||
var err error
|
||||
tpe, err = nestedObject(cpy, objPath, typeV)
|
||||
if err != nil {
|
||||
tc.err([]*Error{NewError(TypeErr, rule.Head.Location, "%s", err.Error())})
|
||||
tc.err(NewError(TypeErr, rule.Head.Location, "%s", err.Error()))
|
||||
tpe = nil
|
||||
}
|
||||
} else if typeV != nil {
|
||||
@@ -374,9 +361,8 @@ func (tc *typeChecker) checkExpr(env *TypeEnv, expr *Expr) *Error {
|
||||
}
|
||||
}
|
||||
|
||||
checker := tc.exprCheckers[operator]
|
||||
if checker != nil {
|
||||
return checker(env, expr)
|
||||
if operator == "eq" {
|
||||
return checkExprEq(env, expr)
|
||||
}
|
||||
|
||||
return tc.checkExprBuiltin(env, expr)
|
||||
@@ -599,7 +585,7 @@ func unify1(env *TypeEnv, term *Term, tpe types.Type, union bool) bool {
|
||||
return unifies
|
||||
}
|
||||
return false
|
||||
case Set:
|
||||
case *set:
|
||||
switch tpe := tpe.(type) {
|
||||
case *types.Set:
|
||||
return unify1Set(env, v, tpe, union)
|
||||
@@ -674,14 +660,14 @@ func unify1Object(env *TypeEnv, val Object, tpe *types.Object, union bool) bool
|
||||
return !stop
|
||||
}
|
||||
|
||||
func unify1Set(env *TypeEnv, val Set, tpe *types.Set, union bool) bool {
|
||||
func unify1Set(env *TypeEnv, val *set, tpe *types.Set, union bool) bool {
|
||||
of := types.Values(tpe)
|
||||
return !val.Until(func(elem *Term) bool {
|
||||
return !unify1(env, elem, of, union)
|
||||
})
|
||||
}
|
||||
|
||||
func (tc *typeChecker) err(errors []*Error) {
|
||||
func (tc *typeChecker) err(errors ...*Error) {
|
||||
tc.errs = append(tc.errs, errors...)
|
||||
}
|
||||
|
||||
@@ -702,7 +688,6 @@ func newRefChecker(env *TypeEnv, f varRewriter) *refChecker {
|
||||
|
||||
return &refChecker{
|
||||
env: env,
|
||||
errs: nil,
|
||||
varRewriter: f,
|
||||
}
|
||||
}
|
||||
@@ -714,8 +699,9 @@ func (rc *refChecker) Visit(x any) bool {
|
||||
case *Expr:
|
||||
switch terms := x.Terms.(type) {
|
||||
case []*Term:
|
||||
vis := NewGenericVisitor(rc.Visit)
|
||||
for i := 1; i < len(terms); i++ {
|
||||
NewGenericVisitor(rc.Visit).Walk(terms[i])
|
||||
vis.Walk(terms[i])
|
||||
}
|
||||
return true
|
||||
case *Term:
|
||||
@@ -805,7 +791,6 @@ func (rc *refChecker) checkRef(curr *TypeEnv, node *typeTreeNode, ref Ref, idx i
|
||||
}
|
||||
|
||||
func (rc *refChecker) checkRefLeaf(tpe types.Type, ref Ref, idx int) *Error {
|
||||
|
||||
if idx == len(ref) {
|
||||
return nil
|
||||
}
|
||||
@@ -820,16 +805,16 @@ func (rc *refChecker) checkRefLeaf(tpe types.Type, ref Ref, idx int) *Error {
|
||||
switch value := head.Value.(type) {
|
||||
|
||||
case Var:
|
||||
if exist := rc.env.GetByValue(value); exist != nil {
|
||||
if exist := rc.env.GetByValue(head.Value); exist != nil {
|
||||
if !unifies(exist, keys) {
|
||||
return newRefErrInvalid(ref[0].Location, rc.varRewriter(ref), idx, exist, keys, getOneOfForType(tpe))
|
||||
}
|
||||
} else {
|
||||
rc.env.tree.PutOne(value, types.Keys(tpe))
|
||||
rc.env.tree.PutOne(head.Value, types.Keys(tpe))
|
||||
}
|
||||
|
||||
case Ref:
|
||||
if exist := rc.env.Get(value); exist != nil {
|
||||
if exist := rc.env.GetByRef(value); exist != nil {
|
||||
if !unifies(exist, keys) {
|
||||
return newRefErrInvalid(ref[0].Location, rc.varRewriter(ref), idx, exist, keys, getOneOfForType(tpe))
|
||||
}
|
||||
@@ -1130,7 +1115,7 @@ func getOneOfForNode(node *typeTreeNode) (result []Value) {
|
||||
return false
|
||||
})
|
||||
|
||||
sortValueSlice(result)
|
||||
slices.SortFunc(result, Value.Compare)
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -1153,16 +1138,10 @@ func getOneOfForType(tpe types.Type) (result []Value) {
|
||||
}
|
||||
|
||||
result = removeDuplicate(result)
|
||||
sortValueSlice(result)
|
||||
slices.SortFunc(result, Value.Compare)
|
||||
return result
|
||||
}
|
||||
|
||||
func sortValueSlice(sl []Value) {
|
||||
sort.Slice(sl, func(i, j int) bool {
|
||||
return sl[i].Compare(sl[j]) < 0
|
||||
})
|
||||
}
|
||||
|
||||
func removeDuplicate(list []Value) []Value {
|
||||
seen := make(map[Value]bool)
|
||||
var newResult []Value
|
||||
@@ -1186,13 +1165,13 @@ func getArgTypes(env *TypeEnv, args []*Term) []types.Type {
|
||||
// getPrefix returns the shortest prefix of ref that exists in env
|
||||
func getPrefix(env *TypeEnv, ref Ref) (Ref, types.Type) {
|
||||
if len(ref) == 1 {
|
||||
t := env.Get(ref)
|
||||
t := env.GetByRef(ref)
|
||||
if t != nil {
|
||||
return ref, t
|
||||
}
|
||||
}
|
||||
for i := 1; i < len(ref); i++ {
|
||||
t := env.Get(ref[:i])
|
||||
t := env.GetByRef(ref[:i])
|
||||
if t != nil {
|
||||
return ref[:i], t
|
||||
}
|
||||
@@ -1200,12 +1179,14 @@ func getPrefix(env *TypeEnv, ref Ref) (Ref, types.Type) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var dynamicAnyAny = types.NewDynamicProperty(types.A, types.A)
|
||||
|
||||
// override takes a type t and returns a type obtained from t where the path represented by ref within it has type o (overriding the original type of that path)
|
||||
func override(ref Ref, t types.Type, o types.Type, rule *Rule) (types.Type, *Error) {
|
||||
var newStaticProps []*types.StaticProperty
|
||||
obj, ok := t.(*types.Object)
|
||||
if !ok {
|
||||
newType, err := getObjectType(ref, o, rule, types.NewDynamicProperty(types.A, types.A))
|
||||
newType, err := getObjectType(ref, o, rule, dynamicAnyAny)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
35
vendor/github.com/open-policy-agent/opa/v1/ast/compare.go
generated
vendored
@@ -96,6 +96,9 @@ func Compare(a, b any) int {
|
||||
return -1
|
||||
}
|
||||
return 1
|
||||
case *TemplateString:
|
||||
b := b.(*TemplateString)
|
||||
return a.Compare(b)
|
||||
case Var:
|
||||
return VarCompare(a, b.(Var))
|
||||
case Ref:
|
||||
@@ -179,26 +182,28 @@ func sortOrder(x any) int {
|
||||
return 2
|
||||
case String:
|
||||
return 3
|
||||
case Var:
|
||||
case *TemplateString:
|
||||
return 4
|
||||
case Ref:
|
||||
case Var:
|
||||
return 5
|
||||
case *Array:
|
||||
case Ref:
|
||||
return 6
|
||||
case Object:
|
||||
case *Array:
|
||||
return 7
|
||||
case Set:
|
||||
case Object:
|
||||
return 8
|
||||
case *ArrayComprehension:
|
||||
case Set:
|
||||
return 9
|
||||
case *ObjectComprehension:
|
||||
case *ArrayComprehension:
|
||||
return 10
|
||||
case *SetComprehension:
|
||||
case *ObjectComprehension:
|
||||
return 11
|
||||
case Call:
|
||||
case *SetComprehension:
|
||||
return 12
|
||||
case Args:
|
||||
case Call:
|
||||
return 13
|
||||
case Args:
|
||||
return 14
|
||||
case *Expr:
|
||||
return 100
|
||||
case *SomeDecl:
|
||||
@@ -322,14 +327,6 @@ func TermValueEqual(a, b *Term) bool {
|
||||
}
|
||||
|
||||
func ValueEqual(a, b Value) bool {
|
||||
// TODO(ae): why doesn't this work the same?
|
||||
//
|
||||
// case interface{ Equal(Value) bool }:
|
||||
// return v.Equal(b)
|
||||
//
|
||||
// When put on top, golangci-lint even flags the other cases as unreachable..
|
||||
// but TestTopdownVirtualCache will have failing test cases when we replace
|
||||
// the other cases with the above one.. 🤔
|
||||
switch v := a.(type) {
|
||||
case Null:
|
||||
return v.Equal(b)
|
||||
@@ -345,6 +342,8 @@ func ValueEqual(a, b Value) bool {
|
||||
return v.Equal(b)
|
||||
case *Array:
|
||||
return v.Equal(b)
|
||||
case *TemplateString:
|
||||
return v.Equal(b)
|
||||
}
|
||||
|
||||
return a.Compare(b) == 0
|
||||
|
||||
698
vendor/github.com/open-policy-agent/opa/v1/ast/compile.go
generated
vendored
File diff suppressed because it is too large
67
vendor/github.com/open-policy-agent/opa/v1/ast/env.go
generated
vendored
@@ -54,15 +54,14 @@ func (env *TypeEnv) GetByValue(v Value) types.Type {
|
||||
return types.B
|
||||
case Number:
|
||||
return types.N
|
||||
case String:
|
||||
case String, *TemplateString:
|
||||
return types.S
|
||||
|
||||
// Composites.
|
||||
case *Array:
|
||||
static := make([]types.Type, x.Len())
|
||||
for i := range static {
|
||||
tpe := env.GetByValue(x.Elem(i).Value)
|
||||
static[i] = tpe
|
||||
static[i] = env.GetByValue(x.Elem(i).Value)
|
||||
}
|
||||
|
||||
var dynamic types.Type
|
||||
@@ -80,17 +79,13 @@ func (env *TypeEnv) GetByValue(v Value) types.Type {
|
||||
|
||||
x.Foreach(func(k, v *Term) {
|
||||
if IsConstant(k.Value) {
|
||||
kjson, err := JSON(k.Value)
|
||||
if err == nil {
|
||||
tpe := env.GetByValue(v.Value)
|
||||
static = append(static, types.NewStaticProperty(kjson, tpe))
|
||||
if kjson, err := JSON(k.Value); err == nil {
|
||||
static = append(static, types.NewStaticProperty(kjson, env.GetByValue(v.Value)))
|
||||
return
|
||||
}
|
||||
}
|
||||
// Can't handle it as a static property, fallback to dynamic
|
||||
typeK := env.GetByValue(k.Value)
|
||||
typeV := env.GetByValue(v.Value)
|
||||
dynamic = types.NewDynamicProperty(typeK, typeV)
|
||||
dynamic = types.NewDynamicProperty(env.GetByValue(k.Value), env.GetByValue(v.Value))
|
||||
})
|
||||
|
||||
if len(static) == 0 && dynamic == nil {
|
||||
@@ -99,7 +94,7 @@ func (env *TypeEnv) GetByValue(v Value) types.Type {
|
||||
|
||||
return types.NewObject(static, dynamic)
|
||||
|
||||
case Set:
|
||||
case *set:
|
||||
var tpe types.Type
|
||||
x.Foreach(func(elem *Term) {
|
||||
tpe = types.Or(tpe, env.GetByValue(elem.Value))
|
||||
@@ -162,7 +157,6 @@ func (env *TypeEnv) GetByRef(ref Ref) types.Type {
|
||||
}
|
||||
|
||||
func (env *TypeEnv) getRefFallback(ref Ref) types.Type {
|
||||
|
||||
if env.next != nil {
|
||||
return env.next.GetByRef(ref)
|
||||
}
|
||||
@@ -299,15 +293,11 @@ func (n *typeTreeNode) PutOne(key Value, tpe types.Type) {
|
||||
func (n *typeTreeNode) Put(path Ref, tpe types.Type) {
|
||||
curr := n
|
||||
for _, term := range path {
|
||||
c, ok := curr.children.Get(term.Value)
|
||||
|
||||
var child *typeTreeNode
|
||||
child, ok := curr.children.Get(term.Value)
|
||||
if !ok {
|
||||
child = newTypeTree()
|
||||
child.key = term.Value
|
||||
curr.children.Put(child.key, child)
|
||||
} else {
|
||||
child = c
|
||||
}
|
||||
|
||||
curr = child
|
||||
@@ -321,23 +311,18 @@ func (n *typeTreeNode) Put(path Ref, tpe types.Type) {
|
||||
func (n *typeTreeNode) Insert(path Ref, tpe types.Type, env *TypeEnv) {
|
||||
curr := n
|
||||
for i, term := range path {
|
||||
c, ok := curr.children.Get(term.Value)
|
||||
|
||||
var child *typeTreeNode
|
||||
child, ok := curr.children.Get(term.Value)
|
||||
if !ok {
|
||||
child = newTypeTree()
|
||||
child.key = term.Value
|
||||
curr.children.Put(child.key, child)
|
||||
} else {
|
||||
child = c
|
||||
if child.value != nil && i+1 < len(path) {
|
||||
// If child has an object value, merge the new value into it.
|
||||
if o, ok := child.value.(*types.Object); ok {
|
||||
var err error
|
||||
child.value, err = insertIntoObject(o, path[i+1:], tpe, env)
|
||||
if err != nil {
|
||||
panic(fmt.Errorf("unreachable, insertIntoObject: %w", err))
|
||||
}
|
||||
} else if child.value != nil && i+1 < len(path) {
|
||||
// If child has an object value, merge the new value into it.
|
||||
if o, ok := child.value.(*types.Object); ok {
|
||||
var err error
|
||||
child.value, err = insertIntoObject(o, path[i+1:], tpe, env)
|
||||
if err != nil {
|
||||
panic(fmt.Errorf("unreachable, insertIntoObject: %w", err))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -349,8 +334,7 @@ func (n *typeTreeNode) Insert(path Ref, tpe types.Type, env *TypeEnv) {
|
||||
|
||||
if _, ok := tpe.(*types.Object); ok && curr.children.Len() > 0 {
|
||||
// merge all leafs into the inserted object
|
||||
leafs := curr.Leafs()
|
||||
for p, t := range leafs {
|
||||
for p, t := range curr.Leafs() {
|
||||
var err error
|
||||
curr.value, err = insertIntoObject(curr.value.(*types.Object), *p, t, env)
|
||||
if err != nil {
|
||||
@@ -388,7 +372,8 @@ func mergeTypes(a, b types.Type) types.Type {
|
||||
bDynProps := bObj.DynamicProperties()
|
||||
dynProps := types.NewDynamicProperty(
|
||||
types.Or(aDynProps.Key, bDynProps.Key),
|
||||
mergeTypes(aDynProps.Value, bDynProps.Value))
|
||||
mergeTypes(aDynProps.Value, bDynProps.Value),
|
||||
)
|
||||
return types.NewObject(nil, dynProps)
|
||||
} else if bAny, ok := b.(types.Any); ok && len(a.StaticProperties()) == 0 {
|
||||
// If a is an object type with no static components ...
|
||||
@@ -417,14 +402,14 @@ func mergeTypes(a, b types.Type) types.Type {
|
||||
}
|
||||
|
||||
func (n *typeTreeNode) String() string {
|
||||
b := strings.Builder{}
|
||||
b := &strings.Builder{}
|
||||
|
||||
key := "-"
|
||||
if k := n.key; k != nil {
|
||||
b.WriteString(k.String())
|
||||
} else {
|
||||
b.WriteString("-")
|
||||
key = k.String()
|
||||
}
|
||||
|
||||
b.WriteString(key)
|
||||
if v := n.value; v != nil {
|
||||
b.WriteString(": ")
|
||||
b.WriteString(v.String())
|
||||
@@ -432,9 +417,7 @@ func (n *typeTreeNode) String() string {
|
||||
|
||||
n.children.Iter(func(_ Value, child *typeTreeNode) bool {
|
||||
b.WriteString("\n\t+ ")
|
||||
s := child.String()
|
||||
s = strings.ReplaceAll(s, "\n", "\n\t")
|
||||
b.WriteString(s)
|
||||
b.WriteString(strings.ReplaceAll(child.String(), "\n", "\n\t"))
|
||||
|
||||
return false
|
||||
})
|
||||
@@ -485,7 +468,8 @@ func (n *typeTreeNode) Leafs() map[*Ref]types.Type {
|
||||
func collectLeafs(n *typeTreeNode, path Ref, leafs map[*Ref]types.Type) {
|
||||
nPath := append(path, NewTerm(n.key))
|
||||
if n.Leaf() {
|
||||
leafs[&nPath] = n.Value()
|
||||
npc := nPath // copy of else nPath escapes to heap even if !n.Leaf()
|
||||
leafs[&npc] = n.Value()
|
||||
return
|
||||
}
|
||||
n.children.Iter(func(_ Value, v *typeTreeNode) bool {
|
||||
@@ -513,7 +497,6 @@ func selectConstant(tpe types.Type, term *Term) types.Type {
|
||||
// contains vars or refs, then the returned type will be a union of the
|
||||
// possible types.
|
||||
func selectRef(tpe types.Type, ref Ref) types.Type {
|
||||
|
||||
if tpe == nil || len(ref) == 0 {
|
||||
return tpe
|
||||
}
|
||||
|
||||
6
vendor/github.com/open-policy-agent/opa/v1/ast/errors.go
generated
vendored
@@ -121,9 +121,13 @@ func (e *Error) Error() string {

// NewError returns a new Error object.
func NewError(code string, loc *Location, f string, a ...any) *Error {
return newErrorString(code, loc, fmt.Sprintf(f, a...))
}

func newErrorString(code string, loc *Location, m string) *Error {
return &Error{
Code: code,
Location: loc,
Message: fmt.Sprintf(f, a...),
Message: m,
}
}

2
vendor/github.com/open-policy-agent/opa/v1/ast/index.go
generated
vendored
@@ -412,7 +412,7 @@ func (i *refindices) updateGlobMatch(rule *Rule, expr *Expr) {
if _, ok := match.Value.(Var); ok {
var ref Ref
for _, other := range i.rules[rule] {
if _, ok := other.Value.(Var); ok && other.Value.Compare(match.Value) == 0 {
if ov, ok := other.Value.(Var); ok && ov.Equal(match.Value) {
ref = other.Ref
}
}

151
vendor/github.com/open-policy-agent/opa/v1/ast/internal/scanner/scanner.go
generated
vendored
@@ -158,18 +158,42 @@ func (s *Scanner) WithoutKeywords(kws map[string]tokens.Token) (*Scanner, map[st
|
||||
return &cpy, kw
|
||||
}
|
||||
|
||||
type ScanOptions struct {
|
||||
continueTemplateString bool
|
||||
rawTemplateString bool
|
||||
}
|
||||
|
||||
type ScanOption func(*ScanOptions)
|
||||
|
||||
// ContinueTemplateString will continue scanning a template string
|
||||
func ContinueTemplateString(raw bool) ScanOption {
|
||||
return func(opts *ScanOptions) {
|
||||
opts.continueTemplateString = true
|
||||
opts.rawTemplateString = raw
|
||||
}
|
||||
}
|
||||
|
||||
// Scan will increment the scanners position in the source
|
||||
// code until the next token is found. The token, starting position
|
||||
// of the token, string literal, and any errors encountered are
|
||||
// returned. A token will always be returned, the caller must check
|
||||
// for any errors before using the other values.
|
||||
func (s *Scanner) Scan() (tokens.Token, Position, string, []Error) {
|
||||
func (s *Scanner) Scan(opts ...ScanOption) (tokens.Token, Position, string, []Error) {
|
||||
scanOpts := &ScanOptions{}
|
||||
for _, opt := range opts {
|
||||
opt(scanOpts)
|
||||
}
|
||||
|
||||
pos := Position{Offset: s.offset - s.width, Row: s.row, Col: s.col, Tabs: s.tabs}
|
||||
var tok tokens.Token
|
||||
var lit string
|
||||
|
||||
if s.isWhitespace() {
|
||||
if scanOpts.continueTemplateString {
|
||||
if scanOpts.rawTemplateString {
|
||||
lit, tok = s.scanRawTemplateString()
|
||||
} else {
|
||||
lit, tok = s.scanTemplateString()
|
||||
}
|
||||
} else if s.isWhitespace() {
|
||||
// string(rune) is an unnecessary heap allocation in this case as we know all
|
||||
// the possible whitespace values, and can simply translate to string ourselves
|
||||
switch s.curr {
|
||||
@@ -275,6 +299,17 @@ func (s *Scanner) Scan() (tokens.Token, Position, string, []Error) {
|
||||
tok = tokens.Semicolon
|
||||
case '.':
|
||||
tok = tokens.Dot
|
||||
case '$':
|
||||
switch s.curr {
|
||||
case '`':
|
||||
s.next()
|
||||
lit, tok = s.scanRawTemplateString()
|
||||
case '"':
|
||||
s.next()
|
||||
lit, tok = s.scanTemplateString()
|
||||
default:
|
||||
s.error("illegal $ character")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -395,6 +430,116 @@ func (s *Scanner) scanRawString() string {
|
||||
return util.ByteSliceToString(s.bs[start : s.offset-1])
|
||||
}
|
||||
|
||||
func (s *Scanner) scanTemplateString() (string, tokens.Token) {
|
||||
tok := tokens.TemplateStringPart
|
||||
start := s.literalStart()
|
||||
var escapes []int
|
||||
for {
|
||||
ch := s.curr
|
||||
|
||||
if ch == '\n' || ch < 0 {
|
||||
s.error("non-terminated string")
|
||||
break
|
||||
}
|
||||
|
||||
s.next()
|
||||
|
||||
if ch == '"' {
|
||||
tok = tokens.TemplateStringEnd
|
||||
break
|
||||
}
|
||||
|
||||
if ch == '{' {
|
||||
break
|
||||
}
|
||||
|
||||
if ch == '\\' {
|
||||
switch s.curr {
|
||||
case '\\', '"', '/', 'b', 'f', 'n', 'r', 't':
|
||||
s.next()
|
||||
case '{':
|
||||
escapes = append(escapes, s.offset-1)
|
||||
s.next()
|
||||
case 'u':
|
||||
s.next()
|
||||
s.next()
|
||||
s.next()
|
||||
s.next()
|
||||
default:
|
||||
s.error("illegal escape sequence")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Lazily remove escapes to not unnecessarily allocate a new byte slice
|
||||
if len(escapes) > 0 {
|
||||
return util.ByteSliceToString(removeEscapes(s, escapes, start)), tok
|
||||
}
|
||||
|
||||
return util.ByteSliceToString(s.bs[start : s.offset-1]), tok
|
||||
}
|
||||
|
||||
func (s *Scanner) scanRawTemplateString() (string, tokens.Token) {
|
||||
tok := tokens.RawTemplateStringPart
|
||||
start := s.literalStart()
|
||||
var escapes []int
|
||||
for {
|
||||
ch := s.curr
|
||||
|
||||
if ch < 0 {
|
||||
s.error("non-terminated string")
|
||||
break
|
||||
}
|
||||
|
||||
s.next()
|
||||
|
||||
if ch == '`' {
|
||||
tok = tokens.RawTemplateStringEnd
|
||||
break
|
||||
}
|
||||
|
||||
if ch == '{' {
|
||||
break
|
||||
}
|
||||
|
||||
if ch == '\\' {
|
||||
switch s.curr {
|
||||
case '{':
|
||||
escapes = append(escapes, s.offset-1)
|
||||
s.next()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Lazily remove escapes to not unnecessarily allocate a new byte slice
|
||||
if len(escapes) > 0 {
|
||||
return util.ByteSliceToString(removeEscapes(s, escapes, start)), tok
|
||||
}
|
||||
|
||||
return util.ByteSliceToString(s.bs[start : s.offset-1]), tok
|
||||
}
|
||||
|
||||
func removeEscapes(s *Scanner, escapes []int, start int) []byte {
|
||||
from := start
|
||||
bs := make([]byte, 0, s.offset-start-len(escapes))
|
||||
|
||||
for _, escape := range escapes {
|
||||
// Append the bytes before the escape sequence.
|
||||
if escape > from {
|
||||
bs = append(bs, s.bs[from:escape-1]...)
|
||||
}
|
||||
// Skip the escape character.
|
||||
from = escape
|
||||
}
|
||||
|
||||
// Append the remaining bytes after the last escape sequence.
|
||||
if from < s.offset-1 {
|
||||
bs = append(bs, s.bs[from:s.offset-1]...)
|
||||
}
|
||||
|
||||
return bs
|
||||
}
|
||||
|
||||
func (s *Scanner) scanComment() string {
|
||||
start := s.literalStart()
|
||||
for s.curr != '\n' && s.curr != -1 {
|
||||
|
||||
108
vendor/github.com/open-policy-agent/opa/v1/ast/internal/tokens/tokens.go
generated
vendored
@@ -39,6 +39,10 @@ const (
|
||||
|
||||
Number
|
||||
String
|
||||
TemplateStringPart
|
||||
TemplateStringEnd
|
||||
RawTemplateStringPart
|
||||
RawTemplateStringEnd
|
||||
|
||||
LBrack
|
||||
RBrack
|
||||
@@ -67,6 +71,7 @@ const (
|
||||
Lte
|
||||
Dot
|
||||
Semicolon
|
||||
Dollar
|
||||
|
||||
Every
|
||||
Contains
|
||||
@@ -74,53 +79,58 @@ const (
|
||||
)
|
||||
|
||||
var strings = [...]string{
|
||||
Illegal: "illegal",
|
||||
EOF: "eof",
|
||||
Whitespace: "whitespace",
|
||||
Comment: "comment",
|
||||
Ident: "identifier",
|
||||
Package: "package",
|
||||
Import: "import",
|
||||
As: "as",
|
||||
Default: "default",
|
||||
Else: "else",
|
||||
Not: "not",
|
||||
Some: "some",
|
||||
With: "with",
|
||||
Null: "null",
|
||||
True: "true",
|
||||
False: "false",
|
||||
Number: "number",
|
||||
String: "string",
|
||||
LBrack: "[",
|
||||
RBrack: "]",
|
||||
LBrace: "{",
|
||||
RBrace: "}",
|
||||
LParen: "(",
|
||||
RParen: ")",
|
||||
Comma: ",",
|
||||
Colon: ":",
|
||||
Add: "plus",
|
||||
Sub: "minus",
|
||||
Mul: "mul",
|
||||
Quo: "div",
|
||||
Rem: "rem",
|
||||
And: "and",
|
||||
Or: "or",
|
||||
Unify: "eq",
|
||||
Equal: "equal",
|
||||
Assign: "assign",
|
||||
In: "in",
|
||||
Neq: "neq",
|
||||
Gt: "gt",
|
||||
Lt: "lt",
|
||||
Gte: "gte",
|
||||
Lte: "lte",
|
||||
Dot: ".",
|
||||
Semicolon: ";",
|
||||
Every: "every",
|
||||
Contains: "contains",
|
||||
If: "if",
|
||||
Illegal: "illegal",
|
||||
EOF: "eof",
|
||||
Whitespace: "whitespace",
|
||||
Comment: "comment",
|
||||
Ident: "identifier",
|
||||
Package: "package",
|
||||
Import: "import",
|
||||
As: "as",
|
||||
Default: "default",
|
||||
Else: "else",
|
||||
Not: "not",
|
||||
Some: "some",
|
||||
With: "with",
|
||||
Null: "null",
|
||||
True: "true",
|
||||
False: "false",
|
||||
Number: "number",
|
||||
String: "string",
|
||||
TemplateStringPart: "template-string-part",
|
||||
TemplateStringEnd: "template-string-end",
|
||||
RawTemplateStringPart: "raw-template-string-part",
|
||||
RawTemplateStringEnd: "raw-template-string-end",
|
||||
LBrack: "[",
|
||||
RBrack: "]",
|
||||
LBrace: "{",
|
||||
RBrace: "}",
|
||||
LParen: "(",
|
||||
RParen: ")",
|
||||
Comma: ",",
|
||||
Colon: ":",
|
||||
Add: "plus",
|
||||
Sub: "minus",
|
||||
Mul: "mul",
|
||||
Quo: "div",
|
||||
Rem: "rem",
|
||||
And: "and",
|
||||
Or: "or",
|
||||
Unify: "eq",
|
||||
Equal: "equal",
|
||||
Assign: "assign",
|
||||
In: "in",
|
||||
Neq: "neq",
|
||||
Gt: "gt",
|
||||
Lt: "lt",
|
||||
Gte: "gte",
|
||||
Lte: "lte",
|
||||
Dot: ".",
|
||||
Semicolon: ";",
|
||||
Dollar: "dollar",
|
||||
Every: "every",
|
||||
Contains: "contains",
|
||||
If: "if",
|
||||
}
|
||||
|
||||
var keywords = map[string]Token{
|
||||
@@ -147,3 +157,7 @@ func IsKeyword(tok Token) bool {
|
||||
_, ok := keywords[strings[tok]]
|
||||
return ok
|
||||
}
|
||||
|
||||
func KeywordFor(tok Token) string {
|
||||
return strings[tok]
|
||||
}
|
||||
|
||||
22
vendor/github.com/open-policy-agent/opa/v1/ast/interning.go
generated
vendored
@@ -42,10 +42,17 @@ var (
}

internedVarValues = map[string]Value{
"input": Var("input"),
"data": Var("data"),
"key": Var("key"),
"value": Var("value"),
"input": Var("input"),
"data": Var("data"),
"args": Var("args"),
"schema": Var("schema"),
"key": Var("key"),
"value": Var("value"),
"future": Var("future"),
"rego": Var("rego"),
"set": Var("set"),
"internal": Var("internal"),
"else": Var("else"),

"i": Var("i"), "j": Var("j"), "k": Var("k"), "v": Var("v"), "x": Var("x"), "y": Var("y"), "z": Var("z"),
}
@@ -190,6 +197,13 @@ func InternedTerm[T internable](v T) *Term {
}
}

// InternedItem works just like [Item] but returns interned terms for both
// key and value where possible. This is mostly useful for making tests less
// verbose.
func InternedItem[K, V internable](key K, value V) [2]*Term {
return [2]*Term{InternedTerm(key), InternedTerm(value)}
}

// InternedIntFromString returns a term with the given integer value if the string
// maps to an interned term. If the string does not map to an interned term, nil is
// returned.

147
vendor/github.com/open-policy-agent/opa/v1/ast/parser.go
generated
vendored
@@ -1736,6 +1736,10 @@ func (p *Parser) parseTerm() *Term {
|
||||
term = p.parseNumber()
|
||||
case tokens.String:
|
||||
term = p.parseString()
|
||||
case tokens.TemplateStringPart, tokens.TemplateStringEnd:
|
||||
term = p.parseTemplateString(false)
|
||||
case tokens.RawTemplateStringPart, tokens.RawTemplateStringEnd:
|
||||
term = p.parseTemplateString(true)
|
||||
case tokens.Ident, tokens.Contains: // NOTE(sr): contains anywhere BUT in rule heads gets no special treatment
|
||||
term = p.parseVar()
|
||||
case tokens.LBrack:
|
||||
@@ -1767,7 +1771,7 @@ func (p *Parser) parseTermFinish(head *Term, skipws bool) *Term {
|
||||
return nil
|
||||
}
|
||||
offset := p.s.loc.Offset
|
||||
p.doScan(skipws)
|
||||
p.doScan(skipws, noScanOptions...)
|
||||
|
||||
switch p.s.tok {
|
||||
case tokens.LParen, tokens.Dot, tokens.LBrack:
|
||||
@@ -1788,7 +1792,7 @@ func (p *Parser) parseHeadFinish(head *Term, skipws bool) *Term {
|
||||
return nil
|
||||
}
|
||||
offset := p.s.loc.Offset
|
||||
p.doScan(false)
|
||||
p.scanWS()
|
||||
|
||||
switch p.s.tok {
|
||||
case tokens.Add, tokens.Sub, tokens.Mul, tokens.Quo, tokens.Rem,
|
||||
@@ -1796,7 +1800,7 @@ func (p *Parser) parseHeadFinish(head *Term, skipws bool) *Term {
|
||||
tokens.Equal, tokens.Neq, tokens.Gt, tokens.Gte, tokens.Lt, tokens.Lte:
|
||||
p.illegalToken()
|
||||
case tokens.Whitespace:
|
||||
p.doScan(skipws)
|
||||
p.doScan(skipws, noScanOptions...)
|
||||
}
|
||||
|
||||
switch p.s.tok {
|
||||
@@ -1886,6 +1890,11 @@ func (p *Parser) parseString() *Term {
|
||||
return NewTerm(InternedEmptyString.Value).SetLocation(p.s.Loc())
|
||||
}
|
||||
|
||||
inner := p.s.lit[1 : len(p.s.lit)-1]
|
||||
if !strings.ContainsRune(inner, '\\') { // nothing to un-escape
|
||||
return StringTerm(inner).SetLocation(p.s.Loc())
|
||||
}
|
||||
|
||||
var s string
|
||||
if err := json.Unmarshal([]byte(p.s.lit), &s); err != nil {
|
||||
p.errorf(p.s.Loc(), "illegal string literal: %s", p.s.lit)
|
||||
@@ -1903,6 +1912,120 @@ func (p *Parser) parseRawString() *Term {
|
||||
return StringTerm(p.s.lit[1 : len(p.s.lit)-1]).SetLocation(p.s.Loc())
|
||||
}
|
||||
|
||||
func templateStringPartToStringLiteral(tok tokens.Token, lit string) (string, error) {
|
||||
switch tok {
|
||||
case tokens.TemplateStringPart, tokens.TemplateStringEnd:
|
||||
inner := lit[1 : len(lit)-1]
|
||||
if !strings.ContainsRune(inner, '\\') { // nothing to un-escape
|
||||
return inner, nil
|
||||
}
|
||||
|
||||
buf := make([]byte, 0, len(inner)+2)
|
||||
buf = append(buf, '"')
|
||||
buf = append(buf, inner...)
|
||||
buf = append(buf, '"')
|
||||
var s string
|
||||
if err := json.Unmarshal(buf, &s); err != nil {
|
||||
return "", fmt.Errorf("illegal template-string part: %s", lit)
|
||||
}
|
||||
return s, nil
|
||||
case tokens.RawTemplateStringPart, tokens.RawTemplateStringEnd:
|
||||
return lit[1 : len(lit)-1], nil
|
||||
default:
|
||||
return "", errors.New("expected template-string part")
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Parser) parseTemplateString(multiLine bool) *Term {
|
||||
loc := p.s.Loc()
|
||||
|
||||
if !p.po.Capabilities.ContainsFeature(FeatureTemplateStrings) {
|
||||
p.errorf(loc, "template strings are not supported by current capabilities")
|
||||
return nil
|
||||
}
|
||||
|
||||
var parts []Node
|
||||
|
||||
for {
|
||||
s, err := templateStringPartToStringLiteral(p.s.tok, p.s.lit)
|
||||
if err != nil {
|
||||
p.error(p.s.Loc(), err.Error())
|
||||
return nil
|
||||
}
|
||||
|
||||
// Don't add empty strings
|
||||
if len(s) > 0 {
|
||||
parts = append(parts, StringTerm(s).SetLocation(p.s.Loc()))
|
||||
}
|
||||
|
||||
if p.s.tok == tokens.TemplateStringEnd || p.s.tok == tokens.RawTemplateStringEnd {
|
||||
break
|
||||
}
|
||||
|
||||
numCommentsBefore := len(p.s.comments)
|
||||
p.scan()
|
||||
numCommentsAfter := len(p.s.comments)
|
||||
|
||||
expr := p.parseLiteral()
|
||||
if expr == nil {
|
||||
p.error(p.s.Loc(), "invalid template-string expression")
|
||||
return nil
|
||||
}
|
||||
|
||||
if expr.Negated {
|
||||
p.errorf(expr.Loc(), "unexpected negation ('%s') in template-string expression", tokens.KeywordFor(tokens.Not))
|
||||
return nil
|
||||
}
|
||||
|
||||
// Note: Actually unification
|
||||
if expr.IsEquality() {
|
||||
p.errorf(expr.Loc(), "unexpected unification ('=') in template-string expression")
|
||||
return nil
|
||||
}
|
||||
|
||||
if expr.IsAssignment() {
|
||||
p.errorf(expr.Loc(), "unexpected assignment (':=') in template-string expression")
|
||||
return nil
|
||||
}
|
||||
|
||||
if expr.IsEvery() {
|
||||
p.errorf(expr.Loc(), "unexpected '%s' in template-string expression", tokens.KeywordFor(tokens.Every))
|
||||
return nil
|
||||
}
|
||||
|
||||
if expr.IsSome() {
|
||||
p.errorf(expr.Loc(), "unexpected '%s' in template-string expression", tokens.KeywordFor(tokens.Some))
|
||||
return nil
|
||||
}
|
||||
|
||||
// FIXME: Can we optimize for collections and comprehensions too? To qualify, they must not contain refs or calls.
|
||||
var nonOptional bool
|
||||
if term, ok := expr.Terms.(*Term); ok && numCommentsAfter == numCommentsBefore {
|
||||
switch term.Value.(type) {
|
||||
case String, Number, Boolean, Null:
|
||||
nonOptional = true
|
||||
parts = append(parts, term)
|
||||
}
|
||||
}
|
||||
|
||||
if !nonOptional {
|
||||
parts = append(parts, expr)
|
||||
}
|
||||
|
||||
if p.s.tok != tokens.RBrace {
|
||||
p.errorf(p.s.Loc(), "expected %s to end template string expression", tokens.RBrace)
|
||||
return nil
|
||||
}
|
||||
|
||||
p.doScan(false, scanner.ContinueTemplateString(multiLine))
|
||||
}
|
||||
|
||||
// When there are template-expressions, the initial location will only contain the text up to the first expression
|
||||
loc.Text = p.s.Text(loc.Offset, p.s.tokEnd)
|
||||
|
||||
return TemplateStringTerm(multiLine, parts...).SetLocation(loc)
|
||||
}
|
||||
|
||||
func (p *Parser) parseCall(operator *Term, offset int) (term *Term) {
|
||||
if !p.enter() {
|
||||
return nil
|
||||
@@ -2456,15 +2579,17 @@ func (p *Parser) illegalToken() {
|
||||
p.illegal("")
|
||||
}
|
||||
|
||||
var noScanOptions []scanner.ScanOption
|
||||
|
||||
func (p *Parser) scan() {
|
||||
p.doScan(true)
|
||||
p.doScan(true, noScanOptions...)
|
||||
}
|
||||
|
||||
func (p *Parser) scanWS() {
|
||||
p.doScan(false)
|
||||
p.doScan(false, noScanOptions...)
|
||||
}
|
||||
|
||||
func (p *Parser) doScan(skipws bool) {
|
||||
func (p *Parser) doScan(skipws bool, scanOpts ...scanner.ScanOption) {
|
||||
|
||||
// NOTE(tsandall): the last position is used to compute the "text" field for
|
||||
// complex AST nodes. Whitespace never affects the last position of an AST
|
||||
@@ -2477,7 +2602,7 @@ func (p *Parser) doScan(skipws bool) {
|
||||
var errs []scanner.Error
|
||||
for {
|
||||
var pos scanner.Position
|
||||
p.s.tok, pos, p.s.lit, errs = p.s.s.Scan()
|
||||
p.s.tok, pos, p.s.lit, errs = p.s.s.Scan(scanOpts...)
|
||||
|
||||
p.s.tokEnd = pos.End
|
||||
p.s.loc.Row = pos.Row
|
||||
@@ -2532,12 +2657,10 @@ func (p *Parser) restore(s *state) {
|
||||
}
|
||||
|
||||
func setLocRecursive(x any, loc *location.Location) {
|
||||
NewGenericVisitor(func(x any) bool {
|
||||
if node, ok := x.(Node); ok {
|
||||
node.SetLoc(loc)
|
||||
}
|
||||
WalkNodes(x, func(n Node) bool {
|
||||
n.SetLoc(loc)
|
||||
return false
|
||||
}).Walk(x)
|
||||
})
|
||||
}
|
||||
|
||||
func (p *Parser) setLoc(term *Term, loc *location.Location, offset, end int) *Term {
|
||||
|
||||
18
vendor/github.com/open-policy-agent/opa/v1/ast/parser_ext.go
generated
vendored
@@ -11,7 +11,6 @@
|
||||
package ast
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"slices"
|
||||
@@ -625,10 +624,9 @@ func ParseStatements(filename, input string) ([]Statement, []*Comment, error) {
|
||||
// ParseStatementsWithOpts returns a slice of parsed statements. This is the
|
||||
// default return value from the parser.
|
||||
func ParseStatementsWithOpts(filename, input string, popts ParserOptions) ([]Statement, []*Comment, error) {
|
||||
|
||||
parser := NewParser().
|
||||
WithFilename(filename).
|
||||
WithReader(bytes.NewBufferString(input)).
|
||||
WithReader(strings.NewReader(input)).
|
||||
WithProcessAnnotation(popts.ProcessAnnotation).
|
||||
WithFutureKeywords(popts.FutureKeywords...).
|
||||
WithAllFutureKeywords(popts.AllFutureKeywords).
|
||||
@@ -638,7 +636,6 @@ func ParseStatementsWithOpts(filename, input string, popts ParserOptions) ([]Sta
|
||||
withUnreleasedKeywords(popts.unreleasedKeywords)
|
||||
|
||||
stmts, comments, errs := parser.Parse()
|
||||
|
||||
if len(errs) > 0 {
|
||||
return nil, nil, errs
|
||||
}
|
||||
@@ -647,7 +644,6 @@ func ParseStatementsWithOpts(filename, input string, popts ParserOptions) ([]Sta
|
||||
}
|
||||
|
||||
func parseModule(filename string, stmts []Statement, comments []*Comment, regoCompatibilityMode RegoVersion) (*Module, error) {
|
||||
|
||||
if len(stmts) == 0 {
|
||||
return nil, NewError(ParseErr, &Location{File: filename}, "empty module")
|
||||
}
|
||||
@@ -662,23 +658,21 @@ func parseModule(filename string, stmts []Statement, comments []*Comment, regoCo
|
||||
|
||||
mod := &Module{
|
||||
Package: pkg,
|
||||
stmts: stmts,
|
||||
// The comments slice only holds comments that were not their own statements.
|
||||
Comments: comments,
|
||||
stmts: stmts,
|
||||
}
|
||||
|
||||
// The comments slice only holds comments that were not their own statements.
|
||||
mod.Comments = append(mod.Comments, comments...)
|
||||
|
||||
mod.regoVersion = regoCompatibilityMode
|
||||
if regoCompatibilityMode == RegoUndefined {
|
||||
mod.regoVersion = DefaultRegoVersion
|
||||
} else {
|
||||
mod.regoVersion = regoCompatibilityMode
|
||||
}
|
||||
|
||||
for i, stmt := range stmts[1:] {
|
||||
switch stmt := stmt.(type) {
|
||||
case *Import:
|
||||
mod.Imports = append(mod.Imports, stmt)
|
||||
if mod.regoVersion == RegoV0 && Compare(stmt.Path.Value, RegoV1CompatibleRef) == 0 {
|
||||
if mod.regoVersion == RegoV0 && RegoV1CompatibleRef.Equal(stmt.Path.Value) {
|
||||
mod.regoVersion = RegoV0CompatV1
|
||||
}
|
||||
case *Rule:
|
||||
|
||||
3
vendor/github.com/open-policy-agent/opa/v1/ast/policy.go
generated
vendored
@@ -621,7 +621,7 @@ func (imp *Import) SetLoc(loc *Location) {
// document. This is the alias if defined otherwise the last element in the
// path.
func (imp *Import) Name() Var {
if len(imp.Alias) != 0 {
if imp.Alias != "" {
return imp.Alias
}
switch v := imp.Path.Value.(type) {
@@ -988,6 +988,7 @@ func (head *Head) Copy() *Head {
cpy.Key = head.Key.Copy()
cpy.Value = head.Value.Copy()
cpy.keywords = nil
cpy.Assign = head.Assign
return &cpy
}

27
vendor/github.com/open-policy-agent/opa/v1/ast/rego_v1.go
generated
vendored
@@ -27,13 +27,12 @@ func checkRootDocumentOverrides(node any) Errors {
|
||||
errors := Errors{}
|
||||
|
||||
WalkRules(node, func(rule *Rule) bool {
|
||||
var name string
|
||||
name := rule.Head.Name
|
||||
if len(rule.Head.Reference) > 0 {
|
||||
name = rule.Head.Reference[0].Value.(Var).String()
|
||||
} else {
|
||||
name = rule.Head.Name.String()
|
||||
name = rule.Head.Reference[0].Value.(Var)
|
||||
}
|
||||
if RootDocumentRefs.Contains(RefTerm(VarTerm(name))) {
|
||||
|
||||
if ReservedVars.Contains(name) {
|
||||
errors = append(errors, NewError(CompileErr, rule.Location, "rules must not shadow %v (use a different rule name)", name))
|
||||
}
|
||||
|
||||
@@ -52,8 +51,8 @@ func checkRootDocumentOverrides(node any) Errors {
|
||||
if expr.IsAssignment() {
|
||||
// assign() can be called directly, so we need to assert its given first operand exists before checking its name.
|
||||
if nameOp := expr.Operand(0); nameOp != nil {
|
||||
name := nameOp.String()
|
||||
if RootDocumentRefs.Contains(RefTerm(VarTerm(name))) {
|
||||
name := Var(nameOp.String())
|
||||
if ReservedVars.Contains(name) {
|
||||
errors = append(errors, NewError(CompileErr, expr.Location, "variables must not shadow %v (use a different variable name)", name))
|
||||
}
|
||||
}
|
||||
@@ -65,26 +64,24 @@ func checkRootDocumentOverrides(node any) Errors {
|
||||
}
|
||||
|
||||
func walkCalls(node any, f func(any) bool) {
|
||||
vis := &GenericVisitor{func(x any) bool {
|
||||
switch x := x.(type) {
|
||||
vis := NewGenericVisitor(func(x any) bool {
|
||||
switch y := x.(type) {
|
||||
case Call:
|
||||
return f(x)
|
||||
case *Expr:
|
||||
if x.IsCall() {
|
||||
if y.IsCall() {
|
||||
return f(x)
|
||||
}
|
||||
case *Head:
|
||||
// GenericVisitor doesn't walk the rule head ref
|
||||
walkCalls(x.Reference, f)
|
||||
walkCalls(y.Reference, f)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
})
|
||||
vis.Walk(node)
|
||||
}
|
||||
|
||||
func checkDeprecatedBuiltins(deprecatedBuiltinsMap map[string]struct{}, node any) Errors {
|
||||
errs := make(Errors, 0)
|
||||
|
||||
func checkDeprecatedBuiltins(deprecatedBuiltinsMap map[string]struct{}, node any) (errs Errors) {
|
||||
walkCalls(node, func(x any) bool {
|
||||
var operator string
|
||||
var loc *Location
|
||||
|
||||
2
vendor/github.com/open-policy-agent/opa/v1/ast/strings.go
generated
vendored
@@ -48,6 +48,8 @@ func ValueName(x Value) string {
return "objectcomprehension"
case *SetComprehension:
return "setcomprehension"
case *TemplateString:
return "templatestring"
}

return TypeName(x)

304
vendor/github.com/open-policy-agent/opa/v1/ast/term.go
generated
vendored
@@ -25,7 +25,13 @@ import (
|
||||
"github.com/open-policy-agent/opa/v1/util"
|
||||
)
|
||||
|
||||
var errFindNotFound = errors.New("find: not found")
|
||||
var (
|
||||
NullValue Value = Null{}
|
||||
|
||||
errFindNotFound = errors.New("find: not found")
|
||||
|
||||
varRegexp = regexp.MustCompile("^[[:alpha:]_][[:alpha:][:digit:]_]*$")
|
||||
)
|
||||
|
||||
// Location records a position in source code.
|
||||
type Location = location.Location
|
||||
@@ -43,6 +49,7 @@ func NewLocation(text []byte, file string, row int, col int) *Location {
|
||||
// - Variables, References
|
||||
// - Array, Set, and Object Comprehensions
|
||||
// - Calls
|
||||
// - Template Strings
|
||||
type Value interface {
|
||||
Compare(other Value) int // Compare returns <0, 0, or >0 if this Value is less than, equal to, or greater than other, respectively.
|
||||
Find(path Ref) (Value, error) // Find returns value referred to by path or an error if path is not found.
|
||||
@@ -351,6 +358,8 @@ func (term *Term) Copy() *Term {
|
||||
cpy.Value = v.Copy()
|
||||
case *SetComprehension:
|
||||
cpy.Value = v.Copy()
|
||||
case *TemplateString:
|
||||
cpy.Value = v.Copy()
|
||||
case Call:
|
||||
cpy.Value = v.Copy()
|
||||
}
|
||||
@@ -456,7 +465,17 @@ func (term *Term) Vars() VarSet {
|
||||
}
|
||||
|
||||
// IsConstant returns true if the AST value is constant.
|
||||
// Note that this is only a shallow check as we currently don't have a real
|
||||
// notion of constant "vars" in the AST implementation. Meaning that while we could
|
||||
// derive that a reference to a constant value is also constant, we currently don't.
|
||||
func IsConstant(v Value) bool {
|
||||
switch v.(type) {
|
||||
case Null, Boolean, Number, String:
|
||||
return true
|
||||
case Var, Ref, *ArrayComprehension, *ObjectComprehension, *SetComprehension, Call:
|
||||
return false
|
||||
}
|
||||
|
||||
found := false
|
||||
vis := GenericVisitor{
|
||||
func(x any) bool {
|
||||
@@ -531,8 +550,6 @@ func IsScalar(v Value) bool {
|
||||
// Null represents the null value defined by JSON.
|
||||
type Null struct{}
|
||||
|
||||
var NullValue Value = Null{}
|
||||
|
||||
// NullTerm creates a new Term with a Null value.
|
||||
func NullTerm() *Term {
|
||||
return &Term{Value: NullValue}
|
||||
@@ -818,6 +835,173 @@ func (str String) Hash() int {
|
||||
return int(xxhash.Sum64String(string(str)))
|
||||
}
|
||||
|
||||
type TemplateString struct {
|
||||
Parts []Node `json:"parts"`
|
||||
MultiLine bool `json:"multi_line"`
|
||||
}
|
||||
|
||||
func (ts *TemplateString) Copy() *TemplateString {
|
||||
cpy := &TemplateString{MultiLine: ts.MultiLine, Parts: make([]Node, len(ts.Parts))}
|
||||
for i, p := range ts.Parts {
|
||||
switch v := p.(type) {
|
||||
case *Expr:
|
||||
cpy.Parts[i] = v.Copy()
|
||||
case *Term:
|
||||
cpy.Parts[i] = v.Copy()
|
||||
}
|
||||
}
|
||||
return cpy
|
||||
}
|
||||
|
||||
func (ts *TemplateString) Equal(other Value) bool {
|
||||
if o, ok := other.(*TemplateString); ok && ts.MultiLine == o.MultiLine && len(ts.Parts) == len(o.Parts) {
|
||||
for i, p := range ts.Parts {
|
||||
switch v := p.(type) {
|
||||
case *Expr:
|
||||
if ope, ok := o.Parts[i].(*Expr); !ok || !v.Equal(ope) {
|
||||
return false
|
||||
}
|
||||
case *Term:
|
||||
if opt, ok := o.Parts[i].(*Term); !ok || !v.Equal(opt) {
|
||||
return false
|
||||
}
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (ts *TemplateString) Compare(other Value) int {
|
||||
if ots, ok := other.(*TemplateString); ok {
|
||||
if ts.MultiLine != ots.MultiLine {
|
||||
if !ts.MultiLine {
|
||||
return -1
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
if len(ts.Parts) != len(ots.Parts) {
|
||||
return len(ts.Parts) - len(ots.Parts)
|
||||
}
|
||||
|
||||
for i := range ts.Parts {
|
||||
if cmp := Compare(ts.Parts[i], ots.Parts[i]); cmp != 0 {
|
||||
return cmp
|
||||
}
|
||||
}
|
||||
|
||||
return 0
|
||||
}
|
||||
return Compare(ts, other)
|
||||
}
|
||||
|
||||
func (ts *TemplateString) Find(path Ref) (Value, error) {
|
||||
if len(path) == 0 {
|
||||
return ts, nil
|
||||
}
|
||||
return nil, errFindNotFound
|
||||
}
|
||||
|
||||
func (ts *TemplateString) Hash() int {
|
||||
hash := 0
|
||||
for _, p := range ts.Parts {
|
||||
switch x := p.(type) {
|
||||
case *Expr:
|
||||
hash += x.Hash()
|
||||
case *Term:
|
||||
hash += x.Value.Hash()
|
||||
default:
|
||||
panic(fmt.Sprintf("invalid template part type %T", p))
|
||||
}
|
||||
}
|
||||
return hash
|
||||
}
|
||||
|
||||
func (*TemplateString) IsGround() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (ts *TemplateString) String() string {
|
||||
str := strings.Builder{}
|
||||
str.WriteString("$\"")
|
||||
|
||||
for _, p := range ts.Parts {
|
||||
switch x := p.(type) {
|
||||
case *Expr:
|
||||
str.WriteByte('{')
|
||||
str.WriteString(p.String())
|
||||
str.WriteByte('}')
|
||||
case *Term:
|
||||
s := p.String()
|
||||
if _, ok := x.Value.(String); ok {
|
||||
s = strings.TrimPrefix(s, "\"")
|
||||
s = strings.TrimSuffix(s, "\"")
|
||||
s = EscapeTemplateStringStringPart(s)
|
||||
}
|
||||
str.WriteString(s)
|
||||
default:
|
||||
str.WriteString("<invalid>")
|
||||
}
|
||||
}
|
||||
|
||||
str.WriteByte('"')
|
||||
return str.String()
|
||||
}
|
||||
|
||||
func TemplateStringTerm(multiLine bool, parts ...Node) *Term {
|
||||
return &Term{Value: &TemplateString{MultiLine: multiLine, Parts: parts}}
|
||||
}
|
||||
|
||||
// EscapeTemplateStringStringPart escapes unescaped left curly braces in s - i.e "{" becomes "\{".
|
||||
// The internal representation of string terms within a template string does **NOT**
|
||||
// treat '{' as special, but expects code dealing with template strings to escape them when
|
||||
// required, such as when serializing the complete template string. Code that programmatically
|
||||
// constructs template strings should not pre-escape left curly braces in string term parts.
|
||||
//
|
||||
// // TODO(anders): a future optimization would be to combine this with the other escaping done
|
||||
// // for strings (e.g. escaping quotes, backslashes, and JSON control characters) in a single operation
|
||||
// // to avoid multiple passes and allocations over the same string. That's currently done by
|
||||
// // strconv.Quote, so we would need to re-implement that logic in code of our own.
|
||||
// // NOTE(anders): I would love to come up with a better name for this component than
|
||||
// // "TemplateStringStringPart"..
|
||||
func EscapeTemplateStringStringPart(s string) string {
|
||||
numUnescaped := countUnescapedLeftCurly(s)
|
||||
if numUnescaped == 0 {
|
||||
return s
|
||||
}
|
||||
|
||||
l := len(s)
|
||||
escaped := make([]byte, 0, l+numUnescaped)
|
||||
if s[0] == '{' {
|
||||
escaped = append(escaped, '\\', s[0])
|
||||
} else {
|
||||
escaped = append(escaped, s[0])
|
||||
}
|
||||
|
||||
for i := 1; i < l; i++ {
|
||||
if s[i] == '{' && s[i-1] != '\\' {
|
||||
escaped = append(escaped, '\\', s[i])
|
||||
} else {
|
||||
escaped = append(escaped, s[i])
|
||||
}
|
||||
}
|
||||
|
||||
return util.ByteSliceToString(escaped)
|
||||
}
|
||||
|
||||
func countUnescapedLeftCurly(s string) (n int) {
|
||||
// Note(anders): while not the functions I'd intuitively reach for to solve this,
|
||||
// they are hands down the fastest option here, as they're done in assembly, which
|
||||
// performs about an order of magnitude better than a manual loop in Go.
|
||||
if n = strings.Count(s, "{"); n > 0 {
|
||||
n -= strings.Count(s, `\{`)
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
// Var represents a variable as defined by the language.
|
||||
type Var string
|
||||
|
||||
@@ -951,14 +1135,14 @@ func (ref Ref) Insert(x *Term, pos int) Ref {
|
||||
// Extend returns a copy of ref with the terms from other appended. The head of
|
||||
// other will be converted to a string.
|
||||
func (ref Ref) Extend(other Ref) Ref {
|
||||
dst := make(Ref, len(ref)+len(other))
|
||||
offset := len(ref)
|
||||
dst := make(Ref, offset+len(other))
|
||||
copy(dst, ref)
|
||||
|
||||
head := other[0].Copy()
|
||||
head.Value = String(head.Value.(Var))
|
||||
offset := len(ref)
|
||||
dst[offset] = head
|
||||
|
||||
dst[offset] = head
|
||||
copy(dst[offset+1:], other[1:])
|
||||
return dst
|
||||
}
|
||||
@@ -1070,42 +1254,38 @@ func (ref Ref) HasPrefix(other Ref) bool {
|
||||
func (ref Ref) ConstantPrefix() Ref {
|
||||
i := ref.Dynamic()
|
||||
if i < 0 {
|
||||
return ref.Copy()
|
||||
return ref
|
||||
}
|
||||
return ref[:i].Copy()
|
||||
return ref[:i]
|
||||
}
|
||||
|
||||
// StringPrefix returns the string portion of the ref starting from the head.
|
||||
func (ref Ref) StringPrefix() Ref {
|
||||
for i := 1; i < len(ref); i++ {
|
||||
switch ref[i].Value.(type) {
|
||||
case String: // pass
|
||||
default: // cut off
|
||||
return ref[:i].Copy()
|
||||
return ref[:i]
|
||||
}
|
||||
}
|
||||
|
||||
return ref.Copy()
|
||||
return ref
|
||||
}
|
||||
|
||||
// GroundPrefix returns the ground portion of the ref starting from the head. By
|
||||
// definition, the head of the reference is always ground.
|
||||
func (ref Ref) GroundPrefix() Ref {
|
||||
if ref.IsGround() {
|
||||
return ref
|
||||
}
|
||||
|
||||
prefix := make(Ref, 0, len(ref))
|
||||
|
||||
for i, x := range ref {
|
||||
if i > 0 && !x.IsGround() {
|
||||
break
|
||||
for i := range ref {
|
||||
if i > 0 && !ref[i].IsGround() {
|
||||
return ref[:i]
|
||||
}
|
||||
prefix = append(prefix, x)
|
||||
}
|
||||
|
||||
return prefix
|
||||
return ref
|
||||
}
|
||||
|
||||
// DynamicSuffix returns the dynamic portion of the ref.
|
||||
// If the ref is not dynamic, nil is returned.
|
||||
func (ref Ref) DynamicSuffix() Ref {
|
||||
i := ref.Dynamic()
|
||||
if i < 0 {
|
||||
@@ -1116,7 +1296,7 @@ func (ref Ref) DynamicSuffix() Ref {
|
||||
|
||||
// IsGround returns true if all of the parts of the Ref are ground.
|
||||
func (ref Ref) IsGround() bool {
|
||||
if len(ref) == 0 {
|
||||
if len(ref) < 2 {
|
||||
return true
|
||||
}
|
||||
return termSliceIsGround(ref[1:])
|
||||
@@ -1136,18 +1316,29 @@ func (ref Ref) IsNested() bool {
|
||||
// contains non-string terms this function returns an error. Path
|
||||
// components are escaped.
|
||||
func (ref Ref) Ptr() (string, error) {
|
||||
parts := make([]string, 0, len(ref)-1)
|
||||
for _, term := range ref[1:] {
|
||||
if str, ok := term.Value.(String); ok {
|
||||
parts = append(parts, url.PathEscape(string(str)))
|
||||
} else {
|
||||
buf := &strings.Builder{}
|
||||
tail := ref[1:]
|
||||
|
||||
l := max(len(tail)-1, 0) // number of '/' to add
|
||||
for i := range tail {
|
||||
str, ok := tail[i].Value.(String)
|
||||
if !ok {
|
||||
return "", errors.New("invalid path value type")
|
||||
}
|
||||
l += len(str)
|
||||
}
|
||||
return strings.Join(parts, "/"), nil
|
||||
}
|
||||
buf.Grow(l)
|
||||
|
||||
var varRegexp = regexp.MustCompile("^[[:alpha:]_][[:alpha:][:digit:]_]*$")
|
||||
for i := range tail {
|
||||
if i > 0 {
|
||||
buf.WriteByte('/')
|
||||
}
|
||||
str := string(tail[i].Value.(String))
|
||||
// Sadly, the url package does not expose an appender for this.
|
||||
buf.WriteString(url.PathEscape(str))
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
func IsVarCompatibleString(s string) bool {
|
||||
return varRegexp.MatchString(s)
|
||||
@@ -1263,13 +1454,12 @@ type Array struct {
|
||||
|
||||
// Copy returns a deep copy of arr.
|
||||
func (arr *Array) Copy() *Array {
|
||||
cpy := make([]int, len(arr.elems))
|
||||
copy(cpy, arr.hashs)
|
||||
return &Array{
|
||||
elems: termSliceCopy(arr.elems),
|
||||
hashs: cpy,
|
||||
hashs: slices.Clone(arr.hashs),
|
||||
hash: arr.hash,
|
||||
ground: arr.IsGround()}
|
||||
ground: arr.ground,
|
||||
}
|
||||
}
|
||||
|
||||
// Equal returns true if arr is equal to other.
|
||||
@@ -1548,13 +1738,19 @@ type set struct {
|
||||
|
||||
// Copy returns a deep copy of s.
|
||||
func (s *set) Copy() Set {
|
||||
terms := make([]*Term, len(s.keys))
|
||||
for i := range s.keys {
|
||||
terms[i] = s.keys[i].Copy()
|
||||
cpy := &set{
|
||||
hash: s.hash,
|
||||
ground: s.ground,
|
||||
sortGuard: sync.Once{},
|
||||
elems: make(map[int]*Term, len(s.elems)),
|
||||
keys: make([]*Term, 0, len(s.keys)),
|
||||
}
|
||||
cpy := NewSet(terms...).(*set)
|
||||
cpy.hash = s.hash
|
||||
cpy.ground = s.ground
|
||||
|
||||
for hash := range s.elems {
|
||||
cpy.elems[hash] = s.elems[hash].Copy()
|
||||
cpy.keys = append(cpy.keys, cpy.elems[hash])
|
||||
}
|
||||
|
||||
return cpy
|
||||
}
|
||||
|
||||
@@ -2309,19 +2505,21 @@ func (obj *object) Merge(other Object) (Object, bool) {
|
||||
// is called. The conflictResolver can return a merged value and a boolean
|
||||
// indicating if the merge has failed and should stop.
|
||||
func (obj *object) MergeWith(other Object, conflictResolver func(v1, v2 *Term) (*Term, bool)) (Object, bool) {
|
||||
result := NewObject()
|
||||
// Might overallocate assuming no conflicts is the common case,
|
||||
// but that's typically faster than iterating over each object twice.
|
||||
result := newobject(obj.Len() + other.Len())
|
||||
stop := obj.Until(func(k, v *Term) bool {
|
||||
v2 := other.Get(k)
|
||||
// The key didn't exist in other, keep the original value
|
||||
if v2 == nil {
|
||||
result.Insert(k, v)
|
||||
result.insert(k, v, false)
|
||||
return false
|
||||
}
|
||||
|
||||
// The key exists in both, resolve the conflict if possible
|
||||
merged, stop := conflictResolver(v, v2)
|
||||
if !stop {
|
||||
result.Insert(k, merged)
|
||||
result.insert(k, merged, false)
|
||||
}
|
||||
return stop
|
||||
})
|
||||
@@ -2333,7 +2531,7 @@ func (obj *object) MergeWith(other Object, conflictResolver func(v1, v2 *Term) (
|
||||
// Copy in any values from other for keys that don't exist in obj
|
||||
other.Foreach(func(k, v *Term) {
|
||||
if v2 := obj.Get(k); v2 == nil {
|
||||
result.Insert(k, v)
|
||||
result.insert(k, v, false)
|
||||
}
|
||||
})
|
||||
return result, true
|
||||
@@ -2733,12 +2931,28 @@ func (c Call) IsGround() bool {
|
||||
return termSliceIsGround(c)
|
||||
}
|
||||
|
||||
// MakeExpr returns an ew Expr from this call.
|
||||
// MakeExpr returns a new Expr from this call.
|
||||
func (c Call) MakeExpr(output *Term) *Expr {
|
||||
terms := []*Term(c)
|
||||
return NewExpr(append(terms, output))
|
||||
}
|
||||
|
||||
func (c Call) Operator() Ref {
|
||||
if len(c) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
return c[0].Value.(Ref)
|
||||
}
|
||||
|
||||
func (c Call) Operands() []*Term {
|
||||
if len(c) < 1 {
|
||||
return nil
|
||||
}
|
||||
|
||||
return c[1:]
|
||||
}
|
||||
|
||||
func (c Call) String() string {
|
||||
args := make([]string, len(c)-1)
|
||||
for i := 1; i < len(c); i++ {
|
||||
|
||||
45
vendor/github.com/open-policy-agent/opa/v1/ast/transform.go
generated
vendored
@@ -19,7 +19,6 @@ type Transformer interface {
|
||||
// Transform iterates the AST and calls the Transform function on the
|
||||
// Transformer t for x before recursing.
|
||||
func Transform(t Transformer, x any) (any, error) {
|
||||
|
||||
if term, ok := x.(*Term); ok {
|
||||
return Transform(t, term.Value)
|
||||
}
|
||||
@@ -284,6 +283,19 @@ func Transform(t Transformer, x any) (any, error) {
|
||||
}
|
||||
}
|
||||
return y, nil
|
||||
case *TemplateString:
|
||||
for i := range y.Parts {
|
||||
if expr, ok := y.Parts[i].(*Expr); ok {
|
||||
transformed, err := Transform(t, expr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Parts[i], ok = transformed.(*Expr); !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", expr, transformed)
|
||||
}
|
||||
}
|
||||
}
|
||||
return y, nil
|
||||
default:
|
||||
return y, nil
|
||||
}
|
||||
@@ -291,29 +303,29 @@ func Transform(t Transformer, x any) (any, error) {
|
||||
|
||||
// TransformRefs calls the function f on all references under x.
|
||||
func TransformRefs(x any, f func(Ref) (Value, error)) (any, error) {
|
||||
t := &GenericTransformer{func(x any) (any, error) {
|
||||
t := NewGenericTransformer(func(x any) (any, error) {
|
||||
if r, ok := x.(Ref); ok {
|
||||
return f(r)
|
||||
}
|
||||
return x, nil
|
||||
}}
|
||||
})
|
||||
return Transform(t, x)
|
||||
}
|
||||
|
||||
// TransformVars calls the function f on all vars under x.
|
||||
func TransformVars(x any, f func(Var) (Value, error)) (any, error) {
|
||||
t := &GenericTransformer{func(x any) (any, error) {
|
||||
t := NewGenericTransformer(func(x any) (any, error) {
|
||||
if v, ok := x.(Var); ok {
|
||||
return f(v)
|
||||
}
|
||||
return x, nil
|
||||
}}
|
||||
})
|
||||
return Transform(t, x)
|
||||
}
|
||||
|
||||
// TransformComprehensions calls the functio nf on all comprehensions under x.
|
||||
// TransformComprehensions calls the function f on all comprehensions under x.
|
||||
func TransformComprehensions(x any, f func(any) (Value, error)) (any, error) {
|
||||
t := &GenericTransformer{func(x any) (any, error) {
|
||||
t := NewGenericTransformer(func(x any) (any, error) {
|
||||
switch x := x.(type) {
|
||||
case *ArrayComprehension:
|
||||
return f(x)
|
||||
@@ -323,7 +335,7 @@ func TransformComprehensions(x any, f func(any) (Value, error)) (any, error) {
|
||||
return f(x)
|
||||
}
|
||||
return x, nil
|
||||
}}
|
||||
})
|
||||
return Transform(t, x)
|
||||
}
|
||||
|
||||
@@ -387,11 +399,7 @@ func transformTerm(t Transformer, term *Term) (*Term, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
r := &Term{
|
||||
Value: v,
|
||||
Location: term.Location,
|
||||
}
|
||||
return r, nil
|
||||
return &Term{Value: v, Location: term.Location}, nil
|
||||
}
|
||||
|
||||
func transformValue(t Transformer, v Value) (Value, error) {
|
||||
@@ -407,13 +415,18 @@ func transformValue(t Transformer, v Value) (Value, error) {
|
||||
}
|
||||
|
||||
func transformVar(t Transformer, v Var) (Var, error) {
|
||||
v1, err := Transform(t, v)
|
||||
tv, err := t.Transform(v)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
r, ok := v1.(Var)
|
||||
|
||||
if tv == nil {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
r, ok := tv.(Var)
|
||||
if !ok {
|
||||
return "", fmt.Errorf("illegal transform: %T != %T", v, v1)
|
||||
return "", fmt.Errorf("illegal transform: %T != %T", v, tv)
|
||||
}
|
||||
return r, nil
|
||||
}
|
||||
|
||||
11
vendor/github.com/open-policy-agent/opa/v1/ast/unify.go
generated
vendored
@@ -11,12 +11,11 @@ func isRefSafe(ref Ref, safe VarSet) bool {
|
||||
case Call:
|
||||
return isCallSafe(head, safe)
|
||||
default:
|
||||
for v := range ref[0].Vars() {
|
||||
if !safe.Contains(v) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
vis := varVisitorPool.Get().WithParams(SafetyCheckVisitorParams)
|
||||
vis.Walk(ref[0])
|
||||
isSafe := vis.Vars().DiffCount(safe) == 0
|
||||
varVisitorPool.Put(vis)
|
||||
return isSafe
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
10
vendor/github.com/open-policy-agent/opa/v1/ast/version_index.json
generated
vendored
@@ -358,6 +358,11 @@
|
||||
"Minor": 34,
|
||||
"Patch": 0
|
||||
},
|
||||
"internal.template_string": {
|
||||
"Major": 1,
|
||||
"Minor": 12,
|
||||
"Patch": 0
|
||||
},
|
||||
"internal.test_case": {
|
||||
"Major": 1,
|
||||
"Minor": 2,
|
||||
@@ -1037,6 +1042,11 @@
|
||||
"Major": 0,
|
||||
"Minor": 59,
|
||||
"Patch": 0
|
||||
},
|
||||
"template_strings": {
|
||||
"Major": 1,
|
||||
"Minor": 12,
|
||||
"Patch": 0
|
||||
}
|
||||
},
|
||||
"keywords": {
|
||||
|
||||
451
vendor/github.com/open-policy-agent/opa/v1/ast/visit.go
generated
vendored
@@ -4,44 +4,108 @@
|
||||
|
||||
package ast
|
||||
|
||||
// Visitor defines the interface for iterating AST elements. The Visit function
|
||||
// can return a Visitor w which will be used to visit the children of the AST
|
||||
// element v. If the Visit function returns nil, the children will not be
|
||||
// visited.
|
||||
//
|
||||
// Deprecated: use GenericVisitor or another visitor implementation
|
||||
type Visitor interface {
|
||||
Visit(v any) (w Visitor)
|
||||
}
|
||||
var (
|
||||
termTypeVisitor = newTypeVisitor[*Term]()
|
||||
varTypeVisitor = newTypeVisitor[Var]()
|
||||
exprTypeVisitor = newTypeVisitor[*Expr]()
|
||||
ruleTypeVisitor = newTypeVisitor[*Rule]()
|
||||
refTypeVisitor = newTypeVisitor[Ref]()
|
||||
bodyTypeVisitor = newTypeVisitor[Body]()
|
||||
withTypeVisitor = newTypeVisitor[*With]()
|
||||
)
|
||||
|
||||
// BeforeAndAfterVisitor wraps Visitor to provide hooks for being called before
|
||||
// and after the AST has been visited.
|
||||
//
|
||||
// Deprecated: use GenericVisitor or another visitor implementation
|
||||
type BeforeAndAfterVisitor interface {
|
||||
Visitor
|
||||
Before(x any)
|
||||
After(x any)
|
||||
}
|
||||
type (
|
||||
// GenericVisitor provides a utility to walk over AST nodes using a
|
||||
// closure. If the closure returns true, the visitor will not walk
|
||||
// over AST nodes under x.
|
||||
GenericVisitor struct {
|
||||
f func(x any) bool
|
||||
}
|
||||
|
||||
// Walk iterates the AST by calling the Visit function on the Visitor
|
||||
// BeforeAfterVisitor provides a utility to walk over AST nodes using
|
||||
// closures. If the before closure returns true, the visitor will not
|
||||
// walk over AST nodes under x. The after closure is invoked always
|
||||
// after visiting a node.
|
||||
BeforeAfterVisitor struct {
|
||||
before func(x any) bool
|
||||
after func(x any)
|
||||
}
|
||||
|
||||
// VarVisitor walks AST nodes under a given node and collects all encountered
|
||||
// variables. The collected variables can be controlled by specifying
|
||||
// VarVisitorParams when creating the visitor.
|
||||
VarVisitor struct {
|
||||
params VarVisitorParams
|
||||
vars VarSet
|
||||
}
|
||||
|
||||
// VarVisitorParams contains settings for a VarVisitor.
|
||||
VarVisitorParams struct {
|
||||
SkipRefHead bool
|
||||
SkipRefCallHead bool
|
||||
SkipObjectKeys bool
|
||||
SkipClosures bool
|
||||
SkipWithTarget bool
|
||||
SkipSets bool
|
||||
}
|
||||
|
||||
// Visitor defines the interface for iterating AST elements. The Visit function
|
||||
// can return a Visitor w which will be used to visit the children of the AST
|
||||
// element v. If the Visit function returns nil, the children will not be
|
||||
// visited.
|
||||
//
|
||||
// Deprecated: use [GenericVisitor] or another visitor implementation
|
||||
Visitor interface {
|
||||
Visit(v any) (w Visitor)
|
||||
}
|
||||
|
||||
// BeforeAndAfterVisitor wraps Visitor to provide hooks for being called before
|
||||
// and after the AST has been visited.
|
||||
//
|
||||
// Deprecated: use [GenericVisitor] or another visitor implementation
|
||||
BeforeAndAfterVisitor interface {
|
||||
Visitor
|
||||
Before(x any)
|
||||
After(x any)
|
||||
}
|
||||
|
||||
// typeVisitor is a generic visitor for a specific type T (the "generic" name was
|
||||
// however taken). Contrary to the [GenericVisitor], the typeVisitor only invokes
|
||||
// the visit function for nodes of type T, saving both CPU cycles and type assertions.
|
||||
// typeVisitor implementations carry no state, and can be shared freely across
|
||||
// goroutines. Access is private for the time being, as there is already inflation
|
||||
// in visitor types exposed in the AST package. The various WalkXXX functions however
|
||||
// now leverage typeVisitor under the hood.
|
||||
//
|
||||
// While a typeVisitor is generally a more performant option over a GenericVisitor,
|
||||
// it is not as flexible: a type visitor can only visit nodes of a single type T,
|
||||
// whereas a GenericVisitor visits all nodes. Adding to that, a typeVisitor can only
|
||||
// be instantiated for **concrete types** — not interfaces (e.g., [*Expr], not [Node]),
|
||||
// as reflection would be required to determine the concrete type at runtime, thus
|
||||
// nullifying the performance benefits of the typeVisitor in the first place.
|
||||
typeVisitor[T any] struct {
|
||||
typ any
|
||||
}
|
||||
)
|
||||
|
||||
// Walk iterates the AST by calling the Visit function on the [Visitor]
|
||||
// v for x before recursing.
|
||||
//
|
||||
// Deprecated: use GenericVisitor.Walk
|
||||
// Deprecated: use [GenericVisitor.Walk]
|
||||
func Walk(v Visitor, x any) {
|
||||
if bav, ok := v.(BeforeAndAfterVisitor); !ok {
|
||||
walk(v, x)
|
||||
} else {
|
||||
bav.Before(x)
|
||||
defer bav.After(x)
|
||||
walk(bav, x)
|
||||
bav.After(x)
|
||||
}
|
||||
}
|
||||
|
||||
// WalkBeforeAndAfter iterates the AST by calling the Visit function on the
|
||||
// Visitor v for x before recursing.
|
||||
//
|
||||
// Deprecated: use GenericVisitor.Walk
|
||||
// Deprecated: use [GenericVisitor.Walk]
|
||||
func WalkBeforeAndAfter(v BeforeAndAfterVisitor, x any) {
|
||||
Walk(v, x)
|
||||
}
|
||||
@@ -153,132 +217,258 @@ func walk(v Visitor, x any) {
|
||||
for i := range x.Symbols {
|
||||
Walk(w, x.Symbols[i])
|
||||
}
|
||||
case *TemplateString:
|
||||
for i := range x.Parts {
|
||||
Walk(w, x.Parts[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// WalkVars calls the function f on all vars under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkVars(x any, f func(Var) bool) {
|
||||
vis := &GenericVisitor{func(x any) bool {
|
||||
if v, ok := x.(Var); ok {
|
||||
return f(v)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
varTypeVisitor.walk(x, f)
|
||||
}
|
||||
|
||||
// WalkClosures calls the function f on all closures under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkClosures(x any, f func(any) bool) {
|
||||
vis := &GenericVisitor{func(x any) bool {
|
||||
vis := NewGenericVisitor(func(x any) bool {
|
||||
switch x := x.(type) {
|
||||
case *ArrayComprehension, *ObjectComprehension, *SetComprehension, *Every:
|
||||
return f(x)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
})
|
||||
vis.Walk(x)
|
||||
}
|
||||
|
||||
// WalkRefs calls the function f on all references under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkRefs(x any, f func(Ref) bool) {
|
||||
vis := &GenericVisitor{func(x any) bool {
|
||||
if r, ok := x.(Ref); ok {
|
||||
return f(r)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
refTypeVisitor.walk(x, f)
|
||||
}
|
||||
|
||||
// WalkTerms calls the function f on all terms under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkTerms(x any, f func(*Term) bool) {
|
||||
vis := &GenericVisitor{func(x any) bool {
|
||||
if term, ok := x.(*Term); ok {
|
||||
return f(term)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
termTypeVisitor.walk(x, f)
|
||||
}
|
||||
|
||||
// WalkWiths calls the function f on all with modifiers under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkWiths(x any, f func(*With) bool) {
|
||||
vis := &GenericVisitor{func(x any) bool {
|
||||
if w, ok := x.(*With); ok {
|
||||
return f(w)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
withTypeVisitor.walk(x, f)
|
||||
}
|
||||
|
||||
// WalkExprs calls the function f on all expressions under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkExprs(x any, f func(*Expr) bool) {
|
||||
vis := &GenericVisitor{func(x any) bool {
|
||||
if r, ok := x.(*Expr); ok {
|
||||
return f(r)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
exprTypeVisitor.walk(x, f)
|
||||
}
|
||||
|
||||
// WalkBodies calls the function f on all bodies under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkBodies(x any, f func(Body) bool) {
|
||||
vis := &GenericVisitor{func(x any) bool {
|
||||
if b, ok := x.(Body); ok {
|
||||
return f(b)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
bodyTypeVisitor.walk(x, f)
|
||||
}
|
||||
|
||||
// WalkRules calls the function f on all rules under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkRules(x any, f func(*Rule) bool) {
|
||||
vis := &GenericVisitor{func(x any) bool {
|
||||
if r, ok := x.(*Rule); ok {
|
||||
stop := f(r)
|
||||
// NOTE(tsandall): since rules cannot be embedded inside of queries
|
||||
// we can stop early if there is no else block.
|
||||
if stop || r.Else == nil {
|
||||
return true
|
||||
switch x := x.(type) {
|
||||
case *Module:
|
||||
for i := range x.Rules {
|
||||
if !f(x.Rules[i]) && x.Rules[i].Else != nil {
|
||||
WalkRules(x.Rules[i].Else, f)
|
||||
}
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
case *Rule:
|
||||
if !f(x) && x.Else != nil {
|
||||
WalkRules(x.Else, f)
|
||||
}
|
||||
default:
|
||||
ruleTypeVisitor.walk(x, f)
|
||||
}
|
||||
}
|
||||
|
||||
// WalkNodes calls the function f on all nodes under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkNodes(x any, f func(Node) bool) {
|
||||
vis := &GenericVisitor{func(x any) bool {
|
||||
vis := NewGenericVisitor(func(x any) bool {
|
||||
if n, ok := x.(Node); ok {
|
||||
return f(n)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
})
|
||||
vis.Walk(x)
|
||||
}
|
||||
|
||||
// GenericVisitor provides a utility to walk over AST nodes using a
|
||||
// closure. If the closure returns true, the visitor will not walk
|
||||
// over AST nodes under x.
|
||||
type GenericVisitor struct {
|
||||
f func(x any) bool
|
||||
func newTypeVisitor[T any]() *typeVisitor[T] {
|
||||
var t T
|
||||
|
||||
return &typeVisitor[T]{typ: any(t)}
|
||||
}
|
||||
|
||||
func (tv *typeVisitor[T]) walkArgs(args Args, visit func(x T) bool) {
|
||||
// If T is not Args, avoid allocation by inlining the walk.
|
||||
if _, ok := tv.typ.(Args); !ok {
|
||||
for i := range args {
|
||||
tv.walk(args[i], visit)
|
||||
}
|
||||
} else {
|
||||
tv.walk(args, visit) // allocates
|
||||
}
|
||||
}
|
||||
|
||||
func (tv *typeVisitor[T]) walkBody(body Body, visit func(x T) bool) {
|
||||
if _, ok := tv.typ.(Body); !ok {
|
||||
for i := range body {
|
||||
tv.walk(body[i], visit)
|
||||
}
|
||||
} else {
|
||||
tv.walk(body, visit) // allocates
|
||||
}
|
||||
}
|
||||
|
||||
func (tv *typeVisitor[T]) walkRef(ref Ref, visit func(x T) bool) {
|
||||
if _, ok := tv.typ.(Ref); !ok {
|
||||
for i := range ref {
|
||||
tv.walk(ref[i], visit)
|
||||
}
|
||||
} else {
|
||||
tv.walk(ref, visit) // allocates
|
||||
}
|
||||
}
|
||||
|
||||
func (tv *typeVisitor[T]) walk(x any, visit func(x T) bool) {
|
||||
if v, ok := x.(T); ok && visit(v) {
|
||||
return
|
||||
}
|
||||
|
||||
switch x := x.(type) {
|
||||
case *Module:
|
||||
tv.walk(x.Package, visit)
|
||||
for i := range x.Imports {
|
||||
tv.walk(x.Imports[i], visit)
|
||||
}
|
||||
for i := range x.Rules {
|
||||
tv.walk(x.Rules[i], visit)
|
||||
}
|
||||
for i := range x.Annotations {
|
||||
tv.walk(x.Annotations[i], visit)
|
||||
}
|
||||
for i := range x.Comments {
|
||||
tv.walk(x.Comments[i], visit)
|
||||
}
|
||||
case *Package:
|
||||
tv.walkRef(x.Path, visit)
|
||||
case *Import:
|
||||
tv.walk(x.Path, visit)
|
||||
if _, ok := tv.typ.(Var); ok {
|
||||
tv.walk(x.Alias, visit)
|
||||
}
|
||||
case *Rule:
|
||||
tv.walk(x.Head, visit)
|
||||
tv.walkBody(x.Body, visit)
|
||||
if x.Else != nil {
|
||||
tv.walk(x.Else, visit)
|
||||
}
|
||||
case *Head:
|
||||
if _, ok := tv.typ.(Var); ok {
|
||||
tv.walk(x.Name, visit)
|
||||
}
|
||||
tv.walkArgs(x.Args, visit)
|
||||
if x.Key != nil {
|
||||
tv.walk(x.Key, visit)
|
||||
}
|
||||
if x.Value != nil {
|
||||
tv.walk(x.Value, visit)
|
||||
}
|
||||
case Body:
|
||||
for i := range x {
|
||||
tv.walk(x[i], visit)
|
||||
}
|
||||
case Args:
|
||||
for i := range x {
|
||||
tv.walk(x[i], visit)
|
||||
}
|
||||
case *Expr:
|
||||
switch ts := x.Terms.(type) {
|
||||
case *Term, *SomeDecl, *Every:
|
||||
tv.walk(ts, visit)
|
||||
case []*Term:
|
||||
for i := range ts {
|
||||
tv.walk(ts[i], visit)
|
||||
}
|
||||
}
|
||||
for i := range x.With {
|
||||
tv.walk(x.With[i], visit)
|
||||
}
|
||||
case *With:
|
||||
tv.walk(x.Target, visit)
|
||||
tv.walk(x.Value, visit)
|
||||
case *Term:
|
||||
tv.walk(x.Value, visit)
|
||||
case Ref:
|
||||
for i := range x {
|
||||
tv.walk(x[i], visit)
|
||||
}
|
||||
case *object:
|
||||
x.Foreach(func(k, v *Term) {
|
||||
tv.walk(k, visit)
|
||||
tv.walk(v, visit)
|
||||
})
|
||||
case Object:
|
||||
for _, k := range x.Keys() {
|
||||
tv.walk(k, visit)
|
||||
tv.walk(x.Get(k), visit)
|
||||
}
|
||||
case *Array:
|
||||
for i := range x.Len() {
|
||||
tv.walk(x.Elem(i), visit)
|
||||
}
|
||||
case Set:
|
||||
xSlice := x.Slice()
|
||||
for i := range xSlice {
|
||||
tv.walk(xSlice[i], visit)
|
||||
}
|
||||
case *ArrayComprehension:
|
||||
tv.walk(x.Term, visit)
|
||||
tv.walkBody(x.Body, visit)
|
||||
case *ObjectComprehension:
|
||||
tv.walk(x.Key, visit)
|
||||
tv.walk(x.Value, visit)
|
||||
tv.walkBody(x.Body, visit)
|
||||
case *SetComprehension:
|
||||
tv.walk(x.Term, visit)
|
||||
tv.walkBody(x.Body, visit)
|
||||
case Call:
|
||||
for i := range x {
|
||||
tv.walk(x[i], visit)
|
||||
}
|
||||
case *Every:
|
||||
if x.Key != nil {
|
||||
tv.walk(x.Key, visit)
|
||||
}
|
||||
tv.walk(x.Value, visit)
|
||||
tv.walk(x.Domain, visit)
|
||||
tv.walkBody(x.Body, visit)
|
||||
case *SomeDecl:
|
||||
for i := range x.Symbols {
|
||||
tv.walk(x.Symbols[i], visit)
|
||||
}
|
||||
case *TemplateString:
|
||||
for i := range x.Parts {
|
||||
tv.walk(x.Parts[i], visit)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NewGenericVisitor returns a new GenericVisitor that will invoke the function
|
||||
// f on AST nodes.
|
||||
// f on AST nodes. Note that while it returns a pointer, the creating a GenericVisitor
|
||||
// doesn't commonly allocate it on the heap, as long as it doesn't escape the function
|
||||
// in which it is created and used (as it's trivially inlined).
|
||||
func NewGenericVisitor(f func(x any) bool) *GenericVisitor {
|
||||
return &GenericVisitor{f}
|
||||
}
|
||||
@@ -310,7 +500,9 @@ func (vis *GenericVisitor) Walk(x any) {
|
||||
vis.Walk(x.Path)
|
||||
case *Import:
|
||||
vis.Walk(x.Path)
|
||||
vis.Walk(x.Alias)
|
||||
if x.Alias != "" {
|
||||
vis.f(x.Alias)
|
||||
}
|
||||
case *Rule:
|
||||
vis.Walk(x.Head)
|
||||
vis.Walk(x.Body)
|
||||
@@ -318,8 +510,12 @@ func (vis *GenericVisitor) Walk(x any) {
|
||||
vis.Walk(x.Else)
|
||||
}
|
||||
case *Head:
|
||||
vis.Walk(x.Name)
|
||||
vis.Walk(x.Args)
|
||||
if x.Name != "" {
|
||||
vis.f(x.Name)
|
||||
}
|
||||
if x.Args != nil {
|
||||
vis.Walk(x.Args)
|
||||
}
|
||||
if x.Key != nil {
|
||||
vis.Walk(x.Key)
|
||||
}
|
||||
@@ -399,18 +595,13 @@ func (vis *GenericVisitor) Walk(x any) {
|
||||
for i := range x.Symbols {
|
||||
vis.Walk(x.Symbols[i])
|
||||
}
|
||||
case *TemplateString:
|
||||
for i := range x.Parts {
|
||||
vis.Walk(x.Parts[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// BeforeAfterVisitor provides a utility to walk over AST nodes using
|
||||
// closures. If the before closure returns true, the visitor will not
|
||||
// walk over AST nodes under x. The after closure is invoked always
|
||||
// after visiting a node.
|
||||
type BeforeAfterVisitor struct {
|
||||
before func(x any) bool
|
||||
after func(x any)
|
||||
}
|
||||
|
||||
// NewBeforeAfterVisitor returns a new BeforeAndAfterVisitor that
|
||||
// will invoke the functions before and after AST nodes.
|
||||
func NewBeforeAfterVisitor(before func(x any) bool, after func(x any)) *BeforeAfterVisitor {
|
||||
@@ -542,31 +733,29 @@ func (vis *BeforeAfterVisitor) Walk(x any) {
|
||||
}
|
||||
}
|
||||
|
||||
// VarVisitor walks AST nodes under a given node and collects all encountered
|
||||
// variables. The collected variables can be controlled by specifying
|
||||
// VarVisitorParams when creating the visitor.
|
||||
type VarVisitor struct {
|
||||
params VarVisitorParams
|
||||
vars VarSet
|
||||
}
|
||||
|
||||
// VarVisitorParams contains settings for a VarVisitor.
|
||||
type VarVisitorParams struct {
|
||||
SkipRefHead bool
|
||||
SkipRefCallHead bool
|
||||
SkipObjectKeys bool
|
||||
SkipClosures bool
|
||||
SkipWithTarget bool
|
||||
SkipSets bool
|
||||
}
|
||||
|
||||
// NewVarVisitor returns a new VarVisitor object.
|
||||
// NewVarVisitor returns a new [VarVisitor] object.
|
||||
func NewVarVisitor() *VarVisitor {
|
||||
return &VarVisitor{
|
||||
vars: NewVarSet(),
|
||||
}
|
||||
}
|
||||
|
||||
// ClearOrNewVarVisitor clears a non-nil [VarVisitor] or returns a new one.
|
||||
func ClearOrNewVarVisitor(vis *VarVisitor) *VarVisitor {
|
||||
if vis == nil {
|
||||
return NewVarVisitor()
|
||||
}
|
||||
|
||||
return vis.Clear()
|
||||
}
|
||||
|
||||
// ClearOrNew resets the visitor to its initial state, or returns a new one if nil.
|
||||
//
|
||||
// Deprecated: use [ClearOrNewVarVisitor] instead.
|
||||
func (vis *VarVisitor) ClearOrNew() *VarVisitor {
|
||||
return ClearOrNewVarVisitor(vis)
|
||||
}
|
||||
|
||||
// Clear resets the visitor to its initial state, and returns it for chaining.
|
||||
func (vis *VarVisitor) Clear() *VarVisitor {
|
||||
vis.params = VarVisitorParams{}
|
||||
@@ -575,14 +764,6 @@ func (vis *VarVisitor) Clear() *VarVisitor {
|
||||
return vis
|
||||
}
|
||||
|
||||
// ClearOrNew returns a new VarVisitor if vis is nil, or else a cleared VarVisitor.
|
||||
func (vis *VarVisitor) ClearOrNew() *VarVisitor {
|
||||
if vis == nil {
|
||||
return NewVarVisitor()
|
||||
}
|
||||
return vis.Clear()
|
||||
}
|
||||
|
||||
// WithParams sets the parameters in params on vis.
|
||||
func (vis *VarVisitor) WithParams(params VarVisitorParams) *VarVisitor {
|
||||
vis.params = params
|
||||
@@ -598,7 +779,7 @@ func (vis *VarVisitor) Add(v Var) {
|
||||
}
|
||||
}
|
||||
|
||||
// Vars returns a VarSet that contains collected vars.
|
||||
// Vars returns a [VarSet] that contains collected vars.
|
||||
func (vis *VarVisitor) Vars() VarSet {
|
||||
return vis.vars
|
||||
}
|
||||
@@ -625,7 +806,7 @@ func (vis *VarVisitor) visit(v any) bool {
|
||||
}
|
||||
if vis.params.SkipClosures {
|
||||
switch v := v.(type) {
|
||||
case *ArrayComprehension, *ObjectComprehension, *SetComprehension:
|
||||
case *ArrayComprehension, *ObjectComprehension, *SetComprehension, *TemplateString:
|
||||
return true
|
||||
case *Expr:
|
||||
if ev, ok := v.Terms.(*Every); ok {
|
||||
@@ -695,9 +876,8 @@ func (vis *VarVisitor) visit(v any) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// Walk iterates the AST by calling the function f on the
|
||||
// GenericVisitor before recursing. Contrary to the generic Walk, this
|
||||
// does not require allocating the visitor from heap.
|
||||
// Walk iterates the AST by calling the function f on the [VarVisitor] before recursing.
|
||||
// Contrary to the deprecated [Walk] function, this does not require allocating the visitor from heap.
|
||||
func (vis *VarVisitor) Walk(x any) {
|
||||
if vis.visit(x) {
|
||||
return
|
||||
@@ -705,16 +885,9 @@ func (vis *VarVisitor) Walk(x any) {
|
||||
|
||||
switch x := x.(type) {
|
||||
case *Module:
|
||||
vis.Walk(x.Package)
|
||||
for i := range x.Imports {
|
||||
vis.Walk(x.Imports[i])
|
||||
}
|
||||
for i := range x.Rules {
|
||||
vis.Walk(x.Rules[i])
|
||||
}
|
||||
for i := range x.Comments {
|
||||
vis.Walk(x.Comments[i])
|
||||
}
|
||||
case *Package:
|
||||
vis.WalkRef(x.Path)
|
||||
case *Import:
|
||||
@@ -767,9 +940,9 @@ func (vis *VarVisitor) Walk(x any) {
|
||||
vis.Walk(x[i].Value)
|
||||
}
|
||||
case *object:
|
||||
x.Foreach(func(k, _ *Term) {
|
||||
x.Foreach(func(k, v *Term) {
|
||||
vis.Walk(k)
|
||||
vis.Walk(x.Get(k))
|
||||
vis.Walk(v)
|
||||
})
|
||||
case *Array:
|
||||
x.Foreach(func(t *Term) {
|
||||
@@ -805,6 +978,10 @@ func (vis *VarVisitor) Walk(x any) {
|
||||
for i := range x.Symbols {
|
||||
vis.Walk(x.Symbols[i])
|
||||
}
|
||||
case *TemplateString:
|
||||
for i := range x.Parts {
|
||||
vis.Walk(x.Parts[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
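Note on the visit.go hunk above: the WalkVars, WalkRefs, WalkTerms, WalkWiths, WalkExprs and WalkBodies helpers now dispatch through shared, stateless typeVisitor instances instead of allocating a GenericVisitor per call. A minimal sketch of how calling code exercises these helpers, assuming a hypothetical example policy (the module text and output are illustrative only, not part of the vendored code):

package main

import (
	"fmt"

	"github.com/open-policy-agent/opa/v1/ast"
)

func main() {
	// Hypothetical policy used only to demonstrate the walkers.
	module := ast.MustParseModule(`package example

allow if input.user == "admin"`)

	// WalkVars now routes through the shared varTypeVisitor; returning false
	// from the callback keeps descending into child nodes.
	ast.WalkVars(module, func(v ast.Var) bool {
		fmt.Println("var:", v)
		return false
	})

	// WalkTerms behaves the same way for *ast.Term nodes.
	ast.WalkTerms(module, func(t *ast.Term) bool {
		fmt.Println("term:", t)
		return false
	})
}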
2
vendor/github.com/open-policy-agent/opa/v1/bundle/store.go
generated
vendored
@@ -970,7 +970,7 @@ func compileModules(compiler *ast.Compiler, m metrics.Metrics, bundles map[strin
|
||||
m.Timer(metrics.RegoModuleCompile).Start()
|
||||
defer m.Timer(metrics.RegoModuleCompile).Stop()
|
||||
|
||||
modules := map[string]*ast.Module{}
|
||||
modules := make(map[string]*ast.Module, len(compiler.Modules)+len(extraModules)+len(bundles))
|
||||
|
||||
// preserve any modules already on the compiler
|
||||
maps.Copy(modules, compiler.Modules)
|
||||
|
||||
96
vendor/github.com/open-policy-agent/opa/v1/format/format.go
generated
vendored
@@ -27,8 +27,6 @@ import (
|
||||
const defaultLocationFile = "__format_default__"
|
||||
|
||||
var (
|
||||
elseVar ast.Value = ast.Var("else")
|
||||
|
||||
expandedConst = ast.NewBody(ast.NewExpr(ast.InternedTerm(true)))
|
||||
commentsSlicePool = util.NewSlicePool[*ast.Comment](50)
|
||||
varRegexp = regexp.MustCompile("^[[:alpha:]_][[:alpha:][:digit:]_]*$")
|
||||
@@ -732,7 +730,7 @@ func (w *writer) writeElse(rule *ast.Rule, comments []*ast.Comment) ([]*ast.Comm
|
||||
|
||||
rule.Else.Head.Name = "else" // NOTE(sr): whaaat
|
||||
|
||||
elseHeadReference := ast.NewTerm(elseVar) // construct a reference for the term
|
||||
elseHeadReference := ast.VarTerm("else") // construct a reference for the term
|
||||
elseHeadReference.Location = rule.Else.Head.Location // and set the location to match the rule location
|
||||
|
||||
rule.Else.Head.Reference = ast.Ref{elseHeadReference}
|
||||
@@ -1284,6 +1282,11 @@ func (w *writer) writeTermParens(parens bool, term *ast.Term, comments []*ast.Co
|
||||
}
|
||||
|
||||
}
|
||||
case *ast.TemplateString:
|
||||
comments, err = w.writeTemplateString(x, comments)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case ast.Var:
|
||||
w.write(w.formatVar(x))
|
||||
case ast.Call:
|
||||
@@ -1301,6 +1304,91 @@ func (w *writer) writeTermParens(parens bool, term *ast.Term, comments []*ast.Co
|
||||
return comments, nil
|
||||
}
|
||||
|
||||
func (w *writer) writeTemplateString(ts *ast.TemplateString, comments []*ast.Comment) ([]*ast.Comment, error) {
|
||||
w.write("$")
|
||||
if ts.MultiLine {
|
||||
w.write("`")
|
||||
} else {
|
||||
w.write(`"`)
|
||||
}
|
||||
|
||||
for i, p := range ts.Parts {
|
||||
switch x := p.(type) {
|
||||
case *ast.Expr:
|
||||
w.write("{")
|
||||
w.up()
|
||||
|
||||
if w.beforeEnd != nil {
|
||||
// We have a comment on the same line as the opening template-expression brace '{'
|
||||
w.endLine()
|
||||
w.startLine()
|
||||
} else {
|
||||
// We might have comments to write; the first of which should be on the same line as the opening template-expression brace '{'
|
||||
before, _, _ := partitionComments(comments, x.Location)
|
||||
if len(before) > 0 {
|
||||
w.write(" ")
|
||||
w.inline = true
|
||||
if err := w.writeComments(before); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
comments = comments[len(before):]
|
||||
}
|
||||
}
|
||||
|
||||
var err error
|
||||
comments, err = w.writeExpr(x, comments)
|
||||
if err != nil {
|
||||
return comments, err
|
||||
}
|
||||
|
||||
// write trailing comments
|
||||
if i+1 < len(ts.Parts) {
|
||||
before, _, _ := partitionComments(comments, ts.Parts[i+1].Loc())
|
||||
if len(before) > 0 {
|
||||
w.endLine()
|
||||
if err := w.writeComments(before); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
comments = comments[len(before):]
|
||||
w.startLine()
|
||||
}
|
||||
}
|
||||
|
||||
w.write("}")
|
||||
|
||||
if err := w.down(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case *ast.Term:
|
||||
if s, ok := x.Value.(ast.String); ok {
|
||||
if ts.MultiLine {
|
||||
w.write(ast.EscapeTemplateStringStringPart(string(s)))
|
||||
} else {
|
||||
str := ast.EscapeTemplateStringStringPart(s.String())
|
||||
w.write(str[1 : len(str)-1])
|
||||
}
|
||||
} else {
|
||||
s := x.String()
|
||||
s = strings.TrimPrefix(s, "\"")
|
||||
s = strings.TrimSuffix(s, "\"")
|
||||
w.write(s)
|
||||
}
|
||||
default:
|
||||
w.write("<invalid>")
|
||||
}
|
||||
}
|
||||
|
||||
if ts.MultiLine {
|
||||
w.write("`")
|
||||
} else {
|
||||
w.write(`"`)
|
||||
}
|
||||
|
||||
return comments, nil
|
||||
}
|
||||
|
||||
func (w *writer) writeRef(x ast.Ref, comments []*ast.Comment) ([]*ast.Comment, error) {
|
||||
if len(x) > 0 {
|
||||
parens := false
|
||||
@@ -1931,7 +2019,7 @@ func partitionComments(comments []*ast.Comment, l *ast.Location) ([]*ast.Comment
|
||||
var at *ast.Comment
|
||||
|
||||
before := make([]*ast.Comment, 0, numBefore)
|
||||
after := comments[0 : 0 : len(comments)-numBefore]
|
||||
after := make([]*ast.Comment, 0, numAfter)
|
||||
|
||||
for _, c := range comments {
|
||||
switch cmp := c.Location.Row - l.Row; {
|
||||
|
||||
2
vendor/github.com/open-policy-agent/opa/v1/rego/rego.go
generated
vendored
@@ -2212,7 +2212,7 @@ func (r *Rego) compileQuery(query ast.Body, imports []*ast.Import, _ metrics.Met
|
||||
|
||||
if r.pkg != "" {
|
||||
var err error
|
||||
pkg, err = ast.ParsePackage(fmt.Sprintf("package %v", r.pkg))
|
||||
pkg, err = ast.ParsePackage("package " + r.pkg)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
15
vendor/github.com/open-policy-agent/opa/v1/topdown/bindings.go
generated
vendored
@@ -216,16 +216,25 @@ func (vis namespacingVisitor) Visit(x any) bool {
|
||||
switch x := x.(type) {
|
||||
case *ast.ArrayComprehension:
|
||||
x.Term = vis.namespaceTerm(x.Term)
|
||||
ast.NewGenericVisitor(vis.Visit).Walk(x.Body)
|
||||
vis := ast.NewGenericVisitor(vis.Visit)
|
||||
for _, expr := range x.Body {
|
||||
vis.Walk(expr)
|
||||
}
|
||||
return true
|
||||
case *ast.SetComprehension:
|
||||
x.Term = vis.namespaceTerm(x.Term)
|
||||
ast.NewGenericVisitor(vis.Visit).Walk(x.Body)
|
||||
vis := ast.NewGenericVisitor(vis.Visit)
|
||||
for _, expr := range x.Body {
|
||||
vis.Walk(expr)
|
||||
}
|
||||
return true
|
||||
case *ast.ObjectComprehension:
|
||||
x.Key = vis.namespaceTerm(x.Key)
|
||||
x.Value = vis.namespaceTerm(x.Value)
|
||||
ast.NewGenericVisitor(vis.Visit).Walk(x.Body)
|
||||
vis := ast.NewGenericVisitor(vis.Visit)
|
||||
for _, expr := range x.Body {
|
||||
vis.Walk(expr)
|
||||
}
|
||||
return true
|
||||
case *ast.Expr:
|
||||
switch terms := x.Terms.(type) {
|
||||
|
||||
@@ -344,7 +344,7 @@ func (p *CopyPropagator) livevarRef(a *ast.Term) bool {
|
||||
}
|
||||
|
||||
for _, v := range p.sorted {
|
||||
if ref[0].Value.Compare(v) == 0 {
|
||||
if v.Equal(ref[0].Value) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
@@ -403,7 +403,7 @@ func containedIn(value ast.Value, x any) bool {
|
||||
if v, ok := value.(ast.Ref); ok {
|
||||
match = x.HasPrefix(v)
|
||||
} else {
|
||||
match = x.Compare(value) == 0
|
||||
match = x.Equal(value)
|
||||
}
|
||||
if stop || match {
|
||||
stop = true
|
||||
|
||||
27
vendor/github.com/open-policy-agent/opa/v1/topdown/print.go
generated
vendored
@@ -28,7 +28,6 @@ func (h printHook) Print(_ print.Context, msg string) error {
|
||||
}
|
||||
|
||||
func builtinPrint(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
|
||||
|
||||
if bctx.PrintHook == nil {
|
||||
return iter(nil)
|
||||
}
|
||||
@@ -40,7 +39,7 @@ func builtinPrint(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term
|
||||
|
||||
buf := make([]string, arr.Len())
|
||||
|
||||
err = builtinPrintCrossProductOperands(bctx, buf, arr, 0, func(buf []string) error {
|
||||
err = builtinPrintCrossProductOperands(bctx.Location, buf, arr, 0, func(buf []string) error {
|
||||
pctx := print.Context{
|
||||
Context: bctx.Context,
|
||||
Location: bctx.Location,
|
||||
@@ -54,20 +53,32 @@ func builtinPrint(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term
|
||||
return iter(nil)
|
||||
}
|
||||
|
||||
func builtinPrintCrossProductOperands(bctx BuiltinContext, buf []string, operands *ast.Array, i int, f func([]string) error) error {
|
||||
|
||||
func builtinPrintCrossProductOperands(loc *ast.Location, buf []string, operands *ast.Array, i int, f func([]string) error) error {
|
||||
if i >= operands.Len() {
|
||||
return f(buf)
|
||||
}
|
||||
|
||||
xs, ok := operands.Elem(i).Value.(ast.Set)
|
||||
operand := operands.Elem(i)
|
||||
|
||||
// We allow primitives ...
|
||||
switch x := operand.Value.(type) {
|
||||
case ast.String:
|
||||
buf[i] = string(x)
|
||||
return builtinPrintCrossProductOperands(loc, buf, operands, i+1, f)
|
||||
case ast.Number, ast.Boolean, ast.Null:
|
||||
buf[i] = x.String()
|
||||
return builtinPrintCrossProductOperands(loc, buf, operands, i+1, f)
|
||||
}
|
||||
|
||||
// ... but all other operand types must be sets.
|
||||
xs, ok := operand.Value.(ast.Set)
|
||||
if !ok {
|
||||
return Halt{Err: internalErr(bctx.Location, fmt.Sprintf("illegal argument type: %v", ast.ValueName(operands.Elem(i).Value)))}
|
||||
return Halt{Err: internalErr(loc, "illegal argument type: "+ast.ValueName(operand.Value))}
|
||||
}
|
||||
|
||||
if xs.Len() == 0 {
|
||||
buf[i] = "<undefined>"
|
||||
return builtinPrintCrossProductOperands(bctx, buf, operands, i+1, f)
|
||||
return builtinPrintCrossProductOperands(loc, buf, operands, i+1, f)
|
||||
}
|
||||
|
||||
return xs.Iter(func(x *ast.Term) error {
|
||||
@@ -77,7 +88,7 @@ func builtinPrintCrossProductOperands(bctx BuiltinContext, buf []string, operand
|
||||
default:
|
||||
buf[i] = v.String()
|
||||
}
|
||||
return builtinPrintCrossProductOperands(bctx, buf, operands, i+1, f)
|
||||
return builtinPrintCrossProductOperands(loc, buf, operands, i+1, f)
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
2
vendor/github.com/open-policy-agent/opa/v1/topdown/query.go
generated
vendored
@@ -134,7 +134,7 @@ func (q *Query) WithTracer(tracer Tracer) *Query {
|
||||
// WithQueryTracer adds a query tracer to use during evaluation. This is optional.
|
||||
// Disabled QueryTracers will be ignored.
|
||||
func (q *Query) WithQueryTracer(tracer QueryTracer) *Query {
|
||||
if !tracer.Enabled() {
|
||||
if tracer == nil || !tracer.Enabled() {
|
||||
return q
|
||||
}
|
||||
|
||||
|
||||
73
vendor/github.com/open-policy-agent/opa/v1/topdown/sink.go
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
package topdown
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
)
|
||||
|
||||
var _ io.Writer = (*sinkW)(nil)
|
||||
|
||||
type sinkWriter interface {
|
||||
io.Writer
|
||||
String() string
|
||||
Grow(int)
|
||||
WriteByte(byte) error
|
||||
WriteString(string) (int, error)
|
||||
}
|
||||
|
||||
type sinkW struct {
|
||||
buf *bytes.Buffer
|
||||
cancel Cancel
|
||||
err error
|
||||
}
|
||||
|
||||
func newSink(name string, hint int, c Cancel) sinkWriter {
|
||||
b := &bytes.Buffer{}
|
||||
if hint > 0 {
|
||||
b.Grow(hint)
|
||||
}
|
||||
|
||||
if c == nil {
|
||||
return b
|
||||
}
|
||||
|
||||
return &sinkW{
|
||||
cancel: c,
|
||||
buf: b,
|
||||
err: Halt{
|
||||
Err: &Error{
|
||||
Code: CancelErr,
|
||||
Message: name + ": timed out before finishing",
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (sw *sinkW) Grow(n int) {
|
||||
sw.buf.Grow(n)
|
||||
}
|
||||
|
||||
func (sw *sinkW) Write(bs []byte) (int, error) {
|
||||
if sw.cancel.Cancelled() {
|
||||
return 0, sw.err
|
||||
}
|
||||
return sw.buf.Write(bs)
|
||||
}
|
||||
|
||||
func (sw *sinkW) WriteByte(b byte) error {
|
||||
if sw.cancel.Cancelled() {
|
||||
return sw.err
|
||||
}
|
||||
return sw.buf.WriteByte(b)
|
||||
}
|
||||
|
||||
func (sw *sinkW) WriteString(s string) (int, error) {
|
||||
if sw.cancel.Cancelled() {
|
||||
return 0, sw.err
|
||||
}
|
||||
return sw.buf.WriteString(s)
|
||||
}
|
||||
|
||||
func (sw *sinkW) String() string {
|
||||
return sw.buf.String()
|
||||
}
|
||||
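The new sink.go above wraps a bytes.Buffer so that string-building builtins can stop writing as soon as the query's Cancel flag is set. Since newSink and sinkW are unexported, the self-contained sketch below re-implements the same pattern with illustrative names (cancelFlag, cancellableBuffer); it is an assumption-labeled stand-in, not OPA API:

package main

import (
	"bytes"
	"errors"
	"fmt"
	"sync/atomic"
)

// cancelFlag mimics the shape of topdown's Cancel: Cancel() sets the flag,
// Cancelled() reports it.
type cancelFlag struct{ flag atomic.Bool }

func (c *cancelFlag) Cancel()         { c.flag.Store(true) }
func (c *cancelFlag) Cancelled() bool { return c.flag.Load() }

// cancellableBuffer mirrors sinkW: every write checks the cancel flag first,
// so a long-running builtin stops producing output once the query is cancelled.
type cancellableBuffer struct {
	buf    bytes.Buffer
	cancel *cancelFlag
}

func (b *cancellableBuffer) WriteString(s string) (int, error) {
	if b.cancel.Cancelled() {
		return 0, errors.New("timed out before finishing")
	}
	return b.buf.WriteString(s)
}

func main() {
	c := &cancelFlag{}
	w := &cancellableBuffer{cancel: c}

	w.WriteString("hello ") // succeeds, flag not yet set
	c.Cancel()

	if _, err := w.WriteString("world"); err != nil {
		fmt.Println("write aborted:", err)
	}
	fmt.Println("buffer holds:", w.buf.String()) // "hello "
}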
58
vendor/github.com/open-policy-agent/opa/v1/topdown/strings.go
generated
vendored
@@ -152,7 +152,7 @@ func builtinFormatInt(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter
|
||||
return iter(ast.InternedTerm(fmt.Sprintf(format, i)))
|
||||
}
|
||||
|
||||
func builtinConcat(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
|
||||
func builtinConcat(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
|
||||
join, err := builtins.StringOperand(operands[0].Value, 1)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -163,11 +163,13 @@ func builtinConcat(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
|
||||
return iter(term)
|
||||
}
|
||||
|
||||
sb := newSink(ast.Concat.Name, 0, bctx.Cancel)
|
||||
|
||||
// NOTE(anderseknert):
|
||||
// More or less Go's strings.Join implementation, but where we avoid
|
||||
// creating an intermediate []string slice to pass to that function,
|
||||
// as that's expensive (3.5x more space allocated). Instead we build
|
||||
// the string directly using a strings.Builder to concatenate the string
|
||||
// the string directly using the sink to concatenate the string
|
||||
// values from the array/set with the separator.
|
||||
n := 0
|
||||
switch b := operands[1].Value.(type) {
|
||||
@@ -182,25 +184,36 @@ func builtinConcat(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
|
||||
}
|
||||
sep := string(join)
|
||||
n += len(sep) * (l - 1)
|
||||
var sb strings.Builder
|
||||
sb.Grow(n)
|
||||
sb.WriteString(string(b.Elem(0).Value.(ast.String)))
|
||||
if _, err := sb.WriteString(string(b.Elem(0).Value.(ast.String))); err != nil {
|
||||
return err
|
||||
}
|
||||
if sep == "" {
|
||||
for i := 1; i < l; i++ {
|
||||
sb.WriteString(string(b.Elem(i).Value.(ast.String)))
|
||||
if _, err := sb.WriteString(string(b.Elem(i).Value.(ast.String))); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
} else if len(sep) == 1 {
|
||||
// when the separator is a single byte, sb.WriteByte is substantially faster
|
||||
bsep := sep[0]
|
||||
for i := 1; i < l; i++ {
|
||||
sb.WriteByte(bsep)
|
||||
sb.WriteString(string(b.Elem(i).Value.(ast.String)))
|
||||
if err := sb.WriteByte(bsep); err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := sb.WriteString(string(b.Elem(i).Value.(ast.String))); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// for longer separators, there is no such difference between WriteString and Write
|
||||
for i := 1; i < l; i++ {
|
||||
sb.WriteString(sep)
|
||||
sb.WriteString(string(b.Elem(i).Value.(ast.String)))
|
||||
if _, err := sb.WriteString(sep); err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := sb.WriteString(string(b.Elem(i).Value.(ast.String))); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return iter(ast.InternedTerm(sb.String()))
|
||||
@@ -215,12 +228,15 @@ func builtinConcat(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
|
||||
sep := string(join)
|
||||
l := b.Len()
|
||||
n += len(sep) * (l - 1)
|
||||
var sb strings.Builder
|
||||
sb.Grow(n)
|
||||
for i, v := range b.Slice() {
|
||||
sb.WriteString(string(v.Value.(ast.String)))
|
||||
if _, err := sb.WriteString(string(v.Value.(ast.String))); err != nil {
|
||||
return err
|
||||
}
|
||||
if i < l-1 {
|
||||
sb.WriteString(sep)
|
||||
if _, err := sb.WriteString(sep); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return iter(ast.InternedTerm(sb.String()))
|
||||
@@ -523,7 +539,7 @@ func builtinSplit(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) e
|
||||
return iter(ast.ArrayTerm(util.SplitMap(text, delim, ast.InternedTerm)...))
|
||||
}
|
||||
|
||||
func builtinReplace(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
|
||||
func builtinReplace(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
|
||||
s, err := builtins.StringOperand(operands[0].Value, 1)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -539,7 +555,12 @@ func builtinReplace(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
|
||||
return err
|
||||
}
|
||||
|
||||
replaced := strings.ReplaceAll(string(s), string(old), string(n))
|
||||
sink := newSink(ast.Replace.Name, len(s), bctx.Cancel)
|
||||
replacer := strings.NewReplacer(string(old), string(n))
|
||||
if _, err := replacer.WriteString(sink, string(s)); err != nil {
|
||||
return err
|
||||
}
|
||||
replaced := sink.String()
|
||||
if replaced == string(s) {
|
||||
return iter(operands[0])
|
||||
}
|
||||
@@ -547,7 +568,7 @@ func builtinReplace(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term)
|
||||
return iter(ast.InternedTerm(replaced))
|
||||
}
|
||||
|
||||
func builtinReplaceN(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
|
||||
func builtinReplaceN(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
|
||||
patterns, err := builtins.ObjectOperand(operands[0].Value, 1)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -574,7 +595,12 @@ func builtinReplaceN(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term
|
||||
oldnewArr = append(oldnewArr, string(keyVal), string(strVal))
|
||||
}
|
||||
|
||||
return iter(ast.InternedTerm(strings.NewReplacer(oldnewArr...).Replace(string(s))))
|
||||
sink := newSink(ast.ReplaceN.Name, len(s), bctx.Cancel)
|
||||
replacer := strings.NewReplacer(oldnewArr...)
|
||||
if _, err := replacer.WriteString(sink, string(s)); err != nil {
|
||||
return err
|
||||
}
|
||||
return iter(ast.InternedTerm(sink.String()))
|
||||
}
|
||||
|
||||
func builtinTrim(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
|
||||
|
||||
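The builtinConcat rework above follows its own comment: no intermediate []string, a pre-sized buffer, and WriteByte when the separator is a single byte. A standalone sketch of that fast path, using a hypothetical joinFast helper in place of the builtin's inner loop (not vendored code):

package main

import (
	"fmt"
	"strings"
)

// joinFast mirrors the shape of the builtinConcat loop: pre-size the builder,
// write the first element, then use WriteByte for one-byte separators.
func joinFast(elems []string, sep string) string {
	if len(elems) == 0 {
		return ""
	}
	n := len(sep) * (len(elems) - 1)
	for _, e := range elems {
		n += len(e)
	}
	var sb strings.Builder
	sb.Grow(n)
	sb.WriteString(elems[0])
	if len(sep) == 1 {
		for _, e := range elems[1:] {
			sb.WriteByte(sep[0]) // cheaper than WriteString for a single byte
			sb.WriteString(e)
		}
	} else {
		for _, e := range elems[1:] {
			sb.WriteString(sep)
			sb.WriteString(e)
		}
	}
	return sb.String()
}

func main() {
	fmt.Println(joinFast([]string{"a", "b", "c"}, ",")) // a,b,c
}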
45
vendor/github.com/open-policy-agent/opa/v1/topdown/template_string.go
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
// Copyright 2025 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package topdown
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/open-policy-agent/opa/v1/ast"
|
||||
"github.com/open-policy-agent/opa/v1/topdown/builtins"
|
||||
)
|
||||
|
||||
func builtinTemplateString(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
|
||||
arr, err := builtins.ArrayOperand(operands[0].Value, 1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
buf := make([]string, arr.Len())
|
||||
|
||||
var count int
|
||||
err = builtinPrintCrossProductOperands(bctx.Location, buf, arr, 0, func(buf []string) error {
|
||||
count += 1
|
||||
// Precautionary run-time assertion that template-strings can't produce multiple outputs; e.g. for custom relation type built-ins not known at compile-time.
|
||||
if count > 1 {
|
||||
return Halt{Err: &Error{
|
||||
Code: ConflictErr,
|
||||
Location: bctx.Location,
|
||||
Message: "template-strings must not produce multiple outputs",
|
||||
}}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return iter(ast.StringTerm(strings.Join(buf, "")))
|
||||
}
|
||||
|
||||
func init() {
|
||||
RegisterBuiltinFunc(ast.InternalTemplateString.Name, builtinTemplateString)
|
||||
}
|
||||
5
vendor/github.com/open-policy-agent/opa/v1/topdown/tokens.go
generated
vendored
@@ -21,6 +21,7 @@ import (
|
||||
"fmt"
|
||||
"hash"
|
||||
"math/big"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/lestrrat-go/jwx/v3/jwk"
|
||||
@@ -1131,8 +1132,8 @@ func builtinJWTDecodeVerify(bctx BuiltinContext, operands []*ast.Term, iter func
|
||||
switch v := nbf.Value.(type) {
|
||||
case ast.Number:
|
||||
// constraints.time is in nanoseconds but nbf Value is in seconds
|
||||
compareTime := ast.FloatNumberTerm(constraints.time / 1000000000)
|
||||
if ast.Compare(compareTime, v) == -1 {
|
||||
compareTime := ast.Number(strconv.FormatFloat(constraints.time/1000000000, 'g', -1, 64))
|
||||
if compareTime.Compare(v) == -1 {
|
||||
return iter(unverified)
|
||||
}
|
||||
default:
|
||||
|
||||
2
vendor/github.com/open-policy-agent/opa/v1/version/version.go
generated
vendored
@@ -10,7 +10,7 @@ import (
|
||||
"runtime/debug"
|
||||
)
|
||||
|
||||
var Version = "1.11.1"
|
||||
var Version = "1.12.3"
|
||||
|
||||
// GoVersion is the version of Go this was built with
|
||||
var GoVersion = runtime.Version()
|
||||
|
||||
8
vendor/modules.txt
vendored
@@ -255,7 +255,7 @@ github.com/cespare/xxhash/v2
|
||||
# github.com/cevaris/ordered_map v0.0.0-20190319150403-3adeae072e73
|
||||
## explicit
|
||||
github.com/cevaris/ordered_map
|
||||
# github.com/clipperhouse/displaywidth v0.6.2
|
||||
# github.com/clipperhouse/displaywidth v0.6.0
|
||||
## explicit; go 1.18
|
||||
github.com/clipperhouse/displaywidth
|
||||
# github.com/clipperhouse/stringish v0.1.1
|
||||
@@ -1197,12 +1197,12 @@ github.com/olekukonko/cat
|
||||
# github.com/olekukonko/errors v1.1.0
|
||||
## explicit; go 1.21
|
||||
github.com/olekukonko/errors
|
||||
# github.com/olekukonko/ll v0.1.4-0.20260115111900-9e59c2286df0
|
||||
# github.com/olekukonko/ll v0.1.3
|
||||
## explicit; go 1.21
|
||||
github.com/olekukonko/ll
|
||||
github.com/olekukonko/ll/lh
|
||||
github.com/olekukonko/ll/lx
|
||||
# github.com/olekukonko/tablewriter v1.1.3
|
||||
# github.com/olekukonko/tablewriter v1.1.2
|
||||
## explicit; go 1.21
|
||||
github.com/olekukonko/tablewriter
|
||||
github.com/olekukonko/tablewriter/pkg/twcache
|
||||
@@ -1277,7 +1277,7 @@ github.com/onsi/gomega/matchers/support/goraph/edge
|
||||
github.com/onsi/gomega/matchers/support/goraph/node
|
||||
github.com/onsi/gomega/matchers/support/goraph/util
|
||||
github.com/onsi/gomega/types
|
||||
# github.com/open-policy-agent/opa v1.11.1
|
||||
# github.com/open-policy-agent/opa v1.12.3
|
||||
## explicit; go 1.24.6
|
||||
github.com/open-policy-agent/opa/ast
|
||||
github.com/open-policy-agent/opa/ast/json
|
||||
|
||||