diff --git a/go.mod b/go.mod
index a22895df3..0861b883c 100644
--- a/go.mod
+++ b/go.mod
@@ -4,28 +4,27 @@ go 1.21
 
require (
github.com/harvester/go-common v0.0.0-20230718010724-11313421a8f5
- github.com/imdario/mergo v0.3.12
+ github.com/imdario/mergo v0.3.16
github.com/insomniacslk/dhcp v0.0.0-20210827173440-b95caade3eac
github.com/jroimartin/gocui v0.4.0
- github.com/mudler/yip v0.0.0-20211129144714-088f39125cf7
github.com/pkg/errors v0.9.1
github.com/rancher/mapper v0.0.0-20190814232720-058a8b7feb99
github.com/rancher/wharfie v0.6.5
- github.com/sirupsen/logrus v1.9.2
- github.com/stretchr/testify v1.8.1
- github.com/tredoe/osutil v1.3.6
- github.com/vishvananda/netlink v1.1.0
- golang.org/x/crypto v0.14.0
- golang.org/x/net v0.17.0
- golang.org/x/sys v0.13.0
- gopkg.in/ini.v1 v1.63.2
+ github.com/rancher/yip v1.9.2
+ github.com/sirupsen/logrus v1.9.3
+ github.com/stretchr/testify v1.9.0
+ github.com/tredoe/osutil v1.5.0
+ github.com/vishvananda/netlink v1.2.1-beta.2
+ golang.org/x/crypto v0.25.0
+ golang.org/x/net v0.27.0
+ golang.org/x/sys v0.22.0
+ gopkg.in/ini.v1 v1.67.0
gopkg.in/yaml.v3 v3.0.1
k8s.io/apimachinery v0.25.4
)
 
require (
github.com/coreos/go-systemd/v22 v22.5.0 // indirect
- github.com/coreos/yaml v0.0.0-20141224210557-6b16a5714269 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/docker/cli v20.10.20+incompatible // indirect
github.com/docker/distribution v2.8.2+incompatible // indirect
@@ -37,12 +36,13 @@ require (
github.com/godbus/dbus/v5 v5.0.4 // indirect
github.com/google/go-containerregistry v0.12.2-0.20230106184643-b063f6aeac72 // indirect
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
- github.com/hashicorp/errwrap v1.0.0 // indirect
- github.com/hashicorp/go-multierror v1.1.0 // indirect
- github.com/itchyny/gojq v0.12.2 // indirect
- github.com/itchyny/timefmt-go v0.1.2 // indirect
+ github.com/hashicorp/errwrap v1.1.0 // indirect
+ github.com/hashicorp/go-multierror v1.1.1 // indirect
+ github.com/itchyny/gojq v0.12.16 // indirect
+ github.com/itchyny/timefmt-go v0.1.6 // indirect
github.com/klauspost/compress v1.15.11 // indirect
- github.com/mattn/go-runewidth v0.0.9 // indirect
+ github.com/kr/pretty v0.3.1 // indirect
+ github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mattn/go-shellwords v1.0.10 // indirect
github.com/mdlayher/ethernet v0.0.0-20190606142754-0394541c37b7 // indirect
github.com/mdlayher/raw v0.0.0-20191009151244-50f2db8cc065 // indirect
@@ -51,15 +51,16 @@ require (
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.1.0-rc2 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
- github.com/rancher-sandbox/cloud-init v1.14.3-0.20210913085759-bf90bf5eb77e // indirect
github.com/rancher/wrangler v0.0.0-20190426050201-5946f0eaed19 // indirect
- github.com/twpayne/go-vfs v1.5.0 // indirect
+ github.com/rivo/uniseg v0.4.7 // indirect
+ github.com/rogpeppe/go-internal v1.11.0 // indirect
+ github.com/twpayne/go-vfs/v4 v4.3.0 // indirect
github.com/u-root/uio v0.0.0-20210528114334-82958018845c // indirect
- github.com/vishvananda/netns v0.0.0-20210104183010-2eb08e3e575f // indirect
+ github.com/vishvananda/netns v0.0.4 // indirect
go.uber.org/atomic v1.7.0 // indirect
go.uber.org/multierr v1.6.0 // indirect
- golang.org/x/sync v0.1.0 // indirect
- golang.org/x/text v0.13.0 // indirect
+ golang.org/x/sync v0.7.0 // indirect
+ golang.org/x/text v0.16.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
k8s.io/utils v0.0.0-20221011040102-427025108f67 // indirect
)
diff --git a/go.sum b/go.sum
index 68367a6c1..c85528b12 100644
--- a/go.sum
+++ b/go.sum
@@ -1,4 +1,3 @@
-bitbucket.org/liamstask/goose v0.0.0-20150115234039-8488cc47d90c/go.mod h1:hSVuE3qU7grINVSwrmzHfpg9k87ALBk+XaualNyUzI4=
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
@@ -49,44 +48,27 @@ github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZ
github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
-github.com/GeertJohan/go.incremental v1.0.0/go.mod h1:6fAjUhbVuX1KcMD3c8TEgVUqmo4seqhv0i0kdATSkM0=
-github.com/GeertJohan/go.rice v1.0.0/go.mod h1:eH6gbSOAUv07dQuZVnBmoDP8mgsM1rtixis4Tib9if0=
github.com/Harvester/termbox-go v1.1.1-0.20210318083914-8ab92204a400 h1:UgeaojHkOo7VUw2syL9P0i6i7P7kGseN+5/3AYIrXV4=
github.com/Harvester/termbox-go v1.1.1-0.20210318083914-8ab92204a400/go.mod h1:T0cTdVuOwf7pHQNtfhnEbzHbcNyCEcVU4YPpouCbVxo=
github.com/MakeNowJust/heredoc v0.0.0-20170808103936-bb23615498cd/go.mod h1:64YHyfSL2R96J44Nlwm39UHepQbyR5q10x7iYa1ks2E=
-github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
-github.com/Masterminds/semver/v3 v3.1.0/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
-github.com/Masterminds/sprig/v3 v3.1.0/go.mod h1:ONGMf7UfYGAbMXCZmQLy8x3lCDIPrEZE/rU8pmrbihA=
-github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA=
-github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0=
github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ=
github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
-github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7/go.mod h1:z4/9nQmJSSwwds7ejkxaJwO37dru3geImFUdJlaLzQo=
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
-github.com/acomagu/bufpipe v1.0.3/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4=
-github.com/akavel/rsrc v0.8.0/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
-github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220418222510-f25a4f6275ed/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY=
-github.com/apex/log v1.9.0/go.mod h1:m82fZlWIuiWzWP04XCTXmnX0xRkYYbCdYn8jbJeLBEA=
-github.com/apex/logs v1.0.0/go.mod h1:XzxuLZ5myVHDy9SAmYpamKKRNApGj54PfYLcFrXqDwo=
-github.com/aphistic/golf v0.0.0-20180712155816-02c07f170c5a/go.mod h1:3NqKYiepwy8kCu4PNA+aP7WUV72eXWJeP9/r3/K9aLE=
-github.com/aphistic/sweet v0.2.0/go.mod h1:fWDlIh/isSE9n6EPsRmC0det+whmX6dJid3stzu0Xys=
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
-github.com/aws/aws-sdk-go v1.20.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
-github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I=
github.com/benbjohnson/clock v1.0.3/go.mod h1:bGMdMPoPVvcYyt1gHDf4J2KE153Yf9BuiUKYMaxlTDM=
github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
@@ -97,11 +79,7 @@ github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJm
github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM=
github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
-github.com/bmatcuk/doublestar v1.3.0 h1:1jLE2y0VpSrOn/QR9G4f2RmrCtkM3AuATcWradjHUvM=
-github.com/bmatcuk/doublestar v1.3.0/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
-github.com/cavaliergopher/grab v2.0.0+incompatible/go.mod h1:6ICNRTQPwkMP0m2sKIDv/9XkhFJJwiEOQyZ+8E4H7Yg=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/certifi/gocertifi v0.0.0-20180118203423-deb3ae2ef261/go.mod h1:GJKEexRPVJrBSOjoqN5VNOIKJ5Q3RViH6eu3puDRwx4=
github.com/certifi/gocertifi v0.0.0-20191021191039-0944d244cd40/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA=
github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
@@ -112,10 +90,6 @@ github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWR
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
-github.com/cloudflare/backoff v0.0.0-20161212185259-647f3cdfc87a/go.mod h1:rzgs2ZOiguV6/NpiDgADjRLPNyZlApIWxKpkT+X8SdY=
-github.com/cloudflare/cfssl v1.5.0/go.mod h1:sPPkBS5L8l8sRc/IOO1jG51Xb34u+TYhL6P//JdODMQ=
-github.com/cloudflare/go-metrics v0.0.0-20151117154305-6a9aea36fb41/go.mod h1:eaZPlJWD+G9wseg1BuRXlHnjntPMrywMsyxf+LTOdP4=
-github.com/cloudflare/redoctober v0.0.0-20171127175943-746a508df14c/go.mod h1:6Se34jNoqrd8bTxrmJB2Bg2aoZ2CdSXonils9NsiNgo=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
@@ -139,28 +113,21 @@ github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSV
github.com/coreos/go-systemd/v22 v22.5.0 h1:RrqgGjYQKalulkV8NGVIfkXQf6YYmOyiJKk8iXXhfZs=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
-github.com/coreos/yaml v0.0.0-20141224210557-6b16a5714269 h1:/1sjrpK5Mb6IwyFOKd+u7321tXfNAsj0Ci8CivZmSlo=
-github.com/coreos/yaml v0.0.0-20141224210557-6b16a5714269/go.mod h1:Bl1D/T9QJhVdu6eFoLrGxN90+admDLGaLz2HXH/VzDc=
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
-github.com/daaku/go.zipexe v1.0.0/go.mod h1:z8IiR6TsVLEYKwXAoE/I+8ys/sDkgTzSL0CLnGVd57E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/davidcassany/linuxkit/pkg/metadata v0.0.0-20210714125456-8b187a9ffd7e/go.mod h1:GIS28BxE1G2YB+dCf+O2Ow/bVP7hqioSWLsvr9YQg3o=
github.com/daviddengcn/go-colortext v0.0.0-20160507010035-511bcaf42ccd/go.mod h1:dv4zxwHi5C/8AeI+4gX4dCWOIvNi7I6JCSX0HvlKPgE=
-github.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
-github.com/diskfs/go-diskfs v1.1.1/go.mod h1:afUPxxu+x1snp4aCY2bKR0CoZ/YFJewV3X2UEr2nPZE=
github.com/docker/cli v20.10.20+incompatible h1:lWQbHSHUFs7KraSN2jOJK7zbMS2jNCHI4mt4xUFUVQ4=
github.com/docker/cli v20.10.20+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8=
github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
-github.com/docker/docker v17.12.0-ce-rc1.0.20200417035958-130b0bc6032c+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v20.10.27+incompatible h1:Id/ZooynV4ZlD6xX20RCd3SR0Ikn7r4QZDa2ECK2TgA=
github.com/docker/docker v20.10.27+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker-credential-helpers v0.7.0 h1:xtCHsjxogADNZcdv1pKUHXryefjlVRqWqIhk/uXJp0A=
@@ -168,14 +135,12 @@ github.com/docker/docker-credential-helpers v0.7.0/go.mod h1:rETQfLdHNT3foU5kuNk
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
-github.com/docker/libnetwork v0.8.0-dev.2.0.20200612180813-9e99af28df21/go.mod h1:93m0aTqz6z+g32wla4l4WxTrdtvBRmVzYRkYvasA5Z8=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emicklei/go-restful/v3 v3.8.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
-github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
@@ -192,7 +157,6 @@ github.com/fanliao/go-promise v0.0.0-20141029170127-1890db352a72/go.mod h1:Pjfxu
github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
-github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k=
github.com/form3tech-oss/jwt-go v3.2.3+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
@@ -201,17 +165,10 @@ github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4
github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
github.com/fvbommel/sortorder v1.0.1/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
github.com/getkin/kin-openapi v0.76.0/go.mod h1:660oXbgy5JFMKreazJaQTw7o+X00qeSyhcnluiMv+Xg=
-github.com/getsentry/raven-go v0.0.0-20180121060056-563b81fc02b7/go.mod h1:KungGk8q33+aIAZUIVWZDr2OfAEBsO49PX4NzFV5kcQ=
github.com/getsentry/raven-go v0.2.0/go.mod h1:KungGk8q33+aIAZUIVWZDr2OfAEBsO49PX4NzFV5kcQ=
github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
-github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
-github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E=
-github.com/go-git/go-billy/v5 v5.2.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0=
-github.com/go-git/go-billy/v5 v5.3.1/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0=
-github.com/go-git/go-git-fixtures/v4 v4.2.1/go.mod h1:K8zd3kDUAykwTdDCr+I0per6Y6vMiRR/nnVTBtavnB0=
-github.com/go-git/go-git/v5 v5.4.2/go.mod h1:gQ1kArt6d+n+BGd+/B/I74HwRTLhth2+zti4ihgckDc=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
@@ -226,8 +183,9 @@ github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTg
github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU=
github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
-github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0=
github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
+github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
+github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/zapr v0.4.0/go.mod h1:tabnROwaDl0UNxkVeFRbY8bwB37GwRv0P8lg6aAiEnk=
github.com/go-logr/zapr v1.2.3/go.mod h1:eIauM6P8qSvTw5o2ez6UEAfGjQKrxQTl5EoK+Qa2oG4=
github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
@@ -236,9 +194,11 @@ github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL9
github.com/go-openapi/jsonreference v0.19.5/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg=
github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-openapi/swag v0.19.14/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
-github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
+github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I=
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
+github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
+github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
github.com/godbus/dbus/v5 v5.0.4 h1:9349emZab16e7zQvpmsbtjc18ykshndd8y2PG3sgJbA=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
@@ -283,7 +243,6 @@ github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Z
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA=
github.com/google/cel-go v0.12.5/go.mod h1:Jk7ljRzLBhkmiAwBoUxB1sZSCVBAzkqPF25olK/iRDw=
-github.com/google/certificate-transparency-go v1.0.21/go.mod h1:QeJfpSbVSfYc7RgB3gJFj9cbuQMMchQxrWXz8Ruopmg=
github.com/google/gnostic v0.5.7-v3refs/go.mod h1:73MKFl6jIHelAJNaBGFzt3SPtZULs9dYrGFt8OiIsHQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
@@ -298,8 +257,8 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
-github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
-github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-containerregistry v0.12.2-0.20230106184643-b063f6aeac72 h1:mjNVVKGHs3+xtaGPnTHz4ozBphZNsxsgqFR4+TNwRVM=
github.com/google/go-containerregistry v0.12.2-0.20230106184643-b063f6aeac72/go.mod h1:J9FQ+eSS4a1aC2GNZxvNpbWhgp0487v+cgiilB4FqDo=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@@ -321,6 +280,8 @@ github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLe
github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6 h1:k7nVchz72niMH6YLQNvHSdIE7iqsQxK1P41mySCvssg=
+github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
@@ -345,14 +306,15 @@ github.com/harvester/go-common v0.0.0-20230718010724-11313421a8f5 h1:0sdmh186yfB
github.com/harvester/go-common v0.0.0-20230718010724-11313421a8f5/go.mod h1:z8gmnLv5YrXhueSrZZVnd1j8ZiXKEhzitq/BnyQU/pE=
github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
-github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
+github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
+github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
-github.com/hashicorp/go-multierror v1.1.0 h1:B9UzwGQJehnUY1yNrnwREHc3fGbC2xefo8g4TbElacI=
-github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA=
+github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
+github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
@@ -367,36 +329,26 @@ github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0m
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
-github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/hugelgupf/socketpair v0.0.0-20190730060125-05d35a94e714 h1:/jC7qQFrv8CrSJVmaolDVOxTfS9kc36uB6H40kdbQq8=
github.com/hugelgupf/socketpair v0.0.0-20190730060125-05d35a94e714/go.mod h1:2Goc3h8EklBH5mspfHFxBnEoURQCGzQQH1ga9Myjvis=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/imdario/mergo v0.3.6/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
-github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
-github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU=
github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
+github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
+github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/insomniacslk/dhcp v0.0.0-20210827173440-b95caade3eac h1:IO6EfdRnPhxgKOsk9DbewdtQZHKZKnGlW7QCUttvNys=
github.com/insomniacslk/dhcp v0.0.0-20210827173440-b95caade3eac/go.mod h1:h+MxyHxRg9NH3terB1nfRIUaQEcI0XOVkdR9LNBlp8E=
-github.com/ishidawataru/sctp v0.0.0-20191218070446-00ab2ac2db07/go.mod h1:co9pwDoBCm1kGxawmb4sPq0cSIOOWNPT4KnHotMP1Zg=
-github.com/itchyny/go-flags v1.5.0/go.mod h1:lenkYuCobuxLBAd/HGFE4LRoW8D3B6iXRQfWYJ+MNbA=
-github.com/itchyny/gojq v0.12.2 h1:TxhFjk1w7Vnb0SwQPeG4FxTC98O4Es+x/mPaD5Azgfs=
-github.com/itchyny/gojq v0.12.2/go.mod h1:mi4PdXSlFllHyByM68JKUrbiArtEdEnNEmjbwxcQKAg=
-github.com/itchyny/timefmt-go v0.1.2 h1:q0Xa4P5it6K6D7ISsbLAMwx1PnWlixDcJL6/sFs93Hs=
-github.com/itchyny/timefmt-go v0.1.2/go.mod h1:0osSSCQSASBJMsIZnhAaF1C2fCBTJZXrnj37mG8/c+A=
-github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
+github.com/itchyny/gojq v0.12.16 h1:yLfgLxhIr/6sJNVmYfQjTIv0jGctu6/DgDoivmxTr7g=
+github.com/itchyny/gojq v0.12.16/go.mod h1:6abHbdC2uB9ogMS38XsErnfqJ94UlngIJGlRAIj4jTM=
+github.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/myJ5Q=
+github.com/itchyny/timefmt-go v0.1.6/go.mod h1:RRDZYC5s9ErkjQvTvvU7keJjxUYzIISJGxm9/mAERQg=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
-github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4=
-github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
-github.com/jmhodges/clock v0.0.0-20160418191101-880ee4c33548/go.mod h1:hGT6jSUVzF6no3QaDSMLGLEHtHSBSefs+MgcDWnmhmo=
-github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks=
-github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
-github.com/jpillora/backoff v0.0.0-20180909062703-3050d21c67d7/go.mod h1:2iMrUgbbvHEiQClaW2NsSzMyGHqN+rDFqY705q49KG0=
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
github.com/jroimartin/gocui v0.4.0 h1:52jnalstgmc25FmtGcWqa0tcbMEWS6RpFLsOIO+I+E8=
github.com/jroimartin/gocui v0.4.0/go.mod h1:7i7bbj99OgFHzo7kB2zPb8pXLqMBSQegY7azfqXMkyY=
@@ -414,13 +366,10 @@ github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/X
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
-github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
-github.com/kisielk/sqlstruct v0.0.0-20150923205031-648daed35d49/go.mod h1:yyMNCyc/Ib3bDTKd379tNMpB/7/H5TjM2Y9QJ5THLbE=
-github.com/kisom/goutils v1.1.0/go.mod h1:+UBTfd78habUYWFbNWTJNG+jNG/i/lGURakr4A/yNRw=
github.com/klauspost/compress v1.15.11 h1:Lcadnb3RKGin4FYM/orgq0qde+nc15E5Cbqg4B9Sx9c=
github.com/klauspost/compress v1.15.11/go.mod h1:QPwzmACJjUTFsnSHH934V6woptycfrDDJnH7hvFVbGM=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@@ -429,15 +378,12 @@ github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
-github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=
-github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
-github.com/kylelemons/go-gypsy v0.0.0-20160905020020-08cad365cd28/go.mod h1:T/T7jsxVqf9k/zYOqbgNAsANsjxTd1Yq3htjDhQ1H0c=
-github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
-github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE=
github.com/lithammer/dedent v1.1.0/go.mod h1:jrXYCQtgg0nJiN+StA2KgR7w6CiQNv9Fd/Z9BP0jIOc=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
@@ -447,22 +393,15 @@ github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=
github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
-github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
-github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
-github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
-github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
-github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
-github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
-github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
+github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
+github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-shellwords v1.0.5/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o=
github.com/mattn/go-shellwords v1.0.10 h1:Y7Xqm8piKOO3v10Thp7Z36h4FYFjt5xB//6XvOrs2Gw=
github.com/mattn/go-shellwords v1.0.10/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y=
-github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
-github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
github.com/mdlayher/ethernet v0.0.0-20190606142754-0394541c37b7 h1:lez6TS6aAau+8wXUP3G9I3TGlmPFEq2CTxBaRqY6AGE=
@@ -474,10 +413,8 @@ github.com/mdlayher/netlink v1.1.1/go.mod h1:WTYpFb/WTvlRJAyKhZL5/uy69TDDpHHu2VZ
github.com/mdlayher/raw v0.0.0-20190606142536-fef19f00fc18/go.mod h1:7EpbotpCmVZcu+KCX4g9WaRNuu11uyhiW7+Le1dKawg=
github.com/mdlayher/raw v0.0.0-20191009151244-50f2db8cc065 h1:aFkJ6lx4FPip+S+Uw4aTegFMct9shDvP+79PsSxpm3w=
github.com/mdlayher/raw v0.0.0-20191009151244-50f2db8cc065/go.mod h1:7EpbotpCmVZcu+KCX4g9WaRNuu11uyhiW7+Le1dKawg=
-github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
-github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
@@ -488,8 +425,6 @@ github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0Qu
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
-github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
-github.com/moby/libnetwork v0.8.0-dev.2.0.20200612180813-9e99af28df21/go.mod h1:RQTqDxGZChsPHosY8R3ZL2THYWUuW8X5SRhiBNoTY5I=
github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc=
github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c=
github.com/moby/term v0.0.0-20210610120745-9d4ed1856297/go.mod h1:vgPCkQMyxTZ7IDy8SXRufE172gr8+K/JE/7hHFxHW3A=
@@ -502,59 +437,47 @@ github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3Rllmb
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00/go.mod h1:Pm3mSP3c5uWn86xMLZ5Sa7JB9GsEZySvHYXCTK4E9q4=
-github.com/mreiferson/go-httpclient v0.0.0-20160630210159-31f0106b4474/go.mod h1:OQA4XLvDbMgS8P0CevmM4m9Q3Jq4phKUzcocxuGJ5m8=
-github.com/mudler/entities v0.0.0-20211108084227-d1414478861b/go.mod h1:qquFT9tYp+/NO7tTotto4BT9zSRYSMDxo2PGZwujpFA=
-github.com/mudler/yip v0.0.0-20211129144714-088f39125cf7 h1:9yuv3CMIZx+Mt3Zj54Kpac42IAou5p7lQ6m2LLzNs6E=
-github.com/mudler/yip v0.0.0-20211129144714-088f39125cf7/go.mod h1:ejqp/S2gmS9sh498Ib/B//k9GQvXX98EfeyIxxEkGIM=
github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
-github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
-github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA=
-github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg=
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY=
-github.com/onsi/ginkgo v1.16.1/go.mod h1:CObGmKUOKaSC0RjmoAK7tKyn4Azo5P2IWuoMnvwxz1E=
github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc=
github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0=
github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c=
github.com/onsi/ginkgo/v2 v2.1.4/go.mod h1:um6tUpWM/cxCK3/FK8BXqEiUMUwRgSM4JXG47RKZmLU=
github.com/onsi/ginkgo/v2 v2.1.6/go.mod h1:MEH45j8TBi6u9BMogfbp0stKC5cdGjumZj5Y7AG4VIk=
+github.com/onsi/ginkgo/v2 v2.19.0 h1:9Cnnf7UHo57Hy3k6/m5k3dRfGTMXGvxhHFvkDTCTpvA=
+github.com/onsi/ginkgo/v2 v2.19.0/go.mod h1:rlwLi9PilAFJ8jCg9UE1QP6VBpd6/xj3SRC0d6TU0To=
github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
-github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
-github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA=
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
-github.com/onsi/gomega v1.11.0/go.mod h1:azGKhqFUon9Vuj0YmTfLSmx0FUwqXYSTl5re8lQLTUg=
github.com/onsi/gomega v1.15.0/go.mod h1:cIuvLEne0aoVhAgh/O6ac0Op8WWw9H6eYCriF+tEHG0=
github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY=
github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY=
github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro=
-github.com/onsi/gomega v1.20.1 h1:PA/3qinGoukvymdIDV8pii6tiZgC8kbmJO6Z5+b002Q=
github.com/onsi/gomega v1.20.1/go.mod h1:DtrZpjmvpn2mPm4YWQa0/ALMDj9v4YxLgojwPeREyVo=
-github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
+github.com/onsi/gomega v1.33.1 h1:dsYjIxxSR755MDmKVsaFQTE22ChNBcuuTWgkUDSubOk=
+github.com/onsi/gomega v1.33.1/go.mod h1:U4R44UsT+9eLIaYRB2a5qajjtQYn0hauxvRm16AVYg0=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.0-rc2 h1:2zx/Stx4Wc5pIPDvIxHXvXtQFW/7XWJGmnM7r3wg034=
github.com/opencontainers/image-spec v1.1.0-rc2/go.mod h1:3OVijpioIKYWTqjiG0zfF6wvoJ4fAXGbjdZuI2NgsRQ=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
-github.com/packethost/packngo v0.1.0/go.mod h1:otzZQXgoO96RTzDB/Hycg0qZcXZsWJGJRSXbmEIJ+4M=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
-github.com/paultag/go-modprobe v0.0.0-20200930231701-46c7252028d3/go.mod h1:8kv7zKfUEDKENYA4Wk+cT1bxjXaEiuYisqjRZGGNI84=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
-github.com/phayes/permbits v0.0.0-20190612203442-39d7c581d2ee/go.mod h1:3uODdxMgOaPYeWU7RzZLxVtJHZ/x1f/iHkBZuKJDzuY=
+github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
@@ -589,8 +512,6 @@ github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4O
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
-github.com/rancher-sandbox/cloud-init v1.14.3-0.20210913085759-bf90bf5eb77e h1:1GSTIPu7CB2UCkS++2/wMygc3S+aa5O0VbTsdDrYupE=
-github.com/rancher-sandbox/cloud-init v1.14.3-0.20210913085759-bf90bf5eb77e/go.mod h1:qbJr82AYxRKlEa4ZSm6qENOmXebBxPXne8BWbtll4cg=
github.com/rancher/dynamiclistener v0.3.5 h1:5TaIHvkDGmZKvc96Huur16zfTKOiLhDtK4S+WV0JA6A=
github.com/rancher/dynamiclistener v0.3.5/go.mod h1:dW/YF6/m2+uEyJ5VtEcd9THxda599HP6N9dSXk81+k0=
github.com/rancher/lasso v0.0.0-20221227210133-6ea88ca2fbcc/go.mod h1:dEfC9eFQigj95lv/JQ8K5e7+qQCacWs1aIA6nLxKzT8=
@@ -600,34 +521,34 @@ github.com/rancher/wharfie v0.6.5 h1:Xxj/ki8d4BFeQBvyViW9jX8BqxaPlZwXvRD8BG2Mm/8
github.com/rancher/wharfie v0.6.5/go.mod h1:iGXn5Y9GEhsx1h/lPUXAklK4DoY4+6NOJF+Li1j248Y=
github.com/rancher/wrangler v1.1.1 h1:wmqUwqc2M7ADfXnBCJTFkTB5ZREWpD78rnZMzmxwMvM=
github.com/rancher/wrangler v1.1.1/go.mod h1:ioVbKupzcBOdzsl55MvEDN0R1wdGggj8iNCYGFI5JvM=
+github.com/rancher/yip v1.9.2 h1:AddaE7/J5eUgeMeW5mlB7JtNCafqVt78aZgg1CE+cfk=
+github.com/rancher/yip v1.9.2/go.mod h1:acOitsP+8zVDhxM36mDePxpT+SXhK66GxW5Vgg3lw2Y=
+github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
+github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
-github.com/rogpeppe/fastuuid v1.1.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
+github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
+github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
+github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
-github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
-github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
-github.com/sirupsen/logrus v1.0.3/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
-github.com/sirupsen/logrus v1.3.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
-github.com/sirupsen/logrus v1.9.2 h1:oxx1eChJGI6Uks2ZC4W1zpLlVgqB8ner4EuQwV4Ik1Y=
-github.com/sirupsen/logrus v1.9.2/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
+github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
+github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
-github.com/smartystreets/assertions v1.0.0/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM=
-github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9/go.mod h1:SnhjPscd9TpLiy1LpzGSKh3bXCfxxXuqd9xmQJy3slM=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
-github.com/smartystreets/gunit v1.0.0/go.mod h1:qwPWnhz6pn0NnRBP++URONOVyNkPyr4SauJk4cUOwJs=
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
github.com/soheilhy/cmux v0.1.5/go.mod h1:T7TcVDs9LWfQgPlPsdngu6I6QIoyIFZDDC6sNE1GqG0=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
@@ -636,7 +557,6 @@ github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTd
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
-github.com/spf13/cobra v0.0.6/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=
github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=
github.com/spf13/cobra v1.1.3/go.mod h1:pGADOWyqRD/YMrPZigI/zbliZ2wVD/23d+is3pSWzOo=
github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk=
@@ -664,42 +584,26 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
-github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
-github.com/tj/assert v0.0.0-20171129193455-018094318fb0/go.mod h1:mZ9/Rh9oLWpLLDRpvE+3b7gP/C2YyLFYxNmcLnPTMe0=
-github.com/tj/assert v0.0.3/go.mod h1:Ne6X72Q+TB1AteidzQncjw9PabbMp4PBMZ1k+vd1Pvk=
-github.com/tj/go-buffer v1.1.0/go.mod h1:iyiJpfFcR2B9sXu7KvjbT9fpM4mOelRSDTbntVj52Uc=
-github.com/tj/go-elastic v0.0.0-20171221160941-36157cbbebc2/go.mod h1:WjeM0Oo1eNAjXGDx2yma7uG2XoyRZTq1uv3M/o7imD0=
-github.com/tj/go-kinesis v0.0.0-20171128231115-08b17f58cb1b/go.mod h1:/yhzCV0xPfx6jb1bBgRFjl5lytqVqZXEaeqWP8lTEao=
-github.com/tj/go-spin v1.1.0/go.mod h1:Mg1mzmePZm4dva8Qz60H2lHwmJ2loum4VIrLgVnKwh4=
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tmc/grpc-websocket-proxy v0.0.0-20201229170055-e5319fda7802/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
-github.com/tredoe/fileutil v1.0.5/go.mod h1:HFzzpvg+3Q8LgmZgo1mVF5epHc/CVkWKEb3hja+/1Zo=
-github.com/tredoe/goutil v1.0.0/go.mod h1:Qhf75QLcNEChimbl4wb8nROzw9PCFCPYTEUmTnoszXY=
-github.com/tredoe/osutil v1.3.6 h1:VrweDEuUWOYU/lskw8HqsGRt4fOdbdp6td5vjgVzrj8=
-github.com/tredoe/osutil v1.3.6/go.mod h1:panccMiyCdP8g45yxJ7DcxdMTZfPqHGGceuou2MNvHo=
-github.com/tredoe/osutil/v2 v2.0.0-rc.16/go.mod h1:uLRVx/3pb7Y4RQhG8cQFbPE9ha5r81e6MXpBsxbTAYc=
-github.com/twpayne/go-vfs v1.5.0 h1:3j9j2RchDSIQ1gjuNHZqJOyZAS702lrmqUow2uUYgX8=
-github.com/twpayne/go-vfs v1.5.0/go.mod h1:MAbvpnWRAE5LfIt88Eopskigmyc+wakYCyCDnys7iNw=
+github.com/tredoe/osutil v1.5.0 h1:UGVxbbHRoZi8xXVmbNZ2vgG6XoJ15ndE4LniiQ3rJKg=
+github.com/tredoe/osutil v1.5.0/go.mod h1:TEzphzUUunysbdDRfdOgqkg10POQbnfIPV50ynqOfIg=
+github.com/twpayne/go-vfs/v4 v4.3.0 h1:rTqFzzOQ/6ESKTSiwVubHlCBedJDOhQyVSnw8rQNZhU=
+github.com/twpayne/go-vfs/v4 v4.3.0/go.mod h1:tq2UVhnUepesc0lSnPJH/jQ8HruGhzwZe2r5kDFpEIw=
github.com/u-root/uio v0.0.0-20210528114334-82958018845c h1:BFvcl34IGnw8yvJi8hlqLFo9EshRInwWBs2M5fGWzQA=
github.com/u-root/uio v0.0.0-20210528114334-82958018845c/go.mod h1:LpEX5FO/cB+WF4TYGY1V5qktpaZLkKkSegbr0V4eYXA=
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
-github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
-github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
github.com/vbatts/tar-split v0.11.2 h1:Via6XqJr0hceW4wff3QRzD5gAk/tatMw/4ZA7cTlIME=
github.com/vbatts/tar-split v0.11.2/go.mod h1:vV3ZuO2yWSVsz+pfFzDG/upWH1JhjOiEaWq6kXyQ3VI=
-github.com/vishvananda/netlink v0.0.0-20170808154308-f5a6f697a596/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk=
-github.com/vishvananda/netlink v1.1.0 h1:1iyaYNBLmP6L0220aDnYQpo1QEV4t4hJ+xEEhhJH8j0=
-github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE=
-github.com/vishvananda/netns v0.0.0-20170707011535-86bef332bfc3/go.mod h1:ZjcWmFBXmLKZu9Nxj3WKYEafiSqer2rnvPr0en9UNpI=
-github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU=
-github.com/vishvananda/netns v0.0.0-20210104183010-2eb08e3e575f h1:p4VB7kIXpOQvVn1ZaTIVp+3vuYAXFe3OJEvjbUYJLaA=
-github.com/vishvananda/netns v0.0.0-20210104183010-2eb08e3e575f/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0=
-github.com/weppos/publicsuffix-go v0.4.0/go.mod h1:z3LCPQ38eedDQSwmsSRW4Y7t2L8Ln16JPQ02lHAdn5k=
-github.com/weppos/publicsuffix-go v0.13.0/go.mod h1:z3LCPQ38eedDQSwmsSRW4Y7t2L8Ln16JPQ02lHAdn5k=
-github.com/willdonnelly/passwd v0.0.0-20141013001024-7935dab3074c/go.mod h1:xcvfY9pOw6s4wyrhilFSbMthL6KzgrfCIETHHUOQ/fQ=
-github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6eAcqrDh0=
+github.com/vishvananda/netlink v1.2.1-beta.2 h1:Llsql0lnQEbHj0I1OuKyp8otXp0r3q0mPkuhwHfStVs=
+github.com/vishvananda/netlink v1.2.1-beta.2/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho=
+github.com/vishvananda/netns v0.0.0-20200728191858-db3c7e526aae/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0=
+github.com/vishvananda/netns v0.0.4 h1:Oeaw1EM2JMxD51g9uhtC0D7erkIjgmj8+JZc26m1YX8=
+github.com/vishvananda/netns v0.0.4/go.mod h1:SpkAiCQRtJ6TvvxPnOSyH3BMl6unz3xZlaprSwhNNJM=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xlab/treeprint v0.0.0-20181112141820-a009c3971eca/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
@@ -710,13 +614,6 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-github.com/zcalusic/sysinfo v0.0.0-20210831153053-2c6e1d254246/go.mod h1:WGLNaWsjKQ2gXmAHh+MQztgu3FLFAnOFJjFzhpgShCY=
-github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0=
-github.com/zmap/rc2 v0.0.0-20131011165748-24b9757f5521/go.mod h1:3YZ9o3WnatTIZhuOtot4IcUfzoKVjUHqu6WALIyI0nE=
-github.com/zmap/zcertificate v0.0.0-20180516150559-0e3d58b1bac4/go.mod h1:5iU54tB79AMBcySS0R2XIyZBAVmeHranShAFELYx7is=
-github.com/zmap/zcrypto v0.0.0-20200513165325-16679db567ff/go.mod h1:TxpejqcVKQjQaVVmMGfzx5HnmFMdIU+vLtaCyPBfGI4=
-github.com/zmap/zcrypto v0.0.0-20200911161511-43ff0ea04f21/go.mod h1:TxpejqcVKQjQaVVmMGfzx5HnmFMdIU+vLtaCyPBfGI4=
-github.com/zmap/zlint/v2 v2.2.1/go.mod h1:ixPWsdq8qLxYRpNUTbcKig3R7WgmspsHGLhCCs6rFAM=
go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/bbolt v1.3.6/go.mod h1:qXsaaIqmgQH0T+OPdb99Bf+PKfBBQVAdyD6TY9G8XM4=
go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
@@ -766,28 +663,20 @@ go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
go.uber.org/zap v1.19.0/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
-golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20200124225646-8b5121be2f68/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/crypto v0.0.0-20201012173705-84dcc777aaee/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
-golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
-golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220131195533-30dcbda58838/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220315160706-3147a52a75dd/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
-golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
-golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
+golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30=
+golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -872,7 +761,6 @@ golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
-golang.org/x/net v0.0.0-20210326060303-6b1517762897/go.mod h1:uSPa2vr4CLtc/ILN5odXGNXS6mhrKVzTaCXzk9m6W3k=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk=
golang.org/x/net v0.0.0-20210520170846-37e1c6afe023/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
@@ -884,8 +772,8 @@ golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.3.1-0.20221206200815-1e63c2f08a10/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
-golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
-golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
+golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys=
+golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -912,8 +800,9 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
+golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -922,7 +811,6 @@ golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5h
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190411185658-b44545bcd369/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -930,15 +818,12 @@ golang.org/x/sys v0.0.0-20190418153312-f0ce4c0180be/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190523142557-0e01d883c5c5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606122018-79a91cf218c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191002063906-3421d5a6bb1c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -947,10 +832,8 @@ golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200102141924-c96a22e43c9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -959,7 +842,6 @@ golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200420163511-1957bb5e6d1f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -967,6 +849,7 @@ golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200728102440-3e129f6d46b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -981,15 +864,12 @@ golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210301091718-77cc2087c03b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210502180810-71e4cd670f79/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210525143221-35b2ab0089ea/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -1007,15 +887,15 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
-golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
+golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
-golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek=
-golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
+golang.org/x/term v0.22.0 h1:BbsgPEJULsl2fV/AT3v15Mjva5yXKQDyKf+TbDz7QJk=
+golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -1027,8 +907,8 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
-golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
+golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
+golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -1097,6 +977,8 @@ golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
+golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg=
+golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -1213,7 +1095,6 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
-gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -1221,22 +1102,18 @@ gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
-gopkg.in/djherbis/times.v1 v1.2.0/go.mod h1:AQlg6unIsrsCEdQYhTzERy542dz6SFdQFZFv6mUY0P8=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
-gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo=
gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
-gopkg.in/ini.v1 v1.63.2 h1:tGK/CyBg7SMzb60vP1M03vNZ3VDu3wGQJwn7Sxi9r3c=
-gopkg.in/ini.v1 v1.63.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
+gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
+gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
-gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
-gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
@@ -1248,13 +1125,10 @@ gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=
-gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=
gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0=
gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8=
@@ -1307,7 +1181,6 @@ k8s.io/utils v0.0.0-20220210201930-3a6ce19ff2f9/go.mod h1:jPW/WVKK9YHAvNhRxK0md/
k8s.io/utils v0.0.0-20220728103510-ee6ede2d64ed/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA=
k8s.io/utils v0.0.0-20221011040102-427025108f67 h1:ZmUY7x0cwj9e7pGyCTIalBi5jpNfigO5sU46/xFoF/w=
k8s.io/utils v0.0.0-20221011040102-427025108f67/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
-pault.ag/go/topsort v0.0.0-20160530003732-f98d2ad46e1a/go.mod h1:INqx0ClF7kmPAMk2zVTX8DRnhZ/yaA/Mg52g8KFKE7k=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
diff --git a/pkg/config/config.go b/pkg/config/config.go
index 54947db77..58b15995b 100644
--- a/pkg/config/config.go
+++ b/pkg/config/config.go
@@ -6,7 +6,7 @@ import (
"strings"
"github.com/imdario/mergo"
- yipSchema "github.com/mudler/yip/pkg/schema"
+ yipSchema "github.com/rancher/yip/pkg/schema"
"k8s.io/apimachinery/pkg/util/validation"
)
diff --git a/pkg/config/cos.go b/pkg/config/cos.go
index 3e550a1ab..efa3dfba4 100644
--- a/pkg/config/cos.go
+++ b/pkg/config/cos.go
@@ -11,7 +11,7 @@ import (
"strings"
"sync"
- yipSchema "github.com/mudler/yip/pkg/schema"
+ yipSchema "github.com/rancher/yip/pkg/schema"
"github.com/sirupsen/logrus"
"gopkg.in/yaml.v3"
diff --git a/pkg/config/cos_test.go b/pkg/config/cos_test.go
index 8fa32d34a..09057e94e 100644
--- a/pkg/config/cos_test.go
+++ b/pkg/config/cos_test.go
@@ -4,7 +4,7 @@ import (
"strings"
"testing"
- yipSchema "github.com/mudler/yip/pkg/schema"
+ yipSchema "github.com/rancher/yip/pkg/schema"
"github.com/stretchr/testify/assert"
"github.com/harvester/harvester-installer/pkg/util"
diff --git a/pkg/console/network.go b/pkg/console/network.go
index 1e84f1645..5b234b8fd 100644
--- a/pkg/console/network.go
+++ b/pkg/console/network.go
@@ -8,7 +8,7 @@ import (
"os/exec"
"syscall"
- yipSchema "github.com/mudler/yip/pkg/schema"
+ yipSchema "github.com/rancher/yip/pkg/schema"
"github.com/sirupsen/logrus"
"github.com/vishvananda/netlink"
"golang.org/x/sys/unix"
diff --git a/pkg/console/util.go b/pkg/console/util.go
index bc0b74b7a..789d0c9cf 100644
--- a/pkg/console/util.go
+++ b/pkg/console/util.go
@@ -21,8 +21,8 @@ import (
"time"
"github.com/jroimartin/gocui"
- yipSchema "github.com/mudler/yip/pkg/schema"
"github.com/pkg/errors"
+ yipSchema "github.com/rancher/yip/pkg/schema"
"github.com/sirupsen/logrus"
"golang.org/x/crypto/ssh"
"golang.org/x/net/http/httpproxy"
diff --git a/vendor/github.com/coreos/yaml/LICENSE b/vendor/github.com/coreos/yaml/LICENSE
deleted file mode 100644
index a68e67f01..000000000
--- a/vendor/github.com/coreos/yaml/LICENSE
+++ /dev/null
@@ -1,188 +0,0 @@
-
-Copyright (c) 2011-2014 - Canonical Inc.
-
-This software is licensed under the LGPLv3, included below.
-
-As a special exception to the GNU Lesser General Public License version 3
-("LGPL3"), the copyright holders of this Library give you permission to
-convey to a third party a Combined Work that links statically or dynamically
-to this Library without providing any Minimal Corresponding Source or
-Minimal Application Code as set out in 4d or providing the installation
-information set out in section 4e, provided that you comply with the other
-provisions of LGPL3 and provided that you meet, for the Application the
-terms and conditions of the license(s) which apply to the Application.
-
-Except as stated in this special exception, the provisions of LGPL3 will
-continue to comply in full to this Library. If you modify this Library, you
-may apply this exception to your version of this Library, but you are not
-obliged to do so. If you do not wish to do so, delete this exception
-statement from your version. This exception does not (and cannot) modify any
-license terms which apply to the Application, with which you must still
-comply.
-
-
- GNU LESSER GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc.
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
-
- This version of the GNU Lesser General Public License incorporates
-the terms and conditions of version 3 of the GNU General Public
-License, supplemented by the additional permissions listed below.
-
- 0. Additional Definitions.
-
- As used herein, "this License" refers to version 3 of the GNU Lesser
-General Public License, and the "GNU GPL" refers to version 3 of the GNU
-General Public License.
-
- "The Library" refers to a covered work governed by this License,
-other than an Application or a Combined Work as defined below.
-
- An "Application" is any work that makes use of an interface provided
-by the Library, but which is not otherwise based on the Library.
-Defining a subclass of a class defined by the Library is deemed a mode
-of using an interface provided by the Library.
-
- A "Combined Work" is a work produced by combining or linking an
-Application with the Library. The particular version of the Library
-with which the Combined Work was made is also called the "Linked
-Version".
-
- The "Minimal Corresponding Source" for a Combined Work means the
-Corresponding Source for the Combined Work, excluding any source code
-for portions of the Combined Work that, considered in isolation, are
-based on the Application, and not on the Linked Version.
-
- The "Corresponding Application Code" for a Combined Work means the
-object code and/or source code for the Application, including any data
-and utility programs needed for reproducing the Combined Work from the
-Application, but excluding the System Libraries of the Combined Work.
-
- 1. Exception to Section 3 of the GNU GPL.
-
- You may convey a covered work under sections 3 and 4 of this License
-without being bound by section 3 of the GNU GPL.
-
- 2. Conveying Modified Versions.
-
- If you modify a copy of the Library, and, in your modifications, a
-facility refers to a function or data to be supplied by an Application
-that uses the facility (other than as an argument passed when the
-facility is invoked), then you may convey a copy of the modified
-version:
-
- a) under this License, provided that you make a good faith effort to
- ensure that, in the event an Application does not supply the
- function or data, the facility still operates, and performs
- whatever part of its purpose remains meaningful, or
-
- b) under the GNU GPL, with none of the additional permissions of
- this License applicable to that copy.
-
- 3. Object Code Incorporating Material from Library Header Files.
-
- The object code form of an Application may incorporate material from
-a header file that is part of the Library. You may convey such object
-code under terms of your choice, provided that, if the incorporated
-material is not limited to numerical parameters, data structure
-layouts and accessors, or small macros, inline functions and templates
-(ten or fewer lines in length), you do both of the following:
-
- a) Give prominent notice with each copy of the object code that the
- Library is used in it and that the Library and its use are
- covered by this License.
-
- b) Accompany the object code with a copy of the GNU GPL and this license
- document.
-
- 4. Combined Works.
-
- You may convey a Combined Work under terms of your choice that,
-taken together, effectively do not restrict modification of the
-portions of the Library contained in the Combined Work and reverse
-engineering for debugging such modifications, if you also do each of
-the following:
-
- a) Give prominent notice with each copy of the Combined Work that
- the Library is used in it and that the Library and its use are
- covered by this License.
-
- b) Accompany the Combined Work with a copy of the GNU GPL and this license
- document.
-
- c) For a Combined Work that displays copyright notices during
- execution, include the copyright notice for the Library among
- these notices, as well as a reference directing the user to the
- copies of the GNU GPL and this license document.
-
- d) Do one of the following:
-
- 0) Convey the Minimal Corresponding Source under the terms of this
- License, and the Corresponding Application Code in a form
- suitable for, and under terms that permit, the user to
- recombine or relink the Application with a modified version of
- the Linked Version to produce a modified Combined Work, in the
- manner specified by section 6 of the GNU GPL for conveying
- Corresponding Source.
-
- 1) Use a suitable shared library mechanism for linking with the
- Library. A suitable mechanism is one that (a) uses at run time
- a copy of the Library already present on the user's computer
- system, and (b) will operate properly with a modified version
- of the Library that is interface-compatible with the Linked
- Version.
-
- e) Provide Installation Information, but only if you would otherwise
- be required to provide such information under section 6 of the
- GNU GPL, and only to the extent that such information is
- necessary to install and execute a modified version of the
- Combined Work produced by recombining or relinking the
- Application with a modified version of the Linked Version. (If
- you use option 4d0, the Installation Information must accompany
- the Minimal Corresponding Source and Corresponding Application
- Code. If you use option 4d1, you must provide the Installation
- Information in the manner specified by section 6 of the GNU GPL
- for conveying Corresponding Source.)
-
- 5. Combined Libraries.
-
- You may place library facilities that are a work based on the
-Library side by side in a single library together with other library
-facilities that are not Applications and are not covered by this
-License, and convey such a combined library under terms of your
-choice, if you do both of the following:
-
- a) Accompany the combined library with a copy of the same work based
- on the Library, uncombined with any other library facilities,
- conveyed under the terms of this License.
-
- b) Give prominent notice with the combined library that part of it
- is a work based on the Library, and explaining where to find the
- accompanying uncombined form of the same work.
-
- 6. Revised Versions of the GNU Lesser General Public License.
-
- The Free Software Foundation may publish revised and/or new versions
-of the GNU Lesser General Public License from time to time. Such new
-versions will be similar in spirit to the present version, but may
-differ in detail to address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Library as you received it specifies that a certain numbered version
-of the GNU Lesser General Public License "or any later version"
-applies to it, you have the option of following the terms and
-conditions either of that published version or of any later version
-published by the Free Software Foundation. If the Library as you
-received it does not specify a version number of the GNU Lesser
-General Public License, you may choose any version of the GNU Lesser
-General Public License ever published by the Free Software Foundation.
-
- If the Library as you received it specifies that a proxy can decide
-whether future versions of the GNU Lesser General Public License shall
-apply, that proxy's public statement of acceptance of any version is
-permanent authorization for you to choose that version for the
-Library.
diff --git a/vendor/github.com/coreos/yaml/LICENSE.libyaml b/vendor/github.com/coreos/yaml/LICENSE.libyaml
deleted file mode 100644
index 8da58fbf6..000000000
--- a/vendor/github.com/coreos/yaml/LICENSE.libyaml
+++ /dev/null
@@ -1,31 +0,0 @@
-The following files were ported to Go from C files of libyaml, and thus
-are still covered by their original copyright and license:
-
- apic.go
- emitterc.go
- parserc.go
- readerc.go
- scannerc.go
- writerc.go
- yamlh.go
- yamlprivateh.go
-
-Copyright (c) 2006 Kirill Simonov
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/vendor/github.com/coreos/yaml/README.md b/vendor/github.com/coreos/yaml/README.md
deleted file mode 100644
index 44270051f..000000000
--- a/vendor/github.com/coreos/yaml/README.md
+++ /dev/null
@@ -1,131 +0,0 @@
-Note: This is a fork of https://github.com/go-yaml/yaml. The following README
-doesn't necessarily apply to this fork.
-
-# YAML support for the Go language
-
-Introduction
-------------
-
-The yaml package enables Go programs to comfortably encode and decode YAML
-values. It was developed within [Canonical](https://www.canonical.com) as
-part of the [juju](https://juju.ubuntu.com) project, and is based on a
-pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML)
-C library to parse and generate YAML data quickly and reliably.
-
-Compatibility
--------------
-
-The yaml package supports most of YAML 1.1 and 1.2, including support for
-anchors, tags, map merging, etc. Multi-document unmarshalling is not yet
-implemented, and base-60 floats from YAML 1.1 are purposefully not
-supported since they're a poor design and are gone in YAML 1.2.
-
-Installation and usage
-----------------------
-
-The import path for the package is *gopkg.in/yaml.v1*.
-
-To install it, run:
-
- go get gopkg.in/yaml.v1
-
-API documentation
------------------
-
-If opened in a browser, the import path itself leads to the API documentation:
-
- * [https://gopkg.in/yaml.v1](https://gopkg.in/yaml.v1)
-
-API stability
--------------
-
-The package API for yaml v1 will remain stable as described in [gopkg.in](https://gopkg.in).
-
-
-License
--------
-
-The yaml package is licensed under the LGPL with an exception that allows it to be linked statically. Please see the LICENSE file for details.
-
-
-Example
--------
-
-```Go
-package main
-
-import (
- "fmt"
- "log"
-
- "gopkg.in/yaml.v1"
-)
-
-var data = `
-a: Easy!
-b:
- c: 2
- d: [3, 4]
-`
-
-type T struct {
- A string
- B struct{C int; D []int ",flow"}
-}
-
-func main() {
- t := T{}
-
- err := yaml.Unmarshal([]byte(data), &t)
- if err != nil {
- log.Fatalf("error: %v", err)
- }
- fmt.Printf("--- t:\n%v\n\n", t)
-
- d, err := yaml.Marshal(&t)
- if err != nil {
- log.Fatalf("error: %v", err)
- }
- fmt.Printf("--- t dump:\n%s\n\n", string(d))
-
- m := make(map[interface{}]interface{})
-
- err = yaml.Unmarshal([]byte(data), &m)
- if err != nil {
- log.Fatalf("error: %v", err)
- }
- fmt.Printf("--- m:\n%v\n\n", m)
-
- d, err = yaml.Marshal(&m)
- if err != nil {
- log.Fatalf("error: %v", err)
- }
- fmt.Printf("--- m dump:\n%s\n\n", string(d))
-}
-```
-
-This example will generate the following output:
-
-```
---- t:
-{Easy! {2 [3 4]}}
-
---- t dump:
-a: Easy!
-b:
- c: 2
- d: [3, 4]
-
-
---- m:
-map[a:Easy! b:map[c:2 d:[3 4]]]
-
---- m dump:
-a: Easy!
-b:
- c: 2
- d:
- - 3
- - 4
-```
-
diff --git a/vendor/github.com/coreos/yaml/apic.go b/vendor/github.com/coreos/yaml/apic.go
deleted file mode 100644
index 95ec014e8..000000000
--- a/vendor/github.com/coreos/yaml/apic.go
+++ /dev/null
@@ -1,742 +0,0 @@
-package yaml
-
-import (
- "io"
- "os"
-)
-
-func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) {
- //fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens))
-
- // Check if we can move the queue at the beginning of the buffer.
- if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) {
- if parser.tokens_head != len(parser.tokens) {
- copy(parser.tokens, parser.tokens[parser.tokens_head:])
- }
- parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head]
- parser.tokens_head = 0
- }
- parser.tokens = append(parser.tokens, *token)
- if pos < 0 {
- return
- }
- copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:])
- parser.tokens[parser.tokens_head+pos] = *token
-}
-
-// Create a new parser object.
-func yaml_parser_initialize(parser *yaml_parser_t) bool {
- *parser = yaml_parser_t{
- raw_buffer: make([]byte, 0, input_raw_buffer_size),
- buffer: make([]byte, 0, input_buffer_size),
- }
- return true
-}
-
-// Destroy a parser object.
-func yaml_parser_delete(parser *yaml_parser_t) {
- *parser = yaml_parser_t{}
-}
-
-// String read handler.
-func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
- if parser.input_pos == len(parser.input) {
- return 0, io.EOF
- }
- n = copy(buffer, parser.input[parser.input_pos:])
- parser.input_pos += n
- return n, nil
-}
-
-// File read handler.
-func yaml_file_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
- return parser.input_file.Read(buffer)
-}
-
-// Set a string input.
-func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) {
- if parser.read_handler != nil {
- panic("must set the input source only once")
- }
- parser.read_handler = yaml_string_read_handler
- parser.input = input
- parser.input_pos = 0
-}
-
-// Set a file input.
-func yaml_parser_set_input_file(parser *yaml_parser_t, file *os.File) {
- if parser.read_handler != nil {
- panic("must set the input source only once")
- }
- parser.read_handler = yaml_file_read_handler
- parser.input_file = file
-}
-
-// Set the source encoding.
-func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) {
- if parser.encoding != yaml_ANY_ENCODING {
- panic("must set the encoding only once")
- }
- parser.encoding = encoding
-}
-
-// Create a new emitter object.
-func yaml_emitter_initialize(emitter *yaml_emitter_t) bool {
- *emitter = yaml_emitter_t{
- buffer: make([]byte, output_buffer_size),
- raw_buffer: make([]byte, 0, output_raw_buffer_size),
- states: make([]yaml_emitter_state_t, 0, initial_stack_size),
- events: make([]yaml_event_t, 0, initial_queue_size),
- }
- return true
-}
-
-// Destroy an emitter object.
-func yaml_emitter_delete(emitter *yaml_emitter_t) {
- *emitter = yaml_emitter_t{}
-}
-
-// String write handler.
-func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
- *emitter.output_buffer = append(*emitter.output_buffer, buffer...)
- return nil
-}
-
-// File write handler.
-func yaml_file_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
- _, err := emitter.output_file.Write(buffer)
- return err
-}
-
-// Set a string output.
-func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) {
- if emitter.write_handler != nil {
- panic("must set the output target only once")
- }
- emitter.write_handler = yaml_string_write_handler
- emitter.output_buffer = output_buffer
-}
-
-// Set a file output.
-func yaml_emitter_set_output_file(emitter *yaml_emitter_t, file io.Writer) {
- if emitter.write_handler != nil {
- panic("must set the output target only once")
- }
- emitter.write_handler = yaml_file_write_handler
- emitter.output_file = file
-}
-
-// Set the output encoding.
-func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) {
- if emitter.encoding != yaml_ANY_ENCODING {
- panic("must set the output encoding only once")
- }
- emitter.encoding = encoding
-}
-
-// Set the canonical output style.
-func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) {
- emitter.canonical = canonical
-}
-
-//// Set the indentation increment.
-func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) {
- if indent < 2 || indent > 9 {
- indent = 2
- }
- emitter.best_indent = indent
-}
-
-// Set the preferred line width.
-func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) {
- if width < 0 {
- width = -1
- }
- emitter.best_width = width
-}
-
-// Set if unescaped non-ASCII characters are allowed.
-func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) {
- emitter.unicode = unicode
-}
-
-// Set the preferred line break character.
-func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) {
- emitter.line_break = line_break
-}
-
-///*
-// * Destroy a token object.
-// */
-//
-//YAML_DECLARE(void)
-//yaml_token_delete(yaml_token_t *token)
-//{
-// assert(token); // Non-NULL token object expected.
-//
-// switch (token.type)
-// {
-// case YAML_TAG_DIRECTIVE_TOKEN:
-// yaml_free(token.data.tag_directive.handle);
-// yaml_free(token.data.tag_directive.prefix);
-// break;
-//
-// case YAML_ALIAS_TOKEN:
-// yaml_free(token.data.alias.value);
-// break;
-//
-// case YAML_ANCHOR_TOKEN:
-// yaml_free(token.data.anchor.value);
-// break;
-//
-// case YAML_TAG_TOKEN:
-// yaml_free(token.data.tag.handle);
-// yaml_free(token.data.tag.suffix);
-// break;
-//
-// case YAML_SCALAR_TOKEN:
-// yaml_free(token.data.scalar.value);
-// break;
-//
-// default:
-// break;
-// }
-//
-// memset(token, 0, sizeof(yaml_token_t));
-//}
-//
-///*
-// * Check if a string is a valid UTF-8 sequence.
-// *
-// * Check 'reader.c' for more details on UTF-8 encoding.
-// */
-//
-//static int
-//yaml_check_utf8(yaml_char_t *start, size_t length)
-//{
-// yaml_char_t *end = start+length;
-// yaml_char_t *pointer = start;
-//
-// while (pointer < end) {
-// unsigned char octet;
-// unsigned int width;
-// unsigned int value;
-// size_t k;
-//
-// octet = pointer[0];
-// width = (octet & 0x80) == 0x00 ? 1 :
-// (octet & 0xE0) == 0xC0 ? 2 :
-// (octet & 0xF0) == 0xE0 ? 3 :
-// (octet & 0xF8) == 0xF0 ? 4 : 0;
-// value = (octet & 0x80) == 0x00 ? octet & 0x7F :
-// (octet & 0xE0) == 0xC0 ? octet & 0x1F :
-// (octet & 0xF0) == 0xE0 ? octet & 0x0F :
-// (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0;
-// if (!width) return 0;
-// if (pointer+width > end) return 0;
-// for (k = 1; k < width; k ++) {
-// octet = pointer[k];
-// if ((octet & 0xC0) != 0x80) return 0;
-// value = (value << 6) + (octet & 0x3F);
-// }
-// if (!((width == 1) ||
-// (width == 2 && value >= 0x80) ||
-// (width == 3 && value >= 0x800) ||
-// (width == 4 && value >= 0x10000))) return 0;
-//
-// pointer += width;
-// }
-//
-// return 1;
-//}
-//
-
-// Create STREAM-START.
-func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) bool {
- *event = yaml_event_t{
- typ: yaml_STREAM_START_EVENT,
- encoding: encoding,
- }
- return true
-}
-
-// Create STREAM-END.
-func yaml_stream_end_event_initialize(event *yaml_event_t) bool {
- *event = yaml_event_t{
- typ: yaml_STREAM_END_EVENT,
- }
- return true
-}
-
-// Create DOCUMENT-START.
-func yaml_document_start_event_initialize(event *yaml_event_t, version_directive *yaml_version_directive_t,
- tag_directives []yaml_tag_directive_t, implicit bool) bool {
- *event = yaml_event_t{
- typ: yaml_DOCUMENT_START_EVENT,
- version_directive: version_directive,
- tag_directives: tag_directives,
- implicit: implicit,
- }
- return true
-}
-
-// Create DOCUMENT-END.
-func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) bool {
- *event = yaml_event_t{
- typ: yaml_DOCUMENT_END_EVENT,
- implicit: implicit,
- }
- return true
-}
-
-///*
-// * Create ALIAS.
-// */
-//
-//YAML_DECLARE(int)
-//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t)
-//{
-// mark yaml_mark_t = { 0, 0, 0 }
-// anchor_copy *yaml_char_t = NULL
-//
-// assert(event) // Non-NULL event object is expected.
-// assert(anchor) // Non-NULL anchor is expected.
-//
-// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0
-//
-// anchor_copy = yaml_strdup(anchor)
-// if (!anchor_copy)
-// return 0
-//
-// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark)
-//
-// return 1
-//}
-
-// Create SCALAR.
-func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool {
- *event = yaml_event_t{
- typ: yaml_SCALAR_EVENT,
- anchor: anchor,
- tag: tag,
- value: value,
- implicit: plain_implicit,
- quoted_implicit: quoted_implicit,
- style: yaml_style_t(style),
- }
- return true
-}
-
-// Create SEQUENCE-START.
-func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool {
- *event = yaml_event_t{
- typ: yaml_SEQUENCE_START_EVENT,
- anchor: anchor,
- tag: tag,
- implicit: implicit,
- style: yaml_style_t(style),
- }
- return true
-}
-
-// Create SEQUENCE-END.
-func yaml_sequence_end_event_initialize(event *yaml_event_t) bool {
- *event = yaml_event_t{
- typ: yaml_SEQUENCE_END_EVENT,
- }
- return true
-}
-
-// Create MAPPING-START.
-func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) bool {
- *event = yaml_event_t{
- typ: yaml_MAPPING_START_EVENT,
- anchor: anchor,
- tag: tag,
- implicit: implicit,
- style: yaml_style_t(style),
- }
- return true
-}
-
-// Create MAPPING-END.
-func yaml_mapping_end_event_initialize(event *yaml_event_t) bool {
- *event = yaml_event_t{
- typ: yaml_MAPPING_END_EVENT,
- }
- return true
-}
-
-// Destroy an event object.
-func yaml_event_delete(event *yaml_event_t) {
- *event = yaml_event_t{}
-}
-
-///*
-// * Create a document object.
-// */
-//
-//YAML_DECLARE(int)
-//yaml_document_initialize(document *yaml_document_t,
-// version_directive *yaml_version_directive_t,
-// tag_directives_start *yaml_tag_directive_t,
-// tag_directives_end *yaml_tag_directive_t,
-// start_implicit int, end_implicit int)
-//{
-// struct {
-// error yaml_error_type_t
-// } context
-// struct {
-// start *yaml_node_t
-// end *yaml_node_t
-// top *yaml_node_t
-// } nodes = { NULL, NULL, NULL }
-// version_directive_copy *yaml_version_directive_t = NULL
-// struct {
-// start *yaml_tag_directive_t
-// end *yaml_tag_directive_t
-// top *yaml_tag_directive_t
-// } tag_directives_copy = { NULL, NULL, NULL }
-// value yaml_tag_directive_t = { NULL, NULL }
-// mark yaml_mark_t = { 0, 0, 0 }
-//
-// assert(document) // Non-NULL document object is expected.
-// assert((tag_directives_start && tag_directives_end) ||
-// (tag_directives_start == tag_directives_end))
-// // Valid tag directives are expected.
-//
-// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error
-//
-// if (version_directive) {
-// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t))
-// if (!version_directive_copy) goto error
-// version_directive_copy.major = version_directive.major
-// version_directive_copy.minor = version_directive.minor
-// }
-//
-// if (tag_directives_start != tag_directives_end) {
-// tag_directive *yaml_tag_directive_t
-// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE))
-// goto error
-// for (tag_directive = tag_directives_start
-// tag_directive != tag_directives_end; tag_directive ++) {
-// assert(tag_directive.handle)
-// assert(tag_directive.prefix)
-// if (!yaml_check_utf8(tag_directive.handle,
-// strlen((char *)tag_directive.handle)))
-// goto error
-// if (!yaml_check_utf8(tag_directive.prefix,
-// strlen((char *)tag_directive.prefix)))
-// goto error
-// value.handle = yaml_strdup(tag_directive.handle)
-// value.prefix = yaml_strdup(tag_directive.prefix)
-// if (!value.handle || !value.prefix) goto error
-// if (!PUSH(&context, tag_directives_copy, value))
-// goto error
-// value.handle = NULL
-// value.prefix = NULL
-// }
-// }
-//
-// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy,
-// tag_directives_copy.start, tag_directives_copy.top,
-// start_implicit, end_implicit, mark, mark)
-//
-// return 1
-//
-//error:
-// STACK_DEL(&context, nodes)
-// yaml_free(version_directive_copy)
-// while (!STACK_EMPTY(&context, tag_directives_copy)) {
-// value yaml_tag_directive_t = POP(&context, tag_directives_copy)
-// yaml_free(value.handle)
-// yaml_free(value.prefix)
-// }
-// STACK_DEL(&context, tag_directives_copy)
-// yaml_free(value.handle)
-// yaml_free(value.prefix)
-//
-// return 0
-//}
-//
-///*
-// * Destroy a document object.
-// */
-//
-//YAML_DECLARE(void)
-//yaml_document_delete(document *yaml_document_t)
-//{
-// struct {
-// error yaml_error_type_t
-// } context
-// tag_directive *yaml_tag_directive_t
-//
-// context.error = YAML_NO_ERROR // Eliminate a compliler warning.
-//
-// assert(document) // Non-NULL document object is expected.
-//
-// while (!STACK_EMPTY(&context, document.nodes)) {
-// node yaml_node_t = POP(&context, document.nodes)
-// yaml_free(node.tag)
-// switch (node.type) {
-// case YAML_SCALAR_NODE:
-// yaml_free(node.data.scalar.value)
-// break
-// case YAML_SEQUENCE_NODE:
-// STACK_DEL(&context, node.data.sequence.items)
-// break
-// case YAML_MAPPING_NODE:
-// STACK_DEL(&context, node.data.mapping.pairs)
-// break
-// default:
-// assert(0) // Should not happen.
-// }
-// }
-// STACK_DEL(&context, document.nodes)
-//
-// yaml_free(document.version_directive)
-// for (tag_directive = document.tag_directives.start
-// tag_directive != document.tag_directives.end
-// tag_directive++) {
-// yaml_free(tag_directive.handle)
-// yaml_free(tag_directive.prefix)
-// }
-// yaml_free(document.tag_directives.start)
-//
-// memset(document, 0, sizeof(yaml_document_t))
-//}
-//
-///**
-// * Get a document node.
-// */
-//
-//YAML_DECLARE(yaml_node_t *)
-//yaml_document_get_node(document *yaml_document_t, index int)
-//{
-// assert(document) // Non-NULL document object is expected.
-//
-// if (index > 0 && document.nodes.start + index <= document.nodes.top) {
-// return document.nodes.start + index - 1
-// }
-// return NULL
-//}
-//
-///**
-// * Get the root object.
-// */
-//
-//YAML_DECLARE(yaml_node_t *)
-//yaml_document_get_root_node(document *yaml_document_t)
-//{
-// assert(document) // Non-NULL document object is expected.
-//
-// if (document.nodes.top != document.nodes.start) {
-// return document.nodes.start
-// }
-// return NULL
-//}
-//
-///*
-// * Add a scalar node to a document.
-// */
-//
-//YAML_DECLARE(int)
-//yaml_document_add_scalar(document *yaml_document_t,
-// tag *yaml_char_t, value *yaml_char_t, length int,
-// style yaml_scalar_style_t)
-//{
-// struct {
-// error yaml_error_type_t
-// } context
-// mark yaml_mark_t = { 0, 0, 0 }
-// tag_copy *yaml_char_t = NULL
-// value_copy *yaml_char_t = NULL
-// node yaml_node_t
-//
-// assert(document) // Non-NULL document object is expected.
-// assert(value) // Non-NULL value is expected.
-//
-// if (!tag) {
-// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG
-// }
-//
-// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
-// tag_copy = yaml_strdup(tag)
-// if (!tag_copy) goto error
-//
-// if (length < 0) {
-// length = strlen((char *)value)
-// }
-//
-// if (!yaml_check_utf8(value, length)) goto error
-// value_copy = yaml_malloc(length+1)
-// if (!value_copy) goto error
-// memcpy(value_copy, value, length)
-// value_copy[length] = '\0'
-//
-// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark)
-// if (!PUSH(&context, document.nodes, node)) goto error
-//
-// return document.nodes.top - document.nodes.start
-//
-//error:
-// yaml_free(tag_copy)
-// yaml_free(value_copy)
-//
-// return 0
-//}
-//
-///*
-// * Add a sequence node to a document.
-// */
-//
-//YAML_DECLARE(int)
-//yaml_document_add_sequence(document *yaml_document_t,
-// tag *yaml_char_t, style yaml_sequence_style_t)
-//{
-// struct {
-// error yaml_error_type_t
-// } context
-// mark yaml_mark_t = { 0, 0, 0 }
-// tag_copy *yaml_char_t = NULL
-// struct {
-// start *yaml_node_item_t
-// end *yaml_node_item_t
-// top *yaml_node_item_t
-// } items = { NULL, NULL, NULL }
-// node yaml_node_t
-//
-// assert(document) // Non-NULL document object is expected.
-//
-// if (!tag) {
-// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG
-// }
-//
-// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
-// tag_copy = yaml_strdup(tag)
-// if (!tag_copy) goto error
-//
-// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error
-//
-// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end,
-// style, mark, mark)
-// if (!PUSH(&context, document.nodes, node)) goto error
-//
-// return document.nodes.top - document.nodes.start
-//
-//error:
-// STACK_DEL(&context, items)
-// yaml_free(tag_copy)
-//
-// return 0
-//}
-//
-///*
-// * Add a mapping node to a document.
-// */
-//
-//YAML_DECLARE(int)
-//yaml_document_add_mapping(document *yaml_document_t,
-// tag *yaml_char_t, style yaml_mapping_style_t)
-//{
-// struct {
-// error yaml_error_type_t
-// } context
-// mark yaml_mark_t = { 0, 0, 0 }
-// tag_copy *yaml_char_t = NULL
-// struct {
-// start *yaml_node_pair_t
-// end *yaml_node_pair_t
-// top *yaml_node_pair_t
-// } pairs = { NULL, NULL, NULL }
-// node yaml_node_t
-//
-// assert(document) // Non-NULL document object is expected.
-//
-// if (!tag) {
-// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG
-// }
-//
-// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
-// tag_copy = yaml_strdup(tag)
-// if (!tag_copy) goto error
-//
-// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error
-//
-// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end,
-// style, mark, mark)
-// if (!PUSH(&context, document.nodes, node)) goto error
-//
-// return document.nodes.top - document.nodes.start
-//
-//error:
-// STACK_DEL(&context, pairs)
-// yaml_free(tag_copy)
-//
-// return 0
-//}
-//
-///*
-// * Append an item to a sequence node.
-// */
-//
-//YAML_DECLARE(int)
-//yaml_document_append_sequence_item(document *yaml_document_t,
-// sequence int, item int)
-//{
-// struct {
-// error yaml_error_type_t
-// } context
-//
-// assert(document) // Non-NULL document is required.
-// assert(sequence > 0
-// && document.nodes.start + sequence <= document.nodes.top)
-// // Valid sequence id is required.
-// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE)
-// // A sequence node is required.
-// assert(item > 0 && document.nodes.start + item <= document.nodes.top)
-// // Valid item id is required.
-//
-// if (!PUSH(&context,
-// document.nodes.start[sequence-1].data.sequence.items, item))
-// return 0
-//
-// return 1
-//}
-//
-///*
-// * Append a pair of a key and a value to a mapping node.
-// */
-//
-//YAML_DECLARE(int)
-//yaml_document_append_mapping_pair(document *yaml_document_t,
-// mapping int, key int, value int)
-//{
-// struct {
-// error yaml_error_type_t
-// } context
-//
-// pair yaml_node_pair_t
-//
-// assert(document) // Non-NULL document is required.
-// assert(mapping > 0
-// && document.nodes.start + mapping <= document.nodes.top)
-// // Valid mapping id is required.
-// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE)
-// // A mapping node is required.
-// assert(key > 0 && document.nodes.start + key <= document.nodes.top)
-// // Valid key id is required.
-// assert(value > 0 && document.nodes.start + value <= document.nodes.top)
-// // Valid value id is required.
-//
-// pair.key = key
-// pair.value = value
-//
-// if (!PUSH(&context,
-// document.nodes.start[mapping-1].data.mapping.pairs, pair))
-// return 0
-//
-// return 1
-//}
-//
-//
diff --git a/vendor/github.com/coreos/yaml/decode.go b/vendor/github.com/coreos/yaml/decode.go
deleted file mode 100644
index e219d4bd6..000000000
--- a/vendor/github.com/coreos/yaml/decode.go
+++ /dev/null
@@ -1,571 +0,0 @@
-package yaml
-
-import (
- "encoding/base64"
- "fmt"
- "reflect"
- "strconv"
- "time"
-)
-
-const (
- documentNode = 1 << iota
- mappingNode
- sequenceNode
- scalarNode
- aliasNode
-)
-
-type node struct {
- kind int
- line, column int
- tag string
- value string
- implicit bool
- children []*node
- anchors map[string]*node
-}
-
-// ----------------------------------------------------------------------------
-// Parser, produces a node tree out of a libyaml event stream.
-
-type parser struct {
- parser yaml_parser_t
- event yaml_event_t
- doc *node
- transform transformString
-}
-
-func newParser(b []byte, t transformString) *parser {
- p := parser{transform: t}
-
- if !yaml_parser_initialize(&p.parser) {
- panic("Failed to initialize YAML emitter")
- }
-
- if len(b) == 0 {
- b = []byte{'\n'}
- }
-
- yaml_parser_set_input_string(&p.parser, b)
-
- p.skip()
- if p.event.typ != yaml_STREAM_START_EVENT {
- panic("Expected stream start event, got " + strconv.Itoa(int(p.event.typ)))
- }
- p.skip()
- return &p
-}
-
-func (p *parser) destroy() {
- if p.event.typ != yaml_NO_EVENT {
- yaml_event_delete(&p.event)
- }
- yaml_parser_delete(&p.parser)
-}
-
-func (p *parser) skip() {
- if p.event.typ != yaml_NO_EVENT {
- if p.event.typ == yaml_STREAM_END_EVENT {
- fail("Attempted to go past the end of stream. Corrupted value?")
- }
- yaml_event_delete(&p.event)
- }
- if !yaml_parser_parse(&p.parser, &p.event) {
- p.fail()
- }
-}
-
-func (p *parser) fail() {
- var where string
- var line int
- if p.parser.problem_mark.line != 0 {
- line = p.parser.problem_mark.line
- } else if p.parser.context_mark.line != 0 {
- line = p.parser.context_mark.line
- }
- if line != 0 {
- where = "line " + strconv.Itoa(line) + ": "
- }
- var msg string
- if len(p.parser.problem) > 0 {
- msg = p.parser.problem
- } else {
- msg = "Unknown problem parsing YAML content"
- }
- fail(where + msg)
-}
-
-func (p *parser) anchor(n *node, anchor []byte) {
- if anchor != nil {
- p.doc.anchors[string(anchor)] = n
- }
-}
-
-func (p *parser) parse() *node {
- switch p.event.typ {
- case yaml_SCALAR_EVENT:
- return p.scalar()
- case yaml_ALIAS_EVENT:
- return p.alias()
- case yaml_MAPPING_START_EVENT:
- return p.mapping()
- case yaml_SEQUENCE_START_EVENT:
- return p.sequence()
- case yaml_DOCUMENT_START_EVENT:
- return p.document()
- case yaml_STREAM_END_EVENT:
- // Happens when attempting to decode an empty buffer.
- return nil
- default:
- panic("Attempted to parse unknown event: " + strconv.Itoa(int(p.event.typ)))
- }
- panic("unreachable")
-}
-
-func (p *parser) node(kind int) *node {
- return &node{
- kind: kind,
- line: p.event.start_mark.line,
- column: p.event.start_mark.column,
- }
-}
-
-func (p *parser) document() *node {
- n := p.node(documentNode)
- n.anchors = make(map[string]*node)
- p.doc = n
- p.skip()
- n.children = append(n.children, p.parse())
- if p.event.typ != yaml_DOCUMENT_END_EVENT {
- panic("Expected end of document event but got " + strconv.Itoa(int(p.event.typ)))
- }
- p.skip()
- return n
-}
-
-func (p *parser) alias() *node {
- n := p.node(aliasNode)
- n.value = string(p.event.anchor)
- p.skip()
- return n
-}
-
-func (p *parser) scalar() *node {
- n := p.node(scalarNode)
- n.value = string(p.event.value)
- n.tag = string(p.event.tag)
- n.implicit = p.event.implicit
- p.anchor(n, p.event.anchor)
- p.skip()
- return n
-}
-
-func (p *parser) sequence() *node {
- n := p.node(sequenceNode)
- p.anchor(n, p.event.anchor)
- p.skip()
- for p.event.typ != yaml_SEQUENCE_END_EVENT {
- n.children = append(n.children, p.parse())
- }
- p.skip()
- return n
-}
-
-func (p *parser) mapping() *node {
- n := p.node(mappingNode)
- p.anchor(n, p.event.anchor)
- p.skip()
- for p.event.typ != yaml_MAPPING_END_EVENT {
- key := p.parse()
- key.value = p.transform(key.value)
- value := p.parse()
- n.children = append(n.children, key, value)
- }
- p.skip()
- return n
-}
-
-// ----------------------------------------------------------------------------
-// Decoder, unmarshals a node into a provided value.
-
-type decoder struct {
- doc *node
- aliases map[string]bool
-}
-
-func newDecoder() *decoder {
- d := &decoder{}
- d.aliases = make(map[string]bool)
- return d
-}
-
-// d.setter deals with setters and pointer dereferencing and initialization.
-//
-// It's a slightly convoluted case to handle properly:
-//
-// - nil pointers should be initialized, unless being set to nil
-// - we don't know at this point yet what's the value to SetYAML() with.
-// - we can't separate pointer deref/init and setter checking, because
-// a setter may be found while going down a pointer chain.
-//
-// Thus, here is how it takes care of it:
-//
-// - out is provided as a pointer, so that it can be replaced.
-// - when looking at a non-setter ptr, *out=ptr.Elem(), unless tag=!!null
-// - when a setter is found, *out=interface{}, and a set() function is
-// returned to call SetYAML() with the value of *out once it's defined.
-//
-func (d *decoder) setter(tag string, out *reflect.Value, good *bool) (set func()) {
- if (*out).Kind() != reflect.Ptr && (*out).CanAddr() {
- setter, _ := (*out).Addr().Interface().(Setter)
- if setter != nil {
- var arg interface{}
- *out = reflect.ValueOf(&arg).Elem()
- return func() {
- *good = setter.SetYAML(shortTag(tag), arg)
- }
- }
- }
- again := true
- for again {
- again = false
- setter, _ := (*out).Interface().(Setter)
- if tag != yaml_NULL_TAG || setter != nil {
- if pv := (*out); pv.Kind() == reflect.Ptr {
- if pv.IsNil() {
- *out = reflect.New(pv.Type().Elem()).Elem()
- pv.Set((*out).Addr())
- } else {
- *out = pv.Elem()
- }
- setter, _ = pv.Interface().(Setter)
- again = true
- }
- }
- if setter != nil {
- var arg interface{}
- *out = reflect.ValueOf(&arg).Elem()
- return func() {
- *good = setter.SetYAML(shortTag(tag), arg)
- }
- }
- }
- return nil
-}
-
-func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) {
- switch n.kind {
- case documentNode:
- good = d.document(n, out)
- case scalarNode:
- good = d.scalar(n, out)
- case aliasNode:
- good = d.alias(n, out)
- case mappingNode:
- good = d.mapping(n, out)
- case sequenceNode:
- good = d.sequence(n, out)
- default:
- panic("Internal error: unknown node kind: " + strconv.Itoa(n.kind))
- }
- return
-}
-
-func (d *decoder) document(n *node, out reflect.Value) (good bool) {
- if len(n.children) == 1 {
- d.doc = n
- d.unmarshal(n.children[0], out)
- return true
- }
- return false
-}
-
-func (d *decoder) alias(n *node, out reflect.Value) (good bool) {
- an, ok := d.doc.anchors[n.value]
- if !ok {
- fail("Unknown anchor '" + n.value + "' referenced")
- }
- if d.aliases[n.value] {
- fail("Anchor '" + n.value + "' value contains itself")
- }
- d.aliases[n.value] = true
- good = d.unmarshal(an, out)
- delete(d.aliases, n.value)
- return good
-}
-
-var zeroValue reflect.Value
-
-func resetMap(out reflect.Value) {
- for _, k := range out.MapKeys() {
- out.SetMapIndex(k, zeroValue)
- }
-}
-
-var durationType = reflect.TypeOf(time.Duration(0))
-
-func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
- var tag string
- var resolved interface{}
- if n.tag == "" && !n.implicit {
- tag = yaml_STR_TAG
- resolved = n.value
- } else {
- tag, resolved = resolve(n.tag, n.value)
- if tag == yaml_BINARY_TAG {
- data, err := base64.StdEncoding.DecodeString(resolved.(string))
- if err != nil {
- fail("!!binary value contains invalid base64 data")
- }
- resolved = string(data)
- }
- }
- if set := d.setter(tag, &out, &good); set != nil {
- defer set()
- }
- if resolved == nil {
- if out.Kind() == reflect.Map && !out.CanAddr() {
- resetMap(out)
- } else {
- out.Set(reflect.Zero(out.Type()))
- }
- good = true
- return
- }
- switch out.Kind() {
- case reflect.String:
- if tag == yaml_BINARY_TAG {
- out.SetString(resolved.(string))
- good = true
- } else if resolved != nil {
- out.SetString(n.value)
- good = true
- }
- case reflect.Interface:
- if resolved == nil {
- out.Set(reflect.Zero(out.Type()))
- } else {
- out.Set(reflect.ValueOf(resolved))
- }
- good = true
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- switch resolved := resolved.(type) {
- case int:
- if !out.OverflowInt(int64(resolved)) {
- out.SetInt(int64(resolved))
- good = true
- }
- case int64:
- if !out.OverflowInt(resolved) {
- out.SetInt(resolved)
- good = true
- }
- case float64:
- if resolved < 1<<63-1 && !out.OverflowInt(int64(resolved)) {
- out.SetInt(int64(resolved))
- good = true
- }
- case string:
- if out.Type() == durationType {
- d, err := time.ParseDuration(resolved)
- if err == nil {
- out.SetInt(int64(d))
- good = true
- }
- }
- }
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
- switch resolved := resolved.(type) {
- case int:
- if resolved >= 0 {
- out.SetUint(uint64(resolved))
- good = true
- }
- case int64:
- if resolved >= 0 {
- out.SetUint(uint64(resolved))
- good = true
- }
- case float64:
- if resolved < 1<<64-1 && !out.OverflowUint(uint64(resolved)) {
- out.SetUint(uint64(resolved))
- good = true
- }
- }
- case reflect.Bool:
- switch resolved := resolved.(type) {
- case bool:
- out.SetBool(resolved)
- good = true
- }
- case reflect.Float32, reflect.Float64:
- switch resolved := resolved.(type) {
- case int:
- out.SetFloat(float64(resolved))
- good = true
- case int64:
- out.SetFloat(float64(resolved))
- good = true
- case float64:
- out.SetFloat(resolved)
- good = true
- }
- case reflect.Ptr:
- if out.Type().Elem() == reflect.TypeOf(resolved) {
- elem := reflect.New(out.Type().Elem())
- elem.Elem().Set(reflect.ValueOf(resolved))
- out.Set(elem)
- good = true
- }
- }
- return good
-}
-
-func settableValueOf(i interface{}) reflect.Value {
- v := reflect.ValueOf(i)
- sv := reflect.New(v.Type()).Elem()
- sv.Set(v)
- return sv
-}
-
-func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
- if set := d.setter(yaml_SEQ_TAG, &out, &good); set != nil {
- defer set()
- }
- var iface reflect.Value
- if out.Kind() == reflect.Interface {
- // No type hints. Will have to use a generic sequence.
- iface = out
- out = settableValueOf(make([]interface{}, 0))
- }
-
- if out.Kind() != reflect.Slice {
- return false
- }
- et := out.Type().Elem()
-
- l := len(n.children)
- for i := 0; i < l; i++ {
- e := reflect.New(et).Elem()
- if ok := d.unmarshal(n.children[i], e); ok {
- out.Set(reflect.Append(out, e))
- }
- }
- if iface.IsValid() {
- iface.Set(out)
- }
- return true
-}
-
-func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
- if set := d.setter(yaml_MAP_TAG, &out, &good); set != nil {
- defer set()
- }
- if out.Kind() == reflect.Struct {
- return d.mappingStruct(n, out)
- }
-
- if out.Kind() == reflect.Interface {
- // No type hints. Will have to use a generic map.
- iface := out
- out = settableValueOf(make(map[interface{}]interface{}))
- iface.Set(out)
- }
-
- if out.Kind() != reflect.Map {
- return false
- }
- outt := out.Type()
- kt := outt.Key()
- et := outt.Elem()
-
- if out.IsNil() {
- out.Set(reflect.MakeMap(outt))
- }
- l := len(n.children)
- for i := 0; i < l; i += 2 {
- if isMerge(n.children[i]) {
- d.merge(n.children[i+1], out)
- continue
- }
- k := reflect.New(kt).Elem()
- if d.unmarshal(n.children[i], k) {
- kkind := k.Kind()
- if kkind == reflect.Interface {
- kkind = k.Elem().Kind()
- }
- if kkind == reflect.Map || kkind == reflect.Slice {
- fail(fmt.Sprintf("invalid map key: %#v", k.Interface()))
- }
- e := reflect.New(et).Elem()
- if d.unmarshal(n.children[i+1], e) {
- out.SetMapIndex(k, e)
- }
- }
- }
- return true
-}
-
-func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
- sinfo, err := getStructInfo(out.Type())
- if err != nil {
- panic(err)
- }
- name := settableValueOf("")
- l := len(n.children)
- for i := 0; i < l; i += 2 {
- ni := n.children[i]
- if isMerge(ni) {
- d.merge(n.children[i+1], out)
- continue
- }
- if !d.unmarshal(ni, name) {
- continue
- }
- if info, ok := sinfo.FieldsMap[name.String()]; ok {
- var field reflect.Value
- if info.Inline == nil {
- field = out.Field(info.Num)
- } else {
- field = out.FieldByIndex(info.Inline)
- }
- d.unmarshal(n.children[i+1], field)
- }
- }
- return true
-}
-
-func (d *decoder) merge(n *node, out reflect.Value) {
- const wantMap = "map merge requires map or sequence of maps as the value"
- switch n.kind {
- case mappingNode:
- d.unmarshal(n, out)
- case aliasNode:
- an, ok := d.doc.anchors[n.value]
- if ok && an.kind != mappingNode {
- fail(wantMap)
- }
- d.unmarshal(n, out)
- case sequenceNode:
- // Step backwards as earlier nodes take precedence.
- for i := len(n.children) - 1; i >= 0; i-- {
- ni := n.children[i]
- if ni.kind == aliasNode {
- an, ok := d.doc.anchors[ni.value]
- if ok && an.kind != mappingNode {
- fail(wantMap)
- }
- } else if ni.kind != mappingNode {
- fail(wantMap)
- }
- d.unmarshal(ni, out)
- }
- default:
- fail(wantMap)
- }
-}
-
-func isMerge(n *node) bool {
- return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG)
-}
diff --git a/vendor/github.com/coreos/yaml/emitterc.go b/vendor/github.com/coreos/yaml/emitterc.go
deleted file mode 100644
index 9b3dc4a43..000000000
--- a/vendor/github.com/coreos/yaml/emitterc.go
+++ /dev/null
@@ -1,1685 +0,0 @@
-package yaml
-
-import (
- "bytes"
-)
-
-// Flush the buffer if needed.
-func flush(emitter *yaml_emitter_t) bool {
- if emitter.buffer_pos+5 >= len(emitter.buffer) {
- return yaml_emitter_flush(emitter)
- }
- return true
-}
-
-// Put a character to the output buffer.
-func put(emitter *yaml_emitter_t, value byte) bool {
- if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) {
- return false
- }
- emitter.buffer[emitter.buffer_pos] = value
- emitter.buffer_pos++
- emitter.column++
- return true
-}
-
-// Put a line break to the output buffer.
-func put_break(emitter *yaml_emitter_t) bool {
- if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) {
- return false
- }
- switch emitter.line_break {
- case yaml_CR_BREAK:
- emitter.buffer[emitter.buffer_pos] = '\r'
- emitter.buffer_pos += 1
- case yaml_LN_BREAK:
- emitter.buffer[emitter.buffer_pos] = '\n'
- emitter.buffer_pos += 1
- case yaml_CRLN_BREAK:
- emitter.buffer[emitter.buffer_pos+0] = '\r'
- emitter.buffer[emitter.buffer_pos+1] = '\n'
- emitter.buffer_pos += 2
- default:
- panic("unknown line break setting")
- }
- emitter.column = 0
- emitter.line++
- return true
-}
-
-// Copy a character from a string into buffer.
-func write(emitter *yaml_emitter_t, s []byte, i *int) bool {
- if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) {
- return false
- }
- p := emitter.buffer_pos
- w := width(s[*i])
- switch w {
- case 4:
- emitter.buffer[p+3] = s[*i+3]
- fallthrough
- case 3:
- emitter.buffer[p+2] = s[*i+2]
- fallthrough
- case 2:
- emitter.buffer[p+1] = s[*i+1]
- fallthrough
- case 1:
- emitter.buffer[p+0] = s[*i+0]
- default:
- panic("unknown character width")
- }
- emitter.column++
- emitter.buffer_pos += w
- *i += w
- return true
-}
-
-// Write a whole string into buffer.
-func write_all(emitter *yaml_emitter_t, s []byte) bool {
- for i := 0; i < len(s); {
- if !write(emitter, s, &i) {
- return false
- }
- }
- return true
-}
-
-// Copy a line break character from a string into buffer.
-func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool {
- if s[*i] == '\n' {
- if !put_break(emitter) {
- return false
- }
- *i++
- } else {
- if !write(emitter, s, i) {
- return false
- }
- emitter.column = 0
- emitter.line++
- }
- return true
-}
-
-// Set an emitter error and return false.
-func yaml_emitter_set_emitter_error(emitter *yaml_emitter_t, problem string) bool {
- emitter.error = yaml_EMITTER_ERROR
- emitter.problem = problem
- return false
-}
-
-// Emit an event.
-func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool {
- emitter.events = append(emitter.events, *event)
- for !yaml_emitter_need_more_events(emitter) {
- event := &emitter.events[emitter.events_head]
- if !yaml_emitter_analyze_event(emitter, event) {
- return false
- }
- if !yaml_emitter_state_machine(emitter, event) {
- return false
- }
- yaml_event_delete(event)
- emitter.events_head++
- }
- return true
-}
-
-// Check if we need to accumulate more events before emitting.
-//
-// We accumulate extra
-// - 1 event for DOCUMENT-START
-// - 2 events for SEQUENCE-START
-// - 3 events for MAPPING-START
-//
-func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool {
- if emitter.events_head == len(emitter.events) {
- return true
- }
- var accumulate int
- switch emitter.events[emitter.events_head].typ {
- case yaml_DOCUMENT_START_EVENT:
- accumulate = 1
- break
- case yaml_SEQUENCE_START_EVENT:
- accumulate = 2
- break
- case yaml_MAPPING_START_EVENT:
- accumulate = 3
- break
- default:
- return false
- }
- if len(emitter.events)-emitter.events_head > accumulate {
- return false
- }
- var level int
- for i := emitter.events_head; i < len(emitter.events); i++ {
- switch emitter.events[i].typ {
- case yaml_STREAM_START_EVENT, yaml_DOCUMENT_START_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT:
- level++
- case yaml_STREAM_END_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_END_EVENT, yaml_MAPPING_END_EVENT:
- level--
- }
- if level == 0 {
- return false
- }
- }
- return true
-}
-
-// Append a directive to the directives stack.
-func yaml_emitter_append_tag_directive(emitter *yaml_emitter_t, value *yaml_tag_directive_t, allow_duplicates bool) bool {
- for i := 0; i < len(emitter.tag_directives); i++ {
- if bytes.Equal(value.handle, emitter.tag_directives[i].handle) {
- if allow_duplicates {
- return true
- }
- return yaml_emitter_set_emitter_error(emitter, "duplicate %TAG directive")
- }
- }
-
- // [Go] Do we actually need to copy this given garbage collection
- // and the lack of deallocating destructors?
- tag_copy := yaml_tag_directive_t{
- handle: make([]byte, len(value.handle)),
- prefix: make([]byte, len(value.prefix)),
- }
- copy(tag_copy.handle, value.handle)
- copy(tag_copy.prefix, value.prefix)
- emitter.tag_directives = append(emitter.tag_directives, tag_copy)
- return true
-}
-
-// Increase the indentation level.
-func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool) bool {
- emitter.indents = append(emitter.indents, emitter.indent)
- if emitter.indent < 0 {
- if flow {
- emitter.indent = emitter.best_indent
- } else {
- emitter.indent = 0
- }
- } else if !indentless {
- emitter.indent += emitter.best_indent
- }
- return true
-}
-
-// State dispatcher.
-func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bool {
- switch emitter.state {
- default:
- case yaml_EMIT_STREAM_START_STATE:
- return yaml_emitter_emit_stream_start(emitter, event)
-
- case yaml_EMIT_FIRST_DOCUMENT_START_STATE:
- return yaml_emitter_emit_document_start(emitter, event, true)
-
- case yaml_EMIT_DOCUMENT_START_STATE:
- return yaml_emitter_emit_document_start(emitter, event, false)
-
- case yaml_EMIT_DOCUMENT_CONTENT_STATE:
- return yaml_emitter_emit_document_content(emitter, event)
-
- case yaml_EMIT_DOCUMENT_END_STATE:
- return yaml_emitter_emit_document_end(emitter, event)
-
- case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE:
- return yaml_emitter_emit_flow_sequence_item(emitter, event, true)
-
- case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE:
- return yaml_emitter_emit_flow_sequence_item(emitter, event, false)
-
- case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE:
- return yaml_emitter_emit_flow_mapping_key(emitter, event, true)
-
- case yaml_EMIT_FLOW_MAPPING_KEY_STATE:
- return yaml_emitter_emit_flow_mapping_key(emitter, event, false)
-
- case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE:
- return yaml_emitter_emit_flow_mapping_value(emitter, event, true)
-
- case yaml_EMIT_FLOW_MAPPING_VALUE_STATE:
- return yaml_emitter_emit_flow_mapping_value(emitter, event, false)
-
- case yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE:
- return yaml_emitter_emit_block_sequence_item(emitter, event, true)
-
- case yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE:
- return yaml_emitter_emit_block_sequence_item(emitter, event, false)
-
- case yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE:
- return yaml_emitter_emit_block_mapping_key(emitter, event, true)
-
- case yaml_EMIT_BLOCK_MAPPING_KEY_STATE:
- return yaml_emitter_emit_block_mapping_key(emitter, event, false)
-
- case yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE:
- return yaml_emitter_emit_block_mapping_value(emitter, event, true)
-
- case yaml_EMIT_BLOCK_MAPPING_VALUE_STATE:
- return yaml_emitter_emit_block_mapping_value(emitter, event, false)
-
- case yaml_EMIT_END_STATE:
- return yaml_emitter_set_emitter_error(emitter, "expected nothing after STREAM-END")
- }
- panic("invalid emitter state")
-}
-
-// Expect STREAM-START.
-func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t) bool {
- if event.typ != yaml_STREAM_START_EVENT {
- return yaml_emitter_set_emitter_error(emitter, "expected STREAM-START")
- }
- if emitter.encoding == yaml_ANY_ENCODING {
- emitter.encoding = event.encoding
- if emitter.encoding == yaml_ANY_ENCODING {
- emitter.encoding = yaml_UTF8_ENCODING
- }
- }
- if emitter.best_indent < 2 || emitter.best_indent > 9 {
- emitter.best_indent = 2
- }
- if emitter.best_width >= 0 && emitter.best_width <= emitter.best_indent*2 {
- emitter.best_width = 80
- }
- if emitter.best_width < 0 {
- emitter.best_width = 1<<31 - 1
- }
- if emitter.line_break == yaml_ANY_BREAK {
- emitter.line_break = yaml_LN_BREAK
- }
-
- emitter.indent = -1
- emitter.line = 0
- emitter.column = 0
- emitter.whitespace = true
- emitter.indention = true
-
- if emitter.encoding != yaml_UTF8_ENCODING {
- if !yaml_emitter_write_bom(emitter) {
- return false
- }
- }
- emitter.state = yaml_EMIT_FIRST_DOCUMENT_START_STATE
- return true
-}
-
-// Expect DOCUMENT-START or STREAM-END.
-func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
-
- if event.typ == yaml_DOCUMENT_START_EVENT {
-
- if event.version_directive != nil {
- if !yaml_emitter_analyze_version_directive(emitter, event.version_directive) {
- return false
- }
- }
-
- for i := 0; i < len(event.tag_directives); i++ {
- tag_directive := &event.tag_directives[i]
- if !yaml_emitter_analyze_tag_directive(emitter, tag_directive) {
- return false
- }
- if !yaml_emitter_append_tag_directive(emitter, tag_directive, false) {
- return false
- }
- }
-
- for i := 0; i < len(default_tag_directives); i++ {
- tag_directive := &default_tag_directives[i]
- if !yaml_emitter_append_tag_directive(emitter, tag_directive, true) {
- return false
- }
- }
-
- implicit := event.implicit
- if !first || emitter.canonical {
- implicit = false
- }
-
- if emitter.open_ended && (event.version_directive != nil || len(event.tag_directives) > 0) {
- if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
- return false
- }
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
-
- if event.version_directive != nil {
- implicit = false
- if !yaml_emitter_write_indicator(emitter, []byte("%YAML"), true, false, false) {
- return false
- }
- if !yaml_emitter_write_indicator(emitter, []byte("1.1"), true, false, false) {
- return false
- }
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
-
- if len(event.tag_directives) > 0 {
- implicit = false
- for i := 0; i < len(event.tag_directives); i++ {
- tag_directive := &event.tag_directives[i]
- if !yaml_emitter_write_indicator(emitter, []byte("%TAG"), true, false, false) {
- return false
- }
- if !yaml_emitter_write_tag_handle(emitter, tag_directive.handle) {
- return false
- }
- if !yaml_emitter_write_tag_content(emitter, tag_directive.prefix, true) {
- return false
- }
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
- }
-
- if yaml_emitter_check_empty_document(emitter) {
- implicit = false
- }
- if !implicit {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) {
- return false
- }
- if emitter.canonical {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
- }
-
- emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE
- return true
- }
-
- if event.typ == yaml_STREAM_END_EVENT {
- if emitter.open_ended {
- if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
- return false
- }
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
- if !yaml_emitter_flush(emitter) {
- return false
- }
- emitter.state = yaml_EMIT_END_STATE
- return true
- }
-
- return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-START or STREAM-END")
-}
-
-// Expect the root node.
-func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool {
- emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE)
- return yaml_emitter_emit_node(emitter, event, true, false, false, false)
-}
-
-// Expect DOCUMENT-END.
-func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t) bool {
- if event.typ != yaml_DOCUMENT_END_EVENT {
- return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END")
- }
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- if !event.implicit {
- // [Go] Allocate the slice elsewhere.
- if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
- return false
- }
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
- if !yaml_emitter_flush(emitter) {
- return false
- }
- emitter.state = yaml_EMIT_DOCUMENT_START_STATE
- emitter.tag_directives = emitter.tag_directives[:0]
- return true
-}
-
-// Expect a flow item node.
-func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
- if first {
- if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) {
- return false
- }
- if !yaml_emitter_increase_indent(emitter, true, false) {
- return false
- }
- emitter.flow_level++
- }
-
- if event.typ == yaml_SEQUENCE_END_EVENT {
- emitter.flow_level--
- emitter.indent = emitter.indents[len(emitter.indents)-1]
- emitter.indents = emitter.indents[:len(emitter.indents)-1]
- if emitter.canonical && !first {
- if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
- return false
- }
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
- if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) {
- return false
- }
- emitter.state = emitter.states[len(emitter.states)-1]
- emitter.states = emitter.states[:len(emitter.states)-1]
-
- return true
- }
-
- if !first {
- if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
- return false
- }
- }
-
- if emitter.canonical || emitter.column > emitter.best_width {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
- emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE)
- return yaml_emitter_emit_node(emitter, event, false, true, false, false)
-}
-
-// Expect a flow key node.
-func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
- if first {
- if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) {
- return false
- }
- if !yaml_emitter_increase_indent(emitter, true, false) {
- return false
- }
- emitter.flow_level++
- }
-
- if event.typ == yaml_MAPPING_END_EVENT {
- emitter.flow_level--
- emitter.indent = emitter.indents[len(emitter.indents)-1]
- emitter.indents = emitter.indents[:len(emitter.indents)-1]
- if emitter.canonical && !first {
- if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
- return false
- }
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
- if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) {
- return false
- }
- emitter.state = emitter.states[len(emitter.states)-1]
- emitter.states = emitter.states[:len(emitter.states)-1]
- return true
- }
-
- if !first {
- if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
- return false
- }
- }
- if emitter.canonical || emitter.column > emitter.best_width {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
-
- if !emitter.canonical && yaml_emitter_check_simple_key(emitter) {
- emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE)
- return yaml_emitter_emit_node(emitter, event, false, false, true, true)
- }
- if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, false) {
- return false
- }
- emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_VALUE_STATE)
- return yaml_emitter_emit_node(emitter, event, false, false, true, false)
-}
-
-// Expect a flow value node.
-func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool {
- if simple {
- if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) {
- return false
- }
- } else {
- if emitter.canonical || emitter.column > emitter.best_width {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
- if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, false) {
- return false
- }
- }
- emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE)
- return yaml_emitter_emit_node(emitter, event, false, false, true, false)
-}
-
-// Expect a block item node.
-func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
- if first {
- if !yaml_emitter_increase_indent(emitter, false, emitter.mapping_context && !emitter.indention) {
- return false
- }
- }
- if event.typ == yaml_SEQUENCE_END_EVENT {
- emitter.indent = emitter.indents[len(emitter.indents)-1]
- emitter.indents = emitter.indents[:len(emitter.indents)-1]
- emitter.state = emitter.states[len(emitter.states)-1]
- emitter.states = emitter.states[:len(emitter.states)-1]
- return true
- }
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- if !yaml_emitter_write_indicator(emitter, []byte{'-'}, true, false, true) {
- return false
- }
- emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE)
- return yaml_emitter_emit_node(emitter, event, false, true, false, false)
-}
-
-// Expect a block key node.
-func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
- if first {
- if !yaml_emitter_increase_indent(emitter, false, false) {
- return false
- }
- }
- if event.typ == yaml_MAPPING_END_EVENT {
- emitter.indent = emitter.indents[len(emitter.indents)-1]
- emitter.indents = emitter.indents[:len(emitter.indents)-1]
- emitter.state = emitter.states[len(emitter.states)-1]
- emitter.states = emitter.states[:len(emitter.states)-1]
- return true
- }
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- if yaml_emitter_check_simple_key(emitter) {
- emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE)
- return yaml_emitter_emit_node(emitter, event, false, false, true, true)
- }
- if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, true) {
- return false
- }
- emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_VALUE_STATE)
- return yaml_emitter_emit_node(emitter, event, false, false, true, false)
-}
-
-// Expect a block value node.
-func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool {
- if simple {
- if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) {
- return false
- }
- } else {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, true) {
- return false
- }
- }
- emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE)
- return yaml_emitter_emit_node(emitter, event, false, false, true, false)
-}
-
-// Expect a node.
-func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t,
- root bool, sequence bool, mapping bool, simple_key bool) bool {
-
- emitter.root_context = root
- emitter.sequence_context = sequence
- emitter.mapping_context = mapping
- emitter.simple_key_context = simple_key
-
- switch event.typ {
- case yaml_ALIAS_EVENT:
- return yaml_emitter_emit_alias(emitter, event)
- case yaml_SCALAR_EVENT:
- return yaml_emitter_emit_scalar(emitter, event)
- case yaml_SEQUENCE_START_EVENT:
- return yaml_emitter_emit_sequence_start(emitter, event)
- case yaml_MAPPING_START_EVENT:
- return yaml_emitter_emit_mapping_start(emitter, event)
- default:
- return yaml_emitter_set_emitter_error(emitter,
- "expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS")
- }
- return false
-}
-
-// Expect ALIAS.
-func yaml_emitter_emit_alias(emitter *yaml_emitter_t, event *yaml_event_t) bool {
- if !yaml_emitter_process_anchor(emitter) {
- return false
- }
- emitter.state = emitter.states[len(emitter.states)-1]
- emitter.states = emitter.states[:len(emitter.states)-1]
- return true
-}
-
-// Expect SCALAR.
-func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool {
- if !yaml_emitter_select_scalar_style(emitter, event) {
- return false
- }
- if !yaml_emitter_process_anchor(emitter) {
- return false
- }
- if !yaml_emitter_process_tag(emitter) {
- return false
- }
- if !yaml_emitter_increase_indent(emitter, true, false) {
- return false
- }
- if !yaml_emitter_process_scalar(emitter) {
- return false
- }
- emitter.indent = emitter.indents[len(emitter.indents)-1]
- emitter.indents = emitter.indents[:len(emitter.indents)-1]
- emitter.state = emitter.states[len(emitter.states)-1]
- emitter.states = emitter.states[:len(emitter.states)-1]
- return true
-}
-
-// Expect SEQUENCE-START.
-func yaml_emitter_emit_sequence_start(emitter *yaml_emitter_t, event *yaml_event_t) bool {
- if !yaml_emitter_process_anchor(emitter) {
- return false
- }
- if !yaml_emitter_process_tag(emitter) {
- return false
- }
- if emitter.flow_level > 0 || emitter.canonical || event.sequence_style() == yaml_FLOW_SEQUENCE_STYLE ||
- yaml_emitter_check_empty_sequence(emitter) {
- emitter.state = yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE
- } else {
- emitter.state = yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE
- }
- return true
-}
-
-// Expect MAPPING-START.
-func yaml_emitter_emit_mapping_start(emitter *yaml_emitter_t, event *yaml_event_t) bool {
- if !yaml_emitter_process_anchor(emitter) {
- return false
- }
- if !yaml_emitter_process_tag(emitter) {
- return false
- }
- if emitter.flow_level > 0 || emitter.canonical || event.mapping_style() == yaml_FLOW_MAPPING_STYLE ||
- yaml_emitter_check_empty_mapping(emitter) {
- emitter.state = yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE
- } else {
- emitter.state = yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE
- }
- return true
-}
-
-// Check if the document content is an empty scalar.
-func yaml_emitter_check_empty_document(emitter *yaml_emitter_t) bool {
- return false // [Go] Huh?
-}
-
-// Check if the next events represent an empty sequence.
-func yaml_emitter_check_empty_sequence(emitter *yaml_emitter_t) bool {
- if len(emitter.events)-emitter.events_head < 2 {
- return false
- }
- return emitter.events[emitter.events_head].typ == yaml_SEQUENCE_START_EVENT &&
- emitter.events[emitter.events_head+1].typ == yaml_SEQUENCE_END_EVENT
-}
-
-// Check if the next events represent an empty mapping.
-func yaml_emitter_check_empty_mapping(emitter *yaml_emitter_t) bool {
- if len(emitter.events)-emitter.events_head < 2 {
- return false
- }
- return emitter.events[emitter.events_head].typ == yaml_MAPPING_START_EVENT &&
- emitter.events[emitter.events_head+1].typ == yaml_MAPPING_END_EVENT
-}
-
-// Check if the next node can be expressed as a simple key.
-func yaml_emitter_check_simple_key(emitter *yaml_emitter_t) bool {
- length := 0
- switch emitter.events[emitter.events_head].typ {
- case yaml_ALIAS_EVENT:
- length += len(emitter.anchor_data.anchor)
- case yaml_SCALAR_EVENT:
- if emitter.scalar_data.multiline {
- return false
- }
- length += len(emitter.anchor_data.anchor) +
- len(emitter.tag_data.handle) +
- len(emitter.tag_data.suffix) +
- len(emitter.scalar_data.value)
- case yaml_SEQUENCE_START_EVENT:
- if !yaml_emitter_check_empty_sequence(emitter) {
- return false
- }
- length += len(emitter.anchor_data.anchor) +
- len(emitter.tag_data.handle) +
- len(emitter.tag_data.suffix)
- case yaml_MAPPING_START_EVENT:
- if !yaml_emitter_check_empty_mapping(emitter) {
- return false
- }
- length += len(emitter.anchor_data.anchor) +
- len(emitter.tag_data.handle) +
- len(emitter.tag_data.suffix)
- default:
- return false
- }
- return length <= 128
-}
-
-// Determine an acceptable scalar style.
-func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event_t) bool {
-
- no_tag := len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0
- if no_tag && !event.implicit && !event.quoted_implicit {
- return yaml_emitter_set_emitter_error(emitter, "neither tag nor implicit flags are specified")
- }
-
- style := event.scalar_style()
- if style == yaml_ANY_SCALAR_STYLE {
- style = yaml_PLAIN_SCALAR_STYLE
- }
- if emitter.canonical {
- style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
- }
- if emitter.simple_key_context && emitter.scalar_data.multiline {
- style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
- }
-
- if style == yaml_PLAIN_SCALAR_STYLE {
- if emitter.flow_level > 0 && !emitter.scalar_data.flow_plain_allowed ||
- emitter.flow_level == 0 && !emitter.scalar_data.block_plain_allowed {
- style = yaml_SINGLE_QUOTED_SCALAR_STYLE
- }
- if len(emitter.scalar_data.value) == 0 && (emitter.flow_level > 0 || emitter.simple_key_context) {
- style = yaml_SINGLE_QUOTED_SCALAR_STYLE
- }
- if no_tag && !event.implicit {
- style = yaml_SINGLE_QUOTED_SCALAR_STYLE
- }
- }
- if style == yaml_SINGLE_QUOTED_SCALAR_STYLE {
- if !emitter.scalar_data.single_quoted_allowed {
- style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
- }
- }
- if style == yaml_LITERAL_SCALAR_STYLE || style == yaml_FOLDED_SCALAR_STYLE {
- if !emitter.scalar_data.block_allowed || emitter.flow_level > 0 || emitter.simple_key_context {
- style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
- }
- }
-
- if no_tag && !event.quoted_implicit && style != yaml_PLAIN_SCALAR_STYLE {
- emitter.tag_data.handle = []byte{'!'}
- }
- emitter.scalar_data.style = style
- return true
-}
-
-// Write an anchor.
-func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool {
- if emitter.anchor_data.anchor == nil {
- return true
- }
- c := []byte{'&'}
- if emitter.anchor_data.alias {
- c[0] = '*'
- }
- if !yaml_emitter_write_indicator(emitter, c, true, false, false) {
- return false
- }
- return yaml_emitter_write_anchor(emitter, emitter.anchor_data.anchor)
-}
-
-// Write a tag.
-func yaml_emitter_process_tag(emitter *yaml_emitter_t) bool {
- if len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 {
- return true
- }
- if len(emitter.tag_data.handle) > 0 {
- if !yaml_emitter_write_tag_handle(emitter, emitter.tag_data.handle) {
- return false
- }
- if len(emitter.tag_data.suffix) > 0 {
- if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) {
- return false
- }
- }
- } else {
- // [Go] Allocate these slices elsewhere.
- if !yaml_emitter_write_indicator(emitter, []byte("!<"), true, false, false) {
- return false
- }
- if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) {
- return false
- }
- if !yaml_emitter_write_indicator(emitter, []byte{'>'}, false, false, false) {
- return false
- }
- }
- return true
-}
-
-// Write a scalar.
-func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool {
- switch emitter.scalar_data.style {
- case yaml_PLAIN_SCALAR_STYLE:
- return yaml_emitter_write_plain_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context)
-
- case yaml_SINGLE_QUOTED_SCALAR_STYLE:
- return yaml_emitter_write_single_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context)
-
- case yaml_DOUBLE_QUOTED_SCALAR_STYLE:
- return yaml_emitter_write_double_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context)
-
- case yaml_LITERAL_SCALAR_STYLE:
- return yaml_emitter_write_literal_scalar(emitter, emitter.scalar_data.value)
-
- case yaml_FOLDED_SCALAR_STYLE:
- return yaml_emitter_write_folded_scalar(emitter, emitter.scalar_data.value)
- }
- panic("unknown scalar style")
-}
-
-// Check if a %YAML directive is valid.
-func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool {
- if version_directive.major != 1 || version_directive.minor != 1 {
- return yaml_emitter_set_emitter_error(emitter, "incompatible %YAML directive")
- }
- return true
-}
-
-// Check if a %TAG directive is valid.
-func yaml_emitter_analyze_tag_directive(emitter *yaml_emitter_t, tag_directive *yaml_tag_directive_t) bool {
- handle := tag_directive.handle
- prefix := tag_directive.prefix
- if len(handle) == 0 {
- return yaml_emitter_set_emitter_error(emitter, "tag handle must not be empty")
- }
- if handle[0] != '!' {
- return yaml_emitter_set_emitter_error(emitter, "tag handle must start with '!'")
- }
- if handle[len(handle)-1] != '!' {
- return yaml_emitter_set_emitter_error(emitter, "tag handle must end with '!'")
- }
- for i := 1; i < len(handle)-1; i += width(handle[i]) {
- if !is_alpha(handle, i) {
- return yaml_emitter_set_emitter_error(emitter, "tag handle must contain alphanumerical characters only")
- }
- }
- if len(prefix) == 0 {
- return yaml_emitter_set_emitter_error(emitter, "tag prefix must not be empty")
- }
- return true
-}
-
-// Check if an anchor is valid.
-func yaml_emitter_analyze_anchor(emitter *yaml_emitter_t, anchor []byte, alias bool) bool {
- if len(anchor) == 0 {
- problem := "anchor value must not be empty"
- if alias {
- problem = "alias value must not be empty"
- }
- return yaml_emitter_set_emitter_error(emitter, problem)
- }
- for i := 0; i < len(anchor); i += width(anchor[i]) {
- if !is_alpha(anchor, i) {
- problem := "anchor value must contain alphanumerical characters only"
- if alias {
- problem = "alias value must contain alphanumerical characters only"
- }
- return yaml_emitter_set_emitter_error(emitter, problem)
- }
- }
- emitter.anchor_data.anchor = anchor
- emitter.anchor_data.alias = alias
- return true
-}
-
-// Check if a tag is valid.
-func yaml_emitter_analyze_tag(emitter *yaml_emitter_t, tag []byte) bool {
- if len(tag) == 0 {
- return yaml_emitter_set_emitter_error(emitter, "tag value must not be empty")
- }
- for i := 0; i < len(emitter.tag_directives); i++ {
- tag_directive := &emitter.tag_directives[i]
- if bytes.HasPrefix(tag, tag_directive.prefix) {
- emitter.tag_data.handle = tag_directive.handle
- emitter.tag_data.suffix = tag[len(tag_directive.prefix):]
- return true
- }
- }
- emitter.tag_data.suffix = tag
- return true
-}
-
-// Check if a scalar is valid.
-func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool {
- var (
- block_indicators = false
- flow_indicators = false
- line_breaks = false
- special_characters = false
-
- leading_space = false
- leading_break = false
- trailing_space = false
- trailing_break = false
- break_space = false
- space_break = false
-
- preceeded_by_whitespace = false
- followed_by_whitespace = false
- previous_space = false
- previous_break = false
- )
-
- emitter.scalar_data.value = value
-
- if len(value) == 0 {
- emitter.scalar_data.multiline = false
- emitter.scalar_data.flow_plain_allowed = false
- emitter.scalar_data.block_plain_allowed = true
- emitter.scalar_data.single_quoted_allowed = true
- emitter.scalar_data.block_allowed = false
- return true
- }
-
- if len(value) >= 3 && ((value[0] == '-' && value[1] == '-' && value[2] == '-') || (value[0] == '.' && value[1] == '.' && value[2] == '.')) {
- block_indicators = true
- flow_indicators = true
- }
-
- preceeded_by_whitespace = true
- for i, w := 0, 0; i < len(value); i += w {
- w = width(value[0])
- followed_by_whitespace = i+w >= len(value) || is_blank(value, i+w)
-
- if i == 0 {
- switch value[i] {
- case '#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`':
- flow_indicators = true
- block_indicators = true
- case '?', ':':
- flow_indicators = true
- if followed_by_whitespace {
- block_indicators = true
- }
- case '-':
- if followed_by_whitespace {
- flow_indicators = true
- block_indicators = true
- }
- }
- } else {
- switch value[i] {
- case ',', '?', '[', ']', '{', '}':
- flow_indicators = true
- case ':':
- flow_indicators = true
- if followed_by_whitespace {
- block_indicators = true
- }
- case '#':
- if preceeded_by_whitespace {
- flow_indicators = true
- block_indicators = true
- }
- }
- }
-
- if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode {
- special_characters = true
- }
- if is_space(value, i) {
- if i == 0 {
- leading_space = true
- }
- if i+width(value[i]) == len(value) {
- trailing_space = true
- }
- if previous_break {
- break_space = true
- }
- previous_space = true
- previous_break = false
- } else if is_break(value, i) {
- line_breaks = true
- if i == 0 {
- leading_break = true
- }
- if i+width(value[i]) == len(value) {
- trailing_break = true
- }
- if previous_space {
- space_break = true
- }
- previous_space = false
- previous_break = true
- } else {
- previous_space = false
- previous_break = false
- }
-
- // [Go]: Why 'z'? Couldn't be the end of the string as that's the loop condition.
- preceeded_by_whitespace = is_blankz(value, i)
- }
-
- emitter.scalar_data.multiline = line_breaks
- emitter.scalar_data.flow_plain_allowed = true
- emitter.scalar_data.block_plain_allowed = true
- emitter.scalar_data.single_quoted_allowed = true
- emitter.scalar_data.block_allowed = true
-
- if leading_space || leading_break || trailing_space || trailing_break {
- emitter.scalar_data.flow_plain_allowed = false
- emitter.scalar_data.block_plain_allowed = false
- }
- if trailing_space {
- emitter.scalar_data.block_allowed = false
- }
- if break_space {
- emitter.scalar_data.flow_plain_allowed = false
- emitter.scalar_data.block_plain_allowed = false
- emitter.scalar_data.single_quoted_allowed = false
- }
- if space_break || special_characters {
- emitter.scalar_data.flow_plain_allowed = false
- emitter.scalar_data.block_plain_allowed = false
- emitter.scalar_data.single_quoted_allowed = false
- emitter.scalar_data.block_allowed = false
- }
- if line_breaks {
- emitter.scalar_data.flow_plain_allowed = false
- emitter.scalar_data.block_plain_allowed = false
- }
- if flow_indicators {
- emitter.scalar_data.flow_plain_allowed = false
- }
- if block_indicators {
- emitter.scalar_data.block_plain_allowed = false
- }
- return true
-}
-
-// Check if the event data is valid.
-func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bool {
-
- emitter.anchor_data.anchor = nil
- emitter.tag_data.handle = nil
- emitter.tag_data.suffix = nil
- emitter.scalar_data.value = nil
-
- switch event.typ {
- case yaml_ALIAS_EVENT:
- if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) {
- return false
- }
-
- case yaml_SCALAR_EVENT:
- if len(event.anchor) > 0 {
- if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) {
- return false
- }
- }
- if len(event.tag) > 0 && (emitter.canonical || (!event.implicit && !event.quoted_implicit)) {
- if !yaml_emitter_analyze_tag(emitter, event.tag) {
- return false
- }
- }
- if !yaml_emitter_analyze_scalar(emitter, event.value) {
- return false
- }
-
- case yaml_SEQUENCE_START_EVENT:
- if len(event.anchor) > 0 {
- if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) {
- return false
- }
- }
- if len(event.tag) > 0 && (emitter.canonical || !event.implicit) {
- if !yaml_emitter_analyze_tag(emitter, event.tag) {
- return false
- }
- }
-
- case yaml_MAPPING_START_EVENT:
- if len(event.anchor) > 0 {
- if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) {
- return false
- }
- }
- if len(event.tag) > 0 && (emitter.canonical || !event.implicit) {
- if !yaml_emitter_analyze_tag(emitter, event.tag) {
- return false
- }
- }
- }
- return true
-}
-
-// Write the BOM character.
-func yaml_emitter_write_bom(emitter *yaml_emitter_t) bool {
- if !flush(emitter) {
- return false
- }
- pos := emitter.buffer_pos
- emitter.buffer[pos+0] = '\xEF'
- emitter.buffer[pos+1] = '\xBB'
- emitter.buffer[pos+2] = '\xBF'
- emitter.buffer_pos += 3
- return true
-}
-
-func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool {
- indent := emitter.indent
- if indent < 0 {
- indent = 0
- }
- if !emitter.indention || emitter.column > indent || (emitter.column == indent && !emitter.whitespace) {
- if !put_break(emitter) {
- return false
- }
- }
- for emitter.column < indent {
- if !put(emitter, ' ') {
- return false
- }
- }
- emitter.whitespace = true
- emitter.indention = true
- return true
-}
-
-func yaml_emitter_write_indicator(emitter *yaml_emitter_t, indicator []byte, need_whitespace, is_whitespace, is_indention bool) bool {
- if need_whitespace && !emitter.whitespace {
- if !put(emitter, ' ') {
- return false
- }
- }
- if !write_all(emitter, indicator) {
- return false
- }
- emitter.whitespace = is_whitespace
- emitter.indention = (emitter.indention && is_indention)
- emitter.open_ended = false
- return true
-}
-
-func yaml_emitter_write_anchor(emitter *yaml_emitter_t, value []byte) bool {
- if !write_all(emitter, value) {
- return false
- }
- emitter.whitespace = false
- emitter.indention = false
- return true
-}
-
-func yaml_emitter_write_tag_handle(emitter *yaml_emitter_t, value []byte) bool {
- if !emitter.whitespace {
- if !put(emitter, ' ') {
- return false
- }
- }
- if !write_all(emitter, value) {
- return false
- }
- emitter.whitespace = false
- emitter.indention = false
- return true
-}
-
-func yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, need_whitespace bool) bool {
- if need_whitespace && !emitter.whitespace {
- if !put(emitter, ' ') {
- return false
- }
- }
- for i := 0; i < len(value); {
- var must_write bool
- switch value[i] {
- case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\'', '(', ')', '[', ']':
- must_write = true
- default:
- must_write = is_alpha(value, i)
- }
- if must_write {
- if !write(emitter, value, &i) {
- return false
- }
- } else {
- w := width(value[i])
- for k := 0; k < w; k++ {
- octet := value[i]
- i++
- if !put(emitter, '%') {
- return false
- }
-
- c := octet >> 4
- if c < 10 {
- c += '0'
- } else {
- c += 'A' - 10
- }
- if !put(emitter, c) {
- return false
- }
-
- c = octet & 0x0f
- if c < 10 {
- c += '0'
- } else {
- c += 'A' - 10
- }
- if !put(emitter, c) {
- return false
- }
- }
- }
- }
- emitter.whitespace = false
- emitter.indention = false
- return true
-}
-
-func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool {
- if !emitter.whitespace {
- if !put(emitter, ' ') {
- return false
- }
- }
-
- spaces := false
- breaks := false
- for i := 0; i < len(value); {
- if is_space(value, i) {
- if allow_breaks && !spaces && emitter.column > emitter.best_width && !is_space(value, i+1) {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- i += width(value[i])
- } else {
- if !write(emitter, value, &i) {
- return false
- }
- }
- spaces = true
- } else if is_break(value, i) {
- if !breaks && value[i] == '\n' {
- if !put_break(emitter) {
- return false
- }
- }
- if !write_break(emitter, value, &i) {
- return false
- }
- emitter.indention = true
- breaks = true
- } else {
- if breaks {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
- if !write(emitter, value, &i) {
- return false
- }
- emitter.indention = false
- spaces = false
- breaks = false
- }
- }
-
- emitter.whitespace = false
- emitter.indention = false
- if emitter.root_context {
- emitter.open_ended = true
- }
-
- return true
-}
-
-func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool {
-
- if !yaml_emitter_write_indicator(emitter, []byte{'\''}, true, false, false) {
- return false
- }
-
- spaces := false
- breaks := false
- for i := 0; i < len(value); {
- if is_space(value, i) {
- if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 && !is_space(value, i+1) {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- i += width(value[i])
- } else {
- if !write(emitter, value, &i) {
- return false
- }
- }
- spaces = true
- } else if is_break(value, i) {
- if !breaks && value[i] == '\n' {
- if !put_break(emitter) {
- return false
- }
- }
- if !write_break(emitter, value, &i) {
- return false
- }
- emitter.indention = true
- breaks = true
- } else {
- if breaks {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
- if value[i] == '\'' {
- if !put(emitter, '\'') {
- return false
- }
- }
- if !write(emitter, value, &i) {
- return false
- }
- emitter.indention = false
- spaces = false
- breaks = false
- }
- }
- if !yaml_emitter_write_indicator(emitter, []byte{'\''}, false, false, false) {
- return false
- }
- emitter.whitespace = false
- emitter.indention = false
- return true
-}
-
-func yaml_emitter_write_double_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool {
- spaces := false
- if !yaml_emitter_write_indicator(emitter, []byte{'"'}, true, false, false) {
- return false
- }
-
- for i := 0; i < len(value); {
- if !is_printable(value, i) || (!emitter.unicode && !is_ascii(value, i)) ||
- is_bom(value, i) || is_break(value, i) ||
- value[i] == '"' || value[i] == '\\' {
-
- octet := value[i]
-
- var w int
- var v rune
- switch {
- case octet&0x80 == 0x00:
- w, v = 1, rune(octet&0x7F)
- case octet&0xE0 == 0xC0:
- w, v = 2, rune(octet&0x1F)
- case octet&0xF0 == 0xE0:
- w, v = 3, rune(octet&0x0F)
- case octet&0xF8 == 0xF0:
- w, v = 4, rune(octet&0x07)
- }
- for k := 1; k < w; k++ {
- octet = value[i+k]
- v = (v << 6) + (rune(octet) & 0x3F)
- }
- i += w
-
- if !put(emitter, '\\') {
- return false
- }
-
- var ok bool
- switch v {
- case 0x00:
- ok = put(emitter, '0')
- case 0x07:
- ok = put(emitter, 'a')
- case 0x08:
- ok = put(emitter, 'b')
- case 0x09:
- ok = put(emitter, 't')
- case 0x0A:
- ok = put(emitter, 'n')
- case 0x0b:
- ok = put(emitter, 'v')
- case 0x0c:
- ok = put(emitter, 'f')
- case 0x0d:
- ok = put(emitter, 'r')
- case 0x1b:
- ok = put(emitter, 'e')
- case 0x22:
- ok = put(emitter, '"')
- case 0x5c:
- ok = put(emitter, '\\')
- case 0x85:
- ok = put(emitter, 'N')
- case 0xA0:
- ok = put(emitter, '_')
- case 0x2028:
- ok = put(emitter, 'L')
- case 0x2029:
- ok = put(emitter, 'P')
- default:
- if v <= 0xFF {
- ok = put(emitter, 'x')
- w = 2
- } else if v <= 0xFFFF {
- ok = put(emitter, 'u')
- w = 4
- } else {
- ok = put(emitter, 'U')
- w = 8
- }
- for k := (w - 1) * 4; ok && k >= 0; k -= 4 {
- digit := byte((v >> uint(k)) & 0x0F)
- if digit < 10 {
- ok = put(emitter, digit+'0')
- } else {
- ok = put(emitter, digit+'A'-10)
- }
- }
- }
- if !ok {
- return false
- }
- spaces = false
- } else if is_space(value, i) {
- if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- if is_space(value, i+1) {
- if !put(emitter, '\\') {
- return false
- }
- }
- i += width(value[i])
- } else if !write(emitter, value, &i) {
- return false
- }
- spaces = true
- } else {
- if !write(emitter, value, &i) {
- return false
- }
- spaces = false
- }
- }
- if !yaml_emitter_write_indicator(emitter, []byte{'"'}, false, false, false) {
- return false
- }
- emitter.whitespace = false
- emitter.indention = false
- return true
-}
-
-func yaml_emitter_write_block_scalar_hints(emitter *yaml_emitter_t, value []byte) bool {
- if is_space(value, 0) || is_break(value, 0) {
- indent_hint := []byte{'0' + byte(emitter.best_indent)}
- if !yaml_emitter_write_indicator(emitter, indent_hint, false, false, false) {
- return false
- }
- }
-
- emitter.open_ended = false
-
- var chomp_hint [1]byte
- if len(value) == 0 {
- chomp_hint[0] = '-'
- } else {
- i := len(value) - 1
- for value[i]&0xC0 == 0x80 {
- i--
- }
- if !is_break(value, i) {
- chomp_hint[0] = '-'
- } else if i == 0 {
- chomp_hint[0] = '+'
- emitter.open_ended = true
- } else {
- i--
- for value[i]&0xC0 == 0x80 {
- i--
- }
- if is_break(value, i) {
- chomp_hint[0] = '+'
- emitter.open_ended = true
- }
- }
- }
- if chomp_hint[0] != 0 {
- if !yaml_emitter_write_indicator(emitter, chomp_hint[:], false, false, false) {
- return false
- }
- }
- return true
-}
-
-func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bool {
- if !yaml_emitter_write_indicator(emitter, []byte{'|'}, true, false, false) {
- return false
- }
- if !yaml_emitter_write_block_scalar_hints(emitter, value) {
- return false
- }
- if !put_break(emitter) {
- return false
- }
- emitter.indention = true
- emitter.whitespace = true
- breaks := true
- for i := 0; i < len(value); {
- if is_break(value, i) {
- if !write_break(emitter, value, &i) {
- return false
- }
- emitter.indention = true
- breaks = true
- } else {
- if breaks {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- }
- if !write(emitter, value, &i) {
- return false
- }
- emitter.indention = false
- breaks = false
- }
- }
-
- return true
-}
-
-func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) bool {
- if !yaml_emitter_write_indicator(emitter, []byte{'>'}, true, false, false) {
- return false
- }
- if !yaml_emitter_write_block_scalar_hints(emitter, value) {
- return false
- }
-
- if !put_break(emitter) {
- return false
- }
- emitter.indention = true
- emitter.whitespace = true
-
- breaks := true
- leading_spaces := true
- for i := 0; i < len(value); {
- if is_break(value, i) {
- if !breaks && !leading_spaces && value[i] == '\n' {
- k := 0
- for is_break(value, k) {
- k += width(value[k])
- }
- if !is_blankz(value, k) {
- if !put_break(emitter) {
- return false
- }
- }
- }
- if !write_break(emitter, value, &i) {
- return false
- }
- emitter.indention = true
- breaks = true
- } else {
- if breaks {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- leading_spaces = is_blank(value, i)
- }
- if !breaks && is_space(value, i) && !is_space(value, i+1) && emitter.column > emitter.best_width {
- if !yaml_emitter_write_indent(emitter) {
- return false
- }
- i += width(value[i])
- } else {
- if !write(emitter, value, &i) {
- return false
- }
- }
- emitter.indention = false
- breaks = false
- }
- }
- return true
-}
diff --git a/vendor/github.com/coreos/yaml/encode.go b/vendor/github.com/coreos/yaml/encode.go
deleted file mode 100644
index 0b9048d73..000000000
--- a/vendor/github.com/coreos/yaml/encode.go
+++ /dev/null
@@ -1,265 +0,0 @@
-package yaml
-
-import (
- "reflect"
- "regexp"
- "sort"
- "strconv"
- "strings"
- "time"
-)
-
-type encoder struct {
- emitter yaml_emitter_t
- event yaml_event_t
- out []byte
- flow bool
-}
-
-func newEncoder() (e *encoder) {
- e = &encoder{}
- e.must(yaml_emitter_initialize(&e.emitter))
- yaml_emitter_set_output_string(&e.emitter, &e.out)
- e.must(yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING))
- e.emit()
- e.must(yaml_document_start_event_initialize(&e.event, nil, nil, true))
- e.emit()
- return e
-}
-
-func (e *encoder) finish() {
- e.must(yaml_document_end_event_initialize(&e.event, true))
- e.emit()
- e.emitter.open_ended = false
- e.must(yaml_stream_end_event_initialize(&e.event))
- e.emit()
-}
-
-func (e *encoder) destroy() {
- yaml_emitter_delete(&e.emitter)
-}
-
-func (e *encoder) emit() {
- // This will internally delete the e.event value.
- if !yaml_emitter_emit(&e.emitter, &e.event) && e.event.typ != yaml_DOCUMENT_END_EVENT && e.event.typ != yaml_STREAM_END_EVENT {
- e.must(false)
- }
-}
-
-func (e *encoder) must(ok bool) {
- if !ok {
- msg := e.emitter.problem
- if msg == "" {
- msg = "Unknown problem generating YAML content"
- }
- fail(msg)
- }
-}
-
-func (e *encoder) marshal(tag string, in reflect.Value) {
- if !in.IsValid() {
- e.nilv()
- return
- }
- var value interface{}
- if getter, ok := in.Interface().(Getter); ok {
- tag, value = getter.GetYAML()
- tag = longTag(tag)
- if value == nil {
- e.nilv()
- return
- }
- in = reflect.ValueOf(value)
- }
- switch in.Kind() {
- case reflect.Interface:
- if in.IsNil() {
- e.nilv()
- } else {
- e.marshal(tag, in.Elem())
- }
- case reflect.Map:
- e.mapv(tag, in)
- case reflect.Ptr:
- if in.IsNil() {
- e.nilv()
- } else {
- e.marshal(tag, in.Elem())
- }
- case reflect.Struct:
- e.structv(tag, in)
- case reflect.Slice:
- e.slicev(tag, in)
- case reflect.String:
- e.stringv(tag, in)
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- if in.Type() == durationType {
- e.stringv(tag, reflect.ValueOf(in.Interface().(time.Duration).String()))
- } else {
- e.intv(tag, in)
- }
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
- e.uintv(tag, in)
- case reflect.Float32, reflect.Float64:
- e.floatv(tag, in)
- case reflect.Bool:
- e.boolv(tag, in)
- default:
- panic("Can't marshal type: " + in.Type().String())
- }
-}
-
-func (e *encoder) mapv(tag string, in reflect.Value) {
- e.mappingv(tag, func() {
- keys := keyList(in.MapKeys())
- sort.Sort(keys)
- for _, k := range keys {
- e.marshal("", k)
- e.marshal("", in.MapIndex(k))
- }
- })
-}
-
-func (e *encoder) structv(tag string, in reflect.Value) {
- sinfo, err := getStructInfo(in.Type())
- if err != nil {
- panic(err)
- }
- e.mappingv(tag, func() {
- for _, info := range sinfo.FieldsList {
- var value reflect.Value
- if info.Inline == nil {
- value = in.Field(info.Num)
- } else {
- value = in.FieldByIndex(info.Inline)
- }
- if info.OmitEmpty && isZero(value) {
- continue
- }
- e.marshal("", reflect.ValueOf(info.Key))
- e.flow = info.Flow
- e.marshal("", value)
- }
- })
-}
-
-func (e *encoder) mappingv(tag string, f func()) {
- implicit := tag == ""
- style := yaml_BLOCK_MAPPING_STYLE
- if e.flow {
- e.flow = false
- style = yaml_FLOW_MAPPING_STYLE
- }
- e.must(yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
- e.emit()
- f()
- e.must(yaml_mapping_end_event_initialize(&e.event))
- e.emit()
-}
-
-func (e *encoder) slicev(tag string, in reflect.Value) {
- implicit := tag == ""
- style := yaml_BLOCK_SEQUENCE_STYLE
- if e.flow {
- e.flow = false
- style = yaml_FLOW_SEQUENCE_STYLE
- }
- e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
- e.emit()
- n := in.Len()
- for i := 0; i < n; i++ {
- e.marshal("", in.Index(i))
- }
- e.must(yaml_sequence_end_event_initialize(&e.event))
- e.emit()
-}
-
-// isBase60Float returns whether s is in base 60 notation as defined in YAML 1.1.
-//
-// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported
-// in YAML 1.2 and by this package, but these should be marshalled quoted for
-// the time being for compatibility with other parsers.
-func isBase60Float(s string) (result bool) {
- // Fast path.
- if s == "" {
- return false
- }
- c := s[0]
- if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 {
- return false
- }
- // Do the full match.
- return base60float.MatchString(s)
-}
-
-// From http://yaml.org/type/float.html, except the regular expression there
-// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix.
-var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`)
-
-func (e *encoder) stringv(tag string, in reflect.Value) {
- var style yaml_scalar_style_t
- s := in.String()
- rtag, rs := resolve("", s)
- if rtag == yaml_BINARY_TAG {
- if tag == "" || tag == yaml_STR_TAG {
- tag = rtag
- s = rs.(string)
- } else if tag == yaml_BINARY_TAG {
- fail("explicitly tagged !!binary data must be base64-encoded")
- } else {
- fail("cannot marshal invalid UTF-8 data as " + shortTag(tag))
- }
- }
- if tag == "" && (rtag != yaml_STR_TAG || isBase60Float(s)) {
- style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
- } else if strings.Contains(s, "\n") {
- style = yaml_LITERAL_SCALAR_STYLE
- } else {
- style = yaml_PLAIN_SCALAR_STYLE
- }
- e.emitScalar(s, "", tag, style)
-}
-
-func (e *encoder) boolv(tag string, in reflect.Value) {
- var s string
- if in.Bool() {
- s = "true"
- } else {
- s = "false"
- }
- e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
-}
-
-func (e *encoder) intv(tag string, in reflect.Value) {
- s := strconv.FormatInt(in.Int(), 10)
- e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
-}
-
-func (e *encoder) uintv(tag string, in reflect.Value) {
- s := strconv.FormatUint(in.Uint(), 10)
- e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
-}
-
-func (e *encoder) floatv(tag string, in reflect.Value) {
- // FIXME: Handle 64 bits here.
- s := strconv.FormatFloat(float64(in.Float()), 'g', -1, 32)
- switch s {
- case "+Inf":
- s = ".inf"
- case "-Inf":
- s = "-.inf"
- case "NaN":
- s = ".nan"
- }
- e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
-}
-
-func (e *encoder) nilv() {
- e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE)
-}
-
-func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) {
- implicit := tag == ""
- e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style))
- e.emit()
-}
diff --git a/vendor/github.com/coreos/yaml/parserc.go b/vendor/github.com/coreos/yaml/parserc.go
deleted file mode 100644
index 0a7037ad1..000000000
--- a/vendor/github.com/coreos/yaml/parserc.go
+++ /dev/null
@@ -1,1096 +0,0 @@
-package yaml
-
-import (
- "bytes"
-)
-
-// The parser implements the following grammar:
-//
-// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
-// implicit_document ::= block_node DOCUMENT-END*
-// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
-// block_node_or_indentless_sequence ::=
-// ALIAS
-// | properties (block_content | indentless_block_sequence)?
-// | block_content
-// | indentless_block_sequence
-// block_node ::= ALIAS
-// | properties block_content?
-// | block_content
-// flow_node ::= ALIAS
-// | properties flow_content?
-// | flow_content
-// properties ::= TAG ANCHOR? | ANCHOR TAG?
-// block_content ::= block_collection | flow_collection | SCALAR
-// flow_content ::= flow_collection | SCALAR
-// block_collection ::= block_sequence | block_mapping
-// flow_collection ::= flow_sequence | flow_mapping
-// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
-// indentless_sequence ::= (BLOCK-ENTRY block_node?)+
-// block_mapping ::= BLOCK-MAPPING_START
-// ((KEY block_node_or_indentless_sequence?)?
-// (VALUE block_node_or_indentless_sequence?)?)*
-// BLOCK-END
-// flow_sequence ::= FLOW-SEQUENCE-START
-// (flow_sequence_entry FLOW-ENTRY)*
-// flow_sequence_entry?
-// FLOW-SEQUENCE-END
-// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
-// flow_mapping ::= FLOW-MAPPING-START
-// (flow_mapping_entry FLOW-ENTRY)*
-// flow_mapping_entry?
-// FLOW-MAPPING-END
-// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
-
-// Peek the next token in the token queue.
-func peek_token(parser *yaml_parser_t) *yaml_token_t {
- if parser.token_available || yaml_parser_fetch_more_tokens(parser) {
- return &parser.tokens[parser.tokens_head]
- }
- return nil
-}
-
-// Remove the next token from the queue (must be called after peek_token).
-func skip_token(parser *yaml_parser_t) {
- parser.token_available = false
- parser.tokens_parsed++
- parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN
- parser.tokens_head++
-}
-
-// Get the next event.
-func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool {
- // Erase the event object.
- *event = yaml_event_t{}
-
- // No events after the end of the stream or error.
- if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE {
- return true
- }
-
- // Generate the next event.
- return yaml_parser_state_machine(parser, event)
-}
-
-// Set parser error.
-func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool {
- parser.error = yaml_PARSER_ERROR
- parser.problem = problem
- parser.problem_mark = problem_mark
- return false
-}
-
-func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool {
- parser.error = yaml_PARSER_ERROR
- parser.context = context
- parser.context_mark = context_mark
- parser.problem = problem
- parser.problem_mark = problem_mark
- return false
-}
-
-// State dispatcher.
-func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool {
- //trace("yaml_parser_state_machine", "state:", parser.state.String())
-
- switch parser.state {
- case yaml_PARSE_STREAM_START_STATE:
- return yaml_parser_parse_stream_start(parser, event)
-
- case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
- return yaml_parser_parse_document_start(parser, event, true)
-
- case yaml_PARSE_DOCUMENT_START_STATE:
- return yaml_parser_parse_document_start(parser, event, false)
-
- case yaml_PARSE_DOCUMENT_CONTENT_STATE:
- return yaml_parser_parse_document_content(parser, event)
-
- case yaml_PARSE_DOCUMENT_END_STATE:
- return yaml_parser_parse_document_end(parser, event)
-
- case yaml_PARSE_BLOCK_NODE_STATE:
- return yaml_parser_parse_node(parser, event, true, false)
-
- case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
- return yaml_parser_parse_node(parser, event, true, true)
-
- case yaml_PARSE_FLOW_NODE_STATE:
- return yaml_parser_parse_node(parser, event, false, false)
-
- case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
- return yaml_parser_parse_block_sequence_entry(parser, event, true)
-
- case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
- return yaml_parser_parse_block_sequence_entry(parser, event, false)
-
- case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
- return yaml_parser_parse_indentless_sequence_entry(parser, event)
-
- case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
- return yaml_parser_parse_block_mapping_key(parser, event, true)
-
- case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
- return yaml_parser_parse_block_mapping_key(parser, event, false)
-
- case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
- return yaml_parser_parse_block_mapping_value(parser, event)
-
- case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
- return yaml_parser_parse_flow_sequence_entry(parser, event, true)
-
- case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
- return yaml_parser_parse_flow_sequence_entry(parser, event, false)
-
- case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
- return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event)
-
- case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
- return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event)
-
- case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
- return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event)
-
- case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
- return yaml_parser_parse_flow_mapping_key(parser, event, true)
-
- case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
- return yaml_parser_parse_flow_mapping_key(parser, event, false)
-
- case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
- return yaml_parser_parse_flow_mapping_value(parser, event, false)
-
- case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
- return yaml_parser_parse_flow_mapping_value(parser, event, true)
-
- default:
- panic("invalid parser state")
- }
- return false
-}
-
-// Parse the production:
-// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
-// ************
-func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool {
- token := peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ != yaml_STREAM_START_TOKEN {
-		return yaml_parser_set_parser_error(parser, "did not find expected <stream-start>", token.start_mark)
- }
- parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE
- *event = yaml_event_t{
- typ: yaml_STREAM_START_EVENT,
- start_mark: token.start_mark,
- end_mark: token.end_mark,
- encoding: token.encoding,
- }
- skip_token(parser)
- return true
-}
-
-// Parse the productions:
-// implicit_document ::= block_node DOCUMENT-END*
-// *
-// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
-// *************************
-func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool {
-
- token := peek_token(parser)
- if token == nil {
- return false
- }
-
- // Parse extra document end indicators.
- if !implicit {
- for token.typ == yaml_DOCUMENT_END_TOKEN {
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- }
- }
-
- if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN &&
- token.typ != yaml_TAG_DIRECTIVE_TOKEN &&
- token.typ != yaml_DOCUMENT_START_TOKEN &&
- token.typ != yaml_STREAM_END_TOKEN {
- // Parse an implicit document.
- if !yaml_parser_process_directives(parser, nil, nil) {
- return false
- }
- parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
- parser.state = yaml_PARSE_BLOCK_NODE_STATE
-
- *event = yaml_event_t{
- typ: yaml_DOCUMENT_START_EVENT,
- start_mark: token.start_mark,
- end_mark: token.end_mark,
- }
-
- } else if token.typ != yaml_STREAM_END_TOKEN {
- // Parse an explicit document.
- var version_directive *yaml_version_directive_t
- var tag_directives []yaml_tag_directive_t
- start_mark := token.start_mark
- if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) {
- return false
- }
- token = peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ != yaml_DOCUMENT_START_TOKEN {
- yaml_parser_set_parser_error(parser,
-				"did not find expected <document start>", token.start_mark)
- return false
- }
- parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
- parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE
- end_mark := token.end_mark
-
- *event = yaml_event_t{
- typ: yaml_DOCUMENT_START_EVENT,
- start_mark: start_mark,
- end_mark: end_mark,
- version_directive: version_directive,
- tag_directives: tag_directives,
- implicit: false,
- }
- skip_token(parser)
-
- } else {
- // Parse the stream end.
- parser.state = yaml_PARSE_END_STATE
- *event = yaml_event_t{
- typ: yaml_STREAM_END_EVENT,
- start_mark: token.start_mark,
- end_mark: token.end_mark,
- }
- skip_token(parser)
- }
-
- return true
-}
-
-// Parse the productions:
-// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
-// ***********
-//
-func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool {
- token := peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ == yaml_VERSION_DIRECTIVE_TOKEN ||
- token.typ == yaml_TAG_DIRECTIVE_TOKEN ||
- token.typ == yaml_DOCUMENT_START_TOKEN ||
- token.typ == yaml_DOCUMENT_END_TOKEN ||
- token.typ == yaml_STREAM_END_TOKEN {
- parser.state = parser.states[len(parser.states)-1]
- parser.states = parser.states[:len(parser.states)-1]
- return yaml_parser_process_empty_scalar(parser, event,
- token.start_mark)
- }
- return yaml_parser_parse_node(parser, event, true, false)
-}
-
-// Parse the productions:
-// implicit_document ::= block_node DOCUMENT-END*
-// *************
-// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
-//
-func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool {
- token := peek_token(parser)
- if token == nil {
- return false
- }
-
- start_mark := token.start_mark
- end_mark := token.start_mark
-
- implicit := true
- if token.typ == yaml_DOCUMENT_END_TOKEN {
- end_mark = token.end_mark
- skip_token(parser)
- implicit = false
- }
-
- parser.tag_directives = parser.tag_directives[:0]
-
- parser.state = yaml_PARSE_DOCUMENT_START_STATE
- *event = yaml_event_t{
- typ: yaml_DOCUMENT_END_EVENT,
- start_mark: start_mark,
- end_mark: end_mark,
- implicit: implicit,
- }
- return true
-}
-
-// Parse the productions:
-// block_node_or_indentless_sequence ::=
-// ALIAS
-// *****
-// | properties (block_content | indentless_block_sequence)?
-// ********** *
-// | block_content | indentless_block_sequence
-// *
-// block_node ::= ALIAS
-// *****
-// | properties block_content?
-// ********** *
-// | block_content
-// *
-// flow_node ::= ALIAS
-// *****
-// | properties flow_content?
-// ********** *
-// | flow_content
-// *
-// properties ::= TAG ANCHOR? | ANCHOR TAG?
-// *************************
-// block_content ::= block_collection | flow_collection | SCALAR
-// ******
-// flow_content ::= flow_collection | SCALAR
-// ******
-func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool {
- //defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)()
-
- token := peek_token(parser)
- if token == nil {
- return false
- }
-
- if token.typ == yaml_ALIAS_TOKEN {
- parser.state = parser.states[len(parser.states)-1]
- parser.states = parser.states[:len(parser.states)-1]
- *event = yaml_event_t{
- typ: yaml_ALIAS_EVENT,
- start_mark: token.start_mark,
- end_mark: token.end_mark,
- anchor: token.value,
- }
- skip_token(parser)
- return true
- }
-
- start_mark := token.start_mark
- end_mark := token.start_mark
-
- var tag_token bool
- var tag_handle, tag_suffix, anchor []byte
- var tag_mark yaml_mark_t
- if token.typ == yaml_ANCHOR_TOKEN {
- anchor = token.value
- start_mark = token.start_mark
- end_mark = token.end_mark
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ == yaml_TAG_TOKEN {
- tag_token = true
- tag_handle = token.value
- tag_suffix = token.suffix
- tag_mark = token.start_mark
- end_mark = token.end_mark
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- }
- } else if token.typ == yaml_TAG_TOKEN {
- tag_token = true
- tag_handle = token.value
- tag_suffix = token.suffix
- start_mark = token.start_mark
- tag_mark = token.start_mark
- end_mark = token.end_mark
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ == yaml_ANCHOR_TOKEN {
- anchor = token.value
- end_mark = token.end_mark
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- }
- }
-
- var tag []byte
- if tag_token {
- if len(tag_handle) == 0 {
- tag = tag_suffix
- tag_suffix = nil
- } else {
- for i := range parser.tag_directives {
- if bytes.Equal(parser.tag_directives[i].handle, tag_handle) {
- tag = append([]byte(nil), parser.tag_directives[i].prefix...)
- tag = append(tag, tag_suffix...)
- break
- }
- }
- if len(tag) == 0 {
- yaml_parser_set_parser_error_context(parser,
- "while parsing a node", start_mark,
- "found undefined tag handle", tag_mark)
- return false
- }
- }
- }
-
- implicit := len(tag) == 0
- if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN {
- end_mark = token.end_mark
- parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
- *event = yaml_event_t{
- typ: yaml_SEQUENCE_START_EVENT,
- start_mark: start_mark,
- end_mark: end_mark,
- anchor: anchor,
- tag: tag,
- implicit: implicit,
- style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
- }
- return true
- }
- if token.typ == yaml_SCALAR_TOKEN {
- var plain_implicit, quoted_implicit bool
- end_mark = token.end_mark
- if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') {
- plain_implicit = true
- } else if len(tag) == 0 {
- quoted_implicit = true
- }
- parser.state = parser.states[len(parser.states)-1]
- parser.states = parser.states[:len(parser.states)-1]
-
- *event = yaml_event_t{
- typ: yaml_SCALAR_EVENT,
- start_mark: start_mark,
- end_mark: end_mark,
- anchor: anchor,
- tag: tag,
- value: token.value,
- implicit: plain_implicit,
- quoted_implicit: quoted_implicit,
- style: yaml_style_t(token.style),
- }
- skip_token(parser)
- return true
- }
- if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN {
- // [Go] Some of the events below can be merged as they differ only on style.
- end_mark = token.end_mark
- parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE
- *event = yaml_event_t{
- typ: yaml_SEQUENCE_START_EVENT,
- start_mark: start_mark,
- end_mark: end_mark,
- anchor: anchor,
- tag: tag,
- implicit: implicit,
- style: yaml_style_t(yaml_FLOW_SEQUENCE_STYLE),
- }
- return true
- }
- if token.typ == yaml_FLOW_MAPPING_START_TOKEN {
- end_mark = token.end_mark
- parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE
- *event = yaml_event_t{
- typ: yaml_MAPPING_START_EVENT,
- start_mark: start_mark,
- end_mark: end_mark,
- anchor: anchor,
- tag: tag,
- implicit: implicit,
- style: yaml_style_t(yaml_FLOW_MAPPING_STYLE),
- }
- return true
- }
- if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN {
- end_mark = token.end_mark
- parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE
- *event = yaml_event_t{
- typ: yaml_SEQUENCE_START_EVENT,
- start_mark: start_mark,
- end_mark: end_mark,
- anchor: anchor,
- tag: tag,
- implicit: implicit,
- style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
- }
- return true
- }
- if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN {
- end_mark = token.end_mark
- parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE
- *event = yaml_event_t{
- typ: yaml_MAPPING_START_EVENT,
- start_mark: start_mark,
- end_mark: end_mark,
- anchor: anchor,
- tag: tag,
- implicit: implicit,
- style: yaml_style_t(yaml_BLOCK_MAPPING_STYLE),
- }
- return true
- }
- if len(anchor) > 0 || len(tag) > 0 {
- parser.state = parser.states[len(parser.states)-1]
- parser.states = parser.states[:len(parser.states)-1]
-
- *event = yaml_event_t{
- typ: yaml_SCALAR_EVENT,
- start_mark: start_mark,
- end_mark: end_mark,
- anchor: anchor,
- tag: tag,
- implicit: implicit,
- quoted_implicit: false,
- style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
- }
- return true
- }
-
- context := "while parsing a flow node"
- if block {
- context = "while parsing a block node"
- }
- yaml_parser_set_parser_error_context(parser, context, start_mark,
- "did not find expected node content", token.start_mark)
- return false
-}
-
-// Parse the productions:
-// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
-// ******************** *********** * *********
-//
-func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
- if first {
- token := peek_token(parser)
- parser.marks = append(parser.marks, token.start_mark)
- skip_token(parser)
- }
-
- token := peek_token(parser)
- if token == nil {
- return false
- }
-
- if token.typ == yaml_BLOCK_ENTRY_TOKEN {
- mark := token.end_mark
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN {
- parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE)
- return yaml_parser_parse_node(parser, event, true, false)
- } else {
- parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE
- return yaml_parser_process_empty_scalar(parser, event, mark)
- }
- }
- if token.typ == yaml_BLOCK_END_TOKEN {
- parser.state = parser.states[len(parser.states)-1]
- parser.states = parser.states[:len(parser.states)-1]
- parser.marks = parser.marks[:len(parser.marks)-1]
-
- *event = yaml_event_t{
- typ: yaml_SEQUENCE_END_EVENT,
- start_mark: token.start_mark,
- end_mark: token.end_mark,
- }
-
- skip_token(parser)
- return true
- }
-
- context_mark := parser.marks[len(parser.marks)-1]
- parser.marks = parser.marks[:len(parser.marks)-1]
- return yaml_parser_set_parser_error_context(parser,
- "while parsing a block collection", context_mark,
- "did not find expected '-' indicator", token.start_mark)
-}
-
-// Parse the productions:
-// indentless_sequence ::= (BLOCK-ENTRY block_node?)+
-// *********** *
-func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool {
- token := peek_token(parser)
- if token == nil {
- return false
- }
-
- if token.typ == yaml_BLOCK_ENTRY_TOKEN {
- mark := token.end_mark
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ != yaml_BLOCK_ENTRY_TOKEN &&
- token.typ != yaml_KEY_TOKEN &&
- token.typ != yaml_VALUE_TOKEN &&
- token.typ != yaml_BLOCK_END_TOKEN {
- parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE)
- return yaml_parser_parse_node(parser, event, true, false)
- }
- parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
- return yaml_parser_process_empty_scalar(parser, event, mark)
- }
- parser.state = parser.states[len(parser.states)-1]
- parser.states = parser.states[:len(parser.states)-1]
-
- *event = yaml_event_t{
- typ: yaml_SEQUENCE_END_EVENT,
- start_mark: token.start_mark,
- end_mark: token.start_mark, // [Go] Shouldn't this be token.end_mark?
- }
- return true
-}
-
-// Parse the productions:
-// block_mapping ::= BLOCK-MAPPING_START
-// *******************
-// ((KEY block_node_or_indentless_sequence?)?
-// *** *
-// (VALUE block_node_or_indentless_sequence?)?)*
-//
-// BLOCK-END
-// *********
-//
-func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
- if first {
- token := peek_token(parser)
- parser.marks = append(parser.marks, token.start_mark)
- skip_token(parser)
- }
-
- token := peek_token(parser)
- if token == nil {
- return false
- }
-
- if token.typ == yaml_KEY_TOKEN {
- mark := token.end_mark
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ != yaml_KEY_TOKEN &&
- token.typ != yaml_VALUE_TOKEN &&
- token.typ != yaml_BLOCK_END_TOKEN {
- parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE)
- return yaml_parser_parse_node(parser, event, true, true)
- } else {
- parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE
- return yaml_parser_process_empty_scalar(parser, event, mark)
- }
- } else if token.typ == yaml_BLOCK_END_TOKEN {
- parser.state = parser.states[len(parser.states)-1]
- parser.states = parser.states[:len(parser.states)-1]
- parser.marks = parser.marks[:len(parser.marks)-1]
- *event = yaml_event_t{
- typ: yaml_MAPPING_END_EVENT,
- start_mark: token.start_mark,
- end_mark: token.end_mark,
- }
- skip_token(parser)
- return true
- }
-
- context_mark := parser.marks[len(parser.marks)-1]
- parser.marks = parser.marks[:len(parser.marks)-1]
- return yaml_parser_set_parser_error_context(parser,
- "while parsing a block mapping", context_mark,
- "did not find expected key", token.start_mark)
-}
-
-// Parse the productions:
-// block_mapping ::= BLOCK-MAPPING_START
-//
-// ((KEY block_node_or_indentless_sequence?)?
-//
-// (VALUE block_node_or_indentless_sequence?)?)*
-// ***** *
-// BLOCK-END
-//
-//
-func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
- token := peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ == yaml_VALUE_TOKEN {
- mark := token.end_mark
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ != yaml_KEY_TOKEN &&
- token.typ != yaml_VALUE_TOKEN &&
- token.typ != yaml_BLOCK_END_TOKEN {
- parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE)
- return yaml_parser_parse_node(parser, event, true, true)
- }
- parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
- return yaml_parser_process_empty_scalar(parser, event, mark)
- }
- parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
- return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
-}
-
-// Parse the productions:
-// flow_sequence ::= FLOW-SEQUENCE-START
-// *******************
-// (flow_sequence_entry FLOW-ENTRY)*
-// * **********
-// flow_sequence_entry?
-// *
-// FLOW-SEQUENCE-END
-// *****************
-// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
-// *
-//
-func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
- if first {
- token := peek_token(parser)
- parser.marks = append(parser.marks, token.start_mark)
- skip_token(parser)
- }
- token := peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
- if !first {
- if token.typ == yaml_FLOW_ENTRY_TOKEN {
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- } else {
- context_mark := parser.marks[len(parser.marks)-1]
- parser.marks = parser.marks[:len(parser.marks)-1]
- return yaml_parser_set_parser_error_context(parser,
- "while parsing a flow sequence", context_mark,
- "did not find expected ',' or ']'", token.start_mark)
- }
- }
-
- if token.typ == yaml_KEY_TOKEN {
- parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE
- *event = yaml_event_t{
- typ: yaml_MAPPING_START_EVENT,
- start_mark: token.start_mark,
- end_mark: token.end_mark,
- implicit: true,
- style: yaml_style_t(yaml_FLOW_MAPPING_STYLE),
- }
- skip_token(parser)
- return true
- } else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
- parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE)
- return yaml_parser_parse_node(parser, event, false, false)
- }
- }
-
- parser.state = parser.states[len(parser.states)-1]
- parser.states = parser.states[:len(parser.states)-1]
- parser.marks = parser.marks[:len(parser.marks)-1]
-
- *event = yaml_event_t{
- typ: yaml_SEQUENCE_END_EVENT,
- start_mark: token.start_mark,
- end_mark: token.end_mark,
- }
-
- skip_token(parser)
- return true
-}
-
-//
-// Parse the productions:
-// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
-// *** *
-//
-func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool {
- token := peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ != yaml_VALUE_TOKEN &&
- token.typ != yaml_FLOW_ENTRY_TOKEN &&
- token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
- parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE)
- return yaml_parser_parse_node(parser, event, false, false)
- }
- mark := token.end_mark
- skip_token(parser)
- parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE
- return yaml_parser_process_empty_scalar(parser, event, mark)
-}
-
-// Parse the productions:
-// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
-// ***** *
-//
-func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
- token := peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ == yaml_VALUE_TOKEN {
- skip_token(parser)
- token := peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
- parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE)
- return yaml_parser_parse_node(parser, event, false, false)
- }
- }
- parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE
- return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
-}
-
-// Parse the productions:
-// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
-// *
-//
-func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool {
- token := peek_token(parser)
- if token == nil {
- return false
- }
- parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE
- *event = yaml_event_t{
- typ: yaml_MAPPING_END_EVENT,
- start_mark: token.start_mark,
- end_mark: token.start_mark, // [Go] Shouldn't this be end_mark?
- }
- return true
-}
-
-// Parse the productions:
-// flow_mapping ::= FLOW-MAPPING-START
-// ******************
-// (flow_mapping_entry FLOW-ENTRY)*
-// * **********
-// flow_mapping_entry?
-// ******************
-// FLOW-MAPPING-END
-// ****************
-// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
-// * *** *
-//
-func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
- if first {
- token := peek_token(parser)
- parser.marks = append(parser.marks, token.start_mark)
- skip_token(parser)
- }
-
- token := peek_token(parser)
- if token == nil {
- return false
- }
-
- if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
- if !first {
- if token.typ == yaml_FLOW_ENTRY_TOKEN {
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- } else {
- context_mark := parser.marks[len(parser.marks)-1]
- parser.marks = parser.marks[:len(parser.marks)-1]
- return yaml_parser_set_parser_error_context(parser,
- "while parsing a flow mapping", context_mark,
- "did not find expected ',' or '}'", token.start_mark)
- }
- }
-
- if token.typ == yaml_KEY_TOKEN {
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ != yaml_VALUE_TOKEN &&
- token.typ != yaml_FLOW_ENTRY_TOKEN &&
- token.typ != yaml_FLOW_MAPPING_END_TOKEN {
- parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE)
- return yaml_parser_parse_node(parser, event, false, false)
- } else {
- parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE
- return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
- }
- } else if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
- parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE)
- return yaml_parser_parse_node(parser, event, false, false)
- }
- }
-
- parser.state = parser.states[len(parser.states)-1]
- parser.states = parser.states[:len(parser.states)-1]
- parser.marks = parser.marks[:len(parser.marks)-1]
- *event = yaml_event_t{
- typ: yaml_MAPPING_END_EVENT,
- start_mark: token.start_mark,
- end_mark: token.end_mark,
- }
- skip_token(parser)
- return true
-}
-
-// Parse the productions:
-// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
-// * ***** *
-//
-func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool {
- token := peek_token(parser)
- if token == nil {
- return false
- }
- if empty {
- parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
- return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
- }
- if token.typ == yaml_VALUE_TOKEN {
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN {
- parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE)
- return yaml_parser_parse_node(parser, event, false, false)
- }
- }
- parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
- return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
-}
-
-// Generate an empty scalar event.
-func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool {
- *event = yaml_event_t{
- typ: yaml_SCALAR_EVENT,
- start_mark: mark,
- end_mark: mark,
- value: nil, // Empty
- implicit: true,
- style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
- }
- return true
-}
-
-var default_tag_directives = []yaml_tag_directive_t{
- {[]byte("!"), []byte("!")},
- {[]byte("!!"), []byte("tag:yaml.org,2002:")},
-}
-
-// Parse directives.
-func yaml_parser_process_directives(parser *yaml_parser_t,
- version_directive_ref **yaml_version_directive_t,
- tag_directives_ref *[]yaml_tag_directive_t) bool {
-
- var version_directive *yaml_version_directive_t
- var tag_directives []yaml_tag_directive_t
-
- token := peek_token(parser)
- if token == nil {
- return false
- }
-
- for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN {
- if token.typ == yaml_VERSION_DIRECTIVE_TOKEN {
- if version_directive != nil {
- yaml_parser_set_parser_error(parser,
- "found duplicate %YAML directive", token.start_mark)
- return false
- }
- if token.major != 1 || token.minor != 1 {
- yaml_parser_set_parser_error(parser,
- "found incompatible YAML document", token.start_mark)
- return false
- }
- version_directive = &yaml_version_directive_t{
- major: token.major,
- minor: token.minor,
- }
- } else if token.typ == yaml_TAG_DIRECTIVE_TOKEN {
- value := yaml_tag_directive_t{
- handle: token.value,
- prefix: token.prefix,
- }
- if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) {
- return false
- }
- tag_directives = append(tag_directives, value)
- }
-
- skip_token(parser)
- token = peek_token(parser)
- if token == nil {
- return false
- }
- }
-
- for i := range default_tag_directives {
- if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) {
- return false
- }
- }
-
- if version_directive_ref != nil {
- *version_directive_ref = version_directive
- }
- if tag_directives_ref != nil {
- *tag_directives_ref = tag_directives
- }
- return true
-}
-
-// Append a tag directive to the directives stack.
-func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool {
- for i := range parser.tag_directives {
- if bytes.Equal(value.handle, parser.tag_directives[i].handle) {
- if allow_duplicates {
- return true
- }
- return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark)
- }
- }
-
- // [Go] I suspect the copy is unnecessary. This was likely done
- // because there was no way to track ownership of the data.
- value_copy := yaml_tag_directive_t{
- handle: make([]byte, len(value.handle)),
- prefix: make([]byte, len(value.prefix)),
- }
- copy(value_copy.handle, value.handle)
- copy(value_copy.prefix, value.prefix)
- parser.tag_directives = append(parser.tag_directives, value_copy)
- return true
-}
diff --git a/vendor/github.com/coreos/yaml/readerc.go b/vendor/github.com/coreos/yaml/readerc.go
deleted file mode 100644
index d5fb09727..000000000
--- a/vendor/github.com/coreos/yaml/readerc.go
+++ /dev/null
@@ -1,391 +0,0 @@
-package yaml
-
-import (
- "io"
-)
-
-// Set the reader error and return 0.
-func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool {
- parser.error = yaml_READER_ERROR
- parser.problem = problem
- parser.problem_offset = offset
- parser.problem_value = value
- return false
-}
-
-// Byte order marks.
-const (
- bom_UTF8 = "\xef\xbb\xbf"
- bom_UTF16LE = "\xff\xfe"
- bom_UTF16BE = "\xfe\xff"
-)
-
-// Determine the input stream encoding by checking the BOM symbol. If no BOM is
-// found, the UTF-8 encoding is assumed. Return true on success, false on failure.
-func yaml_parser_determine_encoding(parser *yaml_parser_t) bool {
- // Ensure that we had enough bytes in the raw buffer.
- for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 {
- if !yaml_parser_update_raw_buffer(parser) {
- return false
- }
- }
-
- // Determine the encoding.
- buf := parser.raw_buffer
- pos := parser.raw_buffer_pos
- avail := len(buf) - pos
- if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] {
- parser.encoding = yaml_UTF16LE_ENCODING
- parser.raw_buffer_pos += 2
- parser.offset += 2
- } else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] {
- parser.encoding = yaml_UTF16BE_ENCODING
- parser.raw_buffer_pos += 2
- parser.offset += 2
- } else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] {
- parser.encoding = yaml_UTF8_ENCODING
- parser.raw_buffer_pos += 3
- parser.offset += 3
- } else {
- parser.encoding = yaml_UTF8_ENCODING
- }
- return true
-}
-
-// Update the raw buffer.
-func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool {
- size_read := 0
-
- // Return if the raw buffer is full.
- if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) {
- return true
- }
-
- // Return on EOF.
- if parser.eof {
- return true
- }
-
- // Move the remaining bytes in the raw buffer to the beginning.
- if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) {
- copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:])
- }
- parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos]
- parser.raw_buffer_pos = 0
-
- // Call the read handler to fill the buffer.
- size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)])
- parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read]
- if err == io.EOF {
- parser.eof = true
- } else if err != nil {
- return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1)
- }
- return true
-}
-
-// Ensure that the buffer contains at least `length` characters.
-// Return true on success, false on failure.
-//
-// The length is supposed to be significantly less than the buffer size.
-func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
- if parser.read_handler == nil {
- panic("read handler must be set")
- }
-
- // If the EOF flag is set and the raw buffer is empty, do nothing.
- if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) {
- return true
- }
-
- // Return if the buffer contains enough characters.
- if parser.unread >= length {
- return true
- }
-
- // Determine the input encoding if it is not known yet.
- if parser.encoding == yaml_ANY_ENCODING {
- if !yaml_parser_determine_encoding(parser) {
- return false
- }
- }
-
- // Move the unread characters to the beginning of the buffer.
- buffer_len := len(parser.buffer)
- if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len {
- copy(parser.buffer, parser.buffer[parser.buffer_pos:])
- buffer_len -= parser.buffer_pos
- parser.buffer_pos = 0
- } else if parser.buffer_pos == buffer_len {
- buffer_len = 0
- parser.buffer_pos = 0
- }
-
- // Open the whole buffer for writing, and cut it before returning.
- parser.buffer = parser.buffer[:cap(parser.buffer)]
-
- // Fill the buffer until it has enough characters.
- first := true
- for parser.unread < length {
-
- // Fill the raw buffer if necessary.
- if !first || parser.raw_buffer_pos == len(parser.raw_buffer) {
- if !yaml_parser_update_raw_buffer(parser) {
- parser.buffer = parser.buffer[:buffer_len]
- return false
- }
- }
- first = false
-
- // Decode the raw buffer.
- inner:
- for parser.raw_buffer_pos != len(parser.raw_buffer) {
- var value rune
- var width int
-
- raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos
-
- // Decode the next character.
- switch parser.encoding {
- case yaml_UTF8_ENCODING:
- // Decode a UTF-8 character. Check RFC 3629
- // (http://www.ietf.org/rfc/rfc3629.txt) for more details.
- //
- // The following table (taken from the RFC) is used for
- // decoding.
- //
- // Char. number range | UTF-8 octet sequence
- // (hexadecimal) | (binary)
- // --------------------+------------------------------------
- // 0000 0000-0000 007F | 0xxxxxxx
- // 0000 0080-0000 07FF | 110xxxxx 10xxxxxx
- // 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx
- // 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
- //
- // Additionally, the characters in the range 0xD800-0xDFFF
- // are prohibited as they are reserved for use with UTF-16
- // surrogate pairs.
-
- // Determine the length of the UTF-8 sequence.
- octet := parser.raw_buffer[parser.raw_buffer_pos]
- switch {
- case octet&0x80 == 0x00:
- width = 1
- case octet&0xE0 == 0xC0:
- width = 2
- case octet&0xF0 == 0xE0:
- width = 3
- case octet&0xF8 == 0xF0:
- width = 4
- default:
- // The leading octet is invalid.
- return yaml_parser_set_reader_error(parser,
- "invalid leading UTF-8 octet",
- parser.offset, int(octet))
- }
-
- // Check if the raw buffer contains an incomplete character.
- if width > raw_unread {
- if parser.eof {
- return yaml_parser_set_reader_error(parser,
- "incomplete UTF-8 octet sequence",
- parser.offset, -1)
- }
- break inner
- }
-
- // Decode the leading octet.
- switch {
- case octet&0x80 == 0x00:
- value = rune(octet & 0x7F)
- case octet&0xE0 == 0xC0:
- value = rune(octet & 0x1F)
- case octet&0xF0 == 0xE0:
- value = rune(octet & 0x0F)
- case octet&0xF8 == 0xF0:
- value = rune(octet & 0x07)
- default:
- value = 0
- }
-
- // Check and decode the trailing octets.
- for k := 1; k < width; k++ {
- octet = parser.raw_buffer[parser.raw_buffer_pos+k]
-
- // Check if the octet is valid.
- if (octet & 0xC0) != 0x80 {
- return yaml_parser_set_reader_error(parser,
- "invalid trailing UTF-8 octet",
- parser.offset+k, int(octet))
- }
-
- // Decode the octet.
- value = (value << 6) + rune(octet&0x3F)
- }
-
- // Check the length of the sequence against the value.
- switch {
- case width == 1:
- case width == 2 && value >= 0x80:
- case width == 3 && value >= 0x800:
- case width == 4 && value >= 0x10000:
- default:
- return yaml_parser_set_reader_error(parser,
- "invalid length of a UTF-8 sequence",
- parser.offset, -1)
- }
-
- // Check the range of the value.
- if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF {
- return yaml_parser_set_reader_error(parser,
- "invalid Unicode character",
- parser.offset, int(value))
- }
-
- case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING:
- var low, high int
- if parser.encoding == yaml_UTF16LE_ENCODING {
- low, high = 0, 1
- } else {
- high, low = 1, 0
- }
-
- // The UTF-16 encoding is not as simple as one might
- // naively think. Check RFC 2781
- // (http://www.ietf.org/rfc/rfc2781.txt).
- //
- // Normally, two subsequent bytes describe a Unicode
- // character. However a special technique (called a
- // surrogate pair) is used for specifying character
- // values larger than 0xFFFF.
- //
- // A surrogate pair consists of two pseudo-characters:
- // high surrogate area (0xD800-0xDBFF)
- // low surrogate area (0xDC00-0xDFFF)
- //
- // The following formulas are used for decoding
- // and encoding characters using surrogate pairs:
- //
- // U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF)
- // U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF)
- // W1 = 110110yyyyyyyyyy
- // W2 = 110111xxxxxxxxxx
- //
- // where U is the character value, W1 is the high surrogate
- // area, W2 is the low surrogate area.
-
- // Check for incomplete UTF-16 character.
- if raw_unread < 2 {
- if parser.eof {
- return yaml_parser_set_reader_error(parser,
- "incomplete UTF-16 character",
- parser.offset, -1)
- }
- break inner
- }
-
- // Get the character.
- value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) +
- (rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8)
-
- // Check for unexpected low surrogate area.
- if value&0xFC00 == 0xDC00 {
- return yaml_parser_set_reader_error(parser,
- "unexpected low surrogate area",
- parser.offset, int(value))
- }
-
- // Check for a high surrogate area.
- if value&0xFC00 == 0xD800 {
- width = 4
-
- // Check for incomplete surrogate pair.
- if raw_unread < 4 {
- if parser.eof {
- return yaml_parser_set_reader_error(parser,
- "incomplete UTF-16 surrogate pair",
- parser.offset, -1)
- }
- break inner
- }
-
- // Get the next character.
- value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) +
- (rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8)
-
- // Check for a low surrogate area.
- if value2&0xFC00 != 0xDC00 {
- return yaml_parser_set_reader_error(parser,
- "expected low surrogate area",
- parser.offset+2, int(value2))
- }
-
- // Generate the value of the surrogate pair.
- value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF)
- } else {
- width = 2
- }
-
- default:
- panic("impossible")
- }
-
- // Check if the character is in the allowed range:
- // #x9 | #xA | #xD | [#x20-#x7E] (8 bit)
- // | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit)
- // | [#x10000-#x10FFFF] (32 bit)
- switch {
- case value == 0x09:
- case value == 0x0A:
- case value == 0x0D:
- case value >= 0x20 && value <= 0x7E:
- case value == 0x85:
- case value >= 0xA0 && value <= 0xD7FF:
- case value >= 0xE000 && value <= 0xFFFD:
- case value >= 0x10000 && value <= 0x10FFFF:
- default:
- return yaml_parser_set_reader_error(parser,
- "control characters are not allowed",
- parser.offset, int(value))
- }
-
- // Move the raw pointers.
- parser.raw_buffer_pos += width
- parser.offset += width
-
- // Finally put the character into the buffer.
- if value <= 0x7F {
- // 0000 0000-0000 007F . 0xxxxxxx
- parser.buffer[buffer_len+0] = byte(value)
- } else if value <= 0x7FF {
- // 0000 0080-0000 07FF . 110xxxxx 10xxxxxx
- parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6))
- parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F))
- } else if value <= 0xFFFF {
- // 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx
- parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12))
- parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F))
- parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F))
- } else {
- // 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
- parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18))
- parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F))
- parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F))
- parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F))
- }
- buffer_len += width
-
- parser.unread++
- }
-
- // On EOF, put NUL into the buffer and return.
- if parser.eof {
- parser.buffer[buffer_len] = 0
- buffer_len++
- parser.unread++
- break
- }
- }
- parser.buffer = parser.buffer[:buffer_len]
- return true
-}
diff --git a/vendor/github.com/coreos/yaml/resolve.go b/vendor/github.com/coreos/yaml/resolve.go
deleted file mode 100644
index 06c698a28..000000000
--- a/vendor/github.com/coreos/yaml/resolve.go
+++ /dev/null
@@ -1,190 +0,0 @@
-package yaml
-
-import (
- "encoding/base64"
- "fmt"
- "math"
- "strconv"
- "strings"
- "unicode/utf8"
-)
-
-// TODO: merge, timestamps, base 60 floats, omap.
-
-type resolveMapItem struct {
- value interface{}
- tag string
-}
-
-var resolveTable = make([]byte, 256)
-var resolveMap = make(map[string]resolveMapItem)
-
-func init() {
- t := resolveTable
- t[int('+')] = 'S' // Sign
- t[int('-')] = 'S'
- for _, c := range "0123456789" {
- t[int(c)] = 'D' // Digit
- }
- for _, c := range "yYnNtTfFoO~" {
- t[int(c)] = 'M' // In map
- }
- t[int('.')] = '.' // Float (potentially in map)
-
- var resolveMapList = []struct {
- v interface{}
- tag string
- l []string
- }{
- {true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}},
- {true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}},
- {true, yaml_BOOL_TAG, []string{"on", "On", "ON"}},
- {false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}},
- {false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}},
- {false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}},
- {nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}},
- {math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}},
- {math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}},
- {math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}},
- {math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}},
- {"<<", yaml_MERGE_TAG, []string{"<<"}},
- }
-
- m := resolveMap
- for _, item := range resolveMapList {
- for _, s := range item.l {
- m[s] = resolveMapItem{item.v, item.tag}
- }
- }
-}
-
-const longTagPrefix = "tag:yaml.org,2002:"
-
-func shortTag(tag string) string {
- // TODO This can easily be made faster and produce less garbage.
- if strings.HasPrefix(tag, longTagPrefix) {
- return "!!" + tag[len(longTagPrefix):]
- }
- return tag
-}
-
-func longTag(tag string) string {
- if strings.HasPrefix(tag, "!!") {
- return longTagPrefix + tag[2:]
- }
- return tag
-}
-
-func resolvableTag(tag string) bool {
- switch tag {
- case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG:
- return true
- }
- return false
-}
-
-func resolve(tag string, in string) (rtag string, out interface{}) {
- if !resolvableTag(tag) {
- return tag, in
- }
-
- defer func() {
- switch tag {
- case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG:
- return
- }
- fail(fmt.Sprintf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag)))
- }()
-
- // Any data is accepted as a !!str or !!binary.
- // Otherwise, the prefix is enough of a hint about what it might be.
- hint := byte('N')
- if in != "" {
- hint = resolveTable[in[0]]
- }
- if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG {
- // Handle things we can lookup in a map.
- if item, ok := resolveMap[in]; ok {
- return item.tag, item.value
- }
-
- // Base 60 floats are a bad idea, were dropped in YAML 1.2, and
- // are purposefully unsupported here. They're still quoted on
-// the way out for compatibility with other parsers, though.
-
- switch hint {
- case 'M':
- // We've already checked the map above.
-
- case '.':
- // Not in the map, so maybe a normal float.
- floatv, err := strconv.ParseFloat(in, 64)
- if err == nil {
- return yaml_FLOAT_TAG, floatv
- }
-
- case 'D', 'S':
- // Int, float, or timestamp.
- plain := strings.Replace(in, "_", "", -1)
- intv, err := strconv.ParseInt(plain, 0, 64)
- if err == nil {
- if intv == int64(int(intv)) {
- return yaml_INT_TAG, int(intv)
- } else {
- return yaml_INT_TAG, intv
- }
- }
- floatv, err := strconv.ParseFloat(plain, 64)
- if err == nil {
- return yaml_FLOAT_TAG, floatv
- }
- if strings.HasPrefix(plain, "0b") {
- intv, err := strconv.ParseInt(plain[2:], 2, 64)
- if err == nil {
- return yaml_INT_TAG, int(intv)
- }
- } else if strings.HasPrefix(plain, "-0b") {
- intv, err := strconv.ParseInt(plain[3:], 2, 64)
- if err == nil {
- return yaml_INT_TAG, -int(intv)
- }
- }
- // XXX Handle timestamps here.
-
- default:
- panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")")
- }
- }
- if tag == yaml_BINARY_TAG {
- return yaml_BINARY_TAG, in
- }
- if utf8.ValidString(in) {
- return yaml_STR_TAG, in
- }
- return yaml_BINARY_TAG, encodeBase64(in)
-}
-
-// encodeBase64 encodes s as base64 that is broken up into multiple lines
-// as appropriate for the resulting length.
-func encodeBase64(s string) string {
- const lineLen = 70
- encLen := base64.StdEncoding.EncodedLen(len(s))
- lines := encLen/lineLen + 1
- buf := make([]byte, encLen*2+lines)
- in := buf[0:encLen]
- out := buf[encLen:]
- base64.StdEncoding.Encode(in, []byte(s))
- k := 0
- for i := 0; i < len(in); i += lineLen {
- j := i + lineLen
- if j > len(in) {
- j = len(in)
- }
- k += copy(out[k:], in[i:j])
- if lines > 1 {
- out[k] = '\n'
- k++
- }
- }
- return string(out[:k])
-}
diff --git a/vendor/github.com/coreos/yaml/scannerc.go b/vendor/github.com/coreos/yaml/scannerc.go
deleted file mode 100644
index fe93b190c..000000000
--- a/vendor/github.com/coreos/yaml/scannerc.go
+++ /dev/null
@@ -1,2710 +0,0 @@
-package yaml
-
-import (
- "bytes"
- "fmt"
-)
-
-// Introduction
-// ************
-//
-// The following notes assume that you are familiar with the YAML specification
-// (http://yaml.org/spec/cvs/current.html). We mostly follow it, although in
-// some cases we are less restrictive that it requires.
-//
-// The process of transforming a YAML stream into a sequence of events is
-// divided on two steps: Scanning and Parsing.
-//
-// The Scanner transforms the input stream into a sequence of tokens, while the
-// parser transform the sequence of tokens produced by the Scanner into a
-// sequence of parsing events.
-//
-// The Scanner is rather clever and complicated. The Parser, on the contrary,
-// is a straightforward implementation of a recursive-descendant parser (or,
-// LL(1) parser, as it is usually called).
-//
-// Actually there are two issues of Scanning that might be called "clever", the
-// rest is quite straightforward. The issues are "block collection start" and
-// "simple keys". Both issues are explained below in details.
-//
-// Here the Scanning step is explained and implemented. We start with the list
-// of all the tokens produced by the Scanner together with short descriptions.
-//
-// Now, tokens:
-//
-// STREAM-START(encoding) # The stream start.
-// STREAM-END # The stream end.
-// VERSION-DIRECTIVE(major,minor) # The '%YAML' directive.
-// TAG-DIRECTIVE(handle,prefix) # The '%TAG' directive.
-// DOCUMENT-START # '---'
-// DOCUMENT-END # '...'
-// BLOCK-SEQUENCE-START # Indentation increase denoting a block
-// BLOCK-MAPPING-START # sequence or a block mapping.
-// BLOCK-END # Indentation decrease.
-// FLOW-SEQUENCE-START # '['
-// FLOW-SEQUENCE-END # ']'
-// BLOCK-SEQUENCE-START # '{'
-// BLOCK-SEQUENCE-END # '}'
-// BLOCK-ENTRY # '-'
-// FLOW-ENTRY # ','
-// KEY # '?' or nothing (simple keys).
-// VALUE # ':'
-// ALIAS(anchor) # '*anchor'
-// ANCHOR(anchor) # '&anchor'
-// TAG(handle,suffix) # '!handle!suffix'
-// SCALAR(value,style) # A scalar.
-//
-// The following two tokens are "virtual" tokens denoting the beginning and the
-// end of the stream:
-//
-// STREAM-START(encoding)
-// STREAM-END
-//
-// We pass the information about the input stream encoding with the
-// STREAM-START token.
-//
-// The next two tokens are responsible for tags:
-//
-// VERSION-DIRECTIVE(major,minor)
-// TAG-DIRECTIVE(handle,prefix)
-//
-// Example:
-//
-// %YAML 1.1
-// %TAG ! !foo
-// %TAG !yaml! tag:yaml.org,2002:
-// ---
-//
-// The correspoding sequence of tokens:
-//
-// STREAM-START(utf-8)
-// VERSION-DIRECTIVE(1,1)
-// TAG-DIRECTIVE("!","!foo")
-// TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:")
-// DOCUMENT-START
-// STREAM-END
-//
-// Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole
-// line.
-//
-// The document start and end indicators are represented by:
-//
-// DOCUMENT-START
-// DOCUMENT-END
-//
-// Note that if a YAML stream contains an implicit document (without '---'
-// and '...' indicators), no DOCUMENT-START and DOCUMENT-END tokens will be
-// produced.
-//
-// In the following examples, we present whole documents together with the
-// produced tokens.
-//
-// 1. An implicit document:
-//
-// 'a scalar'
-//
-// Tokens:
-//
-// STREAM-START(utf-8)
-// SCALAR("a scalar",single-quoted)
-// STREAM-END
-//
-// 2. An explicit document:
-//
-// ---
-// 'a scalar'
-// ...
-//
-// Tokens:
-//
-// STREAM-START(utf-8)
-// DOCUMENT-START
-// SCALAR("a scalar",single-quoted)
-// DOCUMENT-END
-// STREAM-END
-//
-// 3. Several documents in a stream:
-//
-// 'a scalar'
-// ---
-// 'another scalar'
-// ---
-// 'yet another scalar'
-//
-// Tokens:
-//
-// STREAM-START(utf-8)
-// SCALAR("a scalar",single-quoted)
-// DOCUMENT-START
-// SCALAR("another scalar",single-quoted)
-// DOCUMENT-START
-// SCALAR("yet another scalar",single-quoted)
-// STREAM-END
-//
-// We have already introduced the SCALAR token above. The following tokens are
-// used to describe aliases, anchors, tag, and scalars:
-//
-// ALIAS(anchor)
-// ANCHOR(anchor)
-// TAG(handle,suffix)
-// SCALAR(value,style)
-//
-// The following series of examples illustrate the usage of these tokens:
-//
-// 1. A recursive sequence:
-//
-// &A [ *A ]
-//
-// Tokens:
-//
-// STREAM-START(utf-8)
-// ANCHOR("A")
-// FLOW-SEQUENCE-START
-// ALIAS("A")
-// FLOW-SEQUENCE-END
-// STREAM-END
-//
-// 2. A tagged scalar:
-//
-// !!float "3.14" # A good approximation.
-//
-// Tokens:
-//
-// STREAM-START(utf-8)
-// TAG("!!","float")
-// SCALAR("3.14",double-quoted)
-// STREAM-END
-//
-// 3. Various scalar styles:
-//
-// --- # Implicit empty plain scalars do not produce tokens.
-// --- a plain scalar
-// --- 'a single-quoted scalar'
-// --- "a double-quoted scalar"
-// --- |-
-// a literal scalar
-// --- >-
-// a folded
-// scalar
-//
-// Tokens:
-//
-// STREAM-START(utf-8)
-// DOCUMENT-START
-// DOCUMENT-START
-// SCALAR("a plain scalar",plain)
-// DOCUMENT-START
-// SCALAR("a single-quoted scalar",single-quoted)
-// DOCUMENT-START
-// SCALAR("a double-quoted scalar",double-quoted)
-// DOCUMENT-START
-// SCALAR("a literal scalar",literal)
-// DOCUMENT-START
-// SCALAR("a folded scalar",folded)
-// STREAM-END
-//
-// Now it's time to review collection-related tokens. We will start with
-// flow collections:
-//
-// FLOW-SEQUENCE-START
-// FLOW-SEQUENCE-END
-// FLOW-MAPPING-START
-// FLOW-MAPPING-END
-// FLOW-ENTRY
-// KEY
-// VALUE
-//
-// The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and
-// FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}'
-// correspondingly. FLOW-ENTRY represent the ',' indicator. Finally the
-// indicators '?' and ':', which are used for denoting mapping keys and values,
-// are represented by the KEY and VALUE tokens.
-//
-// The following examples show flow collections:
-//
-// 1. A flow sequence:
-//
-// [item 1, item 2, item 3]
-//
-// Tokens:
-//
-// STREAM-START(utf-8)
-// FLOW-SEQUENCE-START
-// SCALAR("item 1",plain)
-// FLOW-ENTRY
-// SCALAR("item 2",plain)
-// FLOW-ENTRY
-// SCALAR("item 3",plain)
-// FLOW-SEQUENCE-END
-// STREAM-END
-//
-// 2. A flow mapping:
-//
-// {
-// a simple key: a value, # Note that the KEY token is produced.
-// ? a complex key: another value,
-// }
-//
-// Tokens:
-//
-// STREAM-START(utf-8)
-// FLOW-MAPPING-START
-// KEY
-// SCALAR("a simple key",plain)
-// VALUE
-// SCALAR("a value",plain)
-// FLOW-ENTRY
-// KEY
-// SCALAR("a complex key",plain)
-// VALUE
-// SCALAR("another value",plain)
-// FLOW-ENTRY
-// FLOW-MAPPING-END
-// STREAM-END
-//
-// A simple key is a key which is not denoted by the '?' indicator. Note that
-// the Scanner still produce the KEY token whenever it encounters a simple key.
-//
-// For scanning block collections, the following tokens are used (note that we
-// repeat KEY and VALUE here):
-//
-// BLOCK-SEQUENCE-START
-// BLOCK-MAPPING-START
-// BLOCK-END
-// BLOCK-ENTRY
-// KEY
-// VALUE
-//
-// The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote indentation
-// increase that precedes a block collection (cf. the INDENT token in Python).
-// The token BLOCK-END denote indentation decrease that ends a block collection
-// (cf. the DEDENT token in Python). However YAML has some syntax pecularities
-// that makes detections of these tokens more complex.
-//
-// The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators
-// '-', '?', and ':' correspondingly.
-//
-// The following examples show how the tokens BLOCK-SEQUENCE-START,
-// BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner:
-//
-// 1. Block sequences:
-//
-// - item 1
-// - item 2
-// -
-// - item 3.1
-// - item 3.2
-// -
-// key 1: value 1
-// key 2: value 2
-//
-// Tokens:
-//
-// STREAM-START(utf-8)
-// BLOCK-SEQUENCE-START
-// BLOCK-ENTRY
-// SCALAR("item 1",plain)
-// BLOCK-ENTRY
-// SCALAR("item 2",plain)
-// BLOCK-ENTRY
-// BLOCK-SEQUENCE-START
-// BLOCK-ENTRY
-// SCALAR("item 3.1",plain)
-// BLOCK-ENTRY
-// SCALAR("item 3.2",plain)
-// BLOCK-END
-// BLOCK-ENTRY
-// BLOCK-MAPPING-START
-// KEY
-// SCALAR("key 1",plain)
-// VALUE
-// SCALAR("value 1",plain)
-// KEY
-// SCALAR("key 2",plain)
-// VALUE
-// SCALAR("value 2",plain)
-// BLOCK-END
-// BLOCK-END
-// STREAM-END
-//
-// 2. Block mappings:
-//
-// a simple key: a value # The KEY token is produced here.
-// ? a complex key
-// : another value
-// a mapping:
-// key 1: value 1
-// key 2: value 2
-// a sequence:
-// - item 1
-// - item 2
-//
-// Tokens:
-//
-// STREAM-START(utf-8)
-// BLOCK-MAPPING-START
-// KEY
-// SCALAR("a simple key",plain)
-// VALUE
-// SCALAR("a value",plain)
-// KEY
-// SCALAR("a complex key",plain)
-// VALUE
-// SCALAR("another value",plain)
-// KEY
-// SCALAR("a mapping",plain)
-// BLOCK-MAPPING-START
-// KEY
-// SCALAR("key 1",plain)
-// VALUE
-// SCALAR("value 1",plain)
-// KEY
-// SCALAR("key 2",plain)
-// VALUE
-// SCALAR("value 2",plain)
-// BLOCK-END
-// KEY
-// SCALAR("a sequence",plain)
-// VALUE
-// BLOCK-SEQUENCE-START
-// BLOCK-ENTRY
-// SCALAR("item 1",plain)
-// BLOCK-ENTRY
-// SCALAR("item 2",plain)
-// BLOCK-END
-// BLOCK-END
-// STREAM-END
-//
-// YAML does not always require to start a new block collection from a new
-// line. If the current line contains only '-', '?', and ':' indicators, a new
-// block collection may start at the current line. The following examples
-// illustrate this case:
-//
-// 1. Collections in a sequence:
-//
-// - - item 1
-// - item 2
-// - key 1: value 1
-// key 2: value 2
-// - ? complex key
-// : complex value
-//
-// Tokens:
-//
-// STREAM-START(utf-8)
-// BLOCK-SEQUENCE-START
-// BLOCK-ENTRY
-// BLOCK-SEQUENCE-START
-// BLOCK-ENTRY
-// SCALAR("item 1",plain)
-// BLOCK-ENTRY
-// SCALAR("item 2",plain)
-// BLOCK-END
-// BLOCK-ENTRY
-// BLOCK-MAPPING-START
-// KEY
-// SCALAR("key 1",plain)
-// VALUE
-// SCALAR("value 1",plain)
-// KEY
-// SCALAR("key 2",plain)
-// VALUE
-// SCALAR("value 2",plain)
-// BLOCK-END
-// BLOCK-ENTRY
-// BLOCK-MAPPING-START
-// KEY
-// SCALAR("complex key")
-// VALUE
-// SCALAR("complex value")
-// BLOCK-END
-// BLOCK-END
-// STREAM-END
-//
-// 2. Collections in a mapping:
-//
-// ? a sequence
-// : - item 1
-// - item 2
-// ? a mapping
-// : key 1: value 1
-// key 2: value 2
-//
-// Tokens:
-//
-// STREAM-START(utf-8)
-// BLOCK-MAPPING-START
-// KEY
-// SCALAR("a sequence",plain)
-// VALUE
-// BLOCK-SEQUENCE-START
-// BLOCK-ENTRY
-// SCALAR("item 1",plain)
-// BLOCK-ENTRY
-// SCALAR("item 2",plain)
-// BLOCK-END
-// KEY
-// SCALAR("a mapping",plain)
-// VALUE
-// BLOCK-MAPPING-START
-// KEY
-// SCALAR("key 1",plain)
-// VALUE
-// SCALAR("value 1",plain)
-// KEY
-// SCALAR("key 2",plain)
-// VALUE
-// SCALAR("value 2",plain)
-// BLOCK-END
-// BLOCK-END
-// STREAM-END
-//
-// YAML also permits non-indented sequences if they are included into a block
-// mapping. In this case, the token BLOCK-SEQUENCE-START is not produced:
-//
-// key:
-// - item 1 # BLOCK-SEQUENCE-START is NOT produced here.
-// - item 2
-//
-// Tokens:
-//
-// STREAM-START(utf-8)
-// BLOCK-MAPPING-START
-// KEY
-// SCALAR("key",plain)
-// VALUE
-// BLOCK-ENTRY
-// SCALAR("item 1",plain)
-// BLOCK-ENTRY
-// SCALAR("item 2",plain)
-// BLOCK-END
-//
-
-// Ensure that the buffer contains the required number of characters.
-// Return true on success, false on failure (reader error or memory error).
-func cache(parser *yaml_parser_t, length int) bool {
- // [Go] This was inlined: !cache(A, B) -> unread < B && !update(A, B)
- return parser.unread >= length || yaml_parser_update_buffer(parser, length)
-}
-
-// Advance the buffer pointer.
-func skip(parser *yaml_parser_t) {
- parser.mark.index++
- parser.mark.column++
- parser.unread--
- parser.buffer_pos += width(parser.buffer[parser.buffer_pos])
-}
-
-func skip_line(parser *yaml_parser_t) {
- if is_crlf(parser.buffer, parser.buffer_pos) {
- parser.mark.index += 2
- parser.mark.column = 0
- parser.mark.line++
- parser.unread -= 2
- parser.buffer_pos += 2
- } else if is_break(parser.buffer, parser.buffer_pos) {
- parser.mark.index++
- parser.mark.column = 0
- parser.mark.line++
- parser.unread--
- parser.buffer_pos += width(parser.buffer[parser.buffer_pos])
- }
-}
-
-// Copy a character to a string buffer and advance pointers.
-func read(parser *yaml_parser_t, s []byte) []byte {
- w := width(parser.buffer[parser.buffer_pos])
- if w == 0 {
- panic("invalid character sequence")
- }
- if len(s) == 0 {
- s = make([]byte, 0, 32)
- }
- if w == 1 && len(s)+w <= cap(s) {
- s = s[:len(s)+1]
- s[len(s)-1] = parser.buffer[parser.buffer_pos]
- parser.buffer_pos++
- } else {
- s = append(s, parser.buffer[parser.buffer_pos:parser.buffer_pos+w]...)
- parser.buffer_pos += w
- }
- parser.mark.index++
- parser.mark.column++
- parser.unread--
- return s
-}
-
-// Copy a line break character to a string buffer and advance pointers.
-func read_line(parser *yaml_parser_t, s []byte) []byte {
- buf := parser.buffer
- pos := parser.buffer_pos
- switch {
- case buf[pos] == '\r' && buf[pos+1] == '\n':
- // CR LF . LF
- s = append(s, '\n')
- parser.buffer_pos += 2
- parser.mark.index++
- parser.unread--
- case buf[pos] == '\r' || buf[pos] == '\n':
- // CR|LF . LF
- s = append(s, '\n')
- parser.buffer_pos += 1
- case buf[pos] == '\xC2' && buf[pos+1] == '\x85':
- // NEL . LF
- s = append(s, '\n')
- parser.buffer_pos += 2
- case buf[pos] == '\xE2' && buf[pos+1] == '\x80' && (buf[pos+2] == '\xA8' || buf[pos+2] == '\xA9'):
- // LS|PS . LS|PS
- s = append(s, buf[parser.buffer_pos:pos+3]...)
- parser.buffer_pos += 3
- default:
- return s
- }
- parser.mark.index++
- parser.mark.column = 0
- parser.mark.line++
- parser.unread--
- return s
-}
-
-// Get the next token.
-func yaml_parser_scan(parser *yaml_parser_t, token *yaml_token_t) bool {
- // Erase the token object.
- *token = yaml_token_t{} // [Go] Is this necessary?
-
- // No tokens after STREAM-END or error.
- if parser.stream_end_produced || parser.error != yaml_NO_ERROR {
- return true
- }
-
- // Ensure that the tokens queue contains enough tokens.
- if !parser.token_available {
- if !yaml_parser_fetch_more_tokens(parser) {
- return false
- }
- }
-
- // Fetch the next token from the queue.
- *token = parser.tokens[parser.tokens_head]
- parser.tokens_head++
- parser.tokens_parsed++
- parser.token_available = false
-
- if token.typ == yaml_STREAM_END_TOKEN {
- parser.stream_end_produced = true
- }
- return true
-}
-
-// Set the scanner error and return false.
-func yaml_parser_set_scanner_error(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string) bool {
- parser.error = yaml_SCANNER_ERROR
- parser.context = context
- parser.context_mark = context_mark
- parser.problem = problem
- parser.problem_mark = parser.mark
- return false
-}
-
-func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, context_mark yaml_mark_t, problem string) bool {
- context := "while parsing a tag"
- if directive {
- context = "while parsing a %TAG directive"
- }
- return yaml_parser_set_scanner_error(parser, context, context_mark, "did not find URI escaped octet")
-}
-
-func trace(args ...interface{}) func() {
- pargs := append([]interface{}{"+++"}, args...)
- fmt.Println(pargs...)
- pargs = append([]interface{}{"---"}, args...)
- return func() { fmt.Println(pargs...) }
-}
-
-// Ensure that the tokens queue contains at least one token which can be
-// returned to the Parser.
-func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool {
- // While we need more tokens to fetch, do it.
- for {
- // Check if we really need to fetch more tokens.
- need_more_tokens := false
-
- if parser.tokens_head == len(parser.tokens) {
- // Queue is empty.
- need_more_tokens = true
- } else {
- // Check if any potential simple key may occupy the head position.
- if !yaml_parser_stale_simple_keys(parser) {
- return false
- }
-
- for i := range parser.simple_keys {
- simple_key := &parser.simple_keys[i]
- if simple_key.possible && simple_key.token_number == parser.tokens_parsed {
- need_more_tokens = true
- break
- }
- }
- }
-
- // We are finished.
- if !need_more_tokens {
- break
- }
- // Fetch the next token.
- if !yaml_parser_fetch_next_token(parser) {
- return false
- }
- }
-
- parser.token_available = true
- return true
-}
-
-// The dispatcher for token fetchers.
-func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool {
- // Ensure that the buffer is initialized.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
-
- // Check if we just started scanning. Fetch STREAM-START then.
- if !parser.stream_start_produced {
- return yaml_parser_fetch_stream_start(parser)
- }
-
- // Eat whitespaces and comments until we reach the next token.
- if !yaml_parser_scan_to_next_token(parser) {
- return false
- }
-
- // Remove obsolete potential simple keys.
- if !yaml_parser_stale_simple_keys(parser) {
- return false
- }
-
- // Check the indentation level against the current column.
- if !yaml_parser_unroll_indent(parser, parser.mark.column) {
- return false
- }
-
- // Ensure that the buffer contains at least 4 characters. 4 is the length
- // of the longest indicators ('--- ' and '... ').
- if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) {
- return false
- }
-
- // Is it the end of the stream?
- if is_z(parser.buffer, parser.buffer_pos) {
- return yaml_parser_fetch_stream_end(parser)
- }
-
- // Is it a directive?
- if parser.mark.column == 0 && parser.buffer[parser.buffer_pos] == '%' {
- return yaml_parser_fetch_directive(parser)
- }
-
- buf := parser.buffer
- pos := parser.buffer_pos
-
- // Is it the document start indicator?
- if parser.mark.column == 0 && buf[pos] == '-' && buf[pos+1] == '-' && buf[pos+2] == '-' && is_blankz(buf, pos+3) {
- return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_START_TOKEN)
- }
-
- // Is it the document end indicator?
- if parser.mark.column == 0 && buf[pos] == '.' && buf[pos+1] == '.' && buf[pos+2] == '.' && is_blankz(buf, pos+3) {
- return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN)
- }
-
- // Is it the flow sequence start indicator?
- if buf[pos] == '[' {
- return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN)
- }
-
- // Is it the flow mapping start indicator?
- if parser.buffer[parser.buffer_pos] == '{' {
- return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_MAPPING_START_TOKEN)
- }
-
- // Is it the flow sequence end indicator?
- if parser.buffer[parser.buffer_pos] == ']' {
- return yaml_parser_fetch_flow_collection_end(parser,
- yaml_FLOW_SEQUENCE_END_TOKEN)
- }
-
- // Is it the flow mapping end indicator?
- if parser.buffer[parser.buffer_pos] == '}' {
- return yaml_parser_fetch_flow_collection_end(parser,
- yaml_FLOW_MAPPING_END_TOKEN)
- }
-
- // Is it the flow entry indicator?
- if parser.buffer[parser.buffer_pos] == ',' {
- return yaml_parser_fetch_flow_entry(parser)
- }
-
- // Is it the block entry indicator?
- if parser.buffer[parser.buffer_pos] == '-' && is_blankz(parser.buffer, parser.buffer_pos+1) {
- return yaml_parser_fetch_block_entry(parser)
- }
-
- // Is it the key indicator?
- if parser.buffer[parser.buffer_pos] == '?' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) {
- return yaml_parser_fetch_key(parser)
- }
-
- // Is it the value indicator?
- if parser.buffer[parser.buffer_pos] == ':' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) {
- return yaml_parser_fetch_value(parser)
- }
-
- // Is it an alias?
- if parser.buffer[parser.buffer_pos] == '*' {
- return yaml_parser_fetch_anchor(parser, yaml_ALIAS_TOKEN)
- }
-
- // Is it an anchor?
- if parser.buffer[parser.buffer_pos] == '&' {
- return yaml_parser_fetch_anchor(parser, yaml_ANCHOR_TOKEN)
- }
-
- // Is it a tag?
- if parser.buffer[parser.buffer_pos] == '!' {
- return yaml_parser_fetch_tag(parser)
- }
-
- // Is it a literal scalar?
- if parser.buffer[parser.buffer_pos] == '|' && parser.flow_level == 0 {
- return yaml_parser_fetch_block_scalar(parser, true)
- }
-
- // Is it a folded scalar?
- if parser.buffer[parser.buffer_pos] == '>' && parser.flow_level == 0 {
- return yaml_parser_fetch_block_scalar(parser, false)
- }
-
- // Is it a single-quoted scalar?
- if parser.buffer[parser.buffer_pos] == '\'' {
- return yaml_parser_fetch_flow_scalar(parser, true)
- }
-
- // Is it a double-quoted scalar?
- if parser.buffer[parser.buffer_pos] == '"' {
- return yaml_parser_fetch_flow_scalar(parser, false)
- }
-
- // Is it a plain scalar?
- //
- // A plain scalar may start with any non-blank characters except
- //
- // '-', '?', ':', ',', '[', ']', '{', '}',
- // '#', '&', '*', '!', '|', '>', '\'', '\"',
- // '%', '@', '`'.
- //
- // In the block context (and, for the '-' indicator, in the flow context
- // too), it may also start with the characters
- //
- // '-', '?', ':'
- //
- // if it is followed by a non-space character.
- //
- // The last rule is more restrictive than the specification requires.
- // [Go] Make this logic more reasonable.
- //switch parser.buffer[parser.buffer_pos] {
- //case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`':
- //}
- if !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '-' ||
- parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':' ||
- parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '[' ||
- parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' ||
- parser.buffer[parser.buffer_pos] == '}' || parser.buffer[parser.buffer_pos] == '#' ||
- parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '*' ||
- parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '|' ||
- parser.buffer[parser.buffer_pos] == '>' || parser.buffer[parser.buffer_pos] == '\'' ||
- parser.buffer[parser.buffer_pos] == '"' || parser.buffer[parser.buffer_pos] == '%' ||
- parser.buffer[parser.buffer_pos] == '@' || parser.buffer[parser.buffer_pos] == '`') ||
- (parser.buffer[parser.buffer_pos] == '-' && !is_blank(parser.buffer, parser.buffer_pos+1)) ||
- (parser.flow_level == 0 &&
- (parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':') &&
- !is_blankz(parser.buffer, parser.buffer_pos+1)) {
- return yaml_parser_fetch_plain_scalar(parser)
- }
-
- // If we don't determine the token type so far, it is an error.
- return yaml_parser_set_scanner_error(parser,
- "while scanning for the next token", parser.mark,
- "found character that cannot start any token")
-}
-
-// Check the list of potential simple keys and remove the positions that
-// cannot contain simple keys anymore.
-func yaml_parser_stale_simple_keys(parser *yaml_parser_t) bool {
- // Check for a potential simple key for each flow level.
- for i := range parser.simple_keys {
- simple_key := &parser.simple_keys[i]
-
- // The specification requires that a simple key
- //
- // - is limited to a single line,
- // - is shorter than 1024 characters.
- if simple_key.possible && (simple_key.mark.line < parser.mark.line || simple_key.mark.index+1024 < parser.mark.index) {
-
- // Check if the potential simple key to be removed is required.
- if simple_key.required {
- return yaml_parser_set_scanner_error(parser,
- "while scanning a simple key", simple_key.mark,
- "could not find expected ':'")
- }
- simple_key.possible = false
- }
- }
- return true
-}
-
-// Check if a simple key may start at the current position and add it if
-// needed.
-func yaml_parser_save_simple_key(parser *yaml_parser_t) bool {
- // A simple key is required at the current position if the scanner is in
- // the block context and the current column coincides with the indentation
- // level.
-
- required := parser.flow_level == 0 && parser.indent == parser.mark.column
-
- // A simple key is required only when it is the first token in the current
- // line. Therefore it is always allowed. But we add a check anyway.
- if required && !parser.simple_key_allowed {
- panic("should not happen")
- }
-
- //
- // If the current position may start a simple key, save it.
- //
- if parser.simple_key_allowed {
- simple_key := yaml_simple_key_t{
- possible: true,
- required: required,
- token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head),
- }
- simple_key.mark = parser.mark
-
- if !yaml_parser_remove_simple_key(parser) {
- return false
- }
- parser.simple_keys[len(parser.simple_keys)-1] = simple_key
- }
- return true
-}
-
-// Remove a potential simple key at the current flow level.
-func yaml_parser_remove_simple_key(parser *yaml_parser_t) bool {
- i := len(parser.simple_keys) - 1
- if parser.simple_keys[i].possible {
- // If the key is required, it is an error.
- if parser.simple_keys[i].required {
- return yaml_parser_set_scanner_error(parser,
- "while scanning a simple key", parser.simple_keys[i].mark,
- "could not find expected ':'")
- }
- }
- // Remove the key from the stack.
- parser.simple_keys[i].possible = false
- return true
-}
-
-// Increase the flow level and resize the simple key list if needed.
-func yaml_parser_increase_flow_level(parser *yaml_parser_t) bool {
- // Reset the simple key on the next level.
- parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{})
-
- // Increase the flow level.
- parser.flow_level++
- return true
-}
-
-// Decrease the flow level.
-func yaml_parser_decrease_flow_level(parser *yaml_parser_t) bool {
- if parser.flow_level > 0 {
- parser.flow_level--
- parser.simple_keys = parser.simple_keys[:len(parser.simple_keys)-1]
- }
- return true
-}
-
-// Push the current indentation level to the stack and set the new level
-// the current column is greater than the indentation level. In this case,
-// append or insert the specified token into the token queue.
-func yaml_parser_roll_indent(parser *yaml_parser_t, column, number int, typ yaml_token_type_t, mark yaml_mark_t) bool {
- // In the flow context, do nothing.
- if parser.flow_level > 0 {
- return true
- }
-
- if parser.indent < column {
- // Push the current indentation level to the stack and set the new
- // indentation level.
- parser.indents = append(parser.indents, parser.indent)
- parser.indent = column
-
- // Create a token and insert it into the queue.
- token := yaml_token_t{
- typ: typ,
- start_mark: mark,
- end_mark: mark,
- }
- if number > -1 {
- number -= parser.tokens_parsed
- }
- yaml_insert_token(parser, number, &token)
- }
- return true
-}
-
-// Pop indentation levels from the indents stack until the current level
-// becomes less or equal to the column. For each intendation level, append
-// the BLOCK-END token.
-func yaml_parser_unroll_indent(parser *yaml_parser_t, column int) bool {
- // In the flow context, do nothing.
- if parser.flow_level > 0 {
- return true
- }
-
- // Loop through the intendation levels in the stack.
- for parser.indent > column {
- // Create a token and append it to the queue.
- token := yaml_token_t{
- typ: yaml_BLOCK_END_TOKEN,
- start_mark: parser.mark,
- end_mark: parser.mark,
- }
- yaml_insert_token(parser, -1, &token)
-
- // Pop the indentation level.
- parser.indent = parser.indents[len(parser.indents)-1]
- parser.indents = parser.indents[:len(parser.indents)-1]
- }
- return true
-}
-
-// Initialize the scanner and produce the STREAM-START token.
-func yaml_parser_fetch_stream_start(parser *yaml_parser_t) bool {
-
- // Set the initial indentation.
- parser.indent = -1
-
- // Initialize the simple key stack.
- parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{})
-
- // A simple key is allowed at the beginning of the stream.
- parser.simple_key_allowed = true
-
- // We have started.
- parser.stream_start_produced = true
-
- // Create the STREAM-START token and append it to the queue.
- token := yaml_token_t{
- typ: yaml_STREAM_START_TOKEN,
- start_mark: parser.mark,
- end_mark: parser.mark,
- encoding: parser.encoding,
- }
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce the STREAM-END token and shut down the scanner.
-func yaml_parser_fetch_stream_end(parser *yaml_parser_t) bool {
-
- // Force new line.
- if parser.mark.column != 0 {
- parser.mark.column = 0
- parser.mark.line++
- }
-
- // Reset the indentation level.
- if !yaml_parser_unroll_indent(parser, -1) {
- return false
- }
-
- // Reset simple keys.
- if !yaml_parser_remove_simple_key(parser) {
- return false
- }
-
- parser.simple_key_allowed = false
-
- // Create the STREAM-END token and append it to the queue.
- token := yaml_token_t{
- typ: yaml_STREAM_END_TOKEN,
- start_mark: parser.mark,
- end_mark: parser.mark,
- }
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token.
-func yaml_parser_fetch_directive(parser *yaml_parser_t) bool {
- // Reset the indentation level.
- if !yaml_parser_unroll_indent(parser, -1) {
- return false
- }
-
- // Reset simple keys.
- if !yaml_parser_remove_simple_key(parser) {
- return false
- }
-
- parser.simple_key_allowed = false
-
- // Create the YAML-DIRECTIVE or TAG-DIRECTIVE token.
- token := yaml_token_t{}
- if !yaml_parser_scan_directive(parser, &token) {
- return false
- }
- // Append the token to the queue.
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce the DOCUMENT-START or DOCUMENT-END token.
-func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_type_t) bool {
- // Reset the indentation level.
- if !yaml_parser_unroll_indent(parser, -1) {
- return false
- }
-
- // Reset simple keys.
- if !yaml_parser_remove_simple_key(parser) {
- return false
- }
-
- parser.simple_key_allowed = false
-
- // Consume the token.
- start_mark := parser.mark
-
- skip(parser)
- skip(parser)
- skip(parser)
-
- end_mark := parser.mark
-
- // Create the DOCUMENT-START or DOCUMENT-END token.
- token := yaml_token_t{
- typ: typ,
- start_mark: start_mark,
- end_mark: end_mark,
- }
- // Append the token to the queue.
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
-func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool {
- // The indicators '[' and '{' may start a simple key.
- if !yaml_parser_save_simple_key(parser) {
- return false
- }
-
- // Increase the flow level.
- if !yaml_parser_increase_flow_level(parser) {
- return false
- }
-
- // A simple key may follow the indicators '[' and '{'.
- parser.simple_key_allowed = true
-
- // Consume the token.
- start_mark := parser.mark
- skip(parser)
- end_mark := parser.mark
-
- // Create the FLOW-SEQUENCE-START of FLOW-MAPPING-START token.
- token := yaml_token_t{
- typ: typ,
- start_mark: start_mark,
- end_mark: end_mark,
- }
- // Append the token to the queue.
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token.
-func yaml_parser_fetch_flow_collection_end(parser *yaml_parser_t, typ yaml_token_type_t) bool {
- // Reset any potential simple key on the current flow level.
- if !yaml_parser_remove_simple_key(parser) {
- return false
- }
-
- // Decrease the flow level.
- if !yaml_parser_decrease_flow_level(parser) {
- return false
- }
-
- // No simple keys after the indicators ']' and '}'.
- parser.simple_key_allowed = false
-
- // Consume the token.
-
- start_mark := parser.mark
- skip(parser)
- end_mark := parser.mark
-
- // Create the FLOW-SEQUENCE-END of FLOW-MAPPING-END token.
- token := yaml_token_t{
- typ: typ,
- start_mark: start_mark,
- end_mark: end_mark,
- }
- // Append the token to the queue.
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce the FLOW-ENTRY token.
-func yaml_parser_fetch_flow_entry(parser *yaml_parser_t) bool {
- // Reset any potential simple keys on the current flow level.
- if !yaml_parser_remove_simple_key(parser) {
- return false
- }
-
- // Simple keys are allowed after ','.
- parser.simple_key_allowed = true
-
- // Consume the token.
- start_mark := parser.mark
- skip(parser)
- end_mark := parser.mark
-
- // Create the FLOW-ENTRY token and append it to the queue.
- token := yaml_token_t{
- typ: yaml_FLOW_ENTRY_TOKEN,
- start_mark: start_mark,
- end_mark: end_mark,
- }
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce the BLOCK-ENTRY token.
-func yaml_parser_fetch_block_entry(parser *yaml_parser_t) bool {
- // Check if the scanner is in the block context.
- if parser.flow_level == 0 {
- // Check if we are allowed to start a new entry.
- if !parser.simple_key_allowed {
- return yaml_parser_set_scanner_error(parser, "", parser.mark,
- "block sequence entries are not allowed in this context")
- }
- // Add the BLOCK-SEQUENCE-START token if needed.
- if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_SEQUENCE_START_TOKEN, parser.mark) {
- return false
- }
- } else {
- // It is an error for the '-' indicator to occur in the flow context,
- // but we let the Parser detect and report about it because the Parser
- // is able to point to the context.
- }
-
- // Reset any potential simple keys on the current flow level.
- if !yaml_parser_remove_simple_key(parser) {
- return false
- }
-
- // Simple keys are allowed after '-'.
- parser.simple_key_allowed = true
-
- // Consume the token.
- start_mark := parser.mark
- skip(parser)
- end_mark := parser.mark
-
- // Create the BLOCK-ENTRY token and append it to the queue.
- token := yaml_token_t{
- typ: yaml_BLOCK_ENTRY_TOKEN,
- start_mark: start_mark,
- end_mark: end_mark,
- }
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce the KEY token.
-func yaml_parser_fetch_key(parser *yaml_parser_t) bool {
-
- // In the block context, additional checks are required.
- if parser.flow_level == 0 {
- // Check if we are allowed to start a new key (not nessesary simple).
- if !parser.simple_key_allowed {
- return yaml_parser_set_scanner_error(parser, "", parser.mark,
- "mapping keys are not allowed in this context")
- }
- // Add the BLOCK-MAPPING-START token if needed.
- if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) {
- return false
- }
- }
-
- // Reset any potential simple keys on the current flow level.
- if !yaml_parser_remove_simple_key(parser) {
- return false
- }
-
- // Simple keys are allowed after '?' in the block context.
- parser.simple_key_allowed = parser.flow_level == 0
-
- // Consume the token.
- start_mark := parser.mark
- skip(parser)
- end_mark := parser.mark
-
- // Create the KEY token and append it to the queue.
- token := yaml_token_t{
- typ: yaml_KEY_TOKEN,
- start_mark: start_mark,
- end_mark: end_mark,
- }
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce the VALUE token.
-func yaml_parser_fetch_value(parser *yaml_parser_t) bool {
-
- simple_key := &parser.simple_keys[len(parser.simple_keys)-1]
-
- // Have we found a simple key?
- if simple_key.possible {
- // Create the KEY token and insert it into the queue.
- token := yaml_token_t{
- typ: yaml_KEY_TOKEN,
- start_mark: simple_key.mark,
- end_mark: simple_key.mark,
- }
- yaml_insert_token(parser, simple_key.token_number-parser.tokens_parsed, &token)
-
- // In the block context, we may need to add the BLOCK-MAPPING-START token.
- if !yaml_parser_roll_indent(parser, simple_key.mark.column,
- simple_key.token_number,
- yaml_BLOCK_MAPPING_START_TOKEN, simple_key.mark) {
- return false
- }
-
- // Remove the simple key.
- simple_key.possible = false
-
- // A simple key cannot follow another simple key.
- parser.simple_key_allowed = false
-
- } else {
- // The ':' indicator follows a complex key.
-
- // In the block context, extra checks are required.
- if parser.flow_level == 0 {
-
- // Check if we are allowed to start a complex value.
- if !parser.simple_key_allowed {
- return yaml_parser_set_scanner_error(parser, "", parser.mark,
- "mapping values are not allowed in this context")
- }
-
- // Add the BLOCK-MAPPING-START token if needed.
- if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) {
- return false
- }
- }
-
- // Simple keys after ':' are allowed in the block context.
- parser.simple_key_allowed = parser.flow_level == 0
- }
-
- // Consume the token.
- start_mark := parser.mark
- skip(parser)
- end_mark := parser.mark
-
- // Create the VALUE token and append it to the queue.
- token := yaml_token_t{
- typ: yaml_VALUE_TOKEN,
- start_mark: start_mark,
- end_mark: end_mark,
- }
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce the ALIAS or ANCHOR token.
-func yaml_parser_fetch_anchor(parser *yaml_parser_t, typ yaml_token_type_t) bool {
- // An anchor or an alias could be a simple key.
- if !yaml_parser_save_simple_key(parser) {
- return false
- }
-
- // A simple key cannot follow an anchor or an alias.
- parser.simple_key_allowed = false
-
- // Create the ALIAS or ANCHOR token and append it to the queue.
- var token yaml_token_t
- if !yaml_parser_scan_anchor(parser, &token, typ) {
- return false
- }
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce the TAG token.
-func yaml_parser_fetch_tag(parser *yaml_parser_t) bool {
- // A tag could be a simple key.
- if !yaml_parser_save_simple_key(parser) {
- return false
- }
-
- // A simple key cannot follow a tag.
- parser.simple_key_allowed = false
-
- // Create the TAG token and append it to the queue.
- var token yaml_token_t
- if !yaml_parser_scan_tag(parser, &token) {
- return false
- }
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens.
-func yaml_parser_fetch_block_scalar(parser *yaml_parser_t, literal bool) bool {
- // Remove any potential simple keys.
- if !yaml_parser_remove_simple_key(parser) {
- return false
- }
-
- // A simple key may follow a block scalar.
- parser.simple_key_allowed = true
-
- // Create the SCALAR token and append it to the queue.
- var token yaml_token_t
- if !yaml_parser_scan_block_scalar(parser, &token, literal) {
- return false
- }
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens.
-func yaml_parser_fetch_flow_scalar(parser *yaml_parser_t, single bool) bool {
- // A plain scalar could be a simple key.
- if !yaml_parser_save_simple_key(parser) {
- return false
- }
-
- // A simple key cannot follow a flow scalar.
- parser.simple_key_allowed = false
-
- // Create the SCALAR token and append it to the queue.
- var token yaml_token_t
- if !yaml_parser_scan_flow_scalar(parser, &token, single) {
- return false
- }
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Produce the SCALAR(...,plain) token.
-func yaml_parser_fetch_plain_scalar(parser *yaml_parser_t) bool {
- // A plain scalar could be a simple key.
- if !yaml_parser_save_simple_key(parser) {
- return false
- }
-
- // A simple key cannot follow a flow scalar.
- parser.simple_key_allowed = false
-
- // Create the SCALAR token and append it to the queue.
- var token yaml_token_t
- if !yaml_parser_scan_plain_scalar(parser, &token) {
- return false
- }
- yaml_insert_token(parser, -1, &token)
- return true
-}
-
-// Eat whitespaces and comments until the next token is found.
-func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool {
-
- // Until the next token is not found.
- for {
- // Allow the BOM mark to start a line.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- if parser.mark.column == 0 && is_bom(parser.buffer, parser.buffer_pos) {
- skip(parser)
- }
-
- // Eat whitespaces.
- // Tabs are allowed:
- // - in the flow context
- // - in the block context, but not at the beginning of the line or
- // after '-', '?', or ':' (complex value).
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
-
- for parser.buffer[parser.buffer_pos] == ' ' || ((parser.flow_level > 0 || !parser.simple_key_allowed) && parser.buffer[parser.buffer_pos] == '\t') {
- skip(parser)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
-
- // Eat a comment until a line break.
- if parser.buffer[parser.buffer_pos] == '#' {
- for !is_breakz(parser.buffer, parser.buffer_pos) {
- skip(parser)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
- }
-
- // If it is a line break, eat it.
- if is_break(parser.buffer, parser.buffer_pos) {
- if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
- return false
- }
- skip_line(parser)
-
- // In the block context, a new line may start a simple key.
- if parser.flow_level == 0 {
- parser.simple_key_allowed = true
- }
- } else {
- break // We have found a token.
- }
- }
-
- return true
-}
-
-// Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token.
-//
-// Scope:
-// %YAML 1.1 # a comment \n
-// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-// %TAG !yaml! tag:yaml.org,2002: \n
-// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-//
-func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool {
- // Eat '%'.
- start_mark := parser.mark
- skip(parser)
-
- // Scan the directive name.
- var name []byte
- if !yaml_parser_scan_directive_name(parser, start_mark, &name) {
- return false
- }
-
- // Is it a YAML directive?
- if bytes.Equal(name, []byte("YAML")) {
- // Scan the VERSION directive value.
- var major, minor int8
- if !yaml_parser_scan_version_directive_value(parser, start_mark, &major, &minor) {
- return false
- }
- end_mark := parser.mark
-
- // Create a VERSION-DIRECTIVE token.
- *token = yaml_token_t{
- typ: yaml_VERSION_DIRECTIVE_TOKEN,
- start_mark: start_mark,
- end_mark: end_mark,
- major: major,
- minor: minor,
- }
-
- // Is it a TAG directive?
- } else if bytes.Equal(name, []byte("TAG")) {
- // Scan the TAG directive value.
- var handle, prefix []byte
- if !yaml_parser_scan_tag_directive_value(parser, start_mark, &handle, &prefix) {
- return false
- }
- end_mark := parser.mark
-
- // Create a TAG-DIRECTIVE token.
- *token = yaml_token_t{
- typ: yaml_TAG_DIRECTIVE_TOKEN,
- start_mark: start_mark,
- end_mark: end_mark,
- value: handle,
- prefix: prefix,
- }
-
- // Unknown directive.
- } else {
- yaml_parser_set_scanner_error(parser, "while scanning a directive",
- start_mark, "found uknown directive name")
- return false
- }
-
- // Eat the rest of the line including any comments.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
-
- for is_blank(parser.buffer, parser.buffer_pos) {
- skip(parser)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
-
- if parser.buffer[parser.buffer_pos] == '#' {
- for !is_breakz(parser.buffer, parser.buffer_pos) {
- skip(parser)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
- }
-
- // Check if we are at the end of the line.
- if !is_breakz(parser.buffer, parser.buffer_pos) {
- yaml_parser_set_scanner_error(parser, "while scanning a directive",
- start_mark, "did not find expected comment or line break")
- return false
- }
-
- // Eat a line break.
- if is_break(parser.buffer, parser.buffer_pos) {
- if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
- return false
- }
- skip_line(parser)
- }
-
- return true
-}
-
-// Scan the directive name.
-//
-// Scope:
-// %YAML 1.1 # a comment \n
-// ^^^^
-// %TAG !yaml! tag:yaml.org,2002: \n
-// ^^^
-//
-func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark_t, name *[]byte) bool {
- // Consume the directive name.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
-
- var s []byte
- for is_alpha(parser.buffer, parser.buffer_pos) {
- s = read(parser, s)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
-
- // Check if the name is empty.
- if len(s) == 0 {
- yaml_parser_set_scanner_error(parser, "while scanning a directive",
- start_mark, "could not find expected directive name")
- return false
- }
-
- // Check for an blank character after the name.
- if !is_blankz(parser.buffer, parser.buffer_pos) {
- yaml_parser_set_scanner_error(parser, "while scanning a directive",
- start_mark, "found unexpected non-alphabetical character")
- return false
- }
- *name = s
- return true
-}
-
-// Scan the value of VERSION-DIRECTIVE.
-//
-// Scope:
-// %YAML 1.1 # a comment \n
-// ^^^^^^
-func yaml_parser_scan_version_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, major, minor *int8) bool {
- // Eat whitespaces.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- for is_blank(parser.buffer, parser.buffer_pos) {
- skip(parser)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
-
- // Consume the major version number.
- if !yaml_parser_scan_version_directive_number(parser, start_mark, major) {
- return false
- }
-
- // Eat '.'.
- if parser.buffer[parser.buffer_pos] != '.' {
- return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive",
- start_mark, "did not find expected digit or '.' character")
- }
-
- skip(parser)
-
- // Consume the minor version number.
- if !yaml_parser_scan_version_directive_number(parser, start_mark, minor) {
- return false
- }
- return true
-}
-
-const max_number_length = 2
-
-// Scan the version number of VERSION-DIRECTIVE.
-//
-// Scope:
-// %YAML 1.1 # a comment \n
-// ^
-// %YAML 1.1 # a comment \n
-// ^
-func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark yaml_mark_t, number *int8) bool {
-
- // Repeat while the next character is digit.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- var value, length int8
- for is_digit(parser.buffer, parser.buffer_pos) {
- // Check if the number is too long.
- length++
- if length > max_number_length {
- return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive",
- start_mark, "found extremely long version number")
- }
- value = value*10 + int8(as_digit(parser.buffer, parser.buffer_pos))
- skip(parser)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
-
- // Check if the number was present.
- if length == 0 {
- return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive",
- start_mark, "did not find expected version number")
- }
- *number = value
- return true
-}
-
-// Scan the value of a TAG-DIRECTIVE token.
-//
-// Scope:
-// %TAG !yaml! tag:yaml.org,2002: \n
-// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-//
-func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, handle, prefix *[]byte) bool {
- var handle_value, prefix_value []byte
-
- // Eat whitespaces.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
-
- for is_blank(parser.buffer, parser.buffer_pos) {
- skip(parser)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
-
- // Scan a handle.
- if !yaml_parser_scan_tag_handle(parser, true, start_mark, &handle_value) {
- return false
- }
-
- // Expect a whitespace.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- if !is_blank(parser.buffer, parser.buffer_pos) {
- yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive",
- start_mark, "did not find expected whitespace")
- return false
- }
-
- // Eat whitespaces.
- for is_blank(parser.buffer, parser.buffer_pos) {
- skip(parser)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
-
- // Scan a prefix.
- if !yaml_parser_scan_tag_uri(parser, true, nil, start_mark, &prefix_value) {
- return false
- }
-
- // Expect a whitespace or line break.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- if !is_blankz(parser.buffer, parser.buffer_pos) {
- yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive",
- start_mark, "did not find expected whitespace or line break")
- return false
- }
-
- *handle = handle_value
- *prefix = prefix_value
- return true
-}
-
-func yaml_parser_scan_anchor(parser *yaml_parser_t, token *yaml_token_t, typ yaml_token_type_t) bool {
- var s []byte
-
- // Eat the indicator character.
- start_mark := parser.mark
- skip(parser)
-
- // Consume the value.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
-
- for is_alpha(parser.buffer, parser.buffer_pos) {
- s = read(parser, s)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
-
- end_mark := parser.mark
-
- /*
- * Check if length of the anchor is greater than 0 and it is followed by
- * a whitespace character or one of the indicators:
- *
- * '?', ':', ',', ']', '}', '%', '@', '`'.
- */
-
- if len(s) == 0 ||
- !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '?' ||
- parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == ',' ||
- parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '}' ||
- parser.buffer[parser.buffer_pos] == '%' || parser.buffer[parser.buffer_pos] == '@' ||
- parser.buffer[parser.buffer_pos] == '`') {
- context := "while scanning an alias"
- if typ == yaml_ANCHOR_TOKEN {
- context = "while scanning an anchor"
- }
- yaml_parser_set_scanner_error(parser, context, start_mark,
- "did not find expected alphabetic or numeric character")
- return false
- }
-
- // Create a token.
- *token = yaml_token_t{
- typ: typ,
- start_mark: start_mark,
- end_mark: end_mark,
- value: s,
- }
-
- return true
-}
-
-/*
- * Scan a TAG token.
- */
-
-func yaml_parser_scan_tag(parser *yaml_parser_t, token *yaml_token_t) bool {
- var handle, suffix []byte
-
- start_mark := parser.mark
-
- // Check if the tag is in the canonical form.
- if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
- return false
- }
-
- if parser.buffer[parser.buffer_pos+1] == '<' {
- // Keep the handle as ''
-
- // Eat '!<'
- skip(parser)
- skip(parser)
-
- // Consume the tag value.
- if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) {
- return false
- }
-
- // Check for '>' and eat it.
- if parser.buffer[parser.buffer_pos] != '>' {
- yaml_parser_set_scanner_error(parser, "while scanning a tag",
- start_mark, "did not find the expected '>'")
- return false
- }
-
- skip(parser)
- } else {
- // The tag has either the '!suffix' or the '!handle!suffix' form.
-
- // First, try to scan a handle.
- if !yaml_parser_scan_tag_handle(parser, false, start_mark, &handle) {
- return false
- }
-
- // Check if it is, indeed, handle.
- if handle[0] == '!' && len(handle) > 1 && handle[len(handle)-1] == '!' {
- // Scan the suffix now.
- if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) {
- return false
- }
- } else {
- // It wasn't a handle after all. Scan the rest of the tag.
- if !yaml_parser_scan_tag_uri(parser, false, handle, start_mark, &suffix) {
- return false
- }
-
- // Set the handle to '!'.
- handle = []byte{'!'}
-
- // A special case: the '!' tag. Set the handle to '' and the
- // suffix to '!'.
- if len(suffix) == 0 {
- handle, suffix = suffix, handle
- }
- }
- }
-
- // Check the character which ends the tag.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- if !is_blankz(parser.buffer, parser.buffer_pos) {
- yaml_parser_set_scanner_error(parser, "while scanning a tag",
- start_mark, "did not find expected whitespace or line break")
- return false
- }
-
- end_mark := parser.mark
-
- // Create a token.
- *token = yaml_token_t{
- typ: yaml_TAG_TOKEN,
- start_mark: start_mark,
- end_mark: end_mark,
- value: handle,
- suffix: suffix,
- }
- return true
-}
-
-// Scan a tag handle.
-func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, handle *[]byte) bool {
- // Check the initial '!' character.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- if parser.buffer[parser.buffer_pos] != '!' {
- yaml_parser_set_scanner_tag_error(parser, directive,
- start_mark, "did not find expected '!'")
- return false
- }
-
- var s []byte
-
- // Copy the '!' character.
- s = read(parser, s)
-
- // Copy all subsequent alphabetical and numerical characters.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- for is_alpha(parser.buffer, parser.buffer_pos) {
- s = read(parser, s)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
-
- // Check if the trailing character is '!' and copy it.
- if parser.buffer[parser.buffer_pos] == '!' {
- s = read(parser, s)
- } else {
- // It's either the '!' tag or not really a tag handle. If it's a %TAG
- // directive, it's an error. If it's a tag token, it must be a part of URI.
- if directive && !(s[0] == '!' && s[1] == 0) {
- yaml_parser_set_scanner_tag_error(parser, directive,
- start_mark, "did not find expected '!'")
- return false
- }
- }
-
- *handle = s
- return true
-}
-
-// Scan a tag.
-func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte, start_mark yaml_mark_t, uri *[]byte) bool {
- //size_t length = head ? strlen((char *)head) : 0
- var s []byte
-
- // Copy the head if needed.
- //
- // Note that we don't copy the leading '!' character.
- if len(head) > 1 {
- s = append(s, head[1:]...)
- }
-
- // Scan the tag.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
-
- // The set of characters that may appear in URI is as follows:
- //
- // '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&',
- // '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']',
- // '%'.
- // [Go] Convert this into more reasonable logic.
- for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' ||
- parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' ||
- parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' ||
- parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '=' ||
- parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '$' ||
- parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '.' ||
- parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '~' ||
- parser.buffer[parser.buffer_pos] == '*' || parser.buffer[parser.buffer_pos] == '\'' ||
- parser.buffer[parser.buffer_pos] == '(' || parser.buffer[parser.buffer_pos] == ')' ||
- parser.buffer[parser.buffer_pos] == '[' || parser.buffer[parser.buffer_pos] == ']' ||
- parser.buffer[parser.buffer_pos] == '%' {
- // Check if it is a URI-escape sequence.
- if parser.buffer[parser.buffer_pos] == '%' {
- if !yaml_parser_scan_uri_escapes(parser, directive, start_mark, &s) {
- return false
- }
- } else {
- s = read(parser, s)
- }
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
-
- // Check if the tag is non-empty.
- if len(s) == 0 {
- yaml_parser_set_scanner_tag_error(parser, directive,
- start_mark, "did not find expected tag URI")
- return false
- }
- *uri = s
- return true
-}
-
-// Decode an URI-escape sequence corresponding to a single UTF-8 character.
-func yaml_parser_scan_uri_escapes(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, s *[]byte) bool {
-
- // Decode the required number of characters.
- w := 1024
- for w > 0 {
- // Check for a URI-escaped octet.
- if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) {
- return false
- }
-
- if !(parser.buffer[parser.buffer_pos] == '%' &&
- is_hex(parser.buffer, parser.buffer_pos+1) &&
- is_hex(parser.buffer, parser.buffer_pos+2)) {
- return yaml_parser_set_scanner_tag_error(parser, directive,
- start_mark, "did not find URI escaped octet")
- }
-
- // Get the octet.
- octet := byte((as_hex(parser.buffer, parser.buffer_pos+1) << 4) + as_hex(parser.buffer, parser.buffer_pos+2))
-
- // If it is the leading octet, determine the length of the UTF-8 sequence.
- if w == 1024 {
- w = width(octet)
- if w == 0 {
- return yaml_parser_set_scanner_tag_error(parser, directive,
- start_mark, "found an incorrect leading UTF-8 octet")
- }
- } else {
- // Check if the trailing octet is correct.
- if octet&0xC0 != 0x80 {
- return yaml_parser_set_scanner_tag_error(parser, directive,
- start_mark, "found an incorrect trailing UTF-8 octet")
- }
- }
-
- // Copy the octet and move the pointers.
- *s = append(*s, octet)
- skip(parser)
- skip(parser)
- skip(parser)
- w--
- }
- return true
-}
-
-// Scan a block scalar.
-func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, literal bool) bool {
- // Eat the indicator '|' or '>'.
- start_mark := parser.mark
- skip(parser)
-
- // Scan the additional block scalar indicators.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
-
- // Check for a chomping indicator.
- var chomping, increment int
- if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' {
- // Set the chomping method and eat the indicator.
- if parser.buffer[parser.buffer_pos] == '+' {
- chomping = +1
- } else {
- chomping = -1
- }
- skip(parser)
-
- // Check for an indentation indicator.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- if is_digit(parser.buffer, parser.buffer_pos) {
- // Check that the indentation is greater than 0.
- if parser.buffer[parser.buffer_pos] == '0' {
- yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
- start_mark, "found an indentation indicator equal to 0")
- return false
- }
-
- // Get the indentation level and eat the indicator.
- increment = as_digit(parser.buffer, parser.buffer_pos)
- skip(parser)
- }
-
- } else if is_digit(parser.buffer, parser.buffer_pos) {
- // Do the same as above, but in the opposite order.
-
- if parser.buffer[parser.buffer_pos] == '0' {
- yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
- start_mark, "found an indentation indicator equal to 0")
- return false
- }
- increment = as_digit(parser.buffer, parser.buffer_pos)
- skip(parser)
-
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' {
- if parser.buffer[parser.buffer_pos] == '+' {
- chomping = +1
- } else {
- chomping = -1
- }
- skip(parser)
- }
- }
-
- // Eat whitespaces and comments to the end of the line.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- for is_blank(parser.buffer, parser.buffer_pos) {
- skip(parser)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
- if parser.buffer[parser.buffer_pos] == '#' {
- for !is_breakz(parser.buffer, parser.buffer_pos) {
- skip(parser)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
- }
-
- // Check if we are at the end of the line.
- if !is_breakz(parser.buffer, parser.buffer_pos) {
- yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
- start_mark, "did not find expected comment or line break")
- return false
- }
-
- // Eat a line break.
- if is_break(parser.buffer, parser.buffer_pos) {
- if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
- return false
- }
- skip_line(parser)
- }
-
- end_mark := parser.mark
-
- // Set the indentation level if it was specified.
- var indent int
- if increment > 0 {
- if parser.indent >= 0 {
- indent = parser.indent + increment
- } else {
- indent = increment
- }
- }
-
- // Scan the leading line breaks and determine the indentation level if needed.
- var s, leading_break, trailing_breaks []byte
- if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) {
- return false
- }
-
- // Scan the block scalar content.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- var leading_blank, trailing_blank bool
- for parser.mark.column == indent && !is_z(parser.buffer, parser.buffer_pos) {
- // We are at the beginning of a non-empty line.
-
- // Is it a trailing whitespace?
- trailing_blank = is_blank(parser.buffer, parser.buffer_pos)
-
- // Check if we need to fold the leading line break.
- if !literal && !leading_blank && !trailing_blank && len(leading_break) > 0 && leading_break[0] == '\n' {
- // Do we need to join the lines by space?
- if len(trailing_breaks) == 0 {
- s = append(s, ' ')
- }
- } else {
- s = append(s, leading_break...)
- }
- leading_break = leading_break[:0]
-
- // Append the remaining line breaks.
- s = append(s, trailing_breaks...)
- trailing_breaks = trailing_breaks[:0]
-
- // Is it a leading whitespace?
- leading_blank = is_blank(parser.buffer, parser.buffer_pos)
-
- // Consume the current line.
- for !is_breakz(parser.buffer, parser.buffer_pos) {
- s = read(parser, s)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
-
- // Consume the line break.
- if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
- return false
- }
-
- leading_break = read_line(parser, leading_break)
-
- // Eat the following indentation spaces and line breaks.
- if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) {
- return false
- }
- }
-
- // Chomp the tail.
- if chomping != -1 {
- s = append(s, leading_break...)
- }
- if chomping == 1 {
- s = append(s, trailing_breaks...)
- }
-
- // Create a token.
- *token = yaml_token_t{
- typ: yaml_SCALAR_TOKEN,
- start_mark: start_mark,
- end_mark: end_mark,
- value: s,
- style: yaml_LITERAL_SCALAR_STYLE,
- }
- if !literal {
- token.style = yaml_FOLDED_SCALAR_STYLE
- }
- return true
-}
-
-// Scan indentation spaces and line breaks for a block scalar. Determine the
-// indentation level if needed.
-func yaml_parser_scan_block_scalar_breaks(parser *yaml_parser_t, indent *int, breaks *[]byte, start_mark yaml_mark_t, end_mark *yaml_mark_t) bool {
- *end_mark = parser.mark
-
- // Eat the indentation spaces and line breaks.
- max_indent := 0
- for {
- // Eat the indentation spaces.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- for (*indent == 0 || parser.mark.column < *indent) && is_space(parser.buffer, parser.buffer_pos) {
- skip(parser)
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
- if parser.mark.column > max_indent {
- max_indent = parser.mark.column
- }
-
- // Check for a tab character messing up the indentation.
- if (*indent == 0 || parser.mark.column < *indent) && is_tab(parser.buffer, parser.buffer_pos) {
- return yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
- start_mark, "found a tab character where an indentation space is expected")
- }
-
- // Have we found a non-empty line?
- if !is_break(parser.buffer, parser.buffer_pos) {
- break
- }
-
- // Consume the line break.
- if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
- return false
- }
- // [Go] Should really be returning breaks instead.
- *breaks = read_line(parser, *breaks)
- *end_mark = parser.mark
- }
-
- // Determine the indentation level if needed.
- if *indent == 0 {
- *indent = max_indent
- if *indent < parser.indent+1 {
- *indent = parser.indent + 1
- }
- if *indent < 1 {
- *indent = 1
- }
- }
- return true
-}
-
-// Scan a quoted scalar.
-func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, single bool) bool {
- // Eat the left quote.
- start_mark := parser.mark
- skip(parser)
-
- // Consume the content of the quoted scalar.
- var s, leading_break, trailing_breaks, whitespaces []byte
- for {
- // Check that there are no document indicators at the beginning of the line.
- if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) {
- return false
- }
-
- if parser.mark.column == 0 &&
- ((parser.buffer[parser.buffer_pos+0] == '-' &&
- parser.buffer[parser.buffer_pos+1] == '-' &&
- parser.buffer[parser.buffer_pos+2] == '-') ||
- (parser.buffer[parser.buffer_pos+0] == '.' &&
- parser.buffer[parser.buffer_pos+1] == '.' &&
- parser.buffer[parser.buffer_pos+2] == '.')) &&
- is_blankz(parser.buffer, parser.buffer_pos+3) {
- yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar",
- start_mark, "found unexpected document indicator")
- return false
- }
-
- // Check for EOF.
- if is_z(parser.buffer, parser.buffer_pos) {
- yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar",
- start_mark, "found unexpected end of stream")
- return false
- }
-
- // Consume non-blank characters.
- leading_blanks := false
- for !is_blankz(parser.buffer, parser.buffer_pos) {
- if single && parser.buffer[parser.buffer_pos] == '\'' && parser.buffer[parser.buffer_pos+1] == '\'' {
- // It is an escaped single quote.
- s = append(s, '\'')
- skip(parser)
- skip(parser)
-
- } else if single && parser.buffer[parser.buffer_pos] == '\'' {
- // It is a right single quote.
- break
- } else if !single && parser.buffer[parser.buffer_pos] == '"' {
- // It is a right double quote.
- break
-
- } else if !single && parser.buffer[parser.buffer_pos] == '\\' && is_break(parser.buffer, parser.buffer_pos+1) {
- // It is an escaped line break.
- if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) {
- return false
- }
- skip(parser)
- skip_line(parser)
- leading_blanks = true
- break
-
- } else if !single && parser.buffer[parser.buffer_pos] == '\\' {
- // It is an escape sequence.
- code_length := 0
-
- // Check the escape character.
- switch parser.buffer[parser.buffer_pos+1] {
- case '0':
- s = append(s, 0)
- case 'a':
- s = append(s, '\x07')
- case 'b':
- s = append(s, '\x08')
- case 't', '\t':
- s = append(s, '\x09')
- case 'n':
- s = append(s, '\x0A')
- case 'v':
- s = append(s, '\x0B')
- case 'f':
- s = append(s, '\x0C')
- case 'r':
- s = append(s, '\x0D')
- case 'e':
- s = append(s, '\x1B')
- case ' ':
- s = append(s, '\x20')
- case '"':
- s = append(s, '"')
- case '\'':
- s = append(s, '\'')
- case '\\':
- s = append(s, '\\')
- case 'N': // NEL (#x85)
- s = append(s, '\xC2')
- s = append(s, '\x85')
- case '_': // #xA0
- s = append(s, '\xC2')
- s = append(s, '\xA0')
- case 'L': // LS (#x2028)
- s = append(s, '\xE2')
- s = append(s, '\x80')
- s = append(s, '\xA8')
- case 'P': // PS (#x2029)
- s = append(s, '\xE2')
- s = append(s, '\x80')
- s = append(s, '\xA9')
- case 'x':
- code_length = 2
- case 'u':
- code_length = 4
- case 'U':
- code_length = 8
- default:
- yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
- start_mark, "found unknown escape character")
- return false
- }
-
- skip(parser)
- skip(parser)
-
- // Consume an arbitrary escape code.
- if code_length > 0 {
- var value int
-
- // Scan the character value.
- if parser.unread < code_length && !yaml_parser_update_buffer(parser, code_length) {
- return false
- }
- for k := 0; k < code_length; k++ {
- if !is_hex(parser.buffer, parser.buffer_pos+k) {
- yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
- start_mark, "did not find expected hexadecimal number")
- return false
- }
- value = (value << 4) + as_hex(parser.buffer, parser.buffer_pos+k)
- }
-
- // Check the value and write the character.
- if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF {
- yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
- start_mark, "found invalid Unicode character escape code")
- return false
- }
- if value <= 0x7F {
- s = append(s, byte(value))
- } else if value <= 0x7FF {
- s = append(s, byte(0xC0+(value>>6)))
- s = append(s, byte(0x80+(value&0x3F)))
- } else if value <= 0xFFFF {
- s = append(s, byte(0xE0+(value>>12)))
- s = append(s, byte(0x80+((value>>6)&0x3F)))
- s = append(s, byte(0x80+(value&0x3F)))
- } else {
- s = append(s, byte(0xF0+(value>>18)))
- s = append(s, byte(0x80+((value>>12)&0x3F)))
- s = append(s, byte(0x80+((value>>6)&0x3F)))
- s = append(s, byte(0x80+(value&0x3F)))
- }
-
- // Advance the pointer.
- for k := 0; k < code_length; k++ {
- skip(parser)
- }
- }
- } else {
- // It is a non-escaped non-blank character.
- s = read(parser, s)
- }
- if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
- return false
- }
- }
-
- // Check if we are at the end of the scalar.
- if single {
- if parser.buffer[parser.buffer_pos] == '\'' {
- break
- }
- } else {
- if parser.buffer[parser.buffer_pos] == '"' {
- break
- }
- }
-
- // Consume blank characters.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
-
- for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) {
- if is_blank(parser.buffer, parser.buffer_pos) {
- // Consume a space or a tab character.
- if !leading_blanks {
- whitespaces = read(parser, whitespaces)
- } else {
- skip(parser)
- }
- } else {
- if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
- return false
- }
-
- // Check if it is a first line break.
- if !leading_blanks {
- whitespaces = whitespaces[:0]
- leading_break = read_line(parser, leading_break)
- leading_blanks = true
- } else {
- trailing_breaks = read_line(parser, trailing_breaks)
- }
- }
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
-
- // Join the whitespaces or fold line breaks.
- if leading_blanks {
- // Do we need to fold line breaks?
- if len(leading_break) > 0 && leading_break[0] == '\n' {
- if len(trailing_breaks) == 0 {
- s = append(s, ' ')
- } else {
- s = append(s, trailing_breaks...)
- }
- } else {
- s = append(s, leading_break...)
- s = append(s, trailing_breaks...)
- }
- trailing_breaks = trailing_breaks[:0]
- leading_break = leading_break[:0]
- } else {
- s = append(s, whitespaces...)
- whitespaces = whitespaces[:0]
- }
- }
-
- // Eat the right quote.
- skip(parser)
- end_mark := parser.mark
-
- // Create a token.
- *token = yaml_token_t{
- typ: yaml_SCALAR_TOKEN,
- start_mark: start_mark,
- end_mark: end_mark,
- value: s,
- style: yaml_SINGLE_QUOTED_SCALAR_STYLE,
- }
- if !single {
- token.style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
- }
- return true
-}
-
-// Scan a plain scalar.
-func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) bool {
-
- var s, leading_break, trailing_breaks, whitespaces []byte
- var leading_blanks bool
- var indent = parser.indent + 1
-
- start_mark := parser.mark
- end_mark := parser.mark
-
- // Consume the content of the plain scalar.
- for {
- // Check for a document indicator.
- if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) {
- return false
- }
- if parser.mark.column == 0 &&
- ((parser.buffer[parser.buffer_pos+0] == '-' &&
- parser.buffer[parser.buffer_pos+1] == '-' &&
- parser.buffer[parser.buffer_pos+2] == '-') ||
- (parser.buffer[parser.buffer_pos+0] == '.' &&
- parser.buffer[parser.buffer_pos+1] == '.' &&
- parser.buffer[parser.buffer_pos+2] == '.')) &&
- is_blankz(parser.buffer, parser.buffer_pos+3) {
- break
- }
-
- // Check for a comment.
- if parser.buffer[parser.buffer_pos] == '#' {
- break
- }
-
- // Consume non-blank characters.
- for !is_blankz(parser.buffer, parser.buffer_pos) {
-
- // Check for 'x:x' in the flow context. TODO: Fix the test "spec-08-13".
- if parser.flow_level > 0 &&
- parser.buffer[parser.buffer_pos] == ':' &&
- !is_blankz(parser.buffer, parser.buffer_pos+1) {
- yaml_parser_set_scanner_error(parser, "while scanning a plain scalar",
- start_mark, "found unexpected ':'")
- return false
- }
-
- // Check for indicators that may end a plain scalar.
- if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) ||
- (parser.flow_level > 0 &&
- (parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == ':' ||
- parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == '[' ||
- parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' ||
- parser.buffer[parser.buffer_pos] == '}')) {
- break
- }
-
- // Check if we need to join whitespaces and breaks.
- if leading_blanks || len(whitespaces) > 0 {
- if leading_blanks {
- // Do we need to fold line breaks?
- if leading_break[0] == '\n' {
- if len(trailing_breaks) == 0 {
- s = append(s, ' ')
- } else {
- s = append(s, trailing_breaks...)
- }
- } else {
- s = append(s, leading_break...)
- s = append(s, trailing_breaks...)
- }
- trailing_breaks = trailing_breaks[:0]
- leading_break = leading_break[:0]
- leading_blanks = false
- } else {
- s = append(s, whitespaces...)
- whitespaces = whitespaces[:0]
- }
- }
-
- // Copy the character.
- s = read(parser, s)
-
- end_mark = parser.mark
- if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
- return false
- }
- }
-
- // Is it the end?
- if !(is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos)) {
- break
- }
-
- // Consume blank characters.
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
-
- for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) {
- if is_blank(parser.buffer, parser.buffer_pos) {
-
- // Check for a tab character that abuses indentation.
- if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) {
- yaml_parser_set_scanner_error(parser, "while scanning a plain scalar",
- start_mark, "found a tab character that violates indentation")
- return false
- }
-
- // Consume a space or a tab character.
- if !leading_blanks {
- whitespaces = read(parser, whitespaces)
- } else {
- skip(parser)
- }
- } else {
- if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
- return false
- }
-
- // Check if it is a first line break.
- if !leading_blanks {
- whitespaces = whitespaces[:0]
- leading_break = read_line(parser, leading_break)
- leading_blanks = true
- } else {
- trailing_breaks = read_line(parser, trailing_breaks)
- }
- }
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
- return false
- }
- }
-
- // Check the indentation level.
- if parser.flow_level == 0 && parser.mark.column < indent {
- break
- }
- }
-
- // Create a token.
- *token = yaml_token_t{
- typ: yaml_SCALAR_TOKEN,
- start_mark: start_mark,
- end_mark: end_mark,
- value: s,
- style: yaml_PLAIN_SCALAR_STYLE,
- }
-
- // Note that we change the 'simple_key_allowed' flag.
- if leading_blanks {
- parser.simple_key_allowed = true
- }
- return true
-}
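The block, quoted, and plain scalar scanners above implement YAML's folding and chomping rules. With this vendored fork removed, the same behavior is still observable through gopkg.in/yaml.v3, which remains in go.mod; a quick sketch, with the expected output (per standard YAML semantics) noted in comments:

    package main

    import (
        "fmt"

        "gopkg.in/yaml.v3"
    )

    func main() {
        // A literal block scalar keeps line breaks; a folded one with "-"
        // chomping joins lines with spaces and strips the trailing newline.
        src := []byte("literal: |\n  line one\n  line two\nfolded: >-\n  joined into\n  one line\n")

        var doc map[string]string
        if err := yaml.Unmarshal(src, &doc); err != nil {
            panic(err)
        }
        fmt.Printf("%q\n", doc["literal"]) // "line one\nline two\n"
        fmt.Printf("%q\n", doc["folded"])  // "joined into one line"
    }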
diff --git a/vendor/github.com/coreos/yaml/sorter.go b/vendor/github.com/coreos/yaml/sorter.go
deleted file mode 100644
index 5958822f9..000000000
--- a/vendor/github.com/coreos/yaml/sorter.go
+++ /dev/null
@@ -1,104 +0,0 @@
-package yaml
-
-import (
- "reflect"
- "unicode"
-)
-
-type keyList []reflect.Value
-
-func (l keyList) Len() int { return len(l) }
-func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
-func (l keyList) Less(i, j int) bool {
- a := l[i]
- b := l[j]
- ak := a.Kind()
- bk := b.Kind()
- for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() {
- a = a.Elem()
- ak = a.Kind()
- }
- for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() {
- b = b.Elem()
- bk = b.Kind()
- }
- af, aok := keyFloat(a)
- bf, bok := keyFloat(b)
- if aok && bok {
- if af != bf {
- return af < bf
- }
- if ak != bk {
- return ak < bk
- }
- return numLess(a, b)
- }
- if ak != reflect.String || bk != reflect.String {
- return ak < bk
- }
- ar, br := []rune(a.String()), []rune(b.String())
- for i := 0; i < len(ar) && i < len(br); i++ {
- if ar[i] == br[i] {
- continue
- }
- al := unicode.IsLetter(ar[i])
- bl := unicode.IsLetter(br[i])
- if al && bl {
- return ar[i] < br[i]
- }
- if al || bl {
- return bl
- }
- var ai, bi int
- var an, bn int64
- for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ {
- an = an*10 + int64(ar[ai]-'0')
- }
- for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ {
- bn = bn*10 + int64(br[bi]-'0')
- }
- if an != bn {
- return an < bn
- }
- if ai != bi {
- return ai < bi
- }
- return ar[i] < br[i]
- }
- return len(ar) < len(br)
-}
-
-// keyFloat returns a float value for v if it is a number/bool
-// and whether it is a number/bool or not.
-func keyFloat(v reflect.Value) (f float64, ok bool) {
- switch v.Kind() {
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- return float64(v.Int()), true
- case reflect.Float32, reflect.Float64:
- return v.Float(), true
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
- return float64(v.Uint()), true
- case reflect.Bool:
- if v.Bool() {
- return 1, true
- }
- return 0, true
- }
- return 0, false
-}
-
-// numLess returns whether a < b.
-// a and b must necessarily have the same kind.
-func numLess(a, b reflect.Value) bool {
- switch a.Kind() {
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- return a.Int() < b.Int()
- case reflect.Float32, reflect.Float64:
- return a.Float() < b.Float()
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
- return a.Uint() < b.Uint()
- case reflect.Bool:
- return !a.Bool() && b.Bool()
- }
- panic("not a number")
-}
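sorter.go gave map keys a "natural" ordering in which digit runs compare numerically, so "item2" sorts ahead of "item10". A simplified, self-contained sketch of that comparison (not the exact deleted algorithm, which also ordered numeric and bool keys ahead of strings):

    package main

    import (
        "fmt"
        "sort"
        "unicode"
    )

    // lessNatural compares two strings, treating maximal digit runs as numbers,
    // so "item2" < "item10" even though plain byte order says otherwise.
    func lessNatural(a, b string) bool {
        ar, br := []rune(a), []rune(b)
        for i := 0; i < len(ar) && i < len(br); i++ {
            if ar[i] == br[i] {
                continue
            }
            if unicode.IsDigit(ar[i]) && unicode.IsDigit(br[i]) {
                var an, bn int64
                for j := i; j < len(ar) && unicode.IsDigit(ar[j]); j++ {
                    an = an*10 + int64(ar[j]-'0')
                }
                for j := i; j < len(br) && unicode.IsDigit(br[j]); j++ {
                    bn = bn*10 + int64(br[j]-'0')
                }
                if an != bn {
                    return an < bn
                }
            }
            return ar[i] < br[i]
        }
        return len(ar) < len(br)
    }

    func main() {
        keys := []string{"item10", "item2", "item1"}
        sort.Slice(keys, func(i, j int) bool { return lessNatural(keys[i], keys[j]) })
        fmt.Println(keys) // [item1 item2 item10]
    }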
diff --git a/vendor/github.com/coreos/yaml/writerc.go b/vendor/github.com/coreos/yaml/writerc.go
deleted file mode 100644
index 190362f25..000000000
--- a/vendor/github.com/coreos/yaml/writerc.go
+++ /dev/null
@@ -1,89 +0,0 @@
-package yaml
-
-// Set the writer error and return false.
-func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool {
- emitter.error = yaml_WRITER_ERROR
- emitter.problem = problem
- return false
-}
-
-// Flush the output buffer.
-func yaml_emitter_flush(emitter *yaml_emitter_t) bool {
- if emitter.write_handler == nil {
- panic("write handler not set")
- }
-
- // Check if the buffer is empty.
- if emitter.buffer_pos == 0 {
- return true
- }
-
- // If the output encoding is UTF-8, we don't need to recode the buffer.
- if emitter.encoding == yaml_UTF8_ENCODING {
- if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil {
- return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
- }
- emitter.buffer_pos = 0
- return true
- }
-
- // Recode the buffer into the raw buffer.
- var low, high int
- if emitter.encoding == yaml_UTF16LE_ENCODING {
- low, high = 0, 1
- } else {
- high, low = 1, 0
- }
-
- pos := 0
- for pos < emitter.buffer_pos {
- // See the "reader.c" code for more details on UTF-8 encoding. Note
- // that we assume that the buffer contains a valid UTF-8 sequence.
-
- // Read the next UTF-8 character.
- octet := emitter.buffer[pos]
-
- var w int
- var value rune
- switch {
- case octet&0x80 == 0x00:
- w, value = 1, rune(octet&0x7F)
- case octet&0xE0 == 0xC0:
- w, value = 2, rune(octet&0x1F)
- case octet&0xF0 == 0xE0:
- w, value = 3, rune(octet&0x0F)
- case octet&0xF8 == 0xF0:
- w, value = 4, rune(octet&0x07)
- }
- for k := 1; k < w; k++ {
- octet = emitter.buffer[pos+k]
- value = (value << 6) + (rune(octet) & 0x3F)
- }
- pos += w
-
- // Write the character.
- if value < 0x10000 {
- var b [2]byte
- b[high] = byte(value >> 8)
- b[low] = byte(value & 0xFF)
- emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1])
- } else {
- // Write the character using a surrogate pair (check "reader.c").
- var b [4]byte
- value -= 0x10000
- b[high] = byte(0xD8 + (value >> 18))
- b[low] = byte((value >> 10) & 0xFF)
- b[high+2] = byte(0xDC + ((value >> 8) & 0xFF))
- b[low+2] = byte(value & 0xFF)
- emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1], b[2], b[3])
- }
- }
-
- // Write the raw buffer.
- if err := emitter.write_handler(emitter, emitter.raw_buffer); err != nil {
- return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
- }
- emitter.buffer_pos = 0
- emitter.raw_buffer = emitter.raw_buffer[:0]
- return true
-}
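The writer above re-encoded UTF-8 output into UTF-16 by hand, including surrogate pairs for code points beyond U+FFFF. The standard library expresses the same transformation; a small sanity-check sketch:

    package main

    import (
        "fmt"
        "unicode/utf16"
    )

    func main() {
        // U+1F600 lies outside the Basic Multilingual Plane, so UTF-16 needs
        // a surrogate pair for it.
        r := rune(0x1F600)
        units := utf16.Encode([]rune{r})
        fmt.Printf("%#x\n", units) // [0xd83d 0xde00]

        // Serialize little-endian, matching the yaml_UTF16LE_ENCODING byte order.
        var out []byte
        for _, u := range units {
            out = append(out, byte(u), byte(u>>8))
        }
        fmt.Printf("% x\n", out) // 3d d8 00 de
    }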
diff --git a/vendor/github.com/coreos/yaml/yaml.go b/vendor/github.com/coreos/yaml/yaml.go
deleted file mode 100644
index 16e136543..000000000
--- a/vendor/github.com/coreos/yaml/yaml.go
+++ /dev/null
@@ -1,312 +0,0 @@
-// Package yaml implements YAML support for the Go language.
-//
-// Source code and other details for the project are available at GitHub:
-//
-// https://github.com/go-yaml/yaml
-//
-package yaml
-
-import (
- "errors"
- "fmt"
- "reflect"
- "strings"
- "sync"
-)
-
-type yamlError string
-
-func fail(msg string) {
- panic(yamlError(msg))
-}
-
-func handleErr(err *error) {
- if r := recover(); r != nil {
- if e, ok := r.(yamlError); ok {
- *err = errors.New("YAML error: " + string(e))
- } else {
- panic(r)
- }
- }
-}
-
-// The Setter interface may be implemented by types to do their own custom
-// unmarshalling of YAML values, rather than being implicitly assigned by
-// the yaml package machinery. If setting the value works, the method should
-// return true. If it returns false, the value is considered unsupported
-// and is omitted from maps and slices.
-type Setter interface {
- SetYAML(tag string, value interface{}) bool
-}
-
-// The Getter interface is implemented by types to do their own custom
-// marshalling into a YAML tag and value.
-type Getter interface {
- GetYAML() (tag string, value interface{})
-}
-
-// Unmarshal decodes the first document found within the in byte slice
-// and assigns decoded values into the out value.
-//
-// Maps and pointers (to a struct, string, int, etc) are accepted as out
-// values. If an internal pointer within a struct is not initialized,
-// the yaml package will initialize it if necessary for unmarshalling
-// the provided data. The out parameter must not be nil.
-//
-// The type of the decoded values and the type of out will be considered,
-// and Unmarshal will do the best possible job to unmarshal values
-// appropriately. It is NOT considered an error, though, to skip values
-// because they are not available in the decoded YAML, or if they are not
-// compatible with the out value. To ensure something was properly
-// unmarshaled use a map or compare against the previous value for the
-// field (usually the zero value).
-//
-// Struct fields are only unmarshalled if they are exported (have an
-// upper case first letter), and are unmarshalled using the field name
-// lowercased as the default key. Custom keys may be defined via the
-// "yaml" name in the field tag: the content preceding the first comma
-// is used as the key, and the following comma-separated options are
-// used to tweak the marshalling process (see Marshal).
-// Conflicting names result in a runtime error.
-//
-// For example:
-//
-// type T struct {
-// F int `yaml:"a,omitempty"`
-// B int
-// }
-// var t T
-// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t)
-//
-// See the documentation of Marshal for the format of tags and a list of
-// supported tag options.
-//
-func Unmarshal(in []byte, out interface{}) (err error) {
- defer handleErr(&err)
- d := newDecoder()
- p := newParser(in, UnmarshalMappingKeyTransform)
- defer p.destroy()
- node := p.parse()
- if node != nil {
- v := reflect.ValueOf(out)
- if v.Kind() == reflect.Ptr && !v.IsNil() {
- v = v.Elem()
- }
- d.unmarshal(node, v)
- }
- return nil
-}
-
-// Marshal serializes the value provided into a YAML document. The structure
-// of the generated document will reflect the structure of the value itself.
-// Maps and pointers (to struct, string, int, etc) are accepted as the in value.
-//
-// Struct fields are only unmarshalled if they are exported (have an upper case
-// first letter), and are unmarshalled using the field name lowercased as the
-// default key. Custom keys may be defined via the "yaml" name in the field
-// tag: the content preceding the first comma is used as the key, and the
-// following comma-separated options are used to tweak the marshalling process.
-// Conflicting names result in a runtime error.
-//
-// The field tag format accepted is:
-//
- // `(...) yaml:"[<key>][,<flag1>[,<flag2>]]" (...)`
-//
-// The following flags are currently supported:
-//
-// omitempty Only include the field if it's not set to the zero
-// value for the type or to empty slices or maps.
-// Does not apply to zero valued structs.
-//
-// flow Marshal using a flow style (useful for structs,
- // sequences and maps).
-//
-// inline Inline the struct it's applied to, so its fields
-// are processed as if they were part of the outer
-// struct.
-//
-// In addition, if the key is "-", the field is ignored.
-//
-// For example:
-//
-// type T struct {
-// F int "a,omitempty"
-// B int
-// }
-// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n"
- // yaml.Marshal(&T{F: 1}) // Returns "a: 1\nb: 0\n"
-//
-func Marshal(in interface{}) (out []byte, err error) {
- defer handleErr(&err)
- e := newEncoder()
- defer e.destroy()
- e.marshal("", reflect.ValueOf(in))
- e.finish()
- out = e.out
- return
-}
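The Unmarshal and Marshal documentation above describes this fork's struct-tag conventions. The project now leans on gopkg.in/yaml.v3 (retained in go.mod), whose tag handling is similar though not identical, and which exposes Marshaler/Unmarshaler interfaces instead of this fork's Setter/Getter. A hedged round-trip sketch with the common tag options:

    package main

    import (
        "fmt"

        "gopkg.in/yaml.v3"
    )

    type Config struct {
        Name    string   `yaml:"name"`
        Replica int      `yaml:"replica,omitempty"`
        Tags    []string `yaml:"tags,flow"`
    }

    func main() {
        out, err := yaml.Marshal(Config{Name: "node0", Tags: []string{"mgmt", "vlan100"}})
        if err != nil {
            panic(err)
        }
        fmt.Print(string(out))
        // name: node0
        // tags: [mgmt, vlan100]

        var c Config
        if err := yaml.Unmarshal([]byte("name: node1\nreplica: 3\n"), &c); err != nil {
            panic(err)
        }
        fmt.Printf("%+v\n", c) // {Name:node1 Replica:3 Tags:[]}
    }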
-
-// UnmarshalMappingKeyTransform is a string transformation that is applied to
-// each mapping key in a YAML document before it is unmarshalled. By default,
-// UnmarshalMappingKeyTransform is an identity transform (no modification).
-var UnmarshalMappingKeyTransform transformString = identityTransform
-
-type transformString func(in string) (out string)
-
-func identityTransform(in string) (out string) {
- return in
-}
-
-// --------------------------------------------------------------------------
-// Maintain a mapping of keys to structure field indexes
-
-// The code in this section was copied from mgo/bson.
-
-// structInfo holds details for the serialization of fields of
-// a given struct.
-type structInfo struct {
- FieldsMap map[string]fieldInfo
- FieldsList []fieldInfo
-
- // InlineMap is the number of the field in the struct that
- // contains an ,inline map, or -1 if there's none.
- InlineMap int
-}
-
-type fieldInfo struct {
- Key string
- Num int
- OmitEmpty bool
- Flow bool
-
- // Inline holds the field index if the field is part of an inlined struct.
- Inline []int
-}
-
-var structMap = make(map[reflect.Type]*structInfo)
-var fieldMapMutex sync.RWMutex
-
-func getStructInfo(st reflect.Type) (*structInfo, error) {
- fieldMapMutex.RLock()
- sinfo, found := structMap[st]
- fieldMapMutex.RUnlock()
- if found {
- return sinfo, nil
- }
-
- n := st.NumField()
- fieldsMap := make(map[string]fieldInfo)
- fieldsList := make([]fieldInfo, 0, n)
- inlineMap := -1
- for i := 0; i != n; i++ {
- field := st.Field(i)
- if field.PkgPath != "" {
- continue // Private field
- }
-
- info := fieldInfo{Num: i}
-
- tag := field.Tag.Get("yaml")
- if tag == "" && strings.Index(string(field.Tag), ":") < 0 {
- tag = string(field.Tag)
- }
- if tag == "-" {
- continue
- }
-
- inline := false
- fields := strings.Split(tag, ",")
- if len(fields) > 1 {
- for _, flag := range fields[1:] {
- switch flag {
- case "omitempty":
- info.OmitEmpty = true
- case "flow":
- info.Flow = true
- case "inline":
- inline = true
- default:
- return nil, errors.New(fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st))
- }
- }
- tag = fields[0]
- }
-
- if inline {
- switch field.Type.Kind() {
- // TODO: Implement support for inline maps.
- //case reflect.Map:
- // if inlineMap >= 0 {
- // return nil, errors.New("Multiple ,inline maps in struct " + st.String())
- // }
- // if field.Type.Key() != reflect.TypeOf("") {
- // return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String())
- // }
- // inlineMap = info.Num
- case reflect.Struct:
- sinfo, err := getStructInfo(field.Type)
- if err != nil {
- return nil, err
- }
- for _, finfo := range sinfo.FieldsList {
- if _, found := fieldsMap[finfo.Key]; found {
- msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String()
- return nil, errors.New(msg)
- }
- if finfo.Inline == nil {
- finfo.Inline = []int{i, finfo.Num}
- } else {
- finfo.Inline = append([]int{i}, finfo.Inline...)
- }
- fieldsMap[finfo.Key] = finfo
- fieldsList = append(fieldsList, finfo)
- }
- default:
- //return nil, errors.New("Option ,inline needs a struct value or map field")
- return nil, errors.New("Option ,inline needs a struct value field")
- }
- continue
- }
-
- if tag != "" {
- info.Key = tag
- } else {
- info.Key = strings.ToLower(field.Name)
- }
-
- if _, found = fieldsMap[info.Key]; found {
- msg := "Duplicated key '" + info.Key + "' in struct " + st.String()
- return nil, errors.New(msg)
- }
-
- fieldsList = append(fieldsList, info)
- fieldsMap[info.Key] = info
- }
-
- sinfo = &structInfo{fieldsMap, fieldsList, inlineMap}
-
- fieldMapMutex.Lock()
- structMap[st] = sinfo
- fieldMapMutex.Unlock()
- return sinfo, nil
-}
-
-func isZero(v reflect.Value) bool {
- switch v.Kind() {
- case reflect.String:
- return len(v.String()) == 0
- case reflect.Interface, reflect.Ptr:
- return v.IsNil()
- case reflect.Slice:
- return v.Len() == 0
- case reflect.Map:
- return v.Len() == 0
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- return v.Int() == 0
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
- return v.Uint() == 0
- case reflect.Bool:
- return !v.Bool()
- }
- return false
-}
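isZero above predates reflect.Value.IsZero (available since Go 1.13, so covered by this module's go 1.21 directive). One behavioral difference worth noting for anyone replicating omitempty logic: the deleted helper treated empty but non-nil slices and maps as zero, while IsZero only treats nil ones as zero. A minimal sketch:

    package main

    import (
        "fmt"
        "reflect"
    )

    func main() {
        var nilSlice []string
        emptySlice := []string{}

        fmt.Println(reflect.ValueOf(nilSlice).IsZero())   // true
        fmt.Println(reflect.ValueOf(emptySlice).IsZero()) // false: non-nil, just empty

        // The deleted helper would have reported true for both, because it
        // only looked at Len() for slices and maps.
    }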
diff --git a/vendor/github.com/coreos/yaml/yamlh.go b/vendor/github.com/coreos/yaml/yamlh.go
deleted file mode 100644
index 4b020b1b3..000000000
--- a/vendor/github.com/coreos/yaml/yamlh.go
+++ /dev/null
@@ -1,716 +0,0 @@
-package yaml
-
-import (
- "io"
-)
-
-// The version directive data.
-type yaml_version_directive_t struct {
- major int8 // The major version number.
- minor int8 // The minor version number.
-}
-
-// The tag directive data.
-type yaml_tag_directive_t struct {
- handle []byte // The tag handle.
- prefix []byte // The tag prefix.
-}
-
-type yaml_encoding_t int
-
-// The stream encoding.
-const (
- // Let the parser choose the encoding.
- yaml_ANY_ENCODING yaml_encoding_t = iota
-
- yaml_UTF8_ENCODING // The default UTF-8 encoding.
- yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM.
- yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM.
-)
-
-type yaml_break_t int
-
-// Line break types.
-const (
- // Let the parser choose the break type.
- yaml_ANY_BREAK yaml_break_t = iota
-
- yaml_CR_BREAK // Use CR for line breaks (Mac style).
- yaml_LN_BREAK // Use LN for line breaks (Unix style).
- yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style).
-)
-
-type yaml_error_type_t int
-
-// Many bad things could happen with the parser and emitter.
-const (
- // No error is produced.
- yaml_NO_ERROR yaml_error_type_t = iota
-
- yaml_MEMORY_ERROR // Cannot allocate or reallocate a block of memory.
- yaml_READER_ERROR // Cannot read or decode the input stream.
- yaml_SCANNER_ERROR // Cannot scan the input stream.
- yaml_PARSER_ERROR // Cannot parse the input stream.
- yaml_COMPOSER_ERROR // Cannot compose a YAML document.
- yaml_WRITER_ERROR // Cannot write to the output stream.
- yaml_EMITTER_ERROR // Cannot emit a YAML stream.
-)
-
-// The pointer position.
-type yaml_mark_t struct {
- index int // The position index.
- line int // The position line.
- column int // The position column.
-}
-
-// Node Styles
-
-type yaml_style_t int8
-
-type yaml_scalar_style_t yaml_style_t
-
-// Scalar styles.
-const (
- // Let the emitter choose the style.
- yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota
-
- yaml_PLAIN_SCALAR_STYLE // The plain scalar style.
- yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style.
- yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style.
- yaml_LITERAL_SCALAR_STYLE // The literal scalar style.
- yaml_FOLDED_SCALAR_STYLE // The folded scalar style.
-)
-
-type yaml_sequence_style_t yaml_style_t
-
-// Sequence styles.
-const (
- // Let the emitter choose the style.
- yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota
-
- yaml_BLOCK_SEQUENCE_STYLE // The block sequence style.
- yaml_FLOW_SEQUENCE_STYLE // The flow sequence style.
-)
-
-type yaml_mapping_style_t yaml_style_t
-
-// Mapping styles.
-const (
- // Let the emitter choose the style.
- yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota
-
- yaml_BLOCK_MAPPING_STYLE // The block mapping style.
- yaml_FLOW_MAPPING_STYLE // The flow mapping style.
-)
-
-// Tokens
-
-type yaml_token_type_t int
-
-// Token types.
-const (
- // An empty token.
- yaml_NO_TOKEN yaml_token_type_t = iota
-
- yaml_STREAM_START_TOKEN // A STREAM-START token.
- yaml_STREAM_END_TOKEN // A STREAM-END token.
-
- yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token.
- yaml_TAG_DIRECTIVE_TOKEN // A TAG-DIRECTIVE token.
- yaml_DOCUMENT_START_TOKEN // A DOCUMENT-START token.
- yaml_DOCUMENT_END_TOKEN // A DOCUMENT-END token.
-
- yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token.
- yaml_BLOCK_MAPPING_START_TOKEN // A BLOCK-MAPPING-START token.
- yaml_BLOCK_END_TOKEN // A BLOCK-END token.
-
- yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token.
- yaml_FLOW_SEQUENCE_END_TOKEN // A FLOW-SEQUENCE-END token.
- yaml_FLOW_MAPPING_START_TOKEN // A FLOW-MAPPING-START token.
- yaml_FLOW_MAPPING_END_TOKEN // A FLOW-MAPPING-END token.
-
- yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token.
- yaml_FLOW_ENTRY_TOKEN // A FLOW-ENTRY token.
- yaml_KEY_TOKEN // A KEY token.
- yaml_VALUE_TOKEN // A VALUE token.
-
- yaml_ALIAS_TOKEN // An ALIAS token.
- yaml_ANCHOR_TOKEN // An ANCHOR token.
- yaml_TAG_TOKEN // A TAG token.
- yaml_SCALAR_TOKEN // A SCALAR token.
-)
-
-func (tt yaml_token_type_t) String() string {
- switch tt {
- case yaml_NO_TOKEN:
- return "yaml_NO_TOKEN"
- case yaml_STREAM_START_TOKEN:
- return "yaml_STREAM_START_TOKEN"
- case yaml_STREAM_END_TOKEN:
- return "yaml_STREAM_END_TOKEN"
- case yaml_VERSION_DIRECTIVE_TOKEN:
- return "yaml_VERSION_DIRECTIVE_TOKEN"
- case yaml_TAG_DIRECTIVE_TOKEN:
- return "yaml_TAG_DIRECTIVE_TOKEN"
- case yaml_DOCUMENT_START_TOKEN:
- return "yaml_DOCUMENT_START_TOKEN"
- case yaml_DOCUMENT_END_TOKEN:
- return "yaml_DOCUMENT_END_TOKEN"
- case yaml_BLOCK_SEQUENCE_START_TOKEN:
- return "yaml_BLOCK_SEQUENCE_START_TOKEN"
- case yaml_BLOCK_MAPPING_START_TOKEN:
- return "yaml_BLOCK_MAPPING_START_TOKEN"
- case yaml_BLOCK_END_TOKEN:
- return "yaml_BLOCK_END_TOKEN"
- case yaml_FLOW_SEQUENCE_START_TOKEN:
- return "yaml_FLOW_SEQUENCE_START_TOKEN"
- case yaml_FLOW_SEQUENCE_END_TOKEN:
- return "yaml_FLOW_SEQUENCE_END_TOKEN"
- case yaml_FLOW_MAPPING_START_TOKEN:
- return "yaml_FLOW_MAPPING_START_TOKEN"
- case yaml_FLOW_MAPPING_END_TOKEN:
- return "yaml_FLOW_MAPPING_END_TOKEN"
- case yaml_BLOCK_ENTRY_TOKEN:
- return "yaml_BLOCK_ENTRY_TOKEN"
- case yaml_FLOW_ENTRY_TOKEN:
- return "yaml_FLOW_ENTRY_TOKEN"
- case yaml_KEY_TOKEN:
- return "yaml_KEY_TOKEN"
- case yaml_VALUE_TOKEN:
- return "yaml_VALUE_TOKEN"
- case yaml_ALIAS_TOKEN:
- return "yaml_ALIAS_TOKEN"
- case yaml_ANCHOR_TOKEN:
- return "yaml_ANCHOR_TOKEN"
- case yaml_TAG_TOKEN:
- return "yaml_TAG_TOKEN"
- case yaml_SCALAR_TOKEN:
- return "yaml_SCALAR_TOKEN"
- }
- return ""
-}
-
-// The token structure.
-type yaml_token_t struct {
- // The token type.
- typ yaml_token_type_t
-
- // The start/end of the token.
- start_mark, end_mark yaml_mark_t
-
- // The stream encoding (for yaml_STREAM_START_TOKEN).
- encoding yaml_encoding_t
-
- // The alias/anchor/scalar value or tag/tag directive handle
- // (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN).
- value []byte
-
- // The tag suffix (for yaml_TAG_TOKEN).
- suffix []byte
-
- // The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN).
- prefix []byte
-
- // The scalar style (for yaml_SCALAR_TOKEN).
- style yaml_scalar_style_t
-
- // The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN).
- major, minor int8
-}
-
-// Events
-
-type yaml_event_type_t int8
-
-// Event types.
-const (
- // An empty event.
- yaml_NO_EVENT yaml_event_type_t = iota
-
- yaml_STREAM_START_EVENT // A STREAM-START event.
- yaml_STREAM_END_EVENT // A STREAM-END event.
- yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event.
- yaml_DOCUMENT_END_EVENT // A DOCUMENT-END event.
- yaml_ALIAS_EVENT // An ALIAS event.
- yaml_SCALAR_EVENT // A SCALAR event.
- yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event.
- yaml_SEQUENCE_END_EVENT // A SEQUENCE-END event.
- yaml_MAPPING_START_EVENT // A MAPPING-START event.
- yaml_MAPPING_END_EVENT // A MAPPING-END event.
-)
-
-// The event structure.
-type yaml_event_t struct {
-
- // The event type.
- typ yaml_event_type_t
-
- // The start and end of the event.
- start_mark, end_mark yaml_mark_t
-
- // The document encoding (for yaml_STREAM_START_EVENT).
- encoding yaml_encoding_t
-
- // The version directive (for yaml_DOCUMENT_START_EVENT).
- version_directive *yaml_version_directive_t
-
- // The list of tag directives (for yaml_DOCUMENT_START_EVENT).
- tag_directives []yaml_tag_directive_t
-
- // The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT).
- anchor []byte
-
- // The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
- tag []byte
-
- // The scalar value (for yaml_SCALAR_EVENT).
- value []byte
-
- // Is the document start/end indicator implicit, or the tag optional?
- // (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT).
- implicit bool
-
- // Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT).
- quoted_implicit bool
-
- // The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
- style yaml_style_t
-}
-
-func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) }
-func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) }
-func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) }
-
-// Nodes
-
-const (
- yaml_NULL_TAG = "tag:yaml.org,2002:null" // The tag !!null with the only possible value: null.
- yaml_BOOL_TAG = "tag:yaml.org,2002:bool" // The tag !!bool with the values: true and false.
- yaml_STR_TAG = "tag:yaml.org,2002:str" // The tag !!str for string values.
- yaml_INT_TAG = "tag:yaml.org,2002:int" // The tag !!int for integer values.
- yaml_FLOAT_TAG = "tag:yaml.org,2002:float" // The tag !!float for float values.
- yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values.
-
- yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences.
- yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping.
-
- // Not in original libyaml.
- yaml_BINARY_TAG = "tag:yaml.org,2002:binary"
- yaml_MERGE_TAG = "tag:yaml.org,2002:merge"
-
- yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str.
- yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq.
- yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG // The default mapping tag is !!map.
-)
-
-type yaml_node_type_t int
-
-// Node types.
-const (
- // An empty node.
- yaml_NO_NODE yaml_node_type_t = iota
-
- yaml_SCALAR_NODE // A scalar node.
- yaml_SEQUENCE_NODE // A sequence node.
- yaml_MAPPING_NODE // A mapping node.
-)
-
-// An element of a sequence node.
-type yaml_node_item_t int
-
-// An element of a mapping node.
-type yaml_node_pair_t struct {
- key int // The key of the element.
- value int // The value of the element.
-}
-
-// The node structure.
-type yaml_node_t struct {
- typ yaml_node_type_t // The node type.
- tag []byte // The node tag.
-
- // The node data.
-
- // The scalar parameters (for yaml_SCALAR_NODE).
- scalar struct {
- value []byte // The scalar value.
- length int // The length of the scalar value.
- style yaml_scalar_style_t // The scalar style.
- }
-
- // The sequence parameters (for YAML_SEQUENCE_NODE).
- sequence struct {
- items_data []yaml_node_item_t // The stack of sequence items.
- style yaml_sequence_style_t // The sequence style.
- }
-
- // The mapping parameters (for yaml_MAPPING_NODE).
- mapping struct {
- pairs_data []yaml_node_pair_t // The stack of mapping pairs (key, value).
- pairs_start *yaml_node_pair_t // The beginning of the stack.
- pairs_end *yaml_node_pair_t // The end of the stack.
- pairs_top *yaml_node_pair_t // The top of the stack.
- style yaml_mapping_style_t // The mapping style.
- }
-
- start_mark yaml_mark_t // The beginning of the node.
- end_mark yaml_mark_t // The end of the node.
-
-}
-
-// The document structure.
-type yaml_document_t struct {
-
- // The document nodes.
- nodes []yaml_node_t
-
- // The version directive.
- version_directive *yaml_version_directive_t
-
- // The list of tag directives.
- tag_directives_data []yaml_tag_directive_t
- tag_directives_start int // The beginning of the tag directives list.
- tag_directives_end int // The end of the tag directives list.
-
- start_implicit int // Is the document start indicator implicit?
- end_implicit int // Is the document end indicator implicit?
-
- // The start/end of the document.
- start_mark, end_mark yaml_mark_t
-}
-
-// The prototype of a read handler.
-//
-// The read handler is called when the parser needs to read more bytes from the
-// source. The handler should write no more than size bytes to the buffer.
-// The number of written bytes should be set to the size_read variable.
-//
-// [in,out] data A pointer to an application data specified by
-// yaml_parser_set_input().
-// [out] buffer The buffer to write the data from the source.
-// [in] size The size of the buffer.
-// [out] size_read The actual number of bytes read from the source.
-//
-// On success, the handler should return 1. If the handler failed,
-// the returned value should be 0. On EOF, the handler should set the
-// size_read to 0 and return 1.
-type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error)
-
-// This structure holds information about a potential simple key.
-type yaml_simple_key_t struct {
- possible bool // Is a simple key possible?
- required bool // Is a simple key required?
- token_number int // The number of the token.
- mark yaml_mark_t // The position mark.
-}
-
-// The states of the parser.
-type yaml_parser_state_t int
-
-const (
- yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota
-
- yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE // Expect the beginning of an implicit document.
- yaml_PARSE_DOCUMENT_START_STATE // Expect DOCUMENT-START.
- yaml_PARSE_DOCUMENT_CONTENT_STATE // Expect the content of a document.
- yaml_PARSE_DOCUMENT_END_STATE // Expect DOCUMENT-END.
- yaml_PARSE_BLOCK_NODE_STATE // Expect a block node.
- yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence.
- yaml_PARSE_FLOW_NODE_STATE // Expect a flow node.
- yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a block sequence.
- yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE // Expect an entry of a block sequence.
- yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE // Expect an entry of an indentless sequence.
- yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping.
- yaml_PARSE_BLOCK_MAPPING_KEY_STATE // Expect a block mapping key.
- yaml_PARSE_BLOCK_MAPPING_VALUE_STATE // Expect a block mapping value.
- yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a flow sequence.
- yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE // Expect an entry of a flow sequence.
- yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE // Expect a key of an ordered mapping.
- yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping.
- yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE // Expect the end of an ordered mapping entry.
- yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping.
- yaml_PARSE_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping.
- yaml_PARSE_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping.
- yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE // Expect an empty value of a flow mapping.
- yaml_PARSE_END_STATE // Expect nothing.
-)
-
-func (ps yaml_parser_state_t) String() string {
- switch ps {
- case yaml_PARSE_STREAM_START_STATE:
- return "yaml_PARSE_STREAM_START_STATE"
- case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
- return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE"
- case yaml_PARSE_DOCUMENT_START_STATE:
- return "yaml_PARSE_DOCUMENT_START_STATE"
- case yaml_PARSE_DOCUMENT_CONTENT_STATE:
- return "yaml_PARSE_DOCUMENT_CONTENT_STATE"
- case yaml_PARSE_DOCUMENT_END_STATE:
- return "yaml_PARSE_DOCUMENT_END_STATE"
- case yaml_PARSE_BLOCK_NODE_STATE:
- return "yaml_PARSE_BLOCK_NODE_STATE"
- case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
- return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE"
- case yaml_PARSE_FLOW_NODE_STATE:
- return "yaml_PARSE_FLOW_NODE_STATE"
- case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
- return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE"
- case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
- return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE"
- case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
- return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE"
- case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
- return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE"
- case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
- return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE"
- case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
- return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE"
- case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
- return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE"
- case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
- return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE"
- case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
- return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE"
- case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
- return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE"
- case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
- return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE"
- case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
- return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE"
- case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
- return "yaml_PARSE_FLOW_MAPPING_KEY_STATE"
- case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
- return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE"
- case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
- return "yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE"
- case yaml_PARSE_END_STATE:
- return "yaml_PARSE_END_STATE"
- }
- return ""
-}
-
-// This structure holds aliases data.
-type yaml_alias_data_t struct {
- anchor []byte // The anchor.
- index int // The node id.
- mark yaml_mark_t // The anchor mark.
-}
-
-// The parser structure.
-//
-// All members are internal. Manage the structure using the
-// yaml_parser_ family of functions.
-type yaml_parser_t struct {
-
- // Error handling
-
- error yaml_error_type_t // Error type.
-
- problem string // Error description.
-
- // The byte about which the problem occurred.
- problem_offset int
- problem_value int
- problem_mark yaml_mark_t
-
- // The error context.
- context string
- context_mark yaml_mark_t
-
- // Reader stuff
-
- read_handler yaml_read_handler_t // Read handler.
-
- input_file io.Reader // File input data.
- input []byte // String input data.
- input_pos int
-
- eof bool // EOF flag
-
- buffer []byte // The working buffer.
- buffer_pos int // The current position of the buffer.
-
- unread int // The number of unread characters in the buffer.
-
- raw_buffer []byte // The raw buffer.
- raw_buffer_pos int // The current position of the buffer.
-
- encoding yaml_encoding_t // The input encoding.
-
- offset int // The offset of the current position (in bytes).
- mark yaml_mark_t // The mark of the current position.
-
- // Scanner stuff
-
- stream_start_produced bool // Have we started to scan the input stream?
- stream_end_produced bool // Have we reached the end of the input stream?
-
- flow_level int // The number of unclosed '[' and '{' indicators.
-
- tokens []yaml_token_t // The tokens queue.
- tokens_head int // The head of the tokens queue.
- tokens_parsed int // The number of tokens fetched from the queue.
- token_available bool // Does the tokens queue contain a token ready for dequeueing.
-
- indent int // The current indentation level.
- indents []int // The indentation levels stack.
-
- simple_key_allowed bool // May a simple key occur at the current position?
- simple_keys []yaml_simple_key_t // The stack of simple keys.
-
- // Parser stuff
-
- state yaml_parser_state_t // The current parser state.
- states []yaml_parser_state_t // The parser states stack.
- marks []yaml_mark_t // The stack of marks.
- tag_directives []yaml_tag_directive_t // The list of TAG directives.
-
- // Dumper stuff
-
- aliases []yaml_alias_data_t // The alias data.
-
- document *yaml_document_t // The currently parsed document.
-}
-
-// Emitter Definitions
-
-// The prototype of a write handler.
-//
-// The write handler is called when the emitter needs to flush the accumulated
-// characters to the output. The handler should write @a size bytes of the
-// @a buffer to the output.
-//
-// @param[in,out] data A pointer to an application data specified by
-// yaml_emitter_set_output().
-// @param[in] buffer The buffer with bytes to be written.
-// @param[in] size The size of the buffer.
-//
-// @returns On success, the handler should return @c 1. If the handler failed,
-// the returned value should be @c 0.
-//
-type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error
-
-type yaml_emitter_state_t int
-
-// The emitter states.
-const (
- // Expect STREAM-START.
- yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota
-
- yaml_EMIT_FIRST_DOCUMENT_START_STATE // Expect the first DOCUMENT-START or STREAM-END.
- yaml_EMIT_DOCUMENT_START_STATE // Expect DOCUMENT-START or STREAM-END.
- yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document.
- yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END.
- yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence.
- yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence.
- yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping.
- yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping.
- yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping.
- yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping.
- yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a block sequence.
- yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE // Expect an item of a block sequence.
- yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping.
- yaml_EMIT_BLOCK_MAPPING_KEY_STATE // Expect the key of a block mapping.
- yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping.
- yaml_EMIT_BLOCK_MAPPING_VALUE_STATE // Expect a value of a block mapping.
- yaml_EMIT_END_STATE // Expect nothing.
-)
-
-// The emitter structure.
-//
-// All members are internal. Manage the structure using the @c yaml_emitter_
-// family of functions.
-type yaml_emitter_t struct {
-
- // Error handling
-
- error yaml_error_type_t // Error type.
- problem string // Error description.
-
- // Writer stuff
-
- write_handler yaml_write_handler_t // Write handler.
-
- output_buffer *[]byte // String output data.
- output_file io.Writer // File output data.
-
- buffer []byte // The working buffer.
- buffer_pos int // The current position of the buffer.
-
- raw_buffer []byte // The raw buffer.
- raw_buffer_pos int // The current position of the buffer.
-
- encoding yaml_encoding_t // The stream encoding.
-
- // Emitter stuff
-
- canonical bool // If the output is in the canonical style?
- best_indent int // The number of indentation spaces.
- best_width int // The preferred width of the output lines.
- unicode bool // Allow unescaped non-ASCII characters?
- line_break yaml_break_t // The preferred line break.
-
- state yaml_emitter_state_t // The current emitter state.
- states []yaml_emitter_state_t // The stack of states.
-
- events []yaml_event_t // The event queue.
- events_head int // The head of the event queue.
-
- indents []int // The stack of indentation levels.
-
- tag_directives []yaml_tag_directive_t // The list of tag directives.
-
- indent int // The current indentation level.
-
- flow_level int // The current flow level.
-
- root_context bool // Is it the document root context?
- sequence_context bool // Is it a sequence context?
- mapping_context bool // Is it a mapping context?
- simple_key_context bool // Is it a simple mapping key context?
-
- line int // The current line.
- column int // The current column.
- whitespace bool // If the last character was a whitespace?
- indention bool // If the last character was an indentation character (' ', '-', '?', ':')?
- open_ended bool // If an explicit document end is required?
-
- // Anchor analysis.
- anchor_data struct {
- anchor []byte // The anchor value.
- alias bool // Is it an alias?
- }
-
- // Tag analysis.
- tag_data struct {
- handle []byte // The tag handle.
- suffix []byte // The tag suffix.
- }
-
- // Scalar analysis.
- scalar_data struct {
- value []byte // The scalar value.
- multiline bool // Does the scalar contain line breaks?
- flow_plain_allowed bool // Can the scalar be expressed in the flow plain style?
- block_plain_allowed bool // Can the scalar be expressed in the block plain style?
- single_quoted_allowed bool // Can the scalar be expressed in the single quoted style?
- block_allowed bool // Can the scalar be expressed in the literal or folded styles?
- style yaml_scalar_style_t // The output style.
- }
-
- // Dumper stuff
-
- opened bool // If the stream was already opened?
- closed bool // If the stream was already closed?
-
- // The information associated with the document nodes.
- anchors *struct {
- references int // The number of references.
- anchor int // The anchor id.
- serialized bool // If the node has been emitted?
- }
-
- last_anchor_id int // The last assigned anchor id.
-
- document *yaml_document_t // The currently emitted document.
-}
diff --git a/vendor/github.com/coreos/yaml/yamlprivateh.go b/vendor/github.com/coreos/yaml/yamlprivateh.go
deleted file mode 100644
index 8110ce3c3..000000000
--- a/vendor/github.com/coreos/yaml/yamlprivateh.go
+++ /dev/null
@@ -1,173 +0,0 @@
-package yaml
-
-const (
- // The size of the input raw buffer.
- input_raw_buffer_size = 512
-
- // The size of the input buffer.
- // It should be possible to decode the whole raw buffer.
- input_buffer_size = input_raw_buffer_size * 3
-
- // The size of the output buffer.
- output_buffer_size = 128
-
- // The size of the output raw buffer.
- // It should be possible to encode the whole output buffer.
- output_raw_buffer_size = (output_buffer_size*2 + 2)
-
- // The size of other stacks and queues.
- initial_stack_size = 16
- initial_queue_size = 16
- initial_string_size = 16
-)
-
-// Check if the character at the specified position is an alphabetical
-// character, a digit, '_', or '-'.
-func is_alpha(b []byte, i int) bool {
- return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'Z' || b[i] >= 'a' && b[i] <= 'z' || b[i] == '_' || b[i] == '-'
-}
-
-// Check if the character at the specified position is a digit.
-func is_digit(b []byte, i int) bool {
- return b[i] >= '0' && b[i] <= '9'
-}
-
-// Get the value of a digit.
-func as_digit(b []byte, i int) int {
- return int(b[i]) - '0'
-}
-
-// Check if the character at the specified position is a hex-digit.
-func is_hex(b []byte, i int) bool {
- return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'F' || b[i] >= 'a' && b[i] <= 'f'
-}
-
-// Get the value of a hex-digit.
-func as_hex(b []byte, i int) int {
- bi := b[i]
- if bi >= 'A' && bi <= 'F' {
- return int(bi) - 'A' + 10
- }
- if bi >= 'a' && bi <= 'f' {
- return int(bi) - 'a' + 10
- }
- return int(bi) - '0'
-}
-
-// Check if the character is ASCII.
-func is_ascii(b []byte, i int) bool {
- return b[i] <= 0x7F
-}
-
-// Check if the character at the start of the buffer can be printed unescaped.
-func is_printable(b []byte, i int) bool {
- return ((b[i] == 0x0A) || // . == #x0A
- (b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E
- (b[i] == 0xC2 && b[i+1] >= 0xA0) || // #0xA0 <= . <= #xD7FF
- (b[i] > 0xC2 && b[i] < 0xED) ||
- (b[i] == 0xED && b[i+1] < 0xA0) ||
- (b[i] == 0xEE) ||
- (b[i] == 0xEF && // #xE000 <= . <= #xFFFD
- !(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF
- !(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF))))
-}
-
-// Check if the character at the specified position is NUL.
-func is_z(b []byte, i int) bool {
- return b[i] == 0x00
-}
-
-// Check if the beginning of the buffer is a BOM.
-func is_bom(b []byte, i int) bool {
- return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF
-}
-
-// Check if the character at the specified position is space.
-func is_space(b []byte, i int) bool {
- return b[i] == ' '
-}
-
-// Check if the character at the specified position is tab.
-func is_tab(b []byte, i int) bool {
- return b[i] == '\t'
-}
-
-// Check if the character at the specified position is blank (space or tab).
-func is_blank(b []byte, i int) bool {
- //return is_space(b, i) || is_tab(b, i)
- return b[i] == ' ' || b[i] == '\t'
-}
-
-// Check if the character at the specified position is a line break.
-func is_break(b []byte, i int) bool {
- return (b[i] == '\r' || // CR (#xD)
- b[i] == '\n' || // LF (#xA)
- b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
- b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
- b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) // PS (#x2029)
-}
-
-func is_crlf(b []byte, i int) bool {
- return b[i] == '\r' && b[i+1] == '\n'
-}
-
-// Check if the character is a line break or NUL.
-func is_breakz(b []byte, i int) bool {
- //return is_break(b, i) || is_z(b, i)
- return ( // is_break:
- b[i] == '\r' || // CR (#xD)
- b[i] == '\n' || // LF (#xA)
- b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
- b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
- b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
- // is_z:
- b[i] == 0)
-}
-
-// Check if the character is a line break, space, or NUL.
-func is_spacez(b []byte, i int) bool {
- //return is_space(b, i) || is_breakz(b, i)
- return ( // is_space:
- b[i] == ' ' ||
- // is_breakz:
- b[i] == '\r' || // CR (#xD)
- b[i] == '\n' || // LF (#xA)
- b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
- b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
- b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
- b[i] == 0)
-}
-
-// Check if the character is a line break, space, tab, or NUL.
-func is_blankz(b []byte, i int) bool {
- //return is_blank(b, i) || is_breakz(b, i)
- return ( // is_blank:
- b[i] == ' ' || b[i] == '\t' ||
- // is_breakz:
- b[i] == '\r' || // CR (#xD)
- b[i] == '\n' || // LF (#xA)
- b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
- b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
- b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
- b[i] == 0)
-}
-
-// Determine the width of the character.
-func width(b byte) int {
- // Don't replace these by a switch without first
- // confirming that it is being inlined.
- if b&0x80 == 0x00 {
- return 1
- }
- if b&0xE0 == 0xC0 {
- return 2
- }
- if b&0xF0 == 0xE0 {
- return 3
- }
- if b&0xF8 == 0xF0 {
- return 4
- }
- return 0
-
-}
diff --git a/vendor/github.com/hashicorp/errwrap/errwrap.go b/vendor/github.com/hashicorp/errwrap/errwrap.go
index a733bef18..44e368e56 100644
--- a/vendor/github.com/hashicorp/errwrap/errwrap.go
+++ b/vendor/github.com/hashicorp/errwrap/errwrap.go
@@ -44,6 +44,8 @@ func Wrap(outer, inner error) error {
//
// format is the format of the error message. The string '{{err}}' will
// be replaced with the original error message.
+//
+// Deprecated: Use fmt.Errorf()
func Wrapf(format string, err error) error {
outerMsg := ""
if err != nil {
@@ -148,6 +150,9 @@ func Walk(err error, cb WalkFunc) {
for _, err := range e.WrappedErrors() {
Walk(err, cb)
}
+ case interface{ Unwrap() error }:
+ cb(err)
+ Walk(e.Unwrap(), cb)
default:
cb(err)
}
@@ -167,3 +172,7 @@ func (w *wrappedError) Error() string {
func (w *wrappedError) WrappedErrors() []error {
return []error{w.Outer, w.Inner}
}
+
+func (w *wrappedError) Unwrap() error {
+ return w.Inner
+}
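
The errwrap bump to v1.1.0 wires the package into Go 1.13 error chains: `wrappedError` gains an `Unwrap` method, `Walk` now descends through any error exposing `Unwrap() error`, and `Wrapf` is deprecated in favour of `fmt.Errorf`. A minimal sketch of how the two styles interoperate (the sentinel error and messages are illustrative, not from this repository):

```go
package main

import (
	"errors"
	"fmt"

	"github.com/hashicorp/errwrap"
)

// errNotFound is an illustrative sentinel error, not part of this repository.
var errNotFound = errors.New("not found")

func main() {
	// Old style: errwrap.Wrapf still works but is deprecated in v1.1.0;
	// fmt.Errorf with %w is the suggested replacement.
	legacy := errwrap.Wrapf("loading config: {{err}}", errNotFound)
	modern := fmt.Errorf("loading config: %w", errNotFound)

	// Because wrappedError now implements Unwrap, errors.Is sees through it.
	fmt.Println(errors.Is(legacy, errNotFound)) // true
	fmt.Println(errors.Is(modern, errNotFound)) // true

	// Walk also descends through fmt.Errorf("%w", ...) chains now that the
	// interface{ Unwrap() error } case exists.
	errwrap.Walk(modern, func(err error) {
		fmt.Println("visited:", err)
	})
}
```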
diff --git a/vendor/github.com/hashicorp/go-multierror/.travis.yml b/vendor/github.com/hashicorp/go-multierror/.travis.yml
deleted file mode 100644
index 24b80388f..000000000
--- a/vendor/github.com/hashicorp/go-multierror/.travis.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-sudo: false
-
-language: go
-
-go:
- - 1.x
-
-branches:
- only:
- - master
-
-script: env GO111MODULE=on make test testrace
diff --git a/vendor/github.com/hashicorp/go-multierror/README.md b/vendor/github.com/hashicorp/go-multierror/README.md
index e92fa614c..71dd308ed 100644
--- a/vendor/github.com/hashicorp/go-multierror/README.md
+++ b/vendor/github.com/hashicorp/go-multierror/README.md
@@ -1,10 +1,11 @@
# go-multierror
-[![Build Status](http://img.shields.io/travis/hashicorp/go-multierror.svg?style=flat-square)][travis]
-[![Go Documentation](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)][godocs]
+[![CircleCI](https://img.shields.io/circleci/build/github/hashicorp/go-multierror/master)](https://circleci.com/gh/hashicorp/go-multierror)
+[![Go Reference](https://pkg.go.dev/badge/github.com/hashicorp/go-multierror.svg)](https://pkg.go.dev/github.com/hashicorp/go-multierror)
+![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/hashicorp/go-multierror)
-[travis]: https://travis-ci.org/hashicorp/go-multierror
-[godocs]: https://godoc.org/github.com/hashicorp/go-multierror
+[circleci]: https://app.circleci.com/pipelines/github/hashicorp/go-multierror
+[godocs]: https://pkg.go.dev/github.com/hashicorp/go-multierror
`go-multierror` is a package for Go that provides a mechanism for
representing a list of `error` values as a single `error`.
@@ -24,7 +25,25 @@ for introspecting on error values.
Install using `go get github.com/hashicorp/go-multierror`.
Full documentation is available at
-http://godoc.org/github.com/hashicorp/go-multierror
+https://pkg.go.dev/github.com/hashicorp/go-multierror
+
+### Requires go version 1.13 or newer
+
+`go-multierror` requires go version 1.13 or newer. Go 1.13 introduced
+[error wrapping](https://golang.org/doc/go1.13#error_wrapping), which
+this library takes advantage of.
+
+If you need to use an earlier version of go, you can use the
+[v1.0.0](https://github.com/hashicorp/go-multierror/tree/v1.0.0)
+tag, which doesn't rely on features in go 1.13.
+
+If you see compile errors that look like the below, it's likely that
+you're on an older version of go:
+
+```
+/go/src/github.com/hashicorp/go-multierror/multierror.go:112:9: undefined: errors.As
+/go/src/github.com/hashicorp/go-multierror/multierror.go:117:9: undefined: errors.Is
+```
## Usage
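
The README addition above is why the dependency now floats at go-multierror v1.1.1: the library relies on Go 1.13 error wrapping, so a combined error participates in `errors.Is`/`errors.As`. A small sketch under that assumption (the error values are made up for illustration):

```go
package main

import (
	"errors"
	"fmt"
	"os"

	"github.com/hashicorp/go-multierror"
)

func main() {
	// Append tolerates a nil starting error and skips nil members.
	var result *multierror.Error
	result = multierror.Append(result, fmt.Errorf("step one failed"))
	result = multierror.Append(result, os.ErrNotExist)

	// Because the library now targets Go 1.13+, the collected errors take
	// part in errors.Is / errors.As like any fmt.Errorf("%w", ...) chain.
	err := result.ErrorOrNil()
	fmt.Println(errors.Is(err, os.ErrNotExist)) // true
}
```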
diff --git a/vendor/github.com/hashicorp/go-multierror/append.go b/vendor/github.com/hashicorp/go-multierror/append.go
index 775b6e753..3e2589bfd 100644
--- a/vendor/github.com/hashicorp/go-multierror/append.go
+++ b/vendor/github.com/hashicorp/go-multierror/append.go
@@ -6,6 +6,8 @@ package multierror
// If err is not a multierror.Error, then it will be turned into
// one. If any of the errs are multierr.Error, they will be flattened
// one level into err.
+// Any nil errors within errs will be ignored. If err is nil, a new
+// *Error will be returned.
func Append(err error, errs ...error) *Error {
switch err := err.(type) {
case *Error:
diff --git a/vendor/github.com/hashicorp/go-multierror/multierror.go b/vendor/github.com/hashicorp/go-multierror/multierror.go
index d05dd9269..f54574326 100644
--- a/vendor/github.com/hashicorp/go-multierror/multierror.go
+++ b/vendor/github.com/hashicorp/go-multierror/multierror.go
@@ -40,14 +40,17 @@ func (e *Error) GoString() string {
return fmt.Sprintf("*%#v", *e)
}
-// WrappedErrors returns the list of errors that this Error is wrapping.
-// It is an implementation of the errwrap.Wrapper interface so that
-// multierror.Error can be used with that library.
+// WrappedErrors returns the list of errors that this Error is wrapping. It is
+// an implementation of the errwrap.Wrapper interface so that multierror.Error
+// can be used with that library.
//
-// This method is not safe to be called concurrently and is no different
-// than accessing the Errors field directly. It is implemented only to
-// satisfy the errwrap.Wrapper interface.
+// This method is not safe to be called concurrently. Unlike accessing the
+// Errors field directly, this function also checks if the multierror is nil to
+// prevent a null-pointer panic. It satisfies the errwrap.Wrapper interface.
func (e *Error) WrappedErrors() []error {
+ if e == nil {
+ return nil
+ }
return e.Errors
}
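
The nil check added to `WrappedErrors` above makes the method safe to call on an unset `*multierror.Error`, which reading the `Errors` field directly is not. A tiny illustrative sketch:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/go-multierror"
)

func main() {
	// A nil *Error is a legal "no errors yet" value.
	var merr *multierror.Error

	// With the nil check above, this returns nil instead of panicking on
	// the nil receiver.
	fmt.Println(merr.WrappedErrors()) // []

	// Reading merr.Errors directly would still dereference the nil pointer,
	// so prefer WrappedErrors (or ErrorOrNil) when the value may be unset.
}
```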
diff --git a/vendor/github.com/imdario/mergo/CONTRIBUTING.md b/vendor/github.com/imdario/mergo/CONTRIBUTING.md
new file mode 100644
index 000000000..0a1ff9f94
--- /dev/null
+++ b/vendor/github.com/imdario/mergo/CONTRIBUTING.md
@@ -0,0 +1,112 @@
+
+# Contributing to mergo
+
+First off, thanks for taking the time to contribute! ❤️
+
+All types of contributions are encouraged and valued. See the [Table of Contents](#table-of-contents) for different ways to help and details about how this project handles them. Please make sure to read the relevant section before making your contribution. It will make it a lot easier for us maintainers and smooth out the experience for all involved. The community looks forward to your contributions. 🎉
+
+> And if you like the project, but just don't have time to contribute, that's fine. There are other easy ways to support the project and show your appreciation, which we would also be very happy about:
+> - Star the project
+> - Tweet about it
+> - Refer this project in your project's readme
+> - Mention the project at local meetups and tell your friends/colleagues
+
+
+## Table of Contents
+
+- [Code of Conduct](#code-of-conduct)
+- [I Have a Question](#i-have-a-question)
+- [I Want To Contribute](#i-want-to-contribute)
+- [Reporting Bugs](#reporting-bugs)
+- [Suggesting Enhancements](#suggesting-enhancements)
+
+## Code of Conduct
+
+This project and everyone participating in it is governed by the
+[mergo Code of Conduct](https://github.com/imdario/mergo/blob/master/CODE_OF_CONDUCT.md).
+By participating, you are expected to uphold this code. Please report unacceptable behavior
+to <>.
+
+
+## I Have a Question
+
+> If you want to ask a question, we assume that you have read the available [Documentation](https://pkg.go.dev/github.com/imdario/mergo).
+
+Before you ask a question, it is best to search for existing [Issues](https://github.com/imdario/mergo/issues) that might help you. In case you have found a suitable issue and still need clarification, you can write your question in this issue. It is also advisable to search the internet for answers first.
+
+If you then still feel the need to ask a question and need clarification, we recommend the following:
+
+- Open an [Issue](https://github.com/imdario/mergo/issues/new).
+- Provide as much context as you can about what you're running into.
+- Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant.
+
+We will then take care of the issue as soon as possible.
+
+## I Want To Contribute
+
+> ### Legal Notice
+> When contributing to this project, you must agree that you have authored 100% of the content, that you have the necessary rights to the content and that the content you contribute may be provided under the project license.
+
+### Reporting Bugs
+
+
+#### Before Submitting a Bug Report
+
+A good bug report shouldn't leave others needing to chase you up for more information. Therefore, we ask you to investigate carefully, collect information and describe the issue in detail in your report. Please complete the following steps in advance to help us fix any potential bug as fast as possible.
+
+- Make sure that you are using the latest version.
+- Determine if your bug is really a bug and not an error on your side e.g. using incompatible environment components/versions (Make sure that you have read the [documentation](). If you are looking for support, you might want to check [this section](#i-have-a-question)).
+- To see if other users have experienced (and potentially already solved) the same issue you are having, check if there is not already a bug report existing for your bug or error in the [bug tracker](https://github.com/imdario/mergo/issues?q=label%3Abug).
+- Also make sure to search the internet (including Stack Overflow) to see if users outside of the GitHub community have discussed the issue.
+- Collect information about the bug:
+- Stack trace (Traceback)
+- OS, Platform and Version (Windows, Linux, macOS, x86, ARM)
+- Version of the interpreter, compiler, SDK, runtime environment, package manager, depending on what seems relevant.
+- Possibly your input and the output
+- Can you reliably reproduce the issue? And can you also reproduce it with older versions?
+
+
+#### How Do I Submit a Good Bug Report?
+
+> You must never report security related issues, vulnerabilities or bugs including sensitive information to the issue tracker, or elsewhere in public. Instead sensitive bugs must be sent by email to .
+
+
+We use GitHub issues to track bugs and errors. If you run into an issue with the project:
+
+- Open an [Issue](https://github.com/imdario/mergo/issues/new). (Since we can't be sure at this point whether it is a bug or not, we ask you not to talk about a bug yet and not to label the issue.)
+- Explain the behavior you would expect and the actual behavior.
+- Please provide as much context as possible and describe the *reproduction steps* that someone else can follow to recreate the issue on their own. This usually includes your code. For good bug reports you should isolate the problem and create a reduced test case.
+- Provide the information you collected in the previous section.
+
+Once it's filed:
+
+- The project team will label the issue accordingly.
+- A team member will try to reproduce the issue with your provided steps. If there are no reproduction steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. Bugs with the `needs-repro` tag will not be addressed until they are reproduced.
+- If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be implemented by someone.
+
+### Suggesting Enhancements
+
+This section guides you through submitting an enhancement suggestion for mergo, **including completely new features and minor improvements to existing functionality**. Following these guidelines will help maintainers and the community to understand your suggestion and find related suggestions.
+
+
+#### Before Submitting an Enhancement
+
+- Make sure that you are using the latest version.
+- Read the [documentation]() carefully and find out if the functionality is already covered, maybe by an individual configuration.
+- Perform a [search](https://github.com/imdario/mergo/issues) to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.
+- Find out whether your idea fits with the scope and aims of the project. It's up to you to make a strong case to convince the project's developers of the merits of this feature. Keep in mind that we want features that will be useful to the majority of our users and not just a small subset. If you're just targeting a minority of users, consider writing an add-on/plugin library.
+
+
+#### How Do I Submit a Good Enhancement Suggestion?
+
+Enhancement suggestions are tracked as [GitHub issues](https://github.com/imdario/mergo/issues).
+
+- Use a **clear and descriptive title** for the issue to identify the suggestion.
+- Provide a **step-by-step description of the suggested enhancement** in as many details as possible.
+- **Describe the current behavior** and **explain which behavior you expected to see instead** and why. At this point you can also tell which alternatives do not work for you.
+- You may want to **include screenshots and animated GIFs** which help you demonstrate the steps or point out the part which the suggestion is related to. You can use [this tool](https://www.cockos.com/licecap/) to record GIFs on macOS and Windows, and [this tool](https://github.com/colinkeenan/silentcast) or [this tool](https://github.com/GNOME/byzanz) on Linux.
+- **Explain why this enhancement would be useful** to most mergo users. You may also want to point out the other projects that solved it better and which could serve as inspiration.
+
+
+## Attribution
+This guide is based on the **contributing-gen**. [Make your own](https://github.com/bttger/contributing-gen)!
diff --git a/vendor/github.com/imdario/mergo/README.md b/vendor/github.com/imdario/mergo/README.md
index aa8cbd7ce..ffbbb62c7 100644
--- a/vendor/github.com/imdario/mergo/README.md
+++ b/vendor/github.com/imdario/mergo/README.md
@@ -1,18 +1,20 @@
# Mergo
-
-[![GoDoc][3]][4]
[![GitHub release][5]][6]
[![GoCard][7]][8]
-[![Build Status][1]][2]
-[![Coverage Status][9]][10]
+[![Test status][1]][2]
+[![OpenSSF Scorecard][21]][22]
+[![OpenSSF Best Practices][19]][20]
+[![Coverage status][9]][10]
[![Sourcegraph][11]][12]
-[![FOSSA Status][13]][14]
+[![FOSSA status][13]][14]
-[![GoCenter Kudos][15]][16]
+[![GoDoc][3]][4]
+[![Become my sponsor][15]][16]
+[![Tidelift][17]][18]
-[1]: https://travis-ci.org/imdario/mergo.png
-[2]: https://travis-ci.org/imdario/mergo
+[1]: https://github.com/imdario/mergo/workflows/tests/badge.svg?branch=master
+[2]: https://github.com/imdario/mergo/actions/workflows/tests.yml
[3]: https://godoc.org/github.com/imdario/mergo?status.svg
[4]: https://godoc.org/github.com/imdario/mergo
[5]: https://img.shields.io/github/release/imdario/mergo.svg
@@ -25,8 +27,14 @@
[12]: https://sourcegraph.com/github.com/imdario/mergo?badge
[13]: https://app.fossa.io/api/projects/git%2Bgithub.com%2Fimdario%2Fmergo.svg?type=shield
[14]: https://app.fossa.io/projects/git%2Bgithub.com%2Fimdario%2Fmergo?ref=badge_shield
-[15]: https://search.gocenter.io/api/ui/badge/github.com%2Fimdario%2Fmergo
-[16]: https://search.gocenter.io/github.com/imdario/mergo
+[15]: https://img.shields.io/github/sponsors/imdario
+[16]: https://github.com/sponsors/imdario
+[17]: https://tidelift.com/badges/package/go/github.com%2Fimdario%2Fmergo
+[18]: https://tidelift.com/subscription/pkg/go-github.com-imdario-mergo
+[19]: https://bestpractices.coreinfrastructure.org/projects/7177/badge
+[20]: https://bestpractices.coreinfrastructure.org/projects/7177
+[21]: https://api.securityscorecards.dev/projects/github.com/imdario/mergo/badge
+[22]: https://api.securityscorecards.dev/projects/github.com/imdario/mergo
A helper to merge structs and maps in Golang. Useful for configuration default values, avoiding messy if-statements.
@@ -36,11 +44,11 @@ Also a lovely [comune](http://en.wikipedia.org/wiki/Mergo) (municipality) in the
## Status
-It is ready for production use. [It is used in several projects by Docker, Google, The Linux Foundation, VMWare, Shopify, etc](https://github.com/imdario/mergo#mergo-in-the-wild).
+It is ready for production use. [It is used in several projects by Docker, Google, The Linux Foundation, VMWare, Shopify, Microsoft, etc](https://github.com/imdario/mergo#mergo-in-the-wild).
### Important note
-Please keep in mind that a problematic PR broke [0.3.9](//github.com/imdario/mergo/releases/tag/0.3.9). I reverted it in [0.3.10](//github.com/imdario/mergo/releases/tag/0.3.10), and I consider it stable but not bug-free. Also, this version adds suppot for go modules.
+Please keep in mind that a problematic PR broke [0.3.9](//github.com/imdario/mergo/releases/tag/0.3.9). I reverted it in [0.3.10](//github.com/imdario/mergo/releases/tag/0.3.10), and I consider it stable but not bug-free. Also, this version adds support for go modules.
Keep in mind that in [0.3.2](//github.com/imdario/mergo/releases/tag/0.3.2), Mergo changed `Merge()`and `Map()` signatures to support [transformers](#transformers). I added an optional/variadic argument so that it won't break the existing code.
@@ -51,9 +59,8 @@ If you were using Mergo before April 6th, 2015, please check your project works
If Mergo is useful to you, consider buying me a coffee, a beer, or making a monthly donation to allow me to keep building great free software. :heart_eyes:
-[![Beerpay](https://beerpay.io/imdario/mergo/badge.svg)](https://beerpay.io/imdario/mergo)
-[![Beerpay](https://beerpay.io/imdario/mergo/make-wish.svg)](https://beerpay.io/imdario/mergo)
+
### Mergo in the wild
@@ -98,6 +105,8 @@ If Mergo is useful to you, consider buying me a coffee, a beer, or making a mont
- [jnuthong/item_search](https://github.com/jnuthong/item_search)
- [bukalapak/snowboard](https://github.com/bukalapak/snowboard)
- [containerssh/containerssh](https://github.com/containerssh/containerssh)
+- [goreleaser/goreleaser](https://github.com/goreleaser/goreleaser)
+- [tjpnz/structbot](https://github.com/tjpnz/structbot)
## Install
@@ -168,7 +177,7 @@ func main() {
Note: if tests are failing due to a missing package, please execute:
- go get gopkg.in/yaml.v2
+ go get gopkg.in/yaml.v3
### Transformers
@@ -218,7 +227,6 @@ func main() {
}
```
-
## Contact me
If I can help you, you have an idea or you are using Mergo in your projects, don't hesitate to drop me a line (or a pull request): [@im_dario](https://twitter.com/im_dario)
@@ -227,21 +235,8 @@ If I can help you, you have an idea or you are using Mergo in your projects, don
Written by [Dario Castañé](http://dario.im).
-## Top Contributors
-
-[![0](https://sourcerer.io/fame/imdario/imdario/mergo/images/0)](https://sourcerer.io/fame/imdario/imdario/mergo/links/0)
-[![1](https://sourcerer.io/fame/imdario/imdario/mergo/images/1)](https://sourcerer.io/fame/imdario/imdario/mergo/links/1)
-[![2](https://sourcerer.io/fame/imdario/imdario/mergo/images/2)](https://sourcerer.io/fame/imdario/imdario/mergo/links/2)
-[![3](https://sourcerer.io/fame/imdario/imdario/mergo/images/3)](https://sourcerer.io/fame/imdario/imdario/mergo/links/3)
-[![4](https://sourcerer.io/fame/imdario/imdario/mergo/images/4)](https://sourcerer.io/fame/imdario/imdario/mergo/links/4)
-[![5](https://sourcerer.io/fame/imdario/imdario/mergo/images/5)](https://sourcerer.io/fame/imdario/imdario/mergo/links/5)
-[![6](https://sourcerer.io/fame/imdario/imdario/mergo/images/6)](https://sourcerer.io/fame/imdario/imdario/mergo/links/6)
-[![7](https://sourcerer.io/fame/imdario/imdario/mergo/images/7)](https://sourcerer.io/fame/imdario/imdario/mergo/links/7)
-
-
## License
[BSD 3-Clause](http://opensource.org/licenses/BSD-3-Clause) license, as [Go language](http://golang.org/LICENSE).
-
[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fimdario%2Fmergo.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fimdario%2Fmergo?ref=badge_large)
diff --git a/vendor/github.com/imdario/mergo/SECURITY.md b/vendor/github.com/imdario/mergo/SECURITY.md
new file mode 100644
index 000000000..a5de61f77
--- /dev/null
+++ b/vendor/github.com/imdario/mergo/SECURITY.md
@@ -0,0 +1,14 @@
+# Security Policy
+
+## Supported Versions
+
+| Version | Supported |
+| ------- | ------------------ |
+| 0.3.x | :white_check_mark: |
+| < 0.3 | :x: |
+
+## Security contact information
+
+To report a security vulnerability, please use the
+[Tidelift security contact](https://tidelift.com/security).
+Tidelift will coordinate the fix and disclosure.
diff --git a/vendor/github.com/imdario/mergo/map.go b/vendor/github.com/imdario/mergo/map.go
index a13a7ee46..b50d5c2a4 100644
--- a/vendor/github.com/imdario/mergo/map.go
+++ b/vendor/github.com/imdario/mergo/map.go
@@ -44,7 +44,7 @@ func deepMap(dst, src reflect.Value, visited map[uintptr]*visit, depth int, conf
}
}
// Remember, remember...
- visited[h] = &visit{addr, typ, seen}
+ visited[h] = &visit{typ, seen, addr}
}
zeroValue := reflect.Value{}
switch dst.Kind() {
@@ -58,7 +58,7 @@ func deepMap(dst, src reflect.Value, visited map[uintptr]*visit, depth int, conf
}
fieldName := field.Name
fieldName = changeInitialCase(fieldName, unicode.ToLower)
- if v, ok := dstMap[fieldName]; !ok || (isEmptyValue(reflect.ValueOf(v)) || overwrite) {
+ if v, ok := dstMap[fieldName]; !ok || (isEmptyValue(reflect.ValueOf(v), !config.ShouldNotDereference) || overwrite) {
dstMap[fieldName] = src.Field(i).Interface()
}
}
@@ -142,7 +142,7 @@ func MapWithOverwrite(dst, src interface{}, opts ...func(*Config)) error {
func _map(dst, src interface{}, opts ...func(*Config)) error {
if dst != nil && reflect.ValueOf(dst).Kind() != reflect.Ptr {
- return ErrNonPointerAgument
+ return ErrNonPointerArgument
}
var (
vDst, vSrc reflect.Value
diff --git a/vendor/github.com/imdario/mergo/merge.go b/vendor/github.com/imdario/mergo/merge.go
index 8c2a8fcd9..0ef9b2138 100644
--- a/vendor/github.com/imdario/mergo/merge.go
+++ b/vendor/github.com/imdario/mergo/merge.go
@@ -38,10 +38,11 @@ func isExportedComponent(field *reflect.StructField) bool {
}
type Config struct {
+ Transformers Transformers
Overwrite bool
+ ShouldNotDereference bool
AppendSlice bool
TypeCheck bool
- Transformers Transformers
overwriteWithEmptyValue bool
overwriteSliceWithEmptyValue bool
sliceDeepCopy bool
@@ -76,10 +77,10 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co
}
}
// Remember, remember...
- visited[h] = &visit{addr, typ, seen}
+ visited[h] = &visit{typ, seen, addr}
}
- if config.Transformers != nil && !isEmptyValue(dst) {
+ if config.Transformers != nil && !isReflectNil(dst) && dst.IsValid() {
if fn := config.Transformers.Transformer(dst.Type()); fn != nil {
err = fn(dst, src)
return
@@ -95,7 +96,7 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co
}
}
} else {
- if dst.CanSet() && (isReflectNil(dst) || overwrite) && (!isEmptyValue(src) || overwriteWithEmptySrc) {
+ if dst.CanSet() && (isReflectNil(dst) || overwrite) && (!isEmptyValue(src, !config.ShouldNotDereference) || overwriteWithEmptySrc) {
dst.Set(src)
}
}
@@ -110,7 +111,7 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co
}
if src.Kind() != reflect.Map {
- if overwrite {
+ if overwrite && dst.CanSet() {
dst.Set(src)
}
return
@@ -162,7 +163,7 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co
dstSlice = reflect.ValueOf(dstElement.Interface())
}
- if (!isEmptyValue(src) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice && !sliceDeepCopy {
+ if (!isEmptyValue(src, !config.ShouldNotDereference) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst, !config.ShouldNotDereference)) && !config.AppendSlice && !sliceDeepCopy {
if typeCheck && srcSlice.Type() != dstSlice.Type() {
return fmt.Errorf("cannot override two slices with different type (%s, %s)", srcSlice.Type(), dstSlice.Type())
}
@@ -194,22 +195,38 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co
dst.SetMapIndex(key, dstSlice)
}
}
- if dstElement.IsValid() && !isEmptyValue(dstElement) && (reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Map || reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Slice) {
- continue
+
+ if dstElement.IsValid() && !isEmptyValue(dstElement, !config.ShouldNotDereference) {
+ if reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Slice {
+ continue
+ }
+ if reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Map && reflect.TypeOf(dstElement.Interface()).Kind() == reflect.Map {
+ continue
+ }
}
- if srcElement.IsValid() && ((srcElement.Kind() != reflect.Ptr && overwrite) || !dstElement.IsValid() || isEmptyValue(dstElement)) {
+ if srcElement.IsValid() && ((srcElement.Kind() != reflect.Ptr && overwrite) || !dstElement.IsValid() || isEmptyValue(dstElement, !config.ShouldNotDereference)) {
if dst.IsNil() {
dst.Set(reflect.MakeMap(dst.Type()))
}
dst.SetMapIndex(key, srcElement)
}
}
+
+ // Ensure that all keys in dst are deleted if they are not in src.
+ if overwriteWithEmptySrc {
+ for _, key := range dst.MapKeys() {
+ srcElement := src.MapIndex(key)
+ if !srcElement.IsValid() {
+ dst.SetMapIndex(key, reflect.Value{})
+ }
+ }
+ }
case reflect.Slice:
if !dst.CanSet() {
break
}
- if (!isEmptyValue(src) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice && !sliceDeepCopy {
+ if (!isEmptyValue(src, !config.ShouldNotDereference) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst, !config.ShouldNotDereference)) && !config.AppendSlice && !sliceDeepCopy {
dst.Set(src)
} else if config.AppendSlice {
if src.Type() != dst.Type() {
@@ -244,12 +261,18 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co
if src.Kind() != reflect.Interface {
if dst.IsNil() || (src.Kind() != reflect.Ptr && overwrite) {
- if dst.CanSet() && (overwrite || isEmptyValue(dst)) {
+ if dst.CanSet() && (overwrite || isEmptyValue(dst, !config.ShouldNotDereference)) {
dst.Set(src)
}
} else if src.Kind() == reflect.Ptr {
- if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil {
- return
+ if !config.ShouldNotDereference {
+ if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil {
+ return
+ }
+ } else {
+ if overwriteWithEmptySrc || (overwrite && !src.IsNil()) || dst.IsNil() {
+ dst.Set(src)
+ }
}
} else if dst.Elem().Type() == src.Type() {
if err = deepMerge(dst.Elem(), src, visited, depth+1, config); err != nil {
@@ -262,7 +285,7 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co
}
if dst.IsNil() || overwrite {
- if dst.CanSet() && (overwrite || isEmptyValue(dst)) {
+ if dst.CanSet() && (overwrite || isEmptyValue(dst, !config.ShouldNotDereference)) {
dst.Set(src)
}
break
@@ -275,7 +298,7 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co
break
}
default:
- mustSet := (isEmptyValue(dst) || overwrite) && (!isEmptyValue(src) || overwriteWithEmptySrc)
+ mustSet := (isEmptyValue(dst, !config.ShouldNotDereference) || overwrite) && (!isEmptyValue(src, !config.ShouldNotDereference) || overwriteWithEmptySrc)
if mustSet {
if dst.CanSet() {
dst.Set(src)
@@ -326,6 +349,12 @@ func WithOverrideEmptySlice(config *Config) {
config.overwriteSliceWithEmptyValue = true
}
+// WithoutDereference prevents dereferencing pointers when evaluating whether they are empty
+// (i.e. a non-nil pointer is never considered empty).
+func WithoutDereference(config *Config) {
+ config.ShouldNotDereference = true
+}
+
// WithAppendSlice will make merge append slices instead of overwriting it.
func WithAppendSlice(config *Config) {
config.AppendSlice = true
@@ -344,7 +373,7 @@ func WithSliceDeepCopy(config *Config) {
func merge(dst, src interface{}, opts ...func(*Config)) error {
if dst != nil && reflect.ValueOf(dst).Kind() != reflect.Ptr {
- return ErrNonPointerAgument
+ return ErrNonPointerArgument
}
var (
vDst, vSrc reflect.Value
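
The new `WithoutDereference` option (and the `ShouldNotDereference` flag it sets) changes how pointer fields are judged empty during a merge: by default a pointer is followed, so a pointee holding a zero value counts as empty, whereas with the option a non-nil pointer is never overwritten. A hedged sketch of the difference, using an illustrative struct rather than anything from this repository:

```go
package main

import (
	"fmt"

	"github.com/imdario/mergo"
)

// Config is an illustrative struct with a pointer field; it is not a type
// from this repository.
type Config struct {
	Timeout *int
}

func main() {
	a, b, thirty := 0, 0, 30

	dst := Config{Timeout: &a}
	src := Config{Timeout: &thirty}

	// Default behaviour dereferences pointers when checking emptiness, so a
	// pointer to a zero value counts as empty and src wins.
	_ = mergo.Merge(&dst, src)
	fmt.Println(*dst.Timeout) // 30

	// With WithoutDereference, a non-nil pointer is never considered empty,
	// so the already-set pointer in dst is left alone.
	dst2 := Config{Timeout: &b}
	_ = mergo.Merge(&dst2, src, mergo.WithoutDereference)
	fmt.Println(*dst2.Timeout) // 0
}
```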
diff --git a/vendor/github.com/imdario/mergo/mergo.go b/vendor/github.com/imdario/mergo/mergo.go
index 3cc926c7f..0a721e2d8 100644
--- a/vendor/github.com/imdario/mergo/mergo.go
+++ b/vendor/github.com/imdario/mergo/mergo.go
@@ -17,10 +17,10 @@ import (
var (
ErrNilArguments = errors.New("src and dst must not be nil")
ErrDifferentArgumentsTypes = errors.New("src and dst must be of same type")
- ErrNotSupported = errors.New("only structs and maps are supported")
+ ErrNotSupported = errors.New("only structs, maps, and slices are supported")
ErrExpectedMapAsDestination = errors.New("dst was expected to be a map")
ErrExpectedStructAsDestination = errors.New("dst was expected to be a struct")
- ErrNonPointerAgument = errors.New("dst must be a pointer")
+ ErrNonPointerArgument = errors.New("dst must be a pointer")
)
// During deepMerge, must keep track of checks that are
@@ -28,13 +28,13 @@ var (
// checks in progress are true when it reencounters them.
// Visited are stored in a map indexed by 17 * a1 + a2;
type visit struct {
- ptr uintptr
typ reflect.Type
next *visit
+ ptr uintptr
}
// From src/pkg/encoding/json/encode.go.
-func isEmptyValue(v reflect.Value) bool {
+func isEmptyValue(v reflect.Value, shouldDereference bool) bool {
switch v.Kind() {
case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
return v.Len() == 0
@@ -50,7 +50,10 @@ func isEmptyValue(v reflect.Value) bool {
if v.IsNil() {
return true
}
- return isEmptyValue(v.Elem())
+ if shouldDereference {
+ return isEmptyValue(v.Elem(), shouldDereference)
+ }
+ return false
case reflect.Func:
return v.IsNil()
case reflect.Invalid:
@@ -65,7 +68,7 @@ func resolveValues(dst, src interface{}) (vDst, vSrc reflect.Value, err error) {
return
}
vDst = reflect.ValueOf(dst).Elem()
- if vDst.Kind() != reflect.Struct && vDst.Kind() != reflect.Map {
+ if vDst.Kind() != reflect.Struct && vDst.Kind() != reflect.Map && vDst.Kind() != reflect.Slice {
err = ErrNotSupported
return
}
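
Alongside the error rename (`ErrNonPointerAgument` to `ErrNonPointerArgument`), `resolveValues` now accepts slices as the destination, matching the updated `ErrNotSupported` message. A short sketch of what that appears to allow when combined with the existing `WithAppendSlice` option (illustrative data):

```go
package main

import (
	"fmt"

	"github.com/imdario/mergo"
)

func main() {
	dst := []string{"eth0"}
	src := []string{"eth1", "eth2"}

	// With a slice destination now passing resolveValues, AppendSlice
	// concatenates src onto dst instead of returning ErrNotSupported.
	if err := mergo.Merge(&dst, src, mergo.WithAppendSlice); err != nil {
		panic(err)
	}
	fmt.Println(dst) // [eth0 eth1 eth2]
}
```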
diff --git a/vendor/github.com/itchyny/gojq/.dockerignore b/vendor/github.com/itchyny/gojq/.dockerignore
index c8e02dc8f..54095155c 100644
--- a/vendor/github.com/itchyny/gojq/.dockerignore
+++ b/vendor/github.com/itchyny/gojq/.dockerignore
@@ -1,9 +1,16 @@
/gojq
/goxz
/CREDITS
-/._*
/y.output
*.exe
*.test
*.out
-/.github/
+*.md
+*.y
+**/*.jq
+**/*.json
+**/*.yaml
+**/*_test.go
+.github
+_gojq
+_tools
diff --git a/vendor/github.com/itchyny/gojq/.gitattributes b/vendor/github.com/itchyny/gojq/.gitattributes
index 9c2075be6..797f39593 100644
--- a/vendor/github.com/itchyny/gojq/.gitattributes
+++ b/vendor/github.com/itchyny/gojq/.gitattributes
@@ -1,2 +1,3 @@
**/testdata/** binary
-/builtin.go eol=lf
+/builtin.go eol=lf linguist-generated=true
+/parser.go eol=lf linguist-generated=true
diff --git a/vendor/github.com/itchyny/gojq/.gitignore b/vendor/github.com/itchyny/gojq/.gitignore
index e350f9308..bbeb991f8 100644
--- a/vendor/github.com/itchyny/gojq/.gitignore
+++ b/vendor/github.com/itchyny/gojq/.gitignore
@@ -1,7 +1,6 @@
/gojq
/goxz
/CREDITS
-/._*
/y.output
*.exe
*.test
diff --git a/vendor/github.com/itchyny/gojq/CHANGELOG.md b/vendor/github.com/itchyny/gojq/CHANGELOG.md
index 2d13b6d45..9ae257a2e 100644
--- a/vendor/github.com/itchyny/gojq/CHANGELOG.md
+++ b/vendor/github.com/itchyny/gojq/CHANGELOG.md
@@ -1,4 +1,152 @@
# Changelog
+## [v0.12.16](https://github.com/itchyny/gojq/compare/v0.12.15..v0.12.16) (2024-06-01)
+* fix offset of query parsing error on multi-byte characters
+* fix tests of `exp10` and `atan2` failing on some platforms
+* fix `debug/1` to be available only when `debug/0` is defined
+* improve parser to allow binary operators as object values
+* improve compiler to emit error if query is missing
+
+## [v0.12.15](https://github.com/itchyny/gojq/compare/v0.12.14..v0.12.15) (2024-04-01)
+* implement `ltrim`, `rtrim`, and `trim` functions
+* implement `gojq.ParseError` for getting the offset and token of query parsing error
+* implement `gojq.HaltError` for detecting halt errors and stopping outer iteration
+* fix object construction with duplicate keys (`{x:0,y:1} | {a:.x,a:.y}`)
+* fix `halt` and `halt_error` functions to stop the command execution immediately
+* fix variable scope of binding syntax (`"a" as $v | def f: $v; "b" as $v | f`)
+* fix pre-defined variables to be available in initial modules (`$ARGS` in `~/.jq`)
+* fix `ltrimstr` and `rtrimstr` functions to emit error on non-string input
+* fix `nearbyint` and `rint` functions to round ties to even
+* improve parser to allow `reduce`, `foreach`, `if`, `try`-`catch` syntax as object values
+* remove `pow10` in favor of `exp10`, define `scalbn` and `scalbln` by `ldexp`
+
+## [v0.12.14](https://github.com/itchyny/gojq/compare/v0.12.13..v0.12.14) (2023-12-01)
+* implement `abs`, `pick`, and `debug/1` functions
+* implement `--raw-output0` option, and remove `--nul-output` (`-0`) option
+* fix string multiplication by zero to emit an empty string
+* fix zero divided by zero to emit an error, not `nan`
+* fix modulo operator to emit `nan` if either side is `nan`
+* fix `implode` function to emit replacement characters on invalid code points
+* fix `stderr` function to output strings in raw format
+* fix `error` function to throw an error even for `null`
+* fix `walk` function on multiple outputs arguments
+* fix `--from-file` option to work with `--args` and `--jsonargs` options
+* fix the default module search path `../lib` relative to the executable
+* improve query parser to support comment continuation with backslash
+* improve `modulemeta` function to include defined function names in the module
+* improve search path of `import` and `include` directives to support `$ORIGIN` expansion
+* remove deprecated `leaf_paths` function
+
+## [v0.12.13](https://github.com/itchyny/gojq/compare/v0.12.12..v0.12.13) (2023-06-01)
+* implement `@urid` format string to decode URI values
+* fix functions returning arrays not to emit nil slices (`flatten`, `group_by`,
+ `unique`, `unique_by`, `nth`, `indices`, `path`, and `modulemeta.deps`)
+
+## [v0.12.12](https://github.com/itchyny/gojq/compare/v0.12.11..v0.12.12) (2023-03-01)
+* fix assignment operator (`=`) with overlapping paths and multiple values (`[[]] | .. = ..`)
+* fix crash on multiplying large numbers to an empty string (`9223372036854775807 * ""`)
+* improve zsh completion file
+
+## [v0.12.11](https://github.com/itchyny/gojq/compare/v0.12.10..v0.12.11) (2022-12-24)
+* fix crash on assignment operator (`=`) with multiple values (`. = (0,0)`)
+* fix `isnormal` and `normals` functions against subnormal numbers
+
+## [v0.12.10](https://github.com/itchyny/gojq/compare/v0.12.9..v0.12.10) (2022-12-01)
+* fix `break` in `try`-`catch` query (`label $x | try break $x catch .`)
+* fix path value validation for `getpath` function (`path(getpath([[0]][0]))`)
+* fix path value validation for custom iterator functions
+* fix `walk` function with argument emitting multiple values (`[1],{x:1} | walk(.,0)`)
+* fix `@csv`, `@tsv`, `@sh` to escape the null character (`["\u0000"] | @csv,@tsv,@sh`)
+* improve performance of assignment operator (`=`), update-assignment operator (`|=`),
+ `map_values`, `del`, `delpaths`, `walk`, `ascii_downcase`, and `ascii_upcase` functions
+
+## [v0.12.9](https://github.com/itchyny/gojq/compare/v0.12.8..v0.12.9) (2022-09-01)
+* fix `fromjson` to emit error on unexpected trailing string
+* fix path analyzer on variable argument evaluation (`def f($x): .y; path(f(.x))`)
+* fix raw input option `--raw-input` (`-R`) to keep carriage returns and support 64KiB+ lines
+
+## [v0.12.8](https://github.com/itchyny/gojq/compare/v0.12.7..v0.12.8) (2022-06-01)
+* implement `gojq.Compare` for comparing values in custom internal functions
+* implement `gojq.TypeOf` for obtaining type name of values in custom internal functions
+* implement `gojq.Preview` for previewing values for error messages of custom internal functions
+* fix query lexer to parse string literals as JSON to support surrogate pairs (`"\ud83d\ude04"`)
+* fix priority bug of declared and builtin functions (`def empty: .; null | select(.)`)
+* fix string indexing by index out of bounds to emit `null` (`"abc" | .[3]`)
+* fix array binding pattern not to match against strings (`"abc" as [$a] ?// $a | $a`)
+* fix `sub` and `gsub` functions to emit results in the same order of jq
+* fix `fromjson` to keep integer precision (`"10000000000000000" | fromjson + 1`)
+* fix stream option to raise error against incomplete JSON input
+* improve array updating index and string repetition to increase limitations
+* improve `mktime` to support nanoseconds, just like `gmtime` and `now`
+* improve query lexer to report unterminated string literals
+* improve performance of string indexing and slicing by reducing allocations
+* improve performance of object and array indexing, slicing, and iteration,
+ by validating path values by comparing data addresses. This change improves jq
+ compatibility of path value validation (`{} | {}.x = 0`, `[0] | [.[]][] = 1`).
+ Also optimize constant indexing and slicing by specialized instruction
+* improve performance of `add` (on array of strings), `flatten`, `min`, `max`,
+ `sort`, `unique`, `join`, `to_entries`, `from_entries`, `indices`, `index`,
+ `rindex`, `startswith`, `endswith`, `ltrimstr`, `rtrimstr`, `explode`,
+ `capture`, `sub`, and `gsub` functions
+
+## [v0.12.7](https://github.com/itchyny/gojq/compare/v0.12.6..v0.12.7) (2022-03-01)
+* fix precedence of try expression against operators (`try 0 * error(0)`)
+* fix iterator suffix with optional operator (`0 | .x[]?`)
+* fix stream option with slurp option or `input`, `inputs` functions
+* fix the command flag parser to support equal sign in short options with argument
+* fix string conversion of query including empty strings in module and import metadata
+* improve performance of `isempty` function
+
+## [v0.12.6](https://github.com/itchyny/gojq/compare/v0.12.5..v0.12.6) (2021-12-01)
+* implement options for consuming remaining arguments (`--args`, `--jsonargs`, `$ARGS.positional`)
+* fix `delpaths` function with overlapped paths
+* fix `--exit-status` flag with `halt`, `halt_error` functions
+* fix `input_filename` function with null input option
+* fix path value validation for `nan`
+* fix crash on branch optimization (`if 0 then . else 0|0 end`)
+* add validation on regular expression flags to reject unsupported ones
+* improve performance of `range`, `join`, `flatten` functions
+* improve constant value optimization for object with quoted keys
+* remove dependency on forked `go-flags` package
+
+## [v0.12.5](https://github.com/itchyny/gojq/compare/v0.12.4..v0.12.5) (2021-09-01)
+* implement `input_filename` function for the command
+* fix priority bug of declared functions and arguments (`def g: 1; def f(g): g; f(2)`)
+* fix label handling to catch the correct break error (`first((0, 0) | first(0))`)
+* fix `null|error` and `error(null)` to behave like `empty` (`null | [0, error, error(null), 1]`)
+* fix integer division to keep precision when divisible (`1 / 1 * 1000000000000000000000`)
+* fix modulo operator on negative number and large number (`(-1) % 10000000000`)
+* fix combination of slurp (`--slurp`) and raw input option (`--raw-input`) to keep newlines
+* change the default module paths to `~/.jq`, `$ORIGIN/../lib/gojq`, `$ORIGIN/lib`
+ where `$ORIGIN` is the directory where the executable is located in
+* improve command argument parser to recognize query with leading hyphen,
+ allow hyphen for standard input, and force posix style on Windows
+* improve `@base64d` to allow input without padding characters
+* improve `fromdate`, `fromdateiso8601` to parse date time strings with timezone offset
+* improve `halt_error` to print error values without prefix
+* improve `sub`, `gsub` to allow the replacement string emitting multiple values
+* improve encoding `\b` and `\f` in strings
+* improve module loader for search path in query, and absolute path
+* improve query lexer to support string literal including newlines
+* improve performance of `index`, `rindex`, `indices`, `transpose`, and `walk` functions
+* improve performance of value preview in errors and debug mode
+* improve runtime performance including tail call optimization
+* switch Docker base image to `distroless/static:debug`
+
+## [v0.12.4](https://github.com/itchyny/gojq/compare/v0.12.3..v0.12.4) (2021-06-01)
+* fix numeric conversion of large floating-point numbers in modulo operator
+* implement a compiler option for adding custom iterator functions
+* implement `gojq.NewIter` function for creating a new iterator from values
+* implement `$ARGS.named` for listing command line variables
+* remove `debug` and `stderr` functions from the library
+* stop printing newlines on `stderr` function for jq compatibility
+
+## [v0.12.3](https://github.com/itchyny/gojq/compare/v0.12.2..v0.12.3) (2021-04-01)
+* fix array slicing with infinities and large numbers (`[0][-infinite:infinite], [0][:1e20]`)
+* fix multiplying strings and modulo by infinities on MIPS 64 architecture
+* fix git revision information in Docker images
+* release multi-platform Docker images for ARM 64
+* switch to `distroless` image for Docker base image
+
## [v0.12.2](https://github.com/itchyny/gojq/compare/v0.12.1..v0.12.2) (2021-03-01)
* implement `GOJQ_COLORS` environment variable to configure individual colors
* respect `--color-output` (`-C`) option even if `NO_COLOR` is set
@@ -128,7 +276,7 @@
## [v0.7.0](https://github.com/itchyny/gojq/compare/v0.6.0..v0.7.0) (2019-12-22)
* implement YAML input (`--yaml-input`) and output (`--yaml-output`)
* fix pipe in object value
-* fix precedence of if, try, reduce and foreach expressions
+* fix precedence of `if`, `try`, `reduce` and `foreach` expressions
* release from GitHub Actions
## [v0.6.0](https://github.com/itchyny/gojq/compare/v0.5.0..v0.6.0) (2019-08-26)
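
Several of the changelog entries above concern gojq's Go API rather than the CLI: `gojq.ParseError` (v0.12.15) exposes the offset and token of a parse failure, and runtime errors keep surfacing as values from the result iterator. A small sketch of embedding the library under those assumptions (the query and input document are illustrative):

```go
package main

import (
	"errors"
	"fmt"
	"log"

	"github.com/itchyny/gojq"
)

func main() {
	// Parse a query; a failure comes back as *gojq.ParseError, which carries
	// the offset and token of the bad position (per the v0.12.15 entry).
	query, err := gojq.Parse(".network.interfaces[] | .name")
	if err != nil {
		var perr *gojq.ParseError
		if errors.As(err, &perr) {
			log.Fatalf("parse error at offset %d near %q", perr.Offset, perr.Token)
		}
		log.Fatal(err)
	}

	// Illustrative input document.
	input := map[string]any{
		"network": map[string]any{
			"interfaces": []any{
				map[string]any{"name": "eth0"},
				map[string]any{"name": "eth1"},
			},
		},
	}

	// Run returns an iterator; runtime errors surface as error values.
	iter := query.Run(input)
	for {
		v, ok := iter.Next()
		if !ok {
			break
		}
		if err, isErr := v.(error); isErr {
			log.Fatal(err)
		}
		fmt.Println(v) // eth0, then eth1
	}
}
```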
diff --git a/vendor/github.com/itchyny/gojq/Dockerfile b/vendor/github.com/itchyny/gojq/Dockerfile
index 882514391..d5e0dce63 100644
--- a/vendor/github.com/itchyny/gojq/Dockerfile
+++ b/vendor/github.com/itchyny/gojq/Dockerfile
@@ -1,12 +1,14 @@
-FROM golang:1.16 as builder
+FROM golang:1.22 AS builder
WORKDIR /app
+COPY go.* ./
+RUN go mod download
COPY . .
ENV CGO_ENABLED 0
RUN make build
-FROM alpine:3.13
+FROM gcr.io/distroless/static:debug
-COPY --from=builder /app/gojq /usr/local/bin/
-ENTRYPOINT ["/usr/local/bin/gojq"]
+COPY --from=builder /app/gojq /
+ENTRYPOINT ["/gojq"]
CMD ["--help"]
diff --git a/vendor/github.com/itchyny/gojq/LICENSE b/vendor/github.com/itchyny/gojq/LICENSE
index 759e398cc..fe5900407 100644
--- a/vendor/github.com/itchyny/gojq/LICENSE
+++ b/vendor/github.com/itchyny/gojq/LICENSE
@@ -1,6 +1,6 @@
The MIT License (MIT)
-Copyright (c) 2019-2021 itchyny
+Copyright (c) 2019-2024 itchyny
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/vendor/github.com/itchyny/gojq/Makefile b/vendor/github.com/itchyny/gojq/Makefile
index a1ec11efb..b7cdb4001 100644
--- a/vendor/github.com/itchyny/gojq/Makefile
+++ b/vendor/github.com/itchyny/gojq/Makefile
@@ -1,11 +1,10 @@
BIN := gojq
VERSION := $$(make -s show-version)
VERSION_PATH := cli
-CURRENT_REVISION := $(shell git rev-parse --short HEAD)
-BUILD_LDFLAGS := "-s -w -X github.com/itchyny/$(BIN)/cli.revision=$(CURRENT_REVISION)"
+CURRENT_REVISION = $(shell git rev-parse --short HEAD)
+BUILD_LDFLAGS = "-s -w -X github.com/itchyny/$(BIN)/cli.revision=$(CURRENT_REVISION)"
GOBIN ?= $(shell go env GOPATH)/bin
SHELL := /bin/bash
-export GO111MODULE=on
.PHONY: all
all: build
@@ -20,73 +19,63 @@ build-dev: parser.go builtin.go
.PHONY: build-debug
build-debug: parser.go builtin.go
- go build -tags debug -ldflags=$(BUILD_LDFLAGS) -o $(BIN) ./cmd/$(BIN)
+ go build -tags gojq_debug -ldflags=$(BUILD_LDFLAGS) -o $(BIN) ./cmd/$(BIN)
builtin.go: builtin.jq parser.go.y parser.go query.go operator.go _tools/*
GOOS= GOARCH= go generate
.SUFFIXES:
-parser.go: parser.go.y lexer.go $(GOBIN)/goyacc
+parser.go: parser.go.y $(GOBIN)/goyacc
goyacc -o $@ $<
$(GOBIN)/goyacc:
- @cd && go get golang.org/x/tools/cmd/goyacc
+ @go install golang.org/x/tools/cmd/goyacc@latest
.PHONY: install
install:
- go install -ldflags=$(BUILD_LDFLAGS) ./...
+ go install -ldflags=$(BUILD_LDFLAGS) ./cmd/$(BIN)
.PHONY: install-dev
install-dev: parser.go builtin.go
- go install -ldflags=$(BUILD_LDFLAGS) ./...
+ go install -ldflags=$(BUILD_LDFLAGS) ./cmd/$(BIN)
.PHONY: install-debug
install-debug: parser.go builtin.go
- go install -tags debug -ldflags=$(BUILD_LDFLAGS) ./...
+ go install -tags gojq_debug -ldflags=$(BUILD_LDFLAGS) ./cmd/$(BIN)
.PHONY: show-version
show-version: $(GOBIN)/gobump
- @gobump show -r $(VERSION_PATH)
+ @gobump show -r "$(VERSION_PATH)"
$(GOBIN)/gobump:
- @cd && go get github.com/x-motemen/gobump/cmd/gobump
+ @go install github.com/x-motemen/gobump/cmd/gobump@latest
.PHONY: cross
cross: $(GOBIN)/goxz CREDITS
- build() { \
- goxz -n $(BIN) -pv=v$(VERSION) -os=$$1 -arch=$$2 \
- -include _$(BIN) -build-ldflags=$(BUILD_LDFLAGS) ./cmd/$(BIN); \
- }; \
- build linux,darwin,windows amd64 && build linux,darwin arm64
+ goxz -n $(BIN) -pv=v$(VERSION) -include _$(BIN) \
+ -build-ldflags=$(BUILD_LDFLAGS) ./cmd/$(BIN)
$(GOBIN)/goxz:
- cd && go get github.com/Songmu/goxz/cmd/goxz
+ go install github.com/Songmu/goxz/cmd/goxz@latest
CREDITS: $(GOBIN)/gocredits go.sum
go mod tidy
gocredits -w .
$(GOBIN)/gocredits:
- cd && go get github.com/Songmu/gocredits/cmd/gocredits
+ go install github.com/Songmu/gocredits/cmd/gocredits@latest
.PHONY: test
test: build
go test -v -race ./...
.PHONY: lint
-lint: $(GOBIN)/golint
+lint: $(GOBIN)/staticcheck
go vet ./...
- golint -set_exit_status ./...
+ staticcheck -checks all -tags gojq_debug ./...
-$(GOBIN)/golint:
- cd && go get golang.org/x/lint/golint
-
-.PHONY: maligned
-maligned: $(GOBIN)/maligned
- ! maligned . 2>&1 | grep -v pointer | grep ^
-
-$(GOBIN)/maligned:
- cd && go get github.com/mdempsky/maligned
+$(GOBIN)/staticcheck:
+ go install honnef.co/go/tools/cmd/staticcheck@latest
.PHONY: check-tools
check-tools:
@@ -98,29 +87,17 @@ clean:
go clean
.PHONY: update
+update: export GOPROXY=direct
update:
- export GOPROXY=direct
- rm -f go.sum && go get -u -d ./... && go get github.com/mattn/go-runewidth@v0.0.9 && go mod tidy
- sed -i.bak '/require (/,/)/d' go.dev.mod && rm -f go.dev.{sum,mod.bak}
+ go get -u -d ./... && go mod tidy
+ go mod edit -modfile=go.dev.mod -droprequire=github.com/itchyny/{astgen,timefmt}-go
go get -u -d -modfile=go.dev.mod github.com/itchyny/{astgen,timefmt}-go && go generate
.PHONY: bump
bump: $(GOBIN)/gobump
-ifneq ($(shell git status --porcelain),)
- $(error git workspace is dirty)
-endif
-ifneq ($(shell git rev-parse --abbrev-ref HEAD),main)
- $(error current branch is not main)
-endif
+ test -z "$$(git status --porcelain || echo .)"
+ test "$$(git branch --show-current)" = "main"
@gobump up -w "$(VERSION_PATH)"
git commit -am "bump up version to $(VERSION)"
git tag "v$(VERSION)"
- git push origin main
- git push origin "refs/tags/v$(VERSION)"
-
-.PHONY: upload
-upload: $(GOBIN)/ghr
- ghr "v$(VERSION)" goxz
-
-$(GOBIN)/ghr:
- cd && go get github.com/tcnksm/ghr
+ git push --atomic origin main tag "v$(VERSION)"
diff --git a/vendor/github.com/itchyny/gojq/README.md b/vendor/github.com/itchyny/gojq/README.md
index 292446a7c..7b34f93c3 100644
--- a/vendor/github.com/itchyny/gojq/README.md
+++ b/vendor/github.com/itchyny/gojq/README.md
@@ -1,11 +1,11 @@
# gojq
-[![CI Status](https://github.com/itchyny/gojq/workflows/CI/badge.svg)](https://github.com/itchyny/gojq/actions)
+[![CI Status](https://github.com/itchyny/gojq/actions/workflows/ci.yaml/badge.svg?branch=main)](https://github.com/itchyny/gojq/actions?query=branch:main)
[![Go Report Card](https://goreportcard.com/badge/github.com/itchyny/gojq)](https://goreportcard.com/report/github.com/itchyny/gojq)
-[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/itchyny/gojq/blob/main/LICENSE)
+[![MIT License](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/itchyny/gojq/blob/main/LICENSE)
[![release](https://img.shields.io/github/release/itchyny/gojq/all.svg)](https://github.com/itchyny/gojq/releases)
[![pkg.go.dev](https://pkg.go.dev/badge/github.com/itchyny/gojq)](https://pkg.go.dev/github.com/itchyny/gojq)
-### Pure Go implementation of [jq](https://github.com/stedolan/jq)
+### Pure Go implementation of [jq](https://github.com/jqlang/jq)
This is an implementation of jq command written in Go language.
You can also embed gojq as a library to your Go products.
@@ -56,23 +56,30 @@ gojq: invalid json:
brew install gojq
```
+### Zero Install
+```sh
+0install add gojq https://apps.0install.net/utils/gojq.xml
+```
+
### Build from source
```sh
-go get github.com/itchyny/gojq/cmd/gojq
+go install github.com/itchyny/gojq/cmd/gojq@latest
```
### Docker
```sh
docker run -i --rm itchyny/gojq
+docker run -i --rm ghcr.io/itchyny/gojq
```
## Difference to jq
- gojq is purely implemented with Go language and is completely portable. jq depends on the C standard library so the availability of math functions depends on the library. jq also depends on the regular expression library and it makes build scripts complex.
- gojq implements nice error messages for invalid query and JSON input. The error message of jq is sometimes difficult to tell where to fix the query.
-- gojq does not keep the order of object keys. I understand this might cause problems for some scripts but basically, we should not rely on the order of object keys. Due to this limitation, gojq does not have `keys_unsorted` function and `--sort-keys` (`-S`) option. I would implement when ordered map is implemented in the standard library of Go but I'm less motivated. Also, gojq assumes only valid JSON input while jq deals with some JSON extensions; `NaN` and `Infinity`.
-- gojq supports arbitrary-precision integer calculation while jq does not. This is important to keep the precision of numeric IDs or nanosecond values. You can also use gojq to solve some mathematical problems which require big integers. Note that mathematical functions convert integers to floating-point numbers; only addition, subtraction, multiplication, modulo operation, and division (when divisible) keep integer precisions. When you want to calculate floor division of big integers, use `def intdiv($x; $y): ($x - $x % $y) / $y;`, instead of `$x / $y`.
-- gojq fixes some bugs of jq. gojq correctly deletes elements of arrays by `|= empty` ([jq#2051](https://github.com/stedolan/jq/issues/2051)). gojq fixes update assignments including `try` or `//` operator ([jq#1885](https://github.com/stedolan/jq/issues/1885), [jq#2140](https://github.com/stedolan/jq/issues/2140)). gojq consistently counts by characters (not by bytes) in `index`, `rindex`, and `indices` functions; `"12345" | .[index("3"):]` results in `"345"` ([jq#1430](https://github.com/stedolan/jq/issues/1430), [jq#1624](https://github.com/stedolan/jq/issues/1624)). gojq can deal with `%f` in `strftime` and `strptime` ([jq#1409](https://github.com/stedolan/jq/issues/1409)).
-- gojq supports reading from YAML input while jq does not. gojq also supports YAML output.
+- gojq does not keep the order of object keys. I understand this might cause problems for some scripts, but basically, we should not rely on the order of object keys. Due to this limitation, gojq does not have the `keys_unsorted` function or the `--sort-keys` (`-S`) option. I would implement them when an ordered map is added to the Go standard library, but I'm less motivated.
+- gojq supports arbitrary-precision integer calculation while jq does not; jq loses the precision of large integers when calculation is involved. Note that even with gojq, all mathematical functions, including `floor` and `round`, convert integers to floating-point numbers; only addition, subtraction, multiplication, modulo, and division operators (when divisible) keep the integer precision. To calculate floor division of integers without losing the precision, use `def idivide($n): (. - . % $n) / $n;`. To round down floating-point numbers to integers, use `def ifloor: floor | tostring | tonumber;`, but note that this function does not work with large floating-point numbers and also loses the precision of large integers.
+- gojq behaves differently than jq in some features, hoping that jq will fix the behaviors in the future. gojq consistently counts by characters (not by bytes) in `index`, `rindex`, and `indices` functions; `"12345" | .[index("3"):]` results in `"345"` ([jq#1430](https://github.com/jqlang/jq/issues/1430), [jq#1624](https://github.com/jqlang/jq/issues/1624)). gojq supports string indexing; `"abcde"[2]` ([jq#1520](https://github.com/jqlang/jq/issues/1520)). gojq fixes handling files with no newline characters at the end ([jq#2374](https://github.com/jqlang/jq/issues/2374)). gojq consistently truncates down floating-point number indices both in indexing (`[0] | .[0.5]` results in `0`), and slicing (`[0,1,2] | .[0.5:1.5]` results in `[0]`). gojq parses unary operators with higher precedence than variable binding (`[-1 as $x | 1,$x]` results in `[1,-1]` not `[-1,-1]`) ([jq#3053](https://github.com/jqlang/jq/pull/3053)). gojq fixes `@base64d` to allow binary string as the decoded string ([jq#1931](https://github.com/jqlang/jq/issues/1931)). gojq improves time formatting and parsing; deals with `%f` in `strftime` and `strptime` ([jq#1409](https://github.com/jqlang/jq/issues/1409)), parses timezone offsets with `fromdate` and `fromdateiso8601` ([jq#1053](https://github.com/jqlang/jq/issues/1053)), supports timezone name/offset with `%Z`/`%z` in `strptime` ([jq#929](https://github.com/jqlang/jq/issues/929), [jq#2195](https://github.com/jqlang/jq/issues/2195)), and looks up correct timezone during daylight saving time on formatting with `%Z` ([jq#1912](https://github.com/jqlang/jq/issues/1912)). gojq supports nanoseconds in date and time functions.
+- gojq does not support some functions intentionally; `get_jq_origin`, `get_prog_origin`, `get_search_list` (unstable, not listed in jq document), `input_line_number`, `$__loc__` (performance issue). gojq does not support some flags; `--ascii-output, -a` (performance issue), `--seq` (not used commonly), `--sort-keys, -S` (sorts by default because `map[string]any` does not keep the order), `--unbuffered` (unbuffered by default). gojq does not parse JSON extensions supported by jq; `NaN`, `Infinity`, and `[000]`. gojq normalizes floating-point numbers to fit to double-precision floating-point numbers. gojq does not support some regular expression metacharacters, backreferences, look-around assertions, and some flags (regular expression engine differences). gojq does not support BOM (`encoding/json` does not support this). gojq disallows using keywords for function names (`def true: .; true` is a confusing query), and module name prefixes in function declarations (using module prefixes like `def m::f: .;` is undocumented).
+- gojq supports reading from YAML input (`--yaml-input`) while jq does not. gojq also supports YAML output (`--yaml-output`). gojq supports `@urid` format string ([jq#798](https://github.com/jqlang/jq/issues/798), [jq#2261](https://github.com/jqlang/jq/issues/2261)).
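A minimal sketch of the character-based `index` behavior mentioned above, run through the gojq library (illustrative only, not part of the upstream README; the query and input are taken from the example in the list):

```go
package main

import (
	"fmt"
	"log"

	"github.com/itchyny/gojq"
)

func main() {
	// gojq counts by characters in index, so slicing "12345" from
	// index("3") yields "345", as described in the list above.
	query, err := gojq.Parse(`.[index("3"):]`)
	if err != nil {
		log.Fatalln(err)
	}
	iter := query.Run("12345")
	for {
		v, ok := iter.Next()
		if !ok {
			break
		}
		if err, ok := v.(error); ok {
			log.Fatalln(err)
		}
		fmt.Println(v) // Output: 345
	}
}
```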
### Color configuration
The gojq command automatically disables coloring output when the output is not a tty.
@@ -101,7 +108,7 @@ func main() {
if err != nil {
log.Fatalln(err)
}
- input := map[string]interface{}{"foo": []interface{}{1, 2, 3}}
+ input := map[string]any{"foo": []any{1, 2, 3}}
iter := query.Run(input) // or query.RunWithContext
for {
v, ok := iter.Next()
@@ -109,6 +116,9 @@ func main() {
break
}
if err, ok := v.(error); ok {
+ if err, ok := err.(*gojq.HaltError); ok && err.Value() == nil {
+ break
+ }
log.Fatalln(err)
}
fmt.Printf("%#v\n", v)
@@ -117,25 +127,34 @@ func main() {
```
- Firstly, use [`gojq.Parse(string) (*Query, error)`](https://pkg.go.dev/github.com/itchyny/gojq#Parse) to get the query from a string.
+ - Use [`gojq.ParseError`](https://pkg.go.dev/github.com/itchyny/gojq#ParseError) to get the error position and token of the parsing error.
- Secondly, get the result iterator
- using [`query.Run`](https://pkg.go.dev/github.com/itchyny/gojq#Query.Run) or [`query.RunWithContext`](https://pkg.go.dev/github.com/itchyny/gojq#Query.RunWithContext)
- - or alternatively, compile the query using [`gojq.Compile`](https://pkg.go.dev/github.com/itchyny/gojq#Compile) and then [`code.Run`](https://pkg.go.dev/github.com/itchyny/gojq#Code.Run) or [`code.RunWithContext`](https://pkg.go.dev/github.com/itchyny/gojq#Code.RunWithContext). You can reuse the `*Code` against multiple inputs to avoid compilation of the same query.
- - In either case, you cannot use custom type values as the query input. The type should be `[]interface{}` for an array and `map[string]interface{}` for a map (just like decoded to an `interface{}` using the [encoding/json](https://golang.org/pkg/encoding/json/) package). You can't use `[]int` or `map[string]string`, for example. If you want to query your custom struct, marshal to JSON, unmarshal to `interface{}` and use it as the query input.
-- Thirdly, iterate through the results using [`iter.Next() (interface{}, bool)`](https://pkg.go.dev/github.com/itchyny/gojq#Iter). The iterator can emit an error so make sure to handle it. Termination is notified by the second returned value of `Next()`. The reason why the return type is not `(interface{}, error)` is that the iterator can emit multiple errors and you can continue after an error.
+  - or alternatively, compile the query using [`gojq.Compile`](https://pkg.go.dev/github.com/itchyny/gojq#Compile) and then [`code.Run`](https://pkg.go.dev/github.com/itchyny/gojq#Code.Run) or [`code.RunWithContext`](https://pkg.go.dev/github.com/itchyny/gojq#Code.RunWithContext). You can reuse the `*Code` against multiple inputs to avoid compiling the same query repeatedly. However, do not pass values that share the same underlying data to multiple calls of `code.Run`.
+ - In either case, you cannot use custom type values as the query input. The type should be `[]any` for an array and `map[string]any` for a map (just like decoded to an `any` using the [encoding/json](https://golang.org/pkg/encoding/json/) package). You can't use `[]int` or `map[string]string`, for example. If you want to query your custom struct, marshal to JSON, unmarshal to `any` and use it as the query input.
+- Thirdly, iterate through the results using [`iter.Next() (any, bool)`](https://pkg.go.dev/github.com/itchyny/gojq#Iter). The iterator can emit an error so make sure to handle it. The method returns `true` with results, and `false` when the iterator terminates.
+ - The return type is not `(any, error)` because the iterator may emit multiple errors. The `jq` and `gojq` commands stop the iteration on the first error, but the library user can choose to stop the iteration on errors, or to continue until it terminates.
+ - In any case, it is recommended to stop the iteration on [`gojq.HaltError`](https://pkg.go.dev/github.com/itchyny/gojq#HaltError), which is emitted by `halt` and `halt_error` functions, although these functions are rarely used.
+ The error implements [`gojq.ValueError`](https://pkg.go.dev/github.com/itchyny/gojq#ValueError), and if the error value is `nil`, stop the iteration without handling the error.
+ Technically speaking, we can fix the iterator to terminate on the halting error, but it does not terminate at the moment.
+ The `halt` function in jq not only stops the iteration, but also terminates the command execution, even if there are still input values.
+ So, gojq leaves it up to the library user how to handle the halting error.
+  - Note that the result iterator may emit an infinite number of values; `repeat(0)` and `range(infinite)`. It may also get stuck without emitting any value; `def f: f; f`. Use `RunWithContext` when you want to limit the execution time (see the sketch below).
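A minimal sketch of bounding execution time with `RunWithContext` (illustrative only, not from the upstream README; the one-second timeout and the assumption that the context error surfaces as an error value are this example's choices):

```go
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/itchyny/gojq"
)

func main() {
	// repeat(0) never terminates on its own, so bound it with a context.
	query, err := gojq.Parse("repeat(0)")
	if err != nil {
		log.Fatalln(err)
	}
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	iter := query.RunWithContext(ctx, nil)
	for {
		v, ok := iter.Next()
		if !ok {
			break
		}
		if err, ok := v.(error); ok {
			// once the deadline passes, the iterator is expected to yield an error value
			fmt.Println("stopped:", err)
			break
		}
	}
}
```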
[`gojq.Compile`](https://pkg.go.dev/github.com/itchyny/gojq#Compile) allows you to configure the following compiler options.
- [`gojq.WithModuleLoader`](https://pkg.go.dev/github.com/itchyny/gojq#WithModuleLoader) allows you to load modules. By default, the module feature is disabled. If you want to load modules from the file system, use [`gojq.NewModuleLoader`](https://pkg.go.dev/github.com/itchyny/gojq#NewModuleLoader).
- [`gojq.WithEnvironLoader`](https://pkg.go.dev/github.com/itchyny/gojq#WithEnvironLoader) allows you to configure the environment variables referenced by `env` and `$ENV`. By default, OS environment variables are not accessible for security reasons. You can use `gojq.WithEnvironLoader(os.Environ)` if you want.
- [`gojq.WithVariables`](https://pkg.go.dev/github.com/itchyny/gojq#WithVariables) allows you to configure the variables which can be used in the query. Pass the values of the variables to [`code.Run`](https://pkg.go.dev/github.com/itchyny/gojq#Code.Run) in the same order (see the sketch after this list).
-- [`gojq.WithFunction`](https://pkg.go.dev/github.com/itchyny/gojq#WithFunction) allows to add a custom internal function. An internal function can return a single value (which can be an error) each invocation. To add a jq function (which may include a comma operator to emit multiple values, `empty` function, accept a filter for its argument, or call another built-in function), prepend the function to each query on parsing.
+- [`gojq.WithFunction`](https://pkg.go.dev/github.com/itchyny/gojq#WithFunction) allows you to add a custom internal function. An internal function can return a single value (which can be an error) on each invocation. To add a jq function (which may include a comma operator to emit multiple values or the `empty` function, accept a filter for its argument, or call another built-in function), use `LoadInitModules` of the module loader.
+- [`gojq.WithIterFunction`](https://pkg.go.dev/github.com/itchyny/gojq#WithIterFunction) allows you to add a custom iterator function. An iterator function returns an iterator to emit multiple values. You cannot define both iterator and non-iterator functions of the same name (with possibly different arities). You can use [`gojq.NewIter`](https://pkg.go.dev/github.com/itchyny/gojq#NewIter) to convert values or an error to a [`gojq.Iter`](https://pkg.go.dev/github.com/itchyny/gojq#Iter).
- [`gojq.WithInputIter`](https://pkg.go.dev/github.com/itchyny/gojq#WithInputIter) allows you to use the `input` and `inputs` functions. By default, these functions are disabled.
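A minimal sketch of compiling once with `gojq.WithVariables` and reusing the `*Code` against multiple inputs (illustrative only, not from the upstream README; the query, variable name, and inputs are made up for this example):

```go
package main

import (
	"fmt"
	"log"

	"github.com/itchyny/gojq"
)

func main() {
	query, err := gojq.Parse(".foo + $n")
	if err != nil {
		log.Fatalln(err)
	}
	// Compile once with the variable declared, then reuse the *Code.
	code, err := gojq.Compile(query, gojq.WithVariables([]string{"$n"}))
	if err != nil {
		log.Fatalln(err)
	}
	inputs := []any{
		map[string]any{"foo": 1},
		map[string]any{"foo": 2},
	}
	for _, input := range inputs {
		// Variable values are passed after the input, in the declared order.
		iter := code.Run(input, 10)
		for {
			v, ok := iter.Next()
			if !ok {
				break
			}
			if err, ok := v.(error); ok {
				log.Fatalln(err)
			}
			fmt.Println(v) // 11, then 12
		}
	}
}
```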
## Bug Tracker
Report bugs at [Issues・itchyny/gojq - GitHub](https://github.com/itchyny/gojq/issues).
## Author
-itchyny (https://github.com/itchyny)
+itchyny (<https://github.com/itchyny>)
## License
This software is released under the MIT License, see LICENSE.
diff --git a/vendor/github.com/itchyny/gojq/_gojq b/vendor/github.com/itchyny/gojq/_gojq
index 92d64cfc7..01e4c4f77 100644
--- a/vendor/github.com/itchyny/gojq/_gojq
+++ b/vendor/github.com/itchyny/gojq/_gojq
@@ -2,29 +2,42 @@
_gojq()
{
- _arguments -C \
- '(-c --compact-output)'{-c,--compact-output}'[compact output]' \
- '(-r --raw-output)'{-r,--raw-output}'[output raw strings]' \
- '(-j --join-output)'{-j,--join-output}'[stop printing a newline after each output]' \
- '(-0 --nul-output)'{-0,--nul-output}'[print NUL after each output]' \
- '(-C --color-output)'{-C,--color-output}'[colorize output even if piped]' \
- '(-M --monochrome-output)'{-M,--monochrome-output}'[stop colorizing output]' \
- '(--yaml-output)'--yaml-output'[output by YAML]' \
- '(--indent)'--indent'[number of spaces for indentation]:indentation count' \
- '(--tab)'--tab'[use tabs for indentation]' \
+ _arguments -s -S \
+ '(-r --raw-output --raw-output0 -j --join-output)'{-r,--raw-output}'[output raw strings]' \
+ '(-r --raw-output -j --join-output)--raw-output0[implies -r with NUL character delimiter]' \
+ '(-r --raw-output --raw-output0 -j --join-output)'{-j,--join-output}'[implies -r with no newline delimiter]' \
+ '(-c --compact-output --indent --tab --yaml-output)'{-c,--compact-output}'[output without pretty-printing]' \
+ '(-c --compact-output --tab --yaml-output)--indent=[number of spaces for indentation]:indentation count:(2 4 8)' \
+ '(-c --compact-output --indent --yaml-output)--tab[use tabs for indentation]' \
+ '(-c --compact-output --indent --tab )--yaml-output[output in YAML format]' \
+ '(-C --color-output -M --monochrome-output)'{-C,--color-output}'[output with colors even if piped]' \
+ '(-C --color-output -M --monochrome-output)'{-M,--monochrome-output}'[output without colors]' \
'(-n --null-input)'{-n,--null-input}'[use null as input value]' \
- '(-R --raw-input)'{-R,--raw-input}'[read input as raw strings]' \
+ '(-R --raw-input --stream --yaml-input)'{-R,--raw-input}'[read input as raw strings]' \
+ '(-R --raw-input --yaml-input)--stream[parse input in stream fashion]' \
+ '(-R --raw-input --stream )--yaml-input[read input as YAML format]' \
'(-s --slurp)'{-s,--slurp}'[read all inputs into an array]' \
- '(--stream)'--stream'[parse input in stream fashion]' \
- '(--yaml-input)'--yaml-input'[read input as YAML]' \
- '(-f --from-file)'{-f,--from-file}'[load query from file]:filename of jq query:_files' \
- '(-L)'-L'[directory to search modules from]:module directory:_directories' \
- '(--arg)'--arg'[set variable to string value]:variable name:' \
- '(--argjson)'--argjson'[set variable to JSON value]:variable name:' \
- '(--slurpfile)'--slurpfile'[set variable to the JSON contents of the file]:variable name:' \
- '(--rawfile)'--rawfile'[set variable to the contents of the file]:variable name:' \
+ '(-f --from-file 1)'{-f,--from-file}'[load query from file]:filename of jq query:_files' \
+ '*-L=[directory to search modules from]:module directory:_directories' \
+ '*--arg[set a string value to a variable]:variable name: :string value' \
+ '*--argjson[set a JSON value to a variable]:variable name: :JSON value' \
+ '*--slurpfile[set the JSON contents of a file to a variable]:variable name: :JSON file:_files' \
+ '*--rawfile[set the contents of a file to a variable]:variable name: :file:_files' \
+ '*--args[consume remaining arguments as positional string values]' \
+ '*--jsonargs[consume remaining arguments as positional JSON values]' \
'(-e --exit-status)'{-e,--exit-status}'[exit 1 when the last value is false or null]' \
- '(-v --version)'{-v,--version}'[print version]' \
- '(-h --help)'{-h,--help}'[print help]' \
- && ret=0
+ '(- 1 *)'{-v,--version}'[display version information]' \
+ '(- 1 *)'{-h,--help}'[display help information]' \
+ '1: :_guard "^-([[:alpha:]0]#|-*)" "jq query"' \
+ '*: :_gojq_args'
+}
+
+_gojq_args() {
+ if (($words[(I)--args] > $words[(I)--jsonargs])); then
+ _message 'string value'
+ elif (($words[(I)--args] < $words[(I)--jsonargs])); then
+ _message 'JSON value'
+ else
+ _arguments '*:input file:_files'
+ fi
}
diff --git a/vendor/github.com/itchyny/gojq/builtin.go b/vendor/github.com/itchyny/gojq/builtin.go
index bf82d464f..89b03dc7a 100644
--- a/vendor/github.com/itchyny/gojq/builtin.go
+++ b/vendor/github.com/itchyny/gojq/builtin.go
@@ -4,83 +4,65 @@ package gojq
func init() {
builtinFuncDefs = map[string][]*FuncDef{
- "IN": []*FuncDef{&FuncDef{Name: "IN", Args: []string{"s"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Left: &Query{Func: "s"}, Op: OpEq, Right: &Query{Func: "."}}, &Query{Func: "."}}}}}}, &FuncDef{Name: "IN", Args: []string{"src", "s"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Left: &Query{Func: "src"}, Op: OpEq, Right: &Query{Func: "s"}}, &Query{Func: "."}}}}}}},
- "INDEX": []*FuncDef{&FuncDef{Name: "INDEX", Args: []string{"stream", "idx_expr"}, Body: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "stream"}}, Pattern: &Pattern{Name: "$row"}, Start: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{}}}, Update: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Left: &Query{Func: "$row"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "idx_expr"}, Op: OpPipe, Right: &Query{Func: "tostring"}}}}}}, Op: OpAssign, Right: &Query{Func: "$row"}}}}}}, &FuncDef{Name: "INDEX", Args: []string{"idx_expr"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "INDEX", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "idx_expr"}}}}}}},
- "JOIN": []*FuncDef{&FuncDef{Name: "JOIN", Args: []string{"$idx", "idx_expr"}, Body: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$idx"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Func: "idx_expr"}}}}}}}}}}}}}}}, &FuncDef{Name: "JOIN", Args: []string{"$idx", "stream", "idx_expr"}, Body: &Query{Left: &Query{Func: "stream"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$idx"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Func: "idx_expr"}}}}}}}}}}}}, &FuncDef{Name: "JOIN", Args: []string{"$idx", "stream", "idx_expr", "join_expr"}, Body: &Query{Left: &Query{Func: "stream"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$idx"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Func: "idx_expr"}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "join_expr"}}}}},
- "all": []*FuncDef{&FuncDef{Name: "all", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "all", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "."}}}}}}, &FuncDef{Name: "all", Args: []string{"y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "all", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "y"}}}}}}, &FuncDef{Name: "all", Args: []string{"g", "y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "isempty", Args: []*Query{&Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "y"}, Op: OpAnd, Right: &Query{Func: "empty"}}}}}}}}},
- "any": []*FuncDef{&FuncDef{Name: "any", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "."}}}}}}, &FuncDef{Name: "any", Args: []string{"y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "y"}}}}}}, &FuncDef{Name: "any", Args: []string{"g", "y"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "isempty", Args: []*Query{&Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "y"}, Op: OpOr, Right: &Query{Func: "empty"}}}}}}}, Op: OpPipe, Right: &Query{Func: "not"}}}},
- "arrays": []*FuncDef{&FuncDef{Name: "arrays", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}}}}}}},
- "ascii_downcase": []*FuncDef{&FuncDef{Name: "ascii_downcase", Body: &Query{Left: &Query{Func: "explode"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Left: &Query{Term: &Term{Type: TermTypeNumber, Number: "65"}}, Op: OpLe, Right: &Query{Func: "."}}, Op: OpAnd, Right: &Query{Left: &Query{Func: "."}, Op: OpLe, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "90"}}}}, Then: &Query{Left: &Query{Func: "."}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "32"}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "implode"}}}}},
- "ascii_upcase": []*FuncDef{&FuncDef{Name: "ascii_upcase", Body: &Query{Left: &Query{Func: "explode"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Left: &Query{Term: &Term{Type: TermTypeNumber, Number: "97"}}, Op: OpLe, Right: &Query{Func: "."}}, Op: OpAnd, Right: &Query{Left: &Query{Func: "."}, Op: OpLe, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "122"}}}}, Then: &Query{Left: &Query{Func: "."}, Op: OpSub, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "32"}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "implode"}}}}},
- "assign": []*FuncDef{&FuncDef{Name: "_assign", Args: []string{"ps", "$v"}, Body: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Func: "ps"}}}}, Pattern: &Pattern{Name: "$p"}, Start: &Query{Func: "."}, Update: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Func: "$p"}, &Query{Func: "$v"}}}}}}}}}},
- "booleans": []*FuncDef{&FuncDef{Name: "booleans", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "boolean"}}}}}}}}}},
- "capture": []*FuncDef{&FuncDef{Name: "capture", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "capture", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "capture", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "captures"}}}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "name"}}}, Op: OpNe, Right: &Query{Func: "null"}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{KeyQuery: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "name"}}}, Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "add"}, Op: OpAlt, Right: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{}}}}}}}},
- "combinations": []*FuncDef{&FuncDef{Name: "combinations", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}, Else: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}, IsSlice: true}}}, Op: OpPipe, Right: &Query{Func: "combinations"}}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$y"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "$x"}}}}, Op: OpAdd, Right: &Query{Func: "$y"}}}}}}}}}}}}}}}}, &FuncDef{Name: "combinations", Args: []string{"n"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$dot"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "range", Args: []*Query{&Query{Func: "n"}}}}}, Op: OpPipe, Right: &Query{Func: "$dot"}}}}}, Op: OpPipe, Right: &Query{Func: "combinations"}}}}}}}}},
- "del": []*FuncDef{&FuncDef{Name: "del", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "delpaths", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Func: "f"}}}}}}}}}}}}}},
- "endswith": []*FuncDef{&FuncDef{Name: "endswith", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}, Then: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Func: "length"}}}}}}, IsSlice: true}}}, Op: OpEq, Right: &Query{Func: "$x"}}, Else: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_type_error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "endswith"}}}}}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_type_error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "endswith"}}}}}}}}}}}},
- "finites": []*FuncDef{&FuncDef{Name: "finites", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Func: "isfinite"}}}}}}},
- "first": []*FuncDef{&FuncDef{Name: "first", Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}, &FuncDef{Name: "first", Args: []string{"g"}, Body: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}}}}}}},
- "flatten": []*FuncDef{&FuncDef{Name: "_flatten", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}, Pattern: &Pattern{Name: "$i"}, Start: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}, Update: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$i"}, Op: OpPipe, Right: &Query{Left: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpAnd, Right: &Query{Left: &Query{Func: "$x"}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, Then: &Query{Left: &Query{Func: "."}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$i"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_flatten", Args: []*Query{&Query{Left: &Query{Func: "$x"}, Op: OpSub, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}}}, Else: &Query{Left: &Query{Func: "."}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "$i"}}}}}}}}}}}}, &FuncDef{Name: "flatten", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$x"}, Op: OpLt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "flatten depth must not be negative"}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_flatten", Args: []*Query{&Query{Func: "$x"}}}}}}}}}, &FuncDef{Name: "flatten", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_flatten", Args: []*Query{&Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}},
- "from_entries": []*FuncDef{&FuncDef{Name: "from_entries", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{KeyQuery: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "key"}}}, Op: OpAlt, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "Key"}}}, Op: OpAlt, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "name"}}}, Op: OpAlt, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "Name"}}}}}}, Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "has", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "value"}}}}}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "value"}}}, Else: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "Value"}}}}}}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "add"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "."}, Op: OpUpdateAlt, Right: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{}}}}}}}},
- "fromdate": []*FuncDef{&FuncDef{Name: "fromdate", Body: &Query{Func: "fromdateiso8601"}}},
- "fromdateiso8601": []*FuncDef{&FuncDef{Name: "fromdateiso8601", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "strptime", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "%Y-%m-%dT%H:%M:%SZ"}}}}}}}, Op: OpPipe, Right: &Query{Func: "mktime"}}}},
- "fromstream": []*FuncDef{&FuncDef{Name: "fromstream", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{Key: "x", Val: &ObjectVal{Queries: []*Query{&Query{Func: "null"}}}}, &ObjectKeyVal{Key: "e", Val: &ObjectVal{Queries: []*Query{&Query{Func: "false"}}}}}}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$init"}}, Body: &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "f"}}, Pattern: &Pattern{Name: "$i"}, Start: &Query{Func: "$init"}, Update: &Query{Left: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "e"}}}, Then: &Query{Func: "$init"}, Else: &Query{Func: "."}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$i"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "2"}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "e"}}}}}}, &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "x"}}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}}, &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "e"}}}}}}, &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}}}}, Extract: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "e"}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "x"}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}}},
- "group_by": []*FuncDef{&FuncDef{Name: "group_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_group_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
- "gsub": []*FuncDef{&FuncDef{Name: "gsub", Args: []string{"$re", "str"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sub", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "str"}, &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "g"}}}}}}}}, &FuncDef{Name: "gsub", Args: []string{"$re", "str", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sub", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "str"}, &Query{Left: &Query{Func: "$flags"}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "g"}}}}}}}}}},
- "in": []*FuncDef{&FuncDef{Name: "in", Args: []string{"xs"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Func: "xs"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "has", Args: []*Query{&Query{Func: "$x"}}}}}}}}}}}}},
- "index": []*FuncDef{&FuncDef{Name: "index", Args: []string{"$x"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "indices", Args: []*Query{&Query{Func: "$x"}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}},
- "indices": []*FuncDef{&FuncDef{Name: "indices", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpAnd, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Func: "type"}}}}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Func: "$x"}}}}, Elif: []*IfElif{&IfElif{Cond: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "$x"}}}}}}}}, &IfElif{Cond: &Query{Left: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}, Op: OpAnd, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Func: "type"}}}}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}}, Then: &Query{Left: &Query{Func: "explode"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Func: "explode"}}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Func: "$x"}}}}}}}}},
- "inputs": []*FuncDef{&FuncDef{Name: "inputs", Body: &Query{Term: &Term{Type: TermTypeTry, Try: &Try{Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "repeat", Args: []*Query{&Query{Func: "input"}}}}}, Catch: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "break"}}}}, Then: &Query{Func: "empty"}, Else: &Query{Func: "error"}}}}}}}}},
- "inside": []*FuncDef{&FuncDef{Name: "inside", Args: []string{"xs"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Func: "xs"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "contains", Args: []*Query{&Query{Func: "$x"}}}}}}}}}}}}},
- "isempty": []*FuncDef{&FuncDef{Name: "isempty", Args: []string{"g"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "first", Args: []*Query{&Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Func: "false"}}}}, Op: OpComma, Right: &Query{Func: "true"}}}}}}}},
- "iterables": []*FuncDef{&FuncDef{Name: "iterables", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpPipe, Right: &Query{Left: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpOr, Right: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}}}}}}},
- "join": []*FuncDef{&FuncDef{Name: "join", Args: []string{"$x"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}, Pattern: &Pattern{Name: "$i"}, Start: &Query{Func: "null"}, Update: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Func: "null"}}, Then: &Query{Term: &Term{Type: TermTypeString, Str: &String{}}}, Else: &Query{Left: &Query{Func: "."}, Op: OpAdd, Right: &Query{Func: "$x"}}}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$i"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "boolean"}}}}, Op: OpOr, Right: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "number"}}}}}, Then: &Query{Func: "tostring"}, Else: &Query{Left: &Query{Func: "."}, Op: OpAlt, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{}}}}}}}}}}}}}}, Op: OpAlt, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{}}}}}},
- "last": []*FuncDef{&FuncDef{Name: "last", Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}, &FuncDef{Name: "last", Args: []string{"g"}, Body: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "g"}}, Pattern: &Pattern{Name: "$item"}, Start: &Query{Func: "null"}, Update: &Query{Func: "$item"}}}}}},
- "leaf_paths": []*FuncDef{&FuncDef{Name: "leaf_paths", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "paths", Args: []*Query{&Query{Func: "scalars"}}}}}}},
- "limit": []*FuncDef{&FuncDef{Name: "limit", Args: []string{"$n", "g"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$n"}, Op: OpGt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "g"}}, Pattern: &Pattern{Name: "$item"}, Start: &Query{Func: "$n"}, Update: &Query{Left: &Query{Func: "."}, Op: OpSub, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}, Extract: &Query{Left: &Query{Func: "$item"}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "."}, Op: OpLe, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}, Else: &Query{Func: "empty"}}}}}}}}}}}, Elif: []*IfElif{&IfElif{Cond: &Query{Left: &Query{Func: "$n"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Func: "empty"}}}, Else: &Query{Func: "g"}}}}}},
- "ltrimstr": []*FuncDef{&FuncDef{Name: "ltrimstr", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Left: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}, Op: OpAnd, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}}}}}, Op: OpAnd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "startswith", Args: []*Query{&Query{Func: "$x"}}}}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Func: "length"}}, IsSlice: true}}}}}}}},
- "map": []*FuncDef{&FuncDef{Name: "map", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}}}},
- "map_values": []*FuncDef{&FuncDef{Name: "map_values", Args: []string{"f"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Op: OpModify, Right: &Query{Func: "f"}}}},
- "match": []*FuncDef{&FuncDef{Name: "match", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "match", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}, &Query{Func: "false"}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}}}},
- "max": []*FuncDef{&FuncDef{Name: "max", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "max_by", Args: []*Query{&Query{Func: "."}}}}}}},
- "max_by": []*FuncDef{&FuncDef{Name: "max_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_max_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
- "min": []*FuncDef{&FuncDef{Name: "min", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "min_by", Args: []*Query{&Query{Func: "."}}}}}}},
- "min_by": []*FuncDef{&FuncDef{Name: "min_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_min_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
- "modify": []*FuncDef{&FuncDef{Name: "_modify", Args: []string{"ps", "f"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Func: "ps"}}}}, Pattern: &Pattern{Name: "$p"}, Start: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}}}}, Update: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, Op: OpAdd, Right: &Query{Func: "$p"}}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$q"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Func: "$q"}, &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "getpath", Args: []*Query{&Query{Func: "$q"}}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}}}}}}}}}, Op: OpComma, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}, Op: OpUpdateAdd, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "$p"}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$x"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "delpaths", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$x"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}}}}}}}}}}}},
- "normals": []*FuncDef{&FuncDef{Name: "normals", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Func: "isnormal"}}}}}}},
- "not": []*FuncDef{&FuncDef{Name: "not", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "."}, Then: &Query{Func: "false"}, Else: &Query{Func: "true"}}}}}},
- "nth": []*FuncDef{&FuncDef{Name: "nth", Args: []string{"$n"}, Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Func: "$n"}}}}}, &FuncDef{Name: "nth", Args: []string{"$n", "g"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$n"}, Op: OpLt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "nth doesn't support negative indices"}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "g"}}, Pattern: &Pattern{Name: "$item"}, Start: &Query{Func: "$n"}, Update: &Query{Left: &Query{Func: "."}, Op: OpSub, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}, Extract: &Query{Left: &Query{Left: &Query{Left: &Query{Func: "."}, Op: OpLt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Op: OpOr, Right: &Query{Func: "empty"}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "$item"}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}}}}}}}}}}}}},
- "nulls": []*FuncDef{&FuncDef{Name: "nulls", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Func: "null"}}}}}}}},
- "numbers": []*FuncDef{&FuncDef{Name: "numbers", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "number"}}}}}}}}}},
- "objects": []*FuncDef{&FuncDef{Name: "objects", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}}}}},
- "paths": []*FuncDef{&FuncDef{Name: "paths", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "recurse", Args: []*Query{&Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "type"}, Op: OpPipe, Right: &Query{Left: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpOr, Right: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}}}, Then: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "length"}, Op: OpGt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}}}, &FuncDef{Name: "paths", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Func: "paths"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$p"}}, Body: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "getpath", Args: []*Query{&Query{Func: "$p"}}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}}}}}}}}}}}}}}}}},
- "range": []*FuncDef{&FuncDef{Name: "range", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "range", Args: []*Query{&Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}, &Query{Func: "$x"}}}}}}, &FuncDef{Name: "range", Args: []string{"$start", "$end"}, Body: &Query{Left: &Query{Func: "$start"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "while", Args: []*Query{&Query{Left: &Query{Func: "."}, Op: OpLt, Right: &Query{Func: "$end"}}, &Query{Left: &Query{Func: "."}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}, &FuncDef{Name: "range", Args: []string{"$start", "$end", "$step"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$step"}, Op: OpGt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Left: &Query{Func: "$start"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "while", Args: []*Query{&Query{Left: &Query{Func: "."}, Op: OpLt, Right: &Query{Func: "$end"}}, &Query{Left: &Query{Func: "."}, Op: OpAdd, Right: &Query{Func: "$step"}}}}}}}, Elif: []*IfElif{&IfElif{Cond: &Query{Left: &Query{Func: "$step"}, Op: OpLt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Left: &Query{Func: "$start"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "while", Args: []*Query{&Query{Left: &Query{Func: "."}, Op: OpGt, Right: &Query{Func: "$end"}}, &Query{Left: &Query{Func: "."}, Op: OpAdd, Right: &Query{Func: "$step"}}}}}}}}}, Else: &Query{Func: "empty"}}}}}},
- "recurse": []*FuncDef{&FuncDef{Name: "recurse", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "recurse", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Optional: true}}}}}}}}}, &FuncDef{Name: "recurse", Args: []string{"f"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "r", Body: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "f"}, Op: OpPipe, Right: &Query{Func: "r"}}}}}}}, Func: "r"}}, &FuncDef{Name: "recurse", Args: []string{"f", "cond"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "r", Body: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "f"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Func: "cond"}}}}}, Op: OpPipe, Right: &Query{Func: "r"}}}}}}}}, Func: "r"}}},
- "repeat": []*FuncDef{&FuncDef{Name: "repeat", Args: []string{"f"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_repeat", Body: &Query{Left: &Query{Func: "f"}, Op: OpComma, Right: &Query{Func: "_repeat"}}}}, Func: "_repeat"}}},
- "rindex": []*FuncDef{&FuncDef{Name: "rindex", Args: []string{"$x"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "indices", Args: []*Query{&Query{Func: "$x"}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeNumber, Number: "1"}}}}, IsSlice: true}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}}}},
- "rtrimstr": []*FuncDef{&FuncDef{Name: "rtrimstr", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Left: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}, Op: OpAnd, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}}}}}, Op: OpAnd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "endswith", Args: []*Query{&Query{Func: "$x"}}}}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{End: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Func: "length"}}}}}}}}}}}}}},
- "scalars": []*FuncDef{&FuncDef{Name: "scalars", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpPipe, Right: &Query{Left: &Query{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpAnd, Right: &Query{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}}}}}}},
- "scan": []*FuncDef{&FuncDef{Name: "scan", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "scan", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "scan", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Left: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "g"}}}, Op: OpAdd, Right: &Query{Func: "$flags"}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "captures"}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpGt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}, Then: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "captures"}}}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}}}}}}},
- "select": []*FuncDef{&FuncDef{Name: "select", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "f"}, Then: &Query{Func: "."}, Else: &Query{Func: "empty"}}}}}},
- "sort": []*FuncDef{&FuncDef{Name: "sort", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sort_by", Args: []*Query{&Query{Func: "."}}}}}}},
- "sort_by": []*FuncDef{&FuncDef{Name: "sort_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_sort_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
- "splits": []*FuncDef{&FuncDef{Name: "splits", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "splits", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "splits", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "split", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}}}},
- "startswith": []*FuncDef{&FuncDef{Name: "startswith", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}, Then: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{End: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Func: "length"}}}}}, Op: OpEq, Right: &Query{Func: "$x"}}, Else: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_type_error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "startswith"}}}}}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_type_error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "startswith"}}}}}}}}}}}},
- "strings": []*FuncDef{&FuncDef{Name: "strings", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}}}}}}},
- "sub": []*FuncDef{&FuncDef{Name: "sub", Args: []string{"$re", "str"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sub", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "str"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "sub", Args: []string{"$re", "str", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$in"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}}}}, Pattern: &Pattern{Name: "$r"}, Start: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeString, Str: &String{}}}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}, Update: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "$r"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "captures"}}}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "name"}}}, Op: OpNe, Right: &Query{Func: "null"}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{KeyQuery: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "name"}}}, Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Left: &Query{Func: "add"}, Op: OpAlt, Right: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Left: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$x"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$in"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$x"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}, IsSlice: true, End: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "offset"}}}}}}}}}}}, Op: OpAdd, Right: &Query{Func: "str"}}, Op: OpComma, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "offset"}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "length"}}}}}}}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$in"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: 
TermTypeNumber, Number: "1"}}}}}, IsSlice: true}}}}}}}}}}}}}},
- "test": []*FuncDef{&FuncDef{Name: "test", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "test", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "test", Args: []string{"$re", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}, &Query{Func: "true"}}}}}}},
- "to_entries": []*FuncDef{&FuncDef{Name: "to_entries", Body: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "keys"}, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$k"}}, Body: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{Key: "key", Val: &ObjectVal{Queries: []*Query{&Query{Func: "$k"}}}}, &ObjectKeyVal{Key: "value", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Func: "$k"}}}}}}}}}}}}}}}}}}}}},
- "todate": []*FuncDef{&FuncDef{Name: "todate", Body: &Query{Func: "todateiso8601"}}},
- "todateiso8601": []*FuncDef{&FuncDef{Name: "todateiso8601", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "strftime", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "%Y-%m-%dT%H:%M:%SZ"}}}}}}}}},
- "tostream": []*FuncDef{&FuncDef{Name: "tostream", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{FuncDefs: []*FuncDef{&FuncDef{Name: "r", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Optional: true}}}}, Op: OpPipe, Right: &Query{Func: "r"}}}}, Op: OpComma, Right: &Query{Func: "."}}}}, Func: "r"}}}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$p"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "getpath", Args: []*Query{&Query{Func: "$p"}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Optional: true}}}}}}}, Pattern: &Pattern{Name: "$q"}, Start: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "$p"}, Op: OpComma, Right: &Query{Func: "."}}}}}, Update: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "$p"}, Op: OpAdd, Right: &Query{Func: "$q"}}}}}}}}}}}}}}}},
- "transpose": []*FuncDef{&FuncDef{Name: "transpose", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}, Then: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}, Else: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$in"}}, Body: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Func: "length"}}}}}, Op: OpPipe, Right: &Query{Func: "max"}}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$max"}}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "length"}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$length"}}, Body: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "range", Args: []*Query{&Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}, &Query{Func: "$max"}}}}, Pattern: &Pattern{Name: "$j"}, Start: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}, Update: &Query{Left: &Query{Func: "."}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "range", Args: []*Query{&Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}, &Query{Func: "$length"}}}}, Pattern: &Pattern{Name: "$i"}, Start: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}, Update: &Query{Left: &Query{Func: "."}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$in"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Func: "$i"}}}, &Suffix{Index: &Index{Start: &Query{Func: "$j"}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}},
- "truncate_stream": []*FuncDef{&FuncDef{Name: "truncate_stream", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$n"}}, Body: &Query{Left: &Query{Func: "null"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "f"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$input"}}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, Op: OpPipe, Right: &Query{Func: "length"}}}}, Op: OpGt, Right: &Query{Func: "$n"}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$input"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}, &Suffix{Index: &Index{Start: &Query{Func: "$n"}, IsSlice: true}}}}}}}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}}}}}}},
- "unique": []*FuncDef{&FuncDef{Name: "unique", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "unique_by", Args: []*Query{&Query{Func: "."}}}}}}},
- "unique_by": []*FuncDef{&FuncDef{Name: "unique_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_unique_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
- "until": []*FuncDef{&FuncDef{Name: "until", Args: []string{"cond", "next"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_until", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "cond"}, Then: &Query{Func: "."}, Else: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "next"}, Op: OpPipe, Right: &Query{Func: "_until"}}}}}}}}}, Func: "_until"}}},
- "values": []*FuncDef{&FuncDef{Name: "values", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Func: "null"}}}}}}}},
- "walk": []*FuncDef{&FuncDef{Name: "walk", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$in"}}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "keys"}, SuffixList: []*Suffix{&Suffix{Iter: true}}}, Pattern: &Pattern{Name: "$key"}, Start: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{}}}, Update: &Query{Left: &Query{Func: "."}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{KeyQuery: &Query{Func: "$key"}, Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$in"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Func: "$key"}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "walk", Args: []*Query{&Query{Func: "f"}}}}}}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "f"}}, Elif: []*IfElif{&IfElif{Cond: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "walk", Args: []*Query{&Query{Func: "f"}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}, Else: &Query{Func: "f"}}}}}}}}}}},
- "while": []*FuncDef{&FuncDef{Name: "while", Args: []string{"cond", "update"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_while", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "cond"}, Then: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "update"}, Op: OpPipe, Right: &Query{Func: "_while"}}}}}, Else: &Query{Func: "empty"}}}}}}, Func: "_while"}}},
- "with_entries": []*FuncDef{&FuncDef{Name: "with_entries", Args: []string{"f"}, Body: &Query{Left: &Query{Func: "to_entries"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Func: "f"}}}}}, Op: OpPipe, Right: &Query{Func: "from_entries"}}}}},
+ "IN": {{Name: "IN", Args: []string{"s"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{{Left: &Query{Func: "s"}, Op: OpEq, Right: &Query{Func: "."}}, {Func: "."}}}}}}, {Name: "IN", Args: []string{"src", "s"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{{Left: &Query{Func: "src"}, Op: OpEq, Right: &Query{Func: "s"}}, {Func: "."}}}}}}},
+ "INDEX": {{Name: "INDEX", Args: []string{"stream", "idx_expr"}, Body: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Query: &Query{Func: "stream"}, Pattern: &Pattern{Name: "$row"}, Start: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{}}}, Update: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Left: &Query{Func: "$row"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "idx_expr"}, Op: OpPipe, Right: &Query{Func: "tostring"}}}}}}, Op: OpAssign, Right: &Query{Func: "$row"}}}}}}, {Name: "INDEX", Args: []string{"idx_expr"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "INDEX", Args: []*Query{{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}}}}, {Func: "idx_expr"}}}}}}},
+ "JOIN": {{Name: "JOIN", Args: []string{"$idx", "idx_expr"}, Body: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$idx"}, SuffixList: []*Suffix{{Index: &Index{Start: &Query{Func: "idx_expr"}}}}}}}}}}}}}}}, {Name: "JOIN", Args: []string{"$idx", "stream", "idx_expr"}, Body: &Query{Left: &Query{Func: "stream"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$idx"}, SuffixList: []*Suffix{{Index: &Index{Start: &Query{Func: "idx_expr"}}}}}}}}}}}}, {Name: "JOIN", Args: []string{"$idx", "stream", "idx_expr", "join_expr"}, Body: &Query{Left: &Query{Func: "stream"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$idx"}, SuffixList: []*Suffix{{Index: &Index{Start: &Query{Func: "idx_expr"}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "join_expr"}}}}},
+ "_assign": {},
+ "_modify": {},
+ "all": {{Name: "all", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "all", Args: []*Query{{Func: "."}}}}}}, {Name: "all", Args: []string{"y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "all", Args: []*Query{{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}}}}, {Func: "y"}}}}}}, {Name: "all", Args: []string{"g", "y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "isempty", Args: []*Query{{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Left: &Query{Func: "y"}, Op: OpPipe, Right: &Query{Func: "not"}}}}}}}}}}}}},
+ "any": {{Name: "any", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{{Func: "."}}}}}}, {Name: "any", Args: []string{"y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}}}}, {Func: "y"}}}}}}, {Name: "any", Args: []string{"g", "y"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "isempty", Args: []*Query{{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Func: "y"}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "not"}}}},
+ "arrays": {{Name: "arrays", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}}}}}}},
+ "booleans": {{Name: "booleans", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "boolean"}}}}}}}}}},
+ "capture": {{Name: "capture", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "capture", Args: []*Query{{Func: "$re"}, {Func: "null"}}}}}}, {Name: "capture", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{{Func: "$re"}, {Func: "$flags"}}}}}, Op: OpPipe, Right: &Query{Func: "_capture"}}}},
+ "combinations": {{Name: "combinations", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}, Else: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, SuffixList: []*Suffix{{Iter: true}, {Bind: &Bind{Patterns: []*Pattern{{Name: "$x"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "$x"}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}, IsSlice: true}}}, Op: OpPipe, Right: &Query{Func: "combinations"}}}}}}}}}}}}}}, {Name: "combinations", Args: []string{"n"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "limit", Args: []*Query{{Func: "n"}, {Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "repeat", Args: []*Query{{Func: "."}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "combinations"}}}},
+ "del": {{Name: "del", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "delpaths", Args: []*Query{{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{{Func: "f"}}}}}}}}}}}}}},
+ "finites": {{Name: "finites", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Func: "isfinite"}}}}}}},
+ "first": {{Name: "first", Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}, {Name: "first", Args: []string{"g"}, Body: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}}}}}}},
+ "fromdate": {{Name: "fromdate", Body: &Query{Func: "fromdateiso8601"}}},
+ "fromdateiso8601": {{Name: "fromdateiso8601", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "strptime", Args: []*Query{{Term: &Term{Type: TermTypeString, Str: &String{Str: "%Y-%m-%dT%H:%M:%S%z"}}}}}}}, Op: OpPipe, Right: &Query{Func: "mktime"}}}},
+ "fromstream": {{Name: "fromstream", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{{Key: "x", Val: &Query{Func: "null"}}, {Key: "e", Val: &Query{Func: "false"}}}}, SuffixList: []*Suffix{{Bind: &Bind{Patterns: []*Pattern{{Name: "$init"}}, Body: &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{Query: &Query{Func: "f"}, Pattern: &Pattern{Name: "$i"}, Start: &Query{Func: "$init"}, Update: &Query{Left: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "e"}}}, Then: &Query{Func: "$init"}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$i"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "2"}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "e"}}}}}}, {Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "x"}}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}}, {Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "e"}}}}}}, {Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}}}}, Extract: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "e"}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "x"}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}}},
+ "group_by": {{Name: "group_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_group_by", Args: []*Query{{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
+ "gsub": {{Name: "gsub", Args: []string{"$re", "str"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sub", Args: []*Query{{Func: "$re"}, {Func: "str"}, {Term: &Term{Type: TermTypeString, Str: &String{Str: "g"}}}}}}}}, {Name: "gsub", Args: []string{"$re", "str", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sub", Args: []*Query{{Func: "$re"}, {Func: "str"}, {Left: &Query{Func: "$flags"}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "g"}}}}}}}}}},
+ "in": {{Name: "in", Args: []string{"xs"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Bind: &Bind{Patterns: []*Pattern{{Name: "$x"}}, Body: &Query{Left: &Query{Func: "xs"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "has", Args: []*Query{{Func: "$x"}}}}}}}}}}}}},
+ "inputs": {{Name: "inputs", Body: &Query{Term: &Term{Type: TermTypeTry, Try: &Try{Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "repeat", Args: []*Query{{Func: "input"}}}}}, Catch: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "break"}}}}, Then: &Query{Func: "empty"}, Else: &Query{Func: "error"}}}}}}}}},
+ "inside": {{Name: "inside", Args: []string{"xs"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Bind: &Bind{Patterns: []*Pattern{{Name: "$x"}}, Body: &Query{Left: &Query{Func: "xs"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "contains", Args: []*Query{{Func: "$x"}}}}}}}}}}}}},
+ "isempty": {{Name: "isempty", Args: []string{"g"}, Body: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "false"}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}}}}, Op: OpComma, Right: &Query{Func: "true"}}}}}}},
+ "iterables": {{Name: "iterables", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Left: &Query{Func: "type"}, Op: OpPipe, Right: &Query{Left: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpOr, Right: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}}}}}}},
+ "last": {{Name: "last", Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}, {Name: "last", Args: []string{"g"}, Body: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Query: &Query{Func: "g"}, Pattern: &Pattern{Name: "$item"}, Start: &Query{Func: "null"}, Update: &Query{Func: "$item"}}}}}},
+ "limit": {{Name: "limit", Args: []string{"$n", "g"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$n"}, Op: OpGt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{Query: &Query{Func: "g"}, Pattern: &Pattern{Name: "$item"}, Start: &Query{Func: "$n"}, Update: &Query{Left: &Query{Func: "."}, Op: OpSub, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}, Extract: &Query{Left: &Query{Func: "$item"}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "."}, Op: OpLe, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}, Else: &Query{Func: "empty"}}}}}}}}}}}, Elif: []*IfElif{{Cond: &Query{Left: &Query{Func: "$n"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Func: "empty"}}}, Else: &Query{Func: "g"}}}}}},
+ "map": {{Name: "map", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}}}},
+ "map_values": {{Name: "map_values", Args: []string{"f"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}}}}, Op: OpModify, Right: &Query{Func: "f"}}}},
+ "match": {{Name: "match", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{{Func: "$re"}, {Func: "null"}}}}}}, {Name: "match", Args: []string{"$re", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_match", Args: []*Query{{Func: "$re"}, {Func: "$flags"}, {Func: "false"}}}, SuffixList: []*Suffix{{Iter: true}}}}}},
+ "max_by": {{Name: "max_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_max_by", Args: []*Query{{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
+ "min_by": {{Name: "min_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_min_by", Args: []*Query{{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
+ "normals": {{Name: "normals", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Func: "isnormal"}}}}}}},
+ "not": {{Name: "not", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "."}, Then: &Query{Func: "false"}, Else: &Query{Func: "true"}}}}}},
+ "nth": {{Name: "nth", Args: []string{"$n"}, Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Func: "$n"}}}}}, {Name: "nth", Args: []string{"$n", "g"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$n"}, Op: OpLt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "error", Args: []*Query{{Term: &Term{Type: TermTypeString, Str: &String{Str: "nth doesn't support negative indices"}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{Query: &Query{Func: "g"}, Pattern: &Pattern{Name: "$item"}, Start: &Query{Left: &Query{Func: "$n"}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}, Update: &Query{Left: &Query{Func: "."}, Op: OpSub, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}, Extract: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "."}, Op: OpLe, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Left: &Query{Func: "$item"}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}}}},
+ "nulls": {{Name: "nulls", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Func: "null"}}}}}}}},
+ "numbers": {{Name: "numbers", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "number"}}}}}}}}}},
+ "objects": {{Name: "objects", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}}}}},
+ "paths": {{Name: "paths", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{{Func: ".."}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}}}}}}}, {Name: "paths", Args: []string{"f"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{{Left: &Query{Func: ".."}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Func: "f"}}}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}}}}}}}},
+ "pick": {{Name: "pick", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Bind: &Bind{Patterns: []*Pattern{{Name: "$v"}}, Body: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{{Func: "f"}}}}}, Pattern: &Pattern{Name: "$p"}, Start: &Query{Func: "null"}, Update: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{{Func: "$p"}, {Left: &Query{Func: "$v"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "getpath", Args: []*Query{{Func: "$p"}}}}}}}}}}}}}}}}}}}},
+ "range": {{Name: "range", Args: []string{"$end"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_range", Args: []*Query{{Term: &Term{Type: TermTypeNumber, Number: "0"}}, {Func: "$end"}, {Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}, {Name: "range", Args: []string{"$start", "$end"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_range", Args: []*Query{{Func: "$start"}, {Func: "$end"}, {Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}, {Name: "range", Args: []string{"$start", "$end", "$step"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_range", Args: []*Query{{Func: "$start"}, {Func: "$end"}, {Func: "$step"}}}}}}},
+ "recurse": {{Name: "recurse", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "recurse", Args: []*Query{{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}, {Optional: true}}}}}}}}}, {Name: "recurse", Args: []string{"f"}, Body: &Query{FuncDefs: []*FuncDef{{Name: "r", Body: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "f"}, Op: OpPipe, Right: &Query{Func: "r"}}}}}}}, Func: "r"}}, {Name: "recurse", Args: []string{"f", "cond"}, Body: &Query{FuncDefs: []*FuncDef{{Name: "r", Body: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "f"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Func: "cond"}}}}}, Op: OpPipe, Right: &Query{Func: "r"}}}}}}}}, Func: "r"}}},
+ "repeat": {{Name: "repeat", Args: []string{"f"}, Body: &Query{FuncDefs: []*FuncDef{{Name: "_repeat", Body: &Query{Left: &Query{Func: "f"}, Op: OpComma, Right: &Query{Func: "_repeat"}}}}, Func: "_repeat"}}},
+ "scalars": {{Name: "scalars", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Left: &Query{Func: "type"}, Op: OpPipe, Right: &Query{Left: &Query{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpAnd, Right: &Query{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}}}}}}},
+ "scan": {{Name: "scan", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "scan", Args: []*Query{{Func: "$re"}, {Func: "null"}}}}}}, {Name: "scan", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{{Func: "$re"}, {Left: &Query{Func: "$flags"}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "g"}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "captures"}}}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}, Else: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "captures"}, SuffixList: []*Suffix{{Iter: true}, {Index: &Index{Name: "string"}}}}}}}}}}}}}},
+ "select": {{Name: "select", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "f"}, Then: &Query{Func: "."}, Else: &Query{Func: "empty"}}}}}},
+ "sort_by": {{Name: "sort_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_sort_by", Args: []*Query{{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
+ "splits": {{Name: "splits", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "splits", Args: []*Query{{Func: "$re"}, {Func: "null"}}}}}}, {Name: "splits", Args: []string{"$re", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "split", Args: []*Query{{Func: "$re"}, {Func: "$flags"}}}, SuffixList: []*Suffix{{Iter: true}}}}}},
+ "strings": {{Name: "strings", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}}}}}}},
+ "sub": {{Name: "sub", Args: []string{"$re", "str"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sub", Args: []*Query{{Func: "$re"}, {Func: "str"}, {Func: "null"}}}}}}, {Name: "sub", Args: []string{"$re", "str", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Bind: &Bind{Patterns: []*Pattern{{Name: "$str"}}, Body: &Query{FuncDefs: []*FuncDef{{Name: "_sub", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "matches"}}}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$str"}, SuffixList: []*Suffix{{Index: &Index{End: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "offset"}}}, IsSlice: true}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}}, Else: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "matches"}, SuffixList: []*Suffix{{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}, {Bind: &Bind{Patterns: []*Pattern{{Name: "$r"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{{Key: "string", Val: &Query{Left: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$r"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "_capture"}, Op: OpPipe, Right: &Query{Func: "str"}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$str"}, SuffixList: []*Suffix{{Index: &Index{Start: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{{Index: &Index{Name: "offset"}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{{Index: &Index{Name: "length"}}}}}}, End: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "offset"}}}, IsSlice: true}}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}}}, {Key: "offset", Val: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{{Index: &Index{Name: "offset"}}}}}}, {Key: "matches", Val: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "matches"}, SuffixList: []*Suffix{{Index: &Index{End: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeNumber, Number: "1"}}}}, IsSlice: true}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "_sub"}}}}}}}}}}}}, Left: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{{Key: "string", Val: &Query{Term: &Term{Type: TermTypeString, Str: &String{}}}}, {Key: "matches", Val: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{{Func: "$re"}, {Func: "$flags"}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "_sub"}}}}}}}}},
+ "test": {{Name: "test", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "test", Args: []*Query{{Func: "$re"}, {Func: "null"}}}}}}, {Name: "test", Args: []string{"$re", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_match", Args: []*Query{{Func: "$re"}, {Func: "$flags"}, {Func: "true"}}}}}}},
+ "todate": {{Name: "todate", Body: &Query{Func: "todateiso8601"}}},
+ "todateiso8601": {{Name: "todateiso8601", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "strftime", Args: []*Query{{Term: &Term{Type: TermTypeString, Str: &String{Str: "%Y-%m-%dT%H:%M:%SZ"}}}}}}}}},
+ "tostream": {{Name: "tostream", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{{FuncDefs: []*FuncDef{{Name: "r", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}, {Optional: true}}}}, Op: OpPipe, Right: &Query{Func: "r"}}}}, Op: OpComma, Right: &Query{Func: "."}}}}, Func: "r"}}}, SuffixList: []*Suffix{{Bind: &Bind{Patterns: []*Pattern{{Name: "$p"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "getpath", Args: []*Query{{Func: "$p"}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}, {Optional: true}}}}}}}}, Pattern: &Pattern{Name: "$q"}, Start: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "$p"}, Op: OpComma, Right: &Query{Func: "."}}}}}, Update: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "$p"}, Op: OpAdd, Right: &Query{Func: "$q"}}}}}}}}}}}}}}}},
+ "truncate_stream": {{Name: "truncate_stream", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Bind: &Bind{Patterns: []*Pattern{{Name: "$n"}}, Body: &Query{Left: &Query{Func: "null"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "f"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpGt, Right: &Query{Func: "$n"}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, Op: OpModify, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Func: "$n"}, IsSlice: true}}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}},
+ "unique_by": {{Name: "unique_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_unique_by", Args: []*Query{{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
+ "until": {{Name: "until", Args: []string{"cond", "next"}, Body: &Query{FuncDefs: []*FuncDef{{Name: "_until", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "cond"}, Then: &Query{Func: "."}, Else: &Query{Left: &Query{Func: "next"}, Op: OpPipe, Right: &Query{Func: "_until"}}}}}}}, Func: "_until"}}},
+ "values": {{Name: "values", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Func: "null"}}}}}}}},
+ "walk": {{Name: "walk", Args: []string{"f"}, Body: &Query{FuncDefs: []*FuncDef{{Name: "_walk", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{{Func: "_walk"}}}}}, Elif: []*IfElif{{Cond: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map_values", Args: []*Query{{Func: "_walk"}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}, Func: "_walk"}}},
+ "while": {{Name: "while", Args: []string{"cond", "update"}, Body: &Query{FuncDefs: []*FuncDef{{Name: "_while", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "cond"}, Then: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "update"}, Op: OpPipe, Right: &Query{Func: "_while"}}}}}, Else: &Query{Func: "empty"}}}}}}, Func: "_while"}}},
+ "with_entries": {{Name: "with_entries", Args: []string{"f"}, Body: &Query{Left: &Query{Func: "to_entries"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{{Func: "f"}}}}}, Op: OpPipe, Right: &Query{Func: "from_entries"}}}}},
}
}
diff --git a/vendor/github.com/itchyny/gojq/builtin.jq b/vendor/github.com/itchyny/gojq/builtin.jq
index 22913ba52..ac6292ba7 100644
--- a/vendor/github.com/itchyny/gojq/builtin.jq
+++ b/vendor/github.com/itchyny/gojq/builtin.jq
@@ -1,10 +1,6 @@
def not: if . then false else true end;
def in(xs): . as $x | xs | has($x);
def map(f): [.[] | f];
-def to_entries: [keys[] as $k | {key: $k, value: .[$k]}];
-def from_entries:
- map({ (.key // .Key // .name // .Name): (if has("value") then .value else .Value end) })
- | add | . //= {};
def with_entries(f): to_entries | map(f) | from_entries;
def select(f): if f then . else empty end;
def recurse: recurse(.[]?);
@@ -15,44 +11,24 @@ def while(cond; update):
def _while: if cond then ., (update | _while) else empty end;
_while;
def until(cond; next):
- def _until: if cond then . else (next | _until) end;
+ def _until: if cond then . else next | _until end;
_until;
def repeat(f):
def _repeat: f, _repeat;
_repeat;
-def range($x): range(0; $x);
-def range($start; $end):
- $start | while(. < $end; . + 1);
-def range($start; $end; $step):
- if $step > 0 then $start|while(. < $end; . + $step)
- elif $step < 0 then $start|while(. > $end; . + $step)
- else empty end;
+def range($end): _range(0; $end; 1);
+def range($start; $end): _range($start; $end; 1);
+def range($start; $end; $step): _range($start; $end; $step);
-def _flatten($x):
- reduce .[] as $i
- ( [];
- if $i | type == "array" and $x != 0
- then . + ($i | _flatten($x-1))
- else . + [$i]
- end);
-def flatten($x):
- if $x < 0
- then error("flatten depth must not be negative")
- else _flatten($x) end;
-def flatten: _flatten(-1);
-def min: min_by(.);
def min_by(f): _min_by(map([f]));
-def max: max_by(.);
def max_by(f): _max_by(map([f]));
-def sort: sort_by(.);
def sort_by(f): _sort_by(map([f]));
def group_by(f): _group_by(map([f]));
-def unique: unique_by(.);
def unique_by(f): _unique_by(map([f]));
def arrays: select(type == "array");
def objects: select(type == "object");
-def iterables: select(type |. == "array" or . == "object");
+def iterables: select(type | . == "array" or . == "object");
def booleans: select(type == "boolean");
def numbers: select(type == "number");
def finites: select(isfinite);
@@ -60,98 +36,44 @@ def normals: select(isnormal);
def strings: select(type == "string");
def nulls: select(. == null);
def values: select(. != null);
-def scalars: select(type |. != "array" and . != "object");
-def leaf_paths: paths(scalars);
+def scalars: select(type | . != "array" and . != "object");
-def indices($x):
- if type == "array" and ($x|type) == "array" then .[$x]
- elif type == "array" then .[[$x]]
- elif type == "string" and ($x|type) == "string" then explode | .[$x|explode]
- else .[$x] end;
-def index($x): indices($x) | .[0];
-def rindex($x): indices($x) | .[-1:][0];
def inside(xs): . as $x | xs | contains($x);
-def startswith($x):
- if type == "string" then
- if $x|type == "string" then
- .[:$x | length] == $x
- else
- $x | _type_error("startswith")
- end
- else
- _type_error("startswith")
- end;
-def endswith($x):
- if type == "string" then
- if $x|type == "string" then
- .[- ($x | length):] == $x
- else
- $x | _type_error("endswith")
- end
- else
- _type_error("endswith")
- end;
-def ltrimstr($x):
- if type == "string" and ($x|type == "string") and startswith($x) then
- .[$x | length:]
- end;
-def rtrimstr($x):
- if type == "string" and ($x|type == "string") and endswith($x) then
- .[:- ($x | length)]
- end;
-
def combinations:
if length == 0 then
[]
else
- .[0][] as $x | (.[1:] | combinations) as $y | [$x] + $y
+ .[0][] as $x | [$x] + (.[1:] | combinations)
end;
-def combinations(n):
- . as $dot | [range(n) | $dot] | combinations;
-def join($x): reduce .[] as $i (null;
- (if . == null then "" else . + $x end) +
- ($i | if type == "boolean" or type == "number" then tostring else . // "" end)
- ) // "";
-def ascii_downcase:
- explode | map(if 65 <= . and . <= 90 then . + 32 end) | implode;
-def ascii_upcase:
- explode | map(if 97 <= . and . <= 122 then . - 32 end) | implode;
+def combinations(n): [limit(n; repeat(.))] | combinations;
def walk(f):
- . as $in
- | if type == "object" then
- reduce keys[] as $key ({}; . + { ($key): ($in[$key] | walk(f)) }) | f
- elif type == "array" then
- map(walk(f)) | f
- else
- f
- end;
-def transpose:
- if . == [] then
- []
- else
- . as $in
- | (map(length) | max) as $max
- | length as $length
- | reduce range(0; $max) as $j
- ([]; . + [reduce range(0; $length) as $i ([]; . + [ $in[$i][$j] ] )] )
- end;
+ def _walk:
+ if type == "array" then
+ map(_walk)
+ elif type == "object" then
+ map_values(_walk)
+ end | f;
+ _walk;
def first: .[0];
def first(g): label $out | g | ., break $out;
def last: .[-1];
def last(g): reduce g as $item (null; $item);
-def isempty(g): first((g|false), true);
-def all: all(.[]; .);
+def isempty(g): label $out | (g | false, break $out), true;
+def all: all(.);
def all(y): all(.[]; y);
-def all(g; y): isempty(g|y and empty);
-def any: any(.[]; .);
+def all(g; y): isempty(g | select(y | not));
+def any: any(.);
def any(y): any(.[]; y);
-def any(g; y): isempty(g|y or empty) | not;
+def any(g; y): isempty(g | select(y)) | not;
def limit($n; g):
if $n > 0 then
- label $out
- | foreach g as $item
- ($n; .-1; $item, if . <= 0 then break $out else empty end)
+ label $out |
+ foreach g as $item (
+ $n;
+ . - 1;
+ $item, if . <= 0 then break $out else empty end
+ )
elif $n == 0 then
empty
else
@@ -162,72 +84,79 @@ def nth($n; g):
if $n < 0 then
error("nth doesn't support negative indices")
else
- label $out
- | foreach g as $item
- ($n; .-1; . < 0 or empty|$item, break $out)
+ label $out |
+ foreach g as $item (
+ $n + 1;
+ . - 1;
+ if . <= 0 then $item, break $out else empty end
+ )
end;
def truncate_stream(f):
- . as $n | null | f | . as $input
- | if (.[0] | length) > $n then setpath([0]; $input[0][$n:]) else empty end;
+ . as $n | null | f |
+ if .[0] | length > $n then .[0] |= .[$n:] else empty end;
def fromstream(f):
- { x: null, e: false } as $init
- | foreach f as $i
- ( $init;
- if .e then $init else . end
- | if $i | length == 2
- then setpath(["e"]; $i[0] | length==0) | setpath(["x"] + $i[0]; $i[1])
- else setpath(["e"]; $i[0] | length==1) end;
- if .e then .x else empty end);
+ { x: null, e: false } as $init |
+ foreach f as $i (
+ $init;
+ if .e then $init end |
+ if $i | length == 2 then
+ setpath(["e"]; $i[0] | length == 0) |
+ setpath(["x"] + $i[0]; $i[1])
+ else
+ setpath(["e"]; $i[0] | length == 1)
+ end;
+ if .e then .x else empty end
+ );
def tostream:
- path(def r: (.[]? | r), .; r) as $p
- | getpath($p)
- | reduce path(.[]?) as $q ([$p, .]; [$p + $q]);
+ path(def r: (.[]? | r), .; r) as $p |
+ getpath($p) |
+ reduce path(.[]?) as $q ([$p, .]; [$p + $q]);
-def _assign(ps; $v):
- reduce path(ps) as $p (.; setpath($p; $v));
-def _modify(ps; f):
- reduce path(ps) as $p
- ([., []]; label $out | (([0] + $p) as $q | setpath($q; getpath($q) | f) | ., break $out), .[1] += [$p])
- | . as $x | $x[0] | delpaths($x[1]);
def map_values(f): .[] |= f;
def del(f): delpaths([path(f)]);
-def paths:
- path(recurse(if (type | . == "array" or . == "object") then .[] else empty end))
- | select(length > 0);
-def paths(f):
- . as $x | paths | select(. as $p | $x | getpath($p) | f);
+def paths: path(..) | select(. != []);
+def paths(f): path(.. | select(f)) | select(. != []);
+def pick(f): . as $v |
+ reduce path(f) as $p (null; setpath($p; $v | getpath($p)));
-def fromdateiso8601: strptime("%Y-%m-%dT%H:%M:%SZ") | mktime;
+def fromdateiso8601: strptime("%Y-%m-%dT%H:%M:%S%z") | mktime;
def todateiso8601: strftime("%Y-%m-%dT%H:%M:%SZ");
def fromdate: fromdateiso8601;
def todate: todateiso8601;
def match($re): match($re; null);
-def match($re; $flags): _match($re; $flags; false) | .[];
+def match($re; $flags): _match($re; $flags; false)[];
def test($re): test($re; null);
def test($re; $flags): _match($re; $flags; true);
def capture($re): capture($re; null);
-def capture($re; $flags):
- match($re; $flags)
- | [.captures | .[] | select(.name != null) | { (.name): .string }]
- | add // {};
+def capture($re; $flags): match($re; $flags) | _capture;
def scan($re): scan($re; null);
def scan($re; $flags):
- match($re; "g" + $flags)
- | if (.captures|length > 0) then [ .captures | .[] | .string ] else .string end;
+ match($re; $flags + "g") |
+ if .captures == [] then
+ .string
+ else
+ [.captures[].string]
+ end;
def splits($re): splits($re; null);
-def splits($re; $flags): split($re; $flags) | .[];
+def splits($re; $flags): split($re; $flags)[];
def sub($re; str): sub($re; str; null);
def sub($re; str; $flags):
- . as $in
- | reduce match($re; $flags) as $r
- ( ["", 0];
- . as $x
- | [$r | .captures | .[] | select(.name != null) | { (.name) : .string }]
- | add // {}
- | [$x[0] + $in[$x[1]:$r.offset] + str, $r.offset + $r.length] )
- | .[0] + $in[.[1]:];
+ . as $str |
+ def _sub:
+ if .matches == [] then
+ $str[:.offset] + .string
+ else
+ .matches[-1] as $r |
+ {
+ string: ($r | _capture | str) + $str[$r.offset+$r.length:.offset] + .string,
+ offset: $r.offset,
+ matches: .matches[:-1],
+ } |
+ _sub
+ end;
+ { string: "", matches: [match($re; $flags)] } | _sub;
def gsub($re; str): sub($re; str; "g");
def gsub($re; str; $flags): sub($re; str; $flags + "g");
@@ -238,8 +167,9 @@ def inputs:
if . == "break" then empty else error end;
def INDEX(stream; idx_expr):
- reduce stream as $row ({}; .[$row|idx_expr|tostring] = $row);
-def INDEX(idx_expr): INDEX(.[]; idx_expr);
+ reduce stream as $row ({}; .[$row | idx_expr | tostring] = $row);
+def INDEX(idx_expr):
+ INDEX(.[]; idx_expr);
def JOIN($idx; idx_expr):
[.[] | [., $idx[idx_expr]]];
def JOIN($idx; stream; idx_expr):
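The rewritten jq builtins above change several filters (`nth`, `paths`, `scan`, `sub`) and add `pick`. A minimal sketch of exercising the new `pick` builtin through the vendored gojq API (import path taken from the diff headers in this bump); the query and input values here are illustrative only:

```go
package main

import (
	"fmt"
	"log"

	"github.com/itchyny/gojq"
)

func main() {
	// pick(f) is the builtin added above: it rebuilds a value keeping only
	// the paths selected by f.
	query, err := gojq.Parse("pick(.a, .c[0])")
	if err != nil {
		log.Fatalln(err)
	}
	input := map[string]any{"a": 1, "b": 2, "c": []any{3, 4}}
	iter := query.Run(input)
	for {
		v, ok := iter.Next()
		if !ok {
			break
		}
		if err, ok := v.(error); ok {
			log.Fatalln(err)
		}
		fmt.Println(v) // map[a:1 c:[3]]
	}
}
```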
diff --git a/vendor/github.com/itchyny/gojq/code.go b/vendor/github.com/itchyny/gojq/code.go
index 963350b7f..33505bde0 100644
--- a/vendor/github.com/itchyny/gojq/code.go
+++ b/vendor/github.com/itchyny/gojq/code.go
@@ -1,7 +1,7 @@
package gojq
type code struct {
- v interface{}
+ v any
op opcode
}
@@ -25,17 +25,19 @@ const (
opbacktrack
opjump
opjumpifnot
+ opindex
+ opindexarray
opcall
+ opcallrec
oppushpc
opcallpc
opscope
opret
- opeach
+ opiter
opexpbegin
opexpend
oppathbegin
oppathend
- opdebug
)
func (op opcode) String() string {
@@ -74,8 +76,14 @@ func (op opcode) String() string {
return "jump"
case opjumpifnot:
return "jumpifnot"
+ case opindex:
+ return "index"
+ case opindexarray:
+ return "indexarray"
case opcall:
return "call"
+ case opcallrec:
+ return "callrec"
case oppushpc:
return "pushpc"
case opcallpc:
@@ -84,8 +92,8 @@ func (op opcode) String() string {
return "scope"
case opret:
return "ret"
- case opeach:
- return "each"
+ case opiter:
+ return "iter"
case opexpbegin:
return "expbegin"
case opexpend:
@@ -94,8 +102,6 @@ func (op opcode) String() string {
return "pathbegin"
case oppathend:
return "pathend"
- case opdebug:
- return "debug"
default:
panic(op)
}
diff --git a/vendor/github.com/itchyny/gojq/compare.go b/vendor/github.com/itchyny/gojq/compare.go
index 9f0d53382..6ab22754a 100644
--- a/vendor/github.com/itchyny/gojq/compare.go
+++ b/vendor/github.com/itchyny/gojq/compare.go
@@ -5,19 +5,13 @@ import (
"math/big"
)
-func compare(l, r interface{}) int {
+// Compare l and r, and returns jq-flavored comparison value.
+// The result will be 0 if l == r, -1 if l < r, and +1 if l > r.
+// This comparison is used by built-in operators and functions.
+func Compare(l, r any) int {
return binopTypeSwitch(l, r,
- func(l, r int) interface{} {
- switch {
- case l < r:
- return -1
- case l == r:
- return 0
- default:
- return 1
- }
- },
- func(l, r float64) interface{} {
+ compareInt,
+ func(l, r float64) any {
switch {
case l < r || math.IsNaN(l):
return -1
@@ -27,10 +21,10 @@ func compare(l, r interface{}) int {
return 1
}
},
- func(l, r *big.Int) interface{} {
+ func(l, r *big.Int) any {
return l.Cmp(r)
},
- func(l, r string) interface{} {
+ func(l, r string) any {
switch {
case l < r:
return -1
@@ -40,66 +34,63 @@ func compare(l, r interface{}) int {
return 1
}
},
- func(l, r []interface{}) interface{} {
- for i := 0; ; i++ {
- if i >= len(l) {
- if i >= len(r) {
- return 0
- }
- return -1
- }
- if i >= len(r) {
- return 1
- }
- if cmp := compare(l[i], r[i]); cmp != 0 {
+ func(l, r []any) any {
+ n := len(l)
+ if len(r) < n {
+ n = len(r)
+ }
+ for i := 0; i < n; i++ {
+ if cmp := Compare(l[i], r[i]); cmp != 0 {
return cmp
}
}
+ return compareInt(len(l), len(r))
},
- func(l, r map[string]interface{}) interface{} {
+ func(l, r map[string]any) any {
lk, rk := funcKeys(l), funcKeys(r)
- if cmp := compare(lk, rk); cmp != 0 {
+ if cmp := Compare(lk, rk); cmp != 0 {
return cmp
}
- for _, k := range lk.([]interface{}) {
- if cmp := compare(l[k.(string)], r[k.(string)]); cmp != 0 {
+ for _, k := range lk.([]any) {
+ if cmp := Compare(l[k.(string)], r[k.(string)]); cmp != 0 {
return cmp
}
}
return 0
},
- func(l, r interface{}) interface{} {
- ln, rn := getTypeOrdNum(l), getTypeOrdNum(r)
- switch {
- case ln < rn:
- return -1
- case ln == rn:
- return 0
- default:
- return 1
- }
+ func(l, r any) any {
+ return compareInt(typeIndex(l), typeIndex(r))
},
).(int)
}
-func getTypeOrdNum(v interface{}) int {
+func compareInt(l, r int) any {
+ switch {
+ case l < r:
+ return -1
+ case l == r:
+ return 0
+ default:
+ return 1
+ }
+}
+
+func typeIndex(v any) int {
switch v := v.(type) {
- case nil:
+ default:
return 0
case bool:
- if v {
- return 2
+ if !v {
+ return 1
}
- return 1
+ return 2
case int, float64, *big.Int:
return 3
case string:
return 4
- case []interface{}:
+ case []any:
return 5
- case map[string]interface{}:
+ case map[string]any:
return 6
- default:
- return -1
}
}
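With `compare` renamed to the exported `Compare` above, callers of the vendored package can rely on jq's value ordering directly. A small sketch of what the documented ordering implies, using only the semantics stated in the hunk:

```go
package main

import (
	"fmt"

	"github.com/itchyny/gojq"
)

func main() {
	// Ordering per typeIndex above: null < false < true < numbers < strings
	// < arrays < objects; equal values compare to 0.
	fmt.Println(gojq.Compare(nil, false))                         // -1
	fmt.Println(gojq.Compare(42, 42.0))                           // 0 (numbers compare by value)
	fmt.Println(gojq.Compare([]any{1, "a"}, []any{1, "a", true})) // -1 (shorter prefix-equal array is smaller)
}
```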
diff --git a/vendor/github.com/itchyny/gojq/compiler.go b/vendor/github.com/itchyny/gojq/compiler.go
index 879493817..b42517f80 100644
--- a/vendor/github.com/itchyny/gojq/compiler.go
+++ b/vendor/github.com/itchyny/gojq/compiler.go
@@ -2,7 +2,6 @@ package gojq
import (
"context"
- "encoding/json"
"errors"
"fmt"
"sort"
@@ -18,9 +17,9 @@ type compiler struct {
inputIter Iter
codes []*code
codeinfos []codeinfo
+ builtinScope *scopeinfo
scopes []*scopeinfo
scopecnt int
- funcs []*funcinfo
}
// Code is a compiled jq query.
@@ -31,20 +30,21 @@ type Code struct {
}
// Run runs the code with the variable values (which should be in the
-// same order as the given variables using WithVariables) and returns
+// same order as the given variables using [WithVariables]) and returns
// a result iterator.
//
-// It is safe to call this method of a *Code in multiple goroutines.
-func (c *Code) Run(v interface{}, values ...interface{}) Iter {
- return c.RunWithContext(nil, v, values...)
+// It is safe to call this method in goroutines, to reuse a compiled [*Code].
+// But for arguments, do not give values sharing same data between goroutines.
+func (c *Code) Run(v any, values ...any) Iter {
+ return c.RunWithContext(context.Background(), v, values...)
}
// RunWithContext runs the code with context.
-func (c *Code) RunWithContext(ctx context.Context, v interface{}, values ...interface{}) Iter {
+func (c *Code) RunWithContext(ctx context.Context, v any, values ...any) Iter {
if len(values) > len(c.variables) {
- return unitIterator(&tooManyVariableValuesError{})
+ return NewIter(&tooManyVariableValuesError{})
} else if len(values) < len(c.variables) {
- return unitIterator(&expectedVariableError{c.variables[len(values)]})
+ return NewIter(&expectedVariableError{c.variables[len(values)]})
}
for i, v := range values {
values[i] = normalizeNumbers(v)
@@ -52,26 +52,11 @@ func (c *Code) RunWithContext(ctx context.Context, v interface{}, values ...inte
return newEnv(ctx).execute(c, normalizeNumbers(v), values...)
}
-// ModuleLoader is an interface for loading modules.
-//
-// Implement following optional methods. Use NewModuleLoader to load local modules.
-// LoadModule(string) (*Query, error)
-// LoadModuleWithMeta(string, map[string]interface{}) (*Query, error)
-// LoadInitModules() ([]*Query, error)
-// LoadJSON(string) (interface{}, error)
-// LoadJSONWithMeta(string, map[string]interface{}) (interface{}, error)
-type ModuleLoader interface {
-}
-
-type codeinfo struct {
- name string
- pc int
-}
-
type scopeinfo struct {
+ variables []*varinfo
+ funcs []*funcinfo
id int
depth int
- variables []varinfo
variablecnt int
}
@@ -82,10 +67,9 @@ type varinfo struct {
}
type funcinfo struct {
- name string
- pc int
- args []string
- argsorder []int
+ name string
+ pc int
+ argcnt int
}
// Compile compiles a query.
@@ -94,11 +78,19 @@ func Compile(q *Query, options ...CompilerOption) (*Code, error) {
for _, opt := range options {
opt(c)
}
+ c.builtinScope = c.newScope()
scope := c.newScope()
c.scopes = []*scopeinfo{scope}
- defer c.lazy(func() *code {
- return &code{op: opscope, v: [2]int{scope.id, scope.variablecnt}}
- })()
+ setscope := c.lazy(func() *code {
+ return &code{op: opscope, v: [3]int{scope.id, scope.variablecnt, 0}}
+ })
+ for _, name := range c.variables {
+ if !newLexer(name).validVarName() {
+ return nil, &variableNameError{name}
+ }
+ c.appendCodeInfo(name)
+ c.append(&code{op: opstore, v: c.pushVariable(name)})
+ }
if c.moduleLoader != nil {
if moduleLoader, ok := c.moduleLoader.(interface {
LoadInitModules() ([]*Query, error)
@@ -117,8 +109,9 @@ func Compile(q *Query, options ...CompilerOption) (*Code, error) {
if err := c.compile(q); err != nil {
return nil, err
}
+ setscope()
c.optimizeTailRec()
- c.optimizeJumps()
+ c.optimizeCodeOps()
return &Code{
variables: c.variables,
codes: c.codes,
@@ -127,13 +120,6 @@ func Compile(q *Query, options ...CompilerOption) (*Code, error) {
}
func (c *compiler) compile(q *Query) error {
- for _, name := range c.variables {
- if !newLexer(name).validVarName() {
- return &variableNameError{name}
- }
- v := c.pushVariable(name)
- c.append(&code{op: opstore, v: v})
- }
for _, i := range q.Imports {
if err := c.compileImport(i); err != nil {
return err
@@ -158,15 +144,15 @@ func (c *compiler) compileImport(i *Import) error {
return fmt.Errorf("cannot load module: %q", path)
}
if strings.HasPrefix(alias, "$") {
- var vals interface{}
+ var vals any
if moduleLoader, ok := c.moduleLoader.(interface {
- LoadJSONWithMeta(string, map[string]interface{}) (interface{}, error)
+ LoadJSONWithMeta(string, map[string]any) (any, error)
}); ok {
if vals, err = moduleLoader.LoadJSONWithMeta(path, i.Meta.ToValue()); err != nil {
return err
}
} else if moduleLoader, ok := c.moduleLoader.(interface {
- LoadJSON(string) (interface{}, error)
+ LoadJSON(string) (any, error)
}); ok {
if vals, err = moduleLoader.LoadJSON(path); err != nil {
return err
@@ -183,7 +169,7 @@ func (c *compiler) compileImport(i *Import) error {
}
var q *Query
if moduleLoader, ok := c.moduleLoader.(interface {
- LoadModuleWithMeta(string, map[string]interface{}) (*Query, error)
+ LoadModuleWithMeta(string, map[string]any) (*Query, error)
}); ok {
if q, err = moduleLoader.LoadModuleWithMeta(path, i.Meta.ToValue()); err != nil {
return err
@@ -196,8 +182,11 @@ func (c *compiler) compileImport(i *Import) error {
}
}
c.appendCodeInfo("module " + path)
- defer c.appendCodeInfo("end of module " + path)
- return c.compileModule(q, alias)
+ if err = c.compileModule(q, alias); err != nil {
+ return err
+ }
+ c.appendCodeInfo("end of module " + path)
+ return nil
}
func (c *compiler) compileModule(q *Query, alias string) error {
@@ -209,10 +198,10 @@ func (c *compiler) compileModule(q *Query, alias string) error {
}(len(scope.variables))
if alias != "" {
defer func(l int) {
- for _, f := range c.funcs[l:] {
+ for _, f := range scope.funcs[l:] {
f.name = alias + "::" + f.name
}
- }(len(c.funcs))
+ }(len(scope.funcs))
}
for _, i := range q.Imports {
if err := c.compileImport(i); err != nil {
@@ -228,104 +217,160 @@ func (c *compiler) compileModule(q *Query, alias string) error {
}
func (c *compiler) newVariable() [2]int {
- return c.pushVariable("")
+ return c.createVariable("")
}
func (c *compiler) pushVariable(name string) [2]int {
s := c.scopes[len(c.scopes)-1]
- if name != "" {
- for _, v := range s.variables {
- if v.name == name && v.depth == s.depth {
- return v.index
- }
+ for _, v := range s.variables {
+ if v.name == name && v.depth == s.depth {
+ return v.index
}
}
+ return c.createVariable(name)
+}
+
+func (c *compiler) createVariable(name string) [2]int {
+ s := c.scopes[len(c.scopes)-1]
v := [2]int{s.id, s.variablecnt}
s.variablecnt++
- s.variables = append(s.variables, varinfo{name, v, s.depth})
+ s.variables = append(s.variables, &varinfo{name, v, s.depth})
return v
}
+func (c *compiler) lookupVariable(name string) ([2]int, error) {
+ for i := len(c.scopes) - 1; i >= 0; i-- {
+ s := c.scopes[i]
+ for j := len(s.variables) - 1; j >= 0; j-- {
+ if w := s.variables[j]; w.name == name {
+ return w.index, nil
+ }
+ }
+ }
+ return [2]int{}, &variableNotFoundError{name}
+}
+
+func (c *compiler) lookupFuncOrVariable(name string) (*funcinfo, *varinfo) {
+ for i, isFunc := len(c.scopes)-1, name[0] != '$'; i >= 0; i-- {
+ s := c.scopes[i]
+ if isFunc {
+ for j := len(s.funcs) - 1; j >= 0; j-- {
+ if f := s.funcs[j]; f.name == name && f.argcnt == 0 {
+ return f, nil
+ }
+ }
+ }
+ for j := len(s.variables) - 1; j >= 0; j-- {
+ if v := s.variables[j]; v.name == name {
+ return nil, v
+ }
+ }
+ }
+ return nil, nil
+}
+
+func (c *compiler) lookupBuiltin(name string, argcnt int) *funcinfo {
+ s := c.builtinScope
+ for i := len(s.funcs) - 1; i >= 0; i-- {
+ if f := s.funcs[i]; f.name == name && f.argcnt == argcnt {
+ return f
+ }
+ }
+ return nil
+}
+
+func (c *compiler) appendBuiltin(name string, argcnt int) func() {
+ setjump := c.lazy(func() *code {
+ return &code{op: opjump, v: len(c.codes)}
+ })
+ c.appendCodeInfo(name)
+ c.builtinScope.funcs = append(
+ c.builtinScope.funcs,
+ &funcinfo{name, len(c.codes), argcnt},
+ )
+ return func() {
+ setjump()
+ c.appendCodeInfo("end of " + name)
+ }
+}
+
func (c *compiler) newScope() *scopeinfo {
i := c.scopecnt // do not use len(c.scopes) because it pops
c.scopecnt++
- return &scopeinfo{i, 0, nil, 0}
+ return &scopeinfo{id: i}
}
func (c *compiler) newScopeDepth() func() {
scope := c.scopes[len(c.scopes)-1]
- l, m := len(scope.variables), len(c.funcs)
+ l, m := len(scope.variables), len(scope.funcs)
scope.depth++
return func() {
scope.depth--
scope.variables = scope.variables[:l]
- c.funcs = c.funcs[:m]
+ scope.funcs = scope.funcs[:m]
}
}
func (c *compiler) compileFuncDef(e *FuncDef, builtin bool) error {
+ var scope *scopeinfo
if builtin {
- for i := len(c.funcs) - 1; i >= 0; i-- {
- if f := c.funcs[i]; f.name == e.Name && len(f.args) == len(e.Args) {
- return nil
- }
- }
+ scope = c.builtinScope
+ } else {
+ scope = c.scopes[len(c.scopes)-1]
}
defer c.lazy(func() *code {
- return &code{op: opjump, v: c.pc()}
+ return &code{op: opjump, v: len(c.codes)}
})()
c.appendCodeInfo(e.Name)
- defer c.appendCodeInfo("end of " + e.Name)
- pc, argsorder := c.pc(), getArgsOrder(e.Args)
- c.funcs = append(c.funcs, &funcinfo{e.Name, pc, e.Args, argsorder})
- defer func(l, m int, variables []string) {
- c.scopes, c.funcs, c.variables = c.scopes[:l], c.funcs[:m], variables
- }(len(c.scopes), len(c.funcs), c.variables)
+ scope.funcs = append(scope.funcs, &funcinfo{e.Name, len(c.codes), len(e.Args)})
+ defer func(scopes []*scopeinfo, variables []string) {
+ c.scopes, c.variables = scopes, variables
+ }(c.scopes, c.variables)
c.variables = c.variables[len(c.variables):]
- scope := c.newScope()
- c.scopes = append(c.scopes, scope)
+ scope = c.newScope()
+ if builtin {
+ c.scopes = []*scopeinfo{c.builtinScope, scope}
+ } else {
+ c.scopes = append(c.scopes, scope)
+ }
defer c.lazy(func() *code {
- return &code{op: opscope, v: [2]int{scope.id, scope.variablecnt}}
+ return &code{op: opscope, v: [3]int{scope.id, scope.variablecnt, len(e.Args)}}
})()
if len(e.Args) > 0 {
+ type varIndex struct {
+ name string
+ index [2]int
+ }
+ vis := make([]varIndex, 0, len(e.Args))
v := c.newVariable()
c.append(&code{op: opstore, v: v})
- skip := make([]bool, len(e.Args))
- for i, name := range e.Args {
- for j := 0; j < i; j++ {
- if name == e.Args[j] {
- skip[j] = true
- break
- }
- }
- }
- for _, i := range argsorder {
- if skip[i] {
- c.append(&code{op: oppop})
+ for _, arg := range e.Args {
+ if arg[0] == '$' {
+ c.appendCodeInfo(arg[1:])
+ w := c.createVariable(arg[1:])
+ c.append(&code{op: opstore, v: w})
+ vis = append(vis, varIndex{arg, w})
} else {
- c.append(&code{op: opstore, v: c.pushVariable(e.Args[i])})
+ c.appendCodeInfo(arg)
+ c.append(&code{op: opstore, v: c.createVariable(arg)})
}
}
+ for _, w := range vis {
+ c.append(&code{op: opload, v: v})
+ c.append(&code{op: opexpbegin})
+ c.append(&code{op: opload, v: w.index})
+ c.append(&code{op: opcallpc})
+ c.appendCodeInfo(w.name)
+ c.append(&code{op: opstore, v: c.pushVariable(w.name)})
+ c.append(&code{op: opexpend})
+ }
c.append(&code{op: opload, v: v})
}
- return c.compile(e.Body)
-}
-
-func getArgsOrder(args []string) []int {
- xs := make([]int, len(args))
- if len(xs) > 0 {
- for i := range xs {
- xs[i] = i
- }
- sort.Slice(xs, func(i, j int) bool {
- xi, xj := xs[i], xs[j]
- if args[xi][0] == '$' {
- return args[xj][0] == '$' && xi > xj // reverse the order of variables
- }
- return args[xj][0] == '$' || xi < xj
- })
+ if err := c.compile(e.Body); err != nil {
+ return err
}
- return xs
+ c.appendCodeInfo("end of " + e.Name)
+ return nil
}
func (c *compiler) compileQuery(e *Query) error {
@@ -353,6 +398,8 @@ func (c *compiler) compileQuery(e *Query) error {
return c.compileTerm(e.Term)
}
switch e.Op {
+ case Operator(0):
+ return errors.New(`missing query (try ".")`)
case OpPipe:
if err := c.compileQuery(e.Left); err != nil {
return err
@@ -399,15 +446,15 @@ func (c *compiler) compileQuery(e *Query) error {
func (c *compiler) compileComma(l, r *Query) error {
setfork := c.lazy(func() *code {
- return &code{op: opfork, v: c.pc() + 1}
+ return &code{op: opfork, v: len(c.codes)}
})
if err := c.compileQuery(l); err != nil {
return err
}
- setfork()
defer c.lazy(func() *code {
- return &code{op: opjump, v: c.pc()}
+ return &code{op: opjump, v: len(c.codes)}
})()
+ setfork()
return c.compileQuery(r)
}
@@ -416,33 +463,34 @@ func (c *compiler) compileAlt(l, r *Query) error {
found := c.newVariable()
c.append(&code{op: opstore, v: found})
setfork := c.lazy(func() *code {
- return &code{op: opfork, v: c.pc()} // opload found
+ return &code{op: opfork, v: len(c.codes)} // opload found
})
if err := c.compileQuery(l); err != nil {
return err
}
c.append(&code{op: opdup})
- c.append(&code{op: opjumpifnot, v: c.pc() + 4}) // oppop
- c.append(&code{op: oppush, v: true}) // found some value
+ c.append(&code{op: opjumpifnot, v: len(c.codes) + 4}) // oppop
+ c.append(&code{op: oppush, v: true}) // found some value
c.append(&code{op: opstore, v: found})
defer c.lazy(func() *code {
- return &code{op: opjump, v: c.pc()} // ret
+ return &code{op: opjump, v: len(c.codes)}
})()
c.append(&code{op: oppop})
c.append(&code{op: opbacktrack})
setfork()
c.append(&code{op: opload, v: found})
- c.append(&code{op: opjumpifnot, v: c.pc() + 3})
+ c.append(&code{op: opjumpifnot, v: len(c.codes) + 3})
c.append(&code{op: opbacktrack}) // if found, backtrack
c.append(&code{op: oppop})
return c.compileQuery(r)
}
-func (c *compiler) compileQueryUpdate(l, r *Query, op Operator) (err error) {
+func (c *compiler) compileQueryUpdate(l, r *Query, op Operator) error {
switch op {
case OpAssign:
- // .foo.bar = f => setpath(["foo", "bar"]; f)
- if xs := l.toIndices(); xs != nil {
+ // optimize assignment operator with constant indexing and slicing
+ // .foo.[0].[1:2] = f => setpath(["foo",0,{"start":1,"end":2}]; f)
+ if xs := l.toIndices(nil); xs != nil {
// ref: compileCall
v := c.newVariable()
c.append(&code{op: opstore, v: v})
@@ -452,7 +500,7 @@ func (c *compiler) compileQueryUpdate(l, r *Query, op Operator) (err error) {
}
c.append(&code{op: oppush, v: xs})
c.append(&code{op: opload, v: v})
- c.append(&code{op: opcall, v: [3]interface{}{internalFuncs["setpath"].callback, 2, "setpath"}})
+ c.append(&code{op: opcall, v: [3]any{internalFuncs["setpath"].callback, 2, "setpath"}})
return nil
}
fallthrough
@@ -491,7 +539,13 @@ func (c *compiler) compileQueryUpdate(l, r *Query, op Operator) (err error) {
}
}
-func (c *compiler) compileBind(b *Bind) error {
+func (c *compiler) compileBind(e *Term, b *Bind) error {
+ defer c.newScopeDepth()()
+ c.append(&code{op: opdup})
+ c.append(&code{op: opexpbegin})
+ if err := c.compileTerm(e); err != nil {
+ return err
+ }
var pc int
var vs [][2]int
for i, p := range b.Patterns {
@@ -508,98 +562,86 @@ func (c *compiler) compileBind(b *Bind) error {
c.append(&code{op: opstore, v: v})
}
}
- vs, err = c.compilePattern(p)
- if err != nil {
+ if vs, err = c.compilePattern(vs[:0], p); err != nil {
return err
}
if i < len(b.Patterns)-1 {
defer c.lazy(func() *code {
return &code{op: opjump, v: pc}
})()
- pcc = c.pc()
+ pcc = len(c.codes)
}
}
if len(b.Patterns) > 1 {
- pc = c.pc()
+ pc = len(c.codes)
}
- if c.codes[len(c.codes)-2].op == opexpbegin {
- c.codes[len(c.codes)-2] = c.codes[len(c.codes)-1]
- c.codes = c.codes[:len(c.codes)-1]
+ if len(b.Patterns) == 1 && c.codes[len(c.codes)-2].op == opexpbegin {
+ c.codes[len(c.codes)-2].op = opnop
} else {
- c.append(&code{op: opexpend}) // ref: compileQuery
+ c.append(&code{op: opexpend})
}
return c.compileQuery(b.Body)
}
-func (c *compiler) compilePattern(p *Pattern) ([][2]int, error) {
+func (c *compiler) compilePattern(vs [][2]int, p *Pattern) ([][2]int, error) {
+ var err error
c.appendCodeInfo(p)
if p.Name != "" {
v := c.pushVariable(p.Name)
c.append(&code{op: opstore, v: v})
- return [][2]int{v}, nil
+ return append(vs, v), nil
} else if len(p.Array) > 0 {
- var vs [][2]int
v := c.newVariable()
c.append(&code{op: opstore, v: v})
for i, p := range p.Array {
- c.append(&code{op: oppush, v: i})
- c.append(&code{op: opload, v: v})
c.append(&code{op: opload, v: v})
- // ref: compileCall
- c.append(&code{op: opcall, v: [3]interface{}{internalFuncs["_index"].callback, 2, "_index"}})
- ns, err := c.compilePattern(p)
- if err != nil {
+ c.append(&code{op: opindexarray, v: i})
+ if vs, err = c.compilePattern(vs, p); err != nil {
return nil, err
}
- vs = append(vs, ns...)
}
return vs, nil
} else if len(p.Object) > 0 {
- var vs [][2]int
v := c.newVariable()
c.append(&code{op: opstore, v: v})
for _, kv := range p.Object {
var key, name string
- if kv.KeyOnly != "" {
- key, name = kv.KeyOnly[1:], kv.KeyOnly
- c.append(&code{op: oppush, v: key})
- } else if kv.Key != "" {
- key = kv.Key
- if key != "" && key[0] == '$' {
+ c.append(&code{op: opload, v: v})
+ if key = kv.Key; key != "" {
+ if key[0] == '$' {
key, name = key[1:], key
}
- c.append(&code{op: oppush, v: key})
} else if kv.KeyString != nil {
- c.append(&code{op: opload, v: v})
- if err := c.compileString(kv.KeyString, nil); err != nil {
- return nil, err
+ if key = kv.KeyString.Str; key == "" {
+ if err := c.compileString(kv.KeyString, nil); err != nil {
+ return nil, err
+ }
}
} else if kv.KeyQuery != nil {
- c.append(&code{op: opload, v: v})
if err := c.compileQuery(kv.KeyQuery); err != nil {
return nil, err
}
}
- c.append(&code{op: opload, v: v})
- c.append(&code{op: opload, v: v})
- // ref: compileCall
- c.append(&code{op: opcall, v: [3]interface{}{internalFuncs["_index"].callback, 2, "_index"}})
+ if key != "" {
+ c.append(&code{op: opindex, v: key})
+ } else {
+ c.append(&code{op: opload, v: v})
+ c.append(&code{op: oppush, v: nil})
+ // ref: compileCall
+ c.append(&code{op: opcall, v: [3]any{internalFuncs["_index"].callback, 2, "_index"}})
+ }
if name != "" {
if kv.Val != nil {
c.append(&code{op: opdup})
}
- ns, err := c.compilePattern(&Pattern{Name: name})
- if err != nil {
+ if vs, err = c.compilePattern(vs, &Pattern{Name: name}); err != nil {
return nil, err
}
- vs = append(vs, ns...)
}
if kv.Val != nil {
- ns, err := c.compilePattern(kv.Val)
- if err != nil {
+ if vs, err = c.compilePattern(vs, kv.Val); err != nil {
return nil, err
}
- vs = append(vs, ns...)
}
}
return vs, nil
@@ -612,29 +654,47 @@ func (c *compiler) compileIf(e *If) error {
c.appendCodeInfo(e)
c.append(&code{op: opdup}) // duplicate the value for then or else clause
c.append(&code{op: opexpbegin})
+ pc := len(c.codes)
f := c.newScopeDepth()
if err := c.compileQuery(e.Cond); err != nil {
return err
}
f()
- c.append(&code{op: opexpend})
+ if pc == len(c.codes) {
+ c.codes = c.codes[:pc-1]
+ } else {
+ c.append(&code{op: opexpend})
+ }
+ pcc := len(c.codes)
setjumpifnot := c.lazy(func() *code {
- return &code{op: opjumpifnot, v: c.pc() + 1} // if falsy, skip then clause
+ return &code{op: opjumpifnot, v: len(c.codes)} // skip then clause
})
f = c.newScopeDepth()
if err := c.compileQuery(e.Then); err != nil {
return err
}
f()
- setjumpifnot()
defer c.lazy(func() *code {
- return &code{op: opjump, v: c.pc()} // jump to ret after else clause
+ return &code{op: opjump, v: len(c.codes)}
})()
+ setjumpifnot()
if len(e.Elif) > 0 {
return c.compileIf(&If{e.Elif[0].Cond, e.Elif[0].Then, e.Elif[1:], e.Else})
}
if e.Else != nil {
defer c.newScopeDepth()()
+ defer func() {
+ // optimize constant results
+ // opdup, ..., opjumpifnot, opconst, opjump, opconst
+ // => opnop, ..., opjumpifnot, oppush, opjump, oppush
+ if pcc+4 == len(c.codes) &&
+ c.codes[pcc+1] != nil && c.codes[pcc+1].op == opconst &&
+ c.codes[pcc+3] != nil && c.codes[pcc+3].op == opconst {
+ c.codes[pc-2].op = opnop
+ c.codes[pcc+1].op = oppush
+ c.codes[pcc+3].op = oppush
+ }
+ }()
return c.compileQuery(e.Else)
}
return nil
@@ -643,7 +703,7 @@ func (c *compiler) compileIf(e *If) error {
func (c *compiler) compileTry(e *Try) error {
c.appendCodeInfo(e)
setforktrybegin := c.lazy(func() *code {
- return &code{op: opforktrybegin, v: c.pc()}
+ return &code{op: opforktrybegin, v: len(c.codes)}
})
f := c.newScopeDepth()
if err := c.compileQuery(e.Body); err != nil {
@@ -652,7 +712,7 @@ func (c *compiler) compileTry(e *Try) error {
f()
c.append(&code{op: opforktryend})
defer c.lazy(func() *code {
- return &code{op: opjump, v: c.pc()}
+ return &code{op: opjump, v: len(c.codes)}
})()
setforktrybegin()
if e.Catch != nil {
@@ -666,9 +726,9 @@ func (c *compiler) compileTry(e *Try) error {
func (c *compiler) compileReduce(e *Reduce) error {
c.appendCodeInfo(e)
defer c.newScopeDepth()()
- defer c.lazy(func() *code {
- return &code{op: opfork, v: c.pc() - 2}
- })()
+ setfork := c.lazy(func() *code {
+ return &code{op: opfork, v: len(c.codes)}
+ })
c.append(&code{op: opdup})
v := c.newVariable()
f := c.newScopeDepth()
@@ -677,10 +737,10 @@ func (c *compiler) compileReduce(e *Reduce) error {
}
f()
c.append(&code{op: opstore, v: v})
- if err := c.compileTerm(e.Term); err != nil {
+ if err := c.compileQuery(e.Query); err != nil {
return err
}
- if _, err := c.compilePattern(e.Pattern); err != nil {
+ if _, err := c.compilePattern(nil, e.Pattern); err != nil {
return err
}
c.append(&code{op: opload, v: v})
@@ -691,6 +751,7 @@ func (c *compiler) compileReduce(e *Reduce) error {
f()
c.append(&code{op: opstore, v: v})
c.append(&code{op: opbacktrack})
+ setfork()
c.append(&code{op: oppop})
c.append(&code{op: opload, v: v})
return nil
@@ -707,10 +768,10 @@ func (c *compiler) compileForeach(e *Foreach) error {
}
f()
c.append(&code{op: opstore, v: v})
- if err := c.compileTerm(e.Term); err != nil {
+ if err := c.compileQuery(e.Query); err != nil {
return err
}
- if _, err := c.compilePattern(e.Pattern); err != nil {
+ if _, err := c.compilePattern(nil, e.Pattern); err != nil {
return err
}
c.append(&code{op: opload, v: v})
@@ -730,17 +791,33 @@ func (c *compiler) compileForeach(e *Foreach) error {
func (c *compiler) compileLabel(e *Label) error {
c.appendCodeInfo(e)
- defer c.lazy(func() *code {
- return &code{op: opforklabel, v: e.Ident}
- })()
+ v := c.pushVariable("$%" + e.Ident[1:])
+ c.append(&code{op: opforklabel, v: v})
return c.compileQuery(e.Body)
}
-func (c *compiler) compileTerm(e *Term) (err error) {
+func (c *compiler) compileBreak(label string) error {
+ v, err := c.lookupVariable("$%" + label[1:])
+ if err != nil {
+ return &breakError{label, nil}
+ }
+ c.append(&code{op: oppop})
+ c.append(&code{op: opload, v: v})
+ c.append(&code{op: opcall, v: [3]any{funcBreak(label), 0, "_break"}})
+ return nil
+}
+
+func funcBreak(label string) func(any, []any) any {
+ return func(v any, _ []any) any {
+ return &breakError{label, v}
+ }
+}
+
+func (c *compiler) compileTerm(e *Term) error {
if len(e.SuffixList) > 0 {
s := e.SuffixList[len(e.SuffixList)-1]
t := *e // clone without changing e
- (&t).SuffixList = t.SuffixList[:len(e.SuffixList)-1]
+ t.SuffixList = t.SuffixList[:len(e.SuffixList)-1]
return c.compileTermSuffix(&t, s)
}
switch e.Type {
@@ -766,11 +843,7 @@ func (c *compiler) compileTerm(e *Term) (err error) {
case TermTypeArray:
return c.compileArray(e.Array)
case TermTypeNumber:
- v := normalizeNumbers(json.Number(e.Number))
- if err, ok := v.(error); ok {
- return err
- }
- c.append(&code{op: opconst, v: v})
+ c.append(&code{op: opconst, v: toNumber(e.Number)})
return nil
case TermTypeUnary:
return c.compileUnary(e.Unary)
@@ -789,8 +862,7 @@ func (c *compiler) compileTerm(e *Term) (err error) {
case TermTypeLabel:
return c.compileLabel(e.Label)
case TermTypeBreak:
- c.append(&code{op: opconst, v: e.Break})
- return c.compileCall("_break", nil)
+ return c.compileBreak(e.Break)
case TermTypeQuery:
defer c.newScopeDepth()()
return c.compileQuery(e.Query)
@@ -800,78 +872,90 @@ func (c *compiler) compileTerm(e *Term) (err error) {
}
func (c *compiler) compileIndex(e *Term, x *Index) error {
- c.appendCodeInfo(x)
- if x.Name != "" {
- return c.compileCall("_index", []*Query{{Term: e}, {Term: &Term{Type: TermTypeString, Str: &String{Str: x.Name}}}})
+ if k := x.toIndexKey(); k != nil {
+ if err := c.compileTerm(e); err != nil {
+ return err
+ }
+ c.appendCodeInfo(x)
+ c.append(&code{op: opindex, v: k})
+ return nil
}
+ c.appendCodeInfo(x)
if x.Str != nil {
return c.compileCall("_index", []*Query{{Term: e}, {Term: &Term{Type: TermTypeString, Str: x.Str}}})
}
- if x.Start != nil {
- if x.IsSlice {
- if x.End != nil {
- return c.compileCall("_slice", []*Query{{Term: e}, x.End, x.Start})
- }
- return c.compileCall("_slice", []*Query{{Term: e}, {Term: &Term{Type: TermTypeNull}}, x.Start})
- }
+ if !x.IsSlice {
return c.compileCall("_index", []*Query{{Term: e}, x.Start})
}
- return c.compileCall("_slice", []*Query{{Term: e}, x.End, {Term: &Term{Type: TermTypeNull}}})
+ if x.Start == nil {
+ return c.compileCall("_slice", []*Query{{Term: e}, x.End, {Term: &Term{Type: TermTypeNull}}})
+ }
+ if x.End == nil {
+ return c.compileCall("_slice", []*Query{{Term: e}, {Term: &Term{Type: TermTypeNull}}, x.Start})
+ }
+ return c.compileCall("_slice", []*Query{{Term: e}, x.End, x.Start})
}
func (c *compiler) compileFunc(e *Func) error {
- for i := len(c.funcs) - 1; i >= 0; i-- {
- if f := c.funcs[i]; f.name == e.Name && len(f.args) == len(e.Args) {
+ if len(e.Args) == 0 {
+ if f, v := c.lookupFuncOrVariable(e.Name); f != nil {
return c.compileCallPc(f, e.Args)
- }
- }
- for i := len(c.scopes) - 1; i >= 0; i-- {
- s := c.scopes[i]
- for j := len(s.variables) - 1; j >= 0; j-- {
- v := s.variables[j]
- if v.name == e.Name && len(e.Args) == 0 {
- if e.Name[0] == '$' {
- c.append(&code{op: oppop})
- c.append(&code{op: opload, v: v.index})
- } else {
- c.append(&code{op: opload, v: v.index})
- c.append(&code{op: opcallpc})
+ } else if v != nil {
+ if e.Name[0] == '$' {
+ c.append(&code{op: oppop})
+ c.append(&code{op: opload, v: v.index})
+ } else {
+ c.append(&code{op: opload, v: v.index})
+ c.append(&code{op: opcallpc})
+ }
+ return nil
+ } else if e.Name == "$ENV" || e.Name == "env" {
+ env := make(map[string]any)
+ if c.environLoader != nil {
+ for _, kv := range c.environLoader() {
+ if i := strings.IndexByte(kv, '='); i > 0 {
+ env[kv[:i]] = kv[i+1:]
+ }
}
- return nil
}
+ c.append(&code{op: opconst, v: env})
+ return nil
+ } else if e.Name[0] == '$' {
+ return &variableNotFoundError{e.Name}
}
- }
- if (e.Name == "$ENV" || e.Name == "env") && len(e.Args) == 0 {
- env := make(map[string]interface{})
- if c.environLoader != nil {
- for _, kv := range c.environLoader() {
- xs := strings.SplitN(kv, "=", 2)
- env[xs[0]] = xs[1]
+ } else {
+ for i := len(c.scopes) - 1; i >= 0; i-- {
+ s := c.scopes[i]
+ for j := len(s.funcs) - 1; j >= 0; j-- {
+ if f := s.funcs[j]; f.name == e.Name && f.argcnt == len(e.Args) {
+ return c.compileCallPc(f, e.Args)
+ }
}
}
- c.append(&code{op: opconst, v: env})
- return nil
- }
- if e.Name[0] == '$' {
- return &variableNotFoundError{e.Name}
}
- name := e.Name
- if name[0] == '_' {
- name = name[1:]
+ if f := c.lookupBuiltin(e.Name, len(e.Args)); f != nil {
+ return c.compileCallPc(f, e.Args)
}
- if fds, ok := builtinFuncDefs[name]; ok {
+ if fds, ok := builtinFuncDefs[e.Name]; ok {
for _, fd := range fds {
if len(fd.Args) == len(e.Args) {
if err := c.compileFuncDef(fd, true); err != nil {
return err
}
+ break
}
}
- for i := len(c.funcs) - 1; i >= 0; i-- {
- if f := c.funcs[i]; f.name == e.Name && len(f.args) == len(e.Args) {
- return c.compileCallPc(f, e.Args)
+ if len(fds) == 0 {
+ switch e.Name {
+ case "_assign":
+ c.compileAssign()
+ case "_modify":
+ c.compileModify()
}
}
+ if f := c.lookupBuiltin(e.Name, len(e.Args)); f != nil {
+ return c.compileCallPc(f, e.Args)
+ }
}
if fn, ok := internalFuncs[e.Name]; ok && fn.accept(len(e.Args)) {
switch e.Name {
@@ -885,52 +969,176 @@ func (c *compiler) compileFunc(e *Func) error {
}
c.codes[len(c.codes)-1] = &code{op: oppathend}
return nil
- case "debug":
- c.append(&code{op: opdebug, v: "DEBUG:"})
- return nil
- case "stderr":
- c.append(&code{op: opdebug, v: "STDERR:"})
- return nil
case "builtins":
return c.compileCallInternal(
- [3]interface{}{c.funcBuiltins, 0, e.Name},
+ [3]any{c.funcBuiltins, 0, e.Name},
e.Args,
- nil,
- false,
+ true,
+ -1,
)
case "input":
if c.inputIter == nil {
return &inputNotAllowedError{}
}
return c.compileCallInternal(
- [3]interface{}{c.funcInput, 0, e.Name},
+ [3]any{c.funcInput, 0, e.Name},
e.Args,
- nil,
- false,
+ true,
+ -1,
)
case "modulemeta":
return c.compileCallInternal(
- [3]interface{}{c.funcModulemeta, 0, e.Name},
+ [3]any{c.funcModulemeta, 0, e.Name},
e.Args,
- nil,
- false,
+ true,
+ -1,
)
+ case "debug":
+ setfork := c.lazy(func() *code {
+ return &code{op: opfork, v: len(c.codes)}
+ })
+ if err := c.compileQuery(e.Args[0]); err != nil {
+ return err
+ }
+ if err := c.compileFunc(&Func{Name: "debug"}); err != nil {
+ if _, ok := err.(*funcNotFoundError); ok {
+ err = &funcNotFoundError{e}
+ }
+ return err
+ }
+ c.append(&code{op: opbacktrack})
+ setfork()
+ return nil
default:
return c.compileCall(e.Name, e.Args)
}
}
if fn, ok := c.customFuncs[e.Name]; ok && fn.accept(len(e.Args)) {
- return c.compileCallInternal(
- [3]interface{}{fn.callback, len(e.Args), e.Name},
+ if err := c.compileCallInternal(
+ [3]any{fn.callback, len(e.Args), e.Name},
e.Args,
- nil,
- false,
- )
+ true,
+ -1,
+ ); err != nil {
+ return err
+ }
+ if fn.iter {
+ c.append(&code{op: opiter})
+ }
+ return nil
}
return &funcNotFoundError{e}
}
-func (c *compiler) funcBuiltins(interface{}, []interface{}) interface{} {
+// Appends the compiled code for the assignment operator (`=`) to maximize
+// performance. Originally the operator was implemented as follows.
+//
+// def _assign(p; $x): reduce path(p) as $q (.; setpath($q; $x));
+//
+// To overcome the difficulty of reducing allocations on `setpath`, we use the
+// `allocator` type and track the allocated addresses during the reduction.
+func (c *compiler) compileAssign() {
+ defer c.appendBuiltin("_assign", 2)()
+ scope := c.newScope()
+ v, p := [2]int{scope.id, 0}, [2]int{scope.id, 1}
+ x, a := [2]int{scope.id, 2}, [2]int{scope.id, 3}
+ // Cannot reuse v, p due to backtracking in x.
+ w, q := [2]int{scope.id, 4}, [2]int{scope.id, 5}
+ c.appends(
+ &code{op: opscope, v: [3]int{scope.id, 6, 2}},
+ &code{op: opstore, v: v}, // def _assign(p; $x):
+ &code{op: opstore, v: p},
+ &code{op: opstore, v: x},
+ &code{op: opload, v: v},
+ &code{op: opexpbegin},
+ &code{op: opload, v: x},
+ &code{op: opcallpc},
+ &code{op: opstore, v: x},
+ &code{op: opexpend},
+ &code{op: oppush, v: nil},
+ &code{op: opcall, v: [3]any{funcAllocator, 0, "_allocator"}},
+ &code{op: opstore, v: a},
+ &code{op: opload, v: v},
+ &code{op: opfork, v: len(c.codes) + 30}, // reduce [L1]
+ &code{op: opdup},
+ &code{op: opstore, v: w},
+ &code{op: oppathbegin}, // path(p)
+ &code{op: opload, v: p},
+ &code{op: opcallpc},
+ &code{op: opload, v: w},
+ &code{op: oppathend},
+ &code{op: opstore, v: q}, // as $q (.;
+ &code{op: opload, v: a}, // setpath($q; $x)
+ &code{op: opload, v: x},
+ &code{op: opload, v: q},
+ &code{op: opload, v: w},
+ &code{op: opcall, v: [3]any{funcSetpathWithAllocator, 3, "_setpath"}},
+ &code{op: opstore, v: w},
+ &code{op: opbacktrack}, // );
+ &code{op: oppop}, // [L1]
+ &code{op: opload, v: w},
+ &code{op: opret},
+ )
+}
+
+// Appends the compiled code for the update-assignment operator (`|=`) to
+// maximize performance. We use the `allocator` type, just like `_assign/2`.
+func (c *compiler) compileModify() {
+ defer c.appendBuiltin("_modify", 2)()
+ scope := c.newScope()
+ v, p := [2]int{scope.id, 0}, [2]int{scope.id, 1}
+ f, d := [2]int{scope.id, 2}, [2]int{scope.id, 3}
+ a, l := [2]int{scope.id, 4}, [2]int{scope.id, 5}
+ c.appends(
+ &code{op: opscope, v: [3]int{scope.id, 6, 2}},
+ &code{op: opstore, v: v}, // def _modify(p; f):
+ &code{op: opstore, v: p},
+ &code{op: opstore, v: f},
+ &code{op: oppush, v: []any{}},
+ &code{op: opstore, v: d},
+ &code{op: oppush, v: nil},
+ &code{op: opcall, v: [3]any{funcAllocator, 0, "_allocator"}},
+ &code{op: opstore, v: a},
+ &code{op: opload, v: v},
+ &code{op: opfork, v: len(c.codes) + 39}, // reduce [L1]
+ &code{op: oppathbegin}, // path(p)
+ &code{op: opload, v: p},
+ &code{op: opcallpc},
+ &code{op: opload, v: v},
+ &code{op: oppathend},
+ &code{op: opstore, v: p}, // as $p (.;
+ &code{op: opforklabel, v: l}, // label $l |
+ &code{op: opload, v: v}, //
+ &code{op: opfork, v: len(c.codes) + 36}, // [L2]
+ &code{op: oppop}, // (getpath($p) |
+ &code{op: opload, v: a},
+ &code{op: opload, v: p},
+ &code{op: opload, v: v},
+ &code{op: opcall, v: [3]any{internalFuncs["getpath"].callback, 1, "getpath"}},
+ &code{op: opload, v: f}, // f)
+ &code{op: opcallpc},
+ &code{op: opload, v: p}, // setpath($p; ...)
+ &code{op: opload, v: v},
+ &code{op: opcall, v: [3]any{funcSetpathWithAllocator, 3, "_setpath"}},
+ &code{op: opstore, v: v},
+ &code{op: opload, v: v}, // ., break $l
+ &code{op: opfork, v: len(c.codes) + 34}, // [L4]
+ &code{op: opjump, v: len(c.codes) + 38}, // [L3]
+ &code{op: opload, v: l}, // [L4]
+ &code{op: opcall, v: [3]any{funcBreak(""), 0, "_break"}},
+ &code{op: opload, v: p}, // append $p to $d [L2]
+ &code{op: opappend, v: d}, //
+ &code{op: opbacktrack}, // ) | [L3]
+ &code{op: oppop}, // delpaths($d); [L1]
+ &code{op: opload, v: a},
+ &code{op: opload, v: d},
+ &code{op: opload, v: v},
+ &code{op: opcall, v: [3]any{funcDelpathsWithAllocator, 2, "_delpaths"}},
+ &code{op: opret},
+ )
+}
+
+func (c *compiler) funcBuiltins(any, []any) any {
type funcNameArity struct {
name string
arity int
@@ -965,14 +1173,14 @@ func (c *compiler) funcBuiltins(interface{}, []interface{}) interface{} {
return xs[i].name < xs[j].name ||
xs[i].name == xs[j].name && xs[i].arity < xs[j].arity
})
- ys := make([]interface{}, len(xs))
+ ys := make([]any, len(xs))
for i, x := range xs {
ys[i] = x.name + "/" + strconv.Itoa(x.arity)
}
return ys
}
-func (c *compiler) funcInput(interface{}, []interface{}) interface{} {
+func (c *compiler) funcInput(any, []any) any {
v, ok := c.inputIter.Next()
if !ok {
return errors.New("break")
@@ -980,10 +1188,10 @@ func (c *compiler) funcInput(interface{}, []interface{}) interface{} {
return normalizeNumbers(v)
}
-func (c *compiler) funcModulemeta(v interface{}, _ []interface{}) interface{} {
+func (c *compiler) funcModulemeta(v any, _ []any) any {
s, ok := v.(string)
if !ok {
- return &funcTypeError{"modulemeta", v}
+ return &func0TypeError{"modulemeta", v}
}
if c.moduleLoader == nil {
return fmt.Errorf("cannot load module: %q", s)
@@ -991,7 +1199,7 @@ func (c *compiler) funcModulemeta(v interface{}, _ []interface{}) interface{} {
var q *Query
var err error
if moduleLoader, ok := c.moduleLoader.(interface {
- LoadModuleWithMeta(string, map[string]interface{}) (*Query, error)
+ LoadModuleWithMeta(string, map[string]any) (*Query, error)
}); ok {
if q, err = moduleLoader.LoadModuleWithMeta(s, nil); err != nil {
return err
@@ -1005,43 +1213,60 @@ func (c *compiler) funcModulemeta(v interface{}, _ []interface{}) interface{} {
}
meta := q.Meta.ToValue()
if meta == nil {
- meta = make(map[string]interface{})
+ meta = make(map[string]any)
}
- var deps []interface{}
- for _, i := range q.Imports {
+ meta["defs"] = listModuleDefs(q)
+ meta["deps"] = listModuleDeps(q)
+ return meta
+}
+
+func listModuleDefs(q *Query) []any {
+ type funcNameArity struct {
+ name string
+ arity int
+ }
+ var xs []*funcNameArity
+ for _, fd := range q.FuncDefs {
+ if fd.Name[0] != '_' {
+ xs = append(xs, &funcNameArity{fd.Name, len(fd.Args)})
+ }
+ }
+ sort.Slice(xs, func(i, j int) bool {
+ return xs[i].name < xs[j].name ||
+ xs[i].name == xs[j].name && xs[i].arity < xs[j].arity
+ })
+ defs := make([]any, len(xs))
+ for i, x := range xs {
+ defs[i] = x.name + "/" + strconv.Itoa(x.arity)
+ }
+ return defs
+}
+
+func listModuleDeps(q *Query) []any {
+ deps := make([]any, len(q.Imports))
+ for j, i := range q.Imports {
v := i.Meta.ToValue()
if v == nil {
- v = make(map[string]interface{})
- } else {
- for k := range v {
- // dirty hack to remove the internal fields
- if strings.HasPrefix(k, "$$") {
- delete(v, k)
- }
- }
- }
- if i.ImportPath == "" {
- v["relpath"] = i.IncludePath
- } else {
- v["relpath"] = i.ImportPath
+ v = make(map[string]any)
}
- if err != nil {
- return err
+ relpath := i.ImportPath
+ if relpath == "" {
+ relpath = i.IncludePath
}
+ v["relpath"] = relpath
if i.ImportAlias != "" {
v["as"] = strings.TrimPrefix(i.ImportAlias, "$")
}
v["is_data"] = strings.HasPrefix(i.ImportAlias, "$")
- deps = append(deps, v)
+ deps[j] = v
}
- meta["deps"] = deps
- return meta
+ return deps
}
func (c *compiler) compileObject(e *Object) error {
c.appendCodeInfo(e)
if len(e.KeyVals) == 0 {
- c.append(&code{op: opconst, v: map[string]interface{}{}})
+ c.append(&code{op: opconst, v: map[string]any{}})
return nil
}
defer c.newScopeDepth()()
@@ -1066,7 +1291,7 @@ func (c *compiler) compileObject(e *Object) error {
return nil
}
}
- w := make(map[string]interface{}, l)
+ w := make(map[string]any, l)
for i := 0; i < l; i++ {
w[c.codes[pc+i*3].v.(string)] = c.codes[pc+i*3+2].v
}
@@ -1076,55 +1301,53 @@ func (c *compiler) compileObject(e *Object) error {
}
func (c *compiler) compileObjectKeyVal(v [2]int, kv *ObjectKeyVal) error {
- if kv.KeyOnly != "" {
- if kv.KeyOnly[0] == '$' {
- c.append(&code{op: oppush, v: kv.KeyOnly[1:]})
- c.append(&code{op: opload, v: v})
- return c.compileFunc(&Func{Name: kv.KeyOnly})
- }
- c.append(&code{op: oppush, v: kv.KeyOnly})
- c.append(&code{op: opload, v: v})
- return c.compileIndex(&Term{Type: TermTypeIdentity}, &Index{Name: kv.KeyOnly})
- } else if kv.KeyOnlyString != nil {
- c.append(&code{op: opload, v: v})
- if err := c.compileString(kv.KeyOnlyString, nil); err != nil {
- return err
- }
- c.append(&code{op: opdup})
- c.append(&code{op: opload, v: v})
- c.append(&code{op: opload, v: v})
- // ref: compileCall
- c.append(&code{op: opcall, v: [3]interface{}{internalFuncs["_index"].callback, 2, "_index"}})
- return nil
- } else {
- if kv.KeyQuery != nil {
- c.append(&code{op: opload, v: v})
- f := c.newScopeDepth()
- if err := c.compileQuery(kv.KeyQuery); err != nil {
- return err
+ if key := kv.Key; key != "" {
+ if key[0] == '$' {
+ if kv.Val == nil { // {$foo} == {foo:$foo}
+ c.append(&code{op: oppush, v: key[1:]})
}
- f()
- } else if kv.KeyString != nil {
c.append(&code{op: opload, v: v})
- if err := c.compileString(kv.KeyString, nil); err != nil {
+ if err := c.compileFunc(&Func{Name: key}); err != nil {
return err
}
- } else if kv.Key[0] == '$' {
+ } else {
+ c.append(&code{op: oppush, v: key})
+ if kv.Val == nil { // {foo} == {foo:.foo}
+ c.append(&code{op: opload, v: v})
+ c.append(&code{op: opindex, v: key})
+ }
+ }
+ } else if key := kv.KeyString; key != nil {
+ if key.Queries == nil {
+ c.append(&code{op: oppush, v: key.Str})
+ if kv.Val == nil { // {"foo"} == {"foo":.["foo"]}
+ c.append(&code{op: opload, v: v})
+ c.append(&code{op: opindex, v: key.Str})
+ }
+ } else {
c.append(&code{op: opload, v: v})
- if err := c.compileFunc(&Func{Name: kv.Key}); err != nil {
+ if err := c.compileString(key, nil); err != nil {
return err
}
- } else {
- c.append(&code{op: oppush, v: kv.Key})
+ if kv.Val == nil {
+ c.append(&code{op: opdup})
+ c.append(&code{op: opload, v: v})
+ c.append(&code{op: oppush, v: nil})
+ // ref: compileCall
+ c.append(&code{op: opcall, v: [3]any{internalFuncs["_index"].callback, 2, "_index"}})
+ }
}
+ } else if kv.KeyQuery != nil {
c.append(&code{op: opload, v: v})
- return c.compileObjectVal(kv.Val)
+ f := c.newScopeDepth()
+ if err := c.compileQuery(kv.KeyQuery); err != nil {
+ return err
+ }
+ f()
}
-}
-
-func (c *compiler) compileObjectVal(e *ObjectVal) error {
- for _, e := range e.Queries {
- if err := c.compileQuery(e); err != nil {
+ if kv.Val != nil {
+ c.append(&code{op: opload, v: v})
+ if err := c.compileQuery(kv.Val); err != nil {
return err
}
}
@@ -1134,43 +1357,41 @@ func (c *compiler) compileObjectVal(e *ObjectVal) error {
func (c *compiler) compileArray(e *Array) error {
c.appendCodeInfo(e)
if e.Query == nil {
- c.append(&code{op: opconst, v: []interface{}{}})
+ c.append(&code{op: opconst, v: []any{}})
return nil
}
- c.append(&code{op: oppush, v: []interface{}{}})
+ c.append(&code{op: oppush, v: []any{}})
arr := c.newVariable()
c.append(&code{op: opstore, v: arr})
pc := len(c.codes)
- c.append(&code{op: opnop})
- defer func() {
- if pc < len(c.codes) {
- c.codes[pc] = &code{op: opfork, v: c.pc() - 2}
- }
- }()
+ setfork := c.lazy(func() *code {
+ return &code{op: opfork, v: len(c.codes)}
+ })
defer c.newScopeDepth()()
if err := c.compileQuery(e.Query); err != nil {
return err
}
c.append(&code{op: opappend, v: arr})
c.append(&code{op: opbacktrack})
+ setfork()
c.append(&code{op: oppop})
c.append(&code{op: opload, v: arr})
if e.Query.Op == OpPipe {
return nil
}
// optimize constant arrays
- l := e.Query.countCommaQueries()
- if 3*l != len(c.codes)-pc-3 {
+ if (len(c.codes)-pc)%3 != 0 {
return nil
}
+ l := (len(c.codes) - pc - 3) / 3
for i := 0; i < l; i++ {
- if (i > 0 && c.codes[pc+i].op != opfork) ||
+ if c.codes[pc+i].op != opfork ||
c.codes[pc+i*2+l].op != opconst ||
(i < l-1 && c.codes[pc+i*2+l+1].op != opjump) {
return nil
}
}
- v := make([]interface{}, l)
+ v := make([]any, l)
for i := 0; i < l; i++ {
v[i] = c.codes[pc+i*2+l].v
}
@@ -1181,6 +1402,10 @@ func (c *compiler) compileArray(e *Array) error {
func (c *compiler) compileUnary(e *Unary) error {
c.appendCodeInfo(e)
+ if v := e.toNumber(); v != nil {
+ c.append(&code{op: opconst, v: v})
+ return nil
+ }
if err := c.compileTerm(e.Term); err != nil {
return err
}
@@ -1194,12 +1419,12 @@ func (c *compiler) compileUnary(e *Unary) error {
}
}
-func (c *compiler) compileFormat(fmt string, str *String) error {
- f := formatToFunc(fmt)
+func (c *compiler) compileFormat(format string, str *String) error {
+ f := formatToFunc(format)
if f == nil {
f = &Func{
Name: "format",
- Args: []*Query{{Term: &Term{Type: TermTypeString, Str: &String{Str: fmt[1:]}}}},
+ Args: []*Query{{Term: &Term{Type: TermTypeString, Str: &String{Str: format[1:]}}}},
}
}
if str == nil {
@@ -1208,8 +1433,8 @@ func (c *compiler) compileFormat(fmt string, str *String) error {
return c.compileString(str, f)
}
-func formatToFunc(fmt string) *Func {
- switch fmt {
+func formatToFunc(format string) *Func {
+ switch format {
case "@text":
return &Func{Name: "tostring"}
case "@json":
@@ -1218,6 +1443,8 @@ func formatToFunc(fmt string) *Func {
return &Func{Name: "_tohtml"}
case "@uri":
return &Func{Name: "_touri"}
+ case "@urid":
+ return &Func{Name: "_tourid"}
case "@csv":
return &Func{Name: "_tocsv"}
case "@tsv":
@@ -1241,18 +1468,18 @@ func (c *compiler) compileString(s *String, f *Func) error {
if f == nil {
f = &Func{Name: "tostring"}
}
- e := s.Queries[0]
- if e.Term.Str == nil {
- e = &Query{Left: e, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: f}}}
- }
- for i := 1; i < len(s.Queries); i++ {
- x := s.Queries[i]
- if x.Term.Str == nil {
- x = &Query{Left: x, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: f}}}
+ var q *Query
+ for _, e := range s.Queries {
+ if e.Term.Str == nil {
+ e = &Query{Left: e, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: f}}}
+ }
+ if q == nil {
+ q = e
+ } else {
+ q = &Query{Left: q, Op: OpAdd, Right: e}
}
- e = &Query{Left: e, Op: OpAdd, Right: x}
}
- return c.compileQuery(e)
+ return c.compileQuery(q)
}
func (c *compiler) compileTermSuffix(e *Term, s *Suffix) error {
@@ -1262,101 +1489,105 @@ func (c *compiler) compileTermSuffix(e *Term, s *Suffix) error {
if err := c.compileTerm(e); err != nil {
return err
}
- c.append(&code{op: opeach})
+ c.append(&code{op: opiter})
return nil
} else if s.Optional {
- if len(e.SuffixList) > 1 || len(e.SuffixList) == 1 && !e.SuffixList[0].Iter {
- if u, ok := e.SuffixList[len(e.SuffixList)-1].toTerm(); ok {
- t := *e // clone without changing e
- (&t).SuffixList = t.SuffixList[:len(e.SuffixList)-1]
- if err := c.compileTerm(&t); err != nil {
+ if len(e.SuffixList) > 0 {
+ if u := e.SuffixList[len(e.SuffixList)-1].toTerm(); u != nil {
+ // no need to clone (ref: compileTerm)
+ e.SuffixList = e.SuffixList[:len(e.SuffixList)-1]
+ if err := c.compileTerm(e); err != nil {
return err
}
- return c.compileTermSuffix(u, s)
+ e = u
}
}
return c.compileTry(&Try{Body: &Query{Term: e}})
} else if s.Bind != nil {
- c.append(&code{op: opdup})
- c.append(&code{op: opexpbegin})
- if err := c.compileTerm(e); err != nil {
- return err
- }
- return c.compileBind(s.Bind)
+ return c.compileBind(e, s.Bind)
} else {
return fmt.Errorf("invalid suffix: %s", s)
}
}
func (c *compiler) compileCall(name string, args []*Query) error {
- return c.compileCallInternal(
- [3]interface{}{internalFuncs[name].callback, len(args), name},
+ fn := internalFuncs[name]
+ var indexing int
+ switch name {
+ case "_index", "_slice":
+ indexing = 1
+ case "getpath":
+ indexing = 0
+ default:
+ indexing = -1
+ }
+ if err := c.compileCallInternal(
+ [3]any{fn.callback, len(args), name},
args,
- nil,
- name == "_index" || name == "_slice",
- )
+ true,
+ indexing,
+ ); err != nil {
+ return err
+ }
+ if fn.iter {
+ c.append(&code{op: opiter})
+ }
+ return nil
}
func (c *compiler) compileCallPc(fn *funcinfo, args []*Query) error {
- if len(args) == 0 {
- return c.compileCallInternal(fn.pc, args, nil, false)
- }
- xs, vars := make([]*Query, len(args)), make(map[int]bool, len(fn.args))
- for i, j := range fn.argsorder {
- xs[i] = args[j]
- if fn.args[j][0] == '$' {
- vars[i] = true
- }
- }
- return c.compileCallInternal(fn.pc, xs, vars, false)
+ return c.compileCallInternal(fn.pc, args, false, -1)
}
-func (c *compiler) compileCallInternal(fn interface{}, args []*Query, vars map[int]bool, indexing bool) error {
+func (c *compiler) compileCallInternal(
+ fn any, args []*Query, internal bool, indexing int,
+) error {
if len(args) == 0 {
c.append(&code{op: opcall, v: fn})
return nil
}
- idx := c.newVariable()
- c.append(&code{op: opstore, v: idx})
- if indexing && len(args) > 1 {
+ v := c.newVariable()
+ c.append(&code{op: opstore, v: v})
+ if indexing >= 0 {
c.append(&code{op: opexpbegin})
}
for i := len(args) - 1; i >= 0; i-- {
- pc := c.pc() + 1 // ref: compileFuncDef
+ pc := len(c.codes) + 1 // skip opjump (ref: compileFuncDef)
name := "lambda:" + strconv.Itoa(pc)
if err := c.compileFuncDef(&FuncDef{Name: name, Body: args[i]}, false); err != nil {
return err
}
- if vars == nil || vars[i] {
- if pc == c.pc()-2 {
- // optimize identity argument
+ if internal {
+ switch len(c.codes) - pc {
+ case 2: // optimize identity argument (opscope, opret)
j := len(c.codes) - 3
- c.codes[j] = &code{op: opload, v: idx}
+ c.codes[j] = &code{op: opload, v: v}
c.codes = c.codes[:j+1]
- c.funcs = c.funcs[:len(c.funcs)-1]
+ s := c.scopes[len(c.scopes)-1]
+ s.funcs = s.funcs[:len(s.funcs)-1]
c.deleteCodeInfo(name)
- } else if pc == c.pc()-3 {
- // optimize one instruction argument
+ case 3: // optimize one instruction argument (opscope, opX, opret)
j := len(c.codes) - 4
if c.codes[j+2].op == opconst {
c.codes[j] = &code{op: oppush, v: c.codes[j+2].v}
c.codes = c.codes[:j+1]
} else {
- c.codes[j] = &code{op: opload, v: idx}
+ c.codes[j] = &code{op: opload, v: v}
c.codes[j+1] = c.codes[j+2]
c.codes = c.codes[:j+2]
}
- c.funcs = c.funcs[:len(c.funcs)-1]
+ s := c.scopes[len(c.scopes)-1]
+ s.funcs = s.funcs[:len(s.funcs)-1]
c.deleteCodeInfo(name)
- } else {
- c.append(&code{op: opload, v: idx})
+ default:
+ c.append(&code{op: opload, v: v})
c.append(&code{op: oppushpc, v: pc})
c.append(&code{op: opcallpc})
}
} else {
c.append(&code{op: oppushpc, v: pc})
}
- if indexing && i == 1 {
+ if i == indexing {
if c.codes[len(c.codes)-2].op == opexpbegin {
c.codes[len(c.codes)-2] = c.codes[len(c.codes)-1]
c.codes = c.codes[:len(c.codes)-1]
@@ -1365,7 +1596,11 @@ func (c *compiler) compileCallInternal(fn interface{}, args []*Query, vars map[i
}
}
}
- c.append(&code{op: opload, v: idx})
+ if indexing > 0 {
+ c.append(&code{op: oppush, v: nil})
+ } else {
+ c.append(&code{op: opload, v: v})
+ }
c.append(&code{op: opcall, v: fn})
return nil
}
@@ -1374,30 +1609,33 @@ func (c *compiler) append(code *code) {
c.codes = append(c.codes, code)
}
-func (c *compiler) pc() int {
- return len(c.codes)
+func (c *compiler) appends(codes ...*code) {
+ c.codes = append(c.codes, codes...)
}
func (c *compiler) lazy(f func() *code) func() {
i := len(c.codes)
- c.codes = append(c.codes, &code{op: opnop})
+ c.codes = append(c.codes, nil)
return func() { c.codes[i] = f() }
}
func (c *compiler) optimizeTailRec() {
var pcs []int
- targets := map[int]bool{}
+ scopes := map[int]bool{}
L:
for i, l := 0, len(c.codes); i < l; i++ {
switch c.codes[i].op {
case opscope:
pcs = append(pcs, i)
- if c.codes[i].v.([2]int)[1] == 0 {
- targets[i] = true
+ if v := c.codes[i].v.([3]int); v[2] == 0 {
+ scopes[i] = v[1] == 0
}
case opcall:
+ var canjump bool
if j, ok := c.codes[i].v.(int); !ok ||
- len(pcs) == 0 || pcs[len(pcs)-1] != j || !targets[j] {
+ len(pcs) == 0 || pcs[len(pcs)-1] != j {
+ break
+ } else if canjump, ok = scopes[j]; !ok {
break
}
for j := i + 1; j < l; {
@@ -1405,7 +1643,12 @@ L:
case opjump:
j = c.codes[j].v.(int)
case opret:
- c.codes[i] = &code{op: opjump, v: pcs[len(pcs)-1] + 1}
+ if canjump {
+ c.codes[i].op = opjump
+ c.codes[i].v = pcs[len(pcs)-1] + 1
+ } else {
+ c.codes[i].op = opcallrec
+ }
continue L
default:
continue L
@@ -1420,22 +1663,26 @@ L:
}
}
-func (c *compiler) optimizeJumps() {
- for i := len(c.codes) - 1; i >= 0; i-- {
+func (c *compiler) optimizeCodeOps() {
+ for i, next := len(c.codes)-1, (*code)(nil); i >= 0; i-- {
code := c.codes[i]
- if code.op != opjump {
- continue
- }
- if code.v.(int)-1 == i {
- c.codes[i].op = opnop
- continue
- }
- for {
- d := c.codes[code.v.(int)]
- if d.op != opjump || code.v.(int) == d.v.(int) {
- break
+ switch code.op {
+ case oppush, opdup, opload:
+ switch next.op {
+ case oppop:
+ code.op = opnop
+ next.op = opnop
+ case opconst:
+ code.op = opnop
+ next.op = oppush
+ }
+ case opjump, opjumpifnot:
+ if j := code.v.(int); j-1 == i {
+ code.op = opnop
+ } else if next = c.codes[j]; next.op == opjump {
+ code.v = next.v
}
- code.v = d.v
}
+ next = code
}
}
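The compiler changes above move variable binding for `WithVariables` into `Compile` and pass their values positionally to `Run`. A hedged usage sketch of that public flow, assuming the standard gojq `Parse`/`Compile`/`Run` API; the query and values are illustrative only:

```go
package main

import (
	"fmt"
	"log"

	"github.com/itchyny/gojq"
)

func main() {
	query, err := gojq.Parse("$x + .")
	if err != nil {
		log.Fatalln(err)
	}
	// $x is declared with WithVariables; its value is supplied positionally
	// to Run, in the same order (see the Run/[WithVariables] comment above).
	code, err := gojq.Compile(query, gojq.WithVariables([]string{"$x"}))
	if err != nil {
		log.Fatalln(err)
	}
	iter := code.Run(10, 32) // . = 10, $x = 32
	for {
		v, ok := iter.Next()
		if !ok {
			break
		}
		if err, ok := v.(error); ok {
			log.Fatalln(err)
		}
		fmt.Println(v) // 42
	}
}
```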
diff --git a/vendor/github.com/itchyny/gojq/debug.go b/vendor/github.com/itchyny/gojq/debug.go
index 806552fa7..236982809 100644
--- a/vendor/github.com/itchyny/gojq/debug.go
+++ b/vendor/github.com/itchyny/gojq/debug.go
@@ -1,12 +1,13 @@
-// +build debug
+//go:build gojq_debug
+// +build gojq_debug
package gojq
import (
- "encoding/json"
"fmt"
"io"
"os"
+ "strconv"
"strings"
)
@@ -26,7 +27,12 @@ func init() {
}
}
-func (c *compiler) appendCodeInfo(x interface{}) {
+type codeinfo struct {
+ name string
+ pc int
+}
+
+func (c *compiler) appendCodeInfo(x any) {
if !debug {
return
}
@@ -38,10 +44,10 @@ func (c *compiler) appendCodeInfo(x interface{}) {
name = fmt.Sprint(x)
}
var diff int
- if len(c.codes) > 0 && c.codes[len(c.codes)-1].op == opret && strings.HasPrefix(name, "end of ") {
+ if c.codes[len(c.codes)-1] != nil && c.codes[len(c.codes)-1].op == opret && strings.HasPrefix(name, "end of ") {
diff = -1
}
- c.codeinfos = append(c.codeinfos, codeinfo{name, c.pc() + diff})
+ c.codeinfos = append(c.codeinfos, codeinfo{name, len(c.codes) + diff})
}
func (c *compiler) deleteCodeInfo(name string) {
@@ -74,7 +80,7 @@ func (env *env) debugCodes() {
for i, c := range env.codes {
pc := i
switch c.op {
- case opcall:
+ case opcall, opcallrec:
if x, ok := c.v.(int); ok {
pc = x
}
@@ -86,13 +92,18 @@ func (env *env) debugCodes() {
}
var s string
if name := env.lookupInfoName(pc); name != "" {
- if (c.op == opcall || c.op == opjump) && !strings.HasPrefix(name, "module ") {
- s = "\t## call " + name
- } else {
+ switch c.op {
+ case opcall, opcallrec, opjump:
+ if !strings.HasPrefix(name, "module ") {
+ s = "\t## call " + name
+ break
+ }
+ fallthrough
+ default:
s = "\t## " + name
}
}
- fmt.Fprintf(debugOut, "\t%d\t%s%s%s\n", i, formatOp(c.op, false), debugOperand(c), s)
+ fmt.Fprintf(debugOut, "\t%d\t%-*s%s%s\n", i, 25, c.op, debugOperand(c), s)
}
fmt.Fprintln(debugOut, "\t"+strings.Repeat("-", 40)+"+")
}
@@ -103,17 +114,21 @@ func (env *env) debugState(pc int, backtrack bool) {
}
var sb strings.Builder
c := env.codes[pc]
- fmt.Fprintf(&sb, "\t%d\t%s%s\t|", pc, formatOp(c.op, backtrack), debugOperand(c))
+ op := c.op.String()
+ if backtrack {
+ op += " "
+ }
+ fmt.Fprintf(&sb, "\t%d\t%-*s%s\t|", pc, 25, op, debugOperand(c))
var xs []int
for i := env.stack.index; i >= 0; i = env.stack.data[i].next {
xs = append(xs, i)
}
for i := len(xs) - 1; i >= 0; i-- {
sb.WriteString("\t")
- sb.WriteString(debugJSON(env.stack.data[xs[i]].value))
+ sb.WriteString(debugValue(env.stack.data[xs[i]].value))
}
switch c.op {
- case opcall:
+ case opcall, opcallrec:
if x, ok := c.v.(int); ok {
pc = x
}
@@ -124,22 +139,20 @@ func (env *env) debugState(pc int, backtrack bool) {
}
}
if name := env.lookupInfoName(pc); name != "" {
- if (c.op == opcall || c.op == opjump) && !strings.HasPrefix(name, "module ") {
- sb.WriteString("\t\t\t## call " + name)
- } else {
+ switch c.op {
+ case opcall, opcallrec, opjump:
+ if !strings.HasPrefix(name, "module ") {
+ sb.WriteString("\t\t\t## call " + name)
+ break
+ }
+ fallthrough
+ default:
sb.WriteString("\t\t\t## " + name)
}
}
fmt.Fprintln(debugOut, sb.String())
}
-func formatOp(c opcode, backtrack bool) string {
- if backtrack {
- return c.String() + " " + strings.Repeat(" ", 13-len(c.String()))
- }
- return c.String() + strings.Repeat(" ", 25-len(c.String()))
-}
-
func (env *env) debugForks(pc int, op string) {
if !debug {
return
@@ -152,31 +165,45 @@ func (env *env) debugForks(pc int, op string) {
if i == len(env.forks)-1 {
sb.WriteByte('<')
}
- fmt.Fprintf(&sb, "%d, %s", v.pc, debugJSON(env.stack.data[v.stackindex].value))
+ fmt.Fprintf(&sb, "%d, %s", v.pc, debugValue(env.stack.data[v.stackindex].value))
if i == len(env.forks)-1 {
sb.WriteByte('>')
}
}
- fmt.Fprintf(debugOut, "\t-\t%s%s%d\t|\t%s\n", op, strings.Repeat(" ", 22), pc, sb.String())
+ fmt.Fprintf(debugOut, "\t-\t%-*s%d\t|\t%s\n", 25, op, pc, sb.String())
}
func debugOperand(c *code) string {
- if c.op == opcall {
+ switch c.op {
+ case opcall, opcallrec:
switch v := c.v.(type) {
case int:
- return debugJSON(v)
- case [3]interface{}:
+ return strconv.Itoa(v)
+ case [3]any:
return fmt.Sprintf("%s/%d", v[2], v[1])
default:
panic(c)
}
- } else {
- return debugJSON(c.v)
+ default:
+ return debugValue(c.v)
}
}
-func debugJSON(v interface{}) string {
- var sb strings.Builder
- json.NewEncoder(&sb).Encode(v)
- return strings.TrimSpace(sb.String())
+func debugValue(v any) string {
+ switch v := v.(type) {
+ case Iter:
+ return fmt.Sprintf("gojq.Iter(%#v)", v)
+ case []pathValue:
+ return fmt.Sprintf("[]gojq.pathValue(%v)", v)
+ case [2]int:
+ return fmt.Sprintf("[%d,%d]", v[0], v[1])
+ case [3]int:
+ return fmt.Sprintf("[%d,%d,%d]", v[0], v[1], v[2])
+ case [3]any:
+ return fmt.Sprintf("[%v,%v,%v]", v[0], v[1], v[2])
+ case allocator:
+ return fmt.Sprintf("%v", v)
+ default:
+ return Preview(v)
+ }
}
diff --git a/vendor/github.com/itchyny/gojq/encoder.go b/vendor/github.com/itchyny/gojq/encoder.go
index 484c6aff1..3233e8a95 100644
--- a/vendor/github.com/itchyny/gojq/encoder.go
+++ b/vendor/github.com/itchyny/gojq/encoder.go
@@ -3,41 +3,50 @@ package gojq
import (
"bytes"
"fmt"
+ "io"
"math"
"math/big"
"sort"
"strconv"
+ "strings"
"unicode/utf8"
)
// Marshal returns the jq-flavored JSON encoding of v.
//
-// This method only accepts limited types (nil, bool, int, float64, *big.Int,
-// string, []interface{} and map[string]interface{}) because these are the
-// possible types a gojq iterator can emit. This method marshals NaN to null,
-// truncates infinities to (+|-) math.MaxFloat64 and does not escape '<' and
-// '>' for embedding in HTML. These behaviors are based on the marshaler of jq
-// command and different from the standard library method json.Marshal.
-func Marshal(v interface{}) ([]byte, error) {
- return jsonMarshalBytes(v), nil
+// This method accepts only limited types (nil, bool, int, float64, *big.Int,
+// string, []any, and map[string]any) because these are the possible types a
+// gojq iterator can emit. This method marshals NaN to null, truncates
+// infinities to (+|-) math.MaxFloat64, uses \b and \f in strings, and does not
+// escape '<', '>', '&', '\u2028', and '\u2029'. These behaviors are based on
+// the marshaler of jq command, and different from json.Marshal in the Go
+// standard library. Note that the result is not safe to embed in HTML.
+func Marshal(v any) ([]byte, error) {
+ var b bytes.Buffer
+ (&encoder{w: &b}).encode(v)
+ return b.Bytes(), nil
}
-func jsonMarshal(v interface{}) string {
- return string(jsonMarshalBytes(v))
+func jsonMarshal(v any) string {
+ var sb strings.Builder
+ (&encoder{w: &sb}).encode(v)
+ return sb.String()
}
-func jsonMarshalBytes(v interface{}) []byte {
- var b bytes.Buffer
- (&encoder{w: &b}).encode(v)
- return b.Bytes()
+func jsonEncodeString(sb *strings.Builder, v string) {
+ (&encoder{w: sb}).encodeString(v)
}
type encoder struct {
- w *bytes.Buffer
+ w interface {
+ io.Writer
+ io.ByteWriter
+ io.StringWriter
+ }
buf [64]byte
}
-func (e *encoder) encode(v interface{}) {
+func (e *encoder) encode(v any) {
switch v := v.(type) {
case nil:
e.w.WriteString("null")
@@ -55,12 +64,12 @@ func (e *encoder) encode(v interface{}) {
e.w.Write(v.Append(e.buf[:0], 10))
case string:
e.encodeString(v)
- case []interface{}:
+ case []any:
e.encodeArray(v)
- case map[string]interface{}:
- e.encodeMap(v)
+ case map[string]any:
+ e.encodeObject(v)
default:
- panic(fmt.Sprintf("invalid value: %v", v))
+ panic(fmt.Sprintf("invalid type: %[1]T (%[1]v)", v))
}
}
@@ -75,12 +84,12 @@ func (e *encoder) encodeFloat64(f float64) {
} else if f <= -math.MaxFloat64 {
f = -math.MaxFloat64
}
- fmt := byte('f')
+ format := byte('f')
if x := math.Abs(f); x != 0 && x < 1e-6 || x >= 1e21 {
- fmt = 'e'
+ format = 'e'
}
- buf := strconv.AppendFloat(e.buf[:0], f, fmt, -1, 64)
- if fmt == 'e' {
+ buf := strconv.AppendFloat(e.buf[:0], f, format, -1, 64)
+ if format == 'e' {
// clean up e-09 to e-9
if n := len(buf); n >= 4 && buf[n-4] == 'e' && buf[n-3] == '-' && buf[n-2] == '0' {
buf[n-2] = buf[n-1]
@@ -96,26 +105,31 @@ func (e *encoder) encodeString(s string) {
start := 0
for i := 0; i < len(s); {
if b := s[i]; b < utf8.RuneSelf {
- if ']' <= b && b <= '~' || '#' <= b && b <= '[' || b == ' ' || b == '!' {
+ if ' ' <= b && b <= '~' && b != '"' && b != '\\' {
i++
continue
}
if start < i {
e.w.WriteString(s[start:i])
}
- e.w.WriteByte('\\')
switch b {
- case '\\', '"':
- e.w.WriteByte(b)
+ case '"':
+ e.w.WriteString(`\"`)
+ case '\\':
+ e.w.WriteString(`\\`)
+ case '\b':
+ e.w.WriteString(`\b`)
+ case '\f':
+ e.w.WriteString(`\f`)
case '\n':
- e.w.WriteByte('n')
+ e.w.WriteString(`\n`)
case '\r':
- e.w.WriteByte('r')
+ e.w.WriteString(`\r`)
case '\t':
- e.w.WriteByte('t')
+ e.w.WriteString(`\t`)
default:
const hex = "0123456789abcdef"
- e.w.WriteString("u00")
+ e.w.WriteString(`\u00`)
e.w.WriteByte(hex[b>>4])
e.w.WriteByte(hex[b&0xF])
}
@@ -141,7 +155,7 @@ func (e *encoder) encodeString(s string) {
e.w.WriteByte('"')
}
-func (e *encoder) encodeArray(vs []interface{}) {
+func (e *encoder) encodeArray(vs []any) {
e.w.WriteByte('[')
for i, v := range vs {
if i > 0 {
@@ -152,11 +166,11 @@ func (e *encoder) encodeArray(vs []interface{}) {
e.w.WriteByte(']')
}
-func (e *encoder) encodeMap(vs map[string]interface{}) {
+func (e *encoder) encodeObject(vs map[string]any) {
e.w.WriteByte('{')
type keyVal struct {
key string
- val interface{}
+ val any
}
kvs := make([]keyVal, len(vs))
var i int
diff --git a/vendor/github.com/itchyny/gojq/env.go b/vendor/github.com/itchyny/gojq/env.go
index 56b9fa0ea..bf058eda8 100644
--- a/vendor/github.com/itchyny/gojq/env.go
+++ b/vendor/github.com/itchyny/gojq/env.go
@@ -5,37 +5,38 @@ import "context"
type env struct {
pc int
stack *stack
- scopes *stack
paths *stack
- values []interface{}
+ scopes *scopeStack
+ values []any
codes []*code
codeinfos []codeinfo
- forks []*fork
+ forks []fork
backtrack bool
offset int
expdepth int
- args [32]interface{} // len(env.args) > maxarity
+ label int
+ args [32]any // len(env.args) > maxarity
ctx context.Context
}
func newEnv(ctx context.Context) *env {
return &env{
stack: newStack(),
- scopes: newStack(),
paths: newStack(),
+ scopes: newScopeStack(),
ctx: ctx,
}
}
type scope struct {
- id int
- offset int
- pc int
- saveindex int
+ id int
+ offset int
+ pc int
+ saveindex int
+ outerindex int
}
type fork struct {
- op opcode
pc int
stackindex int
stacklimit int
diff --git a/vendor/github.com/itchyny/gojq/error.go b/vendor/github.com/itchyny/gojq/error.go
index 450c30ad7..18b06b1cc 100644
--- a/vendor/github.com/itchyny/gojq/error.go
+++ b/vendor/github.com/itchyny/gojq/error.go
@@ -1,23 +1,18 @@
package gojq
-import (
- "math/big"
- "reflect"
- "strconv"
- "strings"
-)
+import "strconv"
// ValueError is an interface for errors with a value for internal function.
// Return an error implementing this interface when you want to catch error
-// values (not error messages) by try-catch, just like builtin error function.
-// Refer to WithFunction to add a custom internal function.
+// values (not error messages) by try-catch, just like built-in error function.
+// Refer to [WithFunction] to add a custom internal function.
type ValueError interface {
error
- Value() interface{}
+ Value() any
}
type expectedObjectError struct {
- v interface{}
+ v any
}
func (err *expectedObjectError) Error() string {
@@ -25,7 +20,7 @@ func (err *expectedObjectError) Error() string {
}
type expectedArrayError struct {
- v interface{}
+ v any
}
func (err *expectedArrayError) Error() string {
@@ -33,23 +28,31 @@ func (err *expectedArrayError) Error() string {
}
type iteratorError struct {
- v interface{}
+ v any
}
func (err *iteratorError) Error() string {
return "cannot iterate over: " + typeErrorPreview(err.v)
}
+type arrayIndexNegativeError struct {
+ v int
+}
+
+func (err *arrayIndexNegativeError) Error() string {
+ return "array index should not be negative: " + Preview(err.v)
+}
+
type arrayIndexTooLargeError struct {
- v interface{}
+ v any
}
func (err *arrayIndexTooLargeError) Error() string {
- return "array index too large: " + previewValue(err.v)
+ return "array index too large: " + Preview(err.v)
}
type objectKeyNotStringError struct {
- v interface{}
+ v any
}
func (err *objectKeyNotStringError) Error() string {
@@ -57,24 +60,37 @@ func (err *objectKeyNotStringError) Error() string {
}
type arrayIndexNotNumberError struct {
- v interface{}
+ v any
}
func (err *arrayIndexNotNumberError) Error() string {
return "expected a number for indexing an array but got: " + typeErrorPreview(err.v)
}
+type stringIndexNotNumberError struct {
+ v any
+}
+
+func (err *stringIndexNotNumberError) Error() string {
+ return "expected a number for indexing a string but got: " + typeErrorPreview(err.v)
+}
+
type expectedStartEndError struct {
- v interface{}
+ v any
}
func (err *expectedStartEndError) Error() string {
return `expected "start" and "end" for slicing but got: ` + typeErrorPreview(err.v)
}
-type inputNotAllowedError struct {
+type lengthMismatchError struct{}
+
+func (*lengthMismatchError) Error() string {
+ return "length mismatch"
}
+type inputNotAllowedError struct{}
+
func (*inputNotAllowedError) Error() string {
return "input(s)/0 is not allowed"
}
@@ -87,19 +103,66 @@ func (err *funcNotFoundError) Error() string {
return "function not defined: " + err.f.Name + "/" + strconv.Itoa(len(err.f.Args))
}
-type funcTypeError struct {
+type func0TypeError struct {
name string
- v interface{}
+ v any
}
-func (err *funcTypeError) Error() string {
+func (err *func0TypeError) Error() string {
return err.name + " cannot be applied to: " + typeErrorPreview(err.v)
}
+type func1TypeError struct {
+ name string
+ v, w any
+}
+
+func (err *func1TypeError) Error() string {
+ return err.name + "(" + Preview(err.w) + ") cannot be applied to: " + typeErrorPreview(err.v)
+}
+
+type func2TypeError struct {
+ name string
+ v, w, x any
+}
+
+func (err *func2TypeError) Error() string {
+ return err.name + "(" + Preview(err.w) + "; " + Preview(err.x) + ") cannot be applied to: " + typeErrorPreview(err.v)
+}
+
+type func0WrapError struct {
+ name string
+ v any
+ err error
+}
+
+func (err *func0WrapError) Error() string {
+ return err.name + " cannot be applied to " + Preview(err.v) + ": " + err.err.Error()
+}
+
+type func1WrapError struct {
+ name string
+ v, w any
+ err error
+}
+
+func (err *func1WrapError) Error() string {
+ return err.name + "(" + Preview(err.w) + ") cannot be applied to " + Preview(err.v) + ": " + err.err.Error()
+}
+
+type func2WrapError struct {
+ name string
+ v, w, x any
+ err error
+}
+
+func (err *func2WrapError) Error() string {
+ return err.name + "(" + Preview(err.w) + "; " + Preview(err.x) + ") cannot be applied to " + Preview(err.v) + ": " + err.err.Error()
+}
+
type exitCodeError struct {
- value interface{}
+ value any
code int
- halt bool
}
func (err *exitCodeError) Error() string {
@@ -109,37 +172,63 @@ func (err *exitCodeError) Error() string {
return "error: " + jsonMarshal(err.value)
}
-func (err *exitCodeError) IsEmptyError() bool {
- return err.value == nil
+func (err *exitCodeError) Value() any {
+ return err.value
}
func (err *exitCodeError) ExitCode() int {
return err.code
}
-func (err *exitCodeError) Value() interface{} {
- return err.value
+// HaltError is an error emitted by halt and halt_error functions.
+// It implements [ValueError], and if the value is nil, discard the error
+// and stop the iteration. Consider a query like "1, halt, 2";
+// the first value is 1, and the second value is a HaltError with nil value.
+// You might think the iterator should not emit an error this case, but it
+// should so that we can recognize the halt error to stop the outer loop
+// of iterating input values; echo 1 2 3 | gojq "., halt".
+type HaltError exitCodeError
+
+func (err *HaltError) Error() string {
+ return "halt " + (*exitCodeError)(err).Error()
+}
+
+// Value returns the value of the error. This implements [ValueError],
+// but halt error is not catchable by try-catch.
+func (err *HaltError) Value() any {
+ return (*exitCodeError)(err).Value()
}
-type funcContainsError struct {
- l, r interface{}
+// ExitCode returns the exit code of the error.
+func (err *HaltError) ExitCode() int {
+ return (*exitCodeError)(err).ExitCode()
}
-func (err *funcContainsError) Error() string {
- return "cannot check contains(" + previewValue(err.r) + "): " + typeErrorPreview(err.l)
+type flattenDepthError struct {
+ v float64
}
-type hasKeyTypeError struct {
- l, r interface{}
+func (err *flattenDepthError) Error() string {
+ return "flatten depth should not be negative: " + Preview(err.v)
}
-func (err *hasKeyTypeError) Error() string {
- return "cannot check whether " + typeErrorPreview(err.l) + " has a key: " + typeErrorPreview(err.r)
+type joinTypeError struct {
+ v any
+}
+
+func (err *joinTypeError) Error() string {
+ return "join cannot be applied to an array including: " + typeErrorPreview(err.v)
+}
+
+type timeArrayError struct{}
+
+func (*timeArrayError) Error() string {
+ return "expected an array of 8 numbers"
}
type unaryTypeError struct {
name string
- v interface{}
+ v any
}
func (err *unaryTypeError) Error() string {
@@ -148,7 +237,7 @@ func (err *unaryTypeError) Error() string {
type binopTypeError struct {
name string
- l, r interface{}
+ l, r any
}
func (err *binopTypeError) Error() string {
@@ -156,7 +245,7 @@ func (err *binopTypeError) Error() string {
}
type zeroDivisionError struct {
- l, r interface{}
+ l, r any
}
func (err *zeroDivisionError) Error() string {
@@ -164,11 +253,11 @@ func (err *zeroDivisionError) Error() string {
}
type zeroModuloError struct {
- l, r interface{}
+ l, r any
}
func (err *zeroModuloError) Error() string {
- return "cannot modulo " + typeErrorPreview(err.l) + " by: " + typeErrorPreview(err.r) + ""
+ return "cannot modulo " + typeErrorPreview(err.l) + " by: " + typeErrorPreview(err.r)
}
type formatNotFoundError struct {
@@ -179,26 +268,18 @@ func (err *formatNotFoundError) Error() string {
return "format not defined: " + err.n
}
-type formatCsvTsvRowError struct {
+type formatRowError struct {
typ string
- v interface{}
-}
-
-func (err *formatCsvTsvRowError) Error() string {
- return "invalid " + err.typ + " row: " + typeErrorPreview(err.v)
-}
-
-type formatShError struct {
- v interface{}
+ v any
}
-func (err *formatShError) Error() string {
- return "cannot escape for shell: " + typeErrorPreview(err.v)
+func (err *formatRowError) Error() string {
+ return "@" + err.typ + " cannot format an array including: " + typeErrorPreview(err.v)
}
type tooManyVariableValuesError struct{}
-func (err *tooManyVariableValuesError) Error() string {
+func (*tooManyVariableValuesError) Error() string {
return "too many variable values provided"
}
@@ -228,13 +309,14 @@ func (err *variableNameError) Error() string {
type breakError struct {
n string
+ v any
}
func (err *breakError) Error() string {
return "label not defined: " + err.n
}
-func (err *breakError) ExitCode() int {
+func (*breakError) ExitCode() int {
return 3
}
@@ -247,7 +329,7 @@ func (err *tryEndError) Error() string {
}
type invalidPathError struct {
- v interface{}
+ v any
}
func (err *invalidPathError) Error() string {
@@ -255,32 +337,24 @@ func (err *invalidPathError) Error() string {
}
type invalidPathIterError struct {
- v interface{}
+ v any
}
func (err *invalidPathIterError) Error() string {
return "invalid path on iterating against: " + typeErrorPreview(err.v)
}
-type getpathError struct {
- v, path interface{}
-}
-
-func (err *getpathError) Error() string {
- return "cannot getpath with " + previewValue(err.path) + " against: " + typeErrorPreview(err.v) + ""
-}
-
type queryParseError struct {
- typ, fname, contents string
- err error
+ fname, contents string
+ err error
}
-func (err *queryParseError) QueryParseError() (string, string, string, error) {
- return err.typ, err.fname, err.contents, err.err
+func (err *queryParseError) QueryParseError() (string, string, error) {
+ return err.fname, err.contents, err.err
}
func (err *queryParseError) Error() string {
- return "invalid " + err.typ + ": " + err.fname + ": " + err.err.Error()
+ return "invalid query: " + err.fname + ": " + err.err.Error()
}
type jsonParseError struct {
@@ -296,74 +370,13 @@ func (err *jsonParseError) Error() string {
return "invalid json: " + err.fname + ": " + err.err.Error()
}
-func typeErrorPreview(v interface{}) string {
- p := preview(v)
- if p != "" {
- p = " (" + p + ")"
- }
- return typeof(v) + p
-}
-
-func typeof(v interface{}) (s string) {
- if v == nil {
+func typeErrorPreview(v any) string {
+ switch v.(type) {
+ case nil:
return "null"
- }
- k := reflect.TypeOf(v).Kind()
- switch k {
- case reflect.Array, reflect.Slice:
- return "array"
- case reflect.Map:
- return "object"
- case reflect.Bool:
- return "boolean"
- case reflect.Int, reflect.Uint, reflect.Float64:
- return "number"
- case reflect.String:
- return "string"
- case reflect.Ptr:
- if _, ok := v.(*big.Int); ok {
- return "number"
- }
- return "ptr"
+ case Iter:
+ return "gojq.Iter"
default:
- return k.String()
- }
-}
-
-func preview(v interface{}) string {
- if v == nil {
- return ""
- }
- s := jsonMarshal(v)
- if l := 30; len(s) > l {
- var trailing string
- switch v.(type) {
- case string:
- trailing = ` ..."`
- case []interface{}:
- trailing = " ...]"
- case map[string]interface{}:
- trailing = " ...}"
- default:
- trailing = " ..."
- }
- var sb strings.Builder
- sb.Grow(l + 5)
- for _, c := range s {
- sb.WriteRune(c)
- if sb.Len() >= l-len(trailing) {
- sb.WriteString(trailing)
- break
- }
- }
- s = sb.String()
- }
- return s
-}
-
-func previewValue(v interface{}) string {
- if v == nil {
- return "null"
+ return TypeOf(v) + " (" + Preview(v) + ")"
}
- return preview(v)
}
diff --git a/vendor/github.com/itchyny/gojq/execute.go b/vendor/github.com/itchyny/gojq/execute.go
index a63388d9a..344d8a3c5 100644
--- a/vendor/github.com/itchyny/gojq/execute.go
+++ b/vendor/github.com/itchyny/gojq/execute.go
@@ -1,12 +1,13 @@
package gojq
import (
- "fmt"
+ "context"
+ "math"
"reflect"
"sort"
)
-func (env *env) execute(bc *Code, v interface{}, vars ...interface{}) Iter {
+func (env *env) execute(bc *Code, v any, vars ...any) Iter {
env.codes = bc.codes
env.codeinfos = bc.codeinfos
env.push(v)
@@ -17,10 +18,10 @@ func (env *env) execute(bc *Code, v interface{}, vars ...interface{}) Iter {
return env
}
-func (env *env) Next() (interface{}, bool) {
+func (env *env) Next() (any, bool) {
var err error
pc, callpc, index := env.pc, len(env.codes)-1, -1
- backtrack, hasCtx := env.backtrack, env.ctx != nil
+ backtrack, hasCtx := env.backtrack, env.ctx != context.Background()
defer func() { env.pc, env.backtrack = pc, true }()
loop:
for ; pc < len(env.codes); pc++ {
@@ -42,9 +43,9 @@ loop:
case oppop:
env.pop()
case opdup:
- x := env.pop()
- env.push(x)
- env.push(x)
+ v := env.pop()
+ env.push(v)
+ env.push(v)
case opconst:
env.pop()
env.push(code.v)
@@ -57,7 +58,7 @@ loop:
break loop
}
n := code.v.(int)
- m := make(map[string]interface{}, n)
+ m := make(map[string]any, n)
for i := 0; i < n; i++ {
v, k := env.pop(), env.pop()
s, ok := k.(string)
@@ -65,12 +66,14 @@ loop:
err = &objectKeyNotStringError{k}
break loop
}
- m[s] = v
+ if _, ok := m[s]; !ok {
+ m[s] = v
+ }
}
env.push(m)
case opappend:
i := env.index(code.v.([2]int))
- env.values[i] = append(env.values[i].([]interface{}), env.pop())
+ env.values[i] = append(env.values[i].([]any), env.pop())
case opfork:
if backtrack {
if err != nil {
@@ -78,47 +81,38 @@ loop:
}
pc, backtrack = code.v.(int), false
goto loop
- } else {
- env.pushfork(code.op, pc)
}
+ env.pushfork(pc)
case opforktrybegin:
if backtrack {
if err == nil {
break loop
}
- switch er := err.(type) {
+ switch e := err.(type) {
case *tryEndError:
- err = er.err
+ err = e.err
+ break loop
+ case *breakError, *HaltError:
break loop
case ValueError:
- if er, ok := er.(*exitCodeError); ok && er.halt {
- break loop
- }
- if v := er.Value(); v != nil {
- env.pop()
- env.push(v)
- } else {
- err = nil
- break loop
- }
+ env.pop()
+ env.push(e.Value())
default:
env.pop()
env.push(err.Error())
}
pc, backtrack, err = code.v.(int), false, nil
goto loop
- } else {
- env.pushfork(code.op, pc)
}
+ env.pushfork(pc)
case opforktryend:
if backtrack {
if err != nil {
err = &tryEndError{err}
}
break loop
- } else {
- env.pushfork(code.op, pc)
}
+ env.pushfork(pc)
case opforkalt:
if backtrack {
if err == nil {
@@ -126,18 +120,21 @@ loop:
}
pc, backtrack, err = code.v.(int), false, nil
goto loop
- } else {
- env.pushfork(code.op, pc)
}
+ env.pushfork(pc)
case opforklabel:
if backtrack {
- if e, ok := err.(*breakError); ok && code.v.(string) == e.n {
+ label := env.pop()
+ if e, ok := err.(*breakError); ok && e.v == label {
err = nil
}
break loop
- } else {
- env.pushfork(code.op, pc)
}
+ env.push(env.label)
+ env.pushfork(pc)
+ env.pop()
+ env.values[env.index(code.v.([2]int))] = env.label
+ env.label++
case opbacktrack:
break loop
case opjump:
@@ -148,6 +145,30 @@ loop:
pc = code.v.(int)
goto loop
}
+ case opindex, opindexarray:
+ if backtrack {
+ break loop
+ }
+ p, v := code.v, env.pop()
+ if code.op == opindexarray && v != nil {
+ if _, ok := v.([]any); !ok {
+ err = &expectedArrayError{v}
+ break loop
+ }
+ }
+ w := funcIndex2(nil, v, p)
+ if e, ok := w.(error); ok {
+ err = e
+ break loop
+ }
+ env.push(w)
+ if !env.paths.empty() && env.expdepth == 0 {
+ if !env.pathIntact(v) {
+ err = &invalidPathError{v}
+ break loop
+ }
+ env.paths.push(pathValue{path: p, value: w})
+ }
case opcall:
if backtrack {
break loop
@@ -156,31 +177,51 @@ loop:
case int:
pc, callpc, index = v, pc, env.scopes.index
goto loop
- case [3]interface{}:
+ case [3]any:
argcnt := v[1].(int)
x, args := env.pop(), env.args[:argcnt]
for i := 0; i < argcnt; i++ {
args[i] = env.pop()
}
- w := v[0].(func(interface{}, []interface{}) interface{})(x, args)
+ w := v[0].(func(any, []any) any)(x, args)
if e, ok := w.(error); ok {
err = e
break loop
}
env.push(w)
- if !env.paths.empty() {
- var ps []interface{}
- ps, err = env.pathEntries(v[2].(string), x, args)
- if err != nil {
- break loop
- }
- for _, p := range ps {
- env.paths.push([2]interface{}{p, w})
+ if !env.paths.empty() && env.expdepth == 0 {
+ switch v[2].(string) {
+ case "_index":
+ if x = args[0]; !env.pathIntact(x) {
+ err = &invalidPathError{x}
+ break loop
+ }
+ env.paths.push(pathValue{path: args[1], value: w})
+ case "_slice":
+ if x = args[0]; !env.pathIntact(x) {
+ err = &invalidPathError{x}
+ break loop
+ }
+ env.paths.push(pathValue{
+ path: map[string]any{"start": args[2], "end": args[1]},
+ value: w,
+ })
+ case "getpath":
+ if !env.pathIntact(x) {
+ err = &invalidPathError{x}
+ break loop
+ }
+ for _, p := range args[0].([]any) {
+ env.paths.push(pathValue{path: p, value: w})
+ }
}
}
default:
panic(v)
}
+ case opcallrec:
+ pc, callpc, index = code.v.(int), -1, env.scopes.index
+ goto loop
case oppushpc:
env.push([2]int{code.v.(int), env.scopes.index})
case opcallpc:
@@ -188,18 +229,27 @@ loop:
pc, callpc, index = xs[0], pc, xs[1]
goto loop
case opscope:
- xs := code.v.([2]int)
- var i, l int
+ xs := code.v.([3]int)
+ var saveindex, outerindex int
if index == env.scopes.index {
- i = index
+ if callpc >= 0 {
+ saveindex = index
+ } else {
+ callpc, saveindex = env.popscope()
+ }
} else {
- env.scopes.save(&i, &l)
+ saveindex, _ = env.scopes.save()
env.scopes.index = index
}
- env.scopes.push(scope{xs[0], env.offset, callpc, i})
+ if outerindex = index; outerindex >= 0 {
+ if s := env.scopes.data[outerindex].value; s.id == xs[0] {
+ outerindex = s.outerindex
+ }
+ }
+ env.scopes.push(scope{xs[0], env.offset, callpc, saveindex, outerindex})
env.offset += xs[1]
if env.offset > len(env.values) {
- vs := make([]interface{}, env.offset*2)
+ vs := make([]any, env.offset*2)
copy(vs, env.values)
env.values = vs
}
@@ -207,63 +257,74 @@ loop:
if backtrack {
break loop
}
- s := env.scopes.pop().(scope)
- pc, env.scopes.index = s.pc, s.saveindex
+ pc, env.scopes.index = env.popscope()
if env.scopes.empty() {
return env.pop(), true
}
- case opeach:
+ case opiter:
if err != nil {
break loop
}
backtrack = false
- var xs [][2]interface{}
+ var xs []pathValue
switch v := env.pop().(type) {
- case [][2]interface{}:
+ case []pathValue:
xs = v
- case []interface{}:
- if !env.paths.empty() && (env.expdepth == 0 && !reflect.DeepEqual(v, env.paths.top().([2]interface{})[1])) {
+ case []any:
+ if !env.paths.empty() && env.expdepth == 0 && !env.pathIntact(v) {
err = &invalidPathIterError{v}
break loop
}
if len(v) == 0 {
break loop
}
- xs = make([][2]interface{}, len(v))
+ xs = make([]pathValue, len(v))
for i, v := range v {
- xs[i] = [2]interface{}{i, v}
+ xs[i] = pathValue{path: i, value: v}
}
- case map[string]interface{}:
- if !env.paths.empty() && (env.expdepth == 0 && !reflect.DeepEqual(v, env.paths.top().([2]interface{})[1])) {
+ case map[string]any:
+ if !env.paths.empty() && env.expdepth == 0 && !env.pathIntact(v) {
err = &invalidPathIterError{v}
break loop
}
if len(v) == 0 {
break loop
}
- xs = make([][2]interface{}, len(v))
+ xs = make([]pathValue, len(v))
var i int
for k, v := range v {
- xs[i] = [2]interface{}{k, v}
+ xs[i] = pathValue{path: k, value: v}
i++
}
sort.Slice(xs, func(i, j int) bool {
- return xs[i][0].(string) < xs[j][0].(string)
+ return xs[i].path.(string) < xs[j].path.(string)
})
+ case Iter:
+ if w, ok := v.Next(); ok {
+ env.push(v)
+ env.pushfork(pc)
+ env.pop()
+ if e, ok := w.(error); ok {
+ err = e
+ break loop
+ }
+ env.push(w)
+ continue
+ }
+ break loop
default:
err = &iteratorError{v}
+ env.push(emptyIter{})
break loop
}
if len(xs) > 1 {
env.push(xs[1:])
- env.pushfork(code.op, pc)
+ env.pushfork(pc)
env.pop()
}
- env.push(xs[0][1])
- if !env.paths.empty() {
- if env.expdepth == 0 {
- env.paths.push(xs[0])
- }
+ env.push(xs[0].value)
+ if !env.paths.empty() && env.expdepth == 0 {
+ env.paths.push(xs[0])
}
case opexpbegin:
env.expdepth++
@@ -271,35 +332,25 @@ loop:
env.expdepth--
case oppathbegin:
env.paths.push(env.expdepth)
- env.paths.push([2]interface{}{nil, env.stack.top()})
+ env.paths.push(pathValue{value: env.stack.top()})
env.expdepth = 0
case oppathend:
if backtrack {
break loop
}
- if env.expdepth > 0 {
- panic(fmt.Sprintf("unexpected expdepth: %d", env.expdepth))
- }
env.pop()
- x := env.pop()
- if reflect.DeepEqual(x, env.paths.top().([2]interface{})[1]) {
- env.push(env.poppaths())
- env.expdepth = env.paths.pop().(int)
- } else {
- err = &invalidPathError{x}
+ if v := env.pop(); !env.pathIntact(v) {
+ err = &invalidPathError{v}
break loop
}
- case opdebug:
- if !backtrack {
- return [2]interface{}{code.v, env.stack.top()}, true
- }
- backtrack = false
+ env.push(env.poppaths())
+ env.expdepth = env.paths.pop().(int)
default:
panic(code.op)
}
}
if len(env.forks) > 0 {
- pc, backtrack = env.popfork().pc, true
+ pc, backtrack = env.popfork(), true
goto loop
}
if err != nil {
@@ -308,82 +359,84 @@ loop:
return nil, false
}
-func (env *env) push(v interface{}) {
+func (env *env) push(v any) {
env.stack.push(v)
}
-func (env *env) pop() interface{} {
+func (env *env) pop() any {
return env.stack.pop()
}
-func (env *env) pushfork(op opcode, pc int) {
- f := &fork{op: op, pc: pc, expdepth: env.expdepth}
- env.stack.save(&f.stackindex, &f.stacklimit)
- env.scopes.save(&f.scopeindex, &f.scopelimit)
- env.paths.save(&f.pathindex, &f.pathlimit)
+func (env *env) popscope() (int, int) {
+ free := env.scopes.index > env.scopes.limit
+ s := env.scopes.pop()
+ if free {
+ env.offset = s.offset
+ }
+ return s.pc, s.saveindex
+}
+
+func (env *env) pushfork(pc int) {
+ f := fork{pc: pc, expdepth: env.expdepth}
+ f.stackindex, f.stacklimit = env.stack.save()
+ f.scopeindex, f.scopelimit = env.scopes.save()
+ f.pathindex, f.pathlimit = env.paths.save()
env.forks = append(env.forks, f)
env.debugForks(pc, ">>>")
}
-func (env *env) popfork() *fork {
+func (env *env) popfork() int {
f := env.forks[len(env.forks)-1]
env.debugForks(f.pc, "<<<")
env.forks, env.expdepth = env.forks[:len(env.forks)-1], f.expdepth
env.stack.restore(f.stackindex, f.stacklimit)
env.scopes.restore(f.scopeindex, f.scopelimit)
env.paths.restore(f.pathindex, f.pathlimit)
- return f
+ return f.pc
}
-func (env *env) scopeOffset(id int) int {
- return env.scopes.lookup(func(v interface{}) bool {
- return v.(scope).id == id
- }).(scope).offset
+func (env *env) index(v [2]int) int {
+ for id, i := v[0], env.scopes.index; i >= 0; {
+ s := env.scopes.data[i].value
+ if s.id == id {
+ return s.offset + v[1]
+ }
+ i = s.outerindex
+ }
+ panic("env.index")
}
-func (env *env) index(v [2]int) int {
- return env.scopeOffset(v[0]) + v[1]
+type pathValue struct {
+ path, value any
}
-func (env *env) pathEntries(name string, x interface{}, args []interface{}) ([]interface{}, error) {
- switch name {
- case "_index":
- if env.expdepth > 0 {
- return nil, nil
- } else if !reflect.DeepEqual(args[0], env.paths.top().([2]interface{})[1]) {
- return nil, &invalidPathError{x}
- }
- return []interface{}{args[1]}, nil
- case "_slice":
- if env.expdepth > 0 {
- return nil, nil
- } else if !reflect.DeepEqual(args[0], env.paths.top().([2]interface{})[1]) {
- return nil, &invalidPathError{x}
+func (env *env) pathIntact(v any) bool {
+ w := env.paths.top().(pathValue).value
+ switch v := v.(type) {
+ case []any, map[string]any:
+ switch w.(type) {
+ case []any, map[string]any:
+ v, w := reflect.ValueOf(v), reflect.ValueOf(w)
+ return v.Pointer() == w.Pointer() && v.Len() == w.Len()
}
- return []interface{}{map[string]interface{}{"start": args[2], "end": args[1]}}, nil
- case "getpath":
- if env.expdepth > 0 {
- return nil, nil
- } else if !reflect.DeepEqual(x, env.paths.top().([2]interface{})[1]) {
- return nil, &invalidPathError{x}
+ case float64:
+ if w, ok := w.(float64); ok {
+ return v == w || math.IsNaN(v) && math.IsNaN(w)
}
- return args[0].([]interface{}), nil
- default:
- return nil, nil
}
+ return v == w
}
-func (env *env) poppaths() []interface{} {
- var xs []interface{}
+func (env *env) poppaths() []any {
+ xs := []any{}
for {
- p := env.paths.pop().([2]interface{})
- if p[0] == nil {
+ p := env.paths.pop().(pathValue)
+ if p.path == nil {
break
}
- xs = append(xs, p[0])
+ xs = append(xs, p.path)
}
- for i := 0; i < len(xs)/2; i++ { // reverse
- j := len(xs) - 1 - i
+ for i, j := 0, len(xs)-1; i < j; i, j = i+1, j-1 {
xs[i], xs[j] = xs[j], xs[i]
}
return xs
diff --git a/vendor/github.com/itchyny/gojq/func.go b/vendor/github.com/itchyny/gojq/func.go
index 9b8189ab3..e06b4ff72 100644
--- a/vendor/github.com/itchyny/gojq/func.go
+++ b/vendor/github.com/itchyny/gojq/func.go
@@ -3,15 +3,19 @@ package gojq
import (
"encoding/base64"
"encoding/json"
+ "errors"
"fmt"
+ "io"
"math"
"math/big"
"net/url"
+ "reflect"
"regexp"
"sort"
"strconv"
"strings"
"time"
+ "unicode"
"unicode/utf8"
"github.com/itchyny/timefmt-go"
@@ -29,11 +33,12 @@ const (
type function struct {
argcount int
- callback func(interface{}, []interface{}) interface{}
+ iter bool
+ callback func(any, []any) any
}
func (fn function) accept(cnt int) bool {
- return fn.argcount&(1<<cnt) > 0
+ return fn.argcount&(1<<cnt) != 0
}
@@ -269,252 +289,495 @@ func funcLength(v interface{}) interface{} {
case float64:
return math.Abs(v)
case *big.Int:
+ if v.Sign() >= 0 {
+ return v
+ }
return new(big.Int).Abs(v)
- case nil:
- return 0
default:
- return &funcTypeError{"length", v}
+ return &func0TypeError{"abs", v}
}
}
-func funcUtf8ByteLength(v interface{}) interface{} {
+func funcLength(v any) any {
switch v := v.(type) {
+ case nil:
+ return 0
+ case int:
+ if v >= 0 {
+ return v
+ }
+ return -v
+ case float64:
+ return math.Abs(v)
+ case *big.Int:
+ if v.Sign() >= 0 {
+ return v
+ }
+ return new(big.Int).Abs(v)
case string:
+ return len([]rune(v))
+ case []any:
+ return len(v)
+ case map[string]any:
return len(v)
default:
- return &funcTypeError{"utf8bytelength", v}
+ return &func0TypeError{"length", v}
}
}
-func funcKeys(v interface{}) interface{} {
+func funcUtf8ByteLength(v any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func0TypeError{"utf8bytelength", v}
+ }
+ return len(s)
+}
+
+func funcKeys(v any) any {
switch v := v.(type) {
- case []interface{}:
- w := make([]interface{}, len(v))
+ case []any:
+ w := make([]any, len(v))
for i := range v {
w[i] = i
}
return w
- case map[string]interface{}:
- w := make([]string, len(v))
- var i int
- for k := range v {
+ case map[string]any:
+ w := make([]any, len(v))
+ for i, k := range keys(v) {
w[i] = k
- i++
}
- sort.Strings(w)
- u := make([]interface{}, len(v))
- for i, x := range w {
- u[i] = x
- }
- return u
+ return w
default:
- return &funcTypeError{"keys", v}
+ return &func0TypeError{"keys", v}
}
}
-func funcHas(v, x interface{}) interface{} {
+func keys(v map[string]any) []string {
+ w := make([]string, len(v))
+ var i int
+ for k := range v {
+ w[i] = k
+ i++
+ }
+ sort.Strings(w)
+ return w
+}
+
+func values(v any) ([]any, bool) {
switch v := v.(type) {
- case []interface{}:
+ case []any:
+ return v, true
+ case map[string]any:
+ vs := make([]any, len(v))
+ for i, k := range keys(v) {
+ vs[i] = v[k]
+ }
+ return vs, true
+ default:
+ return nil, false
+ }
+}
+
+func funcHas(v, x any) any {
+ switch v := v.(type) {
+ case []any:
if x, ok := toInt(x); ok {
return 0 <= x && x < len(v)
}
- return &hasKeyTypeError{v, x}
- case map[string]interface{}:
- switch x := x.(type) {
- case string:
+ case map[string]any:
+ if x, ok := x.(string); ok {
_, ok := v[x]
return ok
- default:
- return &hasKeyTypeError{v, x}
}
- default:
- return &hasKeyTypeError{v, x}
+ case nil:
+ return false
}
+ return &func1TypeError{"has", v, x}
}
-func funcAdd(v interface{}) interface{} {
- if vs, ok := v.(map[string]interface{}); ok {
- xs := make([]string, len(vs))
- var i int
- for k := range vs {
- xs[i] = k
- i++
+func funcToEntries(v any) any {
+ switch v := v.(type) {
+ case []any:
+ w := make([]any, len(v))
+ for i, x := range v {
+ w[i] = map[string]any{"key": i, "value": x}
+ }
+ return w
+ case map[string]any:
+ w := make([]any, len(v))
+ for i, k := range keys(v) {
+ w[i] = map[string]any{"key": k, "value": v[k]}
}
- sort.Strings(xs)
- us := make([]interface{}, len(vs))
- for i, x := range xs {
- us[i] = vs[x]
+ return w
+ default:
+ return &func0TypeError{"to_entries", v}
+ }
+}
+
+func funcFromEntries(v any) any {
+ vs, ok := v.([]any)
+ if !ok {
+ return &func0TypeError{"from_entries", v}
+ }
+ w := make(map[string]any, len(vs))
+ for _, v := range vs {
+ switch v := v.(type) {
+ case map[string]any:
+ var (
+ key string
+ value any
+ ok bool
+ )
+ for _, k := range [4]string{"key", "Key", "name", "Name"} {
+ if k := v[k]; k != nil && k != false {
+ if key, ok = k.(string); !ok {
+ return &func0WrapError{"from_entries", vs, &objectKeyNotStringError{k}}
+ }
+ break
+ }
+ }
+ if !ok {
+ return &func0WrapError{"from_entries", vs, &objectKeyNotStringError{nil}}
+ }
+ for _, k := range [2]string{"value", "Value"} {
+ if value, ok = v[k]; ok {
+ break
+ }
+ }
+ w[key] = value
+ default:
+ return &func0TypeError{"from_entries", v}
}
- v = us
}
- vs, ok := v.([]interface{})
+ return w
+}
+
+func funcAdd(v any) any {
+ vs, ok := values(v)
if !ok {
- return &funcTypeError{"add", v}
+ return &func0TypeError{"add", v}
}
v = nil
for _, x := range vs {
- switch y := x.(type) {
- case map[string]interface{}:
+ switch x := x.(type) {
+ case nil:
+ continue
+ case string:
switch w := v.(type) {
case nil:
- m := make(map[string]interface{}, len(y))
- for k, e := range y {
- m[k] = e
- }
- v = m
+ var sb strings.Builder
+ sb.WriteString(x)
+ v = &sb
continue
- case map[string]interface{}:
- for k, e := range y {
- w[k] = e
- }
+ case *strings.Builder:
+ w.WriteString(x)
continue
}
- case []interface{}:
+ case []any:
switch w := v.(type) {
case nil:
- s := make([]interface{}, len(y))
- copy(s, y)
+ s := make([]any, len(x))
+ copy(s, x)
v = s
continue
- case []interface{}:
- v = append(w, y...)
+ case []any:
+ v = append(w, x...)
+ continue
+ }
+ case map[string]any:
+ switch w := v.(type) {
+ case nil:
+ m := make(map[string]any, len(x))
+ for k, e := range x {
+ m[k] = e
+ }
+ v = m
+ continue
+ case map[string]any:
+ for k, e := range x {
+ w[k] = e
+ }
continue
}
}
+ if sb, ok := v.(*strings.Builder); ok {
+ v = sb.String()
+ }
v = funcOpAdd(nil, v, x)
if err, ok := v.(error); ok {
return err
}
}
+ if sb, ok := v.(*strings.Builder); ok {
+ v = sb.String()
+ }
return v
}
-func funcToNumber(v interface{}) interface{} {
+func funcToNumber(v any) any {
switch v := v.(type) {
case int, float64, *big.Int:
return v
case string:
if !newLexer(v).validNumber() {
- return fmt.Errorf("invalid number: %q", v)
+ return &func0WrapError{"tonumber", v, errors.New("invalid number")}
}
- return normalizeNumbers(json.Number(v))
+ return toNumber(v)
default:
- return &funcTypeError{"tonumber", v}
+ return &func0TypeError{"tonumber", v}
}
}
-func funcToString(v interface{}) interface{} {
+func toNumber(v string) any {
+ return normalizeNumber(json.Number(v))
+}
+
+func funcToString(v any) any {
if s, ok := v.(string); ok {
return s
}
return funcToJSON(v)
}
-func funcType(v interface{}) interface{} {
- return typeof(v)
+func funcType(v any) any {
+ return TypeOf(v)
}
-func funcReverse(v interface{}) interface{} {
- vs, ok := v.([]interface{})
+func funcReverse(v any) any {
+ vs, ok := v.([]any)
if !ok {
- return &expectedArrayError{v}
+ return &func0TypeError{"reverse", v}
}
- ws := make([]interface{}, len(vs))
+ ws := make([]any, len(vs))
for i, v := range vs {
ws[len(ws)-i-1] = v
}
return ws
}
-func funcContains(v, x interface{}) interface{} {
- switch v := v.(type) {
- case nil:
- if x == nil {
- return true
- }
- case bool:
- switch x := x.(type) {
- case bool:
- if v == x {
- return true
- }
- }
- }
+func funcContains(v, x any) any {
return binopTypeSwitch(v, x,
- func(l, r int) interface{} { return l == r },
- func(l, r float64) interface{} { return l == r },
- func(l, r *big.Int) interface{} { return l.Cmp(r) == 0 },
- func(l, r string) interface{} { return strings.Contains(l, r) },
- func(l, r []interface{}) interface{} {
- for _, x := range r {
- var found bool
- for _, y := range l {
- if funcContains(y, x) == true {
- found = true
- break
+ func(l, r int) any { return l == r },
+ func(l, r float64) any { return l == r },
+ func(l, r *big.Int) any { return l.Cmp(r) == 0 },
+ func(l, r string) any { return strings.Contains(l, r) },
+ func(l, r []any) any {
+ R:
+ for _, r := range r {
+ for _, l := range l {
+ if funcContains(l, r) == true {
+ continue R
}
}
- if !found {
- return false
- }
+ return false
}
return true
},
- func(l, r map[string]interface{}) interface{} {
- for k, rk := range r {
- lk, ok := l[k]
- if !ok {
- return false
- }
- c := funcContains(lk, rk)
- if _, ok := c.(error); ok {
- return false
- }
- if c == false {
+ func(l, r map[string]any) any {
+ if len(l) < len(r) {
+ return false
+ }
+ for k, r := range r {
+ if l, ok := l[k]; !ok || funcContains(l, r) != true {
return false
}
}
return true
},
- func(l, r interface{}) interface{} { return &funcContainsError{l, r} },
+ func(l, r any) any {
+ if l == r {
+ return true
+ }
+ return &func1TypeError{"contains", l, r}
+ },
)
}
-func funcExplode(v interface{}) interface{} {
+func funcIndices(v, x any) any {
+ return indexFunc("indices", v, x, indices)
+}
+
+func indices(vs, xs []any) any {
+ rs := []any{}
+ if len(xs) == 0 {
+ return rs
+ }
+ for i := 0; i <= len(vs)-len(xs); i++ {
+ if Compare(vs[i:i+len(xs)], xs) == 0 {
+ rs = append(rs, i)
+ }
+ }
+ return rs
+}
+
+func funcIndex(v, x any) any {
+ return indexFunc("index", v, x, func(vs, xs []any) any {
+ if len(xs) == 0 {
+ return nil
+ }
+ for i := 0; i <= len(vs)-len(xs); i++ {
+ if Compare(vs[i:i+len(xs)], xs) == 0 {
+ return i
+ }
+ }
+ return nil
+ })
+}
+
+func funcRindex(v, x any) any {
+ return indexFunc("rindex", v, x, func(vs, xs []any) any {
+ if len(xs) == 0 {
+ return nil
+ }
+ for i := len(vs) - len(xs); i >= 0; i-- {
+ if Compare(vs[i:i+len(xs)], xs) == 0 {
+ return i
+ }
+ }
+ return nil
+ })
+}
+
+func indexFunc(name string, v, x any, f func(_, _ []any) any) any {
switch v := v.(type) {
+ case nil:
+ return nil
+ case []any:
+ switch x := x.(type) {
+ case []any:
+ return f(v, x)
+ default:
+ return f(v, []any{x})
+ }
case string:
- return explode(v)
+ if x, ok := x.(string); ok {
+ return f(explode(v), explode(x))
+ }
+ return &func1TypeError{name, v, x}
default:
- return &funcTypeError{"explode", v}
+ return &func1TypeError{name, v, x}
+ }
+}
+
+func funcStartsWith(v, x any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func1TypeError{"startswith", v, x}
+ }
+ t, ok := x.(string)
+ if !ok {
+ return &func1TypeError{"startswith", v, x}
+ }
+ return strings.HasPrefix(s, t)
+}
+
+func funcEndsWith(v, x any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func1TypeError{"endswith", v, x}
+ }
+ t, ok := x.(string)
+ if !ok {
+ return &func1TypeError{"endswith", v, x}
}
+ return strings.HasSuffix(s, t)
}
-func explode(s string) []interface{} {
- rs := []int32(s)
- xs := make([]interface{}, len(rs))
- for i, r := range rs {
+func funcLtrimstr(v, x any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func1TypeError{"ltrimstr", v, x}
+ }
+ t, ok := x.(string)
+ if !ok {
+ return &func1TypeError{"ltrimstr", v, x}
+ }
+ return strings.TrimPrefix(s, t)
+}
+
+func funcRtrimstr(v, x any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func1TypeError{"rtrimstr", v, x}
+ }
+ t, ok := x.(string)
+ if !ok {
+ return &func1TypeError{"rtrimstr", v, x}
+ }
+ return strings.TrimSuffix(s, t)
+}
+
+func funcLtrim(v any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func0TypeError{"ltrim", v}
+ }
+ return strings.TrimLeftFunc(s, unicode.IsSpace)
+}
+
+func funcRtrim(v any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func0TypeError{"rtrim", v}
+ }
+ return strings.TrimRightFunc(s, unicode.IsSpace)
+}
+
+func funcTrim(v any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func0TypeError{"trim", v}
+ }
+ return strings.TrimSpace(s)
+}
+
+func funcExplode(v any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func0TypeError{"explode", v}
+ }
+ return explode(s)
+}
+
+func explode(s string) []any {
+ xs := make([]any, len([]rune(s)))
+ var i int
+ for _, r := range s {
xs[i] = int(r)
+ i++
}
return xs
}
-func funcImplode(v interface{}) interface{} {
- switch v := v.(type) {
- case []interface{}:
- return implode(v)
- default:
- return &funcTypeError{"implode", v}
+func funcImplode(v any) any {
+ vs, ok := v.([]any)
+ if !ok {
+ return &func0TypeError{"implode", v}
}
+ var sb strings.Builder
+ sb.Grow(len(vs))
+ for _, v := range vs {
+ if r, ok := toInt(v); ok {
+ if 0 <= r && r <= utf8.MaxRune {
+ sb.WriteRune(rune(r))
+ } else {
+ sb.WriteRune(utf8.RuneError)
+ }
+ } else {
+ return &func0TypeError{"implode", vs}
+ }
+ }
+ return sb.String()
}
-func funcSplit(v interface{}, args []interface{}) interface{} {
+func funcSplit(v any, args []any) any {
s, ok := v.(string)
if !ok {
- return &funcTypeError{"split", v}
+ return &func0TypeError{"split", v}
}
x, ok := args[0].(string)
if !ok {
- return &funcTypeError{"split", x}
+ return &func0TypeError{"split", x}
}
var ss []string
if len(args) == 1 {
@@ -524,7 +787,7 @@ func funcSplit(v interface{}, args []interface{}) interface{} {
if args[1] != nil {
v, ok := args[1].(string)
if !ok {
- return &funcTypeError{"split", args[1]}
+ return &func0TypeError{"split", args[1]}
}
flags = v
}
@@ -534,132 +797,71 @@ func funcSplit(v interface{}, args []interface{}) interface{} {
}
ss = r.Split(s, -1)
}
- xs := make([]interface{}, len(ss))
+ xs := make([]any, len(ss))
for i, s := range ss {
xs[i] = s
}
return xs
}
-func implode(v []interface{}) interface{} {
- var sb strings.Builder
- sb.Grow(len(v))
- for _, r := range v {
- if r, ok := toInt(r); ok && 0 <= r && r <= utf8.MaxRune {
- sb.WriteRune(rune(r))
- } else {
- return &funcTypeError{"implode", v}
- }
+func funcASCIIDowncase(v any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func0TypeError{"ascii_downcase", v}
}
- return sb.String()
-}
-
-func funcToJSON(v interface{}) interface{} {
- return jsonMarshal(v)
-}
-
-func funcFromJSON(v interface{}) interface{} {
- switch v := v.(type) {
- case string:
- var w interface{}
- err := json.Unmarshal([]byte(v), &w)
- if err != nil {
- return err
+ return strings.Map(func(r rune) rune {
+ if 'A' <= r && r <= 'Z' {
+ return r + ('a' - 'A')
}
- return w
- default:
- return &funcTypeError{"fromjson", v}
- }
+ return r
+ }, s)
}
-func funcFormat(v, x interface{}) interface{} {
- switch x := x.(type) {
- case string:
- fmt := "@" + x
- f := formatToFunc(fmt)
- if f == nil {
- return &formatNotFoundError{fmt}
- }
- return internalFuncs[f.Name].callback(v, nil)
- default:
- return &funcTypeError{"format", x}
+func funcASCIIUpcase(v any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func0TypeError{"ascii_upcase", v}
}
+ return strings.Map(func(r rune) rune {
+ if 'a' <= r && r <= 'z' {
+ return r - ('a' - 'A')
+ }
+ return r
+ }, s)
}
-func funcToCSV(v interface{}) interface{} {
- return funcToCSVTSV("csv", v, ",", func(s string) string {
- return `"` + strings.ReplaceAll(s, `"`, `""`) + `"`
- })
-}
-
-var tsvEscaper = strings.NewReplacer(
- "\t", `\t`,
- "\r", `\r`,
- "\n", `\n`,
- "\\", `\\`,
-)
-
-func funcToTSV(v interface{}) interface{} {
- return funcToCSVTSV("tsv", v, "\t", func(s string) string {
- return tsvEscaper.Replace(s)
- })
+func funcToJSON(v any) any {
+ return jsonMarshal(v)
}
-func funcToSh(v interface{}) interface{} {
- var xs []interface{}
- if w, ok := v.([]interface{}); ok {
- xs = w
- } else {
- xs = []interface{}{v}
+func funcFromJSON(v any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func0TypeError{"fromjson", v}
}
- var s strings.Builder
- for i, x := range xs {
- if i > 0 {
- s.WriteByte(' ')
- }
- switch x := x.(type) {
- case map[string]interface{}, []interface{}:
- return &formatShError{x}
- case string:
- s.WriteByte('\'')
- s.WriteString(strings.ReplaceAll(x, "'", `'\''`))
- s.WriteByte('\'')
- default:
- s.WriteString(jsonMarshal(x))
- }
+ var w any
+ dec := json.NewDecoder(strings.NewReader(s))
+ dec.UseNumber()
+ if err := dec.Decode(&w); err != nil {
+ return &func0WrapError{"fromjson", v, err}
}
- return s.String()
-}
-
-func funcToCSVTSV(typ string, v interface{}, sep string, escape func(string) string) interface{} {
- switch xs := v.(type) {
- case []interface{}:
- ys := make([]string, len(xs))
- for i, x := range xs {
- y, err := toCSVTSV(typ, x, escape)
- if err != nil {
- return err
- }
- ys[i] = y
- }
- return strings.Join(ys, sep)
- default:
- return &expectedArrayError{v}
+ if _, err := dec.Token(); err != io.EOF {
+ return &func0TypeError{"fromjson", v}
}
+ return normalizeNumbers(w)
}
-func toCSVTSV(typ string, v interface{}, escape func(string) string) (string, error) {
- switch v := v.(type) {
- case map[string]interface{}, []interface{}:
- return "", &formatCsvTsvRowError{typ, v}
- case string:
- return escape(v), nil
- default:
- if s := jsonMarshal(v); s != "null" {
- return s, nil
- }
- return "", nil
+func funcFormat(v, x any) any {
+ s, ok := x.(string)
+ if !ok {
+ return &func0TypeError{"format", x}
}
+ format := "@" + s
+ f := formatToFunc(format)
+ if f == nil {
+ return &formatNotFoundError{format}
+ }
+ return internalFuncs[f.Name].callback(v, nil)
}
var htmlEscaper = strings.NewReplacer(
@@ -670,7 +872,7 @@ var htmlEscaper = strings.NewReplacer(
`"`, """,
)
-func funcToHTML(v interface{}) interface{} {
+func funcToHTML(v any) any {
switch x := funcToString(v).(type) {
case string:
return htmlEscaper.Replace(x)
@@ -679,7 +881,7 @@ func funcToHTML(v interface{}) interface{} {
}
}
-func funcToURI(v interface{}) interface{} {
+func funcToURI(v any) any {
switch x := funcToString(v).(type) {
case string:
return url.QueryEscape(x)
@@ -688,7 +890,78 @@ func funcToURI(v interface{}) interface{} {
}
}
-func funcToBase64(v interface{}) interface{} {
+func funcToURId(v any) any {
+ switch x := funcToString(v).(type) {
+ case string:
+ x, err := url.QueryUnescape(x)
+ if err != nil {
+ return &func0WrapError{"@urid", v, err}
+ }
+ return x
+ default:
+ return x
+ }
+}
+
+var csvEscaper = strings.NewReplacer(
+ `"`, `""`,
+ "\x00", `\0`,
+)
+
+func funcToCSV(v any) any {
+ return formatJoin("csv", v, ",", func(s string) string {
+ return `"` + csvEscaper.Replace(s) + `"`
+ })
+}
+
+var tsvEscaper = strings.NewReplacer(
+ "\t", `\t`,
+ "\r", `\r`,
+ "\n", `\n`,
+ "\\", `\\`,
+ "\x00", `\0`,
+)
+
+func funcToTSV(v any) any {
+ return formatJoin("tsv", v, "\t", tsvEscaper.Replace)
+}
+
+var shEscaper = strings.NewReplacer(
+ "'", `'\''`,
+ "\x00", `\0`,
+)
+
+func funcToSh(v any) any {
+ if _, ok := v.([]any); !ok {
+ v = []any{v}
+ }
+ return formatJoin("sh", v, " ", func(s string) string {
+ return "'" + shEscaper.Replace(s) + "'"
+ })
+}
+
+func formatJoin(typ string, v any, sep string, escape func(string) string) any {
+ vs, ok := v.([]any)
+ if !ok {
+ return &func0TypeError{"@" + typ, v}
+ }
+ ss := make([]string, len(vs))
+ for i, v := range vs {
+ switch v := v.(type) {
+ case []any, map[string]any:
+ return &formatRowError{typ, v}
+ case string:
+ ss[i] = escape(v)
+ default:
+ if s := jsonMarshal(v); s != "null" || typ == "sh" {
+ ss[i] = s
+ }
+ }
+ }
+ return strings.Join(ss, sep)
+}
+
+func funcToBase64(v any) any {
switch x := funcToString(v).(type) {
case string:
return base64.StdEncoding.EncodeToString([]byte(x))
@@ -697,12 +970,15 @@ func funcToBase64(v interface{}) interface{} {
}
}
-func funcToBase64d(v interface{}) interface{} {
+func funcToBase64d(v any) any {
switch x := funcToString(v).(type) {
case string:
- y, err := base64.StdEncoding.DecodeString(x)
+ if i := strings.IndexRune(x, base64.StdPadding); i >= 0 {
+ x = x[:i]
+ }
+ y, err := base64.RawStdEncoding.DecodeString(x)
if err != nil {
- return err
+ return &func0WrapError{"@base64d", v, err}
}
return string(y)
default:
@@ -710,65 +986,41 @@ func funcToBase64d(v interface{}) interface{} {
}
}
-func funcIndex(_, v, x interface{}) interface{} {
+func funcIndex2(_, v, x any) any {
switch x := x.(type) {
case string:
switch v := v.(type) {
case nil:
return nil
- case map[string]interface{}:
+ case map[string]any:
return v[x]
default:
return &expectedObjectError{v}
}
case int, float64, *big.Int:
- idx, _ := toInt(x)
+ i, _ := toInt(x)
switch v := v.(type) {
case nil:
return nil
- case []interface{}:
- return funcIndexSlice(nil, nil, &idx, v)
+ case []any:
+ return index(v, i)
case string:
- switch v := funcIndexSlice(nil, nil, &idx, explode(v)).(type) {
- case []interface{}:
- return implode(v)
- case int:
- return implode([]interface{}{v})
- case nil:
- return ""
- default:
- panic(v)
- }
+ return indexString(v, i)
default:
return &expectedArrayError{v}
}
- case []interface{}:
+ case []any:
switch v := v.(type) {
case nil:
return nil
- case []interface{}:
- var xs []interface{}
- if len(x) == 0 {
- return xs
- }
- for i := 0; i < len(v) && i < len(v)-len(x)+1; i++ {
- var neq bool
- for j, y := range x {
- if neq = compare(v[i+j], y) != 0; neq {
- break
- }
- }
- if !neq {
- xs = append(xs, i)
- }
- }
- return xs
+ case []any:
+ return indices(v, x)
default:
return &expectedArrayError{v}
}
- case map[string]interface{}:
+ case map[string]any:
if v == nil {
- return v
+ return nil
}
start, ok := x["start"]
if !ok {
@@ -780,217 +1032,361 @@ func funcIndex(_, v, x interface{}) interface{} {
}
return funcSlice(nil, v, end, start)
default:
- return &objectKeyNotStringError{x}
+ switch v.(type) {
+ case []any:
+ return &arrayIndexNotNumberError{x}
+ case string:
+ return &stringIndexNotNumberError{x}
+ default:
+ return &objectKeyNotStringError{x}
+ }
}
}
-func funcSlice(_, v, end, start interface{}) (r interface{}) {
- if w, ok := v.(string); ok {
- v = explode(w)
- defer func() {
- switch s := r.(type) {
- case []interface{}:
- r = implode(s)
- case int:
- r = implode([]interface{}{s})
- case nil:
- r = ""
- case error:
- default:
- panic(r)
+func index(vs []any, i int) any {
+ i = clampIndex(i, -1, len(vs))
+ if 0 <= i && i < len(vs) {
+ return vs[i]
+ }
+ return nil
+}
+
+func indexString(s string, i int) any {
+ l := len([]rune(s))
+ i = clampIndex(i, -1, l)
+ if 0 <= i && i < l {
+ for _, r := range s {
+ if i--; i < 0 {
+ return string(r)
}
- }()
+ }
}
+ return nil
+}
+
+func funcSlice(_, v, e, s any) (r any) {
switch v := v.(type) {
case nil:
return nil
- case []interface{}:
- if start != nil {
- if start, ok := toInt(start); ok {
- if end != nil {
- if end, ok := toInt(end); ok {
- return funcIndexSlice(&start, &end, nil, v)
- }
- return &arrayIndexNotNumberError{end}
- }
- return funcIndexSlice(&start, nil, nil, v)
+ case []any:
+ return slice(v, e, s)
+ case string:
+ return sliceString(v, e, s)
+ default:
+ return &expectedArrayError{v}
+ }
+}
+
+func slice(vs []any, e, s any) any {
+ var start, end int
+ if s != nil {
+ if i, ok := toInt(s); ok {
+ start = clampIndex(i, 0, len(vs))
+ } else {
+ return &arrayIndexNotNumberError{s}
+ }
+ }
+ if e != nil {
+ if i, ok := toInt(e); ok {
+ end = clampIndex(i, start, len(vs))
+ } else {
+ return &arrayIndexNotNumberError{e}
+ }
+ } else {
+ end = len(vs)
+ }
+ return vs[start:end]
+}
+
+func sliceString(v string, e, s any) any {
+ var start, end int
+ l := len([]rune(v))
+ if s != nil {
+ if i, ok := toInt(s); ok {
+ start = clampIndex(i, 0, l)
+ } else {
+ return &stringIndexNotNumberError{s}
+ }
+ }
+ if e != nil {
+ if i, ok := toInt(e); ok {
+ end = clampIndex(i, start, l)
+ } else {
+ return &stringIndexNotNumberError{e}
+ }
+ } else {
+ end = l
+ }
+ if start < l {
+ for i := range v {
+ if start--; start < 0 {
+ start = i
+ break
}
- return &arrayIndexNotNumberError{start}
}
- if end != nil {
- if end, ok := toInt(end); ok {
- return funcIndexSlice(nil, &end, nil, v)
+ } else {
+ start = len(v)
+ }
+ if end < l {
+ for i := range v {
+ if end--; end < 0 {
+ end = i
+ break
}
- return &arrayIndexNotNumberError{end}
}
- return v
- default:
- return &expectedArrayError{v}
+ } else {
+ end = len(v)
}
+ return v[start:end]
}
-func funcIndexSlice(start, end, index *int, a []interface{}) interface{} {
- aa := a
- if index != nil {
- i := toIndex(aa, *index)
- if i < 0 {
- return nil
+func clampIndex(i, min, max int) int {
+ if i < 0 {
+ i += max
+ }
+ if i < min {
+ return min
+ } else if i < max {
+ return i
+ } else {
+ return max
+ }
+}
+
+func funcFlatten(v any, args []any) any {
+ vs, ok := values(v)
+ if !ok {
+ return &func0TypeError{"flatten", v}
+ }
+ var depth float64
+ if len(args) == 0 {
+ depth = -1
+ } else {
+ depth, ok = toFloat(args[0])
+ if !ok {
+ return &func0TypeError{"flatten", args[0]}
+ }
+ if depth < 0 {
+ return &flattenDepthError{depth}
}
- return a[i]
}
- if end != nil {
- i := toIndex(aa, *end)
- if i == -1 {
- i = len(a)
- } else if i == -2 {
- i = 0
+ return flatten([]any{}, vs, depth)
+}
+
+func flatten(xs, vs []any, depth float64) []any {
+ for _, v := range vs {
+ if vs, ok := v.([]any); ok && depth != 0 {
+ xs = flatten(xs, vs, depth-1)
+ } else {
+ xs = append(xs, v)
}
- a = a[:i]
}
- if start != nil {
- i := toIndex(aa, *start)
- if i == -1 || len(a) < i {
- i = len(a)
- } else if i == -2 {
- i = 0
+ return xs
+}
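flatten recurses into nested arrays and decrements depth at each level; the default depth of -1 never reaches zero, so all nesting is removed. A brief usage sketch through the public API, assuming the usual Parse/Run pattern (the input value is arbitrary):

    package main

    import (
        "fmt"

        "github.com/itchyny/gojq"
    )

    func main() {
        // flatten(1) unwraps one level of nesting; plain flatten unwraps all.
        query, err := gojq.Parse("flatten(1)")
        if err != nil {
            panic(err)
        }
        iter := query.Run([]any{1, []any{2, []any{3}}})
        for {
            v, ok := iter.Next()
            if !ok {
                break
            }
            fmt.Println(v) // [1 2 [3]]
        }
    }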
+
+type rangeIter struct {
+ value, end, step any
+}
+
+func (iter *rangeIter) Next() (any, bool) {
+ if Compare(iter.step, 0)*Compare(iter.value, iter.end) >= 0 {
+ return nil, false
+ }
+ v := iter.value
+ iter.value = funcOpAdd(nil, v, iter.step)
+ return v, true
+}
+
+func funcRange(_ any, xs []any) any {
+ for _, x := range xs {
+ switch x.(type) {
+ case int, float64, *big.Int:
+ default:
+ return &func0TypeError{"range", x}
}
- a = a[i:]
}
- return a
+ return &rangeIter{xs[0], xs[1], xs[2]}
}
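rangeIter stops as soon as sign(step) * sign(value - end) is no longer negative, which handles ascending and descending ranges with a single test. A small sketch with plain ints, where cmp stands in for gojq's generic Compare (both names here are illustrative):

    package main

    import "fmt"

    // cmp is a stand-in for gojq's Compare, restricted to ints.
    func cmp(a, b int) int {
        switch {
        case a < b:
            return -1
        case a > b:
            return 1
        default:
            return 0
        }
    }

    // rangeInts mirrors rangeIter.Next: yield while sign(step)*sign(v-end) < 0.
    func rangeInts(start, end, step int) []int {
        var out []int
        for v := start; cmp(step, 0)*cmp(v, end) < 0; v += step {
            out = append(out, v)
        }
        return out
    }

    func main() {
        fmt.Println(rangeInts(0, 5, 2))  // [0 2 4]
        fmt.Println(rangeInts(5, 0, -2)) // [5 3 1]
        fmt.Println(rangeInts(0, 5, -1)) // [] (never enters the loop)
    }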
-func toIndex(a []interface{}, i int) int {
- l := len(a)
- switch {
- case i < -l:
- return -2
- case i < 0:
- return l + i
- case i < l:
- return i
- default:
- return -1
+func funcMin(v any) any {
+ vs, ok := v.([]any)
+ if !ok {
+ return &func0TypeError{"min", v}
+ }
+ return minMaxBy(vs, vs, true)
+}
+
+func funcMinBy(v, x any) any {
+ vs, ok := v.([]any)
+ if !ok {
+ return &func1TypeError{"min_by", v, x}
}
+ xs, ok := x.([]any)
+ if !ok {
+ return &func1TypeError{"min_by", v, x}
+ }
+ if len(vs) != len(xs) {
+ return &func1WrapError{"min_by", v, x, &lengthMismatchError{}}
+ }
+ return minMaxBy(vs, xs, true)
}
-func funcBreak(v interface{}) interface{} {
- if v, ok := v.(string); ok {
- return &breakError{v}
+func funcMax(v any) any {
+ vs, ok := v.([]any)
+ if !ok {
+ return &func0TypeError{"max", v}
}
- return &funcTypeError{"_break", v}
+ return minMaxBy(vs, vs, false)
}
-func funcMinBy(v, x interface{}) interface{} {
- vs, ok := v.([]interface{})
+func funcMaxBy(v, x any) any {
+ vs, ok := v.([]any)
if !ok {
- return &expectedArrayError{v}
+ return &func1TypeError{"max_by", v, x}
}
- xs, ok := x.([]interface{})
+ xs, ok := x.([]any)
if !ok {
- return &expectedArrayError{x}
+ return &func1TypeError{"max_by", v, x}
}
if len(vs) != len(xs) {
- panic("length mismatch in min_by")
+ return &func1WrapError{"max_by", v, x, &lengthMismatchError{}}
}
- return funcMinMaxBy(vs, xs, true)
+ return minMaxBy(vs, xs, false)
}
-func funcMaxBy(v, x interface{}) interface{} {
- vs, ok := v.([]interface{})
+func minMaxBy(vs, xs []any, isMin bool) any {
+ if len(vs) == 0 {
+ return nil
+ }
+ i, j, x := 0, 0, xs[0]
+ for i++; i < len(xs); i++ {
+ if Compare(x, xs[i]) > 0 == isMin {
+ j, x = i, xs[i]
+ }
+ }
+ return vs[j]
+}
+
+type sortItem struct {
+ value, key any
+}
+
+func sortItems(name string, v, x any) ([]*sortItem, error) {
+ vs, ok := v.([]any)
if !ok {
- return &expectedArrayError{v}
+ if strings.HasSuffix(name, "_by") {
+ return nil, &func1TypeError{name, v, x}
+ }
+ return nil, &func0TypeError{name, v}
}
- xs, ok := x.([]interface{})
+ xs, ok := x.([]any)
if !ok {
- return &expectedArrayError{x}
+ return nil, &func1TypeError{name, v, x}
}
if len(vs) != len(xs) {
- panic("length mismatch in max_by")
+ return nil, &func1WrapError{name, v, x, &lengthMismatchError{}}
+ }
+ items := make([]*sortItem, len(vs))
+ for i, v := range vs {
+ items[i] = &sortItem{v, xs[i]}
}
- return funcMinMaxBy(vs, xs, false)
+ sort.SliceStable(items, func(i, j int) bool {
+ return Compare(items[i].key, items[j].key) < 0
+ })
+ return items, nil
}
-func funcMinMaxBy(vs, xs []interface{}, isMin bool) interface{} {
- if len(vs) == 0 {
- return nil
- }
- i, j, x := 0, 0, xs[0]
- for i++; i < len(xs); i++ {
- if (compare(x, xs[i]) > 0) == isMin {
- j, x = i, xs[i]
- }
- }
- return vs[j]
+func funcSort(v any) any {
+ return sortBy("sort", v, v)
}
-type sortItem struct {
- value, key interface{}
+func funcSortBy(v, x any) any {
+ return sortBy("sort_by", v, x)
}
-func funcSortBy(v, x interface{}) interface{} {
- items, err := sortItems(v, x)
+func sortBy(name string, v, x any) any {
+ items, err := sortItems(name, v, x)
if err != nil {
return err
}
- rs := make([]interface{}, len(items))
+ rs := make([]any, len(items))
for i, x := range items {
rs[i] = x.value
}
return rs
}
-func funcGroupBy(v, x interface{}) interface{} {
- items, err := sortItems(v, x)
+func funcGroupBy(v, x any) any {
+ items, err := sortItems("group_by", v, x)
if err != nil {
return err
}
- var rs []interface{}
- var last interface{}
+ rs := []any{}
+ var last any
for i, r := range items {
- if i == 0 || compare(last, r.key) != 0 {
- rs, last = append(rs, []interface{}{r.value}), r.key
+ if i == 0 || Compare(last, r.key) != 0 {
+ rs, last = append(rs, []any{r.value}), r.key
} else {
- rs[len(rs)-1] = append(rs[len(rs)-1].([]interface{}), r.value)
+ rs[len(rs)-1] = append(rs[len(rs)-1].([]any), r.value)
}
}
return rs
}
-func funcUniqueBy(v, x interface{}) interface{} {
- items, err := sortItems(v, x)
+func funcUnique(v any) any {
+ return uniqueBy("unique", v, v)
+}
+
+func funcUniqueBy(v, x any) any {
+ return uniqueBy("unique_by", v, x)
+}
+
+func uniqueBy(name string, v, x any) any {
+ items, err := sortItems(name, v, x)
if err != nil {
return err
}
- var rs []interface{}
- var last interface{}
+ rs := []any{}
+ var last any
for i, r := range items {
- if i == 0 || compare(last, r.key) != 0 {
+ if i == 0 || Compare(last, r.key) != 0 {
rs, last = append(rs, r.value), r.key
}
}
return rs
}
-func sortItems(v, x interface{}) ([]*sortItem, error) {
- vs, ok := v.([]interface{})
+func funcJoin(v, x any) any {
+ vs, ok := values(v)
if !ok {
- return nil, &expectedArrayError{v}
+ return &func1TypeError{"join", v, x}
}
- xs, ok := x.([]interface{})
- if !ok {
- return nil, &expectedArrayError{x}
+ if len(vs) == 0 {
+ return ""
}
- if len(vs) != len(xs) {
- panic("length mismatch")
+ sep, ok := x.(string)
+ if len(vs) > 1 && !ok {
+ return &func1TypeError{"join", v, x}
}
- items := make([]*sortItem, len(vs))
+ ss := make([]string, len(vs))
for i, v := range vs {
- items[i] = &sortItem{v, xs[i]}
+ switch v := v.(type) {
+ case nil:
+ case string:
+ ss[i] = v
+ case bool:
+ if v {
+ ss[i] = "true"
+ } else {
+ ss[i] = "false"
+ }
+ case int, float64, *big.Int:
+ ss[i] = jsonMarshal(v)
+ default:
+ return &joinTypeError{v}
+ }
}
- sort.SliceStable(items, func(i, j int) bool {
- return compare(items[i].key, items[j].key) < 0
- })
- return items, nil
+ return strings.Join(ss, sep)
}
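The rewritten funcJoin stringifies elements itself: null becomes the empty string, booleans and numbers are rendered as JSON text, and any other element type is a join error. A short example through the public API (the input values are arbitrary):

    package main

    import (
        "fmt"

        "github.com/itchyny/gojq"
    )

    func main() {
        query, err := gojq.Parse(`join("-")`)
        if err != nil {
            panic(err)
        }
        iter := query.Run([]any{1, "a", nil, true})
        v, _ := iter.Next()
        fmt.Println(v) // 1-a--true
    }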
func funcSignificand(v float64) float64 {
@@ -1004,27 +1400,27 @@ func funcExp10(v float64) float64 {
return math.Pow(10, v)
}
-func funcLgamma(v float64) float64 {
- v, _ = math.Lgamma(v)
- return v
-}
-
-func funcFrexp(v interface{}) interface{} {
+func funcFrexp(v any) any {
x, ok := toFloat(v)
if !ok {
- return &funcTypeError{"frexp", v}
+ return &func0TypeError{"frexp", v}
}
f, e := math.Frexp(x)
- return []interface{}{f, e}
+ return []any{f, e}
}
-func funcModf(v interface{}) interface{} {
+func funcModf(v any) any {
x, ok := toFloat(v)
if !ok {
- return &funcTypeError{"modf", v}
+ return &func0TypeError{"modf", v}
}
i, f := math.Modf(x)
- return []interface{}{f, i}
+ return []any{f, i}
+}
+
+func funcLgamma(v float64) float64 {
+ v, _ = math.Lgamma(v)
+ return v
}
func funcDrem(l, r float64) float64 {
@@ -1043,315 +1439,419 @@ func funcLdexp(l, r float64) float64 {
return math.Ldexp(l, int(r))
}
-func funcScalb(l, r float64) float64 {
- return l * math.Pow(2, r)
-}
-
-func funcScalbln(l, r float64) float64 {
- return l * math.Pow(2, r)
-}
-
func funcYn(l, r float64) float64 {
return math.Yn(int(l), r)
}
-func funcFma(x, y, z float64) float64 {
- return x*y + z
-}
-
-func funcInfinite(interface{}) interface{} {
+func funcInfinite(any) any {
return math.Inf(1)
}
-func funcIsfinite(v interface{}) interface{} {
+func funcIsfinite(v any) any {
x, ok := toFloat(v)
return ok && !math.IsInf(x, 0)
}
-func funcIsinfinite(v interface{}) interface{} {
+func funcIsinfinite(v any) any {
x, ok := toFloat(v)
return ok && math.IsInf(x, 0)
}
-func funcNan(interface{}) interface{} {
+func funcNan(any) any {
return math.NaN()
}
-func funcIsnan(v interface{}) interface{} {
+func funcIsnan(v any) any {
x, ok := toFloat(v)
if !ok {
if v == nil {
return false
}
- return &funcTypeError{"isnan", v}
+ return &func0TypeError{"isnan", v}
}
return math.IsNaN(x)
}
-func funcIsnormal(v interface{}) interface{} {
- x, ok := toFloat(v)
- return ok && !math.IsNaN(x) && !math.IsInf(x, 0) && x != 0.0
+func funcIsnormal(v any) any {
+ if v, ok := toFloat(v); ok {
+ e := math.Float64bits(v) & 0x7ff0000000000000 >> 52
+ return 0 < e && e < 0x7ff
+ }
+ return false
}
-func funcSetpath(v, p, w interface{}) interface{} {
- path, ok := p.([]interface{})
- if !ok {
- return &funcTypeError{"setpath", p}
+// An `allocator` creates new maps and slices and stores their addresses.
+// It is used to reduce allocations in the assignment operator (`=`), the
+// update-assignment operator (`|=`), and the `map_values`, `del`, and
+// `delpaths` functions.
+type allocator map[uintptr]struct{}
+
+func funcAllocator(any, []any) any {
+ return allocator{}
+}
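The allocator lets the assignment paths tell containers they created themselves (safe to mutate in place) from containers that came from the input (which must be copied before modification). A condensed sketch of that bookkeeping; the names mirror the hunk above, but the program is only illustrative:

    package main

    import (
        "fmt"
        "reflect"
    )

    // allocator remembers the addresses of maps and slices it created.
    type allocator map[uintptr]struct{}

    func (a allocator) makeObject(l int) map[string]any {
        v := make(map[string]any, l)
        a[reflect.ValueOf(v).Pointer()] = struct{}{}
        return v
    }

    func (a allocator) allocated(v any) bool {
        _, ok := a[reflect.ValueOf(v).Pointer()]
        return ok
    }

    func main() {
        a := allocator{}
        mine := a.makeObject(1)         // created by this allocator
        input := map[string]any{"x": 1} // came from the caller
        fmt.Println(a.allocated(mine))  // true
        fmt.Println(a.allocated(input)) // false
    }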
+
+func (a allocator) allocated(v any) bool {
+ _, ok := a[reflect.ValueOf(v).Pointer()]
+ return ok
+}
+
+func (a allocator) makeObject(l int) map[string]any {
+ v := make(map[string]any, l)
+ if a != nil {
+ a[reflect.ValueOf(v).Pointer()] = struct{}{}
}
- var err error
- if v, err = updatePaths(v, path, w, false); err != nil {
- if err, ok := err.(*funcTypeError); ok {
- err.name = "setpath"
- }
- return err
+ return v
+}
+
+func (a allocator) makeArray(l, c int) []any {
+ if c < l {
+ c = l
+ }
+ v := make([]any, l, c)
+ if a != nil {
+ a[reflect.ValueOf(v).Pointer()] = struct{}{}
}
return v
}
-func funcDelpaths(v, p interface{}) interface{} {
- paths, ok := p.([]interface{})
+func funcSetpath(v, p, n any) any {
+ // There is no need to use an allocator on a single update.
+ return setpath(v, p, n, nil)
+}
+
+// Used in compiler#compileAssign and compiler#compileModify.
+func funcSetpathWithAllocator(v any, args []any) any {
+ return setpath(v, args[0], args[1], args[2].(allocator))
+}
+
+func setpath(v, p, n any, a allocator) any {
+ path, ok := p.([]any)
+ if !ok {
+ return &func1TypeError{"setpath", v, p}
+ }
+ u, err := update(v, path, n, a)
+ if err != nil {
+ return &func2WrapError{"setpath", v, p, n, err}
+ }
+ return u
+}
+
+func funcDelpaths(v, p any) any {
+ return delpaths(v, p, allocator{})
+}
+
+// Used in compiler#compileAssign and compiler#compileModify.
+func funcDelpathsWithAllocator(v any, args []any) any {
+ return delpaths(v, args[0], args[1].(allocator))
+}
+
+func delpaths(v, p any, a allocator) any {
+ paths, ok := p.([]any)
if !ok {
- return &funcTypeError{"delpaths", p}
+ return &func1TypeError{"delpaths", v, p}
+ }
+ if len(paths) == 0 {
+ return v
}
// Fills the paths with an empty value and then deletes them. We cannot delete
// in each loop because array indices should not change. For example,
// jq -n "[0, 1, 2, 3] | delpaths([[1], [2]])" #=> [0, 3].
var empty struct{}
var err error
- for _, p := range paths {
- path, ok := p.([]interface{})
+ u := v
+ for _, q := range paths {
+ path, ok := q.([]any)
if !ok {
- return &funcTypeError{"delpaths", p}
+ return &func1WrapError{"delpaths", v, p, &expectedArrayError{q}}
}
- if v, err = updatePaths(v, path, empty, true); err != nil {
- return err
+ u, err = update(u, path, empty, a)
+ if err != nil {
+ return &func1WrapError{"delpaths", v, p, err}
}
}
- return deleteEmpty(v)
+ return deleteEmpty(u)
}
-func updatePaths(v interface{}, path []interface{}, w interface{}, delpaths bool) (interface{}, error) {
+func update(v any, path []any, n any, a allocator) (any, error) {
if len(path) == 0 {
- return w, nil
+ return n, nil
}
- switch x := path[0].(type) {
+ switch p := path[0].(type) {
case string:
- if v == nil {
- if delpaths {
- return v, nil
- }
- v = make(map[string]interface{})
- }
- switch uu := v.(type) {
- case map[string]interface{}:
- if _, ok := uu[x]; !ok && delpaths {
- return v, nil
- }
- u, err := updatePaths(uu[x], path[1:], w, delpaths)
- if err != nil {
- return nil, err
- }
- vs := make(map[string]interface{}, len(uu))
- for k, v := range uu {
- vs[k] = v
- }
- vs[x] = u
- return vs, nil
+ switch v := v.(type) {
+ case nil:
+ return updateObject(nil, p, path[1:], n, a)
+ case map[string]any:
+ return updateObject(v, p, path[1:], n, a)
+ case struct{}:
+ return v, nil
default:
return nil, &expectedObjectError{v}
}
case int, float64, *big.Int:
- if v == nil {
- if delpaths {
- return v, nil
- }
- v = []interface{}{}
- }
- switch uu := v.(type) {
- case []interface{}:
- y, _ := toInt(x)
- l := len(uu)
- var copied bool
- if copied = y >= l; copied {
- if delpaths {
- return v, nil
- }
- if y > 0x3ffffff {
- return nil, &arrayIndexTooLargeError{y}
- }
- l = y + 1
- ys := make([]interface{}, l)
- copy(ys, uu)
- uu = ys
- } else if y < -l {
- if delpaths {
- return v, nil
- }
- return nil, &funcTypeError{v: y}
- } else if y < 0 {
- y += l
- }
- u, err := updatePaths(uu[y], path[1:], w, delpaths)
- if err != nil {
- return nil, err
- }
- if copied {
- uu[y] = u
- return uu, nil
- }
- vs := make([]interface{}, l)
- copy(vs, uu)
- vs[y] = u
- return vs, nil
+ i, _ := toInt(p)
+ switch v := v.(type) {
+ case nil:
+ return updateArrayIndex(nil, i, path[1:], n, a)
+ case []any:
+ return updateArrayIndex(v, i, path[1:], n, a)
+ case struct{}:
+ return v, nil
default:
return nil, &expectedArrayError{v}
}
- case map[string]interface{}:
- if len(x) == 0 {
- switch v.(type) {
- case []interface{}:
- return nil, &arrayIndexNotNumberError{x}
- default:
- return nil, &objectKeyNotStringError{x}
- }
- }
- if v == nil {
- v = []interface{}{}
- }
- switch uu := v.(type) {
- case []interface{}:
- var start, end int
- if x, ok := toInt(x["start"]); ok {
- x := toIndex(uu, x)
- if x > len(uu) || x == -1 {
- start = len(uu)
- } else if x == -2 {
- start = 0
- } else {
- start = x
- }
- }
- if x, ok := toInt(x["end"]); ok {
- x := toIndex(uu, x)
- if x == -1 {
- end = len(uu)
- } else if x < start {
- end = start
- } else {
- end = x
- }
- } else {
- end = len(uu)
- }
- if delpaths {
- if start >= end {
- return uu, nil
- }
- if len(path) > 1 {
- u, err := updatePaths(uu[start:end], path[1:], w, delpaths)
- if err != nil {
- return nil, err
- }
- switch us := u.(type) {
- case []interface{}:
- vs := make([]interface{}, len(uu))
- copy(vs, uu)
- copy(vs[start:end], us)
- return vs, nil
- default:
- return nil, &expectedArrayError{u}
- }
- }
- vs := make([]interface{}, len(uu))
- copy(vs, uu)
- for y := start; y < end; y++ {
- vs[y] = w
- }
- return vs, nil
- }
- if len(path) > 1 {
- u, err := updatePaths(uu[start:end], path[1:], w, delpaths)
- if err != nil {
- return nil, err
- }
- w = u
- }
- switch v := w.(type) {
- case []interface{}:
- vs := make([]interface{}, start+len(v)+len(uu)-end)
- copy(vs, uu[:start])
- copy(vs[start:], v)
- copy(vs[start+len(v):], uu[end:])
- return vs, nil
- default:
- return nil, &expectedArrayError{v}
- }
+ case map[string]any:
+ switch v := v.(type) {
+ case nil:
+ return updateArraySlice(nil, p, path[1:], n, a)
+ case []any:
+ return updateArraySlice(v, p, path[1:], n, a)
+ case struct{}:
+ return v, nil
default:
return nil, &expectedArrayError{v}
}
default:
switch v.(type) {
- case []interface{}:
- return nil, &arrayIndexNotNumberError{x}
+ case []any:
+ return nil, &arrayIndexNotNumberError{p}
default:
- return nil, &objectKeyNotStringError{x}
+ return nil, &objectKeyNotStringError{p}
+ }
+ }
+}
+
+func updateObject(v map[string]any, k string, path []any, n any, a allocator) (any, error) {
+ x, ok := v[k]
+ if !ok && n == struct{}{} {
+ return v, nil
+ }
+ u, err := update(x, path, n, a)
+ if err != nil {
+ return nil, err
+ }
+ if a.allocated(v) {
+ v[k] = u
+ return v, nil
+ }
+ w := a.makeObject(len(v) + 1)
+ for k, v := range v {
+ w[k] = v
+ }
+ w[k] = u
+ return w, nil
+}
+
+func updateArrayIndex(v []any, i int, path []any, n any, a allocator) (any, error) {
+ var x any
+ if j := clampIndex(i, -1, len(v)); j < 0 {
+ if n == struct{}{} {
+ return v, nil
+ }
+ return nil, &arrayIndexNegativeError{i}
+ } else if j < len(v) {
+ i = j
+ x = v[i]
+ } else {
+ if n == struct{}{} {
+ return v, nil
+ }
+ if i >= 0x8000000 {
+ return nil, &arrayIndexTooLargeError{i}
+ }
+ }
+ u, err := update(x, path, n, a)
+ if err != nil {
+ return nil, err
+ }
+ l, c := len(v), cap(v)
+ if a.allocated(v) {
+ if i < c {
+ if i >= l {
+ v = v[:i+1]
+ }
+ v[i] = u
+ return v, nil
+ }
+ c *= 2
+ }
+ if i >= l {
+ l = i + 1
+ }
+ w := a.makeArray(l, c)
+ copy(w, v)
+ w[i] = u
+ return w, nil
+}
+
+func updateArraySlice(v []any, m map[string]any, path []any, n any, a allocator) (any, error) {
+ s, ok := m["start"]
+ if !ok {
+ return nil, &expectedStartEndError{m}
+ }
+ e, ok := m["end"]
+ if !ok {
+ return nil, &expectedStartEndError{m}
+ }
+ var start, end int
+ if i, ok := toInt(s); ok {
+ start = clampIndex(i, 0, len(v))
+ }
+ if i, ok := toInt(e); ok {
+ end = clampIndex(i, start, len(v))
+ } else {
+ end = len(v)
+ }
+ if start == end && n == struct{}{} {
+ return v, nil
+ }
+ u, err := update(v[start:end], path, n, a)
+ if err != nil {
+ return nil, err
+ }
+ switch u := u.(type) {
+ case []any:
+ var w []any
+ if len(u) == end-start && a.allocated(v) {
+ w = v
+ } else {
+ w = a.makeArray(len(v)-(end-start)+len(u), 0)
+ copy(w, v[:start])
+ copy(w[start+len(u):], v[end:])
+ }
+ copy(w[start:], u)
+ return w, nil
+ case struct{}:
+ var w []any
+ if a.allocated(v) {
+ w = v
+ } else {
+ w = a.makeArray(len(v), 0)
+ copy(w, v)
+ }
+ for i := start; i < end; i++ {
+ w[i] = u
+ }
+ return w, nil
+ default:
+ return nil, &expectedArrayError{u}
+ }
+}
+
+func deleteEmpty(v any) any {
+ switch v := v.(type) {
+ case struct{}:
+ return nil
+ case map[string]any:
+ for k, w := range v {
+ if w == struct{}{} {
+ delete(v, k)
+ } else {
+ v[k] = deleteEmpty(w)
+ }
+ }
+ return v
+ case []any:
+ var j int
+ for _, w := range v {
+ if w != struct{}{} {
+ v[j] = deleteEmpty(w)
+ j++
+ }
+ }
+ for i := j; i < len(v); i++ {
+ v[i] = nil
}
+ return v[:j]
+ default:
+ return v
}
}
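delpaths first overwrites every targeted position with a struct{}{} sentinel and only compacts afterwards, so the indices recorded in the paths stay valid while updating; deleteEmpty is that compaction pass. A minimal sketch of the array half of it (compact and the sample values are illustrative):

    package main

    import "fmt"

    // sentinel plays the role of gojq's struct{}{} deletion marker.
    var sentinel = struct{}{}

    // compact drops sentinel slots in place, like deleteEmpty does for arrays.
    func compact(v []any) []any {
        var j int
        for _, w := range v {
            if w != sentinel {
                v[j] = w
                j++
            }
        }
        return v[:j]
    }

    func main() {
        // delpaths([[1],[2]]) on [0,1,2,3]: mark indices 1 and 2 first,
        // then compact in a single pass.
        v := []any{0, sentinel, sentinel, 3}
        fmt.Println(compact(v)) // [0 3]
    }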
-func funcGetpath(v, p interface{}) interface{} {
- keys, ok := p.([]interface{})
+func funcGetpath(v, p any) any {
+ path, ok := p.([]any)
if !ok {
- return &funcTypeError{"getpath", p}
+ return &func1TypeError{"getpath", v, p}
}
u := v
- for _, x := range keys {
+ for _, x := range path {
switch v.(type) {
- case map[string]interface{}:
- case []interface{}:
- case nil:
+ case nil, []any, map[string]any:
+ v = funcIndex2(nil, v, x)
+ if err, ok := v.(error); ok {
+ return &func1WrapError{"getpath", u, p, err}
+ }
default:
- return &getpathError{u, p}
- }
- v = funcIndex(nil, v, x)
- if _, ok := v.(error); ok {
- return &getpathError{u, p}
+ return &func1TypeError{"getpath", u, p}
}
}
return v
}
-func funcBsearch(v, t interface{}) interface{} {
- vs, ok := v.([]interface{})
+func funcTranspose(v any) any {
+ vss, ok := v.([]any)
+ if !ok {
+ return &func0TypeError{"transpose", v}
+ }
+ if len(vss) == 0 {
+ return []any{}
+ }
+ var l int
+ for _, vs := range vss {
+ vs, ok := vs.([]any)
+ if !ok {
+ return &func0TypeError{"transpose", v}
+ }
+ if k := len(vs); l < k {
+ l = k
+ }
+ }
+ wss := make([][]any, l)
+ xs := make([]any, l)
+ for i, k := 0, len(vss); i < l; i++ {
+ s := make([]any, k)
+ wss[i] = s
+ xs[i] = s
+ }
+ for i, vs := range vss {
+ for j, v := range vs.([]any) {
+ wss[j][i] = v
+ }
+ }
+ return xs
+}
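transpose sizes the result by the longest row and leaves missing cells as null, matching jq's padding behavior for ragged input. A quick check through the public API:

    package main

    import (
        "fmt"

        "github.com/itchyny/gojq"
    )

    func main() {
        query, err := gojq.Parse("transpose")
        if err != nil {
            panic(err)
        }
        iter := query.Run([]any{[]any{1, 2}, []any{3, 4, 5}})
        v, _ := iter.Next()
        fmt.Println(v) // [[1 3] [2 4] [<nil> 5]]
    }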
+
+func funcBsearch(v, t any) any {
+ vs, ok := v.([]any)
if !ok {
- return &funcTypeError{"bsearch", v}
+ return &func1TypeError{"bsearch", v, t}
}
i := sort.Search(len(vs), func(i int) bool {
- return compare(vs[i], t) >= 0
+ return Compare(vs[i], t) >= 0
})
- if i < len(vs) && compare(vs[i], t) == 0 {
+ if i < len(vs) && Compare(vs[i], t) == 0 {
return i
}
return -i - 1
}
-func funcGmtime(v interface{}) interface{} {
+func funcGmtime(v any) any {
if v, ok := toFloat(v); ok {
return epochToArray(v, time.UTC)
}
- return &funcTypeError{"gmtime", v}
+ return &func0TypeError{"gmtime", v}
}
-func funcLocaltime(v interface{}) interface{} {
+func funcLocaltime(v any) any {
if v, ok := toFloat(v); ok {
return epochToArray(v, time.Local)
}
- return &funcTypeError{"localtime", v}
+ return &func0TypeError{"localtime", v}
}
-func epochToArray(v float64, loc *time.Location) []interface{} {
+func epochToArray(v float64, loc *time.Location) []any {
t := time.Unix(int64(v), int64((v-math.Floor(v))*1e9)).In(loc)
- return []interface{}{
+ return []any{
t.Year(),
int(t.Month()) - 1,
t.Day(),
@@ -1363,130 +1863,143 @@ func epochToArray(v float64, loc *time.Location) []interface{} {
}
}
-func funcMktime(v interface{}) interface{} {
- if a, ok := v.([]interface{}); ok {
- t, err := arrayToTime("mktime", a, time.UTC)
- if err != nil {
- return err
- }
- return float64(t.Unix())
+func funcMktime(v any) any {
+ a, ok := v.([]any)
+ if !ok {
+ return &func0TypeError{"mktime", v}
+ }
+ t, err := arrayToTime(a, time.UTC)
+ if err != nil {
+ return &func0WrapError{"mktime", v, err}
}
- return &funcTypeError{"mktime", v}
+ return timeToEpoch(t)
+}
+
+func timeToEpoch(t time.Time) float64 {
+ return float64(t.Unix()) + float64(t.Nanosecond())/1e9
}
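timeToEpoch keeps the nanosecond fraction, so the epoch conversions shared by mktime and now preserve sub-second precision. A tiny sketch of the same conversion (epoch is an illustrative name):

    package main

    import (
        "fmt"
        "time"
    )

    // epoch mirrors timeToEpoch: whole seconds plus the nanosecond fraction.
    func epoch(t time.Time) float64 {
        return float64(t.Unix()) + float64(t.Nanosecond())/1e9
    }

    func main() {
        t := time.Date(1970, time.January, 1, 0, 0, 1, 500_000_000, time.UTC)
        fmt.Println(epoch(t)) // 1.5
    }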
-func funcStrftime(v, x interface{}) interface{} {
+func funcStrftime(v, x any) any {
if w, ok := toFloat(v); ok {
v = epochToArray(w, time.UTC)
}
- if a, ok := v.([]interface{}); ok {
- if format, ok := x.(string); ok {
- t, err := arrayToTime("strftime", a, time.UTC)
- if err != nil {
- return err
- }
- return timefmt.Format(t, format)
- }
- return &funcTypeError{"strftime", x}
+ a, ok := v.([]any)
+ if !ok {
+ return &func1TypeError{"strftime", v, x}
+ }
+ format, ok := x.(string)
+ if !ok {
+ return &func1TypeError{"strftime", v, x}
+ }
+ t, err := arrayToTime(a, time.UTC)
+ if err != nil {
+ return &func1WrapError{"strftime", v, x, err}
}
- return &funcTypeError{"strftime", v}
+ return timefmt.Format(t, format)
}
-func funcStrflocaltime(v, x interface{}) interface{} {
+func funcStrflocaltime(v, x any) any {
if w, ok := toFloat(v); ok {
v = epochToArray(w, time.Local)
}
- if a, ok := v.([]interface{}); ok {
- if format, ok := x.(string); ok {
- t, err := arrayToTime("strflocaltime", a, time.Local)
- if err != nil {
- return err
- }
- return timefmt.Format(t, format)
- }
- return &funcTypeError{"strflocaltime", x}
+ a, ok := v.([]any)
+ if !ok {
+ return &func1TypeError{"strflocaltime", v, x}
+ }
+ format, ok := x.(string)
+ if !ok {
+ return &func1TypeError{"strflocaltime", v, x}
}
- return &funcTypeError{"strflocaltime", v}
+ t, err := arrayToTime(a, time.Local)
+ if err != nil {
+ return &func1WrapError{"strflocaltime", v, x, err}
+ }
+ return timefmt.Format(t, format)
}
-func funcStrptime(v, x interface{}) interface{} {
- if v, ok := v.(string); ok {
- if format, ok := x.(string); ok {
- t, err := timefmt.Parse(v, format)
- if err != nil {
- return err
- }
- var s time.Time
- if t == s {
- return &funcTypeError{"strptime", v}
- }
- return epochToArray(float64(t.Unix())+float64(t.Nanosecond())/1e9, time.UTC)
- }
- return &funcTypeError{"strptime", x}
+func funcStrptime(v, x any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func1TypeError{"strptime", v, x}
+ }
+ format, ok := x.(string)
+ if !ok {
+ return &func1TypeError{"strptime", v, x}
+ }
+ t, err := timefmt.Parse(s, format)
+ if err != nil {
+ return &func1WrapError{"strptime", v, x, err}
+ }
+ var u time.Time
+ if t == u {
+ return &func1TypeError{"strptime", v, x}
}
- return &funcTypeError{"strptime", v}
+ return epochToArray(timeToEpoch(t), time.UTC)
}
-func arrayToTime(name string, a []interface{}, loc *time.Location) (time.Time, error) {
+func arrayToTime(a []any, loc *time.Location) (time.Time, error) {
var t time.Time
if len(a) != 8 {
- return t, &funcTypeError{name, a}
+ return t, &timeArrayError{}
}
var y, m, d, h, min, sec, nsec int
- if x, ok := toInt(a[0]); ok {
- y = x
- } else {
- return t, &funcTypeError{name, a}
+ var ok bool
+ if y, ok = toInt(a[0]); !ok {
+ return t, &timeArrayError{}
}
- if x, ok := toInt(a[1]); ok {
- m = x + 1
+ if m, ok = toInt(a[1]); ok {
+ m++
} else {
- return t, &funcTypeError{name, a}
+ return t, &timeArrayError{}
}
- if x, ok := toInt(a[2]); ok {
- d = x
- } else {
- return t, &funcTypeError{name, a}
+ if d, ok = toInt(a[2]); !ok {
+ return t, &timeArrayError{}
}
- if x, ok := toInt(a[3]); ok {
- h = x
- } else {
- return t, &funcTypeError{name, a}
+ if h, ok = toInt(a[3]); !ok {
+ return t, &timeArrayError{}
}
- if x, ok := toInt(a[4]); ok {
- min = x
- } else {
- return t, &funcTypeError{name, a}
+ if min, ok = toInt(a[4]); !ok {
+ return t, &timeArrayError{}
}
if x, ok := toFloat(a[5]); ok {
sec = int(x)
nsec = int((x - math.Floor(x)) * 1e9)
} else {
- return t, &funcTypeError{name, a}
+ return t, &timeArrayError{}
+ }
+ if _, ok = toFloat(a[6]); !ok {
+ return t, &timeArrayError{}
+ }
+ if _, ok = toFloat(a[7]); !ok {
+ return t, &timeArrayError{}
}
return time.Date(y, time.Month(m), d, h, min, sec, nsec, loc), nil
}
-func funcNow(interface{}) interface{} {
- t := time.Now()
- return float64(t.Unix()) + float64(t.Nanosecond())/1e9
+func funcNow(any) any {
+ return timeToEpoch(time.Now())
}
-func funcMatch(v, re, fs, testing interface{}) interface{} {
+func funcMatch(v, re, fs, testing any) any {
+ name := "match"
+ if testing == true {
+ name = "test"
+ }
var flags string
if fs != nil {
v, ok := fs.(string)
if !ok {
- return &funcTypeError{"match", fs}
+ return &func2TypeError{name, v, re, fs}
}
flags = v
}
s, ok := v.(string)
if !ok {
- return &funcTypeError{"match", v}
+ return &func2TypeError{name, v, re, fs}
}
restr, ok := re.(string)
if !ok {
- return &funcTypeError{"match", v}
+ return &func2TypeError{name, v, re, fs}
}
r, err := compileRegexp(restr, flags)
if err != nil {
@@ -1504,16 +2017,16 @@ func funcMatch(v, re, fs, testing interface{}) interface{} {
xs = [][]int{got}
}
}
- res, names := make([]interface{}, len(xs)), r.SubexpNames()
+ res, names := make([]any, len(xs)), r.SubexpNames()
for i, x := range xs {
- captures := make([]interface{}, (len(x)-2)/2)
+ captures := make([]any, (len(x)-2)/2)
for j := 1; j < len(x)/2; j++ {
- var name interface{}
+ var name any
if n := names[j]; n != "" {
name = n
}
if x[j*2] < 0 {
- captures[j-1] = map[string]interface{}{
+ captures[j-1] = map[string]any{
"name": name,
"offset": -1,
"length": 0,
@@ -1521,14 +2034,14 @@ func funcMatch(v, re, fs, testing interface{}) interface{} {
}
continue
}
- captures[j-1] = map[string]interface{}{
+ captures[j-1] = map[string]any{
"name": name,
"offset": len([]rune(s[:x[j*2]])),
"length": len([]rune(s[:x[j*2+1]])) - len([]rune(s[:x[j*2]])),
"string": s[x[j*2]:x[j*2+1]],
}
}
- res[i] = map[string]interface{}{
+ res[i] = map[string]any{
"offset": len([]rune(s[:x[0]])),
"length": len([]rune(s[:x[1]])) - len([]rune(s[:x[0]])),
"string": s[x[0]:x[1]],
@@ -1539,6 +2052,11 @@ func funcMatch(v, re, fs, testing interface{}) interface{} {
}
func compileRegexp(re, flags string) (*regexp.Regexp, error) {
+ if strings.IndexFunc(flags, func(r rune) bool {
+ return r != 'g' && r != 'i' && r != 'm'
+ }) >= 0 {
+ return nil, fmt.Errorf("unsupported regular expression flag: %q", flags)
+ }
re = strings.ReplaceAll(re, "(?<", "(?P<")
if strings.ContainsRune(flags, 'i') {
re = "(?i)" + re
@@ -1553,61 +2071,81 @@ func compileRegexp(re, flags string) (*regexp.Regexp, error) {
return r, nil
}
-func funcError(v interface{}, args []interface{}) interface{} {
+func funcCapture(v any) any {
+ vs, ok := v.(map[string]any)
+ if !ok {
+ return &expectedObjectError{v}
+ }
+ v = vs["captures"]
+ captures, ok := v.([]any)
+ if !ok {
+ return &expectedArrayError{v}
+ }
+ w := make(map[string]any, len(captures))
+ for _, capture := range captures {
+ if capture, ok := capture.(map[string]any); ok {
+ if name, ok := capture["name"].(string); ok {
+ w[name] = capture["string"]
+ }
+ }
+ }
+ return w
+}
+
+func funcError(v any, args []any) any {
if len(args) > 0 {
v = args[0]
}
- code := 5
- if v == nil {
- code = 0
- }
- return &exitCodeError{v, code, false}
+ return &exitCodeError{v, 5}
}
-func funcHalt(interface{}) interface{} {
- return &exitCodeError{nil, 0, true}
+func funcHalt(any) any {
+ return &HaltError{nil, 0}
}
-func funcHaltError(v interface{}, args []interface{}) interface{} {
+func funcHaltError(v any, args []any) any {
code := 5
if len(args) > 0 {
var ok bool
if code, ok = toInt(args[0]); !ok {
- return &funcTypeError{"halt_error", args[0]}
+ return &func0TypeError{"halt_error", args[0]}
}
}
- return &exitCodeError{v, code, true}
-}
-
-func internalfuncTypeError(v, x interface{}) interface{} {
- if x, ok := x.(string); ok {
- return &funcTypeError{x, v}
- }
- return &funcTypeError{"_type_error", v}
+ return &HaltError{v, code}
}
-func toInt(x interface{}) (int, bool) {
+func toInt(x any) (int, bool) {
switch x := x.(type) {
case int:
return x, true
case float64:
- return int(x), true
+ return floatToInt(x), true
case *big.Int:
if x.IsInt64() {
- if i := x.Int64(); minInt <= i && i <= maxInt {
+ if i := x.Int64(); math.MinInt <= i && i <= math.MaxInt {
return int(i), true
}
}
if x.Sign() > 0 {
- return maxInt, true
+ return math.MaxInt, true
}
- return minInt, true
+ return math.MinInt, true
default:
return 0, false
}
}
-func toFloat(x interface{}) (float64, bool) {
+func floatToInt(x float64) int {
+ if math.MinInt <= x && x <= math.MaxInt {
+ return int(x)
+ }
+ if x > 0 {
+ return math.MaxInt
+ }
+ return math.MinInt
+}
+
+func toFloat(x any) (float64, bool) {
switch x := x.(type) {
case int:
return float64(x), true
diff --git a/vendor/github.com/itchyny/gojq/go.dev.mod b/vendor/github.com/itchyny/gojq/go.dev.mod
index 23cb84c6d..1e8316268 100644
--- a/vendor/github.com/itchyny/gojq/go.dev.mod
+++ b/vendor/github.com/itchyny/gojq/go.dev.mod
@@ -1,8 +1,8 @@
module github.com/itchyny/gojq
-go 1.14
+go 1.20
require (
- github.com/itchyny/astgen-go v0.0.0-20210222032259-bf31276dfbe1 // indirect
- github.com/itchyny/timefmt-go v0.1.2
+ github.com/itchyny/astgen-go v0.0.0-20231113225122-e1c22b9aaf7b // indirect
+ github.com/itchyny/timefmt-go v0.1.6 // indirect
)
diff --git a/vendor/github.com/itchyny/gojq/go.dev.sum b/vendor/github.com/itchyny/gojq/go.dev.sum
index b78c94a4a..e8691b822 100644
--- a/vendor/github.com/itchyny/gojq/go.dev.sum
+++ b/vendor/github.com/itchyny/gojq/go.dev.sum
@@ -1,4 +1,4 @@
-github.com/itchyny/astgen-go v0.0.0-20210222032259-bf31276dfbe1 h1:z3NR0toPa8yJsPd21cmxnttYX+cDzBQSxZ569ICn49I=
-github.com/itchyny/astgen-go v0.0.0-20210222032259-bf31276dfbe1/go.mod h1:296z3W7Xsrp2mlIY88ruDKscuvrkL6zXCNRtaYVshzw=
-github.com/itchyny/timefmt-go v0.1.2 h1:q0Xa4P5it6K6D7ISsbLAMwx1PnWlixDcJL6/sFs93Hs=
-github.com/itchyny/timefmt-go v0.1.2/go.mod h1:0osSSCQSASBJMsIZnhAaF1C2fCBTJZXrnj37mG8/c+A=
+github.com/itchyny/astgen-go v0.0.0-20231113225122-e1c22b9aaf7b h1:72fDU7wad+r3iQObaxhlXVIpAIMRUIUMrNa3go1vb8s=
+github.com/itchyny/astgen-go v0.0.0-20231113225122-e1c22b9aaf7b/go.mod h1:Zp6xzEWVc2pQ/ObfLD6t/M6gDegsJWKdGKJSiT7qlu0=
+github.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/myJ5Q=
+github.com/itchyny/timefmt-go v0.1.6/go.mod h1:RRDZYC5s9ErkjQvTvvU7keJjxUYzIISJGxm9/mAERQg=
diff --git a/vendor/github.com/itchyny/gojq/gojq.go b/vendor/github.com/itchyny/gojq/gojq.go
index 8f53b3568..e078c8090 100644
--- a/vendor/github.com/itchyny/gojq/gojq.go
+++ b/vendor/github.com/itchyny/gojq/gojq.go
@@ -1,5 +1,5 @@
-// Package gojq provides the parser and interpreter of gojq.
+// Package gojq provides the parser and the interpreter of gojq.
+// Please refer to [Usage as a library] for an introduction.
//
-// Please refer to https://github.com/itchyny/gojq#usage-as-a-library for
-// introduction of the usage as a library.
+// [Usage as a library]: https://github.com/itchyny/gojq#usage-as-a-library
package gojq
diff --git a/vendor/github.com/itchyny/gojq/iter.go b/vendor/github.com/itchyny/gojq/iter.go
index 4f724417b..d0bed9606 100644
--- a/vendor/github.com/itchyny/gojq/iter.go
+++ b/vendor/github.com/itchyny/gojq/iter.go
@@ -2,22 +2,48 @@ package gojq
// Iter is an interface for an iterator.
type Iter interface {
- Next() (interface{}, bool)
+ Next() (any, bool)
}
-func unitIterator(v interface{}) Iter {
- return &unitIter{v: v}
+// NewIter creates a new [Iter] from values.
+func NewIter(values ...any) Iter {
+ switch len(values) {
+ case 0:
+ return emptyIter{}
+ case 1:
+ return &unitIter{value: values[0]}
+ default:
+ iter := sliceIter(values)
+ return &iter
+ }
+}
+
+type emptyIter struct{}
+
+func (emptyIter) Next() (any, bool) {
+ return nil, false
}
type unitIter struct {
- v interface{}
- done bool
+ value any
+ done bool
}
-func (c *unitIter) Next() (interface{}, bool) {
- if !c.done {
- c.done = true
- return c.v, true
+func (iter *unitIter) Next() (any, bool) {
+ if iter.done {
+ return nil, false
}
- return nil, false
+ iter.done = true
+ return iter.value, true
+}
+
+type sliceIter []any
+
+func (iter *sliceIter) Next() (any, bool) {
+ if len(*iter) == 0 {
+ return nil, false
+ }
+ value := (*iter)[0]
+ *iter = (*iter)[1:]
+ return value, true
}
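NewIter picks the cheapest iterator for the arity: an empty iterator for no values, a unit iterator for one, and a slice-backed iterator that pops from the front otherwise. A short usage sketch of the exported constructor:

    package main

    import (
        "fmt"

        "github.com/itchyny/gojq"
    )

    func main() {
        iter := gojq.NewIter(1, 2, 3)
        for {
            v, ok := iter.Next()
            if !ok {
                break
            }
            fmt.Println(v) // 1, then 2, then 3
        }
    }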
diff --git a/vendor/github.com/itchyny/gojq/lexer.go b/vendor/github.com/itchyny/gojq/lexer.go
index 88060d247..0c2efd122 100644
--- a/vendor/github.com/itchyny/gojq/lexer.go
+++ b/vendor/github.com/itchyny/gojq/lexer.go
@@ -1,8 +1,7 @@
package gojq
import (
- "strconv"
- "strings"
+ "encoding/json"
"unicode/utf8"
)
@@ -235,9 +234,10 @@ func (l *lexer) Lex(lval *yySymType) (tokenType int) {
return tok
default:
if ch >= utf8.RuneSelf {
- r, _ := utf8.DecodeRuneInString(l.source[l.offset-1:])
+ r, size := utf8.DecodeRuneInString(l.source[l.offset-1:])
+ // -1 to adjust for first byte consumed by next()
+ l.offset += size - 1
l.token = string(r)
- l.offset += len(l.token)
}
}
return int(ch)
@@ -248,15 +248,9 @@ func (l *lexer) next() (byte, bool) {
ch := l.source[l.offset]
l.offset++
if ch == '#' {
- if len(l.source) == l.offset {
+ if l.skipComment() {
return 0, true
}
- for !isNewLine(l.source[l.offset]) {
- l.offset++
- if len(l.source) == l.offset {
- return 0, true
- }
- }
} else if !isWhite(ch) {
return ch, false
} else if len(l.source) == l.offset {
@@ -265,6 +259,28 @@ func (l *lexer) next() (byte, bool) {
}
}
+func (l *lexer) skipComment() bool {
+ for {
+ switch l.peek() {
+ case 0:
+ return true
+ case '\\':
+ switch l.offset++; l.peek() {
+ case '\\', '\n':
+ l.offset++
+ case '\r':
+ if l.offset++; l.peek() == '\n' {
+ l.offset++
+ }
+ }
+ case '\n', '\r':
+ return false
+ default:
+ l.offset++
+ }
+ }
+}
+
func (l *lexer) peek() byte {
if len(l.source) == l.offset {
return 0
@@ -381,115 +397,159 @@ func (l *lexer) validNumber() bool {
}
func (l *lexer) scanString(start int) (int, string) {
- var quote bool
- for i, m := l.offset, len(l.source); i < m; i++ {
+ var decode bool
+ var controls int
+ unquote := func(src string, quote bool) (string, error) {
+ if !decode {
+ if quote {
+ return src, nil
+ }
+ return src[1 : len(src)-1], nil
+ }
+ var buf []byte
+ if !quote && controls == 0 {
+ buf = []byte(src)
+ } else {
+ buf = quoteAndEscape(src, quote, controls)
+ }
+ if err := json.Unmarshal(buf, &src); err != nil {
+ return "", err
+ }
+ return src, nil
+ }
+ for i := l.offset; i < len(l.source); i++ {
ch := l.source[i]
switch ch {
case '\\':
- quote = !quote
- case '"':
- if !quote {
- if !l.inString {
- l.offset = i + 1
- l.token = l.source[start:l.offset]
- str, err := strconv.Unquote(l.token)
- if err != nil {
- return tokInvalid, ""
+ if i++; i >= len(l.source) {
+ break
+ }
+ switch l.source[i] {
+ case 'u':
+ for j := 1; j <= 4; j++ {
+ if i+j >= len(l.source) || !isHex(l.source[i+j]) {
+ l.offset = i + j
+ l.token = l.source[i-1 : l.offset]
+ return tokInvalidEscapeSequence, ""
}
- return tokString, str
}
- if i > l.offset {
- l.offset = i
- l.token = l.source[start:l.offset]
- str, err := strconv.Unquote("\"" + l.token + "\"")
- if err != nil {
- return tokInvalid, ""
- }
- return tokString, str
+ i += 4
+ fallthrough
+ case '"', '/', '\\', 'b', 'f', 'n', 'r', 't':
+ decode = true
+ case '(':
+ if !l.inString {
+ l.inString = true
+ return tokStringStart, ""
}
- l.inString = false
- l.offset = i + 1
- return tokStringEnd, ""
- }
- quote = false
- case '(':
- if quote {
- if l.inString {
- if i > l.offset+1 {
- l.offset = i - 1
- l.token = l.source[start:l.offset]
- str, err := strconv.Unquote("\"" + l.token + "\"")
- if err != nil {
- return tokInvalid, ""
- }
- return tokString, str
- }
- l.offset = i + 1
+ if i == l.offset+1 {
+ l.offset += 2
l.inString = false
return tokStringQuery, ""
}
- l.inString = true
- return tokStringStart, ""
+ l.offset = i - 1
+ l.token = l.source[start:l.offset]
+ str, err := unquote(l.token, true)
+ if err != nil {
+ return tokInvalid, ""
+ }
+ return tokString, str
+ default:
+ l.offset = i + 1
+ l.token = l.source[l.offset-2 : l.offset]
+ return tokInvalidEscapeSequence, ""
+ }
+ case '"':
+ if !l.inString {
+ l.offset = i + 1
+ l.token = l.source[start:l.offset]
+ str, err := unquote(l.token, false)
+ if err != nil {
+ return tokInvalid, ""
+ }
+ return tokString, str
}
- default:
- if quote {
- if !('a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' ||
- '0' <= ch && ch <= '9' || ch == '\'' || ch == '"') {
- l.offset = i + 1
- l.token = l.source[l.offset-2 : l.offset]
+ if i > l.offset {
+ l.offset = i
+ l.token = l.source[start:l.offset]
+ str, err := unquote(l.token, true)
+ if err != nil {
return tokInvalid, ""
}
- quote = false
+ return tokString, str
+ }
+ l.inString = false
+ l.offset = i + 1
+ return tokStringEnd, ""
+ default:
+ if !decode {
+ decode = ch > '~'
+ }
+ if ch < ' ' { // ref: unquoteBytes in encoding/json
+ controls++
}
}
}
l.offset = len(l.source)
- l.token = l.source[start:l.offset]
- return tokInvalid, ""
+ l.token = ""
+ return tokUnterminatedString, ""
}
-type parseError struct {
- offset int
- token string
- tokenType int
-}
-
-func (err *parseError) Error() string {
- var message string
- prefix := "unexpected"
- switch {
- case err.tokenType == eof:
- message = ""
- case err.tokenType == tokInvalid:
- prefix = "invalid"
- fallthrough
- case err.tokenType >= utf8.RuneSelf:
- if strings.HasPrefix(err.token, "\"") {
- message = err.token
+func quoteAndEscape(src string, quote bool, controls int) []byte {
+ size := len(src) + controls*5
+ if quote {
+ size += 2
+ }
+ buf := make([]byte, size)
+ var j int
+ if quote {
+ buf[0] = '"'
+ buf[len(buf)-1] = '"'
+ j++
+ }
+ for i := 0; i < len(src); i++ {
+ if ch := src[i]; ch < ' ' {
+ const hex = "0123456789abcdef"
+ copy(buf[j:], `\u00`)
+ buf[j+4] = hex[ch>>4]
+ buf[j+5] = hex[ch&0xF]
+ j += 6
} else {
- message = "\"" + err.token + "\""
+ buf[j] = ch
+ j++
}
- default:
- message = strconv.Quote(string(rune(err.tokenType)))
}
- return prefix + " token " + message
+ return buf
}
-func (err *parseError) Token() (string, int) {
- return err.token, err.offset
+// ParseError represents a description of a query parsing error.
+type ParseError struct {
+ Offset int // the error occurred after reading Offset bytes
+ Token string // the Token that caused the error (may be empty)
+ tokenType int
}
-func (l *lexer) Error(e string) {
- offset, token := l.offset, l.token
- switch {
- case l.tokenType == eof:
- offset++
- case l.tokenType >= utf8.RuneSelf:
- offset -= len(token) - 1
+func (err *ParseError) Error() string {
+ switch err.tokenType {
+ case eof:
+ return "unexpected EOF"
+ case tokInvalid:
+ return "invalid token " + jsonMarshal(err.Token)
+ case tokInvalidEscapeSequence:
+ return `invalid escape sequence "` + err.Token + `" in string literal`
+ case tokUnterminatedString:
+ return "unterminated string literal"
default:
+ return "unexpected token " + jsonMarshal(err.Token)
+ }
+}
+
+func (l *lexer) Error(string) {
+ offset, token := l.offset, l.token
+ if l.tokenType != eof && l.tokenType < utf8.RuneSelf {
token = string(rune(l.tokenType))
}
- l.err = &parseError{offset, token, l.tokenType}
+ l.err = &ParseError{offset, token, l.tokenType}
}
func isWhite(ch byte) bool {
@@ -507,15 +567,12 @@ func isIdent(ch byte, tail bool) bool {
tail && isNumber(ch)
}
-func isNumber(ch byte) bool {
- return '0' <= ch && ch <= '9'
+func isHex(ch byte) bool {
+ return 'a' <= ch && ch <= 'f' ||
+ 'A' <= ch && ch <= 'F' ||
+ isNumber(ch)
}
-func isNewLine(ch byte) bool {
- switch ch {
- case '\n', '\r':
- return true
- default:
- return false
- }
+func isNumber(ch byte) bool {
+ return '0' <= ch && ch <= '9'
}
diff --git a/vendor/github.com/itchyny/gojq/math.go b/vendor/github.com/itchyny/gojq/math.go
deleted file mode 100644
index 55d64765c..000000000
--- a/vendor/github.com/itchyny/gojq/math.go
+++ /dev/null
@@ -1,10 +0,0 @@
-package gojq
-
-import "math/bits"
-
-const (
- maxInt = 1<<(bits.UintSize-1) - 1 // math.MaxInt64 or math.MaxInt32
- minInt = -maxInt - 1 // math.MinInt64 or math.MinInt32
- maxHalfInt = 1<<(bits.UintSize/2-1) - 1 // math.MaxInt32 or math.MaxInt16
- minHalfInt = -maxHalfInt - 1 // math.MinInt32 or math.MinInt16
-)
diff --git a/vendor/github.com/itchyny/gojq/module_loader.go b/vendor/github.com/itchyny/gojq/module_loader.go
index 8c776d05e..0a73ba05b 100644
--- a/vendor/github.com/itchyny/gojq/module_loader.go
+++ b/vendor/github.com/itchyny/gojq/module_loader.go
@@ -4,18 +4,37 @@ import (
"encoding/json"
"fmt"
"io"
- "io/ioutil"
"os"
"path/filepath"
+ "strings"
)
-type moduleLoader struct {
- paths []string
-}
+// ModuleLoader is the interface for loading modules.
+//
+// Implement the following optional methods. Use [NewModuleLoader] to load local modules.
+//
+// LoadInitModules() ([]*Query, error)
+// LoadModule(string) (*Query, error)
+// LoadModuleWithMeta(string, map[string]any) (*Query, error)
+// LoadJSON(string) (any, error)
+// LoadJSONWithMeta(string, map[string]any) (any, error)
+type ModuleLoader any
-// NewModuleLoader creates a new ModuleLoader reading local modules in the paths.
+// NewModuleLoader creates a new [ModuleLoader] loading local modules in the paths.
+// Note that users can load modules outside the paths using the "search" path of the metadata.
+// Empty paths are ignored, so specify "." for the current working directory.
func NewModuleLoader(paths []string) ModuleLoader {
- return &moduleLoader{paths}
+ ps := make([]string, 0, len(paths))
+ for _, path := range paths {
+ if path = resolvePath(path, ""); path != "" {
+ ps = append(ps, path)
+ }
+ }
+ return &moduleLoader{ps}
+}
+
+type moduleLoader struct {
+ paths []string
}
func (l *moduleLoader) LoadInitModules() ([]*Query, error) {
@@ -34,36 +53,36 @@ func (l *moduleLoader) LoadInitModules() ([]*Query, error) {
if fi.IsDir() {
continue
}
- cnt, err := ioutil.ReadFile(path)
+ cnt, err := os.ReadFile(path)
if err != nil {
return nil, err
}
- q, err := parseModule(path, string(cnt))
+ q, err := parseModule(string(cnt), filepath.Dir(path))
if err != nil {
- return nil, &queryParseError{"query in module", path, string(cnt), err}
+ return nil, &queryParseError{path, string(cnt), err}
}
qs = append(qs, q)
}
return qs, nil
}
-func (l *moduleLoader) LoadModuleWithMeta(name string, meta map[string]interface{}) (*Query, error) {
+func (l *moduleLoader) LoadModuleWithMeta(name string, meta map[string]any) (*Query, error) {
path, err := l.lookupModule(name, ".jq", meta)
if err != nil {
return nil, err
}
- cnt, err := ioutil.ReadFile(path)
+ cnt, err := os.ReadFile(path)
if err != nil {
return nil, err
}
- q, err := parseModule(path, string(cnt))
+ q, err := parseModule(string(cnt), filepath.Dir(path))
if err != nil {
- return nil, &queryParseError{"query in module", path, string(cnt), err}
+ return nil, &queryParseError{path, string(cnt), err}
}
return q, nil
}
-func (l *moduleLoader) LoadJSONWithMeta(name string, meta map[string]interface{}) (interface{}, error) {
+func (l *moduleLoader) LoadJSONWithMeta(name string, meta map[string]any) (any, error) {
path, err := l.lookupModule(name, ".json", meta)
if err != nil {
return nil, err
@@ -73,11 +92,11 @@ func (l *moduleLoader) LoadJSONWithMeta(name string, meta map[string]interface{}
return nil, err
}
defer f.Close()
- var vals []interface{}
+ vals := []any{}
dec := json.NewDecoder(f)
dec.UseNumber()
for {
- var val interface{}
+ var val any
if err := dec.Decode(&val); err != nil {
if err == io.EOF {
break
@@ -85,7 +104,7 @@ func (l *moduleLoader) LoadJSONWithMeta(name string, meta map[string]interface{}
if _, err := f.Seek(0, io.SeekStart); err != nil {
return nil, err
}
- cnt, er := ioutil.ReadAll(f)
+ cnt, er := io.ReadAll(f)
if er != nil {
return nil, er
}
@@ -96,17 +115,17 @@ func (l *moduleLoader) LoadJSONWithMeta(name string, meta map[string]interface{}
return vals, nil
}
-func (l *moduleLoader) lookupModule(name, extension string, meta map[string]interface{}) (string, error) {
+func (l *moduleLoader) lookupModule(name, extension string, meta map[string]any) (string, error) {
paths := l.paths
- if path := searchPath(meta); path != "" {
+ if path, ok := meta["search"].(string); ok {
paths = append([]string{path}, paths...)
}
for _, base := range paths {
- path := filepath.Clean(filepath.Join(base, name+extension))
+ path := filepath.Join(base, name+extension)
if _, err := os.Stat(path); err == nil {
return path, err
}
- path = filepath.Clean(filepath.Join(base, name, filepath.Base(name)+extension))
+ path = filepath.Join(base, name, filepath.Base(name)+extension)
if _, err := os.Stat(path); err == nil {
return path, err
}
@@ -114,43 +133,50 @@ func (l *moduleLoader) lookupModule(name, extension string, meta map[string]inte
return "", fmt.Errorf("module not found: %q", name)
}
-// This is a dirty hack to implement the "search" field.
-func parseModule(path, cnt string) (*Query, error) {
+func parseModule(cnt, dir string) (*Query, error) {
q, err := Parse(cnt)
if err != nil {
return nil, err
}
for _, i := range q.Imports {
- if i.Meta == nil {
- continue
+ if i.Meta != nil {
+ for _, e := range i.Meta.KeyVals {
+ if e.Key == "search" || e.KeyString == "search" {
+ if path, ok := e.Val.toString(); ok {
+ if path = resolvePath(path, dir); path != "" {
+ e.Val.Str = path
+ } else {
+ e.Val.Null = true
+ }
+ }
+ }
+ }
}
- i.Meta.KeyVals = append(
- i.Meta.KeyVals,
- &ConstObjectKeyVal{
- Key: "$$path",
- Val: &ConstTerm{Str: path},
- },
- )
}
return q, nil
}
-func searchPath(meta map[string]interface{}) string {
- x, ok := meta["$$path"]
- if !ok {
- return ""
- }
- path, ok := x.(string)
- if !ok {
- return ""
- }
- x, ok = meta["search"]
- if !ok {
- return ""
- }
- s, ok := x.(string)
- if !ok {
- return ""
+func resolvePath(path, dir string) string {
+ switch {
+ case filepath.IsAbs(path):
+ return path
+ case strings.HasPrefix(path, "~/"):
+ dir, err := os.UserHomeDir()
+ if err != nil {
+ return ""
+ }
+ return filepath.Join(dir, path[2:])
+ case strings.HasPrefix(path, "$ORIGIN/"):
+ exe, err := os.Executable()
+ if err != nil {
+ return ""
+ }
+ exe, err = filepath.EvalSymlinks(exe)
+ if err != nil {
+ return ""
+ }
+ return filepath.Join(filepath.Dir(exe), path[8:])
+ default:
+ return filepath.Join(dir, path)
}
- return filepath.Join(filepath.Dir(path), s)
}
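resolvePath gives module search paths three special forms: absolute paths pass through, "~/" expands to the user's home directory, and "$ORIGIN/" expands to the directory of the running executable; other paths are joined onto the directory they are resolved from (the importing module's directory for "search" metadata). A hedged sketch of wiring the loader into a compiled query; the module name mylib and the search paths are placeholders:

    package main

    import (
        "log"

        "github.com/itchyny/gojq"
    )

    func main() {
        // Compile fails unless mylib.jq can actually be found via the loader.
        query, err := gojq.Parse(`import "mylib" as lib; lib::f`)
        if err != nil {
            log.Fatal(err)
        }
        loader := gojq.NewModuleLoader([]string{"~/.jq", "$ORIGIN/../lib/jq", "."})
        code, err := gojq.Compile(query, gojq.WithModuleLoader(loader))
        if err != nil {
            log.Fatal(err)
        }
        _ = code // ready for code.Run(input)
    }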
diff --git a/vendor/github.com/itchyny/gojq/normalize.go b/vendor/github.com/itchyny/gojq/normalize.go
index c4006a1ea..2bfcd2156 100644
--- a/vendor/github.com/itchyny/gojq/normalize.go
+++ b/vendor/github.com/itchyny/gojq/normalize.go
@@ -7,36 +7,40 @@ import (
"strings"
)
-func normalizeNumbers(v interface{}) interface{} {
+func normalizeNumber(v json.Number) any {
+ if i, err := v.Int64(); err == nil && math.MinInt <= i && i <= math.MaxInt {
+ return int(i)
+ }
+ if strings.ContainsAny(v.String(), ".eE") {
+ if f, err := v.Float64(); err == nil {
+ return f
+ }
+ }
+ if bi, ok := new(big.Int).SetString(v.String(), 10); ok {
+ return bi
+ }
+ if strings.HasPrefix(v.String(), "-") {
+ return math.Inf(-1)
+ }
+ return math.Inf(1)
+}
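normalizeNumber maps a json.Number onto gojq's three numeric representations: int when the literal fits the platform int, float64 when it carries a fraction or exponent, and *big.Int for larger integer literals (with infinities as a last resort for unparsable input). A condensed sketch of that decision (classify is illustrative and omits the infinity fallback):

    package main

    import (
        "encoding/json"
        "fmt"
        "math"
        "math/big"
        "strings"
    )

    // classify is a trimmed-down normalizeNumber.
    func classify(n json.Number) any {
        if i, err := n.Int64(); err == nil && math.MinInt <= i && i <= math.MaxInt {
            return int(i)
        }
        if strings.ContainsAny(n.String(), ".eE") {
            if f, err := n.Float64(); err == nil {
                return f
            }
        }
        bi, _ := new(big.Int).SetString(n.String(), 10)
        return bi
    }

    func main() {
        fmt.Printf("%T %T %T\n",
            classify(json.Number("42")),
            classify(json.Number("1.5e3")),
            classify(json.Number("123456789012345678901")),
        ) // int float64 *big.Int
    }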
+
+func normalizeNumbers(v any) any {
switch v := v.(type) {
case json.Number:
- if i, err := v.Int64(); err == nil && minInt <= i && i <= maxInt {
- return int(i)
- }
- if strings.ContainsAny(v.String(), ".eE") {
- if f, err := v.Float64(); err == nil {
- return f
- }
- }
- if bi, ok := new(big.Int).SetString(v.String(), 10); ok {
- return bi
- }
- if strings.HasPrefix(v.String(), "-") {
- return math.Inf(-1)
- }
- return math.Inf(1)
+ return normalizeNumber(v)
case *big.Int:
if v.IsInt64() {
- if i := v.Int64(); minInt <= i && i <= maxInt {
+ if i := v.Int64(); math.MinInt <= i && i <= math.MaxInt {
return int(i)
}
}
return v
case int64:
- if v > maxInt || v < minInt {
- return new(big.Int).SetInt64(v)
+ if math.MinInt <= v && v <= math.MaxInt {
+ return int(v)
}
- return int(v)
+ return big.NewInt(v)
case int32:
return int(v)
case int16:
@@ -44,68 +48,36 @@ func normalizeNumbers(v interface{}) interface{} {
case int8:
return int(v)
case uint:
- if v > maxInt {
- return new(big.Int).SetUint64(uint64(v))
+ if v <= math.MaxInt {
+ return int(v)
}
- return int(v)
+ return new(big.Int).SetUint64(uint64(v))
case uint64:
- if v > maxInt {
- return new(big.Int).SetUint64(v)
+ if v <= math.MaxInt {
+ return int(v)
}
- return int(v)
+ return new(big.Int).SetUint64(v)
case uint32:
- if uint64(v) > maxInt {
- return new(big.Int).SetUint64(uint64(v))
+ if uint64(v) <= math.MaxInt {
+ return int(v)
}
- return int(v)
+ return new(big.Int).SetUint64(uint64(v))
case uint16:
return int(v)
case uint8:
return int(v)
case float32:
return float64(v)
- case map[string]interface{}:
- for k, x := range v {
- v[k] = normalizeNumbers(x)
- }
- return v
- case []interface{}:
+ case []any:
for i, x := range v {
v[i] = normalizeNumbers(x)
}
return v
- default:
- return v
- }
-}
-
-// It's ok to delete destructively because this function is used right after
-// updatePaths, where it shallow-copies maps or slices on updates.
-func deleteEmpty(v interface{}) interface{} {
- switch v := v.(type) {
- case struct{}:
- return nil
- case map[string]interface{}:
- for k, w := range v {
- if w == struct{}{} {
- delete(v, k)
- } else {
- v[k] = deleteEmpty(w)
- }
+ case map[string]any:
+ for k, x := range v {
+ v[k] = normalizeNumbers(x)
}
return v
- case []interface{}:
- var j int
- for _, w := range v {
- if w != struct{}{} {
- v[j] = deleteEmpty(w)
- j++
- }
- }
- for i := j; i < len(v); i++ {
- v[i] = nil
- }
- return v[:j]
default:
return v
}
diff --git a/vendor/github.com/itchyny/gojq/operator.go b/vendor/github.com/itchyny/gojq/operator.go
index 6ebd77ebe..64b74b780 100644
--- a/vendor/github.com/itchyny/gojq/operator.go
+++ b/vendor/github.com/itchyny/gojq/operator.go
@@ -3,7 +3,6 @@ package gojq
import (
"math"
"math/big"
- "reflect"
"strings"
)
@@ -38,7 +37,7 @@ const (
OpUpdateAlt
)
-// String implements Stringer.
+// String implements [fmt.Stringer].
func (op Operator) String() string {
switch op {
case OpPipe:
@@ -94,7 +93,7 @@ func (op Operator) String() string {
}
}
-// GoString implements GoStringer.
+// GoString implements [fmt.GoStringer].
func (op Operator) GoString() (str string) {
defer func() { str = "gojq." + str }()
switch op {
@@ -209,23 +208,19 @@ func (op Operator) getFunc() string {
}
func binopTypeSwitch(
- l, r interface{},
- callbackInts func(int, int) interface{},
- callbackFloats func(float64, float64) interface{},
- callbackBigInts func(*big.Int, *big.Int) interface{},
- callbackStrings func(string, string) interface{},
- callbackArrays func(l, r []interface{}) interface{},
- callbackMaps func(l, r map[string]interface{}) interface{},
- fallback func(interface{}, interface{}) interface{}) interface{} {
+ l, r any,
+ callbackInts func(_, _ int) any,
+ callbackFloats func(_, _ float64) any,
+ callbackBigInts func(_, _ *big.Int) any,
+ callbackStrings func(_, _ string) any,
+ callbackArrays func(_, _ []any) any,
+ callbackMaps func(_, _ map[string]any) any,
+ fallback func(_, _ any) any) any {
switch l := l.(type) {
case int:
switch r := r.(type) {
case int:
- if minHalfInt <= l && l <= maxHalfInt &&
- minHalfInt <= r && r <= maxHalfInt {
- return callbackInts(l, r)
- }
- return callbackBigInts(big.NewInt(int64(l)), big.NewInt(int64(r)))
+ return callbackInts(l, r)
case float64:
return callbackFloats(float64(l), r)
case *big.Int:
@@ -262,16 +257,16 @@ func binopTypeSwitch(
default:
return fallback(l, r)
}
- case []interface{}:
+ case []any:
switch r := r.(type) {
- case []interface{}:
+ case []any:
return callbackArrays(l, r)
default:
return fallback(l, r)
}
- case map[string]interface{}:
+ case map[string]any:
switch r := r.(type) {
- case map[string]interface{}:
+ case map[string]any:
return callbackMaps(l, r)
default:
return fallback(l, r)
@@ -281,7 +276,7 @@ func binopTypeSwitch(
}
}
-func funcOpPlus(v interface{}) interface{} {
+func funcOpPlus(v any) any {
switch v := v.(type) {
case int:
return v
@@ -294,7 +289,7 @@ func funcOpPlus(v interface{}) interface{} {
}
}
-func funcOpNegate(v interface{}) interface{} {
+func funcOpNegate(v any) any {
switch v := v.(type) {
case int:
return -v
@@ -307,28 +302,38 @@ func funcOpNegate(v interface{}) interface{} {
}
}
-func funcOpAdd(_, l, r interface{}) interface{} {
- if l == nil {
- return r
- } else if r == nil {
- return l
- }
+func funcOpAdd(_, l, r any) any {
return binopTypeSwitch(l, r,
- func(l, r int) interface{} { return l + r },
- func(l, r float64) interface{} { return l + r },
- func(l, r *big.Int) interface{} { return new(big.Int).Add(l, r) },
- func(l, r string) interface{} { return l + r },
- func(l, r []interface{}) interface{} {
+ func(l, r int) any {
+ if v := l + r; (v >= l) == (r >= 0) {
+ return v
+ }
+ x, y := big.NewInt(int64(l)), big.NewInt(int64(r))
+ return x.Add(x, y)
+ },
+ func(l, r float64) any { return l + r },
+ func(l, r *big.Int) any { return new(big.Int).Add(l, r) },
+ func(l, r string) any { return l + r },
+ func(l, r []any) any {
+ if len(l) == 0 {
+ return r
+ }
if len(r) == 0 {
return l
- } else if len(l) == 0 {
- return r
}
- v := make([]interface{}, 0, len(l)+len(r))
- return append(append(v, l...), r...)
+ v := make([]any, len(l)+len(r))
+ copy(v, l)
+ copy(v[len(l):], r)
+ return v
},
- func(l, r map[string]interface{}) interface{} {
- m := make(map[string]interface{}, len(l)+len(r))
+ func(l, r map[string]any) any {
+ if len(l) == 0 {
+ return r
+ }
+ if len(r) == 0 {
+ return l
+ }
+ m := make(map[string]any, len(l)+len(r))
for k, v := range l {
m[k] = v
}
@@ -337,63 +342,71 @@ func funcOpAdd(_, l, r interface{}) interface{} {
}
return m
},
- func(l, r interface{}) interface{} { return &binopTypeError{"add", l, r} },
+ func(l, r any) any {
+ if l == nil {
+ return r
+ }
+ if r == nil {
+ return l
+ }
+ return &binopTypeError{"add", l, r}
+ },
)
}
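The int callback of funcOpAdd now detects overflow instead of eagerly promoting to big.Int: if the sum did not move in the same direction as r, it wrapped around and the operands are redone as big integers. A standalone sketch of that check (addInt is illustrative; the printed values assume a 64-bit int):

    package main

    import (
        "fmt"
        "math"
        "math/big"
    )

    // addInt mirrors funcOpAdd's int case: wraparound is detected by
    // comparing the direction of the result with the sign of r.
    func addInt(l, r int) any {
        if v := l + r; (v >= l) == (r >= 0) {
            return v
        }
        x, y := big.NewInt(int64(l)), big.NewInt(int64(r))
        return x.Add(x, y)
    }

    func main() {
        fmt.Println(addInt(1, 2))            // 3
        fmt.Println(addInt(math.MaxInt, 1))  // 9223372036854775808 (*big.Int)
        fmt.Println(addInt(math.MinInt, -1)) // -9223372036854775809 (*big.Int)
    }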
-func funcOpSub(_, l, r interface{}) interface{} {
+func funcOpSub(_, l, r any) any {
return binopTypeSwitch(l, r,
- func(l, r int) interface{} { return l - r },
- func(l, r float64) interface{} { return l - r },
- func(l, r *big.Int) interface{} { return new(big.Int).Sub(l, r) },
- func(l, r string) interface{} { return &binopTypeError{"subtract", l, r} },
- func(l, r []interface{}) interface{} {
- a := make([]interface{}, 0, len(l))
- for _, v := range l {
- var found bool
- for _, w := range r {
- if reflect.DeepEqual(normalizeNumbers(v), normalizeNumbers(w)) {
- found = true
- break
+ func(l, r int) any {
+ if v := l - r; (v <= l) == (r >= 0) {
+ return v
+ }
+ x, y := big.NewInt(int64(l)), big.NewInt(int64(r))
+ return x.Sub(x, y)
+ },
+ func(l, r float64) any { return l - r },
+ func(l, r *big.Int) any { return new(big.Int).Sub(l, r) },
+ func(l, r string) any { return &binopTypeError{"subtract", l, r} },
+ func(l, r []any) any {
+ v := make([]any, 0, len(l))
+ L:
+ for _, l := range l {
+ for _, r := range r {
+ if Compare(l, r) == 0 {
+ continue L
}
}
- if !found {
- a = append(a, v)
- }
+ v = append(v, l)
}
- return a
+ return v
},
- func(l, r map[string]interface{}) interface{} { return &binopTypeError{"subtract", l, r} },
- func(l, r interface{}) interface{} { return &binopTypeError{"subtract", l, r} },
+ func(l, r map[string]any) any { return &binopTypeError{"subtract", l, r} },
+ func(l, r any) any { return &binopTypeError{"subtract", l, r} },
)
}
-func funcOpMul(_, l, r interface{}) interface{} {
+func funcOpMul(_, l, r any) any {
return binopTypeSwitch(l, r,
- func(l, r int) interface{} { return l * r },
- func(l, r float64) interface{} { return l * r },
- func(l, r *big.Int) interface{} { return new(big.Int).Mul(l, r) },
- func(l, r string) interface{} { return &binopTypeError{"multiply", l, r} },
- func(l, r []interface{}) interface{} { return &binopTypeError{"multiply", l, r} },
- deepMergeObjects,
- func(l, r interface{}) interface{} {
- multiplyString := func(s string, cnt float64) interface{} {
- if cnt <= 0.0 || int(cnt) < 0 || int(cnt) > maxHalfInt/(16*(len(s)+1)) {
- return nil
- }
- if cnt < 1.0 {
- return s
- }
- return strings.Repeat(s, int(cnt))
+ func(l, r int) any {
+ if v := l * r; r == 0 || v/r == l {
+ return v
}
+ x, y := big.NewInt(int64(l)), big.NewInt(int64(r))
+ return x.Mul(x, y)
+ },
+ func(l, r float64) any { return l * r },
+ func(l, r *big.Int) any { return new(big.Int).Mul(l, r) },
+ func(l, r string) any { return &binopTypeError{"multiply", l, r} },
+ func(l, r []any) any { return &binopTypeError{"multiply", l, r} },
+ deepMergeObjects,
+ func(l, r any) any {
if l, ok := l.(string); ok {
- if f, ok := toFloat(r); ok {
- return multiplyString(l, f)
+ if r, ok := toFloat(r); ok {
+ return repeatString(l, r)
}
}
if r, ok := r.(string); ok {
- if f, ok := toFloat(l); ok {
- return multiplyString(r, f)
+ if l, ok := toFloat(l); ok {
+ return repeatString(r, l)
}
}
return &binopTypeError{"multiply", l, r}
@@ -401,15 +414,15 @@ func funcOpMul(_, l, r interface{}) interface{} {
)
}
-func deepMergeObjects(l, r map[string]interface{}) interface{} {
- m := make(map[string]interface{}, len(l)+len(r))
+func deepMergeObjects(l, r map[string]any) any {
+ m := make(map[string]any, len(l)+len(r))
for k, v := range l {
m[k] = v
}
for k, v := range r {
if mk, ok := m[k]; ok {
- if mk, ok := mk.(map[string]interface{}); ok {
- if w, ok := v.(map[string]interface{}); ok {
+ if mk, ok := mk.(map[string]any); ok {
+ if w, ok := v.(map[string]any); ok {
v = deepMergeObjects(mk, w)
}
}
@@ -419,110 +432,118 @@ func deepMergeObjects(l, r map[string]interface{}) interface{} {
return m
}
-func funcOpDiv(_, l, r interface{}) interface{} {
+func repeatString(s string, n float64) any {
+ if n < 0.0 || len(s) > 0 && n > float64(0x10000000/len(s)) || math.IsNaN(n) {
+ return nil
+ }
+ if s == "" {
+ return ""
+ }
+ return strings.Repeat(s, int(n))
+}
+
+func funcOpDiv(_, l, r any) any {
return binopTypeSwitch(l, r,
- func(l, r int) interface{} {
+ func(l, r int) any {
if r == 0 {
- if l == 0 {
- return math.NaN()
- }
return &zeroDivisionError{l, r}
}
+ if l%r == 0 {
+ return l / r
+ }
return float64(l) / float64(r)
},
- func(l, r float64) interface{} {
+ func(l, r float64) any {
if r == 0.0 {
- if l == 0.0 {
- return math.NaN()
- }
return &zeroDivisionError{l, r}
}
return l / r
},
- func(l, r *big.Int) interface{} {
+ func(l, r *big.Int) any {
if r.Sign() == 0 {
- if l.Sign() == 0 {
- return math.NaN()
- }
return &zeroDivisionError{l, r}
}
- x := new(big.Int).Div(l, r)
- if new(big.Int).Mul(x, r).Cmp(l) == 0 {
- return x
+ d, m := new(big.Int).DivMod(l, r, new(big.Int))
+ if m.Sign() == 0 {
+ return d
}
return bigToFloat(l) / bigToFloat(r)
},
- func(l, r string) interface{} {
+ func(l, r string) any {
if l == "" {
- return []interface{}{}
+ return []any{}
}
xs := strings.Split(l, r)
- vs := make([]interface{}, len(xs))
+ vs := make([]any, len(xs))
for i, x := range xs {
vs[i] = x
}
return vs
},
- func(l, r []interface{}) interface{} { return &binopTypeError{"divide", l, r} },
- func(l, r map[string]interface{}) interface{} { return &binopTypeError{"divide", l, r} },
- func(l, r interface{}) interface{} { return &binopTypeError{"divide", l, r} },
+ func(l, r []any) any { return &binopTypeError{"divide", l, r} },
+ func(l, r map[string]any) any { return &binopTypeError{"divide", l, r} },
+ func(l, r any) any { return &binopTypeError{"divide", l, r} },
)
}
-func funcOpMod(_, l, r interface{}) interface{} {
+func funcOpMod(_, l, r any) any {
return binopTypeSwitch(l, r,
- func(l, r int) interface{} {
+ func(l, r int) any {
if r == 0 {
return &zeroModuloError{l, r}
}
return l % r
},
- func(l, r float64) interface{} {
- if int(r) == 0 {
+ func(l, r float64) any {
+ ri := floatToInt(r)
+ if ri == 0 {
return &zeroModuloError{l, r}
}
- return int(l) % int(r)
+ if math.IsNaN(l) || math.IsNaN(r) {
+ return math.NaN()
+ }
+ return floatToInt(l) % ri
},
- func(l, r *big.Int) interface{} {
+ func(l, r *big.Int) any {
if r.Sign() == 0 {
return &zeroModuloError{l, r}
}
- return new(big.Int).Mod(l, r)
+ return new(big.Int).Rem(l, r)
},
- func(l, r string) interface{} { return &binopTypeError{"modulo", l, r} },
- func(l, r []interface{}) interface{} { return &binopTypeError{"modulo", l, r} },
- func(l, r map[string]interface{}) interface{} { return &binopTypeError{"modulo", l, r} },
- func(l, r interface{}) interface{} { return &binopTypeError{"modulo", l, r} },
+ func(l, r string) any { return &binopTypeError{"modulo", l, r} },
+ func(l, r []any) any { return &binopTypeError{"modulo", l, r} },
+ func(l, r map[string]any) any { return &binopTypeError{"modulo", l, r} },
+ func(l, r any) any { return &binopTypeError{"modulo", l, r} },
)
}
-func funcOpAlt(_, l, r interface{}) interface{} {
+func funcOpAlt(_, l, r any) any {
if l == nil || l == false {
return r
}
return l
}
-func funcOpEq(_, l, r interface{}) interface{} {
- return compare(l, r) == 0
+func funcOpEq(_, l, r any) any {
+ return Compare(l, r) == 0
}
-func funcOpNe(_, l, r interface{}) interface{} {
- return compare(l, r) != 0
+func funcOpNe(_, l, r any) any {
+ return Compare(l, r) != 0
}
-func funcOpGt(_, l, r interface{}) interface{} {
- return compare(l, r) > 0
+func funcOpGt(_, l, r any) any {
+ return Compare(l, r) > 0
}
-func funcOpLt(_, l, r interface{}) interface{} {
- return compare(l, r) < 0
+func funcOpLt(_, l, r any) any {
+ return Compare(l, r) < 0
}
-func funcOpGe(_, l, r interface{}) interface{} {
- return compare(l, r) >= 0
+func funcOpGe(_, l, r any) any {
+ return Compare(l, r) >= 0
}
-func funcOpLe(_, l, r interface{}) interface{} {
- return compare(l, r) <= 0
+func funcOpLe(_, l, r any) any {
+ return Compare(l, r) <= 0
}
diff --git a/vendor/github.com/itchyny/gojq/option.go b/vendor/github.com/itchyny/gojq/option.go
index 126ecd140..f1a110fae 100644
--- a/vendor/github.com/itchyny/gojq/option.go
+++ b/vendor/github.com/itchyny/gojq/option.go
@@ -6,7 +6,7 @@ import "fmt"
type CompilerOption func(*compiler)
// WithModuleLoader is a compiler option for module loader.
-// If you want to load modules from the filesystem, use NewModuleLoader.
+// If you want to load modules from the filesystem, use [NewModuleLoader].
func WithModuleLoader(moduleLoader ModuleLoader) CompilerOption {
return func(c *compiler) {
c.moduleLoader = moduleLoader
@@ -15,7 +15,7 @@ func WithModuleLoader(moduleLoader ModuleLoader) CompilerOption {
// WithEnvironLoader is a compiler option for environment variables loader.
// The OS environment variables are not accessible by default due to security
-// reason. You can pass os.Environ if you allow to access it.
+// reasons. You can specify [os.Environ] as argument if you allow to access.
func WithEnvironLoader(environLoader func() []string) CompilerOption {
return func(c *compiler) {
c.environLoader = environLoader
@@ -23,8 +23,7 @@ func WithEnvironLoader(environLoader func() []string) CompilerOption {
}
// WithVariables is a compiler option for variable names. The variables can be
-// used in the query. You have to give the values to query.Run or code.Run in
-// the same order.
+// used in the query. You have to give the values to [*Code.Run] in the same order.
func WithVariables(variables []string) CompilerOption {
return func(c *compiler) {
c.variables = variables
@@ -35,12 +34,29 @@ func WithVariables(variables []string) CompilerOption {
// Specify the minimum and maximum count of the function arguments. These
// values should satisfy 0 <= minarity <= maxarity <= 30, otherwise panics.
// On handling numbers, you should take account to int, float64 and *big.Int.
-// Refer to ValueError to return a value error just like built-in error function.
-// If you want to emit multiple values, call the empty function, accept a filter
-// for its argument, or call another built-in function, then prepend the jq
-// function to each query on parsing.
-func WithFunction(name string, minarity, maxarity int,
- f func(interface{}, []interface{}) interface{}) CompilerOption {
+// These are the number types you are allowed to return, so do not return int64.
+// Refer to [ValueError] to return a value error just like built-in error
+// function. If you want to emit multiple values, call the empty function,
+// accept a filter for its argument, or call another built-in function, then
+// use LoadInitModules of the module loader.
+func WithFunction(name string, minarity, maxarity int, f func(any, []any) any) CompilerOption {
+ return withFunction(name, minarity, maxarity, false, f)
+}
+
+// WithIterFunction is a compiler option for adding a custom iterator function.
+// This is like the [WithFunction] option, but you can add a function which
+// returns an Iter to emit multiple values. You cannot define both iterator and
+// non-iterator functions of the same name (with possibly different arities).
+// See also [NewIter], which can be used to convert values or an error to an Iter.
+func WithIterFunction(name string, minarity, maxarity int, f func(any, []any) Iter) CompilerOption {
+ return withFunction(name, minarity, maxarity, true,
+ func(v any, args []any) any {
+ return f(v, args)
+ },
+ )
+}
+
+func withFunction(name string, minarity, maxarity int, iter bool, f func(any, []any) any) CompilerOption {
if !(0 <= minarity && minarity <= maxarity && maxarity <= 30) {
panic(fmt.Sprintf("invalid arity for %q: %d, %d", name, minarity, maxarity))
}
@@ -50,9 +66,12 @@ func WithFunction(name string, minarity, maxarity int,
c.customFuncs = make(map[string]function)
}
if fn, ok := c.customFuncs[name]; ok {
+ if fn.iter != iter {
+ panic(fmt.Sprintf("cannot define both iterator and non-iterator functions for %q", name))
+ }
c.customFuncs[name] = function{
- argcount | fn.argcount,
- func(x interface{}, xs []interface{}) interface{} {
+ argcount | fn.argcount, iter,
+ func(x any, xs []any) any {
				if argcount&(1<<len(xs)) != 0 {
-	if yyParse(l) > 0 {
- return nil, l.err
+func reverseFuncDef(xs []*FuncDef) []*FuncDef {
+ for i, j := 0, len(xs)-1; i < j; i, j = i+1, j-1 {
+ xs[i], xs[j] = xs[j], xs[i]
}
- return l.result, nil
+ return xs
}
-//line parser.go.y:14
+func prependFuncDef(xs []*FuncDef, x *FuncDef) []*FuncDef {
+ xs = append(xs, nil)
+ copy(xs[1:], xs)
+ xs[0] = x
+ return xs
+}
+
+//line parser.go.y:19
type yySymType struct {
yys int
- value interface{}
+ value any
token string
operator Operator
}
@@ -27,9 +32,9 @@ type yySymType struct {
const tokAltOp = 57346
const tokUpdateOp = 57347
const tokDestAltOp = 57348
-const tokOrOp = 57349
-const tokAndOp = 57350
-const tokCompareOp = 57351
+const tokCompareOp = 57349
+const tokOrOp = 57350
+const tokAndOp = 57351
const tokModule = 57352
const tokImport = 57353
const tokInclude = 57354
@@ -40,31 +45,34 @@ const tokBreak = 57358
const tokNull = 57359
const tokTrue = 57360
const tokFalse = 57361
-const tokIdent = 57362
-const tokVariable = 57363
-const tokModuleIdent = 57364
-const tokModuleVariable = 57365
-const tokIndex = 57366
-const tokNumber = 57367
-const tokFormat = 57368
-const tokInvalid = 57369
-const tokString = 57370
-const tokStringStart = 57371
-const tokStringQuery = 57372
-const tokStringEnd = 57373
-const tokIf = 57374
-const tokThen = 57375
-const tokElif = 57376
-const tokElse = 57377
-const tokEnd = 57378
-const tokTry = 57379
-const tokCatch = 57380
-const tokReduce = 57381
-const tokForeach = 57382
-const tokRecurse = 57383
-const tokFuncDefPost = 57384
-const tokTermPost = 57385
-const tokEmptyCatch = 57386
+const tokIf = 57362
+const tokThen = 57363
+const tokElif = 57364
+const tokElse = 57365
+const tokEnd = 57366
+const tokTry = 57367
+const tokCatch = 57368
+const tokReduce = 57369
+const tokForeach = 57370
+const tokIdent = 57371
+const tokVariable = 57372
+const tokModuleIdent = 57373
+const tokModuleVariable = 57374
+const tokRecurse = 57375
+const tokIndex = 57376
+const tokNumber = 57377
+const tokFormat = 57378
+const tokString = 57379
+const tokStringStart = 57380
+const tokStringQuery = 57381
+const tokStringEnd = 57382
+const tokInvalid = 57383
+const tokInvalidEscapeSequence = 57384
+const tokUnterminatedString = 57385
+const tokFuncDefQuery = 57386
+const tokExpr = 57387
+const tokTerm = 57388
+const tokEmptyCatch = 57389
var yyToknames = [...]string{
"$end",
@@ -73,9 +81,9 @@ var yyToknames = [...]string{
"tokAltOp",
"tokUpdateOp",
"tokDestAltOp",
+ "tokCompareOp",
"tokOrOp",
"tokAndOp",
- "tokCompareOp",
"tokModule",
"tokImport",
"tokInclude",
@@ -86,31 +94,33 @@ var yyToknames = [...]string{
"tokNull",
"tokTrue",
"tokFalse",
+ "tokIf",
+ "tokThen",
+ "tokElif",
+ "tokElse",
+ "tokEnd",
+ "tokTry",
+ "tokCatch",
+ "tokReduce",
+ "tokForeach",
"tokIdent",
"tokVariable",
"tokModuleIdent",
"tokModuleVariable",
+ "tokRecurse",
"tokIndex",
"tokNumber",
"tokFormat",
- "tokInvalid",
"tokString",
"tokStringStart",
"tokStringQuery",
"tokStringEnd",
- "tokIf",
- "tokThen",
- "tokElif",
- "tokElse",
- "tokEnd",
- "tokTry",
- "tokCatch",
- "tokReduce",
- "tokForeach",
- "tokRecurse",
- "tokFuncDefPost",
- "tokTermPost",
- "tokEmptyCatch",
+ "tokInvalid",
+ "tokInvalidEscapeSequence",
+ "tokUnterminatedString",
+ "tokFuncDefQuery",
+ "tokExpr",
+ "tokTerm",
"'|'",
"','",
"'+'",
@@ -120,6 +130,7 @@ var yyToknames = [...]string{
"'%'",
"'.'",
"'?'",
+ "tokEmptyCatch",
"'['",
"';'",
"':'",
@@ -136,300 +147,276 @@ const yyEofCode = 1
const yyErrCode = 2
const yyInitialStackSize = 16
-//line parser.go.y:665
+//line parser.go.y:671
//line yacctab:1
-var yyExca = [...]int{
+var yyExca = [...]int16{
-1, 1,
1, -1,
-2, 0,
- -1, 132,
+ -1, 145,
5, 0,
- -2, 32,
- -1, 135,
- 9, 0,
- -2, 35,
- -1, 196,
- 56, 113,
- -2, 54,
+ -2, 27,
+ -1, 148,
+ 7, 0,
+ -2, 30,
+ -1, 199,
+ 59, 114,
+ -2, 49,
}
const yyPrivate = 57344
-const yyLast = 1053
-
-var yyAct = [...]int{
- 88, 234, 176, 111, 14, 171, 12, 122, 211, 31,
- 177, 192, 109, 116, 9, 142, 48, 225, 97, 99,
- 95, 96, 91, 143, 51, 162, 124, 10, 245, 223,
- 233, 103, 224, 106, 157, 158, 232, 117, 107, 108,
- 105, 244, 222, 238, 102, 112, 237, 77, 78, 104,
- 79, 80, 81, 262, 257, 209, 144, 123, 208, 241,
- 227, 226, 145, 165, 164, 128, 77, 78, 127, 79,
- 80, 81, 84, 85, 86, 129, 75, 130, 131, 132,
- 133, 134, 135, 136, 137, 138, 139, 140, 74, 76,
- 82, 83, 84, 85, 86, 149, 75, 90, 126, 198,
- 243, 174, 197, 147, 47, 160, 75, 74, 76, 82,
- 83, 84, 85, 86, 166, 75, 146, 276, 240, 156,
- 275, 42, 43, 152, 163, 93, 92, 94, 125, 255,
- 185, 186, 265, 90, 46, 181, 182, 183, 45, 51,
- 119, 188, 175, 179, 42, 43, 180, 94, 100, 193,
- 169, 199, 170, 168, 202, 194, 203, 204, 205, 101,
- 190, 93, 92, 94, 207, 172, 173, 200, 201, 212,
- 212, 215, 184, 112, 213, 98, 210, 217, 10, 216,
- 187, 13, 123, 181, 182, 183, 121, 219, 220, 13,
- 3, 179, 11, 5, 180, 90, 228, 7, 8, 230,
- 44, 28, 27, 218, 221, 178, 87, 49, 77, 78,
- 239, 79, 80, 81, 110, 82, 83, 84, 85, 86,
- 184, 75, 151, 93, 92, 94, 10, 154, 254, 193,
- 80, 81, 90, 252, 253, 194, 256, 247, 159, 246,
- 120, 191, 248, 249, 258, 212, 212, 189, 261, 259,
- 260, 82, 83, 84, 85, 86, 266, 75, 268, 141,
- 93, 92, 94, 206, 270, 271, 6, 4, 272, 82,
- 83, 84, 85, 86, 277, 75, 2, 278, 269, 20,
- 1, 19, 37, 24, 25, 26, 38, 40, 39, 41,
- 23, 29, 30, 0, 42, 43, 0, 0, 17, 0,
- 0, 0, 0, 18, 0, 15, 16, 22, 0, 89,
- 0, 0, 0, 34, 33, 0, 0, 0, 21, 90,
- 36, 0, 150, 32, 0, 148, 35, 20, 0, 19,
- 37, 24, 25, 26, 38, 40, 39, 41, 23, 29,
- 30, 0, 42, 43, 0, 0, 17, 93, 92, 94,
- 0, 18, 0, 15, 16, 22, 0, 0, 0, 0,
- 0, 34, 33, 0, 0, 0, 21, 0, 36, 0,
- 0, 32, 0, 229, 35, 20, 0, 19, 37, 24,
- 25, 26, 38, 40, 39, 41, 23, 29, 30, 0,
- 42, 43, 0, 0, 17, 0, 0, 0, 0, 18,
- 0, 15, 16, 22, 0, 0, 0, 0, 0, 34,
- 33, 0, 0, 0, 21, 0, 36, 0, 0, 32,
- 0, 118, 35, 20, 0, 19, 37, 24, 25, 26,
- 38, 40, 39, 41, 23, 29, 30, 0, 42, 43,
- 0, 0, 17, 0, 0, 0, 0, 18, 0, 15,
- 16, 22, 0, 0, 0, 0, 0, 34, 33, 0,
- 0, 0, 21, 0, 36, 0, 0, 32, 53, 54,
- 35, 55, 56, 57, 58, 59, 60, 61, 62, 63,
- 64, 114, 196, 0, 0, 0, 0, 0, 0, 42,
- 43, 0, 0, 65, 66, 67, 68, 69, 70, 71,
- 72, 73, 53, 54, 0, 55, 56, 57, 58, 59,
- 60, 61, 62, 63, 64, 114, 115, 0, 195, 0,
- 0, 0, 0, 42, 43, 0, 0, 65, 66, 67,
- 68, 69, 70, 71, 72, 73, 37, 24, 25, 26,
- 38, 40, 39, 41, 23, 29, 30, 0, 42, 43,
- 77, 78, 113, 79, 80, 81, 0, 0, 0, 0,
- 0, 22, 0, 0, 0, 0, 0, 34, 33, 79,
- 80, 81, 21, 0, 36, 0, 0, 32, 77, 78,
- 35, 79, 80, 81, 0, 0, 0, 0, 0, 0,
- 0, 74, 76, 82, 83, 84, 85, 86, 0, 75,
- 0, 0, 0, 77, 78, 251, 79, 80, 81, 82,
- 83, 84, 85, 86, 0, 75, 0, 0, 0, 74,
- 76, 82, 83, 84, 85, 86, 81, 75, 0, 0,
- 0, 77, 78, 231, 79, 80, 81, 0, 0, 0,
- 0, 0, 0, 0, 74, 76, 82, 83, 84, 85,
- 86, 0, 75, 0, 0, 0, 77, 78, 167, 79,
- 80, 81, 0, 0, 82, 83, 84, 85, 86, 0,
- 75, 0, 74, 76, 82, 83, 84, 85, 86, 0,
- 75, 0, 0, 77, 78, 279, 79, 80, 81, 0,
- 0, 0, 0, 0, 0, 0, 0, 74, 76, 82,
- 83, 84, 85, 86, 0, 75, 0, 0, 77, 78,
- 274, 79, 80, 81, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 74, 76, 82, 83, 84, 85,
- 86, 0, 75, 0, 0, 77, 78, 250, 79, 80,
- 81, 0, 0, 0, 0, 0, 0, 0, 0, 74,
- 76, 82, 83, 84, 85, 86, 0, 75, 0, 0,
- 77, 78, 242, 79, 80, 81, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 74, 76, 82, 83,
- 84, 85, 86, 0, 75, 0, 0, 77, 78, 214,
- 79, 80, 81, 0, 0, 0, 0, 0, 0, 0,
- 0, 74, 76, 82, 83, 84, 85, 86, 0, 75,
- 0, 0, 77, 78, 161, 79, 80, 81, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 74, 76,
- 82, 83, 84, 85, 86, 0, 75, 0, 273, 77,
- 78, 0, 79, 80, 81, 0, 0, 0, 0, 0,
- 0, 0, 0, 74, 76, 82, 83, 84, 85, 86,
- 0, 75, 0, 264, 77, 78, 0, 79, 80, 81,
+const yyLast = 782
+
+var yyAct = [...]int16{
+ 78, 134, 186, 102, 103, 10, 175, 195, 32, 211,
+ 48, 108, 81, 176, 131, 6, 229, 5, 50, 73,
+ 74, 159, 14, 180, 181, 182, 124, 98, 110, 135,
+ 280, 97, 228, 279, 115, 104, 16, 158, 265, 121,
+ 114, 178, 123, 179, 244, 73, 74, 180, 181, 182,
+ 73, 74, 112, 113, 154, 155, 136, 117, 117, 117,
+ 254, 243, 137, 183, 282, 178, 255, 179, 220, 6,
+ 247, 116, 118, 119, 128, 129, 73, 74, 99, 73,
+ 74, 227, 73, 74, 246, 141, 238, 183, 201, 237,
+ 132, 200, 139, 6, 235, 226, 138, 163, 208, 80,
+ 157, 207, 241, 231, 230, 161, 162, 73, 74, 117,
+ 117, 117, 117, 117, 117, 117, 117, 117, 117, 83,
+ 82, 278, 84, 144, 145, 146, 147, 148, 149, 150,
+ 151, 152, 153, 184, 185, 174, 50, 160, 193, 73,
+ 74, 127, 196, 202, 203, 126, 197, 73, 74, 125,
+ 73, 74, 248, 253, 189, 204, 45, 240, 206, 73,
+ 74, 245, 143, 210, 214, 215, 73, 74, 104, 217,
+ 218, 213, 79, 219, 86, 87, 76, 90, 88, 89,
+ 169, 43, 44, 117, 117, 73, 74, 75, 166, 117,
+ 222, 224, 80, 225, 73, 74, 273, 212, 212, 232,
+ 132, 223, 234, 216, 120, 271, 73, 74, 191, 239,
+ 43, 44, 83, 82, 85, 84, 274, 270, 96, 91,
+ 92, 93, 94, 95, 73, 74, 93, 94, 95, 249,
+ 84, 164, 251, 252, 196, 236, 267, 250, 197, 130,
+ 25, 256, 73, 74, 262, 263, 187, 188, 3, 190,
+ 257, 258, 260, 261, 264, 24, 266, 73, 74, 9,
+ 221, 268, 269, 117, 117, 111, 171, 272, 172, 170,
+ 13, 275, 276, 77, 90, 277, 89, 212, 212, 13,
+ 177, 281, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 106, 107, 91, 92, 93, 94, 95,
+ 47, 43, 44, 101, 165, 259, 91, 92, 93, 94,
+ 95, 242, 156, 122, 194, 17, 192, 15, 37, 21,
+ 22, 23, 33, 133, 105, 205, 7, 34, 209, 35,
+ 36, 39, 41, 40, 42, 19, 20, 28, 31, 43,
+ 44, 8, 4, 2, 86, 87, 1, 90, 88, 89,
+ 0, 29, 30, 0, 168, 90, 18, 0, 0, 27,
+ 0, 142, 38, 0, 140, 26, 52, 53, 54, 55,
+ 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
+ 66, 67, 68, 69, 70, 71, 72, 106, 107, 91,
+ 92, 93, 94, 95, 0, 43, 44, 91, 92, 93,
+ 94, 95, 0, 0, 0, 0, 0, 11, 12, 17,
+ 0, 15, 37, 21, 22, 23, 33, 0, 105, 0,
+ 0, 34, 100, 35, 36, 39, 41, 40, 42, 19,
+ 20, 28, 31, 43, 44, 0, 0, 0, 0, 86,
+ 87, 0, 90, 88, 89, 29, 30, 0, 0, 167,
+ 18, 0, 0, 27, 0, 0, 38, 0, 17, 26,
+ 15, 37, 21, 22, 23, 33, 0, 0, 0, 0,
+ 34, 0, 35, 36, 39, 41, 40, 42, 19, 20,
+ 28, 31, 43, 44, 91, 92, 93, 94, 95, 0,
+ 0, 0, 0, 0, 29, 30, 90, 88, 89, 18,
+ 0, 0, 27, 0, 0, 38, 0, 233, 26, 17,
+ 0, 15, 37, 21, 22, 23, 33, 0, 0, 0,
+ 0, 34, 0, 35, 36, 39, 41, 40, 42, 19,
+ 20, 28, 31, 43, 44, 0, 0, 0, 91, 92,
+ 93, 94, 95, 0, 0, 29, 30, 0, 0, 0,
+ 18, 0, 0, 27, 0, 0, 38, 0, 109, 26,
+ 17, 0, 15, 37, 21, 22, 23, 33, 0, 0,
+ 0, 0, 34, 0, 35, 36, 39, 41, 40, 42,
+ 19, 20, 28, 31, 43, 44, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 29, 30, 0, 0,
+ 0, 18, 0, 0, 27, 0, 0, 38, 0, 0,
+ 26, 52, 53, 54, 55, 56, 57, 58, 59, 60,
+ 61, 62, 63, 64, 65, 66, 67, 68, 69, 70,
+ 71, 72, 49, 0, 0, 0, 0, 0, 0, 0,
+ 51, 52, 53, 54, 55, 56, 57, 58, 59, 60,
+ 61, 62, 63, 64, 65, 66, 67, 68, 69, 70,
+ 71, 72, 49, 0, 0, 0, 0, 173, 0, 0,
+ 51, 37, 21, 22, 23, 33, 0, 0, 0, 0,
+ 34, 0, 35, 36, 39, 41, 40, 42, 19, 20,
+ 28, 31, 43, 44, 0, 0, 0, 46, 0, 0,
+ 0, 0, 0, 0, 29, 30, 0, 0, 0, 18,
+ 0, 0, 27, 0, 0, 38, 0, 0, 26, 52,
+ 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
+ 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
+ 106, 199, 0, 0, 0, 0, 0, 0, 43, 44,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 74, 76, 82, 83, 84, 85, 86, 0, 75, 0,
- 263, 77, 78, 0, 79, 80, 81, 0, 0, 0,
- 0, 0, 0, 0, 0, 74, 76, 82, 83, 84,
- 85, 86, 0, 75, 0, 236, 0, 0, 0, 77,
- 78, 235, 79, 80, 81, 0, 0, 0, 0, 0,
- 0, 0, 74, 76, 82, 83, 84, 85, 86, 0,
- 75, 77, 78, 0, 79, 80, 81, 0, 267, 0,
- 77, 78, 0, 79, 80, 81, 0, 0, 0, 0,
- 74, 76, 82, 83, 84, 85, 86, 0, 75, 77,
- 78, 0, 79, 80, 81, 155, 0, 0, 0, 153,
- 0, 0, 74, 76, 82, 83, 84, 85, 86, 0,
- 75, 74, 76, 82, 83, 84, 85, 86, 0, 75,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 74, 76, 82, 83, 84, 85, 86, 0, 75, 53,
- 54, 0, 55, 56, 57, 58, 59, 60, 61, 62,
- 63, 64, 50, 0, 0, 0, 0, 0, 0, 0,
- 52, 0, 0, 0, 65, 66, 67, 68, 69, 70,
- 71, 72, 73,
+ 0, 198,
}
-var yyPact = [...]int{
- 180, -1000, 186, -33, -1000, 410, 186, 110, 106, 49,
- 1012, -1000, 965, 410, 295, 520, 520, 410, 410, 127,
- 139, 93, -1000, -1000, -1000, -1000, -1000, -8, -1000, -1000,
- 116, -1000, 410, 520, 520, 495, 362, 119, -1000, -1000,
- -1000, -1000, -1000, -1000, -1000, 172, -33, -1000, -35, 82,
- 42, 12, 9, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+var yyPact = [...]int16{
+ 238, -1000, -1000, -48, 406, 98, 643, -1000, -1000, -1000,
+ 112, 150, 139, 557, 158, 184, 170, 189, 173, -1000,
+ -1000, -1000, -1000, -1000, 18, -1000, 368, 506, -1000, 665,
+ 665, 144, -1000, 557, 665, 665, 665, 174, 557, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -22, -1000, 90,
+ 86, 82, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
-1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
- -1000, -1000, -1000, -1000, 410, -1000, 410, 410, 410, 410,
- 410, 410, 410, 410, 410, 410, 410, -1000, 965, 2,
- -1000, -1000, -1000, 93, 266, 208, 109, 946, 410, 937,
- 74, -22, -1000, -1000, 410, -1000, 756, 171, 171, -36,
- 78, 8, 7, 410, -1000, -1000, -1000, 599, -1000, -1000,
- 122, 145, 46, -1000, -1000, 1012, 166, 166, 166, 965,
- 204, 204, 562, 222, 617, 168, 23, 23, 53, 53,
- 53, 135, -1000, -1000, 2, 461, -1000, -1000, -1000, 43,
- 410, 2, 2, 410, -1000, 410, 410, 410, 145, 0,
- 965, -1000, -1000, 495, 520, 520, 731, -1000, -1000, -1000,
- 410, -33, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
- -1000, -1000, -1000, -1000, 118, -1000, -1000, 410, 2, -17,
- -1000, -29, -1000, 5, 4, 410, -1000, -1000, 314, 574,
- -21, -27, 887, -1000, 965, 860, -12, -1000, -1000, 410,
- -1000, -1000, 73, -1000, 3, 704, 45, -1000, -18, -1000,
- 965, -1000, -1000, 2, -1000, 461, 2, 2, 679, -1000,
- 546, -1000, 410, 410, 94, 410, -1000, -2, 145, 965,
- 520, 520, -1000, -1000, -1000, 166, -1000, -1000, -1000, -1000,
- -3, -1000, 835, 808, 96, 410, 915, 410, -1000, -1000,
- -1000, -1000, 2, 410, 410, -1000, 965, 410, 783, -1000,
- 652, 62, 887, -1000, -1000, -1000, 410, -1000, 627, -1000,
+ -1000, -1000, -1000, 557, 557, 225, -48, -1000, 112, -1,
+ -1000, -1000, -1000, 173, 312, 115, 665, 665, 665, 665,
+ 665, 665, 665, 665, 665, 665, -5, -1000, -1000, 557,
+ -1000, -27, -1000, 78, 46, 557, -1000, -1000, -1000, -1000,
+ 35, 557, 65, 65, -1000, 210, 162, 65, 445, 350,
+ -1000, 119, 229, -1000, 613, 30, 30, 30, 112, -1000,
+ 217, 96, -1000, 202, -1000, -1000, -1, 721, -1000, -1000,
+ -1000, 29, 557, 557, 170, 499, 267, 358, 256, 175,
+ 175, -1000, -1000, -1000, 557, 217, 40, 112, -1000, 274,
+ 665, 665, 103, -1000, 557, -1000, 665, -1, -1, -1000,
+ -1000, -1000, 557, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, 6, -1000, -1000, -48, -1000, -1000, -1000,
+ 557, -1, 33, -1000, -32, -1000, 45, 44, 557, -1000,
+ -1000, 455, 32, 112, 177, 28, -1000, -1000, 557, -1000,
+ -1000, 110, 170, 110, 43, 112, -1000, 1, -16, 100,
+ -1000, 22, -1000, 94, 112, -1000, -1000, -1, -1000, 721,
+ -1, -1, 92, -1000, -2, -1000, -1000, 7, 217, 112,
+ 665, 665, 230, 557, 557, -1000, -1000, 30, -1000, -1000,
+ -1000, -1000, -1000, -21, -1000, 557, -1000, 110, 110, 212,
+ 557, 557, 159, 147, -1000, -1, 138, -1000, 195, 112,
+ 557, 557, -1000, -1000, 557, 60, -28, 112, -1000, -1000,
+ 557, 3, -1000,
}
-var yyPgo = [...]int{
- 0, 280, 276, 267, 193, 266, 7, 192, 175, 263,
- 0, 259, 15, 247, 241, 11, 4, 9, 240, 22,
- 238, 1, 228, 227, 12, 214, 8, 2, 10, 16,
- 207, 205, 203, 5, 202, 201, 13, 3,
+var yyPgo = [...]int16{
+ 0, 356, 353, 352, 351, 14, 336, 259, 265, 335,
+ 0, 333, 1, 326, 324, 7, 36, 22, 8, 323,
+ 12, 322, 321, 315, 314, 313, 3, 9, 6, 13,
+ 310, 10, 280, 260, 2, 255, 240, 11, 4,
}
-var yyR1 = [...]int{
+var yyR1 = [...]int8{
0, 1, 2, 2, 3, 3, 4, 4, 5, 5,
- 6, 6, 7, 7, 8, 8, 9, 9, 33, 33,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 10, 10, 11, 11, 12, 12, 12, 13, 13, 14,
- 14, 15, 15, 15, 15, 16, 16, 16, 16, 16,
- 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
- 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
- 16, 16, 17, 17, 18, 18, 18, 34, 34, 35,
- 35, 19, 19, 19, 19, 19, 20, 20, 21, 21,
- 22, 22, 23, 23, 24, 24, 24, 25, 25, 25,
- 25, 25, 37, 37, 37, 26, 26, 27, 27, 27,
- 27, 27, 27, 27, 28, 29, 29, 29, 30, 30,
- 30, 31, 31, 32, 32, 36, 36, 36, 36, 36,
- 36, 36, 36, 36, 36, 36, 36, 36, 36, 36,
- 36, 36, 36, 36, 36, 36,
+ 6, 6, 7, 7, 8, 8, 9, 9, 34, 34,
+ 10, 10, 10, 10, 10, 10, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 11, 11, 12,
+ 12, 12, 13, 13, 14, 14, 15, 15, 15, 15,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
+ 17, 17, 17, 17, 18, 18, 19, 19, 19, 35,
+ 35, 36, 36, 20, 20, 20, 20, 20, 21, 21,
+ 22, 22, 23, 23, 24, 24, 25, 25, 26, 26,
+ 26, 26, 26, 38, 38, 38, 27, 27, 28, 28,
+ 28, 28, 28, 28, 28, 29, 29, 29, 30, 30,
+ 31, 31, 31, 32, 32, 33, 33, 37, 37, 37,
+ 37, 37, 37, 37, 37, 37, 37, 37, 37, 37,
+ 37, 37, 37, 37, 37, 37, 37, 37,
}
-var yyR2 = [...]int{
- 0, 2, 0, 3, 2, 2, 0, 2, 6, 4,
- 0, 1, 0, 2, 5, 8, 1, 3, 1, 1,
- 2, 3, 5, 9, 9, 11, 7, 3, 4, 2,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 1, 1, 3, 1, 3, 3, 1, 3, 1,
- 3, 3, 3, 5, 1, 1, 1, 1, 2, 2,
- 1, 1, 1, 1, 4, 1, 1, 1, 2, 1,
- 3, 2, 2, 3, 3, 2, 2, 2, 2, 2,
- 3, 3, 1, 3, 0, 2, 4, 1, 1, 1,
- 1, 2, 3, 4, 4, 5, 1, 3, 0, 5,
- 0, 2, 0, 2, 0, 1, 3, 3, 3, 5,
- 1, 1, 1, 1, 1, 1, 3, 1, 1, 1,
- 1, 1, 1, 1, 3, 0, 1, 3, 3, 3,
- 3, 2, 3, 1, 3, 1, 1, 1, 1, 1,
+var yyR2 = [...]int8{
+ 0, 3, 0, 3, 0, 2, 6, 4, 0, 1,
+ 1, 1, 0, 2, 5, 8, 1, 3, 1, 1,
+ 2, 3, 5, 4, 3, 1, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 1, 1, 3, 1,
+ 3, 3, 1, 3, 1, 3, 3, 3, 5, 1,
+ 1, 1, 1, 2, 2, 1, 1, 1, 1, 4,
+ 1, 2, 3, 4, 2, 3, 1, 2, 2, 1,
+ 2, 1, 7, 3, 9, 9, 11, 2, 3, 2,
+ 2, 2, 3, 3, 1, 3, 0, 2, 4, 1,
+ 1, 1, 1, 2, 3, 4, 4, 5, 1, 3,
+ 0, 5, 0, 2, 0, 2, 1, 3, 3, 3,
+ 5, 1, 1, 1, 1, 1, 3, 1, 1, 1,
+ 1, 1, 1, 1, 1, 2, 3, 4, 1, 3,
+ 3, 3, 3, 2, 3, 1, 3, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1,
}
-var yyChk = [...]int{
- -1000, -1, -2, 10, -3, -4, -5, 11, 12, -28,
- 60, -7, -10, -8, -16, 39, 40, 32, 37, 15,
- 13, 52, 41, 24, 17, 18, 19, -34, -35, 25,
- 26, -17, 57, 48, 47, 60, 54, 16, 20, 22,
- 21, 23, 28, 29, -4, 28, 28, 55, -29, -30,
- 20, -36, 28, 7, 8, 10, 11, 12, 13, 14,
- 15, 16, 17, 18, 19, 32, 33, 34, 35, 36,
- 37, 38, 39, 40, 45, 53, 46, 4, 5, 7,
- 8, 9, 47, 48, 49, 50, 51, -7, -10, 14,
- 24, -19, 53, 52, 54, -16, -16, -10, -8, -10,
- 21, 20, -19, -17, 57, -17, -10, -16, -16, -24,
- -25, -37, -17, 57, 20, 21, -36, -10, 59, 21,
- -18, 14, -6, -28, 61, 46, 56, 56, 56, -10,
- -10, -10, -10, -10, -10, -10, -10, -10, -10, -10,
- -10, -11, -12, 21, 54, 60, -19, -17, 59, -10,
- 56, 14, 14, 33, -23, 38, 45, 56, 57, -20,
- -10, 58, 61, 46, 56, 56, -10, 59, 31, 28,
- 30, -33, 20, 21, 55, -29, -27, -28, -31, 25,
- 28, 17, 18, 19, 54, -27, -27, 45, 6, -13,
- -12, -14, -15, -37, -17, 57, 21, 59, 56, -10,
- -12, -12, -10, -10, -10, -10, -9, -33, 58, 55,
- -24, -26, -16, -26, 58, -10, -6, 59, -32, -27,
- -10, -12, 59, 46, 61, 46, 56, 56, -10, 59,
- -10, 59, 57, 57, -21, 34, 55, 58, 55, -10,
- 45, 56, 58, 55, 59, 46, -12, -15, -12, -12,
- 58, 59, -10, -10, -22, 35, -10, 56, -33, -26,
- -26, -27, 56, 55, 55, 36, -10, 33, -10, -12,
- -10, -10, -10, 55, 58, 58, 55, -21, -10, 58,
+var yyChk = [...]int16{
+ -1000, -1, -2, 10, -3, -29, 63, -6, -4, -7,
+ -10, 11, 12, -8, -17, 15, -16, 13, 54, 33,
+ 34, 17, 18, 19, -35, -36, 63, 57, 35, 49,
+ 50, 36, -18, 20, 25, 27, 28, 16, 60, 29,
+ 31, 30, 32, 37, 38, 58, 64, -30, -31, 29,
+ -37, 37, 8, 9, 10, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
+ 26, 27, 28, 47, 48, 37, 37, -7, -10, 14,
+ 34, -20, 55, 54, 57, 30, 4, 5, 8, 9,
+ 7, 49, 50, 51, 52, 53, 29, -20, -18, 60,
+ 64, -25, -26, -38, -18, 60, 29, 30, -37, 62,
+ -10, -8, -17, -17, -18, -10, -16, -17, -16, -16,
+ 30, -10, -19, 64, 48, 59, 59, 59, -10, -10,
+ 14, -5, -29, -11, -12, 30, 57, 63, -20, -18,
+ 62, -10, 59, 47, -16, -16, -16, -16, -16, -16,
+ -16, -16, -16, -16, 59, 60, -21, -10, 64, 48,
+ 59, 59, -10, 62, 21, -24, 26, 14, 14, 61,
+ 40, 37, 39, 64, -31, -28, -29, -32, 35, 37,
+ 17, 18, 19, 57, -28, -28, -34, 29, 30, 58,
+ 47, 6, -13, -12, -14, -15, -38, -18, 60, 30,
+ 62, 59, -10, -10, -10, -9, -34, 61, 58, 64,
+ -26, -27, -16, -27, 61, -10, -16, -12, -12, -10,
+ 62, -33, -28, -5, -10, -12, 62, 48, 64, 48,
+ 59, 59, -10, 62, -10, 62, 58, 61, 58, -10,
+ 47, 59, -22, 60, 60, 61, 62, 48, 58, -12,
+ -15, -12, -12, 61, 62, 59, -34, -27, -27, -23,
+ 22, 23, -10, -10, -28, 59, -10, 24, -10, -10,
+ 58, 58, -12, 58, 21, -10, -10, -10, 61, 61,
+ 58, -10, 61,
}
-var yyDef = [...]int{
- 2, -2, 6, 0, 1, 12, 6, 0, 0, 0,
- 125, 4, 5, 12, 41, 0, 0, 0, 0, 0,
- 0, 55, 56, 57, 60, 61, 62, 63, 65, 66,
- 67, 69, 0, 0, 0, 104, 0, 0, 87, 88,
- 89, 90, 82, 84, 7, 0, 10, 3, 0, 126,
- 0, 0, 0, 135, 136, 137, 138, 139, 140, 141,
- 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
- 152, 153, 154, 155, 0, 29, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 13, 20, 0,
- 77, 78, 79, 0, 0, 0, 0, 0, 0, 102,
- 0, 0, 58, 59, 0, 68, 0, 71, 72, 0,
- 105, 110, 111, 0, 112, 113, 114, 0, 75, 76,
- 0, 0, 0, 11, 124, 125, 0, 0, 0, 21,
- 30, 31, -2, 33, 34, -2, 36, 37, 38, 39,
- 40, 0, 42, 44, 0, 0, 80, 81, 91, 0,
- 0, 0, 0, 0, 27, 0, 0, 0, 0, 0,
- 96, 70, 73, 104, 0, 0, 0, 74, 83, 85,
- 0, 10, 18, 19, 9, 127, 128, 117, 118, 119,
- 120, 121, 122, 123, 0, 129, 130, 0, 0, 0,
- 47, 0, 49, 0, 0, 0, -2, 92, 0, 0,
- 0, 0, 98, 103, 28, 0, 0, 16, 64, 0,
- 106, 107, 115, 108, 0, 0, 0, 131, 0, 133,
- 22, 43, 45, 0, 46, 0, 0, 0, 0, 93,
- 0, 94, 0, 0, 100, 0, 14, 0, 0, 97,
- 0, 0, 86, 8, 132, 0, 48, 50, 51, 52,
- 0, 95, 0, 0, 0, 0, 0, 0, 17, 116,
- 109, 134, 0, 0, 0, 26, 101, 0, 0, 53,
- 0, 0, 98, 15, 23, 24, 0, 99, 0, 25,
+var yyDef = [...]int16{
+ 2, -2, 4, 0, 12, 0, 0, 1, 5, 10,
+ 11, 0, 0, 12, 36, 0, 25, 0, 50, 51,
+ 52, 55, 56, 57, 58, 60, 0, 0, 66, 0,
+ 0, 69, 71, 0, 0, 0, 0, 0, 0, 89,
+ 90, 91, 92, 84, 86, 3, 125, 0, 128, 0,
+ 0, 0, 137, 138, 139, 140, 141, 142, 143, 144,
+ 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
+ 155, 156, 157, 0, 0, 0, 8, 13, 20, 0,
+ 79, 80, 81, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 53, 54, 0,
+ 61, 0, 106, 111, 112, 0, 113, 114, 115, 64,
+ 0, 0, 67, 68, 70, 0, 104, 36, 0, 0,
+ 77, 0, 0, 126, 0, 0, 0, 0, 21, 24,
+ 0, 0, 9, 0, 37, 39, 0, 0, 82, 83,
+ 93, 0, 0, 0, 26, -2, 28, 29, -2, 31,
+ 32, 33, 34, 35, 0, 0, 0, 98, 62, 0,
+ 0, 0, 0, 65, 0, 73, 0, 0, 0, 78,
+ 85, 87, 0, 127, 129, 130, 118, 119, 120, 121,
+ 122, 123, 124, 0, 131, 132, 8, 18, 19, 7,
+ 0, 0, 0, 42, 0, 44, 0, 0, 0, -2,
+ 94, 0, 0, 23, 0, 0, 16, 59, 0, 63,
+ 107, 108, 117, 109, 0, 100, 105, 0, 0, 0,
+ 133, 0, 135, 0, 22, 38, 40, 0, 41, 0,
+ 0, 0, 0, 95, 0, 96, 14, 0, 0, 99,
+ 0, 0, 102, 0, 0, 88, 134, 0, 6, 43,
+ 45, 46, 47, 0, 97, 0, 17, 116, 110, 0,
+ 0, 0, 0, 0, 136, 0, 0, 72, 0, 103,
+ 0, 0, 48, 15, 0, 0, 0, 101, 74, 75,
+ 0, 0, 76,
}
-var yyTok1 = [...]int{
+var yyTok1 = [...]int8{
1, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 3, 3, 3, 3, 3, 3, 51, 3, 3,
- 57, 58, 49, 47, 46, 48, 52, 50, 3, 3,
- 3, 3, 3, 3, 3, 3, 3, 3, 56, 55,
- 3, 3, 3, 53, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 53, 3, 3,
+ 60, 61, 51, 49, 48, 50, 54, 52, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 59, 58,
+ 3, 3, 3, 55, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 54, 3, 59, 3, 3, 3, 3, 3, 3,
+ 3, 57, 3, 62, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 3, 3, 60, 45, 61,
+ 3, 3, 3, 63, 47, 64,
}
-var yyTok2 = [...]int{
+var yyTok2 = [...]int8{
2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
- 42, 43, 44,
+ 42, 43, 44, 45, 46, 56,
}
-var yyTok3 = [...]int{
+var yyTok3 = [...]int8{
0,
}
@@ -511,9 +498,9 @@ func yyErrorMessage(state, lookAhead int) string {
expected := make([]int, 0, 4)
// Look for shiftable tokens.
- base := yyPact[state]
+ base := int(yyPact[state])
for tok := TOKSTART; tok-1 < len(yyToknames); tok++ {
- if n := base + tok; n >= 0 && n < yyLast && yyChk[yyAct[n]] == tok {
+ if n := base + tok; n >= 0 && n < yyLast && int(yyChk[int(yyAct[n])]) == tok {
if len(expected) == cap(expected) {
return res
}
@@ -523,13 +510,13 @@ func yyErrorMessage(state, lookAhead int) string {
if yyDef[state] == -2 {
i := 0
- for yyExca[i] != -1 || yyExca[i+1] != state {
+ for yyExca[i] != -1 || int(yyExca[i+1]) != state {
i += 2
}
// Look for tokens that we accept or reduce.
for i += 2; yyExca[i] >= 0; i += 2 {
- tok := yyExca[i]
+ tok := int(yyExca[i])
if tok < TOKSTART || yyExca[i+1] == 0 {
continue
}
@@ -560,30 +547,30 @@ func yylex1(lex yyLexer, lval *yySymType) (char, token int) {
token = 0
char = lex.Lex(lval)
if char <= 0 {
- token = yyTok1[0]
+ token = int(yyTok1[0])
goto out
}
if char < len(yyTok1) {
- token = yyTok1[char]
+ token = int(yyTok1[char])
goto out
}
if char >= yyPrivate {
if char < yyPrivate+len(yyTok2) {
- token = yyTok2[char-yyPrivate]
+ token = int(yyTok2[char-yyPrivate])
goto out
}
}
for i := 0; i < len(yyTok3); i += 2 {
- token = yyTok3[i+0]
+ token = int(yyTok3[i+0])
if token == char {
- token = yyTok3[i+1]
+ token = int(yyTok3[i+1])
goto out
}
}
out:
if token == 0 {
- token = yyTok2[1] /* unknown char */
+ token = int(yyTok2[1]) /* unknown char */
}
if yyDebug >= 3 {
__yyfmt__.Printf("lex %s(%d)\n", yyTokname(token), uint(char))
@@ -638,7 +625,7 @@ yystack:
yyS[yyp].yys = yystate
yynewstate:
- yyn = yyPact[yystate]
+ yyn = int(yyPact[yystate])
if yyn <= yyFlag {
goto yydefault /* simple state */
}
@@ -649,8 +636,8 @@ yynewstate:
if yyn < 0 || yyn >= yyLast {
goto yydefault
}
- yyn = yyAct[yyn]
- if yyChk[yyn] == yytoken { /* valid shift */
+ yyn = int(yyAct[yyn])
+ if int(yyChk[yyn]) == yytoken { /* valid shift */
yyrcvr.char = -1
yytoken = -1
yyVAL = yyrcvr.lval
@@ -663,7 +650,7 @@ yynewstate:
yydefault:
/* default state action */
- yyn = yyDef[yystate]
+ yyn = int(yyDef[yystate])
if yyn == -2 {
if yyrcvr.char < 0 {
yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval)
@@ -672,18 +659,18 @@ yydefault:
/* look through exception table */
xi := 0
for {
- if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate {
+ if yyExca[xi+0] == -1 && int(yyExca[xi+1]) == yystate {
break
}
xi += 2
}
for xi += 2; ; xi += 2 {
- yyn = yyExca[xi+0]
+ yyn = int(yyExca[xi+0])
if yyn < 0 || yyn == yytoken {
break
}
}
- yyn = yyExca[xi+1]
+ yyn = int(yyExca[xi+1])
if yyn < 0 {
goto ret0
}
@@ -705,10 +692,10 @@ yydefault:
/* find a state where "error" is a legal shift action */
for yyp >= 0 {
- yyn = yyPact[yyS[yyp].yys] + yyErrCode
+ yyn = int(yyPact[yyS[yyp].yys]) + yyErrCode
if yyn >= 0 && yyn < yyLast {
- yystate = yyAct[yyn] /* simulate a shift of "error" */
- if yyChk[yystate] == yyErrCode {
+ yystate = int(yyAct[yyn]) /* simulate a shift of "error" */
+ if int(yyChk[yystate]) == yyErrCode {
goto yystack
}
}
@@ -744,7 +731,7 @@ yydefault:
yypt := yyp
_ = yypt // guard against "declared and not used"
- yyp -= yyR2[yyn]
+ yyp -= int(yyR2[yyn])
// yyp is now the index of $0. Perform the default action. Iff the
// reduced production is ε, $1 is possibly out of range.
if yyp+1 >= len(yyS) {
@@ -755,936 +742,771 @@ yydefault:
yyVAL = yyS[yyp+1]
/* consult goto table to find next state */
- yyn = yyR1[yyn]
- yyg := yyPgo[yyn]
+ yyn = int(yyR1[yyn])
+ yyg := int(yyPgo[yyn])
yyj := yyg + yyS[yyp].yys + 1
if yyj >= yyLast {
- yystate = yyAct[yyg]
+ yystate = int(yyAct[yyg])
} else {
- yystate = yyAct[yyj]
- if yyChk[yystate] != -yyn {
- yystate = yyAct[yyg]
+ yystate = int(yyAct[yyj])
+ if int(yyChk[yystate]) != -yyn {
+ yystate = int(yyAct[yyg])
}
}
// dummy call; replaced with literal code
switch yynt {
case 1:
- yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:53
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:58
{
- if yyDollar[1].value != nil {
- yyDollar[2].value.(*Query).Meta = yyDollar[1].value.(*ConstObject)
- }
- yylex.(*lexer).result = yyDollar[2].value.(*Query)
+ query := yyDollar[3].value.(*Query)
+ query.Meta = yyDollar[1].value.(*ConstObject)
+ query.Imports = yyDollar[2].value.([]*Import)
+ yylex.(*lexer).result = query
}
case 2:
yyDollar = yyS[yypt-0 : yypt+1]
-//line parser.go.y:60
+//line parser.go.y:67
{
- yyVAL.value = nil
+ yyVAL.value = (*ConstObject)(nil)
}
case 3:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:64
+//line parser.go.y:71
{
yyVAL.value = yyDollar[2].value
}
case 4:
- yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:70
- {
- yyVAL.value = &Query{Imports: yyDollar[1].value.([]*Import), FuncDefs: yyDollar[2].value.([]*FuncDef), Term: &Term{Type: TermTypeIdentity}}
- }
- case 5:
- yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:74
- {
- if yyDollar[1].value != nil {
- yyDollar[2].value.(*Query).Imports = yyDollar[1].value.([]*Import)
- }
- yyVAL.value = yyDollar[2].value
- }
- case 6:
yyDollar = yyS[yypt-0 : yypt+1]
-//line parser.go.y:81
+//line parser.go.y:77
{
yyVAL.value = []*Import(nil)
}
- case 7:
+ case 5:
yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:85
+//line parser.go.y:81
{
- yyVAL.value = prependImport(yyDollar[2].value.([]*Import), yyDollar[1].value.(*Import))
+ yyVAL.value = append(yyDollar[1].value.([]*Import), yyDollar[2].value.(*Import))
}
- case 8:
+ case 6:
yyDollar = yyS[yypt-6 : yypt+1]
-//line parser.go.y:91
+//line parser.go.y:87
{
yyVAL.value = &Import{ImportPath: yyDollar[2].token, ImportAlias: yyDollar[4].token, Meta: yyDollar[5].value.(*ConstObject)}
}
- case 9:
+ case 7:
yyDollar = yyS[yypt-4 : yypt+1]
-//line parser.go.y:95
+//line parser.go.y:91
{
yyVAL.value = &Import{IncludePath: yyDollar[2].token, Meta: yyDollar[3].value.(*ConstObject)}
}
- case 10:
+ case 8:
yyDollar = yyS[yypt-0 : yypt+1]
-//line parser.go.y:101
+//line parser.go.y:97
{
yyVAL.value = (*ConstObject)(nil)
}
- case 11:
+ case 10:
yyDollar = yyS[yypt-1 : yypt+1]
//line parser.go.y:104
{
+ yyVAL.value = &Query{FuncDefs: reverseFuncDef(yyDollar[1].value.([]*FuncDef))}
}
case 12:
yyDollar = yyS[yypt-0 : yypt+1]
-//line parser.go.y:108
+//line parser.go.y:111
{
yyVAL.value = []*FuncDef(nil)
}
case 13:
yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:112
+//line parser.go.y:115
{
- yyVAL.value = prependFuncDef(yyDollar[2].value.([]*FuncDef), yyDollar[1].value.(*FuncDef))
+ yyVAL.value = append(yyDollar[2].value.([]*FuncDef), yyDollar[1].value.(*FuncDef))
}
case 14:
yyDollar = yyS[yypt-5 : yypt+1]
-//line parser.go.y:118
+//line parser.go.y:121
{
yyVAL.value = &FuncDef{Name: yyDollar[2].token, Body: yyDollar[4].value.(*Query)}
}
case 15:
yyDollar = yyS[yypt-8 : yypt+1]
-//line parser.go.y:122
+//line parser.go.y:125
{
yyVAL.value = &FuncDef{yyDollar[2].token, yyDollar[4].value.([]string), yyDollar[7].value.(*Query)}
}
case 16:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:128
+//line parser.go.y:131
{
yyVAL.value = []string{yyDollar[1].token}
}
case 17:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:132
+//line parser.go.y:135
{
yyVAL.value = append(yyDollar[1].value.([]string), yyDollar[3].token)
}
- case 18:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:137
- {
- }
- case 19:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:138
- {
- }
case 20:
yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:142
+//line parser.go.y:145
{
- yyDollar[2].value.(*Query).FuncDefs = prependFuncDef(yyDollar[2].value.(*Query).FuncDefs, yyDollar[1].value.(*FuncDef))
- yyVAL.value = yyDollar[2].value
+ query := yyDollar[2].value.(*Query)
+ query.FuncDefs = prependFuncDef(query.FuncDefs, yyDollar[1].value.(*FuncDef))
+ yyVAL.value = query
}
case 21:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:147
+//line parser.go.y:151
{
yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpPipe, Right: yyDollar[3].value.(*Query)}
}
case 22:
yyDollar = yyS[yypt-5 : yypt+1]
-//line parser.go.y:151
+//line parser.go.y:155
{
- yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, &Suffix{Bind: &Bind{yyDollar[3].value.([]*Pattern), yyDollar[5].value.(*Query)}})
- yyVAL.value = &Query{Term: yyDollar[1].value.(*Term)}
+ term := yyDollar[1].value.(*Term)
+ term.SuffixList = append(term.SuffixList, &Suffix{Bind: &Bind{yyDollar[3].value.([]*Pattern), yyDollar[5].value.(*Query)}})
+ yyVAL.value = &Query{Term: term}
}
case 23:
- yyDollar = yyS[yypt-9 : yypt+1]
-//line parser.go.y:156
+ yyDollar = yyS[yypt-4 : yypt+1]
+//line parser.go.y:161
{
- yyVAL.value = &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{yyDollar[2].value.(*Term), yyDollar[4].value.(*Pattern), yyDollar[6].value.(*Query), yyDollar[8].value.(*Query)}}}
+ yyVAL.value = &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{yyDollar[2].token, yyDollar[4].value.(*Query)}}}
}
case 24:
- yyDollar = yyS[yypt-9 : yypt+1]
-//line parser.go.y:160
- {
- yyVAL.value = &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{yyDollar[2].value.(*Term), yyDollar[4].value.(*Pattern), yyDollar[6].value.(*Query), yyDollar[8].value.(*Query), nil}}}
- }
- case 25:
- yyDollar = yyS[yypt-11 : yypt+1]
-//line parser.go.y:164
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:165
{
- yyVAL.value = &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{yyDollar[2].value.(*Term), yyDollar[4].value.(*Pattern), yyDollar[6].value.(*Query), yyDollar[8].value.(*Query), yyDollar[10].value.(*Query)}}}
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpComma, Right: yyDollar[3].value.(*Query)}
}
case 26:
- yyDollar = yyS[yypt-7 : yypt+1]
-//line parser.go.y:168
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:172
{
- yyVAL.value = &Query{Term: &Term{Type: TermTypeIf, If: &If{yyDollar[2].value.(*Query), yyDollar[4].value.(*Query), yyDollar[5].value.([]*IfElif), yyDollar[6].value.(*Query)}}}
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: yyDollar[2].operator, Right: yyDollar[3].value.(*Query)}
}
case 27:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:172
+//line parser.go.y:176
{
- yyVAL.value = &Query{Term: &Term{Type: TermTypeTry, Try: &Try{yyDollar[2].value.(*Query), yyDollar[3].value.(*Query)}}}
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: yyDollar[2].operator, Right: yyDollar[3].value.(*Query)}
}
case 28:
- yyDollar = yyS[yypt-4 : yypt+1]
-//line parser.go.y:176
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:180
{
- yyVAL.value = &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{yyDollar[2].token, yyDollar[4].value.(*Query)}}}
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpOr, Right: yyDollar[3].value.(*Query)}
}
case 29:
- yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:180
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:184
{
- if t := yyDollar[1].value.(*Query).Term; t != nil {
- t.SuffixList = append(t.SuffixList, &Suffix{Optional: true})
- } else {
- yyVAL.value = &Query{Term: &Term{Type: TermTypeQuery, Query: yyDollar[1].value.(*Query), SuffixList: []*Suffix{{Optional: true}}}}
- }
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpAnd, Right: yyDollar[3].value.(*Query)}
}
case 30:
yyDollar = yyS[yypt-3 : yypt+1]
//line parser.go.y:188
{
- yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpComma, Right: yyDollar[3].value.(*Query)}
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: yyDollar[2].operator, Right: yyDollar[3].value.(*Query)}
}
case 31:
yyDollar = yyS[yypt-3 : yypt+1]
//line parser.go.y:192
{
- yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: yyDollar[2].operator, Right: yyDollar[3].value.(*Query)}
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpAdd, Right: yyDollar[3].value.(*Query)}
}
case 32:
yyDollar = yyS[yypt-3 : yypt+1]
//line parser.go.y:196
{
- yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: yyDollar[2].operator, Right: yyDollar[3].value.(*Query)}
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpSub, Right: yyDollar[3].value.(*Query)}
}
case 33:
yyDollar = yyS[yypt-3 : yypt+1]
//line parser.go.y:200
{
- yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpOr, Right: yyDollar[3].value.(*Query)}
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpMul, Right: yyDollar[3].value.(*Query)}
}
case 34:
yyDollar = yyS[yypt-3 : yypt+1]
//line parser.go.y:204
{
- yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpAnd, Right: yyDollar[3].value.(*Query)}
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpDiv, Right: yyDollar[3].value.(*Query)}
}
case 35:
yyDollar = yyS[yypt-3 : yypt+1]
//line parser.go.y:208
{
- yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: yyDollar[2].operator, Right: yyDollar[3].value.(*Query)}
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpMod, Right: yyDollar[3].value.(*Query)}
}
case 36:
- yyDollar = yyS[yypt-3 : yypt+1]
+ yyDollar = yyS[yypt-1 : yypt+1]
//line parser.go.y:212
{
- yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpAdd, Right: yyDollar[3].value.(*Query)}
+ yyVAL.value = &Query{Term: yyDollar[1].value.(*Term)}
}
case 37:
- yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:216
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:218
{
- yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpSub, Right: yyDollar[3].value.(*Query)}
+ yyVAL.value = []*Pattern{yyDollar[1].value.(*Pattern)}
}
case 38:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:220
+//line parser.go.y:222
{
- yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpMul, Right: yyDollar[3].value.(*Query)}
+ yyVAL.value = append(yyDollar[1].value.([]*Pattern), yyDollar[3].value.(*Pattern))
}
case 39:
- yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:224
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:228
{
- yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpDiv, Right: yyDollar[3].value.(*Query)}
+ yyVAL.value = &Pattern{Name: yyDollar[1].token}
}
case 40:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:228
+//line parser.go.y:232
{
- yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpMod, Right: yyDollar[3].value.(*Query)}
+ yyVAL.value = &Pattern{Array: yyDollar[2].value.([]*Pattern)}
}
case 41:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:232
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:236
{
- yyVAL.value = &Query{Term: yyDollar[1].value.(*Term)}
+ yyVAL.value = &Pattern{Object: yyDollar[2].value.([]*PatternObject)}
}
case 42:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:238
+//line parser.go.y:242
{
yyVAL.value = []*Pattern{yyDollar[1].value.(*Pattern)}
}
case 43:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:242
+//line parser.go.y:246
{
yyVAL.value = append(yyDollar[1].value.([]*Pattern), yyDollar[3].value.(*Pattern))
}
case 44:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:248
+//line parser.go.y:252
{
- yyVAL.value = &Pattern{Name: yyDollar[1].token}
+ yyVAL.value = []*PatternObject{yyDollar[1].value.(*PatternObject)}
}
case 45:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:252
+//line parser.go.y:256
{
- yyVAL.value = &Pattern{Array: yyDollar[2].value.([]*Pattern)}
+ yyVAL.value = append(yyDollar[1].value.([]*PatternObject), yyDollar[3].value.(*PatternObject))
}
case 46:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:256
+//line parser.go.y:262
{
- yyVAL.value = &Pattern{Object: yyDollar[2].value.([]*PatternObject)}
+ yyVAL.value = &PatternObject{Key: yyDollar[1].token, Val: yyDollar[3].value.(*Pattern)}
}
case 47:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:262
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:266
{
- yyVAL.value = []*Pattern{yyDollar[1].value.(*Pattern)}
+ yyVAL.value = &PatternObject{KeyString: yyDollar[1].value.(*String), Val: yyDollar[3].value.(*Pattern)}
}
case 48:
- yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:266
+ yyDollar = yyS[yypt-5 : yypt+1]
+//line parser.go.y:270
{
- yyVAL.value = append(yyDollar[1].value.([]*Pattern), yyDollar[3].value.(*Pattern))
+ yyVAL.value = &PatternObject{KeyQuery: yyDollar[2].value.(*Query), Val: yyDollar[5].value.(*Pattern)}
}
case 49:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:272
+//line parser.go.y:274
{
- yyVAL.value = []*PatternObject{yyDollar[1].value.(*PatternObject)}
+ yyVAL.value = &PatternObject{Key: yyDollar[1].token}
}
case 50:
- yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:276
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:280
{
- yyVAL.value = append(yyDollar[1].value.([]*PatternObject), yyDollar[3].value.(*PatternObject))
+ yyVAL.value = &Term{Type: TermTypeIdentity}
}
case 51:
- yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:282
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:284
{
- yyVAL.value = &PatternObject{Key: yyDollar[1].token, Val: yyDollar[3].value.(*Pattern)}
+ yyVAL.value = &Term{Type: TermTypeRecurse}
}
case 52:
- yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:286
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:288
{
- yyVAL.value = &PatternObject{KeyString: yyDollar[1].value.(*String), Val: yyDollar[3].value.(*Pattern)}
+ yyVAL.value = &Term{Type: TermTypeIndex, Index: &Index{Name: yyDollar[1].token}}
}
case 53:
- yyDollar = yyS[yypt-5 : yypt+1]
-//line parser.go.y:290
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:292
{
- yyVAL.value = &PatternObject{KeyQuery: yyDollar[2].value.(*Query), Val: yyDollar[5].value.(*Pattern)}
+ suffix := yyDollar[2].value.(*Suffix)
+ if suffix.Iter {
+ yyVAL.value = &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{suffix}}
+ } else {
+ yyVAL.value = &Term{Type: TermTypeIndex, Index: suffix.Index}
+ }
}
case 54:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:294
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:301
{
- yyVAL.value = &PatternObject{KeyOnly: yyDollar[1].token}
+ yyVAL.value = &Term{Type: TermTypeIndex, Index: &Index{Str: yyDollar[2].value.(*String)}}
}
case 55:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:300
+//line parser.go.y:305
{
- yyVAL.value = &Term{Type: TermTypeIdentity}
+ yyVAL.value = &Term{Type: TermTypeNull}
}
case 56:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:304
+//line parser.go.y:309
{
- yyVAL.value = &Term{Type: TermTypeRecurse}
+ yyVAL.value = &Term{Type: TermTypeTrue}
}
case 57:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:308
+//line parser.go.y:313
{
- yyVAL.value = &Term{Type: TermTypeIndex, Index: &Index{Name: yyDollar[1].token}}
+ yyVAL.value = &Term{Type: TermTypeFalse}
}
case 58:
- yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:312
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:317
{
- if yyDollar[2].value.(*Suffix).Iter {
- yyVAL.value = &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{yyDollar[2].value.(*Suffix)}}
- } else {
- yyVAL.value = &Term{Type: TermTypeIndex, Index: yyDollar[2].value.(*Suffix).Index}
- }
+ yyVAL.value = &Term{Type: TermTypeFunc, Func: &Func{Name: yyDollar[1].token}}
}
case 59:
- yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:320
+ yyDollar = yyS[yypt-4 : yypt+1]
+//line parser.go.y:321
{
- yyVAL.value = &Term{Type: TermTypeIndex, Index: &Index{Str: yyDollar[2].value.(*String)}}
+ yyVAL.value = &Term{Type: TermTypeFunc, Func: &Func{Name: yyDollar[1].token, Args: yyDollar[3].value.([]*Query)}}
}
case 60:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:324
+//line parser.go.y:325
{
- yyVAL.value = &Term{Type: TermTypeNull}
+ yyVAL.value = &Term{Type: TermTypeFunc, Func: &Func{Name: yyDollar[1].token}}
}
case 61:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:328
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:329
{
- yyVAL.value = &Term{Type: TermTypeTrue}
+ yyVAL.value = &Term{Type: TermTypeObject, Object: &Object{}}
}
case 62:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:332
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:333
{
- yyVAL.value = &Term{Type: TermTypeFalse}
+ yyVAL.value = &Term{Type: TermTypeObject, Object: &Object{yyDollar[2].value.([]*ObjectKeyVal)}}
}
case 63:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:336
+ yyDollar = yyS[yypt-4 : yypt+1]
+//line parser.go.y:337
{
- yyVAL.value = &Term{Type: TermTypeFunc, Func: &Func{Name: yyDollar[1].token}}
+ yyVAL.value = &Term{Type: TermTypeObject, Object: &Object{yyDollar[2].value.([]*ObjectKeyVal)}}
}
case 64:
- yyDollar = yyS[yypt-4 : yypt+1]
-//line parser.go.y:340
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:341
{
- yyVAL.value = &Term{Type: TermTypeFunc, Func: &Func{Name: yyDollar[1].token, Args: yyDollar[3].value.([]*Query)}}
+ yyVAL.value = &Term{Type: TermTypeArray, Array: &Array{}}
}
case 65:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:344
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:345
{
- yyVAL.value = &Term{Type: TermTypeFunc, Func: &Func{Name: yyDollar[1].token}}
+ yyVAL.value = &Term{Type: TermTypeArray, Array: &Array{yyDollar[2].value.(*Query)}}
}
case 66:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:348
+//line parser.go.y:349
{
yyVAL.value = &Term{Type: TermTypeNumber, Number: yyDollar[1].token}
}
case 67:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:352
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:353
{
- yyVAL.value = &Term{Type: TermTypeFormat, Format: yyDollar[1].token}
+ yyVAL.value = &Term{Type: TermTypeUnary, Unary: &Unary{OpAdd, yyDollar[2].value.(*Term)}}
}
case 68:
yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:356
+//line parser.go.y:357
{
- yyVAL.value = &Term{Type: TermTypeFormat, Format: yyDollar[1].token, Str: yyDollar[2].value.(*String)}
+ yyVAL.value = &Term{Type: TermTypeUnary, Unary: &Unary{OpSub, yyDollar[2].value.(*Term)}}
}
case 69:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:360
+//line parser.go.y:361
{
- yyVAL.value = &Term{Type: TermTypeString, Str: yyDollar[1].value.(*String)}
+ yyVAL.value = &Term{Type: TermTypeFormat, Format: yyDollar[1].token}
}
case 70:
- yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:364
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:365
{
- yyVAL.value = &Term{Type: TermTypeQuery, Query: yyDollar[2].value.(*Query)}
+ yyVAL.value = &Term{Type: TermTypeFormat, Format: yyDollar[1].token, Str: yyDollar[2].value.(*String)}
}
case 71:
- yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:368
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:369
{
- yyVAL.value = &Term{Type: TermTypeUnary, Unary: &Unary{OpSub, yyDollar[2].value.(*Term)}}
+ yyVAL.value = &Term{Type: TermTypeString, Str: yyDollar[1].value.(*String)}
}
case 72:
- yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:372
+ yyDollar = yyS[yypt-7 : yypt+1]
+//line parser.go.y:373
{
- yyVAL.value = &Term{Type: TermTypeUnary, Unary: &Unary{OpAdd, yyDollar[2].value.(*Term)}}
+ yyVAL.value = &Term{Type: TermTypeIf, If: &If{yyDollar[2].value.(*Query), yyDollar[4].value.(*Query), yyDollar[5].value.([]*IfElif), yyDollar[6].value.(*Query)}}
}
case 73:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:376
+//line parser.go.y:377
{
- yyVAL.value = &Term{Type: TermTypeObject, Object: &Object{yyDollar[2].value.([]*ObjectKeyVal)}}
+ yyVAL.value = &Term{Type: TermTypeTry, Try: &Try{yyDollar[2].value.(*Query), yyDollar[3].value.(*Query)}}
}
case 74:
- yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:380
+ yyDollar = yyS[yypt-9 : yypt+1]
+//line parser.go.y:381
{
- yyVAL.value = &Term{Type: TermTypeArray, Array: &Array{yyDollar[2].value.(*Query)}}
+ yyVAL.value = &Term{Type: TermTypeReduce, Reduce: &Reduce{yyDollar[2].value.(*Query), yyDollar[4].value.(*Pattern), yyDollar[6].value.(*Query), yyDollar[8].value.(*Query)}}
}
case 75:
- yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:384
+ yyDollar = yyS[yypt-9 : yypt+1]
+//line parser.go.y:385
{
- yyVAL.value = &Term{Type: TermTypeArray, Array: &Array{}}
+ yyVAL.value = &Term{Type: TermTypeForeach, Foreach: &Foreach{yyDollar[2].value.(*Query), yyDollar[4].value.(*Pattern), yyDollar[6].value.(*Query), yyDollar[8].value.(*Query), nil}}
}
case 76:
+ yyDollar = yyS[yypt-11 : yypt+1]
+//line parser.go.y:389
+ {
+ yyVAL.value = &Term{Type: TermTypeForeach, Foreach: &Foreach{yyDollar[2].value.(*Query), yyDollar[4].value.(*Pattern), yyDollar[6].value.(*Query), yyDollar[8].value.(*Query), yyDollar[10].value.(*Query)}}
+ }
+ case 77:
yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:388
+//line parser.go.y:393
{
yyVAL.value = &Term{Type: TermTypeBreak, Break: yyDollar[2].token}
}
- case 77:
+ case 78:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:397
+ {
+ yyVAL.value = &Term{Type: TermTypeQuery, Query: yyDollar[2].value.(*Query)}
+ }
+ case 79:
yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:392
+//line parser.go.y:401
{
yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, &Suffix{Index: &Index{Name: yyDollar[2].token}})
}
- case 78:
+ case 80:
yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:396
+//line parser.go.y:405
{
yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, yyDollar[2].value.(*Suffix))
}
- case 79:
+ case 81:
yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:400
+//line parser.go.y:409
{
yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, &Suffix{Optional: true})
}
- case 80:
+ case 82:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:404
+//line parser.go.y:413
{
yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, yyDollar[3].value.(*Suffix))
}
- case 81:
+ case 83:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:408
+//line parser.go.y:417
{
yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, &Suffix{Index: &Index{Str: yyDollar[3].value.(*String)}})
}
- case 82:
+ case 84:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:414
+//line parser.go.y:423
{
yyVAL.value = &String{Str: yyDollar[1].token}
}
- case 83:
+ case 85:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:418
+//line parser.go.y:427
{
yyVAL.value = &String{Queries: yyDollar[2].value.([]*Query)}
}
- case 84:
+ case 86:
yyDollar = yyS[yypt-0 : yypt+1]
-//line parser.go.y:424
+//line parser.go.y:433
{
yyVAL.value = []*Query{}
}
- case 85:
+ case 87:
yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:428
+//line parser.go.y:437
{
yyVAL.value = append(yyDollar[1].value.([]*Query), &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: yyDollar[2].token}}})
}
- case 86:
+ case 88:
yyDollar = yyS[yypt-4 : yypt+1]
-//line parser.go.y:432
+//line parser.go.y:441
{
yylex.(*lexer).inString = true
yyVAL.value = append(yyDollar[1].value.([]*Query), &Query{Term: &Term{Type: TermTypeQuery, Query: yyDollar[3].value.(*Query)}})
}
- case 87:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:438
- {
- }
- case 88:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:439
- {
- }
- case 89:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:442
- {
- }
- case 90:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:443
- {
- }
- case 91:
+ case 93:
yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:447
+//line parser.go.y:456
{
yyVAL.value = &Suffix{Iter: true}
}
- case 92:
+ case 94:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:451
+//line parser.go.y:460
{
yyVAL.value = &Suffix{Index: &Index{Start: yyDollar[2].value.(*Query)}}
}
- case 93:
+ case 95:
yyDollar = yyS[yypt-4 : yypt+1]
-//line parser.go.y:455
+//line parser.go.y:464
{
yyVAL.value = &Suffix{Index: &Index{Start: yyDollar[2].value.(*Query), IsSlice: true}}
}
- case 94:
+ case 96:
yyDollar = yyS[yypt-4 : yypt+1]
-//line parser.go.y:459
+//line parser.go.y:468
{
- yyVAL.value = &Suffix{Index: &Index{End: yyDollar[3].value.(*Query)}}
+ yyVAL.value = &Suffix{Index: &Index{End: yyDollar[3].value.(*Query), IsSlice: true}}
}
- case 95:
+ case 97:
yyDollar = yyS[yypt-5 : yypt+1]
-//line parser.go.y:463
+//line parser.go.y:472
{
- yyVAL.value = &Suffix{Index: &Index{Start: yyDollar[2].value.(*Query), IsSlice: true, End: yyDollar[4].value.(*Query)}}
+ yyVAL.value = &Suffix{Index: &Index{Start: yyDollar[2].value.(*Query), End: yyDollar[4].value.(*Query), IsSlice: true}}
}
- case 96:
+ case 98:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:469
+//line parser.go.y:478
{
yyVAL.value = []*Query{yyDollar[1].value.(*Query)}
}
- case 97:
+ case 99:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:473
+//line parser.go.y:482
{
yyVAL.value = append(yyDollar[1].value.([]*Query), yyDollar[3].value.(*Query))
}
- case 98:
- yyDollar = yyS[yypt-0 : yypt+1]
-//line parser.go.y:479
- {
- yyVAL.value = []*IfElif(nil)
- }
- case 99:
- yyDollar = yyS[yypt-5 : yypt+1]
-//line parser.go.y:483
- {
- yyVAL.value = prependIfElif(yyDollar[5].value.([]*IfElif), &IfElif{yyDollar[2].value.(*Query), yyDollar[4].value.(*Query)})
- }
case 100:
yyDollar = yyS[yypt-0 : yypt+1]
-//line parser.go.y:489
+//line parser.go.y:488
{
- yyVAL.value = (*Query)(nil)
+ yyVAL.value = []*IfElif(nil)
}
case 101:
- yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:493
+ yyDollar = yyS[yypt-5 : yypt+1]
+//line parser.go.y:492
{
- yyVAL.value = yyDollar[2].value
+ yyVAL.value = append(yyDollar[1].value.([]*IfElif), &IfElif{yyDollar[3].value.(*Query), yyDollar[5].value.(*Query)})
}
case 102:
yyDollar = yyS[yypt-0 : yypt+1]
-//line parser.go.y:499
+//line parser.go.y:498
{
yyVAL.value = (*Query)(nil)
}
case 103:
yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:503
+//line parser.go.y:502
{
yyVAL.value = yyDollar[2].value
}
case 104:
yyDollar = yyS[yypt-0 : yypt+1]
-//line parser.go.y:509
+//line parser.go.y:508
{
- yyVAL.value = []*ObjectKeyVal(nil)
+ yyVAL.value = (*Query)(nil)
}
case 105:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:513
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:512
{
- yyVAL.value = []*ObjectKeyVal{yyDollar[1].value.(*ObjectKeyVal)}
+ yyVAL.value = yyDollar[2].value
}
case 106:
- yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:517
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:518
{
- yyVAL.value = prependObjectKeyVal(yyDollar[3].value.([]*ObjectKeyVal), yyDollar[1].value.(*ObjectKeyVal))
+ yyVAL.value = []*ObjectKeyVal{yyDollar[1].value.(*ObjectKeyVal)}
}
case 107:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:523
+//line parser.go.y:522
{
- yyVAL.value = &ObjectKeyVal{Key: yyDollar[1].token, Val: yyDollar[3].value.(*ObjectVal)}
+ yyVAL.value = append(yyDollar[1].value.([]*ObjectKeyVal), yyDollar[3].value.(*ObjectKeyVal))
}
case 108:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:527
+//line parser.go.y:528
{
- yyVAL.value = &ObjectKeyVal{KeyString: yyDollar[1].value.(*String), Val: yyDollar[3].value.(*ObjectVal)}
+ yyVAL.value = &ObjectKeyVal{Key: yyDollar[1].token, Val: yyDollar[3].value.(*Query)}
}
case 109:
- yyDollar = yyS[yypt-5 : yypt+1]
-//line parser.go.y:531
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:532
{
- yyVAL.value = &ObjectKeyVal{KeyQuery: yyDollar[2].value.(*Query), Val: yyDollar[5].value.(*ObjectVal)}
+ yyVAL.value = &ObjectKeyVal{KeyString: yyDollar[1].value.(*String), Val: yyDollar[3].value.(*Query)}
}
case 110:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:535
+ yyDollar = yyS[yypt-5 : yypt+1]
+//line parser.go.y:536
{
- yyVAL.value = &ObjectKeyVal{KeyOnly: yyDollar[1].token}
+ yyVAL.value = &ObjectKeyVal{KeyQuery: yyDollar[2].value.(*Query), Val: yyDollar[5].value.(*Query)}
}
case 111:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:539
+//line parser.go.y:540
{
- yyVAL.value = &ObjectKeyVal{KeyOnlyString: yyDollar[1].value.(*String)}
+ yyVAL.value = &ObjectKeyVal{Key: yyDollar[1].token}
}
case 112:
yyDollar = yyS[yypt-1 : yypt+1]
//line parser.go.y:544
{
- }
- case 113:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:545
- {
- }
- case 114:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:546
- {
- }
- case 115:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:550
- {
- yyVAL.value = &ObjectVal{[]*Query{{Term: yyDollar[1].value.(*Term)}}}
+ yyVAL.value = &ObjectKeyVal{KeyString: yyDollar[1].value.(*String)}
}
case 116:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:554
+//line parser.go.y:555
{
- yyVAL.value = &ObjectVal{prependQuery(yyDollar[3].value.(*ObjectVal).Queries, &Query{Term: yyDollar[1].value.(*Term)})}
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpPipe, Right: yyDollar[3].value.(*Query)}
}
- case 117:
+ case 118:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:560
+//line parser.go.y:562
{
yyVAL.value = &ConstTerm{Object: yyDollar[1].value.(*ConstObject)}
}
- case 118:
+ case 119:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:564
+//line parser.go.y:566
{
yyVAL.value = &ConstTerm{Array: yyDollar[1].value.(*ConstArray)}
}
- case 119:
+ case 120:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:568
+//line parser.go.y:570
{
yyVAL.value = &ConstTerm{Number: yyDollar[1].token}
}
- case 120:
+ case 121:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:572
+//line parser.go.y:574
{
yyVAL.value = &ConstTerm{Str: yyDollar[1].token}
}
- case 121:
+ case 122:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:576
+//line parser.go.y:578
{
yyVAL.value = &ConstTerm{Null: true}
}
- case 122:
+ case 123:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:580
+//line parser.go.y:582
{
yyVAL.value = &ConstTerm{True: true}
}
- case 123:
+ case 124:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:584
+//line parser.go.y:586
{
yyVAL.value = &ConstTerm{False: true}
}
- case 124:
- yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:590
- {
- yyVAL.value = &ConstObject{yyDollar[2].value.([]*ConstObjectKeyVal)}
- }
case 125:
- yyDollar = yyS[yypt-0 : yypt+1]
-//line parser.go.y:596
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:592
{
- yyVAL.value = []*ConstObjectKeyVal(nil)
+ yyVAL.value = &ConstObject{}
}
case 126:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:600
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:596
{
- yyVAL.value = []*ConstObjectKeyVal{yyDollar[1].value.(*ConstObjectKeyVal)}
+ yyVAL.value = &ConstObject{yyDollar[2].value.([]*ConstObjectKeyVal)}
}
case 127:
- yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:604
+ yyDollar = yyS[yypt-4 : yypt+1]
+//line parser.go.y:600
{
- yyVAL.value = prependConstObjectKeyVal(yyDollar[3].value.([]*ConstObjectKeyVal), yyDollar[1].value.(*ConstObjectKeyVal))
+ yyVAL.value = &ConstObject{yyDollar[2].value.([]*ConstObjectKeyVal)}
}
case 128:
- yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:610
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:606
{
- yyVAL.value = &ConstObjectKeyVal{Key: yyDollar[1].token, Val: yyDollar[3].value.(*ConstTerm)}
+ yyVAL.value = []*ConstObjectKeyVal{yyDollar[1].value.(*ConstObjectKeyVal)}
}
case 129:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:614
+//line parser.go.y:610
{
- yyVAL.value = &ConstObjectKeyVal{Key: yyDollar[1].token, Val: yyDollar[3].value.(*ConstTerm)}
+ yyVAL.value = append(yyDollar[1].value.([]*ConstObjectKeyVal), yyDollar[3].value.(*ConstObjectKeyVal))
}
case 130:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:618
+//line parser.go.y:616
{
- yyVAL.value = &ConstObjectKeyVal{KeyString: yyDollar[1].token, Val: yyDollar[3].value.(*ConstTerm)}
+ yyVAL.value = &ConstObjectKeyVal{Key: yyDollar[1].token, Val: yyDollar[3].value.(*ConstTerm)}
}
case 131:
- yyDollar = yyS[yypt-2 : yypt+1]
-//line parser.go.y:624
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:620
{
- yyVAL.value = &ConstArray{}
+ yyVAL.value = &ConstObjectKeyVal{Key: yyDollar[1].token, Val: yyDollar[3].value.(*ConstTerm)}
}
case 132:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:628
+//line parser.go.y:624
{
- yyVAL.value = &ConstArray{yyDollar[2].value.([]*ConstTerm)}
+ yyVAL.value = &ConstObjectKeyVal{KeyString: yyDollar[1].token, Val: yyDollar[3].value.(*ConstTerm)}
}
case 133:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:634
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:630
{
- yyVAL.value = []*ConstTerm{yyDollar[1].value.(*ConstTerm)}
+ yyVAL.value = &ConstArray{}
}
case 134:
yyDollar = yyS[yypt-3 : yypt+1]
-//line parser.go.y:638
+//line parser.go.y:634
{
- yyVAL.value = append(yyDollar[1].value.([]*ConstTerm), yyDollar[3].value.(*ConstTerm))
+ yyVAL.value = &ConstArray{yyDollar[2].value.([]*ConstTerm)}
}
case 135:
yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:643
+//line parser.go.y:640
{
+ yyVAL.value = []*ConstTerm{yyDollar[1].value.(*ConstTerm)}
}
case 136:
- yyDollar = yyS[yypt-1 : yypt+1]
+ yyDollar = yyS[yypt-3 : yypt+1]
//line parser.go.y:644
{
- }
- case 137:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:645
- {
- }
- case 138:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:646
- {
- }
- case 139:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:647
- {
- }
- case 140:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:648
- {
- }
- case 141:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:649
- {
- }
- case 142:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:650
- {
- }
- case 143:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:651
- {
- }
- case 144:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:652
- {
- }
- case 145:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:653
- {
- }
- case 146:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:654
- {
- }
- case 147:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:655
- {
- }
- case 148:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:656
- {
- }
- case 149:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:657
- {
- }
- case 150:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:658
- {
- }
- case 151:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:659
- {
- }
- case 152:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:660
- {
- }
- case 153:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:661
- {
- }
- case 154:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:662
- {
- }
- case 155:
- yyDollar = yyS[yypt-1 : yypt+1]
-//line parser.go.y:663
- {
+ yyVAL.value = append(yyDollar[1].value.([]*ConstTerm), yyDollar[3].value.(*ConstTerm))
}
}
goto yystack /* stack new state and value */
diff --git a/vendor/github.com/itchyny/gojq/parser.go.y b/vendor/github.com/itchyny/gojq/parser.go.y
index 7fc3aee36..5481e2114 100644
--- a/vendor/github.com/itchyny/gojq/parser.go.y
+++ b/vendor/github.com/itchyny/gojq/parser.go.y
@@ -1,39 +1,44 @@
%{
package gojq
-// Parse parses a query.
-func Parse(src string) (*Query, error) {
- l := newLexer(src)
- if yyParse(l) > 0 {
- return nil, l.err
+func reverseFuncDef(xs []*FuncDef) []*FuncDef {
+ for i, j := 0, len(xs)-1; i < j; i, j = i+1, j-1 {
+ xs[i], xs[j] = xs[j], xs[i]
}
- return l.result, nil
+ return xs
+}
+
+func prependFuncDef(xs []*FuncDef, x *FuncDef) []*FuncDef {
+ xs = append(xs, nil)
+ copy(xs[1:], xs)
+ xs[0] = x
+ return xs
}
%}
%union {
- value interface{}
+ value any
token string
operator Operator
}
-%type<value> program moduleheader programbody imports import metaopt funcdefs funcdef funcdefargs query
+%type<value> program header imports import meta body funcdefs funcdef funcargs query
%type<value> bindpatterns pattern arraypatterns objectpatterns objectpattern
-%type<value> term string stringparts suffix args ifelifs ifelse trycatch
-%type<value> object objectkeyval objectval
+%type<value> expr term string stringparts suffix args ifelifs ifelse trycatch
+%type<value> objectkeyvals objectkeyval objectval
%type<value> constterm constobject constobjectkeyvals constobjectkeyval constarray constarrayelems
%type<token> tokIdentVariable tokIdentModuleIdent tokVariableModuleVariable tokKeyword objectkey
-%token<operator> tokAltOp tokUpdateOp tokDestAltOp tokOrOp tokAndOp tokCompareOp
-%token<token> tokModule tokImport tokInclude tokDef tokAs tokLabel tokBreak
+%token<operator> tokAltOp tokUpdateOp tokDestAltOp tokCompareOp
+%token<token> tokOrOp tokAndOp tokModule tokImport tokInclude tokDef tokAs tokLabel tokBreak
%token<token> tokNull tokTrue tokFalse
-%token<token> tokIdent tokVariable tokModuleIdent tokModuleVariable
-%token<token> tokIndex tokNumber tokFormat tokInvalid
-%token<token> tokString tokStringStart tokStringQuery tokStringEnd
%token<token> tokIf tokThen tokElif tokElse tokEnd
%token<token> tokTry tokCatch tokReduce tokForeach
-%token tokRecurse tokFuncDefPost tokTermPost tokEmptyCatch
+%token<token> tokIdent tokVariable tokModuleIdent tokModuleVariable
+%token<token> tokRecurse tokIndex tokNumber tokFormat
+%token<token> tokString tokStringStart tokStringQuery tokStringEnd
+%token tokInvalid tokInvalidEscapeSequence tokUnterminatedString
-%nonassoc tokFuncDefPost tokTermPost tokEmptyCatch
+%nonassoc tokFuncDefQuery tokExpr tokTerm
%right '|'
%left ','
%right tokAltOp
@@ -43,65 +48,63 @@ func Parse(src string) (*Query, error) {
%nonassoc tokCompareOp
%left '+' '-'
%left '*' '/' '%'
-%nonassoc tokAs tokIndex '.' '?'
+%nonassoc tokAs tokIndex '.' '?' tokEmptyCatch
%nonassoc '[' tokTry tokCatch
%%
program
- : moduleheader programbody
+ : header imports body
{
- if $1 != nil { $2.(*Query).Meta = $1.(*ConstObject) }
- yylex.(*lexer).result = $2.(*Query)
+ query := $3.(*Query)
+ query.Meta = $1.(*ConstObject)
+ query.Imports = $2.([]*Import)
+ yylex.(*lexer).result = query
}
-moduleheader
+header
:
{
- $$ = nil
+ $$ = (*ConstObject)(nil)
}
| tokModule constobject ';'
{
$$ = $2;
}
-programbody
- : imports funcdefs
- {
- $$ = &Query{Imports: $1.([]*Import), FuncDefs: $2.([]*FuncDef), Term: &Term{Type: TermTypeIdentity}}
- }
- | imports query
- {
- if $1 != nil { $2.(*Query).Imports = $1.([]*Import) }
- $$ = $2
- }
-
imports
:
{
$$ = []*Import(nil)
}
- | import imports
+ | imports import
{
- $$ = prependImport($2.([]*Import), $1.(*Import))
+ $$ = append($1.([]*Import), $2.(*Import))
}
import
- : tokImport tokString tokAs tokIdentVariable metaopt ';'
+ : tokImport tokString tokAs tokIdentVariable meta ';'
{
$$ = &Import{ImportPath: $2, ImportAlias: $4, Meta: $5.(*ConstObject)}
}
- | tokInclude tokString metaopt ';'
+ | tokInclude tokString meta ';'
{
$$ = &Import{IncludePath: $2, Meta: $3.(*ConstObject)}
}
-metaopt
+meta
:
{
$$ = (*ConstObject)(nil)
}
- | constobject {}
+ | constobject
+
+body
+ : funcdefs
+ {
+ $$ = &Query{FuncDefs: reverseFuncDef($1.([]*FuncDef))}
+ }
+ | query
funcdefs
:
@@ -110,7 +113,7 @@ funcdefs
}
| funcdef funcdefs
{
- $$ = prependFuncDef($2.([]*FuncDef), $1.(*FuncDef))
+ $$ = append($2.([]*FuncDef), $1.(*FuncDef))
}
funcdef
@@ -118,30 +121,31 @@ funcdef
{
$$ = &FuncDef{Name: $2, Body: $4.(*Query)}
}
- | tokDef tokIdent '(' funcdefargs ')' ':' query ';'
+ | tokDef tokIdent '(' funcargs ')' ':' query ';'
{
$$ = &FuncDef{$2, $4.([]string), $7.(*Query)}
}
-funcdefargs
+funcargs
: tokIdentVariable
{
$$ = []string{$1}
}
- | funcdefargs ';' tokIdentVariable
+ | funcargs ';' tokIdentVariable
{
$$ = append($1.([]string), $3)
}
tokIdentVariable
- : tokIdent {}
- | tokVariable {}
+ : tokIdent
+ | tokVariable
query
- : funcdef query %prec tokFuncDefPost
+ : funcdef query %prec tokFuncDefQuery
{
- $2.(*Query).FuncDefs = prependFuncDef($2.(*Query).FuncDefs, $1.(*FuncDef))
- $$ = $2
+ query := $2.(*Query)
+ query.FuncDefs = prependFuncDef(query.FuncDefs, $1.(*FuncDef))
+ $$ = query
}
| query '|' query
{
@@ -149,86 +153,62 @@ query
}
| term tokAs bindpatterns '|' query
{
- $1.(*Term).SuffixList = append($1.(*Term).SuffixList, &Suffix{Bind: &Bind{$3.([]*Pattern), $5.(*Query)}})
- $$ = &Query{Term: $1.(*Term)}
- }
- | tokReduce term tokAs pattern '(' query ';' query ')'
- {
- $$ = &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{$2.(*Term), $4.(*Pattern), $6.(*Query), $8.(*Query)}}}
- }
- | tokForeach term tokAs pattern '(' query ';' query ')'
- {
- $$ = &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{$2.(*Term), $4.(*Pattern), $6.(*Query), $8.(*Query), nil}}}
- }
- | tokForeach term tokAs pattern '(' query ';' query ';' query ')'
- {
- $$ = &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{$2.(*Term), $4.(*Pattern), $6.(*Query), $8.(*Query), $10.(*Query)}}}
- }
- | tokIf query tokThen query ifelifs ifelse tokEnd
- {
- $$ = &Query{Term: &Term{Type: TermTypeIf, If: &If{$2.(*Query), $4.(*Query), $5.([]*IfElif), $6.(*Query)}}}
- }
- | tokTry query trycatch
- {
- $$ = &Query{Term: &Term{Type: TermTypeTry, Try: &Try{$2.(*Query), $3.(*Query)}}}
+ term := $1.(*Term)
+ term.SuffixList = append(term.SuffixList, &Suffix{Bind: &Bind{$3.([]*Pattern), $5.(*Query)}})
+ $$ = &Query{Term: term}
}
| tokLabel tokVariable '|' query
{
$$ = &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{$2, $4.(*Query)}}}
}
- | query '?'
- {
- if t := $1.(*Query).Term; t != nil {
- t.SuffixList = append(t.SuffixList, &Suffix{Optional: true})
- } else {
- $$ = &Query{Term: &Term{Type: TermTypeQuery, Query: $1.(*Query), SuffixList: []*Suffix{{Optional: true}}}}
- }
- }
| query ',' query
{
$$ = &Query{Left: $1.(*Query), Op: OpComma, Right: $3.(*Query)}
}
- | query tokAltOp query
+ | expr %prec tokExpr
+
+expr
+ : expr tokAltOp expr
{
$$ = &Query{Left: $1.(*Query), Op: $2, Right: $3.(*Query)}
}
- | query tokUpdateOp query
+ | expr tokUpdateOp expr
{
$$ = &Query{Left: $1.(*Query), Op: $2, Right: $3.(*Query)}
}
- | query tokOrOp query
+ | expr tokOrOp expr
{
$$ = &Query{Left: $1.(*Query), Op: OpOr, Right: $3.(*Query)}
}
- | query tokAndOp query
+ | expr tokAndOp expr
{
$$ = &Query{Left: $1.(*Query), Op: OpAnd, Right: $3.(*Query)}
}
- | query tokCompareOp query
+ | expr tokCompareOp expr
{
$$ = &Query{Left: $1.(*Query), Op: $2, Right: $3.(*Query)}
}
- | query '+' query
+ | expr '+' expr
{
$$ = &Query{Left: $1.(*Query), Op: OpAdd, Right: $3.(*Query)}
}
- | query '-' query
+ | expr '-' expr
{
$$ = &Query{Left: $1.(*Query), Op: OpSub, Right: $3.(*Query)}
}
- | query '*' query
+ | expr '*' expr
{
$$ = &Query{Left: $1.(*Query), Op: OpMul, Right: $3.(*Query)}
}
- | query '/' query
+ | expr '/' expr
{
$$ = &Query{Left: $1.(*Query), Op: OpDiv, Right: $3.(*Query)}
}
- | query '%' query
+ | expr '%' expr
{
$$ = &Query{Left: $1.(*Query), Op: OpMod, Right: $3.(*Query)}
}
- | term %prec tokTermPost
+ | term %prec tokTerm
{
$$ = &Query{Term: $1.(*Term)}
}
@@ -292,7 +272,7 @@ objectpattern
}
| tokVariable
{
- $$ = &PatternObject{KeyOnly: $1}
+ $$ = &PatternObject{Key: $1}
}
term
@@ -310,10 +290,11 @@ term
}
| '.' suffix
{
- if $2.(*Suffix).Iter {
- $$ = &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{$2.(*Suffix)}}
+ suffix := $2.(*Suffix)
+ if suffix.Iter {
+ $$ = &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{suffix}}
} else {
- $$ = &Term{Type: TermTypeIndex, Index: $2.(*Suffix).Index}
+ $$ = &Term{Type: TermTypeIndex, Index: suffix.Index}
}
}
| '.' string
@@ -344,10 +325,38 @@ term
{
$$ = &Term{Type: TermTypeFunc, Func: &Func{Name: $1}}
}
+ | '{' '}'
+ {
+ $$ = &Term{Type: TermTypeObject, Object: &Object{}}
+ }
+ | '{' objectkeyvals '}'
+ {
+ $$ = &Term{Type: TermTypeObject, Object: &Object{$2.([]*ObjectKeyVal)}}
+ }
+ | '{' objectkeyvals ',' '}'
+ {
+ $$ = &Term{Type: TermTypeObject, Object: &Object{$2.([]*ObjectKeyVal)}}
+ }
+ | '[' ']'
+ {
+ $$ = &Term{Type: TermTypeArray, Array: &Array{}}
+ }
+ | '[' query ']'
+ {
+ $$ = &Term{Type: TermTypeArray, Array: &Array{$2.(*Query)}}
+ }
| tokNumber
{
$$ = &Term{Type: TermTypeNumber, Number: $1}
}
+ | '+' term
+ {
+ $$ = &Term{Type: TermTypeUnary, Unary: &Unary{OpAdd, $2.(*Term)}}
+ }
+ | '-' term
+ {
+ $$ = &Term{Type: TermTypeUnary, Unary: &Unary{OpSub, $2.(*Term)}}
+ }
| tokFormat
{
$$ = &Term{Type: TermTypeFormat, Format: $1}
@@ -360,34 +369,34 @@ term
{
$$ = &Term{Type: TermTypeString, Str: $1.(*String)}
}
- | '(' query ')'
- {
- $$ = &Term{Type: TermTypeQuery, Query: $2.(*Query)}
- }
- | '-' term
+ | tokIf query tokThen query ifelifs ifelse tokEnd
{
- $$ = &Term{Type: TermTypeUnary, Unary: &Unary{OpSub, $2.(*Term)}}
+ $$ = &Term{Type: TermTypeIf, If: &If{$2.(*Query), $4.(*Query), $5.([]*IfElif), $6.(*Query)}}
}
- | '+' term
+ | tokTry expr trycatch
{
- $$ = &Term{Type: TermTypeUnary, Unary: &Unary{OpAdd, $2.(*Term)}}
+ $$ = &Term{Type: TermTypeTry, Try: &Try{$2.(*Query), $3.(*Query)}}
}
- | '{' object '}'
+ | tokReduce expr tokAs pattern '(' query ';' query ')'
{
- $$ = &Term{Type: TermTypeObject, Object: &Object{$2.([]*ObjectKeyVal)}}
+ $$ = &Term{Type: TermTypeReduce, Reduce: &Reduce{$2.(*Query), $4.(*Pattern), $6.(*Query), $8.(*Query)}}
}
- | '[' query ']'
+ | tokForeach expr tokAs pattern '(' query ';' query ')'
{
- $$ = &Term{Type: TermTypeArray, Array: &Array{$2.(*Query)}}
+ $$ = &Term{Type: TermTypeForeach, Foreach: &Foreach{$2.(*Query), $4.(*Pattern), $6.(*Query), $8.(*Query), nil}}
}
- | '[' ']'
+ | tokForeach expr tokAs pattern '(' query ';' query ';' query ')'
{
- $$ = &Term{Type: TermTypeArray, Array: &Array{}}
+ $$ = &Term{Type: TermTypeForeach, Foreach: &Foreach{$2.(*Query), $4.(*Pattern), $6.(*Query), $8.(*Query), $10.(*Query)}}
}
| tokBreak tokVariable
{
$$ = &Term{Type: TermTypeBreak, Break: $2}
}
+ | '(' query ')'
+ {
+ $$ = &Term{Type: TermTypeQuery, Query: $2.(*Query)}
+ }
| term tokIndex
{
$1.(*Term).SuffixList = append($1.(*Term).SuffixList, &Suffix{Index: &Index{Name: $2}})
@@ -435,12 +444,12 @@ stringparts
}
tokIdentModuleIdent
- : tokIdent {}
- | tokModuleIdent {}
+ : tokIdent
+ | tokModuleIdent
tokVariableModuleVariable
- : tokVariable {}
- | tokModuleVariable {}
+ : tokVariable
+ | tokModuleVariable
suffix
: '[' ']'
@@ -457,11 +466,11 @@ suffix
}
| '[' ':' query ']'
{
- $$ = &Suffix{Index: &Index{End: $3.(*Query)}}
+ $$ = &Suffix{Index: &Index{End: $3.(*Query), IsSlice: true}}
}
| '[' query ':' query ']'
{
- $$ = &Suffix{Index: &Index{Start: $2.(*Query), IsSlice: true, End: $4.(*Query)}}
+ $$ = &Suffix{Index: &Index{Start: $2.(*Query), End: $4.(*Query), IsSlice: true}}
}
args
@@ -479,9 +488,9 @@ ifelifs
{
$$ = []*IfElif(nil)
}
- | tokElif query tokThen query ifelifs
+ | ifelifs tokElif query tokThen query
{
- $$ = prependIfElif($5.([]*IfElif), &IfElif{$2.(*Query), $4.(*Query)})
+ $$ = append($1.([]*IfElif), &IfElif{$3.(*Query), $5.(*Query)})
}
ifelse
@@ -499,61 +508,54 @@ trycatch
{
$$ = (*Query)(nil)
}
- | tokCatch query
+ | tokCatch expr
{
$$ = $2
}
-object
- :
- {
- $$ = []*ObjectKeyVal(nil)
- }
- | objectkeyval
+objectkeyvals
+ : objectkeyval
{
$$ = []*ObjectKeyVal{$1.(*ObjectKeyVal)}
}
- | objectkeyval ',' object
+ | objectkeyvals ',' objectkeyval
{
- $$ = prependObjectKeyVal($3.([]*ObjectKeyVal), $1.(*ObjectKeyVal))
+ $$ = append($1.([]*ObjectKeyVal), $3.(*ObjectKeyVal))
}
objectkeyval
: objectkey ':' objectval
{
- $$ = &ObjectKeyVal{Key: $1, Val: $3.(*ObjectVal)}
+ $$ = &ObjectKeyVal{Key: $1, Val: $3.(*Query)}
}
| string ':' objectval
{
- $$ = &ObjectKeyVal{KeyString: $1.(*String), Val: $3.(*ObjectVal)}
+ $$ = &ObjectKeyVal{KeyString: $1.(*String), Val: $3.(*Query)}
}
| '(' query ')' ':' objectval
{
- $$ = &ObjectKeyVal{KeyQuery: $2.(*Query), Val: $5.(*ObjectVal)}
+ $$ = &ObjectKeyVal{KeyQuery: $2.(*Query), Val: $5.(*Query)}
}
| objectkey
{
- $$ = &ObjectKeyVal{KeyOnly: $1}
+ $$ = &ObjectKeyVal{Key: $1}
}
| string
{
- $$ = &ObjectKeyVal{KeyOnlyString: $1.(*String)}
+ $$ = &ObjectKeyVal{KeyString: $1.(*String)}
}
objectkey
- : tokIdent {}
- | tokVariable {}
- | tokKeyword {}
+ : tokIdent
+ | tokVariable
+ | tokKeyword
objectval
- : term
- {
- $$ = &ObjectVal{[]*Query{{Term: $1.(*Term)}}}
- }
- | term '|' objectval
+ : objectval '|' objectval
{
- $$ = &ObjectVal{prependQuery($3.(*ObjectVal).Queries, &Query{Term: $1.(*Term)})}
+ $$ = &Query{Left: $1.(*Query), Op: OpPipe, Right: $3.(*Query)}
}
+ | expr
constterm
: constobject
@@ -586,23 +588,27 @@ constterm
}
constobject
- : '{' constobjectkeyvals '}'
+ : '{' '}'
+ {
+ $$ = &ConstObject{}
+ }
+ | '{' constobjectkeyvals '}'
{
$$ = &ConstObject{$2.([]*ConstObjectKeyVal)}
}
-
-constobjectkeyvals
- :
+ | '{' constobjectkeyvals ',' '}'
{
- $$ = []*ConstObjectKeyVal(nil)
+ $$ = &ConstObject{$2.([]*ConstObjectKeyVal)}
}
- | constobjectkeyval
+
+constobjectkeyvals
+ : constobjectkeyval
{
$$ = []*ConstObjectKeyVal{$1.(*ConstObjectKeyVal)}
}
- | constobjectkeyval ',' constobjectkeyvals
+ | constobjectkeyvals ',' constobjectkeyval
{
- $$ = prependConstObjectKeyVal($3.([]*ConstObjectKeyVal), $1.(*ConstObjectKeyVal))
+ $$ = append($1.([]*ConstObjectKeyVal), $3.(*ConstObjectKeyVal))
}
constobjectkeyval
@@ -640,26 +646,26 @@ constarrayelems
}
tokKeyword
- : tokOrOp {}
- | tokAndOp {}
- | tokModule {}
- | tokImport {}
- | tokInclude {}
- | tokDef {}
- | tokAs {}
- | tokLabel {}
- | tokBreak {}
- | tokNull {}
- | tokTrue {}
- | tokFalse {}
- | tokIf {}
- | tokThen {}
- | tokElif {}
- | tokElse {}
- | tokEnd {}
- | tokTry {}
- | tokCatch {}
- | tokReduce {}
- | tokForeach {}
+ : tokOrOp
+ | tokAndOp
+ | tokModule
+ | tokImport
+ | tokInclude
+ | tokDef
+ | tokAs
+ | tokLabel
+ | tokBreak
+ | tokNull
+ | tokTrue
+ | tokFalse
+ | tokIf
+ | tokThen
+ | tokElif
+ | tokElse
+ | tokEnd
+ | tokTry
+ | tokCatch
+ | tokReduce
+ | tokForeach
%%
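
The grammar above switches its list rules from right recursion (which required the prepend* helpers deleted in the next file) to left recursion, so each reduction can extend the slice with a plain append. A minimal sketch of the two idioms, using illustrative int slices in place of the AST node lists:

    package main

    import "fmt"

    // prependInt mirrors the removed prepend* helpers: grow the slice by one,
    // shift the existing elements right, and put the new element at the front.
    func prependInt(xs []int, x int) []int {
        xs = append(xs, 0)
        copy(xs[1:], xs)
        xs[0] = x
        return xs
    }

    func main() {
        // Right-recursive rules reduce the tail first, so elements arrive in
        // reverse order and must be prepended one by one.
        var rightRec []int
        for _, x := range []int{3, 2, 1} {
            rightRec = prependInt(rightRec, x)
        }

        // Left-recursive rules reduce the head first, so a plain append works.
        var leftRec []int
        for _, x := range []int{1, 2, 3} {
            leftRec = append(leftRec, x)
        }

        fmt.Println(rightRec, leftRec) // [1 2 3] [1 2 3]
    }
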
diff --git a/vendor/github.com/itchyny/gojq/prepend.go b/vendor/github.com/itchyny/gojq/prepend.go
deleted file mode 100644
index 6b57bbf84..000000000
--- a/vendor/github.com/itchyny/gojq/prepend.go
+++ /dev/null
@@ -1,43 +0,0 @@
-package gojq
-
-func prependQuery(xs []*Query, x *Query) []*Query {
- xs = append(xs, nil)
- copy(xs[1:], xs)
- xs[0] = x
- return xs
-}
-
-func prependImport(xs []*Import, x *Import) []*Import {
- xs = append(xs, nil)
- copy(xs[1:], xs)
- xs[0] = x
- return xs
-}
-
-func prependFuncDef(xs []*FuncDef, x *FuncDef) []*FuncDef {
- xs = append(xs, nil)
- copy(xs[1:], xs)
- xs[0] = x
- return xs
-}
-
-func prependIfElif(xs []*IfElif, x *IfElif) []*IfElif {
- xs = append(xs, nil)
- copy(xs[1:], xs)
- xs[0] = x
- return xs
-}
-
-func prependObjectKeyVal(xs []*ObjectKeyVal, x *ObjectKeyVal) []*ObjectKeyVal {
- xs = append(xs, nil)
- copy(xs[1:], xs)
- xs[0] = x
- return xs
-}
-
-func prependConstObjectKeyVal(xs []*ConstObjectKeyVal, x *ConstObjectKeyVal) []*ConstObjectKeyVal {
- xs = append(xs, nil)
- copy(xs[1:], xs)
- xs[0] = x
- return xs
-}
diff --git a/vendor/github.com/itchyny/gojq/preview.go b/vendor/github.com/itchyny/gojq/preview.go
new file mode 100644
index 000000000..e082eb561
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/preview.go
@@ -0,0 +1,77 @@
+package gojq
+
+import "unicode/utf8"
+
+// Preview returns the preview string of v. The preview string is basically the
+// same as the jq-flavored JSON encoding returned by [Marshal], but is truncated
+// by 30 bytes, and more efficient than truncating the result of [Marshal].
+//
+// This method is used by error messages of built-in operators and functions,
+// and accepts only limited types (nil, bool, int, float64, *big.Int, string,
+// []any, and map[string]any). Note that the maximum width and trailing strings
+// on truncation may be changed in the future.
+func Preview(v any) string {
+ bs := jsonLimitedMarshal(v, 32)
+ if l := 30; len(bs) > l {
+ var trailing string
+ switch v.(type) {
+ case string:
+ trailing = ` ..."`
+ case []any:
+ trailing = " ...]"
+ case map[string]any:
+ trailing = " ...}"
+ default:
+ trailing = " ..."
+ }
+ for len(bs) > l-len(trailing) {
+ _, size := utf8.DecodeLastRune(bs)
+ bs = bs[:len(bs)-size]
+ }
+ bs = append(bs, trailing...)
+ }
+ return string(bs)
+}
+
+func jsonLimitedMarshal(v any, n int) (bs []byte) {
+ w := &limitedWriter{buf: make([]byte, n)}
+ defer func() {
+ _ = recover()
+ bs = w.Bytes()
+ }()
+ (&encoder{w: w}).encode(v)
+ return
+}
+
+type limitedWriter struct {
+ buf []byte
+ off int
+}
+
+func (w *limitedWriter) Write(bs []byte) (int, error) {
+ n := copy(w.buf[w.off:], bs)
+ if w.off += n; w.off == len(w.buf) {
+ panic(struct{}{})
+ }
+ return n, nil
+}
+
+func (w *limitedWriter) WriteByte(b byte) error {
+ w.buf[w.off] = b
+ if w.off++; w.off == len(w.buf) {
+ panic(struct{}{})
+ }
+ return nil
+}
+
+func (w *limitedWriter) WriteString(s string) (int, error) {
+ n := copy(w.buf[w.off:], s)
+ if w.off += n; w.off == len(w.buf) {
+ panic(struct{}{})
+ }
+ return n, nil
+}
+
+func (w *limitedWriter) Bytes() []byte {
+ return w.buf[:w.off]
+}
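
The limitedWriter added above caps the preview encoding by panicking once its fixed buffer is full, and the caller recovers to keep whatever was written. A small self-contained sketch of that early-exit pattern (illustrative names and plain strings instead of the gojq encoder):

    package main

    import "fmt"

    // limitWriter collects at most len(buf) bytes; once full it panics, which
    // the caller treats as an early-exit signal rather than an error.
    type limitWriter struct {
        buf []byte
        off int
    }

    func (w *limitWriter) Write(p []byte) (int, error) {
        n := copy(w.buf[w.off:], p)
        if w.off += n; w.off == len(w.buf) {
            panic(struct{}{})
        }
        return n, nil
    }

    // preview concatenates parts but never produces more than n bytes,
    // stopping as soon as the limit is reached.
    func preview(parts []string, n int) (out string) {
        w := &limitWriter{buf: make([]byte, n)}
        defer func() {
            _ = recover() // the panic above is only an early-exit signal
            out = string(w.buf[:w.off])
        }()
        for _, p := range parts {
            w.Write([]byte(p))
        }
        return
    }

    func main() {
        fmt.Println(preview([]string{"alpha ", "beta ", "gamma ", "delta"}, 12))
        // prints "alpha beta g"
    }
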
diff --git a/vendor/github.com/itchyny/gojq/query.go b/vendor/github.com/itchyny/gojq/query.go
index 4e2ef811b..e7cf77890 100644
--- a/vendor/github.com/itchyny/gojq/query.go
+++ b/vendor/github.com/itchyny/gojq/query.go
@@ -2,11 +2,23 @@ package gojq
import (
"context"
- "encoding/json"
- "strconv"
"strings"
)
+// Parse a query string, and returns the query struct.
+//
+// If parsing failed, it returns an error of type [*ParseError], which has
+// the byte offset and the invalid token. The byte offset is the scanned bytes
+// when the error occurred. The token is empty if the error occurred after
+// scanning the entire query string.
+func Parse(src string) (*Query, error) {
+ l := newLexer(src)
+ if yyParse(l) > 0 {
+ return nil, l.err
+ }
+ return l.result, nil
+}
+
// Query represents the abstract syntax tree of a jq query.
type Query struct {
Meta *ConstObject
@@ -21,16 +33,17 @@ type Query struct {
// Run the query.
//
-// It is safe to call this method of a *Query in multiple goroutines.
-func (e *Query) Run(v interface{}) Iter {
- return e.RunWithContext(nil, v)
+// It is safe to call this method in goroutines, to reuse a parsed [*Query].
+// But for arguments, do not give values sharing same data between goroutines.
+func (e *Query) Run(v any) Iter {
+ return e.RunWithContext(context.Background(), v)
}
// RunWithContext runs the query with context.
-func (e *Query) RunWithContext(ctx context.Context, v interface{}) Iter {
+func (e *Query) RunWithContext(ctx context.Context, v any) Iter {
code, err := Compile(e)
if err != nil {
- return unitIterator(err)
+ return NewIter(err)
}
return code.RunWithContext(ctx, v)
}
@@ -50,13 +63,8 @@ func (e *Query) writeTo(s *strings.Builder) {
for _, im := range e.Imports {
im.writeTo(s)
}
- for i, fd := range e.FuncDefs {
- if i > 0 {
- s.WriteByte(' ')
- }
+ for _, fd := range e.FuncDefs {
fd.writeTo(s)
- }
- if len(e.FuncDefs) > 0 {
s.WriteByte(' ')
}
if e.Func != "" {
@@ -93,18 +101,18 @@ func (e *Query) minify() {
}
}
-func (e *Query) toIndices() []interface{} {
- if e.FuncDefs != nil || e.Right != nil || e.Term == nil {
+func (e *Query) toIndexKey() any {
+ if e.Term == nil {
return nil
}
- return e.Term.toIndices()
+ return e.Term.toIndexKey()
}
-func (e *Query) countCommaQueries() int {
- if e.Op == OpComma {
- return e.Left.countCommaQueries() + e.Right.countCommaQueries()
+func (e *Query) toIndices(xs []any) []any {
+ if e.Term == nil {
+ return nil
}
- return 1
+ return e.Term.toIndices(xs)
}
// Import ...
@@ -124,12 +132,12 @@ func (e *Import) String() string {
func (e *Import) writeTo(s *strings.Builder) {
if e.ImportPath != "" {
s.WriteString("import ")
- s.WriteString(strconv.Quote(e.ImportPath))
+ jsonEncodeString(s, e.ImportPath)
s.WriteString(" as ")
s.WriteString(e.ImportAlias)
} else {
s.WriteString("include ")
- s.WriteString(strconv.Quote(e.IncludePath))
+ jsonEncodeString(s, e.IncludePath)
}
if e.Meta != nil {
s.WriteByte(' ')
@@ -315,25 +323,48 @@ func (e *Term) toFunc() string {
}
}
-func (e *Term) toIndices() []interface{} {
- if e.Index != nil {
- xs := e.Index.toIndices()
- if xs == nil {
+func (e *Term) toIndexKey() any {
+ switch e.Type {
+ case TermTypeNumber:
+ return toNumber(e.Number)
+ case TermTypeUnary:
+ return e.Unary.toNumber()
+ case TermTypeString:
+ if e.Str.Queries == nil {
+ return e.Str.Str
+ }
+ return nil
+ default:
+ return nil
+ }
+}
+
+func (e *Term) toIndices(xs []any) []any {
+ switch e.Type {
+ case TermTypeIndex:
+ if xs = e.Index.toIndices(xs); xs == nil {
return nil
}
- for _, s := range e.SuffixList {
- x := s.toIndices()
- if x == nil {
- return nil
- }
- xs = append(xs, x...)
+ case TermTypeQuery:
+ if xs = e.Query.toIndices(xs); xs == nil {
+ return nil
}
- return xs
- } else if e.Query != nil && len(e.SuffixList) == 0 {
- return e.Query.toIndices()
- } else {
+ default:
return nil
}
+ for _, s := range e.SuffixList {
+ if xs = s.toIndices(xs); xs == nil {
+ return nil
+ }
+ }
+ return xs
+}
+
+func (e *Term) toNumber() any {
+ if e.Type == TermTypeNumber {
+ return toNumber(e.Number)
+ }
+ return nil
}
// Unary ...
@@ -357,6 +388,14 @@ func (e *Unary) minify() {
e.Term.minify()
}
+func (e *Unary) toNumber() any {
+ v := e.Term.toNumber()
+ if v != nil && e.Op == OpSub {
+ v = funcOpNegate(v)
+ }
+ return v
+}
+
// Pattern ...
type Pattern struct {
Name string
@@ -400,7 +439,6 @@ type PatternObject struct {
KeyString *String
KeyQuery *Query
Val *Pattern
- KeyOnly string
}
func (e *PatternObject) String() string {
@@ -423,9 +461,6 @@ func (e *PatternObject) writeTo(s *strings.Builder) {
s.WriteString(": ")
e.Val.writeTo(s)
}
- if e.KeyOnly != "" {
- s.WriteString(e.KeyOnly)
- }
}
// Index ...
@@ -433,8 +468,8 @@ type Index struct {
Name string
Str *String
Start *Query
- IsSlice bool
End *Query
+ IsSlice bool
}
func (e *Index) String() string {
@@ -457,25 +492,22 @@ func (e *Index) writeTo(s *strings.Builder) {
func (e *Index) writeSuffixTo(s *strings.Builder) {
if e.Name != "" {
s.WriteString(e.Name)
+ } else if e.Str != nil {
+ e.Str.writeTo(s)
} else {
- if e.Str != nil {
- e.Str.writeTo(s)
- } else {
- s.WriteByte('[')
+ s.WriteByte('[')
+ if e.IsSlice {
if e.Start != nil {
e.Start.writeTo(s)
- if e.IsSlice {
- s.WriteByte(':')
- if e.End != nil {
- e.End.writeTo(s)
- }
- }
- } else if e.End != nil {
- s.WriteByte(':')
+ }
+ s.WriteByte(':')
+ if e.End != nil {
e.End.writeTo(s)
}
- s.WriteByte(']')
+ } else {
+ e.Start.writeTo(s)
}
+ s.WriteByte(']')
}
}
@@ -491,11 +523,38 @@ func (e *Index) minify() {
}
}
-func (e *Index) toIndices() []interface{} {
- if e.Name == "" {
- return nil
+func (e *Index) toIndexKey() any {
+ if e.Name != "" {
+ return e.Name
+ } else if e.Str != nil {
+ if e.Str.Queries == nil {
+ return e.Str.Str
+ }
+ } else if !e.IsSlice {
+ return e.Start.toIndexKey()
+ } else {
+ var start, end any
+ ok := true
+ if e.Start != nil {
+ start = e.Start.toIndexKey()
+ ok = start != nil
+ }
+ if e.End != nil && ok {
+ end = e.End.toIndexKey()
+ ok = end != nil
+ }
+ if ok {
+ return map[string]any{"start": start, "end": end}
+ }
}
- return []interface{}{e.Name}
+ return nil
+}
+
+func (e *Index) toIndices(xs []any) []any {
+ if k := e.toIndexKey(); k != nil {
+ return append(xs, k)
+ }
+ return nil
}
// Func ...
@@ -551,7 +610,7 @@ func (e *String) String() string {
func (e *String) writeTo(s *strings.Builder) {
if e.Queries == nil {
- s.WriteString(strconv.Quote(e.Str))
+ jsonEncodeString(s, e.Str)
return
}
s.WriteByte('"')
@@ -607,12 +666,10 @@ func (e *Object) minify() {
// ObjectKeyVal ...
type ObjectKeyVal struct {
- Key string
- KeyString *String
- KeyQuery *Query
- Val *ObjectVal
- KeyOnly string
- KeyOnlyString *String
+ Key string
+ KeyString *String
+ KeyQuery *Query
+ Val *Query
}
func (e *ObjectKeyVal) String() string {
@@ -635,11 +692,6 @@ func (e *ObjectKeyVal) writeTo(s *strings.Builder) {
s.WriteString(": ")
e.Val.writeTo(s)
}
- if e.KeyOnly != "" {
- s.WriteString(e.KeyOnly)
- } else if e.KeyOnlyString != nil {
- e.KeyOnlyString.writeTo(s)
- }
}
func (e *ObjectKeyVal) minify() {
@@ -651,35 +703,6 @@ func (e *ObjectKeyVal) minify() {
if e.Val != nil {
e.Val.minify()
}
- if e.KeyOnlyString != nil {
- e.KeyOnlyString.minify()
- }
-}
-
-// ObjectVal ...
-type ObjectVal struct {
- Queries []*Query
-}
-
-func (e *ObjectVal) String() string {
- var s strings.Builder
- e.writeTo(&s)
- return s.String()
-}
-
-func (e *ObjectVal) writeTo(s *strings.Builder) {
- for i, e := range e.Queries {
- if i > 0 {
- s.WriteString(" | ")
- }
- e.writeTo(s)
- }
-}
-
-func (e *ObjectVal) minify() {
- for _, e := range e.Queries {
- e.minify()
- }
}
// Array ...
@@ -745,21 +768,21 @@ func (e *Suffix) minify() {
}
}
-func (e *Suffix) toTerm() (*Term, bool) {
+func (e *Suffix) toTerm() *Term {
if e.Index != nil {
- return &Term{Type: TermTypeIndex, Index: e.Index}, true
+ return &Term{Type: TermTypeIndex, Index: e.Index}
} else if e.Iter {
- return &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}}}, true
+ return &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}}}
} else {
- return nil, false
+ return nil
}
}
-func (e *Suffix) toIndices() []interface{} {
+func (e *Suffix) toIndices(xs []any) []any {
if e.Index == nil {
return nil
}
- return e.Index.toIndices()
+ return e.Index.toIndices(xs)
}
// Bind ...
@@ -889,7 +912,7 @@ func (e *Try) minify() {
// Reduce ...
type Reduce struct {
- Term *Term
+ Query *Query
Pattern *Pattern
Start *Query
Update *Query
@@ -903,7 +926,7 @@ func (e *Reduce) String() string {
func (e *Reduce) writeTo(s *strings.Builder) {
s.WriteString("reduce ")
- e.Term.writeTo(s)
+ e.Query.writeTo(s)
s.WriteString(" as ")
e.Pattern.writeTo(s)
s.WriteString(" (")
@@ -914,14 +937,14 @@ func (e *Reduce) writeTo(s *strings.Builder) {
}
func (e *Reduce) minify() {
- e.Term.minify()
+ e.Query.minify()
e.Start.minify()
e.Update.minify()
}
// Foreach ...
type Foreach struct {
- Term *Term
+ Query *Query
Pattern *Pattern
Start *Query
Update *Query
@@ -936,7 +959,7 @@ func (e *Foreach) String() string {
func (e *Foreach) writeTo(s *strings.Builder) {
s.WriteString("foreach ")
- e.Term.writeTo(s)
+ e.Query.writeTo(s)
s.WriteString(" as ")
e.Pattern.writeTo(s)
s.WriteString(" (")
@@ -951,7 +974,7 @@ func (e *Foreach) writeTo(s *strings.Builder) {
}
func (e *Foreach) minify() {
- e.Term.minify()
+ e.Query.minify()
e.Start.minify()
e.Update.minify()
if e.Extract != nil {
@@ -1006,24 +1029,24 @@ func (e *ConstTerm) writeTo(s *strings.Builder) {
e.Array.writeTo(s)
} else if e.Number != "" {
s.WriteString(e.Number)
- } else if e.Str != "" {
- s.WriteString(strconv.Quote(e.Str))
} else if e.Null {
s.WriteString("null")
} else if e.True {
s.WriteString("true")
} else if e.False {
s.WriteString("false")
+ } else {
+ jsonEncodeString(s, e.Str)
}
}
-func (e *ConstTerm) toValue() interface{} {
+func (e *ConstTerm) toValue() any {
if e.Object != nil {
return e.Object.ToValue()
} else if e.Array != nil {
return e.Array.toValue()
} else if e.Number != "" {
- return normalizeNumbers(json.Number(e.Number))
+ return toNumber(e.Number)
} else if e.Null {
return nil
} else if e.True {
@@ -1035,6 +1058,14 @@ func (e *ConstTerm) toValue() interface{} {
}
}
+func (e *ConstTerm) toString() (string, bool) {
+ if e.Object != nil || e.Array != nil ||
+ e.Number != "" || e.Null || e.True || e.False {
+ return "", false
+ }
+ return e.Str, true
+}
+
// ConstObject ...
type ConstObject struct {
KeyVals []*ConstObjectKeyVal
@@ -1061,12 +1092,12 @@ func (e *ConstObject) writeTo(s *strings.Builder) {
s.WriteString(" }")
}
-// ToValue converts the object to map[string]interface{}.
-func (e *ConstObject) ToValue() map[string]interface{} {
+// ToValue converts the object to map[string]any.
+func (e *ConstObject) ToValue() map[string]any {
if e == nil {
return nil
}
- v := make(map[string]interface{}, len(e.KeyVals))
+ v := make(map[string]any, len(e.KeyVals))
for _, e := range e.KeyVals {
key := e.Key
if key == "" {
@@ -1094,7 +1125,7 @@ func (e *ConstObjectKeyVal) writeTo(s *strings.Builder) {
if e.Key != "" {
s.WriteString(e.Key)
} else {
- s.WriteString(e.KeyString)
+ jsonEncodeString(s, e.KeyString)
}
s.WriteString(": ")
e.Val.writeTo(s)
@@ -1122,8 +1153,8 @@ func (e *ConstArray) writeTo(s *strings.Builder) {
s.WriteByte(']')
}
-func (e *ConstArray) toValue() []interface{} {
- v := make([]interface{}, len(e.Elems))
+func (e *ConstArray) toValue() []any {
+ v := make([]any, len(e.Elems))
for i, e := range e.Elems {
v[i] = e.toValue()
}
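
Parse now lives in query.go and Run takes any-typed input. A minimal usage sketch of this public API (the query string and input data are illustrative):

    package main

    import (
        "fmt"
        "log"

        "github.com/itchyny/gojq"
    )

    func main() {
        query, err := gojq.Parse(".users[].name")
        if err != nil {
            log.Fatal(err) // a *ParseError carrying the byte offset and token
        }
        input := map[string]any{
            "users": []any{
                map[string]any{"name": "alice"},
                map[string]any{"name": "bob"},
            },
        }
        iter := query.Run(input) // a parsed *Query is safe to reuse across goroutines
        for {
            v, ok := iter.Next()
            if !ok {
                break
            }
            if err, ok := v.(error); ok {
                log.Fatal(err)
            }
            fmt.Println(v) // alice, then bob
        }
    }
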
diff --git a/vendor/github.com/itchyny/gojq/release.go b/vendor/github.com/itchyny/gojq/release.go
index 7f39519b6..07fc71676 100644
--- a/vendor/github.com/itchyny/gojq/release.go
+++ b/vendor/github.com/itchyny/gojq/release.go
@@ -1,13 +1,16 @@
-// +build !debug
+//go:build !gojq_debug
+// +build !gojq_debug
package gojq
-func (c *compiler) appendCodeInfo(interface{}) {}
+type codeinfo struct{}
-func (c *compiler) deleteCodeInfo(string) {}
+func (*compiler) appendCodeInfo(any) {}
-func (env *env) debugCodes() {}
+func (*compiler) deleteCodeInfo(string) {}
-func (env *env) debugState(int, bool) {}
+func (*env) debugCodes() {}
-func (env *env) debugForks(int, string) {}
+func (*env) debugState(int, bool) {}
+
+func (*env) debugForks(int, string) {}
diff --git a/vendor/github.com/itchyny/gojq/scope_stack.go b/vendor/github.com/itchyny/gojq/scope_stack.go
new file mode 100644
index 000000000..e140ca15b
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/scope_stack.go
@@ -0,0 +1,52 @@
+package gojq
+
+type scopeStack struct {
+ data []scopeBlock
+ index int
+ limit int
+}
+
+type scopeBlock struct {
+ value scope
+ next int
+}
+
+func newScopeStack() *scopeStack {
+ return &scopeStack{index: -1, limit: -1}
+}
+
+func (s *scopeStack) push(v scope) {
+ b := scopeBlock{v, s.index}
+ i := s.index + 1
+ if i <= s.limit {
+ i = s.limit + 1
+ }
+ s.index = i
+ if i < len(s.data) {
+ s.data[i] = b
+ } else {
+ s.data = append(s.data, b)
+ }
+}
+
+func (s *scopeStack) pop() scope {
+ b := s.data[s.index]
+ s.index = b.next
+ return b.value
+}
+
+func (s *scopeStack) empty() bool {
+ return s.index < 0
+}
+
+func (s *scopeStack) save() (index, limit int) {
+ index, limit = s.index, s.limit
+ if s.index > s.limit {
+ s.limit = s.index
+ }
+ return
+}
+
+func (s *scopeStack) restore(index, limit int) {
+ s.index, s.limit = index, limit
+}
diff --git a/vendor/github.com/itchyny/gojq/stack.go b/vendor/github.com/itchyny/gojq/stack.go
index fc98dfee0..a0e265c8c 100644
--- a/vendor/github.com/itchyny/gojq/stack.go
+++ b/vendor/github.com/itchyny/gojq/stack.go
@@ -7,7 +7,7 @@ type stack struct {
}
type block struct {
- value interface{}
+ value any
next int
}
@@ -15,7 +15,7 @@ func newStack() *stack {
return &stack{index: -1, limit: -1}
}
-func (s *stack) push(v interface{}) {
+func (s *stack) push(v any) {
b := block{v, s.index}
i := s.index + 1
if i <= s.limit {
@@ -29,13 +29,13 @@ func (s *stack) push(v interface{}) {
}
}
-func (s *stack) pop() interface{} {
+func (s *stack) pop() any {
b := s.data[s.index]
s.index = b.next
return b.value
}
-func (s *stack) top() interface{} {
+func (s *stack) top() any {
return s.data[s.index].value
}
@@ -43,20 +43,12 @@ func (s *stack) empty() bool {
return s.index < 0
}
-func (s *stack) lookup(f func(v interface{}) bool) interface{} {
- for i := s.index; i >= 0; i = s.data[i].next {
- if f(s.data[i].value) {
- return s.data[i].value
- }
- }
- panic("stack.lookup")
-}
-
-func (s *stack) save(index, limit *int) {
- if s.index >= s.limit {
+func (s *stack) save() (index, limit int) {
+ index, limit = s.index, s.limit
+ if s.index > s.limit {
s.limit = s.index
}
- *index, *limit = s.index, s.limit
+ return
}
func (s *stack) restore(index, limit int) {
diff --git a/vendor/github.com/itchyny/gojq/term_type.go b/vendor/github.com/itchyny/gojq/term_type.go
index 1670948c8..941e7ba9c 100644
--- a/vendor/github.com/itchyny/gojq/term_type.go
+++ b/vendor/github.com/itchyny/gojq/term_type.go
@@ -1,6 +1,6 @@
package gojq
-// TermType represents the type of Term.
+// TermType represents the type of [Term].
type TermType int
// TermType list.
@@ -27,7 +27,7 @@ const (
TermTypeQuery
)
-// GoString implements GoStringer.
+// GoString implements [fmt.GoStringer].
func (termType TermType) GoString() (str string) {
defer func() { str = "gojq." + str }()
switch termType {
diff --git a/vendor/github.com/itchyny/gojq/type.go b/vendor/github.com/itchyny/gojq/type.go
new file mode 100644
index 000000000..bb388e20e
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/type.go
@@ -0,0 +1,29 @@
+package gojq
+
+import (
+ "fmt"
+ "math/big"
+)
+
+// TypeOf returns the jq-flavored type name of v.
+//
+// This method is used by built-in type/0 function, and accepts only limited
+// types (nil, bool, int, float64, *big.Int, string, []any, and map[string]any).
+func TypeOf(v any) string {
+ switch v.(type) {
+ case nil:
+ return "null"
+ case bool:
+ return "boolean"
+ case int, float64, *big.Int:
+ return "number"
+ case string:
+ return "string"
+ case []any:
+ return "array"
+ case map[string]any:
+ return "object"
+ default:
+ panic(fmt.Sprintf("invalid type: %[1]T (%[1]v)", v))
+ }
+}
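
A short usage sketch of the new TypeOf helper over the value kinds its doc comment lists:

    package main

    import (
        "fmt"
        "math/big"

        "github.com/itchyny/gojq"
    )

    func main() {
        values := []any{nil, true, 42, 1.5, new(big.Int), "jq", []any{}, map[string]any{}}
        for _, v := range values {
            fmt.Println(gojq.TypeOf(v))
        }
        // null, boolean, number, number, number, string, array, object
    }
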
diff --git a/vendor/github.com/itchyny/timefmt-go/CHANGELOG.md b/vendor/github.com/itchyny/timefmt-go/CHANGELOG.md
index ca51eb865..d863ac3be 100644
--- a/vendor/github.com/itchyny/timefmt-go/CHANGELOG.md
+++ b/vendor/github.com/itchyny/timefmt-go/CHANGELOG.md
@@ -1,4 +1,19 @@
# Changelog
+## [v0.1.6](https://github.com/itchyny/timefmt-go/compare/v0.1.5..v0.1.6) (2024-06-01)
+* support parsing week directives (`%A`, `%a`, `%w`, `%u`, `%V`, `%U`, `%W`)
+* validate range of values on parsing directives
+* fix formatting `%l` to show `12` at midnight
+
+## [v0.1.5](https://github.com/itchyny/timefmt-go/compare/v0.1.4..v0.1.5) (2022-12-01)
+* support parsing time zone offset with name using both `%z` and `%Z`
+
+## [v0.1.4](https://github.com/itchyny/timefmt-go/compare/v0.1.3..v0.1.4) (2022-09-01)
+* improve documents
+* drop support for Go 1.16
+
+## [v0.1.3](https://github.com/itchyny/timefmt-go/compare/v0.1.2..v0.1.3) (2021-04-14)
+* implement `ParseInLocation` for configuring the default location
+
## [v0.1.2](https://github.com/itchyny/timefmt-go/compare/v0.1.1..v0.1.2) (2021-02-22)
* implement parsing/formatting time zone offset with colons (`%:z`, `%::z`, `%:::z`)
* recognize `Z` as UTC on parsing time zone offset (`%z`)
diff --git a/vendor/github.com/itchyny/timefmt-go/LICENSE b/vendor/github.com/itchyny/timefmt-go/LICENSE
index 4d650fa85..84d6cb033 100644
--- a/vendor/github.com/itchyny/timefmt-go/LICENSE
+++ b/vendor/github.com/itchyny/timefmt-go/LICENSE
@@ -1,6 +1,6 @@
The MIT License (MIT)
-Copyright (c) 2020,2021 itchyny
+Copyright (c) 2020-2022 itchyny
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/vendor/github.com/itchyny/timefmt-go/Makefile b/vendor/github.com/itchyny/timefmt-go/Makefile
index bacef7bf3..a87cb2864 100644
--- a/vendor/github.com/itchyny/timefmt-go/Makefile
+++ b/vendor/github.com/itchyny/timefmt-go/Makefile
@@ -1,20 +1,19 @@
GOBIN ?= $(shell go env GOPATH)/bin
-export GO111MODULE=on
.PHONY: all
all: test
.PHONY: test
test:
- go test -v ./...
+ go test -v -race ./...
.PHONY: lint
-lint: $(GOBIN)/golint
+lint: $(GOBIN)/staticcheck
go vet ./...
- golint -set_exit_status ./...
+ staticcheck -checks all,-ST1000 ./...
-$(GOBIN)/golint:
- cd && go get golang.org/x/lint/golint
+$(GOBIN)/staticcheck:
+ go install honnef.co/go/tools/cmd/staticcheck@latest
.PHONY: clean
clean:
diff --git a/vendor/github.com/itchyny/timefmt-go/README.md b/vendor/github.com/itchyny/timefmt-go/README.md
index 1fcc08542..9c028c746 100644
--- a/vendor/github.com/itchyny/timefmt-go/README.md
+++ b/vendor/github.com/itchyny/timefmt-go/README.md
@@ -1,7 +1,7 @@
# timefmt-go
-[![CI Status](https://github.com/itchyny/timefmt-go/workflows/CI/badge.svg)](https://github.com/itchyny/timefmt-go/actions)
+[![CI Status](https://github.com/itchyny/timefmt-go/actions/workflows/ci.yaml/badge.svg?branch=main)](https://github.com/itchyny/timefmt-go/actions?query=branch:main)
[![Go Report Card](https://goreportcard.com/badge/github.com/itchyny/timefmt-go)](https://goreportcard.com/report/github.com/itchyny/timefmt-go)
-[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/itchyny/timefmt-go/blob/main/LICENSE)
+[![MIT License](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/itchyny/timefmt-go/blob/main/LICENSE)
[![release](https://img.shields.io/github/release/itchyny/timefmt-go/all.svg)](https://github.com/itchyny/timefmt-go/releases)
[![pkg.go.dev](https://pkg.go.dev/badge/github.com/itchyny/timefmt-go)](https://pkg.go.dev/github.com/itchyny/timefmt-go)
@@ -35,14 +35,15 @@ func main() {
Please refer to [`man 3 strftime`](https://linux.die.net/man/3/strftime) and
[`man 3 strptime`](https://linux.die.net/man/3/strptime) for formatters.
+As an extension, `%f` directive is supported for zero-padded microseconds, which originates from Python.
Note that `E` and `O` modifier characters are not supported.
## Comparison to other libraries
- This library
- provides both formatting and parsing functions in pure Go language,
- - depends only on the Go standard libraries not to grows up the module file.
+ - depends only on the Go standard libraries not to grow up dependency.
- `Format` (`strftime`) implements glibc extensions including
- - width specifier like `%10A, %10B %2k:%M`,
+ - width specifier like `%6Y %10B %4Z` (limited to 1024 bytes),
- omitting padding modifier like `%-y-%-m-%-d`,
- space padding modifier like `%_y-%_m-%_d`,
- upper case modifier like `%^a %^b`,
@@ -53,7 +54,8 @@ Note that `E` and `O` modifier characters are not supported.
- `Parse` (`strptime`) allows to parse
- composed directives like `%F %T`,
- century years like `%C %y`,
- - week names like `%A` `%a` (parsed results are discarded).
+ - week directives like `%W %a` and `%G-W%V-%u`.
+- `ParseInLocation` is provided for configuring the default location.
![](https://user-images.githubusercontent.com/375258/88606920-de475c80-d0b8-11ea-8d40-cbfee9e35c2e.jpg)
@@ -61,7 +63,7 @@ Note that `E` and `O` modifier characters are not supported.
Report bug at [Issues・itchyny/timefmt-go - GitHub](https://github.com/itchyny/timefmt-go/issues).
## Author
-itchyny (https://github.com/itchyny)
+itchyny (<https://github.com/itchyny>)
## License
This software is released under the MIT License, see LICENSE.
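
A brief usage sketch of the timefmt API touched by this bump: Format, Parse, and the ParseInLocation mentioned in the changelog (the dates and zone below are illustrative):

    package main

    import (
        "fmt"
        "log"
        "time"

        timefmt "github.com/itchyny/timefmt-go"
    )

    func main() {
        t, err := timefmt.Parse("2024/06/01 17:04:05", "%Y/%m/%d %H:%M:%S")
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(timefmt.Format(t, "%a, %d %b %Y %H:%M:%S")) // Sat, 01 Jun 2024 17:04:05

        // ParseInLocation (added in v0.1.3 per the changelog above) sets the
        // default location instead of assuming UTC.
        jst := time.FixedZone("JST", 9*60*60)
        t2, err := timefmt.ParseInLocation("2024-06-01", "%Y-%m-%d", jst)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(t2.Location()) // JST
    }
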
diff --git a/vendor/github.com/itchyny/timefmt-go/format.go b/vendor/github.com/itchyny/timefmt-go/format.go
index 1d9b36724..b38202dff 100644
--- a/vendor/github.com/itchyny/timefmt-go/format.go
+++ b/vendor/github.com/itchyny/timefmt-go/format.go
@@ -1,6 +1,7 @@
package timefmt
import (
+ "math"
"strconv"
"time"
)
@@ -10,8 +11,7 @@ func Format(t time.Time, format string) string {
return string(AppendFormat(make([]byte, 0, 64), t, format))
}
-// AppendFormat appends formatted time to the bytes.
-// You can use this method to reduce allocations.
+// AppendFormat appends formatted time string to the buffer.
func AppendFormat(buf []byte, t time.Time, format string) []byte {
year, month, day := t.Date()
hour, min, sec := t.Clock()
@@ -74,7 +74,7 @@ func AppendFormat(buf []byte, t time.Time, format string) []byte {
b = format[i]
if b <= '9' && '0' <= b {
width = width*10 + int(b&0x0F)
- if width >= int((^uint(0)>>1)/10) {
+ if width >= math.MaxInt/10 {
width = maxWidth
}
} else {
@@ -132,19 +132,13 @@ func AppendFormat(buf []byte, t time.Time, format string) []byte {
case 'a':
buf = appendString(buf, shortWeekNames[t.Weekday()], width, padding, upper, swap)
case 'w':
- for ; width > 1; width-- {
- buf = append(buf, padding&paddingMask)
- }
- buf = append(buf, '0'+byte(t.Weekday()))
+ buf = appendInt(buf, int(t.Weekday()), width, padding)
case 'u':
w := int(t.Weekday())
if w == 0 {
w = 7
}
- for ; width > 1; width-- {
- buf = append(buf, padding&paddingMask)
- }
- buf = append(buf, '0'+byte(w))
+ buf = appendInt(buf, w, width, padding)
case 'V':
if width < 2 {
width = 2
@@ -193,17 +187,10 @@ func AppendFormat(buf []byte, t time.Time, format string) []byte {
}
buf = appendInt(buf, hour, width, padding)
case 'l':
- if width < 2 {
- width = 2
- }
if padding < ^paddingMask {
padding = ' '
}
- h := hour
- if h > 12 {
- h -= 12
- }
- buf = appendInt(buf, h, width, padding)
+ fallthrough
case 'I':
if width < 2 {
width = 2
@@ -215,18 +202,15 @@ func AppendFormat(buf []byte, t time.Time, format string) []byte {
h = 12
}
buf = appendInt(buf, h, width, padding)
+ case 'P':
+ swap = !(upper || swap)
+ fallthrough
case 'p':
if hour < 12 {
buf = appendString(buf, "AM", width, padding, upper, swap)
} else {
buf = appendString(buf, "PM", width, padding, upper, swap)
}
- case 'P':
- if hour < 12 {
- buf = appendString(buf, "am", width, padding, upper, swap)
- } else {
- buf = appendString(buf, "pm", width, padding, upper, swap)
- }
case 'M':
if width < 2 {
width = 2
@@ -271,14 +255,14 @@ func AppendFormat(buf []byte, t time.Time, format string) []byte {
if buf[k] == ' ' {
buf[k-1], buf[k] = buf[k], buf[k-1]
}
- if k = offset % 3600; colons <= 2 || k != 0 {
+ if offset %= 3600; colons <= 2 || offset != 0 {
if colons != 0 {
buf = append(buf, ':')
}
- buf = appendInt(buf, k/60, 2, '0')
- if k %= 60; colons == 2 || colons == 3 && k != 0 {
+ buf = appendInt(buf, offset/60, 2, '0')
+ if offset %= 60; colons == 2 || colons == 3 && offset != 0 {
buf = append(buf, ':')
- buf = appendInt(buf, k, 2, '0')
+ buf = appendInt(buf, offset, 2, '0')
}
}
colons = 0
@@ -294,9 +278,7 @@ func AppendFormat(buf []byte, t time.Time, format string) []byte {
copy(buf[k:], buf[j:])
buf = buf[:l]
if padding&paddingMask == '0' {
- for ; k > i; k-- {
- buf[k-1], buf[k] = buf[k], buf[k-1]
- }
+ buf[i], buf[k] = buf[k], buf[i]
}
}
case ':':
@@ -444,7 +426,7 @@ func appendString(buf []byte, str string, width int, padding byte, upper, swap b
}
switch {
case swap:
- if str[len(str)-1] < 'a' {
+ if str[1] < 'a' {
for _, b := range []byte(str) {
buf = append(buf, b|0x20)
}
diff --git a/vendor/github.com/itchyny/timefmt-go/parse.go b/vendor/github.com/itchyny/timefmt-go/parse.go
index 45f05e58b..26ae0f0c2 100644
--- a/vendor/github.com/itchyny/timefmt-go/parse.go
+++ b/vendor/github.com/itchyny/timefmt-go/parse.go
@@ -3,36 +3,48 @@ package timefmt
import (
"errors"
"fmt"
+ "math"
"time"
)
// Parse time string using the format.
func Parse(source, format string) (t time.Time, err error) {
- year, month, day, hour, min, sec, nsec, loc := 1900, 1, 1, 0, 0, 0, 0, time.UTC
+ return parse(source, format, time.UTC, time.Local)
+}
+
+// ParseInLocation parses time string with the default location.
+// The location is also used to parse the time zone name (%Z).
+func ParseInLocation(source, format string, loc *time.Location) (t time.Time, err error) {
+ return parse(source, format, loc, loc)
+}
+
+func parse(source, format string, loc, base *time.Location) (t time.Time, err error) {
+ year, month, day, hour, min, sec, nsec := 1900, 1, 0, 0, 0, 0, 0
defer func() {
if err != nil {
err = fmt.Errorf("failed to parse %q with %q: %w", source, format, err)
}
}()
- var j, century, yday, colons int
- var pm bool
+ var j, week, weekday, yday, colons int
+ century, weekstart := -1, time.Weekday(-1)
+ var pm, hasISOYear, hasZoneName, hasZoneOffset bool
var pending string
for i, l := 0, len(source); i < len(format); i++ {
if b := format[i]; b == '%' {
i++
if i == len(format) {
- err = errors.New("stray %")
+ err = errors.New(`stray "%"`)
return
}
b = format[i]
L:
switch b {
case 'Y':
- if year, j, err = parseNumber(source, j, 4, 'Y'); err != nil {
+ if year, j, err = parseNumber(source, j, 4, 0, 9999, 'Y'); err != nil {
return
}
case 'y':
- if year, j, err = parseNumber(source, j, 2, 'y'); err != nil {
+ if year, j, err = parseNumber(source, j, 2, 0, 99, 'y'); err != nil {
return
}
if year < 69 {
@@ -41,65 +53,85 @@ func Parse(source, format string) (t time.Time, err error) {
year += 1900
}
case 'C':
- if century, j, err = parseNumber(source, j, 2, 'C'); err != nil {
+ if century, j, err = parseNumber(source, j, 2, 0, 99, 'C'); err != nil {
return
}
case 'g':
- if year, j, err = parseNumber(source, j, 2, b); err != nil {
+ if year, j, err = parseNumber(source, j, 2, 0, 99, b); err != nil {
return
}
year += 2000
+ hasISOYear = true
case 'G':
- if year, j, err = parseNumber(source, j, 4, b); err != nil {
+ if year, j, err = parseNumber(source, j, 4, 0, 9999, b); err != nil {
return
}
+ hasISOYear = true
case 'm':
- if month, j, err = parseNumber(source, j, 2, 'm'); err != nil {
+ if month, j, err = parseNumber(source, j, 2, 1, 12, 'm'); err != nil {
return
}
case 'B':
- if month, j, err = lookup(source, j, longMonthNames, 'B'); err != nil {
+ if month, j, err = parseAny(source, j, longMonthNames, 'B'); err != nil {
return
}
case 'b', 'h':
- if month, j, err = lookup(source, j, shortMonthNames, b); err != nil {
+ if month, j, err = parseAny(source, j, shortMonthNames, b); err != nil {
return
}
case 'A':
- if _, j, err = lookup(source, j, longWeekNames, 'A'); err != nil {
+ if weekday, j, err = parseAny(source, j, longWeekNames, 'A'); err != nil {
return
}
case 'a':
- if _, j, err = lookup(source, j, shortWeekNames, 'a'); err != nil {
+ if weekday, j, err = parseAny(source, j, shortWeekNames, 'a'); err != nil {
return
}
case 'w':
- if j >= l || source[j] < '0' || '6' < source[j] {
- err = parseFormatError(b)
+ if weekday, j, err = parseNumber(source, j, 1, 0, 6, 'w'); err != nil {
return
}
- j++
+ weekday++
case 'u':
- if j >= l || source[j] < '1' || '7' < source[j] {
- err = parseFormatError(b)
+ if weekday, j, err = parseNumber(source, j, 1, 1, 7, 'u'); err != nil {
return
}
- j++
- case 'V', 'U', 'W':
- if _, j, err = parseNumber(source, j, 2, b); err != nil {
+ weekday = weekday%7 + 1
+ case 'V':
+ if week, j, err = parseNumber(source, j, 2, 1, 53, b); err != nil {
+ return
+ }
+ weekstart = time.Thursday
+ if weekday == 0 {
+ weekday = 2
+ }
+ case 'U':
+ if week, j, err = parseNumber(source, j, 2, 0, 53, b); err != nil {
+ return
+ }
+ weekstart = time.Sunday
+ if weekday == 0 {
+ weekday = 1
+ }
+ case 'W':
+ if week, j, err = parseNumber(source, j, 2, 0, 53, b); err != nil {
return
}
+ weekstart = time.Monday
+ if weekday == 0 {
+ weekday = 2
+ }
case 'e':
if j < l && source[j] == ' ' {
j++
}
fallthrough
case 'd':
- if day, j, err = parseNumber(source, j, 2, b); err != nil {
+ if day, j, err = parseNumber(source, j, 2, 1, 31, b); err != nil {
return
}
case 'j':
- if yday, j, err = parseNumber(source, j, 3, 'j'); err != nil {
+ if yday, j, err = parseNumber(source, j, 3, 1, 366, 'j'); err != nil {
return
}
case 'k':
@@ -108,7 +140,7 @@ func Parse(source, format string) (t time.Time, err error) {
}
fallthrough
case 'H':
- if hour, j, err = parseNumber(source, j, 2, b); err != nil {
+ if hour, j, err = parseNumber(source, j, 2, 0, 23, b); err != nil {
return
}
case 'l':
@@ -117,29 +149,29 @@ func Parse(source, format string) (t time.Time, err error) {
}
fallthrough
case 'I':
- if hour, j, err = parseNumber(source, j, 2, b); err != nil {
+ if hour, j, err = parseNumber(source, j, 2, 1, 12, b); err != nil {
return
}
if hour == 12 {
hour = 0
}
- case 'p', 'P':
+ case 'P', 'p':
var ampm int
- if ampm, j, err = lookup(source, j, []string{"AM", "PM"}, 'p'); err != nil {
+ if ampm, j, err = parseAny(source, j, []string{"AM", "PM"}, b); err != nil {
return
}
pm = ampm == 2
case 'M':
- if min, j, err = parseNumber(source, j, 2, 'M'); err != nil {
+ if min, j, err = parseNumber(source, j, 2, 0, 59, 'M'); err != nil {
return
}
case 'S':
- if sec, j, err = parseNumber(source, j, 2, 'S'); err != nil {
+ if sec, j, err = parseNumber(source, j, 2, 0, 60, 'S'); err != nil {
return
}
case 's':
var unix int
- if unix, j, err = parseNumber(source, j, 10, 's'); err != nil {
+ if unix, j, err = parseNumber(source, j, 10, 0, math.MaxInt, 's'); err != nil {
return
}
t = time.Unix(int64(unix), 0).In(time.UTC)
@@ -148,28 +180,34 @@ func Parse(source, format string) (t time.Time, err error) {
hour, min, sec = t.Clock()
month = int(mon)
case 'f':
- var msec, k, d int
- if msec, k, err = parseNumber(source, j, 6, 'f'); err != nil {
+ usec, i := 0, j
+ if usec, j, err = parseNumber(source, j, 6, 0, 999999, 'f'); err != nil {
return
}
- nsec = msec * 1000
- for j, d = k, k-j; d < 6; d++ {
- nsec *= 10
+ for i = j - i; i < 6; i++ {
+ usec *= 10
}
+ nsec = usec * 1000
case 'Z':
- k := j
- for ; k < l; k++ {
- if c := source[k]; c < 'A' || 'Z' < c {
+ i := j
+ for ; j < l; j++ {
+ if c := source[j]; c < 'A' || 'Z' < c {
break
}
}
- t, err = time.Parse("MST", source[j:k])
+ t, err = time.ParseInLocation("MST", source[i:j], base)
if err != nil {
- err = fmt.Errorf(`cannot parse %q with "%%Z"`, source[j:k])
+ err = fmt.Errorf(`cannot parse %q with "%%Z"`, source[i:j])
return
}
- loc = t.Location()
- j = k
+ if hasZoneOffset {
+ name, _ := t.Zone()
+ _, offset := locationZone(loc)
+ loc = time.FixedZone(name, offset)
+ } else {
+ loc = t.Location()
+ }
+ hasZoneName = true
case 'z':
if j >= l {
err = parseZFormatError(colons)
@@ -181,47 +219,49 @@ func Parse(source, format string) (t time.Time, err error) {
sign = -1
fallthrough
case '+':
- var hour, min, sec, k int
- if hour, k, _ = parseNumber(source, j+1, 2, 'z'); k != j+3 {
+ hour, min, sec, i := 0, 0, 0, j
+ if hour, j, _ = parseNumber(source, j+1, 2, 0, 23, 'z'); j != i+3 {
err = parseZFormatError(colons)
return
}
- if j = k; j >= l || source[j] != ':' {
- switch colons {
- case 1:
- err = errors.New("expected ':' for %:z")
- return
- case 2:
- err = errors.New("expected ':' for %::z")
+ if j >= l || source[j] != ':' {
+ if colons > 0 {
+ err = expectedColonForZFormatError(colons)
return
}
} else if j++; colons == 0 {
colons = 4
}
- if min, k, _ = parseNumber(source, j, 2, 'z'); k != j+2 {
- if colons == 0 {
- k = j
- } else {
+ i = j
+ if min, j, _ = parseNumber(source, j, 2, 0, 59, 'z'); j != i+2 {
+ if colons > 0 {
err = parseZFormatError(colons & 3)
return
}
- }
- if j = k; colons > 1 {
+ j = i
+ } else if colons > 1 {
if j >= l || source[j] != ':' {
if colons == 2 {
- err = errors.New("expected ':' for %::z")
- return
- }
- } else if sec, k, _ = parseNumber(source, j+1, 2, 'z'); k != j+3 {
- if colons == 2 {
- err = parseZFormatError(colons)
+ err = expectedColonForZFormatError(colons)
return
}
} else {
- j = k
+ i = j
+ if sec, j, _ = parseNumber(source, j+1, 2, 0, 59, 'z'); j != i+3 {
+ if colons == 2 {
+ err = parseZFormatError(colons)
+ return
+ }
+ j = i
+ }
}
}
- loc, colons = time.FixedZone("", sign*((hour*60+min)*60+sec)), 0
+ var name string
+ if hasZoneName {
+ name, _ = locationZone(loc)
+ }
+ loc, colons = time.FixedZone(name, sign*((hour*60+min)*60+sec)), 0
+ hasZoneOffset = true
case 'Z':
loc, colons, j = time.UTC, 0, j+1
default:
@@ -236,40 +276,32 @@ func Parse(source, format string) (t time.Time, err error) {
}
j++
} else {
- if i++; i == len(format) {
- err = errors.New(`expected 'z' after "%:"`)
- return
- } else if b = format[i]; b == 'z' {
- colons = 1
- } else if b != ':' {
- err = errors.New(`expected 'z' after "%:"`)
- return
- } else if i++; i == len(format) {
- err = errors.New(`expected 'z' after "%::"`)
- return
- } else if b = format[i]; b == 'z' {
- colons = 2
- } else {
- err = errors.New(`expected 'z' after "%::"`)
- return
+ for colons = 1; colons <= 2; colons++ {
+ if i++; i == len(format) {
+ break
+ } else if b = format[i]; b == 'z' {
+ goto L
+ } else if b != ':' || colons == 2 {
+ break
+ }
}
- goto L
+ err = expectedZAfterColonError(colons)
+ return
}
case 't', 'n':
- k := j
+ i := j
K:
- for ; k < l; k++ {
- switch source[k] {
+ for ; j < l; j++ {
+ switch source[j] {
case ' ', '\t', '\n', '\v', '\f', '\r':
default:
break K
}
}
- if k == j {
- err = fmt.Errorf("expected a space for %%%c", b)
+ if i == j {
+ err = fmt.Errorf(`expected a space for "%%%c"`, b)
return
}
- j = k
case '%':
if j >= l || source[j] != b {
err = expectedFormatError(b)
@@ -282,7 +314,7 @@ func Parse(source, format string) (t time.Time, err error) {
if pending, ok = compositions[b]; ok {
break
}
- err = fmt.Errorf(`unexpected format: "%%%c"`, b)
+ err = fmt.Errorf(`unexpected format "%%%c"`, b)
return
}
if j >= l || source[j] != b {
@@ -295,7 +327,7 @@ func Parse(source, format string) (t time.Time, err error) {
b, pending = pending[0], pending[1:]
goto L
}
- } else if j >= len(source) || source[j] != b {
+ } else if j >= l || source[j] != b {
err = expectedFormatError(b)
return
} else {
@@ -303,25 +335,52 @@ func Parse(source, format string) (t time.Time, err error) {
}
}
if j < len(source) {
- err = fmt.Errorf("unconverted string: %q", source[j:])
+ err = fmt.Errorf("unparsed string %q", source[j:])
return
}
if pm {
hour += 12
}
- if century > 0 {
+ if century >= 0 {
year = century*100 + year%100
}
- if yday > 0 {
- return time.Date(year, time.January, 1, hour, min, sec, nsec, loc).AddDate(0, 0, yday-1), nil
+ if day == 0 {
+ if yday > 0 {
+ if hasISOYear {
+ err = errors.New(`use "%Y" to parse non-ISO year for "%j"`)
+ return
+ }
+ return time.Date(year, time.January, yday, hour, min, sec, nsec, loc), nil
+ }
+ if weekstart >= time.Sunday {
+ if weekstart == time.Thursday {
+ if !hasISOYear {
+ err = errors.New(`use "%G" to parse ISO year for "%V"`)
+ return
+ }
+ } else if hasISOYear {
+ err = errors.New(`use "%Y" to parse non-ISO year for "%U" or "%W"`)
+ return
+ }
+ if weekstart > time.Sunday && weekday == 1 {
+ week++
+ }
+ t := time.Date(year, time.January, -int(weekstart), hour, min, sec, nsec, loc)
+ return t.AddDate(0, 0, week*7-int(t.Weekday())+weekday-1), nil
+ }
+ day = 1
}
return time.Date(year, time.Month(month), day, hour, min, sec, nsec, loc), nil
}
+func locationZone(loc *time.Location) (name string, offset int) {
+ return time.Date(2000, time.January, 1, 0, 0, 0, 0, loc).Zone()
+}
+
type parseFormatError byte
func (err parseFormatError) Error() string {
- return fmt.Sprintf("cannot parse %%%c", byte(err))
+ return fmt.Sprintf(`cannot parse "%%%c"`, byte(err))
}
type expectedFormatError byte
@@ -333,46 +392,51 @@ func (err expectedFormatError) Error() string {
type parseZFormatError int
func (err parseZFormatError) Error() string {
- switch int(err) {
- case 0:
- return "cannot parse %z"
- case 1:
- return "cannot parse %:z"
- default:
- return "cannot parse %::z"
- }
+ return `cannot parse "%` + `::z"`[2-err:]
+}
+
+type expectedColonForZFormatError int
+
+func (err expectedColonForZFormatError) Error() string {
+ return `expected ':' for "%` + `::z"`[2-err:]
+}
+
+type expectedZAfterColonError int
+
+func (err expectedZAfterColonError) Error() string {
+ return `expected 'z' after "%` + `::"`[2-err:]
}
-func parseNumber(source string, min, size int, format byte) (int, int, error) {
- var val int
- if l := len(source); min+size > l {
+func parseNumber(source string, index, size, min, max int, format byte) (int, int, error) {
+ var value int
+ if l := len(source); index+size > l {
size = l
} else {
- size += min
+ size += index
}
- i := min
+ i := index
for ; i < size; i++ {
if b := source[i]; '0' <= b && b <= '9' {
- val = val*10 + int(b&0x0F)
+ value = value*10 + int(b&0x0F)
} else {
break
}
}
- if i == min {
+ if i == index || value < min || max < value {
return 0, 0, parseFormatError(format)
}
- return val, i, nil
+ return value, i, nil
}
-func lookup(source string, min int, candidates []string, format byte) (int, int, error) {
+func parseAny(source string, index int, candidates []string, format byte) (int, int, error) {
L:
for i, xs := range candidates {
- j := min
+ j := index
for k := 0; k < len(xs); k, j = k+1, j+1 {
if j >= len(source) {
continue L
}
- if x, y := xs[k], source[j]; x != y && x|('a'-'A') != y|('a'-'A') {
+ if x, y := xs[k], source[j]; x != y && x|0x20 != y|0x20 {
continue L
}
}
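
The parse.go rewrite above adds range checking to parseNumber, week-number handling for %U, %V, and %W, and a new ParseInLocation entry point whose signature appears in the hunk. A brief usage sketch under those assumptions (the source string, format, and time zone are examples only):

package main

import (
	"fmt"
	"time"

	timefmt "github.com/itchyny/timefmt-go"
)

func main() {
	// Parse interprets zone-less input as UTC, as before.
	t, err := timefmt.Parse("2023-07-18 15:04:05", "%Y-%m-%d %H:%M:%S")
	if err != nil {
		panic(err)
	}
	fmt.Println(t)

	// ParseInLocation, added in this version, uses the supplied location
	// both as the default zone and when resolving a %Z zone name.
	loc, err := time.LoadLocation("Asia/Tokyo") // requires tzdata on the host
	if err != nil {
		panic(err)
	}
	t, err = timefmt.ParseInLocation("2023-07-18 15:04:05", "%Y-%m-%d %H:%M:%S", loc)
	if err != nil {
		panic(err)
	}
	fmt.Println(t)
}
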
diff --git a/vendor/github.com/itchyny/timefmt-go/timefmt.go b/vendor/github.com/itchyny/timefmt-go/timefmt.go
new file mode 100644
index 000000000..45bf6ae90
--- /dev/null
+++ b/vendor/github.com/itchyny/timefmt-go/timefmt.go
@@ -0,0 +1,2 @@
+// Package timefmt provides functions for formatting and parsing date time strings.
+package timefmt
diff --git a/vendor/github.com/mattn/go-runewidth/.travis.yml b/vendor/github.com/mattn/go-runewidth/.travis.yml
deleted file mode 100644
index 6a21813a3..000000000
--- a/vendor/github.com/mattn/go-runewidth/.travis.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-language: go
-sudo: false
-go:
- - 1.13.x
- - tip
-
-before_install:
- - go get -t -v ./...
-
-script:
- - go generate
- - git diff --cached --exit-code
- - ./go.test.sh
-
-after_success:
- - bash <(curl -s https://codecov.io/bash)
diff --git a/vendor/github.com/mattn/go-runewidth/README.md b/vendor/github.com/mattn/go-runewidth/README.md
index aa56ab96c..5e2cfd98c 100644
--- a/vendor/github.com/mattn/go-runewidth/README.md
+++ b/vendor/github.com/mattn/go-runewidth/README.md
@@ -1,7 +1,7 @@
go-runewidth
============
-[![Build Status](https://travis-ci.org/mattn/go-runewidth.png?branch=master)](https://travis-ci.org/mattn/go-runewidth)
+[![Build Status](https://github.com/mattn/go-runewidth/workflows/test/badge.svg?branch=master)](https://github.com/mattn/go-runewidth/actions?query=workflow%3Atest)
[![Codecov](https://codecov.io/gh/mattn/go-runewidth/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-runewidth)
[![GoDoc](https://godoc.org/github.com/mattn/go-runewidth?status.svg)](http://godoc.org/github.com/mattn/go-runewidth)
[![Go Report Card](https://goreportcard.com/badge/github.com/mattn/go-runewidth)](https://goreportcard.com/report/github.com/mattn/go-runewidth)
diff --git a/vendor/github.com/mattn/go-runewidth/go.test.sh b/vendor/github.com/mattn/go-runewidth/go.test.sh
deleted file mode 100644
index 012162b07..000000000
--- a/vendor/github.com/mattn/go-runewidth/go.test.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-echo "" > coverage.txt
-
-for d in $(go list ./... | grep -v vendor); do
- go test -race -coverprofile=profile.out -covermode=atomic "$d"
- if [ -f profile.out ]; then
- cat profile.out >> coverage.txt
- rm profile.out
- fi
-done
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth.go b/vendor/github.com/mattn/go-runewidth/runewidth.go
index 19f8e0449..7dfbb3be9 100644
--- a/vendor/github.com/mattn/go-runewidth/runewidth.go
+++ b/vendor/github.com/mattn/go-runewidth/runewidth.go
@@ -2,6 +2,9 @@ package runewidth
import (
"os"
+ "strings"
+
+ "github.com/rivo/uniseg"
)
//go:generate go run script/generate.go
@@ -10,11 +13,14 @@ var (
// EastAsianWidth will be set true if the current locale is CJK
EastAsianWidth bool
- // ZeroWidthJoiner is flag to set to use UTR#51 ZWJ
- ZeroWidthJoiner bool
+ // StrictEmojiNeutral should be set to false when handling broken fonts
+ StrictEmojiNeutral bool = true
// DefaultCondition is a condition in current locale
- DefaultCondition = &Condition{}
+ DefaultCondition = &Condition{
+ EastAsianWidth: false,
+ StrictEmojiNeutral: true,
+ }
)
func init() {
@@ -29,8 +35,13 @@ func handleEnv() {
EastAsianWidth = env == "1"
}
// update DefaultCondition
- DefaultCondition.EastAsianWidth = EastAsianWidth
- DefaultCondition.ZeroWidthJoiner = ZeroWidthJoiner
+ if DefaultCondition.EastAsianWidth != EastAsianWidth {
+ DefaultCondition.EastAsianWidth = EastAsianWidth
+ if len(DefaultCondition.combinedLut) > 0 {
+ DefaultCondition.combinedLut = DefaultCondition.combinedLut[:0]
+ CreateLUT()
+ }
+ }
}
type interval struct {
@@ -85,63 +96,97 @@ var nonprint = table{
// Condition have flag EastAsianWidth whether the current locale is CJK or not.
type Condition struct {
- EastAsianWidth bool
- ZeroWidthJoiner bool
+ combinedLut []byte
+ EastAsianWidth bool
+ StrictEmojiNeutral bool
}
// NewCondition return new instance of Condition which is current locale.
func NewCondition() *Condition {
return &Condition{
- EastAsianWidth: EastAsianWidth,
- ZeroWidthJoiner: ZeroWidthJoiner,
+ EastAsianWidth: EastAsianWidth,
+ StrictEmojiNeutral: StrictEmojiNeutral,
}
}
// RuneWidth returns the number of cells in r.
// See http://www.unicode.org/reports/tr11/
func (c *Condition) RuneWidth(r rune) int {
- switch {
- case r < 0 || r > 0x10FFFF || inTables(r, nonprint, combining, notassigned):
+ if r < 0 || r > 0x10FFFF {
return 0
- case (c.EastAsianWidth && IsAmbiguousWidth(r)) || inTables(r, doublewidth):
- return 2
- default:
- return 1
}
-}
-
-func (c *Condition) stringWidth(s string) (width int) {
- for _, r := range []rune(s) {
- width += c.RuneWidth(r)
+ if len(c.combinedLut) > 0 {
+ return int(c.combinedLut[r>>1]>>(uint(r&1)*4)) & 3
}
- return width
-}
-
-func (c *Condition) stringWidthZeroJoiner(s string) (width int) {
- r1, r2 := rune(0), rune(0)
- for _, r := range []rune(s) {
- if r == 0xFE0E || r == 0xFE0F {
- continue
+ // optimized version, verified by TestRuneWidthChecksums()
+ if !c.EastAsianWidth {
+ switch {
+ case r < 0x20:
+ return 0
+ case (r >= 0x7F && r <= 0x9F) || r == 0xAD: // nonprint
+ return 0
+ case r < 0x300:
+ return 1
+ case inTable(r, narrow):
+ return 1
+ case inTables(r, nonprint, combining):
+ return 0
+ case inTable(r, doublewidth):
+ return 2
+ default:
+ return 1
}
- w := c.RuneWidth(r)
- if r2 == 0x200D && inTables(r, emoji) && inTables(r1, emoji) {
- if width < w {
- width = w
- }
- } else {
- width += w
+ } else {
+ switch {
+ case inTables(r, nonprint, combining):
+ return 0
+ case inTable(r, narrow):
+ return 1
+ case inTables(r, ambiguous, doublewidth):
+ return 2
+ case !c.StrictEmojiNeutral && inTables(r, ambiguous, emoji, narrow):
+ return 2
+ default:
+ return 1
}
- r1, r2 = r2, r
}
- return width
+}
+
+// CreateLUT will create an in-memory lookup table of 557056 bytes for faster operation.
+// This should not be called concurrently with other operations on c.
+// If options in c are changed, CreateLUT should be called again.
+func (c *Condition) CreateLUT() {
+ const max = 0x110000
+ lut := c.combinedLut
+ if len(c.combinedLut) != 0 {
+ // Remove so we don't use it.
+ c.combinedLut = nil
+ } else {
+ lut = make([]byte, max/2)
+ }
+ for i := range lut {
+ i32 := int32(i * 2)
+ x0 := c.RuneWidth(i32)
+ x1 := c.RuneWidth(i32 + 1)
+ lut[i] = uint8(x0) | uint8(x1)<<4
+ }
+ c.combinedLut = lut
}
// StringWidth return width as you can see
func (c *Condition) StringWidth(s string) (width int) {
- if c.ZeroWidthJoiner {
- return c.stringWidthZeroJoiner(s)
+ g := uniseg.NewGraphemes(s)
+ for g.Next() {
+ var chWidth int
+ for _, r := range g.Runes() {
+ chWidth = c.RuneWidth(r)
+ if chWidth > 0 {
+ break // Our best guess at this point is to use the width of the first non-zero-width rune.
+ }
+ }
+ width += chWidth
}
- return c.stringWidth(s)
+ return
}
// Truncate return string truncated with w cells
@@ -149,27 +194,69 @@ func (c *Condition) Truncate(s string, w int, tail string) string {
if c.StringWidth(s) <= w {
return s
}
- r := []rune(s)
- tw := c.StringWidth(tail)
- w -= tw
- width := 0
- i := 0
- for ; i < len(r); i++ {
- cw := c.RuneWidth(r[i])
- if width+cw > w {
+ w -= c.StringWidth(tail)
+ var width int
+ pos := len(s)
+ g := uniseg.NewGraphemes(s)
+ for g.Next() {
+ var chWidth int
+ for _, r := range g.Runes() {
+ chWidth = c.RuneWidth(r)
+ if chWidth > 0 {
+ break // See StringWidth() for details.
+ }
+ }
+ if width+chWidth > w {
+ pos, _ = g.Positions()
break
}
- width += cw
+ width += chWidth
+ }
+ return s[:pos] + tail
+}
+
+// TruncateLeft cuts w cells from the beginning of the `s`.
+func (c *Condition) TruncateLeft(s string, w int, prefix string) string {
+ if c.StringWidth(s) <= w {
+ return prefix
+ }
+
+ var width int
+ pos := len(s)
+
+ g := uniseg.NewGraphemes(s)
+ for g.Next() {
+ var chWidth int
+ for _, r := range g.Runes() {
+ chWidth = c.RuneWidth(r)
+ if chWidth > 0 {
+ break // See StringWidth() for details.
+ }
+ }
+
+ if width+chWidth > w {
+ if width < w {
+ _, pos = g.Positions()
+ prefix += strings.Repeat(" ", width+chWidth-w)
+ } else {
+ pos, _ = g.Positions()
+ }
+
+ break
+ }
+
+ width += chWidth
}
- return string(r[0:i]) + tail
+
+ return prefix + s[pos:]
}
// Wrap return string wrapped with w cells
func (c *Condition) Wrap(s string, w int) string {
width := 0
out := ""
- for _, r := range []rune(s) {
- cw := RuneWidth(r)
+ for _, r := range s {
+ cw := c.RuneWidth(r)
if r == '\n' {
out += string(r)
width = 0
@@ -241,6 +328,11 @@ func Truncate(s string, w int, tail string) string {
return DefaultCondition.Truncate(s, w, tail)
}
+// TruncateLeft cuts w cells from the beginning of the `s`.
+func TruncateLeft(s string, w int, prefix string) string {
+ return DefaultCondition.TruncateLeft(s, w, prefix)
+}
+
// Wrap return string wrapped with w cells
func Wrap(s string, w int) string {
return DefaultCondition.Wrap(s, w)
@@ -255,3 +347,12 @@ func FillLeft(s string, w int) string {
func FillRight(s string, w int) string {
return DefaultCondition.FillRight(s, w)
}
+
+// CreateLUT will create an in-memory lookup table of 557056 bytes for faster operation.
+// This should not be called concurrently with other operations.
+func CreateLUT() {
+ if len(DefaultCondition.combinedLut) > 0 {
+ return
+ }
+ DefaultCondition.CreateLUT()
+}
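
The go-runewidth changes above replace the rune-by-rune width walk with grapheme clusters from github.com/rivo/uniseg and add two new package-level helpers, TruncateLeft and CreateLUT, visible in the hunks. A small sketch of how a caller such as a console UI might use them (the sample string and cell widths are arbitrary):

package main

import (
	"fmt"

	"github.com/mattn/go-runewidth"
)

func main() {
	// Optional: trade ~544 KiB of memory for faster width lookups.
	runewidth.CreateLUT()

	s := "コンニチハ, 世界!"
	fmt.Println(runewidth.StringWidth(s))            // display width in terminal cells
	fmt.Println(runewidth.Truncate(s, 10, "..."))    // trim from the right to 10 cells
	fmt.Println(runewidth.TruncateLeft(s, 4, "...")) // new: cut 4 cells from the left
}
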
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go b/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go
index 7d99f6e52..84b6528df 100644
--- a/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go
@@ -1,3 +1,4 @@
+//go:build appengine
// +build appengine
package runewidth
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_js.go b/vendor/github.com/mattn/go-runewidth/runewidth_js.go
index c5fdf40ba..c2abbc2db 100644
--- a/vendor/github.com/mattn/go-runewidth/runewidth_js.go
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_js.go
@@ -1,5 +1,5 @@
-// +build js
-// +build !appengine
+//go:build js && !appengine
+// +build js,!appengine
package runewidth
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_posix.go b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go
index 480ad7485..5a31d738e 100644
--- a/vendor/github.com/mattn/go-runewidth/runewidth_posix.go
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go
@@ -1,6 +1,5 @@
-// +build !windows
-// +build !js
-// +build !appengine
+//go:build !windows && !js && !appengine
+// +build !windows,!js,!appengine
package runewidth
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_table.go b/vendor/github.com/mattn/go-runewidth/runewidth_table.go
index b27d77d89..e5d890c26 100644
--- a/vendor/github.com/mattn/go-runewidth/runewidth_table.go
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_table.go
@@ -124,8 +124,10 @@ var ambiguous = table{
{0x1F18F, 0x1F190}, {0x1F19B, 0x1F1AC}, {0xE0100, 0xE01EF},
{0xF0000, 0xFFFFD}, {0x100000, 0x10FFFD},
}
-var notassigned = table{
- {0x27E6, 0x27ED}, {0x2985, 0x2986},
+var narrow = table{
+ {0x0020, 0x007E}, {0x00A2, 0x00A3}, {0x00A5, 0x00A6},
+ {0x00AC, 0x00AC}, {0x00AF, 0x00AF}, {0x27E6, 0x27ED},
+ {0x2985, 0x2986},
}
var neutral = table{
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_windows.go b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go
index d6a61777d..5f987a310 100644
--- a/vendor/github.com/mattn/go-runewidth/runewidth_windows.go
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go
@@ -1,5 +1,5 @@
-// +build windows
-// +build !appengine
+//go:build windows && !appengine
+// +build windows,!appengine
package runewidth
diff --git a/vendor/github.com/mudler/yip/LICENSE b/vendor/github.com/mudler/yip/LICENSE
deleted file mode 100644
index f288702d2..000000000
--- a/vendor/github.com/mudler/yip/LICENSE
+++ /dev/null
@@ -1,674 +0,0 @@
- GNU GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc.
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The GNU General Public License is a free, copyleft license for
-software and other kinds of works.
-
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-the GNU General Public License is intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users. We, the Free Software Foundation, use the
-GNU General Public License for most of our software; it applies also to
-any other work released this way by its authors. You can apply it to
-your programs, too.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
- To protect your rights, we need to prevent others from denying you
-these rights or asking you to surrender the rights. Therefore, you have
-certain responsibilities if you distribute copies of the software, or if
-you modify it: responsibilities to respect the freedom of others.
-
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must pass on to the recipients the same
-freedoms that you received. You must make sure that they, too, receive
-or can get the source code. And you must show them these terms so they
-know their rights.
-
- Developers that use the GNU GPL protect your rights with two steps:
-(1) assert copyright on the software, and (2) offer you this License
-giving you legal permission to copy, distribute and/or modify it.
-
- For the developers' and authors' protection, the GPL clearly explains
-that there is no warranty for this free software. For both users' and
-authors' sake, the GPL requires that modified versions be marked as
-changed, so that their problems will not be attributed erroneously to
-authors of previous versions.
-
- Some devices are designed to deny users access to install or run
-modified versions of the software inside them, although the manufacturer
-can do so. This is fundamentally incompatible with the aim of
-protecting users' freedom to change the software. The systematic
-pattern of such abuse occurs in the area of products for individuals to
-use, which is precisely where it is most unacceptable. Therefore, we
-have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we
-stand ready to extend this provision to those domains in future versions
-of the GPL, as needed to protect the freedom of users.
-
- Finally, every program is threatened constantly by software patents.
-States should not allow patents to restrict development and use of
-software on general-purpose computers, but in those that do, we wish to
-avoid the special danger that patents applied to a free program could
-make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
- 13. Use with the GNU Affero General Public License.
-
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU Affero General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the special requirements of the GNU Affero General Public License,
-section 13, concerning interaction through a network will apply to the
-combination as such.
-
- 14. Revised Versions of this License.
-
- The Free Software Foundation may publish revised and/or new versions of
-the GNU General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
- If the Program specifies that a proxy can decide which future
-versions of the GNU General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
- 15. Disclaimer of Warranty.
-
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year>  <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
- If the program does terminal interaction, make it output a short
-notice like this when it starts in an interactive mode:
-
- <program>  Copyright (C) <year>  <name of author>
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".
-
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
-<https://www.gnu.org/licenses/>.
-
- The GNU General Public License does not permit incorporating your program
-into proprietary programs. If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License. But first, please read
-<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/NOTICE b/vendor/github.com/rancher-sandbox/cloud-init/NOTICE
deleted file mode 100644
index b39ddfa5c..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/NOTICE
+++ /dev/null
@@ -1,5 +0,0 @@
-CoreOS Project
-Copyright 2014 CoreOS, Inc
-
-This product includes software developed at CoreOS, Inc.
-(http://www.coreos.com/).
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/config.go b/vendor/github.com/rancher-sandbox/cloud-init/config/config.go
deleted file mode 100644
index 881130ec3..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/config.go
+++ /dev/null
@@ -1,164 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-import (
- "fmt"
- "reflect"
- "regexp"
- "strings"
- "unicode"
-
- "github.com/coreos/yaml"
-)
-
-// CloudConfig encapsulates the entire cloud-config configuration file and maps
-// directly to YAML. Fields that cannot be set in the cloud-config (fields
-// used for internal use) have the YAML tag '-' so that they aren't marshalled.
-type CloudConfig struct {
- SSHAuthorizedKeys []string `yaml:"ssh_authorized_keys,omitempty"`
- WriteFiles []File `yaml:"write_files,omitempty"`
- Hostname string `yaml:"hostname,omitempty"`
- Users []User `yaml:"users,omitempty"`
- RunCmd []string `yaml:"runcmd,omitempty"`
-
- Partitioning GrowPart `yaml:"growpart"`
- // this one is legacy, can be removed when no more kip controllers use it
- MilpaFiles []File `yaml:"milpa_files,omitempty"`
- // Todo: add additional parameters supported by traditional cloud-init
-}
-
-func IsCloudConfig(userdata string) bool {
- header := strings.SplitN(userdata, "\n", 2)[0]
-
- // Trim trailing whitespaces
- header = strings.TrimRightFunc(header, unicode.IsSpace)
-
- return (header == "#cloud-config")
-}
-
-// NewCloudConfig instantiates a new CloudConfig from the given contents (a
-// string of YAML), returning any error encountered. It will ignore unknown
-// fields but log encountering them.
-func NewCloudConfig(contents string) (*CloudConfig, error) {
- yaml.UnmarshalMappingKeyTransform = func(nameIn string) (nameOut string) {
- return strings.Replace(nameIn, "-", "_", -1)
- }
- var cfg CloudConfig
- err := yaml.Unmarshal([]byte(contents), &cfg)
- //fmt.Printf("%+v\n", cfg)
- return &cfg, err
-}
-
-// Decode decodes the content of cloud config. Currently only WriteFiles section
-// supports several types of encoding and all of them are supported. After
-// decode operation, Encoding type is unset.
-func (cc *CloudConfig) Decode() error {
- for i, file := range cc.WriteFiles {
- content, err := DecodeContent(file.Content, file.Encoding)
- if err != nil {
- return err
- }
-
- cc.WriteFiles[i].Content = string(content)
- cc.WriteFiles[i].Encoding = ""
- }
-
- return nil
-}
-func (cc CloudConfig) String() string {
- bytes, err := yaml.Marshal(cc)
- if err != nil {
- return ""
- }
-
- stringified := string(bytes)
- stringified = fmt.Sprintf("#cloud-config\n%s", stringified)
-
- return stringified
-}
-
-// IsZero returns whether or not the parameter is the zero value for its type.
-// If the parameter is a struct, only the exported fields are considered.
-func IsZero(c interface{}) bool {
- return isZero(reflect.ValueOf(c))
-}
-
-type ErrorValid struct {
- Value string
- Valid string
- Field string
-}
-
-func (e ErrorValid) Error() string {
- return fmt.Sprintf("invalid value %q for option %q (valid options: %q)", e.Value, e.Field, e.Valid)
-}
-
-// AssertStructValid checks the fields in the structure and makes sure that
-// they contain valid values as specified by the 'valid' flag. Empty fields are
-// implicitly valid.
-func AssertStructValid(c interface{}) error {
- ct := reflect.TypeOf(c)
- cv := reflect.ValueOf(c)
- for i := 0; i < ct.NumField(); i++ {
- ft := ct.Field(i)
- if !isFieldExported(ft) {
- continue
- }
-
- if err := AssertValid(cv.Field(i), ft.Tag.Get("valid")); err != nil {
- err.Field = ft.Name
- return err
- }
- }
- return nil
-}
-
-// AssertValid checks to make sure that the given value is in the list of
-// valid values. Zero values are implicitly valid.
-func AssertValid(value reflect.Value, valid string) *ErrorValid {
- if valid == "" || isZero(value) {
- return nil
- }
-
- vs := fmt.Sprintf("%v", value.Interface())
- if m, _ := regexp.MatchString(valid, vs); m {
- return nil
- }
-
- return &ErrorValid{
- Value: vs,
- Valid: valid,
- }
-}
-
-func isZero(v reflect.Value) bool {
- switch v.Kind() {
- case reflect.Struct:
- vt := v.Type()
- for i := 0; i < v.NumField(); i++ {
- if isFieldExported(vt.Field(i)) && !isZero(v.Field(i)) {
- return false
- }
- }
- return true
- default:
- return v.Interface() == reflect.Zero(v.Type()).Interface()
- }
-}
-
-func isFieldExported(f reflect.StructField) bool {
- return f.PkgPath == ""
-}
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/decode.go b/vendor/github.com/rancher-sandbox/cloud-init/config/decode.go
deleted file mode 100644
index f5847aa96..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/decode.go
+++ /dev/null
@@ -1,56 +0,0 @@
-package config
-
-import (
- "bytes"
- "compress/gzip"
- "encoding/base64"
- "fmt"
-)
-
-func DecodeBase64Content(content string) ([]byte, error) {
- output, err := base64.StdEncoding.DecodeString(content)
-
- if err != nil {
- return nil, fmt.Errorf("Unable to decode base64: %q", err)
- }
-
- return output, nil
-}
-
-func DecodeGzipContent(content string) ([]byte, error) {
- gzr, err := gzip.NewReader(bytes.NewReader([]byte(content)))
-
- if err != nil {
- return nil, fmt.Errorf("Unable to decode gzip: %q", err)
- }
- defer gzr.Close()
-
- buf := new(bytes.Buffer)
- buf.ReadFrom(gzr)
-
- return buf.Bytes(), nil
-}
-
-func DecodeContent(content string, encoding string) ([]byte, error) {
- switch encoding {
- case "":
- return []byte(content), nil
-
- case "b64", "base64":
- return DecodeBase64Content(content)
-
- case "gz", "gzip":
- return DecodeGzipContent(content)
-
- case "gz+base64", "gzip+base64", "gz+b64", "gzip+b64":
- gz, err := DecodeBase64Content(content)
-
- if err != nil {
- return nil, err
- }
-
- return DecodeGzipContent(string(gz))
- }
-
- return nil, fmt.Errorf("Unsupported encoding %q", encoding)
-}
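
For context, a brief usage sketch (not part of the diff) of the removed cloud-init config package above; it assumes the pre-bump import path `github.com/rancher-sandbox/cloud-init/config` and only calls functions defined in the deleted files (`IsCloudConfig`, `NewCloudConfig`, `Decode`):

```go
package main

import (
	"fmt"
	"log"

	"github.com/rancher-sandbox/cloud-init/config"
)

func main() {
	// "aGVsbG8K" is base64 for "hello\n".
	userdata := "#cloud-config\n" +
		"hostname: node-1\n" +
		"write_files:\n" +
		"- path: /etc/motd\n" +
		"  encoding: b64\n" +
		"  content: aGVsbG8K\n"

	if !config.IsCloudConfig(userdata) {
		log.Fatal("not a #cloud-config document")
	}

	cc, err := config.NewCloudConfig(userdata)
	if err != nil {
		log.Fatal(err)
	}

	// Decode resolves the write_files encodings (b64, gzip, gz+base64, ...)
	// via DecodeContent and then clears the Encoding field.
	if err := cc.Decode(); err != nil {
		log.Fatal(err)
	}

	fmt.Println(cc.Hostname)            // node-1
	fmt.Print(cc.WriteFiles[0].Content) // hello
}
```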
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/etc_hosts.go b/vendor/github.com/rancher-sandbox/cloud-init/config/etc_hosts.go
deleted file mode 100644
index ba22bb6d3..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/etc_hosts.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-type EtcHosts string
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/etcd.go b/vendor/github.com/rancher-sandbox/cloud-init/config/etcd.go
deleted file mode 100644
index 4315f49ef..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/etcd.go
+++ /dev/null
@@ -1,67 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-type Etcd struct {
- Addr string `yaml:"addr" env:"ETCD_ADDR"`
- AdvertiseClientURLs string `yaml:"advertise_client_urls" env:"ETCD_ADVERTISE_CLIENT_URLS" deprecated:"etcd2 options no longer work for etcd"`
- BindAddr string `yaml:"bind_addr" env:"ETCD_BIND_ADDR"`
- CAFile string `yaml:"ca_file" env:"ETCD_CA_FILE"`
- CertFile string `yaml:"cert_file" env:"ETCD_CERT_FILE"`
- ClusterActiveSize int `yaml:"cluster_active_size" env:"ETCD_CLUSTER_ACTIVE_SIZE"`
- ClusterRemoveDelay float64 `yaml:"cluster_remove_delay" env:"ETCD_CLUSTER_REMOVE_DELAY"`
- ClusterSyncInterval float64 `yaml:"cluster_sync_interval" env:"ETCD_CLUSTER_SYNC_INTERVAL"`
- CorsOrigins string `yaml:"cors" env:"ETCD_CORS"`
- DataDir string `yaml:"data_dir" env:"ETCD_DATA_DIR"`
- Discovery string `yaml:"discovery" env:"ETCD_DISCOVERY"`
- DiscoveryFallback string `yaml:"discovery_fallback" env:"ETCD_DISCOVERY_FALLBACK" deprecated:"etcd2 options no longer work for etcd"`
- DiscoverySRV string `yaml:"discovery_srv" env:"ETCD_DISCOVERY_SRV" deprecated:"etcd2 options no longer work for etcd"`
- DiscoveryProxy string `yaml:"discovery_proxy" env:"ETCD_DISCOVERY_PROXY" deprecated:"etcd2 options no longer work for etcd"`
- ElectionTimeout int `yaml:"election_timeout" env:"ETCD_ELECTION_TIMEOUT" deprecated:"etcd2 options no longer work for etcd"`
- ForceNewCluster bool `yaml:"force_new_cluster" env:"ETCD_FORCE_NEW_CLUSTER" deprecated:"etcd2 options no longer work for etcd"`
- GraphiteHost string `yaml:"graphite_host" env:"ETCD_GRAPHITE_HOST"`
- HeartbeatInterval int `yaml:"heartbeat_interval" env:"ETCD_HEARTBEAT_INTERVAL" deprecated:"etcd2 options no longer work for etcd"`
- HTTPReadTimeout float64 `yaml:"http_read_timeout" env:"ETCD_HTTP_READ_TIMEOUT"`
- HTTPWriteTimeout float64 `yaml:"http_write_timeout" env:"ETCD_HTTP_WRITE_TIMEOUT"`
- InitialAdvertisePeerURLs string `yaml:"initial_advertise_peer_urls" env:"ETCD_INITIAL_ADVERTISE_PEER_URLS" deprecated:"etcd2 options no longer work for etcd"`
- InitialCluster string `yaml:"initial_cluster" env:"ETCD_INITIAL_CLUSTER" deprecated:"etcd2 options no longer work for etcd"`
- InitialClusterState string `yaml:"initial_cluster_state" env:"ETCD_INITIAL_CLUSTER_STATE" deprecated:"etcd2 options no longer work for etcd"`
- InitialClusterToken string `yaml:"initial_cluster_token" env:"ETCD_INITIAL_CLUSTER_TOKEN" deprecated:"etcd2 options no longer work for etcd"`
- KeyFile string `yaml:"key_file" env:"ETCD_KEY_FILE"`
- ListenClientURLs string `yaml:"listen_client_urls" env:"ETCD_LISTEN_CLIENT_URLS" deprecated:"etcd2 options no longer work for etcd"`
- ListenPeerURLs string `yaml:"listen_peer_urls" env:"ETCD_LISTEN_PEER_URLS" deprecated:"etcd2 options no longer work for etcd"`
- MaxResultBuffer int `yaml:"max_result_buffer" env:"ETCD_MAX_RESULT_BUFFER"`
- MaxRetryAttempts int `yaml:"max_retry_attempts" env:"ETCD_MAX_RETRY_ATTEMPTS"`
- MaxSnapshots int `yaml:"max_snapshots" env:"ETCD_MAX_SNAPSHOTS" deprecated:"etcd2 options no longer work for etcd"`
- MaxWALs int `yaml:"max_wals" env:"ETCD_MAX_WALS" deprecated:"etcd2 options no longer work for etcd"`
- Name string `yaml:"name" env:"ETCD_NAME"`
- PeerAddr string `yaml:"peer_addr" env:"ETCD_PEER_ADDR"`
- PeerBindAddr string `yaml:"peer_bind_addr" env:"ETCD_PEER_BIND_ADDR"`
- PeerCAFile string `yaml:"peer_ca_file" env:"ETCD_PEER_CA_FILE"`
- PeerCertFile string `yaml:"peer_cert_file" env:"ETCD_PEER_CERT_FILE"`
- PeerElectionTimeout int `yaml:"peer_election_timeout" env:"ETCD_PEER_ELECTION_TIMEOUT"`
- PeerHeartbeatInterval int `yaml:"peer_heartbeat_interval" env:"ETCD_PEER_HEARTBEAT_INTERVAL"`
- PeerKeyFile string `yaml:"peer_key_file" env:"ETCD_PEER_KEY_FILE"`
- Peers string `yaml:"peers" env:"ETCD_PEERS"`
- PeersFile string `yaml:"peers_file" env:"ETCD_PEERS_FILE"`
- Proxy string `yaml:"proxy" env:"ETCD_PROXY" deprecated:"etcd2 options no longer work for etcd"`
- RetryInterval float64 `yaml:"retry_interval" env:"ETCD_RETRY_INTERVAL"`
- Snapshot bool `yaml:"snapshot" env:"ETCD_SNAPSHOT"`
- SnapshotCount int `yaml:"snapshot_count" env:"ETCD_SNAPSHOTCOUNT"`
- StrTrace string `yaml:"trace" env:"ETCD_TRACE"`
- Verbose bool `yaml:"verbose" env:"ETCD_VERBOSE"`
- VeryVerbose bool `yaml:"very_verbose" env:"ETCD_VERY_VERBOSE"`
- VeryVeryVerbose bool `yaml:"very_very_verbose" env:"ETCD_VERY_VERY_VERBOSE"`
-}
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/etcd2.go b/vendor/github.com/rancher-sandbox/cloud-init/config/etcd2.go
deleted file mode 100644
index d0e0575a2..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/etcd2.go
+++ /dev/null
@@ -1,59 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-type Etcd2 struct {
- AdvertiseClientURLs string `yaml:"advertise_client_urls" env:"ETCD_ADVERTISE_CLIENT_URLS"`
- CAFile string `yaml:"ca_file" env:"ETCD_CA_FILE" deprecated:"ca_file obsoleted by trusted_ca_file and client_cert_auth"`
- CertFile string `yaml:"cert_file" env:"ETCD_CERT_FILE"`
- ClientCertAuth bool `yaml:"client_cert_auth" env:"ETCD_CLIENT_CERT_AUTH"`
- CorsOrigins string `yaml:"cors" env:"ETCD_CORS"`
- DataDir string `yaml:"data_dir" env:"ETCD_DATA_DIR"`
- Debug bool `yaml:"debug" env:"ETCD_DEBUG"`
- Discovery string `yaml:"discovery" env:"ETCD_DISCOVERY"`
- DiscoveryFallback string `yaml:"discovery_fallback" env:"ETCD_DISCOVERY_FALLBACK"`
- DiscoverySRV string `yaml:"discovery_srv" env:"ETCD_DISCOVERY_SRV"`
- DiscoveryProxy string `yaml:"discovery_proxy" env:"ETCD_DISCOVERY_PROXY"`
- ElectionTimeout int `yaml:"election_timeout" env:"ETCD_ELECTION_TIMEOUT"`
- EnablePprof bool `yaml:"enable_pprof" env:"ETCD_ENABLE_PPROF"`
- ForceNewCluster bool `yaml:"force_new_cluster" env:"ETCD_FORCE_NEW_CLUSTER"`
- HeartbeatInterval int `yaml:"heartbeat_interval" env:"ETCD_HEARTBEAT_INTERVAL"`
- InitialAdvertisePeerURLs string `yaml:"initial_advertise_peer_urls" env:"ETCD_INITIAL_ADVERTISE_PEER_URLS"`
- InitialCluster string `yaml:"initial_cluster" env:"ETCD_INITIAL_CLUSTER"`
- InitialClusterState string `yaml:"initial_cluster_state" env:"ETCD_INITIAL_CLUSTER_STATE"`
- InitialClusterToken string `yaml:"initial_cluster_token" env:"ETCD_INITIAL_CLUSTER_TOKEN"`
- KeyFile string `yaml:"key_file" env:"ETCD_KEY_FILE"`
- ListenClientURLs string `yaml:"listen_client_urls" env:"ETCD_LISTEN_CLIENT_URLS"`
- ListenPeerURLs string `yaml:"listen_peer_urls" env:"ETCD_LISTEN_PEER_URLS"`
- LogPackageLevels string `yaml:"log_package_levels" env:"ETCD_LOG_PACKAGE_LEVELS"`
- MaxSnapshots int `yaml:"max_snapshots" env:"ETCD_MAX_SNAPSHOTS"`
- MaxWALs int `yaml:"max_wals" env:"ETCD_MAX_WALS"`
- Name string `yaml:"name" env:"ETCD_NAME"`
- PeerCAFile string `yaml:"peer_ca_file" env:"ETCD_PEER_CA_FILE" deprecated:"peer_ca_file obsoleted peer_trusted_ca_file and peer_client_cert_auth"`
- PeerCertFile string `yaml:"peer_cert_file" env:"ETCD_PEER_CERT_FILE"`
- PeerKeyFile string `yaml:"peer_key_file" env:"ETCD_PEER_KEY_FILE"`
- PeerClientCertAuth bool `yaml:"peer_client_cert_auth" env:"ETCD_PEER_CLIENT_CERT_AUTH"`
- PeerTrustedCAFile string `yaml:"peer_trusted_ca_file" env:"ETCD_PEER_TRUSTED_CA_FILE"`
- Proxy string `yaml:"proxy" env:"ETCD_PROXY" valid:"^(on|off|readonly)$"`
- ProxyDialTimeout int `yaml:"proxy_dial_timeout" env:"ETCD_PROXY_DIAL_TIMEOUT"`
- ProxyFailureWait int `yaml:"proxy_failure_wait" env:"ETCD_PROXY_FAILURE_WAIT"`
- ProxyReadTimeout int `yaml:"proxy_read_timeout" env:"ETCD_PROXY_READ_TIMEOUT"`
- ProxyRefreshInterval int `yaml:"proxy_refresh_interval" env:"ETCD_PROXY_REFRESH_INTERVAL"`
- ProxyWriteTimeout int `yaml:"proxy_write_timeout" env:"ETCD_PROXY_WRITE_TIMEOUT"`
- SnapshotCount int `yaml:"snapshot_count" env:"ETCD_SNAPSHOT_COUNT"`
- StrictReconfigCheck bool `yaml:"strict_reconfig_check" env:"ETCD_STRICT_RECONFIG_CHECK"`
- TrustedCAFile string `yaml:"trusted_ca_file" env:"ETCD_TRUSTED_CA_FILE"`
- WalDir string `yaml:"wal_dir" env:"ETCD_WAL_DIR"`
-}
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/file.go b/vendor/github.com/rancher-sandbox/cloud-init/config/file.go
deleted file mode 100644
index 97789ec64..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/file.go
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-type File struct {
- Encoding string `yaml:"encoding,omitempty" valid:"^(base64|b64|gz|gzip|gz\\+base64|gzip\\+base64|gz\\+b64|gzip\\+b64)$"`
- Content string `yaml:"content,omitempty"`
- Owner string `yaml:"owner,omitempty"`
- Path string `yaml:"path,omitempty"`
- RawFilePermissions string `yaml:"permissions,omitempty" valid:"^0?[0-7]{3,4}$"`
-}
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/flannel.go b/vendor/github.com/rancher-sandbox/cloud-init/config/flannel.go
deleted file mode 100644
index 76ca695aa..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/flannel.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-type Flannel struct {
- EtcdEndpoints string `yaml:"etcd_endpoints" env:"FLANNELD_ETCD_ENDPOINTS"`
- EtcdCAFile string `yaml:"etcd_cafile" env:"FLANNELD_ETCD_CAFILE"`
- EtcdCertFile string `yaml:"etcd_certfile" env:"FLANNELD_ETCD_CERTFILE"`
- EtcdKeyFile string `yaml:"etcd_keyfile" env:"FLANNELD_ETCD_KEYFILE"`
- EtcdPrefix string `yaml:"etcd_prefix" env:"FLANNELD_ETCD_PREFIX"`
- EtcdUsername string `yaml:"etcd_username" env:"FLANNELD_ETCD_USERNAME"`
- EtcdPassword string `yaml:"etcd_password" env:"FLANNELD_ETCD_PASSWORD"`
- IPMasq string `yaml:"ip_masq" env:"FLANNELD_IP_MASQ"`
- SubnetFile string `yaml:"subnet_file" env:"FLANNELD_SUBNET_FILE"`
- Iface string `yaml:"interface" env:"FLANNELD_IFACE"`
- PublicIP string `yaml:"public_ip" env:"FLANNELD_PUBLIC_IP"`
-}
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/fleet.go b/vendor/github.com/rancher-sandbox/cloud-init/config/fleet.go
deleted file mode 100644
index 66c53bf6c..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/fleet.go
+++ /dev/null
@@ -1,35 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-type Fleet struct {
- AgentTTL string `yaml:"agent_ttl" env:"FLEET_AGENT_TTL"`
- AuthorizedKeysFile string `yaml:"authorized_keys_file" env:"FLEET_AUTHORIZED_KEYS_FILE"`
- DisableEngine bool `yaml:"disable_engine" env:"FLEET_DISABLE_ENGINE"`
- EngineReconcileInterval float64 `yaml:"engine_reconcile_interval" env:"FLEET_ENGINE_RECONCILE_INTERVAL"`
- EtcdCAFile string `yaml:"etcd_cafile" env:"FLEET_ETCD_CAFILE"`
- EtcdCertFile string `yaml:"etcd_certfile" env:"FLEET_ETCD_CERTFILE"`
- EtcdKeyFile string `yaml:"etcd_keyfile" env:"FLEET_ETCD_KEYFILE"`
- EtcdKeyPrefix string `yaml:"etcd_key_prefix" env:"FLEET_ETCD_KEY_PREFIX"`
- EtcdRequestTimeout float64 `yaml:"etcd_request_timeout" env:"FLEET_ETCD_REQUEST_TIMEOUT"`
- EtcdServers string `yaml:"etcd_servers" env:"FLEET_ETCD_SERVERS"`
- EtcdUsername string `yaml:"etcd_username" env:"FLEET_ETCD_USERNAME"`
- EtcdPassword string `yaml:"etcd_password" env:"FLEET_ETCD_PASSWORD"`
- Metadata string `yaml:"metadata" env:"FLEET_METADATA"`
- PublicIP string `yaml:"public_ip" env:"FLEET_PUBLIC_IP"`
- TokenLimit int `yaml:"token_limit" env:"FLEET_TOKEN_LIMIT"`
- Verbosity int `yaml:"verbosity" env:"FLEET_VERBOSITY"`
- VerifyUnits bool `yaml:"verify_units" env:"FLEET_VERIFY_UNITS"`
-}
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/ignition.go b/vendor/github.com/rancher-sandbox/cloud-init/config/ignition.go
deleted file mode 100644
index 2d9bdd509..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/ignition.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-import (
- "encoding/json"
-)
-
-func IsIgnitionConfig(userdata string) bool {
- var cfg struct {
- Version *int `json:"ignitionVersion"`
- Ignition struct {
- Version *string `json:"version"`
- } `json:"ignition"`
- }
- return (json.Unmarshal([]byte(userdata), &cfg) == nil && (cfg.Version != nil || cfg.Ignition.Version != nil))
-}
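
A tiny illustrative check (again not part of the diff) of the removed format-detection helper, using only functions from the deleted files:

```go
package main

import (
	"fmt"

	"github.com/rancher-sandbox/cloud-init/config"
)

func main() {
	ignition := `{"ignition": {"version": "3.0.0"}}`
	cloudCfg := "#cloud-config\nhostname: node-1\n"

	// IsIgnitionConfig reports true when the userdata parses as JSON and
	// carries either an ignitionVersion or ignition.version field.
	fmt.Println(config.IsIgnitionConfig(ignition)) // true
	fmt.Println(config.IsIgnitionConfig(cloudCfg)) // false
	fmt.Println(config.IsCloudConfig(cloudCfg))    // true
}
```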
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/locksmith.go b/vendor/github.com/rancher-sandbox/cloud-init/config/locksmith.go
deleted file mode 100644
index c261e3564..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/locksmith.go
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-type Locksmith struct {
- Endpoint string `yaml:"endpoint" env:"LOCKSMITHD_ENDPOINT"`
- EtcdCAFile string `yaml:"etcd_cafile" env:"LOCKSMITHD_ETCD_CAFILE"`
- EtcdCertFile string `yaml:"etcd_certfile" env:"LOCKSMITHD_ETCD_CERTFILE"`
- EtcdKeyFile string `yaml:"etcd_keyfile" env:"LOCKSMITHD_ETCD_KEYFILE"`
- EtcdUsername string `yaml:"etcd_username" env:"LOCKSMITHD_ETCD_USERNAME"`
- EtcdPassword string `yaml:"etcd_password" env:"LOCKSMITHD_ETCD_PASSWORD"`
- Group string `yaml:"group" env:"LOCKSMITHD_GROUP"`
- RebootWindowStart string `yaml:"window_start" env:"REBOOT_WINDOW_START" valid:"^((?i:sun|mon|tue|wed|thu|fri|sat|sun) )?0*([0-9]|1[0-9]|2[0-3]):0*([0-9]|[1-5][0-9])$"`
- RebootWindowLength string `yaml:"window_length" env:"REBOOT_WINDOW_LENGTH" valid:"^[-+]?([0-9]*(\\.[0-9]*)?[a-z]+)+$"`
-}
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/oem.go b/vendor/github.com/rancher-sandbox/cloud-init/config/oem.go
deleted file mode 100644
index 21ce6683e..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/oem.go
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-type OEM struct {
- ID string `yaml:"id"`
- Name string `yaml:"name"`
- VersionID string `yaml:"version_id"`
- HomeURL string `yaml:"home_url"`
- BugReportURL string `yaml:"bug_report_url"`
-}
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/partitioning.go b/vendor/github.com/rancher-sandbox/cloud-init/config/partitioning.go
deleted file mode 100644
index ae0d683a7..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/partitioning.go
+++ /dev/null
@@ -1,6 +0,0 @@
-package config
-
-type GrowPart struct {
- Mode string `yaml:"mode,omitempty"`
- Devices []string `yaml:"devices"`
-}
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/script.go b/vendor/github.com/rancher-sandbox/cloud-init/config/script.go
deleted file mode 100644
index a81c5fc72..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/script.go
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-import (
- "strings"
-)
-
-type Script []byte
-
-func IsScript(userdata string) bool {
- header := strings.SplitN(userdata, "\n", 2)[0]
- return strings.HasPrefix(header, "#!")
-}
-
-func NewScript(userdata string) (*Script, error) {
- s := Script(userdata)
- return &s, nil
-}
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/unit.go b/vendor/github.com/rancher-sandbox/cloud-init/config/unit.go
deleted file mode 100644
index 591d44eec..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/unit.go
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-type Unit struct {
- Name string `yaml:"name"`
- Mask bool `yaml:"mask"`
- Enable bool `yaml:"enable"`
- Runtime bool `yaml:"runtime"`
- Content string `yaml:"content"`
- Command string `yaml:"command" valid:"^(start|stop|restart|reload|try-restart|reload-or-restart|reload-or-try-restart)$"`
- DropIns []UnitDropIn `yaml:"drop_ins"`
-}
-
-type UnitDropIn struct {
- Name string `yaml:"name"`
- Content string `yaml:"content"`
-}
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/update.go b/vendor/github.com/rancher-sandbox/cloud-init/config/update.go
deleted file mode 100644
index 3d702e440..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/update.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-type Update struct {
- RebootStrategy string `yaml:"reboot_strategy" env:"REBOOT_STRATEGY" valid:"^(best-effort|etcd-lock|reboot|off)$"`
- Group string `yaml:"group" env:"GROUP"`
- Server string `yaml:"server" env:"SERVER"`
-}
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/config/user.go b/vendor/github.com/rancher-sandbox/cloud-init/config/user.go
deleted file mode 100644
index beabbfee3..000000000
--- a/vendor/github.com/rancher-sandbox/cloud-init/config/user.go
+++ /dev/null
@@ -1,35 +0,0 @@
-// Copyright 2015 CoreOS, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-type User struct {
- Name string `yaml:"name,omitempty"`
- PasswordHash string `yaml:"passwd,omitempty"`
- SSHAuthorizedKeys []string `yaml:"ssh_authorized_keys,omitempty"`
- SSHImportGithubUser string `yaml:"coreos_ssh_import_github,omitempty" deprecated:"trying to fetch from a remote endpoint introduces too many intermittent errors"`
- SSHImportGithubUsers []string `yaml:"coreos_ssh_import_github_users,omitempty" deprecated:"trying to fetch from a remote endpoint introduces too many intermittent errors"`
- SSHImportURL string `yaml:"coreos_ssh_import_url,omitempty" deprecated:"trying to fetch from a remote endpoint introduces too many intermittent errors"`
- GECOS string `yaml:"gecos,omitempty"`
- Homedir string `yaml:"homedir,omitempty"`
- NoCreateHome bool `yaml:"no_create_home,omitempty"`
- PrimaryGroup string `yaml:"primary_group,omitempty"`
- Groups []string `yaml:"groups,omitempty"`
- NoUserGroup bool `yaml:"no_user_group,omitempty"`
- System bool `yaml:"system,omitempty"`
- NoLogInit bool `yaml:"no_log_init,omitempty"`
- Shell string `yaml:"shell,omitempty"`
- LockPasswd bool `yaml:"lock_passwd"`
- UID string `yaml:"uid"`
-}
diff --git a/vendor/github.com/rancher-sandbox/cloud-init/LICENSE b/vendor/github.com/rancher/yip/LICENSE
similarity index 99%
rename from vendor/github.com/rancher-sandbox/cloud-init/LICENSE
rename to vendor/github.com/rancher/yip/LICENSE
index e06d20818..d64569567 100644
--- a/vendor/github.com/rancher-sandbox/cloud-init/LICENSE
+++ b/vendor/github.com/rancher/yip/LICENSE
@@ -1,4 +1,5 @@
-Apache License
+
+ Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
@@ -178,7 +179,7 @@ Apache License
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "{}"
+ boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
@@ -186,7 +187,7 @@ Apache License
same "printed page" as the copyright notice for easier
identification within third-party archives.
- Copyright {yyyy} {name of copyright owner}
+ Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -199,4 +200,3 @@ Apache License
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-
diff --git a/vendor/github.com/rancher/yip/pkg/schema/cloudinit/config.go b/vendor/github.com/rancher/yip/pkg/schema/cloudinit/config.go
new file mode 100644
index 000000000..67cc8b9ee
--- /dev/null
+++ b/vendor/github.com/rancher/yip/pkg/schema/cloudinit/config.go
@@ -0,0 +1,78 @@
+// This is an adaptation of coreOS/cloudinit.
+// https://github.com/coreos/coreos-cloudinit/blob/master/config/config.go
+
+package config
+
+import (
+ "strings"
+ "unicode"
+
+ "gopkg.in/yaml.v3"
+)
+
+// NewCloudConfig instantiates a new CloudConfig from the given contents (a
+// string of YAML), returning any error encountered. It will ignore unknown
+// fields but log encountering them.
+func NewCloudConfig(contents string) (*CloudConfig, error) {
+ var cfg CloudConfig
+ err := yaml.Unmarshal([]byte(contents), &cfg)
+ return &cfg, err
+}
+
+func IsCloudConfig(userdata string) bool {
+ header := strings.SplitN(userdata, "\n", 2)[0]
+
+ // Trim trailing whitespaces
+ header = strings.TrimRightFunc(header, unicode.IsSpace)
+
+ return (header == "#cloud-config")
+}
+
+// CloudConfig encapsulates the entire cloud-config configuration file and maps
+// directly to YAML. Fields that cannot be set in the cloud-config (fields
+// used for internal use) have the YAML tag '-' so that they aren't marshalled.
+type CloudConfig struct {
+ SSHAuthorizedKeys []string `yaml:"ssh_authorized_keys,omitempty"`
+ WriteFiles []File `yaml:"write_files,omitempty"`
+ Hostname string `yaml:"hostname,omitempty"`
+ Users []User `yaml:"users,omitempty"`
+ RunCmd []string `yaml:"runcmd,omitempty"`
+
+ Partitioning GrowPart `yaml:"growpart"`
+ // this one is legacy, can be removed when no more kip controllers use it
+ MilpaFiles []File `yaml:"milpa_files,omitempty"`
+ // Todo: add additional parameters supported by traditional cloud-init
+}
+
+type GrowPart struct {
+ Mode string `yaml:"mode,omitempty"`
+ Devices []string `yaml:"devices"`
+}
+
+type File struct {
+ Encoding string `yaml:"encoding,omitempty" valid:"^(base64|b64|gz|gzip|gz\\+base64|gzip\\+base64|gz\\+b64|gzip\\+b64)$"`
+ Content string `yaml:"content,omitempty"`
+ Owner string `yaml:"owner,omitempty"`
+ Path string `yaml:"path,omitempty"`
+ RawFilePermissions string `yaml:"permissions,omitempty" valid:"^0?[0-7]{3,4}$"`
+}
+
+type User struct {
+ Name string `yaml:"name,omitempty"`
+ PasswordHash string `yaml:"passwd,omitempty"`
+ SSHAuthorizedKeys []string `yaml:"ssh_authorized_keys,omitempty"`
+ SSHImportGithubUser string `yaml:"coreos_ssh_import_github,omitempty" deprecated:"trying to fetch from a remote endpoint introduces too many intermittent errors"`
+ SSHImportGithubUsers []string `yaml:"coreos_ssh_import_github_users,omitempty" deprecated:"trying to fetch from a remote endpoint introduces too many intermittent errors"`
+ SSHImportURL string `yaml:"coreos_ssh_import_url,omitempty" deprecated:"trying to fetch from a remote endpoint introduces too many intermittent errors"`
+ GECOS string `yaml:"gecos,omitempty"`
+ Homedir string `yaml:"homedir,omitempty"`
+ NoCreateHome bool `yaml:"no_create_home,omitempty"`
+ PrimaryGroup string `yaml:"primary_group,omitempty"`
+ Groups []string `yaml:"groups,omitempty"`
+ NoUserGroup bool `yaml:"no_user_group,omitempty"`
+ System bool `yaml:"system,omitempty"`
+ NoLogInit bool `yaml:"no_log_init,omitempty"`
+ Shell string `yaml:"shell,omitempty"`
+ LockPasswd bool `yaml:"lock_passwd"`
+ UID string `yaml:"uid"`
+}
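
A minimal usage sketch for the replacement package added above; it assumes the new vendored import path `github.com/rancher/yip/pkg/schema/cloudinit` (declared package name `config`, which is how the updated schema loader imports it):

```go
package main

import (
	"fmt"
	"log"

	config "github.com/rancher/yip/pkg/schema/cloudinit"
)

func main() {
	userdata := "#cloud-config\n" +
		"hostname: node-1\n" +
		"ssh_authorized_keys:\n" +
		"- ssh-ed25519 AAAAC3Nza demo@example\n"

	if !config.IsCloudConfig(userdata) {
		log.Fatal("not a #cloud-config document")
	}

	// NewCloudConfig now unmarshals with gopkg.in/yaml.v3 instead of coreos/yaml.
	cc, err := config.NewCloudConfig(userdata)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(cc.Hostname)               // node-1
	fmt.Println(len(cc.SSHAuthorizedKeys)) // 1
}
```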
diff --git a/vendor/github.com/mudler/yip/pkg/schema/loader_cloudinit.go b/vendor/github.com/rancher/yip/pkg/schema/loader_cloudinit.go
similarity index 77%
rename from vendor/github.com/mudler/yip/pkg/schema/loader_cloudinit.go
rename to vendor/github.com/rancher/yip/pkg/schema/loader_cloudinit.go
index 1220b98a9..0cfffc8e6 100644
--- a/vendor/github.com/mudler/yip/pkg/schema/loader_cloudinit.go
+++ b/vendor/github.com/rancher/yip/pkg/schema/loader_cloudinit.go
@@ -1,17 +1,16 @@
-// Copyright © 2021 Ettore Di Giacinto
+// Copyright 2020 Ettore Di Giacinto
//
-// This program is free software; you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation; either version 2 of the License, or
-// (at your option) any later version.
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
+// http://www.apache.org/licenses/LICENSE-2.0
//
-// You should have received a copy of the GNU General Public License along
-// with this program; if not, see <http://www.gnu.org/licenses/>.
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
package schema
@@ -19,8 +18,9 @@ import (
"fmt"
"strconv"
- cloudconfig "github.com/rancher-sandbox/cloud-init/config"
- "github.com/twpayne/go-vfs"
+ "github.com/twpayne/go-vfs/v4"
+
+ cloudconfig "github.com/rancher/yip/pkg/schema/cloudinit"
)
type cloudInit struct{}
@@ -106,7 +106,6 @@ func (cloudInit) Load(s []byte, fs vfs.FS) (*YipConfig, error) {
}
result := &YipConfig{
- Name: "Cloud init",
Stages: map[string][]Stage{
"boot": stages,
"initramfs": {{
diff --git a/vendor/github.com/mudler/yip/pkg/schema/loader_yip.go b/vendor/github.com/rancher/yip/pkg/schema/loader_yip.go
similarity index 84%
rename from vendor/github.com/mudler/yip/pkg/schema/loader_yip.go
rename to vendor/github.com/rancher/yip/pkg/schema/loader_yip.go
index 6fbb8f5a8..f7af2df62 100644
--- a/vendor/github.com/mudler/yip/pkg/schema/loader_yip.go
+++ b/vendor/github.com/rancher/yip/pkg/schema/loader_yip.go
@@ -1,8 +1,8 @@
package schema
import (
- "github.com/twpayne/go-vfs"
- "gopkg.in/yaml.v2"
+ "github.com/twpayne/go-vfs/v4"
+ "gopkg.in/yaml.v3"
)
type yipYAML struct{}
diff --git a/vendor/github.com/mudler/yip/pkg/schema/schema.go b/vendor/github.com/rancher/yip/pkg/schema/schema.go
similarity index 55%
rename from vendor/github.com/mudler/yip/pkg/schema/schema.go
rename to vendor/github.com/rancher/yip/pkg/schema/schema.go
index 076eaf0c4..d67faa4f2 100644
--- a/vendor/github.com/mudler/yip/pkg/schema/schema.go
+++ b/vendor/github.com/rancher/yip/pkg/schema/schema.go
@@ -1,22 +1,22 @@
-// Copyright © 2020 Ettore Di Giacinto
+// Copyright 2019 Ettore Di Giacinto
//
-// This program is free software; you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation; either version 2 of the License, or
-// (at your option) any later version.
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
+// http://www.apache.org/licenses/LICENSE-2.0
//
-// You should have received a copy of the GNU General Public License along
-// with this program; if not, see <http://www.gnu.org/licenses/>.
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
package schema
import (
"bytes"
+ "errors"
"fmt"
"io"
"net/http"
@@ -25,13 +25,12 @@ import (
"github.com/google/shlex"
- "github.com/pkg/errors"
- "github.com/rancher-sandbox/cloud-init/config"
+ config "github.com/rancher/yip/pkg/schema/cloudinit"
"github.com/hashicorp/go-multierror"
"github.com/itchyny/gojq"
- "github.com/twpayne/go-vfs"
- "gopkg.in/yaml.v2"
+ "github.com/twpayne/go-vfs/v4"
+ "gopkg.in/yaml.v3"
)
type YipEntity struct {
@@ -64,25 +63,25 @@ type Directory struct {
}
type DataSource struct {
- Providers []string `yaml:"providers"`
- Path string `yaml:"path"`
+ Providers []string `yaml:"providers,omitempty"`
+ Path string `yaml:"path,omitempty"`
}
type Git struct {
- Auth Auth `yaml:"auth"`
- URL string `yaml:"url"`
- Path string `yaml:"path"`
- Branch string `yaml:"branch"`
- BranchOnly bool `yaml:"branch_only"`
+ Auth Auth `yaml:"auth,omitempty"`
+ URL string `yaml:"url,omitempty"`
+ Path string `yaml:"path,omitempty"`
+ Branch string `yaml:"branch,omitempty"`
+ BranchOnly bool `yaml:"branch_only,omitempty"`
}
type Auth struct {
- Username string `yaml:"username"`
- Password string `yaml:"password"`
- PrivateKey string `yaml:"private_key"`
+ Username string `yaml:"username,omitempty"`
+ Password string `yaml:"password,omitempty"`
+ PrivateKey string `yaml:"private_key,omitempty"`
- Insecure bool `yaml:"insecure"`
- PublicKey string `yaml:"public_key"`
+ Insecure bool `yaml:"insecure,omitempty"`
+ PublicKey string `yaml:"public_key,omitempty"`
}
type User struct {
@@ -98,8 +97,8 @@ type User struct {
System bool `yaml:"system,omitempty"`
NoLogInit bool `yaml:"no_log_init,omitempty"`
Shell string `yaml:"shell,omitempty"`
- LockPasswd bool `yaml:"lock_passwd"`
- UID string `yaml:"uid"`
+ LockPasswd bool `yaml:"lock_passwd,omitempty"`
+ UID string `yaml:"uid,omitempty"`
}
func (u User) Exists() bool {
@@ -108,74 +107,79 @@ func (u User) Exists() bool {
}
type Layout struct {
- Device *Device `yaml:"device"`
+ Device *Device `yaml:"device,omitempty"`
Expand *Expand `yaml:"expand_partition,omitempty"`
Parts []Partition `yaml:"add_partitions,omitempty"`
}
type Device struct {
- Label string `"yaml:label"`
- Path string `"yaml:path"`
+ Label string `yaml:"label,omitempty"`
+ Path string `yaml:"path,omitempty"`
}
type Expand struct {
- Size uint `"yaml:size"`
+ Size uint `yaml:"size,omitempty"`
}
type Partition struct {
- FSLabel string `yaml:"fsLabel"`
+ FSLabel string `yaml:"fsLabel,omitempty"`
Size uint `yaml:"size,omitempty"`
PLabel string `yaml:"pLabel,omitempty"`
FileSystem string `yaml:"filesystem,omitempty"`
}
+type Dependency struct {
+ Name string `yaml:"name,omitempty"`
+}
+
type Stage struct {
- Commands []string `yaml:"commands"`
- Files []File `yaml:"files"`
- Downloads []Download `yaml:"downloads"`
- Directories []Directory `yaml:"directories"`
- If string `yaml:"if"`
-
- EnsureEntities []YipEntity `yaml:"ensure_entities"`
- DeleteEntities []YipEntity `yaml:"delete_entities"`
- Dns DNS `yaml:"dns"`
- Hostname string `yaml:"hostname"`
- Name string `yaml:"name"`
- Sysctl map[string]string `yaml:"sysctl"`
- SSHKeys map[string][]string `yaml:"authorized_keys"`
- Node string `yaml:"node"`
- Users map[string]User `yaml:"users"`
- Modules []string `yaml:"modules"`
- Systemctl Systemctl `yaml:"systemctl"`
- Environment map[string]string `yaml:"environment"`
- EnvironmentFile string `yaml:"environment_file"`
-
- DataSources DataSource `yaml:"datasource"`
- Layout Layout `yaml:"layout"`
-
- SystemdFirstBoot map[string]string `yaml:"systemd_firstboot"`
-
- TimeSyncd map[string]string `yaml:"timesyncd"`
- Git Git `yaml:"git"`
+ Commands []string `yaml:"commands,omitempty"`
+ Files []File `yaml:"files,omitempty"`
+ Downloads []Download `yaml:"downloads,omitempty"`
+ Directories []Directory `yaml:"directories,omitempty"`
+ If string `yaml:"if,omitempty"`
+
+ EnsureEntities []YipEntity `yaml:"ensure_entities,omitempty"`
+ DeleteEntities []YipEntity `yaml:"delete_entities,omitempty"`
+ Dns DNS `yaml:"dns,omitempty"`
+ Hostname string `yaml:"hostname,omitempty"`
+ Name string `yaml:"name,omitempty"`
+ Sysctl map[string]string `yaml:"sysctl,omitempty"`
+ SSHKeys map[string][]string `yaml:"authorized_keys,omitempty"`
+ Node string `yaml:"node,omitempty"`
+ Users map[string]User `yaml:"users,omitempty"`
+ Systemctl Systemctl `yaml:"systemctl,omitempty"`
+ Environment map[string]string `yaml:"environment,omitempty"`
+ EnvironmentFile string `yaml:"environment_file,omitempty"`
+
+ After []Dependency `yaml:"after,omitempty"`
+
+ DataSources DataSource `yaml:"datasource,omitempty"`
+ Layout Layout `yaml:"layout,omitempty"`
+
+ SystemdFirstBoot map[string]string `yaml:"systemd_firstboot,omitempty"`
+
+ TimeSyncd map[string]string `yaml:"timesyncd,omitempty"`
+ Git Git `yaml:"git,omitempty"`
}
type Systemctl struct {
- Enable []string `yaml:"enable"`
- Disable []string `yaml:"disable"`
- Start []string `yaml:"start"`
- Mask []string `yaml:"mask"`
+ Enable []string `yaml:"enable,omitempty"`
+ Disable []string `yaml:"disable,omitempty"`
+ Start []string `yaml:"start,omitempty"`
+ Mask []string `yaml:"mask,omitempty"`
}
type DNS struct {
- Nameservers []string `yaml:"nameservers"`
- DnsSearch []string `yaml:"search"`
- DnsOptions []string `yaml:"options"`
- Path string `yaml:"path"`
+ Nameservers []string `yaml:"nameservers,omitempty"`
+ DnsSearch []string `yaml:"search,omitempty"`
+ DnsOptions []string `yaml:"options,omitempty"`
+ Path string `yaml:"path,omitempty"`
}
type YipConfig struct {
- Name string `yaml:"name"`
- Stages map[string][]Stage `yaml:"stages"`
+ Name string `yaml:"name,omitempty"`
+ Stages map[string][]Stage `yaml:"stages,omitempty"`
}
type Loader func(s string, fs vfs.FS, m Modifier) ([]byte, error)
@@ -194,12 +198,12 @@ func Load(s string, fs vfs.FS, l Loader, m Modifier) (*YipConfig, error) {
}
data, err := l(s, fs, m)
if err != nil {
- return nil, errors.Wrap(err, "while loading yipconfig")
+ return nil, fmt.Errorf("error while loading yipconfig: %s", err.Error())
}
loader, err := detect(data)
if err != nil {
- return nil, errors.Wrap(err, "invalid file type")
+ return nil, fmt.Errorf("invalid file type: %s", err.Error())
}
return loader.Load(data, fs)
}
@@ -264,7 +268,7 @@ func jq(command string, data map[string]interface{}) (map[string]interface{}, er
v, ok := iter.Next()
if !ok {
- return nil, errors.New("failed getting rsult from gojq")
+ return nil, errors.New("failed getting result from gojq")
}
if err, ok := v.(error); ok {
return nil, err
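
A small sketch of the practical effect of the `,omitempty` tags added above: zero-valued Stage fields are dropped when a config is marshalled back to YAML. It assumes the vendored `github.com/rancher/yip/pkg/schema` import path and `gopkg.in/yaml.v3`, which the updated loaders now use:

```go
package main

import (
	"fmt"
	"log"

	"github.com/rancher/yip/pkg/schema"
	"gopkg.in/yaml.v3"
)

func main() {
	cfg := schema.YipConfig{
		Name: "Demo config",
		Stages: map[string][]schema.Stage{
			"initramfs": {{
				Name:     "Set hostname",
				Hostname: "node-1",
				Commands: []string{"hostnamectl set-hostname node-1"},
			}},
		},
	}

	out, err := yaml.Marshal(cfg)
	if err != nil {
		log.Fatal(err)
	}

	// With the ,omitempty tags, unset fields (files, users, systemctl, dns, ...)
	// no longer appear as empty keys in the marshalled output.
	fmt.Println(string(out))
}
```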
diff --git a/vendor/github.com/rivo/uniseg/LICENSE.txt b/vendor/github.com/rivo/uniseg/LICENSE.txt
new file mode 100644
index 000000000..5040f1ef8
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 Oliver Kuederle
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/rivo/uniseg/README.md b/vendor/github.com/rivo/uniseg/README.md
new file mode 100644
index 000000000..a8191b815
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/README.md
@@ -0,0 +1,137 @@
+# Unicode Text Segmentation for Go
+
+[![Go Reference](https://pkg.go.dev/badge/github.com/rivo/uniseg.svg)](https://pkg.go.dev/github.com/rivo/uniseg)
+[![Go Report](https://img.shields.io/badge/go%20report-A%2B-brightgreen.svg)](https://goreportcard.com/report/github.com/rivo/uniseg)
+
+This Go package implements Unicode Text Segmentation according to [Unicode Standard Annex #29](https://unicode.org/reports/tr29/), Unicode Line Breaking according to [Unicode Standard Annex #14](https://unicode.org/reports/tr14/) (Unicode version 15.0.0), and monospace font string width calculation similar to [wcwidth](https://man7.org/linux/man-pages/man3/wcwidth.3.html).
+
+## Background
+
+### Grapheme Clusters
+
+In Go, [strings are read-only slices of bytes](https://go.dev/blog/strings). They can be turned into Unicode code points using the `for` loop or by casting: `[]rune(str)`. However, multiple code points may be combined into one user-perceived character or what the Unicode specification calls "grapheme cluster". Here are some examples:
+
+|String|Bytes (UTF-8)|Code points (runes)|Grapheme clusters|
+|-|-|-|-|
+|Käse|6 bytes: `4b 61 cc 88 73 65`|5 code points: `4b 61 308 73 65`|4 clusters: `[4b],[61 308],[73],[65]`|
+|🏳️🌈|14 bytes: `f0 9f 8f b3 ef b8 8f e2 80 8d f0 9f 8c 88`|4 code points: `1f3f3 fe0f 200d 1f308`|1 cluster: `[1f3f3 fe0f 200d 1f308]`|
+|🇩🇪|8 bytes: `f0 9f 87 a9 f0 9f 87 aa`|2 code points: `1f1e9 1f1ea`|1 cluster: `[1f1e9 1f1ea]`|
+
+This package provides tools to iterate over these grapheme clusters. This may be used to determine the number of user-perceived characters, to split strings in their intended places, or to extract individual characters which form a unit.
+
+### Word Boundaries
+
+Word boundaries are used in a number of different contexts. The most familiar ones are selection (double-click mouse selection), cursor movement ("move to next word" control-arrow keys), and the dialog option "Whole Word Search" for search and replace. They are also used in database queries, to determine whether elements are within a certain number of words of one another. Searching may also use word boundaries in determining matching items. This package provides tools to determine word boundaries within strings.
+
+### Sentence Boundaries
+
+Sentence boundaries are often used for triple-click or some other method of selecting or iterating through blocks of text that are larger than single words. They are also used to determine whether words occur within the same sentence in database queries. This package provides tools to determine sentence boundaries within strings.
+
+### Line Breaking
+
+Line breaking, also known as word wrapping, is the process of breaking a section of text into lines such that it will fit in the available width of a page, window or other display area. This package provides tools to determine where a string may or may not be broken and where it must be broken (for example after newline characters).
+
+### Monospace Width
+
+Most terminals or text displays / text editors using a monospace font (for example source code editors) use a fixed width for each character. Some characters such as emojis or characters found in Asian and other languages may take up more than one character cell. This package provides tools to determine the number of cells a string will take up when displayed in a monospace font. See [here](https://pkg.go.dev/github.com/rivo/uniseg#hdr-Monospace_Width) for more information.
+
+## Installation
+
+```bash
+go get github.com/rivo/uniseg
+```
+
+## Examples
+
+### Counting Characters in a String
+
+```go
+n := uniseg.GraphemeClusterCount("🇩🇪🏳️🌈")
+fmt.Println(n)
+// 2
+```
+
+### Calculating the Monospace String Width
+
+```go
+width := uniseg.StringWidth("🇩🇪🏳️🌈!")
+fmt.Println(width)
+// 5
+```
+
+### Using the [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) Class
+
+This is the most convenient method of iterating over grapheme clusters:
+
+```go
+gr := uniseg.NewGraphemes("👍🏼!")
+for gr.Next() {
+ fmt.Printf("%x ", gr.Runes())
+}
+// [1f44d 1f3fc] [21]
+```
+
+### Using the [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step) or [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString) Function
+
+This avoids allocating a new `Graphemes` object, but it requires handling states and boundaries:
+
+```go
+str := "🇩🇪🏳️🌈"
+state := -1
+var c string
+for len(str) > 0 {
+ c, str, _, state = uniseg.StepString(str, state)
+ fmt.Printf("%x ", []rune(c))
+}
+// [1f1e9 1f1ea] [1f3f3 fe0f 200d 1f308]
+```
+
+### Advanced Examples
+
+The [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) class offers the most convenient way to access all functionality of this package. But in some cases, it may be better to use the specialized functions directly. For example, if you're only interested in word segmentation, use [`FirstWord`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWord) or [`FirstWordInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWordInString):
+
+```go
+str := "Hello, world!"
+state := -1
+var c string
+for len(str) > 0 {
+ c, str, state = uniseg.FirstWordInString(str, state)
+ fmt.Printf("(%s)\n", c)
+}
+// (Hello)
+// (,)
+// ( )
+// (world)
+// (!)
+```
+
+Similarly, use
+
+- [`FirstGraphemeCluster`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeCluster) or [`FirstGraphemeClusterInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeClusterInString) for grapheme cluster determination only,
+- [`FirstSentence`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentence) or [`FirstSentenceInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentenceInString) for sentence segmentation only (see the sketch after this list), and
+- [`FirstLineSegment`](https://pkg.go.dev/github.com/rivo/uniseg#FirstLineSegment) or [`FirstLineSegmentInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstLineSegmentInString) for line breaking / word wrapping (although using [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step) or [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString) is preferred as it will observe grapheme cluster boundaries).
+
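+For example, sentence segmentation with [`FirstSentenceInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentenceInString) follows the same pattern as the word example above; it returns the sentence, the remainder of the string, and the new state (a minimal sketch):
+
+```go
+str := "Hello, world! Nice to meet you."
+state := -1
+var sentence string
+for len(str) > 0 {
+	sentence, str, state = uniseg.FirstSentenceInString(str, state)
+	fmt.Printf("(%s)\n", sentence)
+}
+// (Hello, world! )
+// (Nice to meet you.)
+```
+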
+If you're only interested in the width of characters, use [`FirstGraphemeCluster`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeCluster) or [`FirstGraphemeClusterInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeClusterInString). It is much faster than using [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step), [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString), or the [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) class because it does not include the logic for word / sentence / line boundaries.
+
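+For instance, a minimal width-summing sketch (the documented return values of [`FirstGraphemeClusterInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeClusterInString) are the cluster, the remainder, the cluster's monospace width, and the new state):
+
+```go
+str := "🇩🇪🏳️‍🌈!"
+state := -1
+var cluster string
+var width, total int
+for len(str) > 0 {
+	cluster, str, width, state = uniseg.FirstGraphemeClusterInString(str, state)
+	total += width
+	fmt.Printf("%s: %d\n", cluster, width)
+}
+fmt.Println("total:", total)
+// 🇩🇪: 2
+// 🏳️‍🌈: 2
+// !: 1
+// total: 5
+```
+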
+Finally, if you need to reverse a string while preserving grapheme clusters, use [`ReverseString`](https://pkg.go.dev/github.com/rivo/uniseg#ReverseString):
+
+```go
+fmt.Println(uniseg.ReverseString("🇩🇪🏳️🌈"))
+// 🏳️🌈🇩🇪
+```
+
+## Documentation
+
+Refer to https://pkg.go.dev/github.com/rivo/uniseg for the package's documentation.
+
+## Dependencies
+
+This package does not depend on any packages outside the standard library.
+
+## Sponsor this Project
+
+[Become a Sponsor on GitHub](https://github.com/sponsors/rivo?metadata_source=uniseg_readme) to support this project!
+
+## Your Feedback
+
+Add your issue here on GitHub, preferably before submitting any PRs. Feel free to get in touch if you have any questions.
\ No newline at end of file
diff --git a/vendor/github.com/rivo/uniseg/doc.go b/vendor/github.com/rivo/uniseg/doc.go
new file mode 100644
index 000000000..11224ae22
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/doc.go
@@ -0,0 +1,108 @@
+/*
+Package uniseg implements Unicode Text Segmentation, Unicode Line Breaking, and
+string width calculation for monospace fonts. Unicode Text Segmentation conforms
+to Unicode Standard Annex #29 (https://unicode.org/reports/tr29/) and Unicode
+Line Breaking conforms to Unicode Standard Annex #14
+(https://unicode.org/reports/tr14/).
+
+In short, using this package, you can split a string into grapheme clusters
+(what people would usually refer to as a "character"), into words, and into
+sentences. Or, in its simplest case, this package allows you to count the number
+of characters in a string, especially when it contains complex characters such
+as emojis, combining characters, or characters from Asian, Arabic, Hebrew, or
+other languages. Additionally, you can use it to implement line breaking (or
+"word wrapping"), that is, to determine where text can be broken over to the
+next line when the width of the line is not big enough to fit the entire text.
+Finally, you can use it to calculate the display width of a string for monospace
+fonts.
+
+# Getting Started
+
+If you just want to count the number of characters in a string, you can use
+[GraphemeClusterCount]. If you want to determine the display width of a string,
+you can use [StringWidth]. If you want to iterate over a string, you can use
+[Step], [StepString], or the [Graphemes] class (more convenient but less
+performant). This will provide you with all information: grapheme clusters,
+word boundaries, sentence boundaries, line breaks, and monospace character
+widths. The specialized functions [FirstGraphemeCluster],
+[FirstGraphemeClusterInString], [FirstWord], [FirstWordInString],
+[FirstSentence], and [FirstSentenceInString] can be used if only one type of
+information is needed.
+
+# Grapheme Clusters
+
+Consider the rainbow flag emoji: 🏳️🌈. On most modern systems, it appears as one
+character. But its string representation actually has 14 bytes, so counting
+bytes (or using len("🏳️🌈")) will not work as expected. Counting runes won't,
+either: The flag has 4 Unicode code points, thus 4 runes. The stdlib function
+utf8.RuneCountInString("🏳️🌈") and len([]rune("🏳️🌈")) will both return 4.
+
+The [GraphemeClusterCount] function will return 1 for the rainbow flag emoji.
+The Graphemes class and a variety of functions in this package will allow you
+to split strings into their grapheme clusters.
+
+# Word Boundaries
+
+Word boundaries are used in a number of different contexts. The most familiar
+ones are selection (double-click mouse selection), cursor movement ("move to
+next word" control-arrow keys), and the dialog option "Whole Word Search" for
+search and replace. This package provides methods for determining word
+boundaries.
+
+# Sentence Boundaries
+
+Sentence boundaries are often used for triple-click or some other method of
+selecting or iterating through blocks of text that are larger than single words.
+They are also used to determine whether words occur within the same sentence in
+database queries. This package provides methods for determining sentence
+boundaries.
+
+# Line Breaking
+
+Line breaking, also known as word wrapping, is the process of breaking a section
+of text into lines such that it will fit in the available width of a page,
+window or other display area. This package provides methods to determine the
+positions in a string where a line must be broken, may be broken, or must not be
+broken.
+
+# Monospace Width
+
+Monospace width, as referred to in this package, is the width of a string in a
+monospace font. This is commonly used in terminal user interfaces or text
+displays or editors that don't support proportional fonts. A width of 1
+corresponds to a single character cell. The C function [wcswidth()] and its
+implementations in other programming languages are in widespread use for the same
+purpose. However, there is no standard for the calculation of such widths, and
+this package differs from wcswidth() in a number of ways, presumably to generate
+more visually pleasing results.
+
+To start, we assume that every code point has a width of 1, with the following
+exceptions:
+
+ - Code points with grapheme cluster break properties Control, CR, LF, Extend,
+ and ZWJ have a width of 0.
+ - U+2E3A, Two-Em Dash, has a width of 3.
+ - U+2E3B, Three-Em Dash, has a width of 4.
+ - Characters with the East-Asian Width properties "Fullwidth" (F) and "Wide"
+ (W) have a width of 2. (Properties "Ambiguous" (A) and "Neutral" (N) both
+ have a width of 1.)
+ - Code points with grapheme cluster break property Regional Indicator have a
+ width of 2.
+ - Code points with grapheme cluster break property Extended Pictographic have
+ a width of 2, unless their Emoji Presentation flag is "No", in which case
+ the width is 1.
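+
+For example, under these rules (a brief, non-normative illustration):
+
+	uniseg.StringWidth("abc")    // 3: three narrow characters
+	uniseg.StringWidth("世界")   // 4: two "Wide" (W) characters
+	uniseg.StringWidth("\u2e3a") // 3: U+2E3A, Two-Em Dash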
+
+For Hangul grapheme clusters composed of conjoining Jamo and for Regional
+Indicators (flags), all code points except the first one have a width of 0. For
+grapheme clusters starting with an Extended Pictographic, any additional code
+point will force a total width of 2, except if the Variation Selector-15
+(U+FE0E) is included, in which case the total width is always 1. Grapheme
+clusters ending with Variation Selector-16 (U+FE0F) have a width of 2.
+
+Note that whether these widths appear correct depends on your application's
+render engine, to what extent it conforms to the Unicode Standard, and its
+choice of font.
+
+[wcswidth()]: https://man7.org/linux/man-pages/man3/wcswidth.3.html
+*/
+package uniseg
diff --git a/vendor/github.com/rivo/uniseg/eastasianwidth.go b/vendor/github.com/rivo/uniseg/eastasianwidth.go
new file mode 100644
index 000000000..5fc54d991
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/eastasianwidth.go
@@ -0,0 +1,2588 @@
+// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+package uniseg
+
+// eastAsianWidth are taken from
+// https://www.unicode.org/Public/15.0.0/ucd/EastAsianWidth.txt
+// and
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
+// ("Extended_Pictographic" only)
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var eastAsianWidth = [][3]int{
+ {0x0000, 0x001F, prN}, // Cc [32] ..
+ {0x0020, 0x0020, prNa}, // Zs SPACE
+ {0x0021, 0x0023, prNa}, // Po [3] EXCLAMATION MARK..NUMBER SIGN
+ {0x0024, 0x0024, prNa}, // Sc DOLLAR SIGN
+ {0x0025, 0x0027, prNa}, // Po [3] PERCENT SIGN..APOSTROPHE
+ {0x0028, 0x0028, prNa}, // Ps LEFT PARENTHESIS
+ {0x0029, 0x0029, prNa}, // Pe RIGHT PARENTHESIS
+ {0x002A, 0x002A, prNa}, // Po ASTERISK
+ {0x002B, 0x002B, prNa}, // Sm PLUS SIGN
+ {0x002C, 0x002C, prNa}, // Po COMMA
+ {0x002D, 0x002D, prNa}, // Pd HYPHEN-MINUS
+ {0x002E, 0x002F, prNa}, // Po [2] FULL STOP..SOLIDUS
+ {0x0030, 0x0039, prNa}, // Nd [10] DIGIT ZERO..DIGIT NINE
+ {0x003A, 0x003B, prNa}, // Po [2] COLON..SEMICOLON
+ {0x003C, 0x003E, prNa}, // Sm [3] LESS-THAN SIGN..GREATER-THAN SIGN
+ {0x003F, 0x0040, prNa}, // Po [2] QUESTION MARK..COMMERCIAL AT
+ {0x0041, 0x005A, prNa}, // Lu [26] LATIN CAPITAL LETTER A..LATIN CAPITAL LETTER Z
+ {0x005B, 0x005B, prNa}, // Ps LEFT SQUARE BRACKET
+ {0x005C, 0x005C, prNa}, // Po REVERSE SOLIDUS
+ {0x005D, 0x005D, prNa}, // Pe RIGHT SQUARE BRACKET
+ {0x005E, 0x005E, prNa}, // Sk CIRCUMFLEX ACCENT
+ {0x005F, 0x005F, prNa}, // Pc LOW LINE
+ {0x0060, 0x0060, prNa}, // Sk GRAVE ACCENT
+ {0x0061, 0x007A, prNa}, // Ll [26] LATIN SMALL LETTER A..LATIN SMALL LETTER Z
+ {0x007B, 0x007B, prNa}, // Ps LEFT CURLY BRACKET
+ {0x007C, 0x007C, prNa}, // Sm VERTICAL LINE
+ {0x007D, 0x007D, prNa}, // Pe RIGHT CURLY BRACKET
+ {0x007E, 0x007E, prNa}, // Sm TILDE
+ {0x007F, 0x007F, prN}, // Cc
+ {0x0080, 0x009F, prN}, // Cc [32] ..
+ {0x00A0, 0x00A0, prN}, // Zs NO-BREAK SPACE
+ {0x00A1, 0x00A1, prA}, // Po INVERTED EXCLAMATION MARK
+ {0x00A2, 0x00A3, prNa}, // Sc [2] CENT SIGN..POUND SIGN
+ {0x00A4, 0x00A4, prA}, // Sc CURRENCY SIGN
+ {0x00A5, 0x00A5, prNa}, // Sc YEN SIGN
+ {0x00A6, 0x00A6, prNa}, // So BROKEN BAR
+ {0x00A7, 0x00A7, prA}, // Po SECTION SIGN
+ {0x00A8, 0x00A8, prA}, // Sk DIAERESIS
+ {0x00A9, 0x00A9, prN}, // So COPYRIGHT SIGN
+ {0x00AA, 0x00AA, prA}, // Lo FEMININE ORDINAL INDICATOR
+ {0x00AB, 0x00AB, prN}, // Pi LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+ {0x00AC, 0x00AC, prNa}, // Sm NOT SIGN
+ {0x00AD, 0x00AD, prA}, // Cf SOFT HYPHEN
+ {0x00AE, 0x00AE, prA}, // So REGISTERED SIGN
+ {0x00AF, 0x00AF, prNa}, // Sk MACRON
+ {0x00B0, 0x00B0, prA}, // So DEGREE SIGN
+ {0x00B1, 0x00B1, prA}, // Sm PLUS-MINUS SIGN
+ {0x00B2, 0x00B3, prA}, // No [2] SUPERSCRIPT TWO..SUPERSCRIPT THREE
+ {0x00B4, 0x00B4, prA}, // Sk ACUTE ACCENT
+ {0x00B5, 0x00B5, prN}, // Ll MICRO SIGN
+ {0x00B6, 0x00B7, prA}, // Po [2] PILCROW SIGN..MIDDLE DOT
+ {0x00B8, 0x00B8, prA}, // Sk CEDILLA
+ {0x00B9, 0x00B9, prA}, // No SUPERSCRIPT ONE
+ {0x00BA, 0x00BA, prA}, // Lo MASCULINE ORDINAL INDICATOR
+ {0x00BB, 0x00BB, prN}, // Pf RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+ {0x00BC, 0x00BE, prA}, // No [3] VULGAR FRACTION ONE QUARTER..VULGAR FRACTION THREE QUARTERS
+ {0x00BF, 0x00BF, prA}, // Po INVERTED QUESTION MARK
+ {0x00C0, 0x00C5, prN}, // Lu [6] LATIN CAPITAL LETTER A WITH GRAVE..LATIN CAPITAL LETTER A WITH RING ABOVE
+ {0x00C6, 0x00C6, prA}, // Lu LATIN CAPITAL LETTER AE
+ {0x00C7, 0x00CF, prN}, // Lu [9] LATIN CAPITAL LETTER C WITH CEDILLA..LATIN CAPITAL LETTER I WITH DIAERESIS
+ {0x00D0, 0x00D0, prA}, // Lu LATIN CAPITAL LETTER ETH
+ {0x00D1, 0x00D6, prN}, // Lu [6] LATIN CAPITAL LETTER N WITH TILDE..LATIN CAPITAL LETTER O WITH DIAERESIS
+ {0x00D7, 0x00D7, prA}, // Sm MULTIPLICATION SIGN
+ {0x00D8, 0x00D8, prA}, // Lu LATIN CAPITAL LETTER O WITH STROKE
+ {0x00D9, 0x00DD, prN}, // Lu [5] LATIN CAPITAL LETTER U WITH GRAVE..LATIN CAPITAL LETTER Y WITH ACUTE
+ {0x00DE, 0x00E1, prA}, // L& [4] LATIN CAPITAL LETTER THORN..LATIN SMALL LETTER A WITH ACUTE
+ {0x00E2, 0x00E5, prN}, // Ll [4] LATIN SMALL LETTER A WITH CIRCUMFLEX..LATIN SMALL LETTER A WITH RING ABOVE
+ {0x00E6, 0x00E6, prA}, // Ll LATIN SMALL LETTER AE
+ {0x00E7, 0x00E7, prN}, // Ll LATIN SMALL LETTER C WITH CEDILLA
+ {0x00E8, 0x00EA, prA}, // Ll [3] LATIN SMALL LETTER E WITH GRAVE..LATIN SMALL LETTER E WITH CIRCUMFLEX
+ {0x00EB, 0x00EB, prN}, // Ll LATIN SMALL LETTER E WITH DIAERESIS
+ {0x00EC, 0x00ED, prA}, // Ll [2] LATIN SMALL LETTER I WITH GRAVE..LATIN SMALL LETTER I WITH ACUTE
+ {0x00EE, 0x00EF, prN}, // Ll [2] LATIN SMALL LETTER I WITH CIRCUMFLEX..LATIN SMALL LETTER I WITH DIAERESIS
+ {0x00F0, 0x00F0, prA}, // Ll LATIN SMALL LETTER ETH
+ {0x00F1, 0x00F1, prN}, // Ll LATIN SMALL LETTER N WITH TILDE
+ {0x00F2, 0x00F3, prA}, // Ll [2] LATIN SMALL LETTER O WITH GRAVE..LATIN SMALL LETTER O WITH ACUTE
+ {0x00F4, 0x00F6, prN}, // Ll [3] LATIN SMALL LETTER O WITH CIRCUMFLEX..LATIN SMALL LETTER O WITH DIAERESIS
+ {0x00F7, 0x00F7, prA}, // Sm DIVISION SIGN
+ {0x00F8, 0x00FA, prA}, // Ll [3] LATIN SMALL LETTER O WITH STROKE..LATIN SMALL LETTER U WITH ACUTE
+ {0x00FB, 0x00FB, prN}, // Ll LATIN SMALL LETTER U WITH CIRCUMFLEX
+ {0x00FC, 0x00FC, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS
+ {0x00FD, 0x00FD, prN}, // Ll LATIN SMALL LETTER Y WITH ACUTE
+ {0x00FE, 0x00FE, prA}, // Ll LATIN SMALL LETTER THORN
+ {0x00FF, 0x00FF, prN}, // Ll LATIN SMALL LETTER Y WITH DIAERESIS
+ {0x0100, 0x0100, prN}, // Lu LATIN CAPITAL LETTER A WITH MACRON
+ {0x0101, 0x0101, prA}, // Ll LATIN SMALL LETTER A WITH MACRON
+ {0x0102, 0x0110, prN}, // L& [15] LATIN CAPITAL LETTER A WITH BREVE..LATIN CAPITAL LETTER D WITH STROKE
+ {0x0111, 0x0111, prA}, // Ll LATIN SMALL LETTER D WITH STROKE
+ {0x0112, 0x0112, prN}, // Lu LATIN CAPITAL LETTER E WITH MACRON
+ {0x0113, 0x0113, prA}, // Ll LATIN SMALL LETTER E WITH MACRON
+ {0x0114, 0x011A, prN}, // L& [7] LATIN CAPITAL LETTER E WITH BREVE..LATIN CAPITAL LETTER E WITH CARON
+ {0x011B, 0x011B, prA}, // Ll LATIN SMALL LETTER E WITH CARON
+ {0x011C, 0x0125, prN}, // L& [10] LATIN CAPITAL LETTER G WITH CIRCUMFLEX..LATIN SMALL LETTER H WITH CIRCUMFLEX
+ {0x0126, 0x0127, prA}, // L& [2] LATIN CAPITAL LETTER H WITH STROKE..LATIN SMALL LETTER H WITH STROKE
+ {0x0128, 0x012A, prN}, // L& [3] LATIN CAPITAL LETTER I WITH TILDE..LATIN CAPITAL LETTER I WITH MACRON
+ {0x012B, 0x012B, prA}, // Ll LATIN SMALL LETTER I WITH MACRON
+ {0x012C, 0x0130, prN}, // L& [5] LATIN CAPITAL LETTER I WITH BREVE..LATIN CAPITAL LETTER I WITH DOT ABOVE
+ {0x0131, 0x0133, prA}, // L& [3] LATIN SMALL LETTER DOTLESS I..LATIN SMALL LIGATURE IJ
+ {0x0134, 0x0137, prN}, // L& [4] LATIN CAPITAL LETTER J WITH CIRCUMFLEX..LATIN SMALL LETTER K WITH CEDILLA
+ {0x0138, 0x0138, prA}, // Ll LATIN SMALL LETTER KRA
+ {0x0139, 0x013E, prN}, // L& [6] LATIN CAPITAL LETTER L WITH ACUTE..LATIN SMALL LETTER L WITH CARON
+ {0x013F, 0x0142, prA}, // L& [4] LATIN CAPITAL LETTER L WITH MIDDLE DOT..LATIN SMALL LETTER L WITH STROKE
+ {0x0143, 0x0143, prN}, // Lu LATIN CAPITAL LETTER N WITH ACUTE
+ {0x0144, 0x0144, prA}, // Ll LATIN SMALL LETTER N WITH ACUTE
+ {0x0145, 0x0147, prN}, // L& [3] LATIN CAPITAL LETTER N WITH CEDILLA..LATIN CAPITAL LETTER N WITH CARON
+ {0x0148, 0x014B, prA}, // L& [4] LATIN SMALL LETTER N WITH CARON..LATIN SMALL LETTER ENG
+ {0x014C, 0x014C, prN}, // Lu LATIN CAPITAL LETTER O WITH MACRON
+ {0x014D, 0x014D, prA}, // Ll LATIN SMALL LETTER O WITH MACRON
+ {0x014E, 0x0151, prN}, // L& [4] LATIN CAPITAL LETTER O WITH BREVE..LATIN SMALL LETTER O WITH DOUBLE ACUTE
+ {0x0152, 0x0153, prA}, // L& [2] LATIN CAPITAL LIGATURE OE..LATIN SMALL LIGATURE OE
+ {0x0154, 0x0165, prN}, // L& [18] LATIN CAPITAL LETTER R WITH ACUTE..LATIN SMALL LETTER T WITH CARON
+ {0x0166, 0x0167, prA}, // L& [2] LATIN CAPITAL LETTER T WITH STROKE..LATIN SMALL LETTER T WITH STROKE
+ {0x0168, 0x016A, prN}, // L& [3] LATIN CAPITAL LETTER U WITH TILDE..LATIN CAPITAL LETTER U WITH MACRON
+ {0x016B, 0x016B, prA}, // Ll LATIN SMALL LETTER U WITH MACRON
+ {0x016C, 0x017F, prN}, // L& [20] LATIN CAPITAL LETTER U WITH BREVE..LATIN SMALL LETTER LONG S
+ {0x0180, 0x01BA, prN}, // L& [59] LATIN SMALL LETTER B WITH STROKE..LATIN SMALL LETTER EZH WITH TAIL
+ {0x01BB, 0x01BB, prN}, // Lo LATIN LETTER TWO WITH STROKE
+ {0x01BC, 0x01BF, prN}, // L& [4] LATIN CAPITAL LETTER TONE FIVE..LATIN LETTER WYNN
+ {0x01C0, 0x01C3, prN}, // Lo [4] LATIN LETTER DENTAL CLICK..LATIN LETTER RETROFLEX CLICK
+ {0x01C4, 0x01CD, prN}, // L& [10] LATIN CAPITAL LETTER DZ WITH CARON..LATIN CAPITAL LETTER A WITH CARON
+ {0x01CE, 0x01CE, prA}, // Ll LATIN SMALL LETTER A WITH CARON
+ {0x01CF, 0x01CF, prN}, // Lu LATIN CAPITAL LETTER I WITH CARON
+ {0x01D0, 0x01D0, prA}, // Ll LATIN SMALL LETTER I WITH CARON
+ {0x01D1, 0x01D1, prN}, // Lu LATIN CAPITAL LETTER O WITH CARON
+ {0x01D2, 0x01D2, prA}, // Ll LATIN SMALL LETTER O WITH CARON
+ {0x01D3, 0x01D3, prN}, // Lu LATIN CAPITAL LETTER U WITH CARON
+ {0x01D4, 0x01D4, prA}, // Ll LATIN SMALL LETTER U WITH CARON
+ {0x01D5, 0x01D5, prN}, // Lu LATIN CAPITAL LETTER U WITH DIAERESIS AND MACRON
+ {0x01D6, 0x01D6, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS AND MACRON
+ {0x01D7, 0x01D7, prN}, // Lu LATIN CAPITAL LETTER U WITH DIAERESIS AND ACUTE
+ {0x01D8, 0x01D8, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS AND ACUTE
+ {0x01D9, 0x01D9, prN}, // Lu LATIN CAPITAL LETTER U WITH DIAERESIS AND CARON
+ {0x01DA, 0x01DA, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS AND CARON
+ {0x01DB, 0x01DB, prN}, // Lu LATIN CAPITAL LETTER U WITH DIAERESIS AND GRAVE
+ {0x01DC, 0x01DC, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS AND GRAVE
+ {0x01DD, 0x024F, prN}, // L& [115] LATIN SMALL LETTER TURNED E..LATIN SMALL LETTER Y WITH STROKE
+ {0x0250, 0x0250, prN}, // Ll LATIN SMALL LETTER TURNED A
+ {0x0251, 0x0251, prA}, // Ll LATIN SMALL LETTER ALPHA
+ {0x0252, 0x0260, prN}, // Ll [15] LATIN SMALL LETTER TURNED ALPHA..LATIN SMALL LETTER G WITH HOOK
+ {0x0261, 0x0261, prA}, // Ll LATIN SMALL LETTER SCRIPT G
+ {0x0262, 0x0293, prN}, // Ll [50] LATIN LETTER SMALL CAPITAL G..LATIN SMALL LETTER EZH WITH CURL
+ {0x0294, 0x0294, prN}, // Lo LATIN LETTER GLOTTAL STOP
+ {0x0295, 0x02AF, prN}, // Ll [27] LATIN LETTER PHARYNGEAL VOICED FRICATIVE..LATIN SMALL LETTER TURNED H WITH FISHHOOK AND TAIL
+ {0x02B0, 0x02C1, prN}, // Lm [18] MODIFIER LETTER SMALL H..MODIFIER LETTER REVERSED GLOTTAL STOP
+ {0x02C2, 0x02C3, prN}, // Sk [2] MODIFIER LETTER LEFT ARROWHEAD..MODIFIER LETTER RIGHT ARROWHEAD
+ {0x02C4, 0x02C4, prA}, // Sk MODIFIER LETTER UP ARROWHEAD
+ {0x02C5, 0x02C5, prN}, // Sk MODIFIER LETTER DOWN ARROWHEAD
+ {0x02C6, 0x02C6, prN}, // Lm MODIFIER LETTER CIRCUMFLEX ACCENT
+ {0x02C7, 0x02C7, prA}, // Lm CARON
+ {0x02C8, 0x02C8, prN}, // Lm MODIFIER LETTER VERTICAL LINE
+ {0x02C9, 0x02CB, prA}, // Lm [3] MODIFIER LETTER MACRON..MODIFIER LETTER GRAVE ACCENT
+ {0x02CC, 0x02CC, prN}, // Lm MODIFIER LETTER LOW VERTICAL LINE
+ {0x02CD, 0x02CD, prA}, // Lm MODIFIER LETTER LOW MACRON
+ {0x02CE, 0x02CF, prN}, // Lm [2] MODIFIER LETTER LOW GRAVE ACCENT..MODIFIER LETTER LOW ACUTE ACCENT
+ {0x02D0, 0x02D0, prA}, // Lm MODIFIER LETTER TRIANGULAR COLON
+ {0x02D1, 0x02D1, prN}, // Lm MODIFIER LETTER HALF TRIANGULAR COLON
+ {0x02D2, 0x02D7, prN}, // Sk [6] MODIFIER LETTER CENTRED RIGHT HALF RING..MODIFIER LETTER MINUS SIGN
+ {0x02D8, 0x02DB, prA}, // Sk [4] BREVE..OGONEK
+ {0x02DC, 0x02DC, prN}, // Sk SMALL TILDE
+ {0x02DD, 0x02DD, prA}, // Sk DOUBLE ACUTE ACCENT
+ {0x02DE, 0x02DE, prN}, // Sk MODIFIER LETTER RHOTIC HOOK
+ {0x02DF, 0x02DF, prA}, // Sk MODIFIER LETTER CROSS ACCENT
+ {0x02E0, 0x02E4, prN}, // Lm [5] MODIFIER LETTER SMALL GAMMA..MODIFIER LETTER SMALL REVERSED GLOTTAL STOP
+ {0x02E5, 0x02EB, prN}, // Sk [7] MODIFIER LETTER EXTRA-HIGH TONE BAR..MODIFIER LETTER YANG DEPARTING TONE MARK
+ {0x02EC, 0x02EC, prN}, // Lm MODIFIER LETTER VOICING
+ {0x02ED, 0x02ED, prN}, // Sk MODIFIER LETTER UNASPIRATED
+ {0x02EE, 0x02EE, prN}, // Lm MODIFIER LETTER DOUBLE APOSTROPHE
+ {0x02EF, 0x02FF, prN}, // Sk [17] MODIFIER LETTER LOW DOWN ARROWHEAD..MODIFIER LETTER LOW LEFT ARROW
+ {0x0300, 0x036F, prA}, // Mn [112] COMBINING GRAVE ACCENT..COMBINING LATIN SMALL LETTER X
+ {0x0370, 0x0373, prN}, // L& [4] GREEK CAPITAL LETTER HETA..GREEK SMALL LETTER ARCHAIC SAMPI
+ {0x0374, 0x0374, prN}, // Lm GREEK NUMERAL SIGN
+ {0x0375, 0x0375, prN}, // Sk GREEK LOWER NUMERAL SIGN
+ {0x0376, 0x0377, prN}, // L& [2] GREEK CAPITAL LETTER PAMPHYLIAN DIGAMMA..GREEK SMALL LETTER PAMPHYLIAN DIGAMMA
+ {0x037A, 0x037A, prN}, // Lm GREEK YPOGEGRAMMENI
+ {0x037B, 0x037D, prN}, // Ll [3] GREEK SMALL REVERSED LUNATE SIGMA SYMBOL..GREEK SMALL REVERSED DOTTED LUNATE SIGMA SYMBOL
+ {0x037E, 0x037E, prN}, // Po GREEK QUESTION MARK
+ {0x037F, 0x037F, prN}, // Lu GREEK CAPITAL LETTER YOT
+ {0x0384, 0x0385, prN}, // Sk [2] GREEK TONOS..GREEK DIALYTIKA TONOS
+ {0x0386, 0x0386, prN}, // Lu GREEK CAPITAL LETTER ALPHA WITH TONOS
+ {0x0387, 0x0387, prN}, // Po GREEK ANO TELEIA
+ {0x0388, 0x038A, prN}, // Lu [3] GREEK CAPITAL LETTER EPSILON WITH TONOS..GREEK CAPITAL LETTER IOTA WITH TONOS
+ {0x038C, 0x038C, prN}, // Lu GREEK CAPITAL LETTER OMICRON WITH TONOS
+ {0x038E, 0x0390, prN}, // L& [3] GREEK CAPITAL LETTER UPSILON WITH TONOS..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
+ {0x0391, 0x03A1, prA}, // Lu [17] GREEK CAPITAL LETTER ALPHA..GREEK CAPITAL LETTER RHO
+ {0x03A3, 0x03A9, prA}, // Lu [7] GREEK CAPITAL LETTER SIGMA..GREEK CAPITAL LETTER OMEGA
+ {0x03AA, 0x03B0, prN}, // L& [7] GREEK CAPITAL LETTER IOTA WITH DIALYTIKA..GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
+ {0x03B1, 0x03C1, prA}, // Ll [17] GREEK SMALL LETTER ALPHA..GREEK SMALL LETTER RHO
+ {0x03C2, 0x03C2, prN}, // Ll GREEK SMALL LETTER FINAL SIGMA
+ {0x03C3, 0x03C9, prA}, // Ll [7] GREEK SMALL LETTER SIGMA..GREEK SMALL LETTER OMEGA
+ {0x03CA, 0x03F5, prN}, // L& [44] GREEK SMALL LETTER IOTA WITH DIALYTIKA..GREEK LUNATE EPSILON SYMBOL
+ {0x03F6, 0x03F6, prN}, // Sm GREEK REVERSED LUNATE EPSILON SYMBOL
+ {0x03F7, 0x03FF, prN}, // L& [9] GREEK CAPITAL LETTER SHO..GREEK CAPITAL REVERSED DOTTED LUNATE SIGMA SYMBOL
+ {0x0400, 0x0400, prN}, // Lu CYRILLIC CAPITAL LETTER IE WITH GRAVE
+ {0x0401, 0x0401, prA}, // Lu CYRILLIC CAPITAL LETTER IO
+ {0x0402, 0x040F, prN}, // Lu [14] CYRILLIC CAPITAL LETTER DJE..CYRILLIC CAPITAL LETTER DZHE
+ {0x0410, 0x044F, prA}, // L& [64] CYRILLIC CAPITAL LETTER A..CYRILLIC SMALL LETTER YA
+ {0x0450, 0x0450, prN}, // Ll CYRILLIC SMALL LETTER IE WITH GRAVE
+ {0x0451, 0x0451, prA}, // Ll CYRILLIC SMALL LETTER IO
+ {0x0452, 0x0481, prN}, // L& [48] CYRILLIC SMALL LETTER DJE..CYRILLIC SMALL LETTER KOPPA
+ {0x0482, 0x0482, prN}, // So CYRILLIC THOUSANDS SIGN
+ {0x0483, 0x0487, prN}, // Mn [5] COMBINING CYRILLIC TITLO..COMBINING CYRILLIC POKRYTIE
+ {0x0488, 0x0489, prN}, // Me [2] COMBINING CYRILLIC HUNDRED THOUSANDS SIGN..COMBINING CYRILLIC MILLIONS SIGN
+ {0x048A, 0x04FF, prN}, // L& [118] CYRILLIC CAPITAL LETTER SHORT I WITH TAIL..CYRILLIC SMALL LETTER HA WITH STROKE
+ {0x0500, 0x052F, prN}, // L& [48] CYRILLIC CAPITAL LETTER KOMI DE..CYRILLIC SMALL LETTER EL WITH DESCENDER
+ {0x0531, 0x0556, prN}, // Lu [38] ARMENIAN CAPITAL LETTER AYB..ARMENIAN CAPITAL LETTER FEH
+ {0x0559, 0x0559, prN}, // Lm ARMENIAN MODIFIER LETTER LEFT HALF RING
+ {0x055A, 0x055F, prN}, // Po [6] ARMENIAN APOSTROPHE..ARMENIAN ABBREVIATION MARK
+ {0x0560, 0x0588, prN}, // Ll [41] ARMENIAN SMALL LETTER TURNED AYB..ARMENIAN SMALL LETTER YI WITH STROKE
+ {0x0589, 0x0589, prN}, // Po ARMENIAN FULL STOP
+ {0x058A, 0x058A, prN}, // Pd ARMENIAN HYPHEN
+ {0x058D, 0x058E, prN}, // So [2] RIGHT-FACING ARMENIAN ETERNITY SIGN..LEFT-FACING ARMENIAN ETERNITY SIGN
+ {0x058F, 0x058F, prN}, // Sc ARMENIAN DRAM SIGN
+ {0x0591, 0x05BD, prN}, // Mn [45] HEBREW ACCENT ETNAHTA..HEBREW POINT METEG
+ {0x05BE, 0x05BE, prN}, // Pd HEBREW PUNCTUATION MAQAF
+ {0x05BF, 0x05BF, prN}, // Mn HEBREW POINT RAFE
+ {0x05C0, 0x05C0, prN}, // Po HEBREW PUNCTUATION PASEQ
+ {0x05C1, 0x05C2, prN}, // Mn [2] HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT
+ {0x05C3, 0x05C3, prN}, // Po HEBREW PUNCTUATION SOF PASUQ
+ {0x05C4, 0x05C5, prN}, // Mn [2] HEBREW MARK UPPER DOT..HEBREW MARK LOWER DOT
+ {0x05C6, 0x05C6, prN}, // Po HEBREW PUNCTUATION NUN HAFUKHA
+ {0x05C7, 0x05C7, prN}, // Mn HEBREW POINT QAMATS QATAN
+ {0x05D0, 0x05EA, prN}, // Lo [27] HEBREW LETTER ALEF..HEBREW LETTER TAV
+ {0x05EF, 0x05F2, prN}, // Lo [4] HEBREW YOD TRIANGLE..HEBREW LIGATURE YIDDISH DOUBLE YOD
+ {0x05F3, 0x05F4, prN}, // Po [2] HEBREW PUNCTUATION GERESH..HEBREW PUNCTUATION GERSHAYIM
+ {0x0600, 0x0605, prN}, // Cf [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE
+ {0x0606, 0x0608, prN}, // Sm [3] ARABIC-INDIC CUBE ROOT..ARABIC RAY
+ {0x0609, 0x060A, prN}, // Po [2] ARABIC-INDIC PER MILLE SIGN..ARABIC-INDIC PER TEN THOUSAND SIGN
+ {0x060B, 0x060B, prN}, // Sc AFGHANI SIGN
+ {0x060C, 0x060D, prN}, // Po [2] ARABIC COMMA..ARABIC DATE SEPARATOR
+ {0x060E, 0x060F, prN}, // So [2] ARABIC POETIC VERSE SIGN..ARABIC SIGN MISRA
+ {0x0610, 0x061A, prN}, // Mn [11] ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM..ARABIC SMALL KASRA
+ {0x061B, 0x061B, prN}, // Po ARABIC SEMICOLON
+ {0x061C, 0x061C, prN}, // Cf ARABIC LETTER MARK
+ {0x061D, 0x061F, prN}, // Po [3] ARABIC END OF TEXT MARK..ARABIC QUESTION MARK
+ {0x0620, 0x063F, prN}, // Lo [32] ARABIC LETTER KASHMIRI YEH..ARABIC LETTER FARSI YEH WITH THREE DOTS ABOVE
+ {0x0640, 0x0640, prN}, // Lm ARABIC TATWEEL
+ {0x0641, 0x064A, prN}, // Lo [10] ARABIC LETTER FEH..ARABIC LETTER YEH
+ {0x064B, 0x065F, prN}, // Mn [21] ARABIC FATHATAN..ARABIC WAVY HAMZA BELOW
+ {0x0660, 0x0669, prN}, // Nd [10] ARABIC-INDIC DIGIT ZERO..ARABIC-INDIC DIGIT NINE
+ {0x066A, 0x066D, prN}, // Po [4] ARABIC PERCENT SIGN..ARABIC FIVE POINTED STAR
+ {0x066E, 0x066F, prN}, // Lo [2] ARABIC LETTER DOTLESS BEH..ARABIC LETTER DOTLESS QAF
+ {0x0670, 0x0670, prN}, // Mn ARABIC LETTER SUPERSCRIPT ALEF
+ {0x0671, 0x06D3, prN}, // Lo [99] ARABIC LETTER ALEF WASLA..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE
+ {0x06D4, 0x06D4, prN}, // Po ARABIC FULL STOP
+ {0x06D5, 0x06D5, prN}, // Lo ARABIC LETTER AE
+ {0x06D6, 0x06DC, prN}, // Mn [7] ARABIC SMALL HIGH LIGATURE SAD WITH LAM WITH ALEF MAKSURA..ARABIC SMALL HIGH SEEN
+ {0x06DD, 0x06DD, prN}, // Cf ARABIC END OF AYAH
+ {0x06DE, 0x06DE, prN}, // So ARABIC START OF RUB EL HIZB
+ {0x06DF, 0x06E4, prN}, // Mn [6] ARABIC SMALL HIGH ROUNDED ZERO..ARABIC SMALL HIGH MADDA
+ {0x06E5, 0x06E6, prN}, // Lm [2] ARABIC SMALL WAW..ARABIC SMALL YEH
+ {0x06E7, 0x06E8, prN}, // Mn [2] ARABIC SMALL HIGH YEH..ARABIC SMALL HIGH NOON
+ {0x06E9, 0x06E9, prN}, // So ARABIC PLACE OF SAJDAH
+ {0x06EA, 0x06ED, prN}, // Mn [4] ARABIC EMPTY CENTRE LOW STOP..ARABIC SMALL LOW MEEM
+ {0x06EE, 0x06EF, prN}, // Lo [2] ARABIC LETTER DAL WITH INVERTED V..ARABIC LETTER REH WITH INVERTED V
+ {0x06F0, 0x06F9, prN}, // Nd [10] EXTENDED ARABIC-INDIC DIGIT ZERO..EXTENDED ARABIC-INDIC DIGIT NINE
+ {0x06FA, 0x06FC, prN}, // Lo [3] ARABIC LETTER SHEEN WITH DOT BELOW..ARABIC LETTER GHAIN WITH DOT BELOW
+ {0x06FD, 0x06FE, prN}, // So [2] ARABIC SIGN SINDHI AMPERSAND..ARABIC SIGN SINDHI POSTPOSITION MEN
+ {0x06FF, 0x06FF, prN}, // Lo ARABIC LETTER HEH WITH INVERTED V
+ {0x0700, 0x070D, prN}, // Po [14] SYRIAC END OF PARAGRAPH..SYRIAC HARKLEAN ASTERISCUS
+ {0x070F, 0x070F, prN}, // Cf SYRIAC ABBREVIATION MARK
+ {0x0710, 0x0710, prN}, // Lo SYRIAC LETTER ALAPH
+ {0x0711, 0x0711, prN}, // Mn SYRIAC LETTER SUPERSCRIPT ALAPH
+ {0x0712, 0x072F, prN}, // Lo [30] SYRIAC LETTER BETH..SYRIAC LETTER PERSIAN DHALATH
+ {0x0730, 0x074A, prN}, // Mn [27] SYRIAC PTHAHA ABOVE..SYRIAC BARREKH
+ {0x074D, 0x074F, prN}, // Lo [3] SYRIAC LETTER SOGDIAN ZHAIN..SYRIAC LETTER SOGDIAN FE
+ {0x0750, 0x077F, prN}, // Lo [48] ARABIC LETTER BEH WITH THREE DOTS HORIZONTALLY BELOW..ARABIC LETTER KAF WITH TWO DOTS ABOVE
+ {0x0780, 0x07A5, prN}, // Lo [38] THAANA LETTER HAA..THAANA LETTER WAAVU
+ {0x07A6, 0x07B0, prN}, // Mn [11] THAANA ABAFILI..THAANA SUKUN
+ {0x07B1, 0x07B1, prN}, // Lo THAANA LETTER NAA
+ {0x07C0, 0x07C9, prN}, // Nd [10] NKO DIGIT ZERO..NKO DIGIT NINE
+ {0x07CA, 0x07EA, prN}, // Lo [33] NKO LETTER A..NKO LETTER JONA RA
+ {0x07EB, 0x07F3, prN}, // Mn [9] NKO COMBINING SHORT HIGH TONE..NKO COMBINING DOUBLE DOT ABOVE
+ {0x07F4, 0x07F5, prN}, // Lm [2] NKO HIGH TONE APOSTROPHE..NKO LOW TONE APOSTROPHE
+ {0x07F6, 0x07F6, prN}, // So NKO SYMBOL OO DENNEN
+ {0x07F7, 0x07F9, prN}, // Po [3] NKO SYMBOL GBAKURUNEN..NKO EXCLAMATION MARK
+ {0x07FA, 0x07FA, prN}, // Lm NKO LAJANYALAN
+ {0x07FD, 0x07FD, prN}, // Mn NKO DANTAYALAN
+ {0x07FE, 0x07FF, prN}, // Sc [2] NKO DOROME SIGN..NKO TAMAN SIGN
+ {0x0800, 0x0815, prN}, // Lo [22] SAMARITAN LETTER ALAF..SAMARITAN LETTER TAAF
+ {0x0816, 0x0819, prN}, // Mn [4] SAMARITAN MARK IN..SAMARITAN MARK DAGESH
+ {0x081A, 0x081A, prN}, // Lm SAMARITAN MODIFIER LETTER EPENTHETIC YUT
+ {0x081B, 0x0823, prN}, // Mn [9] SAMARITAN MARK EPENTHETIC YUT..SAMARITAN VOWEL SIGN A
+ {0x0824, 0x0824, prN}, // Lm SAMARITAN MODIFIER LETTER SHORT A
+ {0x0825, 0x0827, prN}, // Mn [3] SAMARITAN VOWEL SIGN SHORT A..SAMARITAN VOWEL SIGN U
+ {0x0828, 0x0828, prN}, // Lm SAMARITAN MODIFIER LETTER I
+ {0x0829, 0x082D, prN}, // Mn [5] SAMARITAN VOWEL SIGN LONG I..SAMARITAN MARK NEQUDAA
+ {0x0830, 0x083E, prN}, // Po [15] SAMARITAN PUNCTUATION NEQUDAA..SAMARITAN PUNCTUATION ANNAAU
+ {0x0840, 0x0858, prN}, // Lo [25] MANDAIC LETTER HALQA..MANDAIC LETTER AIN
+ {0x0859, 0x085B, prN}, // Mn [3] MANDAIC AFFRICATION MARK..MANDAIC GEMINATION MARK
+ {0x085E, 0x085E, prN}, // Po MANDAIC PUNCTUATION
+ {0x0860, 0x086A, prN}, // Lo [11] SYRIAC LETTER MALAYALAM NGA..SYRIAC LETTER MALAYALAM SSA
+ {0x0870, 0x0887, prN}, // Lo [24] ARABIC LETTER ALEF WITH ATTACHED FATHA..ARABIC BASELINE ROUND DOT
+ {0x0888, 0x0888, prN}, // Sk ARABIC RAISED ROUND DOT
+ {0x0889, 0x088E, prN}, // Lo [6] ARABIC LETTER NOON WITH INVERTED SMALL V..ARABIC VERTICAL TAIL
+ {0x0890, 0x0891, prN}, // Cf [2] ARABIC POUND MARK ABOVE..ARABIC PIASTRE MARK ABOVE
+ {0x0898, 0x089F, prN}, // Mn [8] ARABIC SMALL HIGH WORD AL-JUZ..ARABIC HALF MADDA OVER MADDA
+ {0x08A0, 0x08C8, prN}, // Lo [41] ARABIC LETTER BEH WITH SMALL V BELOW..ARABIC LETTER GRAF
+ {0x08C9, 0x08C9, prN}, // Lm ARABIC SMALL FARSI YEH
+ {0x08CA, 0x08E1, prN}, // Mn [24] ARABIC SMALL HIGH FARSI YEH..ARABIC SMALL HIGH SIGN SAFHA
+ {0x08E2, 0x08E2, prN}, // Cf ARABIC DISPUTED END OF AYAH
+ {0x08E3, 0x08FF, prN}, // Mn [29] ARABIC TURNED DAMMA BELOW..ARABIC MARK SIDEWAYS NOON GHUNNA
+ {0x0900, 0x0902, prN}, // Mn [3] DEVANAGARI SIGN INVERTED CANDRABINDU..DEVANAGARI SIGN ANUSVARA
+ {0x0903, 0x0903, prN}, // Mc DEVANAGARI SIGN VISARGA
+ {0x0904, 0x0939, prN}, // Lo [54] DEVANAGARI LETTER SHORT A..DEVANAGARI LETTER HA
+ {0x093A, 0x093A, prN}, // Mn DEVANAGARI VOWEL SIGN OE
+ {0x093B, 0x093B, prN}, // Mc DEVANAGARI VOWEL SIGN OOE
+ {0x093C, 0x093C, prN}, // Mn DEVANAGARI SIGN NUKTA
+ {0x093D, 0x093D, prN}, // Lo DEVANAGARI SIGN AVAGRAHA
+ {0x093E, 0x0940, prN}, // Mc [3] DEVANAGARI VOWEL SIGN AA..DEVANAGARI VOWEL SIGN II
+ {0x0941, 0x0948, prN}, // Mn [8] DEVANAGARI VOWEL SIGN U..DEVANAGARI VOWEL SIGN AI
+ {0x0949, 0x094C, prN}, // Mc [4] DEVANAGARI VOWEL SIGN CANDRA O..DEVANAGARI VOWEL SIGN AU
+ {0x094D, 0x094D, prN}, // Mn DEVANAGARI SIGN VIRAMA
+ {0x094E, 0x094F, prN}, // Mc [2] DEVANAGARI VOWEL SIGN PRISHTHAMATRA E..DEVANAGARI VOWEL SIGN AW
+ {0x0950, 0x0950, prN}, // Lo DEVANAGARI OM
+ {0x0951, 0x0957, prN}, // Mn [7] DEVANAGARI STRESS SIGN UDATTA..DEVANAGARI VOWEL SIGN UUE
+ {0x0958, 0x0961, prN}, // Lo [10] DEVANAGARI LETTER QA..DEVANAGARI LETTER VOCALIC LL
+ {0x0962, 0x0963, prN}, // Mn [2] DEVANAGARI VOWEL SIGN VOCALIC L..DEVANAGARI VOWEL SIGN VOCALIC LL
+ {0x0964, 0x0965, prN}, // Po [2] DEVANAGARI DANDA..DEVANAGARI DOUBLE DANDA
+ {0x0966, 0x096F, prN}, // Nd [10] DEVANAGARI DIGIT ZERO..DEVANAGARI DIGIT NINE
+ {0x0970, 0x0970, prN}, // Po DEVANAGARI ABBREVIATION SIGN
+ {0x0971, 0x0971, prN}, // Lm DEVANAGARI SIGN HIGH SPACING DOT
+ {0x0972, 0x097F, prN}, // Lo [14] DEVANAGARI LETTER CANDRA A..DEVANAGARI LETTER BBA
+ {0x0980, 0x0980, prN}, // Lo BENGALI ANJI
+ {0x0981, 0x0981, prN}, // Mn BENGALI SIGN CANDRABINDU
+ {0x0982, 0x0983, prN}, // Mc [2] BENGALI SIGN ANUSVARA..BENGALI SIGN VISARGA
+ {0x0985, 0x098C, prN}, // Lo [8] BENGALI LETTER A..BENGALI LETTER VOCALIC L
+ {0x098F, 0x0990, prN}, // Lo [2] BENGALI LETTER E..BENGALI LETTER AI
+ {0x0993, 0x09A8, prN}, // Lo [22] BENGALI LETTER O..BENGALI LETTER NA
+ {0x09AA, 0x09B0, prN}, // Lo [7] BENGALI LETTER PA..BENGALI LETTER RA
+ {0x09B2, 0x09B2, prN}, // Lo BENGALI LETTER LA
+ {0x09B6, 0x09B9, prN}, // Lo [4] BENGALI LETTER SHA..BENGALI LETTER HA
+ {0x09BC, 0x09BC, prN}, // Mn BENGALI SIGN NUKTA
+ {0x09BD, 0x09BD, prN}, // Lo BENGALI SIGN AVAGRAHA
+ {0x09BE, 0x09C0, prN}, // Mc [3] BENGALI VOWEL SIGN AA..BENGALI VOWEL SIGN II
+ {0x09C1, 0x09C4, prN}, // Mn [4] BENGALI VOWEL SIGN U..BENGALI VOWEL SIGN VOCALIC RR
+ {0x09C7, 0x09C8, prN}, // Mc [2] BENGALI VOWEL SIGN E..BENGALI VOWEL SIGN AI
+ {0x09CB, 0x09CC, prN}, // Mc [2] BENGALI VOWEL SIGN O..BENGALI VOWEL SIGN AU
+ {0x09CD, 0x09CD, prN}, // Mn BENGALI SIGN VIRAMA
+ {0x09CE, 0x09CE, prN}, // Lo BENGALI LETTER KHANDA TA
+ {0x09D7, 0x09D7, prN}, // Mc BENGALI AU LENGTH MARK
+ {0x09DC, 0x09DD, prN}, // Lo [2] BENGALI LETTER RRA..BENGALI LETTER RHA
+ {0x09DF, 0x09E1, prN}, // Lo [3] BENGALI LETTER YYA..BENGALI LETTER VOCALIC LL
+ {0x09E2, 0x09E3, prN}, // Mn [2] BENGALI VOWEL SIGN VOCALIC L..BENGALI VOWEL SIGN VOCALIC LL
+ {0x09E6, 0x09EF, prN}, // Nd [10] BENGALI DIGIT ZERO..BENGALI DIGIT NINE
+ {0x09F0, 0x09F1, prN}, // Lo [2] BENGALI LETTER RA WITH MIDDLE DIAGONAL..BENGALI LETTER RA WITH LOWER DIAGONAL
+ {0x09F2, 0x09F3, prN}, // Sc [2] BENGALI RUPEE MARK..BENGALI RUPEE SIGN
+ {0x09F4, 0x09F9, prN}, // No [6] BENGALI CURRENCY NUMERATOR ONE..BENGALI CURRENCY DENOMINATOR SIXTEEN
+ {0x09FA, 0x09FA, prN}, // So BENGALI ISSHAR
+ {0x09FB, 0x09FB, prN}, // Sc BENGALI GANDA MARK
+ {0x09FC, 0x09FC, prN}, // Lo BENGALI LETTER VEDIC ANUSVARA
+ {0x09FD, 0x09FD, prN}, // Po BENGALI ABBREVIATION SIGN
+ {0x09FE, 0x09FE, prN}, // Mn BENGALI SANDHI MARK
+ {0x0A01, 0x0A02, prN}, // Mn [2] GURMUKHI SIGN ADAK BINDI..GURMUKHI SIGN BINDI
+ {0x0A03, 0x0A03, prN}, // Mc GURMUKHI SIGN VISARGA
+ {0x0A05, 0x0A0A, prN}, // Lo [6] GURMUKHI LETTER A..GURMUKHI LETTER UU
+ {0x0A0F, 0x0A10, prN}, // Lo [2] GURMUKHI LETTER EE..GURMUKHI LETTER AI
+ {0x0A13, 0x0A28, prN}, // Lo [22] GURMUKHI LETTER OO..GURMUKHI LETTER NA
+ {0x0A2A, 0x0A30, prN}, // Lo [7] GURMUKHI LETTER PA..GURMUKHI LETTER RA
+ {0x0A32, 0x0A33, prN}, // Lo [2] GURMUKHI LETTER LA..GURMUKHI LETTER LLA
+ {0x0A35, 0x0A36, prN}, // Lo [2] GURMUKHI LETTER VA..GURMUKHI LETTER SHA
+ {0x0A38, 0x0A39, prN}, // Lo [2] GURMUKHI LETTER SA..GURMUKHI LETTER HA
+ {0x0A3C, 0x0A3C, prN}, // Mn GURMUKHI SIGN NUKTA
+ {0x0A3E, 0x0A40, prN}, // Mc [3] GURMUKHI VOWEL SIGN AA..GURMUKHI VOWEL SIGN II
+ {0x0A41, 0x0A42, prN}, // Mn [2] GURMUKHI VOWEL SIGN U..GURMUKHI VOWEL SIGN UU
+ {0x0A47, 0x0A48, prN}, // Mn [2] GURMUKHI VOWEL SIGN EE..GURMUKHI VOWEL SIGN AI
+ {0x0A4B, 0x0A4D, prN}, // Mn [3] GURMUKHI VOWEL SIGN OO..GURMUKHI SIGN VIRAMA
+ {0x0A51, 0x0A51, prN}, // Mn GURMUKHI SIGN UDAAT
+ {0x0A59, 0x0A5C, prN}, // Lo [4] GURMUKHI LETTER KHHA..GURMUKHI LETTER RRA
+ {0x0A5E, 0x0A5E, prN}, // Lo GURMUKHI LETTER FA
+ {0x0A66, 0x0A6F, prN}, // Nd [10] GURMUKHI DIGIT ZERO..GURMUKHI DIGIT NINE
+ {0x0A70, 0x0A71, prN}, // Mn [2] GURMUKHI TIPPI..GURMUKHI ADDAK
+ {0x0A72, 0x0A74, prN}, // Lo [3] GURMUKHI IRI..GURMUKHI EK ONKAR
+ {0x0A75, 0x0A75, prN}, // Mn GURMUKHI SIGN YAKASH
+ {0x0A76, 0x0A76, prN}, // Po GURMUKHI ABBREVIATION SIGN
+ {0x0A81, 0x0A82, prN}, // Mn [2] GUJARATI SIGN CANDRABINDU..GUJARATI SIGN ANUSVARA
+ {0x0A83, 0x0A83, prN}, // Mc GUJARATI SIGN VISARGA
+ {0x0A85, 0x0A8D, prN}, // Lo [9] GUJARATI LETTER A..GUJARATI VOWEL CANDRA E
+ {0x0A8F, 0x0A91, prN}, // Lo [3] GUJARATI LETTER E..GUJARATI VOWEL CANDRA O
+ {0x0A93, 0x0AA8, prN}, // Lo [22] GUJARATI LETTER O..GUJARATI LETTER NA
+ {0x0AAA, 0x0AB0, prN}, // Lo [7] GUJARATI LETTER PA..GUJARATI LETTER RA
+ {0x0AB2, 0x0AB3, prN}, // Lo [2] GUJARATI LETTER LA..GUJARATI LETTER LLA
+ {0x0AB5, 0x0AB9, prN}, // Lo [5] GUJARATI LETTER VA..GUJARATI LETTER HA
+ {0x0ABC, 0x0ABC, prN}, // Mn GUJARATI SIGN NUKTA
+ {0x0ABD, 0x0ABD, prN}, // Lo GUJARATI SIGN AVAGRAHA
+ {0x0ABE, 0x0AC0, prN}, // Mc [3] GUJARATI VOWEL SIGN AA..GUJARATI VOWEL SIGN II
+ {0x0AC1, 0x0AC5, prN}, // Mn [5] GUJARATI VOWEL SIGN U..GUJARATI VOWEL SIGN CANDRA E
+ {0x0AC7, 0x0AC8, prN}, // Mn [2] GUJARATI VOWEL SIGN E..GUJARATI VOWEL SIGN AI
+ {0x0AC9, 0x0AC9, prN}, // Mc GUJARATI VOWEL SIGN CANDRA O
+ {0x0ACB, 0x0ACC, prN}, // Mc [2] GUJARATI VOWEL SIGN O..GUJARATI VOWEL SIGN AU
+ {0x0ACD, 0x0ACD, prN}, // Mn GUJARATI SIGN VIRAMA
+ {0x0AD0, 0x0AD0, prN}, // Lo GUJARATI OM
+ {0x0AE0, 0x0AE1, prN}, // Lo [2] GUJARATI LETTER VOCALIC RR..GUJARATI LETTER VOCALIC LL
+ {0x0AE2, 0x0AE3, prN}, // Mn [2] GUJARATI VOWEL SIGN VOCALIC L..GUJARATI VOWEL SIGN VOCALIC LL
+ {0x0AE6, 0x0AEF, prN}, // Nd [10] GUJARATI DIGIT ZERO..GUJARATI DIGIT NINE
+ {0x0AF0, 0x0AF0, prN}, // Po GUJARATI ABBREVIATION SIGN
+ {0x0AF1, 0x0AF1, prN}, // Sc GUJARATI RUPEE SIGN
+ {0x0AF9, 0x0AF9, prN}, // Lo GUJARATI LETTER ZHA
+ {0x0AFA, 0x0AFF, prN}, // Mn [6] GUJARATI SIGN SUKUN..GUJARATI SIGN TWO-CIRCLE NUKTA ABOVE
+ {0x0B01, 0x0B01, prN}, // Mn ORIYA SIGN CANDRABINDU
+ {0x0B02, 0x0B03, prN}, // Mc [2] ORIYA SIGN ANUSVARA..ORIYA SIGN VISARGA
+ {0x0B05, 0x0B0C, prN}, // Lo [8] ORIYA LETTER A..ORIYA LETTER VOCALIC L
+ {0x0B0F, 0x0B10, prN}, // Lo [2] ORIYA LETTER E..ORIYA LETTER AI
+ {0x0B13, 0x0B28, prN}, // Lo [22] ORIYA LETTER O..ORIYA LETTER NA
+ {0x0B2A, 0x0B30, prN}, // Lo [7] ORIYA LETTER PA..ORIYA LETTER RA
+ {0x0B32, 0x0B33, prN}, // Lo [2] ORIYA LETTER LA..ORIYA LETTER LLA
+ {0x0B35, 0x0B39, prN}, // Lo [5] ORIYA LETTER VA..ORIYA LETTER HA
+ {0x0B3C, 0x0B3C, prN}, // Mn ORIYA SIGN NUKTA
+ {0x0B3D, 0x0B3D, prN}, // Lo ORIYA SIGN AVAGRAHA
+ {0x0B3E, 0x0B3E, prN}, // Mc ORIYA VOWEL SIGN AA
+ {0x0B3F, 0x0B3F, prN}, // Mn ORIYA VOWEL SIGN I
+ {0x0B40, 0x0B40, prN}, // Mc ORIYA VOWEL SIGN II
+ {0x0B41, 0x0B44, prN}, // Mn [4] ORIYA VOWEL SIGN U..ORIYA VOWEL SIGN VOCALIC RR
+ {0x0B47, 0x0B48, prN}, // Mc [2] ORIYA VOWEL SIGN E..ORIYA VOWEL SIGN AI
+ {0x0B4B, 0x0B4C, prN}, // Mc [2] ORIYA VOWEL SIGN O..ORIYA VOWEL SIGN AU
+ {0x0B4D, 0x0B4D, prN}, // Mn ORIYA SIGN VIRAMA
+ {0x0B55, 0x0B56, prN}, // Mn [2] ORIYA SIGN OVERLINE..ORIYA AI LENGTH MARK
+ {0x0B57, 0x0B57, prN}, // Mc ORIYA AU LENGTH MARK
+ {0x0B5C, 0x0B5D, prN}, // Lo [2] ORIYA LETTER RRA..ORIYA LETTER RHA
+ {0x0B5F, 0x0B61, prN}, // Lo [3] ORIYA LETTER YYA..ORIYA LETTER VOCALIC LL
+ {0x0B62, 0x0B63, prN}, // Mn [2] ORIYA VOWEL SIGN VOCALIC L..ORIYA VOWEL SIGN VOCALIC LL
+ {0x0B66, 0x0B6F, prN}, // Nd [10] ORIYA DIGIT ZERO..ORIYA DIGIT NINE
+ {0x0B70, 0x0B70, prN}, // So ORIYA ISSHAR
+ {0x0B71, 0x0B71, prN}, // Lo ORIYA LETTER WA
+ {0x0B72, 0x0B77, prN}, // No [6] ORIYA FRACTION ONE QUARTER..ORIYA FRACTION THREE SIXTEENTHS
+ {0x0B82, 0x0B82, prN}, // Mn TAMIL SIGN ANUSVARA
+ {0x0B83, 0x0B83, prN}, // Lo TAMIL SIGN VISARGA
+ {0x0B85, 0x0B8A, prN}, // Lo [6] TAMIL LETTER A..TAMIL LETTER UU
+ {0x0B8E, 0x0B90, prN}, // Lo [3] TAMIL LETTER E..TAMIL LETTER AI
+ {0x0B92, 0x0B95, prN}, // Lo [4] TAMIL LETTER O..TAMIL LETTER KA
+ {0x0B99, 0x0B9A, prN}, // Lo [2] TAMIL LETTER NGA..TAMIL LETTER CA
+ {0x0B9C, 0x0B9C, prN}, // Lo TAMIL LETTER JA
+ {0x0B9E, 0x0B9F, prN}, // Lo [2] TAMIL LETTER NYA..TAMIL LETTER TTA
+ {0x0BA3, 0x0BA4, prN}, // Lo [2] TAMIL LETTER NNA..TAMIL LETTER TA
+ {0x0BA8, 0x0BAA, prN}, // Lo [3] TAMIL LETTER NA..TAMIL LETTER PA
+ {0x0BAE, 0x0BB9, prN}, // Lo [12] TAMIL LETTER MA..TAMIL LETTER HA
+ {0x0BBE, 0x0BBF, prN}, // Mc [2] TAMIL VOWEL SIGN AA..TAMIL VOWEL SIGN I
+ {0x0BC0, 0x0BC0, prN}, // Mn TAMIL VOWEL SIGN II
+ {0x0BC1, 0x0BC2, prN}, // Mc [2] TAMIL VOWEL SIGN U..TAMIL VOWEL SIGN UU
+ {0x0BC6, 0x0BC8, prN}, // Mc [3] TAMIL VOWEL SIGN E..TAMIL VOWEL SIGN AI
+ {0x0BCA, 0x0BCC, prN}, // Mc [3] TAMIL VOWEL SIGN O..TAMIL VOWEL SIGN AU
+ {0x0BCD, 0x0BCD, prN}, // Mn TAMIL SIGN VIRAMA
+ {0x0BD0, 0x0BD0, prN}, // Lo TAMIL OM
+ {0x0BD7, 0x0BD7, prN}, // Mc TAMIL AU LENGTH MARK
+ {0x0BE6, 0x0BEF, prN}, // Nd [10] TAMIL DIGIT ZERO..TAMIL DIGIT NINE
+ {0x0BF0, 0x0BF2, prN}, // No [3] TAMIL NUMBER TEN..TAMIL NUMBER ONE THOUSAND
+ {0x0BF3, 0x0BF8, prN}, // So [6] TAMIL DAY SIGN..TAMIL AS ABOVE SIGN
+ {0x0BF9, 0x0BF9, prN}, // Sc TAMIL RUPEE SIGN
+ {0x0BFA, 0x0BFA, prN}, // So TAMIL NUMBER SIGN
+ {0x0C00, 0x0C00, prN}, // Mn TELUGU SIGN COMBINING CANDRABINDU ABOVE
+ {0x0C01, 0x0C03, prN}, // Mc [3] TELUGU SIGN CANDRABINDU..TELUGU SIGN VISARGA
+ {0x0C04, 0x0C04, prN}, // Mn TELUGU SIGN COMBINING ANUSVARA ABOVE
+ {0x0C05, 0x0C0C, prN}, // Lo [8] TELUGU LETTER A..TELUGU LETTER VOCALIC L
+ {0x0C0E, 0x0C10, prN}, // Lo [3] TELUGU LETTER E..TELUGU LETTER AI
+ {0x0C12, 0x0C28, prN}, // Lo [23] TELUGU LETTER O..TELUGU LETTER NA
+ {0x0C2A, 0x0C39, prN}, // Lo [16] TELUGU LETTER PA..TELUGU LETTER HA
+ {0x0C3C, 0x0C3C, prN}, // Mn TELUGU SIGN NUKTA
+ {0x0C3D, 0x0C3D, prN}, // Lo TELUGU SIGN AVAGRAHA
+ {0x0C3E, 0x0C40, prN}, // Mn [3] TELUGU VOWEL SIGN AA..TELUGU VOWEL SIGN II
+ {0x0C41, 0x0C44, prN}, // Mc [4] TELUGU VOWEL SIGN U..TELUGU VOWEL SIGN VOCALIC RR
+ {0x0C46, 0x0C48, prN}, // Mn [3] TELUGU VOWEL SIGN E..TELUGU VOWEL SIGN AI
+ {0x0C4A, 0x0C4D, prN}, // Mn [4] TELUGU VOWEL SIGN O..TELUGU SIGN VIRAMA
+ {0x0C55, 0x0C56, prN}, // Mn [2] TELUGU LENGTH MARK..TELUGU AI LENGTH MARK
+ {0x0C58, 0x0C5A, prN}, // Lo [3] TELUGU LETTER TSA..TELUGU LETTER RRRA
+ {0x0C5D, 0x0C5D, prN}, // Lo TELUGU LETTER NAKAARA POLLU
+ {0x0C60, 0x0C61, prN}, // Lo [2] TELUGU LETTER VOCALIC RR..TELUGU LETTER VOCALIC LL
+ {0x0C62, 0x0C63, prN}, // Mn [2] TELUGU VOWEL SIGN VOCALIC L..TELUGU VOWEL SIGN VOCALIC LL
+ {0x0C66, 0x0C6F, prN}, // Nd [10] TELUGU DIGIT ZERO..TELUGU DIGIT NINE
+ {0x0C77, 0x0C77, prN}, // Po TELUGU SIGN SIDDHAM
+ {0x0C78, 0x0C7E, prN}, // No [7] TELUGU FRACTION DIGIT ZERO FOR ODD POWERS OF FOUR..TELUGU FRACTION DIGIT THREE FOR EVEN POWERS OF FOUR
+ {0x0C7F, 0x0C7F, prN}, // So TELUGU SIGN TUUMU
+ {0x0C80, 0x0C80, prN}, // Lo KANNADA SIGN SPACING CANDRABINDU
+ {0x0C81, 0x0C81, prN}, // Mn KANNADA SIGN CANDRABINDU
+ {0x0C82, 0x0C83, prN}, // Mc [2] KANNADA SIGN ANUSVARA..KANNADA SIGN VISARGA
+ {0x0C84, 0x0C84, prN}, // Po KANNADA SIGN SIDDHAM
+ {0x0C85, 0x0C8C, prN}, // Lo [8] KANNADA LETTER A..KANNADA LETTER VOCALIC L
+ {0x0C8E, 0x0C90, prN}, // Lo [3] KANNADA LETTER E..KANNADA LETTER AI
+ {0x0C92, 0x0CA8, prN}, // Lo [23] KANNADA LETTER O..KANNADA LETTER NA
+ {0x0CAA, 0x0CB3, prN}, // Lo [10] KANNADA LETTER PA..KANNADA LETTER LLA
+ {0x0CB5, 0x0CB9, prN}, // Lo [5] KANNADA LETTER VA..KANNADA LETTER HA
+ {0x0CBC, 0x0CBC, prN}, // Mn KANNADA SIGN NUKTA
+ {0x0CBD, 0x0CBD, prN}, // Lo KANNADA SIGN AVAGRAHA
+ {0x0CBE, 0x0CBE, prN}, // Mc KANNADA VOWEL SIGN AA
+ {0x0CBF, 0x0CBF, prN}, // Mn KANNADA VOWEL SIGN I
+ {0x0CC0, 0x0CC4, prN}, // Mc [5] KANNADA VOWEL SIGN II..KANNADA VOWEL SIGN VOCALIC RR
+ {0x0CC6, 0x0CC6, prN}, // Mn KANNADA VOWEL SIGN E
+ {0x0CC7, 0x0CC8, prN}, // Mc [2] KANNADA VOWEL SIGN EE..KANNADA VOWEL SIGN AI
+ {0x0CCA, 0x0CCB, prN}, // Mc [2] KANNADA VOWEL SIGN O..KANNADA VOWEL SIGN OO
+ {0x0CCC, 0x0CCD, prN}, // Mn [2] KANNADA VOWEL SIGN AU..KANNADA SIGN VIRAMA
+ {0x0CD5, 0x0CD6, prN}, // Mc [2] KANNADA LENGTH MARK..KANNADA AI LENGTH MARK
+ {0x0CDD, 0x0CDE, prN}, // Lo [2] KANNADA LETTER NAKAARA POLLU..KANNADA LETTER FA
+ {0x0CE0, 0x0CE1, prN}, // Lo [2] KANNADA LETTER VOCALIC RR..KANNADA LETTER VOCALIC LL
+ {0x0CE2, 0x0CE3, prN}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
+ {0x0CE6, 0x0CEF, prN}, // Nd [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE
+ {0x0CF1, 0x0CF2, prN}, // Lo [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA
+ {0x0CF3, 0x0CF3, prN}, // Mc KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT
+ {0x0D00, 0x0D01, prN}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
+ {0x0D02, 0x0D03, prN}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
+ {0x0D04, 0x0D0C, prN}, // Lo [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L
+ {0x0D0E, 0x0D10, prN}, // Lo [3] MALAYALAM LETTER E..MALAYALAM LETTER AI
+ {0x0D12, 0x0D3A, prN}, // Lo [41] MALAYALAM LETTER O..MALAYALAM LETTER TTTA
+ {0x0D3B, 0x0D3C, prN}, // Mn [2] MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA
+ {0x0D3D, 0x0D3D, prN}, // Lo MALAYALAM SIGN AVAGRAHA
+ {0x0D3E, 0x0D40, prN}, // Mc [3] MALAYALAM VOWEL SIGN AA..MALAYALAM VOWEL SIGN II
+ {0x0D41, 0x0D44, prN}, // Mn [4] MALAYALAM VOWEL SIGN U..MALAYALAM VOWEL SIGN VOCALIC RR
+ {0x0D46, 0x0D48, prN}, // Mc [3] MALAYALAM VOWEL SIGN E..MALAYALAM VOWEL SIGN AI
+ {0x0D4A, 0x0D4C, prN}, // Mc [3] MALAYALAM VOWEL SIGN O..MALAYALAM VOWEL SIGN AU
+ {0x0D4D, 0x0D4D, prN}, // Mn MALAYALAM SIGN VIRAMA
+ {0x0D4E, 0x0D4E, prN}, // Lo MALAYALAM LETTER DOT REPH
+ {0x0D4F, 0x0D4F, prN}, // So MALAYALAM SIGN PARA
+ {0x0D54, 0x0D56, prN}, // Lo [3] MALAYALAM LETTER CHILLU M..MALAYALAM LETTER CHILLU LLL
+ {0x0D57, 0x0D57, prN}, // Mc MALAYALAM AU LENGTH MARK
+ {0x0D58, 0x0D5E, prN}, // No [7] MALAYALAM FRACTION ONE ONE-HUNDRED-AND-SIXTIETH..MALAYALAM FRACTION ONE FIFTH
+ {0x0D5F, 0x0D61, prN}, // Lo [3] MALAYALAM LETTER ARCHAIC II..MALAYALAM LETTER VOCALIC LL
+ {0x0D62, 0x0D63, prN}, // Mn [2] MALAYALAM VOWEL SIGN VOCALIC L..MALAYALAM VOWEL SIGN VOCALIC LL
+ {0x0D66, 0x0D6F, prN}, // Nd [10] MALAYALAM DIGIT ZERO..MALAYALAM DIGIT NINE
+ {0x0D70, 0x0D78, prN}, // No [9] MALAYALAM NUMBER TEN..MALAYALAM FRACTION THREE SIXTEENTHS
+ {0x0D79, 0x0D79, prN}, // So MALAYALAM DATE MARK
+ {0x0D7A, 0x0D7F, prN}, // Lo [6] MALAYALAM LETTER CHILLU NN..MALAYALAM LETTER CHILLU K
+ {0x0D81, 0x0D81, prN}, // Mn SINHALA SIGN CANDRABINDU
+ {0x0D82, 0x0D83, prN}, // Mc [2] SINHALA SIGN ANUSVARAYA..SINHALA SIGN VISARGAYA
+ {0x0D85, 0x0D96, prN}, // Lo [18] SINHALA LETTER AYANNA..SINHALA LETTER AUYANNA
+ {0x0D9A, 0x0DB1, prN}, // Lo [24] SINHALA LETTER ALPAPRAANA KAYANNA..SINHALA LETTER DANTAJA NAYANNA
+ {0x0DB3, 0x0DBB, prN}, // Lo [9] SINHALA LETTER SANYAKA DAYANNA..SINHALA LETTER RAYANNA
+ {0x0DBD, 0x0DBD, prN}, // Lo SINHALA LETTER DANTAJA LAYANNA
+ {0x0DC0, 0x0DC6, prN}, // Lo [7] SINHALA LETTER VAYANNA..SINHALA LETTER FAYANNA
+ {0x0DCA, 0x0DCA, prN}, // Mn SINHALA SIGN AL-LAKUNA
+ {0x0DCF, 0x0DD1, prN}, // Mc [3] SINHALA VOWEL SIGN AELA-PILLA..SINHALA VOWEL SIGN DIGA AEDA-PILLA
+ {0x0DD2, 0x0DD4, prN}, // Mn [3] SINHALA VOWEL SIGN KETTI IS-PILLA..SINHALA VOWEL SIGN KETTI PAA-PILLA
+ {0x0DD6, 0x0DD6, prN}, // Mn SINHALA VOWEL SIGN DIGA PAA-PILLA
+ {0x0DD8, 0x0DDF, prN}, // Mc [8] SINHALA VOWEL SIGN GAETTA-PILLA..SINHALA VOWEL SIGN GAYANUKITTA
+ {0x0DE6, 0x0DEF, prN}, // Nd [10] SINHALA LITH DIGIT ZERO..SINHALA LITH DIGIT NINE
+ {0x0DF2, 0x0DF3, prN}, // Mc [2] SINHALA VOWEL SIGN DIGA GAETTA-PILLA..SINHALA VOWEL SIGN DIGA GAYANUKITTA
+ {0x0DF4, 0x0DF4, prN}, // Po SINHALA PUNCTUATION KUNDDALIYA
+ {0x0E01, 0x0E30, prN}, // Lo [48] THAI CHARACTER KO KAI..THAI CHARACTER SARA A
+ {0x0E31, 0x0E31, prN}, // Mn THAI CHARACTER MAI HAN-AKAT
+ {0x0E32, 0x0E33, prN}, // Lo [2] THAI CHARACTER SARA AA..THAI CHARACTER SARA AM
+ {0x0E34, 0x0E3A, prN}, // Mn [7] THAI CHARACTER SARA I..THAI CHARACTER PHINTHU
+ {0x0E3F, 0x0E3F, prN}, // Sc THAI CURRENCY SYMBOL BAHT
+ {0x0E40, 0x0E45, prN}, // Lo [6] THAI CHARACTER SARA E..THAI CHARACTER LAKKHANGYAO
+ {0x0E46, 0x0E46, prN}, // Lm THAI CHARACTER MAIYAMOK
+ {0x0E47, 0x0E4E, prN}, // Mn [8] THAI CHARACTER MAITAIKHU..THAI CHARACTER YAMAKKAN
+ {0x0E4F, 0x0E4F, prN}, // Po THAI CHARACTER FONGMAN
+ {0x0E50, 0x0E59, prN}, // Nd [10] THAI DIGIT ZERO..THAI DIGIT NINE
+ {0x0E5A, 0x0E5B, prN}, // Po [2] THAI CHARACTER ANGKHANKHU..THAI CHARACTER KHOMUT
+ {0x0E81, 0x0E82, prN}, // Lo [2] LAO LETTER KO..LAO LETTER KHO SUNG
+ {0x0E84, 0x0E84, prN}, // Lo LAO LETTER KHO TAM
+ {0x0E86, 0x0E8A, prN}, // Lo [5] LAO LETTER PALI GHA..LAO LETTER SO TAM
+ {0x0E8C, 0x0EA3, prN}, // Lo [24] LAO LETTER PALI JHA..LAO LETTER LO LING
+ {0x0EA5, 0x0EA5, prN}, // Lo LAO LETTER LO LOOT
+ {0x0EA7, 0x0EB0, prN}, // Lo [10] LAO LETTER WO..LAO VOWEL SIGN A
+ {0x0EB1, 0x0EB1, prN}, // Mn LAO VOWEL SIGN MAI KAN
+ {0x0EB2, 0x0EB3, prN}, // Lo [2] LAO VOWEL SIGN AA..LAO VOWEL SIGN AM
+ {0x0EB4, 0x0EBC, prN}, // Mn [9] LAO VOWEL SIGN I..LAO SEMIVOWEL SIGN LO
+ {0x0EBD, 0x0EBD, prN}, // Lo LAO SEMIVOWEL SIGN NYO
+ {0x0EC0, 0x0EC4, prN}, // Lo [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI
+ {0x0EC6, 0x0EC6, prN}, // Lm LAO KO LA
+ {0x0EC8, 0x0ECE, prN}, // Mn [7] LAO TONE MAI EK..LAO YAMAKKAN
+ {0x0ED0, 0x0ED9, prN}, // Nd [10] LAO DIGIT ZERO..LAO DIGIT NINE
+ {0x0EDC, 0x0EDF, prN}, // Lo [4] LAO HO NO..LAO LETTER KHMU NYO
+ {0x0F00, 0x0F00, prN}, // Lo TIBETAN SYLLABLE OM
+ {0x0F01, 0x0F03, prN}, // So [3] TIBETAN MARK GTER YIG MGO TRUNCATED A..TIBETAN MARK GTER YIG MGO -UM GTER TSHEG MA
+ {0x0F04, 0x0F12, prN}, // Po [15] TIBETAN MARK INITIAL YIG MGO MDUN MA..TIBETAN MARK RGYA GRAM SHAD
+ {0x0F13, 0x0F13, prN}, // So TIBETAN MARK CARET -DZUD RTAGS ME LONG CAN
+ {0x0F14, 0x0F14, prN}, // Po TIBETAN MARK GTER TSHEG
+ {0x0F15, 0x0F17, prN}, // So [3] TIBETAN LOGOTYPE SIGN CHAD RTAGS..TIBETAN ASTROLOGICAL SIGN SGRA GCAN -CHAR RTAGS
+ {0x0F18, 0x0F19, prN}, // Mn [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS
+ {0x0F1A, 0x0F1F, prN}, // So [6] TIBETAN SIGN RDEL DKAR GCIG..TIBETAN SIGN RDEL DKAR RDEL NAG
+ {0x0F20, 0x0F29, prN}, // Nd [10] TIBETAN DIGIT ZERO..TIBETAN DIGIT NINE
+ {0x0F2A, 0x0F33, prN}, // No [10] TIBETAN DIGIT HALF ONE..TIBETAN DIGIT HALF ZERO
+ {0x0F34, 0x0F34, prN}, // So TIBETAN MARK BSDUS RTAGS
+ {0x0F35, 0x0F35, prN}, // Mn TIBETAN MARK NGAS BZUNG NYI ZLA
+ {0x0F36, 0x0F36, prN}, // So TIBETAN MARK CARET -DZUD RTAGS BZHI MIG CAN
+ {0x0F37, 0x0F37, prN}, // Mn TIBETAN MARK NGAS BZUNG SGOR RTAGS
+ {0x0F38, 0x0F38, prN}, // So TIBETAN MARK CHE MGO
+ {0x0F39, 0x0F39, prN}, // Mn TIBETAN MARK TSA -PHRU
+ {0x0F3A, 0x0F3A, prN}, // Ps TIBETAN MARK GUG RTAGS GYON
+ {0x0F3B, 0x0F3B, prN}, // Pe TIBETAN MARK GUG RTAGS GYAS
+ {0x0F3C, 0x0F3C, prN}, // Ps TIBETAN MARK ANG KHANG GYON
+ {0x0F3D, 0x0F3D, prN}, // Pe TIBETAN MARK ANG KHANG GYAS
+ {0x0F3E, 0x0F3F, prN}, // Mc [2] TIBETAN SIGN YAR TSHES..TIBETAN SIGN MAR TSHES
+ {0x0F40, 0x0F47, prN}, // Lo [8] TIBETAN LETTER KA..TIBETAN LETTER JA
+ {0x0F49, 0x0F6C, prN}, // Lo [36] TIBETAN LETTER NYA..TIBETAN LETTER RRA
+ {0x0F71, 0x0F7E, prN}, // Mn [14] TIBETAN VOWEL SIGN AA..TIBETAN SIGN RJES SU NGA RO
+ {0x0F7F, 0x0F7F, prN}, // Mc TIBETAN SIGN RNAM BCAD
+ {0x0F80, 0x0F84, prN}, // Mn [5] TIBETAN VOWEL SIGN REVERSED I..TIBETAN MARK HALANTA
+ {0x0F85, 0x0F85, prN}, // Po TIBETAN MARK PALUTA
+ {0x0F86, 0x0F87, prN}, // Mn [2] TIBETAN SIGN LCI RTAGS..TIBETAN SIGN YANG RTAGS
+ {0x0F88, 0x0F8C, prN}, // Lo [5] TIBETAN SIGN LCE TSA CAN..TIBETAN SIGN INVERTED MCHU CAN
+ {0x0F8D, 0x0F97, prN}, // Mn [11] TIBETAN SUBJOINED SIGN LCE TSA CAN..TIBETAN SUBJOINED LETTER JA
+ {0x0F99, 0x0FBC, prN}, // Mn [36] TIBETAN SUBJOINED LETTER NYA..TIBETAN SUBJOINED LETTER FIXED-FORM RA
+ {0x0FBE, 0x0FC5, prN}, // So [8] TIBETAN KU RU KHA..TIBETAN SYMBOL RDO RJE
+ {0x0FC6, 0x0FC6, prN}, // Mn TIBETAN SYMBOL PADMA GDAN
+ {0x0FC7, 0x0FCC, prN}, // So [6] TIBETAN SYMBOL RDO RJE RGYA GRAM..TIBETAN SYMBOL NOR BU BZHI -KHYIL
+ {0x0FCE, 0x0FCF, prN}, // So [2] TIBETAN SIGN RDEL NAG RDEL DKAR..TIBETAN SIGN RDEL NAG GSUM
+ {0x0FD0, 0x0FD4, prN}, // Po [5] TIBETAN MARK BSKA- SHOG GI MGO RGYAN..TIBETAN MARK CLOSING BRDA RNYING YIG MGO SGAB MA
+ {0x0FD5, 0x0FD8, prN}, // So [4] RIGHT-FACING SVASTI SIGN..LEFT-FACING SVASTI SIGN WITH DOTS
+ {0x0FD9, 0x0FDA, prN}, // Po [2] TIBETAN MARK LEADING MCHAN RTAGS..TIBETAN MARK TRAILING MCHAN RTAGS
+ {0x1000, 0x102A, prN}, // Lo [43] MYANMAR LETTER KA..MYANMAR LETTER AU
+ {0x102B, 0x102C, prN}, // Mc [2] MYANMAR VOWEL SIGN TALL AA..MYANMAR VOWEL SIGN AA
+ {0x102D, 0x1030, prN}, // Mn [4] MYANMAR VOWEL SIGN I..MYANMAR VOWEL SIGN UU
+ {0x1031, 0x1031, prN}, // Mc MYANMAR VOWEL SIGN E
+ {0x1032, 0x1037, prN}, // Mn [6] MYANMAR VOWEL SIGN AI..MYANMAR SIGN DOT BELOW
+ {0x1038, 0x1038, prN}, // Mc MYANMAR SIGN VISARGA
+ {0x1039, 0x103A, prN}, // Mn [2] MYANMAR SIGN VIRAMA..MYANMAR SIGN ASAT
+ {0x103B, 0x103C, prN}, // Mc [2] MYANMAR CONSONANT SIGN MEDIAL YA..MYANMAR CONSONANT SIGN MEDIAL RA
+ {0x103D, 0x103E, prN}, // Mn [2] MYANMAR CONSONANT SIGN MEDIAL WA..MYANMAR CONSONANT SIGN MEDIAL HA
+ {0x103F, 0x103F, prN}, // Lo MYANMAR LETTER GREAT SA
+ {0x1040, 0x1049, prN}, // Nd [10] MYANMAR DIGIT ZERO..MYANMAR DIGIT NINE
+ {0x104A, 0x104F, prN}, // Po [6] MYANMAR SIGN LITTLE SECTION..MYANMAR SYMBOL GENITIVE
+ {0x1050, 0x1055, prN}, // Lo [6] MYANMAR LETTER SHA..MYANMAR LETTER VOCALIC LL
+ {0x1056, 0x1057, prN}, // Mc [2] MYANMAR VOWEL SIGN VOCALIC R..MYANMAR VOWEL SIGN VOCALIC RR
+ {0x1058, 0x1059, prN}, // Mn [2] MYANMAR VOWEL SIGN VOCALIC L..MYANMAR VOWEL SIGN VOCALIC LL
+ {0x105A, 0x105D, prN}, // Lo [4] MYANMAR LETTER MON NGA..MYANMAR LETTER MON BBE
+ {0x105E, 0x1060, prN}, // Mn [3] MYANMAR CONSONANT SIGN MON MEDIAL NA..MYANMAR CONSONANT SIGN MON MEDIAL LA
+ {0x1061, 0x1061, prN}, // Lo MYANMAR LETTER SGAW KAREN SHA
+ {0x1062, 0x1064, prN}, // Mc [3] MYANMAR VOWEL SIGN SGAW KAREN EU..MYANMAR TONE MARK SGAW KAREN KE PHO
+ {0x1065, 0x1066, prN}, // Lo [2] MYANMAR LETTER WESTERN PWO KAREN THA..MYANMAR LETTER WESTERN PWO KAREN PWA
+ {0x1067, 0x106D, prN}, // Mc [7] MYANMAR VOWEL SIGN WESTERN PWO KAREN EU..MYANMAR SIGN WESTERN PWO KAREN TONE-5
+ {0x106E, 0x1070, prN}, // Lo [3] MYANMAR LETTER EASTERN PWO KAREN NNA..MYANMAR LETTER EASTERN PWO KAREN GHWA
+ {0x1071, 0x1074, prN}, // Mn [4] MYANMAR VOWEL SIGN GEBA KAREN I..MYANMAR VOWEL SIGN KAYAH EE
+ {0x1075, 0x1081, prN}, // Lo [13] MYANMAR LETTER SHAN KA..MYANMAR LETTER SHAN HA
+ {0x1082, 0x1082, prN}, // Mn MYANMAR CONSONANT SIGN SHAN MEDIAL WA
+ {0x1083, 0x1084, prN}, // Mc [2] MYANMAR VOWEL SIGN SHAN AA..MYANMAR VOWEL SIGN SHAN E
+ {0x1085, 0x1086, prN}, // Mn [2] MYANMAR VOWEL SIGN SHAN E ABOVE..MYANMAR VOWEL SIGN SHAN FINAL Y
+ {0x1087, 0x108C, prN}, // Mc [6] MYANMAR SIGN SHAN TONE-2..MYANMAR SIGN SHAN COUNCIL TONE-3
+ {0x108D, 0x108D, prN}, // Mn MYANMAR SIGN SHAN COUNCIL EMPHATIC TONE
+ {0x108E, 0x108E, prN}, // Lo MYANMAR LETTER RUMAI PALAUNG FA
+ {0x108F, 0x108F, prN}, // Mc MYANMAR SIGN RUMAI PALAUNG TONE-5
+ {0x1090, 0x1099, prN}, // Nd [10] MYANMAR SHAN DIGIT ZERO..MYANMAR SHAN DIGIT NINE
+ {0x109A, 0x109C, prN}, // Mc [3] MYANMAR SIGN KHAMTI TONE-1..MYANMAR VOWEL SIGN AITON A
+ {0x109D, 0x109D, prN}, // Mn MYANMAR VOWEL SIGN AITON AI
+ {0x109E, 0x109F, prN}, // So [2] MYANMAR SYMBOL SHAN ONE..MYANMAR SYMBOL SHAN EXCLAMATION
+ {0x10A0, 0x10C5, prN}, // Lu [38] GEORGIAN CAPITAL LETTER AN..GEORGIAN CAPITAL LETTER HOE
+ {0x10C7, 0x10C7, prN}, // Lu GEORGIAN CAPITAL LETTER YN
+ {0x10CD, 0x10CD, prN}, // Lu GEORGIAN CAPITAL LETTER AEN
+ {0x10D0, 0x10FA, prN}, // Ll [43] GEORGIAN LETTER AN..GEORGIAN LETTER AIN
+ {0x10FB, 0x10FB, prN}, // Po GEORGIAN PARAGRAPH SEPARATOR
+ {0x10FC, 0x10FC, prN}, // Lm MODIFIER LETTER GEORGIAN NAR
+ {0x10FD, 0x10FF, prN}, // Ll [3] GEORGIAN LETTER AEN..GEORGIAN LETTER LABIAL SIGN
+ {0x1100, 0x115F, prW}, // Lo [96] HANGUL CHOSEONG KIYEOK..HANGUL CHOSEONG FILLER
+ {0x1160, 0x11FF, prN}, // Lo [160] HANGUL JUNGSEONG FILLER..HANGUL JONGSEONG SSANGNIEUN
+ {0x1200, 0x1248, prN}, // Lo [73] ETHIOPIC SYLLABLE HA..ETHIOPIC SYLLABLE QWA
+ {0x124A, 0x124D, prN}, // Lo [4] ETHIOPIC SYLLABLE QWI..ETHIOPIC SYLLABLE QWE
+ {0x1250, 0x1256, prN}, // Lo [7] ETHIOPIC SYLLABLE QHA..ETHIOPIC SYLLABLE QHO
+ {0x1258, 0x1258, prN}, // Lo ETHIOPIC SYLLABLE QHWA
+ {0x125A, 0x125D, prN}, // Lo [4] ETHIOPIC SYLLABLE QHWI..ETHIOPIC SYLLABLE QHWE
+ {0x1260, 0x1288, prN}, // Lo [41] ETHIOPIC SYLLABLE BA..ETHIOPIC SYLLABLE XWA
+ {0x128A, 0x128D, prN}, // Lo [4] ETHIOPIC SYLLABLE XWI..ETHIOPIC SYLLABLE XWE
+ {0x1290, 0x12B0, prN}, // Lo [33] ETHIOPIC SYLLABLE NA..ETHIOPIC SYLLABLE KWA
+ {0x12B2, 0x12B5, prN}, // Lo [4] ETHIOPIC SYLLABLE KWI..ETHIOPIC SYLLABLE KWE
+ {0x12B8, 0x12BE, prN}, // Lo [7] ETHIOPIC SYLLABLE KXA..ETHIOPIC SYLLABLE KXO
+ {0x12C0, 0x12C0, prN}, // Lo ETHIOPIC SYLLABLE KXWA
+ {0x12C2, 0x12C5, prN}, // Lo [4] ETHIOPIC SYLLABLE KXWI..ETHIOPIC SYLLABLE KXWE
+ {0x12C8, 0x12D6, prN}, // Lo [15] ETHIOPIC SYLLABLE WA..ETHIOPIC SYLLABLE PHARYNGEAL O
+ {0x12D8, 0x1310, prN}, // Lo [57] ETHIOPIC SYLLABLE ZA..ETHIOPIC SYLLABLE GWA
+ {0x1312, 0x1315, prN}, // Lo [4] ETHIOPIC SYLLABLE GWI..ETHIOPIC SYLLABLE GWE
+ {0x1318, 0x135A, prN}, // Lo [67] ETHIOPIC SYLLABLE GGA..ETHIOPIC SYLLABLE FYA
+ {0x135D, 0x135F, prN}, // Mn [3] ETHIOPIC COMBINING GEMINATION AND VOWEL LENGTH MARK..ETHIOPIC COMBINING GEMINATION MARK
+ {0x1360, 0x1368, prN}, // Po [9] ETHIOPIC SECTION MARK..ETHIOPIC PARAGRAPH SEPARATOR
+ {0x1369, 0x137C, prN}, // No [20] ETHIOPIC DIGIT ONE..ETHIOPIC NUMBER TEN THOUSAND
+ {0x1380, 0x138F, prN}, // Lo [16] ETHIOPIC SYLLABLE SEBATBEIT MWA..ETHIOPIC SYLLABLE PWE
+ {0x1390, 0x1399, prN}, // So [10] ETHIOPIC TONAL MARK YIZET..ETHIOPIC TONAL MARK KURT
+ {0x13A0, 0x13F5, prN}, // Lu [86] CHEROKEE LETTER A..CHEROKEE LETTER MV
+ {0x13F8, 0x13FD, prN}, // Ll [6] CHEROKEE SMALL LETTER YE..CHEROKEE SMALL LETTER MV
+ {0x1400, 0x1400, prN}, // Pd CANADIAN SYLLABICS HYPHEN
+ {0x1401, 0x166C, prN}, // Lo [620] CANADIAN SYLLABICS E..CANADIAN SYLLABICS CARRIER TTSA
+ {0x166D, 0x166D, prN}, // So CANADIAN SYLLABICS CHI SIGN
+ {0x166E, 0x166E, prN}, // Po CANADIAN SYLLABICS FULL STOP
+ {0x166F, 0x167F, prN}, // Lo [17] CANADIAN SYLLABICS QAI..CANADIAN SYLLABICS BLACKFOOT W
+ {0x1680, 0x1680, prN}, // Zs OGHAM SPACE MARK
+ {0x1681, 0x169A, prN}, // Lo [26] OGHAM LETTER BEITH..OGHAM LETTER PEITH
+ {0x169B, 0x169B, prN}, // Ps OGHAM FEATHER MARK
+ {0x169C, 0x169C, prN}, // Pe OGHAM REVERSED FEATHER MARK
+ {0x16A0, 0x16EA, prN}, // Lo [75] RUNIC LETTER FEHU FEOH FE F..RUNIC LETTER X
+ {0x16EB, 0x16ED, prN}, // Po [3] RUNIC SINGLE PUNCTUATION..RUNIC CROSS PUNCTUATION
+ {0x16EE, 0x16F0, prN}, // Nl [3] RUNIC ARLAUG SYMBOL..RUNIC BELGTHOR SYMBOL
+ {0x16F1, 0x16F8, prN}, // Lo [8] RUNIC LETTER K..RUNIC LETTER FRANKS CASKET AESC
+ {0x1700, 0x1711, prN}, // Lo [18] TAGALOG LETTER A..TAGALOG LETTER HA
+ {0x1712, 0x1714, prN}, // Mn [3] TAGALOG VOWEL SIGN I..TAGALOG SIGN VIRAMA
+ {0x1715, 0x1715, prN}, // Mc TAGALOG SIGN PAMUDPOD
+ {0x171F, 0x171F, prN}, // Lo TAGALOG LETTER ARCHAIC RA
+ {0x1720, 0x1731, prN}, // Lo [18] HANUNOO LETTER A..HANUNOO LETTER HA
+ {0x1732, 0x1733, prN}, // Mn [2] HANUNOO VOWEL SIGN I..HANUNOO VOWEL SIGN U
+ {0x1734, 0x1734, prN}, // Mc HANUNOO SIGN PAMUDPOD
+ {0x1735, 0x1736, prN}, // Po [2] PHILIPPINE SINGLE PUNCTUATION..PHILIPPINE DOUBLE PUNCTUATION
+ {0x1740, 0x1751, prN}, // Lo [18] BUHID LETTER A..BUHID LETTER HA
+ {0x1752, 0x1753, prN}, // Mn [2] BUHID VOWEL SIGN I..BUHID VOWEL SIGN U
+ {0x1760, 0x176C, prN}, // Lo [13] TAGBANWA LETTER A..TAGBANWA LETTER YA
+ {0x176E, 0x1770, prN}, // Lo [3] TAGBANWA LETTER LA..TAGBANWA LETTER SA
+ {0x1772, 0x1773, prN}, // Mn [2] TAGBANWA VOWEL SIGN I..TAGBANWA VOWEL SIGN U
+ {0x1780, 0x17B3, prN}, // Lo [52] KHMER LETTER KA..KHMER INDEPENDENT VOWEL QAU
+ {0x17B4, 0x17B5, prN}, // Mn [2] KHMER VOWEL INHERENT AQ..KHMER VOWEL INHERENT AA
+ {0x17B6, 0x17B6, prN}, // Mc KHMER VOWEL SIGN AA
+ {0x17B7, 0x17BD, prN}, // Mn [7] KHMER VOWEL SIGN I..KHMER VOWEL SIGN UA
+ {0x17BE, 0x17C5, prN}, // Mc [8] KHMER VOWEL SIGN OE..KHMER VOWEL SIGN AU
+ {0x17C6, 0x17C6, prN}, // Mn KHMER SIGN NIKAHIT
+ {0x17C7, 0x17C8, prN}, // Mc [2] KHMER SIGN REAHMUK..KHMER SIGN YUUKALEAPINTU
+ {0x17C9, 0x17D3, prN}, // Mn [11] KHMER SIGN MUUSIKATOAN..KHMER SIGN BATHAMASAT
+ {0x17D4, 0x17D6, prN}, // Po [3] KHMER SIGN KHAN..KHMER SIGN CAMNUC PII KUUH
+ {0x17D7, 0x17D7, prN}, // Lm KHMER SIGN LEK TOO
+ {0x17D8, 0x17DA, prN}, // Po [3] KHMER SIGN BEYYAL..KHMER SIGN KOOMUUT
+ {0x17DB, 0x17DB, prN}, // Sc KHMER CURRENCY SYMBOL RIEL
+ {0x17DC, 0x17DC, prN}, // Lo KHMER SIGN AVAKRAHASANYA
+ {0x17DD, 0x17DD, prN}, // Mn KHMER SIGN ATTHACAN
+ {0x17E0, 0x17E9, prN}, // Nd [10] KHMER DIGIT ZERO..KHMER DIGIT NINE
+ {0x17F0, 0x17F9, prN}, // No [10] KHMER SYMBOL LEK ATTAK SON..KHMER SYMBOL LEK ATTAK PRAM-BUON
+ {0x1800, 0x1805, prN}, // Po [6] MONGOLIAN BIRGA..MONGOLIAN FOUR DOTS
+ {0x1806, 0x1806, prN}, // Pd MONGOLIAN TODO SOFT HYPHEN
+ {0x1807, 0x180A, prN}, // Po [4] MONGOLIAN SIBE SYLLABLE BOUNDARY MARKER..MONGOLIAN NIRUGU
+ {0x180B, 0x180D, prN}, // Mn [3] MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE
+ {0x180E, 0x180E, prN}, // Cf MONGOLIAN VOWEL SEPARATOR
+ {0x180F, 0x180F, prN}, // Mn MONGOLIAN FREE VARIATION SELECTOR FOUR
+ {0x1810, 0x1819, prN}, // Nd [10] MONGOLIAN DIGIT ZERO..MONGOLIAN DIGIT NINE
+ {0x1820, 0x1842, prN}, // Lo [35] MONGOLIAN LETTER A..MONGOLIAN LETTER CHI
+ {0x1843, 0x1843, prN}, // Lm MONGOLIAN LETTER TODO LONG VOWEL SIGN
+ {0x1844, 0x1878, prN}, // Lo [53] MONGOLIAN LETTER TODO E..MONGOLIAN LETTER CHA WITH TWO DOTS
+ {0x1880, 0x1884, prN}, // Lo [5] MONGOLIAN LETTER ALI GALI ANUSVARA ONE..MONGOLIAN LETTER ALI GALI INVERTED UBADAMA
+ {0x1885, 0x1886, prN}, // Mn [2] MONGOLIAN LETTER ALI GALI BALUDA..MONGOLIAN LETTER ALI GALI THREE BALUDA
+ {0x1887, 0x18A8, prN}, // Lo [34] MONGOLIAN LETTER ALI GALI A..MONGOLIAN LETTER MANCHU ALI GALI BHA
+ {0x18A9, 0x18A9, prN}, // Mn MONGOLIAN LETTER ALI GALI DAGALGA
+ {0x18AA, 0x18AA, prN}, // Lo MONGOLIAN LETTER MANCHU ALI GALI LHA
+ {0x18B0, 0x18F5, prN}, // Lo [70] CANADIAN SYLLABICS OY..CANADIAN SYLLABICS CARRIER DENTAL S
+ {0x1900, 0x191E, prN}, // Lo [31] LIMBU VOWEL-CARRIER LETTER..LIMBU LETTER TRA
+ {0x1920, 0x1922, prN}, // Mn [3] LIMBU VOWEL SIGN A..LIMBU VOWEL SIGN U
+ {0x1923, 0x1926, prN}, // Mc [4] LIMBU VOWEL SIGN EE..LIMBU VOWEL SIGN AU
+ {0x1927, 0x1928, prN}, // Mn [2] LIMBU VOWEL SIGN E..LIMBU VOWEL SIGN O
+ {0x1929, 0x192B, prN}, // Mc [3] LIMBU SUBJOINED LETTER YA..LIMBU SUBJOINED LETTER WA
+ {0x1930, 0x1931, prN}, // Mc [2] LIMBU SMALL LETTER KA..LIMBU SMALL LETTER NGA
+ {0x1932, 0x1932, prN}, // Mn LIMBU SMALL LETTER ANUSVARA
+ {0x1933, 0x1938, prN}, // Mc [6] LIMBU SMALL LETTER TA..LIMBU SMALL LETTER LA
+ {0x1939, 0x193B, prN}, // Mn [3] LIMBU SIGN MUKPHRENG..LIMBU SIGN SA-I
+ {0x1940, 0x1940, prN}, // So LIMBU SIGN LOO
+ {0x1944, 0x1945, prN}, // Po [2] LIMBU EXCLAMATION MARK..LIMBU QUESTION MARK
+ {0x1946, 0x194F, prN}, // Nd [10] LIMBU DIGIT ZERO..LIMBU DIGIT NINE
+ {0x1950, 0x196D, prN}, // Lo [30] TAI LE LETTER KA..TAI LE LETTER AI
+ {0x1970, 0x1974, prN}, // Lo [5] TAI LE LETTER TONE-2..TAI LE LETTER TONE-6
+ {0x1980, 0x19AB, prN}, // Lo [44] NEW TAI LUE LETTER HIGH QA..NEW TAI LUE LETTER LOW SUA
+ {0x19B0, 0x19C9, prN}, // Lo [26] NEW TAI LUE VOWEL SIGN VOWEL SHORTENER..NEW TAI LUE TONE MARK-2
+ {0x19D0, 0x19D9, prN}, // Nd [10] NEW TAI LUE DIGIT ZERO..NEW TAI LUE DIGIT NINE
+ {0x19DA, 0x19DA, prN}, // No NEW TAI LUE THAM DIGIT ONE
+ {0x19DE, 0x19DF, prN}, // So [2] NEW TAI LUE SIGN LAE..NEW TAI LUE SIGN LAEV
+ {0x19E0, 0x19FF, prN}, // So [32] KHMER SYMBOL PATHAMASAT..KHMER SYMBOL DAP-PRAM ROC
+ {0x1A00, 0x1A16, prN}, // Lo [23] BUGINESE LETTER KA..BUGINESE LETTER HA
+ {0x1A17, 0x1A18, prN}, // Mn [2] BUGINESE VOWEL SIGN I..BUGINESE VOWEL SIGN U
+ {0x1A19, 0x1A1A, prN}, // Mc [2] BUGINESE VOWEL SIGN E..BUGINESE VOWEL SIGN O
+ {0x1A1B, 0x1A1B, prN}, // Mn BUGINESE VOWEL SIGN AE
+ {0x1A1E, 0x1A1F, prN}, // Po [2] BUGINESE PALLAWA..BUGINESE END OF SECTION
+ {0x1A20, 0x1A54, prN}, // Lo [53] TAI THAM LETTER HIGH KA..TAI THAM LETTER GREAT SA
+ {0x1A55, 0x1A55, prN}, // Mc TAI THAM CONSONANT SIGN MEDIAL RA
+ {0x1A56, 0x1A56, prN}, // Mn TAI THAM CONSONANT SIGN MEDIAL LA
+ {0x1A57, 0x1A57, prN}, // Mc TAI THAM CONSONANT SIGN LA TANG LAI
+ {0x1A58, 0x1A5E, prN}, // Mn [7] TAI THAM SIGN MAI KANG LAI..TAI THAM CONSONANT SIGN SA
+ {0x1A60, 0x1A60, prN}, // Mn TAI THAM SIGN SAKOT
+ {0x1A61, 0x1A61, prN}, // Mc TAI THAM VOWEL SIGN A
+ {0x1A62, 0x1A62, prN}, // Mn TAI THAM VOWEL SIGN MAI SAT
+ {0x1A63, 0x1A64, prN}, // Mc [2] TAI THAM VOWEL SIGN AA..TAI THAM VOWEL SIGN TALL AA
+ {0x1A65, 0x1A6C, prN}, // Mn [8] TAI THAM VOWEL SIGN I..TAI THAM VOWEL SIGN OA BELOW
+ {0x1A6D, 0x1A72, prN}, // Mc [6] TAI THAM VOWEL SIGN OY..TAI THAM VOWEL SIGN THAM AI
+ {0x1A73, 0x1A7C, prN}, // Mn [10] TAI THAM VOWEL SIGN OA ABOVE..TAI THAM SIGN KHUEN-LUE KARAN
+ {0x1A7F, 0x1A7F, prN}, // Mn TAI THAM COMBINING CRYPTOGRAMMIC DOT
+ {0x1A80, 0x1A89, prN}, // Nd [10] TAI THAM HORA DIGIT ZERO..TAI THAM HORA DIGIT NINE
+ {0x1A90, 0x1A99, prN}, // Nd [10] TAI THAM THAM DIGIT ZERO..TAI THAM THAM DIGIT NINE
+ {0x1AA0, 0x1AA6, prN}, // Po [7] TAI THAM SIGN WIANG..TAI THAM SIGN REVERSED ROTATED RANA
+ {0x1AA7, 0x1AA7, prN}, // Lm TAI THAM SIGN MAI YAMOK
+ {0x1AA8, 0x1AAD, prN}, // Po [6] TAI THAM SIGN KAAN..TAI THAM SIGN CAANG
+ {0x1AB0, 0x1ABD, prN}, // Mn [14] COMBINING DOUBLED CIRCUMFLEX ACCENT..COMBINING PARENTHESES BELOW
+ {0x1ABE, 0x1ABE, prN}, // Me COMBINING PARENTHESES OVERLAY
+ {0x1ABF, 0x1ACE, prN}, // Mn [16] COMBINING LATIN SMALL LETTER W BELOW..COMBINING LATIN SMALL LETTER INSULAR T
+ {0x1B00, 0x1B03, prN}, // Mn [4] BALINESE SIGN ULU RICEM..BALINESE SIGN SURANG
+ {0x1B04, 0x1B04, prN}, // Mc BALINESE SIGN BISAH
+ {0x1B05, 0x1B33, prN}, // Lo [47] BALINESE LETTER AKARA..BALINESE LETTER HA
+ {0x1B34, 0x1B34, prN}, // Mn BALINESE SIGN REREKAN
+ {0x1B35, 0x1B35, prN}, // Mc BALINESE VOWEL SIGN TEDUNG
+ {0x1B36, 0x1B3A, prN}, // Mn [5] BALINESE VOWEL SIGN ULU..BALINESE VOWEL SIGN RA REPA
+ {0x1B3B, 0x1B3B, prN}, // Mc BALINESE VOWEL SIGN RA REPA TEDUNG
+ {0x1B3C, 0x1B3C, prN}, // Mn BALINESE VOWEL SIGN LA LENGA
+ {0x1B3D, 0x1B41, prN}, // Mc [5] BALINESE VOWEL SIGN LA LENGA TEDUNG..BALINESE VOWEL SIGN TALING REPA TEDUNG
+ {0x1B42, 0x1B42, prN}, // Mn BALINESE VOWEL SIGN PEPET
+ {0x1B43, 0x1B44, prN}, // Mc [2] BALINESE VOWEL SIGN PEPET TEDUNG..BALINESE ADEG ADEG
+ {0x1B45, 0x1B4C, prN}, // Lo [8] BALINESE LETTER KAF SASAK..BALINESE LETTER ARCHAIC JNYA
+ {0x1B50, 0x1B59, prN}, // Nd [10] BALINESE DIGIT ZERO..BALINESE DIGIT NINE
+ {0x1B5A, 0x1B60, prN}, // Po [7] BALINESE PANTI..BALINESE PAMENENG
+ {0x1B61, 0x1B6A, prN}, // So [10] BALINESE MUSICAL SYMBOL DONG..BALINESE MUSICAL SYMBOL DANG GEDE
+ {0x1B6B, 0x1B73, prN}, // Mn [9] BALINESE MUSICAL SYMBOL COMBINING TEGEH..BALINESE MUSICAL SYMBOL COMBINING GONG
+ {0x1B74, 0x1B7C, prN}, // So [9] BALINESE MUSICAL SYMBOL RIGHT-HAND OPEN DUG..BALINESE MUSICAL SYMBOL LEFT-HAND OPEN PING
+ {0x1B7D, 0x1B7E, prN}, // Po [2] BALINESE PANTI LANTANG..BALINESE PAMADA LANTANG
+ {0x1B80, 0x1B81, prN}, // Mn [2] SUNDANESE SIGN PANYECEK..SUNDANESE SIGN PANGLAYAR
+ {0x1B82, 0x1B82, prN}, // Mc SUNDANESE SIGN PANGWISAD
+ {0x1B83, 0x1BA0, prN}, // Lo [30] SUNDANESE LETTER A..SUNDANESE LETTER HA
+ {0x1BA1, 0x1BA1, prN}, // Mc SUNDANESE CONSONANT SIGN PAMINGKAL
+ {0x1BA2, 0x1BA5, prN}, // Mn [4] SUNDANESE CONSONANT SIGN PANYAKRA..SUNDANESE VOWEL SIGN PANYUKU
+ {0x1BA6, 0x1BA7, prN}, // Mc [2] SUNDANESE VOWEL SIGN PANAELAENG..SUNDANESE VOWEL SIGN PANOLONG
+ {0x1BA8, 0x1BA9, prN}, // Mn [2] SUNDANESE VOWEL SIGN PAMEPET..SUNDANESE VOWEL SIGN PANEULEUNG
+ {0x1BAA, 0x1BAA, prN}, // Mc SUNDANESE SIGN PAMAAEH
+ {0x1BAB, 0x1BAD, prN}, // Mn [3] SUNDANESE SIGN VIRAMA..SUNDANESE CONSONANT SIGN PASANGAN WA
+ {0x1BAE, 0x1BAF, prN}, // Lo [2] SUNDANESE LETTER KHA..SUNDANESE LETTER SYA
+ {0x1BB0, 0x1BB9, prN}, // Nd [10] SUNDANESE DIGIT ZERO..SUNDANESE DIGIT NINE
+ {0x1BBA, 0x1BBF, prN}, // Lo [6] SUNDANESE AVAGRAHA..SUNDANESE LETTER FINAL M
+ {0x1BC0, 0x1BE5, prN}, // Lo [38] BATAK LETTER A..BATAK LETTER U
+ {0x1BE6, 0x1BE6, prN}, // Mn BATAK SIGN TOMPI
+ {0x1BE7, 0x1BE7, prN}, // Mc BATAK VOWEL SIGN E
+ {0x1BE8, 0x1BE9, prN}, // Mn [2] BATAK VOWEL SIGN PAKPAK E..BATAK VOWEL SIGN EE
+ {0x1BEA, 0x1BEC, prN}, // Mc [3] BATAK VOWEL SIGN I..BATAK VOWEL SIGN O
+ {0x1BED, 0x1BED, prN}, // Mn BATAK VOWEL SIGN KARO O
+ {0x1BEE, 0x1BEE, prN}, // Mc BATAK VOWEL SIGN U
+ {0x1BEF, 0x1BF1, prN}, // Mn [3] BATAK VOWEL SIGN U FOR SIMALUNGUN SA..BATAK CONSONANT SIGN H
+ {0x1BF2, 0x1BF3, prN}, // Mc [2] BATAK PANGOLAT..BATAK PANONGONAN
+ {0x1BFC, 0x1BFF, prN}, // Po [4] BATAK SYMBOL BINDU NA METEK..BATAK SYMBOL BINDU PANGOLAT
+ {0x1C00, 0x1C23, prN}, // Lo [36] LEPCHA LETTER KA..LEPCHA LETTER A
+ {0x1C24, 0x1C2B, prN}, // Mc [8] LEPCHA SUBJOINED LETTER YA..LEPCHA VOWEL SIGN UU
+ {0x1C2C, 0x1C33, prN}, // Mn [8] LEPCHA VOWEL SIGN E..LEPCHA CONSONANT SIGN T
+ {0x1C34, 0x1C35, prN}, // Mc [2] LEPCHA CONSONANT SIGN NYIN-DO..LEPCHA CONSONANT SIGN KANG
+ {0x1C36, 0x1C37, prN}, // Mn [2] LEPCHA SIGN RAN..LEPCHA SIGN NUKTA
+ {0x1C3B, 0x1C3F, prN}, // Po [5] LEPCHA PUNCTUATION TA-ROL..LEPCHA PUNCTUATION TSHOOK
+ {0x1C40, 0x1C49, prN}, // Nd [10] LEPCHA DIGIT ZERO..LEPCHA DIGIT NINE
+ {0x1C4D, 0x1C4F, prN}, // Lo [3] LEPCHA LETTER TTA..LEPCHA LETTER DDA
+ {0x1C50, 0x1C59, prN}, // Nd [10] OL CHIKI DIGIT ZERO..OL CHIKI DIGIT NINE
+ {0x1C5A, 0x1C77, prN}, // Lo [30] OL CHIKI LETTER LA..OL CHIKI LETTER OH
+ {0x1C78, 0x1C7D, prN}, // Lm [6] OL CHIKI MU TTUDDAG..OL CHIKI AHAD
+ {0x1C7E, 0x1C7F, prN}, // Po [2] OL CHIKI PUNCTUATION MUCAAD..OL CHIKI PUNCTUATION DOUBLE MUCAAD
+ {0x1C80, 0x1C88, prN}, // Ll [9] CYRILLIC SMALL LETTER ROUNDED VE..CYRILLIC SMALL LETTER UNBLENDED UK
+ {0x1C90, 0x1CBA, prN}, // Lu [43] GEORGIAN MTAVRULI CAPITAL LETTER AN..GEORGIAN MTAVRULI CAPITAL LETTER AIN
+ {0x1CBD, 0x1CBF, prN}, // Lu [3] GEORGIAN MTAVRULI CAPITAL LETTER AEN..GEORGIAN MTAVRULI CAPITAL LETTER LABIAL SIGN
+ {0x1CC0, 0x1CC7, prN}, // Po [8] SUNDANESE PUNCTUATION BINDU SURYA..SUNDANESE PUNCTUATION BINDU BA SATANGA
+ {0x1CD0, 0x1CD2, prN}, // Mn [3] VEDIC TONE KARSHANA..VEDIC TONE PRENKHA
+ {0x1CD3, 0x1CD3, prN}, // Po VEDIC SIGN NIHSHVASA
+ {0x1CD4, 0x1CE0, prN}, // Mn [13] VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC TONE RIGVEDIC KASHMIRI INDEPENDENT SVARITA
+ {0x1CE1, 0x1CE1, prN}, // Mc VEDIC TONE ATHARVAVEDIC INDEPENDENT SVARITA
+ {0x1CE2, 0x1CE8, prN}, // Mn [7] VEDIC SIGN VISARGA SVARITA..VEDIC SIGN VISARGA ANUDATTA WITH TAIL
+ {0x1CE9, 0x1CEC, prN}, // Lo [4] VEDIC SIGN ANUSVARA ANTARGOMUKHA..VEDIC SIGN ANUSVARA VAMAGOMUKHA WITH TAIL
+ {0x1CED, 0x1CED, prN}, // Mn VEDIC SIGN TIRYAK
+ {0x1CEE, 0x1CF3, prN}, // Lo [6] VEDIC SIGN HEXIFORM LONG ANUSVARA..VEDIC SIGN ROTATED ARDHAVISARGA
+ {0x1CF4, 0x1CF4, prN}, // Mn VEDIC TONE CANDRA ABOVE
+ {0x1CF5, 0x1CF6, prN}, // Lo [2] VEDIC SIGN JIHVAMULIYA..VEDIC SIGN UPADHMANIYA
+ {0x1CF7, 0x1CF7, prN}, // Mc VEDIC SIGN ATIKRAMA
+ {0x1CF8, 0x1CF9, prN}, // Mn [2] VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE
+ {0x1CFA, 0x1CFA, prN}, // Lo VEDIC SIGN DOUBLE ANUSVARA ANTARGOMUKHA
+ {0x1D00, 0x1D2B, prN}, // Ll [44] LATIN LETTER SMALL CAPITAL A..CYRILLIC LETTER SMALL CAPITAL EL
+ {0x1D2C, 0x1D6A, prN}, // Lm [63] MODIFIER LETTER CAPITAL A..GREEK SUBSCRIPT SMALL LETTER CHI
+ {0x1D6B, 0x1D77, prN}, // Ll [13] LATIN SMALL LETTER UE..LATIN SMALL LETTER TURNED G
+ {0x1D78, 0x1D78, prN}, // Lm MODIFIER LETTER CYRILLIC EN
+ {0x1D79, 0x1D7F, prN}, // Ll [7] LATIN SMALL LETTER INSULAR G..LATIN SMALL LETTER UPSILON WITH STROKE
+ {0x1D80, 0x1D9A, prN}, // Ll [27] LATIN SMALL LETTER B WITH PALATAL HOOK..LATIN SMALL LETTER EZH WITH RETROFLEX HOOK
+ {0x1D9B, 0x1DBF, prN}, // Lm [37] MODIFIER LETTER SMALL TURNED ALPHA..MODIFIER LETTER SMALL THETA
+ {0x1DC0, 0x1DFF, prN}, // Mn [64] COMBINING DOTTED GRAVE ACCENT..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW
+ {0x1E00, 0x1EFF, prN}, // L& [256] LATIN CAPITAL LETTER A WITH RING BELOW..LATIN SMALL LETTER Y WITH LOOP
+ {0x1F00, 0x1F15, prN}, // L& [22] GREEK SMALL LETTER ALPHA WITH PSILI..GREEK SMALL LETTER EPSILON WITH DASIA AND OXIA
+ {0x1F18, 0x1F1D, prN}, // Lu [6] GREEK CAPITAL LETTER EPSILON WITH PSILI..GREEK CAPITAL LETTER EPSILON WITH DASIA AND OXIA
+ {0x1F20, 0x1F45, prN}, // L& [38] GREEK SMALL LETTER ETA WITH PSILI..GREEK SMALL LETTER OMICRON WITH DASIA AND OXIA
+ {0x1F48, 0x1F4D, prN}, // Lu [6] GREEK CAPITAL LETTER OMICRON WITH PSILI..GREEK CAPITAL LETTER OMICRON WITH DASIA AND OXIA
+ {0x1F50, 0x1F57, prN}, // Ll [8] GREEK SMALL LETTER UPSILON WITH PSILI..GREEK SMALL LETTER UPSILON WITH DASIA AND PERISPOMENI
+ {0x1F59, 0x1F59, prN}, // Lu GREEK CAPITAL LETTER UPSILON WITH DASIA
+ {0x1F5B, 0x1F5B, prN}, // Lu GREEK CAPITAL LETTER UPSILON WITH DASIA AND VARIA
+ {0x1F5D, 0x1F5D, prN}, // Lu GREEK CAPITAL LETTER UPSILON WITH DASIA AND OXIA
+ {0x1F5F, 0x1F7D, prN}, // L& [31] GREEK CAPITAL LETTER UPSILON WITH DASIA AND PERISPOMENI..GREEK SMALL LETTER OMEGA WITH OXIA
+ {0x1F80, 0x1FB4, prN}, // L& [53] GREEK SMALL LETTER ALPHA WITH PSILI AND YPOGEGRAMMENI..GREEK SMALL LETTER ALPHA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FB6, 0x1FBC, prN}, // L& [7] GREEK SMALL LETTER ALPHA WITH PERISPOMENI..GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI
+ {0x1FBD, 0x1FBD, prN}, // Sk GREEK KORONIS
+ {0x1FBE, 0x1FBE, prN}, // Ll GREEK PROSGEGRAMMENI
+ {0x1FBF, 0x1FC1, prN}, // Sk [3] GREEK PSILI..GREEK DIALYTIKA AND PERISPOMENI
+ {0x1FC2, 0x1FC4, prN}, // Ll [3] GREEK SMALL LETTER ETA WITH VARIA AND YPOGEGRAMMENI..GREEK SMALL LETTER ETA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FC6, 0x1FCC, prN}, // L& [7] GREEK SMALL LETTER ETA WITH PERISPOMENI..GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI
+ {0x1FCD, 0x1FCF, prN}, // Sk [3] GREEK PSILI AND VARIA..GREEK PSILI AND PERISPOMENI
+ {0x1FD0, 0x1FD3, prN}, // Ll [4] GREEK SMALL LETTER IOTA WITH VRACHY..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
+ {0x1FD6, 0x1FDB, prN}, // L& [6] GREEK SMALL LETTER IOTA WITH PERISPOMENI..GREEK CAPITAL LETTER IOTA WITH OXIA
+ {0x1FDD, 0x1FDF, prN}, // Sk [3] GREEK DASIA AND VARIA..GREEK DASIA AND PERISPOMENI
+ {0x1FE0, 0x1FEC, prN}, // L& [13] GREEK SMALL LETTER UPSILON WITH VRACHY..GREEK CAPITAL LETTER RHO WITH DASIA
+ {0x1FED, 0x1FEF, prN}, // Sk [3] GREEK DIALYTIKA AND VARIA..GREEK VARIA
+ {0x1FF2, 0x1FF4, prN}, // Ll [3] GREEK SMALL LETTER OMEGA WITH VARIA AND YPOGEGRAMMENI..GREEK SMALL LETTER OMEGA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FF6, 0x1FFC, prN}, // L& [7] GREEK SMALL LETTER OMEGA WITH PERISPOMENI..GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI
+ {0x1FFD, 0x1FFE, prN}, // Sk [2] GREEK OXIA..GREEK DASIA
+ {0x2000, 0x200A, prN}, // Zs [11] EN QUAD..HAIR SPACE
+ {0x200B, 0x200F, prN}, // Cf [5] ZERO WIDTH SPACE..RIGHT-TO-LEFT MARK
+ {0x2010, 0x2010, prA}, // Pd HYPHEN
+ {0x2011, 0x2012, prN}, // Pd [2] NON-BREAKING HYPHEN..FIGURE DASH
+ {0x2013, 0x2015, prA}, // Pd [3] EN DASH..HORIZONTAL BAR
+ {0x2016, 0x2016, prA}, // Po DOUBLE VERTICAL LINE
+ {0x2017, 0x2017, prN}, // Po DOUBLE LOW LINE
+ {0x2018, 0x2018, prA}, // Pi LEFT SINGLE QUOTATION MARK
+ {0x2019, 0x2019, prA}, // Pf RIGHT SINGLE QUOTATION MARK
+ {0x201A, 0x201A, prN}, // Ps SINGLE LOW-9 QUOTATION MARK
+ {0x201B, 0x201B, prN}, // Pi SINGLE HIGH-REVERSED-9 QUOTATION MARK
+ {0x201C, 0x201C, prA}, // Pi LEFT DOUBLE QUOTATION MARK
+ {0x201D, 0x201D, prA}, // Pf RIGHT DOUBLE QUOTATION MARK
+ {0x201E, 0x201E, prN}, // Ps DOUBLE LOW-9 QUOTATION MARK
+ {0x201F, 0x201F, prN}, // Pi DOUBLE HIGH-REVERSED-9 QUOTATION MARK
+ {0x2020, 0x2022, prA}, // Po [3] DAGGER..BULLET
+ {0x2023, 0x2023, prN}, // Po TRIANGULAR BULLET
+ {0x2024, 0x2027, prA}, // Po [4] ONE DOT LEADER..HYPHENATION POINT
+ {0x2028, 0x2028, prN}, // Zl LINE SEPARATOR
+ {0x2029, 0x2029, prN}, // Zp PARAGRAPH SEPARATOR
+ {0x202A, 0x202E, prN}, // Cf [5] LEFT-TO-RIGHT EMBEDDING..RIGHT-TO-LEFT OVERRIDE
+ {0x202F, 0x202F, prN}, // Zs NARROW NO-BREAK SPACE
+ {0x2030, 0x2030, prA}, // Po PER MILLE SIGN
+ {0x2031, 0x2031, prN}, // Po PER TEN THOUSAND SIGN
+ {0x2032, 0x2033, prA}, // Po [2] PRIME..DOUBLE PRIME
+ {0x2034, 0x2034, prN}, // Po TRIPLE PRIME
+ {0x2035, 0x2035, prA}, // Po REVERSED PRIME
+ {0x2036, 0x2038, prN}, // Po [3] REVERSED DOUBLE PRIME..CARET
+ {0x2039, 0x2039, prN}, // Pi SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+ {0x203A, 0x203A, prN}, // Pf SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+ {0x203B, 0x203B, prA}, // Po REFERENCE MARK
+ {0x203C, 0x203D, prN}, // Po [2] DOUBLE EXCLAMATION MARK..INTERROBANG
+ {0x203E, 0x203E, prA}, // Po OVERLINE
+ {0x203F, 0x2040, prN}, // Pc [2] UNDERTIE..CHARACTER TIE
+ {0x2041, 0x2043, prN}, // Po [3] CARET INSERTION POINT..HYPHEN BULLET
+ {0x2044, 0x2044, prN}, // Sm FRACTION SLASH
+ {0x2045, 0x2045, prN}, // Ps LEFT SQUARE BRACKET WITH QUILL
+ {0x2046, 0x2046, prN}, // Pe RIGHT SQUARE BRACKET WITH QUILL
+ {0x2047, 0x2051, prN}, // Po [11] DOUBLE QUESTION MARK..TWO ASTERISKS ALIGNED VERTICALLY
+ {0x2052, 0x2052, prN}, // Sm COMMERCIAL MINUS SIGN
+ {0x2053, 0x2053, prN}, // Po SWUNG DASH
+ {0x2054, 0x2054, prN}, // Pc INVERTED UNDERTIE
+ {0x2055, 0x205E, prN}, // Po [10] FLOWER PUNCTUATION MARK..VERTICAL FOUR DOTS
+ {0x205F, 0x205F, prN}, // Zs MEDIUM MATHEMATICAL SPACE
+ {0x2060, 0x2064, prN}, // Cf [5] WORD JOINER..INVISIBLE PLUS
+ {0x2066, 0x206F, prN}, // Cf [10] LEFT-TO-RIGHT ISOLATE..NOMINAL DIGIT SHAPES
+ {0x2070, 0x2070, prN}, // No SUPERSCRIPT ZERO
+ {0x2071, 0x2071, prN}, // Lm SUPERSCRIPT LATIN SMALL LETTER I
+ {0x2074, 0x2074, prA}, // No SUPERSCRIPT FOUR
+ {0x2075, 0x2079, prN}, // No [5] SUPERSCRIPT FIVE..SUPERSCRIPT NINE
+ {0x207A, 0x207C, prN}, // Sm [3] SUPERSCRIPT PLUS SIGN..SUPERSCRIPT EQUALS SIGN
+ {0x207D, 0x207D, prN}, // Ps SUPERSCRIPT LEFT PARENTHESIS
+ {0x207E, 0x207E, prN}, // Pe SUPERSCRIPT RIGHT PARENTHESIS
+ {0x207F, 0x207F, prA}, // Lm SUPERSCRIPT LATIN SMALL LETTER N
+ {0x2080, 0x2080, prN}, // No SUBSCRIPT ZERO
+ {0x2081, 0x2084, prA}, // No [4] SUBSCRIPT ONE..SUBSCRIPT FOUR
+ {0x2085, 0x2089, prN}, // No [5] SUBSCRIPT FIVE..SUBSCRIPT NINE
+ {0x208A, 0x208C, prN}, // Sm [3] SUBSCRIPT PLUS SIGN..SUBSCRIPT EQUALS SIGN
+ {0x208D, 0x208D, prN}, // Ps SUBSCRIPT LEFT PARENTHESIS
+ {0x208E, 0x208E, prN}, // Pe SUBSCRIPT RIGHT PARENTHESIS
+ {0x2090, 0x209C, prN}, // Lm [13] LATIN SUBSCRIPT SMALL LETTER A..LATIN SUBSCRIPT SMALL LETTER T
+ {0x20A0, 0x20A8, prN}, // Sc [9] EURO-CURRENCY SIGN..RUPEE SIGN
+ {0x20A9, 0x20A9, prH}, // Sc WON SIGN
+ {0x20AA, 0x20AB, prN}, // Sc [2] NEW SHEQEL SIGN..DONG SIGN
+ {0x20AC, 0x20AC, prA}, // Sc EURO SIGN
+ {0x20AD, 0x20C0, prN}, // Sc [20] KIP SIGN..SOM SIGN
+ {0x20D0, 0x20DC, prN}, // Mn [13] COMBINING LEFT HARPOON ABOVE..COMBINING FOUR DOTS ABOVE
+ {0x20DD, 0x20E0, prN}, // Me [4] COMBINING ENCLOSING CIRCLE..COMBINING ENCLOSING CIRCLE BACKSLASH
+ {0x20E1, 0x20E1, prN}, // Mn COMBINING LEFT RIGHT ARROW ABOVE
+ {0x20E2, 0x20E4, prN}, // Me [3] COMBINING ENCLOSING SCREEN..COMBINING ENCLOSING UPWARD POINTING TRIANGLE
+ {0x20E5, 0x20F0, prN}, // Mn [12] COMBINING REVERSE SOLIDUS OVERLAY..COMBINING ASTERISK ABOVE
+ {0x2100, 0x2101, prN}, // So [2] ACCOUNT OF..ADDRESSED TO THE SUBJECT
+ {0x2102, 0x2102, prN}, // Lu DOUBLE-STRUCK CAPITAL C
+ {0x2103, 0x2103, prA}, // So DEGREE CELSIUS
+ {0x2104, 0x2104, prN}, // So CENTRE LINE SYMBOL
+ {0x2105, 0x2105, prA}, // So CARE OF
+ {0x2106, 0x2106, prN}, // So CADA UNA
+ {0x2107, 0x2107, prN}, // Lu EULER CONSTANT
+ {0x2108, 0x2108, prN}, // So SCRUPLE
+ {0x2109, 0x2109, prA}, // So DEGREE FAHRENHEIT
+ {0x210A, 0x2112, prN}, // L& [9] SCRIPT SMALL G..SCRIPT CAPITAL L
+ {0x2113, 0x2113, prA}, // Ll SCRIPT SMALL L
+ {0x2114, 0x2114, prN}, // So L B BAR SYMBOL
+ {0x2115, 0x2115, prN}, // Lu DOUBLE-STRUCK CAPITAL N
+ {0x2116, 0x2116, prA}, // So NUMERO SIGN
+ {0x2117, 0x2117, prN}, // So SOUND RECORDING COPYRIGHT
+ {0x2118, 0x2118, prN}, // Sm SCRIPT CAPITAL P
+ {0x2119, 0x211D, prN}, // Lu [5] DOUBLE-STRUCK CAPITAL P..DOUBLE-STRUCK CAPITAL R
+ {0x211E, 0x2120, prN}, // So [3] PRESCRIPTION TAKE..SERVICE MARK
+ {0x2121, 0x2122, prA}, // So [2] TELEPHONE SIGN..TRADE MARK SIGN
+ {0x2123, 0x2123, prN}, // So VERSICLE
+ {0x2124, 0x2124, prN}, // Lu DOUBLE-STRUCK CAPITAL Z
+ {0x2125, 0x2125, prN}, // So OUNCE SIGN
+ {0x2126, 0x2126, prA}, // Lu OHM SIGN
+ {0x2127, 0x2127, prN}, // So INVERTED OHM SIGN
+ {0x2128, 0x2128, prN}, // Lu BLACK-LETTER CAPITAL Z
+ {0x2129, 0x2129, prN}, // So TURNED GREEK SMALL LETTER IOTA
+ {0x212A, 0x212A, prN}, // Lu KELVIN SIGN
+ {0x212B, 0x212B, prA}, // Lu ANGSTROM SIGN
+ {0x212C, 0x212D, prN}, // Lu [2] SCRIPT CAPITAL B..BLACK-LETTER CAPITAL C
+ {0x212E, 0x212E, prN}, // So ESTIMATED SYMBOL
+ {0x212F, 0x2134, prN}, // L& [6] SCRIPT SMALL E..SCRIPT SMALL O
+ {0x2135, 0x2138, prN}, // Lo [4] ALEF SYMBOL..DALET SYMBOL
+ {0x2139, 0x2139, prN}, // Ll INFORMATION SOURCE
+ {0x213A, 0x213B, prN}, // So [2] ROTATED CAPITAL Q..FACSIMILE SIGN
+ {0x213C, 0x213F, prN}, // L& [4] DOUBLE-STRUCK SMALL PI..DOUBLE-STRUCK CAPITAL PI
+ {0x2140, 0x2144, prN}, // Sm [5] DOUBLE-STRUCK N-ARY SUMMATION..TURNED SANS-SERIF CAPITAL Y
+ {0x2145, 0x2149, prN}, // L& [5] DOUBLE-STRUCK ITALIC CAPITAL D..DOUBLE-STRUCK ITALIC SMALL J
+ {0x214A, 0x214A, prN}, // So PROPERTY LINE
+ {0x214B, 0x214B, prN}, // Sm TURNED AMPERSAND
+ {0x214C, 0x214D, prN}, // So [2] PER SIGN..AKTIESELSKAB
+ {0x214E, 0x214E, prN}, // Ll TURNED SMALL F
+ {0x214F, 0x214F, prN}, // So SYMBOL FOR SAMARITAN SOURCE
+ {0x2150, 0x2152, prN}, // No [3] VULGAR FRACTION ONE SEVENTH..VULGAR FRACTION ONE TENTH
+ {0x2153, 0x2154, prA}, // No [2] VULGAR FRACTION ONE THIRD..VULGAR FRACTION TWO THIRDS
+ {0x2155, 0x215A, prN}, // No [6] VULGAR FRACTION ONE FIFTH..VULGAR FRACTION FIVE SIXTHS
+ {0x215B, 0x215E, prA}, // No [4] VULGAR FRACTION ONE EIGHTH..VULGAR FRACTION SEVEN EIGHTHS
+ {0x215F, 0x215F, prN}, // No FRACTION NUMERATOR ONE
+ {0x2160, 0x216B, prA}, // Nl [12] ROMAN NUMERAL ONE..ROMAN NUMERAL TWELVE
+ {0x216C, 0x216F, prN}, // Nl [4] ROMAN NUMERAL FIFTY..ROMAN NUMERAL ONE THOUSAND
+ {0x2170, 0x2179, prA}, // Nl [10] SMALL ROMAN NUMERAL ONE..SMALL ROMAN NUMERAL TEN
+ {0x217A, 0x2182, prN}, // Nl [9] SMALL ROMAN NUMERAL ELEVEN..ROMAN NUMERAL TEN THOUSAND
+ {0x2183, 0x2184, prN}, // L& [2] ROMAN NUMERAL REVERSED ONE HUNDRED..LATIN SMALL LETTER REVERSED C
+ {0x2185, 0x2188, prN}, // Nl [4] ROMAN NUMERAL SIX LATE FORM..ROMAN NUMERAL ONE HUNDRED THOUSAND
+ {0x2189, 0x2189, prA}, // No VULGAR FRACTION ZERO THIRDS
+ {0x218A, 0x218B, prN}, // So [2] TURNED DIGIT TWO..TURNED DIGIT THREE
+ {0x2190, 0x2194, prA}, // Sm [5] LEFTWARDS ARROW..LEFT RIGHT ARROW
+ {0x2195, 0x2199, prA}, // So [5] UP DOWN ARROW..SOUTH WEST ARROW
+ {0x219A, 0x219B, prN}, // Sm [2] LEFTWARDS ARROW WITH STROKE..RIGHTWARDS ARROW WITH STROKE
+ {0x219C, 0x219F, prN}, // So [4] LEFTWARDS WAVE ARROW..UPWARDS TWO HEADED ARROW
+ {0x21A0, 0x21A0, prN}, // Sm RIGHTWARDS TWO HEADED ARROW
+ {0x21A1, 0x21A2, prN}, // So [2] DOWNWARDS TWO HEADED ARROW..LEFTWARDS ARROW WITH TAIL
+ {0x21A3, 0x21A3, prN}, // Sm RIGHTWARDS ARROW WITH TAIL
+ {0x21A4, 0x21A5, prN}, // So [2] LEFTWARDS ARROW FROM BAR..UPWARDS ARROW FROM BAR
+ {0x21A6, 0x21A6, prN}, // Sm RIGHTWARDS ARROW FROM BAR
+ {0x21A7, 0x21AD, prN}, // So [7] DOWNWARDS ARROW FROM BAR..LEFT RIGHT WAVE ARROW
+ {0x21AE, 0x21AE, prN}, // Sm LEFT RIGHT ARROW WITH STROKE
+ {0x21AF, 0x21B7, prN}, // So [9] DOWNWARDS ZIGZAG ARROW..CLOCKWISE TOP SEMICIRCLE ARROW
+ {0x21B8, 0x21B9, prA}, // So [2] NORTH WEST ARROW TO LONG BAR..LEFTWARDS ARROW TO BAR OVER RIGHTWARDS ARROW TO BAR
+ {0x21BA, 0x21CD, prN}, // So [20] ANTICLOCKWISE OPEN CIRCLE ARROW..LEFTWARDS DOUBLE ARROW WITH STROKE
+ {0x21CE, 0x21CF, prN}, // Sm [2] LEFT RIGHT DOUBLE ARROW WITH STROKE..RIGHTWARDS DOUBLE ARROW WITH STROKE
+ {0x21D0, 0x21D1, prN}, // So [2] LEFTWARDS DOUBLE ARROW..UPWARDS DOUBLE ARROW
+ {0x21D2, 0x21D2, prA}, // Sm RIGHTWARDS DOUBLE ARROW
+ {0x21D3, 0x21D3, prN}, // So DOWNWARDS DOUBLE ARROW
+ {0x21D4, 0x21D4, prA}, // Sm LEFT RIGHT DOUBLE ARROW
+ {0x21D5, 0x21E6, prN}, // So [18] UP DOWN DOUBLE ARROW..LEFTWARDS WHITE ARROW
+ {0x21E7, 0x21E7, prA}, // So UPWARDS WHITE ARROW
+ {0x21E8, 0x21F3, prN}, // So [12] RIGHTWARDS WHITE ARROW..UP DOWN WHITE ARROW
+ {0x21F4, 0x21FF, prN}, // Sm [12] RIGHT ARROW WITH SMALL CIRCLE..LEFT RIGHT OPEN-HEADED ARROW
+ {0x2200, 0x2200, prA}, // Sm FOR ALL
+ {0x2201, 0x2201, prN}, // Sm COMPLEMENT
+ {0x2202, 0x2203, prA}, // Sm [2] PARTIAL DIFFERENTIAL..THERE EXISTS
+ {0x2204, 0x2206, prN}, // Sm [3] THERE DOES NOT EXIST..INCREMENT
+ {0x2207, 0x2208, prA}, // Sm [2] NABLA..ELEMENT OF
+ {0x2209, 0x220A, prN}, // Sm [2] NOT AN ELEMENT OF..SMALL ELEMENT OF
+ {0x220B, 0x220B, prA}, // Sm CONTAINS AS MEMBER
+ {0x220C, 0x220E, prN}, // Sm [3] DOES NOT CONTAIN AS MEMBER..END OF PROOF
+ {0x220F, 0x220F, prA}, // Sm N-ARY PRODUCT
+ {0x2210, 0x2210, prN}, // Sm N-ARY COPRODUCT
+ {0x2211, 0x2211, prA}, // Sm N-ARY SUMMATION
+ {0x2212, 0x2214, prN}, // Sm [3] MINUS SIGN..DOT PLUS
+ {0x2215, 0x2215, prA}, // Sm DIVISION SLASH
+ {0x2216, 0x2219, prN}, // Sm [4] SET MINUS..BULLET OPERATOR
+ {0x221A, 0x221A, prA}, // Sm SQUARE ROOT
+ {0x221B, 0x221C, prN}, // Sm [2] CUBE ROOT..FOURTH ROOT
+ {0x221D, 0x2220, prA}, // Sm [4] PROPORTIONAL TO..ANGLE
+ {0x2221, 0x2222, prN}, // Sm [2] MEASURED ANGLE..SPHERICAL ANGLE
+ {0x2223, 0x2223, prA}, // Sm DIVIDES
+ {0x2224, 0x2224, prN}, // Sm DOES NOT DIVIDE
+ {0x2225, 0x2225, prA}, // Sm PARALLEL TO
+ {0x2226, 0x2226, prN}, // Sm NOT PARALLEL TO
+ {0x2227, 0x222C, prA}, // Sm [6] LOGICAL AND..DOUBLE INTEGRAL
+ {0x222D, 0x222D, prN}, // Sm TRIPLE INTEGRAL
+ {0x222E, 0x222E, prA}, // Sm CONTOUR INTEGRAL
+ {0x222F, 0x2233, prN}, // Sm [5] SURFACE INTEGRAL..ANTICLOCKWISE CONTOUR INTEGRAL
+ {0x2234, 0x2237, prA}, // Sm [4] THEREFORE..PROPORTION
+ {0x2238, 0x223B, prN}, // Sm [4] DOT MINUS..HOMOTHETIC
+ {0x223C, 0x223D, prA}, // Sm [2] TILDE OPERATOR..REVERSED TILDE
+ {0x223E, 0x2247, prN}, // Sm [10] INVERTED LAZY S..NEITHER APPROXIMATELY NOR ACTUALLY EQUAL TO
+ {0x2248, 0x2248, prA}, // Sm ALMOST EQUAL TO
+ {0x2249, 0x224B, prN}, // Sm [3] NOT ALMOST EQUAL TO..TRIPLE TILDE
+ {0x224C, 0x224C, prA}, // Sm ALL EQUAL TO
+ {0x224D, 0x2251, prN}, // Sm [5] EQUIVALENT TO..GEOMETRICALLY EQUAL TO
+ {0x2252, 0x2252, prA}, // Sm APPROXIMATELY EQUAL TO OR THE IMAGE OF
+ {0x2253, 0x225F, prN}, // Sm [13] IMAGE OF OR APPROXIMATELY EQUAL TO..QUESTIONED EQUAL TO
+ {0x2260, 0x2261, prA}, // Sm [2] NOT EQUAL TO..IDENTICAL TO
+ {0x2262, 0x2263, prN}, // Sm [2] NOT IDENTICAL TO..STRICTLY EQUIVALENT TO
+ {0x2264, 0x2267, prA}, // Sm [4] LESS-THAN OR EQUAL TO..GREATER-THAN OVER EQUAL TO
+ {0x2268, 0x2269, prN}, // Sm [2] LESS-THAN BUT NOT EQUAL TO..GREATER-THAN BUT NOT EQUAL TO
+ {0x226A, 0x226B, prA}, // Sm [2] MUCH LESS-THAN..MUCH GREATER-THAN
+ {0x226C, 0x226D, prN}, // Sm [2] BETWEEN..NOT EQUIVALENT TO
+ {0x226E, 0x226F, prA}, // Sm [2] NOT LESS-THAN..NOT GREATER-THAN
+ {0x2270, 0x2281, prN}, // Sm [18] NEITHER LESS-THAN NOR EQUAL TO..DOES NOT SUCCEED
+ {0x2282, 0x2283, prA}, // Sm [2] SUBSET OF..SUPERSET OF
+ {0x2284, 0x2285, prN}, // Sm [2] NOT A SUBSET OF..NOT A SUPERSET OF
+ {0x2286, 0x2287, prA}, // Sm [2] SUBSET OF OR EQUAL TO..SUPERSET OF OR EQUAL TO
+ {0x2288, 0x2294, prN}, // Sm [13] NEITHER A SUBSET OF NOR EQUAL TO..SQUARE CUP
+ {0x2295, 0x2295, prA}, // Sm CIRCLED PLUS
+ {0x2296, 0x2298, prN}, // Sm [3] CIRCLED MINUS..CIRCLED DIVISION SLASH
+ {0x2299, 0x2299, prA}, // Sm CIRCLED DOT OPERATOR
+ {0x229A, 0x22A4, prN}, // Sm [11] CIRCLED RING OPERATOR..DOWN TACK
+ {0x22A5, 0x22A5, prA}, // Sm UP TACK
+ {0x22A6, 0x22BE, prN}, // Sm [25] ASSERTION..RIGHT ANGLE WITH ARC
+ {0x22BF, 0x22BF, prA}, // Sm RIGHT TRIANGLE
+ {0x22C0, 0x22FF, prN}, // Sm [64] N-ARY LOGICAL AND..Z NOTATION BAG MEMBERSHIP
+ {0x2300, 0x2307, prN}, // So [8] DIAMETER SIGN..WAVY LINE
+ {0x2308, 0x2308, prN}, // Ps LEFT CEILING
+ {0x2309, 0x2309, prN}, // Pe RIGHT CEILING
+ {0x230A, 0x230A, prN}, // Ps LEFT FLOOR
+ {0x230B, 0x230B, prN}, // Pe RIGHT FLOOR
+ {0x230C, 0x2311, prN}, // So [6] BOTTOM RIGHT CROP..SQUARE LOZENGE
+ {0x2312, 0x2312, prA}, // So ARC
+ {0x2313, 0x2319, prN}, // So [7] SEGMENT..TURNED NOT SIGN
+ {0x231A, 0x231B, prW}, // So [2] WATCH..HOURGLASS
+ {0x231C, 0x231F, prN}, // So [4] TOP LEFT CORNER..BOTTOM RIGHT CORNER
+ {0x2320, 0x2321, prN}, // Sm [2] TOP HALF INTEGRAL..BOTTOM HALF INTEGRAL
+ {0x2322, 0x2328, prN}, // So [7] FROWN..KEYBOARD
+ {0x2329, 0x2329, prW}, // Ps LEFT-POINTING ANGLE BRACKET
+ {0x232A, 0x232A, prW}, // Pe RIGHT-POINTING ANGLE BRACKET
+ {0x232B, 0x237B, prN}, // So [81] ERASE TO THE LEFT..NOT CHECK MARK
+ {0x237C, 0x237C, prN}, // Sm RIGHT ANGLE WITH DOWNWARDS ZIGZAG ARROW
+ {0x237D, 0x239A, prN}, // So [30] SHOULDERED OPEN BOX..CLEAR SCREEN SYMBOL
+ {0x239B, 0x23B3, prN}, // Sm [25] LEFT PARENTHESIS UPPER HOOK..SUMMATION BOTTOM
+ {0x23B4, 0x23DB, prN}, // So [40] TOP SQUARE BRACKET..FUSE
+ {0x23DC, 0x23E1, prN}, // Sm [6] TOP PARENTHESIS..BOTTOM TORTOISE SHELL BRACKET
+ {0x23E2, 0x23E8, prN}, // So [7] WHITE TRAPEZIUM..DECIMAL EXPONENT SYMBOL
+ {0x23E9, 0x23EC, prW}, // So [4] BLACK RIGHT-POINTING DOUBLE TRIANGLE..BLACK DOWN-POINTING DOUBLE TRIANGLE
+ {0x23ED, 0x23EF, prN}, // So [3] BLACK RIGHT-POINTING DOUBLE TRIANGLE WITH VERTICAL BAR..BLACK RIGHT-POINTING TRIANGLE WITH DOUBLE VERTICAL BAR
+ {0x23F0, 0x23F0, prW}, // So ALARM CLOCK
+ {0x23F1, 0x23F2, prN}, // So [2] STOPWATCH..TIMER CLOCK
+ {0x23F3, 0x23F3, prW}, // So HOURGLASS WITH FLOWING SAND
+ {0x23F4, 0x23FF, prN}, // So [12] BLACK MEDIUM LEFT-POINTING TRIANGLE..OBSERVER EYE SYMBOL
+ {0x2400, 0x2426, prN}, // So [39] SYMBOL FOR NULL..SYMBOL FOR SUBSTITUTE FORM TWO
+ {0x2440, 0x244A, prN}, // So [11] OCR HOOK..OCR DOUBLE BACKSLASH
+ {0x2460, 0x249B, prA}, // No [60] CIRCLED DIGIT ONE..NUMBER TWENTY FULL STOP
+ {0x249C, 0x24E9, prA}, // So [78] PARENTHESIZED LATIN SMALL LETTER A..CIRCLED LATIN SMALL LETTER Z
+ {0x24EA, 0x24EA, prN}, // No CIRCLED DIGIT ZERO
+ {0x24EB, 0x24FF, prA}, // No [21] NEGATIVE CIRCLED NUMBER ELEVEN..NEGATIVE CIRCLED DIGIT ZERO
+ {0x2500, 0x254B, prA}, // So [76] BOX DRAWINGS LIGHT HORIZONTAL..BOX DRAWINGS HEAVY VERTICAL AND HORIZONTAL
+ {0x254C, 0x254F, prN}, // So [4] BOX DRAWINGS LIGHT DOUBLE DASH HORIZONTAL..BOX DRAWINGS HEAVY DOUBLE DASH VERTICAL
+ {0x2550, 0x2573, prA}, // So [36] BOX DRAWINGS DOUBLE HORIZONTAL..BOX DRAWINGS LIGHT DIAGONAL CROSS
+ {0x2574, 0x257F, prN}, // So [12] BOX DRAWINGS LIGHT LEFT..BOX DRAWINGS HEAVY UP AND LIGHT DOWN
+ {0x2580, 0x258F, prA}, // So [16] UPPER HALF BLOCK..LEFT ONE EIGHTH BLOCK
+ {0x2590, 0x2591, prN}, // So [2] RIGHT HALF BLOCK..LIGHT SHADE
+ {0x2592, 0x2595, prA}, // So [4] MEDIUM SHADE..RIGHT ONE EIGHTH BLOCK
+ {0x2596, 0x259F, prN}, // So [10] QUADRANT LOWER LEFT..QUADRANT UPPER RIGHT AND LOWER LEFT AND LOWER RIGHT
+ {0x25A0, 0x25A1, prA}, // So [2] BLACK SQUARE..WHITE SQUARE
+ {0x25A2, 0x25A2, prN}, // So WHITE SQUARE WITH ROUNDED CORNERS
+ {0x25A3, 0x25A9, prA}, // So [7] WHITE SQUARE CONTAINING BLACK SMALL SQUARE..SQUARE WITH DIAGONAL CROSSHATCH FILL
+ {0x25AA, 0x25B1, prN}, // So [8] BLACK SMALL SQUARE..WHITE PARALLELOGRAM
+ {0x25B2, 0x25B3, prA}, // So [2] BLACK UP-POINTING TRIANGLE..WHITE UP-POINTING TRIANGLE
+ {0x25B4, 0x25B5, prN}, // So [2] BLACK UP-POINTING SMALL TRIANGLE..WHITE UP-POINTING SMALL TRIANGLE
+ {0x25B6, 0x25B6, prA}, // So BLACK RIGHT-POINTING TRIANGLE
+ {0x25B7, 0x25B7, prA}, // Sm WHITE RIGHT-POINTING TRIANGLE
+ {0x25B8, 0x25BB, prN}, // So [4] BLACK RIGHT-POINTING SMALL TRIANGLE..WHITE RIGHT-POINTING POINTER
+ {0x25BC, 0x25BD, prA}, // So [2] BLACK DOWN-POINTING TRIANGLE..WHITE DOWN-POINTING TRIANGLE
+ {0x25BE, 0x25BF, prN}, // So [2] BLACK DOWN-POINTING SMALL TRIANGLE..WHITE DOWN-POINTING SMALL TRIANGLE
+ {0x25C0, 0x25C0, prA}, // So BLACK LEFT-POINTING TRIANGLE
+ {0x25C1, 0x25C1, prA}, // Sm WHITE LEFT-POINTING TRIANGLE
+ {0x25C2, 0x25C5, prN}, // So [4] BLACK LEFT-POINTING SMALL TRIANGLE..WHITE LEFT-POINTING POINTER
+ {0x25C6, 0x25C8, prA}, // So [3] BLACK DIAMOND..WHITE DIAMOND CONTAINING BLACK SMALL DIAMOND
+ {0x25C9, 0x25CA, prN}, // So [2] FISHEYE..LOZENGE
+ {0x25CB, 0x25CB, prA}, // So WHITE CIRCLE
+ {0x25CC, 0x25CD, prN}, // So [2] DOTTED CIRCLE..CIRCLE WITH VERTICAL FILL
+ {0x25CE, 0x25D1, prA}, // So [4] BULLSEYE..CIRCLE WITH RIGHT HALF BLACK
+ {0x25D2, 0x25E1, prN}, // So [16] CIRCLE WITH LOWER HALF BLACK..LOWER HALF CIRCLE
+ {0x25E2, 0x25E5, prA}, // So [4] BLACK LOWER RIGHT TRIANGLE..BLACK UPPER RIGHT TRIANGLE
+ {0x25E6, 0x25EE, prN}, // So [9] WHITE BULLET..UP-POINTING TRIANGLE WITH RIGHT HALF BLACK
+ {0x25EF, 0x25EF, prA}, // So LARGE CIRCLE
+ {0x25F0, 0x25F7, prN}, // So [8] WHITE SQUARE WITH UPPER LEFT QUADRANT..WHITE CIRCLE WITH UPPER RIGHT QUADRANT
+ {0x25F8, 0x25FC, prN}, // Sm [5] UPPER LEFT TRIANGLE..BLACK MEDIUM SQUARE
+ {0x25FD, 0x25FE, prW}, // Sm [2] WHITE MEDIUM SMALL SQUARE..BLACK MEDIUM SMALL SQUARE
+ {0x25FF, 0x25FF, prN}, // Sm LOWER RIGHT TRIANGLE
+ {0x2600, 0x2604, prN}, // So [5] BLACK SUN WITH RAYS..COMET
+ {0x2605, 0x2606, prA}, // So [2] BLACK STAR..WHITE STAR
+ {0x2607, 0x2608, prN}, // So [2] LIGHTNING..THUNDERSTORM
+ {0x2609, 0x2609, prA}, // So SUN
+ {0x260A, 0x260D, prN}, // So [4] ASCENDING NODE..OPPOSITION
+ {0x260E, 0x260F, prA}, // So [2] BLACK TELEPHONE..WHITE TELEPHONE
+ {0x2610, 0x2613, prN}, // So [4] BALLOT BOX..SALTIRE
+ {0x2614, 0x2615, prW}, // So [2] UMBRELLA WITH RAIN DROPS..HOT BEVERAGE
+ {0x2616, 0x261B, prN}, // So [6] WHITE SHOGI PIECE..BLACK RIGHT POINTING INDEX
+ {0x261C, 0x261C, prA}, // So WHITE LEFT POINTING INDEX
+ {0x261D, 0x261D, prN}, // So WHITE UP POINTING INDEX
+ {0x261E, 0x261E, prA}, // So WHITE RIGHT POINTING INDEX
+ {0x261F, 0x263F, prN}, // So [33] WHITE DOWN POINTING INDEX..MERCURY
+ {0x2640, 0x2640, prA}, // So FEMALE SIGN
+ {0x2641, 0x2641, prN}, // So EARTH
+ {0x2642, 0x2642, prA}, // So MALE SIGN
+ {0x2643, 0x2647, prN}, // So [5] JUPITER..PLUTO
+ {0x2648, 0x2653, prW}, // So [12] ARIES..PISCES
+ {0x2654, 0x265F, prN}, // So [12] WHITE CHESS KING..BLACK CHESS PAWN
+ {0x2660, 0x2661, prA}, // So [2] BLACK SPADE SUIT..WHITE HEART SUIT
+ {0x2662, 0x2662, prN}, // So WHITE DIAMOND SUIT
+ {0x2663, 0x2665, prA}, // So [3] BLACK CLUB SUIT..BLACK HEART SUIT
+ {0x2666, 0x2666, prN}, // So BLACK DIAMOND SUIT
+ {0x2667, 0x266A, prA}, // So [4] WHITE CLUB SUIT..EIGHTH NOTE
+ {0x266B, 0x266B, prN}, // So BEAMED EIGHTH NOTES
+ {0x266C, 0x266D, prA}, // So [2] BEAMED SIXTEENTH NOTES..MUSIC FLAT SIGN
+ {0x266E, 0x266E, prN}, // So MUSIC NATURAL SIGN
+ {0x266F, 0x266F, prA}, // Sm MUSIC SHARP SIGN
+ {0x2670, 0x267E, prN}, // So [15] WEST SYRIAC CROSS..PERMANENT PAPER SIGN
+ {0x267F, 0x267F, prW}, // So WHEELCHAIR SYMBOL
+ {0x2680, 0x2692, prN}, // So [19] DIE FACE-1..HAMMER AND PICK
+ {0x2693, 0x2693, prW}, // So ANCHOR
+ {0x2694, 0x269D, prN}, // So [10] CROSSED SWORDS..OUTLINED WHITE STAR
+ {0x269E, 0x269F, prA}, // So [2] THREE LINES CONVERGING RIGHT..THREE LINES CONVERGING LEFT
+ {0x26A0, 0x26A0, prN}, // So WARNING SIGN
+ {0x26A1, 0x26A1, prW}, // So HIGH VOLTAGE SIGN
+ {0x26A2, 0x26A9, prN}, // So [8] DOUBLED FEMALE SIGN..HORIZONTAL MALE WITH STROKE SIGN
+ {0x26AA, 0x26AB, prW}, // So [2] MEDIUM WHITE CIRCLE..MEDIUM BLACK CIRCLE
+ {0x26AC, 0x26BC, prN}, // So [17] MEDIUM SMALL WHITE CIRCLE..SESQUIQUADRATE
+ {0x26BD, 0x26BE, prW}, // So [2] SOCCER BALL..BASEBALL
+ {0x26BF, 0x26BF, prA}, // So SQUARED KEY
+ {0x26C0, 0x26C3, prN}, // So [4] WHITE DRAUGHTS MAN..BLACK DRAUGHTS KING
+ {0x26C4, 0x26C5, prW}, // So [2] SNOWMAN WITHOUT SNOW..SUN BEHIND CLOUD
+ {0x26C6, 0x26CD, prA}, // So [8] RAIN..DISABLED CAR
+ {0x26CE, 0x26CE, prW}, // So OPHIUCHUS
+ {0x26CF, 0x26D3, prA}, // So [5] PICK..CHAINS
+ {0x26D4, 0x26D4, prW}, // So NO ENTRY
+ {0x26D5, 0x26E1, prA}, // So [13] ALTERNATE ONE-WAY LEFT WAY TRAFFIC..RESTRICTED LEFT ENTRY-2
+ {0x26E2, 0x26E2, prN}, // So ASTRONOMICAL SYMBOL FOR URANUS
+ {0x26E3, 0x26E3, prA}, // So HEAVY CIRCLE WITH STROKE AND TWO DOTS ABOVE
+ {0x26E4, 0x26E7, prN}, // So [4] PENTAGRAM..INVERTED PENTAGRAM
+ {0x26E8, 0x26E9, prA}, // So [2] BLACK CROSS ON SHIELD..SHINTO SHRINE
+ {0x26EA, 0x26EA, prW}, // So CHURCH
+ {0x26EB, 0x26F1, prA}, // So [7] CASTLE..UMBRELLA ON GROUND
+ {0x26F2, 0x26F3, prW}, // So [2] FOUNTAIN..FLAG IN HOLE
+ {0x26F4, 0x26F4, prA}, // So FERRY
+ {0x26F5, 0x26F5, prW}, // So SAILBOAT
+ {0x26F6, 0x26F9, prA}, // So [4] SQUARE FOUR CORNERS..PERSON WITH BALL
+ {0x26FA, 0x26FA, prW}, // So TENT
+ {0x26FB, 0x26FC, prA}, // So [2] JAPANESE BANK SYMBOL..HEADSTONE GRAVEYARD SYMBOL
+ {0x26FD, 0x26FD, prW}, // So FUEL PUMP
+ {0x26FE, 0x26FF, prA}, // So [2] CUP ON BLACK SQUARE..WHITE FLAG WITH HORIZONTAL MIDDLE BLACK STRIPE
+ {0x2700, 0x2704, prN}, // So [5] BLACK SAFETY SCISSORS..WHITE SCISSORS
+ {0x2705, 0x2705, prW}, // So WHITE HEAVY CHECK MARK
+ {0x2706, 0x2709, prN}, // So [4] TELEPHONE LOCATION SIGN..ENVELOPE
+ {0x270A, 0x270B, prW}, // So [2] RAISED FIST..RAISED HAND
+ {0x270C, 0x2727, prN}, // So [28] VICTORY HAND..WHITE FOUR POINTED STAR
+ {0x2728, 0x2728, prW}, // So SPARKLES
+ {0x2729, 0x273C, prN}, // So [20] STRESS OUTLINED WHITE STAR..OPEN CENTRE TEARDROP-SPOKED ASTERISK
+ {0x273D, 0x273D, prA}, // So HEAVY TEARDROP-SPOKED ASTERISK
+ {0x273E, 0x274B, prN}, // So [14] SIX PETALLED BLACK AND WHITE FLORETTE..HEAVY EIGHT TEARDROP-SPOKED PROPELLER ASTERISK
+ {0x274C, 0x274C, prW}, // So CROSS MARK
+ {0x274D, 0x274D, prN}, // So SHADOWED WHITE CIRCLE
+ {0x274E, 0x274E, prW}, // So NEGATIVE SQUARED CROSS MARK
+ {0x274F, 0x2752, prN}, // So [4] LOWER RIGHT DROP-SHADOWED WHITE SQUARE..UPPER RIGHT SHADOWED WHITE SQUARE
+ {0x2753, 0x2755, prW}, // So [3] BLACK QUESTION MARK ORNAMENT..WHITE EXCLAMATION MARK ORNAMENT
+ {0x2756, 0x2756, prN}, // So BLACK DIAMOND MINUS WHITE X
+ {0x2757, 0x2757, prW}, // So HEAVY EXCLAMATION MARK SYMBOL
+ {0x2758, 0x2767, prN}, // So [16] LIGHT VERTICAL BAR..ROTATED FLORAL HEART BULLET
+ {0x2768, 0x2768, prN}, // Ps MEDIUM LEFT PARENTHESIS ORNAMENT
+ {0x2769, 0x2769, prN}, // Pe MEDIUM RIGHT PARENTHESIS ORNAMENT
+ {0x276A, 0x276A, prN}, // Ps MEDIUM FLATTENED LEFT PARENTHESIS ORNAMENT
+ {0x276B, 0x276B, prN}, // Pe MEDIUM FLATTENED RIGHT PARENTHESIS ORNAMENT
+ {0x276C, 0x276C, prN}, // Ps MEDIUM LEFT-POINTING ANGLE BRACKET ORNAMENT
+ {0x276D, 0x276D, prN}, // Pe MEDIUM RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ {0x276E, 0x276E, prN}, // Ps HEAVY LEFT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ {0x276F, 0x276F, prN}, // Pe HEAVY RIGHT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ {0x2770, 0x2770, prN}, // Ps HEAVY LEFT-POINTING ANGLE BRACKET ORNAMENT
+ {0x2771, 0x2771, prN}, // Pe HEAVY RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ {0x2772, 0x2772, prN}, // Ps LIGHT LEFT TORTOISE SHELL BRACKET ORNAMENT
+ {0x2773, 0x2773, prN}, // Pe LIGHT RIGHT TORTOISE SHELL BRACKET ORNAMENT
+ {0x2774, 0x2774, prN}, // Ps MEDIUM LEFT CURLY BRACKET ORNAMENT
+ {0x2775, 0x2775, prN}, // Pe MEDIUM RIGHT CURLY BRACKET ORNAMENT
+ {0x2776, 0x277F, prA}, // No [10] DINGBAT NEGATIVE CIRCLED DIGIT ONE..DINGBAT NEGATIVE CIRCLED NUMBER TEN
+ {0x2780, 0x2793, prN}, // No [20] DINGBAT CIRCLED SANS-SERIF DIGIT ONE..DINGBAT NEGATIVE CIRCLED SANS-SERIF NUMBER TEN
+ {0x2794, 0x2794, prN}, // So HEAVY WIDE-HEADED RIGHTWARDS ARROW
+ {0x2795, 0x2797, prW}, // So [3] HEAVY PLUS SIGN..HEAVY DIVISION SIGN
+ {0x2798, 0x27AF, prN}, // So [24] HEAVY SOUTH EAST ARROW..NOTCHED LOWER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW
+ {0x27B0, 0x27B0, prW}, // So CURLY LOOP
+ {0x27B1, 0x27BE, prN}, // So [14] NOTCHED UPPER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW..OPEN-OUTLINED RIGHTWARDS ARROW
+ {0x27BF, 0x27BF, prW}, // So DOUBLE CURLY LOOP
+ {0x27C0, 0x27C4, prN}, // Sm [5] THREE DIMENSIONAL ANGLE..OPEN SUPERSET
+ {0x27C5, 0x27C5, prN}, // Ps LEFT S-SHAPED BAG DELIMITER
+ {0x27C6, 0x27C6, prN}, // Pe RIGHT S-SHAPED BAG DELIMITER
+ {0x27C7, 0x27E5, prN}, // Sm [31] OR WITH DOT INSIDE..WHITE SQUARE WITH RIGHTWARDS TICK
+ {0x27E6, 0x27E6, prNa}, // Ps MATHEMATICAL LEFT WHITE SQUARE BRACKET
+ {0x27E7, 0x27E7, prNa}, // Pe MATHEMATICAL RIGHT WHITE SQUARE BRACKET
+ {0x27E8, 0x27E8, prNa}, // Ps MATHEMATICAL LEFT ANGLE BRACKET
+ {0x27E9, 0x27E9, prNa}, // Pe MATHEMATICAL RIGHT ANGLE BRACKET
+ {0x27EA, 0x27EA, prNa}, // Ps MATHEMATICAL LEFT DOUBLE ANGLE BRACKET
+ {0x27EB, 0x27EB, prNa}, // Pe MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET
+ {0x27EC, 0x27EC, prNa}, // Ps MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET
+ {0x27ED, 0x27ED, prNa}, // Pe MATHEMATICAL RIGHT WHITE TORTOISE SHELL BRACKET
+ {0x27EE, 0x27EE, prN}, // Ps MATHEMATICAL LEFT FLATTENED PARENTHESIS
+ {0x27EF, 0x27EF, prN}, // Pe MATHEMATICAL RIGHT FLATTENED PARENTHESIS
+ {0x27F0, 0x27FF, prN}, // Sm [16] UPWARDS QUADRUPLE ARROW..LONG RIGHTWARDS SQUIGGLE ARROW
+ {0x2800, 0x28FF, prN}, // So [256] BRAILLE PATTERN BLANK..BRAILLE PATTERN DOTS-12345678
+ {0x2900, 0x297F, prN}, // Sm [128] RIGHTWARDS TWO-HEADED ARROW WITH VERTICAL STROKE..DOWN FISH TAIL
+ {0x2980, 0x2982, prN}, // Sm [3] TRIPLE VERTICAL BAR DELIMITER..Z NOTATION TYPE COLON
+ {0x2983, 0x2983, prN}, // Ps LEFT WHITE CURLY BRACKET
+ {0x2984, 0x2984, prN}, // Pe RIGHT WHITE CURLY BRACKET
+ {0x2985, 0x2985, prNa}, // Ps LEFT WHITE PARENTHESIS
+ {0x2986, 0x2986, prNa}, // Pe RIGHT WHITE PARENTHESIS
+ {0x2987, 0x2987, prN}, // Ps Z NOTATION LEFT IMAGE BRACKET
+ {0x2988, 0x2988, prN}, // Pe Z NOTATION RIGHT IMAGE BRACKET
+ {0x2989, 0x2989, prN}, // Ps Z NOTATION LEFT BINDING BRACKET
+ {0x298A, 0x298A, prN}, // Pe Z NOTATION RIGHT BINDING BRACKET
+ {0x298B, 0x298B, prN}, // Ps LEFT SQUARE BRACKET WITH UNDERBAR
+ {0x298C, 0x298C, prN}, // Pe RIGHT SQUARE BRACKET WITH UNDERBAR
+ {0x298D, 0x298D, prN}, // Ps LEFT SQUARE BRACKET WITH TICK IN TOP CORNER
+ {0x298E, 0x298E, prN}, // Pe RIGHT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
+ {0x298F, 0x298F, prN}, // Ps LEFT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
+ {0x2990, 0x2990, prN}, // Pe RIGHT SQUARE BRACKET WITH TICK IN TOP CORNER
+ {0x2991, 0x2991, prN}, // Ps LEFT ANGLE BRACKET WITH DOT
+ {0x2992, 0x2992, prN}, // Pe RIGHT ANGLE BRACKET WITH DOT
+ {0x2993, 0x2993, prN}, // Ps LEFT ARC LESS-THAN BRACKET
+ {0x2994, 0x2994, prN}, // Pe RIGHT ARC GREATER-THAN BRACKET
+ {0x2995, 0x2995, prN}, // Ps DOUBLE LEFT ARC GREATER-THAN BRACKET
+ {0x2996, 0x2996, prN}, // Pe DOUBLE RIGHT ARC LESS-THAN BRACKET
+ {0x2997, 0x2997, prN}, // Ps LEFT BLACK TORTOISE SHELL BRACKET
+ {0x2998, 0x2998, prN}, // Pe RIGHT BLACK TORTOISE SHELL BRACKET
+ {0x2999, 0x29D7, prN}, // Sm [63] DOTTED FENCE..BLACK HOURGLASS
+ {0x29D8, 0x29D8, prN}, // Ps LEFT WIGGLY FENCE
+ {0x29D9, 0x29D9, prN}, // Pe RIGHT WIGGLY FENCE
+ {0x29DA, 0x29DA, prN}, // Ps LEFT DOUBLE WIGGLY FENCE
+ {0x29DB, 0x29DB, prN}, // Pe RIGHT DOUBLE WIGGLY FENCE
+ {0x29DC, 0x29FB, prN}, // Sm [32] INCOMPLETE INFINITY..TRIPLE PLUS
+ {0x29FC, 0x29FC, prN}, // Ps LEFT-POINTING CURVED ANGLE BRACKET
+ {0x29FD, 0x29FD, prN}, // Pe RIGHT-POINTING CURVED ANGLE BRACKET
+ {0x29FE, 0x29FF, prN}, // Sm [2] TINY..MINY
+ {0x2A00, 0x2AFF, prN}, // Sm [256] N-ARY CIRCLED DOT OPERATOR..N-ARY WHITE VERTICAL BAR
+ {0x2B00, 0x2B1A, prN}, // So [27] NORTH EAST WHITE ARROW..DOTTED SQUARE
+ {0x2B1B, 0x2B1C, prW}, // So [2] BLACK LARGE SQUARE..WHITE LARGE SQUARE
+ {0x2B1D, 0x2B2F, prN}, // So [19] BLACK VERY SMALL SQUARE..WHITE VERTICAL ELLIPSE
+ {0x2B30, 0x2B44, prN}, // Sm [21] LEFT ARROW WITH SMALL CIRCLE..RIGHTWARDS ARROW THROUGH SUPERSET
+ {0x2B45, 0x2B46, prN}, // So [2] LEFTWARDS QUADRUPLE ARROW..RIGHTWARDS QUADRUPLE ARROW
+ {0x2B47, 0x2B4C, prN}, // Sm [6] REVERSE TILDE OPERATOR ABOVE RIGHTWARDS ARROW..RIGHTWARDS ARROW ABOVE REVERSE TILDE OPERATOR
+ {0x2B4D, 0x2B4F, prN}, // So [3] DOWNWARDS TRIANGLE-HEADED ZIGZAG ARROW..SHORT BACKSLANTED SOUTH ARROW
+ {0x2B50, 0x2B50, prW}, // So WHITE MEDIUM STAR
+ {0x2B51, 0x2B54, prN}, // So [4] BLACK SMALL STAR..WHITE RIGHT-POINTING PENTAGON
+ {0x2B55, 0x2B55, prW}, // So HEAVY LARGE CIRCLE
+ {0x2B56, 0x2B59, prA}, // So [4] HEAVY OVAL WITH OVAL INSIDE..HEAVY CIRCLED SALTIRE
+ {0x2B5A, 0x2B73, prN}, // So [26] SLANTED NORTH ARROW WITH HOOKED HEAD..DOWNWARDS TRIANGLE-HEADED ARROW TO BAR
+ {0x2B76, 0x2B95, prN}, // So [32] NORTH WEST TRIANGLE-HEADED ARROW TO BAR..RIGHTWARDS BLACK ARROW
+ {0x2B97, 0x2BFF, prN}, // So [105] SYMBOL FOR TYPE A ELECTRONICS..HELLSCHREIBER PAUSE SYMBOL
+ {0x2C00, 0x2C5F, prN}, // L& [96] GLAGOLITIC CAPITAL LETTER AZU..GLAGOLITIC SMALL LETTER CAUDATE CHRIVI
+ {0x2C60, 0x2C7B, prN}, // L& [28] LATIN CAPITAL LETTER L WITH DOUBLE BAR..LATIN LETTER SMALL CAPITAL TURNED E
+ {0x2C7C, 0x2C7D, prN}, // Lm [2] LATIN SUBSCRIPT SMALL LETTER J..MODIFIER LETTER CAPITAL V
+ {0x2C7E, 0x2C7F, prN}, // Lu [2] LATIN CAPITAL LETTER S WITH SWASH TAIL..LATIN CAPITAL LETTER Z WITH SWASH TAIL
+ {0x2C80, 0x2CE4, prN}, // L& [101] COPTIC CAPITAL LETTER ALFA..COPTIC SYMBOL KAI
+ {0x2CE5, 0x2CEA, prN}, // So [6] COPTIC SYMBOL MI RO..COPTIC SYMBOL SHIMA SIMA
+ {0x2CEB, 0x2CEE, prN}, // L& [4] COPTIC CAPITAL LETTER CRYPTOGRAMMIC SHEI..COPTIC SMALL LETTER CRYPTOGRAMMIC GANGIA
+ {0x2CEF, 0x2CF1, prN}, // Mn [3] COPTIC COMBINING NI ABOVE..COPTIC COMBINING SPIRITUS LENIS
+ {0x2CF2, 0x2CF3, prN}, // L& [2] COPTIC CAPITAL LETTER BOHAIRIC KHEI..COPTIC SMALL LETTER BOHAIRIC KHEI
+ {0x2CF9, 0x2CFC, prN}, // Po [4] COPTIC OLD NUBIAN FULL STOP..COPTIC OLD NUBIAN VERSE DIVIDER
+ {0x2CFD, 0x2CFD, prN}, // No COPTIC FRACTION ONE HALF
+ {0x2CFE, 0x2CFF, prN}, // Po [2] COPTIC FULL STOP..COPTIC MORPHOLOGICAL DIVIDER
+ {0x2D00, 0x2D25, prN}, // Ll [38] GEORGIAN SMALL LETTER AN..GEORGIAN SMALL LETTER HOE
+ {0x2D27, 0x2D27, prN}, // Ll GEORGIAN SMALL LETTER YN
+ {0x2D2D, 0x2D2D, prN}, // Ll GEORGIAN SMALL LETTER AEN
+ {0x2D30, 0x2D67, prN}, // Lo [56] TIFINAGH LETTER YA..TIFINAGH LETTER YO
+ {0x2D6F, 0x2D6F, prN}, // Lm TIFINAGH MODIFIER LETTER LABIALIZATION MARK
+ {0x2D70, 0x2D70, prN}, // Po TIFINAGH SEPARATOR MARK
+ {0x2D7F, 0x2D7F, prN}, // Mn TIFINAGH CONSONANT JOINER
+ {0x2D80, 0x2D96, prN}, // Lo [23] ETHIOPIC SYLLABLE LOA..ETHIOPIC SYLLABLE GGWE
+ {0x2DA0, 0x2DA6, prN}, // Lo [7] ETHIOPIC SYLLABLE SSA..ETHIOPIC SYLLABLE SSO
+ {0x2DA8, 0x2DAE, prN}, // Lo [7] ETHIOPIC SYLLABLE CCA..ETHIOPIC SYLLABLE CCO
+ {0x2DB0, 0x2DB6, prN}, // Lo [7] ETHIOPIC SYLLABLE ZZA..ETHIOPIC SYLLABLE ZZO
+ {0x2DB8, 0x2DBE, prN}, // Lo [7] ETHIOPIC SYLLABLE CCHA..ETHIOPIC SYLLABLE CCHO
+ {0x2DC0, 0x2DC6, prN}, // Lo [7] ETHIOPIC SYLLABLE QYA..ETHIOPIC SYLLABLE QYO
+ {0x2DC8, 0x2DCE, prN}, // Lo [7] ETHIOPIC SYLLABLE KYA..ETHIOPIC SYLLABLE KYO
+ {0x2DD0, 0x2DD6, prN}, // Lo [7] ETHIOPIC SYLLABLE XYA..ETHIOPIC SYLLABLE XYO
+ {0x2DD8, 0x2DDE, prN}, // Lo [7] ETHIOPIC SYLLABLE GYA..ETHIOPIC SYLLABLE GYO
+ {0x2DE0, 0x2DFF, prN}, // Mn [32] COMBINING CYRILLIC LETTER BE..COMBINING CYRILLIC LETTER IOTIFIED BIG YUS
+ {0x2E00, 0x2E01, prN}, // Po [2] RIGHT ANGLE SUBSTITUTION MARKER..RIGHT ANGLE DOTTED SUBSTITUTION MARKER
+ {0x2E02, 0x2E02, prN}, // Pi LEFT SUBSTITUTION BRACKET
+ {0x2E03, 0x2E03, prN}, // Pf RIGHT SUBSTITUTION BRACKET
+ {0x2E04, 0x2E04, prN}, // Pi LEFT DOTTED SUBSTITUTION BRACKET
+ {0x2E05, 0x2E05, prN}, // Pf RIGHT DOTTED SUBSTITUTION BRACKET
+ {0x2E06, 0x2E08, prN}, // Po [3] RAISED INTERPOLATION MARKER..DOTTED TRANSPOSITION MARKER
+ {0x2E09, 0x2E09, prN}, // Pi LEFT TRANSPOSITION BRACKET
+ {0x2E0A, 0x2E0A, prN}, // Pf RIGHT TRANSPOSITION BRACKET
+ {0x2E0B, 0x2E0B, prN}, // Po RAISED SQUARE
+ {0x2E0C, 0x2E0C, prN}, // Pi LEFT RAISED OMISSION BRACKET
+ {0x2E0D, 0x2E0D, prN}, // Pf RIGHT RAISED OMISSION BRACKET
+ {0x2E0E, 0x2E16, prN}, // Po [9] EDITORIAL CORONIS..DOTTED RIGHT-POINTING ANGLE
+ {0x2E17, 0x2E17, prN}, // Pd DOUBLE OBLIQUE HYPHEN
+ {0x2E18, 0x2E19, prN}, // Po [2] INVERTED INTERROBANG..PALM BRANCH
+ {0x2E1A, 0x2E1A, prN}, // Pd HYPHEN WITH DIAERESIS
+ {0x2E1B, 0x2E1B, prN}, // Po TILDE WITH RING ABOVE
+ {0x2E1C, 0x2E1C, prN}, // Pi LEFT LOW PARAPHRASE BRACKET
+ {0x2E1D, 0x2E1D, prN}, // Pf RIGHT LOW PARAPHRASE BRACKET
+ {0x2E1E, 0x2E1F, prN}, // Po [2] TILDE WITH DOT ABOVE..TILDE WITH DOT BELOW
+ {0x2E20, 0x2E20, prN}, // Pi LEFT VERTICAL BAR WITH QUILL
+ {0x2E21, 0x2E21, prN}, // Pf RIGHT VERTICAL BAR WITH QUILL
+ {0x2E22, 0x2E22, prN}, // Ps TOP LEFT HALF BRACKET
+ {0x2E23, 0x2E23, prN}, // Pe TOP RIGHT HALF BRACKET
+ {0x2E24, 0x2E24, prN}, // Ps BOTTOM LEFT HALF BRACKET
+ {0x2E25, 0x2E25, prN}, // Pe BOTTOM RIGHT HALF BRACKET
+ {0x2E26, 0x2E26, prN}, // Ps LEFT SIDEWAYS U BRACKET
+ {0x2E27, 0x2E27, prN}, // Pe RIGHT SIDEWAYS U BRACKET
+ {0x2E28, 0x2E28, prN}, // Ps LEFT DOUBLE PARENTHESIS
+ {0x2E29, 0x2E29, prN}, // Pe RIGHT DOUBLE PARENTHESIS
+ {0x2E2A, 0x2E2E, prN}, // Po [5] TWO DOTS OVER ONE DOT PUNCTUATION..REVERSED QUESTION MARK
+ {0x2E2F, 0x2E2F, prN}, // Lm VERTICAL TILDE
+ {0x2E30, 0x2E39, prN}, // Po [10] RING POINT..TOP HALF SECTION SIGN
+ {0x2E3A, 0x2E3B, prN}, // Pd [2] TWO-EM DASH..THREE-EM DASH
+ {0x2E3C, 0x2E3F, prN}, // Po [4] STENOGRAPHIC FULL STOP..CAPITULUM
+ {0x2E40, 0x2E40, prN}, // Pd DOUBLE HYPHEN
+ {0x2E41, 0x2E41, prN}, // Po REVERSED COMMA
+ {0x2E42, 0x2E42, prN}, // Ps DOUBLE LOW-REVERSED-9 QUOTATION MARK
+ {0x2E43, 0x2E4F, prN}, // Po [13] DASH WITH LEFT UPTURN..CORNISH VERSE DIVIDER
+ {0x2E50, 0x2E51, prN}, // So [2] CROSS PATTY WITH RIGHT CROSSBAR..CROSS PATTY WITH LEFT CROSSBAR
+ {0x2E52, 0x2E54, prN}, // Po [3] TIRONIAN SIGN CAPITAL ET..MEDIEVAL QUESTION MARK
+ {0x2E55, 0x2E55, prN}, // Ps LEFT SQUARE BRACKET WITH STROKE
+ {0x2E56, 0x2E56, prN}, // Pe RIGHT SQUARE BRACKET WITH STROKE
+ {0x2E57, 0x2E57, prN}, // Ps LEFT SQUARE BRACKET WITH DOUBLE STROKE
+ {0x2E58, 0x2E58, prN}, // Pe RIGHT SQUARE BRACKET WITH DOUBLE STROKE
+ {0x2E59, 0x2E59, prN}, // Ps TOP HALF LEFT PARENTHESIS
+ {0x2E5A, 0x2E5A, prN}, // Pe TOP HALF RIGHT PARENTHESIS
+ {0x2E5B, 0x2E5B, prN}, // Ps BOTTOM HALF LEFT PARENTHESIS
+ {0x2E5C, 0x2E5C, prN}, // Pe BOTTOM HALF RIGHT PARENTHESIS
+ {0x2E5D, 0x2E5D, prN}, // Pd OBLIQUE HYPHEN
+ {0x2E80, 0x2E99, prW}, // So [26] CJK RADICAL REPEAT..CJK RADICAL RAP
+ {0x2E9B, 0x2EF3, prW}, // So [89] CJK RADICAL CHOKE..CJK RADICAL C-SIMPLIFIED TURTLE
+ {0x2F00, 0x2FD5, prW}, // So [214] KANGXI RADICAL ONE..KANGXI RADICAL FLUTE
+ {0x2FF0, 0x2FFB, prW}, // So [12] IDEOGRAPHIC DESCRIPTION CHARACTER LEFT TO RIGHT..IDEOGRAPHIC DESCRIPTION CHARACTER OVERLAID
+ {0x3000, 0x3000, prF}, // Zs IDEOGRAPHIC SPACE
+ {0x3001, 0x3003, prW}, // Po [3] IDEOGRAPHIC COMMA..DITTO MARK
+ {0x3004, 0x3004, prW}, // So JAPANESE INDUSTRIAL STANDARD SYMBOL
+ {0x3005, 0x3005, prW}, // Lm IDEOGRAPHIC ITERATION MARK
+ {0x3006, 0x3006, prW}, // Lo IDEOGRAPHIC CLOSING MARK
+ {0x3007, 0x3007, prW}, // Nl IDEOGRAPHIC NUMBER ZERO
+ {0x3008, 0x3008, prW}, // Ps LEFT ANGLE BRACKET
+ {0x3009, 0x3009, prW}, // Pe RIGHT ANGLE BRACKET
+ {0x300A, 0x300A, prW}, // Ps LEFT DOUBLE ANGLE BRACKET
+ {0x300B, 0x300B, prW}, // Pe RIGHT DOUBLE ANGLE BRACKET
+ {0x300C, 0x300C, prW}, // Ps LEFT CORNER BRACKET
+ {0x300D, 0x300D, prW}, // Pe RIGHT CORNER BRACKET
+ {0x300E, 0x300E, prW}, // Ps LEFT WHITE CORNER BRACKET
+ {0x300F, 0x300F, prW}, // Pe RIGHT WHITE CORNER BRACKET
+ {0x3010, 0x3010, prW}, // Ps LEFT BLACK LENTICULAR BRACKET
+ {0x3011, 0x3011, prW}, // Pe RIGHT BLACK LENTICULAR BRACKET
+ {0x3012, 0x3013, prW}, // So [2] POSTAL MARK..GETA MARK
+ {0x3014, 0x3014, prW}, // Ps LEFT TORTOISE SHELL BRACKET
+ {0x3015, 0x3015, prW}, // Pe RIGHT TORTOISE SHELL BRACKET
+ {0x3016, 0x3016, prW}, // Ps LEFT WHITE LENTICULAR BRACKET
+ {0x3017, 0x3017, prW}, // Pe RIGHT WHITE LENTICULAR BRACKET
+ {0x3018, 0x3018, prW}, // Ps LEFT WHITE TORTOISE SHELL BRACKET
+ {0x3019, 0x3019, prW}, // Pe RIGHT WHITE TORTOISE SHELL BRACKET
+ {0x301A, 0x301A, prW}, // Ps LEFT WHITE SQUARE BRACKET
+ {0x301B, 0x301B, prW}, // Pe RIGHT WHITE SQUARE BRACKET
+ {0x301C, 0x301C, prW}, // Pd WAVE DASH
+ {0x301D, 0x301D, prW}, // Ps REVERSED DOUBLE PRIME QUOTATION MARK
+ {0x301E, 0x301F, prW}, // Pe [2] DOUBLE PRIME QUOTATION MARK..LOW DOUBLE PRIME QUOTATION MARK
+ {0x3020, 0x3020, prW}, // So POSTAL MARK FACE
+ {0x3021, 0x3029, prW}, // Nl [9] HANGZHOU NUMERAL ONE..HANGZHOU NUMERAL NINE
+ {0x302A, 0x302D, prW}, // Mn [4] IDEOGRAPHIC LEVEL TONE MARK..IDEOGRAPHIC ENTERING TONE MARK
+ {0x302E, 0x302F, prW}, // Mc [2] HANGUL SINGLE DOT TONE MARK..HANGUL DOUBLE DOT TONE MARK
+ {0x3030, 0x3030, prW}, // Pd WAVY DASH
+ {0x3031, 0x3035, prW}, // Lm [5] VERTICAL KANA REPEAT MARK..VERTICAL KANA REPEAT MARK LOWER HALF
+ {0x3036, 0x3037, prW}, // So [2] CIRCLED POSTAL MARK..IDEOGRAPHIC TELEGRAPH LINE FEED SEPARATOR SYMBOL
+ {0x3038, 0x303A, prW}, // Nl [3] HANGZHOU NUMERAL TEN..HANGZHOU NUMERAL THIRTY
+ {0x303B, 0x303B, prW}, // Lm VERTICAL IDEOGRAPHIC ITERATION MARK
+ {0x303C, 0x303C, prW}, // Lo MASU MARK
+ {0x303D, 0x303D, prW}, // Po PART ALTERNATION MARK
+ {0x303E, 0x303E, prW}, // So IDEOGRAPHIC VARIATION INDICATOR
+ {0x303F, 0x303F, prN}, // So IDEOGRAPHIC HALF FILL SPACE
+ {0x3041, 0x3096, prW}, // Lo [86] HIRAGANA LETTER SMALL A..HIRAGANA LETTER SMALL KE
+ {0x3099, 0x309A, prW}, // Mn [2] COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK..COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x309B, 0x309C, prW}, // Sk [2] KATAKANA-HIRAGANA VOICED SOUND MARK..KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x309D, 0x309E, prW}, // Lm [2] HIRAGANA ITERATION MARK..HIRAGANA VOICED ITERATION MARK
+ {0x309F, 0x309F, prW}, // Lo HIRAGANA DIGRAPH YORI
+ {0x30A0, 0x30A0, prW}, // Pd KATAKANA-HIRAGANA DOUBLE HYPHEN
+ {0x30A1, 0x30FA, prW}, // Lo [90] KATAKANA LETTER SMALL A..KATAKANA LETTER VO
+ {0x30FB, 0x30FB, prW}, // Po KATAKANA MIDDLE DOT
+ {0x30FC, 0x30FE, prW}, // Lm [3] KATAKANA-HIRAGANA PROLONGED SOUND MARK..KATAKANA VOICED ITERATION MARK
+ {0x30FF, 0x30FF, prW}, // Lo KATAKANA DIGRAPH KOTO
+ {0x3105, 0x312F, prW}, // Lo [43] BOPOMOFO LETTER B..BOPOMOFO LETTER NN
+ {0x3131, 0x318E, prW}, // Lo [94] HANGUL LETTER KIYEOK..HANGUL LETTER ARAEAE
+ {0x3190, 0x3191, prW}, // So [2] IDEOGRAPHIC ANNOTATION LINKING MARK..IDEOGRAPHIC ANNOTATION REVERSE MARK
+ {0x3192, 0x3195, prW}, // No [4] IDEOGRAPHIC ANNOTATION ONE MARK..IDEOGRAPHIC ANNOTATION FOUR MARK
+ {0x3196, 0x319F, prW}, // So [10] IDEOGRAPHIC ANNOTATION TOP MARK..IDEOGRAPHIC ANNOTATION MAN MARK
+ {0x31A0, 0x31BF, prW}, // Lo [32] BOPOMOFO LETTER BU..BOPOMOFO LETTER AH
+ {0x31C0, 0x31E3, prW}, // So [36] CJK STROKE T..CJK STROKE Q
+ {0x31F0, 0x31FF, prW}, // Lo [16] KATAKANA LETTER SMALL KU..KATAKANA LETTER SMALL RO
+ {0x3200, 0x321E, prW}, // So [31] PARENTHESIZED HANGUL KIYEOK..PARENTHESIZED KOREAN CHARACTER O HU
+ {0x3220, 0x3229, prW}, // No [10] PARENTHESIZED IDEOGRAPH ONE..PARENTHESIZED IDEOGRAPH TEN
+ {0x322A, 0x3247, prW}, // So [30] PARENTHESIZED IDEOGRAPH MOON..CIRCLED IDEOGRAPH KOTO
+ {0x3248, 0x324F, prA}, // No [8] CIRCLED NUMBER TEN ON BLACK SQUARE..CIRCLED NUMBER EIGHTY ON BLACK SQUARE
+ {0x3250, 0x3250, prW}, // So PARTNERSHIP SIGN
+ {0x3251, 0x325F, prW}, // No [15] CIRCLED NUMBER TWENTY ONE..CIRCLED NUMBER THIRTY FIVE
+ {0x3260, 0x327F, prW}, // So [32] CIRCLED HANGUL KIYEOK..KOREAN STANDARD SYMBOL
+ {0x3280, 0x3289, prW}, // No [10] CIRCLED IDEOGRAPH ONE..CIRCLED IDEOGRAPH TEN
+ {0x328A, 0x32B0, prW}, // So [39] CIRCLED IDEOGRAPH MOON..CIRCLED IDEOGRAPH NIGHT
+ {0x32B1, 0x32BF, prW}, // No [15] CIRCLED NUMBER THIRTY SIX..CIRCLED NUMBER FIFTY
+ {0x32C0, 0x32FF, prW}, // So [64] IDEOGRAPHIC TELEGRAPH SYMBOL FOR JANUARY..SQUARE ERA NAME REIWA
+ {0x3300, 0x33FF, prW}, // So [256] SQUARE APAATO..SQUARE GAL
+ {0x3400, 0x4DBF, prW}, // Lo [6592] CJK UNIFIED IDEOGRAPH-3400..CJK UNIFIED IDEOGRAPH-4DBF
+ {0x4DC0, 0x4DFF, prN}, // So [64] HEXAGRAM FOR THE CREATIVE HEAVEN..HEXAGRAM FOR BEFORE COMPLETION
+ {0x4E00, 0x9FFF, prW}, // Lo [20992] CJK UNIFIED IDEOGRAPH-4E00..CJK UNIFIED IDEOGRAPH-9FFF
+ {0xA000, 0xA014, prW}, // Lo [21] YI SYLLABLE IT..YI SYLLABLE E
+ {0xA015, 0xA015, prW}, // Lm YI SYLLABLE WU
+ {0xA016, 0xA48C, prW}, // Lo [1143] YI SYLLABLE BIT..YI SYLLABLE YYR
+ {0xA490, 0xA4C6, prW}, // So [55] YI RADICAL QOT..YI RADICAL KE
+ {0xA4D0, 0xA4F7, prN}, // Lo [40] LISU LETTER BA..LISU LETTER OE
+ {0xA4F8, 0xA4FD, prN}, // Lm [6] LISU LETTER TONE MYA TI..LISU LETTER TONE MYA JEU
+ {0xA4FE, 0xA4FF, prN}, // Po [2] LISU PUNCTUATION COMMA..LISU PUNCTUATION FULL STOP
+ {0xA500, 0xA60B, prN}, // Lo [268] VAI SYLLABLE EE..VAI SYLLABLE NG
+ {0xA60C, 0xA60C, prN}, // Lm VAI SYLLABLE LENGTHENER
+ {0xA60D, 0xA60F, prN}, // Po [3] VAI COMMA..VAI QUESTION MARK
+ {0xA610, 0xA61F, prN}, // Lo [16] VAI SYLLABLE NDOLE FA..VAI SYMBOL JONG
+ {0xA620, 0xA629, prN}, // Nd [10] VAI DIGIT ZERO..VAI DIGIT NINE
+ {0xA62A, 0xA62B, prN}, // Lo [2] VAI SYLLABLE NDOLE MA..VAI SYLLABLE NDOLE DO
+ {0xA640, 0xA66D, prN}, // L& [46] CYRILLIC CAPITAL LETTER ZEMLYA..CYRILLIC SMALL LETTER DOUBLE MONOCULAR O
+ {0xA66E, 0xA66E, prN}, // Lo CYRILLIC LETTER MULTIOCULAR O
+ {0xA66F, 0xA66F, prN}, // Mn COMBINING CYRILLIC VZMET
+ {0xA670, 0xA672, prN}, // Me [3] COMBINING CYRILLIC TEN MILLIONS SIGN..COMBINING CYRILLIC THOUSAND MILLIONS SIGN
+ {0xA673, 0xA673, prN}, // Po SLAVONIC ASTERISK
+ {0xA674, 0xA67D, prN}, // Mn [10] COMBINING CYRILLIC LETTER UKRAINIAN IE..COMBINING CYRILLIC PAYEROK
+ {0xA67E, 0xA67E, prN}, // Po CYRILLIC KAVYKA
+ {0xA67F, 0xA67F, prN}, // Lm CYRILLIC PAYEROK
+ {0xA680, 0xA69B, prN}, // L& [28] CYRILLIC CAPITAL LETTER DWE..CYRILLIC SMALL LETTER CROSSED O
+ {0xA69C, 0xA69D, prN}, // Lm [2] MODIFIER LETTER CYRILLIC HARD SIGN..MODIFIER LETTER CYRILLIC SOFT SIGN
+ {0xA69E, 0xA69F, prN}, // Mn [2] COMBINING CYRILLIC LETTER EF..COMBINING CYRILLIC LETTER IOTIFIED E
+ {0xA6A0, 0xA6E5, prN}, // Lo [70] BAMUM LETTER A..BAMUM LETTER KI
+ {0xA6E6, 0xA6EF, prN}, // Nl [10] BAMUM LETTER MO..BAMUM LETTER KOGHOM
+ {0xA6F0, 0xA6F1, prN}, // Mn [2] BAMUM COMBINING MARK KOQNDON..BAMUM COMBINING MARK TUKWENTIS
+ {0xA6F2, 0xA6F7, prN}, // Po [6] BAMUM NJAEMLI..BAMUM QUESTION MARK
+ {0xA700, 0xA716, prN}, // Sk [23] MODIFIER LETTER CHINESE TONE YIN PING..MODIFIER LETTER EXTRA-LOW LEFT-STEM TONE BAR
+ {0xA717, 0xA71F, prN}, // Lm [9] MODIFIER LETTER DOT VERTICAL BAR..MODIFIER LETTER LOW INVERTED EXCLAMATION MARK
+ {0xA720, 0xA721, prN}, // Sk [2] MODIFIER LETTER STRESS AND HIGH TONE..MODIFIER LETTER STRESS AND LOW TONE
+ {0xA722, 0xA76F, prN}, // L& [78] LATIN CAPITAL LETTER EGYPTOLOGICAL ALEF..LATIN SMALL LETTER CON
+ {0xA770, 0xA770, prN}, // Lm MODIFIER LETTER US
+ {0xA771, 0xA787, prN}, // L& [23] LATIN SMALL LETTER DUM..LATIN SMALL LETTER INSULAR T
+ {0xA788, 0xA788, prN}, // Lm MODIFIER LETTER LOW CIRCUMFLEX ACCENT
+ {0xA789, 0xA78A, prN}, // Sk [2] MODIFIER LETTER COLON..MODIFIER LETTER SHORT EQUALS SIGN
+ {0xA78B, 0xA78E, prN}, // L& [4] LATIN CAPITAL LETTER SALTILLO..LATIN SMALL LETTER L WITH RETROFLEX HOOK AND BELT
+ {0xA78F, 0xA78F, prN}, // Lo LATIN LETTER SINOLOGICAL DOT
+ {0xA790, 0xA7CA, prN}, // L& [59] LATIN CAPITAL LETTER N WITH DESCENDER..LATIN SMALL LETTER S WITH SHORT STROKE OVERLAY
+ {0xA7D0, 0xA7D1, prN}, // L& [2] LATIN CAPITAL LETTER CLOSED INSULAR G..LATIN SMALL LETTER CLOSED INSULAR G
+ {0xA7D3, 0xA7D3, prN}, // Ll LATIN SMALL LETTER DOUBLE THORN
+ {0xA7D5, 0xA7D9, prN}, // L& [5] LATIN SMALL LETTER DOUBLE WYNN..LATIN SMALL LETTER SIGMOID S
+ {0xA7F2, 0xA7F4, prN}, // Lm [3] MODIFIER LETTER CAPITAL C..MODIFIER LETTER CAPITAL Q
+ {0xA7F5, 0xA7F6, prN}, // L& [2] LATIN CAPITAL LETTER REVERSED HALF H..LATIN SMALL LETTER REVERSED HALF H
+ {0xA7F7, 0xA7F7, prN}, // Lo LATIN EPIGRAPHIC LETTER SIDEWAYS I
+ {0xA7F8, 0xA7F9, prN}, // Lm [2] MODIFIER LETTER CAPITAL H WITH STROKE..MODIFIER LETTER SMALL LIGATURE OE
+ {0xA7FA, 0xA7FA, prN}, // Ll LATIN LETTER SMALL CAPITAL TURNED M
+ {0xA7FB, 0xA7FF, prN}, // Lo [5] LATIN EPIGRAPHIC LETTER REVERSED F..LATIN EPIGRAPHIC LETTER ARCHAIC M
+ {0xA800, 0xA801, prN}, // Lo [2] SYLOTI NAGRI LETTER A..SYLOTI NAGRI LETTER I
+ {0xA802, 0xA802, prN}, // Mn SYLOTI NAGRI SIGN DVISVARA
+ {0xA803, 0xA805, prN}, // Lo [3] SYLOTI NAGRI LETTER U..SYLOTI NAGRI LETTER O
+ {0xA806, 0xA806, prN}, // Mn SYLOTI NAGRI SIGN HASANTA
+ {0xA807, 0xA80A, prN}, // Lo [4] SYLOTI NAGRI LETTER KO..SYLOTI NAGRI LETTER GHO
+ {0xA80B, 0xA80B, prN}, // Mn SYLOTI NAGRI SIGN ANUSVARA
+ {0xA80C, 0xA822, prN}, // Lo [23] SYLOTI NAGRI LETTER CO..SYLOTI NAGRI LETTER HO
+ {0xA823, 0xA824, prN}, // Mc [2] SYLOTI NAGRI VOWEL SIGN A..SYLOTI NAGRI VOWEL SIGN I
+ {0xA825, 0xA826, prN}, // Mn [2] SYLOTI NAGRI VOWEL SIGN U..SYLOTI NAGRI VOWEL SIGN E
+ {0xA827, 0xA827, prN}, // Mc SYLOTI NAGRI VOWEL SIGN OO
+ {0xA828, 0xA82B, prN}, // So [4] SYLOTI NAGRI POETRY MARK-1..SYLOTI NAGRI POETRY MARK-4
+ {0xA82C, 0xA82C, prN}, // Mn SYLOTI NAGRI SIGN ALTERNATE HASANTA
+ {0xA830, 0xA835, prN}, // No [6] NORTH INDIC FRACTION ONE QUARTER..NORTH INDIC FRACTION THREE SIXTEENTHS
+ {0xA836, 0xA837, prN}, // So [2] NORTH INDIC QUARTER MARK..NORTH INDIC PLACEHOLDER MARK
+ {0xA838, 0xA838, prN}, // Sc NORTH INDIC RUPEE MARK
+ {0xA839, 0xA839, prN}, // So NORTH INDIC QUANTITY MARK
+ {0xA840, 0xA873, prN}, // Lo [52] PHAGS-PA LETTER KA..PHAGS-PA LETTER CANDRABINDU
+ {0xA874, 0xA877, prN}, // Po [4] PHAGS-PA SINGLE HEAD MARK..PHAGS-PA MARK DOUBLE SHAD
+ {0xA880, 0xA881, prN}, // Mc [2] SAURASHTRA SIGN ANUSVARA..SAURASHTRA SIGN VISARGA
+ {0xA882, 0xA8B3, prN}, // Lo [50] SAURASHTRA LETTER A..SAURASHTRA LETTER LLA
+ {0xA8B4, 0xA8C3, prN}, // Mc [16] SAURASHTRA CONSONANT SIGN HAARU..SAURASHTRA VOWEL SIGN AU
+ {0xA8C4, 0xA8C5, prN}, // Mn [2] SAURASHTRA SIGN VIRAMA..SAURASHTRA SIGN CANDRABINDU
+ {0xA8CE, 0xA8CF, prN}, // Po [2] SAURASHTRA DANDA..SAURASHTRA DOUBLE DANDA
+ {0xA8D0, 0xA8D9, prN}, // Nd [10] SAURASHTRA DIGIT ZERO..SAURASHTRA DIGIT NINE
+ {0xA8E0, 0xA8F1, prN}, // Mn [18] COMBINING DEVANAGARI DIGIT ZERO..COMBINING DEVANAGARI SIGN AVAGRAHA
+ {0xA8F2, 0xA8F7, prN}, // Lo [6] DEVANAGARI SIGN SPACING CANDRABINDU..DEVANAGARI SIGN CANDRABINDU AVAGRAHA
+ {0xA8F8, 0xA8FA, prN}, // Po [3] DEVANAGARI SIGN PUSHPIKA..DEVANAGARI CARET
+ {0xA8FB, 0xA8FB, prN}, // Lo DEVANAGARI HEADSTROKE
+ {0xA8FC, 0xA8FC, prN}, // Po DEVANAGARI SIGN SIDDHAM
+ {0xA8FD, 0xA8FE, prN}, // Lo [2] DEVANAGARI JAIN OM..DEVANAGARI LETTER AY
+ {0xA8FF, 0xA8FF, prN}, // Mn DEVANAGARI VOWEL SIGN AY
+ {0xA900, 0xA909, prN}, // Nd [10] KAYAH LI DIGIT ZERO..KAYAH LI DIGIT NINE
+ {0xA90A, 0xA925, prN}, // Lo [28] KAYAH LI LETTER KA..KAYAH LI LETTER OO
+ {0xA926, 0xA92D, prN}, // Mn [8] KAYAH LI VOWEL UE..KAYAH LI TONE CALYA PLOPHU
+ {0xA92E, 0xA92F, prN}, // Po [2] KAYAH LI SIGN CWI..KAYAH LI SIGN SHYA
+ {0xA930, 0xA946, prN}, // Lo [23] REJANG LETTER KA..REJANG LETTER A
+ {0xA947, 0xA951, prN}, // Mn [11] REJANG VOWEL SIGN I..REJANG CONSONANT SIGN R
+ {0xA952, 0xA953, prN}, // Mc [2] REJANG CONSONANT SIGN H..REJANG VIRAMA
+ {0xA95F, 0xA95F, prN}, // Po REJANG SECTION MARK
+ {0xA960, 0xA97C, prW}, // Lo [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANGUL CHOSEONG SSANGYEORINHIEUH
+ {0xA980, 0xA982, prN}, // Mn [3] JAVANESE SIGN PANYANGGA..JAVANESE SIGN LAYAR
+ {0xA983, 0xA983, prN}, // Mc JAVANESE SIGN WIGNYAN
+ {0xA984, 0xA9B2, prN}, // Lo [47] JAVANESE LETTER A..JAVANESE LETTER HA
+ {0xA9B3, 0xA9B3, prN}, // Mn JAVANESE SIGN CECAK TELU
+ {0xA9B4, 0xA9B5, prN}, // Mc [2] JAVANESE VOWEL SIGN TARUNG..JAVANESE VOWEL SIGN TOLONG
+ {0xA9B6, 0xA9B9, prN}, // Mn [4] JAVANESE VOWEL SIGN WULU..JAVANESE VOWEL SIGN SUKU MENDUT
+ {0xA9BA, 0xA9BB, prN}, // Mc [2] JAVANESE VOWEL SIGN TALING..JAVANESE VOWEL SIGN DIRGA MURE
+ {0xA9BC, 0xA9BD, prN}, // Mn [2] JAVANESE VOWEL SIGN PEPET..JAVANESE CONSONANT SIGN KERET
+ {0xA9BE, 0xA9C0, prN}, // Mc [3] JAVANESE CONSONANT SIGN PENGKAL..JAVANESE PANGKON
+ {0xA9C1, 0xA9CD, prN}, // Po [13] JAVANESE LEFT RERENGGAN..JAVANESE TURNED PADA PISELEH
+ {0xA9CF, 0xA9CF, prN}, // Lm JAVANESE PANGRANGKEP
+ {0xA9D0, 0xA9D9, prN}, // Nd [10] JAVANESE DIGIT ZERO..JAVANESE DIGIT NINE
+ {0xA9DE, 0xA9DF, prN}, // Po [2] JAVANESE PADA TIRTA TUMETES..JAVANESE PADA ISEN-ISEN
+ {0xA9E0, 0xA9E4, prN}, // Lo [5] MYANMAR LETTER SHAN GHA..MYANMAR LETTER SHAN BHA
+ {0xA9E5, 0xA9E5, prN}, // Mn MYANMAR SIGN SHAN SAW
+ {0xA9E6, 0xA9E6, prN}, // Lm MYANMAR MODIFIER LETTER SHAN REDUPLICATION
+ {0xA9E7, 0xA9EF, prN}, // Lo [9] MYANMAR LETTER TAI LAING NYA..MYANMAR LETTER TAI LAING NNA
+ {0xA9F0, 0xA9F9, prN}, // Nd [10] MYANMAR TAI LAING DIGIT ZERO..MYANMAR TAI LAING DIGIT NINE
+ {0xA9FA, 0xA9FE, prN}, // Lo [5] MYANMAR LETTER TAI LAING LLA..MYANMAR LETTER TAI LAING BHA
+ {0xAA00, 0xAA28, prN}, // Lo [41] CHAM LETTER A..CHAM LETTER HA
+ {0xAA29, 0xAA2E, prN}, // Mn [6] CHAM VOWEL SIGN AA..CHAM VOWEL SIGN OE
+ {0xAA2F, 0xAA30, prN}, // Mc [2] CHAM VOWEL SIGN O..CHAM VOWEL SIGN AI
+ {0xAA31, 0xAA32, prN}, // Mn [2] CHAM VOWEL SIGN AU..CHAM VOWEL SIGN UE
+ {0xAA33, 0xAA34, prN}, // Mc [2] CHAM CONSONANT SIGN YA..CHAM CONSONANT SIGN RA
+ {0xAA35, 0xAA36, prN}, // Mn [2] CHAM CONSONANT SIGN LA..CHAM CONSONANT SIGN WA
+ {0xAA40, 0xAA42, prN}, // Lo [3] CHAM LETTER FINAL K..CHAM LETTER FINAL NG
+ {0xAA43, 0xAA43, prN}, // Mn CHAM CONSONANT SIGN FINAL NG
+ {0xAA44, 0xAA4B, prN}, // Lo [8] CHAM LETTER FINAL CH..CHAM LETTER FINAL SS
+ {0xAA4C, 0xAA4C, prN}, // Mn CHAM CONSONANT SIGN FINAL M
+ {0xAA4D, 0xAA4D, prN}, // Mc CHAM CONSONANT SIGN FINAL H
+ {0xAA50, 0xAA59, prN}, // Nd [10] CHAM DIGIT ZERO..CHAM DIGIT NINE
+ {0xAA5C, 0xAA5F, prN}, // Po [4] CHAM PUNCTUATION SPIRAL..CHAM PUNCTUATION TRIPLE DANDA
+ {0xAA60, 0xAA6F, prN}, // Lo [16] MYANMAR LETTER KHAMTI GA..MYANMAR LETTER KHAMTI FA
+ {0xAA70, 0xAA70, prN}, // Lm MYANMAR MODIFIER LETTER KHAMTI REDUPLICATION
+ {0xAA71, 0xAA76, prN}, // Lo [6] MYANMAR LETTER KHAMTI XA..MYANMAR LOGOGRAM KHAMTI HM
+ {0xAA77, 0xAA79, prN}, // So [3] MYANMAR SYMBOL AITON EXCLAMATION..MYANMAR SYMBOL AITON TWO
+ {0xAA7A, 0xAA7A, prN}, // Lo MYANMAR LETTER AITON RA
+ {0xAA7B, 0xAA7B, prN}, // Mc MYANMAR SIGN PAO KAREN TONE
+ {0xAA7C, 0xAA7C, prN}, // Mn MYANMAR SIGN TAI LAING TONE-2
+ {0xAA7D, 0xAA7D, prN}, // Mc MYANMAR SIGN TAI LAING TONE-5
+ {0xAA7E, 0xAA7F, prN}, // Lo [2] MYANMAR LETTER SHWE PALAUNG CHA..MYANMAR LETTER SHWE PALAUNG SHA
+ {0xAA80, 0xAAAF, prN}, // Lo [48] TAI VIET LETTER LOW KO..TAI VIET LETTER HIGH O
+ {0xAAB0, 0xAAB0, prN}, // Mn TAI VIET MAI KANG
+ {0xAAB1, 0xAAB1, prN}, // Lo TAI VIET VOWEL AA
+ {0xAAB2, 0xAAB4, prN}, // Mn [3] TAI VIET VOWEL I..TAI VIET VOWEL U
+ {0xAAB5, 0xAAB6, prN}, // Lo [2] TAI VIET VOWEL E..TAI VIET VOWEL O
+ {0xAAB7, 0xAAB8, prN}, // Mn [2] TAI VIET MAI KHIT..TAI VIET VOWEL IA
+ {0xAAB9, 0xAABD, prN}, // Lo [5] TAI VIET VOWEL UEA..TAI VIET VOWEL AN
+ {0xAABE, 0xAABF, prN}, // Mn [2] TAI VIET VOWEL AM..TAI VIET TONE MAI EK
+ {0xAAC0, 0xAAC0, prN}, // Lo TAI VIET TONE MAI NUENG
+ {0xAAC1, 0xAAC1, prN}, // Mn TAI VIET TONE MAI THO
+ {0xAAC2, 0xAAC2, prN}, // Lo TAI VIET TONE MAI SONG
+ {0xAADB, 0xAADC, prN}, // Lo [2] TAI VIET SYMBOL KON..TAI VIET SYMBOL NUENG
+ {0xAADD, 0xAADD, prN}, // Lm TAI VIET SYMBOL SAM
+ {0xAADE, 0xAADF, prN}, // Po [2] TAI VIET SYMBOL HO HOI..TAI VIET SYMBOL KOI KOI
+ {0xAAE0, 0xAAEA, prN}, // Lo [11] MEETEI MAYEK LETTER E..MEETEI MAYEK LETTER SSA
+ {0xAAEB, 0xAAEB, prN}, // Mc MEETEI MAYEK VOWEL SIGN II
+ {0xAAEC, 0xAAED, prN}, // Mn [2] MEETEI MAYEK VOWEL SIGN UU..MEETEI MAYEK VOWEL SIGN AAI
+ {0xAAEE, 0xAAEF, prN}, // Mc [2] MEETEI MAYEK VOWEL SIGN AU..MEETEI MAYEK VOWEL SIGN AAU
+ {0xAAF0, 0xAAF1, prN}, // Po [2] MEETEI MAYEK CHEIKHAN..MEETEI MAYEK AHANG KHUDAM
+ {0xAAF2, 0xAAF2, prN}, // Lo MEETEI MAYEK ANJI
+ {0xAAF3, 0xAAF4, prN}, // Lm [2] MEETEI MAYEK SYLLABLE REPETITION MARK..MEETEI MAYEK WORD REPETITION MARK
+ {0xAAF5, 0xAAF5, prN}, // Mc MEETEI MAYEK VOWEL SIGN VISARGA
+ {0xAAF6, 0xAAF6, prN}, // Mn MEETEI MAYEK VIRAMA
+ {0xAB01, 0xAB06, prN}, // Lo [6] ETHIOPIC SYLLABLE TTHU..ETHIOPIC SYLLABLE TTHO
+ {0xAB09, 0xAB0E, prN}, // Lo [6] ETHIOPIC SYLLABLE DDHU..ETHIOPIC SYLLABLE DDHO
+ {0xAB11, 0xAB16, prN}, // Lo [6] ETHIOPIC SYLLABLE DZU..ETHIOPIC SYLLABLE DZO
+ {0xAB20, 0xAB26, prN}, // Lo [7] ETHIOPIC SYLLABLE CCHHA..ETHIOPIC SYLLABLE CCHHO
+ {0xAB28, 0xAB2E, prN}, // Lo [7] ETHIOPIC SYLLABLE BBA..ETHIOPIC SYLLABLE BBO
+ {0xAB30, 0xAB5A, prN}, // Ll [43] LATIN SMALL LETTER BARRED ALPHA..LATIN SMALL LETTER Y WITH SHORT RIGHT LEG
+ {0xAB5B, 0xAB5B, prN}, // Sk MODIFIER BREVE WITH INVERTED BREVE
+ {0xAB5C, 0xAB5F, prN}, // Lm [4] MODIFIER LETTER SMALL HENG..MODIFIER LETTER SMALL U WITH LEFT HOOK
+ {0xAB60, 0xAB68, prN}, // Ll [9] LATIN SMALL LETTER SAKHA YAT..LATIN SMALL LETTER TURNED R WITH MIDDLE TILDE
+ {0xAB69, 0xAB69, prN}, // Lm MODIFIER LETTER SMALL TURNED W
+ {0xAB6A, 0xAB6B, prN}, // Sk [2] MODIFIER LETTER LEFT TACK..MODIFIER LETTER RIGHT TACK
+ {0xAB70, 0xABBF, prN}, // Ll [80] CHEROKEE SMALL LETTER A..CHEROKEE SMALL LETTER YA
+ {0xABC0, 0xABE2, prN}, // Lo [35] MEETEI MAYEK LETTER KOK..MEETEI MAYEK LETTER I LONSUM
+ {0xABE3, 0xABE4, prN}, // Mc [2] MEETEI MAYEK VOWEL SIGN ONAP..MEETEI MAYEK VOWEL SIGN INAP
+ {0xABE5, 0xABE5, prN}, // Mn MEETEI MAYEK VOWEL SIGN ANAP
+ {0xABE6, 0xABE7, prN}, // Mc [2] MEETEI MAYEK VOWEL SIGN YENAP..MEETEI MAYEK VOWEL SIGN SOUNAP
+ {0xABE8, 0xABE8, prN}, // Mn MEETEI MAYEK VOWEL SIGN UNAP
+ {0xABE9, 0xABEA, prN}, // Mc [2] MEETEI MAYEK VOWEL SIGN CHEINAP..MEETEI MAYEK VOWEL SIGN NUNG
+ {0xABEB, 0xABEB, prN}, // Po MEETEI MAYEK CHEIKHEI
+ {0xABEC, 0xABEC, prN}, // Mc MEETEI MAYEK LUM IYEK
+ {0xABED, 0xABED, prN}, // Mn MEETEI MAYEK APUN IYEK
+ {0xABF0, 0xABF9, prN}, // Nd [10] MEETEI MAYEK DIGIT ZERO..MEETEI MAYEK DIGIT NINE
+ {0xAC00, 0xD7A3, prW}, // Lo [11172] HANGUL SYLLABLE GA..HANGUL SYLLABLE HIH
+ {0xD7B0, 0xD7C6, prN}, // Lo [23] HANGUL JUNGSEONG O-YEO..HANGUL JUNGSEONG ARAEA-E
+ {0xD7CB, 0xD7FB, prN}, // Lo [49] HANGUL JONGSEONG NIEUN-RIEUL..HANGUL JONGSEONG PHIEUPH-THIEUTH
+ {0xD800, 0xDB7F, prN}, // Cs [896] ..
+ {0xDB80, 0xDBFF, prN}, // Cs [128] ..
+ {0xDC00, 0xDFFF, prN}, // Cs [1024] ..
+ {0xE000, 0xF8FF, prA}, // Co [6400] ..
+ {0xF900, 0xFA6D, prW}, // Lo [366] CJK COMPATIBILITY IDEOGRAPH-F900..CJK COMPATIBILITY IDEOGRAPH-FA6D
+ {0xFA6E, 0xFA6F, prW}, // Cn [2] ..
+ {0xFA70, 0xFAD9, prW}, // Lo [106] CJK COMPATIBILITY IDEOGRAPH-FA70..CJK COMPATIBILITY IDEOGRAPH-FAD9
+ {0xFADA, 0xFAFF, prW}, // Cn [38] ..
+ {0xFB00, 0xFB06, prN}, // Ll [7] LATIN SMALL LIGATURE FF..LATIN SMALL LIGATURE ST
+ {0xFB13, 0xFB17, prN}, // Ll [5] ARMENIAN SMALL LIGATURE MEN NOW..ARMENIAN SMALL LIGATURE MEN XEH
+ {0xFB1D, 0xFB1D, prN}, // Lo HEBREW LETTER YOD WITH HIRIQ
+ {0xFB1E, 0xFB1E, prN}, // Mn HEBREW POINT JUDEO-SPANISH VARIKA
+ {0xFB1F, 0xFB28, prN}, // Lo [10] HEBREW LIGATURE YIDDISH YOD YOD PATAH..HEBREW LETTER WIDE TAV
+ {0xFB29, 0xFB29, prN}, // Sm HEBREW LETTER ALTERNATIVE PLUS SIGN
+ {0xFB2A, 0xFB36, prN}, // Lo [13] HEBREW LETTER SHIN WITH SHIN DOT..HEBREW LETTER ZAYIN WITH DAGESH
+ {0xFB38, 0xFB3C, prN}, // Lo [5] HEBREW LETTER TET WITH DAGESH..HEBREW LETTER LAMED WITH DAGESH
+ {0xFB3E, 0xFB3E, prN}, // Lo HEBREW LETTER MEM WITH DAGESH
+ {0xFB40, 0xFB41, prN}, // Lo [2] HEBREW LETTER NUN WITH DAGESH..HEBREW LETTER SAMEKH WITH DAGESH
+ {0xFB43, 0xFB44, prN}, // Lo [2] HEBREW LETTER FINAL PE WITH DAGESH..HEBREW LETTER PE WITH DAGESH
+ {0xFB46, 0xFB4F, prN}, // Lo [10] HEBREW LETTER TSADI WITH DAGESH..HEBREW LIGATURE ALEF LAMED
+ {0xFB50, 0xFBB1, prN}, // Lo [98] ARABIC LETTER ALEF WASLA ISOLATED FORM..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE FINAL FORM
+ {0xFBB2, 0xFBC2, prN}, // Sk [17] ARABIC SYMBOL DOT ABOVE..ARABIC SYMBOL WASLA ABOVE
+ {0xFBD3, 0xFD3D, prN}, // Lo [363] ARABIC LETTER NG ISOLATED FORM..ARABIC LIGATURE ALEF WITH FATHATAN ISOLATED FORM
+ {0xFD3E, 0xFD3E, prN}, // Pe ORNATE LEFT PARENTHESIS
+ {0xFD3F, 0xFD3F, prN}, // Ps ORNATE RIGHT PARENTHESIS
+ {0xFD40, 0xFD4F, prN}, // So [16] ARABIC LIGATURE RAHIMAHU ALLAAH..ARABIC LIGATURE RAHIMAHUM ALLAAH
+ {0xFD50, 0xFD8F, prN}, // Lo [64] ARABIC LIGATURE TEH WITH JEEM WITH MEEM INITIAL FORM..ARABIC LIGATURE MEEM WITH KHAH WITH MEEM INITIAL FORM
+ {0xFD92, 0xFDC7, prN}, // Lo [54] ARABIC LIGATURE MEEM WITH JEEM WITH KHAH INITIAL FORM..ARABIC LIGATURE NOON WITH JEEM WITH YEH FINAL FORM
+ {0xFDCF, 0xFDCF, prN}, // So ARABIC LIGATURE SALAAMUHU ALAYNAA
+ {0xFDF0, 0xFDFB, prN}, // Lo [12] ARABIC LIGATURE SALLA USED AS KORANIC STOP SIGN ISOLATED FORM..ARABIC LIGATURE JALLAJALALOUHOU
+ {0xFDFC, 0xFDFC, prN}, // Sc RIAL SIGN
+ {0xFDFD, 0xFDFF, prN}, // So [3] ARABIC LIGATURE BISMILLAH AR-RAHMAN AR-RAHEEM..ARABIC LIGATURE AZZA WA JALL
+ {0xFE00, 0xFE0F, prA}, // Mn [16] VARIATION SELECTOR-1..VARIATION SELECTOR-16
+ {0xFE10, 0xFE16, prW}, // Po [7] PRESENTATION FORM FOR VERTICAL COMMA..PRESENTATION FORM FOR VERTICAL QUESTION MARK
+ {0xFE17, 0xFE17, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT WHITE LENTICULAR BRACKET
+ {0xFE18, 0xFE18, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT WHITE LENTICULAR BRAKCET
+ {0xFE19, 0xFE19, prW}, // Po PRESENTATION FORM FOR VERTICAL HORIZONTAL ELLIPSIS
+ {0xFE20, 0xFE2F, prN}, // Mn [16] COMBINING LIGATURE LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF
+ {0xFE30, 0xFE30, prW}, // Po PRESENTATION FORM FOR VERTICAL TWO DOT LEADER
+ {0xFE31, 0xFE32, prW}, // Pd [2] PRESENTATION FORM FOR VERTICAL EM DASH..PRESENTATION FORM FOR VERTICAL EN DASH
+ {0xFE33, 0xFE34, prW}, // Pc [2] PRESENTATION FORM FOR VERTICAL LOW LINE..PRESENTATION FORM FOR VERTICAL WAVY LOW LINE
+ {0xFE35, 0xFE35, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT PARENTHESIS
+ {0xFE36, 0xFE36, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT PARENTHESIS
+ {0xFE37, 0xFE37, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT CURLY BRACKET
+ {0xFE38, 0xFE38, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT CURLY BRACKET
+ {0xFE39, 0xFE39, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT TORTOISE SHELL BRACKET
+ {0xFE3A, 0xFE3A, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT TORTOISE SHELL BRACKET
+ {0xFE3B, 0xFE3B, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT BLACK LENTICULAR BRACKET
+ {0xFE3C, 0xFE3C, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT BLACK LENTICULAR BRACKET
+ {0xFE3D, 0xFE3D, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT DOUBLE ANGLE BRACKET
+ {0xFE3E, 0xFE3E, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT DOUBLE ANGLE BRACKET
+ {0xFE3F, 0xFE3F, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT ANGLE BRACKET
+ {0xFE40, 0xFE40, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT ANGLE BRACKET
+ {0xFE41, 0xFE41, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT CORNER BRACKET
+ {0xFE42, 0xFE42, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT CORNER BRACKET
+ {0xFE43, 0xFE43, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT WHITE CORNER BRACKET
+ {0xFE44, 0xFE44, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT WHITE CORNER BRACKET
+ {0xFE45, 0xFE46, prW}, // Po [2] SESAME DOT..WHITE SESAME DOT
+ {0xFE47, 0xFE47, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT SQUARE BRACKET
+ {0xFE48, 0xFE48, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT SQUARE BRACKET
+ {0xFE49, 0xFE4C, prW}, // Po [4] DASHED OVERLINE..DOUBLE WAVY OVERLINE
+ {0xFE4D, 0xFE4F, prW}, // Pc [3] DASHED LOW LINE..WAVY LOW LINE
+ {0xFE50, 0xFE52, prW}, // Po [3] SMALL COMMA..SMALL FULL STOP
+ {0xFE54, 0xFE57, prW}, // Po [4] SMALL SEMICOLON..SMALL EXCLAMATION MARK
+ {0xFE58, 0xFE58, prW}, // Pd SMALL EM DASH
+ {0xFE59, 0xFE59, prW}, // Ps SMALL LEFT PARENTHESIS
+ {0xFE5A, 0xFE5A, prW}, // Pe SMALL RIGHT PARENTHESIS
+ {0xFE5B, 0xFE5B, prW}, // Ps SMALL LEFT CURLY BRACKET
+ {0xFE5C, 0xFE5C, prW}, // Pe SMALL RIGHT CURLY BRACKET
+ {0xFE5D, 0xFE5D, prW}, // Ps SMALL LEFT TORTOISE SHELL BRACKET
+ {0xFE5E, 0xFE5E, prW}, // Pe SMALL RIGHT TORTOISE SHELL BRACKET
+ {0xFE5F, 0xFE61, prW}, // Po [3] SMALL NUMBER SIGN..SMALL ASTERISK
+ {0xFE62, 0xFE62, prW}, // Sm SMALL PLUS SIGN
+ {0xFE63, 0xFE63, prW}, // Pd SMALL HYPHEN-MINUS
+ {0xFE64, 0xFE66, prW}, // Sm [3] SMALL LESS-THAN SIGN..SMALL EQUALS SIGN
+ {0xFE68, 0xFE68, prW}, // Po SMALL REVERSE SOLIDUS
+ {0xFE69, 0xFE69, prW}, // Sc SMALL DOLLAR SIGN
+ {0xFE6A, 0xFE6B, prW}, // Po [2] SMALL PERCENT SIGN..SMALL COMMERCIAL AT
+ {0xFE70, 0xFE74, prN}, // Lo [5] ARABIC FATHATAN ISOLATED FORM..ARABIC KASRATAN ISOLATED FORM
+ {0xFE76, 0xFEFC, prN}, // Lo [135] ARABIC FATHA ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF FINAL FORM
+ {0xFEFF, 0xFEFF, prN}, // Cf ZERO WIDTH NO-BREAK SPACE
+ {0xFF01, 0xFF03, prF}, // Po [3] FULLWIDTH EXCLAMATION MARK..FULLWIDTH NUMBER SIGN
+ {0xFF04, 0xFF04, prF}, // Sc FULLWIDTH DOLLAR SIGN
+ {0xFF05, 0xFF07, prF}, // Po [3] FULLWIDTH PERCENT SIGN..FULLWIDTH APOSTROPHE
+ {0xFF08, 0xFF08, prF}, // Ps FULLWIDTH LEFT PARENTHESIS
+ {0xFF09, 0xFF09, prF}, // Pe FULLWIDTH RIGHT PARENTHESIS
+ {0xFF0A, 0xFF0A, prF}, // Po FULLWIDTH ASTERISK
+ {0xFF0B, 0xFF0B, prF}, // Sm FULLWIDTH PLUS SIGN
+ {0xFF0C, 0xFF0C, prF}, // Po FULLWIDTH COMMA
+ {0xFF0D, 0xFF0D, prF}, // Pd FULLWIDTH HYPHEN-MINUS
+ {0xFF0E, 0xFF0F, prF}, // Po [2] FULLWIDTH FULL STOP..FULLWIDTH SOLIDUS
+ {0xFF10, 0xFF19, prF}, // Nd [10] FULLWIDTH DIGIT ZERO..FULLWIDTH DIGIT NINE
+ {0xFF1A, 0xFF1B, prF}, // Po [2] FULLWIDTH COLON..FULLWIDTH SEMICOLON
+ {0xFF1C, 0xFF1E, prF}, // Sm [3] FULLWIDTH LESS-THAN SIGN..FULLWIDTH GREATER-THAN SIGN
+ {0xFF1F, 0xFF20, prF}, // Po [2] FULLWIDTH QUESTION MARK..FULLWIDTH COMMERCIAL AT
+ {0xFF21, 0xFF3A, prF}, // Lu [26] FULLWIDTH LATIN CAPITAL LETTER A..FULLWIDTH LATIN CAPITAL LETTER Z
+ {0xFF3B, 0xFF3B, prF}, // Ps FULLWIDTH LEFT SQUARE BRACKET
+ {0xFF3C, 0xFF3C, prF}, // Po FULLWIDTH REVERSE SOLIDUS
+ {0xFF3D, 0xFF3D, prF}, // Pe FULLWIDTH RIGHT SQUARE BRACKET
+ {0xFF3E, 0xFF3E, prF}, // Sk FULLWIDTH CIRCUMFLEX ACCENT
+ {0xFF3F, 0xFF3F, prF}, // Pc FULLWIDTH LOW LINE
+ {0xFF40, 0xFF40, prF}, // Sk FULLWIDTH GRAVE ACCENT
+ {0xFF41, 0xFF5A, prF}, // Ll [26] FULLWIDTH LATIN SMALL LETTER A..FULLWIDTH LATIN SMALL LETTER Z
+ {0xFF5B, 0xFF5B, prF}, // Ps FULLWIDTH LEFT CURLY BRACKET
+ {0xFF5C, 0xFF5C, prF}, // Sm FULLWIDTH VERTICAL LINE
+ {0xFF5D, 0xFF5D, prF}, // Pe FULLWIDTH RIGHT CURLY BRACKET
+ {0xFF5E, 0xFF5E, prF}, // Sm FULLWIDTH TILDE
+ {0xFF5F, 0xFF5F, prF}, // Ps FULLWIDTH LEFT WHITE PARENTHESIS
+ {0xFF60, 0xFF60, prF}, // Pe FULLWIDTH RIGHT WHITE PARENTHESIS
+ {0xFF61, 0xFF61, prH}, // Po HALFWIDTH IDEOGRAPHIC FULL STOP
+ {0xFF62, 0xFF62, prH}, // Ps HALFWIDTH LEFT CORNER BRACKET
+ {0xFF63, 0xFF63, prH}, // Pe HALFWIDTH RIGHT CORNER BRACKET
+ {0xFF64, 0xFF65, prH}, // Po [2] HALFWIDTH IDEOGRAPHIC COMMA..HALFWIDTH KATAKANA MIDDLE DOT
+ {0xFF66, 0xFF6F, prH}, // Lo [10] HALFWIDTH KATAKANA LETTER WO..HALFWIDTH KATAKANA LETTER SMALL TU
+ {0xFF70, 0xFF70, prH}, // Lm HALFWIDTH KATAKANA-HIRAGANA PROLONGED SOUND MARK
+ {0xFF71, 0xFF9D, prH}, // Lo [45] HALFWIDTH KATAKANA LETTER A..HALFWIDTH KATAKANA LETTER N
+ {0xFF9E, 0xFF9F, prH}, // Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK
+ {0xFFA0, 0xFFBE, prH}, // Lo [31] HALFWIDTH HANGUL FILLER..HALFWIDTH HANGUL LETTER HIEUH
+ {0xFFC2, 0xFFC7, prH}, // Lo [6] HALFWIDTH HANGUL LETTER A..HALFWIDTH HANGUL LETTER E
+ {0xFFCA, 0xFFCF, prH}, // Lo [6] HALFWIDTH HANGUL LETTER YEO..HALFWIDTH HANGUL LETTER OE
+ {0xFFD2, 0xFFD7, prH}, // Lo [6] HALFWIDTH HANGUL LETTER YO..HALFWIDTH HANGUL LETTER YU
+ {0xFFDA, 0xFFDC, prH}, // Lo [3] HALFWIDTH HANGUL LETTER EU..HALFWIDTH HANGUL LETTER I
+ {0xFFE0, 0xFFE1, prF}, // Sc [2] FULLWIDTH CENT SIGN..FULLWIDTH POUND SIGN
+ {0xFFE2, 0xFFE2, prF}, // Sm FULLWIDTH NOT SIGN
+ {0xFFE3, 0xFFE3, prF}, // Sk FULLWIDTH MACRON
+ {0xFFE4, 0xFFE4, prF}, // So FULLWIDTH BROKEN BAR
+ {0xFFE5, 0xFFE6, prF}, // Sc [2] FULLWIDTH YEN SIGN..FULLWIDTH WON SIGN
+ {0xFFE8, 0xFFE8, prH}, // So HALFWIDTH FORMS LIGHT VERTICAL
+ {0xFFE9, 0xFFEC, prH}, // Sm [4] HALFWIDTH LEFTWARDS ARROW..HALFWIDTH DOWNWARDS ARROW
+ {0xFFED, 0xFFEE, prH}, // So [2] HALFWIDTH BLACK SQUARE..HALFWIDTH WHITE CIRCLE
+ {0xFFF9, 0xFFFB, prN}, // Cf [3] INTERLINEAR ANNOTATION ANCHOR..INTERLINEAR ANNOTATION TERMINATOR
+ {0xFFFC, 0xFFFC, prN}, // So OBJECT REPLACEMENT CHARACTER
+ {0xFFFD, 0xFFFD, prA}, // So REPLACEMENT CHARACTER
+ {0x10000, 0x1000B, prN}, // Lo [12] LINEAR B SYLLABLE B008 A..LINEAR B SYLLABLE B046 JE
+ {0x1000D, 0x10026, prN}, // Lo [26] LINEAR B SYLLABLE B036 JO..LINEAR B SYLLABLE B032 QO
+ {0x10028, 0x1003A, prN}, // Lo [19] LINEAR B SYLLABLE B060 RA..LINEAR B SYLLABLE B042 WO
+ {0x1003C, 0x1003D, prN}, // Lo [2] LINEAR B SYLLABLE B017 ZA..LINEAR B SYLLABLE B074 ZE
+ {0x1003F, 0x1004D, prN}, // Lo [15] LINEAR B SYLLABLE B020 ZO..LINEAR B SYLLABLE B091 TWO
+ {0x10050, 0x1005D, prN}, // Lo [14] LINEAR B SYMBOL B018..LINEAR B SYMBOL B089
+ {0x10080, 0x100FA, prN}, // Lo [123] LINEAR B IDEOGRAM B100 MAN..LINEAR B IDEOGRAM VESSEL B305
+ {0x10100, 0x10102, prN}, // Po [3] AEGEAN WORD SEPARATOR LINE..AEGEAN CHECK MARK
+ {0x10107, 0x10133, prN}, // No [45] AEGEAN NUMBER ONE..AEGEAN NUMBER NINETY THOUSAND
+ {0x10137, 0x1013F, prN}, // So [9] AEGEAN WEIGHT BASE UNIT..AEGEAN MEASURE THIRD SUBUNIT
+ {0x10140, 0x10174, prN}, // Nl [53] GREEK ACROPHONIC ATTIC ONE QUARTER..GREEK ACROPHONIC STRATIAN FIFTY MNAS
+ {0x10175, 0x10178, prN}, // No [4] GREEK ONE HALF SIGN..GREEK THREE QUARTERS SIGN
+ {0x10179, 0x10189, prN}, // So [17] GREEK YEAR SIGN..GREEK TRYBLION BASE SIGN
+ {0x1018A, 0x1018B, prN}, // No [2] GREEK ZERO SIGN..GREEK ONE QUARTER SIGN
+ {0x1018C, 0x1018E, prN}, // So [3] GREEK SINUSOID SIGN..NOMISMA SIGN
+ {0x10190, 0x1019C, prN}, // So [13] ROMAN SEXTANS SIGN..ASCIA SYMBOL
+ {0x101A0, 0x101A0, prN}, // So GREEK SYMBOL TAU RHO
+ {0x101D0, 0x101FC, prN}, // So [45] PHAISTOS DISC SIGN PEDESTRIAN..PHAISTOS DISC SIGN WAVY BAND
+ {0x101FD, 0x101FD, prN}, // Mn PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE
+ {0x10280, 0x1029C, prN}, // Lo [29] LYCIAN LETTER A..LYCIAN LETTER X
+ {0x102A0, 0x102D0, prN}, // Lo [49] CARIAN LETTER A..CARIAN LETTER UUU3
+ {0x102E0, 0x102E0, prN}, // Mn COPTIC EPACT THOUSANDS MARK
+ {0x102E1, 0x102FB, prN}, // No [27] COPTIC EPACT DIGIT ONE..COPTIC EPACT NUMBER NINE HUNDRED
+ {0x10300, 0x1031F, prN}, // Lo [32] OLD ITALIC LETTER A..OLD ITALIC LETTER ESS
+ {0x10320, 0x10323, prN}, // No [4] OLD ITALIC NUMERAL ONE..OLD ITALIC NUMERAL FIFTY
+ {0x1032D, 0x1032F, prN}, // Lo [3] OLD ITALIC LETTER YE..OLD ITALIC LETTER SOUTHERN TSE
+ {0x10330, 0x10340, prN}, // Lo [17] GOTHIC LETTER AHSA..GOTHIC LETTER PAIRTHRA
+ {0x10341, 0x10341, prN}, // Nl GOTHIC LETTER NINETY
+ {0x10342, 0x10349, prN}, // Lo [8] GOTHIC LETTER RAIDA..GOTHIC LETTER OTHAL
+ {0x1034A, 0x1034A, prN}, // Nl GOTHIC LETTER NINE HUNDRED
+ {0x10350, 0x10375, prN}, // Lo [38] OLD PERMIC LETTER AN..OLD PERMIC LETTER IA
+ {0x10376, 0x1037A, prN}, // Mn [5] COMBINING OLD PERMIC LETTER AN..COMBINING OLD PERMIC LETTER SII
+ {0x10380, 0x1039D, prN}, // Lo [30] UGARITIC LETTER ALPA..UGARITIC LETTER SSU
+ {0x1039F, 0x1039F, prN}, // Po UGARITIC WORD DIVIDER
+ {0x103A0, 0x103C3, prN}, // Lo [36] OLD PERSIAN SIGN A..OLD PERSIAN SIGN HA
+ {0x103C8, 0x103CF, prN}, // Lo [8] OLD PERSIAN SIGN AURAMAZDAA..OLD PERSIAN SIGN BUUMISH
+ {0x103D0, 0x103D0, prN}, // Po OLD PERSIAN WORD DIVIDER
+ {0x103D1, 0x103D5, prN}, // Nl [5] OLD PERSIAN NUMBER ONE..OLD PERSIAN NUMBER HUNDRED
+ {0x10400, 0x1044F, prN}, // L& [80] DESERET CAPITAL LETTER LONG I..DESERET SMALL LETTER EW
+ {0x10450, 0x1047F, prN}, // Lo [48] SHAVIAN LETTER PEEP..SHAVIAN LETTER YEW
+ {0x10480, 0x1049D, prN}, // Lo [30] OSMANYA LETTER ALEF..OSMANYA LETTER OO
+ {0x104A0, 0x104A9, prN}, // Nd [10] OSMANYA DIGIT ZERO..OSMANYA DIGIT NINE
+ {0x104B0, 0x104D3, prN}, // Lu [36] OSAGE CAPITAL LETTER A..OSAGE CAPITAL LETTER ZHA
+ {0x104D8, 0x104FB, prN}, // Ll [36] OSAGE SMALL LETTER A..OSAGE SMALL LETTER ZHA
+ {0x10500, 0x10527, prN}, // Lo [40] ELBASAN LETTER A..ELBASAN LETTER KHE
+ {0x10530, 0x10563, prN}, // Lo [52] CAUCASIAN ALBANIAN LETTER ALT..CAUCASIAN ALBANIAN LETTER KIW
+ {0x1056F, 0x1056F, prN}, // Po CAUCASIAN ALBANIAN CITATION MARK
+ {0x10570, 0x1057A, prN}, // Lu [11] VITHKUQI CAPITAL LETTER A..VITHKUQI CAPITAL LETTER GA
+ {0x1057C, 0x1058A, prN}, // Lu [15] VITHKUQI CAPITAL LETTER HA..VITHKUQI CAPITAL LETTER RE
+ {0x1058C, 0x10592, prN}, // Lu [7] VITHKUQI CAPITAL LETTER SE..VITHKUQI CAPITAL LETTER XE
+ {0x10594, 0x10595, prN}, // Lu [2] VITHKUQI CAPITAL LETTER Y..VITHKUQI CAPITAL LETTER ZE
+ {0x10597, 0x105A1, prN}, // Ll [11] VITHKUQI SMALL LETTER A..VITHKUQI SMALL LETTER GA
+ {0x105A3, 0x105B1, prN}, // Ll [15] VITHKUQI SMALL LETTER HA..VITHKUQI SMALL LETTER RE
+ {0x105B3, 0x105B9, prN}, // Ll [7] VITHKUQI SMALL LETTER SE..VITHKUQI SMALL LETTER XE
+ {0x105BB, 0x105BC, prN}, // Ll [2] VITHKUQI SMALL LETTER Y..VITHKUQI SMALL LETTER ZE
+ {0x10600, 0x10736, prN}, // Lo [311] LINEAR A SIGN AB001..LINEAR A SIGN A664
+ {0x10740, 0x10755, prN}, // Lo [22] LINEAR A SIGN A701 A..LINEAR A SIGN A732 JE
+ {0x10760, 0x10767, prN}, // Lo [8] LINEAR A SIGN A800..LINEAR A SIGN A807
+ {0x10780, 0x10785, prN}, // Lm [6] MODIFIER LETTER SMALL CAPITAL AA..MODIFIER LETTER SMALL B WITH HOOK
+ {0x10787, 0x107B0, prN}, // Lm [42] MODIFIER LETTER SMALL DZ DIGRAPH..MODIFIER LETTER SMALL V WITH RIGHT HOOK
+ {0x107B2, 0x107BA, prN}, // Lm [9] MODIFIER LETTER SMALL CAPITAL Y..MODIFIER LETTER SMALL S WITH CURL
+ {0x10800, 0x10805, prN}, // Lo [6] CYPRIOT SYLLABLE A..CYPRIOT SYLLABLE JA
+ {0x10808, 0x10808, prN}, // Lo CYPRIOT SYLLABLE JO
+ {0x1080A, 0x10835, prN}, // Lo [44] CYPRIOT SYLLABLE KA..CYPRIOT SYLLABLE WO
+ {0x10837, 0x10838, prN}, // Lo [2] CYPRIOT SYLLABLE XA..CYPRIOT SYLLABLE XE
+ {0x1083C, 0x1083C, prN}, // Lo CYPRIOT SYLLABLE ZA
+ {0x1083F, 0x1083F, prN}, // Lo CYPRIOT SYLLABLE ZO
+ {0x10840, 0x10855, prN}, // Lo [22] IMPERIAL ARAMAIC LETTER ALEPH..IMPERIAL ARAMAIC LETTER TAW
+ {0x10857, 0x10857, prN}, // Po IMPERIAL ARAMAIC SECTION SIGN
+ {0x10858, 0x1085F, prN}, // No [8] IMPERIAL ARAMAIC NUMBER ONE..IMPERIAL ARAMAIC NUMBER TEN THOUSAND
+ {0x10860, 0x10876, prN}, // Lo [23] PALMYRENE LETTER ALEPH..PALMYRENE LETTER TAW
+ {0x10877, 0x10878, prN}, // So [2] PALMYRENE LEFT-POINTING FLEURON..PALMYRENE RIGHT-POINTING FLEURON
+ {0x10879, 0x1087F, prN}, // No [7] PALMYRENE NUMBER ONE..PALMYRENE NUMBER TWENTY
+ {0x10880, 0x1089E, prN}, // Lo [31] NABATAEAN LETTER FINAL ALEPH..NABATAEAN LETTER TAW
+ {0x108A7, 0x108AF, prN}, // No [9] NABATAEAN NUMBER ONE..NABATAEAN NUMBER ONE HUNDRED
+ {0x108E0, 0x108F2, prN}, // Lo [19] HATRAN LETTER ALEPH..HATRAN LETTER QOPH
+ {0x108F4, 0x108F5, prN}, // Lo [2] HATRAN LETTER SHIN..HATRAN LETTER TAW
+ {0x108FB, 0x108FF, prN}, // No [5] HATRAN NUMBER ONE..HATRAN NUMBER ONE HUNDRED
+ {0x10900, 0x10915, prN}, // Lo [22] PHOENICIAN LETTER ALF..PHOENICIAN LETTER TAU
+ {0x10916, 0x1091B, prN}, // No [6] PHOENICIAN NUMBER ONE..PHOENICIAN NUMBER THREE
+ {0x1091F, 0x1091F, prN}, // Po PHOENICIAN WORD SEPARATOR
+ {0x10920, 0x10939, prN}, // Lo [26] LYDIAN LETTER A..LYDIAN LETTER C
+ {0x1093F, 0x1093F, prN}, // Po LYDIAN TRIANGULAR MARK
+ {0x10980, 0x1099F, prN}, // Lo [32] MEROITIC HIEROGLYPHIC LETTER A..MEROITIC HIEROGLYPHIC SYMBOL VIDJ-2
+ {0x109A0, 0x109B7, prN}, // Lo [24] MEROITIC CURSIVE LETTER A..MEROITIC CURSIVE LETTER DA
+ {0x109BC, 0x109BD, prN}, // No [2] MEROITIC CURSIVE FRACTION ELEVEN TWELFTHS..MEROITIC CURSIVE FRACTION ONE HALF
+ {0x109BE, 0x109BF, prN}, // Lo [2] MEROITIC CURSIVE LOGOGRAM RMT..MEROITIC CURSIVE LOGOGRAM IMN
+ {0x109C0, 0x109CF, prN}, // No [16] MEROITIC CURSIVE NUMBER ONE..MEROITIC CURSIVE NUMBER SEVENTY
+ {0x109D2, 0x109FF, prN}, // No [46] MEROITIC CURSIVE NUMBER ONE HUNDRED..MEROITIC CURSIVE FRACTION TEN TWELFTHS
+ {0x10A00, 0x10A00, prN}, // Lo KHAROSHTHI LETTER A
+ {0x10A01, 0x10A03, prN}, // Mn [3] KHAROSHTHI VOWEL SIGN I..KHAROSHTHI VOWEL SIGN VOCALIC R
+ {0x10A05, 0x10A06, prN}, // Mn [2] KHAROSHTHI VOWEL SIGN E..KHAROSHTHI VOWEL SIGN O
+ {0x10A0C, 0x10A0F, prN}, // Mn [4] KHAROSHTHI VOWEL LENGTH MARK..KHAROSHTHI SIGN VISARGA
+ {0x10A10, 0x10A13, prN}, // Lo [4] KHAROSHTHI LETTER KA..KHAROSHTHI LETTER GHA
+ {0x10A15, 0x10A17, prN}, // Lo [3] KHAROSHTHI LETTER CA..KHAROSHTHI LETTER JA
+ {0x10A19, 0x10A35, prN}, // Lo [29] KHAROSHTHI LETTER NYA..KHAROSHTHI LETTER VHA
+ {0x10A38, 0x10A3A, prN}, // Mn [3] KHAROSHTHI SIGN BAR ABOVE..KHAROSHTHI SIGN DOT BELOW
+ {0x10A3F, 0x10A3F, prN}, // Mn KHAROSHTHI VIRAMA
+ {0x10A40, 0x10A48, prN}, // No [9] KHAROSHTHI DIGIT ONE..KHAROSHTHI FRACTION ONE HALF
+ {0x10A50, 0x10A58, prN}, // Po [9] KHAROSHTHI PUNCTUATION DOT..KHAROSHTHI PUNCTUATION LINES
+ {0x10A60, 0x10A7C, prN}, // Lo [29] OLD SOUTH ARABIAN LETTER HE..OLD SOUTH ARABIAN LETTER THETH
+ {0x10A7D, 0x10A7E, prN}, // No [2] OLD SOUTH ARABIAN NUMBER ONE..OLD SOUTH ARABIAN NUMBER FIFTY
+ {0x10A7F, 0x10A7F, prN}, // Po OLD SOUTH ARABIAN NUMERIC INDICATOR
+ {0x10A80, 0x10A9C, prN}, // Lo [29] OLD NORTH ARABIAN LETTER HEH..OLD NORTH ARABIAN LETTER ZAH
+ {0x10A9D, 0x10A9F, prN}, // No [3] OLD NORTH ARABIAN NUMBER ONE..OLD NORTH ARABIAN NUMBER TWENTY
+ {0x10AC0, 0x10AC7, prN}, // Lo [8] MANICHAEAN LETTER ALEPH..MANICHAEAN LETTER WAW
+ {0x10AC8, 0x10AC8, prN}, // So MANICHAEAN SIGN UD
+ {0x10AC9, 0x10AE4, prN}, // Lo [28] MANICHAEAN LETTER ZAYIN..MANICHAEAN LETTER TAW
+ {0x10AE5, 0x10AE6, prN}, // Mn [2] MANICHAEAN ABBREVIATION MARK ABOVE..MANICHAEAN ABBREVIATION MARK BELOW
+ {0x10AEB, 0x10AEF, prN}, // No [5] MANICHAEAN NUMBER ONE..MANICHAEAN NUMBER ONE HUNDRED
+ {0x10AF0, 0x10AF6, prN}, // Po [7] MANICHAEAN PUNCTUATION STAR..MANICHAEAN PUNCTUATION LINE FILLER
+ {0x10B00, 0x10B35, prN}, // Lo [54] AVESTAN LETTER A..AVESTAN LETTER HE
+ {0x10B39, 0x10B3F, prN}, // Po [7] AVESTAN ABBREVIATION MARK..LARGE ONE RING OVER TWO RINGS PUNCTUATION
+ {0x10B40, 0x10B55, prN}, // Lo [22] INSCRIPTIONAL PARTHIAN LETTER ALEPH..INSCRIPTIONAL PARTHIAN LETTER TAW
+ {0x10B58, 0x10B5F, prN}, // No [8] INSCRIPTIONAL PARTHIAN NUMBER ONE..INSCRIPTIONAL PARTHIAN NUMBER ONE THOUSAND
+ {0x10B60, 0x10B72, prN}, // Lo [19] INSCRIPTIONAL PAHLAVI LETTER ALEPH..INSCRIPTIONAL PAHLAVI LETTER TAW
+ {0x10B78, 0x10B7F, prN}, // No [8] INSCRIPTIONAL PAHLAVI NUMBER ONE..INSCRIPTIONAL PAHLAVI NUMBER ONE THOUSAND
+ {0x10B80, 0x10B91, prN}, // Lo [18] PSALTER PAHLAVI LETTER ALEPH..PSALTER PAHLAVI LETTER TAW
+ {0x10B99, 0x10B9C, prN}, // Po [4] PSALTER PAHLAVI SECTION MARK..PSALTER PAHLAVI FOUR DOTS WITH DOT
+ {0x10BA9, 0x10BAF, prN}, // No [7] PSALTER PAHLAVI NUMBER ONE..PSALTER PAHLAVI NUMBER ONE HUNDRED
+ {0x10C00, 0x10C48, prN}, // Lo [73] OLD TURKIC LETTER ORKHON A..OLD TURKIC LETTER ORKHON BASH
+ {0x10C80, 0x10CB2, prN}, // Lu [51] OLD HUNGARIAN CAPITAL LETTER A..OLD HUNGARIAN CAPITAL LETTER US
+ {0x10CC0, 0x10CF2, prN}, // Ll [51] OLD HUNGARIAN SMALL LETTER A..OLD HUNGARIAN SMALL LETTER US
+ {0x10CFA, 0x10CFF, prN}, // No [6] OLD HUNGARIAN NUMBER ONE..OLD HUNGARIAN NUMBER ONE THOUSAND
+ {0x10D00, 0x10D23, prN}, // Lo [36] HANIFI ROHINGYA LETTER A..HANIFI ROHINGYA MARK NA KHONNA
+ {0x10D24, 0x10D27, prN}, // Mn [4] HANIFI ROHINGYA SIGN HARBAHAY..HANIFI ROHINGYA SIGN TASSI
+ {0x10D30, 0x10D39, prN}, // Nd [10] HANIFI ROHINGYA DIGIT ZERO..HANIFI ROHINGYA DIGIT NINE
+ {0x10E60, 0x10E7E, prN}, // No [31] RUMI DIGIT ONE..RUMI FRACTION TWO THIRDS
+ {0x10E80, 0x10EA9, prN}, // Lo [42] YEZIDI LETTER ELIF..YEZIDI LETTER ET
+ {0x10EAB, 0x10EAC, prN}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
+ {0x10EAD, 0x10EAD, prN}, // Pd YEZIDI HYPHENATION MARK
+ {0x10EB0, 0x10EB1, prN}, // Lo [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE
+ {0x10EFD, 0x10EFF, prN}, // Mn [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA
+ {0x10F00, 0x10F1C, prN}, // Lo [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL
+ {0x10F1D, 0x10F26, prN}, // No [10] OLD SOGDIAN NUMBER ONE..OLD SOGDIAN FRACTION ONE HALF
+ {0x10F27, 0x10F27, prN}, // Lo OLD SOGDIAN LIGATURE AYIN-DALETH
+ {0x10F30, 0x10F45, prN}, // Lo [22] SOGDIAN LETTER ALEPH..SOGDIAN INDEPENDENT SHIN
+ {0x10F46, 0x10F50, prN}, // Mn [11] SOGDIAN COMBINING DOT BELOW..SOGDIAN COMBINING STROKE BELOW
+ {0x10F51, 0x10F54, prN}, // No [4] SOGDIAN NUMBER ONE..SOGDIAN NUMBER ONE HUNDRED
+ {0x10F55, 0x10F59, prN}, // Po [5] SOGDIAN PUNCTUATION TWO VERTICAL BARS..SOGDIAN PUNCTUATION HALF CIRCLE WITH DOT
+ {0x10F70, 0x10F81, prN}, // Lo [18] OLD UYGHUR LETTER ALEPH..OLD UYGHUR LETTER LESH
+ {0x10F82, 0x10F85, prN}, // Mn [4] OLD UYGHUR COMBINING DOT ABOVE..OLD UYGHUR COMBINING TWO DOTS BELOW
+ {0x10F86, 0x10F89, prN}, // Po [4] OLD UYGHUR PUNCTUATION BAR..OLD UYGHUR PUNCTUATION FOUR DOTS
+ {0x10FB0, 0x10FC4, prN}, // Lo [21] CHORASMIAN LETTER ALEPH..CHORASMIAN LETTER TAW
+ {0x10FC5, 0x10FCB, prN}, // No [7] CHORASMIAN NUMBER ONE..CHORASMIAN NUMBER ONE HUNDRED
+ {0x10FE0, 0x10FF6, prN}, // Lo [23] ELYMAIC LETTER ALEPH..ELYMAIC LIGATURE ZAYIN-YODH
+ {0x11000, 0x11000, prN}, // Mc BRAHMI SIGN CANDRABINDU
+ {0x11001, 0x11001, prN}, // Mn BRAHMI SIGN ANUSVARA
+ {0x11002, 0x11002, prN}, // Mc BRAHMI SIGN VISARGA
+ {0x11003, 0x11037, prN}, // Lo [53] BRAHMI SIGN JIHVAMULIYA..BRAHMI LETTER OLD TAMIL NNNA
+ {0x11038, 0x11046, prN}, // Mn [15] BRAHMI VOWEL SIGN AA..BRAHMI VIRAMA
+ {0x11047, 0x1104D, prN}, // Po [7] BRAHMI DANDA..BRAHMI PUNCTUATION LOTUS
+ {0x11052, 0x11065, prN}, // No [20] BRAHMI NUMBER ONE..BRAHMI NUMBER ONE THOUSAND
+ {0x11066, 0x1106F, prN}, // Nd [10] BRAHMI DIGIT ZERO..BRAHMI DIGIT NINE
+ {0x11070, 0x11070, prN}, // Mn BRAHMI SIGN OLD TAMIL VIRAMA
+ {0x11071, 0x11072, prN}, // Lo [2] BRAHMI LETTER OLD TAMIL SHORT E..BRAHMI LETTER OLD TAMIL SHORT O
+ {0x11073, 0x11074, prN}, // Mn [2] BRAHMI VOWEL SIGN OLD TAMIL SHORT E..BRAHMI VOWEL SIGN OLD TAMIL SHORT O
+ {0x11075, 0x11075, prN}, // Lo BRAHMI LETTER OLD TAMIL LLA
+ {0x1107F, 0x1107F, prN}, // Mn BRAHMI NUMBER JOINER
+ {0x11080, 0x11081, prN}, // Mn [2] KAITHI SIGN CANDRABINDU..KAITHI SIGN ANUSVARA
+ {0x11082, 0x11082, prN}, // Mc KAITHI SIGN VISARGA
+ {0x11083, 0x110AF, prN}, // Lo [45] KAITHI LETTER A..KAITHI LETTER HA
+ {0x110B0, 0x110B2, prN}, // Mc [3] KAITHI VOWEL SIGN AA..KAITHI VOWEL SIGN II
+ {0x110B3, 0x110B6, prN}, // Mn [4] KAITHI VOWEL SIGN U..KAITHI VOWEL SIGN AI
+ {0x110B7, 0x110B8, prN}, // Mc [2] KAITHI VOWEL SIGN O..KAITHI VOWEL SIGN AU
+ {0x110B9, 0x110BA, prN}, // Mn [2] KAITHI SIGN VIRAMA..KAITHI SIGN NUKTA
+ {0x110BB, 0x110BC, prN}, // Po [2] KAITHI ABBREVIATION SIGN..KAITHI ENUMERATION SIGN
+ {0x110BD, 0x110BD, prN}, // Cf KAITHI NUMBER SIGN
+ {0x110BE, 0x110C1, prN}, // Po [4] KAITHI SECTION MARK..KAITHI DOUBLE DANDA
+ {0x110C2, 0x110C2, prN}, // Mn KAITHI VOWEL SIGN VOCALIC R
+ {0x110CD, 0x110CD, prN}, // Cf KAITHI NUMBER SIGN ABOVE
+ {0x110D0, 0x110E8, prN}, // Lo [25] SORA SOMPENG LETTER SAH..SORA SOMPENG LETTER MAE
+ {0x110F0, 0x110F9, prN}, // Nd [10] SORA SOMPENG DIGIT ZERO..SORA SOMPENG DIGIT NINE
+ {0x11100, 0x11102, prN}, // Mn [3] CHAKMA SIGN CANDRABINDU..CHAKMA SIGN VISARGA
+ {0x11103, 0x11126, prN}, // Lo [36] CHAKMA LETTER AA..CHAKMA LETTER HAA
+ {0x11127, 0x1112B, prN}, // Mn [5] CHAKMA VOWEL SIGN A..CHAKMA VOWEL SIGN UU
+ {0x1112C, 0x1112C, prN}, // Mc CHAKMA VOWEL SIGN E
+ {0x1112D, 0x11134, prN}, // Mn [8] CHAKMA VOWEL SIGN AI..CHAKMA MAAYYAA
+ {0x11136, 0x1113F, prN}, // Nd [10] CHAKMA DIGIT ZERO..CHAKMA DIGIT NINE
+ {0x11140, 0x11143, prN}, // Po [4] CHAKMA SECTION MARK..CHAKMA QUESTION MARK
+ {0x11144, 0x11144, prN}, // Lo CHAKMA LETTER LHAA
+ {0x11145, 0x11146, prN}, // Mc [2] CHAKMA VOWEL SIGN AA..CHAKMA VOWEL SIGN EI
+ {0x11147, 0x11147, prN}, // Lo CHAKMA LETTER VAA
+ {0x11150, 0x11172, prN}, // Lo [35] MAHAJANI LETTER A..MAHAJANI LETTER RRA
+ {0x11173, 0x11173, prN}, // Mn MAHAJANI SIGN NUKTA
+ {0x11174, 0x11175, prN}, // Po [2] MAHAJANI ABBREVIATION SIGN..MAHAJANI SECTION MARK
+ {0x11176, 0x11176, prN}, // Lo MAHAJANI LIGATURE SHRI
+ {0x11180, 0x11181, prN}, // Mn [2] SHARADA SIGN CANDRABINDU..SHARADA SIGN ANUSVARA
+ {0x11182, 0x11182, prN}, // Mc SHARADA SIGN VISARGA
+ {0x11183, 0x111B2, prN}, // Lo [48] SHARADA LETTER A..SHARADA LETTER HA
+ {0x111B3, 0x111B5, prN}, // Mc [3] SHARADA VOWEL SIGN AA..SHARADA VOWEL SIGN II
+ {0x111B6, 0x111BE, prN}, // Mn [9] SHARADA VOWEL SIGN U..SHARADA VOWEL SIGN O
+ {0x111BF, 0x111C0, prN}, // Mc [2] SHARADA VOWEL SIGN AU..SHARADA SIGN VIRAMA
+ {0x111C1, 0x111C4, prN}, // Lo [4] SHARADA SIGN AVAGRAHA..SHARADA OM
+ {0x111C5, 0x111C8, prN}, // Po [4] SHARADA DANDA..SHARADA SEPARATOR
+ {0x111C9, 0x111CC, prN}, // Mn [4] SHARADA SANDHI MARK..SHARADA EXTRA SHORT VOWEL MARK
+ {0x111CD, 0x111CD, prN}, // Po SHARADA SUTRA MARK
+ {0x111CE, 0x111CE, prN}, // Mc SHARADA VOWEL SIGN PRISHTHAMATRA E
+ {0x111CF, 0x111CF, prN}, // Mn SHARADA SIGN INVERTED CANDRABINDU
+ {0x111D0, 0x111D9, prN}, // Nd [10] SHARADA DIGIT ZERO..SHARADA DIGIT NINE
+ {0x111DA, 0x111DA, prN}, // Lo SHARADA EKAM
+ {0x111DB, 0x111DB, prN}, // Po SHARADA SIGN SIDDHAM
+ {0x111DC, 0x111DC, prN}, // Lo SHARADA HEADSTROKE
+ {0x111DD, 0x111DF, prN}, // Po [3] SHARADA CONTINUATION SIGN..SHARADA SECTION MARK-2
+ {0x111E1, 0x111F4, prN}, // No [20] SINHALA ARCHAIC DIGIT ONE..SINHALA ARCHAIC NUMBER ONE THOUSAND
+ {0x11200, 0x11211, prN}, // Lo [18] KHOJKI LETTER A..KHOJKI LETTER JJA
+ {0x11213, 0x1122B, prN}, // Lo [25] KHOJKI LETTER NYA..KHOJKI LETTER LLA
+ {0x1122C, 0x1122E, prN}, // Mc [3] KHOJKI VOWEL SIGN AA..KHOJKI VOWEL SIGN II
+ {0x1122F, 0x11231, prN}, // Mn [3] KHOJKI VOWEL SIGN U..KHOJKI VOWEL SIGN AI
+ {0x11232, 0x11233, prN}, // Mc [2] KHOJKI VOWEL SIGN O..KHOJKI VOWEL SIGN AU
+ {0x11234, 0x11234, prN}, // Mn KHOJKI SIGN ANUSVARA
+ {0x11235, 0x11235, prN}, // Mc KHOJKI SIGN VIRAMA
+ {0x11236, 0x11237, prN}, // Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA
+ {0x11238, 0x1123D, prN}, // Po [6] KHOJKI DANDA..KHOJKI ABBREVIATION SIGN
+ {0x1123E, 0x1123E, prN}, // Mn KHOJKI SIGN SUKUN
+ {0x1123F, 0x11240, prN}, // Lo [2] KHOJKI LETTER QA..KHOJKI LETTER SHORT I
+ {0x11241, 0x11241, prN}, // Mn KHOJKI VOWEL SIGN VOCALIC R
+ {0x11280, 0x11286, prN}, // Lo [7] MULTANI LETTER A..MULTANI LETTER GA
+ {0x11288, 0x11288, prN}, // Lo MULTANI LETTER GHA
+ {0x1128A, 0x1128D, prN}, // Lo [4] MULTANI LETTER CA..MULTANI LETTER JJA
+ {0x1128F, 0x1129D, prN}, // Lo [15] MULTANI LETTER NYA..MULTANI LETTER BA
+ {0x1129F, 0x112A8, prN}, // Lo [10] MULTANI LETTER BHA..MULTANI LETTER RHA
+ {0x112A9, 0x112A9, prN}, // Po MULTANI SECTION MARK
+ {0x112B0, 0x112DE, prN}, // Lo [47] KHUDAWADI LETTER A..KHUDAWADI LETTER HA
+ {0x112DF, 0x112DF, prN}, // Mn KHUDAWADI SIGN ANUSVARA
+ {0x112E0, 0x112E2, prN}, // Mc [3] KHUDAWADI VOWEL SIGN AA..KHUDAWADI VOWEL SIGN II
+ {0x112E3, 0x112EA, prN}, // Mn [8] KHUDAWADI VOWEL SIGN U..KHUDAWADI SIGN VIRAMA
+ {0x112F0, 0x112F9, prN}, // Nd [10] KHUDAWADI DIGIT ZERO..KHUDAWADI DIGIT NINE
+ {0x11300, 0x11301, prN}, // Mn [2] GRANTHA SIGN COMBINING ANUSVARA ABOVE..GRANTHA SIGN CANDRABINDU
+ {0x11302, 0x11303, prN}, // Mc [2] GRANTHA SIGN ANUSVARA..GRANTHA SIGN VISARGA
+ {0x11305, 0x1130C, prN}, // Lo [8] GRANTHA LETTER A..GRANTHA LETTER VOCALIC L
+ {0x1130F, 0x11310, prN}, // Lo [2] GRANTHA LETTER EE..GRANTHA LETTER AI
+ {0x11313, 0x11328, prN}, // Lo [22] GRANTHA LETTER OO..GRANTHA LETTER NA
+ {0x1132A, 0x11330, prN}, // Lo [7] GRANTHA LETTER PA..GRANTHA LETTER RA
+ {0x11332, 0x11333, prN}, // Lo [2] GRANTHA LETTER LA..GRANTHA LETTER LLA
+ {0x11335, 0x11339, prN}, // Lo [5] GRANTHA LETTER VA..GRANTHA LETTER HA
+ {0x1133B, 0x1133C, prN}, // Mn [2] COMBINING BINDU BELOW..GRANTHA SIGN NUKTA
+ {0x1133D, 0x1133D, prN}, // Lo GRANTHA SIGN AVAGRAHA
+ {0x1133E, 0x1133F, prN}, // Mc [2] GRANTHA VOWEL SIGN AA..GRANTHA VOWEL SIGN I
+ {0x11340, 0x11340, prN}, // Mn GRANTHA VOWEL SIGN II
+ {0x11341, 0x11344, prN}, // Mc [4] GRANTHA VOWEL SIGN U..GRANTHA VOWEL SIGN VOCALIC RR
+ {0x11347, 0x11348, prN}, // Mc [2] GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI
+ {0x1134B, 0x1134D, prN}, // Mc [3] GRANTHA VOWEL SIGN OO..GRANTHA SIGN VIRAMA
+ {0x11350, 0x11350, prN}, // Lo GRANTHA OM
+ {0x11357, 0x11357, prN}, // Mc GRANTHA AU LENGTH MARK
+ {0x1135D, 0x11361, prN}, // Lo [5] GRANTHA SIGN PLUTA..GRANTHA LETTER VOCALIC LL
+ {0x11362, 0x11363, prN}, // Mc [2] GRANTHA VOWEL SIGN VOCALIC L..GRANTHA VOWEL SIGN VOCALIC LL
+ {0x11366, 0x1136C, prN}, // Mn [7] COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX
+ {0x11370, 0x11374, prN}, // Mn [5] COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA
+ {0x11400, 0x11434, prN}, // Lo [53] NEWA LETTER A..NEWA LETTER HA
+ {0x11435, 0x11437, prN}, // Mc [3] NEWA VOWEL SIGN AA..NEWA VOWEL SIGN II
+ {0x11438, 0x1143F, prN}, // Mn [8] NEWA VOWEL SIGN U..NEWA VOWEL SIGN AI
+ {0x11440, 0x11441, prN}, // Mc [2] NEWA VOWEL SIGN O..NEWA VOWEL SIGN AU
+ {0x11442, 0x11444, prN}, // Mn [3] NEWA SIGN VIRAMA..NEWA SIGN ANUSVARA
+ {0x11445, 0x11445, prN}, // Mc NEWA SIGN VISARGA
+ {0x11446, 0x11446, prN}, // Mn NEWA SIGN NUKTA
+ {0x11447, 0x1144A, prN}, // Lo [4] NEWA SIGN AVAGRAHA..NEWA SIDDHI
+ {0x1144B, 0x1144F, prN}, // Po [5] NEWA DANDA..NEWA ABBREVIATION SIGN
+ {0x11450, 0x11459, prN}, // Nd [10] NEWA DIGIT ZERO..NEWA DIGIT NINE
+ {0x1145A, 0x1145B, prN}, // Po [2] NEWA DOUBLE COMMA..NEWA PLACEHOLDER MARK
+ {0x1145D, 0x1145D, prN}, // Po NEWA INSERTION SIGN
+ {0x1145E, 0x1145E, prN}, // Mn NEWA SANDHI MARK
+ {0x1145F, 0x11461, prN}, // Lo [3] NEWA LETTER VEDIC ANUSVARA..NEWA SIGN UPADHMANIYA
+ {0x11480, 0x114AF, prN}, // Lo [48] TIRHUTA ANJI..TIRHUTA LETTER HA
+ {0x114B0, 0x114B2, prN}, // Mc [3] TIRHUTA VOWEL SIGN AA..TIRHUTA VOWEL SIGN II
+ {0x114B3, 0x114B8, prN}, // Mn [6] TIRHUTA VOWEL SIGN U..TIRHUTA VOWEL SIGN VOCALIC LL
+ {0x114B9, 0x114B9, prN}, // Mc TIRHUTA VOWEL SIGN E
+ {0x114BA, 0x114BA, prN}, // Mn TIRHUTA VOWEL SIGN SHORT E
+ {0x114BB, 0x114BE, prN}, // Mc [4] TIRHUTA VOWEL SIGN AI..TIRHUTA VOWEL SIGN AU
+ {0x114BF, 0x114C0, prN}, // Mn [2] TIRHUTA SIGN CANDRABINDU..TIRHUTA SIGN ANUSVARA
+ {0x114C1, 0x114C1, prN}, // Mc TIRHUTA SIGN VISARGA
+ {0x114C2, 0x114C3, prN}, // Mn [2] TIRHUTA SIGN VIRAMA..TIRHUTA SIGN NUKTA
+ {0x114C4, 0x114C5, prN}, // Lo [2] TIRHUTA SIGN AVAGRAHA..TIRHUTA GVANG
+ {0x114C6, 0x114C6, prN}, // Po TIRHUTA ABBREVIATION SIGN
+ {0x114C7, 0x114C7, prN}, // Lo TIRHUTA OM
+ {0x114D0, 0x114D9, prN}, // Nd [10] TIRHUTA DIGIT ZERO..TIRHUTA DIGIT NINE
+ {0x11580, 0x115AE, prN}, // Lo [47] SIDDHAM LETTER A..SIDDHAM LETTER HA
+ {0x115AF, 0x115B1, prN}, // Mc [3] SIDDHAM VOWEL SIGN AA..SIDDHAM VOWEL SIGN II
+ {0x115B2, 0x115B5, prN}, // Mn [4] SIDDHAM VOWEL SIGN U..SIDDHAM VOWEL SIGN VOCALIC RR
+ {0x115B8, 0x115BB, prN}, // Mc [4] SIDDHAM VOWEL SIGN E..SIDDHAM VOWEL SIGN AU
+ {0x115BC, 0x115BD, prN}, // Mn [2] SIDDHAM SIGN CANDRABINDU..SIDDHAM SIGN ANUSVARA
+ {0x115BE, 0x115BE, prN}, // Mc SIDDHAM SIGN VISARGA
+ {0x115BF, 0x115C0, prN}, // Mn [2] SIDDHAM SIGN VIRAMA..SIDDHAM SIGN NUKTA
+ {0x115C1, 0x115D7, prN}, // Po [23] SIDDHAM SIGN SIDDHAM..SIDDHAM SECTION MARK WITH CIRCLES AND FOUR ENCLOSURES
+ {0x115D8, 0x115DB, prN}, // Lo [4] SIDDHAM LETTER THREE-CIRCLE ALTERNATE I..SIDDHAM LETTER ALTERNATE U
+ {0x115DC, 0x115DD, prN}, // Mn [2] SIDDHAM VOWEL SIGN ALTERNATE U..SIDDHAM VOWEL SIGN ALTERNATE UU
+ {0x11600, 0x1162F, prN}, // Lo [48] MODI LETTER A..MODI LETTER LLA
+ {0x11630, 0x11632, prN}, // Mc [3] MODI VOWEL SIGN AA..MODI VOWEL SIGN II
+ {0x11633, 0x1163A, prN}, // Mn [8] MODI VOWEL SIGN U..MODI VOWEL SIGN AI
+ {0x1163B, 0x1163C, prN}, // Mc [2] MODI VOWEL SIGN O..MODI VOWEL SIGN AU
+ {0x1163D, 0x1163D, prN}, // Mn MODI SIGN ANUSVARA
+ {0x1163E, 0x1163E, prN}, // Mc MODI SIGN VISARGA
+ {0x1163F, 0x11640, prN}, // Mn [2] MODI SIGN VIRAMA..MODI SIGN ARDHACANDRA
+ {0x11641, 0x11643, prN}, // Po [3] MODI DANDA..MODI ABBREVIATION SIGN
+ {0x11644, 0x11644, prN}, // Lo MODI SIGN HUVA
+ {0x11650, 0x11659, prN}, // Nd [10] MODI DIGIT ZERO..MODI DIGIT NINE
+ {0x11660, 0x1166C, prN}, // Po [13] MONGOLIAN BIRGA WITH ORNAMENT..MONGOLIAN TURNED SWIRL BIRGA WITH DOUBLE ORNAMENT
+ {0x11680, 0x116AA, prN}, // Lo [43] TAKRI LETTER A..TAKRI LETTER RRA
+ {0x116AB, 0x116AB, prN}, // Mn TAKRI SIGN ANUSVARA
+ {0x116AC, 0x116AC, prN}, // Mc TAKRI SIGN VISARGA
+ {0x116AD, 0x116AD, prN}, // Mn TAKRI VOWEL SIGN AA
+ {0x116AE, 0x116AF, prN}, // Mc [2] TAKRI VOWEL SIGN I..TAKRI VOWEL SIGN II
+ {0x116B0, 0x116B5, prN}, // Mn [6] TAKRI VOWEL SIGN U..TAKRI VOWEL SIGN AU
+ {0x116B6, 0x116B6, prN}, // Mc TAKRI SIGN VIRAMA
+ {0x116B7, 0x116B7, prN}, // Mn TAKRI SIGN NUKTA
+ {0x116B8, 0x116B8, prN}, // Lo TAKRI LETTER ARCHAIC KHA
+ {0x116B9, 0x116B9, prN}, // Po TAKRI ABBREVIATION SIGN
+ {0x116C0, 0x116C9, prN}, // Nd [10] TAKRI DIGIT ZERO..TAKRI DIGIT NINE
+ {0x11700, 0x1171A, prN}, // Lo [27] AHOM LETTER KA..AHOM LETTER ALTERNATE BA
+ {0x1171D, 0x1171F, prN}, // Mn [3] AHOM CONSONANT SIGN MEDIAL LA..AHOM CONSONANT SIGN MEDIAL LIGATING RA
+ {0x11720, 0x11721, prN}, // Mc [2] AHOM VOWEL SIGN A..AHOM VOWEL SIGN AA
+ {0x11722, 0x11725, prN}, // Mn [4] AHOM VOWEL SIGN I..AHOM VOWEL SIGN UU
+ {0x11726, 0x11726, prN}, // Mc AHOM VOWEL SIGN E
+ {0x11727, 0x1172B, prN}, // Mn [5] AHOM VOWEL SIGN AW..AHOM SIGN KILLER
+ {0x11730, 0x11739, prN}, // Nd [10] AHOM DIGIT ZERO..AHOM DIGIT NINE
+ {0x1173A, 0x1173B, prN}, // No [2] AHOM NUMBER TEN..AHOM NUMBER TWENTY
+ {0x1173C, 0x1173E, prN}, // Po [3] AHOM SIGN SMALL SECTION..AHOM SIGN RULAI
+ {0x1173F, 0x1173F, prN}, // So AHOM SYMBOL VI
+ {0x11740, 0x11746, prN}, // Lo [7] AHOM LETTER CA..AHOM LETTER LLA
+ {0x11800, 0x1182B, prN}, // Lo [44] DOGRA LETTER A..DOGRA LETTER RRA
+ {0x1182C, 0x1182E, prN}, // Mc [3] DOGRA VOWEL SIGN AA..DOGRA VOWEL SIGN II
+ {0x1182F, 0x11837, prN}, // Mn [9] DOGRA VOWEL SIGN U..DOGRA SIGN ANUSVARA
+ {0x11838, 0x11838, prN}, // Mc DOGRA SIGN VISARGA
+ {0x11839, 0x1183A, prN}, // Mn [2] DOGRA SIGN VIRAMA..DOGRA SIGN NUKTA
+ {0x1183B, 0x1183B, prN}, // Po DOGRA ABBREVIATION SIGN
+ {0x118A0, 0x118DF, prN}, // L& [64] WARANG CITI CAPITAL LETTER NGAA..WARANG CITI SMALL LETTER VIYO
+ {0x118E0, 0x118E9, prN}, // Nd [10] WARANG CITI DIGIT ZERO..WARANG CITI DIGIT NINE
+ {0x118EA, 0x118F2, prN}, // No [9] WARANG CITI NUMBER TEN..WARANG CITI NUMBER NINETY
+ {0x118FF, 0x118FF, prN}, // Lo WARANG CITI OM
+ {0x11900, 0x11906, prN}, // Lo [7] DIVES AKURU LETTER A..DIVES AKURU LETTER E
+ {0x11909, 0x11909, prN}, // Lo DIVES AKURU LETTER O
+ {0x1190C, 0x11913, prN}, // Lo [8] DIVES AKURU LETTER KA..DIVES AKURU LETTER JA
+ {0x11915, 0x11916, prN}, // Lo [2] DIVES AKURU LETTER NYA..DIVES AKURU LETTER TTA
+ {0x11918, 0x1192F, prN}, // Lo [24] DIVES AKURU LETTER DDA..DIVES AKURU LETTER ZA
+ {0x11930, 0x11935, prN}, // Mc [6] DIVES AKURU VOWEL SIGN AA..DIVES AKURU VOWEL SIGN E
+ {0x11937, 0x11938, prN}, // Mc [2] DIVES AKURU VOWEL SIGN AI..DIVES AKURU VOWEL SIGN O
+ {0x1193B, 0x1193C, prN}, // Mn [2] DIVES AKURU SIGN ANUSVARA..DIVES AKURU SIGN CANDRABINDU
+ {0x1193D, 0x1193D, prN}, // Mc DIVES AKURU SIGN HALANTA
+ {0x1193E, 0x1193E, prN}, // Mn DIVES AKURU VIRAMA
+ {0x1193F, 0x1193F, prN}, // Lo DIVES AKURU PREFIXED NASAL SIGN
+ {0x11940, 0x11940, prN}, // Mc DIVES AKURU MEDIAL YA
+ {0x11941, 0x11941, prN}, // Lo DIVES AKURU INITIAL RA
+ {0x11942, 0x11942, prN}, // Mc DIVES AKURU MEDIAL RA
+ {0x11943, 0x11943, prN}, // Mn DIVES AKURU SIGN NUKTA
+ {0x11944, 0x11946, prN}, // Po [3] DIVES AKURU DOUBLE DANDA..DIVES AKURU END OF TEXT MARK
+ {0x11950, 0x11959, prN}, // Nd [10] DIVES AKURU DIGIT ZERO..DIVES AKURU DIGIT NINE
+ {0x119A0, 0x119A7, prN}, // Lo [8] NANDINAGARI LETTER A..NANDINAGARI LETTER VOCALIC RR
+ {0x119AA, 0x119D0, prN}, // Lo [39] NANDINAGARI LETTER E..NANDINAGARI LETTER RRA
+ {0x119D1, 0x119D3, prN}, // Mc [3] NANDINAGARI VOWEL SIGN AA..NANDINAGARI VOWEL SIGN II
+ {0x119D4, 0x119D7, prN}, // Mn [4] NANDINAGARI VOWEL SIGN U..NANDINAGARI VOWEL SIGN VOCALIC RR
+ {0x119DA, 0x119DB, prN}, // Mn [2] NANDINAGARI VOWEL SIGN E..NANDINAGARI VOWEL SIGN AI
+ {0x119DC, 0x119DF, prN}, // Mc [4] NANDINAGARI VOWEL SIGN O..NANDINAGARI SIGN VISARGA
+ {0x119E0, 0x119E0, prN}, // Mn NANDINAGARI SIGN VIRAMA
+ {0x119E1, 0x119E1, prN}, // Lo NANDINAGARI SIGN AVAGRAHA
+ {0x119E2, 0x119E2, prN}, // Po NANDINAGARI SIGN SIDDHAM
+ {0x119E3, 0x119E3, prN}, // Lo NANDINAGARI HEADSTROKE
+ {0x119E4, 0x119E4, prN}, // Mc NANDINAGARI VOWEL SIGN PRISHTHAMATRA E
+ {0x11A00, 0x11A00, prN}, // Lo ZANABAZAR SQUARE LETTER A
+ {0x11A01, 0x11A0A, prN}, // Mn [10] ZANABAZAR SQUARE VOWEL SIGN I..ZANABAZAR SQUARE VOWEL LENGTH MARK
+ {0x11A0B, 0x11A32, prN}, // Lo [40] ZANABAZAR SQUARE LETTER KA..ZANABAZAR SQUARE LETTER KSSA
+ {0x11A33, 0x11A38, prN}, // Mn [6] ZANABAZAR SQUARE FINAL CONSONANT MARK..ZANABAZAR SQUARE SIGN ANUSVARA
+ {0x11A39, 0x11A39, prN}, // Mc ZANABAZAR SQUARE SIGN VISARGA
+ {0x11A3A, 0x11A3A, prN}, // Lo ZANABAZAR SQUARE CLUSTER-INITIAL LETTER RA
+ {0x11A3B, 0x11A3E, prN}, // Mn [4] ZANABAZAR SQUARE CLUSTER-FINAL LETTER YA..ZANABAZAR SQUARE CLUSTER-FINAL LETTER VA
+ {0x11A3F, 0x11A46, prN}, // Po [8] ZANABAZAR SQUARE INITIAL HEAD MARK..ZANABAZAR SQUARE CLOSING DOUBLE-LINED HEAD MARK
+ {0x11A47, 0x11A47, prN}, // Mn ZANABAZAR SQUARE SUBJOINER
+ {0x11A50, 0x11A50, prN}, // Lo SOYOMBO LETTER A
+ {0x11A51, 0x11A56, prN}, // Mn [6] SOYOMBO VOWEL SIGN I..SOYOMBO VOWEL SIGN OE
+ {0x11A57, 0x11A58, prN}, // Mc [2] SOYOMBO VOWEL SIGN AI..SOYOMBO VOWEL SIGN AU
+ {0x11A59, 0x11A5B, prN}, // Mn [3] SOYOMBO VOWEL SIGN VOCALIC R..SOYOMBO VOWEL LENGTH MARK
+ {0x11A5C, 0x11A89, prN}, // Lo [46] SOYOMBO LETTER KA..SOYOMBO CLUSTER-INITIAL LETTER SA
+ {0x11A8A, 0x11A96, prN}, // Mn [13] SOYOMBO FINAL CONSONANT SIGN G..SOYOMBO SIGN ANUSVARA
+ {0x11A97, 0x11A97, prN}, // Mc SOYOMBO SIGN VISARGA
+ {0x11A98, 0x11A99, prN}, // Mn [2] SOYOMBO GEMINATION MARK..SOYOMBO SUBJOINER
+ {0x11A9A, 0x11A9C, prN}, // Po [3] SOYOMBO MARK TSHEG..SOYOMBO MARK DOUBLE SHAD
+ {0x11A9D, 0x11A9D, prN}, // Lo SOYOMBO MARK PLUTA
+ {0x11A9E, 0x11AA2, prN}, // Po [5] SOYOMBO HEAD MARK WITH MOON AND SUN AND TRIPLE FLAME..SOYOMBO TERMINAL MARK-2
+ {0x11AB0, 0x11ABF, prN}, // Lo [16] CANADIAN SYLLABICS NATTILIK HI..CANADIAN SYLLABICS SPA
+ {0x11AC0, 0x11AF8, prN}, // Lo [57] PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL
+ {0x11B00, 0x11B09, prN}, // Po [10] DEVANAGARI HEAD MARK..DEVANAGARI SIGN MINDU
+ {0x11C00, 0x11C08, prN}, // Lo [9] BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L
+ {0x11C0A, 0x11C2E, prN}, // Lo [37] BHAIKSUKI LETTER E..BHAIKSUKI LETTER HA
+ {0x11C2F, 0x11C2F, prN}, // Mc BHAIKSUKI VOWEL SIGN AA
+ {0x11C30, 0x11C36, prN}, // Mn [7] BHAIKSUKI VOWEL SIGN I..BHAIKSUKI VOWEL SIGN VOCALIC L
+ {0x11C38, 0x11C3D, prN}, // Mn [6] BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN ANUSVARA
+ {0x11C3E, 0x11C3E, prN}, // Mc BHAIKSUKI SIGN VISARGA
+ {0x11C3F, 0x11C3F, prN}, // Mn BHAIKSUKI SIGN VIRAMA
+ {0x11C40, 0x11C40, prN}, // Lo BHAIKSUKI SIGN AVAGRAHA
+ {0x11C41, 0x11C45, prN}, // Po [5] BHAIKSUKI DANDA..BHAIKSUKI GAP FILLER-2
+ {0x11C50, 0x11C59, prN}, // Nd [10] BHAIKSUKI DIGIT ZERO..BHAIKSUKI DIGIT NINE
+ {0x11C5A, 0x11C6C, prN}, // No [19] BHAIKSUKI NUMBER ONE..BHAIKSUKI HUNDREDS UNIT MARK
+ {0x11C70, 0x11C71, prN}, // Po [2] MARCHEN HEAD MARK..MARCHEN MARK SHAD
+ {0x11C72, 0x11C8F, prN}, // Lo [30] MARCHEN LETTER KA..MARCHEN LETTER A
+ {0x11C92, 0x11CA7, prN}, // Mn [22] MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA
+ {0x11CA9, 0x11CA9, prN}, // Mc MARCHEN SUBJOINED LETTER YA
+ {0x11CAA, 0x11CB0, prN}, // Mn [7] MARCHEN SUBJOINED LETTER RA..MARCHEN VOWEL SIGN AA
+ {0x11CB1, 0x11CB1, prN}, // Mc MARCHEN VOWEL SIGN I
+ {0x11CB2, 0x11CB3, prN}, // Mn [2] MARCHEN VOWEL SIGN U..MARCHEN VOWEL SIGN E
+ {0x11CB4, 0x11CB4, prN}, // Mc MARCHEN VOWEL SIGN O
+ {0x11CB5, 0x11CB6, prN}, // Mn [2] MARCHEN SIGN ANUSVARA..MARCHEN SIGN CANDRABINDU
+ {0x11D00, 0x11D06, prN}, // Lo [7] MASARAM GONDI LETTER A..MASARAM GONDI LETTER E
+ {0x11D08, 0x11D09, prN}, // Lo [2] MASARAM GONDI LETTER AI..MASARAM GONDI LETTER O
+ {0x11D0B, 0x11D30, prN}, // Lo [38] MASARAM GONDI LETTER AU..MASARAM GONDI LETTER TRA
+ {0x11D31, 0x11D36, prN}, // Mn [6] MASARAM GONDI VOWEL SIGN AA..MASARAM GONDI VOWEL SIGN VOCALIC R
+ {0x11D3A, 0x11D3A, prN}, // Mn MASARAM GONDI VOWEL SIGN E
+ {0x11D3C, 0x11D3D, prN}, // Mn [2] MASARAM GONDI VOWEL SIGN AI..MASARAM GONDI VOWEL SIGN O
+ {0x11D3F, 0x11D45, prN}, // Mn [7] MASARAM GONDI VOWEL SIGN AU..MASARAM GONDI VIRAMA
+ {0x11D46, 0x11D46, prN}, // Lo MASARAM GONDI REPHA
+ {0x11D47, 0x11D47, prN}, // Mn MASARAM GONDI RA-KARA
+ {0x11D50, 0x11D59, prN}, // Nd [10] MASARAM GONDI DIGIT ZERO..MASARAM GONDI DIGIT NINE
+ {0x11D60, 0x11D65, prN}, // Lo [6] GUNJALA GONDI LETTER A..GUNJALA GONDI LETTER UU
+ {0x11D67, 0x11D68, prN}, // Lo [2] GUNJALA GONDI LETTER EE..GUNJALA GONDI LETTER AI
+ {0x11D6A, 0x11D89, prN}, // Lo [32] GUNJALA GONDI LETTER OO..GUNJALA GONDI LETTER SA
+ {0x11D8A, 0x11D8E, prN}, // Mc [5] GUNJALA GONDI VOWEL SIGN AA..GUNJALA GONDI VOWEL SIGN UU
+ {0x11D90, 0x11D91, prN}, // Mn [2] GUNJALA GONDI VOWEL SIGN EE..GUNJALA GONDI VOWEL SIGN AI
+ {0x11D93, 0x11D94, prN}, // Mc [2] GUNJALA GONDI VOWEL SIGN OO..GUNJALA GONDI VOWEL SIGN AU
+ {0x11D95, 0x11D95, prN}, // Mn GUNJALA GONDI SIGN ANUSVARA
+ {0x11D96, 0x11D96, prN}, // Mc GUNJALA GONDI SIGN VISARGA
+ {0x11D97, 0x11D97, prN}, // Mn GUNJALA GONDI VIRAMA
+ {0x11D98, 0x11D98, prN}, // Lo GUNJALA GONDI OM
+ {0x11DA0, 0x11DA9, prN}, // Nd [10] GUNJALA GONDI DIGIT ZERO..GUNJALA GONDI DIGIT NINE
+ {0x11EE0, 0x11EF2, prN}, // Lo [19] MAKASAR LETTER KA..MAKASAR ANGKA
+ {0x11EF3, 0x11EF4, prN}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
+ {0x11EF5, 0x11EF6, prN}, // Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
+ {0x11EF7, 0x11EF8, prN}, // Po [2] MAKASAR PASSIMBANG..MAKASAR END OF SECTION
+ {0x11F00, 0x11F01, prN}, // Mn [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA
+ {0x11F02, 0x11F02, prN}, // Lo KAWI SIGN REPHA
+ {0x11F03, 0x11F03, prN}, // Mc KAWI SIGN VISARGA
+ {0x11F04, 0x11F10, prN}, // Lo [13] KAWI LETTER A..KAWI LETTER O
+ {0x11F12, 0x11F33, prN}, // Lo [34] KAWI LETTER KA..KAWI LETTER JNYA
+ {0x11F34, 0x11F35, prN}, // Mc [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA
+ {0x11F36, 0x11F3A, prN}, // Mn [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R
+ {0x11F3E, 0x11F3F, prN}, // Mc [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI
+ {0x11F40, 0x11F40, prN}, // Mn KAWI VOWEL SIGN EU
+ {0x11F41, 0x11F41, prN}, // Mc KAWI SIGN KILLER
+ {0x11F42, 0x11F42, prN}, // Mn KAWI CONJOINER
+ {0x11F43, 0x11F4F, prN}, // Po [13] KAWI DANDA..KAWI PUNCTUATION CLOSING SPIRAL
+ {0x11F50, 0x11F59, prN}, // Nd [10] KAWI DIGIT ZERO..KAWI DIGIT NINE
+ {0x11FB0, 0x11FB0, prN}, // Lo LISU LETTER YHA
+ {0x11FC0, 0x11FD4, prN}, // No [21] TAMIL FRACTION ONE THREE-HUNDRED-AND-TWENTIETH..TAMIL FRACTION DOWNSCALING FACTOR KIIZH
+ {0x11FD5, 0x11FDC, prN}, // So [8] TAMIL SIGN NEL..TAMIL SIGN MUKKURUNI
+ {0x11FDD, 0x11FE0, prN}, // Sc [4] TAMIL SIGN KAACU..TAMIL SIGN VARAAKAN
+ {0x11FE1, 0x11FF1, prN}, // So [17] TAMIL SIGN PAARAM..TAMIL SIGN VAKAIYARAA
+ {0x11FFF, 0x11FFF, prN}, // Po TAMIL PUNCTUATION END OF TEXT
+ {0x12000, 0x12399, prN}, // Lo [922] CUNEIFORM SIGN A..CUNEIFORM SIGN U U
+ {0x12400, 0x1246E, prN}, // Nl [111] CUNEIFORM NUMERIC SIGN TWO ASH..CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM
+ {0x12470, 0x12474, prN}, // Po [5] CUNEIFORM PUNCTUATION SIGN OLD ASSYRIAN WORD DIVIDER..CUNEIFORM PUNCTUATION SIGN DIAGONAL QUADCOLON
+ {0x12480, 0x12543, prN}, // Lo [196] CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU
+ {0x12F90, 0x12FF0, prN}, // Lo [97] CYPRO-MINOAN SIGN CM001..CYPRO-MINOAN SIGN CM114
+ {0x12FF1, 0x12FF2, prN}, // Po [2] CYPRO-MINOAN SIGN CM301..CYPRO-MINOAN SIGN CM302
+ {0x13000, 0x1342F, prN}, // Lo [1072] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH V011D
+ {0x13430, 0x1343F, prN}, // Cf [16] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE
+ {0x13440, 0x13440, prN}, // Mn EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY
+ {0x13441, 0x13446, prN}, // Lo [6] EGYPTIAN HIEROGLYPH FULL BLANK..EGYPTIAN HIEROGLYPH WIDE LOST SIGN
+ {0x13447, 0x13455, prN}, // Mn [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED
+ {0x14400, 0x14646, prN}, // Lo [583] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A530
+ {0x16800, 0x16A38, prN}, // Lo [569] BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ
+ {0x16A40, 0x16A5E, prN}, // Lo [31] MRO LETTER TA..MRO LETTER TEK
+ {0x16A60, 0x16A69, prN}, // Nd [10] MRO DIGIT ZERO..MRO DIGIT NINE
+ {0x16A6E, 0x16A6F, prN}, // Po [2] MRO DANDA..MRO DOUBLE DANDA
+ {0x16A70, 0x16ABE, prN}, // Lo [79] TANGSA LETTER OZ..TANGSA LETTER ZA
+ {0x16AC0, 0x16AC9, prN}, // Nd [10] TANGSA DIGIT ZERO..TANGSA DIGIT NINE
+ {0x16AD0, 0x16AED, prN}, // Lo [30] BASSA VAH LETTER ENNI..BASSA VAH LETTER I
+ {0x16AF0, 0x16AF4, prN}, // Mn [5] BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE
+ {0x16AF5, 0x16AF5, prN}, // Po BASSA VAH FULL STOP
+ {0x16B00, 0x16B2F, prN}, // Lo [48] PAHAWH HMONG VOWEL KEEB..PAHAWH HMONG CONSONANT CAU
+ {0x16B30, 0x16B36, prN}, // Mn [7] PAHAWH HMONG MARK CIM TUB..PAHAWH HMONG MARK CIM TAUM
+ {0x16B37, 0x16B3B, prN}, // Po [5] PAHAWH HMONG SIGN VOS THOM..PAHAWH HMONG SIGN VOS FEEM
+ {0x16B3C, 0x16B3F, prN}, // So [4] PAHAWH HMONG SIGN XYEEM NTXIV..PAHAWH HMONG SIGN XYEEM FAIB
+ {0x16B40, 0x16B43, prN}, // Lm [4] PAHAWH HMONG SIGN VOS SEEV..PAHAWH HMONG SIGN IB YAM
+ {0x16B44, 0x16B44, prN}, // Po PAHAWH HMONG SIGN XAUS
+ {0x16B45, 0x16B45, prN}, // So PAHAWH HMONG SIGN CIM TSOV ROG
+ {0x16B50, 0x16B59, prN}, // Nd [10] PAHAWH HMONG DIGIT ZERO..PAHAWH HMONG DIGIT NINE
+ {0x16B5B, 0x16B61, prN}, // No [7] PAHAWH HMONG NUMBER TENS..PAHAWH HMONG NUMBER TRILLIONS
+ {0x16B63, 0x16B77, prN}, // Lo [21] PAHAWH HMONG SIGN VOS LUB..PAHAWH HMONG SIGN CIM NRES TOS
+ {0x16B7D, 0x16B8F, prN}, // Lo [19] PAHAWH HMONG CLAN SIGN TSHEEJ..PAHAWH HMONG CLAN SIGN VWJ
+ {0x16E40, 0x16E7F, prN}, // L& [64] MEDEFAIDRIN CAPITAL LETTER M..MEDEFAIDRIN SMALL LETTER Y
+ {0x16E80, 0x16E96, prN}, // No [23] MEDEFAIDRIN DIGIT ZERO..MEDEFAIDRIN DIGIT THREE ALTERNATE FORM
+ {0x16E97, 0x16E9A, prN}, // Po [4] MEDEFAIDRIN COMMA..MEDEFAIDRIN EXCLAMATION OH
+ {0x16F00, 0x16F4A, prN}, // Lo [75] MIAO LETTER PA..MIAO LETTER RTE
+ {0x16F4F, 0x16F4F, prN}, // Mn MIAO SIGN CONSONANT MODIFIER BAR
+ {0x16F50, 0x16F50, prN}, // Lo MIAO LETTER NASALIZATION
+ {0x16F51, 0x16F87, prN}, // Mc [55] MIAO SIGN ASPIRATION..MIAO VOWEL SIGN UI
+ {0x16F8F, 0x16F92, prN}, // Mn [4] MIAO TONE RIGHT..MIAO TONE BELOW
+ {0x16F93, 0x16F9F, prN}, // Lm [13] MIAO LETTER TONE-2..MIAO LETTER REFORMED TONE-8
+ {0x16FE0, 0x16FE1, prW}, // Lm [2] TANGUT ITERATION MARK..NUSHU ITERATION MARK
+ {0x16FE2, 0x16FE2, prW}, // Po OLD CHINESE HOOK MARK
+ {0x16FE3, 0x16FE3, prW}, // Lm OLD CHINESE ITERATION MARK
+ {0x16FE4, 0x16FE4, prW}, // Mn KHITAN SMALL SCRIPT FILLER
+ {0x16FF0, 0x16FF1, prW}, // Mc [2] VIETNAMESE ALTERNATE READING MARK CA..VIETNAMESE ALTERNATE READING MARK NHAY
+ {0x17000, 0x187F7, prW}, // Lo [6136] TANGUT IDEOGRAPH-17000..TANGUT IDEOGRAPH-187F7
+ {0x18800, 0x18AFF, prW}, // Lo [768] TANGUT COMPONENT-001..TANGUT COMPONENT-768
+ {0x18B00, 0x18CD5, prW}, // Lo [470] KHITAN SMALL SCRIPT CHARACTER-18B00..KHITAN SMALL SCRIPT CHARACTER-18CD5
+ {0x18D00, 0x18D08, prW}, // Lo [9] TANGUT IDEOGRAPH-18D00..TANGUT IDEOGRAPH-18D08
+ {0x1AFF0, 0x1AFF3, prW}, // Lm [4] KATAKANA LETTER MINNAN TONE-2..KATAKANA LETTER MINNAN TONE-5
+ {0x1AFF5, 0x1AFFB, prW}, // Lm [7] KATAKANA LETTER MINNAN TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-5
+ {0x1AFFD, 0x1AFFE, prW}, // Lm [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8
+ {0x1B000, 0x1B0FF, prW}, // Lo [256] KATAKANA LETTER ARCHAIC E..HENTAIGANA LETTER RE-2
+ {0x1B100, 0x1B122, prW}, // Lo [35] HENTAIGANA LETTER RE-3..KATAKANA LETTER ARCHAIC WU
+ {0x1B132, 0x1B132, prW}, // Lo HIRAGANA LETTER SMALL KO
+ {0x1B150, 0x1B152, prW}, // Lo [3] HIRAGANA LETTER SMALL WI..HIRAGANA LETTER SMALL WO
+ {0x1B155, 0x1B155, prW}, // Lo KATAKANA LETTER SMALL KO
+ {0x1B164, 0x1B167, prW}, // Lo [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N
+ {0x1B170, 0x1B2FB, prW}, // Lo [396] NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB
+ {0x1BC00, 0x1BC6A, prN}, // Lo [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M
+ {0x1BC70, 0x1BC7C, prN}, // Lo [13] DUPLOYAN AFFIX LEFT HORIZONTAL SECANT..DUPLOYAN AFFIX ATTACHED TANGENT HOOK
+ {0x1BC80, 0x1BC88, prN}, // Lo [9] DUPLOYAN AFFIX HIGH ACUTE..DUPLOYAN AFFIX HIGH VERTICAL
+ {0x1BC90, 0x1BC99, prN}, // Lo [10] DUPLOYAN AFFIX LOW ACUTE..DUPLOYAN AFFIX LOW ARROW
+ {0x1BC9C, 0x1BC9C, prN}, // So DUPLOYAN SIGN O WITH CROSS
+ {0x1BC9D, 0x1BC9E, prN}, // Mn [2] DUPLOYAN THICK LETTER SELECTOR..DUPLOYAN DOUBLE MARK
+ {0x1BC9F, 0x1BC9F, prN}, // Po DUPLOYAN PUNCTUATION CHINOOK FULL STOP
+ {0x1BCA0, 0x1BCA3, prN}, // Cf [4] SHORTHAND FORMAT LETTER OVERLAP..SHORTHAND FORMAT UP STEP
+ {0x1CF00, 0x1CF2D, prN}, // Mn [46] ZNAMENNY COMBINING MARK GORAZDO NIZKO S KRYZHEM ON LEFT..ZNAMENNY COMBINING MARK KRYZH ON LEFT
+ {0x1CF30, 0x1CF46, prN}, // Mn [23] ZNAMENNY COMBINING TONAL RANGE MARK MRACHNO..ZNAMENNY PRIZNAK MODIFIER ROG
+ {0x1CF50, 0x1CFC3, prN}, // So [116] ZNAMENNY NEUME KRYUK..ZNAMENNY NEUME PAUK
+ {0x1D000, 0x1D0F5, prN}, // So [246] BYZANTINE MUSICAL SYMBOL PSILI..BYZANTINE MUSICAL SYMBOL GORGON NEO KATO
+ {0x1D100, 0x1D126, prN}, // So [39] MUSICAL SYMBOL SINGLE BARLINE..MUSICAL SYMBOL DRUM CLEF-2
+ {0x1D129, 0x1D164, prN}, // So [60] MUSICAL SYMBOL MULTIPLE MEASURE REST..MUSICAL SYMBOL ONE HUNDRED TWENTY-EIGHTH NOTE
+ {0x1D165, 0x1D166, prN}, // Mc [2] MUSICAL SYMBOL COMBINING STEM..MUSICAL SYMBOL COMBINING SPRECHGESANG STEM
+ {0x1D167, 0x1D169, prN}, // Mn [3] MUSICAL SYMBOL COMBINING TREMOLO-1..MUSICAL SYMBOL COMBINING TREMOLO-3
+ {0x1D16A, 0x1D16C, prN}, // So [3] MUSICAL SYMBOL FINGERED TREMOLO-1..MUSICAL SYMBOL FINGERED TREMOLO-3
+ {0x1D16D, 0x1D172, prN}, // Mc [6] MUSICAL SYMBOL COMBINING AUGMENTATION DOT..MUSICAL SYMBOL COMBINING FLAG-5
+ {0x1D173, 0x1D17A, prN}, // Cf [8] MUSICAL SYMBOL BEGIN BEAM..MUSICAL SYMBOL END PHRASE
+ {0x1D17B, 0x1D182, prN}, // Mn [8] MUSICAL SYMBOL COMBINING ACCENT..MUSICAL SYMBOL COMBINING LOURE
+ {0x1D183, 0x1D184, prN}, // So [2] MUSICAL SYMBOL ARPEGGIATO UP..MUSICAL SYMBOL ARPEGGIATO DOWN
+ {0x1D185, 0x1D18B, prN}, // Mn [7] MUSICAL SYMBOL COMBINING DOIT..MUSICAL SYMBOL COMBINING TRIPLE TONGUE
+ {0x1D18C, 0x1D1A9, prN}, // So [30] MUSICAL SYMBOL RINFORZANDO..MUSICAL SYMBOL DEGREE SLASH
+ {0x1D1AA, 0x1D1AD, prN}, // Mn [4] MUSICAL SYMBOL COMBINING DOWN BOW..MUSICAL SYMBOL COMBINING SNAP PIZZICATO
+ {0x1D1AE, 0x1D1EA, prN}, // So [61] MUSICAL SYMBOL PEDAL MARK..MUSICAL SYMBOL KORON
+ {0x1D200, 0x1D241, prN}, // So [66] GREEK VOCAL NOTATION SYMBOL-1..GREEK INSTRUMENTAL NOTATION SYMBOL-54
+ {0x1D242, 0x1D244, prN}, // Mn [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME
+ {0x1D245, 0x1D245, prN}, // So GREEK MUSICAL LEIMMA
+ {0x1D2C0, 0x1D2D3, prN}, // No [20] KAKTOVIK NUMERAL ZERO..KAKTOVIK NUMERAL NINETEEN
+ {0x1D2E0, 0x1D2F3, prN}, // No [20] MAYAN NUMERAL ZERO..MAYAN NUMERAL NINETEEN
+ {0x1D300, 0x1D356, prN}, // So [87] MONOGRAM FOR EARTH..TETRAGRAM FOR FOSTERING
+ {0x1D360, 0x1D378, prN}, // No [25] COUNTING ROD UNIT DIGIT ONE..TALLY MARK FIVE
+ {0x1D400, 0x1D454, prN}, // L& [85] MATHEMATICAL BOLD CAPITAL A..MATHEMATICAL ITALIC SMALL G
+ {0x1D456, 0x1D49C, prN}, // L& [71] MATHEMATICAL ITALIC SMALL I..MATHEMATICAL SCRIPT CAPITAL A
+ {0x1D49E, 0x1D49F, prN}, // Lu [2] MATHEMATICAL SCRIPT CAPITAL C..MATHEMATICAL SCRIPT CAPITAL D
+ {0x1D4A2, 0x1D4A2, prN}, // Lu MATHEMATICAL SCRIPT CAPITAL G
+ {0x1D4A5, 0x1D4A6, prN}, // Lu [2] MATHEMATICAL SCRIPT CAPITAL J..MATHEMATICAL SCRIPT CAPITAL K
+ {0x1D4A9, 0x1D4AC, prN}, // Lu [4] MATHEMATICAL SCRIPT CAPITAL N..MATHEMATICAL SCRIPT CAPITAL Q
+ {0x1D4AE, 0x1D4B9, prN}, // L& [12] MATHEMATICAL SCRIPT CAPITAL S..MATHEMATICAL SCRIPT SMALL D
+ {0x1D4BB, 0x1D4BB, prN}, // Ll MATHEMATICAL SCRIPT SMALL F
+ {0x1D4BD, 0x1D4C3, prN}, // Ll [7] MATHEMATICAL SCRIPT SMALL H..MATHEMATICAL SCRIPT SMALL N
+ {0x1D4C5, 0x1D505, prN}, // L& [65] MATHEMATICAL SCRIPT SMALL P..MATHEMATICAL FRAKTUR CAPITAL B
+ {0x1D507, 0x1D50A, prN}, // Lu [4] MATHEMATICAL FRAKTUR CAPITAL D..MATHEMATICAL FRAKTUR CAPITAL G
+ {0x1D50D, 0x1D514, prN}, // Lu [8] MATHEMATICAL FRAKTUR CAPITAL J..MATHEMATICAL FRAKTUR CAPITAL Q
+ {0x1D516, 0x1D51C, prN}, // Lu [7] MATHEMATICAL FRAKTUR CAPITAL S..MATHEMATICAL FRAKTUR CAPITAL Y
+ {0x1D51E, 0x1D539, prN}, // L& [28] MATHEMATICAL FRAKTUR SMALL A..MATHEMATICAL DOUBLE-STRUCK CAPITAL B
+ {0x1D53B, 0x1D53E, prN}, // Lu [4] MATHEMATICAL DOUBLE-STRUCK CAPITAL D..MATHEMATICAL DOUBLE-STRUCK CAPITAL G
+ {0x1D540, 0x1D544, prN}, // Lu [5] MATHEMATICAL DOUBLE-STRUCK CAPITAL I..MATHEMATICAL DOUBLE-STRUCK CAPITAL M
+ {0x1D546, 0x1D546, prN}, // Lu MATHEMATICAL DOUBLE-STRUCK CAPITAL O
+ {0x1D54A, 0x1D550, prN}, // Lu [7] MATHEMATICAL DOUBLE-STRUCK CAPITAL S..MATHEMATICAL DOUBLE-STRUCK CAPITAL Y
+ {0x1D552, 0x1D6A5, prN}, // L& [340] MATHEMATICAL DOUBLE-STRUCK SMALL A..MATHEMATICAL ITALIC SMALL DOTLESS J
+ {0x1D6A8, 0x1D6C0, prN}, // Lu [25] MATHEMATICAL BOLD CAPITAL ALPHA..MATHEMATICAL BOLD CAPITAL OMEGA
+ {0x1D6C1, 0x1D6C1, prN}, // Sm MATHEMATICAL BOLD NABLA
+ {0x1D6C2, 0x1D6DA, prN}, // Ll [25] MATHEMATICAL BOLD SMALL ALPHA..MATHEMATICAL BOLD SMALL OMEGA
+ {0x1D6DB, 0x1D6DB, prN}, // Sm MATHEMATICAL BOLD PARTIAL DIFFERENTIAL
+ {0x1D6DC, 0x1D6FA, prN}, // L& [31] MATHEMATICAL BOLD EPSILON SYMBOL..MATHEMATICAL ITALIC CAPITAL OMEGA
+ {0x1D6FB, 0x1D6FB, prN}, // Sm MATHEMATICAL ITALIC NABLA
+ {0x1D6FC, 0x1D714, prN}, // Ll [25] MATHEMATICAL ITALIC SMALL ALPHA..MATHEMATICAL ITALIC SMALL OMEGA
+ {0x1D715, 0x1D715, prN}, // Sm MATHEMATICAL ITALIC PARTIAL DIFFERENTIAL
+ {0x1D716, 0x1D734, prN}, // L& [31] MATHEMATICAL ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD ITALIC CAPITAL OMEGA
+ {0x1D735, 0x1D735, prN}, // Sm MATHEMATICAL BOLD ITALIC NABLA
+ {0x1D736, 0x1D74E, prN}, // Ll [25] MATHEMATICAL BOLD ITALIC SMALL ALPHA..MATHEMATICAL BOLD ITALIC SMALL OMEGA
+ {0x1D74F, 0x1D74F, prN}, // Sm MATHEMATICAL BOLD ITALIC PARTIAL DIFFERENTIAL
+ {0x1D750, 0x1D76E, prN}, // L& [31] MATHEMATICAL BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD CAPITAL OMEGA
+ {0x1D76F, 0x1D76F, prN}, // Sm MATHEMATICAL SANS-SERIF BOLD NABLA
+ {0x1D770, 0x1D788, prN}, // Ll [25] MATHEMATICAL SANS-SERIF BOLD SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD SMALL OMEGA
+ {0x1D789, 0x1D789, prN}, // Sm MATHEMATICAL SANS-SERIF BOLD PARTIAL DIFFERENTIAL
+ {0x1D78A, 0x1D7A8, prN}, // L& [31] MATHEMATICAL SANS-SERIF BOLD EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMEGA
+ {0x1D7A9, 0x1D7A9, prN}, // Sm MATHEMATICAL SANS-SERIF BOLD ITALIC NABLA
+ {0x1D7AA, 0x1D7C2, prN}, // Ll [25] MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMEGA
+ {0x1D7C3, 0x1D7C3, prN}, // Sm MATHEMATICAL SANS-SERIF BOLD ITALIC PARTIAL DIFFERENTIAL
+ {0x1D7C4, 0x1D7CB, prN}, // L& [8] MATHEMATICAL SANS-SERIF BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD SMALL DIGAMMA
+ {0x1D7CE, 0x1D7FF, prN}, // Nd [50] MATHEMATICAL BOLD DIGIT ZERO..MATHEMATICAL MONOSPACE DIGIT NINE
+ {0x1D800, 0x1D9FF, prN}, // So [512] SIGNWRITING HAND-FIST INDEX..SIGNWRITING HEAD
+ {0x1DA00, 0x1DA36, prN}, // Mn [55] SIGNWRITING HEAD RIM..SIGNWRITING AIR SUCKING IN
+ {0x1DA37, 0x1DA3A, prN}, // So [4] SIGNWRITING AIR BLOW SMALL ROTATIONS..SIGNWRITING BREATH EXHALE
+ {0x1DA3B, 0x1DA6C, prN}, // Mn [50] SIGNWRITING MOUTH CLOSED NEUTRAL..SIGNWRITING EXCITEMENT
+ {0x1DA6D, 0x1DA74, prN}, // So [8] SIGNWRITING SHOULDER HIP SPINE..SIGNWRITING TORSO-FLOORPLANE TWISTING
+ {0x1DA75, 0x1DA75, prN}, // Mn SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS
+ {0x1DA76, 0x1DA83, prN}, // So [14] SIGNWRITING LIMB COMBINATION..SIGNWRITING LOCATION DEPTH
+ {0x1DA84, 0x1DA84, prN}, // Mn SIGNWRITING LOCATION HEAD NECK
+ {0x1DA85, 0x1DA86, prN}, // So [2] SIGNWRITING LOCATION TORSO..SIGNWRITING LOCATION LIMBS DIGITS
+ {0x1DA87, 0x1DA8B, prN}, // Po [5] SIGNWRITING COMMA..SIGNWRITING PARENTHESIS
+ {0x1DA9B, 0x1DA9F, prN}, // Mn [5] SIGNWRITING FILL MODIFIER-2..SIGNWRITING FILL MODIFIER-6
+ {0x1DAA1, 0x1DAAF, prN}, // Mn [15] SIGNWRITING ROTATION MODIFIER-2..SIGNWRITING ROTATION MODIFIER-16
+ {0x1DF00, 0x1DF09, prN}, // Ll [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK
+ {0x1DF0A, 0x1DF0A, prN}, // Lo LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK
+ {0x1DF0B, 0x1DF1E, prN}, // Ll [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL
+ {0x1DF25, 0x1DF2A, prN}, // Ll [6] LATIN SMALL LETTER D WITH MID-HEIGHT LEFT HOOK..LATIN SMALL LETTER T WITH MID-HEIGHT LEFT HOOK
+ {0x1E000, 0x1E006, prN}, // Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
+ {0x1E008, 0x1E018, prN}, // Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
+ {0x1E01B, 0x1E021, prN}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
+ {0x1E023, 0x1E024, prN}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
+ {0x1E026, 0x1E02A, prN}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E030, 0x1E06D, prN}, // Lm [62] MODIFIER LETTER CYRILLIC SMALL A..MODIFIER LETTER CYRILLIC SMALL STRAIGHT U WITH STROKE
+ {0x1E08F, 0x1E08F, prN}, // Mn COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
+ {0x1E100, 0x1E12C, prN}, // Lo [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W
+ {0x1E130, 0x1E136, prN}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
+ {0x1E137, 0x1E13D, prN}, // Lm [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER
+ {0x1E140, 0x1E149, prN}, // Nd [10] NYIAKENG PUACHUE HMONG DIGIT ZERO..NYIAKENG PUACHUE HMONG DIGIT NINE
+ {0x1E14E, 0x1E14E, prN}, // Lo NYIAKENG PUACHUE HMONG LOGOGRAM NYAJ
+ {0x1E14F, 0x1E14F, prN}, // So NYIAKENG PUACHUE HMONG CIRCLED CA
+ {0x1E290, 0x1E2AD, prN}, // Lo [30] TOTO LETTER PA..TOTO LETTER A
+ {0x1E2AE, 0x1E2AE, prN}, // Mn TOTO SIGN RISING TONE
+ {0x1E2C0, 0x1E2EB, prN}, // Lo [44] WANCHO LETTER AA..WANCHO LETTER YIH
+ {0x1E2EC, 0x1E2EF, prN}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI
+ {0x1E2F0, 0x1E2F9, prN}, // Nd [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE
+ {0x1E2FF, 0x1E2FF, prN}, // Sc WANCHO NGUN SIGN
+ {0x1E4D0, 0x1E4EA, prN}, // Lo [27] NAG MUNDARI LETTER O..NAG MUNDARI LETTER ELL
+ {0x1E4EB, 0x1E4EB, prN}, // Lm NAG MUNDARI SIGN OJOD
+ {0x1E4EC, 0x1E4EF, prN}, // Mn [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH
+ {0x1E4F0, 0x1E4F9, prN}, // Nd [10] NAG MUNDARI DIGIT ZERO..NAG MUNDARI DIGIT NINE
+ {0x1E7E0, 0x1E7E6, prN}, // Lo [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO
+ {0x1E7E8, 0x1E7EB, prN}, // Lo [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE
+ {0x1E7ED, 0x1E7EE, prN}, // Lo [2] ETHIOPIC SYLLABLE GURAGE MWI..ETHIOPIC SYLLABLE GURAGE MWEE
+ {0x1E7F0, 0x1E7FE, prN}, // Lo [15] ETHIOPIC SYLLABLE GURAGE QWI..ETHIOPIC SYLLABLE GURAGE PWEE
+ {0x1E800, 0x1E8C4, prN}, // Lo [197] MENDE KIKAKUI SYLLABLE M001 KI..MENDE KIKAKUI SYLLABLE M060 NYON
+ {0x1E8C7, 0x1E8CF, prN}, // No [9] MENDE KIKAKUI DIGIT ONE..MENDE KIKAKUI DIGIT NINE
+ {0x1E8D0, 0x1E8D6, prN}, // Mn [7] MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS
+ {0x1E900, 0x1E943, prN}, // L& [68] ADLAM CAPITAL LETTER ALIF..ADLAM SMALL LETTER SHA
+ {0x1E944, 0x1E94A, prN}, // Mn [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA
+ {0x1E94B, 0x1E94B, prN}, // Lm ADLAM NASALIZATION MARK
+ {0x1E950, 0x1E959, prN}, // Nd [10] ADLAM DIGIT ZERO..ADLAM DIGIT NINE
+ {0x1E95E, 0x1E95F, prN}, // Po [2] ADLAM INITIAL EXCLAMATION MARK..ADLAM INITIAL QUESTION MARK
+ {0x1EC71, 0x1ECAB, prN}, // No [59] INDIC SIYAQ NUMBER ONE..INDIC SIYAQ NUMBER PREFIXED NINE
+ {0x1ECAC, 0x1ECAC, prN}, // So INDIC SIYAQ PLACEHOLDER
+ {0x1ECAD, 0x1ECAF, prN}, // No [3] INDIC SIYAQ FRACTION ONE QUARTER..INDIC SIYAQ FRACTION THREE QUARTERS
+ {0x1ECB0, 0x1ECB0, prN}, // Sc INDIC SIYAQ RUPEE MARK
+ {0x1ECB1, 0x1ECB4, prN}, // No [4] INDIC SIYAQ NUMBER ALTERNATE ONE..INDIC SIYAQ ALTERNATE LAKH MARK
+ {0x1ED01, 0x1ED2D, prN}, // No [45] OTTOMAN SIYAQ NUMBER ONE..OTTOMAN SIYAQ NUMBER NINETY THOUSAND
+ {0x1ED2E, 0x1ED2E, prN}, // So OTTOMAN SIYAQ MARRATAN
+ {0x1ED2F, 0x1ED3D, prN}, // No [15] OTTOMAN SIYAQ ALTERNATE NUMBER TWO..OTTOMAN SIYAQ FRACTION ONE SIXTH
+ {0x1EE00, 0x1EE03, prN}, // Lo [4] ARABIC MATHEMATICAL ALEF..ARABIC MATHEMATICAL DAL
+ {0x1EE05, 0x1EE1F, prN}, // Lo [27] ARABIC MATHEMATICAL WAW..ARABIC MATHEMATICAL DOTLESS QAF
+ {0x1EE21, 0x1EE22, prN}, // Lo [2] ARABIC MATHEMATICAL INITIAL BEH..ARABIC MATHEMATICAL INITIAL JEEM
+ {0x1EE24, 0x1EE24, prN}, // Lo ARABIC MATHEMATICAL INITIAL HEH
+ {0x1EE27, 0x1EE27, prN}, // Lo ARABIC MATHEMATICAL INITIAL HAH
+ {0x1EE29, 0x1EE32, prN}, // Lo [10] ARABIC MATHEMATICAL INITIAL YEH..ARABIC MATHEMATICAL INITIAL QAF
+ {0x1EE34, 0x1EE37, prN}, // Lo [4] ARABIC MATHEMATICAL INITIAL SHEEN..ARABIC MATHEMATICAL INITIAL KHAH
+ {0x1EE39, 0x1EE39, prN}, // Lo ARABIC MATHEMATICAL INITIAL DAD
+ {0x1EE3B, 0x1EE3B, prN}, // Lo ARABIC MATHEMATICAL INITIAL GHAIN
+ {0x1EE42, 0x1EE42, prN}, // Lo ARABIC MATHEMATICAL TAILED JEEM
+ {0x1EE47, 0x1EE47, prN}, // Lo ARABIC MATHEMATICAL TAILED HAH
+ {0x1EE49, 0x1EE49, prN}, // Lo ARABIC MATHEMATICAL TAILED YEH
+ {0x1EE4B, 0x1EE4B, prN}, // Lo ARABIC MATHEMATICAL TAILED LAM
+ {0x1EE4D, 0x1EE4F, prN}, // Lo [3] ARABIC MATHEMATICAL TAILED NOON..ARABIC MATHEMATICAL TAILED AIN
+ {0x1EE51, 0x1EE52, prN}, // Lo [2] ARABIC MATHEMATICAL TAILED SAD..ARABIC MATHEMATICAL TAILED QAF
+ {0x1EE54, 0x1EE54, prN}, // Lo ARABIC MATHEMATICAL TAILED SHEEN
+ {0x1EE57, 0x1EE57, prN}, // Lo ARABIC MATHEMATICAL TAILED KHAH
+ {0x1EE59, 0x1EE59, prN}, // Lo ARABIC MATHEMATICAL TAILED DAD
+ {0x1EE5B, 0x1EE5B, prN}, // Lo ARABIC MATHEMATICAL TAILED GHAIN
+ {0x1EE5D, 0x1EE5D, prN}, // Lo ARABIC MATHEMATICAL TAILED DOTLESS NOON
+ {0x1EE5F, 0x1EE5F, prN}, // Lo ARABIC MATHEMATICAL TAILED DOTLESS QAF
+ {0x1EE61, 0x1EE62, prN}, // Lo [2] ARABIC MATHEMATICAL STRETCHED BEH..ARABIC MATHEMATICAL STRETCHED JEEM
+ {0x1EE64, 0x1EE64, prN}, // Lo ARABIC MATHEMATICAL STRETCHED HEH
+ {0x1EE67, 0x1EE6A, prN}, // Lo [4] ARABIC MATHEMATICAL STRETCHED HAH..ARABIC MATHEMATICAL STRETCHED KAF
+ {0x1EE6C, 0x1EE72, prN}, // Lo [7] ARABIC MATHEMATICAL STRETCHED MEEM..ARABIC MATHEMATICAL STRETCHED QAF
+ {0x1EE74, 0x1EE77, prN}, // Lo [4] ARABIC MATHEMATICAL STRETCHED SHEEN..ARABIC MATHEMATICAL STRETCHED KHAH
+ {0x1EE79, 0x1EE7C, prN}, // Lo [4] ARABIC MATHEMATICAL STRETCHED DAD..ARABIC MATHEMATICAL STRETCHED DOTLESS BEH
+ {0x1EE7E, 0x1EE7E, prN}, // Lo ARABIC MATHEMATICAL STRETCHED DOTLESS FEH
+ {0x1EE80, 0x1EE89, prN}, // Lo [10] ARABIC MATHEMATICAL LOOPED ALEF..ARABIC MATHEMATICAL LOOPED YEH
+ {0x1EE8B, 0x1EE9B, prN}, // Lo [17] ARABIC MATHEMATICAL LOOPED LAM..ARABIC MATHEMATICAL LOOPED GHAIN
+ {0x1EEA1, 0x1EEA3, prN}, // Lo [3] ARABIC MATHEMATICAL DOUBLE-STRUCK BEH..ARABIC MATHEMATICAL DOUBLE-STRUCK DAL
+ {0x1EEA5, 0x1EEA9, prN}, // Lo [5] ARABIC MATHEMATICAL DOUBLE-STRUCK WAW..ARABIC MATHEMATICAL DOUBLE-STRUCK YEH
+ {0x1EEAB, 0x1EEBB, prN}, // Lo [17] ARABIC MATHEMATICAL DOUBLE-STRUCK LAM..ARABIC MATHEMATICAL DOUBLE-STRUCK GHAIN
+ {0x1EEF0, 0x1EEF1, prN}, // Sm [2] ARABIC MATHEMATICAL OPERATOR MEEM WITH HAH WITH TATWEEL..ARABIC MATHEMATICAL OPERATOR HAH WITH DAL
+ {0x1F000, 0x1F003, prN}, // So [4] MAHJONG TILE EAST WIND..MAHJONG TILE NORTH WIND
+ {0x1F004, 0x1F004, prW}, // So MAHJONG TILE RED DRAGON
+ {0x1F005, 0x1F02B, prN}, // So [39] MAHJONG TILE GREEN DRAGON..MAHJONG TILE BACK
+ {0x1F030, 0x1F093, prN}, // So [100] DOMINO TILE HORIZONTAL BACK..DOMINO TILE VERTICAL-06-06
+ {0x1F0A0, 0x1F0AE, prN}, // So [15] PLAYING CARD BACK..PLAYING CARD KING OF SPADES
+ {0x1F0B1, 0x1F0BF, prN}, // So [15] PLAYING CARD ACE OF HEARTS..PLAYING CARD RED JOKER
+ {0x1F0C1, 0x1F0CE, prN}, // So [14] PLAYING CARD ACE OF DIAMONDS..PLAYING CARD KING OF DIAMONDS
+ {0x1F0CF, 0x1F0CF, prW}, // So PLAYING CARD BLACK JOKER
+ {0x1F0D1, 0x1F0F5, prN}, // So [37] PLAYING CARD ACE OF CLUBS..PLAYING CARD TRUMP-21
+ {0x1F100, 0x1F10A, prA}, // No [11] DIGIT ZERO FULL STOP..DIGIT NINE COMMA
+ {0x1F10B, 0x1F10C, prN}, // No [2] DINGBAT CIRCLED SANS-SERIF DIGIT ZERO..DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT ZERO
+ {0x1F10D, 0x1F10F, prN}, // So [3] CIRCLED ZERO WITH SLASH..CIRCLED DOLLAR SIGN WITH OVERLAID BACKSLASH
+ {0x1F110, 0x1F12D, prA}, // So [30] PARENTHESIZED LATIN CAPITAL LETTER A..CIRCLED CD
+ {0x1F12E, 0x1F12F, prN}, // So [2] CIRCLED WZ..COPYLEFT SYMBOL
+ {0x1F130, 0x1F169, prA}, // So [58] SQUARED LATIN CAPITAL LETTER A..NEGATIVE CIRCLED LATIN CAPITAL LETTER Z
+ {0x1F16A, 0x1F16F, prN}, // So [6] RAISED MC SIGN..CIRCLED HUMAN FIGURE
+ {0x1F170, 0x1F18D, prA}, // So [30] NEGATIVE SQUARED LATIN CAPITAL LETTER A..NEGATIVE SQUARED SA
+ {0x1F18E, 0x1F18E, prW}, // So NEGATIVE SQUARED AB
+ {0x1F18F, 0x1F190, prA}, // So [2] NEGATIVE SQUARED WC..SQUARE DJ
+ {0x1F191, 0x1F19A, prW}, // So [10] SQUARED CL..SQUARED VS
+ {0x1F19B, 0x1F1AC, prA}, // So [18] SQUARED THREE D..SQUARED VOD
+ {0x1F1AD, 0x1F1AD, prN}, // So MASK WORK SYMBOL
+ {0x1F1E6, 0x1F1FF, prN}, // So [26] REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z
+ {0x1F200, 0x1F202, prW}, // So [3] SQUARE HIRAGANA HOKA..SQUARED KATAKANA SA
+ {0x1F210, 0x1F23B, prW}, // So [44] SQUARED CJK UNIFIED IDEOGRAPH-624B..SQUARED CJK UNIFIED IDEOGRAPH-914D
+ {0x1F240, 0x1F248, prW}, // So [9] TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-672C..TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-6557
+ {0x1F250, 0x1F251, prW}, // So [2] CIRCLED IDEOGRAPH ADVANTAGE..CIRCLED IDEOGRAPH ACCEPT
+ {0x1F260, 0x1F265, prW}, // So [6] ROUNDED SYMBOL FOR FU..ROUNDED SYMBOL FOR CAI
+ {0x1F300, 0x1F320, prW}, // So [33] CYCLONE..SHOOTING STAR
+ {0x1F321, 0x1F32C, prN}, // So [12] THERMOMETER..WIND BLOWING FACE
+ {0x1F32D, 0x1F335, prW}, // So [9] HOT DOG..CACTUS
+ {0x1F336, 0x1F336, prN}, // So HOT PEPPER
+ {0x1F337, 0x1F37C, prW}, // So [70] TULIP..BABY BOTTLE
+ {0x1F37D, 0x1F37D, prN}, // So FORK AND KNIFE WITH PLATE
+ {0x1F37E, 0x1F393, prW}, // So [22] BOTTLE WITH POPPING CORK..GRADUATION CAP
+ {0x1F394, 0x1F39F, prN}, // So [12] HEART WITH TIP ON THE LEFT..ADMISSION TICKETS
+ {0x1F3A0, 0x1F3CA, prW}, // So [43] CAROUSEL HORSE..SWIMMER
+ {0x1F3CB, 0x1F3CE, prN}, // So [4] WEIGHT LIFTER..RACING CAR
+ {0x1F3CF, 0x1F3D3, prW}, // So [5] CRICKET BAT AND BALL..TABLE TENNIS PADDLE AND BALL
+ {0x1F3D4, 0x1F3DF, prN}, // So [12] SNOW CAPPED MOUNTAIN..STADIUM
+ {0x1F3E0, 0x1F3F0, prW}, // So [17] HOUSE BUILDING..EUROPEAN CASTLE
+ {0x1F3F1, 0x1F3F3, prN}, // So [3] WHITE PENNANT..WAVING WHITE FLAG
+ {0x1F3F4, 0x1F3F4, prW}, // So WAVING BLACK FLAG
+ {0x1F3F5, 0x1F3F7, prN}, // So [3] ROSETTE..LABEL
+ {0x1F3F8, 0x1F3FA, prW}, // So [3] BADMINTON RACQUET AND SHUTTLECOCK..AMPHORA
+ {0x1F3FB, 0x1F3FF, prW}, // Sk [5] EMOJI MODIFIER FITZPATRICK TYPE-1-2..EMOJI MODIFIER FITZPATRICK TYPE-6
+ {0x1F400, 0x1F43E, prW}, // So [63] RAT..PAW PRINTS
+ {0x1F43F, 0x1F43F, prN}, // So CHIPMUNK
+ {0x1F440, 0x1F440, prW}, // So EYES
+ {0x1F441, 0x1F441, prN}, // So EYE
+ {0x1F442, 0x1F4FC, prW}, // So [187] EAR..VIDEOCASSETTE
+ {0x1F4FD, 0x1F4FE, prN}, // So [2] FILM PROJECTOR..PORTABLE STEREO
+ {0x1F4FF, 0x1F53D, prW}, // So [63] PRAYER BEADS..DOWN-POINTING SMALL RED TRIANGLE
+ {0x1F53E, 0x1F54A, prN}, // So [13] LOWER RIGHT SHADOWED WHITE CIRCLE..DOVE OF PEACE
+ {0x1F54B, 0x1F54E, prW}, // So [4] KAABA..MENORAH WITH NINE BRANCHES
+ {0x1F54F, 0x1F54F, prN}, // So BOWL OF HYGIEIA
+ {0x1F550, 0x1F567, prW}, // So [24] CLOCK FACE ONE OCLOCK..CLOCK FACE TWELVE-THIRTY
+ {0x1F568, 0x1F579, prN}, // So [18] RIGHT SPEAKER..JOYSTICK
+ {0x1F57A, 0x1F57A, prW}, // So MAN DANCING
+ {0x1F57B, 0x1F594, prN}, // So [26] LEFT HAND TELEPHONE RECEIVER..REVERSED VICTORY HAND
+ {0x1F595, 0x1F596, prW}, // So [2] REVERSED HAND WITH MIDDLE FINGER EXTENDED..RAISED HAND WITH PART BETWEEN MIDDLE AND RING FINGERS
+ {0x1F597, 0x1F5A3, prN}, // So [13] WHITE DOWN POINTING LEFT HAND INDEX..BLACK DOWN POINTING BACKHAND INDEX
+ {0x1F5A4, 0x1F5A4, prW}, // So BLACK HEART
+ {0x1F5A5, 0x1F5FA, prN}, // So [86] DESKTOP COMPUTER..WORLD MAP
+ {0x1F5FB, 0x1F5FF, prW}, // So [5] MOUNT FUJI..MOYAI
+ {0x1F600, 0x1F64F, prW}, // So [80] GRINNING FACE..PERSON WITH FOLDED HANDS
+ {0x1F650, 0x1F67F, prN}, // So [48] NORTH WEST POINTING LEAF..REVERSE CHECKER BOARD
+ {0x1F680, 0x1F6C5, prW}, // So [70] ROCKET..LEFT LUGGAGE
+ {0x1F6C6, 0x1F6CB, prN}, // So [6] TRIANGLE WITH ROUNDED CORNERS..COUCH AND LAMP
+ {0x1F6CC, 0x1F6CC, prW}, // So SLEEPING ACCOMMODATION
+ {0x1F6CD, 0x1F6CF, prN}, // So [3] SHOPPING BAGS..BED
+ {0x1F6D0, 0x1F6D2, prW}, // So [3] PLACE OF WORSHIP..SHOPPING TROLLEY
+ {0x1F6D3, 0x1F6D4, prN}, // So [2] STUPA..PAGODA
+ {0x1F6D5, 0x1F6D7, prW}, // So [3] HINDU TEMPLE..ELEVATOR
+ {0x1F6DC, 0x1F6DF, prW}, // So [4] WIRELESS..RING BUOY
+ {0x1F6E0, 0x1F6EA, prN}, // So [11] HAMMER AND WRENCH..NORTHEAST-POINTING AIRPLANE
+ {0x1F6EB, 0x1F6EC, prW}, // So [2] AIRPLANE DEPARTURE..AIRPLANE ARRIVING
+ {0x1F6F0, 0x1F6F3, prN}, // So [4] SATELLITE..PASSENGER SHIP
+ {0x1F6F4, 0x1F6FC, prW}, // So [9] SCOOTER..ROLLER SKATE
+ {0x1F700, 0x1F776, prN}, // So [119] ALCHEMICAL SYMBOL FOR QUINTESSENCE..LUNAR ECLIPSE
+ {0x1F77B, 0x1F77F, prN}, // So [5] HAUMEA..ORCUS
+ {0x1F780, 0x1F7D9, prN}, // So [90] BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..NINE POINTED WHITE STAR
+ {0x1F7E0, 0x1F7EB, prW}, // So [12] LARGE ORANGE CIRCLE..LARGE BROWN SQUARE
+ {0x1F7F0, 0x1F7F0, prW}, // So HEAVY EQUALS SIGN
+ {0x1F800, 0x1F80B, prN}, // So [12] LEFTWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD..DOWNWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD
+ {0x1F810, 0x1F847, prN}, // So [56] LEFTWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD..DOWNWARDS HEAVY ARROW
+ {0x1F850, 0x1F859, prN}, // So [10] LEFTWARDS SANS-SERIF ARROW..UP DOWN SANS-SERIF ARROW
+ {0x1F860, 0x1F887, prN}, // So [40] WIDE-HEADED LEFTWARDS LIGHT BARB ARROW..WIDE-HEADED SOUTH WEST VERY HEAVY BARB ARROW
+ {0x1F890, 0x1F8AD, prN}, // So [30] LEFTWARDS TRIANGLE ARROWHEAD..WHITE ARROW SHAFT WIDTH TWO THIRDS
+ {0x1F8B0, 0x1F8B1, prN}, // So [2] ARROW POINTING UPWARDS THEN NORTH WEST..ARROW POINTING RIGHTWARDS THEN CURVING SOUTH WEST
+ {0x1F900, 0x1F90B, prN}, // So [12] CIRCLED CROSS FORMEE WITH FOUR DOTS..DOWNWARD FACING NOTCHED HOOK WITH DOT
+ {0x1F90C, 0x1F93A, prW}, // So [47] PINCHED FINGERS..FENCER
+ {0x1F93B, 0x1F93B, prN}, // So MODERN PENTATHLON
+ {0x1F93C, 0x1F945, prW}, // So [10] WRESTLERS..GOAL NET
+ {0x1F946, 0x1F946, prN}, // So RIFLE
+ {0x1F947, 0x1F9FF, prW}, // So [185] FIRST PLACE MEDAL..NAZAR AMULET
+ {0x1FA00, 0x1FA53, prN}, // So [84] NEUTRAL CHESS KING..BLACK CHESS KNIGHT-BISHOP
+ {0x1FA60, 0x1FA6D, prN}, // So [14] XIANGQI RED GENERAL..XIANGQI BLACK SOLDIER
+ {0x1FA70, 0x1FA7C, prW}, // So [13] BALLET SHOES..CRUTCH
+ {0x1FA80, 0x1FA88, prW}, // So [9] YO-YO..FLUTE
+ {0x1FA90, 0x1FABD, prW}, // So [46] RINGED PLANET..WING
+ {0x1FABF, 0x1FAC5, prW}, // So [7] GOOSE..PERSON WITH CROWN
+ {0x1FACE, 0x1FADB, prW}, // So [14] MOOSE..PEA POD
+ {0x1FAE0, 0x1FAE8, prW}, // So [9] MELTING FACE..SHAKING FACE
+ {0x1FAF0, 0x1FAF8, prW}, // So [9] HAND WITH INDEX FINGER AND THUMB CROSSED..RIGHTWARDS PUSHING HAND
+ {0x1FB00, 0x1FB92, prN}, // So [147] BLOCK SEXTANT-1..UPPER HALF INVERSE MEDIUM SHADE AND LOWER HALF BLOCK
+ {0x1FB94, 0x1FBCA, prN}, // So [55] LEFT HALF INVERSE MEDIUM SHADE AND RIGHT HALF BLOCK..WHITE UP-POINTING CHEVRON
+ {0x1FBF0, 0x1FBF9, prN}, // Nd [10] SEGMENTED DIGIT ZERO..SEGMENTED DIGIT NINE
+ {0x20000, 0x2A6DF, prW}, // Lo [42720] CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6DF
+ {0x2A6E0, 0x2A6FF, prW}, // Cn [32] ..
+ {0x2A700, 0x2B739, prW}, // Lo [4154] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B739
+ {0x2B73A, 0x2B73F, prW}, // Cn [6] ..
+ {0x2B740, 0x2B81D, prW}, // Lo [222] CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D
+ {0x2B81E, 0x2B81F, prW}, // Cn [2] ..
+ {0x2B820, 0x2CEA1, prW}, // Lo [5762] CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1
+ {0x2CEA2, 0x2CEAF, prW}, // Cn [14] ..
+ {0x2CEB0, 0x2EBE0, prW}, // Lo [7473] CJK UNIFIED IDEOGRAPH-2CEB0..CJK UNIFIED IDEOGRAPH-2EBE0
+ {0x2EBE1, 0x2F7FF, prW}, // Cn [3103] ..
+ {0x2F800, 0x2FA1D, prW}, // Lo [542] CJK COMPATIBILITY IDEOGRAPH-2F800..CJK COMPATIBILITY IDEOGRAPH-2FA1D
+ {0x2FA1E, 0x2FA1F, prW}, // Cn [2] ..
+ {0x2FA20, 0x2FFFD, prW}, // Cn [1502] ..
+ {0x30000, 0x3134A, prW}, // Lo [4939] CJK UNIFIED IDEOGRAPH-30000..CJK UNIFIED IDEOGRAPH-3134A
+ {0x3134B, 0x3134F, prW}, // Cn [5] ..
+ {0x31350, 0x323AF, prW}, // Lo [4192] CJK UNIFIED IDEOGRAPH-31350..CJK UNIFIED IDEOGRAPH-323AF
+ {0x323B0, 0x3FFFD, prW}, // Cn [56398] ..
+ {0xE0001, 0xE0001, prN}, // Cf LANGUAGE TAG
+ {0xE0020, 0xE007F, prN}, // Cf [96] TAG SPACE..CANCEL TAG
+ {0xE0100, 0xE01EF, prA}, // Mn [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256
+ {0xF0000, 0xFFFFD, prA}, // Co [65534] ..
+ {0x100000, 0x10FFFD, prA}, // Co [65534] ..
+}
diff --git a/vendor/github.com/rivo/uniseg/emojipresentation.go b/vendor/github.com/rivo/uniseg/emojipresentation.go
new file mode 100644
index 000000000..9b5f499c4
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/emojipresentation.go
@@ -0,0 +1,295 @@
+// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+package uniseg
+
+// emojiPresentation are taken from
+//
+// and
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
+// ("Extended_Pictographic" only)
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var emojiPresentation = [][3]int{
+ {0x231A, 0x231B, prEmojiPresentation}, // E0.6 [2] (⌚..⌛) watch..hourglass done
+ {0x23E9, 0x23EC, prEmojiPresentation}, // E0.6 [4] (⏩..⏬) fast-forward button..fast down button
+ {0x23F0, 0x23F0, prEmojiPresentation}, // E0.6 [1] (⏰) alarm clock
+ {0x23F3, 0x23F3, prEmojiPresentation}, // E0.6 [1] (⏳) hourglass not done
+ {0x25FD, 0x25FE, prEmojiPresentation}, // E0.6 [2] (◽..◾) white medium-small square..black medium-small square
+ {0x2614, 0x2615, prEmojiPresentation}, // E0.6 [2] (☔..☕) umbrella with rain drops..hot beverage
+ {0x2648, 0x2653, prEmojiPresentation}, // E0.6 [12] (♈..♓) Aries..Pisces
+ {0x267F, 0x267F, prEmojiPresentation}, // E0.6 [1] (♿) wheelchair symbol
+ {0x2693, 0x2693, prEmojiPresentation}, // E0.6 [1] (⚓) anchor
+ {0x26A1, 0x26A1, prEmojiPresentation}, // E0.6 [1] (⚡) high voltage
+ {0x26AA, 0x26AB, prEmojiPresentation}, // E0.6 [2] (⚪..⚫) white circle..black circle
+ {0x26BD, 0x26BE, prEmojiPresentation}, // E0.6 [2] (⚽..⚾) soccer ball..baseball
+ {0x26C4, 0x26C5, prEmojiPresentation}, // E0.6 [2] (⛄..⛅) snowman without snow..sun behind cloud
+ {0x26CE, 0x26CE, prEmojiPresentation}, // E0.6 [1] (⛎) Ophiuchus
+ {0x26D4, 0x26D4, prEmojiPresentation}, // E0.6 [1] (⛔) no entry
+ {0x26EA, 0x26EA, prEmojiPresentation}, // E0.6 [1] (⛪) church
+ {0x26F2, 0x26F3, prEmojiPresentation}, // E0.6 [2] (⛲..⛳) fountain..flag in hole
+ {0x26F5, 0x26F5, prEmojiPresentation}, // E0.6 [1] (⛵) sailboat
+ {0x26FA, 0x26FA, prEmojiPresentation}, // E0.6 [1] (⛺) tent
+ {0x26FD, 0x26FD, prEmojiPresentation}, // E0.6 [1] (⛽) fuel pump
+ {0x2705, 0x2705, prEmojiPresentation}, // E0.6 [1] (✅) check mark button
+ {0x270A, 0x270B, prEmojiPresentation}, // E0.6 [2] (✊..✋) raised fist..raised hand
+ {0x2728, 0x2728, prEmojiPresentation}, // E0.6 [1] (✨) sparkles
+ {0x274C, 0x274C, prEmojiPresentation}, // E0.6 [1] (❌) cross mark
+ {0x274E, 0x274E, prEmojiPresentation}, // E0.6 [1] (❎) cross mark button
+ {0x2753, 0x2755, prEmojiPresentation}, // E0.6 [3] (❓..❕) red question mark..white exclamation mark
+ {0x2757, 0x2757, prEmojiPresentation}, // E0.6 [1] (❗) red exclamation mark
+ {0x2795, 0x2797, prEmojiPresentation}, // E0.6 [3] (➕..➗) plus..divide
+ {0x27B0, 0x27B0, prEmojiPresentation}, // E0.6 [1] (➰) curly loop
+ {0x27BF, 0x27BF, prEmojiPresentation}, // E1.0 [1] (➿) double curly loop
+ {0x2B1B, 0x2B1C, prEmojiPresentation}, // E0.6 [2] (⬛..⬜) black large square..white large square
+ {0x2B50, 0x2B50, prEmojiPresentation}, // E0.6 [1] (⭐) star
+ {0x2B55, 0x2B55, prEmojiPresentation}, // E0.6 [1] (⭕) hollow red circle
+ {0x1F004, 0x1F004, prEmojiPresentation}, // E0.6 [1] (🀄) mahjong red dragon
+ {0x1F0CF, 0x1F0CF, prEmojiPresentation}, // E0.6 [1] (🃏) joker
+ {0x1F18E, 0x1F18E, prEmojiPresentation}, // E0.6 [1] (🆎) AB button (blood type)
+ {0x1F191, 0x1F19A, prEmojiPresentation}, // E0.6 [10] (🆑..🆚) CL button..VS button
+ {0x1F1E6, 0x1F1FF, prEmojiPresentation}, // E0.0 [26] (🇦..🇿) regional indicator symbol letter a..regional indicator symbol letter z
+ {0x1F201, 0x1F201, prEmojiPresentation}, // E0.6 [1] (🈁) Japanese “here” button
+ {0x1F21A, 0x1F21A, prEmojiPresentation}, // E0.6 [1] (🈚) Japanese “free of charge” button
+ {0x1F22F, 0x1F22F, prEmojiPresentation}, // E0.6 [1] (🈯) Japanese “reserved” button
+ {0x1F232, 0x1F236, prEmojiPresentation}, // E0.6 [5] (🈲..🈶) Japanese “prohibited” button..Japanese “not free of charge” button
+ {0x1F238, 0x1F23A, prEmojiPresentation}, // E0.6 [3] (🈸..🈺) Japanese “application” button..Japanese “open for business” button
+ {0x1F250, 0x1F251, prEmojiPresentation}, // E0.6 [2] (🉐..🉑) Japanese “bargain” button..Japanese “acceptable” button
+ {0x1F300, 0x1F30C, prEmojiPresentation}, // E0.6 [13] (🌀..🌌) cyclone..milky way
+ {0x1F30D, 0x1F30E, prEmojiPresentation}, // E0.7 [2] (🌍..🌎) globe showing Europe-Africa..globe showing Americas
+ {0x1F30F, 0x1F30F, prEmojiPresentation}, // E0.6 [1] (🌏) globe showing Asia-Australia
+ {0x1F310, 0x1F310, prEmojiPresentation}, // E1.0 [1] (🌐) globe with meridians
+ {0x1F311, 0x1F311, prEmojiPresentation}, // E0.6 [1] (🌑) new moon
+ {0x1F312, 0x1F312, prEmojiPresentation}, // E1.0 [1] (🌒) waxing crescent moon
+ {0x1F313, 0x1F315, prEmojiPresentation}, // E0.6 [3] (🌓..🌕) first quarter moon..full moon
+ {0x1F316, 0x1F318, prEmojiPresentation}, // E1.0 [3] (🌖..🌘) waning gibbous moon..waning crescent moon
+ {0x1F319, 0x1F319, prEmojiPresentation}, // E0.6 [1] (🌙) crescent moon
+ {0x1F31A, 0x1F31A, prEmojiPresentation}, // E1.0 [1] (🌚) new moon face
+ {0x1F31B, 0x1F31B, prEmojiPresentation}, // E0.6 [1] (🌛) first quarter moon face
+ {0x1F31C, 0x1F31C, prEmojiPresentation}, // E0.7 [1] (🌜) last quarter moon face
+ {0x1F31D, 0x1F31E, prEmojiPresentation}, // E1.0 [2] (🌝..🌞) full moon face..sun with face
+ {0x1F31F, 0x1F320, prEmojiPresentation}, // E0.6 [2] (🌟..🌠) glowing star..shooting star
+ {0x1F32D, 0x1F32F, prEmojiPresentation}, // E1.0 [3] (🌭..🌯) hot dog..burrito
+ {0x1F330, 0x1F331, prEmojiPresentation}, // E0.6 [2] (🌰..🌱) chestnut..seedling
+ {0x1F332, 0x1F333, prEmojiPresentation}, // E1.0 [2] (🌲..🌳) evergreen tree..deciduous tree
+ {0x1F334, 0x1F335, prEmojiPresentation}, // E0.6 [2] (🌴..🌵) palm tree..cactus
+ {0x1F337, 0x1F34A, prEmojiPresentation}, // E0.6 [20] (🌷..🍊) tulip..tangerine
+ {0x1F34B, 0x1F34B, prEmojiPresentation}, // E1.0 [1] (🍋) lemon
+ {0x1F34C, 0x1F34F, prEmojiPresentation}, // E0.6 [4] (🍌..🍏) banana..green apple
+ {0x1F350, 0x1F350, prEmojiPresentation}, // E1.0 [1] (🍐) pear
+ {0x1F351, 0x1F37B, prEmojiPresentation}, // E0.6 [43] (🍑..🍻) peach..clinking beer mugs
+ {0x1F37C, 0x1F37C, prEmojiPresentation}, // E1.0 [1] (🍼) baby bottle
+ {0x1F37E, 0x1F37F, prEmojiPresentation}, // E1.0 [2] (🍾..🍿) bottle with popping cork..popcorn
+ {0x1F380, 0x1F393, prEmojiPresentation}, // E0.6 [20] (🎀..🎓) ribbon..graduation cap
+ {0x1F3A0, 0x1F3C4, prEmojiPresentation}, // E0.6 [37] (🎠..🏄) carousel horse..person surfing
+ {0x1F3C5, 0x1F3C5, prEmojiPresentation}, // E1.0 [1] (🏅) sports medal
+ {0x1F3C6, 0x1F3C6, prEmojiPresentation}, // E0.6 [1] (🏆) trophy
+ {0x1F3C7, 0x1F3C7, prEmojiPresentation}, // E1.0 [1] (🏇) horse racing
+ {0x1F3C8, 0x1F3C8, prEmojiPresentation}, // E0.6 [1] (🏈) american football
+ {0x1F3C9, 0x1F3C9, prEmojiPresentation}, // E1.0 [1] (🏉) rugby football
+ {0x1F3CA, 0x1F3CA, prEmojiPresentation}, // E0.6 [1] (🏊) person swimming
+ {0x1F3CF, 0x1F3D3, prEmojiPresentation}, // E1.0 [5] (🏏..🏓) cricket game..ping pong
+ {0x1F3E0, 0x1F3E3, prEmojiPresentation}, // E0.6 [4] (🏠..🏣) house..Japanese post office
+ {0x1F3E4, 0x1F3E4, prEmojiPresentation}, // E1.0 [1] (🏤) post office
+ {0x1F3E5, 0x1F3F0, prEmojiPresentation}, // E0.6 [12] (🏥..🏰) hospital..castle
+ {0x1F3F4, 0x1F3F4, prEmojiPresentation}, // E1.0 [1] (🏴) black flag
+ {0x1F3F8, 0x1F407, prEmojiPresentation}, // E1.0 [16] (🏸..🐇) badminton..rabbit
+ {0x1F408, 0x1F408, prEmojiPresentation}, // E0.7 [1] (🐈) cat
+ {0x1F409, 0x1F40B, prEmojiPresentation}, // E1.0 [3] (🐉..🐋) dragon..whale
+ {0x1F40C, 0x1F40E, prEmojiPresentation}, // E0.6 [3] (🐌..🐎) snail..horse
+ {0x1F40F, 0x1F410, prEmojiPresentation}, // E1.0 [2] (🐏..🐐) ram..goat
+ {0x1F411, 0x1F412, prEmojiPresentation}, // E0.6 [2] (🐑..🐒) ewe..monkey
+ {0x1F413, 0x1F413, prEmojiPresentation}, // E1.0 [1] (🐓) rooster
+ {0x1F414, 0x1F414, prEmojiPresentation}, // E0.6 [1] (🐔) chicken
+ {0x1F415, 0x1F415, prEmojiPresentation}, // E0.7 [1] (🐕) dog
+ {0x1F416, 0x1F416, prEmojiPresentation}, // E1.0 [1] (🐖) pig
+ {0x1F417, 0x1F429, prEmojiPresentation}, // E0.6 [19] (🐗..🐩) boar..poodle
+ {0x1F42A, 0x1F42A, prEmojiPresentation}, // E1.0 [1] (🐪) camel
+ {0x1F42B, 0x1F43E, prEmojiPresentation}, // E0.6 [20] (🐫..🐾) two-hump camel..paw prints
+ {0x1F440, 0x1F440, prEmojiPresentation}, // E0.6 [1] (👀) eyes
+ {0x1F442, 0x1F464, prEmojiPresentation}, // E0.6 [35] (👂..👤) ear..bust in silhouette
+ {0x1F465, 0x1F465, prEmojiPresentation}, // E1.0 [1] (👥) busts in silhouette
+ {0x1F466, 0x1F46B, prEmojiPresentation}, // E0.6 [6] (👦..👫) boy..woman and man holding hands
+ {0x1F46C, 0x1F46D, prEmojiPresentation}, // E1.0 [2] (👬..👭) men holding hands..women holding hands
+ {0x1F46E, 0x1F4AC, prEmojiPresentation}, // E0.6 [63] (👮..💬) police officer..speech balloon
+ {0x1F4AD, 0x1F4AD, prEmojiPresentation}, // E1.0 [1] (💭) thought balloon
+ {0x1F4AE, 0x1F4B5, prEmojiPresentation}, // E0.6 [8] (💮..💵) white flower..dollar banknote
+ {0x1F4B6, 0x1F4B7, prEmojiPresentation}, // E1.0 [2] (💶..💷) euro banknote..pound banknote
+ {0x1F4B8, 0x1F4EB, prEmojiPresentation}, // E0.6 [52] (💸..📫) money with wings..closed mailbox with raised flag
+ {0x1F4EC, 0x1F4ED, prEmojiPresentation}, // E0.7 [2] (📬..📭) open mailbox with raised flag..open mailbox with lowered flag
+ {0x1F4EE, 0x1F4EE, prEmojiPresentation}, // E0.6 [1] (📮) postbox
+ {0x1F4EF, 0x1F4EF, prEmojiPresentation}, // E1.0 [1] (📯) postal horn
+ {0x1F4F0, 0x1F4F4, prEmojiPresentation}, // E0.6 [5] (📰..📴) newspaper..mobile phone off
+ {0x1F4F5, 0x1F4F5, prEmojiPresentation}, // E1.0 [1] (📵) no mobile phones
+ {0x1F4F6, 0x1F4F7, prEmojiPresentation}, // E0.6 [2] (📶..📷) antenna bars..camera
+ {0x1F4F8, 0x1F4F8, prEmojiPresentation}, // E1.0 [1] (📸) camera with flash
+ {0x1F4F9, 0x1F4FC, prEmojiPresentation}, // E0.6 [4] (📹..📼) video camera..videocassette
+ {0x1F4FF, 0x1F502, prEmojiPresentation}, // E1.0 [4] (📿..🔂) prayer beads..repeat single button
+ {0x1F503, 0x1F503, prEmojiPresentation}, // E0.6 [1] (🔃) clockwise vertical arrows
+ {0x1F504, 0x1F507, prEmojiPresentation}, // E1.0 [4] (🔄..🔇) counterclockwise arrows button..muted speaker
+ {0x1F508, 0x1F508, prEmojiPresentation}, // E0.7 [1] (🔈) speaker low volume
+ {0x1F509, 0x1F509, prEmojiPresentation}, // E1.0 [1] (🔉) speaker medium volume
+ {0x1F50A, 0x1F514, prEmojiPresentation}, // E0.6 [11] (🔊..🔔) speaker high volume..bell
+ {0x1F515, 0x1F515, prEmojiPresentation}, // E1.0 [1] (🔕) bell with slash
+ {0x1F516, 0x1F52B, prEmojiPresentation}, // E0.6 [22] (🔖..🔫) bookmark..water pistol
+ {0x1F52C, 0x1F52D, prEmojiPresentation}, // E1.0 [2] (🔬..🔭) microscope..telescope
+ {0x1F52E, 0x1F53D, prEmojiPresentation}, // E0.6 [16] (🔮..🔽) crystal ball..downwards button
+ {0x1F54B, 0x1F54E, prEmojiPresentation}, // E1.0 [4] (🕋..🕎) kaaba..menorah
+ {0x1F550, 0x1F55B, prEmojiPresentation}, // E0.6 [12] (🕐..🕛) one o’clock..twelve o’clock
+ {0x1F55C, 0x1F567, prEmojiPresentation}, // E0.7 [12] (🕜..🕧) one-thirty..twelve-thirty
+ {0x1F57A, 0x1F57A, prEmojiPresentation}, // E3.0 [1] (🕺) man dancing
+ {0x1F595, 0x1F596, prEmojiPresentation}, // E1.0 [2] (🖕..🖖) middle finger..vulcan salute
+ {0x1F5A4, 0x1F5A4, prEmojiPresentation}, // E3.0 [1] (🖤) black heart
+ {0x1F5FB, 0x1F5FF, prEmojiPresentation}, // E0.6 [5] (🗻..🗿) mount fuji..moai
+ {0x1F600, 0x1F600, prEmojiPresentation}, // E1.0 [1] (😀) grinning face
+ {0x1F601, 0x1F606, prEmojiPresentation}, // E0.6 [6] (😁..😆) beaming face with smiling eyes..grinning squinting face
+ {0x1F607, 0x1F608, prEmojiPresentation}, // E1.0 [2] (😇..😈) smiling face with halo..smiling face with horns
+ {0x1F609, 0x1F60D, prEmojiPresentation}, // E0.6 [5] (😉..😍) winking face..smiling face with heart-eyes
+ {0x1F60E, 0x1F60E, prEmojiPresentation}, // E1.0 [1] (😎) smiling face with sunglasses
+ {0x1F60F, 0x1F60F, prEmojiPresentation}, // E0.6 [1] (😏) smirking face
+ {0x1F610, 0x1F610, prEmojiPresentation}, // E0.7 [1] (😐) neutral face
+ {0x1F611, 0x1F611, prEmojiPresentation}, // E1.0 [1] (😑) expressionless face
+ {0x1F612, 0x1F614, prEmojiPresentation}, // E0.6 [3] (😒..😔) unamused face..pensive face
+ {0x1F615, 0x1F615, prEmojiPresentation}, // E1.0 [1] (😕) confused face
+ {0x1F616, 0x1F616, prEmojiPresentation}, // E0.6 [1] (😖) confounded face
+ {0x1F617, 0x1F617, prEmojiPresentation}, // E1.0 [1] (😗) kissing face
+ {0x1F618, 0x1F618, prEmojiPresentation}, // E0.6 [1] (😘) face blowing a kiss
+ {0x1F619, 0x1F619, prEmojiPresentation}, // E1.0 [1] (😙) kissing face with smiling eyes
+ {0x1F61A, 0x1F61A, prEmojiPresentation}, // E0.6 [1] (😚) kissing face with closed eyes
+ {0x1F61B, 0x1F61B, prEmojiPresentation}, // E1.0 [1] (😛) face with tongue
+ {0x1F61C, 0x1F61E, prEmojiPresentation}, // E0.6 [3] (😜..😞) winking face with tongue..disappointed face
+ {0x1F61F, 0x1F61F, prEmojiPresentation}, // E1.0 [1] (😟) worried face
+ {0x1F620, 0x1F625, prEmojiPresentation}, // E0.6 [6] (😠..😥) angry face..sad but relieved face
+ {0x1F626, 0x1F627, prEmojiPresentation}, // E1.0 [2] (😦..😧) frowning face with open mouth..anguished face
+ {0x1F628, 0x1F62B, prEmojiPresentation}, // E0.6 [4] (😨..😫) fearful face..tired face
+ {0x1F62C, 0x1F62C, prEmojiPresentation}, // E1.0 [1] (😬) grimacing face
+ {0x1F62D, 0x1F62D, prEmojiPresentation}, // E0.6 [1] (😭) loudly crying face
+ {0x1F62E, 0x1F62F, prEmojiPresentation}, // E1.0 [2] (😮..😯) face with open mouth..hushed face
+ {0x1F630, 0x1F633, prEmojiPresentation}, // E0.6 [4] (😰..😳) anxious face with sweat..flushed face
+ {0x1F634, 0x1F634, prEmojiPresentation}, // E1.0 [1] (😴) sleeping face
+ {0x1F635, 0x1F635, prEmojiPresentation}, // E0.6 [1] (😵) face with crossed-out eyes
+ {0x1F636, 0x1F636, prEmojiPresentation}, // E1.0 [1] (😶) face without mouth
+ {0x1F637, 0x1F640, prEmojiPresentation}, // E0.6 [10] (😷..🙀) face with medical mask..weary cat
+ {0x1F641, 0x1F644, prEmojiPresentation}, // E1.0 [4] (🙁..🙄) slightly frowning face..face with rolling eyes
+ {0x1F645, 0x1F64F, prEmojiPresentation}, // E0.6 [11] (🙅..🙏) person gesturing NO..folded hands
+ {0x1F680, 0x1F680, prEmojiPresentation}, // E0.6 [1] (🚀) rocket
+ {0x1F681, 0x1F682, prEmojiPresentation}, // E1.0 [2] (🚁..🚂) helicopter..locomotive
+ {0x1F683, 0x1F685, prEmojiPresentation}, // E0.6 [3] (🚃..🚅) railway car..bullet train
+ {0x1F686, 0x1F686, prEmojiPresentation}, // E1.0 [1] (🚆) train
+ {0x1F687, 0x1F687, prEmojiPresentation}, // E0.6 [1] (🚇) metro
+ {0x1F688, 0x1F688, prEmojiPresentation}, // E1.0 [1] (🚈) light rail
+ {0x1F689, 0x1F689, prEmojiPresentation}, // E0.6 [1] (🚉) station
+ {0x1F68A, 0x1F68B, prEmojiPresentation}, // E1.0 [2] (🚊..🚋) tram..tram car
+ {0x1F68C, 0x1F68C, prEmojiPresentation}, // E0.6 [1] (🚌) bus
+ {0x1F68D, 0x1F68D, prEmojiPresentation}, // E0.7 [1] (🚍) oncoming bus
+ {0x1F68E, 0x1F68E, prEmojiPresentation}, // E1.0 [1] (🚎) trolleybus
+ {0x1F68F, 0x1F68F, prEmojiPresentation}, // E0.6 [1] (🚏) bus stop
+ {0x1F690, 0x1F690, prEmojiPresentation}, // E1.0 [1] (🚐) minibus
+ {0x1F691, 0x1F693, prEmojiPresentation}, // E0.6 [3] (🚑..🚓) ambulance..police car
+ {0x1F694, 0x1F694, prEmojiPresentation}, // E0.7 [1] (🚔) oncoming police car
+ {0x1F695, 0x1F695, prEmojiPresentation}, // E0.6 [1] (🚕) taxi
+ {0x1F696, 0x1F696, prEmojiPresentation}, // E1.0 [1] (🚖) oncoming taxi
+ {0x1F697, 0x1F697, prEmojiPresentation}, // E0.6 [1] (🚗) automobile
+ {0x1F698, 0x1F698, prEmojiPresentation}, // E0.7 [1] (🚘) oncoming automobile
+ {0x1F699, 0x1F69A, prEmojiPresentation}, // E0.6 [2] (🚙..🚚) sport utility vehicle..delivery truck
+ {0x1F69B, 0x1F6A1, prEmojiPresentation}, // E1.0 [7] (🚛..🚡) articulated lorry..aerial tramway
+ {0x1F6A2, 0x1F6A2, prEmojiPresentation}, // E0.6 [1] (🚢) ship
+ {0x1F6A3, 0x1F6A3, prEmojiPresentation}, // E1.0 [1] (🚣) person rowing boat
+ {0x1F6A4, 0x1F6A5, prEmojiPresentation}, // E0.6 [2] (🚤..🚥) speedboat..horizontal traffic light
+ {0x1F6A6, 0x1F6A6, prEmojiPresentation}, // E1.0 [1] (🚦) vertical traffic light
+ {0x1F6A7, 0x1F6AD, prEmojiPresentation}, // E0.6 [7] (🚧..🚭) construction..no smoking
+ {0x1F6AE, 0x1F6B1, prEmojiPresentation}, // E1.0 [4] (🚮..🚱) litter in bin sign..non-potable water
+ {0x1F6B2, 0x1F6B2, prEmojiPresentation}, // E0.6 [1] (🚲) bicycle
+ {0x1F6B3, 0x1F6B5, prEmojiPresentation}, // E1.0 [3] (🚳..🚵) no bicycles..person mountain biking
+ {0x1F6B6, 0x1F6B6, prEmojiPresentation}, // E0.6 [1] (🚶) person walking
+ {0x1F6B7, 0x1F6B8, prEmojiPresentation}, // E1.0 [2] (🚷..🚸) no pedestrians..children crossing
+ {0x1F6B9, 0x1F6BE, prEmojiPresentation}, // E0.6 [6] (🚹..🚾) men’s room..water closet
+ {0x1F6BF, 0x1F6BF, prEmojiPresentation}, // E1.0 [1] (🚿) shower
+ {0x1F6C0, 0x1F6C0, prEmojiPresentation}, // E0.6 [1] (🛀) person taking bath
+ {0x1F6C1, 0x1F6C5, prEmojiPresentation}, // E1.0 [5] (🛁..🛅) bathtub..left luggage
+ {0x1F6CC, 0x1F6CC, prEmojiPresentation}, // E1.0 [1] (🛌) person in bed
+ {0x1F6D0, 0x1F6D0, prEmojiPresentation}, // E1.0 [1] (🛐) place of worship
+ {0x1F6D1, 0x1F6D2, prEmojiPresentation}, // E3.0 [2] (🛑..🛒) stop sign..shopping cart
+ {0x1F6D5, 0x1F6D5, prEmojiPresentation}, // E12.0 [1] (🛕) hindu temple
+ {0x1F6D6, 0x1F6D7, prEmojiPresentation}, // E13.0 [2] (🛖..🛗) hut..elevator
+ {0x1F6DC, 0x1F6DC, prEmojiPresentation}, // E15.0 [1] (🛜) wireless
+ {0x1F6DD, 0x1F6DF, prEmojiPresentation}, // E14.0 [3] (🛝..🛟) playground slide..ring buoy
+ {0x1F6EB, 0x1F6EC, prEmojiPresentation}, // E1.0 [2] (🛫..🛬) airplane departure..airplane arrival
+ {0x1F6F4, 0x1F6F6, prEmojiPresentation}, // E3.0 [3] (🛴..🛶) kick scooter..canoe
+ {0x1F6F7, 0x1F6F8, prEmojiPresentation}, // E5.0 [2] (🛷..🛸) sled..flying saucer
+ {0x1F6F9, 0x1F6F9, prEmojiPresentation}, // E11.0 [1] (🛹) skateboard
+ {0x1F6FA, 0x1F6FA, prEmojiPresentation}, // E12.0 [1] (🛺) auto rickshaw
+ {0x1F6FB, 0x1F6FC, prEmojiPresentation}, // E13.0 [2] (🛻..🛼) pickup truck..roller skate
+ {0x1F7E0, 0x1F7EB, prEmojiPresentation}, // E12.0 [12] (🟠..🟫) orange circle..brown square
+ {0x1F7F0, 0x1F7F0, prEmojiPresentation}, // E14.0 [1] (🟰) heavy equals sign
+ {0x1F90C, 0x1F90C, prEmojiPresentation}, // E13.0 [1] (🤌) pinched fingers
+ {0x1F90D, 0x1F90F, prEmojiPresentation}, // E12.0 [3] (🤍..🤏) white heart..pinching hand
+ {0x1F910, 0x1F918, prEmojiPresentation}, // E1.0 [9] (🤐..🤘) zipper-mouth face..sign of the horns
+ {0x1F919, 0x1F91E, prEmojiPresentation}, // E3.0 [6] (🤙..🤞) call me hand..crossed fingers
+ {0x1F91F, 0x1F91F, prEmojiPresentation}, // E5.0 [1] (🤟) love-you gesture
+ {0x1F920, 0x1F927, prEmojiPresentation}, // E3.0 [8] (🤠..🤧) cowboy hat face..sneezing face
+ {0x1F928, 0x1F92F, prEmojiPresentation}, // E5.0 [8] (🤨..🤯) face with raised eyebrow..exploding head
+ {0x1F930, 0x1F930, prEmojiPresentation}, // E3.0 [1] (🤰) pregnant woman
+ {0x1F931, 0x1F932, prEmojiPresentation}, // E5.0 [2] (🤱..🤲) breast-feeding..palms up together
+ {0x1F933, 0x1F93A, prEmojiPresentation}, // E3.0 [8] (🤳..🤺) selfie..person fencing
+ {0x1F93C, 0x1F93E, prEmojiPresentation}, // E3.0 [3] (🤼..🤾) people wrestling..person playing handball
+ {0x1F93F, 0x1F93F, prEmojiPresentation}, // E12.0 [1] (🤿) diving mask
+ {0x1F940, 0x1F945, prEmojiPresentation}, // E3.0 [6] (🥀..🥅) wilted flower..goal net
+ {0x1F947, 0x1F94B, prEmojiPresentation}, // E3.0 [5] (🥇..🥋) 1st place medal..martial arts uniform
+ {0x1F94C, 0x1F94C, prEmojiPresentation}, // E5.0 [1] (🥌) curling stone
+ {0x1F94D, 0x1F94F, prEmojiPresentation}, // E11.0 [3] (🥍..🥏) lacrosse..flying disc
+ {0x1F950, 0x1F95E, prEmojiPresentation}, // E3.0 [15] (🥐..🥞) croissant..pancakes
+ {0x1F95F, 0x1F96B, prEmojiPresentation}, // E5.0 [13] (🥟..🥫) dumpling..canned food
+ {0x1F96C, 0x1F970, prEmojiPresentation}, // E11.0 [5] (🥬..🥰) leafy green..smiling face with hearts
+ {0x1F971, 0x1F971, prEmojiPresentation}, // E12.0 [1] (🥱) yawning face
+ {0x1F972, 0x1F972, prEmojiPresentation}, // E13.0 [1] (🥲) smiling face with tear
+ {0x1F973, 0x1F976, prEmojiPresentation}, // E11.0 [4] (🥳..🥶) partying face..cold face
+ {0x1F977, 0x1F978, prEmojiPresentation}, // E13.0 [2] (🥷..🥸) ninja..disguised face
+ {0x1F979, 0x1F979, prEmojiPresentation}, // E14.0 [1] (🥹) face holding back tears
+ {0x1F97A, 0x1F97A, prEmojiPresentation}, // E11.0 [1] (🥺) pleading face
+ {0x1F97B, 0x1F97B, prEmojiPresentation}, // E12.0 [1] (🥻) sari
+ {0x1F97C, 0x1F97F, prEmojiPresentation}, // E11.0 [4] (🥼..🥿) lab coat..flat shoe
+ {0x1F980, 0x1F984, prEmojiPresentation}, // E1.0 [5] (🦀..🦄) crab..unicorn
+ {0x1F985, 0x1F991, prEmojiPresentation}, // E3.0 [13] (🦅..🦑) eagle..squid
+ {0x1F992, 0x1F997, prEmojiPresentation}, // E5.0 [6] (🦒..🦗) giraffe..cricket
+ {0x1F998, 0x1F9A2, prEmojiPresentation}, // E11.0 [11] (🦘..🦢) kangaroo..swan
+ {0x1F9A3, 0x1F9A4, prEmojiPresentation}, // E13.0 [2] (🦣..🦤) mammoth..dodo
+ {0x1F9A5, 0x1F9AA, prEmojiPresentation}, // E12.0 [6] (🦥..🦪) sloth..oyster
+ {0x1F9AB, 0x1F9AD, prEmojiPresentation}, // E13.0 [3] (🦫..🦭) beaver..seal
+ {0x1F9AE, 0x1F9AF, prEmojiPresentation}, // E12.0 [2] (🦮..🦯) guide dog..white cane
+ {0x1F9B0, 0x1F9B9, prEmojiPresentation}, // E11.0 [10] (🦰..🦹) red hair..supervillain
+ {0x1F9BA, 0x1F9BF, prEmojiPresentation}, // E12.0 [6] (🦺..🦿) safety vest..mechanical leg
+ {0x1F9C0, 0x1F9C0, prEmojiPresentation}, // E1.0 [1] (🧀) cheese wedge
+ {0x1F9C1, 0x1F9C2, prEmojiPresentation}, // E11.0 [2] (🧁..🧂) cupcake..salt
+ {0x1F9C3, 0x1F9CA, prEmojiPresentation}, // E12.0 [8] (🧃..🧊) beverage box..ice
+ {0x1F9CB, 0x1F9CB, prEmojiPresentation}, // E13.0 [1] (🧋) bubble tea
+ {0x1F9CC, 0x1F9CC, prEmojiPresentation}, // E14.0 [1] (🧌) troll
+ {0x1F9CD, 0x1F9CF, prEmojiPresentation}, // E12.0 [3] (🧍..🧏) person standing..deaf person
+ {0x1F9D0, 0x1F9E6, prEmojiPresentation}, // E5.0 [23] (🧐..🧦) face with monocle..socks
+ {0x1F9E7, 0x1F9FF, prEmojiPresentation}, // E11.0 [25] (🧧..🧿) red envelope..nazar amulet
+ {0x1FA70, 0x1FA73, prEmojiPresentation}, // E12.0 [4] (🩰..🩳) ballet shoes..shorts
+ {0x1FA74, 0x1FA74, prEmojiPresentation}, // E13.0 [1] (🩴) thong sandal
+ {0x1FA75, 0x1FA77, prEmojiPresentation}, // E15.0 [3] (🩵..🩷) light blue heart..pink heart
+ {0x1FA78, 0x1FA7A, prEmojiPresentation}, // E12.0 [3] (🩸..🩺) drop of blood..stethoscope
+ {0x1FA7B, 0x1FA7C, prEmojiPresentation}, // E14.0 [2] (🩻..🩼) x-ray..crutch
+ {0x1FA80, 0x1FA82, prEmojiPresentation}, // E12.0 [3] (🪀..🪂) yo-yo..parachute
+ {0x1FA83, 0x1FA86, prEmojiPresentation}, // E13.0 [4] (🪃..🪆) boomerang..nesting dolls
+ {0x1FA87, 0x1FA88, prEmojiPresentation}, // E15.0 [2] (🪇..🪈) maracas..flute
+ {0x1FA90, 0x1FA95, prEmojiPresentation}, // E12.0 [6] (🪐..🪕) ringed planet..banjo
+ {0x1FA96, 0x1FAA8, prEmojiPresentation}, // E13.0 [19] (🪖..🪨) military helmet..rock
+ {0x1FAA9, 0x1FAAC, prEmojiPresentation}, // E14.0 [4] (🪩..🪬) mirror ball..hamsa
+ {0x1FAAD, 0x1FAAF, prEmojiPresentation}, // E15.0 [3] (🪭..🪯) folding hand fan..khanda
+ {0x1FAB0, 0x1FAB6, prEmojiPresentation}, // E13.0 [7] (🪰..🪶) fly..feather
+ {0x1FAB7, 0x1FABA, prEmojiPresentation}, // E14.0 [4] (🪷..🪺) lotus..nest with eggs
+ {0x1FABB, 0x1FABD, prEmojiPresentation}, // E15.0 [3] (🪻..🪽) hyacinth..wing
+ {0x1FABF, 0x1FABF, prEmojiPresentation}, // E15.0 [1] (🪿) goose
+ {0x1FAC0, 0x1FAC2, prEmojiPresentation}, // E13.0 [3] (🫀..🫂) anatomical heart..people hugging
+ {0x1FAC3, 0x1FAC5, prEmojiPresentation}, // E14.0 [3] (🫃..🫅) pregnant man..person with crown
+ {0x1FACE, 0x1FACF, prEmojiPresentation}, // E15.0 [2] (🫎..🫏) moose..donkey
+ {0x1FAD0, 0x1FAD6, prEmojiPresentation}, // E13.0 [7] (🫐..🫖) blueberries..teapot
+ {0x1FAD7, 0x1FAD9, prEmojiPresentation}, // E14.0 [3] (🫗..🫙) pouring liquid..jar
+ {0x1FADA, 0x1FADB, prEmojiPresentation}, // E15.0 [2] (🫚..🫛) ginger root..pea pod
+ {0x1FAE0, 0x1FAE7, prEmojiPresentation}, // E14.0 [8] (🫠..🫧) melting face..bubbles
+ {0x1FAE8, 0x1FAE8, prEmojiPresentation}, // E15.0 [1] (🫨) shaking face
+ {0x1FAF0, 0x1FAF6, prEmojiPresentation}, // E14.0 [7] (🫰..🫶) hand with index finger and thumb crossed..heart hands
+ {0x1FAF7, 0x1FAF8, prEmojiPresentation}, // E15.0 [2] (🫷..🫸) leftwards pushing hand..rightwards pushing hand
+}
diff --git a/vendor/github.com/rivo/uniseg/gen_breaktest.go b/vendor/github.com/rivo/uniseg/gen_breaktest.go
new file mode 100644
index 000000000..6bfbeb5e7
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/gen_breaktest.go
@@ -0,0 +1,215 @@
+//go:build generate
+
+// This program generates a Go file containing a slice of test cases based on the
+// Unicode Character Database auxiliary data files. The command line arguments
+// are as follows:
+//
+// 1. The name of the Unicode data file (just the filename, without extension).
+// 2. The name of the locally generated Go file.
+// 3. The name of the slice containing the test cases.
+// 4. The name of the generator, for logging purposes.
+//
+//go:generate go run gen_breaktest.go GraphemeBreakTest graphemebreak_test.go graphemeBreakTestCases graphemes
+//go:generate go run gen_breaktest.go WordBreakTest wordbreak_test.go wordBreakTestCases words
+//go:generate go run gen_breaktest.go SentenceBreakTest sentencebreak_test.go sentenceBreakTestCases sentences
+//go:generate go run gen_breaktest.go LineBreakTest linebreak_test.go lineBreakTestCases lines
+
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+ "go/format"
+ "io/ioutil"
+ "log"
+ "net/http"
+ "os"
+ "time"
+)
+
+// We want to test against a specific version rather than the latest. When the
+// package is upgraded to a new version, change these to generate new tests.
+const (
+ testCaseURL = `https://www.unicode.org/Public/15.0.0/ucd/auxiliary/%s.txt`
+)
+
+func main() {
+ if len(os.Args) < 5 {
+ fmt.Println("Not enough arguments, see code for details")
+ os.Exit(1)
+ }
+
+ log.SetPrefix("gen_breaktest (" + os.Args[4] + "): ")
+ log.SetFlags(0)
+
+ // Read text of testcases and parse into Go source code.
+ src, err := parse(fmt.Sprintf(testCaseURL, os.Args[1]))
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Format the Go code.
+ formatted, err := format.Source(src)
+ if err != nil {
+ log.Fatalln("gofmt:", err)
+ }
+
+ // Write it out.
+ log.Print("Writing to ", os.Args[2])
+ if err := ioutil.WriteFile(os.Args[2], formatted, 0644); err != nil {
+ log.Fatal(err)
+ }
+}
+
+// parse reads a break text file, either from a local file or from a URL. It
+// parses the file data into Go source code representing the test cases.
+func parse(url string) ([]byte, error) {
+ log.Printf("Parsing %s", url)
+ res, err := http.Get(url)
+ if err != nil {
+ return nil, err
+ }
+ body := res.Body
+ defer body.Close()
+
+ buf := new(bytes.Buffer)
+ buf.Grow(120 << 10)
+ buf.WriteString(`// Code generated via go generate from gen_breaktest.go. DO NOT EDIT.
+
+package uniseg
+
+// ` + os.Args[3] + ` are Grapheme testcases taken from
+// ` + url + `
+// on ` + time.Now().Format("January 2, 2006") + `. See
+// https://www.unicode.org/license.html for the Unicode license agreement.
+var ` + os.Args[3] + ` = []testCase {
+`)
+
+ sc := bufio.NewScanner(body)
+ num := 1
+ var line []byte
+ original := make([]byte, 0, 64)
+ expected := make([]byte, 0, 64)
+ for sc.Scan() {
+ num++
+ line = sc.Bytes()
+ if len(line) == 0 || line[0] == '#' {
+ continue
+ }
+ var comment []byte
+ if i := bytes.IndexByte(line, '#'); i >= 0 {
+ comment = bytes.TrimSpace(line[i+1:])
+ line = bytes.TrimSpace(line[:i])
+ }
+ original, expected, err := parseRuneSequence(line, original[:0], expected[:0])
+ if err != nil {
+ return nil, fmt.Errorf(`line %d: %v: %q`, num, err, line)
+ }
+ fmt.Fprintf(buf, "\t{original: \"%s\", expected: %s}, // %s\n", original, expected, comment)
+ }
+ if err := sc.Err(); err != nil {
+ return nil, err
+ }
+
+	// Check for the final "# EOF" line, a useful sanity check when streaming via HTTP.
+	if !bytes.Equal(line, []byte("# EOF")) {
+		return nil, fmt.Errorf(`line %d: expected "# EOF" as final line, got %q`, num, line)
+ }
+ buf.WriteString("}\n")
+ return buf.Bytes(), nil
+}
+
+// Used by parseRuneSequence to match input via bytes.HasPrefix.
+var (
+ prefixBreak = []byte("÷ ")
+ prefixDontBreak = []byte("× ")
+ breakOk = []byte("÷")
+ breakNo = []byte("×")
+)
+
+// parseRuneSequence parses a rune + breaking opportunity sequence from b
+// and appends the Go code for testcase.original to orig
+// and appends the Go code for testcase.expected to exp.
+// It returns the new orig and exp slices.
+//
+// E.g. for the input b="÷ 0020 × 0308 ÷ 1F1E6 ÷"
+// it will append
+//
+// "\u0020\u0308\U0001F1E6"
+//
+// and "[][]rune{{0x0020,0x0308},{0x1F1E6},}"
+// to orig and exp respectively.
+//
+// The formatting of exp is expected to be cleaned up by gofmt or format.Source.
+// Note we explicitly require the sequence to start with ÷ and we implicitly
+// require it to end with ÷.
+func parseRuneSequence(b, orig, exp []byte) ([]byte, []byte, error) {
+ // Check for and remove first ÷ or ×.
+ if !bytes.HasPrefix(b, prefixBreak) && !bytes.HasPrefix(b, prefixDontBreak) {
+ return nil, nil, errors.New("expected ÷ or × as first character")
+ }
+ if bytes.HasPrefix(b, prefixBreak) {
+ b = b[len(prefixBreak):]
+ } else {
+ b = b[len(prefixDontBreak):]
+ }
+
+ boundary := true
+ exp = append(exp, "[][]rune{"...)
+ for len(b) > 0 {
+ if boundary {
+ exp = append(exp, '{')
+ }
+ exp = append(exp, "0x"...)
+ // Find end of hex digits.
+ var i int
+ for i = 0; i < len(b) && b[i] != ' '; i++ {
+		if d := b[i]; ('0' <= d && d <= '9') ||
+			('A' <= d && d <= 'F') ||
+			('a' <= d && d <= 'f') {
+ continue
+ }
+ return nil, nil, errors.New("bad hex digit")
+ }
+ switch i {
+ case 4:
+ orig = append(orig, "\\u"...)
+ case 5:
+ orig = append(orig, "\\U000"...)
+ default:
+			return nil, nil, errors.New("unsupported code point hex length")
+ }
+ orig = append(orig, b[:i]...)
+ exp = append(exp, b[:i]...)
+ b = b[i:]
+
+ // Check for space between hex and ÷ or ×.
+ if len(b) < 1 || b[0] != ' ' {
+ return nil, nil, errors.New("bad input")
+ }
+ b = b[1:]
+
+ // Check for next boundary.
+ switch {
+ case bytes.HasPrefix(b, breakOk):
+ boundary = true
+ b = b[len(breakOk):]
+ case bytes.HasPrefix(b, breakNo):
+ boundary = false
+ b = b[len(breakNo):]
+ default:
+ return nil, nil, errors.New("missing ÷ or ×")
+ }
+ if boundary {
+ exp = append(exp, '}')
+ }
+ exp = append(exp, ',')
+ if len(b) > 0 && b[0] == ' ' {
+ b = b[1:]
+ }
+ }
+ exp = append(exp, '}')
+ return orig, exp, nil
+}
diff --git a/vendor/github.com/rivo/uniseg/gen_properties.go b/vendor/github.com/rivo/uniseg/gen_properties.go
new file mode 100644
index 000000000..8992d2c5f
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/gen_properties.go
@@ -0,0 +1,261 @@
+//go:build generate
+
+// This program generates a property file in Go from the Unicode Character
+// Database auxiliary data files. The command line arguments are as follows:
+//
+// 1. The name of the Unicode data file (just the filename, without extension).
+// Can be "-" (to skip) if the emoji flag is included.
+// 2. The name of the locally generated Go file.
+// 3. The name of the slice mapping code points to properties.
+// 4. The name of the generator, for logging purposes.
+// 5. (Optional) Flags, comma-separated. The following flags are available:
+//   - "emojis=<property>": include the specified emoji properties (e.g.
+// "Extended_Pictographic").
+// - "gencat": include general category properties.
+//
+//go:generate go run gen_properties.go auxiliary/GraphemeBreakProperty graphemeproperties.go graphemeCodePoints graphemes emojis=Extended_Pictographic
+//go:generate go run gen_properties.go auxiliary/WordBreakProperty wordproperties.go workBreakCodePoints words emojis=Extended_Pictographic
+//go:generate go run gen_properties.go auxiliary/SentenceBreakProperty sentenceproperties.go sentenceBreakCodePoints sentences
+//go:generate go run gen_properties.go LineBreak lineproperties.go lineBreakCodePoints lines gencat
+//go:generate go run gen_properties.go EastAsianWidth eastasianwidth.go eastAsianWidth eastasianwidth
+//go:generate go run gen_properties.go - emojipresentation.go emojiPresentation emojipresentation emojis=Emoji_Presentation
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+ "go/format"
+ "io/ioutil"
+ "log"
+ "net/http"
+ "os"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// We want to test against a specific version rather than the latest. When the
+// package is upgraded to a new version, change these to generate new tests.
+const (
+ propertyURL = `https://www.unicode.org/Public/15.0.0/ucd/%s.txt`
+ emojiURL = `https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt`
+)
+
+// The regular expression for a line containing a code point range property.
+var propertyPattern = regexp.MustCompile(`^([0-9A-F]{4,6})(\.\.([0-9A-F]{4,6}))?\s*;\s*([A-Za-z0-9_]+)\s*#\s(.+)$`)
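+
+// For illustration (an example of our own, not taken from the generator itself):
+// a typical matching line in GraphemeBreakProperty.txt looks like
+//
+//	0600..0605    ; Prepend # Cf   [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE
+//
+// from which the pattern captures the range ("0600", "0605"), the property
+// ("Prepend"), and the trailing comment text.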
+
+func main() {
+ if len(os.Args) < 5 {
+ fmt.Println("Not enough arguments, see code for details")
+ os.Exit(1)
+ }
+
+ log.SetPrefix("gen_properties (" + os.Args[4] + "): ")
+ log.SetFlags(0)
+
+ // Parse flags.
+ flags := make(map[string]string)
+ if len(os.Args) >= 6 {
+ for _, flag := range strings.Split(os.Args[5], ",") {
+ flagFields := strings.Split(flag, "=")
+ if len(flagFields) == 1 {
+ flags[flagFields[0]] = "yes"
+ } else {
+ flags[flagFields[0]] = flagFields[1]
+ }
+ }
+ }
+
+ // Parse the text file and generate Go source code from it.
+ _, includeGeneralCategory := flags["gencat"]
+ var mainURL string
+ if os.Args[1] != "-" {
+ mainURL = fmt.Sprintf(propertyURL, os.Args[1])
+ }
+ src, err := parse(mainURL, flags["emojis"], includeGeneralCategory)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Format the Go code.
+ formatted, err := format.Source([]byte(src))
+ if err != nil {
+ log.Fatal("gofmt:", err)
+ }
+
+ // Save it to the (local) target file.
+ log.Print("Writing to ", os.Args[2])
+ if err := ioutil.WriteFile(os.Args[2], formatted, 0644); err != nil {
+ log.Fatal(err)
+ }
+}
+
+// parse parses the Unicode Properties text files located at the given URLs and
+// returns their equivalent Go source code to be used in the uniseg package. If
+// "emojiProperty" is not an empty string, emoji code points for that emoji
+// property (e.g. "Extended_Pictographic") will be included. In those cases, you
+// may pass an empty "propertyURL" to skip parsing the main properties file. If
+// "includeGeneralCategory" is true, the Unicode General Category property will
+// be extracted from the comments and included in the output.
+func parse(propertyURL, emojiProperty string, includeGeneralCategory bool) (string, error) {
+ if propertyURL == "" && emojiProperty == "" {
+ return "", errors.New("no properties to parse")
+ }
+
+ // Temporary buffer to hold properties.
+ var properties [][4]string
+
+ // Open the first URL.
+ if propertyURL != "" {
+ log.Printf("Parsing %s", propertyURL)
+ res, err := http.Get(propertyURL)
+ if err != nil {
+ return "", err
+ }
+ in1 := res.Body
+ defer in1.Close()
+
+ // Parse it.
+ scanner := bufio.NewScanner(in1)
+ num := 0
+ for scanner.Scan() {
+ num++
+ line := strings.TrimSpace(scanner.Text())
+
+ // Skip comments and empty lines.
+ if strings.HasPrefix(line, "#") || line == "" {
+ continue
+ }
+
+ // Everything else must be a code point range, a property and a comment.
+ from, to, property, comment, err := parseProperty(line)
+ if err != nil {
+ return "", fmt.Errorf("%s line %d: %v", os.Args[4], num, err)
+ }
+ properties = append(properties, [4]string{from, to, property, comment})
+ }
+ if err := scanner.Err(); err != nil {
+ return "", err
+ }
+ }
+
+ // Open the second URL.
+ if emojiProperty != "" {
+ log.Printf("Parsing %s", emojiURL)
+ res, err := http.Get(emojiURL)
+ if err != nil {
+ return "", err
+ }
+ in2 := res.Body
+ defer in2.Close()
+
+ // Parse it.
+ scanner := bufio.NewScanner(in2)
+ num := 0
+ for scanner.Scan() {
+ num++
+ line := scanner.Text()
+
+ // Skip comments, empty lines, and everything not containing
+ // "Extended_Pictographic".
+ if strings.HasPrefix(line, "#") || line == "" || !strings.Contains(line, emojiProperty) {
+ continue
+ }
+
+ // Everything else must be a code point range, a property and a comment.
+ from, to, property, comment, err := parseProperty(line)
+ if err != nil {
+ return "", fmt.Errorf("emojis line %d: %v", num, err)
+ }
+ properties = append(properties, [4]string{from, to, property, comment})
+ }
+ if err := scanner.Err(); err != nil {
+ return "", err
+ }
+ }
+
+ // Avoid overflow during binary search.
+ if len(properties) >= 1<<31 {
+ return "", errors.New("too many properties")
+ }
+
+ // Sort properties.
+ sort.Slice(properties, func(i, j int) bool {
+ left, _ := strconv.ParseUint(properties[i][0], 16, 64)
+ right, _ := strconv.ParseUint(properties[j][0], 16, 64)
+ return left < right
+ })
+
+ // Header.
+ var (
+ buf bytes.Buffer
+ emojiComment string
+ )
+ columns := 3
+ if includeGeneralCategory {
+ columns = 4
+ }
+ if emojiURL != "" {
+ emojiComment = `
+// and
+// ` + emojiURL + `
+// ("Extended_Pictographic" only)`
+ }
+ buf.WriteString(`// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+package uniseg
+
+// ` + os.Args[3] + ` are taken from
+// ` + propertyURL + emojiComment + `
+// on ` + time.Now().Format("January 2, 2006") + `. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var ` + os.Args[3] + ` = [][` + strconv.Itoa(columns) + `]int{
+ `)
+
+ // Properties.
+ for _, prop := range properties {
+ if includeGeneralCategory {
+ generalCategory := "gc" + prop[3][:2]
+ if generalCategory == "gcL&" {
+ generalCategory = "gcLC"
+ }
+ prop[3] = prop[3][3:]
+ fmt.Fprintf(&buf, "{0x%s,0x%s,%s,%s}, // %s\n", prop[0], prop[1], translateProperty("pr", prop[2]), generalCategory, prop[3])
+ } else {
+ fmt.Fprintf(&buf, "{0x%s,0x%s,%s}, // %s\n", prop[0], prop[1], translateProperty("pr", prop[2]), prop[3])
+ }
+ }
+
+ // Tail.
+ buf.WriteString("}")
+
+ return buf.String(), nil
+}
+
+// parseProperty parses a line of the Unicode properties text file containing a
+// property for a code point range and returns it along with its comment.
+func parseProperty(line string) (from, to, property, comment string, err error) {
+ fields := propertyPattern.FindStringSubmatch(line)
+ if fields == nil {
+ err = errors.New("no property found")
+ return
+ }
+ from = fields[1]
+ to = fields[3]
+ if to == "" {
+ to = from
+ }
+ property = fields[4]
+ comment = fields[5]
+ return
+}
+
+// translateProperty translates a property name as used in the Unicode data file
+// to a variable used in the Go code.
+func translateProperty(prefix, property string) string {
+ return prefix + strings.ReplaceAll(property, "_", "")
+}
diff --git a/vendor/github.com/rivo/uniseg/grapheme.go b/vendor/github.com/rivo/uniseg/grapheme.go
new file mode 100644
index 000000000..b12403d43
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/grapheme.go
@@ -0,0 +1,331 @@
+package uniseg
+
+import "unicode/utf8"
+
+// Graphemes implements an iterator over Unicode grapheme clusters, or
+// user-perceived characters. While iterating, it also provides information
+// about word boundaries, sentence boundaries, line breaks, and monospace
+// character widths.
+//
+// After constructing the class via [NewGraphemes] for a given string "str",
+// [Graphemes.Next] is called for every grapheme cluster in a loop until it
+// returns false. Inside the loop, information about the grapheme cluster as
+// well as boundary information and character width is available via the various
+// methods (see examples below).
+//
+// This class basically wraps the [StepString] parser and provides a convenient
+// interface to it. If you are only interested in some parts of this package's
+// functionality, using the specialized functions starting with "First" is
+// almost always faster.
+type Graphemes struct {
+ // The original string.
+ original string
+
+ // The remaining string to be parsed.
+ remaining string
+
+ // The current grapheme cluster.
+ cluster string
+
+ // The byte offset of the current grapheme cluster relative to the original
+ // string.
+ offset int
+
+ // The current boundary information of the [Step] parser.
+ boundaries int
+
+ // The current state of the [Step] parser.
+ state int
+}
+
+// NewGraphemes returns a new grapheme cluster iterator.
+func NewGraphemes(str string) *Graphemes {
+ return &Graphemes{
+ original: str,
+ remaining: str,
+ state: -1,
+ }
+}
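+
+// A minimal usage sketch (illustrative only, not part of the upstream
+// documentation), assuming the methods defined below:
+//
+//	g := NewGraphemes("Café")
+//	for g.Next() {
+//		fmt.Printf("%q width=%d\n", g.Str(), g.Width())
+//	}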
+
+// Next advances the iterator by one grapheme cluster and returns false if no
+// clusters are left. This function must be called before the first cluster is
+// accessed.
+func (g *Graphemes) Next() bool {
+ if len(g.remaining) == 0 {
+ // We're already past the end.
+ g.state = -2
+ g.cluster = ""
+ return false
+ }
+ g.offset += len(g.cluster)
+ g.cluster, g.remaining, g.boundaries, g.state = StepString(g.remaining, g.state)
+ return true
+}
+
+// Runes returns a slice of runes (code points) which corresponds to the current
+// grapheme cluster. If the iterator is already past the end or [Graphemes.Next]
+// has not yet been called, nil is returned.
+func (g *Graphemes) Runes() []rune {
+ if g.state < 0 {
+ return nil
+ }
+ return []rune(g.cluster)
+}
+
+// Str returns a substring of the original string which corresponds to the
+// current grapheme cluster. If the iterator is already past the end or
+// [Graphemes.Next] has not yet been called, an empty string is returned.
+func (g *Graphemes) Str() string {
+ return g.cluster
+}
+
+// Bytes returns a byte slice which corresponds to the current grapheme cluster.
+// If the iterator is already past the end or [Graphemes.Next] has not yet been
+// called, nil is returned.
+func (g *Graphemes) Bytes() []byte {
+ if g.state < 0 {
+ return nil
+ }
+ return []byte(g.cluster)
+}
+
+// Positions returns the interval of the current grapheme cluster as byte
+// positions into the original string. The first returned value "from" indexes
+// the first byte and the second returned value "to" indexes the first byte that
+// is not included anymore, i.e. str[from:to] is the current grapheme cluster of
+// the original string "str". If [Graphemes.Next] has not yet been called, both
+// values are 0. If the iterator is already past the end, both values are 1.
+func (g *Graphemes) Positions() (int, int) {
+ if g.state == -1 {
+ return 0, 0
+ } else if g.state == -2 {
+ return 1, 1
+ }
+ return g.offset, g.offset + len(g.cluster)
+}
+
+// IsWordBoundary returns true if a word ends after the current grapheme
+// cluster.
+func (g *Graphemes) IsWordBoundary() bool {
+ if g.state < 0 {
+ return true
+ }
+ return g.boundaries&MaskWord != 0
+}
+
+// IsSentenceBoundary returns true if a sentence ends after the current
+// grapheme cluster.
+func (g *Graphemes) IsSentenceBoundary() bool {
+ if g.state < 0 {
+ return true
+ }
+ return g.boundaries&MaskSentence != 0
+}
+
+// LineBreak returns whether the line can be broken after the current grapheme
+// cluster. A value of [LineDontBreak] means the line may not be broken, a value
+// of [LineMustBreak] means the line must be broken, and a value of
+// [LineCanBreak] means the line may or may not be broken.
+func (g *Graphemes) LineBreak() int {
+ if g.state == -1 {
+ return LineDontBreak
+ }
+ if g.state == -2 {
+ return LineMustBreak
+ }
+ return g.boundaries & MaskLine
+}
+
+// Width returns the monospace width of the current grapheme cluster.
+func (g *Graphemes) Width() int {
+ if g.state < 0 {
+ return 0
+ }
+ return g.boundaries >> ShiftWidth
+}
+
+// Reset puts the iterator into its initial state such that the next call to
+// [Graphemes.Next] sets it to the first grapheme cluster again.
+func (g *Graphemes) Reset() {
+ g.state = -1
+ g.offset = 0
+ g.cluster = ""
+ g.remaining = g.original
+}
+
+// GraphemeClusterCount returns the number of user-perceived characters
+// (grapheme clusters) for the given string.
+func GraphemeClusterCount(s string) (n int) {
+ state := -1
+ for len(s) > 0 {
+ _, s, _, state = FirstGraphemeClusterInString(s, state)
+ n++
+ }
+ return
+}
+
+// ReverseString reverses the given string while observing grapheme cluster
+// boundaries.
+func ReverseString(s string) string {
+ str := []byte(s)
+ reversed := make([]byte, len(str))
+ state := -1
+ index := len(str)
+ for len(str) > 0 {
+ var cluster []byte
+ cluster, str, _, state = FirstGraphemeCluster(str, state)
+ index -= len(cluster)
+ copy(reversed[index:], cluster)
+ if index <= len(str)/2 {
+ break
+ }
+ }
+ return string(reversed)
+}
+
+// The number of bits the grapheme property must be shifted to make place for
+// grapheme states.
+const shiftGraphemePropState = 4
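+
+// Note (our reading of the code below, not upstream documentation): the state
+// returned by the parser packs the property of the next rune into its upper
+// bits as state | (prop << shiftGraphemePropState); callers recover the two
+// parts with state >> shiftGraphemePropState and state & maskGraphemeState.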
+
+// FirstGraphemeCluster returns the first grapheme cluster found in the given
+// byte slice according to the rules of [Unicode Standard Annex #29, Grapheme
+// Cluster Boundaries]. This function can be called continuously to extract all
+// grapheme clusters from a byte slice, as illustrated in the example below.
+//
+// If you don't know the current state, for example when calling the function
+// for the first time, you must pass -1. For consecutive calls, pass the state
+// and rest slice returned by the previous call.
+//
+// The "rest" slice is the sub-slice of the original byte slice "b" starting
+// after the last byte of the identified grapheme cluster. If the length of the
+// "rest" slice is 0, the entire byte slice "b" has been processed. The
+// "cluster" byte slice is the sub-slice of the input slice containing the
+// identified grapheme cluster.
+//
+// The returned width is the width of the grapheme cluster for most monospace
+// fonts where a value of 1 represents one character cell.
+//
+// Given an empty byte slice "b", the function returns nil values.
+//
+// While slightly less convenient than using the Graphemes class, this function
+// has much better performance and makes no allocations. It lends itself well to
+// large byte slices.
+//
+// [Unicode Standard Annex #29, Grapheme Cluster Boundaries]: http://unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries
+func FirstGraphemeCluster(b []byte, state int) (cluster, rest []byte, width, newState int) {
+ // An empty byte slice returns nothing.
+ if len(b) == 0 {
+ return
+ }
+
+ // Extract the first rune.
+ r, length := utf8.DecodeRune(b)
+ if len(b) <= length { // If we're already past the end, there is nothing else to parse.
+ var prop int
+ if state < 0 {
+ prop = propertyGraphemes(r)
+ } else {
+ prop = state >> shiftGraphemePropState
+ }
+ return b, nil, runeWidth(r, prop), grAny | (prop << shiftGraphemePropState)
+ }
+
+ // If we don't know the state, determine it now.
+ var firstProp int
+ if state < 0 {
+ state, firstProp, _ = transitionGraphemeState(state, r)
+ } else {
+ firstProp = state >> shiftGraphemePropState
+ }
+ width += runeWidth(r, firstProp)
+
+ // Transition until we find a boundary.
+ for {
+ var (
+ prop int
+ boundary bool
+ )
+
+ r, l := utf8.DecodeRune(b[length:])
+ state, prop, boundary = transitionGraphemeState(state&maskGraphemeState, r)
+
+ if boundary {
+ return b[:length], b[length:], width, state | (prop << shiftGraphemePropState)
+ }
+
+ if firstProp == prExtendedPictographic {
+ if r == vs15 {
+ width = 1
+ } else if r == vs16 {
+ width = 2
+ }
+ } else if firstProp != prRegionalIndicator && firstProp != prL {
+ width += runeWidth(r, prop)
+ }
+
+ length += l
+ if len(b) <= length {
+ return b, nil, width, grAny | (prop << shiftGraphemePropState)
+ }
+ }
+}
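+
+// An illustrative sketch (not from the upstream docs) of the intended calling
+// convention, threading the returned state through consecutive calls:
+//
+//	state := -1
+//	b := []byte("some UTF-8 text")
+//	for len(b) > 0 {
+//		var cluster []byte
+//		cluster, b, _, state = FirstGraphemeCluster(b, state)
+//		// ... process cluster ...
+//	}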
+
+// FirstGraphemeClusterInString is like [FirstGraphemeCluster] but its input and
+// outputs are strings.
+func FirstGraphemeClusterInString(str string, state int) (cluster, rest string, width, newState int) {
+ // An empty string returns nothing.
+ if len(str) == 0 {
+ return
+ }
+
+ // Extract the first rune.
+ r, length := utf8.DecodeRuneInString(str)
+ if len(str) <= length { // If we're already past the end, there is nothing else to parse.
+ var prop int
+ if state < 0 {
+ prop = propertyGraphemes(r)
+ } else {
+ prop = state >> shiftGraphemePropState
+ }
+ return str, "", runeWidth(r, prop), grAny | (prop << shiftGraphemePropState)
+ }
+
+ // If we don't know the state, determine it now.
+ var firstProp int
+ if state < 0 {
+ state, firstProp, _ = transitionGraphemeState(state, r)
+ } else {
+ firstProp = state >> shiftGraphemePropState
+ }
+ width += runeWidth(r, firstProp)
+
+ // Transition until we find a boundary.
+ for {
+ var (
+ prop int
+ boundary bool
+ )
+
+ r, l := utf8.DecodeRuneInString(str[length:])
+ state, prop, boundary = transitionGraphemeState(state&maskGraphemeState, r)
+
+ if boundary {
+ return str[:length], str[length:], width, state | (prop << shiftGraphemePropState)
+ }
+
+ if firstProp == prExtendedPictographic {
+ if r == vs15 {
+ width = 1
+ } else if r == vs16 {
+ width = 2
+ }
+ } else if firstProp != prRegionalIndicator && firstProp != prL {
+ width += runeWidth(r, prop)
+ }
+
+ length += l
+ if len(str) <= length {
+ return str, "", width, grAny | (prop << shiftGraphemePropState)
+ }
+ }
+}
diff --git a/vendor/github.com/rivo/uniseg/graphemeproperties.go b/vendor/github.com/rivo/uniseg/graphemeproperties.go
new file mode 100644
index 000000000..0aff4a619
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/graphemeproperties.go
@@ -0,0 +1,1915 @@
+// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+package uniseg
+
+// graphemeCodePoints are taken from
+// https://www.unicode.org/Public/15.0.0/ucd/auxiliary/GraphemeBreakProperty.txt
+// and
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
+// ("Extended_Pictographic" only)
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var graphemeCodePoints = [][3]int{
+	{0x0000, 0x0009, prControl}, // Cc [10] <control-0000>..<control-0009>
+	{0x000A, 0x000A, prLF}, // Cc <control-000A>
+	{0x000B, 0x000C, prControl}, // Cc [2] <control-000B>..<control-000C>
+	{0x000D, 0x000D, prCR}, // Cc <control-000D>
+	{0x000E, 0x001F, prControl}, // Cc [18] <control-000E>..<control-001F>
+	{0x007F, 0x009F, prControl}, // Cc [33] <control-007F>..<control-009F>
+ {0x00A9, 0x00A9, prExtendedPictographic}, // E0.6 [1] (©️) copyright
+ {0x00AD, 0x00AD, prControl}, // Cf SOFT HYPHEN
+ {0x00AE, 0x00AE, prExtendedPictographic}, // E0.6 [1] (®️) registered
+ {0x0300, 0x036F, prExtend}, // Mn [112] COMBINING GRAVE ACCENT..COMBINING LATIN SMALL LETTER X
+ {0x0483, 0x0487, prExtend}, // Mn [5] COMBINING CYRILLIC TITLO..COMBINING CYRILLIC POKRYTIE
+ {0x0488, 0x0489, prExtend}, // Me [2] COMBINING CYRILLIC HUNDRED THOUSANDS SIGN..COMBINING CYRILLIC MILLIONS SIGN
+ {0x0591, 0x05BD, prExtend}, // Mn [45] HEBREW ACCENT ETNAHTA..HEBREW POINT METEG
+ {0x05BF, 0x05BF, prExtend}, // Mn HEBREW POINT RAFE
+ {0x05C1, 0x05C2, prExtend}, // Mn [2] HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT
+ {0x05C4, 0x05C5, prExtend}, // Mn [2] HEBREW MARK UPPER DOT..HEBREW MARK LOWER DOT
+ {0x05C7, 0x05C7, prExtend}, // Mn HEBREW POINT QAMATS QATAN
+ {0x0600, 0x0605, prPrepend}, // Cf [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE
+ {0x0610, 0x061A, prExtend}, // Mn [11] ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM..ARABIC SMALL KASRA
+ {0x061C, 0x061C, prControl}, // Cf ARABIC LETTER MARK
+ {0x064B, 0x065F, prExtend}, // Mn [21] ARABIC FATHATAN..ARABIC WAVY HAMZA BELOW
+ {0x0670, 0x0670, prExtend}, // Mn ARABIC LETTER SUPERSCRIPT ALEF
+ {0x06D6, 0x06DC, prExtend}, // Mn [7] ARABIC SMALL HIGH LIGATURE SAD WITH LAM WITH ALEF MAKSURA..ARABIC SMALL HIGH SEEN
+ {0x06DD, 0x06DD, prPrepend}, // Cf ARABIC END OF AYAH
+ {0x06DF, 0x06E4, prExtend}, // Mn [6] ARABIC SMALL HIGH ROUNDED ZERO..ARABIC SMALL HIGH MADDA
+ {0x06E7, 0x06E8, prExtend}, // Mn [2] ARABIC SMALL HIGH YEH..ARABIC SMALL HIGH NOON
+ {0x06EA, 0x06ED, prExtend}, // Mn [4] ARABIC EMPTY CENTRE LOW STOP..ARABIC SMALL LOW MEEM
+ {0x070F, 0x070F, prPrepend}, // Cf SYRIAC ABBREVIATION MARK
+ {0x0711, 0x0711, prExtend}, // Mn SYRIAC LETTER SUPERSCRIPT ALAPH
+ {0x0730, 0x074A, prExtend}, // Mn [27] SYRIAC PTHAHA ABOVE..SYRIAC BARREKH
+ {0x07A6, 0x07B0, prExtend}, // Mn [11] THAANA ABAFILI..THAANA SUKUN
+ {0x07EB, 0x07F3, prExtend}, // Mn [9] NKO COMBINING SHORT HIGH TONE..NKO COMBINING DOUBLE DOT ABOVE
+ {0x07FD, 0x07FD, prExtend}, // Mn NKO DANTAYALAN
+ {0x0816, 0x0819, prExtend}, // Mn [4] SAMARITAN MARK IN..SAMARITAN MARK DAGESH
+ {0x081B, 0x0823, prExtend}, // Mn [9] SAMARITAN MARK EPENTHETIC YUT..SAMARITAN VOWEL SIGN A
+ {0x0825, 0x0827, prExtend}, // Mn [3] SAMARITAN VOWEL SIGN SHORT A..SAMARITAN VOWEL SIGN U
+ {0x0829, 0x082D, prExtend}, // Mn [5] SAMARITAN VOWEL SIGN LONG I..SAMARITAN MARK NEQUDAA
+ {0x0859, 0x085B, prExtend}, // Mn [3] MANDAIC AFFRICATION MARK..MANDAIC GEMINATION MARK
+ {0x0890, 0x0891, prPrepend}, // Cf [2] ARABIC POUND MARK ABOVE..ARABIC PIASTRE MARK ABOVE
+ {0x0898, 0x089F, prExtend}, // Mn [8] ARABIC SMALL HIGH WORD AL-JUZ..ARABIC HALF MADDA OVER MADDA
+ {0x08CA, 0x08E1, prExtend}, // Mn [24] ARABIC SMALL HIGH FARSI YEH..ARABIC SMALL HIGH SIGN SAFHA
+ {0x08E2, 0x08E2, prPrepend}, // Cf ARABIC DISPUTED END OF AYAH
+ {0x08E3, 0x0902, prExtend}, // Mn [32] ARABIC TURNED DAMMA BELOW..DEVANAGARI SIGN ANUSVARA
+ {0x0903, 0x0903, prSpacingMark}, // Mc DEVANAGARI SIGN VISARGA
+ {0x093A, 0x093A, prExtend}, // Mn DEVANAGARI VOWEL SIGN OE
+ {0x093B, 0x093B, prSpacingMark}, // Mc DEVANAGARI VOWEL SIGN OOE
+ {0x093C, 0x093C, prExtend}, // Mn DEVANAGARI SIGN NUKTA
+ {0x093E, 0x0940, prSpacingMark}, // Mc [3] DEVANAGARI VOWEL SIGN AA..DEVANAGARI VOWEL SIGN II
+ {0x0941, 0x0948, prExtend}, // Mn [8] DEVANAGARI VOWEL SIGN U..DEVANAGARI VOWEL SIGN AI
+ {0x0949, 0x094C, prSpacingMark}, // Mc [4] DEVANAGARI VOWEL SIGN CANDRA O..DEVANAGARI VOWEL SIGN AU
+ {0x094D, 0x094D, prExtend}, // Mn DEVANAGARI SIGN VIRAMA
+ {0x094E, 0x094F, prSpacingMark}, // Mc [2] DEVANAGARI VOWEL SIGN PRISHTHAMATRA E..DEVANAGARI VOWEL SIGN AW
+ {0x0951, 0x0957, prExtend}, // Mn [7] DEVANAGARI STRESS SIGN UDATTA..DEVANAGARI VOWEL SIGN UUE
+ {0x0962, 0x0963, prExtend}, // Mn [2] DEVANAGARI VOWEL SIGN VOCALIC L..DEVANAGARI VOWEL SIGN VOCALIC LL
+ {0x0981, 0x0981, prExtend}, // Mn BENGALI SIGN CANDRABINDU
+ {0x0982, 0x0983, prSpacingMark}, // Mc [2] BENGALI SIGN ANUSVARA..BENGALI SIGN VISARGA
+ {0x09BC, 0x09BC, prExtend}, // Mn BENGALI SIGN NUKTA
+ {0x09BE, 0x09BE, prExtend}, // Mc BENGALI VOWEL SIGN AA
+ {0x09BF, 0x09C0, prSpacingMark}, // Mc [2] BENGALI VOWEL SIGN I..BENGALI VOWEL SIGN II
+ {0x09C1, 0x09C4, prExtend}, // Mn [4] BENGALI VOWEL SIGN U..BENGALI VOWEL SIGN VOCALIC RR
+ {0x09C7, 0x09C8, prSpacingMark}, // Mc [2] BENGALI VOWEL SIGN E..BENGALI VOWEL SIGN AI
+ {0x09CB, 0x09CC, prSpacingMark}, // Mc [2] BENGALI VOWEL SIGN O..BENGALI VOWEL SIGN AU
+ {0x09CD, 0x09CD, prExtend}, // Mn BENGALI SIGN VIRAMA
+ {0x09D7, 0x09D7, prExtend}, // Mc BENGALI AU LENGTH MARK
+ {0x09E2, 0x09E3, prExtend}, // Mn [2] BENGALI VOWEL SIGN VOCALIC L..BENGALI VOWEL SIGN VOCALIC LL
+ {0x09FE, 0x09FE, prExtend}, // Mn BENGALI SANDHI MARK
+ {0x0A01, 0x0A02, prExtend}, // Mn [2] GURMUKHI SIGN ADAK BINDI..GURMUKHI SIGN BINDI
+ {0x0A03, 0x0A03, prSpacingMark}, // Mc GURMUKHI SIGN VISARGA
+ {0x0A3C, 0x0A3C, prExtend}, // Mn GURMUKHI SIGN NUKTA
+ {0x0A3E, 0x0A40, prSpacingMark}, // Mc [3] GURMUKHI VOWEL SIGN AA..GURMUKHI VOWEL SIGN II
+ {0x0A41, 0x0A42, prExtend}, // Mn [2] GURMUKHI VOWEL SIGN U..GURMUKHI VOWEL SIGN UU
+ {0x0A47, 0x0A48, prExtend}, // Mn [2] GURMUKHI VOWEL SIGN EE..GURMUKHI VOWEL SIGN AI
+ {0x0A4B, 0x0A4D, prExtend}, // Mn [3] GURMUKHI VOWEL SIGN OO..GURMUKHI SIGN VIRAMA
+ {0x0A51, 0x0A51, prExtend}, // Mn GURMUKHI SIGN UDAAT
+ {0x0A70, 0x0A71, prExtend}, // Mn [2] GURMUKHI TIPPI..GURMUKHI ADDAK
+ {0x0A75, 0x0A75, prExtend}, // Mn GURMUKHI SIGN YAKASH
+ {0x0A81, 0x0A82, prExtend}, // Mn [2] GUJARATI SIGN CANDRABINDU..GUJARATI SIGN ANUSVARA
+ {0x0A83, 0x0A83, prSpacingMark}, // Mc GUJARATI SIGN VISARGA
+ {0x0ABC, 0x0ABC, prExtend}, // Mn GUJARATI SIGN NUKTA
+ {0x0ABE, 0x0AC0, prSpacingMark}, // Mc [3] GUJARATI VOWEL SIGN AA..GUJARATI VOWEL SIGN II
+ {0x0AC1, 0x0AC5, prExtend}, // Mn [5] GUJARATI VOWEL SIGN U..GUJARATI VOWEL SIGN CANDRA E
+ {0x0AC7, 0x0AC8, prExtend}, // Mn [2] GUJARATI VOWEL SIGN E..GUJARATI VOWEL SIGN AI
+ {0x0AC9, 0x0AC9, prSpacingMark}, // Mc GUJARATI VOWEL SIGN CANDRA O
+ {0x0ACB, 0x0ACC, prSpacingMark}, // Mc [2] GUJARATI VOWEL SIGN O..GUJARATI VOWEL SIGN AU
+ {0x0ACD, 0x0ACD, prExtend}, // Mn GUJARATI SIGN VIRAMA
+ {0x0AE2, 0x0AE3, prExtend}, // Mn [2] GUJARATI VOWEL SIGN VOCALIC L..GUJARATI VOWEL SIGN VOCALIC LL
+ {0x0AFA, 0x0AFF, prExtend}, // Mn [6] GUJARATI SIGN SUKUN..GUJARATI SIGN TWO-CIRCLE NUKTA ABOVE
+ {0x0B01, 0x0B01, prExtend}, // Mn ORIYA SIGN CANDRABINDU
+ {0x0B02, 0x0B03, prSpacingMark}, // Mc [2] ORIYA SIGN ANUSVARA..ORIYA SIGN VISARGA
+ {0x0B3C, 0x0B3C, prExtend}, // Mn ORIYA SIGN NUKTA
+ {0x0B3E, 0x0B3E, prExtend}, // Mc ORIYA VOWEL SIGN AA
+ {0x0B3F, 0x0B3F, prExtend}, // Mn ORIYA VOWEL SIGN I
+ {0x0B40, 0x0B40, prSpacingMark}, // Mc ORIYA VOWEL SIGN II
+ {0x0B41, 0x0B44, prExtend}, // Mn [4] ORIYA VOWEL SIGN U..ORIYA VOWEL SIGN VOCALIC RR
+ {0x0B47, 0x0B48, prSpacingMark}, // Mc [2] ORIYA VOWEL SIGN E..ORIYA VOWEL SIGN AI
+ {0x0B4B, 0x0B4C, prSpacingMark}, // Mc [2] ORIYA VOWEL SIGN O..ORIYA VOWEL SIGN AU
+ {0x0B4D, 0x0B4D, prExtend}, // Mn ORIYA SIGN VIRAMA
+ {0x0B55, 0x0B56, prExtend}, // Mn [2] ORIYA SIGN OVERLINE..ORIYA AI LENGTH MARK
+ {0x0B57, 0x0B57, prExtend}, // Mc ORIYA AU LENGTH MARK
+ {0x0B62, 0x0B63, prExtend}, // Mn [2] ORIYA VOWEL SIGN VOCALIC L..ORIYA VOWEL SIGN VOCALIC LL
+ {0x0B82, 0x0B82, prExtend}, // Mn TAMIL SIGN ANUSVARA
+ {0x0BBE, 0x0BBE, prExtend}, // Mc TAMIL VOWEL SIGN AA
+ {0x0BBF, 0x0BBF, prSpacingMark}, // Mc TAMIL VOWEL SIGN I
+ {0x0BC0, 0x0BC0, prExtend}, // Mn TAMIL VOWEL SIGN II
+ {0x0BC1, 0x0BC2, prSpacingMark}, // Mc [2] TAMIL VOWEL SIGN U..TAMIL VOWEL SIGN UU
+ {0x0BC6, 0x0BC8, prSpacingMark}, // Mc [3] TAMIL VOWEL SIGN E..TAMIL VOWEL SIGN AI
+ {0x0BCA, 0x0BCC, prSpacingMark}, // Mc [3] TAMIL VOWEL SIGN O..TAMIL VOWEL SIGN AU
+ {0x0BCD, 0x0BCD, prExtend}, // Mn TAMIL SIGN VIRAMA
+ {0x0BD7, 0x0BD7, prExtend}, // Mc TAMIL AU LENGTH MARK
+ {0x0C00, 0x0C00, prExtend}, // Mn TELUGU SIGN COMBINING CANDRABINDU ABOVE
+ {0x0C01, 0x0C03, prSpacingMark}, // Mc [3] TELUGU SIGN CANDRABINDU..TELUGU SIGN VISARGA
+ {0x0C04, 0x0C04, prExtend}, // Mn TELUGU SIGN COMBINING ANUSVARA ABOVE
+ {0x0C3C, 0x0C3C, prExtend}, // Mn TELUGU SIGN NUKTA
+ {0x0C3E, 0x0C40, prExtend}, // Mn [3] TELUGU VOWEL SIGN AA..TELUGU VOWEL SIGN II
+ {0x0C41, 0x0C44, prSpacingMark}, // Mc [4] TELUGU VOWEL SIGN U..TELUGU VOWEL SIGN VOCALIC RR
+ {0x0C46, 0x0C48, prExtend}, // Mn [3] TELUGU VOWEL SIGN E..TELUGU VOWEL SIGN AI
+ {0x0C4A, 0x0C4D, prExtend}, // Mn [4] TELUGU VOWEL SIGN O..TELUGU SIGN VIRAMA
+ {0x0C55, 0x0C56, prExtend}, // Mn [2] TELUGU LENGTH MARK..TELUGU AI LENGTH MARK
+ {0x0C62, 0x0C63, prExtend}, // Mn [2] TELUGU VOWEL SIGN VOCALIC L..TELUGU VOWEL SIGN VOCALIC LL
+ {0x0C81, 0x0C81, prExtend}, // Mn KANNADA SIGN CANDRABINDU
+ {0x0C82, 0x0C83, prSpacingMark}, // Mc [2] KANNADA SIGN ANUSVARA..KANNADA SIGN VISARGA
+ {0x0CBC, 0x0CBC, prExtend}, // Mn KANNADA SIGN NUKTA
+ {0x0CBE, 0x0CBE, prSpacingMark}, // Mc KANNADA VOWEL SIGN AA
+ {0x0CBF, 0x0CBF, prExtend}, // Mn KANNADA VOWEL SIGN I
+ {0x0CC0, 0x0CC1, prSpacingMark}, // Mc [2] KANNADA VOWEL SIGN II..KANNADA VOWEL SIGN U
+ {0x0CC2, 0x0CC2, prExtend}, // Mc KANNADA VOWEL SIGN UU
+ {0x0CC3, 0x0CC4, prSpacingMark}, // Mc [2] KANNADA VOWEL SIGN VOCALIC R..KANNADA VOWEL SIGN VOCALIC RR
+ {0x0CC6, 0x0CC6, prExtend}, // Mn KANNADA VOWEL SIGN E
+ {0x0CC7, 0x0CC8, prSpacingMark}, // Mc [2] KANNADA VOWEL SIGN EE..KANNADA VOWEL SIGN AI
+ {0x0CCA, 0x0CCB, prSpacingMark}, // Mc [2] KANNADA VOWEL SIGN O..KANNADA VOWEL SIGN OO
+ {0x0CCC, 0x0CCD, prExtend}, // Mn [2] KANNADA VOWEL SIGN AU..KANNADA SIGN VIRAMA
+ {0x0CD5, 0x0CD6, prExtend}, // Mc [2] KANNADA LENGTH MARK..KANNADA AI LENGTH MARK
+ {0x0CE2, 0x0CE3, prExtend}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
+ {0x0CF3, 0x0CF3, prSpacingMark}, // Mc KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT
+ {0x0D00, 0x0D01, prExtend}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
+ {0x0D02, 0x0D03, prSpacingMark}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
+ {0x0D3B, 0x0D3C, prExtend}, // Mn [2] MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA
+ {0x0D3E, 0x0D3E, prExtend}, // Mc MALAYALAM VOWEL SIGN AA
+ {0x0D3F, 0x0D40, prSpacingMark}, // Mc [2] MALAYALAM VOWEL SIGN I..MALAYALAM VOWEL SIGN II
+ {0x0D41, 0x0D44, prExtend}, // Mn [4] MALAYALAM VOWEL SIGN U..MALAYALAM VOWEL SIGN VOCALIC RR
+ {0x0D46, 0x0D48, prSpacingMark}, // Mc [3] MALAYALAM VOWEL SIGN E..MALAYALAM VOWEL SIGN AI
+ {0x0D4A, 0x0D4C, prSpacingMark}, // Mc [3] MALAYALAM VOWEL SIGN O..MALAYALAM VOWEL SIGN AU
+ {0x0D4D, 0x0D4D, prExtend}, // Mn MALAYALAM SIGN VIRAMA
+ {0x0D4E, 0x0D4E, prPrepend}, // Lo MALAYALAM LETTER DOT REPH
+ {0x0D57, 0x0D57, prExtend}, // Mc MALAYALAM AU LENGTH MARK
+ {0x0D62, 0x0D63, prExtend}, // Mn [2] MALAYALAM VOWEL SIGN VOCALIC L..MALAYALAM VOWEL SIGN VOCALIC LL
+ {0x0D81, 0x0D81, prExtend}, // Mn SINHALA SIGN CANDRABINDU
+ {0x0D82, 0x0D83, prSpacingMark}, // Mc [2] SINHALA SIGN ANUSVARAYA..SINHALA SIGN VISARGAYA
+ {0x0DCA, 0x0DCA, prExtend}, // Mn SINHALA SIGN AL-LAKUNA
+ {0x0DCF, 0x0DCF, prExtend}, // Mc SINHALA VOWEL SIGN AELA-PILLA
+ {0x0DD0, 0x0DD1, prSpacingMark}, // Mc [2] SINHALA VOWEL SIGN KETTI AEDA-PILLA..SINHALA VOWEL SIGN DIGA AEDA-PILLA
+ {0x0DD2, 0x0DD4, prExtend}, // Mn [3] SINHALA VOWEL SIGN KETTI IS-PILLA..SINHALA VOWEL SIGN KETTI PAA-PILLA
+ {0x0DD6, 0x0DD6, prExtend}, // Mn SINHALA VOWEL SIGN DIGA PAA-PILLA
+ {0x0DD8, 0x0DDE, prSpacingMark}, // Mc [7] SINHALA VOWEL SIGN GAETTA-PILLA..SINHALA VOWEL SIGN KOMBUVA HAA GAYANUKITTA
+ {0x0DDF, 0x0DDF, prExtend}, // Mc SINHALA VOWEL SIGN GAYANUKITTA
+ {0x0DF2, 0x0DF3, prSpacingMark}, // Mc [2] SINHALA VOWEL SIGN DIGA GAETTA-PILLA..SINHALA VOWEL SIGN DIGA GAYANUKITTA
+ {0x0E31, 0x0E31, prExtend}, // Mn THAI CHARACTER MAI HAN-AKAT
+ {0x0E33, 0x0E33, prSpacingMark}, // Lo THAI CHARACTER SARA AM
+ {0x0E34, 0x0E3A, prExtend}, // Mn [7] THAI CHARACTER SARA I..THAI CHARACTER PHINTHU
+ {0x0E47, 0x0E4E, prExtend}, // Mn [8] THAI CHARACTER MAITAIKHU..THAI CHARACTER YAMAKKAN
+ {0x0EB1, 0x0EB1, prExtend}, // Mn LAO VOWEL SIGN MAI KAN
+ {0x0EB3, 0x0EB3, prSpacingMark}, // Lo LAO VOWEL SIGN AM
+ {0x0EB4, 0x0EBC, prExtend}, // Mn [9] LAO VOWEL SIGN I..LAO SEMIVOWEL SIGN LO
+ {0x0EC8, 0x0ECE, prExtend}, // Mn [7] LAO TONE MAI EK..LAO YAMAKKAN
+ {0x0F18, 0x0F19, prExtend}, // Mn [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS
+ {0x0F35, 0x0F35, prExtend}, // Mn TIBETAN MARK NGAS BZUNG NYI ZLA
+ {0x0F37, 0x0F37, prExtend}, // Mn TIBETAN MARK NGAS BZUNG SGOR RTAGS
+ {0x0F39, 0x0F39, prExtend}, // Mn TIBETAN MARK TSA -PHRU
+ {0x0F3E, 0x0F3F, prSpacingMark}, // Mc [2] TIBETAN SIGN YAR TSHES..TIBETAN SIGN MAR TSHES
+ {0x0F71, 0x0F7E, prExtend}, // Mn [14] TIBETAN VOWEL SIGN AA..TIBETAN SIGN RJES SU NGA RO
+ {0x0F7F, 0x0F7F, prSpacingMark}, // Mc TIBETAN SIGN RNAM BCAD
+ {0x0F80, 0x0F84, prExtend}, // Mn [5] TIBETAN VOWEL SIGN REVERSED I..TIBETAN MARK HALANTA
+ {0x0F86, 0x0F87, prExtend}, // Mn [2] TIBETAN SIGN LCI RTAGS..TIBETAN SIGN YANG RTAGS
+ {0x0F8D, 0x0F97, prExtend}, // Mn [11] TIBETAN SUBJOINED SIGN LCE TSA CAN..TIBETAN SUBJOINED LETTER JA
+ {0x0F99, 0x0FBC, prExtend}, // Mn [36] TIBETAN SUBJOINED LETTER NYA..TIBETAN SUBJOINED LETTER FIXED-FORM RA
+ {0x0FC6, 0x0FC6, prExtend}, // Mn TIBETAN SYMBOL PADMA GDAN
+ {0x102D, 0x1030, prExtend}, // Mn [4] MYANMAR VOWEL SIGN I..MYANMAR VOWEL SIGN UU
+ {0x1031, 0x1031, prSpacingMark}, // Mc MYANMAR VOWEL SIGN E
+ {0x1032, 0x1037, prExtend}, // Mn [6] MYANMAR VOWEL SIGN AI..MYANMAR SIGN DOT BELOW
+ {0x1039, 0x103A, prExtend}, // Mn [2] MYANMAR SIGN VIRAMA..MYANMAR SIGN ASAT
+ {0x103B, 0x103C, prSpacingMark}, // Mc [2] MYANMAR CONSONANT SIGN MEDIAL YA..MYANMAR CONSONANT SIGN MEDIAL RA
+ {0x103D, 0x103E, prExtend}, // Mn [2] MYANMAR CONSONANT SIGN MEDIAL WA..MYANMAR CONSONANT SIGN MEDIAL HA
+ {0x1056, 0x1057, prSpacingMark}, // Mc [2] MYANMAR VOWEL SIGN VOCALIC R..MYANMAR VOWEL SIGN VOCALIC RR
+ {0x1058, 0x1059, prExtend}, // Mn [2] MYANMAR VOWEL SIGN VOCALIC L..MYANMAR VOWEL SIGN VOCALIC LL
+ {0x105E, 0x1060, prExtend}, // Mn [3] MYANMAR CONSONANT SIGN MON MEDIAL NA..MYANMAR CONSONANT SIGN MON MEDIAL LA
+ {0x1071, 0x1074, prExtend}, // Mn [4] MYANMAR VOWEL SIGN GEBA KAREN I..MYANMAR VOWEL SIGN KAYAH EE
+ {0x1082, 0x1082, prExtend}, // Mn MYANMAR CONSONANT SIGN SHAN MEDIAL WA
+ {0x1084, 0x1084, prSpacingMark}, // Mc MYANMAR VOWEL SIGN SHAN E
+ {0x1085, 0x1086, prExtend}, // Mn [2] MYANMAR VOWEL SIGN SHAN E ABOVE..MYANMAR VOWEL SIGN SHAN FINAL Y
+ {0x108D, 0x108D, prExtend}, // Mn MYANMAR SIGN SHAN COUNCIL EMPHATIC TONE
+ {0x109D, 0x109D, prExtend}, // Mn MYANMAR VOWEL SIGN AITON AI
+ {0x1100, 0x115F, prL}, // Lo [96] HANGUL CHOSEONG KIYEOK..HANGUL CHOSEONG FILLER
+ {0x1160, 0x11A7, prV}, // Lo [72] HANGUL JUNGSEONG FILLER..HANGUL JUNGSEONG O-YAE
+ {0x11A8, 0x11FF, prT}, // Lo [88] HANGUL JONGSEONG KIYEOK..HANGUL JONGSEONG SSANGNIEUN
+ {0x135D, 0x135F, prExtend}, // Mn [3] ETHIOPIC COMBINING GEMINATION AND VOWEL LENGTH MARK..ETHIOPIC COMBINING GEMINATION MARK
+ {0x1712, 0x1714, prExtend}, // Mn [3] TAGALOG VOWEL SIGN I..TAGALOG SIGN VIRAMA
+ {0x1715, 0x1715, prSpacingMark}, // Mc TAGALOG SIGN PAMUDPOD
+ {0x1732, 0x1733, prExtend}, // Mn [2] HANUNOO VOWEL SIGN I..HANUNOO VOWEL SIGN U
+ {0x1734, 0x1734, prSpacingMark}, // Mc HANUNOO SIGN PAMUDPOD
+ {0x1752, 0x1753, prExtend}, // Mn [2] BUHID VOWEL SIGN I..BUHID VOWEL SIGN U
+ {0x1772, 0x1773, prExtend}, // Mn [2] TAGBANWA VOWEL SIGN I..TAGBANWA VOWEL SIGN U
+ {0x17B4, 0x17B5, prExtend}, // Mn [2] KHMER VOWEL INHERENT AQ..KHMER VOWEL INHERENT AA
+ {0x17B6, 0x17B6, prSpacingMark}, // Mc KHMER VOWEL SIGN AA
+ {0x17B7, 0x17BD, prExtend}, // Mn [7] KHMER VOWEL SIGN I..KHMER VOWEL SIGN UA
+ {0x17BE, 0x17C5, prSpacingMark}, // Mc [8] KHMER VOWEL SIGN OE..KHMER VOWEL SIGN AU
+ {0x17C6, 0x17C6, prExtend}, // Mn KHMER SIGN NIKAHIT
+ {0x17C7, 0x17C8, prSpacingMark}, // Mc [2] KHMER SIGN REAHMUK..KHMER SIGN YUUKALEAPINTU
+ {0x17C9, 0x17D3, prExtend}, // Mn [11] KHMER SIGN MUUSIKATOAN..KHMER SIGN BATHAMASAT
+ {0x17DD, 0x17DD, prExtend}, // Mn KHMER SIGN ATTHACAN
+ {0x180B, 0x180D, prExtend}, // Mn [3] MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE
+ {0x180E, 0x180E, prControl}, // Cf MONGOLIAN VOWEL SEPARATOR
+ {0x180F, 0x180F, prExtend}, // Mn MONGOLIAN FREE VARIATION SELECTOR FOUR
+ {0x1885, 0x1886, prExtend}, // Mn [2] MONGOLIAN LETTER ALI GALI BALUDA..MONGOLIAN LETTER ALI GALI THREE BALUDA
+ {0x18A9, 0x18A9, prExtend}, // Mn MONGOLIAN LETTER ALI GALI DAGALGA
+ {0x1920, 0x1922, prExtend}, // Mn [3] LIMBU VOWEL SIGN A..LIMBU VOWEL SIGN U
+ {0x1923, 0x1926, prSpacingMark}, // Mc [4] LIMBU VOWEL SIGN EE..LIMBU VOWEL SIGN AU
+ {0x1927, 0x1928, prExtend}, // Mn [2] LIMBU VOWEL SIGN E..LIMBU VOWEL SIGN O
+ {0x1929, 0x192B, prSpacingMark}, // Mc [3] LIMBU SUBJOINED LETTER YA..LIMBU SUBJOINED LETTER WA
+ {0x1930, 0x1931, prSpacingMark}, // Mc [2] LIMBU SMALL LETTER KA..LIMBU SMALL LETTER NGA
+ {0x1932, 0x1932, prExtend}, // Mn LIMBU SMALL LETTER ANUSVARA
+ {0x1933, 0x1938, prSpacingMark}, // Mc [6] LIMBU SMALL LETTER TA..LIMBU SMALL LETTER LA
+ {0x1939, 0x193B, prExtend}, // Mn [3] LIMBU SIGN MUKPHRENG..LIMBU SIGN SA-I
+ {0x1A17, 0x1A18, prExtend}, // Mn [2] BUGINESE VOWEL SIGN I..BUGINESE VOWEL SIGN U
+ {0x1A19, 0x1A1A, prSpacingMark}, // Mc [2] BUGINESE VOWEL SIGN E..BUGINESE VOWEL SIGN O
+ {0x1A1B, 0x1A1B, prExtend}, // Mn BUGINESE VOWEL SIGN AE
+ {0x1A55, 0x1A55, prSpacingMark}, // Mc TAI THAM CONSONANT SIGN MEDIAL RA
+ {0x1A56, 0x1A56, prExtend}, // Mn TAI THAM CONSONANT SIGN MEDIAL LA
+ {0x1A57, 0x1A57, prSpacingMark}, // Mc TAI THAM CONSONANT SIGN LA TANG LAI
+ {0x1A58, 0x1A5E, prExtend}, // Mn [7] TAI THAM SIGN MAI KANG LAI..TAI THAM CONSONANT SIGN SA
+ {0x1A60, 0x1A60, prExtend}, // Mn TAI THAM SIGN SAKOT
+ {0x1A62, 0x1A62, prExtend}, // Mn TAI THAM VOWEL SIGN MAI SAT
+ {0x1A65, 0x1A6C, prExtend}, // Mn [8] TAI THAM VOWEL SIGN I..TAI THAM VOWEL SIGN OA BELOW
+ {0x1A6D, 0x1A72, prSpacingMark}, // Mc [6] TAI THAM VOWEL SIGN OY..TAI THAM VOWEL SIGN THAM AI
+ {0x1A73, 0x1A7C, prExtend}, // Mn [10] TAI THAM VOWEL SIGN OA ABOVE..TAI THAM SIGN KHUEN-LUE KARAN
+ {0x1A7F, 0x1A7F, prExtend}, // Mn TAI THAM COMBINING CRYPTOGRAMMIC DOT
+ {0x1AB0, 0x1ABD, prExtend}, // Mn [14] COMBINING DOUBLED CIRCUMFLEX ACCENT..COMBINING PARENTHESES BELOW
+ {0x1ABE, 0x1ABE, prExtend}, // Me COMBINING PARENTHESES OVERLAY
+ {0x1ABF, 0x1ACE, prExtend}, // Mn [16] COMBINING LATIN SMALL LETTER W BELOW..COMBINING LATIN SMALL LETTER INSULAR T
+ {0x1B00, 0x1B03, prExtend}, // Mn [4] BALINESE SIGN ULU RICEM..BALINESE SIGN SURANG
+ {0x1B04, 0x1B04, prSpacingMark}, // Mc BALINESE SIGN BISAH
+ {0x1B34, 0x1B34, prExtend}, // Mn BALINESE SIGN REREKAN
+ {0x1B35, 0x1B35, prExtend}, // Mc BALINESE VOWEL SIGN TEDUNG
+ {0x1B36, 0x1B3A, prExtend}, // Mn [5] BALINESE VOWEL SIGN ULU..BALINESE VOWEL SIGN RA REPA
+ {0x1B3B, 0x1B3B, prSpacingMark}, // Mc BALINESE VOWEL SIGN RA REPA TEDUNG
+ {0x1B3C, 0x1B3C, prExtend}, // Mn BALINESE VOWEL SIGN LA LENGA
+ {0x1B3D, 0x1B41, prSpacingMark}, // Mc [5] BALINESE VOWEL SIGN LA LENGA TEDUNG..BALINESE VOWEL SIGN TALING REPA TEDUNG
+ {0x1B42, 0x1B42, prExtend}, // Mn BALINESE VOWEL SIGN PEPET
+ {0x1B43, 0x1B44, prSpacingMark}, // Mc [2] BALINESE VOWEL SIGN PEPET TEDUNG..BALINESE ADEG ADEG
+ {0x1B6B, 0x1B73, prExtend}, // Mn [9] BALINESE MUSICAL SYMBOL COMBINING TEGEH..BALINESE MUSICAL SYMBOL COMBINING GONG
+ {0x1B80, 0x1B81, prExtend}, // Mn [2] SUNDANESE SIGN PANYECEK..SUNDANESE SIGN PANGLAYAR
+ {0x1B82, 0x1B82, prSpacingMark}, // Mc SUNDANESE SIGN PANGWISAD
+ {0x1BA1, 0x1BA1, prSpacingMark}, // Mc SUNDANESE CONSONANT SIGN PAMINGKAL
+ {0x1BA2, 0x1BA5, prExtend}, // Mn [4] SUNDANESE CONSONANT SIGN PANYAKRA..SUNDANESE VOWEL SIGN PANYUKU
+ {0x1BA6, 0x1BA7, prSpacingMark}, // Mc [2] SUNDANESE VOWEL SIGN PANAELAENG..SUNDANESE VOWEL SIGN PANOLONG
+ {0x1BA8, 0x1BA9, prExtend}, // Mn [2] SUNDANESE VOWEL SIGN PAMEPET..SUNDANESE VOWEL SIGN PANEULEUNG
+ {0x1BAA, 0x1BAA, prSpacingMark}, // Mc SUNDANESE SIGN PAMAAEH
+ {0x1BAB, 0x1BAD, prExtend}, // Mn [3] SUNDANESE SIGN VIRAMA..SUNDANESE CONSONANT SIGN PASANGAN WA
+ {0x1BE6, 0x1BE6, prExtend}, // Mn BATAK SIGN TOMPI
+ {0x1BE7, 0x1BE7, prSpacingMark}, // Mc BATAK VOWEL SIGN E
+ {0x1BE8, 0x1BE9, prExtend}, // Mn [2] BATAK VOWEL SIGN PAKPAK E..BATAK VOWEL SIGN EE
+ {0x1BEA, 0x1BEC, prSpacingMark}, // Mc [3] BATAK VOWEL SIGN I..BATAK VOWEL SIGN O
+ {0x1BED, 0x1BED, prExtend}, // Mn BATAK VOWEL SIGN KARO O
+ {0x1BEE, 0x1BEE, prSpacingMark}, // Mc BATAK VOWEL SIGN U
+ {0x1BEF, 0x1BF1, prExtend}, // Mn [3] BATAK VOWEL SIGN U FOR SIMALUNGUN SA..BATAK CONSONANT SIGN H
+ {0x1BF2, 0x1BF3, prSpacingMark}, // Mc [2] BATAK PANGOLAT..BATAK PANONGONAN
+ {0x1C24, 0x1C2B, prSpacingMark}, // Mc [8] LEPCHA SUBJOINED LETTER YA..LEPCHA VOWEL SIGN UU
+ {0x1C2C, 0x1C33, prExtend}, // Mn [8] LEPCHA VOWEL SIGN E..LEPCHA CONSONANT SIGN T
+ {0x1C34, 0x1C35, prSpacingMark}, // Mc [2] LEPCHA CONSONANT SIGN NYIN-DO..LEPCHA CONSONANT SIGN KANG
+ {0x1C36, 0x1C37, prExtend}, // Mn [2] LEPCHA SIGN RAN..LEPCHA SIGN NUKTA
+ {0x1CD0, 0x1CD2, prExtend}, // Mn [3] VEDIC TONE KARSHANA..VEDIC TONE PRENKHA
+ {0x1CD4, 0x1CE0, prExtend}, // Mn [13] VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC TONE RIGVEDIC KASHMIRI INDEPENDENT SVARITA
+ {0x1CE1, 0x1CE1, prSpacingMark}, // Mc VEDIC TONE ATHARVAVEDIC INDEPENDENT SVARITA
+ {0x1CE2, 0x1CE8, prExtend}, // Mn [7] VEDIC SIGN VISARGA SVARITA..VEDIC SIGN VISARGA ANUDATTA WITH TAIL
+ {0x1CED, 0x1CED, prExtend}, // Mn VEDIC SIGN TIRYAK
+ {0x1CF4, 0x1CF4, prExtend}, // Mn VEDIC TONE CANDRA ABOVE
+ {0x1CF7, 0x1CF7, prSpacingMark}, // Mc VEDIC SIGN ATIKRAMA
+ {0x1CF8, 0x1CF9, prExtend}, // Mn [2] VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE
+ {0x1DC0, 0x1DFF, prExtend}, // Mn [64] COMBINING DOTTED GRAVE ACCENT..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW
+ {0x200B, 0x200B, prControl}, // Cf ZERO WIDTH SPACE
+ {0x200C, 0x200C, prExtend}, // Cf ZERO WIDTH NON-JOINER
+ {0x200D, 0x200D, prZWJ}, // Cf ZERO WIDTH JOINER
+ {0x200E, 0x200F, prControl}, // Cf [2] LEFT-TO-RIGHT MARK..RIGHT-TO-LEFT MARK
+ {0x2028, 0x2028, prControl}, // Zl LINE SEPARATOR
+ {0x2029, 0x2029, prControl}, // Zp PARAGRAPH SEPARATOR
+ {0x202A, 0x202E, prControl}, // Cf [5] LEFT-TO-RIGHT EMBEDDING..RIGHT-TO-LEFT OVERRIDE
+ {0x203C, 0x203C, prExtendedPictographic}, // E0.6 [1] (‼️) double exclamation mark
+ {0x2049, 0x2049, prExtendedPictographic}, // E0.6 [1] (⁉️) exclamation question mark
+ {0x2060, 0x2064, prControl}, // Cf [5] WORD JOINER..INVISIBLE PLUS
+	{0x2065, 0x2065, prControl}, // Cn <reserved-2065>
+ {0x2066, 0x206F, prControl}, // Cf [10] LEFT-TO-RIGHT ISOLATE..NOMINAL DIGIT SHAPES
+ {0x20D0, 0x20DC, prExtend}, // Mn [13] COMBINING LEFT HARPOON ABOVE..COMBINING FOUR DOTS ABOVE
+ {0x20DD, 0x20E0, prExtend}, // Me [4] COMBINING ENCLOSING CIRCLE..COMBINING ENCLOSING CIRCLE BACKSLASH
+ {0x20E1, 0x20E1, prExtend}, // Mn COMBINING LEFT RIGHT ARROW ABOVE
+ {0x20E2, 0x20E4, prExtend}, // Me [3] COMBINING ENCLOSING SCREEN..COMBINING ENCLOSING UPWARD POINTING TRIANGLE
+ {0x20E5, 0x20F0, prExtend}, // Mn [12] COMBINING REVERSE SOLIDUS OVERLAY..COMBINING ASTERISK ABOVE
+ {0x2122, 0x2122, prExtendedPictographic}, // E0.6 [1] (™️) trade mark
+ {0x2139, 0x2139, prExtendedPictographic}, // E0.6 [1] (ℹ️) information
+ {0x2194, 0x2199, prExtendedPictographic}, // E0.6 [6] (↔️..↙️) left-right arrow..down-left arrow
+ {0x21A9, 0x21AA, prExtendedPictographic}, // E0.6 [2] (↩️..↪️) right arrow curving left..left arrow curving right
+ {0x231A, 0x231B, prExtendedPictographic}, // E0.6 [2] (⌚..⌛) watch..hourglass done
+ {0x2328, 0x2328, prExtendedPictographic}, // E1.0 [1] (⌨️) keyboard
+ {0x2388, 0x2388, prExtendedPictographic}, // E0.0 [1] (⎈) HELM SYMBOL
+ {0x23CF, 0x23CF, prExtendedPictographic}, // E1.0 [1] (⏏️) eject button
+ {0x23E9, 0x23EC, prExtendedPictographic}, // E0.6 [4] (⏩..⏬) fast-forward button..fast down button
+ {0x23ED, 0x23EE, prExtendedPictographic}, // E0.7 [2] (⏭️..⏮️) next track button..last track button
+ {0x23EF, 0x23EF, prExtendedPictographic}, // E1.0 [1] (⏯️) play or pause button
+ {0x23F0, 0x23F0, prExtendedPictographic}, // E0.6 [1] (⏰) alarm clock
+ {0x23F1, 0x23F2, prExtendedPictographic}, // E1.0 [2] (⏱️..⏲️) stopwatch..timer clock
+ {0x23F3, 0x23F3, prExtendedPictographic}, // E0.6 [1] (⏳) hourglass not done
+ {0x23F8, 0x23FA, prExtendedPictographic}, // E0.7 [3] (⏸️..⏺️) pause button..record button
+ {0x24C2, 0x24C2, prExtendedPictographic}, // E0.6 [1] (Ⓜ️) circled M
+ {0x25AA, 0x25AB, prExtendedPictographic}, // E0.6 [2] (▪️..▫️) black small square..white small square
+ {0x25B6, 0x25B6, prExtendedPictographic}, // E0.6 [1] (▶️) play button
+ {0x25C0, 0x25C0, prExtendedPictographic}, // E0.6 [1] (◀️) reverse button
+ {0x25FB, 0x25FE, prExtendedPictographic}, // E0.6 [4] (◻️..◾) white medium square..black medium-small square
+ {0x2600, 0x2601, prExtendedPictographic}, // E0.6 [2] (☀️..☁️) sun..cloud
+ {0x2602, 0x2603, prExtendedPictographic}, // E0.7 [2] (☂️..☃️) umbrella..snowman
+ {0x2604, 0x2604, prExtendedPictographic}, // E1.0 [1] (☄️) comet
+ {0x2605, 0x2605, prExtendedPictographic}, // E0.0 [1] (★) BLACK STAR
+ {0x2607, 0x260D, prExtendedPictographic}, // E0.0 [7] (☇..☍) LIGHTNING..OPPOSITION
+ {0x260E, 0x260E, prExtendedPictographic}, // E0.6 [1] (☎️) telephone
+ {0x260F, 0x2610, prExtendedPictographic}, // E0.0 [2] (☏..☐) WHITE TELEPHONE..BALLOT BOX
+ {0x2611, 0x2611, prExtendedPictographic}, // E0.6 [1] (☑️) check box with check
+ {0x2612, 0x2612, prExtendedPictographic}, // E0.0 [1] (☒) BALLOT BOX WITH X
+ {0x2614, 0x2615, prExtendedPictographic}, // E0.6 [2] (☔..☕) umbrella with rain drops..hot beverage
+ {0x2616, 0x2617, prExtendedPictographic}, // E0.0 [2] (☖..☗) WHITE SHOGI PIECE..BLACK SHOGI PIECE
+ {0x2618, 0x2618, prExtendedPictographic}, // E1.0 [1] (☘️) shamrock
+ {0x2619, 0x261C, prExtendedPictographic}, // E0.0 [4] (☙..☜) REVERSED ROTATED FLORAL HEART BULLET..WHITE LEFT POINTING INDEX
+ {0x261D, 0x261D, prExtendedPictographic}, // E0.6 [1] (☝️) index pointing up
+ {0x261E, 0x261F, prExtendedPictographic}, // E0.0 [2] (☞..☟) WHITE RIGHT POINTING INDEX..WHITE DOWN POINTING INDEX
+ {0x2620, 0x2620, prExtendedPictographic}, // E1.0 [1] (☠️) skull and crossbones
+ {0x2621, 0x2621, prExtendedPictographic}, // E0.0 [1] (☡) CAUTION SIGN
+ {0x2622, 0x2623, prExtendedPictographic}, // E1.0 [2] (☢️..☣️) radioactive..biohazard
+ {0x2624, 0x2625, prExtendedPictographic}, // E0.0 [2] (☤..☥) CADUCEUS..ANKH
+ {0x2626, 0x2626, prExtendedPictographic}, // E1.0 [1] (☦️) orthodox cross
+ {0x2627, 0x2629, prExtendedPictographic}, // E0.0 [3] (☧..☩) CHI RHO..CROSS OF JERUSALEM
+ {0x262A, 0x262A, prExtendedPictographic}, // E0.7 [1] (☪️) star and crescent
+ {0x262B, 0x262D, prExtendedPictographic}, // E0.0 [3] (☫..☭) FARSI SYMBOL..HAMMER AND SICKLE
+ {0x262E, 0x262E, prExtendedPictographic}, // E1.0 [1] (☮️) peace symbol
+ {0x262F, 0x262F, prExtendedPictographic}, // E0.7 [1] (☯️) yin yang
+ {0x2630, 0x2637, prExtendedPictographic}, // E0.0 [8] (☰..☷) TRIGRAM FOR HEAVEN..TRIGRAM FOR EARTH
+ {0x2638, 0x2639, prExtendedPictographic}, // E0.7 [2] (☸️..☹️) wheel of dharma..frowning face
+ {0x263A, 0x263A, prExtendedPictographic}, // E0.6 [1] (☺️) smiling face
+ {0x263B, 0x263F, prExtendedPictographic}, // E0.0 [5] (☻..☿) BLACK SMILING FACE..MERCURY
+ {0x2640, 0x2640, prExtendedPictographic}, // E4.0 [1] (♀️) female sign
+ {0x2641, 0x2641, prExtendedPictographic}, // E0.0 [1] (♁) EARTH
+ {0x2642, 0x2642, prExtendedPictographic}, // E4.0 [1] (♂️) male sign
+ {0x2643, 0x2647, prExtendedPictographic}, // E0.0 [5] (♃..♇) JUPITER..PLUTO
+ {0x2648, 0x2653, prExtendedPictographic}, // E0.6 [12] (♈..♓) Aries..Pisces
+ {0x2654, 0x265E, prExtendedPictographic}, // E0.0 [11] (♔..♞) WHITE CHESS KING..BLACK CHESS KNIGHT
+ {0x265F, 0x265F, prExtendedPictographic}, // E11.0 [1] (♟️) chess pawn
+ {0x2660, 0x2660, prExtendedPictographic}, // E0.6 [1] (♠️) spade suit
+ {0x2661, 0x2662, prExtendedPictographic}, // E0.0 [2] (♡..♢) WHITE HEART SUIT..WHITE DIAMOND SUIT
+ {0x2663, 0x2663, prExtendedPictographic}, // E0.6 [1] (♣️) club suit
+ {0x2664, 0x2664, prExtendedPictographic}, // E0.0 [1] (♤) WHITE SPADE SUIT
+ {0x2665, 0x2666, prExtendedPictographic}, // E0.6 [2] (♥️..♦️) heart suit..diamond suit
+ {0x2667, 0x2667, prExtendedPictographic}, // E0.0 [1] (♧) WHITE CLUB SUIT
+ {0x2668, 0x2668, prExtendedPictographic}, // E0.6 [1] (♨️) hot springs
+ {0x2669, 0x267A, prExtendedPictographic}, // E0.0 [18] (♩..♺) QUARTER NOTE..RECYCLING SYMBOL FOR GENERIC MATERIALS
+ {0x267B, 0x267B, prExtendedPictographic}, // E0.6 [1] (♻️) recycling symbol
+ {0x267C, 0x267D, prExtendedPictographic}, // E0.0 [2] (♼..♽) RECYCLED PAPER SYMBOL..PARTIALLY-RECYCLED PAPER SYMBOL
+ {0x267E, 0x267E, prExtendedPictographic}, // E11.0 [1] (♾️) infinity
+ {0x267F, 0x267F, prExtendedPictographic}, // E0.6 [1] (♿) wheelchair symbol
+ {0x2680, 0x2685, prExtendedPictographic}, // E0.0 [6] (⚀..⚅) DIE FACE-1..DIE FACE-6
+ {0x2690, 0x2691, prExtendedPictographic}, // E0.0 [2] (⚐..⚑) WHITE FLAG..BLACK FLAG
+ {0x2692, 0x2692, prExtendedPictographic}, // E1.0 [1] (⚒️) hammer and pick
+ {0x2693, 0x2693, prExtendedPictographic}, // E0.6 [1] (⚓) anchor
+ {0x2694, 0x2694, prExtendedPictographic}, // E1.0 [1] (⚔️) crossed swords
+ {0x2695, 0x2695, prExtendedPictographic}, // E4.0 [1] (⚕️) medical symbol
+ {0x2696, 0x2697, prExtendedPictographic}, // E1.0 [2] (⚖️..⚗️) balance scale..alembic
+ {0x2698, 0x2698, prExtendedPictographic}, // E0.0 [1] (⚘) FLOWER
+ {0x2699, 0x2699, prExtendedPictographic}, // E1.0 [1] (⚙️) gear
+ {0x269A, 0x269A, prExtendedPictographic}, // E0.0 [1] (⚚) STAFF OF HERMES
+ {0x269B, 0x269C, prExtendedPictographic}, // E1.0 [2] (⚛️..⚜️) atom symbol..fleur-de-lis
+ {0x269D, 0x269F, prExtendedPictographic}, // E0.0 [3] (⚝..⚟) OUTLINED WHITE STAR..THREE LINES CONVERGING LEFT
+ {0x26A0, 0x26A1, prExtendedPictographic}, // E0.6 [2] (⚠️..⚡) warning..high voltage
+ {0x26A2, 0x26A6, prExtendedPictographic}, // E0.0 [5] (⚢..⚦) DOUBLED FEMALE SIGN..MALE WITH STROKE SIGN
+ {0x26A7, 0x26A7, prExtendedPictographic}, // E13.0 [1] (⚧️) transgender symbol
+ {0x26A8, 0x26A9, prExtendedPictographic}, // E0.0 [2] (⚨..⚩) VERTICAL MALE WITH STROKE SIGN..HORIZONTAL MALE WITH STROKE SIGN
+ {0x26AA, 0x26AB, prExtendedPictographic}, // E0.6 [2] (⚪..⚫) white circle..black circle
+ {0x26AC, 0x26AF, prExtendedPictographic}, // E0.0 [4] (⚬..⚯) MEDIUM SMALL WHITE CIRCLE..UNMARRIED PARTNERSHIP SYMBOL
+ {0x26B0, 0x26B1, prExtendedPictographic}, // E1.0 [2] (⚰️..⚱️) coffin..funeral urn
+ {0x26B2, 0x26BC, prExtendedPictographic}, // E0.0 [11] (⚲..⚼) NEUTER..SESQUIQUADRATE
+ {0x26BD, 0x26BE, prExtendedPictographic}, // E0.6 [2] (⚽..⚾) soccer ball..baseball
+ {0x26BF, 0x26C3, prExtendedPictographic}, // E0.0 [5] (⚿..⛃) SQUARED KEY..BLACK DRAUGHTS KING
+ {0x26C4, 0x26C5, prExtendedPictographic}, // E0.6 [2] (⛄..⛅) snowman without snow..sun behind cloud
+ {0x26C6, 0x26C7, prExtendedPictographic}, // E0.0 [2] (⛆..⛇) RAIN..BLACK SNOWMAN
+ {0x26C8, 0x26C8, prExtendedPictographic}, // E0.7 [1] (⛈️) cloud with lightning and rain
+ {0x26C9, 0x26CD, prExtendedPictographic}, // E0.0 [5] (⛉..⛍) TURNED WHITE SHOGI PIECE..DISABLED CAR
+ {0x26CE, 0x26CE, prExtendedPictographic}, // E0.6 [1] (⛎) Ophiuchus
+ {0x26CF, 0x26CF, prExtendedPictographic}, // E0.7 [1] (⛏️) pick
+ {0x26D0, 0x26D0, prExtendedPictographic}, // E0.0 [1] (⛐) CAR SLIDING
+ {0x26D1, 0x26D1, prExtendedPictographic}, // E0.7 [1] (⛑️) rescue worker’s helmet
+ {0x26D2, 0x26D2, prExtendedPictographic}, // E0.0 [1] (⛒) CIRCLED CROSSING LANES
+ {0x26D3, 0x26D3, prExtendedPictographic}, // E0.7 [1] (⛓️) chains
+ {0x26D4, 0x26D4, prExtendedPictographic}, // E0.6 [1] (⛔) no entry
+ {0x26D5, 0x26E8, prExtendedPictographic}, // E0.0 [20] (⛕..⛨) ALTERNATE ONE-WAY LEFT WAY TRAFFIC..BLACK CROSS ON SHIELD
+ {0x26E9, 0x26E9, prExtendedPictographic}, // E0.7 [1] (⛩️) shinto shrine
+ {0x26EA, 0x26EA, prExtendedPictographic}, // E0.6 [1] (⛪) church
+ {0x26EB, 0x26EF, prExtendedPictographic}, // E0.0 [5] (⛫..⛯) CASTLE..MAP SYMBOL FOR LIGHTHOUSE
+ {0x26F0, 0x26F1, prExtendedPictographic}, // E0.7 [2] (⛰️..⛱️) mountain..umbrella on ground
+ {0x26F2, 0x26F3, prExtendedPictographic}, // E0.6 [2] (⛲..⛳) fountain..flag in hole
+ {0x26F4, 0x26F4, prExtendedPictographic}, // E0.7 [1] (⛴️) ferry
+ {0x26F5, 0x26F5, prExtendedPictographic}, // E0.6 [1] (⛵) sailboat
+ {0x26F6, 0x26F6, prExtendedPictographic}, // E0.0 [1] (⛶) SQUARE FOUR CORNERS
+ {0x26F7, 0x26F9, prExtendedPictographic}, // E0.7 [3] (⛷️..⛹️) skier..person bouncing ball
+ {0x26FA, 0x26FA, prExtendedPictographic}, // E0.6 [1] (⛺) tent
+ {0x26FB, 0x26FC, prExtendedPictographic}, // E0.0 [2] (⛻..⛼) JAPANESE BANK SYMBOL..HEADSTONE GRAVEYARD SYMBOL
+ {0x26FD, 0x26FD, prExtendedPictographic}, // E0.6 [1] (⛽) fuel pump
+ {0x26FE, 0x2701, prExtendedPictographic}, // E0.0 [4] (⛾..✁) CUP ON BLACK SQUARE..UPPER BLADE SCISSORS
+ {0x2702, 0x2702, prExtendedPictographic}, // E0.6 [1] (✂️) scissors
+ {0x2703, 0x2704, prExtendedPictographic}, // E0.0 [2] (✃..✄) LOWER BLADE SCISSORS..WHITE SCISSORS
+ {0x2705, 0x2705, prExtendedPictographic}, // E0.6 [1] (✅) check mark button
+ {0x2708, 0x270C, prExtendedPictographic}, // E0.6 [5] (✈️..✌️) airplane..victory hand
+ {0x270D, 0x270D, prExtendedPictographic}, // E0.7 [1] (✍️) writing hand
+ {0x270E, 0x270E, prExtendedPictographic}, // E0.0 [1] (✎) LOWER RIGHT PENCIL
+ {0x270F, 0x270F, prExtendedPictographic}, // E0.6 [1] (✏️) pencil
+ {0x2710, 0x2711, prExtendedPictographic}, // E0.0 [2] (✐..✑) UPPER RIGHT PENCIL..WHITE NIB
+ {0x2712, 0x2712, prExtendedPictographic}, // E0.6 [1] (✒️) black nib
+ {0x2714, 0x2714, prExtendedPictographic}, // E0.6 [1] (✔️) check mark
+ {0x2716, 0x2716, prExtendedPictographic}, // E0.6 [1] (✖️) multiply
+ {0x271D, 0x271D, prExtendedPictographic}, // E0.7 [1] (✝️) latin cross
+ {0x2721, 0x2721, prExtendedPictographic}, // E0.7 [1] (✡️) star of David
+ {0x2728, 0x2728, prExtendedPictographic}, // E0.6 [1] (✨) sparkles
+ {0x2733, 0x2734, prExtendedPictographic}, // E0.6 [2] (✳️..✴️) eight-spoked asterisk..eight-pointed star
+ {0x2744, 0x2744, prExtendedPictographic}, // E0.6 [1] (❄️) snowflake
+ {0x2747, 0x2747, prExtendedPictographic}, // E0.6 [1] (❇️) sparkle
+ {0x274C, 0x274C, prExtendedPictographic}, // E0.6 [1] (❌) cross mark
+ {0x274E, 0x274E, prExtendedPictographic}, // E0.6 [1] (❎) cross mark button
+ {0x2753, 0x2755, prExtendedPictographic}, // E0.6 [3] (❓..❕) red question mark..white exclamation mark
+ {0x2757, 0x2757, prExtendedPictographic}, // E0.6 [1] (❗) red exclamation mark
+ {0x2763, 0x2763, prExtendedPictographic}, // E1.0 [1] (❣️) heart exclamation
+ {0x2764, 0x2764, prExtendedPictographic}, // E0.6 [1] (❤️) red heart
+ {0x2765, 0x2767, prExtendedPictographic}, // E0.0 [3] (❥..❧) ROTATED HEAVY BLACK HEART BULLET..ROTATED FLORAL HEART BULLET
+ {0x2795, 0x2797, prExtendedPictographic}, // E0.6 [3] (➕..➗) plus..divide
+ {0x27A1, 0x27A1, prExtendedPictographic}, // E0.6 [1] (➡️) right arrow
+ {0x27B0, 0x27B0, prExtendedPictographic}, // E0.6 [1] (➰) curly loop
+ {0x27BF, 0x27BF, prExtendedPictographic}, // E1.0 [1] (➿) double curly loop
+ {0x2934, 0x2935, prExtendedPictographic}, // E0.6 [2] (⤴️..⤵️) right arrow curving up..right arrow curving down
+ {0x2B05, 0x2B07, prExtendedPictographic}, // E0.6 [3] (⬅️..⬇️) left arrow..down arrow
+ {0x2B1B, 0x2B1C, prExtendedPictographic}, // E0.6 [2] (⬛..⬜) black large square..white large square
+ {0x2B50, 0x2B50, prExtendedPictographic}, // E0.6 [1] (⭐) star
+ {0x2B55, 0x2B55, prExtendedPictographic}, // E0.6 [1] (⭕) hollow red circle
+ {0x2CEF, 0x2CF1, prExtend}, // Mn [3] COPTIC COMBINING NI ABOVE..COPTIC COMBINING SPIRITUS LENIS
+ {0x2D7F, 0x2D7F, prExtend}, // Mn TIFINAGH CONSONANT JOINER
+ {0x2DE0, 0x2DFF, prExtend}, // Mn [32] COMBINING CYRILLIC LETTER BE..COMBINING CYRILLIC LETTER IOTIFIED BIG YUS
+ {0x302A, 0x302D, prExtend}, // Mn [4] IDEOGRAPHIC LEVEL TONE MARK..IDEOGRAPHIC ENTERING TONE MARK
+ {0x302E, 0x302F, prExtend}, // Mc [2] HANGUL SINGLE DOT TONE MARK..HANGUL DOUBLE DOT TONE MARK
+ {0x3030, 0x3030, prExtendedPictographic}, // E0.6 [1] (〰️) wavy dash
+ {0x303D, 0x303D, prExtendedPictographic}, // E0.6 [1] (〽️) part alternation mark
+ {0x3099, 0x309A, prExtend}, // Mn [2] COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK..COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x3297, 0x3297, prExtendedPictographic}, // E0.6 [1] (㊗️) Japanese “congratulations” button
+ {0x3299, 0x3299, prExtendedPictographic}, // E0.6 [1] (㊙️) Japanese “secret” button
+ {0xA66F, 0xA66F, prExtend}, // Mn COMBINING CYRILLIC VZMET
+ {0xA670, 0xA672, prExtend}, // Me [3] COMBINING CYRILLIC TEN MILLIONS SIGN..COMBINING CYRILLIC THOUSAND MILLIONS SIGN
+ {0xA674, 0xA67D, prExtend}, // Mn [10] COMBINING CYRILLIC LETTER UKRAINIAN IE..COMBINING CYRILLIC PAYEROK
+ {0xA69E, 0xA69F, prExtend}, // Mn [2] COMBINING CYRILLIC LETTER EF..COMBINING CYRILLIC LETTER IOTIFIED E
+ {0xA6F0, 0xA6F1, prExtend}, // Mn [2] BAMUM COMBINING MARK KOQNDON..BAMUM COMBINING MARK TUKWENTIS
+ {0xA802, 0xA802, prExtend}, // Mn SYLOTI NAGRI SIGN DVISVARA
+ {0xA806, 0xA806, prExtend}, // Mn SYLOTI NAGRI SIGN HASANTA
+ {0xA80B, 0xA80B, prExtend}, // Mn SYLOTI NAGRI SIGN ANUSVARA
+ {0xA823, 0xA824, prSpacingMark}, // Mc [2] SYLOTI NAGRI VOWEL SIGN A..SYLOTI NAGRI VOWEL SIGN I
+ {0xA825, 0xA826, prExtend}, // Mn [2] SYLOTI NAGRI VOWEL SIGN U..SYLOTI NAGRI VOWEL SIGN E
+ {0xA827, 0xA827, prSpacingMark}, // Mc SYLOTI NAGRI VOWEL SIGN OO
+ {0xA82C, 0xA82C, prExtend}, // Mn SYLOTI NAGRI SIGN ALTERNATE HASANTA
+ {0xA880, 0xA881, prSpacingMark}, // Mc [2] SAURASHTRA SIGN ANUSVARA..SAURASHTRA SIGN VISARGA
+ {0xA8B4, 0xA8C3, prSpacingMark}, // Mc [16] SAURASHTRA CONSONANT SIGN HAARU..SAURASHTRA VOWEL SIGN AU
+ {0xA8C4, 0xA8C5, prExtend}, // Mn [2] SAURASHTRA SIGN VIRAMA..SAURASHTRA SIGN CANDRABINDU
+ {0xA8E0, 0xA8F1, prExtend}, // Mn [18] COMBINING DEVANAGARI DIGIT ZERO..COMBINING DEVANAGARI SIGN AVAGRAHA
+ {0xA8FF, 0xA8FF, prExtend}, // Mn DEVANAGARI VOWEL SIGN AY
+ {0xA926, 0xA92D, prExtend}, // Mn [8] KAYAH LI VOWEL UE..KAYAH LI TONE CALYA PLOPHU
+ {0xA947, 0xA951, prExtend}, // Mn [11] REJANG VOWEL SIGN I..REJANG CONSONANT SIGN R
+ {0xA952, 0xA953, prSpacingMark}, // Mc [2] REJANG CONSONANT SIGN H..REJANG VIRAMA
+ {0xA960, 0xA97C, prL}, // Lo [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANGUL CHOSEONG SSANGYEORINHIEUH
+ {0xA980, 0xA982, prExtend}, // Mn [3] JAVANESE SIGN PANYANGGA..JAVANESE SIGN LAYAR
+ {0xA983, 0xA983, prSpacingMark}, // Mc JAVANESE SIGN WIGNYAN
+ {0xA9B3, 0xA9B3, prExtend}, // Mn JAVANESE SIGN CECAK TELU
+ {0xA9B4, 0xA9B5, prSpacingMark}, // Mc [2] JAVANESE VOWEL SIGN TARUNG..JAVANESE VOWEL SIGN TOLONG
+ {0xA9B6, 0xA9B9, prExtend}, // Mn [4] JAVANESE VOWEL SIGN WULU..JAVANESE VOWEL SIGN SUKU MENDUT
+ {0xA9BA, 0xA9BB, prSpacingMark}, // Mc [2] JAVANESE VOWEL SIGN TALING..JAVANESE VOWEL SIGN DIRGA MURE
+ {0xA9BC, 0xA9BD, prExtend}, // Mn [2] JAVANESE VOWEL SIGN PEPET..JAVANESE CONSONANT SIGN KERET
+ {0xA9BE, 0xA9C0, prSpacingMark}, // Mc [3] JAVANESE CONSONANT SIGN PENGKAL..JAVANESE PANGKON
+ {0xA9E5, 0xA9E5, prExtend}, // Mn MYANMAR SIGN SHAN SAW
+ {0xAA29, 0xAA2E, prExtend}, // Mn [6] CHAM VOWEL SIGN AA..CHAM VOWEL SIGN OE
+ {0xAA2F, 0xAA30, prSpacingMark}, // Mc [2] CHAM VOWEL SIGN O..CHAM VOWEL SIGN AI
+ {0xAA31, 0xAA32, prExtend}, // Mn [2] CHAM VOWEL SIGN AU..CHAM VOWEL SIGN UE
+ {0xAA33, 0xAA34, prSpacingMark}, // Mc [2] CHAM CONSONANT SIGN YA..CHAM CONSONANT SIGN RA
+ {0xAA35, 0xAA36, prExtend}, // Mn [2] CHAM CONSONANT SIGN LA..CHAM CONSONANT SIGN WA
+ {0xAA43, 0xAA43, prExtend}, // Mn CHAM CONSONANT SIGN FINAL NG
+ {0xAA4C, 0xAA4C, prExtend}, // Mn CHAM CONSONANT SIGN FINAL M
+ {0xAA4D, 0xAA4D, prSpacingMark}, // Mc CHAM CONSONANT SIGN FINAL H
+ {0xAA7C, 0xAA7C, prExtend}, // Mn MYANMAR SIGN TAI LAING TONE-2
+ {0xAAB0, 0xAAB0, prExtend}, // Mn TAI VIET MAI KANG
+ {0xAAB2, 0xAAB4, prExtend}, // Mn [3] TAI VIET VOWEL I..TAI VIET VOWEL U
+ {0xAAB7, 0xAAB8, prExtend}, // Mn [2] TAI VIET MAI KHIT..TAI VIET VOWEL IA
+ {0xAABE, 0xAABF, prExtend}, // Mn [2] TAI VIET VOWEL AM..TAI VIET TONE MAI EK
+ {0xAAC1, 0xAAC1, prExtend}, // Mn TAI VIET TONE MAI THO
+ {0xAAEB, 0xAAEB, prSpacingMark}, // Mc MEETEI MAYEK VOWEL SIGN II
+ {0xAAEC, 0xAAED, prExtend}, // Mn [2] MEETEI MAYEK VOWEL SIGN UU..MEETEI MAYEK VOWEL SIGN AAI
+ {0xAAEE, 0xAAEF, prSpacingMark}, // Mc [2] MEETEI MAYEK VOWEL SIGN AU..MEETEI MAYEK VOWEL SIGN AAU
+ {0xAAF5, 0xAAF5, prSpacingMark}, // Mc MEETEI MAYEK VOWEL SIGN VISARGA
+ {0xAAF6, 0xAAF6, prExtend}, // Mn MEETEI MAYEK VIRAMA
+ {0xABE3, 0xABE4, prSpacingMark}, // Mc [2] MEETEI MAYEK VOWEL SIGN ONAP..MEETEI MAYEK VOWEL SIGN INAP
+ {0xABE5, 0xABE5, prExtend}, // Mn MEETEI MAYEK VOWEL SIGN ANAP
+ {0xABE6, 0xABE7, prSpacingMark}, // Mc [2] MEETEI MAYEK VOWEL SIGN YENAP..MEETEI MAYEK VOWEL SIGN SOUNAP
+ {0xABE8, 0xABE8, prExtend}, // Mn MEETEI MAYEK VOWEL SIGN UNAP
+ {0xABE9, 0xABEA, prSpacingMark}, // Mc [2] MEETEI MAYEK VOWEL SIGN CHEINAP..MEETEI MAYEK VOWEL SIGN NUNG
+ {0xABEC, 0xABEC, prSpacingMark}, // Mc MEETEI MAYEK LUM IYEK
+ {0xABED, 0xABED, prExtend}, // Mn MEETEI MAYEK APUN IYEK
+ {0xAC00, 0xAC00, prLV}, // Lo HANGUL SYLLABLE GA
+ {0xAC01, 0xAC1B, prLVT}, // Lo [27] HANGUL SYLLABLE GAG..HANGUL SYLLABLE GAH
+ {0xAC1C, 0xAC1C, prLV}, // Lo HANGUL SYLLABLE GAE
+ {0xAC1D, 0xAC37, prLVT}, // Lo [27] HANGUL SYLLABLE GAEG..HANGUL SYLLABLE GAEH
+ {0xAC38, 0xAC38, prLV}, // Lo HANGUL SYLLABLE GYA
+ {0xAC39, 0xAC53, prLVT}, // Lo [27] HANGUL SYLLABLE GYAG..HANGUL SYLLABLE GYAH
+ {0xAC54, 0xAC54, prLV}, // Lo HANGUL SYLLABLE GYAE
+ {0xAC55, 0xAC6F, prLVT}, // Lo [27] HANGUL SYLLABLE GYAEG..HANGUL SYLLABLE GYAEH
+ {0xAC70, 0xAC70, prLV}, // Lo HANGUL SYLLABLE GEO
+ {0xAC71, 0xAC8B, prLVT}, // Lo [27] HANGUL SYLLABLE GEOG..HANGUL SYLLABLE GEOH
+ {0xAC8C, 0xAC8C, prLV}, // Lo HANGUL SYLLABLE GE
+ {0xAC8D, 0xACA7, prLVT}, // Lo [27] HANGUL SYLLABLE GEG..HANGUL SYLLABLE GEH
+ {0xACA8, 0xACA8, prLV}, // Lo HANGUL SYLLABLE GYEO
+ {0xACA9, 0xACC3, prLVT}, // Lo [27] HANGUL SYLLABLE GYEOG..HANGUL SYLLABLE GYEOH
+ {0xACC4, 0xACC4, prLV}, // Lo HANGUL SYLLABLE GYE
+ {0xACC5, 0xACDF, prLVT}, // Lo [27] HANGUL SYLLABLE GYEG..HANGUL SYLLABLE GYEH
+ {0xACE0, 0xACE0, prLV}, // Lo HANGUL SYLLABLE GO
+ {0xACE1, 0xACFB, prLVT}, // Lo [27] HANGUL SYLLABLE GOG..HANGUL SYLLABLE GOH
+ {0xACFC, 0xACFC, prLV}, // Lo HANGUL SYLLABLE GWA
+ {0xACFD, 0xAD17, prLVT}, // Lo [27] HANGUL SYLLABLE GWAG..HANGUL SYLLABLE GWAH
+ {0xAD18, 0xAD18, prLV}, // Lo HANGUL SYLLABLE GWAE
+ {0xAD19, 0xAD33, prLVT}, // Lo [27] HANGUL SYLLABLE GWAEG..HANGUL SYLLABLE GWAEH
+ {0xAD34, 0xAD34, prLV}, // Lo HANGUL SYLLABLE GOE
+ {0xAD35, 0xAD4F, prLVT}, // Lo [27] HANGUL SYLLABLE GOEG..HANGUL SYLLABLE GOEH
+ {0xAD50, 0xAD50, prLV}, // Lo HANGUL SYLLABLE GYO
+ {0xAD51, 0xAD6B, prLVT}, // Lo [27] HANGUL SYLLABLE GYOG..HANGUL SYLLABLE GYOH
+ {0xAD6C, 0xAD6C, prLV}, // Lo HANGUL SYLLABLE GU
+ {0xAD6D, 0xAD87, prLVT}, // Lo [27] HANGUL SYLLABLE GUG..HANGUL SYLLABLE GUH
+ {0xAD88, 0xAD88, prLV}, // Lo HANGUL SYLLABLE GWEO
+ {0xAD89, 0xADA3, prLVT}, // Lo [27] HANGUL SYLLABLE GWEOG..HANGUL SYLLABLE GWEOH
+ {0xADA4, 0xADA4, prLV}, // Lo HANGUL SYLLABLE GWE
+ {0xADA5, 0xADBF, prLVT}, // Lo [27] HANGUL SYLLABLE GWEG..HANGUL SYLLABLE GWEH
+ {0xADC0, 0xADC0, prLV}, // Lo HANGUL SYLLABLE GWI
+ {0xADC1, 0xADDB, prLVT}, // Lo [27] HANGUL SYLLABLE GWIG..HANGUL SYLLABLE GWIH
+ {0xADDC, 0xADDC, prLV}, // Lo HANGUL SYLLABLE GYU
+ {0xADDD, 0xADF7, prLVT}, // Lo [27] HANGUL SYLLABLE GYUG..HANGUL SYLLABLE GYUH
+ {0xADF8, 0xADF8, prLV}, // Lo HANGUL SYLLABLE GEU
+ {0xADF9, 0xAE13, prLVT}, // Lo [27] HANGUL SYLLABLE GEUG..HANGUL SYLLABLE GEUH
+ {0xAE14, 0xAE14, prLV}, // Lo HANGUL SYLLABLE GYI
+ {0xAE15, 0xAE2F, prLVT}, // Lo [27] HANGUL SYLLABLE GYIG..HANGUL SYLLABLE GYIH
+ {0xAE30, 0xAE30, prLV}, // Lo HANGUL SYLLABLE GI
+ {0xAE31, 0xAE4B, prLVT}, // Lo [27] HANGUL SYLLABLE GIG..HANGUL SYLLABLE GIH
+ {0xAE4C, 0xAE4C, prLV}, // Lo HANGUL SYLLABLE GGA
+ {0xAE4D, 0xAE67, prLVT}, // Lo [27] HANGUL SYLLABLE GGAG..HANGUL SYLLABLE GGAH
+ {0xAE68, 0xAE68, prLV}, // Lo HANGUL SYLLABLE GGAE
+ {0xAE69, 0xAE83, prLVT}, // Lo [27] HANGUL SYLLABLE GGAEG..HANGUL SYLLABLE GGAEH
+ {0xAE84, 0xAE84, prLV}, // Lo HANGUL SYLLABLE GGYA
+ {0xAE85, 0xAE9F, prLVT}, // Lo [27] HANGUL SYLLABLE GGYAG..HANGUL SYLLABLE GGYAH
+ {0xAEA0, 0xAEA0, prLV}, // Lo HANGUL SYLLABLE GGYAE
+ {0xAEA1, 0xAEBB, prLVT}, // Lo [27] HANGUL SYLLABLE GGYAEG..HANGUL SYLLABLE GGYAEH
+ {0xAEBC, 0xAEBC, prLV}, // Lo HANGUL SYLLABLE GGEO
+ {0xAEBD, 0xAED7, prLVT}, // Lo [27] HANGUL SYLLABLE GGEOG..HANGUL SYLLABLE GGEOH
+ {0xAED8, 0xAED8, prLV}, // Lo HANGUL SYLLABLE GGE
+ {0xAED9, 0xAEF3, prLVT}, // Lo [27] HANGUL SYLLABLE GGEG..HANGUL SYLLABLE GGEH
+ {0xAEF4, 0xAEF4, prLV}, // Lo HANGUL SYLLABLE GGYEO
+ {0xAEF5, 0xAF0F, prLVT}, // Lo [27] HANGUL SYLLABLE GGYEOG..HANGUL SYLLABLE GGYEOH
+ {0xAF10, 0xAF10, prLV}, // Lo HANGUL SYLLABLE GGYE
+ {0xAF11, 0xAF2B, prLVT}, // Lo [27] HANGUL SYLLABLE GGYEG..HANGUL SYLLABLE GGYEH
+ {0xAF2C, 0xAF2C, prLV}, // Lo HANGUL SYLLABLE GGO
+ {0xAF2D, 0xAF47, prLVT}, // Lo [27] HANGUL SYLLABLE GGOG..HANGUL SYLLABLE GGOH
+ {0xAF48, 0xAF48, prLV}, // Lo HANGUL SYLLABLE GGWA
+ {0xAF49, 0xAF63, prLVT}, // Lo [27] HANGUL SYLLABLE GGWAG..HANGUL SYLLABLE GGWAH
+ {0xAF64, 0xAF64, prLV}, // Lo HANGUL SYLLABLE GGWAE
+ {0xAF65, 0xAF7F, prLVT}, // Lo [27] HANGUL SYLLABLE GGWAEG..HANGUL SYLLABLE GGWAEH
+ {0xAF80, 0xAF80, prLV}, // Lo HANGUL SYLLABLE GGOE
+ {0xAF81, 0xAF9B, prLVT}, // Lo [27] HANGUL SYLLABLE GGOEG..HANGUL SYLLABLE GGOEH
+ {0xAF9C, 0xAF9C, prLV}, // Lo HANGUL SYLLABLE GGYO
+ {0xAF9D, 0xAFB7, prLVT}, // Lo [27] HANGUL SYLLABLE GGYOG..HANGUL SYLLABLE GGYOH
+ {0xAFB8, 0xAFB8, prLV}, // Lo HANGUL SYLLABLE GGU
+ {0xAFB9, 0xAFD3, prLVT}, // Lo [27] HANGUL SYLLABLE GGUG..HANGUL SYLLABLE GGUH
+ {0xAFD4, 0xAFD4, prLV}, // Lo HANGUL SYLLABLE GGWEO
+ {0xAFD5, 0xAFEF, prLVT}, // Lo [27] HANGUL SYLLABLE GGWEOG..HANGUL SYLLABLE GGWEOH
+ {0xAFF0, 0xAFF0, prLV}, // Lo HANGUL SYLLABLE GGWE
+ {0xAFF1, 0xB00B, prLVT}, // Lo [27] HANGUL SYLLABLE GGWEG..HANGUL SYLLABLE GGWEH
+ {0xB00C, 0xB00C, prLV}, // Lo HANGUL SYLLABLE GGWI
+ {0xB00D, 0xB027, prLVT}, // Lo [27] HANGUL SYLLABLE GGWIG..HANGUL SYLLABLE GGWIH
+ {0xB028, 0xB028, prLV}, // Lo HANGUL SYLLABLE GGYU
+ {0xB029, 0xB043, prLVT}, // Lo [27] HANGUL SYLLABLE GGYUG..HANGUL SYLLABLE GGYUH
+ {0xB044, 0xB044, prLV}, // Lo HANGUL SYLLABLE GGEU
+ {0xB045, 0xB05F, prLVT}, // Lo [27] HANGUL SYLLABLE GGEUG..HANGUL SYLLABLE GGEUH
+ {0xB060, 0xB060, prLV}, // Lo HANGUL SYLLABLE GGYI
+ {0xB061, 0xB07B, prLVT}, // Lo [27] HANGUL SYLLABLE GGYIG..HANGUL SYLLABLE GGYIH
+ {0xB07C, 0xB07C, prLV}, // Lo HANGUL SYLLABLE GGI
+ {0xB07D, 0xB097, prLVT}, // Lo [27] HANGUL SYLLABLE GGIG..HANGUL SYLLABLE GGIH
+ {0xB098, 0xB098, prLV}, // Lo HANGUL SYLLABLE NA
+ {0xB099, 0xB0B3, prLVT}, // Lo [27] HANGUL SYLLABLE NAG..HANGUL SYLLABLE NAH
+ {0xB0B4, 0xB0B4, prLV}, // Lo HANGUL SYLLABLE NAE
+ {0xB0B5, 0xB0CF, prLVT}, // Lo [27] HANGUL SYLLABLE NAEG..HANGUL SYLLABLE NAEH
+ {0xB0D0, 0xB0D0, prLV}, // Lo HANGUL SYLLABLE NYA
+ {0xB0D1, 0xB0EB, prLVT}, // Lo [27] HANGUL SYLLABLE NYAG..HANGUL SYLLABLE NYAH
+ {0xB0EC, 0xB0EC, prLV}, // Lo HANGUL SYLLABLE NYAE
+ {0xB0ED, 0xB107, prLVT}, // Lo [27] HANGUL SYLLABLE NYAEG..HANGUL SYLLABLE NYAEH
+ {0xB108, 0xB108, prLV}, // Lo HANGUL SYLLABLE NEO
+ {0xB109, 0xB123, prLVT}, // Lo [27] HANGUL SYLLABLE NEOG..HANGUL SYLLABLE NEOH
+ {0xB124, 0xB124, prLV}, // Lo HANGUL SYLLABLE NE
+ {0xB125, 0xB13F, prLVT}, // Lo [27] HANGUL SYLLABLE NEG..HANGUL SYLLABLE NEH
+ {0xB140, 0xB140, prLV}, // Lo HANGUL SYLLABLE NYEO
+ {0xB141, 0xB15B, prLVT}, // Lo [27] HANGUL SYLLABLE NYEOG..HANGUL SYLLABLE NYEOH
+ {0xB15C, 0xB15C, prLV}, // Lo HANGUL SYLLABLE NYE
+ {0xB15D, 0xB177, prLVT}, // Lo [27] HANGUL SYLLABLE NYEG..HANGUL SYLLABLE NYEH
+ {0xB178, 0xB178, prLV}, // Lo HANGUL SYLLABLE NO
+ {0xB179, 0xB193, prLVT}, // Lo [27] HANGUL SYLLABLE NOG..HANGUL SYLLABLE NOH
+ {0xB194, 0xB194, prLV}, // Lo HANGUL SYLLABLE NWA
+ {0xB195, 0xB1AF, prLVT}, // Lo [27] HANGUL SYLLABLE NWAG..HANGUL SYLLABLE NWAH
+ {0xB1B0, 0xB1B0, prLV}, // Lo HANGUL SYLLABLE NWAE
+ {0xB1B1, 0xB1CB, prLVT}, // Lo [27] HANGUL SYLLABLE NWAEG..HANGUL SYLLABLE NWAEH
+ {0xB1CC, 0xB1CC, prLV}, // Lo HANGUL SYLLABLE NOE
+ {0xB1CD, 0xB1E7, prLVT}, // Lo [27] HANGUL SYLLABLE NOEG..HANGUL SYLLABLE NOEH
+ {0xB1E8, 0xB1E8, prLV}, // Lo HANGUL SYLLABLE NYO
+ {0xB1E9, 0xB203, prLVT}, // Lo [27] HANGUL SYLLABLE NYOG..HANGUL SYLLABLE NYOH
+ {0xB204, 0xB204, prLV}, // Lo HANGUL SYLLABLE NU
+ {0xB205, 0xB21F, prLVT}, // Lo [27] HANGUL SYLLABLE NUG..HANGUL SYLLABLE NUH
+ {0xB220, 0xB220, prLV}, // Lo HANGUL SYLLABLE NWEO
+ {0xB221, 0xB23B, prLVT}, // Lo [27] HANGUL SYLLABLE NWEOG..HANGUL SYLLABLE NWEOH
+ {0xB23C, 0xB23C, prLV}, // Lo HANGUL SYLLABLE NWE
+ {0xB23D, 0xB257, prLVT}, // Lo [27] HANGUL SYLLABLE NWEG..HANGUL SYLLABLE NWEH
+ {0xB258, 0xB258, prLV}, // Lo HANGUL SYLLABLE NWI
+ {0xB259, 0xB273, prLVT}, // Lo [27] HANGUL SYLLABLE NWIG..HANGUL SYLLABLE NWIH
+ {0xB274, 0xB274, prLV}, // Lo HANGUL SYLLABLE NYU
+ {0xB275, 0xB28F, prLVT}, // Lo [27] HANGUL SYLLABLE NYUG..HANGUL SYLLABLE NYUH
+ {0xB290, 0xB290, prLV}, // Lo HANGUL SYLLABLE NEU
+ {0xB291, 0xB2AB, prLVT}, // Lo [27] HANGUL SYLLABLE NEUG..HANGUL SYLLABLE NEUH
+ {0xB2AC, 0xB2AC, prLV}, // Lo HANGUL SYLLABLE NYI
+ {0xB2AD, 0xB2C7, prLVT}, // Lo [27] HANGUL SYLLABLE NYIG..HANGUL SYLLABLE NYIH
+ {0xB2C8, 0xB2C8, prLV}, // Lo HANGUL SYLLABLE NI
+ {0xB2C9, 0xB2E3, prLVT}, // Lo [27] HANGUL SYLLABLE NIG..HANGUL SYLLABLE NIH
+ {0xB2E4, 0xB2E4, prLV}, // Lo HANGUL SYLLABLE DA
+ {0xB2E5, 0xB2FF, prLVT}, // Lo [27] HANGUL SYLLABLE DAG..HANGUL SYLLABLE DAH
+ {0xB300, 0xB300, prLV}, // Lo HANGUL SYLLABLE DAE
+ {0xB301, 0xB31B, prLVT}, // Lo [27] HANGUL SYLLABLE DAEG..HANGUL SYLLABLE DAEH
+ {0xB31C, 0xB31C, prLV}, // Lo HANGUL SYLLABLE DYA
+ {0xB31D, 0xB337, prLVT}, // Lo [27] HANGUL SYLLABLE DYAG..HANGUL SYLLABLE DYAH
+ {0xB338, 0xB338, prLV}, // Lo HANGUL SYLLABLE DYAE
+ {0xB339, 0xB353, prLVT}, // Lo [27] HANGUL SYLLABLE DYAEG..HANGUL SYLLABLE DYAEH
+ {0xB354, 0xB354, prLV}, // Lo HANGUL SYLLABLE DEO
+ {0xB355, 0xB36F, prLVT}, // Lo [27] HANGUL SYLLABLE DEOG..HANGUL SYLLABLE DEOH
+ {0xB370, 0xB370, prLV}, // Lo HANGUL SYLLABLE DE
+ {0xB371, 0xB38B, prLVT}, // Lo [27] HANGUL SYLLABLE DEG..HANGUL SYLLABLE DEH
+ {0xB38C, 0xB38C, prLV}, // Lo HANGUL SYLLABLE DYEO
+ {0xB38D, 0xB3A7, prLVT}, // Lo [27] HANGUL SYLLABLE DYEOG..HANGUL SYLLABLE DYEOH
+ {0xB3A8, 0xB3A8, prLV}, // Lo HANGUL SYLLABLE DYE
+ {0xB3A9, 0xB3C3, prLVT}, // Lo [27] HANGUL SYLLABLE DYEG..HANGUL SYLLABLE DYEH
+ {0xB3C4, 0xB3C4, prLV}, // Lo HANGUL SYLLABLE DO
+ {0xB3C5, 0xB3DF, prLVT}, // Lo [27] HANGUL SYLLABLE DOG..HANGUL SYLLABLE DOH
+ {0xB3E0, 0xB3E0, prLV}, // Lo HANGUL SYLLABLE DWA
+ {0xB3E1, 0xB3FB, prLVT}, // Lo [27] HANGUL SYLLABLE DWAG..HANGUL SYLLABLE DWAH
+ {0xB3FC, 0xB3FC, prLV}, // Lo HANGUL SYLLABLE DWAE
+ {0xB3FD, 0xB417, prLVT}, // Lo [27] HANGUL SYLLABLE DWAEG..HANGUL SYLLABLE DWAEH
+ {0xB418, 0xB418, prLV}, // Lo HANGUL SYLLABLE DOE
+ {0xB419, 0xB433, prLVT}, // Lo [27] HANGUL SYLLABLE DOEG..HANGUL SYLLABLE DOEH
+ {0xB434, 0xB434, prLV}, // Lo HANGUL SYLLABLE DYO
+ {0xB435, 0xB44F, prLVT}, // Lo [27] HANGUL SYLLABLE DYOG..HANGUL SYLLABLE DYOH
+ {0xB450, 0xB450, prLV}, // Lo HANGUL SYLLABLE DU
+ {0xB451, 0xB46B, prLVT}, // Lo [27] HANGUL SYLLABLE DUG..HANGUL SYLLABLE DUH
+ {0xB46C, 0xB46C, prLV}, // Lo HANGUL SYLLABLE DWEO
+ {0xB46D, 0xB487, prLVT}, // Lo [27] HANGUL SYLLABLE DWEOG..HANGUL SYLLABLE DWEOH
+ {0xB488, 0xB488, prLV}, // Lo HANGUL SYLLABLE DWE
+ {0xB489, 0xB4A3, prLVT}, // Lo [27] HANGUL SYLLABLE DWEG..HANGUL SYLLABLE DWEH
+ {0xB4A4, 0xB4A4, prLV}, // Lo HANGUL SYLLABLE DWI
+ {0xB4A5, 0xB4BF, prLVT}, // Lo [27] HANGUL SYLLABLE DWIG..HANGUL SYLLABLE DWIH
+ {0xB4C0, 0xB4C0, prLV}, // Lo HANGUL SYLLABLE DYU
+ {0xB4C1, 0xB4DB, prLVT}, // Lo [27] HANGUL SYLLABLE DYUG..HANGUL SYLLABLE DYUH
+ {0xB4DC, 0xB4DC, prLV}, // Lo HANGUL SYLLABLE DEU
+ {0xB4DD, 0xB4F7, prLVT}, // Lo [27] HANGUL SYLLABLE DEUG..HANGUL SYLLABLE DEUH
+ {0xB4F8, 0xB4F8, prLV}, // Lo HANGUL SYLLABLE DYI
+ {0xB4F9, 0xB513, prLVT}, // Lo [27] HANGUL SYLLABLE DYIG..HANGUL SYLLABLE DYIH
+ {0xB514, 0xB514, prLV}, // Lo HANGUL SYLLABLE DI
+ {0xB515, 0xB52F, prLVT}, // Lo [27] HANGUL SYLLABLE DIG..HANGUL SYLLABLE DIH
+ {0xB530, 0xB530, prLV}, // Lo HANGUL SYLLABLE DDA
+ {0xB531, 0xB54B, prLVT}, // Lo [27] HANGUL SYLLABLE DDAG..HANGUL SYLLABLE DDAH
+ {0xB54C, 0xB54C, prLV}, // Lo HANGUL SYLLABLE DDAE
+ {0xB54D, 0xB567, prLVT}, // Lo [27] HANGUL SYLLABLE DDAEG..HANGUL SYLLABLE DDAEH
+ {0xB568, 0xB568, prLV}, // Lo HANGUL SYLLABLE DDYA
+ {0xB569, 0xB583, prLVT}, // Lo [27] HANGUL SYLLABLE DDYAG..HANGUL SYLLABLE DDYAH
+ {0xB584, 0xB584, prLV}, // Lo HANGUL SYLLABLE DDYAE
+ {0xB585, 0xB59F, prLVT}, // Lo [27] HANGUL SYLLABLE DDYAEG..HANGUL SYLLABLE DDYAEH
+ {0xB5A0, 0xB5A0, prLV}, // Lo HANGUL SYLLABLE DDEO
+ {0xB5A1, 0xB5BB, prLVT}, // Lo [27] HANGUL SYLLABLE DDEOG..HANGUL SYLLABLE DDEOH
+ {0xB5BC, 0xB5BC, prLV}, // Lo HANGUL SYLLABLE DDE
+ {0xB5BD, 0xB5D7, prLVT}, // Lo [27] HANGUL SYLLABLE DDEG..HANGUL SYLLABLE DDEH
+ {0xB5D8, 0xB5D8, prLV}, // Lo HANGUL SYLLABLE DDYEO
+ {0xB5D9, 0xB5F3, prLVT}, // Lo [27] HANGUL SYLLABLE DDYEOG..HANGUL SYLLABLE DDYEOH
+ {0xB5F4, 0xB5F4, prLV}, // Lo HANGUL SYLLABLE DDYE
+ {0xB5F5, 0xB60F, prLVT}, // Lo [27] HANGUL SYLLABLE DDYEG..HANGUL SYLLABLE DDYEH
+ {0xB610, 0xB610, prLV}, // Lo HANGUL SYLLABLE DDO
+ {0xB611, 0xB62B, prLVT}, // Lo [27] HANGUL SYLLABLE DDOG..HANGUL SYLLABLE DDOH
+ {0xB62C, 0xB62C, prLV}, // Lo HANGUL SYLLABLE DDWA
+ {0xB62D, 0xB647, prLVT}, // Lo [27] HANGUL SYLLABLE DDWAG..HANGUL SYLLABLE DDWAH
+ {0xB648, 0xB648, prLV}, // Lo HANGUL SYLLABLE DDWAE
+ {0xB649, 0xB663, prLVT}, // Lo [27] HANGUL SYLLABLE DDWAEG..HANGUL SYLLABLE DDWAEH
+ {0xB664, 0xB664, prLV}, // Lo HANGUL SYLLABLE DDOE
+ {0xB665, 0xB67F, prLVT}, // Lo [27] HANGUL SYLLABLE DDOEG..HANGUL SYLLABLE DDOEH
+ {0xB680, 0xB680, prLV}, // Lo HANGUL SYLLABLE DDYO
+ {0xB681, 0xB69B, prLVT}, // Lo [27] HANGUL SYLLABLE DDYOG..HANGUL SYLLABLE DDYOH
+ {0xB69C, 0xB69C, prLV}, // Lo HANGUL SYLLABLE DDU
+ {0xB69D, 0xB6B7, prLVT}, // Lo [27] HANGUL SYLLABLE DDUG..HANGUL SYLLABLE DDUH
+ {0xB6B8, 0xB6B8, prLV}, // Lo HANGUL SYLLABLE DDWEO
+ {0xB6B9, 0xB6D3, prLVT}, // Lo [27] HANGUL SYLLABLE DDWEOG..HANGUL SYLLABLE DDWEOH
+ {0xB6D4, 0xB6D4, prLV}, // Lo HANGUL SYLLABLE DDWE
+ {0xB6D5, 0xB6EF, prLVT}, // Lo [27] HANGUL SYLLABLE DDWEG..HANGUL SYLLABLE DDWEH
+ {0xB6F0, 0xB6F0, prLV}, // Lo HANGUL SYLLABLE DDWI
+ {0xB6F1, 0xB70B, prLVT}, // Lo [27] HANGUL SYLLABLE DDWIG..HANGUL SYLLABLE DDWIH
+ {0xB70C, 0xB70C, prLV}, // Lo HANGUL SYLLABLE DDYU
+ {0xB70D, 0xB727, prLVT}, // Lo [27] HANGUL SYLLABLE DDYUG..HANGUL SYLLABLE DDYUH
+ {0xB728, 0xB728, prLV}, // Lo HANGUL SYLLABLE DDEU
+ {0xB729, 0xB743, prLVT}, // Lo [27] HANGUL SYLLABLE DDEUG..HANGUL SYLLABLE DDEUH
+ {0xB744, 0xB744, prLV}, // Lo HANGUL SYLLABLE DDYI
+ {0xB745, 0xB75F, prLVT}, // Lo [27] HANGUL SYLLABLE DDYIG..HANGUL SYLLABLE DDYIH
+ {0xB760, 0xB760, prLV}, // Lo HANGUL SYLLABLE DDI
+ {0xB761, 0xB77B, prLVT}, // Lo [27] HANGUL SYLLABLE DDIG..HANGUL SYLLABLE DDIH
+ {0xB77C, 0xB77C, prLV}, // Lo HANGUL SYLLABLE RA
+ {0xB77D, 0xB797, prLVT}, // Lo [27] HANGUL SYLLABLE RAG..HANGUL SYLLABLE RAH
+ {0xB798, 0xB798, prLV}, // Lo HANGUL SYLLABLE RAE
+ {0xB799, 0xB7B3, prLVT}, // Lo [27] HANGUL SYLLABLE RAEG..HANGUL SYLLABLE RAEH
+ {0xB7B4, 0xB7B4, prLV}, // Lo HANGUL SYLLABLE RYA
+ {0xB7B5, 0xB7CF, prLVT}, // Lo [27] HANGUL SYLLABLE RYAG..HANGUL SYLLABLE RYAH
+ {0xB7D0, 0xB7D0, prLV}, // Lo HANGUL SYLLABLE RYAE
+ {0xB7D1, 0xB7EB, prLVT}, // Lo [27] HANGUL SYLLABLE RYAEG..HANGUL SYLLABLE RYAEH
+ {0xB7EC, 0xB7EC, prLV}, // Lo HANGUL SYLLABLE REO
+ {0xB7ED, 0xB807, prLVT}, // Lo [27] HANGUL SYLLABLE REOG..HANGUL SYLLABLE REOH
+ {0xB808, 0xB808, prLV}, // Lo HANGUL SYLLABLE RE
+ {0xB809, 0xB823, prLVT}, // Lo [27] HANGUL SYLLABLE REG..HANGUL SYLLABLE REH
+ {0xB824, 0xB824, prLV}, // Lo HANGUL SYLLABLE RYEO
+ {0xB825, 0xB83F, prLVT}, // Lo [27] HANGUL SYLLABLE RYEOG..HANGUL SYLLABLE RYEOH
+ {0xB840, 0xB840, prLV}, // Lo HANGUL SYLLABLE RYE
+ {0xB841, 0xB85B, prLVT}, // Lo [27] HANGUL SYLLABLE RYEG..HANGUL SYLLABLE RYEH
+ {0xB85C, 0xB85C, prLV}, // Lo HANGUL SYLLABLE RO
+ {0xB85D, 0xB877, prLVT}, // Lo [27] HANGUL SYLLABLE ROG..HANGUL SYLLABLE ROH
+ {0xB878, 0xB878, prLV}, // Lo HANGUL SYLLABLE RWA
+ {0xB879, 0xB893, prLVT}, // Lo [27] HANGUL SYLLABLE RWAG..HANGUL SYLLABLE RWAH
+ {0xB894, 0xB894, prLV}, // Lo HANGUL SYLLABLE RWAE
+ {0xB895, 0xB8AF, prLVT}, // Lo [27] HANGUL SYLLABLE RWAEG..HANGUL SYLLABLE RWAEH
+ {0xB8B0, 0xB8B0, prLV}, // Lo HANGUL SYLLABLE ROE
+ {0xB8B1, 0xB8CB, prLVT}, // Lo [27] HANGUL SYLLABLE ROEG..HANGUL SYLLABLE ROEH
+ {0xB8CC, 0xB8CC, prLV}, // Lo HANGUL SYLLABLE RYO
+ {0xB8CD, 0xB8E7, prLVT}, // Lo [27] HANGUL SYLLABLE RYOG..HANGUL SYLLABLE RYOH
+ {0xB8E8, 0xB8E8, prLV}, // Lo HANGUL SYLLABLE RU
+ {0xB8E9, 0xB903, prLVT}, // Lo [27] HANGUL SYLLABLE RUG..HANGUL SYLLABLE RUH
+ {0xB904, 0xB904, prLV}, // Lo HANGUL SYLLABLE RWEO
+ {0xB905, 0xB91F, prLVT}, // Lo [27] HANGUL SYLLABLE RWEOG..HANGUL SYLLABLE RWEOH
+ {0xB920, 0xB920, prLV}, // Lo HANGUL SYLLABLE RWE
+ {0xB921, 0xB93B, prLVT}, // Lo [27] HANGUL SYLLABLE RWEG..HANGUL SYLLABLE RWEH
+ {0xB93C, 0xB93C, prLV}, // Lo HANGUL SYLLABLE RWI
+ {0xB93D, 0xB957, prLVT}, // Lo [27] HANGUL SYLLABLE RWIG..HANGUL SYLLABLE RWIH
+ {0xB958, 0xB958, prLV}, // Lo HANGUL SYLLABLE RYU
+ {0xB959, 0xB973, prLVT}, // Lo [27] HANGUL SYLLABLE RYUG..HANGUL SYLLABLE RYUH
+ {0xB974, 0xB974, prLV}, // Lo HANGUL SYLLABLE REU
+ {0xB975, 0xB98F, prLVT}, // Lo [27] HANGUL SYLLABLE REUG..HANGUL SYLLABLE REUH
+ {0xB990, 0xB990, prLV}, // Lo HANGUL SYLLABLE RYI
+ {0xB991, 0xB9AB, prLVT}, // Lo [27] HANGUL SYLLABLE RYIG..HANGUL SYLLABLE RYIH
+ {0xB9AC, 0xB9AC, prLV}, // Lo HANGUL SYLLABLE RI
+ {0xB9AD, 0xB9C7, prLVT}, // Lo [27] HANGUL SYLLABLE RIG..HANGUL SYLLABLE RIH
+ {0xB9C8, 0xB9C8, prLV}, // Lo HANGUL SYLLABLE MA
+ {0xB9C9, 0xB9E3, prLVT}, // Lo [27] HANGUL SYLLABLE MAG..HANGUL SYLLABLE MAH
+ {0xB9E4, 0xB9E4, prLV}, // Lo HANGUL SYLLABLE MAE
+ {0xB9E5, 0xB9FF, prLVT}, // Lo [27] HANGUL SYLLABLE MAEG..HANGUL SYLLABLE MAEH
+ {0xBA00, 0xBA00, prLV}, // Lo HANGUL SYLLABLE MYA
+ {0xBA01, 0xBA1B, prLVT}, // Lo [27] HANGUL SYLLABLE MYAG..HANGUL SYLLABLE MYAH
+ {0xBA1C, 0xBA1C, prLV}, // Lo HANGUL SYLLABLE MYAE
+ {0xBA1D, 0xBA37, prLVT}, // Lo [27] HANGUL SYLLABLE MYAEG..HANGUL SYLLABLE MYAEH
+ {0xBA38, 0xBA38, prLV}, // Lo HANGUL SYLLABLE MEO
+ {0xBA39, 0xBA53, prLVT}, // Lo [27] HANGUL SYLLABLE MEOG..HANGUL SYLLABLE MEOH
+ {0xBA54, 0xBA54, prLV}, // Lo HANGUL SYLLABLE ME
+ {0xBA55, 0xBA6F, prLVT}, // Lo [27] HANGUL SYLLABLE MEG..HANGUL SYLLABLE MEH
+ {0xBA70, 0xBA70, prLV}, // Lo HANGUL SYLLABLE MYEO
+ {0xBA71, 0xBA8B, prLVT}, // Lo [27] HANGUL SYLLABLE MYEOG..HANGUL SYLLABLE MYEOH
+ {0xBA8C, 0xBA8C, prLV}, // Lo HANGUL SYLLABLE MYE
+ {0xBA8D, 0xBAA7, prLVT}, // Lo [27] HANGUL SYLLABLE MYEG..HANGUL SYLLABLE MYEH
+ {0xBAA8, 0xBAA8, prLV}, // Lo HANGUL SYLLABLE MO
+ {0xBAA9, 0xBAC3, prLVT}, // Lo [27] HANGUL SYLLABLE MOG..HANGUL SYLLABLE MOH
+ {0xBAC4, 0xBAC4, prLV}, // Lo HANGUL SYLLABLE MWA
+ {0xBAC5, 0xBADF, prLVT}, // Lo [27] HANGUL SYLLABLE MWAG..HANGUL SYLLABLE MWAH
+ {0xBAE0, 0xBAE0, prLV}, // Lo HANGUL SYLLABLE MWAE
+ {0xBAE1, 0xBAFB, prLVT}, // Lo [27] HANGUL SYLLABLE MWAEG..HANGUL SYLLABLE MWAEH
+ {0xBAFC, 0xBAFC, prLV}, // Lo HANGUL SYLLABLE MOE
+ {0xBAFD, 0xBB17, prLVT}, // Lo [27] HANGUL SYLLABLE MOEG..HANGUL SYLLABLE MOEH
+ {0xBB18, 0xBB18, prLV}, // Lo HANGUL SYLLABLE MYO
+ {0xBB19, 0xBB33, prLVT}, // Lo [27] HANGUL SYLLABLE MYOG..HANGUL SYLLABLE MYOH
+ {0xBB34, 0xBB34, prLV}, // Lo HANGUL SYLLABLE MU
+ {0xBB35, 0xBB4F, prLVT}, // Lo [27] HANGUL SYLLABLE MUG..HANGUL SYLLABLE MUH
+ {0xBB50, 0xBB50, prLV}, // Lo HANGUL SYLLABLE MWEO
+ {0xBB51, 0xBB6B, prLVT}, // Lo [27] HANGUL SYLLABLE MWEOG..HANGUL SYLLABLE MWEOH
+ {0xBB6C, 0xBB6C, prLV}, // Lo HANGUL SYLLABLE MWE
+ {0xBB6D, 0xBB87, prLVT}, // Lo [27] HANGUL SYLLABLE MWEG..HANGUL SYLLABLE MWEH
+ {0xBB88, 0xBB88, prLV}, // Lo HANGUL SYLLABLE MWI
+ {0xBB89, 0xBBA3, prLVT}, // Lo [27] HANGUL SYLLABLE MWIG..HANGUL SYLLABLE MWIH
+ {0xBBA4, 0xBBA4, prLV}, // Lo HANGUL SYLLABLE MYU
+ {0xBBA5, 0xBBBF, prLVT}, // Lo [27] HANGUL SYLLABLE MYUG..HANGUL SYLLABLE MYUH
+ {0xBBC0, 0xBBC0, prLV}, // Lo HANGUL SYLLABLE MEU
+ {0xBBC1, 0xBBDB, prLVT}, // Lo [27] HANGUL SYLLABLE MEUG..HANGUL SYLLABLE MEUH
+ {0xBBDC, 0xBBDC, prLV}, // Lo HANGUL SYLLABLE MYI
+ {0xBBDD, 0xBBF7, prLVT}, // Lo [27] HANGUL SYLLABLE MYIG..HANGUL SYLLABLE MYIH
+ {0xBBF8, 0xBBF8, prLV}, // Lo HANGUL SYLLABLE MI
+ {0xBBF9, 0xBC13, prLVT}, // Lo [27] HANGUL SYLLABLE MIG..HANGUL SYLLABLE MIH
+ {0xBC14, 0xBC14, prLV}, // Lo HANGUL SYLLABLE BA
+ {0xBC15, 0xBC2F, prLVT}, // Lo [27] HANGUL SYLLABLE BAG..HANGUL SYLLABLE BAH
+ {0xBC30, 0xBC30, prLV}, // Lo HANGUL SYLLABLE BAE
+ {0xBC31, 0xBC4B, prLVT}, // Lo [27] HANGUL SYLLABLE BAEG..HANGUL SYLLABLE BAEH
+ {0xBC4C, 0xBC4C, prLV}, // Lo HANGUL SYLLABLE BYA
+ {0xBC4D, 0xBC67, prLVT}, // Lo [27] HANGUL SYLLABLE BYAG..HANGUL SYLLABLE BYAH
+ {0xBC68, 0xBC68, prLV}, // Lo HANGUL SYLLABLE BYAE
+ {0xBC69, 0xBC83, prLVT}, // Lo [27] HANGUL SYLLABLE BYAEG..HANGUL SYLLABLE BYAEH
+ {0xBC84, 0xBC84, prLV}, // Lo HANGUL SYLLABLE BEO
+ {0xBC85, 0xBC9F, prLVT}, // Lo [27] HANGUL SYLLABLE BEOG..HANGUL SYLLABLE BEOH
+ {0xBCA0, 0xBCA0, prLV}, // Lo HANGUL SYLLABLE BE
+ {0xBCA1, 0xBCBB, prLVT}, // Lo [27] HANGUL SYLLABLE BEG..HANGUL SYLLABLE BEH
+ {0xBCBC, 0xBCBC, prLV}, // Lo HANGUL SYLLABLE BYEO
+ {0xBCBD, 0xBCD7, prLVT}, // Lo [27] HANGUL SYLLABLE BYEOG..HANGUL SYLLABLE BYEOH
+ {0xBCD8, 0xBCD8, prLV}, // Lo HANGUL SYLLABLE BYE
+ {0xBCD9, 0xBCF3, prLVT}, // Lo [27] HANGUL SYLLABLE BYEG..HANGUL SYLLABLE BYEH
+ {0xBCF4, 0xBCF4, prLV}, // Lo HANGUL SYLLABLE BO
+ {0xBCF5, 0xBD0F, prLVT}, // Lo [27] HANGUL SYLLABLE BOG..HANGUL SYLLABLE BOH
+ {0xBD10, 0xBD10, prLV}, // Lo HANGUL SYLLABLE BWA
+ {0xBD11, 0xBD2B, prLVT}, // Lo [27] HANGUL SYLLABLE BWAG..HANGUL SYLLABLE BWAH
+ {0xBD2C, 0xBD2C, prLV}, // Lo HANGUL SYLLABLE BWAE
+ {0xBD2D, 0xBD47, prLVT}, // Lo [27] HANGUL SYLLABLE BWAEG..HANGUL SYLLABLE BWAEH
+ {0xBD48, 0xBD48, prLV}, // Lo HANGUL SYLLABLE BOE
+ {0xBD49, 0xBD63, prLVT}, // Lo [27] HANGUL SYLLABLE BOEG..HANGUL SYLLABLE BOEH
+ {0xBD64, 0xBD64, prLV}, // Lo HANGUL SYLLABLE BYO
+ {0xBD65, 0xBD7F, prLVT}, // Lo [27] HANGUL SYLLABLE BYOG..HANGUL SYLLABLE BYOH
+ {0xBD80, 0xBD80, prLV}, // Lo HANGUL SYLLABLE BU
+ {0xBD81, 0xBD9B, prLVT}, // Lo [27] HANGUL SYLLABLE BUG..HANGUL SYLLABLE BUH
+ {0xBD9C, 0xBD9C, prLV}, // Lo HANGUL SYLLABLE BWEO
+ {0xBD9D, 0xBDB7, prLVT}, // Lo [27] HANGUL SYLLABLE BWEOG..HANGUL SYLLABLE BWEOH
+ {0xBDB8, 0xBDB8, prLV}, // Lo HANGUL SYLLABLE BWE
+ {0xBDB9, 0xBDD3, prLVT}, // Lo [27] HANGUL SYLLABLE BWEG..HANGUL SYLLABLE BWEH
+ {0xBDD4, 0xBDD4, prLV}, // Lo HANGUL SYLLABLE BWI
+ {0xBDD5, 0xBDEF, prLVT}, // Lo [27] HANGUL SYLLABLE BWIG..HANGUL SYLLABLE BWIH
+ {0xBDF0, 0xBDF0, prLV}, // Lo HANGUL SYLLABLE BYU
+ {0xBDF1, 0xBE0B, prLVT}, // Lo [27] HANGUL SYLLABLE BYUG..HANGUL SYLLABLE BYUH
+ {0xBE0C, 0xBE0C, prLV}, // Lo HANGUL SYLLABLE BEU
+ {0xBE0D, 0xBE27, prLVT}, // Lo [27] HANGUL SYLLABLE BEUG..HANGUL SYLLABLE BEUH
+ {0xBE28, 0xBE28, prLV}, // Lo HANGUL SYLLABLE BYI
+ {0xBE29, 0xBE43, prLVT}, // Lo [27] HANGUL SYLLABLE BYIG..HANGUL SYLLABLE BYIH
+ {0xBE44, 0xBE44, prLV}, // Lo HANGUL SYLLABLE BI
+ {0xBE45, 0xBE5F, prLVT}, // Lo [27] HANGUL SYLLABLE BIG..HANGUL SYLLABLE BIH
+ {0xBE60, 0xBE60, prLV}, // Lo HANGUL SYLLABLE BBA
+ {0xBE61, 0xBE7B, prLVT}, // Lo [27] HANGUL SYLLABLE BBAG..HANGUL SYLLABLE BBAH
+ {0xBE7C, 0xBE7C, prLV}, // Lo HANGUL SYLLABLE BBAE
+ {0xBE7D, 0xBE97, prLVT}, // Lo [27] HANGUL SYLLABLE BBAEG..HANGUL SYLLABLE BBAEH
+ {0xBE98, 0xBE98, prLV}, // Lo HANGUL SYLLABLE BBYA
+ {0xBE99, 0xBEB3, prLVT}, // Lo [27] HANGUL SYLLABLE BBYAG..HANGUL SYLLABLE BBYAH
+ {0xBEB4, 0xBEB4, prLV}, // Lo HANGUL SYLLABLE BBYAE
+ {0xBEB5, 0xBECF, prLVT}, // Lo [27] HANGUL SYLLABLE BBYAEG..HANGUL SYLLABLE BBYAEH
+ {0xBED0, 0xBED0, prLV}, // Lo HANGUL SYLLABLE BBEO
+ {0xBED1, 0xBEEB, prLVT}, // Lo [27] HANGUL SYLLABLE BBEOG..HANGUL SYLLABLE BBEOH
+ {0xBEEC, 0xBEEC, prLV}, // Lo HANGUL SYLLABLE BBE
+ {0xBEED, 0xBF07, prLVT}, // Lo [27] HANGUL SYLLABLE BBEG..HANGUL SYLLABLE BBEH
+ {0xBF08, 0xBF08, prLV}, // Lo HANGUL SYLLABLE BBYEO
+ {0xBF09, 0xBF23, prLVT}, // Lo [27] HANGUL SYLLABLE BBYEOG..HANGUL SYLLABLE BBYEOH
+ {0xBF24, 0xBF24, prLV}, // Lo HANGUL SYLLABLE BBYE
+ {0xBF25, 0xBF3F, prLVT}, // Lo [27] HANGUL SYLLABLE BBYEG..HANGUL SYLLABLE BBYEH
+ {0xBF40, 0xBF40, prLV}, // Lo HANGUL SYLLABLE BBO
+ {0xBF41, 0xBF5B, prLVT}, // Lo [27] HANGUL SYLLABLE BBOG..HANGUL SYLLABLE BBOH
+ {0xBF5C, 0xBF5C, prLV}, // Lo HANGUL SYLLABLE BBWA
+ {0xBF5D, 0xBF77, prLVT}, // Lo [27] HANGUL SYLLABLE BBWAG..HANGUL SYLLABLE BBWAH
+ {0xBF78, 0xBF78, prLV}, // Lo HANGUL SYLLABLE BBWAE
+ {0xBF79, 0xBF93, prLVT}, // Lo [27] HANGUL SYLLABLE BBWAEG..HANGUL SYLLABLE BBWAEH
+ {0xBF94, 0xBF94, prLV}, // Lo HANGUL SYLLABLE BBOE
+ {0xBF95, 0xBFAF, prLVT}, // Lo [27] HANGUL SYLLABLE BBOEG..HANGUL SYLLABLE BBOEH
+ {0xBFB0, 0xBFB0, prLV}, // Lo HANGUL SYLLABLE BBYO
+ {0xBFB1, 0xBFCB, prLVT}, // Lo [27] HANGUL SYLLABLE BBYOG..HANGUL SYLLABLE BBYOH
+ {0xBFCC, 0xBFCC, prLV}, // Lo HANGUL SYLLABLE BBU
+ {0xBFCD, 0xBFE7, prLVT}, // Lo [27] HANGUL SYLLABLE BBUG..HANGUL SYLLABLE BBUH
+ {0xBFE8, 0xBFE8, prLV}, // Lo HANGUL SYLLABLE BBWEO
+ {0xBFE9, 0xC003, prLVT}, // Lo [27] HANGUL SYLLABLE BBWEOG..HANGUL SYLLABLE BBWEOH
+ {0xC004, 0xC004, prLV}, // Lo HANGUL SYLLABLE BBWE
+ {0xC005, 0xC01F, prLVT}, // Lo [27] HANGUL SYLLABLE BBWEG..HANGUL SYLLABLE BBWEH
+ {0xC020, 0xC020, prLV}, // Lo HANGUL SYLLABLE BBWI
+ {0xC021, 0xC03B, prLVT}, // Lo [27] HANGUL SYLLABLE BBWIG..HANGUL SYLLABLE BBWIH
+ {0xC03C, 0xC03C, prLV}, // Lo HANGUL SYLLABLE BBYU
+ {0xC03D, 0xC057, prLVT}, // Lo [27] HANGUL SYLLABLE BBYUG..HANGUL SYLLABLE BBYUH
+ {0xC058, 0xC058, prLV}, // Lo HANGUL SYLLABLE BBEU
+ {0xC059, 0xC073, prLVT}, // Lo [27] HANGUL SYLLABLE BBEUG..HANGUL SYLLABLE BBEUH
+ {0xC074, 0xC074, prLV}, // Lo HANGUL SYLLABLE BBYI
+ {0xC075, 0xC08F, prLVT}, // Lo [27] HANGUL SYLLABLE BBYIG..HANGUL SYLLABLE BBYIH
+ {0xC090, 0xC090, prLV}, // Lo HANGUL SYLLABLE BBI
+ {0xC091, 0xC0AB, prLVT}, // Lo [27] HANGUL SYLLABLE BBIG..HANGUL SYLLABLE BBIH
+ {0xC0AC, 0xC0AC, prLV}, // Lo HANGUL SYLLABLE SA
+ {0xC0AD, 0xC0C7, prLVT}, // Lo [27] HANGUL SYLLABLE SAG..HANGUL SYLLABLE SAH
+ {0xC0C8, 0xC0C8, prLV}, // Lo HANGUL SYLLABLE SAE
+ {0xC0C9, 0xC0E3, prLVT}, // Lo [27] HANGUL SYLLABLE SAEG..HANGUL SYLLABLE SAEH
+ {0xC0E4, 0xC0E4, prLV}, // Lo HANGUL SYLLABLE SYA
+ {0xC0E5, 0xC0FF, prLVT}, // Lo [27] HANGUL SYLLABLE SYAG..HANGUL SYLLABLE SYAH
+ {0xC100, 0xC100, prLV}, // Lo HANGUL SYLLABLE SYAE
+ {0xC101, 0xC11B, prLVT}, // Lo [27] HANGUL SYLLABLE SYAEG..HANGUL SYLLABLE SYAEH
+ {0xC11C, 0xC11C, prLV}, // Lo HANGUL SYLLABLE SEO
+ {0xC11D, 0xC137, prLVT}, // Lo [27] HANGUL SYLLABLE SEOG..HANGUL SYLLABLE SEOH
+ {0xC138, 0xC138, prLV}, // Lo HANGUL SYLLABLE SE
+ {0xC139, 0xC153, prLVT}, // Lo [27] HANGUL SYLLABLE SEG..HANGUL SYLLABLE SEH
+ {0xC154, 0xC154, prLV}, // Lo HANGUL SYLLABLE SYEO
+ {0xC155, 0xC16F, prLVT}, // Lo [27] HANGUL SYLLABLE SYEOG..HANGUL SYLLABLE SYEOH
+ {0xC170, 0xC170, prLV}, // Lo HANGUL SYLLABLE SYE
+ {0xC171, 0xC18B, prLVT}, // Lo [27] HANGUL SYLLABLE SYEG..HANGUL SYLLABLE SYEH
+ {0xC18C, 0xC18C, prLV}, // Lo HANGUL SYLLABLE SO
+ {0xC18D, 0xC1A7, prLVT}, // Lo [27] HANGUL SYLLABLE SOG..HANGUL SYLLABLE SOH
+ {0xC1A8, 0xC1A8, prLV}, // Lo HANGUL SYLLABLE SWA
+ {0xC1A9, 0xC1C3, prLVT}, // Lo [27] HANGUL SYLLABLE SWAG..HANGUL SYLLABLE SWAH
+ {0xC1C4, 0xC1C4, prLV}, // Lo HANGUL SYLLABLE SWAE
+ {0xC1C5, 0xC1DF, prLVT}, // Lo [27] HANGUL SYLLABLE SWAEG..HANGUL SYLLABLE SWAEH
+ {0xC1E0, 0xC1E0, prLV}, // Lo HANGUL SYLLABLE SOE
+ {0xC1E1, 0xC1FB, prLVT}, // Lo [27] HANGUL SYLLABLE SOEG..HANGUL SYLLABLE SOEH
+ {0xC1FC, 0xC1FC, prLV}, // Lo HANGUL SYLLABLE SYO
+ {0xC1FD, 0xC217, prLVT}, // Lo [27] HANGUL SYLLABLE SYOG..HANGUL SYLLABLE SYOH
+ {0xC218, 0xC218, prLV}, // Lo HANGUL SYLLABLE SU
+ {0xC219, 0xC233, prLVT}, // Lo [27] HANGUL SYLLABLE SUG..HANGUL SYLLABLE SUH
+ {0xC234, 0xC234, prLV}, // Lo HANGUL SYLLABLE SWEO
+ {0xC235, 0xC24F, prLVT}, // Lo [27] HANGUL SYLLABLE SWEOG..HANGUL SYLLABLE SWEOH
+ {0xC250, 0xC250, prLV}, // Lo HANGUL SYLLABLE SWE
+ {0xC251, 0xC26B, prLVT}, // Lo [27] HANGUL SYLLABLE SWEG..HANGUL SYLLABLE SWEH
+ {0xC26C, 0xC26C, prLV}, // Lo HANGUL SYLLABLE SWI
+ {0xC26D, 0xC287, prLVT}, // Lo [27] HANGUL SYLLABLE SWIG..HANGUL SYLLABLE SWIH
+ {0xC288, 0xC288, prLV}, // Lo HANGUL SYLLABLE SYU
+ {0xC289, 0xC2A3, prLVT}, // Lo [27] HANGUL SYLLABLE SYUG..HANGUL SYLLABLE SYUH
+ {0xC2A4, 0xC2A4, prLV}, // Lo HANGUL SYLLABLE SEU
+ {0xC2A5, 0xC2BF, prLVT}, // Lo [27] HANGUL SYLLABLE SEUG..HANGUL SYLLABLE SEUH
+ {0xC2C0, 0xC2C0, prLV}, // Lo HANGUL SYLLABLE SYI
+ {0xC2C1, 0xC2DB, prLVT}, // Lo [27] HANGUL SYLLABLE SYIG..HANGUL SYLLABLE SYIH
+ {0xC2DC, 0xC2DC, prLV}, // Lo HANGUL SYLLABLE SI
+ {0xC2DD, 0xC2F7, prLVT}, // Lo [27] HANGUL SYLLABLE SIG..HANGUL SYLLABLE SIH
+ {0xC2F8, 0xC2F8, prLV}, // Lo HANGUL SYLLABLE SSA
+ {0xC2F9, 0xC313, prLVT}, // Lo [27] HANGUL SYLLABLE SSAG..HANGUL SYLLABLE SSAH
+ {0xC314, 0xC314, prLV}, // Lo HANGUL SYLLABLE SSAE
+ {0xC315, 0xC32F, prLVT}, // Lo [27] HANGUL SYLLABLE SSAEG..HANGUL SYLLABLE SSAEH
+ {0xC330, 0xC330, prLV}, // Lo HANGUL SYLLABLE SSYA
+ {0xC331, 0xC34B, prLVT}, // Lo [27] HANGUL SYLLABLE SSYAG..HANGUL SYLLABLE SSYAH
+ {0xC34C, 0xC34C, prLV}, // Lo HANGUL SYLLABLE SSYAE
+ {0xC34D, 0xC367, prLVT}, // Lo [27] HANGUL SYLLABLE SSYAEG..HANGUL SYLLABLE SSYAEH
+ {0xC368, 0xC368, prLV}, // Lo HANGUL SYLLABLE SSEO
+ {0xC369, 0xC383, prLVT}, // Lo [27] HANGUL SYLLABLE SSEOG..HANGUL SYLLABLE SSEOH
+ {0xC384, 0xC384, prLV}, // Lo HANGUL SYLLABLE SSE
+ {0xC385, 0xC39F, prLVT}, // Lo [27] HANGUL SYLLABLE SSEG..HANGUL SYLLABLE SSEH
+ {0xC3A0, 0xC3A0, prLV}, // Lo HANGUL SYLLABLE SSYEO
+ {0xC3A1, 0xC3BB, prLVT}, // Lo [27] HANGUL SYLLABLE SSYEOG..HANGUL SYLLABLE SSYEOH
+ {0xC3BC, 0xC3BC, prLV}, // Lo HANGUL SYLLABLE SSYE
+ {0xC3BD, 0xC3D7, prLVT}, // Lo [27] HANGUL SYLLABLE SSYEG..HANGUL SYLLABLE SSYEH
+ {0xC3D8, 0xC3D8, prLV}, // Lo HANGUL SYLLABLE SSO
+ {0xC3D9, 0xC3F3, prLVT}, // Lo [27] HANGUL SYLLABLE SSOG..HANGUL SYLLABLE SSOH
+ {0xC3F4, 0xC3F4, prLV}, // Lo HANGUL SYLLABLE SSWA
+ {0xC3F5, 0xC40F, prLVT}, // Lo [27] HANGUL SYLLABLE SSWAG..HANGUL SYLLABLE SSWAH
+ {0xC410, 0xC410, prLV}, // Lo HANGUL SYLLABLE SSWAE
+ {0xC411, 0xC42B, prLVT}, // Lo [27] HANGUL SYLLABLE SSWAEG..HANGUL SYLLABLE SSWAEH
+ {0xC42C, 0xC42C, prLV}, // Lo HANGUL SYLLABLE SSOE
+ {0xC42D, 0xC447, prLVT}, // Lo [27] HANGUL SYLLABLE SSOEG..HANGUL SYLLABLE SSOEH
+ {0xC448, 0xC448, prLV}, // Lo HANGUL SYLLABLE SSYO
+ {0xC449, 0xC463, prLVT}, // Lo [27] HANGUL SYLLABLE SSYOG..HANGUL SYLLABLE SSYOH
+ {0xC464, 0xC464, prLV}, // Lo HANGUL SYLLABLE SSU
+ {0xC465, 0xC47F, prLVT}, // Lo [27] HANGUL SYLLABLE SSUG..HANGUL SYLLABLE SSUH
+ {0xC480, 0xC480, prLV}, // Lo HANGUL SYLLABLE SSWEO
+ {0xC481, 0xC49B, prLVT}, // Lo [27] HANGUL SYLLABLE SSWEOG..HANGUL SYLLABLE SSWEOH
+ {0xC49C, 0xC49C, prLV}, // Lo HANGUL SYLLABLE SSWE
+ {0xC49D, 0xC4B7, prLVT}, // Lo [27] HANGUL SYLLABLE SSWEG..HANGUL SYLLABLE SSWEH
+ {0xC4B8, 0xC4B8, prLV}, // Lo HANGUL SYLLABLE SSWI
+ {0xC4B9, 0xC4D3, prLVT}, // Lo [27] HANGUL SYLLABLE SSWIG..HANGUL SYLLABLE SSWIH
+ {0xC4D4, 0xC4D4, prLV}, // Lo HANGUL SYLLABLE SSYU
+ {0xC4D5, 0xC4EF, prLVT}, // Lo [27] HANGUL SYLLABLE SSYUG..HANGUL SYLLABLE SSYUH
+ {0xC4F0, 0xC4F0, prLV}, // Lo HANGUL SYLLABLE SSEU
+ {0xC4F1, 0xC50B, prLVT}, // Lo [27] HANGUL SYLLABLE SSEUG..HANGUL SYLLABLE SSEUH
+ {0xC50C, 0xC50C, prLV}, // Lo HANGUL SYLLABLE SSYI
+ {0xC50D, 0xC527, prLVT}, // Lo [27] HANGUL SYLLABLE SSYIG..HANGUL SYLLABLE SSYIH
+ {0xC528, 0xC528, prLV}, // Lo HANGUL SYLLABLE SSI
+ {0xC529, 0xC543, prLVT}, // Lo [27] HANGUL SYLLABLE SSIG..HANGUL SYLLABLE SSIH
+ {0xC544, 0xC544, prLV}, // Lo HANGUL SYLLABLE A
+ {0xC545, 0xC55F, prLVT}, // Lo [27] HANGUL SYLLABLE AG..HANGUL SYLLABLE AH
+ {0xC560, 0xC560, prLV}, // Lo HANGUL SYLLABLE AE
+ {0xC561, 0xC57B, prLVT}, // Lo [27] HANGUL SYLLABLE AEG..HANGUL SYLLABLE AEH
+ {0xC57C, 0xC57C, prLV}, // Lo HANGUL SYLLABLE YA
+ {0xC57D, 0xC597, prLVT}, // Lo [27] HANGUL SYLLABLE YAG..HANGUL SYLLABLE YAH
+ {0xC598, 0xC598, prLV}, // Lo HANGUL SYLLABLE YAE
+ {0xC599, 0xC5B3, prLVT}, // Lo [27] HANGUL SYLLABLE YAEG..HANGUL SYLLABLE YAEH
+ {0xC5B4, 0xC5B4, prLV}, // Lo HANGUL SYLLABLE EO
+ {0xC5B5, 0xC5CF, prLVT}, // Lo [27] HANGUL SYLLABLE EOG..HANGUL SYLLABLE EOH
+ {0xC5D0, 0xC5D0, prLV}, // Lo HANGUL SYLLABLE E
+ {0xC5D1, 0xC5EB, prLVT}, // Lo [27] HANGUL SYLLABLE EG..HANGUL SYLLABLE EH
+ {0xC5EC, 0xC5EC, prLV}, // Lo HANGUL SYLLABLE YEO
+ {0xC5ED, 0xC607, prLVT}, // Lo [27] HANGUL SYLLABLE YEOG..HANGUL SYLLABLE YEOH
+ {0xC608, 0xC608, prLV}, // Lo HANGUL SYLLABLE YE
+ {0xC609, 0xC623, prLVT}, // Lo [27] HANGUL SYLLABLE YEG..HANGUL SYLLABLE YEH
+ {0xC624, 0xC624, prLV}, // Lo HANGUL SYLLABLE O
+ {0xC625, 0xC63F, prLVT}, // Lo [27] HANGUL SYLLABLE OG..HANGUL SYLLABLE OH
+ {0xC640, 0xC640, prLV}, // Lo HANGUL SYLLABLE WA
+ {0xC641, 0xC65B, prLVT}, // Lo [27] HANGUL SYLLABLE WAG..HANGUL SYLLABLE WAH
+ {0xC65C, 0xC65C, prLV}, // Lo HANGUL SYLLABLE WAE
+ {0xC65D, 0xC677, prLVT}, // Lo [27] HANGUL SYLLABLE WAEG..HANGUL SYLLABLE WAEH
+ {0xC678, 0xC678, prLV}, // Lo HANGUL SYLLABLE OE
+ {0xC679, 0xC693, prLVT}, // Lo [27] HANGUL SYLLABLE OEG..HANGUL SYLLABLE OEH
+ {0xC694, 0xC694, prLV}, // Lo HANGUL SYLLABLE YO
+ {0xC695, 0xC6AF, prLVT}, // Lo [27] HANGUL SYLLABLE YOG..HANGUL SYLLABLE YOH
+ {0xC6B0, 0xC6B0, prLV}, // Lo HANGUL SYLLABLE U
+ {0xC6B1, 0xC6CB, prLVT}, // Lo [27] HANGUL SYLLABLE UG..HANGUL SYLLABLE UH
+ {0xC6CC, 0xC6CC, prLV}, // Lo HANGUL SYLLABLE WEO
+ {0xC6CD, 0xC6E7, prLVT}, // Lo [27] HANGUL SYLLABLE WEOG..HANGUL SYLLABLE WEOH
+ {0xC6E8, 0xC6E8, prLV}, // Lo HANGUL SYLLABLE WE
+ {0xC6E9, 0xC703, prLVT}, // Lo [27] HANGUL SYLLABLE WEG..HANGUL SYLLABLE WEH
+ {0xC704, 0xC704, prLV}, // Lo HANGUL SYLLABLE WI
+ {0xC705, 0xC71F, prLVT}, // Lo [27] HANGUL SYLLABLE WIG..HANGUL SYLLABLE WIH
+ {0xC720, 0xC720, prLV}, // Lo HANGUL SYLLABLE YU
+ {0xC721, 0xC73B, prLVT}, // Lo [27] HANGUL SYLLABLE YUG..HANGUL SYLLABLE YUH
+ {0xC73C, 0xC73C, prLV}, // Lo HANGUL SYLLABLE EU
+ {0xC73D, 0xC757, prLVT}, // Lo [27] HANGUL SYLLABLE EUG..HANGUL SYLLABLE EUH
+ {0xC758, 0xC758, prLV}, // Lo HANGUL SYLLABLE YI
+ {0xC759, 0xC773, prLVT}, // Lo [27] HANGUL SYLLABLE YIG..HANGUL SYLLABLE YIH
+ {0xC774, 0xC774, prLV}, // Lo HANGUL SYLLABLE I
+ {0xC775, 0xC78F, prLVT}, // Lo [27] HANGUL SYLLABLE IG..HANGUL SYLLABLE IH
+ {0xC790, 0xC790, prLV}, // Lo HANGUL SYLLABLE JA
+ {0xC791, 0xC7AB, prLVT}, // Lo [27] HANGUL SYLLABLE JAG..HANGUL SYLLABLE JAH
+ {0xC7AC, 0xC7AC, prLV}, // Lo HANGUL SYLLABLE JAE
+ {0xC7AD, 0xC7C7, prLVT}, // Lo [27] HANGUL SYLLABLE JAEG..HANGUL SYLLABLE JAEH
+ {0xC7C8, 0xC7C8, prLV}, // Lo HANGUL SYLLABLE JYA
+ {0xC7C9, 0xC7E3, prLVT}, // Lo [27] HANGUL SYLLABLE JYAG..HANGUL SYLLABLE JYAH
+ {0xC7E4, 0xC7E4, prLV}, // Lo HANGUL SYLLABLE JYAE
+ {0xC7E5, 0xC7FF, prLVT}, // Lo [27] HANGUL SYLLABLE JYAEG..HANGUL SYLLABLE JYAEH
+ {0xC800, 0xC800, prLV}, // Lo HANGUL SYLLABLE JEO
+ {0xC801, 0xC81B, prLVT}, // Lo [27] HANGUL SYLLABLE JEOG..HANGUL SYLLABLE JEOH
+ {0xC81C, 0xC81C, prLV}, // Lo HANGUL SYLLABLE JE
+ {0xC81D, 0xC837, prLVT}, // Lo [27] HANGUL SYLLABLE JEG..HANGUL SYLLABLE JEH
+ {0xC838, 0xC838, prLV}, // Lo HANGUL SYLLABLE JYEO
+ {0xC839, 0xC853, prLVT}, // Lo [27] HANGUL SYLLABLE JYEOG..HANGUL SYLLABLE JYEOH
+ {0xC854, 0xC854, prLV}, // Lo HANGUL SYLLABLE JYE
+ {0xC855, 0xC86F, prLVT}, // Lo [27] HANGUL SYLLABLE JYEG..HANGUL SYLLABLE JYEH
+ {0xC870, 0xC870, prLV}, // Lo HANGUL SYLLABLE JO
+ {0xC871, 0xC88B, prLVT}, // Lo [27] HANGUL SYLLABLE JOG..HANGUL SYLLABLE JOH
+ {0xC88C, 0xC88C, prLV}, // Lo HANGUL SYLLABLE JWA
+ {0xC88D, 0xC8A7, prLVT}, // Lo [27] HANGUL SYLLABLE JWAG..HANGUL SYLLABLE JWAH
+ {0xC8A8, 0xC8A8, prLV}, // Lo HANGUL SYLLABLE JWAE
+ {0xC8A9, 0xC8C3, prLVT}, // Lo [27] HANGUL SYLLABLE JWAEG..HANGUL SYLLABLE JWAEH
+ {0xC8C4, 0xC8C4, prLV}, // Lo HANGUL SYLLABLE JOE
+ {0xC8C5, 0xC8DF, prLVT}, // Lo [27] HANGUL SYLLABLE JOEG..HANGUL SYLLABLE JOEH
+ {0xC8E0, 0xC8E0, prLV}, // Lo HANGUL SYLLABLE JYO
+ {0xC8E1, 0xC8FB, prLVT}, // Lo [27] HANGUL SYLLABLE JYOG..HANGUL SYLLABLE JYOH
+ {0xC8FC, 0xC8FC, prLV}, // Lo HANGUL SYLLABLE JU
+ {0xC8FD, 0xC917, prLVT}, // Lo [27] HANGUL SYLLABLE JUG..HANGUL SYLLABLE JUH
+ {0xC918, 0xC918, prLV}, // Lo HANGUL SYLLABLE JWEO
+ {0xC919, 0xC933, prLVT}, // Lo [27] HANGUL SYLLABLE JWEOG..HANGUL SYLLABLE JWEOH
+ {0xC934, 0xC934, prLV}, // Lo HANGUL SYLLABLE JWE
+ {0xC935, 0xC94F, prLVT}, // Lo [27] HANGUL SYLLABLE JWEG..HANGUL SYLLABLE JWEH
+ {0xC950, 0xC950, prLV}, // Lo HANGUL SYLLABLE JWI
+ {0xC951, 0xC96B, prLVT}, // Lo [27] HANGUL SYLLABLE JWIG..HANGUL SYLLABLE JWIH
+ {0xC96C, 0xC96C, prLV}, // Lo HANGUL SYLLABLE JYU
+ {0xC96D, 0xC987, prLVT}, // Lo [27] HANGUL SYLLABLE JYUG..HANGUL SYLLABLE JYUH
+ {0xC988, 0xC988, prLV}, // Lo HANGUL SYLLABLE JEU
+ {0xC989, 0xC9A3, prLVT}, // Lo [27] HANGUL SYLLABLE JEUG..HANGUL SYLLABLE JEUH
+ {0xC9A4, 0xC9A4, prLV}, // Lo HANGUL SYLLABLE JYI
+ {0xC9A5, 0xC9BF, prLVT}, // Lo [27] HANGUL SYLLABLE JYIG..HANGUL SYLLABLE JYIH
+ {0xC9C0, 0xC9C0, prLV}, // Lo HANGUL SYLLABLE JI
+ {0xC9C1, 0xC9DB, prLVT}, // Lo [27] HANGUL SYLLABLE JIG..HANGUL SYLLABLE JIH
+ {0xC9DC, 0xC9DC, prLV}, // Lo HANGUL SYLLABLE JJA
+ {0xC9DD, 0xC9F7, prLVT}, // Lo [27] HANGUL SYLLABLE JJAG..HANGUL SYLLABLE JJAH
+ {0xC9F8, 0xC9F8, prLV}, // Lo HANGUL SYLLABLE JJAE
+ {0xC9F9, 0xCA13, prLVT}, // Lo [27] HANGUL SYLLABLE JJAEG..HANGUL SYLLABLE JJAEH
+ {0xCA14, 0xCA14, prLV}, // Lo HANGUL SYLLABLE JJYA
+ {0xCA15, 0xCA2F, prLVT}, // Lo [27] HANGUL SYLLABLE JJYAG..HANGUL SYLLABLE JJYAH
+ {0xCA30, 0xCA30, prLV}, // Lo HANGUL SYLLABLE JJYAE
+ {0xCA31, 0xCA4B, prLVT}, // Lo [27] HANGUL SYLLABLE JJYAEG..HANGUL SYLLABLE JJYAEH
+ {0xCA4C, 0xCA4C, prLV}, // Lo HANGUL SYLLABLE JJEO
+ {0xCA4D, 0xCA67, prLVT}, // Lo [27] HANGUL SYLLABLE JJEOG..HANGUL SYLLABLE JJEOH
+ {0xCA68, 0xCA68, prLV}, // Lo HANGUL SYLLABLE JJE
+ {0xCA69, 0xCA83, prLVT}, // Lo [27] HANGUL SYLLABLE JJEG..HANGUL SYLLABLE JJEH
+ {0xCA84, 0xCA84, prLV}, // Lo HANGUL SYLLABLE JJYEO
+ {0xCA85, 0xCA9F, prLVT}, // Lo [27] HANGUL SYLLABLE JJYEOG..HANGUL SYLLABLE JJYEOH
+ {0xCAA0, 0xCAA0, prLV}, // Lo HANGUL SYLLABLE JJYE
+ {0xCAA1, 0xCABB, prLVT}, // Lo [27] HANGUL SYLLABLE JJYEG..HANGUL SYLLABLE JJYEH
+ {0xCABC, 0xCABC, prLV}, // Lo HANGUL SYLLABLE JJO
+ {0xCABD, 0xCAD7, prLVT}, // Lo [27] HANGUL SYLLABLE JJOG..HANGUL SYLLABLE JJOH
+ {0xCAD8, 0xCAD8, prLV}, // Lo HANGUL SYLLABLE JJWA
+ {0xCAD9, 0xCAF3, prLVT}, // Lo [27] HANGUL SYLLABLE JJWAG..HANGUL SYLLABLE JJWAH
+ {0xCAF4, 0xCAF4, prLV}, // Lo HANGUL SYLLABLE JJWAE
+ {0xCAF5, 0xCB0F, prLVT}, // Lo [27] HANGUL SYLLABLE JJWAEG..HANGUL SYLLABLE JJWAEH
+ {0xCB10, 0xCB10, prLV}, // Lo HANGUL SYLLABLE JJOE
+ {0xCB11, 0xCB2B, prLVT}, // Lo [27] HANGUL SYLLABLE JJOEG..HANGUL SYLLABLE JJOEH
+ {0xCB2C, 0xCB2C, prLV}, // Lo HANGUL SYLLABLE JJYO
+ {0xCB2D, 0xCB47, prLVT}, // Lo [27] HANGUL SYLLABLE JJYOG..HANGUL SYLLABLE JJYOH
+ {0xCB48, 0xCB48, prLV}, // Lo HANGUL SYLLABLE JJU
+ {0xCB49, 0xCB63, prLVT}, // Lo [27] HANGUL SYLLABLE JJUG..HANGUL SYLLABLE JJUH
+ {0xCB64, 0xCB64, prLV}, // Lo HANGUL SYLLABLE JJWEO
+ {0xCB65, 0xCB7F, prLVT}, // Lo [27] HANGUL SYLLABLE JJWEOG..HANGUL SYLLABLE JJWEOH
+ {0xCB80, 0xCB80, prLV}, // Lo HANGUL SYLLABLE JJWE
+ {0xCB81, 0xCB9B, prLVT}, // Lo [27] HANGUL SYLLABLE JJWEG..HANGUL SYLLABLE JJWEH
+ {0xCB9C, 0xCB9C, prLV}, // Lo HANGUL SYLLABLE JJWI
+ {0xCB9D, 0xCBB7, prLVT}, // Lo [27] HANGUL SYLLABLE JJWIG..HANGUL SYLLABLE JJWIH
+ {0xCBB8, 0xCBB8, prLV}, // Lo HANGUL SYLLABLE JJYU
+ {0xCBB9, 0xCBD3, prLVT}, // Lo [27] HANGUL SYLLABLE JJYUG..HANGUL SYLLABLE JJYUH
+ {0xCBD4, 0xCBD4, prLV}, // Lo HANGUL SYLLABLE JJEU
+ {0xCBD5, 0xCBEF, prLVT}, // Lo [27] HANGUL SYLLABLE JJEUG..HANGUL SYLLABLE JJEUH
+ {0xCBF0, 0xCBF0, prLV}, // Lo HANGUL SYLLABLE JJYI
+ {0xCBF1, 0xCC0B, prLVT}, // Lo [27] HANGUL SYLLABLE JJYIG..HANGUL SYLLABLE JJYIH
+ {0xCC0C, 0xCC0C, prLV}, // Lo HANGUL SYLLABLE JJI
+ {0xCC0D, 0xCC27, prLVT}, // Lo [27] HANGUL SYLLABLE JJIG..HANGUL SYLLABLE JJIH
+ {0xCC28, 0xCC28, prLV}, // Lo HANGUL SYLLABLE CA
+ {0xCC29, 0xCC43, prLVT}, // Lo [27] HANGUL SYLLABLE CAG..HANGUL SYLLABLE CAH
+ {0xCC44, 0xCC44, prLV}, // Lo HANGUL SYLLABLE CAE
+ {0xCC45, 0xCC5F, prLVT}, // Lo [27] HANGUL SYLLABLE CAEG..HANGUL SYLLABLE CAEH
+ {0xCC60, 0xCC60, prLV}, // Lo HANGUL SYLLABLE CYA
+ {0xCC61, 0xCC7B, prLVT}, // Lo [27] HANGUL SYLLABLE CYAG..HANGUL SYLLABLE CYAH
+ {0xCC7C, 0xCC7C, prLV}, // Lo HANGUL SYLLABLE CYAE
+ {0xCC7D, 0xCC97, prLVT}, // Lo [27] HANGUL SYLLABLE CYAEG..HANGUL SYLLABLE CYAEH
+ {0xCC98, 0xCC98, prLV}, // Lo HANGUL SYLLABLE CEO
+ {0xCC99, 0xCCB3, prLVT}, // Lo [27] HANGUL SYLLABLE CEOG..HANGUL SYLLABLE CEOH
+ {0xCCB4, 0xCCB4, prLV}, // Lo HANGUL SYLLABLE CE
+ {0xCCB5, 0xCCCF, prLVT}, // Lo [27] HANGUL SYLLABLE CEG..HANGUL SYLLABLE CEH
+ {0xCCD0, 0xCCD0, prLV}, // Lo HANGUL SYLLABLE CYEO
+ {0xCCD1, 0xCCEB, prLVT}, // Lo [27] HANGUL SYLLABLE CYEOG..HANGUL SYLLABLE CYEOH
+ {0xCCEC, 0xCCEC, prLV}, // Lo HANGUL SYLLABLE CYE
+ {0xCCED, 0xCD07, prLVT}, // Lo [27] HANGUL SYLLABLE CYEG..HANGUL SYLLABLE CYEH
+ {0xCD08, 0xCD08, prLV}, // Lo HANGUL SYLLABLE CO
+ {0xCD09, 0xCD23, prLVT}, // Lo [27] HANGUL SYLLABLE COG..HANGUL SYLLABLE COH
+ {0xCD24, 0xCD24, prLV}, // Lo HANGUL SYLLABLE CWA
+ {0xCD25, 0xCD3F, prLVT}, // Lo [27] HANGUL SYLLABLE CWAG..HANGUL SYLLABLE CWAH
+ {0xCD40, 0xCD40, prLV}, // Lo HANGUL SYLLABLE CWAE
+ {0xCD41, 0xCD5B, prLVT}, // Lo [27] HANGUL SYLLABLE CWAEG..HANGUL SYLLABLE CWAEH
+ {0xCD5C, 0xCD5C, prLV}, // Lo HANGUL SYLLABLE COE
+ {0xCD5D, 0xCD77, prLVT}, // Lo [27] HANGUL SYLLABLE COEG..HANGUL SYLLABLE COEH
+ {0xCD78, 0xCD78, prLV}, // Lo HANGUL SYLLABLE CYO
+ {0xCD79, 0xCD93, prLVT}, // Lo [27] HANGUL SYLLABLE CYOG..HANGUL SYLLABLE CYOH
+ {0xCD94, 0xCD94, prLV}, // Lo HANGUL SYLLABLE CU
+ {0xCD95, 0xCDAF, prLVT}, // Lo [27] HANGUL SYLLABLE CUG..HANGUL SYLLABLE CUH
+ {0xCDB0, 0xCDB0, prLV}, // Lo HANGUL SYLLABLE CWEO
+ {0xCDB1, 0xCDCB, prLVT}, // Lo [27] HANGUL SYLLABLE CWEOG..HANGUL SYLLABLE CWEOH
+ {0xCDCC, 0xCDCC, prLV}, // Lo HANGUL SYLLABLE CWE
+ {0xCDCD, 0xCDE7, prLVT}, // Lo [27] HANGUL SYLLABLE CWEG..HANGUL SYLLABLE CWEH
+ {0xCDE8, 0xCDE8, prLV}, // Lo HANGUL SYLLABLE CWI
+ {0xCDE9, 0xCE03, prLVT}, // Lo [27] HANGUL SYLLABLE CWIG..HANGUL SYLLABLE CWIH
+ {0xCE04, 0xCE04, prLV}, // Lo HANGUL SYLLABLE CYU
+ {0xCE05, 0xCE1F, prLVT}, // Lo [27] HANGUL SYLLABLE CYUG..HANGUL SYLLABLE CYUH
+ {0xCE20, 0xCE20, prLV}, // Lo HANGUL SYLLABLE CEU
+ {0xCE21, 0xCE3B, prLVT}, // Lo [27] HANGUL SYLLABLE CEUG..HANGUL SYLLABLE CEUH
+ {0xCE3C, 0xCE3C, prLV}, // Lo HANGUL SYLLABLE CYI
+ {0xCE3D, 0xCE57, prLVT}, // Lo [27] HANGUL SYLLABLE CYIG..HANGUL SYLLABLE CYIH
+ {0xCE58, 0xCE58, prLV}, // Lo HANGUL SYLLABLE CI
+ {0xCE59, 0xCE73, prLVT}, // Lo [27] HANGUL SYLLABLE CIG..HANGUL SYLLABLE CIH
+ {0xCE74, 0xCE74, prLV}, // Lo HANGUL SYLLABLE KA
+ {0xCE75, 0xCE8F, prLVT}, // Lo [27] HANGUL SYLLABLE KAG..HANGUL SYLLABLE KAH
+ {0xCE90, 0xCE90, prLV}, // Lo HANGUL SYLLABLE KAE
+ {0xCE91, 0xCEAB, prLVT}, // Lo [27] HANGUL SYLLABLE KAEG..HANGUL SYLLABLE KAEH
+ {0xCEAC, 0xCEAC, prLV}, // Lo HANGUL SYLLABLE KYA
+ {0xCEAD, 0xCEC7, prLVT}, // Lo [27] HANGUL SYLLABLE KYAG..HANGUL SYLLABLE KYAH
+ {0xCEC8, 0xCEC8, prLV}, // Lo HANGUL SYLLABLE KYAE
+ {0xCEC9, 0xCEE3, prLVT}, // Lo [27] HANGUL SYLLABLE KYAEG..HANGUL SYLLABLE KYAEH
+ {0xCEE4, 0xCEE4, prLV}, // Lo HANGUL SYLLABLE KEO
+ {0xCEE5, 0xCEFF, prLVT}, // Lo [27] HANGUL SYLLABLE KEOG..HANGUL SYLLABLE KEOH
+ {0xCF00, 0xCF00, prLV}, // Lo HANGUL SYLLABLE KE
+ {0xCF01, 0xCF1B, prLVT}, // Lo [27] HANGUL SYLLABLE KEG..HANGUL SYLLABLE KEH
+ {0xCF1C, 0xCF1C, prLV}, // Lo HANGUL SYLLABLE KYEO
+ {0xCF1D, 0xCF37, prLVT}, // Lo [27] HANGUL SYLLABLE KYEOG..HANGUL SYLLABLE KYEOH
+ {0xCF38, 0xCF38, prLV}, // Lo HANGUL SYLLABLE KYE
+ {0xCF39, 0xCF53, prLVT}, // Lo [27] HANGUL SYLLABLE KYEG..HANGUL SYLLABLE KYEH
+ {0xCF54, 0xCF54, prLV}, // Lo HANGUL SYLLABLE KO
+ {0xCF55, 0xCF6F, prLVT}, // Lo [27] HANGUL SYLLABLE KOG..HANGUL SYLLABLE KOH
+ {0xCF70, 0xCF70, prLV}, // Lo HANGUL SYLLABLE KWA
+ {0xCF71, 0xCF8B, prLVT}, // Lo [27] HANGUL SYLLABLE KWAG..HANGUL SYLLABLE KWAH
+ {0xCF8C, 0xCF8C, prLV}, // Lo HANGUL SYLLABLE KWAE
+ {0xCF8D, 0xCFA7, prLVT}, // Lo [27] HANGUL SYLLABLE KWAEG..HANGUL SYLLABLE KWAEH
+ {0xCFA8, 0xCFA8, prLV}, // Lo HANGUL SYLLABLE KOE
+ {0xCFA9, 0xCFC3, prLVT}, // Lo [27] HANGUL SYLLABLE KOEG..HANGUL SYLLABLE KOEH
+ {0xCFC4, 0xCFC4, prLV}, // Lo HANGUL SYLLABLE KYO
+ {0xCFC5, 0xCFDF, prLVT}, // Lo [27] HANGUL SYLLABLE KYOG..HANGUL SYLLABLE KYOH
+ {0xCFE0, 0xCFE0, prLV}, // Lo HANGUL SYLLABLE KU
+ {0xCFE1, 0xCFFB, prLVT}, // Lo [27] HANGUL SYLLABLE KUG..HANGUL SYLLABLE KUH
+ {0xCFFC, 0xCFFC, prLV}, // Lo HANGUL SYLLABLE KWEO
+ {0xCFFD, 0xD017, prLVT}, // Lo [27] HANGUL SYLLABLE KWEOG..HANGUL SYLLABLE KWEOH
+ {0xD018, 0xD018, prLV}, // Lo HANGUL SYLLABLE KWE
+ {0xD019, 0xD033, prLVT}, // Lo [27] HANGUL SYLLABLE KWEG..HANGUL SYLLABLE KWEH
+ {0xD034, 0xD034, prLV}, // Lo HANGUL SYLLABLE KWI
+ {0xD035, 0xD04F, prLVT}, // Lo [27] HANGUL SYLLABLE KWIG..HANGUL SYLLABLE KWIH
+ {0xD050, 0xD050, prLV}, // Lo HANGUL SYLLABLE KYU
+ {0xD051, 0xD06B, prLVT}, // Lo [27] HANGUL SYLLABLE KYUG..HANGUL SYLLABLE KYUH
+ {0xD06C, 0xD06C, prLV}, // Lo HANGUL SYLLABLE KEU
+ {0xD06D, 0xD087, prLVT}, // Lo [27] HANGUL SYLLABLE KEUG..HANGUL SYLLABLE KEUH
+ {0xD088, 0xD088, prLV}, // Lo HANGUL SYLLABLE KYI
+ {0xD089, 0xD0A3, prLVT}, // Lo [27] HANGUL SYLLABLE KYIG..HANGUL SYLLABLE KYIH
+ {0xD0A4, 0xD0A4, prLV}, // Lo HANGUL SYLLABLE KI
+ {0xD0A5, 0xD0BF, prLVT}, // Lo [27] HANGUL SYLLABLE KIG..HANGUL SYLLABLE KIH
+ {0xD0C0, 0xD0C0, prLV}, // Lo HANGUL SYLLABLE TA
+ {0xD0C1, 0xD0DB, prLVT}, // Lo [27] HANGUL SYLLABLE TAG..HANGUL SYLLABLE TAH
+ {0xD0DC, 0xD0DC, prLV}, // Lo HANGUL SYLLABLE TAE
+ {0xD0DD, 0xD0F7, prLVT}, // Lo [27] HANGUL SYLLABLE TAEG..HANGUL SYLLABLE TAEH
+ {0xD0F8, 0xD0F8, prLV}, // Lo HANGUL SYLLABLE TYA
+ {0xD0F9, 0xD113, prLVT}, // Lo [27] HANGUL SYLLABLE TYAG..HANGUL SYLLABLE TYAH
+ {0xD114, 0xD114, prLV}, // Lo HANGUL SYLLABLE TYAE
+ {0xD115, 0xD12F, prLVT}, // Lo [27] HANGUL SYLLABLE TYAEG..HANGUL SYLLABLE TYAEH
+ {0xD130, 0xD130, prLV}, // Lo HANGUL SYLLABLE TEO
+ {0xD131, 0xD14B, prLVT}, // Lo [27] HANGUL SYLLABLE TEOG..HANGUL SYLLABLE TEOH
+ {0xD14C, 0xD14C, prLV}, // Lo HANGUL SYLLABLE TE
+ {0xD14D, 0xD167, prLVT}, // Lo [27] HANGUL SYLLABLE TEG..HANGUL SYLLABLE TEH
+ {0xD168, 0xD168, prLV}, // Lo HANGUL SYLLABLE TYEO
+ {0xD169, 0xD183, prLVT}, // Lo [27] HANGUL SYLLABLE TYEOG..HANGUL SYLLABLE TYEOH
+ {0xD184, 0xD184, prLV}, // Lo HANGUL SYLLABLE TYE
+ {0xD185, 0xD19F, prLVT}, // Lo [27] HANGUL SYLLABLE TYEG..HANGUL SYLLABLE TYEH
+ {0xD1A0, 0xD1A0, prLV}, // Lo HANGUL SYLLABLE TO
+ {0xD1A1, 0xD1BB, prLVT}, // Lo [27] HANGUL SYLLABLE TOG..HANGUL SYLLABLE TOH
+ {0xD1BC, 0xD1BC, prLV}, // Lo HANGUL SYLLABLE TWA
+ {0xD1BD, 0xD1D7, prLVT}, // Lo [27] HANGUL SYLLABLE TWAG..HANGUL SYLLABLE TWAH
+ {0xD1D8, 0xD1D8, prLV}, // Lo HANGUL SYLLABLE TWAE
+ {0xD1D9, 0xD1F3, prLVT}, // Lo [27] HANGUL SYLLABLE TWAEG..HANGUL SYLLABLE TWAEH
+ {0xD1F4, 0xD1F4, prLV}, // Lo HANGUL SYLLABLE TOE
+ {0xD1F5, 0xD20F, prLVT}, // Lo [27] HANGUL SYLLABLE TOEG..HANGUL SYLLABLE TOEH
+ {0xD210, 0xD210, prLV}, // Lo HANGUL SYLLABLE TYO
+ {0xD211, 0xD22B, prLVT}, // Lo [27] HANGUL SYLLABLE TYOG..HANGUL SYLLABLE TYOH
+ {0xD22C, 0xD22C, prLV}, // Lo HANGUL SYLLABLE TU
+ {0xD22D, 0xD247, prLVT}, // Lo [27] HANGUL SYLLABLE TUG..HANGUL SYLLABLE TUH
+ {0xD248, 0xD248, prLV}, // Lo HANGUL SYLLABLE TWEO
+ {0xD249, 0xD263, prLVT}, // Lo [27] HANGUL SYLLABLE TWEOG..HANGUL SYLLABLE TWEOH
+ {0xD264, 0xD264, prLV}, // Lo HANGUL SYLLABLE TWE
+ {0xD265, 0xD27F, prLVT}, // Lo [27] HANGUL SYLLABLE TWEG..HANGUL SYLLABLE TWEH
+ {0xD280, 0xD280, prLV}, // Lo HANGUL SYLLABLE TWI
+ {0xD281, 0xD29B, prLVT}, // Lo [27] HANGUL SYLLABLE TWIG..HANGUL SYLLABLE TWIH
+ {0xD29C, 0xD29C, prLV}, // Lo HANGUL SYLLABLE TYU
+ {0xD29D, 0xD2B7, prLVT}, // Lo [27] HANGUL SYLLABLE TYUG..HANGUL SYLLABLE TYUH
+ {0xD2B8, 0xD2B8, prLV}, // Lo HANGUL SYLLABLE TEU
+ {0xD2B9, 0xD2D3, prLVT}, // Lo [27] HANGUL SYLLABLE TEUG..HANGUL SYLLABLE TEUH
+ {0xD2D4, 0xD2D4, prLV}, // Lo HANGUL SYLLABLE TYI
+ {0xD2D5, 0xD2EF, prLVT}, // Lo [27] HANGUL SYLLABLE TYIG..HANGUL SYLLABLE TYIH
+ {0xD2F0, 0xD2F0, prLV}, // Lo HANGUL SYLLABLE TI
+ {0xD2F1, 0xD30B, prLVT}, // Lo [27] HANGUL SYLLABLE TIG..HANGUL SYLLABLE TIH
+ {0xD30C, 0xD30C, prLV}, // Lo HANGUL SYLLABLE PA
+ {0xD30D, 0xD327, prLVT}, // Lo [27] HANGUL SYLLABLE PAG..HANGUL SYLLABLE PAH
+ {0xD328, 0xD328, prLV}, // Lo HANGUL SYLLABLE PAE
+ {0xD329, 0xD343, prLVT}, // Lo [27] HANGUL SYLLABLE PAEG..HANGUL SYLLABLE PAEH
+ {0xD344, 0xD344, prLV}, // Lo HANGUL SYLLABLE PYA
+ {0xD345, 0xD35F, prLVT}, // Lo [27] HANGUL SYLLABLE PYAG..HANGUL SYLLABLE PYAH
+ {0xD360, 0xD360, prLV}, // Lo HANGUL SYLLABLE PYAE
+ {0xD361, 0xD37B, prLVT}, // Lo [27] HANGUL SYLLABLE PYAEG..HANGUL SYLLABLE PYAEH
+ {0xD37C, 0xD37C, prLV}, // Lo HANGUL SYLLABLE PEO
+ {0xD37D, 0xD397, prLVT}, // Lo [27] HANGUL SYLLABLE PEOG..HANGUL SYLLABLE PEOH
+ {0xD398, 0xD398, prLV}, // Lo HANGUL SYLLABLE PE
+ {0xD399, 0xD3B3, prLVT}, // Lo [27] HANGUL SYLLABLE PEG..HANGUL SYLLABLE PEH
+ {0xD3B4, 0xD3B4, prLV}, // Lo HANGUL SYLLABLE PYEO
+ {0xD3B5, 0xD3CF, prLVT}, // Lo [27] HANGUL SYLLABLE PYEOG..HANGUL SYLLABLE PYEOH
+ {0xD3D0, 0xD3D0, prLV}, // Lo HANGUL SYLLABLE PYE
+ {0xD3D1, 0xD3EB, prLVT}, // Lo [27] HANGUL SYLLABLE PYEG..HANGUL SYLLABLE PYEH
+ {0xD3EC, 0xD3EC, prLV}, // Lo HANGUL SYLLABLE PO
+ {0xD3ED, 0xD407, prLVT}, // Lo [27] HANGUL SYLLABLE POG..HANGUL SYLLABLE POH
+ {0xD408, 0xD408, prLV}, // Lo HANGUL SYLLABLE PWA
+ {0xD409, 0xD423, prLVT}, // Lo [27] HANGUL SYLLABLE PWAG..HANGUL SYLLABLE PWAH
+ {0xD424, 0xD424, prLV}, // Lo HANGUL SYLLABLE PWAE
+ {0xD425, 0xD43F, prLVT}, // Lo [27] HANGUL SYLLABLE PWAEG..HANGUL SYLLABLE PWAEH
+ {0xD440, 0xD440, prLV}, // Lo HANGUL SYLLABLE POE
+ {0xD441, 0xD45B, prLVT}, // Lo [27] HANGUL SYLLABLE POEG..HANGUL SYLLABLE POEH
+ {0xD45C, 0xD45C, prLV}, // Lo HANGUL SYLLABLE PYO
+ {0xD45D, 0xD477, prLVT}, // Lo [27] HANGUL SYLLABLE PYOG..HANGUL SYLLABLE PYOH
+ {0xD478, 0xD478, prLV}, // Lo HANGUL SYLLABLE PU
+ {0xD479, 0xD493, prLVT}, // Lo [27] HANGUL SYLLABLE PUG..HANGUL SYLLABLE PUH
+ {0xD494, 0xD494, prLV}, // Lo HANGUL SYLLABLE PWEO
+ {0xD495, 0xD4AF, prLVT}, // Lo [27] HANGUL SYLLABLE PWEOG..HANGUL SYLLABLE PWEOH
+ {0xD4B0, 0xD4B0, prLV}, // Lo HANGUL SYLLABLE PWE
+ {0xD4B1, 0xD4CB, prLVT}, // Lo [27] HANGUL SYLLABLE PWEG..HANGUL SYLLABLE PWEH
+ {0xD4CC, 0xD4CC, prLV}, // Lo HANGUL SYLLABLE PWI
+ {0xD4CD, 0xD4E7, prLVT}, // Lo [27] HANGUL SYLLABLE PWIG..HANGUL SYLLABLE PWIH
+ {0xD4E8, 0xD4E8, prLV}, // Lo HANGUL SYLLABLE PYU
+ {0xD4E9, 0xD503, prLVT}, // Lo [27] HANGUL SYLLABLE PYUG..HANGUL SYLLABLE PYUH
+ {0xD504, 0xD504, prLV}, // Lo HANGUL SYLLABLE PEU
+ {0xD505, 0xD51F, prLVT}, // Lo [27] HANGUL SYLLABLE PEUG..HANGUL SYLLABLE PEUH
+ {0xD520, 0xD520, prLV}, // Lo HANGUL SYLLABLE PYI
+ {0xD521, 0xD53B, prLVT}, // Lo [27] HANGUL SYLLABLE PYIG..HANGUL SYLLABLE PYIH
+ {0xD53C, 0xD53C, prLV}, // Lo HANGUL SYLLABLE PI
+ {0xD53D, 0xD557, prLVT}, // Lo [27] HANGUL SYLLABLE PIG..HANGUL SYLLABLE PIH
+ {0xD558, 0xD558, prLV}, // Lo HANGUL SYLLABLE HA
+ {0xD559, 0xD573, prLVT}, // Lo [27] HANGUL SYLLABLE HAG..HANGUL SYLLABLE HAH
+ {0xD574, 0xD574, prLV}, // Lo HANGUL SYLLABLE HAE
+ {0xD575, 0xD58F, prLVT}, // Lo [27] HANGUL SYLLABLE HAEG..HANGUL SYLLABLE HAEH
+ {0xD590, 0xD590, prLV}, // Lo HANGUL SYLLABLE HYA
+ {0xD591, 0xD5AB, prLVT}, // Lo [27] HANGUL SYLLABLE HYAG..HANGUL SYLLABLE HYAH
+ {0xD5AC, 0xD5AC, prLV}, // Lo HANGUL SYLLABLE HYAE
+ {0xD5AD, 0xD5C7, prLVT}, // Lo [27] HANGUL SYLLABLE HYAEG..HANGUL SYLLABLE HYAEH
+ {0xD5C8, 0xD5C8, prLV}, // Lo HANGUL SYLLABLE HEO
+ {0xD5C9, 0xD5E3, prLVT}, // Lo [27] HANGUL SYLLABLE HEOG..HANGUL SYLLABLE HEOH
+ {0xD5E4, 0xD5E4, prLV}, // Lo HANGUL SYLLABLE HE
+ {0xD5E5, 0xD5FF, prLVT}, // Lo [27] HANGUL SYLLABLE HEG..HANGUL SYLLABLE HEH
+ {0xD600, 0xD600, prLV}, // Lo HANGUL SYLLABLE HYEO
+ {0xD601, 0xD61B, prLVT}, // Lo [27] HANGUL SYLLABLE HYEOG..HANGUL SYLLABLE HYEOH
+ {0xD61C, 0xD61C, prLV}, // Lo HANGUL SYLLABLE HYE
+ {0xD61D, 0xD637, prLVT}, // Lo [27] HANGUL SYLLABLE HYEG..HANGUL SYLLABLE HYEH
+ {0xD638, 0xD638, prLV}, // Lo HANGUL SYLLABLE HO
+ {0xD639, 0xD653, prLVT}, // Lo [27] HANGUL SYLLABLE HOG..HANGUL SYLLABLE HOH
+ {0xD654, 0xD654, prLV}, // Lo HANGUL SYLLABLE HWA
+ {0xD655, 0xD66F, prLVT}, // Lo [27] HANGUL SYLLABLE HWAG..HANGUL SYLLABLE HWAH
+ {0xD670, 0xD670, prLV}, // Lo HANGUL SYLLABLE HWAE
+ {0xD671, 0xD68B, prLVT}, // Lo [27] HANGUL SYLLABLE HWAEG..HANGUL SYLLABLE HWAEH
+ {0xD68C, 0xD68C, prLV}, // Lo HANGUL SYLLABLE HOE
+ {0xD68D, 0xD6A7, prLVT}, // Lo [27] HANGUL SYLLABLE HOEG..HANGUL SYLLABLE HOEH
+ {0xD6A8, 0xD6A8, prLV}, // Lo HANGUL SYLLABLE HYO
+ {0xD6A9, 0xD6C3, prLVT}, // Lo [27] HANGUL SYLLABLE HYOG..HANGUL SYLLABLE HYOH
+ {0xD6C4, 0xD6C4, prLV}, // Lo HANGUL SYLLABLE HU
+ {0xD6C5, 0xD6DF, prLVT}, // Lo [27] HANGUL SYLLABLE HUG..HANGUL SYLLABLE HUH
+ {0xD6E0, 0xD6E0, prLV}, // Lo HANGUL SYLLABLE HWEO
+ {0xD6E1, 0xD6FB, prLVT}, // Lo [27] HANGUL SYLLABLE HWEOG..HANGUL SYLLABLE HWEOH
+ {0xD6FC, 0xD6FC, prLV}, // Lo HANGUL SYLLABLE HWE
+ {0xD6FD, 0xD717, prLVT}, // Lo [27] HANGUL SYLLABLE HWEG..HANGUL SYLLABLE HWEH
+ {0xD718, 0xD718, prLV}, // Lo HANGUL SYLLABLE HWI
+ {0xD719, 0xD733, prLVT}, // Lo [27] HANGUL SYLLABLE HWIG..HANGUL SYLLABLE HWIH
+ {0xD734, 0xD734, prLV}, // Lo HANGUL SYLLABLE HYU
+ {0xD735, 0xD74F, prLVT}, // Lo [27] HANGUL SYLLABLE HYUG..HANGUL SYLLABLE HYUH
+ {0xD750, 0xD750, prLV}, // Lo HANGUL SYLLABLE HEU
+ {0xD751, 0xD76B, prLVT}, // Lo [27] HANGUL SYLLABLE HEUG..HANGUL SYLLABLE HEUH
+ {0xD76C, 0xD76C, prLV}, // Lo HANGUL SYLLABLE HYI
+ {0xD76D, 0xD787, prLVT}, // Lo [27] HANGUL SYLLABLE HYIG..HANGUL SYLLABLE HYIH
+ {0xD788, 0xD788, prLV}, // Lo HANGUL SYLLABLE HI
+ {0xD789, 0xD7A3, prLVT}, // Lo [27] HANGUL SYLLABLE HIG..HANGUL SYLLABLE HIH
+ {0xD7B0, 0xD7C6, prV}, // Lo [23] HANGUL JUNGSEONG O-YEO..HANGUL JUNGSEONG ARAEA-E
+ {0xD7CB, 0xD7FB, prT}, // Lo [49] HANGUL JONGSEONG NIEUN-RIEUL..HANGUL JONGSEONG PHIEUPH-THIEUTH
+ {0xFB1E, 0xFB1E, prExtend}, // Mn HEBREW POINT JUDEO-SPANISH VARIKA
+ {0xFE00, 0xFE0F, prExtend}, // Mn [16] VARIATION SELECTOR-1..VARIATION SELECTOR-16
+ {0xFE20, 0xFE2F, prExtend}, // Mn [16] COMBINING LIGATURE LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF
+ {0xFEFF, 0xFEFF, prControl}, // Cf ZERO WIDTH NO-BREAK SPACE
+ {0xFF9E, 0xFF9F, prExtend}, // Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK
+ {0xFFF0, 0xFFF8, prControl}, // Cn [9] &lt;reserved-FFF0&gt;..&lt;reserved-FFF8&gt;
+ {0xFFF9, 0xFFFB, prControl}, // Cf [3] INTERLINEAR ANNOTATION ANCHOR..INTERLINEAR ANNOTATION TERMINATOR
+ {0x101FD, 0x101FD, prExtend}, // Mn PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE
+ {0x102E0, 0x102E0, prExtend}, // Mn COPTIC EPACT THOUSANDS MARK
+ {0x10376, 0x1037A, prExtend}, // Mn [5] COMBINING OLD PERMIC LETTER AN..COMBINING OLD PERMIC LETTER SII
+ {0x10A01, 0x10A03, prExtend}, // Mn [3] KHAROSHTHI VOWEL SIGN I..KHAROSHTHI VOWEL SIGN VOCALIC R
+ {0x10A05, 0x10A06, prExtend}, // Mn [2] KHAROSHTHI VOWEL SIGN E..KHAROSHTHI VOWEL SIGN O
+ {0x10A0C, 0x10A0F, prExtend}, // Mn [4] KHAROSHTHI VOWEL LENGTH MARK..KHAROSHTHI SIGN VISARGA
+ {0x10A38, 0x10A3A, prExtend}, // Mn [3] KHAROSHTHI SIGN BAR ABOVE..KHAROSHTHI SIGN DOT BELOW
+ {0x10A3F, 0x10A3F, prExtend}, // Mn KHAROSHTHI VIRAMA
+ {0x10AE5, 0x10AE6, prExtend}, // Mn [2] MANICHAEAN ABBREVIATION MARK ABOVE..MANICHAEAN ABBREVIATION MARK BELOW
+ {0x10D24, 0x10D27, prExtend}, // Mn [4] HANIFI ROHINGYA SIGN HARBAHAY..HANIFI ROHINGYA SIGN TASSI
+ {0x10EAB, 0x10EAC, prExtend}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
+ {0x10EFD, 0x10EFF, prExtend}, // Mn [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA
+ {0x10F46, 0x10F50, prExtend}, // Mn [11] SOGDIAN COMBINING DOT BELOW..SOGDIAN COMBINING STROKE BELOW
+ {0x10F82, 0x10F85, prExtend}, // Mn [4] OLD UYGHUR COMBINING DOT ABOVE..OLD UYGHUR COMBINING TWO DOTS BELOW
+ {0x11000, 0x11000, prSpacingMark}, // Mc BRAHMI SIGN CANDRABINDU
+ {0x11001, 0x11001, prExtend}, // Mn BRAHMI SIGN ANUSVARA
+ {0x11002, 0x11002, prSpacingMark}, // Mc BRAHMI SIGN VISARGA
+ {0x11038, 0x11046, prExtend}, // Mn [15] BRAHMI VOWEL SIGN AA..BRAHMI VIRAMA
+ {0x11070, 0x11070, prExtend}, // Mn BRAHMI SIGN OLD TAMIL VIRAMA
+ {0x11073, 0x11074, prExtend}, // Mn [2] BRAHMI VOWEL SIGN OLD TAMIL SHORT E..BRAHMI VOWEL SIGN OLD TAMIL SHORT O
+ {0x1107F, 0x11081, prExtend}, // Mn [3] BRAHMI NUMBER JOINER..KAITHI SIGN ANUSVARA
+ {0x11082, 0x11082, prSpacingMark}, // Mc KAITHI SIGN VISARGA
+ {0x110B0, 0x110B2, prSpacingMark}, // Mc [3] KAITHI VOWEL SIGN AA..KAITHI VOWEL SIGN II
+ {0x110B3, 0x110B6, prExtend}, // Mn [4] KAITHI VOWEL SIGN U..KAITHI VOWEL SIGN AI
+ {0x110B7, 0x110B8, prSpacingMark}, // Mc [2] KAITHI VOWEL SIGN O..KAITHI VOWEL SIGN AU
+ {0x110B9, 0x110BA, prExtend}, // Mn [2] KAITHI SIGN VIRAMA..KAITHI SIGN NUKTA
+ {0x110BD, 0x110BD, prPrepend}, // Cf KAITHI NUMBER SIGN
+ {0x110C2, 0x110C2, prExtend}, // Mn KAITHI VOWEL SIGN VOCALIC R
+ {0x110CD, 0x110CD, prPrepend}, // Cf KAITHI NUMBER SIGN ABOVE
+ {0x11100, 0x11102, prExtend}, // Mn [3] CHAKMA SIGN CANDRABINDU..CHAKMA SIGN VISARGA
+ {0x11127, 0x1112B, prExtend}, // Mn [5] CHAKMA VOWEL SIGN A..CHAKMA VOWEL SIGN UU
+ {0x1112C, 0x1112C, prSpacingMark}, // Mc CHAKMA VOWEL SIGN E
+ {0x1112D, 0x11134, prExtend}, // Mn [8] CHAKMA VOWEL SIGN AI..CHAKMA MAAYYAA
+ {0x11145, 0x11146, prSpacingMark}, // Mc [2] CHAKMA VOWEL SIGN AA..CHAKMA VOWEL SIGN EI
+ {0x11173, 0x11173, prExtend}, // Mn MAHAJANI SIGN NUKTA
+ {0x11180, 0x11181, prExtend}, // Mn [2] SHARADA SIGN CANDRABINDU..SHARADA SIGN ANUSVARA
+ {0x11182, 0x11182, prSpacingMark}, // Mc SHARADA SIGN VISARGA
+ {0x111B3, 0x111B5, prSpacingMark}, // Mc [3] SHARADA VOWEL SIGN AA..SHARADA VOWEL SIGN II
+ {0x111B6, 0x111BE, prExtend}, // Mn [9] SHARADA VOWEL SIGN U..SHARADA VOWEL SIGN O
+ {0x111BF, 0x111C0, prSpacingMark}, // Mc [2] SHARADA VOWEL SIGN AU..SHARADA SIGN VIRAMA
+ {0x111C2, 0x111C3, prPrepend}, // Lo [2] SHARADA SIGN JIHVAMULIYA..SHARADA SIGN UPADHMANIYA
+ {0x111C9, 0x111CC, prExtend}, // Mn [4] SHARADA SANDHI MARK..SHARADA EXTRA SHORT VOWEL MARK
+ {0x111CE, 0x111CE, prSpacingMark}, // Mc SHARADA VOWEL SIGN PRISHTHAMATRA E
+ {0x111CF, 0x111CF, prExtend}, // Mn SHARADA SIGN INVERTED CANDRABINDU
+ {0x1122C, 0x1122E, prSpacingMark}, // Mc [3] KHOJKI VOWEL SIGN AA..KHOJKI VOWEL SIGN II
+ {0x1122F, 0x11231, prExtend}, // Mn [3] KHOJKI VOWEL SIGN U..KHOJKI VOWEL SIGN AI
+ {0x11232, 0x11233, prSpacingMark}, // Mc [2] KHOJKI VOWEL SIGN O..KHOJKI VOWEL SIGN AU
+ {0x11234, 0x11234, prExtend}, // Mn KHOJKI SIGN ANUSVARA
+ {0x11235, 0x11235, prSpacingMark}, // Mc KHOJKI SIGN VIRAMA
+ {0x11236, 0x11237, prExtend}, // Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA
+ {0x1123E, 0x1123E, prExtend}, // Mn KHOJKI SIGN SUKUN
+ {0x11241, 0x11241, prExtend}, // Mn KHOJKI VOWEL SIGN VOCALIC R
+ {0x112DF, 0x112DF, prExtend}, // Mn KHUDAWADI SIGN ANUSVARA
+ {0x112E0, 0x112E2, prSpacingMark}, // Mc [3] KHUDAWADI VOWEL SIGN AA..KHUDAWADI VOWEL SIGN II
+ {0x112E3, 0x112EA, prExtend}, // Mn [8] KHUDAWADI VOWEL SIGN U..KHUDAWADI SIGN VIRAMA
+ {0x11300, 0x11301, prExtend}, // Mn [2] GRANTHA SIGN COMBINING ANUSVARA ABOVE..GRANTHA SIGN CANDRABINDU
+ {0x11302, 0x11303, prSpacingMark}, // Mc [2] GRANTHA SIGN ANUSVARA..GRANTHA SIGN VISARGA
+ {0x1133B, 0x1133C, prExtend}, // Mn [2] COMBINING BINDU BELOW..GRANTHA SIGN NUKTA
+ {0x1133E, 0x1133E, prExtend}, // Mc GRANTHA VOWEL SIGN AA
+ {0x1133F, 0x1133F, prSpacingMark}, // Mc GRANTHA VOWEL SIGN I
+ {0x11340, 0x11340, prExtend}, // Mn GRANTHA VOWEL SIGN II
+ {0x11341, 0x11344, prSpacingMark}, // Mc [4] GRANTHA VOWEL SIGN U..GRANTHA VOWEL SIGN VOCALIC RR
+ {0x11347, 0x11348, prSpacingMark}, // Mc [2] GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI
+ {0x1134B, 0x1134D, prSpacingMark}, // Mc [3] GRANTHA VOWEL SIGN OO..GRANTHA SIGN VIRAMA
+ {0x11357, 0x11357, prExtend}, // Mc GRANTHA AU LENGTH MARK
+ {0x11362, 0x11363, prSpacingMark}, // Mc [2] GRANTHA VOWEL SIGN VOCALIC L..GRANTHA VOWEL SIGN VOCALIC LL
+ {0x11366, 0x1136C, prExtend}, // Mn [7] COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX
+ {0x11370, 0x11374, prExtend}, // Mn [5] COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA
+ {0x11435, 0x11437, prSpacingMark}, // Mc [3] NEWA VOWEL SIGN AA..NEWA VOWEL SIGN II
+ {0x11438, 0x1143F, prExtend}, // Mn [8] NEWA VOWEL SIGN U..NEWA VOWEL SIGN AI
+ {0x11440, 0x11441, prSpacingMark}, // Mc [2] NEWA VOWEL SIGN O..NEWA VOWEL SIGN AU
+ {0x11442, 0x11444, prExtend}, // Mn [3] NEWA SIGN VIRAMA..NEWA SIGN ANUSVARA
+ {0x11445, 0x11445, prSpacingMark}, // Mc NEWA SIGN VISARGA
+ {0x11446, 0x11446, prExtend}, // Mn NEWA SIGN NUKTA
+ {0x1145E, 0x1145E, prExtend}, // Mn NEWA SANDHI MARK
+ {0x114B0, 0x114B0, prExtend}, // Mc TIRHUTA VOWEL SIGN AA
+ {0x114B1, 0x114B2, prSpacingMark}, // Mc [2] TIRHUTA VOWEL SIGN I..TIRHUTA VOWEL SIGN II
+ {0x114B3, 0x114B8, prExtend}, // Mn [6] TIRHUTA VOWEL SIGN U..TIRHUTA VOWEL SIGN VOCALIC LL
+ {0x114B9, 0x114B9, prSpacingMark}, // Mc TIRHUTA VOWEL SIGN E
+ {0x114BA, 0x114BA, prExtend}, // Mn TIRHUTA VOWEL SIGN SHORT E
+ {0x114BB, 0x114BC, prSpacingMark}, // Mc [2] TIRHUTA VOWEL SIGN AI..TIRHUTA VOWEL SIGN O
+ {0x114BD, 0x114BD, prExtend}, // Mc TIRHUTA VOWEL SIGN SHORT O
+ {0x114BE, 0x114BE, prSpacingMark}, // Mc TIRHUTA VOWEL SIGN AU
+ {0x114BF, 0x114C0, prExtend}, // Mn [2] TIRHUTA SIGN CANDRABINDU..TIRHUTA SIGN ANUSVARA
+ {0x114C1, 0x114C1, prSpacingMark}, // Mc TIRHUTA SIGN VISARGA
+ {0x114C2, 0x114C3, prExtend}, // Mn [2] TIRHUTA SIGN VIRAMA..TIRHUTA SIGN NUKTA
+ {0x115AF, 0x115AF, prExtend}, // Mc SIDDHAM VOWEL SIGN AA
+ {0x115B0, 0x115B1, prSpacingMark}, // Mc [2] SIDDHAM VOWEL SIGN I..SIDDHAM VOWEL SIGN II
+ {0x115B2, 0x115B5, prExtend}, // Mn [4] SIDDHAM VOWEL SIGN U..SIDDHAM VOWEL SIGN VOCALIC RR
+ {0x115B8, 0x115BB, prSpacingMark}, // Mc [4] SIDDHAM VOWEL SIGN E..SIDDHAM VOWEL SIGN AU
+ {0x115BC, 0x115BD, prExtend}, // Mn [2] SIDDHAM SIGN CANDRABINDU..SIDDHAM SIGN ANUSVARA
+ {0x115BE, 0x115BE, prSpacingMark}, // Mc SIDDHAM SIGN VISARGA
+ {0x115BF, 0x115C0, prExtend}, // Mn [2] SIDDHAM SIGN VIRAMA..SIDDHAM SIGN NUKTA
+ {0x115DC, 0x115DD, prExtend}, // Mn [2] SIDDHAM VOWEL SIGN ALTERNATE U..SIDDHAM VOWEL SIGN ALTERNATE UU
+ {0x11630, 0x11632, prSpacingMark}, // Mc [3] MODI VOWEL SIGN AA..MODI VOWEL SIGN II
+ {0x11633, 0x1163A, prExtend}, // Mn [8] MODI VOWEL SIGN U..MODI VOWEL SIGN AI
+ {0x1163B, 0x1163C, prSpacingMark}, // Mc [2] MODI VOWEL SIGN O..MODI VOWEL SIGN AU
+ {0x1163D, 0x1163D, prExtend}, // Mn MODI SIGN ANUSVARA
+ {0x1163E, 0x1163E, prSpacingMark}, // Mc MODI SIGN VISARGA
+ {0x1163F, 0x11640, prExtend}, // Mn [2] MODI SIGN VIRAMA..MODI SIGN ARDHACANDRA
+ {0x116AB, 0x116AB, prExtend}, // Mn TAKRI SIGN ANUSVARA
+ {0x116AC, 0x116AC, prSpacingMark}, // Mc TAKRI SIGN VISARGA
+ {0x116AD, 0x116AD, prExtend}, // Mn TAKRI VOWEL SIGN AA
+ {0x116AE, 0x116AF, prSpacingMark}, // Mc [2] TAKRI VOWEL SIGN I..TAKRI VOWEL SIGN II
+ {0x116B0, 0x116B5, prExtend}, // Mn [6] TAKRI VOWEL SIGN U..TAKRI VOWEL SIGN AU
+ {0x116B6, 0x116B6, prSpacingMark}, // Mc TAKRI SIGN VIRAMA
+ {0x116B7, 0x116B7, prExtend}, // Mn TAKRI SIGN NUKTA
+ {0x1171D, 0x1171F, prExtend}, // Mn [3] AHOM CONSONANT SIGN MEDIAL LA..AHOM CONSONANT SIGN MEDIAL LIGATING RA
+ {0x11722, 0x11725, prExtend}, // Mn [4] AHOM VOWEL SIGN I..AHOM VOWEL SIGN UU
+ {0x11726, 0x11726, prSpacingMark}, // Mc AHOM VOWEL SIGN E
+ {0x11727, 0x1172B, prExtend}, // Mn [5] AHOM VOWEL SIGN AW..AHOM SIGN KILLER
+ {0x1182C, 0x1182E, prSpacingMark}, // Mc [3] DOGRA VOWEL SIGN AA..DOGRA VOWEL SIGN II
+ {0x1182F, 0x11837, prExtend}, // Mn [9] DOGRA VOWEL SIGN U..DOGRA SIGN ANUSVARA
+ {0x11838, 0x11838, prSpacingMark}, // Mc DOGRA SIGN VISARGA
+ {0x11839, 0x1183A, prExtend}, // Mn [2] DOGRA SIGN VIRAMA..DOGRA SIGN NUKTA
+ {0x11930, 0x11930, prExtend}, // Mc DIVES AKURU VOWEL SIGN AA
+ {0x11931, 0x11935, prSpacingMark}, // Mc [5] DIVES AKURU VOWEL SIGN I..DIVES AKURU VOWEL SIGN E
+ {0x11937, 0x11938, prSpacingMark}, // Mc [2] DIVES AKURU VOWEL SIGN AI..DIVES AKURU VOWEL SIGN O
+ {0x1193B, 0x1193C, prExtend}, // Mn [2] DIVES AKURU SIGN ANUSVARA..DIVES AKURU SIGN CANDRABINDU
+ {0x1193D, 0x1193D, prSpacingMark}, // Mc DIVES AKURU SIGN HALANTA
+ {0x1193E, 0x1193E, prExtend}, // Mn DIVES AKURU VIRAMA
+ {0x1193F, 0x1193F, prPrepend}, // Lo DIVES AKURU PREFIXED NASAL SIGN
+ {0x11940, 0x11940, prSpacingMark}, // Mc DIVES AKURU MEDIAL YA
+ {0x11941, 0x11941, prPrepend}, // Lo DIVES AKURU INITIAL RA
+ {0x11942, 0x11942, prSpacingMark}, // Mc DIVES AKURU MEDIAL RA
+ {0x11943, 0x11943, prExtend}, // Mn DIVES AKURU SIGN NUKTA
+ {0x119D1, 0x119D3, prSpacingMark}, // Mc [3] NANDINAGARI VOWEL SIGN AA..NANDINAGARI VOWEL SIGN II
+ {0x119D4, 0x119D7, prExtend}, // Mn [4] NANDINAGARI VOWEL SIGN U..NANDINAGARI VOWEL SIGN VOCALIC RR
+ {0x119DA, 0x119DB, prExtend}, // Mn [2] NANDINAGARI VOWEL SIGN E..NANDINAGARI VOWEL SIGN AI
+ {0x119DC, 0x119DF, prSpacingMark}, // Mc [4] NANDINAGARI VOWEL SIGN O..NANDINAGARI SIGN VISARGA
+ {0x119E0, 0x119E0, prExtend}, // Mn NANDINAGARI SIGN VIRAMA
+ {0x119E4, 0x119E4, prSpacingMark}, // Mc NANDINAGARI VOWEL SIGN PRISHTHAMATRA E
+ {0x11A01, 0x11A0A, prExtend}, // Mn [10] ZANABAZAR SQUARE VOWEL SIGN I..ZANABAZAR SQUARE VOWEL LENGTH MARK
+ {0x11A33, 0x11A38, prExtend}, // Mn [6] ZANABAZAR SQUARE FINAL CONSONANT MARK..ZANABAZAR SQUARE SIGN ANUSVARA
+ {0x11A39, 0x11A39, prSpacingMark}, // Mc ZANABAZAR SQUARE SIGN VISARGA
+ {0x11A3A, 0x11A3A, prPrepend}, // Lo ZANABAZAR SQUARE CLUSTER-INITIAL LETTER RA
+ {0x11A3B, 0x11A3E, prExtend}, // Mn [4] ZANABAZAR SQUARE CLUSTER-FINAL LETTER YA..ZANABAZAR SQUARE CLUSTER-FINAL LETTER VA
+ {0x11A47, 0x11A47, prExtend}, // Mn ZANABAZAR SQUARE SUBJOINER
+ {0x11A51, 0x11A56, prExtend}, // Mn [6] SOYOMBO VOWEL SIGN I..SOYOMBO VOWEL SIGN OE
+ {0x11A57, 0x11A58, prSpacingMark}, // Mc [2] SOYOMBO VOWEL SIGN AI..SOYOMBO VOWEL SIGN AU
+ {0x11A59, 0x11A5B, prExtend}, // Mn [3] SOYOMBO VOWEL SIGN VOCALIC R..SOYOMBO VOWEL LENGTH MARK
+ {0x11A84, 0x11A89, prPrepend}, // Lo [6] SOYOMBO SIGN JIHVAMULIYA..SOYOMBO CLUSTER-INITIAL LETTER SA
+ {0x11A8A, 0x11A96, prExtend}, // Mn [13] SOYOMBO FINAL CONSONANT SIGN G..SOYOMBO SIGN ANUSVARA
+ {0x11A97, 0x11A97, prSpacingMark}, // Mc SOYOMBO SIGN VISARGA
+ {0x11A98, 0x11A99, prExtend}, // Mn [2] SOYOMBO GEMINATION MARK..SOYOMBO SUBJOINER
+ {0x11C2F, 0x11C2F, prSpacingMark}, // Mc BHAIKSUKI VOWEL SIGN AA
+ {0x11C30, 0x11C36, prExtend}, // Mn [7] BHAIKSUKI VOWEL SIGN I..BHAIKSUKI VOWEL SIGN VOCALIC L
+ {0x11C38, 0x11C3D, prExtend}, // Mn [6] BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN ANUSVARA
+ {0x11C3E, 0x11C3E, prSpacingMark}, // Mc BHAIKSUKI SIGN VISARGA
+ {0x11C3F, 0x11C3F, prExtend}, // Mn BHAIKSUKI SIGN VIRAMA
+ {0x11C92, 0x11CA7, prExtend}, // Mn [22] MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA
+ {0x11CA9, 0x11CA9, prSpacingMark}, // Mc MARCHEN SUBJOINED LETTER YA
+ {0x11CAA, 0x11CB0, prExtend}, // Mn [7] MARCHEN SUBJOINED LETTER RA..MARCHEN VOWEL SIGN AA
+ {0x11CB1, 0x11CB1, prSpacingMark}, // Mc MARCHEN VOWEL SIGN I
+ {0x11CB2, 0x11CB3, prExtend}, // Mn [2] MARCHEN VOWEL SIGN U..MARCHEN VOWEL SIGN E
+ {0x11CB4, 0x11CB4, prSpacingMark}, // Mc MARCHEN VOWEL SIGN O
+ {0x11CB5, 0x11CB6, prExtend}, // Mn [2] MARCHEN SIGN ANUSVARA..MARCHEN SIGN CANDRABINDU
+ {0x11D31, 0x11D36, prExtend}, // Mn [6] MASARAM GONDI VOWEL SIGN AA..MASARAM GONDI VOWEL SIGN VOCALIC R
+ {0x11D3A, 0x11D3A, prExtend}, // Mn MASARAM GONDI VOWEL SIGN E
+ {0x11D3C, 0x11D3D, prExtend}, // Mn [2] MASARAM GONDI VOWEL SIGN AI..MASARAM GONDI VOWEL SIGN O
+ {0x11D3F, 0x11D45, prExtend}, // Mn [7] MASARAM GONDI VOWEL SIGN AU..MASARAM GONDI VIRAMA
+ {0x11D46, 0x11D46, prPrepend}, // Lo MASARAM GONDI REPHA
+ {0x11D47, 0x11D47, prExtend}, // Mn MASARAM GONDI RA-KARA
+ {0x11D8A, 0x11D8E, prSpacingMark}, // Mc [5] GUNJALA GONDI VOWEL SIGN AA..GUNJALA GONDI VOWEL SIGN UU
+ {0x11D90, 0x11D91, prExtend}, // Mn [2] GUNJALA GONDI VOWEL SIGN EE..GUNJALA GONDI VOWEL SIGN AI
+ {0x11D93, 0x11D94, prSpacingMark}, // Mc [2] GUNJALA GONDI VOWEL SIGN OO..GUNJALA GONDI VOWEL SIGN AU
+ {0x11D95, 0x11D95, prExtend}, // Mn GUNJALA GONDI SIGN ANUSVARA
+ {0x11D96, 0x11D96, prSpacingMark}, // Mc GUNJALA GONDI SIGN VISARGA
+ {0x11D97, 0x11D97, prExtend}, // Mn GUNJALA GONDI VIRAMA
+ {0x11EF3, 0x11EF4, prExtend}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
+ {0x11EF5, 0x11EF6, prSpacingMark}, // Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
+ {0x11F00, 0x11F01, prExtend}, // Mn [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA
+ {0x11F02, 0x11F02, prPrepend}, // Lo KAWI SIGN REPHA
+ {0x11F03, 0x11F03, prSpacingMark}, // Mc KAWI SIGN VISARGA
+ {0x11F34, 0x11F35, prSpacingMark}, // Mc [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA
+ {0x11F36, 0x11F3A, prExtend}, // Mn [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R
+ {0x11F3E, 0x11F3F, prSpacingMark}, // Mc [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI
+ {0x11F40, 0x11F40, prExtend}, // Mn KAWI VOWEL SIGN EU
+ {0x11F41, 0x11F41, prSpacingMark}, // Mc KAWI SIGN KILLER
+ {0x11F42, 0x11F42, prExtend}, // Mn KAWI CONJOINER
+ {0x13430, 0x1343F, prControl}, // Cf [16] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE
+ {0x13440, 0x13440, prExtend}, // Mn EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY
+ {0x13447, 0x13455, prExtend}, // Mn [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED
+ {0x16AF0, 0x16AF4, prExtend}, // Mn [5] BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE
+ {0x16B30, 0x16B36, prExtend}, // Mn [7] PAHAWH HMONG MARK CIM TUB..PAHAWH HMONG MARK CIM TAUM
+ {0x16F4F, 0x16F4F, prExtend}, // Mn MIAO SIGN CONSONANT MODIFIER BAR
+ {0x16F51, 0x16F87, prSpacingMark}, // Mc [55] MIAO SIGN ASPIRATION..MIAO VOWEL SIGN UI
+ {0x16F8F, 0x16F92, prExtend}, // Mn [4] MIAO TONE RIGHT..MIAO TONE BELOW
+ {0x16FE4, 0x16FE4, prExtend}, // Mn KHITAN SMALL SCRIPT FILLER
+ {0x16FF0, 0x16FF1, prSpacingMark}, // Mc [2] VIETNAMESE ALTERNATE READING MARK CA..VIETNAMESE ALTERNATE READING MARK NHAY
+ {0x1BC9D, 0x1BC9E, prExtend}, // Mn [2] DUPLOYAN THICK LETTER SELECTOR..DUPLOYAN DOUBLE MARK
+ {0x1BCA0, 0x1BCA3, prControl}, // Cf [4] SHORTHAND FORMAT LETTER OVERLAP..SHORTHAND FORMAT UP STEP
+ {0x1CF00, 0x1CF2D, prExtend}, // Mn [46] ZNAMENNY COMBINING MARK GORAZDO NIZKO S KRYZHEM ON LEFT..ZNAMENNY COMBINING MARK KRYZH ON LEFT
+ {0x1CF30, 0x1CF46, prExtend}, // Mn [23] ZNAMENNY COMBINING TONAL RANGE MARK MRACHNO..ZNAMENNY PRIZNAK MODIFIER ROG
+ {0x1D165, 0x1D165, prExtend}, // Mc MUSICAL SYMBOL COMBINING STEM
+ {0x1D166, 0x1D166, prSpacingMark}, // Mc MUSICAL SYMBOL COMBINING SPRECHGESANG STEM
+ {0x1D167, 0x1D169, prExtend}, // Mn [3] MUSICAL SYMBOL COMBINING TREMOLO-1..MUSICAL SYMBOL COMBINING TREMOLO-3
+ {0x1D16D, 0x1D16D, prSpacingMark}, // Mc MUSICAL SYMBOL COMBINING AUGMENTATION DOT
+ {0x1D16E, 0x1D172, prExtend}, // Mc [5] MUSICAL SYMBOL COMBINING FLAG-1..MUSICAL SYMBOL COMBINING FLAG-5
+ {0x1D173, 0x1D17A, prControl}, // Cf [8] MUSICAL SYMBOL BEGIN BEAM..MUSICAL SYMBOL END PHRASE
+ {0x1D17B, 0x1D182, prExtend}, // Mn [8] MUSICAL SYMBOL COMBINING ACCENT..MUSICAL SYMBOL COMBINING LOURE
+ {0x1D185, 0x1D18B, prExtend}, // Mn [7] MUSICAL SYMBOL COMBINING DOIT..MUSICAL SYMBOL COMBINING TRIPLE TONGUE
+ {0x1D1AA, 0x1D1AD, prExtend}, // Mn [4] MUSICAL SYMBOL COMBINING DOWN BOW..MUSICAL SYMBOL COMBINING SNAP PIZZICATO
+ {0x1D242, 0x1D244, prExtend}, // Mn [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME
+ {0x1DA00, 0x1DA36, prExtend}, // Mn [55] SIGNWRITING HEAD RIM..SIGNWRITING AIR SUCKING IN
+ {0x1DA3B, 0x1DA6C, prExtend}, // Mn [50] SIGNWRITING MOUTH CLOSED NEUTRAL..SIGNWRITING EXCITEMENT
+ {0x1DA75, 0x1DA75, prExtend}, // Mn SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS
+ {0x1DA84, 0x1DA84, prExtend}, // Mn SIGNWRITING LOCATION HEAD NECK
+ {0x1DA9B, 0x1DA9F, prExtend}, // Mn [5] SIGNWRITING FILL MODIFIER-2..SIGNWRITING FILL MODIFIER-6
+ {0x1DAA1, 0x1DAAF, prExtend}, // Mn [15] SIGNWRITING ROTATION MODIFIER-2..SIGNWRITING ROTATION MODIFIER-16
+ {0x1E000, 0x1E006, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
+ {0x1E008, 0x1E018, prExtend}, // Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
+ {0x1E01B, 0x1E021, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
+ {0x1E023, 0x1E024, prExtend}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
+ {0x1E026, 0x1E02A, prExtend}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E08F, 0x1E08F, prExtend}, // Mn COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
+ {0x1E130, 0x1E136, prExtend}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
+ {0x1E2AE, 0x1E2AE, prExtend}, // Mn TOTO SIGN RISING TONE
+ {0x1E2EC, 0x1E2EF, prExtend}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI
+ {0x1E4EC, 0x1E4EF, prExtend}, // Mn [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH
+ {0x1E8D0, 0x1E8D6, prExtend}, // Mn [7] MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS
+ {0x1E944, 0x1E94A, prExtend}, // Mn [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA
+ {0x1F000, 0x1F003, prExtendedPictographic}, // E0.0 [4] (🀀..🀃) MAHJONG TILE EAST WIND..MAHJONG TILE NORTH WIND
+ {0x1F004, 0x1F004, prExtendedPictographic}, // E0.6 [1] (🀄) mahjong red dragon
+ {0x1F005, 0x1F0CE, prExtendedPictographic}, // E0.0 [202] (🀅..🃎) MAHJONG TILE GREEN DRAGON..PLAYING CARD KING OF DIAMONDS
+ {0x1F0CF, 0x1F0CF, prExtendedPictographic}, // E0.6 [1] (🃏) joker
+ {0x1F0D0, 0x1F0FF, prExtendedPictographic}, // E0.0 [48] (..) ..
+ {0x1F10D, 0x1F10F, prExtendedPictographic}, // E0.0 [3] (🄍..🄏) CIRCLED ZERO WITH SLASH..CIRCLED DOLLAR SIGN WITH OVERLAID BACKSLASH
+ {0x1F12F, 0x1F12F, prExtendedPictographic}, // E0.0 [1] (🄯) COPYLEFT SYMBOL
+ {0x1F16C, 0x1F16F, prExtendedPictographic}, // E0.0 [4] (🅬..🅯) RAISED MR SIGN..CIRCLED HUMAN FIGURE
+ {0x1F170, 0x1F171, prExtendedPictographic}, // E0.6 [2] (🅰️..🅱️) A button (blood type)..B button (blood type)
+ {0x1F17E, 0x1F17F, prExtendedPictographic}, // E0.6 [2] (🅾️..🅿️) O button (blood type)..P button
+ {0x1F18E, 0x1F18E, prExtendedPictographic}, // E0.6 [1] (🆎) AB button (blood type)
+ {0x1F191, 0x1F19A, prExtendedPictographic}, // E0.6 [10] (🆑..🆚) CL button..VS button
+ {0x1F1AD, 0x1F1E5, prExtendedPictographic}, // E0.0 [57] (🆭..) MASK WORK SYMBOL..
+ {0x1F1E6, 0x1F1FF, prRegionalIndicator}, // So [26] REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z
+ {0x1F201, 0x1F202, prExtendedPictographic}, // E0.6 [2] (🈁..🈂️) Japanese “here” button..Japanese “service charge” button
+ {0x1F203, 0x1F20F, prExtendedPictographic}, // E0.0 [13] (..) ..
+ {0x1F21A, 0x1F21A, prExtendedPictographic}, // E0.6 [1] (🈚) Japanese “free of charge” button
+ {0x1F22F, 0x1F22F, prExtendedPictographic}, // E0.6 [1] (🈯) Japanese “reserved” button
+ {0x1F232, 0x1F23A, prExtendedPictographic}, // E0.6 [9] (🈲..🈺) Japanese “prohibited” button..Japanese “open for business” button
+ {0x1F23C, 0x1F23F, prExtendedPictographic}, // E0.0 [4] (..) ..
+ {0x1F249, 0x1F24F, prExtendedPictographic}, // E0.0 [7] (..) ..
+ {0x1F250, 0x1F251, prExtendedPictographic}, // E0.6 [2] (🉐..🉑) Japanese “bargain” button..Japanese “acceptable” button
+ {0x1F252, 0x1F2FF, prExtendedPictographic}, // E0.0 [174] (..) ..
+ {0x1F300, 0x1F30C, prExtendedPictographic}, // E0.6 [13] (🌀..🌌) cyclone..milky way
+ {0x1F30D, 0x1F30E, prExtendedPictographic}, // E0.7 [2] (🌍..🌎) globe showing Europe-Africa..globe showing Americas
+ {0x1F30F, 0x1F30F, prExtendedPictographic}, // E0.6 [1] (🌏) globe showing Asia-Australia
+ {0x1F310, 0x1F310, prExtendedPictographic}, // E1.0 [1] (🌐) globe with meridians
+ {0x1F311, 0x1F311, prExtendedPictographic}, // E0.6 [1] (🌑) new moon
+ {0x1F312, 0x1F312, prExtendedPictographic}, // E1.0 [1] (🌒) waxing crescent moon
+ {0x1F313, 0x1F315, prExtendedPictographic}, // E0.6 [3] (🌓..🌕) first quarter moon..full moon
+ {0x1F316, 0x1F318, prExtendedPictographic}, // E1.0 [3] (🌖..🌘) waning gibbous moon..waning crescent moon
+ {0x1F319, 0x1F319, prExtendedPictographic}, // E0.6 [1] (🌙) crescent moon
+ {0x1F31A, 0x1F31A, prExtendedPictographic}, // E1.0 [1] (🌚) new moon face
+ {0x1F31B, 0x1F31B, prExtendedPictographic}, // E0.6 [1] (🌛) first quarter moon face
+ {0x1F31C, 0x1F31C, prExtendedPictographic}, // E0.7 [1] (🌜) last quarter moon face
+ {0x1F31D, 0x1F31E, prExtendedPictographic}, // E1.0 [2] (🌝..🌞) full moon face..sun with face
+ {0x1F31F, 0x1F320, prExtendedPictographic}, // E0.6 [2] (🌟..🌠) glowing star..shooting star
+ {0x1F321, 0x1F321, prExtendedPictographic}, // E0.7 [1] (🌡️) thermometer
+ {0x1F322, 0x1F323, prExtendedPictographic}, // E0.0 [2] (🌢..🌣) BLACK DROPLET..WHITE SUN
+ {0x1F324, 0x1F32C, prExtendedPictographic}, // E0.7 [9] (🌤️..🌬️) sun behind small cloud..wind face
+ {0x1F32D, 0x1F32F, prExtendedPictographic}, // E1.0 [3] (🌭..🌯) hot dog..burrito
+ {0x1F330, 0x1F331, prExtendedPictographic}, // E0.6 [2] (🌰..🌱) chestnut..seedling
+ {0x1F332, 0x1F333, prExtendedPictographic}, // E1.0 [2] (🌲..🌳) evergreen tree..deciduous tree
+ {0x1F334, 0x1F335, prExtendedPictographic}, // E0.6 [2] (🌴..🌵) palm tree..cactus
+ {0x1F336, 0x1F336, prExtendedPictographic}, // E0.7 [1] (🌶️) hot pepper
+ {0x1F337, 0x1F34A, prExtendedPictographic}, // E0.6 [20] (🌷..🍊) tulip..tangerine
+ {0x1F34B, 0x1F34B, prExtendedPictographic}, // E1.0 [1] (🍋) lemon
+ {0x1F34C, 0x1F34F, prExtendedPictographic}, // E0.6 [4] (🍌..🍏) banana..green apple
+ {0x1F350, 0x1F350, prExtendedPictographic}, // E1.0 [1] (🍐) pear
+ {0x1F351, 0x1F37B, prExtendedPictographic}, // E0.6 [43] (🍑..🍻) peach..clinking beer mugs
+ {0x1F37C, 0x1F37C, prExtendedPictographic}, // E1.0 [1] (🍼) baby bottle
+ {0x1F37D, 0x1F37D, prExtendedPictographic}, // E0.7 [1] (🍽️) fork and knife with plate
+ {0x1F37E, 0x1F37F, prExtendedPictographic}, // E1.0 [2] (🍾..🍿) bottle with popping cork..popcorn
+ {0x1F380, 0x1F393, prExtendedPictographic}, // E0.6 [20] (🎀..🎓) ribbon..graduation cap
+ {0x1F394, 0x1F395, prExtendedPictographic}, // E0.0 [2] (🎔..🎕) HEART WITH TIP ON THE LEFT..BOUQUET OF FLOWERS
+ {0x1F396, 0x1F397, prExtendedPictographic}, // E0.7 [2] (🎖️..🎗️) military medal..reminder ribbon
+ {0x1F398, 0x1F398, prExtendedPictographic}, // E0.0 [1] (🎘) MUSICAL KEYBOARD WITH JACKS
+ {0x1F399, 0x1F39B, prExtendedPictographic}, // E0.7 [3] (🎙️..🎛️) studio microphone..control knobs
+ {0x1F39C, 0x1F39D, prExtendedPictographic}, // E0.0 [2] (🎜..🎝) BEAMED ASCENDING MUSICAL NOTES..BEAMED DESCENDING MUSICAL NOTES
+ {0x1F39E, 0x1F39F, prExtendedPictographic}, // E0.7 [2] (🎞️..🎟️) film frames..admission tickets
+ {0x1F3A0, 0x1F3C4, prExtendedPictographic}, // E0.6 [37] (🎠..🏄) carousel horse..person surfing
+ {0x1F3C5, 0x1F3C5, prExtendedPictographic}, // E1.0 [1] (🏅) sports medal
+ {0x1F3C6, 0x1F3C6, prExtendedPictographic}, // E0.6 [1] (🏆) trophy
+ {0x1F3C7, 0x1F3C7, prExtendedPictographic}, // E1.0 [1] (🏇) horse racing
+ {0x1F3C8, 0x1F3C8, prExtendedPictographic}, // E0.6 [1] (🏈) american football
+ {0x1F3C9, 0x1F3C9, prExtendedPictographic}, // E1.0 [1] (🏉) rugby football
+ {0x1F3CA, 0x1F3CA, prExtendedPictographic}, // E0.6 [1] (🏊) person swimming
+ {0x1F3CB, 0x1F3CE, prExtendedPictographic}, // E0.7 [4] (🏋️..🏎️) person lifting weights..racing car
+ {0x1F3CF, 0x1F3D3, prExtendedPictographic}, // E1.0 [5] (🏏..🏓) cricket game..ping pong
+ {0x1F3D4, 0x1F3DF, prExtendedPictographic}, // E0.7 [12] (🏔️..🏟️) snow-capped mountain..stadium
+ {0x1F3E0, 0x1F3E3, prExtendedPictographic}, // E0.6 [4] (🏠..🏣) house..Japanese post office
+ {0x1F3E4, 0x1F3E4, prExtendedPictographic}, // E1.0 [1] (🏤) post office
+ {0x1F3E5, 0x1F3F0, prExtendedPictographic}, // E0.6 [12] (🏥..🏰) hospital..castle
+ {0x1F3F1, 0x1F3F2, prExtendedPictographic}, // E0.0 [2] (🏱..🏲) WHITE PENNANT..BLACK PENNANT
+ {0x1F3F3, 0x1F3F3, prExtendedPictographic}, // E0.7 [1] (🏳️) white flag
+ {0x1F3F4, 0x1F3F4, prExtendedPictographic}, // E1.0 [1] (🏴) black flag
+ {0x1F3F5, 0x1F3F5, prExtendedPictographic}, // E0.7 [1] (🏵️) rosette
+ {0x1F3F6, 0x1F3F6, prExtendedPictographic}, // E0.0 [1] (🏶) BLACK ROSETTE
+ {0x1F3F7, 0x1F3F7, prExtendedPictographic}, // E0.7 [1] (🏷️) label
+ {0x1F3F8, 0x1F3FA, prExtendedPictographic}, // E1.0 [3] (🏸..🏺) badminton..amphora
+ {0x1F3FB, 0x1F3FF, prExtend}, // Sk [5] EMOJI MODIFIER FITZPATRICK TYPE-1-2..EMOJI MODIFIER FITZPATRICK TYPE-6
+ {0x1F400, 0x1F407, prExtendedPictographic}, // E1.0 [8] (🐀..🐇) rat..rabbit
+ {0x1F408, 0x1F408, prExtendedPictographic}, // E0.7 [1] (🐈) cat
+ {0x1F409, 0x1F40B, prExtendedPictographic}, // E1.0 [3] (🐉..🐋) dragon..whale
+ {0x1F40C, 0x1F40E, prExtendedPictographic}, // E0.6 [3] (🐌..🐎) snail..horse
+ {0x1F40F, 0x1F410, prExtendedPictographic}, // E1.0 [2] (🐏..🐐) ram..goat
+ {0x1F411, 0x1F412, prExtendedPictographic}, // E0.6 [2] (🐑..🐒) ewe..monkey
+ {0x1F413, 0x1F413, prExtendedPictographic}, // E1.0 [1] (🐓) rooster
+ {0x1F414, 0x1F414, prExtendedPictographic}, // E0.6 [1] (🐔) chicken
+ {0x1F415, 0x1F415, prExtendedPictographic}, // E0.7 [1] (🐕) dog
+ {0x1F416, 0x1F416, prExtendedPictographic}, // E1.0 [1] (🐖) pig
+ {0x1F417, 0x1F429, prExtendedPictographic}, // E0.6 [19] (🐗..🐩) boar..poodle
+ {0x1F42A, 0x1F42A, prExtendedPictographic}, // E1.0 [1] (🐪) camel
+ {0x1F42B, 0x1F43E, prExtendedPictographic}, // E0.6 [20] (🐫..🐾) two-hump camel..paw prints
+ {0x1F43F, 0x1F43F, prExtendedPictographic}, // E0.7 [1] (🐿️) chipmunk
+ {0x1F440, 0x1F440, prExtendedPictographic}, // E0.6 [1] (👀) eyes
+ {0x1F441, 0x1F441, prExtendedPictographic}, // E0.7 [1] (👁️) eye
+ {0x1F442, 0x1F464, prExtendedPictographic}, // E0.6 [35] (👂..👤) ear..bust in silhouette
+ {0x1F465, 0x1F465, prExtendedPictographic}, // E1.0 [1] (👥) busts in silhouette
+ {0x1F466, 0x1F46B, prExtendedPictographic}, // E0.6 [6] (👦..👫) boy..woman and man holding hands
+ {0x1F46C, 0x1F46D, prExtendedPictographic}, // E1.0 [2] (👬..👭) men holding hands..women holding hands
+ {0x1F46E, 0x1F4AC, prExtendedPictographic}, // E0.6 [63] (👮..💬) police officer..speech balloon
+ {0x1F4AD, 0x1F4AD, prExtendedPictographic}, // E1.0 [1] (💭) thought balloon
+ {0x1F4AE, 0x1F4B5, prExtendedPictographic}, // E0.6 [8] (💮..💵) white flower..dollar banknote
+ {0x1F4B6, 0x1F4B7, prExtendedPictographic}, // E1.0 [2] (💶..💷) euro banknote..pound banknote
+ {0x1F4B8, 0x1F4EB, prExtendedPictographic}, // E0.6 [52] (💸..📫) money with wings..closed mailbox with raised flag
+ {0x1F4EC, 0x1F4ED, prExtendedPictographic}, // E0.7 [2] (📬..📭) open mailbox with raised flag..open mailbox with lowered flag
+ {0x1F4EE, 0x1F4EE, prExtendedPictographic}, // E0.6 [1] (📮) postbox
+ {0x1F4EF, 0x1F4EF, prExtendedPictographic}, // E1.0 [1] (📯) postal horn
+ {0x1F4F0, 0x1F4F4, prExtendedPictographic}, // E0.6 [5] (📰..📴) newspaper..mobile phone off
+ {0x1F4F5, 0x1F4F5, prExtendedPictographic}, // E1.0 [1] (📵) no mobile phones
+ {0x1F4F6, 0x1F4F7, prExtendedPictographic}, // E0.6 [2] (📶..📷) antenna bars..camera
+ {0x1F4F8, 0x1F4F8, prExtendedPictographic}, // E1.0 [1] (📸) camera with flash
+ {0x1F4F9, 0x1F4FC, prExtendedPictographic}, // E0.6 [4] (📹..📼) video camera..videocassette
+ {0x1F4FD, 0x1F4FD, prExtendedPictographic}, // E0.7 [1] (📽️) film projector
+ {0x1F4FE, 0x1F4FE, prExtendedPictographic}, // E0.0 [1] (📾) PORTABLE STEREO
+ {0x1F4FF, 0x1F502, prExtendedPictographic}, // E1.0 [4] (📿..🔂) prayer beads..repeat single button
+ {0x1F503, 0x1F503, prExtendedPictographic}, // E0.6 [1] (🔃) clockwise vertical arrows
+ {0x1F504, 0x1F507, prExtendedPictographic}, // E1.0 [4] (🔄..🔇) counterclockwise arrows button..muted speaker
+ {0x1F508, 0x1F508, prExtendedPictographic}, // E0.7 [1] (🔈) speaker low volume
+ {0x1F509, 0x1F509, prExtendedPictographic}, // E1.0 [1] (🔉) speaker medium volume
+ {0x1F50A, 0x1F514, prExtendedPictographic}, // E0.6 [11] (🔊..🔔) speaker high volume..bell
+ {0x1F515, 0x1F515, prExtendedPictographic}, // E1.0 [1] (🔕) bell with slash
+ {0x1F516, 0x1F52B, prExtendedPictographic}, // E0.6 [22] (🔖..🔫) bookmark..water pistol
+ {0x1F52C, 0x1F52D, prExtendedPictographic}, // E1.0 [2] (🔬..🔭) microscope..telescope
+ {0x1F52E, 0x1F53D, prExtendedPictographic}, // E0.6 [16] (🔮..🔽) crystal ball..downwards button
+ {0x1F546, 0x1F548, prExtendedPictographic}, // E0.0 [3] (🕆..🕈) WHITE LATIN CROSS..CELTIC CROSS
+ {0x1F549, 0x1F54A, prExtendedPictographic}, // E0.7 [2] (🕉️..🕊️) om..dove
+ {0x1F54B, 0x1F54E, prExtendedPictographic}, // E1.0 [4] (🕋..🕎) kaaba..menorah
+ {0x1F54F, 0x1F54F, prExtendedPictographic}, // E0.0 [1] (🕏) BOWL OF HYGIEIA
+ {0x1F550, 0x1F55B, prExtendedPictographic}, // E0.6 [12] (🕐..🕛) one o’clock..twelve o’clock
+ {0x1F55C, 0x1F567, prExtendedPictographic}, // E0.7 [12] (🕜..🕧) one-thirty..twelve-thirty
+ {0x1F568, 0x1F56E, prExtendedPictographic}, // E0.0 [7] (🕨..🕮) RIGHT SPEAKER..BOOK
+ {0x1F56F, 0x1F570, prExtendedPictographic}, // E0.7 [2] (🕯️..🕰️) candle..mantelpiece clock
+ {0x1F571, 0x1F572, prExtendedPictographic}, // E0.0 [2] (🕱..🕲) BLACK SKULL AND CROSSBONES..NO PIRACY
+ {0x1F573, 0x1F579, prExtendedPictographic}, // E0.7 [7] (🕳️..🕹️) hole..joystick
+ {0x1F57A, 0x1F57A, prExtendedPictographic}, // E3.0 [1] (🕺) man dancing
+ {0x1F57B, 0x1F586, prExtendedPictographic}, // E0.0 [12] (🕻..🖆) LEFT HAND TELEPHONE RECEIVER..PEN OVER STAMPED ENVELOPE
+ {0x1F587, 0x1F587, prExtendedPictographic}, // E0.7 [1] (🖇️) linked paperclips
+ {0x1F588, 0x1F589, prExtendedPictographic}, // E0.0 [2] (🖈..🖉) BLACK PUSHPIN..LOWER LEFT PENCIL
+ {0x1F58A, 0x1F58D, prExtendedPictographic}, // E0.7 [4] (🖊️..🖍️) pen..crayon
+ {0x1F58E, 0x1F58F, prExtendedPictographic}, // E0.0 [2] (🖎..🖏) LEFT WRITING HAND..TURNED OK HAND SIGN
+ {0x1F590, 0x1F590, prExtendedPictographic}, // E0.7 [1] (🖐️) hand with fingers splayed
+ {0x1F591, 0x1F594, prExtendedPictographic}, // E0.0 [4] (🖑..🖔) REVERSED RAISED HAND WITH FINGERS SPLAYED..REVERSED VICTORY HAND
+ {0x1F595, 0x1F596, prExtendedPictographic}, // E1.0 [2] (🖕..🖖) middle finger..vulcan salute
+ {0x1F597, 0x1F5A3, prExtendedPictographic}, // E0.0 [13] (🖗..🖣) WHITE DOWN POINTING LEFT HAND INDEX..BLACK DOWN POINTING BACKHAND INDEX
+ {0x1F5A4, 0x1F5A4, prExtendedPictographic}, // E3.0 [1] (🖤) black heart
+ {0x1F5A5, 0x1F5A5, prExtendedPictographic}, // E0.7 [1] (🖥️) desktop computer
+ {0x1F5A6, 0x1F5A7, prExtendedPictographic}, // E0.0 [2] (🖦..🖧) KEYBOARD AND MOUSE..THREE NETWORKED COMPUTERS
+ {0x1F5A8, 0x1F5A8, prExtendedPictographic}, // E0.7 [1] (🖨️) printer
+ {0x1F5A9, 0x1F5B0, prExtendedPictographic}, // E0.0 [8] (🖩..🖰) POCKET CALCULATOR..TWO BUTTON MOUSE
+ {0x1F5B1, 0x1F5B2, prExtendedPictographic}, // E0.7 [2] (🖱️..🖲️) computer mouse..trackball
+ {0x1F5B3, 0x1F5BB, prExtendedPictographic}, // E0.0 [9] (🖳..🖻) OLD PERSONAL COMPUTER..DOCUMENT WITH PICTURE
+ {0x1F5BC, 0x1F5BC, prExtendedPictographic}, // E0.7 [1] (🖼️) framed picture
+ {0x1F5BD, 0x1F5C1, prExtendedPictographic}, // E0.0 [5] (🖽..🗁) FRAME WITH TILES..OPEN FOLDER
+ {0x1F5C2, 0x1F5C4, prExtendedPictographic}, // E0.7 [3] (🗂️..🗄️) card index dividers..file cabinet
+ {0x1F5C5, 0x1F5D0, prExtendedPictographic}, // E0.0 [12] (🗅..🗐) EMPTY NOTE..PAGES
+ {0x1F5D1, 0x1F5D3, prExtendedPictographic}, // E0.7 [3] (🗑️..🗓️) wastebasket..spiral calendar
+ {0x1F5D4, 0x1F5DB, prExtendedPictographic}, // E0.0 [8] (🗔..🗛) DESKTOP WINDOW..DECREASE FONT SIZE SYMBOL
+ {0x1F5DC, 0x1F5DE, prExtendedPictographic}, // E0.7 [3] (🗜️..🗞️) clamp..rolled-up newspaper
+ {0x1F5DF, 0x1F5E0, prExtendedPictographic}, // E0.0 [2] (🗟..🗠) PAGE WITH CIRCLED TEXT..STOCK CHART
+ {0x1F5E1, 0x1F5E1, prExtendedPictographic}, // E0.7 [1] (🗡️) dagger
+ {0x1F5E2, 0x1F5E2, prExtendedPictographic}, // E0.0 [1] (🗢) LIPS
+ {0x1F5E3, 0x1F5E3, prExtendedPictographic}, // E0.7 [1] (🗣️) speaking head
+ {0x1F5E4, 0x1F5E7, prExtendedPictographic}, // E0.0 [4] (🗤..🗧) THREE RAYS ABOVE..THREE RAYS RIGHT
+ {0x1F5E8, 0x1F5E8, prExtendedPictographic}, // E2.0 [1] (🗨️) left speech bubble
+ {0x1F5E9, 0x1F5EE, prExtendedPictographic}, // E0.0 [6] (🗩..🗮) RIGHT SPEECH BUBBLE..LEFT ANGER BUBBLE
+ {0x1F5EF, 0x1F5EF, prExtendedPictographic}, // E0.7 [1] (🗯️) right anger bubble
+ {0x1F5F0, 0x1F5F2, prExtendedPictographic}, // E0.0 [3] (🗰..🗲) MOOD BUBBLE..LIGHTNING MOOD
+ {0x1F5F3, 0x1F5F3, prExtendedPictographic}, // E0.7 [1] (🗳️) ballot box with ballot
+ {0x1F5F4, 0x1F5F9, prExtendedPictographic}, // E0.0 [6] (🗴..🗹) BALLOT SCRIPT X..BALLOT BOX WITH BOLD CHECK
+ {0x1F5FA, 0x1F5FA, prExtendedPictographic}, // E0.7 [1] (🗺️) world map
+ {0x1F5FB, 0x1F5FF, prExtendedPictographic}, // E0.6 [5] (🗻..🗿) mount fuji..moai
+ {0x1F600, 0x1F600, prExtendedPictographic}, // E1.0 [1] (😀) grinning face
+ {0x1F601, 0x1F606, prExtendedPictographic}, // E0.6 [6] (😁..😆) beaming face with smiling eyes..grinning squinting face
+ {0x1F607, 0x1F608, prExtendedPictographic}, // E1.0 [2] (😇..😈) smiling face with halo..smiling face with horns
+ {0x1F609, 0x1F60D, prExtendedPictographic}, // E0.6 [5] (😉..😍) winking face..smiling face with heart-eyes
+ {0x1F60E, 0x1F60E, prExtendedPictographic}, // E1.0 [1] (😎) smiling face with sunglasses
+ {0x1F60F, 0x1F60F, prExtendedPictographic}, // E0.6 [1] (😏) smirking face
+ {0x1F610, 0x1F610, prExtendedPictographic}, // E0.7 [1] (😐) neutral face
+ {0x1F611, 0x1F611, prExtendedPictographic}, // E1.0 [1] (😑) expressionless face
+ {0x1F612, 0x1F614, prExtendedPictographic}, // E0.6 [3] (😒..😔) unamused face..pensive face
+ {0x1F615, 0x1F615, prExtendedPictographic}, // E1.0 [1] (😕) confused face
+ {0x1F616, 0x1F616, prExtendedPictographic}, // E0.6 [1] (😖) confounded face
+ {0x1F617, 0x1F617, prExtendedPictographic}, // E1.0 [1] (😗) kissing face
+ {0x1F618, 0x1F618, prExtendedPictographic}, // E0.6 [1] (😘) face blowing a kiss
+ {0x1F619, 0x1F619, prExtendedPictographic}, // E1.0 [1] (😙) kissing face with smiling eyes
+ {0x1F61A, 0x1F61A, prExtendedPictographic}, // E0.6 [1] (😚) kissing face with closed eyes
+ {0x1F61B, 0x1F61B, prExtendedPictographic}, // E1.0 [1] (😛) face with tongue
+ {0x1F61C, 0x1F61E, prExtendedPictographic}, // E0.6 [3] (😜..😞) winking face with tongue..disappointed face
+ {0x1F61F, 0x1F61F, prExtendedPictographic}, // E1.0 [1] (😟) worried face
+ {0x1F620, 0x1F625, prExtendedPictographic}, // E0.6 [6] (😠..😥) angry face..sad but relieved face
+ {0x1F626, 0x1F627, prExtendedPictographic}, // E1.0 [2] (😦..😧) frowning face with open mouth..anguished face
+ {0x1F628, 0x1F62B, prExtendedPictographic}, // E0.6 [4] (😨..😫) fearful face..tired face
+ {0x1F62C, 0x1F62C, prExtendedPictographic}, // E1.0 [1] (😬) grimacing face
+ {0x1F62D, 0x1F62D, prExtendedPictographic}, // E0.6 [1] (😭) loudly crying face
+ {0x1F62E, 0x1F62F, prExtendedPictographic}, // E1.0 [2] (😮..😯) face with open mouth..hushed face
+ {0x1F630, 0x1F633, prExtendedPictographic}, // E0.6 [4] (😰..😳) anxious face with sweat..flushed face
+ {0x1F634, 0x1F634, prExtendedPictographic}, // E1.0 [1] (😴) sleeping face
+ {0x1F635, 0x1F635, prExtendedPictographic}, // E0.6 [1] (😵) face with crossed-out eyes
+ {0x1F636, 0x1F636, prExtendedPictographic}, // E1.0 [1] (😶) face without mouth
+ {0x1F637, 0x1F640, prExtendedPictographic}, // E0.6 [10] (😷..🙀) face with medical mask..weary cat
+ {0x1F641, 0x1F644, prExtendedPictographic}, // E1.0 [4] (🙁..🙄) slightly frowning face..face with rolling eyes
+ {0x1F645, 0x1F64F, prExtendedPictographic}, // E0.6 [11] (🙅..🙏) person gesturing NO..folded hands
+ {0x1F680, 0x1F680, prExtendedPictographic}, // E0.6 [1] (🚀) rocket
+ {0x1F681, 0x1F682, prExtendedPictographic}, // E1.0 [2] (🚁..🚂) helicopter..locomotive
+ {0x1F683, 0x1F685, prExtendedPictographic}, // E0.6 [3] (🚃..🚅) railway car..bullet train
+ {0x1F686, 0x1F686, prExtendedPictographic}, // E1.0 [1] (🚆) train
+ {0x1F687, 0x1F687, prExtendedPictographic}, // E0.6 [1] (🚇) metro
+ {0x1F688, 0x1F688, prExtendedPictographic}, // E1.0 [1] (🚈) light rail
+ {0x1F689, 0x1F689, prExtendedPictographic}, // E0.6 [1] (🚉) station
+ {0x1F68A, 0x1F68B, prExtendedPictographic}, // E1.0 [2] (🚊..🚋) tram..tram car
+ {0x1F68C, 0x1F68C, prExtendedPictographic}, // E0.6 [1] (🚌) bus
+ {0x1F68D, 0x1F68D, prExtendedPictographic}, // E0.7 [1] (🚍) oncoming bus
+ {0x1F68E, 0x1F68E, prExtendedPictographic}, // E1.0 [1] (🚎) trolleybus
+ {0x1F68F, 0x1F68F, prExtendedPictographic}, // E0.6 [1] (🚏) bus stop
+ {0x1F690, 0x1F690, prExtendedPictographic}, // E1.0 [1] (🚐) minibus
+ {0x1F691, 0x1F693, prExtendedPictographic}, // E0.6 [3] (🚑..🚓) ambulance..police car
+ {0x1F694, 0x1F694, prExtendedPictographic}, // E0.7 [1] (🚔) oncoming police car
+ {0x1F695, 0x1F695, prExtendedPictographic}, // E0.6 [1] (🚕) taxi
+ {0x1F696, 0x1F696, prExtendedPictographic}, // E1.0 [1] (🚖) oncoming taxi
+ {0x1F697, 0x1F697, prExtendedPictographic}, // E0.6 [1] (🚗) automobile
+ {0x1F698, 0x1F698, prExtendedPictographic}, // E0.7 [1] (🚘) oncoming automobile
+ {0x1F699, 0x1F69A, prExtendedPictographic}, // E0.6 [2] (🚙..🚚) sport utility vehicle..delivery truck
+ {0x1F69B, 0x1F6A1, prExtendedPictographic}, // E1.0 [7] (🚛..🚡) articulated lorry..aerial tramway
+ {0x1F6A2, 0x1F6A2, prExtendedPictographic}, // E0.6 [1] (🚢) ship
+ {0x1F6A3, 0x1F6A3, prExtendedPictographic}, // E1.0 [1] (🚣) person rowing boat
+ {0x1F6A4, 0x1F6A5, prExtendedPictographic}, // E0.6 [2] (🚤..🚥) speedboat..horizontal traffic light
+ {0x1F6A6, 0x1F6A6, prExtendedPictographic}, // E1.0 [1] (🚦) vertical traffic light
+ {0x1F6A7, 0x1F6AD, prExtendedPictographic}, // E0.6 [7] (🚧..🚭) construction..no smoking
+ {0x1F6AE, 0x1F6B1, prExtendedPictographic}, // E1.0 [4] (🚮..🚱) litter in bin sign..non-potable water
+ {0x1F6B2, 0x1F6B2, prExtendedPictographic}, // E0.6 [1] (🚲) bicycle
+ {0x1F6B3, 0x1F6B5, prExtendedPictographic}, // E1.0 [3] (🚳..🚵) no bicycles..person mountain biking
+ {0x1F6B6, 0x1F6B6, prExtendedPictographic}, // E0.6 [1] (🚶) person walking
+ {0x1F6B7, 0x1F6B8, prExtendedPictographic}, // E1.0 [2] (🚷..🚸) no pedestrians..children crossing
+ {0x1F6B9, 0x1F6BE, prExtendedPictographic}, // E0.6 [6] (🚹..🚾) men’s room..water closet
+ {0x1F6BF, 0x1F6BF, prExtendedPictographic}, // E1.0 [1] (🚿) shower
+ {0x1F6C0, 0x1F6C0, prExtendedPictographic}, // E0.6 [1] (🛀) person taking bath
+ {0x1F6C1, 0x1F6C5, prExtendedPictographic}, // E1.0 [5] (🛁..🛅) bathtub..left luggage
+ {0x1F6C6, 0x1F6CA, prExtendedPictographic}, // E0.0 [5] (🛆..🛊) TRIANGLE WITH ROUNDED CORNERS..GIRLS SYMBOL
+ {0x1F6CB, 0x1F6CB, prExtendedPictographic}, // E0.7 [1] (🛋️) couch and lamp
+ {0x1F6CC, 0x1F6CC, prExtendedPictographic}, // E1.0 [1] (🛌) person in bed
+ {0x1F6CD, 0x1F6CF, prExtendedPictographic}, // E0.7 [3] (🛍️..🛏️) shopping bags..bed
+ {0x1F6D0, 0x1F6D0, prExtendedPictographic}, // E1.0 [1] (🛐) place of worship
+ {0x1F6D1, 0x1F6D2, prExtendedPictographic}, // E3.0 [2] (🛑..🛒) stop sign..shopping cart
+ {0x1F6D3, 0x1F6D4, prExtendedPictographic}, // E0.0 [2] (🛓..🛔) STUPA..PAGODA
+ {0x1F6D5, 0x1F6D5, prExtendedPictographic}, // E12.0 [1] (🛕) hindu temple
+ {0x1F6D6, 0x1F6D7, prExtendedPictographic}, // E13.0 [2] (🛖..🛗) hut..elevator
+ {0x1F6D8, 0x1F6DB, prExtendedPictographic}, // E0.0 [4] (..) ..
+ {0x1F6DC, 0x1F6DC, prExtendedPictographic}, // E15.0 [1] (🛜) wireless
+ {0x1F6DD, 0x1F6DF, prExtendedPictographic}, // E14.0 [3] (🛝..🛟) playground slide..ring buoy
+ {0x1F6E0, 0x1F6E5, prExtendedPictographic}, // E0.7 [6] (🛠️..🛥️) hammer and wrench..motor boat
+ {0x1F6E6, 0x1F6E8, prExtendedPictographic}, // E0.0 [3] (🛦..🛨) UP-POINTING MILITARY AIRPLANE..UP-POINTING SMALL AIRPLANE
+ {0x1F6E9, 0x1F6E9, prExtendedPictographic}, // E0.7 [1] (🛩️) small airplane
+ {0x1F6EA, 0x1F6EA, prExtendedPictographic}, // E0.0 [1] (🛪) NORTHEAST-POINTING AIRPLANE
+ {0x1F6EB, 0x1F6EC, prExtendedPictographic}, // E1.0 [2] (🛫..🛬) airplane departure..airplane arrival
+ {0x1F6ED, 0x1F6EF, prExtendedPictographic}, // E0.0 [3] (..) ..
+ {0x1F6F0, 0x1F6F0, prExtendedPictographic}, // E0.7 [1] (🛰️) satellite
+ {0x1F6F1, 0x1F6F2, prExtendedPictographic}, // E0.0 [2] (🛱..🛲) ONCOMING FIRE ENGINE..DIESEL LOCOMOTIVE
+ {0x1F6F3, 0x1F6F3, prExtendedPictographic}, // E0.7 [1] (🛳️) passenger ship
+ {0x1F6F4, 0x1F6F6, prExtendedPictographic}, // E3.0 [3] (🛴..🛶) kick scooter..canoe
+ {0x1F6F7, 0x1F6F8, prExtendedPictographic}, // E5.0 [2] (🛷..🛸) sled..flying saucer
+ {0x1F6F9, 0x1F6F9, prExtendedPictographic}, // E11.0 [1] (🛹) skateboard
+ {0x1F6FA, 0x1F6FA, prExtendedPictographic}, // E12.0 [1] (🛺) auto rickshaw
+ {0x1F6FB, 0x1F6FC, prExtendedPictographic}, // E13.0 [2] (🛻..🛼) pickup truck..roller skate
+ {0x1F6FD, 0x1F6FF, prExtendedPictographic}, // E0.0 [3] (..) ..
+ {0x1F774, 0x1F77F, prExtendedPictographic}, // E0.0 [12] (🝴..🝿) LOT OF FORTUNE..ORCUS
+ {0x1F7D5, 0x1F7DF, prExtendedPictographic}, // E0.0 [11] (🟕..) CIRCLED TRIANGLE..
+ {0x1F7E0, 0x1F7EB, prExtendedPictographic}, // E12.0 [12] (🟠..🟫) orange circle..brown square
+ {0x1F7EC, 0x1F7EF, prExtendedPictographic}, // E0.0 [4] (..) ..
+ {0x1F7F0, 0x1F7F0, prExtendedPictographic}, // E14.0 [1] (🟰) heavy equals sign
+ {0x1F7F1, 0x1F7FF, prExtendedPictographic}, // E0.0 [15] (..) ..
+ {0x1F80C, 0x1F80F, prExtendedPictographic}, // E0.0 [4] (..) ..
+ {0x1F848, 0x1F84F, prExtendedPictographic}, // E0.0 [8] (..) ..
+ {0x1F85A, 0x1F85F, prExtendedPictographic}, // E0.0 [6] (..) ..
+ {0x1F888, 0x1F88F, prExtendedPictographic}, // E0.0 [8] (..) ..
+ {0x1F8AE, 0x1F8FF, prExtendedPictographic}, // E0.0 [82] (..) ..
+ {0x1F90C, 0x1F90C, prExtendedPictographic}, // E13.0 [1] (🤌) pinched fingers
+ {0x1F90D, 0x1F90F, prExtendedPictographic}, // E12.0 [3] (🤍..🤏) white heart..pinching hand
+ {0x1F910, 0x1F918, prExtendedPictographic}, // E1.0 [9] (🤐..🤘) zipper-mouth face..sign of the horns
+ {0x1F919, 0x1F91E, prExtendedPictographic}, // E3.0 [6] (🤙..🤞) call me hand..crossed fingers
+ {0x1F91F, 0x1F91F, prExtendedPictographic}, // E5.0 [1] (🤟) love-you gesture
+ {0x1F920, 0x1F927, prExtendedPictographic}, // E3.0 [8] (🤠..🤧) cowboy hat face..sneezing face
+ {0x1F928, 0x1F92F, prExtendedPictographic}, // E5.0 [8] (🤨..🤯) face with raised eyebrow..exploding head
+ {0x1F930, 0x1F930, prExtendedPictographic}, // E3.0 [1] (🤰) pregnant woman
+ {0x1F931, 0x1F932, prExtendedPictographic}, // E5.0 [2] (🤱..🤲) breast-feeding..palms up together
+ {0x1F933, 0x1F93A, prExtendedPictographic}, // E3.0 [8] (🤳..🤺) selfie..person fencing
+ {0x1F93C, 0x1F93E, prExtendedPictographic}, // E3.0 [3] (🤼..🤾) people wrestling..person playing handball
+ {0x1F93F, 0x1F93F, prExtendedPictographic}, // E12.0 [1] (🤿) diving mask
+ {0x1F940, 0x1F945, prExtendedPictographic}, // E3.0 [6] (🥀..🥅) wilted flower..goal net
+ {0x1F947, 0x1F94B, prExtendedPictographic}, // E3.0 [5] (🥇..🥋) 1st place medal..martial arts uniform
+ {0x1F94C, 0x1F94C, prExtendedPictographic}, // E5.0 [1] (🥌) curling stone
+ {0x1F94D, 0x1F94F, prExtendedPictographic}, // E11.0 [3] (🥍..🥏) lacrosse..flying disc
+ {0x1F950, 0x1F95E, prExtendedPictographic}, // E3.0 [15] (🥐..🥞) croissant..pancakes
+ {0x1F95F, 0x1F96B, prExtendedPictographic}, // E5.0 [13] (🥟..🥫) dumpling..canned food
+ {0x1F96C, 0x1F970, prExtendedPictographic}, // E11.0 [5] (🥬..🥰) leafy green..smiling face with hearts
+ {0x1F971, 0x1F971, prExtendedPictographic}, // E12.0 [1] (🥱) yawning face
+ {0x1F972, 0x1F972, prExtendedPictographic}, // E13.0 [1] (🥲) smiling face with tear
+ {0x1F973, 0x1F976, prExtendedPictographic}, // E11.0 [4] (🥳..🥶) partying face..cold face
+ {0x1F977, 0x1F978, prExtendedPictographic}, // E13.0 [2] (🥷..🥸) ninja..disguised face
+ {0x1F979, 0x1F979, prExtendedPictographic}, // E14.0 [1] (🥹) face holding back tears
+ {0x1F97A, 0x1F97A, prExtendedPictographic}, // E11.0 [1] (🥺) pleading face
+ {0x1F97B, 0x1F97B, prExtendedPictographic}, // E12.0 [1] (🥻) sari
+ {0x1F97C, 0x1F97F, prExtendedPictographic}, // E11.0 [4] (🥼..🥿) lab coat..flat shoe
+ {0x1F980, 0x1F984, prExtendedPictographic}, // E1.0 [5] (🦀..🦄) crab..unicorn
+ {0x1F985, 0x1F991, prExtendedPictographic}, // E3.0 [13] (🦅..🦑) eagle..squid
+ {0x1F992, 0x1F997, prExtendedPictographic}, // E5.0 [6] (🦒..🦗) giraffe..cricket
+ {0x1F998, 0x1F9A2, prExtendedPictographic}, // E11.0 [11] (🦘..🦢) kangaroo..swan
+ {0x1F9A3, 0x1F9A4, prExtendedPictographic}, // E13.0 [2] (🦣..🦤) mammoth..dodo
+ {0x1F9A5, 0x1F9AA, prExtendedPictographic}, // E12.0 [6] (🦥..🦪) sloth..oyster
+ {0x1F9AB, 0x1F9AD, prExtendedPictographic}, // E13.0 [3] (🦫..🦭) beaver..seal
+ {0x1F9AE, 0x1F9AF, prExtendedPictographic}, // E12.0 [2] (🦮..🦯) guide dog..white cane
+ {0x1F9B0, 0x1F9B9, prExtendedPictographic}, // E11.0 [10] (🦰..🦹) red hair..supervillain
+ {0x1F9BA, 0x1F9BF, prExtendedPictographic}, // E12.0 [6] (🦺..🦿) safety vest..mechanical leg
+ {0x1F9C0, 0x1F9C0, prExtendedPictographic}, // E1.0 [1] (🧀) cheese wedge
+ {0x1F9C1, 0x1F9C2, prExtendedPictographic}, // E11.0 [2] (🧁..🧂) cupcake..salt
+ {0x1F9C3, 0x1F9CA, prExtendedPictographic}, // E12.0 [8] (🧃..🧊) beverage box..ice
+ {0x1F9CB, 0x1F9CB, prExtendedPictographic}, // E13.0 [1] (🧋) bubble tea
+ {0x1F9CC, 0x1F9CC, prExtendedPictographic}, // E14.0 [1] (🧌) troll
+ {0x1F9CD, 0x1F9CF, prExtendedPictographic}, // E12.0 [3] (🧍..🧏) person standing..deaf person
+ {0x1F9D0, 0x1F9E6, prExtendedPictographic}, // E5.0 [23] (🧐..🧦) face with monocle..socks
+ {0x1F9E7, 0x1F9FF, prExtendedPictographic}, // E11.0 [25] (🧧..🧿) red envelope..nazar amulet
+ {0x1FA00, 0x1FA6F, prExtendedPictographic}, // E0.0 [112] (🨀..) NEUTRAL CHESS KING..
+ {0x1FA70, 0x1FA73, prExtendedPictographic}, // E12.0 [4] (🩰..🩳) ballet shoes..shorts
+ {0x1FA74, 0x1FA74, prExtendedPictographic}, // E13.0 [1] (🩴) thong sandal
+ {0x1FA75, 0x1FA77, prExtendedPictographic}, // E15.0 [3] (🩵..🩷) light blue heart..pink heart
+ {0x1FA78, 0x1FA7A, prExtendedPictographic}, // E12.0 [3] (🩸..🩺) drop of blood..stethoscope
+ {0x1FA7B, 0x1FA7C, prExtendedPictographic}, // E14.0 [2] (🩻..🩼) x-ray..crutch
+ {0x1FA7D, 0x1FA7F, prExtendedPictographic}, // E0.0 [3] (..) ..
+ {0x1FA80, 0x1FA82, prExtendedPictographic}, // E12.0 [3] (🪀..🪂) yo-yo..parachute
+ {0x1FA83, 0x1FA86, prExtendedPictographic}, // E13.0 [4] (🪃..🪆) boomerang..nesting dolls
+ {0x1FA87, 0x1FA88, prExtendedPictographic}, // E15.0 [2] (🪇..🪈) maracas..flute
+ {0x1FA89, 0x1FA8F, prExtendedPictographic}, // E0.0 [7] (..) ..
+ {0x1FA90, 0x1FA95, prExtendedPictographic}, // E12.0 [6] (🪐..🪕) ringed planet..banjo
+ {0x1FA96, 0x1FAA8, prExtendedPictographic}, // E13.0 [19] (🪖..🪨) military helmet..rock
+ {0x1FAA9, 0x1FAAC, prExtendedPictographic}, // E14.0 [4] (🪩..🪬) mirror ball..hamsa
+ {0x1FAAD, 0x1FAAF, prExtendedPictographic}, // E15.0 [3] (🪭..🪯) folding hand fan..khanda
+ {0x1FAB0, 0x1FAB6, prExtendedPictographic}, // E13.0 [7] (🪰..🪶) fly..feather
+ {0x1FAB7, 0x1FABA, prExtendedPictographic}, // E14.0 [4] (🪷..🪺) lotus..nest with eggs
+ {0x1FABB, 0x1FABD, prExtendedPictographic}, // E15.0 [3] (🪻..🪽) hyacinth..wing
+ {0x1FABE, 0x1FABE, prExtendedPictographic}, // E0.0 [1] ()
+ {0x1FABF, 0x1FABF, prExtendedPictographic}, // E15.0 [1] (🪿) goose
+ {0x1FAC0, 0x1FAC2, prExtendedPictographic}, // E13.0 [3] (🫀..🫂) anatomical heart..people hugging
+ {0x1FAC3, 0x1FAC5, prExtendedPictographic}, // E14.0 [3] (🫃..🫅) pregnant man..person with crown
+ {0x1FAC6, 0x1FACD, prExtendedPictographic}, // E0.0 [8] (..) ..
+ {0x1FACE, 0x1FACF, prExtendedPictographic}, // E15.0 [2] (🫎..🫏) moose..donkey
+ {0x1FAD0, 0x1FAD6, prExtendedPictographic}, // E13.0 [7] (🫐..🫖) blueberries..teapot
+ {0x1FAD7, 0x1FAD9, prExtendedPictographic}, // E14.0 [3] (🫗..🫙) pouring liquid..jar
+ {0x1FADA, 0x1FADB, prExtendedPictographic}, // E15.0 [2] (🫚..🫛) ginger root..pea pod
+ {0x1FADC, 0x1FADF, prExtendedPictographic}, // E0.0 [4] (..) ..
+ {0x1FAE0, 0x1FAE7, prExtendedPictographic}, // E14.0 [8] (🫠..🫧) melting face..bubbles
+ {0x1FAE8, 0x1FAE8, prExtendedPictographic}, // E15.0 [1] (🫨) shaking face
+ {0x1FAE9, 0x1FAEF, prExtendedPictographic}, // E0.0 [7] (..) ..
+ {0x1FAF0, 0x1FAF6, prExtendedPictographic}, // E14.0 [7] (🫰..🫶) hand with index finger and thumb crossed..heart hands
+ {0x1FAF7, 0x1FAF8, prExtendedPictographic}, // E15.0 [2] (🫷..🫸) leftwards pushing hand..rightwards pushing hand
+ {0x1FAF9, 0x1FAFF, prExtendedPictographic}, // E0.0 [7] (..) ..
+ {0x1FC00, 0x1FFFD, prExtendedPictographic}, // E0.0[1022] (..) ..
+ {0xE0000, 0xE0000, prControl}, // Cn
+ {0xE0001, 0xE0001, prControl}, // Cf LANGUAGE TAG
+ {0xE0002, 0xE001F, prControl}, // Cn [30] ..
+ {0xE0020, 0xE007F, prExtend}, // Cf [96] TAG SPACE..CANCEL TAG
+ {0xE0080, 0xE00FF, prControl}, // Cn [128] ..
+ {0xE0100, 0xE01EF, prExtend}, // Mn [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256
+ {0xE01F0, 0xE0FFF, prControl}, // Cn [3600] ..
+}
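
The generated tables above hold sorted, non-overlapping {from, to, property} ranges, so a lookup such as propertyGraphemes can plausibly be a plain binary search over those triples. A minimal sketch under that assumption; the helper name lookupProperty is hypothetical and not part of the vendored code:

```go
package uniseg

// lookupProperty is a hypothetical sketch of a lookup over a sorted,
// non-overlapping range table such as graphemeCodePoints. Each entry is a
// {from, to, property} triple; code points not covered by any entry fall
// back to the default prAny property.
func lookupProperty(table [][3]int, r rune) int {
	lo, hi := 0, len(table)-1
	for lo <= hi {
		mid := (lo + hi) / 2
		entry := table[mid]
		switch {
		case int(r) < entry[0]: // left of this range
			hi = mid - 1
		case int(r) > entry[1]: // right of this range
			lo = mid + 1
		default: // inside the range
			return entry[2]
		}
	}
	return prAny
}
```
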
diff --git a/vendor/github.com/rivo/uniseg/graphemerules.go b/vendor/github.com/rivo/uniseg/graphemerules.go
new file mode 100644
index 000000000..5d399d29c
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/graphemerules.go
@@ -0,0 +1,176 @@
+package uniseg
+
+// The states of the grapheme cluster parser.
+const (
+ grAny = iota
+ grCR
+ grControlLF
+ grL
+ grLVV
+ grLVTT
+ grPrepend
+ grExtendedPictographic
+ grExtendedPictographicZWJ
+ grRIOdd
+ grRIEven
+)
+
+// The grapheme cluster parser's breaking instructions.
+const (
+ grNoBoundary = iota
+ grBoundary
+)
+
+// grTransitions implements the grapheme cluster parser's state transitions.
+// Maps state and property to a new state, a breaking instruction, and rule
+// number. The breaking instruction always refers to the boundary between the
+// last and next code point. Returns negative values if no transition is found.
+//
+// This function is used as follows:
+//
+// 1. Find specific state + specific property. Stop if found.
+// 2. Find specific state + any property.
+// 3. Find any state + specific property.
+// 4. If only (2) or (3) (but not both) was found, stop.
+// 5. If both (2) and (3) were found, use state from (3) and breaking instruction
+// from the transition with the lower rule number, prefer (3) if rule numbers
+// are equal. Stop.
+// 6. Assume grAny and grBoundary.
+//
+// Unicode version 15.0.0.
+func grTransitions(state, prop int) (newState int, newProp int, boundary int) {
+ // It turns out that using a big switch statement is much faster than using
+ // a map.
+
+ switch uint64(state) | uint64(prop)<<32 {
+ // GB5
+ case grAny | prCR<<32:
+ return grCR, grBoundary, 50
+ case grAny | prLF<<32:
+ return grControlLF, grBoundary, 50
+ case grAny | prControl<<32:
+ return grControlLF, grBoundary, 50
+
+ // GB4
+ case grCR | prAny<<32:
+ return grAny, grBoundary, 40
+ case grControlLF | prAny<<32:
+ return grAny, grBoundary, 40
+
+ // GB3
+ case grCR | prLF<<32:
+ return grControlLF, grNoBoundary, 30
+
+ // GB6
+ case grAny | prL<<32:
+ return grL, grBoundary, 9990
+ case grL | prL<<32:
+ return grL, grNoBoundary, 60
+ case grL | prV<<32:
+ return grLVV, grNoBoundary, 60
+ case grL | prLV<<32:
+ return grLVV, grNoBoundary, 60
+ case grL | prLVT<<32:
+ return grLVTT, grNoBoundary, 60
+
+ // GB7
+ case grAny | prLV<<32:
+ return grLVV, grBoundary, 9990
+ case grAny | prV<<32:
+ return grLVV, grBoundary, 9990
+ case grLVV | prV<<32:
+ return grLVV, grNoBoundary, 70
+ case grLVV | prT<<32:
+ return grLVTT, grNoBoundary, 70
+
+ // GB8
+ case grAny | prLVT<<32:
+ return grLVTT, grBoundary, 9990
+ case grAny | prT<<32:
+ return grLVTT, grBoundary, 9990
+ case grLVTT | prT<<32:
+ return grLVTT, grNoBoundary, 80
+
+ // GB9
+ case grAny | prExtend<<32:
+ return grAny, grNoBoundary, 90
+ case grAny | prZWJ<<32:
+ return grAny, grNoBoundary, 90
+
+ // GB9a
+ case grAny | prSpacingMark<<32:
+ return grAny, grNoBoundary, 91
+
+ // GB9b
+ case grAny | prPrepend<<32:
+ return grPrepend, grBoundary, 9990
+ case grPrepend | prAny<<32:
+ return grAny, grNoBoundary, 92
+
+ // GB11
+ case grAny | prExtendedPictographic<<32:
+ return grExtendedPictographic, grBoundary, 9990
+ case grExtendedPictographic | prExtend<<32:
+ return grExtendedPictographic, grNoBoundary, 110
+ case grExtendedPictographic | prZWJ<<32:
+ return grExtendedPictographicZWJ, grNoBoundary, 110
+ case grExtendedPictographicZWJ | prExtendedPictographic<<32:
+ return grExtendedPictographic, grNoBoundary, 110
+
+ // GB12 / GB13
+ case grAny | prRegionalIndicator<<32:
+ return grRIOdd, grBoundary, 9990
+ case grRIOdd | prRegionalIndicator<<32:
+ return grRIEven, grNoBoundary, 120
+ case grRIEven | prRegionalIndicator<<32:
+ return grRIOdd, grBoundary, 120
+ default:
+ return -1, -1, -1
+ }
+}
+
+// transitionGraphemeState determines the new state of the grapheme cluster
+// parser given the current state and the next code point. It also returns the
+// code point's grapheme property (the value mapped by the [graphemeCodePoints]
+// table) and whether a cluster boundary was detected.
+func transitionGraphemeState(state int, r rune) (newState, prop int, boundary bool) {
+ // Determine the property of the next character.
+ prop = propertyGraphemes(r)
+
+ // Find the applicable transition.
+ nextState, nextProp, _ := grTransitions(state, prop)
+ if nextState >= 0 {
+ // We have a specific transition. We'll use it.
+ return nextState, prop, nextProp == grBoundary
+ }
+
+ // No specific transition found. Try the less specific ones.
+ anyPropState, anyPropProp, anyPropRule := grTransitions(state, prAny)
+ anyStateState, anyStateProp, anyStateRule := grTransitions(grAny, prop)
+ if anyPropState >= 0 && anyStateState >= 0 {
+ // Both apply. We'll use a mix (see comments for grTransitions).
+ newState = anyStateState
+ boundary = anyStateProp == grBoundary
+ if anyPropRule < anyStateRule {
+ boundary = anyPropProp == grBoundary
+ }
+ return
+ }
+
+ if anyPropState >= 0 {
+ // We only have a specific state.
+ return anyPropState, prop, anyPropProp == grBoundary
+		// This branch will probably never be reached because the (grAny, prop)
+		// lookup above always finds a transition given the current transition
+		// map. But we keep it here for future modifications to the transition
+		// map where this may not be true anymore.
+ }
+
+ if anyStateState >= 0 {
+ // We only have a specific property.
+ return anyStateState, prop, anyStateProp == grBoundary
+ }
+
+ // No known transition. GB999: Any ÷ Any.
+ return grAny, prop, true
+}
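
The rules file above is easiest to read alongside a small driver: starting from grAny and feeding runes through transitionGraphemeState, a boundary is reported exactly where a new grapheme cluster starts. A short in-package sketch under that assumption (countClusters is a hypothetical helper, not part of the vendored code):

```go
package uniseg

// countClusters walks a string rune by rune through transitionGraphemeState,
// starting in the grAny state, and counts how many times a cluster boundary
// is reported, i.e. how many grapheme clusters the string contains.
func countClusters(s string) int {
	state, clusters := grAny, 0
	for _, r := range s {
		var boundary bool
		state, _, boundary = transitionGraphemeState(state, r)
		if boundary {
			clusters++
		}
	}
	return clusters
}
```

For example, countClusters("e\u0301") should report 1: the combining acute accent maps to prExtend, and the GB9 case above keeps it in the same cluster as the base letter.
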
diff --git a/vendor/github.com/rivo/uniseg/line.go b/vendor/github.com/rivo/uniseg/line.go
new file mode 100644
index 000000000..7a46318d9
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/line.go
@@ -0,0 +1,134 @@
+package uniseg
+
+import "unicode/utf8"
+
+// FirstLineSegment returns the prefix of the given byte slice after which a
+// decision to break the string over to the next line can or must be made,
+// according to the rules of [Unicode Standard Annex #14]. This is used to
+// implement line breaking.
+//
+// Line breaking, also known as word wrapping, is the process of breaking a
+// section of text into lines such that it will fit in the available width of a
+// page, window or other display area.
+//
+// The returned "segment" may not be broken into smaller parts, unless no other
+// breaking opportunities present themselves, in which case you may break by
+// grapheme clusters (using the [FirstGraphemeCluster] function to determine the
+// grapheme clusters).
+//
+// The "mustBreak" flag indicates whether you MUST break the line after the
+// given segment (true), for example after newline characters, or you MAY break
+// the line after the given segment (false).
+//
+// This function can be called continuously to extract all non-breaking sub-sets
+// from a byte slice, as illustrated in the example below.
+//
+// If you don't know the current state, for example when calling the function
+// for the first time, you must pass -1. For consecutive calls, pass the state
+// and rest slice returned by the previous call.
+//
+// The "rest" slice is the sub-slice of the original byte slice "b" starting
+// after the last byte of the identified line segment. If the length of the
+// "rest" slice is 0, the entire byte slice "b" has been processed. The
+// "segment" byte slice is the sub-slice of the input slice containing the
+// identified line segment.
+//
+// Given an empty byte slice "b", the function returns nil values.
+//
+// Note that in accordance with [UAX #14 LB3], the final segment will end with
+// "mustBreak" set to true. You can choose to ignore this by checking if the
+// length of the "rest" slice is 0 and calling [HasTrailingLineBreak] or
+// [HasTrailingLineBreakInString] on the last rune.
+//
+// Note also that this algorithm may break within grapheme clusters. This is
+// addressed in Section 8.2 Example 6 of UAX #14. To avoid this, you can use
+// the [Step] function instead.
+//
+// [Unicode Standard Annex #14]: https://www.unicode.org/reports/tr14/
+// [UAX #14 LB3]: https://www.unicode.org/reports/tr14/#Algorithm
+func FirstLineSegment(b []byte, state int) (segment, rest []byte, mustBreak bool, newState int) {
+ // An empty byte slice returns nothing.
+ if len(b) == 0 {
+ return
+ }
+
+ // Extract the first rune.
+ r, length := utf8.DecodeRune(b)
+ if len(b) <= length { // If we're already past the end, there is nothing else to parse.
+ return b, nil, true, lbAny // LB3.
+ }
+
+ // If we don't know the state, determine it now.
+ if state < 0 {
+ state, _ = transitionLineBreakState(state, r, b[length:], "")
+ }
+
+ // Transition until we find a boundary.
+ var boundary int
+ for {
+ r, l := utf8.DecodeRune(b[length:])
+ state, boundary = transitionLineBreakState(state, r, b[length+l:], "")
+
+ if boundary != LineDontBreak {
+ return b[:length], b[length:], boundary == LineMustBreak, state
+ }
+
+ length += l
+ if len(b) <= length {
+ return b, nil, true, lbAny // LB3
+ }
+ }
+}
+
+// FirstLineSegmentInString is like [FirstLineSegment] but its input and outputs
+// are strings.
+func FirstLineSegmentInString(str string, state int) (segment, rest string, mustBreak bool, newState int) {
+	// An empty string returns nothing.
+ if len(str) == 0 {
+ return
+ }
+
+ // Extract the first rune.
+ r, length := utf8.DecodeRuneInString(str)
+ if len(str) <= length { // If we're already past the end, there is nothing else to parse.
+ return str, "", true, lbAny // LB3.
+ }
+
+ // If we don't know the state, determine it now.
+ if state < 0 {
+ state, _ = transitionLineBreakState(state, r, nil, str[length:])
+ }
+
+ // Transition until we find a boundary.
+ var boundary int
+ for {
+ r, l := utf8.DecodeRuneInString(str[length:])
+ state, boundary = transitionLineBreakState(state, r, nil, str[length+l:])
+
+ if boundary != LineDontBreak {
+ return str[:length], str[length:], boundary == LineMustBreak, state
+ }
+
+ length += l
+ if len(str) <= length {
+ return str, "", true, lbAny // LB3.
+ }
+ }
+}
+
+// HasTrailingLineBreak returns true if the last rune in the given byte slice is
+// one of the hard line break code points defined in LB4 and LB5 of [UAX #14].
+//
+// [UAX #14]: https://www.unicode.org/reports/tr14/#Algorithm
+func HasTrailingLineBreak(b []byte) bool {
+ r, _ := utf8.DecodeLastRune(b)
+ property, _ := propertyLineBreak(r)
+ return property == prBK || property == prCR || property == prLF || property == prNL
+}
+
+// HasTrailingLineBreakInString is like [HasTrailingLineBreak] but for a string.
+func HasTrailingLineBreakInString(str string) bool {
+ r, _ := utf8.DecodeLastRuneInString(str)
+ property, _ := propertyLineBreak(r)
+ return property == prBK || property == prCR || property == prLF || property == prNL
+}
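
The exported helpers above are meant to be called in a loop, threading the returned state into the next call; mustBreak distinguishes mandatory breaks (e.g. after a newline) from mere break opportunities. A usage sketch based only on the signatures added in this file:

```go
package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	text := "First line.\nA second, much longer line of text."
	state := -1 // unknown parser state on the first call
	var segment string
	var mustBreak bool
	for len(text) > 0 {
		// Each call yields the next unbreakable segment, the remaining text,
		// whether the break after the segment is mandatory, and the new state.
		segment, text, mustBreak, state = uniseg.FirstLineSegmentInString(text, state)
		fmt.Printf("%q mustBreak=%v\n", segment, mustBreak)
	}
}
```

Per the LB3 note above, the final segment always reports mustBreak as true; HasTrailingLineBreakInString can then be used to check whether the input itself ended in a hard line break.
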
diff --git a/vendor/github.com/rivo/uniseg/lineproperties.go b/vendor/github.com/rivo/uniseg/lineproperties.go
new file mode 100644
index 000000000..ac7fac4c0
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/lineproperties.go
@@ -0,0 +1,3554 @@
+// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+package uniseg
+
+// lineBreakCodePoints are taken from
+// https://www.unicode.org/Public/15.0.0/ucd/LineBreak.txt
+// and
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
+// ("Extended_Pictographic" only)
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var lineBreakCodePoints = [][4]int{
+ {0x0000, 0x0008, prCM, gcCc}, // [9] ..
+ {0x0009, 0x0009, prBA, gcCc}, //
+ {0x000A, 0x000A, prLF, gcCc}, //
+ {0x000B, 0x000C, prBK, gcCc}, // [2] ..
+ {0x000D, 0x000D, prCR, gcCc}, //
+ {0x000E, 0x001F, prCM, gcCc}, // [18] ..
+ {0x0020, 0x0020, prSP, gcZs}, // SPACE
+ {0x0021, 0x0021, prEX, gcPo}, // EXCLAMATION MARK
+ {0x0022, 0x0022, prQU, gcPo}, // QUOTATION MARK
+ {0x0023, 0x0023, prAL, gcPo}, // NUMBER SIGN
+ {0x0024, 0x0024, prPR, gcSc}, // DOLLAR SIGN
+ {0x0025, 0x0025, prPO, gcPo}, // PERCENT SIGN
+ {0x0026, 0x0026, prAL, gcPo}, // AMPERSAND
+ {0x0027, 0x0027, prQU, gcPo}, // APOSTROPHE
+ {0x0028, 0x0028, prOP, gcPs}, // LEFT PARENTHESIS
+ {0x0029, 0x0029, prCP, gcPe}, // RIGHT PARENTHESIS
+ {0x002A, 0x002A, prAL, gcPo}, // ASTERISK
+ {0x002B, 0x002B, prPR, gcSm}, // PLUS SIGN
+ {0x002C, 0x002C, prIS, gcPo}, // COMMA
+ {0x002D, 0x002D, prHY, gcPd}, // HYPHEN-MINUS
+ {0x002E, 0x002E, prIS, gcPo}, // FULL STOP
+ {0x002F, 0x002F, prSY, gcPo}, // SOLIDUS
+ {0x0030, 0x0039, prNU, gcNd}, // [10] DIGIT ZERO..DIGIT NINE
+ {0x003A, 0x003B, prIS, gcPo}, // [2] COLON..SEMICOLON
+ {0x003C, 0x003E, prAL, gcSm}, // [3] LESS-THAN SIGN..GREATER-THAN SIGN
+ {0x003F, 0x003F, prEX, gcPo}, // QUESTION MARK
+ {0x0040, 0x0040, prAL, gcPo}, // COMMERCIAL AT
+ {0x0041, 0x005A, prAL, gcLu}, // [26] LATIN CAPITAL LETTER A..LATIN CAPITAL LETTER Z
+ {0x005B, 0x005B, prOP, gcPs}, // LEFT SQUARE BRACKET
+ {0x005C, 0x005C, prPR, gcPo}, // REVERSE SOLIDUS
+ {0x005D, 0x005D, prCP, gcPe}, // RIGHT SQUARE BRACKET
+ {0x005E, 0x005E, prAL, gcSk}, // CIRCUMFLEX ACCENT
+ {0x005F, 0x005F, prAL, gcPc}, // LOW LINE
+ {0x0060, 0x0060, prAL, gcSk}, // GRAVE ACCENT
+ {0x0061, 0x007A, prAL, gcLl}, // [26] LATIN SMALL LETTER A..LATIN SMALL LETTER Z
+ {0x007B, 0x007B, prOP, gcPs}, // LEFT CURLY BRACKET
+ {0x007C, 0x007C, prBA, gcSm}, // VERTICAL LINE
+ {0x007D, 0x007D, prCL, gcPe}, // RIGHT CURLY BRACKET
+ {0x007E, 0x007E, prAL, gcSm}, // TILDE
+ {0x007F, 0x007F, prCM, gcCc}, //
+ {0x0080, 0x0084, prCM, gcCc}, // [5] ..
+ {0x0085, 0x0085, prNL, gcCc}, //
+ {0x0086, 0x009F, prCM, gcCc}, // [26] ..
+ {0x00A0, 0x00A0, prGL, gcZs}, // NO-BREAK SPACE
+ {0x00A1, 0x00A1, prOP, gcPo}, // INVERTED EXCLAMATION MARK
+ {0x00A2, 0x00A2, prPO, gcSc}, // CENT SIGN
+ {0x00A3, 0x00A5, prPR, gcSc}, // [3] POUND SIGN..YEN SIGN
+ {0x00A6, 0x00A6, prAL, gcSo}, // BROKEN BAR
+ {0x00A7, 0x00A7, prAI, gcPo}, // SECTION SIGN
+ {0x00A8, 0x00A8, prAI, gcSk}, // DIAERESIS
+ {0x00A9, 0x00A9, prAL, gcSo}, // COPYRIGHT SIGN
+ {0x00AA, 0x00AA, prAI, gcLo}, // FEMININE ORDINAL INDICATOR
+ {0x00AB, 0x00AB, prQU, gcPi}, // LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+ {0x00AC, 0x00AC, prAL, gcSm}, // NOT SIGN
+ {0x00AD, 0x00AD, prBA, gcCf}, // SOFT HYPHEN
+ {0x00AE, 0x00AE, prAL, gcSo}, // REGISTERED SIGN
+ {0x00AF, 0x00AF, prAL, gcSk}, // MACRON
+ {0x00B0, 0x00B0, prPO, gcSo}, // DEGREE SIGN
+ {0x00B1, 0x00B1, prPR, gcSm}, // PLUS-MINUS SIGN
+ {0x00B2, 0x00B3, prAI, gcNo}, // [2] SUPERSCRIPT TWO..SUPERSCRIPT THREE
+ {0x00B4, 0x00B4, prBB, gcSk}, // ACUTE ACCENT
+ {0x00B5, 0x00B5, prAL, gcLl}, // MICRO SIGN
+ {0x00B6, 0x00B7, prAI, gcPo}, // [2] PILCROW SIGN..MIDDLE DOT
+ {0x00B8, 0x00B8, prAI, gcSk}, // CEDILLA
+ {0x00B9, 0x00B9, prAI, gcNo}, // SUPERSCRIPT ONE
+ {0x00BA, 0x00BA, prAI, gcLo}, // MASCULINE ORDINAL INDICATOR
+ {0x00BB, 0x00BB, prQU, gcPf}, // RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+ {0x00BC, 0x00BE, prAI, gcNo}, // [3] VULGAR FRACTION ONE QUARTER..VULGAR FRACTION THREE QUARTERS
+ {0x00BF, 0x00BF, prOP, gcPo}, // INVERTED QUESTION MARK
+ {0x00C0, 0x00D6, prAL, gcLu}, // [23] LATIN CAPITAL LETTER A WITH GRAVE..LATIN CAPITAL LETTER O WITH DIAERESIS
+ {0x00D7, 0x00D7, prAI, gcSm}, // MULTIPLICATION SIGN
+ {0x00D8, 0x00F6, prAL, gcLC}, // [31] LATIN CAPITAL LETTER O WITH STROKE..LATIN SMALL LETTER O WITH DIAERESIS
+ {0x00F7, 0x00F7, prAI, gcSm}, // DIVISION SIGN
+ {0x00F8, 0x00FF, prAL, gcLl}, // [8] LATIN SMALL LETTER O WITH STROKE..LATIN SMALL LETTER Y WITH DIAERESIS
+ {0x0100, 0x017F, prAL, gcLC}, // [128] LATIN CAPITAL LETTER A WITH MACRON..LATIN SMALL LETTER LONG S
+ {0x0180, 0x01BA, prAL, gcLC}, // [59] LATIN SMALL LETTER B WITH STROKE..LATIN SMALL LETTER EZH WITH TAIL
+ {0x01BB, 0x01BB, prAL, gcLo}, // LATIN LETTER TWO WITH STROKE
+ {0x01BC, 0x01BF, prAL, gcLC}, // [4] LATIN CAPITAL LETTER TONE FIVE..LATIN LETTER WYNN
+ {0x01C0, 0x01C3, prAL, gcLo}, // [4] LATIN LETTER DENTAL CLICK..LATIN LETTER RETROFLEX CLICK
+ {0x01C4, 0x024F, prAL, gcLC}, // [140] LATIN CAPITAL LETTER DZ WITH CARON..LATIN SMALL LETTER Y WITH STROKE
+ {0x0250, 0x0293, prAL, gcLl}, // [68] LATIN SMALL LETTER TURNED A..LATIN SMALL LETTER EZH WITH CURL
+ {0x0294, 0x0294, prAL, gcLo}, // LATIN LETTER GLOTTAL STOP
+ {0x0295, 0x02AF, prAL, gcLl}, // [27] LATIN LETTER PHARYNGEAL VOICED FRICATIVE..LATIN SMALL LETTER TURNED H WITH FISHHOOK AND TAIL
+ {0x02B0, 0x02C1, prAL, gcLm}, // [18] MODIFIER LETTER SMALL H..MODIFIER LETTER REVERSED GLOTTAL STOP
+ {0x02C2, 0x02C5, prAL, gcSk}, // [4] MODIFIER LETTER LEFT ARROWHEAD..MODIFIER LETTER DOWN ARROWHEAD
+ {0x02C6, 0x02C6, prAL, gcLm}, // MODIFIER LETTER CIRCUMFLEX ACCENT
+ {0x02C7, 0x02C7, prAI, gcLm}, // CARON
+ {0x02C8, 0x02C8, prBB, gcLm}, // MODIFIER LETTER VERTICAL LINE
+ {0x02C9, 0x02CB, prAI, gcLm}, // [3] MODIFIER LETTER MACRON..MODIFIER LETTER GRAVE ACCENT
+ {0x02CC, 0x02CC, prBB, gcLm}, // MODIFIER LETTER LOW VERTICAL LINE
+ {0x02CD, 0x02CD, prAI, gcLm}, // MODIFIER LETTER LOW MACRON
+ {0x02CE, 0x02CF, prAL, gcLm}, // [2] MODIFIER LETTER LOW GRAVE ACCENT..MODIFIER LETTER LOW ACUTE ACCENT
+ {0x02D0, 0x02D0, prAI, gcLm}, // MODIFIER LETTER TRIANGULAR COLON
+ {0x02D1, 0x02D1, prAL, gcLm}, // MODIFIER LETTER HALF TRIANGULAR COLON
+ {0x02D2, 0x02D7, prAL, gcSk}, // [6] MODIFIER LETTER CENTRED RIGHT HALF RING..MODIFIER LETTER MINUS SIGN
+ {0x02D8, 0x02DB, prAI, gcSk}, // [4] BREVE..OGONEK
+ {0x02DC, 0x02DC, prAL, gcSk}, // SMALL TILDE
+ {0x02DD, 0x02DD, prAI, gcSk}, // DOUBLE ACUTE ACCENT
+ {0x02DE, 0x02DE, prAL, gcSk}, // MODIFIER LETTER RHOTIC HOOK
+ {0x02DF, 0x02DF, prBB, gcSk}, // MODIFIER LETTER CROSS ACCENT
+ {0x02E0, 0x02E4, prAL, gcLm}, // [5] MODIFIER LETTER SMALL GAMMA..MODIFIER LETTER SMALL REVERSED GLOTTAL STOP
+ {0x02E5, 0x02EB, prAL, gcSk}, // [7] MODIFIER LETTER EXTRA-HIGH TONE BAR..MODIFIER LETTER YANG DEPARTING TONE MARK
+ {0x02EC, 0x02EC, prAL, gcLm}, // MODIFIER LETTER VOICING
+ {0x02ED, 0x02ED, prAL, gcSk}, // MODIFIER LETTER UNASPIRATED
+ {0x02EE, 0x02EE, prAL, gcLm}, // MODIFIER LETTER DOUBLE APOSTROPHE
+ {0x02EF, 0x02FF, prAL, gcSk}, // [17] MODIFIER LETTER LOW DOWN ARROWHEAD..MODIFIER LETTER LOW LEFT ARROW
+ {0x0300, 0x034E, prCM, gcMn}, // [79] COMBINING GRAVE ACCENT..COMBINING UPWARDS ARROW BELOW
+ {0x034F, 0x034F, prGL, gcMn}, // COMBINING GRAPHEME JOINER
+ {0x0350, 0x035B, prCM, gcMn}, // [12] COMBINING RIGHT ARROWHEAD ABOVE..COMBINING ZIGZAG ABOVE
+ {0x035C, 0x0362, prGL, gcMn}, // [7] COMBINING DOUBLE BREVE BELOW..COMBINING DOUBLE RIGHTWARDS ARROW BELOW
+ {0x0363, 0x036F, prCM, gcMn}, // [13] COMBINING LATIN SMALL LETTER A..COMBINING LATIN SMALL LETTER X
+ {0x0370, 0x0373, prAL, gcLC}, // [4] GREEK CAPITAL LETTER HETA..GREEK SMALL LETTER ARCHAIC SAMPI
+ {0x0374, 0x0374, prAL, gcLm}, // GREEK NUMERAL SIGN
+ {0x0375, 0x0375, prAL, gcSk}, // GREEK LOWER NUMERAL SIGN
+ {0x0376, 0x0377, prAL, gcLC}, // [2] GREEK CAPITAL LETTER PAMPHYLIAN DIGAMMA..GREEK SMALL LETTER PAMPHYLIAN DIGAMMA
+ {0x037A, 0x037A, prAL, gcLm}, // GREEK YPOGEGRAMMENI
+ {0x037B, 0x037D, prAL, gcLl}, // [3] GREEK SMALL REVERSED LUNATE SIGMA SYMBOL..GREEK SMALL REVERSED DOTTED LUNATE SIGMA SYMBOL
+ {0x037E, 0x037E, prIS, gcPo}, // GREEK QUESTION MARK
+ {0x037F, 0x037F, prAL, gcLu}, // GREEK CAPITAL LETTER YOT
+ {0x0384, 0x0385, prAL, gcSk}, // [2] GREEK TONOS..GREEK DIALYTIKA TONOS
+ {0x0386, 0x0386, prAL, gcLu}, // GREEK CAPITAL LETTER ALPHA WITH TONOS
+ {0x0387, 0x0387, prAL, gcPo}, // GREEK ANO TELEIA
+ {0x0388, 0x038A, prAL, gcLu}, // [3] GREEK CAPITAL LETTER EPSILON WITH TONOS..GREEK CAPITAL LETTER IOTA WITH TONOS
+ {0x038C, 0x038C, prAL, gcLu}, // GREEK CAPITAL LETTER OMICRON WITH TONOS
+ {0x038E, 0x03A1, prAL, gcLC}, // [20] GREEK CAPITAL LETTER UPSILON WITH TONOS..GREEK CAPITAL LETTER RHO
+ {0x03A3, 0x03F5, prAL, gcLC}, // [83] GREEK CAPITAL LETTER SIGMA..GREEK LUNATE EPSILON SYMBOL
+ {0x03F6, 0x03F6, prAL, gcSm}, // GREEK REVERSED LUNATE EPSILON SYMBOL
+ {0x03F7, 0x03FF, prAL, gcLC}, // [9] GREEK CAPITAL LETTER SHO..GREEK CAPITAL REVERSED DOTTED LUNATE SIGMA SYMBOL
+ {0x0400, 0x0481, prAL, gcLC}, // [130] CYRILLIC CAPITAL LETTER IE WITH GRAVE..CYRILLIC SMALL LETTER KOPPA
+ {0x0482, 0x0482, prAL, gcSo}, // CYRILLIC THOUSANDS SIGN
+ {0x0483, 0x0487, prCM, gcMn}, // [5] COMBINING CYRILLIC TITLO..COMBINING CYRILLIC POKRYTIE
+ {0x0488, 0x0489, prCM, gcMe}, // [2] COMBINING CYRILLIC HUNDRED THOUSANDS SIGN..COMBINING CYRILLIC MILLIONS SIGN
+ {0x048A, 0x04FF, prAL, gcLC}, // [118] CYRILLIC CAPITAL LETTER SHORT I WITH TAIL..CYRILLIC SMALL LETTER HA WITH STROKE
+ {0x0500, 0x052F, prAL, gcLC}, // [48] CYRILLIC CAPITAL LETTER KOMI DE..CYRILLIC SMALL LETTER EL WITH DESCENDER
+ {0x0531, 0x0556, prAL, gcLu}, // [38] ARMENIAN CAPITAL LETTER AYB..ARMENIAN CAPITAL LETTER FEH
+ {0x0559, 0x0559, prAL, gcLm}, // ARMENIAN MODIFIER LETTER LEFT HALF RING
+ {0x055A, 0x055F, prAL, gcPo}, // [6] ARMENIAN APOSTROPHE..ARMENIAN ABBREVIATION MARK
+ {0x0560, 0x0588, prAL, gcLl}, // [41] ARMENIAN SMALL LETTER TURNED AYB..ARMENIAN SMALL LETTER YI WITH STROKE
+ {0x0589, 0x0589, prIS, gcPo}, // ARMENIAN FULL STOP
+ {0x058A, 0x058A, prBA, gcPd}, // ARMENIAN HYPHEN
+ {0x058D, 0x058E, prAL, gcSo}, // [2] RIGHT-FACING ARMENIAN ETERNITY SIGN..LEFT-FACING ARMENIAN ETERNITY SIGN
+ {0x058F, 0x058F, prPR, gcSc}, // ARMENIAN DRAM SIGN
+ {0x0591, 0x05BD, prCM, gcMn}, // [45] HEBREW ACCENT ETNAHTA..HEBREW POINT METEG
+ {0x05BE, 0x05BE, prBA, gcPd}, // HEBREW PUNCTUATION MAQAF
+ {0x05BF, 0x05BF, prCM, gcMn}, // HEBREW POINT RAFE
+ {0x05C0, 0x05C0, prAL, gcPo}, // HEBREW PUNCTUATION PASEQ
+ {0x05C1, 0x05C2, prCM, gcMn}, // [2] HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT
+ {0x05C3, 0x05C3, prAL, gcPo}, // HEBREW PUNCTUATION SOF PASUQ
+ {0x05C4, 0x05C5, prCM, gcMn}, // [2] HEBREW MARK UPPER DOT..HEBREW MARK LOWER DOT
+ {0x05C6, 0x05C6, prEX, gcPo}, // HEBREW PUNCTUATION NUN HAFUKHA
+ {0x05C7, 0x05C7, prCM, gcMn}, // HEBREW POINT QAMATS QATAN
+ {0x05D0, 0x05EA, prHL, gcLo}, // [27] HEBREW LETTER ALEF..HEBREW LETTER TAV
+ {0x05EF, 0x05F2, prHL, gcLo}, // [4] HEBREW YOD TRIANGLE..HEBREW LIGATURE YIDDISH DOUBLE YOD
+ {0x05F3, 0x05F4, prAL, gcPo}, // [2] HEBREW PUNCTUATION GERESH..HEBREW PUNCTUATION GERSHAYIM
+ {0x0600, 0x0605, prAL, gcCf}, // [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE
+ {0x0606, 0x0608, prAL, gcSm}, // [3] ARABIC-INDIC CUBE ROOT..ARABIC RAY
+ {0x0609, 0x060A, prPO, gcPo}, // [2] ARABIC-INDIC PER MILLE SIGN..ARABIC-INDIC PER TEN THOUSAND SIGN
+ {0x060B, 0x060B, prPO, gcSc}, // AFGHANI SIGN
+ {0x060C, 0x060D, prIS, gcPo}, // [2] ARABIC COMMA..ARABIC DATE SEPARATOR
+ {0x060E, 0x060F, prAL, gcSo}, // [2] ARABIC POETIC VERSE SIGN..ARABIC SIGN MISRA
+ {0x0610, 0x061A, prCM, gcMn}, // [11] ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM..ARABIC SMALL KASRA
+ {0x061B, 0x061B, prEX, gcPo}, // ARABIC SEMICOLON
+ {0x061C, 0x061C, prCM, gcCf}, // ARABIC LETTER MARK
+ {0x061D, 0x061F, prEX, gcPo}, // [3] ARABIC END OF TEXT MARK..ARABIC QUESTION MARK
+ {0x0620, 0x063F, prAL, gcLo}, // [32] ARABIC LETTER KASHMIRI YEH..ARABIC LETTER FARSI YEH WITH THREE DOTS ABOVE
+ {0x0640, 0x0640, prAL, gcLm}, // ARABIC TATWEEL
+ {0x0641, 0x064A, prAL, gcLo}, // [10] ARABIC LETTER FEH..ARABIC LETTER YEH
+ {0x064B, 0x065F, prCM, gcMn}, // [21] ARABIC FATHATAN..ARABIC WAVY HAMZA BELOW
+ {0x0660, 0x0669, prNU, gcNd}, // [10] ARABIC-INDIC DIGIT ZERO..ARABIC-INDIC DIGIT NINE
+ {0x066A, 0x066A, prPO, gcPo}, // ARABIC PERCENT SIGN
+ {0x066B, 0x066C, prNU, gcPo}, // [2] ARABIC DECIMAL SEPARATOR..ARABIC THOUSANDS SEPARATOR
+ {0x066D, 0x066D, prAL, gcPo}, // ARABIC FIVE POINTED STAR
+ {0x066E, 0x066F, prAL, gcLo}, // [2] ARABIC LETTER DOTLESS BEH..ARABIC LETTER DOTLESS QAF
+ {0x0670, 0x0670, prCM, gcMn}, // ARABIC LETTER SUPERSCRIPT ALEF
+ {0x0671, 0x06D3, prAL, gcLo}, // [99] ARABIC LETTER ALEF WASLA..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE
+ {0x06D4, 0x06D4, prEX, gcPo}, // ARABIC FULL STOP
+ {0x06D5, 0x06D5, prAL, gcLo}, // ARABIC LETTER AE
+ {0x06D6, 0x06DC, prCM, gcMn}, // [7] ARABIC SMALL HIGH LIGATURE SAD WITH LAM WITH ALEF MAKSURA..ARABIC SMALL HIGH SEEN
+ {0x06DD, 0x06DD, prAL, gcCf}, // ARABIC END OF AYAH
+ {0x06DE, 0x06DE, prAL, gcSo}, // ARABIC START OF RUB EL HIZB
+ {0x06DF, 0x06E4, prCM, gcMn}, // [6] ARABIC SMALL HIGH ROUNDED ZERO..ARABIC SMALL HIGH MADDA
+ {0x06E5, 0x06E6, prAL, gcLm}, // [2] ARABIC SMALL WAW..ARABIC SMALL YEH
+ {0x06E7, 0x06E8, prCM, gcMn}, // [2] ARABIC SMALL HIGH YEH..ARABIC SMALL HIGH NOON
+ {0x06E9, 0x06E9, prAL, gcSo}, // ARABIC PLACE OF SAJDAH
+ {0x06EA, 0x06ED, prCM, gcMn}, // [4] ARABIC EMPTY CENTRE LOW STOP..ARABIC SMALL LOW MEEM
+ {0x06EE, 0x06EF, prAL, gcLo}, // [2] ARABIC LETTER DAL WITH INVERTED V..ARABIC LETTER REH WITH INVERTED V
+ {0x06F0, 0x06F9, prNU, gcNd}, // [10] EXTENDED ARABIC-INDIC DIGIT ZERO..EXTENDED ARABIC-INDIC DIGIT NINE
+ {0x06FA, 0x06FC, prAL, gcLo}, // [3] ARABIC LETTER SHEEN WITH DOT BELOW..ARABIC LETTER GHAIN WITH DOT BELOW
+ {0x06FD, 0x06FE, prAL, gcSo}, // [2] ARABIC SIGN SINDHI AMPERSAND..ARABIC SIGN SINDHI POSTPOSITION MEN
+ {0x06FF, 0x06FF, prAL, gcLo}, // ARABIC LETTER HEH WITH INVERTED V
+ {0x0700, 0x070D, prAL, gcPo}, // [14] SYRIAC END OF PARAGRAPH..SYRIAC HARKLEAN ASTERISCUS
+ {0x070F, 0x070F, prAL, gcCf}, // SYRIAC ABBREVIATION MARK
+ {0x0710, 0x0710, prAL, gcLo}, // SYRIAC LETTER ALAPH
+ {0x0711, 0x0711, prCM, gcMn}, // SYRIAC LETTER SUPERSCRIPT ALAPH
+ {0x0712, 0x072F, prAL, gcLo}, // [30] SYRIAC LETTER BETH..SYRIAC LETTER PERSIAN DHALATH
+ {0x0730, 0x074A, prCM, gcMn}, // [27] SYRIAC PTHAHA ABOVE..SYRIAC BARREKH
+ {0x074D, 0x074F, prAL, gcLo}, // [3] SYRIAC LETTER SOGDIAN ZHAIN..SYRIAC LETTER SOGDIAN FE
+ {0x0750, 0x077F, prAL, gcLo}, // [48] ARABIC LETTER BEH WITH THREE DOTS HORIZONTALLY BELOW..ARABIC LETTER KAF WITH TWO DOTS ABOVE
+ {0x0780, 0x07A5, prAL, gcLo}, // [38] THAANA LETTER HAA..THAANA LETTER WAAVU
+ {0x07A6, 0x07B0, prCM, gcMn}, // [11] THAANA ABAFILI..THAANA SUKUN
+ {0x07B1, 0x07B1, prAL, gcLo}, // THAANA LETTER NAA
+ {0x07C0, 0x07C9, prNU, gcNd}, // [10] NKO DIGIT ZERO..NKO DIGIT NINE
+ {0x07CA, 0x07EA, prAL, gcLo}, // [33] NKO LETTER A..NKO LETTER JONA RA
+ {0x07EB, 0x07F3, prCM, gcMn}, // [9] NKO COMBINING SHORT HIGH TONE..NKO COMBINING DOUBLE DOT ABOVE
+ {0x07F4, 0x07F5, prAL, gcLm}, // [2] NKO HIGH TONE APOSTROPHE..NKO LOW TONE APOSTROPHE
+ {0x07F6, 0x07F6, prAL, gcSo}, // NKO SYMBOL OO DENNEN
+ {0x07F7, 0x07F7, prAL, gcPo}, // NKO SYMBOL GBAKURUNEN
+ {0x07F8, 0x07F8, prIS, gcPo}, // NKO COMMA
+ {0x07F9, 0x07F9, prEX, gcPo}, // NKO EXCLAMATION MARK
+ {0x07FA, 0x07FA, prAL, gcLm}, // NKO LAJANYALAN
+ {0x07FD, 0x07FD, prCM, gcMn}, // NKO DANTAYALAN
+ {0x07FE, 0x07FF, prPR, gcSc}, // [2] NKO DOROME SIGN..NKO TAMAN SIGN
+ {0x0800, 0x0815, prAL, gcLo}, // [22] SAMARITAN LETTER ALAF..SAMARITAN LETTER TAAF
+ {0x0816, 0x0819, prCM, gcMn}, // [4] SAMARITAN MARK IN..SAMARITAN MARK DAGESH
+ {0x081A, 0x081A, prAL, gcLm}, // SAMARITAN MODIFIER LETTER EPENTHETIC YUT
+ {0x081B, 0x0823, prCM, gcMn}, // [9] SAMARITAN MARK EPENTHETIC YUT..SAMARITAN VOWEL SIGN A
+ {0x0824, 0x0824, prAL, gcLm}, // SAMARITAN MODIFIER LETTER SHORT A
+ {0x0825, 0x0827, prCM, gcMn}, // [3] SAMARITAN VOWEL SIGN SHORT A..SAMARITAN VOWEL SIGN U
+ {0x0828, 0x0828, prAL, gcLm}, // SAMARITAN MODIFIER LETTER I
+ {0x0829, 0x082D, prCM, gcMn}, // [5] SAMARITAN VOWEL SIGN LONG I..SAMARITAN MARK NEQUDAA
+ {0x0830, 0x083E, prAL, gcPo}, // [15] SAMARITAN PUNCTUATION NEQUDAA..SAMARITAN PUNCTUATION ANNAAU
+ {0x0840, 0x0858, prAL, gcLo}, // [25] MANDAIC LETTER HALQA..MANDAIC LETTER AIN
+ {0x0859, 0x085B, prCM, gcMn}, // [3] MANDAIC AFFRICATION MARK..MANDAIC GEMINATION MARK
+ {0x085E, 0x085E, prAL, gcPo}, // MANDAIC PUNCTUATION
+ {0x0860, 0x086A, prAL, gcLo}, // [11] SYRIAC LETTER MALAYALAM NGA..SYRIAC LETTER MALAYALAM SSA
+ {0x0870, 0x0887, prAL, gcLo}, // [24] ARABIC LETTER ALEF WITH ATTACHED FATHA..ARABIC BASELINE ROUND DOT
+ {0x0888, 0x0888, prAL, gcSk}, // ARABIC RAISED ROUND DOT
+ {0x0889, 0x088E, prAL, gcLo}, // [6] ARABIC LETTER NOON WITH INVERTED SMALL V..ARABIC VERTICAL TAIL
+ {0x0890, 0x0891, prAL, gcCf}, // [2] ARABIC POUND MARK ABOVE..ARABIC PIASTRE MARK ABOVE
+ {0x0898, 0x089F, prCM, gcMn}, // [8] ARABIC SMALL HIGH WORD AL-JUZ..ARABIC HALF MADDA OVER MADDA
+ {0x08A0, 0x08C8, prAL, gcLo}, // [41] ARABIC LETTER BEH WITH SMALL V BELOW..ARABIC LETTER GRAF
+ {0x08C9, 0x08C9, prAL, gcLm}, // ARABIC SMALL FARSI YEH
+ {0x08CA, 0x08E1, prCM, gcMn}, // [24] ARABIC SMALL HIGH FARSI YEH..ARABIC SMALL HIGH SIGN SAFHA
+ {0x08E2, 0x08E2, prAL, gcCf}, // ARABIC DISPUTED END OF AYAH
+ {0x08E3, 0x08FF, prCM, gcMn}, // [29] ARABIC TURNED DAMMA BELOW..ARABIC MARK SIDEWAYS NOON GHUNNA
+ {0x0900, 0x0902, prCM, gcMn}, // [3] DEVANAGARI SIGN INVERTED CANDRABINDU..DEVANAGARI SIGN ANUSVARA
+ {0x0903, 0x0903, prCM, gcMc}, // DEVANAGARI SIGN VISARGA
+ {0x0904, 0x0939, prAL, gcLo}, // [54] DEVANAGARI LETTER SHORT A..DEVANAGARI LETTER HA
+ {0x093A, 0x093A, prCM, gcMn}, // DEVANAGARI VOWEL SIGN OE
+ {0x093B, 0x093B, prCM, gcMc}, // DEVANAGARI VOWEL SIGN OOE
+ {0x093C, 0x093C, prCM, gcMn}, // DEVANAGARI SIGN NUKTA
+ {0x093D, 0x093D, prAL, gcLo}, // DEVANAGARI SIGN AVAGRAHA
+ {0x093E, 0x0940, prCM, gcMc}, // [3] DEVANAGARI VOWEL SIGN AA..DEVANAGARI VOWEL SIGN II
+ {0x0941, 0x0948, prCM, gcMn}, // [8] DEVANAGARI VOWEL SIGN U..DEVANAGARI VOWEL SIGN AI
+ {0x0949, 0x094C, prCM, gcMc}, // [4] DEVANAGARI VOWEL SIGN CANDRA O..DEVANAGARI VOWEL SIGN AU
+ {0x094D, 0x094D, prCM, gcMn}, // DEVANAGARI SIGN VIRAMA
+ {0x094E, 0x094F, prCM, gcMc}, // [2] DEVANAGARI VOWEL SIGN PRISHTHAMATRA E..DEVANAGARI VOWEL SIGN AW
+ {0x0950, 0x0950, prAL, gcLo}, // DEVANAGARI OM
+ {0x0951, 0x0957, prCM, gcMn}, // [7] DEVANAGARI STRESS SIGN UDATTA..DEVANAGARI VOWEL SIGN UUE
+ {0x0958, 0x0961, prAL, gcLo}, // [10] DEVANAGARI LETTER QA..DEVANAGARI LETTER VOCALIC LL
+ {0x0962, 0x0963, prCM, gcMn}, // [2] DEVANAGARI VOWEL SIGN VOCALIC L..DEVANAGARI VOWEL SIGN VOCALIC LL
+ {0x0964, 0x0965, prBA, gcPo}, // [2] DEVANAGARI DANDA..DEVANAGARI DOUBLE DANDA
+ {0x0966, 0x096F, prNU, gcNd}, // [10] DEVANAGARI DIGIT ZERO..DEVANAGARI DIGIT NINE
+ {0x0970, 0x0970, prAL, gcPo}, // DEVANAGARI ABBREVIATION SIGN
+ {0x0971, 0x0971, prAL, gcLm}, // DEVANAGARI SIGN HIGH SPACING DOT
+ {0x0972, 0x097F, prAL, gcLo}, // [14] DEVANAGARI LETTER CANDRA A..DEVANAGARI LETTER BBA
+ {0x0980, 0x0980, prAL, gcLo}, // BENGALI ANJI
+ {0x0981, 0x0981, prCM, gcMn}, // BENGALI SIGN CANDRABINDU
+ {0x0982, 0x0983, prCM, gcMc}, // [2] BENGALI SIGN ANUSVARA..BENGALI SIGN VISARGA
+ {0x0985, 0x098C, prAL, gcLo}, // [8] BENGALI LETTER A..BENGALI LETTER VOCALIC L
+ {0x098F, 0x0990, prAL, gcLo}, // [2] BENGALI LETTER E..BENGALI LETTER AI
+ {0x0993, 0x09A8, prAL, gcLo}, // [22] BENGALI LETTER O..BENGALI LETTER NA
+ {0x09AA, 0x09B0, prAL, gcLo}, // [7] BENGALI LETTER PA..BENGALI LETTER RA
+ {0x09B2, 0x09B2, prAL, gcLo}, // BENGALI LETTER LA
+ {0x09B6, 0x09B9, prAL, gcLo}, // [4] BENGALI LETTER SHA..BENGALI LETTER HA
+ {0x09BC, 0x09BC, prCM, gcMn}, // BENGALI SIGN NUKTA
+ {0x09BD, 0x09BD, prAL, gcLo}, // BENGALI SIGN AVAGRAHA
+ {0x09BE, 0x09C0, prCM, gcMc}, // [3] BENGALI VOWEL SIGN AA..BENGALI VOWEL SIGN II
+ {0x09C1, 0x09C4, prCM, gcMn}, // [4] BENGALI VOWEL SIGN U..BENGALI VOWEL SIGN VOCALIC RR
+ {0x09C7, 0x09C8, prCM, gcMc}, // [2] BENGALI VOWEL SIGN E..BENGALI VOWEL SIGN AI
+ {0x09CB, 0x09CC, prCM, gcMc}, // [2] BENGALI VOWEL SIGN O..BENGALI VOWEL SIGN AU
+ {0x09CD, 0x09CD, prCM, gcMn}, // BENGALI SIGN VIRAMA
+ {0x09CE, 0x09CE, prAL, gcLo}, // BENGALI LETTER KHANDA TA
+ {0x09D7, 0x09D7, prCM, gcMc}, // BENGALI AU LENGTH MARK
+ {0x09DC, 0x09DD, prAL, gcLo}, // [2] BENGALI LETTER RRA..BENGALI LETTER RHA
+ {0x09DF, 0x09E1, prAL, gcLo}, // [3] BENGALI LETTER YYA..BENGALI LETTER VOCALIC LL
+ {0x09E2, 0x09E3, prCM, gcMn}, // [2] BENGALI VOWEL SIGN VOCALIC L..BENGALI VOWEL SIGN VOCALIC LL
+ {0x09E6, 0x09EF, prNU, gcNd}, // [10] BENGALI DIGIT ZERO..BENGALI DIGIT NINE
+ {0x09F0, 0x09F1, prAL, gcLo}, // [2] BENGALI LETTER RA WITH MIDDLE DIAGONAL..BENGALI LETTER RA WITH LOWER DIAGONAL
+ {0x09F2, 0x09F3, prPO, gcSc}, // [2] BENGALI RUPEE MARK..BENGALI RUPEE SIGN
+ {0x09F4, 0x09F8, prAL, gcNo}, // [5] BENGALI CURRENCY NUMERATOR ONE..BENGALI CURRENCY NUMERATOR ONE LESS THAN THE DENOMINATOR
+ {0x09F9, 0x09F9, prPO, gcNo}, // BENGALI CURRENCY DENOMINATOR SIXTEEN
+ {0x09FA, 0x09FA, prAL, gcSo}, // BENGALI ISSHAR
+ {0x09FB, 0x09FB, prPR, gcSc}, // BENGALI GANDA MARK
+ {0x09FC, 0x09FC, prAL, gcLo}, // BENGALI LETTER VEDIC ANUSVARA
+ {0x09FD, 0x09FD, prAL, gcPo}, // BENGALI ABBREVIATION SIGN
+ {0x09FE, 0x09FE, prCM, gcMn}, // BENGALI SANDHI MARK
+ {0x0A01, 0x0A02, prCM, gcMn}, // [2] GURMUKHI SIGN ADAK BINDI..GURMUKHI SIGN BINDI
+ {0x0A03, 0x0A03, prCM, gcMc}, // GURMUKHI SIGN VISARGA
+ {0x0A05, 0x0A0A, prAL, gcLo}, // [6] GURMUKHI LETTER A..GURMUKHI LETTER UU
+ {0x0A0F, 0x0A10, prAL, gcLo}, // [2] GURMUKHI LETTER EE..GURMUKHI LETTER AI
+ {0x0A13, 0x0A28, prAL, gcLo}, // [22] GURMUKHI LETTER OO..GURMUKHI LETTER NA
+ {0x0A2A, 0x0A30, prAL, gcLo}, // [7] GURMUKHI LETTER PA..GURMUKHI LETTER RA
+ {0x0A32, 0x0A33, prAL, gcLo}, // [2] GURMUKHI LETTER LA..GURMUKHI LETTER LLA
+ {0x0A35, 0x0A36, prAL, gcLo}, // [2] GURMUKHI LETTER VA..GURMUKHI LETTER SHA
+ {0x0A38, 0x0A39, prAL, gcLo}, // [2] GURMUKHI LETTER SA..GURMUKHI LETTER HA
+ {0x0A3C, 0x0A3C, prCM, gcMn}, // GURMUKHI SIGN NUKTA
+ {0x0A3E, 0x0A40, prCM, gcMc}, // [3] GURMUKHI VOWEL SIGN AA..GURMUKHI VOWEL SIGN II
+ {0x0A41, 0x0A42, prCM, gcMn}, // [2] GURMUKHI VOWEL SIGN U..GURMUKHI VOWEL SIGN UU
+ {0x0A47, 0x0A48, prCM, gcMn}, // [2] GURMUKHI VOWEL SIGN EE..GURMUKHI VOWEL SIGN AI
+ {0x0A4B, 0x0A4D, prCM, gcMn}, // [3] GURMUKHI VOWEL SIGN OO..GURMUKHI SIGN VIRAMA
+ {0x0A51, 0x0A51, prCM, gcMn}, // GURMUKHI SIGN UDAAT
+ {0x0A59, 0x0A5C, prAL, gcLo}, // [4] GURMUKHI LETTER KHHA..GURMUKHI LETTER RRA
+ {0x0A5E, 0x0A5E, prAL, gcLo}, // GURMUKHI LETTER FA
+ {0x0A66, 0x0A6F, prNU, gcNd}, // [10] GURMUKHI DIGIT ZERO..GURMUKHI DIGIT NINE
+ {0x0A70, 0x0A71, prCM, gcMn}, // [2] GURMUKHI TIPPI..GURMUKHI ADDAK
+ {0x0A72, 0x0A74, prAL, gcLo}, // [3] GURMUKHI IRI..GURMUKHI EK ONKAR
+ {0x0A75, 0x0A75, prCM, gcMn}, // GURMUKHI SIGN YAKASH
+ {0x0A76, 0x0A76, prAL, gcPo}, // GURMUKHI ABBREVIATION SIGN
+ {0x0A81, 0x0A82, prCM, gcMn}, // [2] GUJARATI SIGN CANDRABINDU..GUJARATI SIGN ANUSVARA
+ {0x0A83, 0x0A83, prCM, gcMc}, // GUJARATI SIGN VISARGA
+ {0x0A85, 0x0A8D, prAL, gcLo}, // [9] GUJARATI LETTER A..GUJARATI VOWEL CANDRA E
+ {0x0A8F, 0x0A91, prAL, gcLo}, // [3] GUJARATI LETTER E..GUJARATI VOWEL CANDRA O
+ {0x0A93, 0x0AA8, prAL, gcLo}, // [22] GUJARATI LETTER O..GUJARATI LETTER NA
+ {0x0AAA, 0x0AB0, prAL, gcLo}, // [7] GUJARATI LETTER PA..GUJARATI LETTER RA
+ {0x0AB2, 0x0AB3, prAL, gcLo}, // [2] GUJARATI LETTER LA..GUJARATI LETTER LLA
+ {0x0AB5, 0x0AB9, prAL, gcLo}, // [5] GUJARATI LETTER VA..GUJARATI LETTER HA
+ {0x0ABC, 0x0ABC, prCM, gcMn}, // GUJARATI SIGN NUKTA
+ {0x0ABD, 0x0ABD, prAL, gcLo}, // GUJARATI SIGN AVAGRAHA
+ {0x0ABE, 0x0AC0, prCM, gcMc}, // [3] GUJARATI VOWEL SIGN AA..GUJARATI VOWEL SIGN II
+ {0x0AC1, 0x0AC5, prCM, gcMn}, // [5] GUJARATI VOWEL SIGN U..GUJARATI VOWEL SIGN CANDRA E
+ {0x0AC7, 0x0AC8, prCM, gcMn}, // [2] GUJARATI VOWEL SIGN E..GUJARATI VOWEL SIGN AI
+ {0x0AC9, 0x0AC9, prCM, gcMc}, // GUJARATI VOWEL SIGN CANDRA O
+ {0x0ACB, 0x0ACC, prCM, gcMc}, // [2] GUJARATI VOWEL SIGN O..GUJARATI VOWEL SIGN AU
+ {0x0ACD, 0x0ACD, prCM, gcMn}, // GUJARATI SIGN VIRAMA
+ {0x0AD0, 0x0AD0, prAL, gcLo}, // GUJARATI OM
+ {0x0AE0, 0x0AE1, prAL, gcLo}, // [2] GUJARATI LETTER VOCALIC RR..GUJARATI LETTER VOCALIC LL
+ {0x0AE2, 0x0AE3, prCM, gcMn}, // [2] GUJARATI VOWEL SIGN VOCALIC L..GUJARATI VOWEL SIGN VOCALIC LL
+ {0x0AE6, 0x0AEF, prNU, gcNd}, // [10] GUJARATI DIGIT ZERO..GUJARATI DIGIT NINE
+ {0x0AF0, 0x0AF0, prAL, gcPo}, // GUJARATI ABBREVIATION SIGN
+ {0x0AF1, 0x0AF1, prPR, gcSc}, // GUJARATI RUPEE SIGN
+ {0x0AF9, 0x0AF9, prAL, gcLo}, // GUJARATI LETTER ZHA
+ {0x0AFA, 0x0AFF, prCM, gcMn}, // [6] GUJARATI SIGN SUKUN..GUJARATI SIGN TWO-CIRCLE NUKTA ABOVE
+ {0x0B01, 0x0B01, prCM, gcMn}, // ORIYA SIGN CANDRABINDU
+ {0x0B02, 0x0B03, prCM, gcMc}, // [2] ORIYA SIGN ANUSVARA..ORIYA SIGN VISARGA
+ {0x0B05, 0x0B0C, prAL, gcLo}, // [8] ORIYA LETTER A..ORIYA LETTER VOCALIC L
+ {0x0B0F, 0x0B10, prAL, gcLo}, // [2] ORIYA LETTER E..ORIYA LETTER AI
+ {0x0B13, 0x0B28, prAL, gcLo}, // [22] ORIYA LETTER O..ORIYA LETTER NA
+ {0x0B2A, 0x0B30, prAL, gcLo}, // [7] ORIYA LETTER PA..ORIYA LETTER RA
+ {0x0B32, 0x0B33, prAL, gcLo}, // [2] ORIYA LETTER LA..ORIYA LETTER LLA
+ {0x0B35, 0x0B39, prAL, gcLo}, // [5] ORIYA LETTER VA..ORIYA LETTER HA
+ {0x0B3C, 0x0B3C, prCM, gcMn}, // ORIYA SIGN NUKTA
+ {0x0B3D, 0x0B3D, prAL, gcLo}, // ORIYA SIGN AVAGRAHA
+ {0x0B3E, 0x0B3E, prCM, gcMc}, // ORIYA VOWEL SIGN AA
+ {0x0B3F, 0x0B3F, prCM, gcMn}, // ORIYA VOWEL SIGN I
+ {0x0B40, 0x0B40, prCM, gcMc}, // ORIYA VOWEL SIGN II
+ {0x0B41, 0x0B44, prCM, gcMn}, // [4] ORIYA VOWEL SIGN U..ORIYA VOWEL SIGN VOCALIC RR
+ {0x0B47, 0x0B48, prCM, gcMc}, // [2] ORIYA VOWEL SIGN E..ORIYA VOWEL SIGN AI
+ {0x0B4B, 0x0B4C, prCM, gcMc}, // [2] ORIYA VOWEL SIGN O..ORIYA VOWEL SIGN AU
+ {0x0B4D, 0x0B4D, prCM, gcMn}, // ORIYA SIGN VIRAMA
+ {0x0B55, 0x0B56, prCM, gcMn}, // [2] ORIYA SIGN OVERLINE..ORIYA AI LENGTH MARK
+ {0x0B57, 0x0B57, prCM, gcMc}, // ORIYA AU LENGTH MARK
+ {0x0B5C, 0x0B5D, prAL, gcLo}, // [2] ORIYA LETTER RRA..ORIYA LETTER RHA
+ {0x0B5F, 0x0B61, prAL, gcLo}, // [3] ORIYA LETTER YYA..ORIYA LETTER VOCALIC LL
+ {0x0B62, 0x0B63, prCM, gcMn}, // [2] ORIYA VOWEL SIGN VOCALIC L..ORIYA VOWEL SIGN VOCALIC LL
+ {0x0B66, 0x0B6F, prNU, gcNd}, // [10] ORIYA DIGIT ZERO..ORIYA DIGIT NINE
+ {0x0B70, 0x0B70, prAL, gcSo}, // ORIYA ISSHAR
+ {0x0B71, 0x0B71, prAL, gcLo}, // ORIYA LETTER WA
+ {0x0B72, 0x0B77, prAL, gcNo}, // [6] ORIYA FRACTION ONE QUARTER..ORIYA FRACTION THREE SIXTEENTHS
+ {0x0B82, 0x0B82, prCM, gcMn}, // TAMIL SIGN ANUSVARA
+ {0x0B83, 0x0B83, prAL, gcLo}, // TAMIL SIGN VISARGA
+ {0x0B85, 0x0B8A, prAL, gcLo}, // [6] TAMIL LETTER A..TAMIL LETTER UU
+ {0x0B8E, 0x0B90, prAL, gcLo}, // [3] TAMIL LETTER E..TAMIL LETTER AI
+ {0x0B92, 0x0B95, prAL, gcLo}, // [4] TAMIL LETTER O..TAMIL LETTER KA
+ {0x0B99, 0x0B9A, prAL, gcLo}, // [2] TAMIL LETTER NGA..TAMIL LETTER CA
+ {0x0B9C, 0x0B9C, prAL, gcLo}, // TAMIL LETTER JA
+ {0x0B9E, 0x0B9F, prAL, gcLo}, // [2] TAMIL LETTER NYA..TAMIL LETTER TTA
+ {0x0BA3, 0x0BA4, prAL, gcLo}, // [2] TAMIL LETTER NNA..TAMIL LETTER TA
+ {0x0BA8, 0x0BAA, prAL, gcLo}, // [3] TAMIL LETTER NA..TAMIL LETTER PA
+ {0x0BAE, 0x0BB9, prAL, gcLo}, // [12] TAMIL LETTER MA..TAMIL LETTER HA
+ {0x0BBE, 0x0BBF, prCM, gcMc}, // [2] TAMIL VOWEL SIGN AA..TAMIL VOWEL SIGN I
+ {0x0BC0, 0x0BC0, prCM, gcMn}, // TAMIL VOWEL SIGN II
+ {0x0BC1, 0x0BC2, prCM, gcMc}, // [2] TAMIL VOWEL SIGN U..TAMIL VOWEL SIGN UU
+ {0x0BC6, 0x0BC8, prCM, gcMc}, // [3] TAMIL VOWEL SIGN E..TAMIL VOWEL SIGN AI
+ {0x0BCA, 0x0BCC, prCM, gcMc}, // [3] TAMIL VOWEL SIGN O..TAMIL VOWEL SIGN AU
+ {0x0BCD, 0x0BCD, prCM, gcMn}, // TAMIL SIGN VIRAMA
+ {0x0BD0, 0x0BD0, prAL, gcLo}, // TAMIL OM
+ {0x0BD7, 0x0BD7, prCM, gcMc}, // TAMIL AU LENGTH MARK
+ {0x0BE6, 0x0BEF, prNU, gcNd}, // [10] TAMIL DIGIT ZERO..TAMIL DIGIT NINE
+ {0x0BF0, 0x0BF2, prAL, gcNo}, // [3] TAMIL NUMBER TEN..TAMIL NUMBER ONE THOUSAND
+ {0x0BF3, 0x0BF8, prAL, gcSo}, // [6] TAMIL DAY SIGN..TAMIL AS ABOVE SIGN
+ {0x0BF9, 0x0BF9, prPR, gcSc}, // TAMIL RUPEE SIGN
+ {0x0BFA, 0x0BFA, prAL, gcSo}, // TAMIL NUMBER SIGN
+ {0x0C00, 0x0C00, prCM, gcMn}, // TELUGU SIGN COMBINING CANDRABINDU ABOVE
+ {0x0C01, 0x0C03, prCM, gcMc}, // [3] TELUGU SIGN CANDRABINDU..TELUGU SIGN VISARGA
+ {0x0C04, 0x0C04, prCM, gcMn}, // TELUGU SIGN COMBINING ANUSVARA ABOVE
+ {0x0C05, 0x0C0C, prAL, gcLo}, // [8] TELUGU LETTER A..TELUGU LETTER VOCALIC L
+ {0x0C0E, 0x0C10, prAL, gcLo}, // [3] TELUGU LETTER E..TELUGU LETTER AI
+ {0x0C12, 0x0C28, prAL, gcLo}, // [23] TELUGU LETTER O..TELUGU LETTER NA
+ {0x0C2A, 0x0C39, prAL, gcLo}, // [16] TELUGU LETTER PA..TELUGU LETTER HA
+ {0x0C3C, 0x0C3C, prCM, gcMn}, // TELUGU SIGN NUKTA
+ {0x0C3D, 0x0C3D, prAL, gcLo}, // TELUGU SIGN AVAGRAHA
+ {0x0C3E, 0x0C40, prCM, gcMn}, // [3] TELUGU VOWEL SIGN AA..TELUGU VOWEL SIGN II
+ {0x0C41, 0x0C44, prCM, gcMc}, // [4] TELUGU VOWEL SIGN U..TELUGU VOWEL SIGN VOCALIC RR
+ {0x0C46, 0x0C48, prCM, gcMn}, // [3] TELUGU VOWEL SIGN E..TELUGU VOWEL SIGN AI
+ {0x0C4A, 0x0C4D, prCM, gcMn}, // [4] TELUGU VOWEL SIGN O..TELUGU SIGN VIRAMA
+ {0x0C55, 0x0C56, prCM, gcMn}, // [2] TELUGU LENGTH MARK..TELUGU AI LENGTH MARK
+ {0x0C58, 0x0C5A, prAL, gcLo}, // [3] TELUGU LETTER TSA..TELUGU LETTER RRRA
+ {0x0C5D, 0x0C5D, prAL, gcLo}, // TELUGU LETTER NAKAARA POLLU
+ {0x0C60, 0x0C61, prAL, gcLo}, // [2] TELUGU LETTER VOCALIC RR..TELUGU LETTER VOCALIC LL
+ {0x0C62, 0x0C63, prCM, gcMn}, // [2] TELUGU VOWEL SIGN VOCALIC L..TELUGU VOWEL SIGN VOCALIC LL
+ {0x0C66, 0x0C6F, prNU, gcNd}, // [10] TELUGU DIGIT ZERO..TELUGU DIGIT NINE
+ {0x0C77, 0x0C77, prBB, gcPo}, // TELUGU SIGN SIDDHAM
+ {0x0C78, 0x0C7E, prAL, gcNo}, // [7] TELUGU FRACTION DIGIT ZERO FOR ODD POWERS OF FOUR..TELUGU FRACTION DIGIT THREE FOR EVEN POWERS OF FOUR
+ {0x0C7F, 0x0C7F, prAL, gcSo}, // TELUGU SIGN TUUMU
+ {0x0C80, 0x0C80, prAL, gcLo}, // KANNADA SIGN SPACING CANDRABINDU
+ {0x0C81, 0x0C81, prCM, gcMn}, // KANNADA SIGN CANDRABINDU
+ {0x0C82, 0x0C83, prCM, gcMc}, // [2] KANNADA SIGN ANUSVARA..KANNADA SIGN VISARGA
+ {0x0C84, 0x0C84, prBB, gcPo}, // KANNADA SIGN SIDDHAM
+ {0x0C85, 0x0C8C, prAL, gcLo}, // [8] KANNADA LETTER A..KANNADA LETTER VOCALIC L
+ {0x0C8E, 0x0C90, prAL, gcLo}, // [3] KANNADA LETTER E..KANNADA LETTER AI
+ {0x0C92, 0x0CA8, prAL, gcLo}, // [23] KANNADA LETTER O..KANNADA LETTER NA
+ {0x0CAA, 0x0CB3, prAL, gcLo}, // [10] KANNADA LETTER PA..KANNADA LETTER LLA
+ {0x0CB5, 0x0CB9, prAL, gcLo}, // [5] KANNADA LETTER VA..KANNADA LETTER HA
+ {0x0CBC, 0x0CBC, prCM, gcMn}, // KANNADA SIGN NUKTA
+ {0x0CBD, 0x0CBD, prAL, gcLo}, // KANNADA SIGN AVAGRAHA
+ {0x0CBE, 0x0CBE, prCM, gcMc}, // KANNADA VOWEL SIGN AA
+ {0x0CBF, 0x0CBF, prCM, gcMn}, // KANNADA VOWEL SIGN I
+ {0x0CC0, 0x0CC4, prCM, gcMc}, // [5] KANNADA VOWEL SIGN II..KANNADA VOWEL SIGN VOCALIC RR
+ {0x0CC6, 0x0CC6, prCM, gcMn}, // KANNADA VOWEL SIGN E
+ {0x0CC7, 0x0CC8, prCM, gcMc}, // [2] KANNADA VOWEL SIGN EE..KANNADA VOWEL SIGN AI
+ {0x0CCA, 0x0CCB, prCM, gcMc}, // [2] KANNADA VOWEL SIGN O..KANNADA VOWEL SIGN OO
+ {0x0CCC, 0x0CCD, prCM, gcMn}, // [2] KANNADA VOWEL SIGN AU..KANNADA SIGN VIRAMA
+ {0x0CD5, 0x0CD6, prCM, gcMc}, // [2] KANNADA LENGTH MARK..KANNADA AI LENGTH MARK
+ {0x0CDD, 0x0CDE, prAL, gcLo}, // [2] KANNADA LETTER NAKAARA POLLU..KANNADA LETTER FA
+ {0x0CE0, 0x0CE1, prAL, gcLo}, // [2] KANNADA LETTER VOCALIC RR..KANNADA LETTER VOCALIC LL
+ {0x0CE2, 0x0CE3, prCM, gcMn}, // [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
+ {0x0CE6, 0x0CEF, prNU, gcNd}, // [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE
+ {0x0CF1, 0x0CF2, prAL, gcLo}, // [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA
+ {0x0CF3, 0x0CF3, prCM, gcMc}, // KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT
+ {0x0D00, 0x0D01, prCM, gcMn}, // [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
+ {0x0D02, 0x0D03, prCM, gcMc}, // [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
+ {0x0D04, 0x0D0C, prAL, gcLo}, // [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L
+ {0x0D0E, 0x0D10, prAL, gcLo}, // [3] MALAYALAM LETTER E..MALAYALAM LETTER AI
+ {0x0D12, 0x0D3A, prAL, gcLo}, // [41] MALAYALAM LETTER O..MALAYALAM LETTER TTTA
+ {0x0D3B, 0x0D3C, prCM, gcMn}, // [2] MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA
+ {0x0D3D, 0x0D3D, prAL, gcLo}, // MALAYALAM SIGN AVAGRAHA
+ {0x0D3E, 0x0D40, prCM, gcMc}, // [3] MALAYALAM VOWEL SIGN AA..MALAYALAM VOWEL SIGN II
+ {0x0D41, 0x0D44, prCM, gcMn}, // [4] MALAYALAM VOWEL SIGN U..MALAYALAM VOWEL SIGN VOCALIC RR
+ {0x0D46, 0x0D48, prCM, gcMc}, // [3] MALAYALAM VOWEL SIGN E..MALAYALAM VOWEL SIGN AI
+ {0x0D4A, 0x0D4C, prCM, gcMc}, // [3] MALAYALAM VOWEL SIGN O..MALAYALAM VOWEL SIGN AU
+ {0x0D4D, 0x0D4D, prCM, gcMn}, // MALAYALAM SIGN VIRAMA
+ {0x0D4E, 0x0D4E, prAL, gcLo}, // MALAYALAM LETTER DOT REPH
+ {0x0D4F, 0x0D4F, prAL, gcSo}, // MALAYALAM SIGN PARA
+ {0x0D54, 0x0D56, prAL, gcLo}, // [3] MALAYALAM LETTER CHILLU M..MALAYALAM LETTER CHILLU LLL
+ {0x0D57, 0x0D57, prCM, gcMc}, // MALAYALAM AU LENGTH MARK
+ {0x0D58, 0x0D5E, prAL, gcNo}, // [7] MALAYALAM FRACTION ONE ONE-HUNDRED-AND-SIXTIETH..MALAYALAM FRACTION ONE FIFTH
+ {0x0D5F, 0x0D61, prAL, gcLo}, // [3] MALAYALAM LETTER ARCHAIC II..MALAYALAM LETTER VOCALIC LL
+ {0x0D62, 0x0D63, prCM, gcMn}, // [2] MALAYALAM VOWEL SIGN VOCALIC L..MALAYALAM VOWEL SIGN VOCALIC LL
+ {0x0D66, 0x0D6F, prNU, gcNd}, // [10] MALAYALAM DIGIT ZERO..MALAYALAM DIGIT NINE
+ {0x0D70, 0x0D78, prAL, gcNo}, // [9] MALAYALAM NUMBER TEN..MALAYALAM FRACTION THREE SIXTEENTHS
+ {0x0D79, 0x0D79, prPO, gcSo}, // MALAYALAM DATE MARK
+ {0x0D7A, 0x0D7F, prAL, gcLo}, // [6] MALAYALAM LETTER CHILLU NN..MALAYALAM LETTER CHILLU K
+ {0x0D81, 0x0D81, prCM, gcMn}, // SINHALA SIGN CANDRABINDU
+ {0x0D82, 0x0D83, prCM, gcMc}, // [2] SINHALA SIGN ANUSVARAYA..SINHALA SIGN VISARGAYA
+ {0x0D85, 0x0D96, prAL, gcLo}, // [18] SINHALA LETTER AYANNA..SINHALA LETTER AUYANNA
+ {0x0D9A, 0x0DB1, prAL, gcLo}, // [24] SINHALA LETTER ALPAPRAANA KAYANNA..SINHALA LETTER DANTAJA NAYANNA
+ {0x0DB3, 0x0DBB, prAL, gcLo}, // [9] SINHALA LETTER SANYAKA DAYANNA..SINHALA LETTER RAYANNA
+ {0x0DBD, 0x0DBD, prAL, gcLo}, // SINHALA LETTER DANTAJA LAYANNA
+ {0x0DC0, 0x0DC6, prAL, gcLo}, // [7] SINHALA LETTER VAYANNA..SINHALA LETTER FAYANNA
+ {0x0DCA, 0x0DCA, prCM, gcMn}, // SINHALA SIGN AL-LAKUNA
+ {0x0DCF, 0x0DD1, prCM, gcMc}, // [3] SINHALA VOWEL SIGN AELA-PILLA..SINHALA VOWEL SIGN DIGA AEDA-PILLA
+ {0x0DD2, 0x0DD4, prCM, gcMn}, // [3] SINHALA VOWEL SIGN KETTI IS-PILLA..SINHALA VOWEL SIGN KETTI PAA-PILLA
+ {0x0DD6, 0x0DD6, prCM, gcMn}, // SINHALA VOWEL SIGN DIGA PAA-PILLA
+ {0x0DD8, 0x0DDF, prCM, gcMc}, // [8] SINHALA VOWEL SIGN GAETTA-PILLA..SINHALA VOWEL SIGN GAYANUKITTA
+ {0x0DE6, 0x0DEF, prNU, gcNd}, // [10] SINHALA LITH DIGIT ZERO..SINHALA LITH DIGIT NINE
+ {0x0DF2, 0x0DF3, prCM, gcMc}, // [2] SINHALA VOWEL SIGN DIGA GAETTA-PILLA..SINHALA VOWEL SIGN DIGA GAYANUKITTA
+ {0x0DF4, 0x0DF4, prAL, gcPo}, // SINHALA PUNCTUATION KUNDDALIYA
+ {0x0E01, 0x0E30, prSA, gcLo}, // [48] THAI CHARACTER KO KAI..THAI CHARACTER SARA A
+ {0x0E31, 0x0E31, prSA, gcMn}, // THAI CHARACTER MAI HAN-AKAT
+ {0x0E32, 0x0E33, prSA, gcLo}, // [2] THAI CHARACTER SARA AA..THAI CHARACTER SARA AM
+ {0x0E34, 0x0E3A, prSA, gcMn}, // [7] THAI CHARACTER SARA I..THAI CHARACTER PHINTHU
+ {0x0E3F, 0x0E3F, prPR, gcSc}, // THAI CURRENCY SYMBOL BAHT
+ {0x0E40, 0x0E45, prSA, gcLo}, // [6] THAI CHARACTER SARA E..THAI CHARACTER LAKKHANGYAO
+ {0x0E46, 0x0E46, prSA, gcLm}, // THAI CHARACTER MAIYAMOK
+ {0x0E47, 0x0E4E, prSA, gcMn}, // [8] THAI CHARACTER MAITAIKHU..THAI CHARACTER YAMAKKAN
+ {0x0E4F, 0x0E4F, prAL, gcPo}, // THAI CHARACTER FONGMAN
+ {0x0E50, 0x0E59, prNU, gcNd}, // [10] THAI DIGIT ZERO..THAI DIGIT NINE
+ {0x0E5A, 0x0E5B, prBA, gcPo}, // [2] THAI CHARACTER ANGKHANKHU..THAI CHARACTER KHOMUT
+ {0x0E81, 0x0E82, prSA, gcLo}, // [2] LAO LETTER KO..LAO LETTER KHO SUNG
+ {0x0E84, 0x0E84, prSA, gcLo}, // LAO LETTER KHO TAM
+ {0x0E86, 0x0E8A, prSA, gcLo}, // [5] LAO LETTER PALI GHA..LAO LETTER SO TAM
+ {0x0E8C, 0x0EA3, prSA, gcLo}, // [24] LAO LETTER PALI JHA..LAO LETTER LO LING
+ {0x0EA5, 0x0EA5, prSA, gcLo}, // LAO LETTER LO LOOT
+ {0x0EA7, 0x0EB0, prSA, gcLo}, // [10] LAO LETTER WO..LAO VOWEL SIGN A
+ {0x0EB1, 0x0EB1, prSA, gcMn}, // LAO VOWEL SIGN MAI KAN
+ {0x0EB2, 0x0EB3, prSA, gcLo}, // [2] LAO VOWEL SIGN AA..LAO VOWEL SIGN AM
+ {0x0EB4, 0x0EBC, prSA, gcMn}, // [9] LAO VOWEL SIGN I..LAO SEMIVOWEL SIGN LO
+ {0x0EBD, 0x0EBD, prSA, gcLo}, // LAO SEMIVOWEL SIGN NYO
+ {0x0EC0, 0x0EC4, prSA, gcLo}, // [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI
+ {0x0EC6, 0x0EC6, prSA, gcLm}, // LAO KO LA
+ {0x0EC8, 0x0ECE, prSA, gcMn}, // [7] LAO TONE MAI EK..LAO YAMAKKAN
+ {0x0ED0, 0x0ED9, prNU, gcNd}, // [10] LAO DIGIT ZERO..LAO DIGIT NINE
+ {0x0EDC, 0x0EDF, prSA, gcLo}, // [4] LAO HO NO..LAO LETTER KHMU NYO
+ {0x0F00, 0x0F00, prAL, gcLo}, // TIBETAN SYLLABLE OM
+ {0x0F01, 0x0F03, prBB, gcSo}, // [3] TIBETAN MARK GTER YIG MGO TRUNCATED A..TIBETAN MARK GTER YIG MGO -UM GTER TSHEG MA
+ {0x0F04, 0x0F04, prBB, gcPo}, // TIBETAN MARK INITIAL YIG MGO MDUN MA
+ {0x0F05, 0x0F05, prAL, gcPo}, // TIBETAN MARK CLOSING YIG MGO SGAB MA
+ {0x0F06, 0x0F07, prBB, gcPo}, // [2] TIBETAN MARK CARET YIG MGO PHUR SHAD MA..TIBETAN MARK YIG MGO TSHEG SHAD MA
+ {0x0F08, 0x0F08, prGL, gcPo}, // TIBETAN MARK SBRUL SHAD
+ {0x0F09, 0x0F0A, prBB, gcPo}, // [2] TIBETAN MARK BSKUR YIG MGO..TIBETAN MARK BKA- SHOG YIG MGO
+ {0x0F0B, 0x0F0B, prBA, gcPo}, // TIBETAN MARK INTERSYLLABIC TSHEG
+ {0x0F0C, 0x0F0C, prGL, gcPo}, // TIBETAN MARK DELIMITER TSHEG BSTAR
+ {0x0F0D, 0x0F11, prEX, gcPo}, // [5] TIBETAN MARK SHAD..TIBETAN MARK RIN CHEN SPUNGS SHAD
+ {0x0F12, 0x0F12, prGL, gcPo}, // TIBETAN MARK RGYA GRAM SHAD
+ {0x0F13, 0x0F13, prAL, gcSo}, // TIBETAN MARK CARET -DZUD RTAGS ME LONG CAN
+ {0x0F14, 0x0F14, prEX, gcPo}, // TIBETAN MARK GTER TSHEG
+ {0x0F15, 0x0F17, prAL, gcSo}, // [3] TIBETAN LOGOTYPE SIGN CHAD RTAGS..TIBETAN ASTROLOGICAL SIGN SGRA GCAN -CHAR RTAGS
+ {0x0F18, 0x0F19, prCM, gcMn}, // [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS
+ {0x0F1A, 0x0F1F, prAL, gcSo}, // [6] TIBETAN SIGN RDEL DKAR GCIG..TIBETAN SIGN RDEL DKAR RDEL NAG
+ {0x0F20, 0x0F29, prNU, gcNd}, // [10] TIBETAN DIGIT ZERO..TIBETAN DIGIT NINE
+ {0x0F2A, 0x0F33, prAL, gcNo}, // [10] TIBETAN DIGIT HALF ONE..TIBETAN DIGIT HALF ZERO
+ {0x0F34, 0x0F34, prBA, gcSo}, // TIBETAN MARK BSDUS RTAGS
+ {0x0F35, 0x0F35, prCM, gcMn}, // TIBETAN MARK NGAS BZUNG NYI ZLA
+ {0x0F36, 0x0F36, prAL, gcSo}, // TIBETAN MARK CARET -DZUD RTAGS BZHI MIG CAN
+ {0x0F37, 0x0F37, prCM, gcMn}, // TIBETAN MARK NGAS BZUNG SGOR RTAGS
+ {0x0F38, 0x0F38, prAL, gcSo}, // TIBETAN MARK CHE MGO
+ {0x0F39, 0x0F39, prCM, gcMn}, // TIBETAN MARK TSA -PHRU
+ {0x0F3A, 0x0F3A, prOP, gcPs}, // TIBETAN MARK GUG RTAGS GYON
+ {0x0F3B, 0x0F3B, prCL, gcPe}, // TIBETAN MARK GUG RTAGS GYAS
+ {0x0F3C, 0x0F3C, prOP, gcPs}, // TIBETAN MARK ANG KHANG GYON
+ {0x0F3D, 0x0F3D, prCL, gcPe}, // TIBETAN MARK ANG KHANG GYAS
+ {0x0F3E, 0x0F3F, prCM, gcMc}, // [2] TIBETAN SIGN YAR TSHES..TIBETAN SIGN MAR TSHES
+ {0x0F40, 0x0F47, prAL, gcLo}, // [8] TIBETAN LETTER KA..TIBETAN LETTER JA
+ {0x0F49, 0x0F6C, prAL, gcLo}, // [36] TIBETAN LETTER NYA..TIBETAN LETTER RRA
+ {0x0F71, 0x0F7E, prCM, gcMn}, // [14] TIBETAN VOWEL SIGN AA..TIBETAN SIGN RJES SU NGA RO
+ {0x0F7F, 0x0F7F, prBA, gcMc}, // TIBETAN SIGN RNAM BCAD
+ {0x0F80, 0x0F84, prCM, gcMn}, // [5] TIBETAN VOWEL SIGN REVERSED I..TIBETAN MARK HALANTA
+ {0x0F85, 0x0F85, prBA, gcPo}, // TIBETAN MARK PALUTA
+ {0x0F86, 0x0F87, prCM, gcMn}, // [2] TIBETAN SIGN LCI RTAGS..TIBETAN SIGN YANG RTAGS
+ {0x0F88, 0x0F8C, prAL, gcLo}, // [5] TIBETAN SIGN LCE TSA CAN..TIBETAN SIGN INVERTED MCHU CAN
+ {0x0F8D, 0x0F97, prCM, gcMn}, // [11] TIBETAN SUBJOINED SIGN LCE TSA CAN..TIBETAN SUBJOINED LETTER JA
+ {0x0F99, 0x0FBC, prCM, gcMn}, // [36] TIBETAN SUBJOINED LETTER NYA..TIBETAN SUBJOINED LETTER FIXED-FORM RA
+ {0x0FBE, 0x0FBF, prBA, gcSo}, // [2] TIBETAN KU RU KHA..TIBETAN KU RU KHA BZHI MIG CAN
+ {0x0FC0, 0x0FC5, prAL, gcSo}, // [6] TIBETAN CANTILLATION SIGN HEAVY BEAT..TIBETAN SYMBOL RDO RJE
+ {0x0FC6, 0x0FC6, prCM, gcMn}, // TIBETAN SYMBOL PADMA GDAN
+ {0x0FC7, 0x0FCC, prAL, gcSo}, // [6] TIBETAN SYMBOL RDO RJE RGYA GRAM..TIBETAN SYMBOL NOR BU BZHI -KHYIL
+ {0x0FCE, 0x0FCF, prAL, gcSo}, // [2] TIBETAN SIGN RDEL NAG RDEL DKAR..TIBETAN SIGN RDEL NAG GSUM
+ {0x0FD0, 0x0FD1, prBB, gcPo}, // [2] TIBETAN MARK BSKA- SHOG GI MGO RGYAN..TIBETAN MARK MNYAM YIG GI MGO RGYAN
+ {0x0FD2, 0x0FD2, prBA, gcPo}, // TIBETAN MARK NYIS TSHEG
+ {0x0FD3, 0x0FD3, prBB, gcPo}, // TIBETAN MARK INITIAL BRDA RNYING YIG MGO MDUN MA
+ {0x0FD4, 0x0FD4, prAL, gcPo}, // TIBETAN MARK CLOSING BRDA RNYING YIG MGO SGAB MA
+ {0x0FD5, 0x0FD8, prAL, gcSo}, // [4] RIGHT-FACING SVASTI SIGN..LEFT-FACING SVASTI SIGN WITH DOTS
+ {0x0FD9, 0x0FDA, prGL, gcPo}, // [2] TIBETAN MARK LEADING MCHAN RTAGS..TIBETAN MARK TRAILING MCHAN RTAGS
+ {0x1000, 0x102A, prSA, gcLo}, // [43] MYANMAR LETTER KA..MYANMAR LETTER AU
+ {0x102B, 0x102C, prSA, gcMc}, // [2] MYANMAR VOWEL SIGN TALL AA..MYANMAR VOWEL SIGN AA
+ {0x102D, 0x1030, prSA, gcMn}, // [4] MYANMAR VOWEL SIGN I..MYANMAR VOWEL SIGN UU
+ {0x1031, 0x1031, prSA, gcMc}, // MYANMAR VOWEL SIGN E
+ {0x1032, 0x1037, prSA, gcMn}, // [6] MYANMAR VOWEL SIGN AI..MYANMAR SIGN DOT BELOW
+ {0x1038, 0x1038, prSA, gcMc}, // MYANMAR SIGN VISARGA
+ {0x1039, 0x103A, prSA, gcMn}, // [2] MYANMAR SIGN VIRAMA..MYANMAR SIGN ASAT
+ {0x103B, 0x103C, prSA, gcMc}, // [2] MYANMAR CONSONANT SIGN MEDIAL YA..MYANMAR CONSONANT SIGN MEDIAL RA
+ {0x103D, 0x103E, prSA, gcMn}, // [2] MYANMAR CONSONANT SIGN MEDIAL WA..MYANMAR CONSONANT SIGN MEDIAL HA
+ {0x103F, 0x103F, prSA, gcLo}, // MYANMAR LETTER GREAT SA
+ {0x1040, 0x1049, prNU, gcNd}, // [10] MYANMAR DIGIT ZERO..MYANMAR DIGIT NINE
+ {0x104A, 0x104B, prBA, gcPo}, // [2] MYANMAR SIGN LITTLE SECTION..MYANMAR SIGN SECTION
+ {0x104C, 0x104F, prAL, gcPo}, // [4] MYANMAR SYMBOL LOCATIVE..MYANMAR SYMBOL GENITIVE
+ {0x1050, 0x1055, prSA, gcLo}, // [6] MYANMAR LETTER SHA..MYANMAR LETTER VOCALIC LL
+ {0x1056, 0x1057, prSA, gcMc}, // [2] MYANMAR VOWEL SIGN VOCALIC R..MYANMAR VOWEL SIGN VOCALIC RR
+ {0x1058, 0x1059, prSA, gcMn}, // [2] MYANMAR VOWEL SIGN VOCALIC L..MYANMAR VOWEL SIGN VOCALIC LL
+ {0x105A, 0x105D, prSA, gcLo}, // [4] MYANMAR LETTER MON NGA..MYANMAR LETTER MON BBE
+ {0x105E, 0x1060, prSA, gcMn}, // [3] MYANMAR CONSONANT SIGN MON MEDIAL NA..MYANMAR CONSONANT SIGN MON MEDIAL LA
+ {0x1061, 0x1061, prSA, gcLo}, // MYANMAR LETTER SGAW KAREN SHA
+ {0x1062, 0x1064, prSA, gcMc}, // [3] MYANMAR VOWEL SIGN SGAW KAREN EU..MYANMAR TONE MARK SGAW KAREN KE PHO
+ {0x1065, 0x1066, prSA, gcLo}, // [2] MYANMAR LETTER WESTERN PWO KAREN THA..MYANMAR LETTER WESTERN PWO KAREN PWA
+ {0x1067, 0x106D, prSA, gcMc}, // [7] MYANMAR VOWEL SIGN WESTERN PWO KAREN EU..MYANMAR SIGN WESTERN PWO KAREN TONE-5
+ {0x106E, 0x1070, prSA, gcLo}, // [3] MYANMAR LETTER EASTERN PWO KAREN NNA..MYANMAR LETTER EASTERN PWO KAREN GHWA
+ {0x1071, 0x1074, prSA, gcMn}, // [4] MYANMAR VOWEL SIGN GEBA KAREN I..MYANMAR VOWEL SIGN KAYAH EE
+ {0x1075, 0x1081, prSA, gcLo}, // [13] MYANMAR LETTER SHAN KA..MYANMAR LETTER SHAN HA
+ {0x1082, 0x1082, prSA, gcMn}, // MYANMAR CONSONANT SIGN SHAN MEDIAL WA
+ {0x1083, 0x1084, prSA, gcMc}, // [2] MYANMAR VOWEL SIGN SHAN AA..MYANMAR VOWEL SIGN SHAN E
+ {0x1085, 0x1086, prSA, gcMn}, // [2] MYANMAR VOWEL SIGN SHAN E ABOVE..MYANMAR VOWEL SIGN SHAN FINAL Y
+ {0x1087, 0x108C, prSA, gcMc}, // [6] MYANMAR SIGN SHAN TONE-2..MYANMAR SIGN SHAN COUNCIL TONE-3
+ {0x108D, 0x108D, prSA, gcMn}, // MYANMAR SIGN SHAN COUNCIL EMPHATIC TONE
+ {0x108E, 0x108E, prSA, gcLo}, // MYANMAR LETTER RUMAI PALAUNG FA
+ {0x108F, 0x108F, prSA, gcMc}, // MYANMAR SIGN RUMAI PALAUNG TONE-5
+ {0x1090, 0x1099, prNU, gcNd}, // [10] MYANMAR SHAN DIGIT ZERO..MYANMAR SHAN DIGIT NINE
+ {0x109A, 0x109C, prSA, gcMc}, // [3] MYANMAR SIGN KHAMTI TONE-1..MYANMAR VOWEL SIGN AITON A
+ {0x109D, 0x109D, prSA, gcMn}, // MYANMAR VOWEL SIGN AITON AI
+ {0x109E, 0x109F, prSA, gcSo}, // [2] MYANMAR SYMBOL SHAN ONE..MYANMAR SYMBOL SHAN EXCLAMATION
+ {0x10A0, 0x10C5, prAL, gcLu}, // [38] GEORGIAN CAPITAL LETTER AN..GEORGIAN CAPITAL LETTER HOE
+ {0x10C7, 0x10C7, prAL, gcLu}, // GEORGIAN CAPITAL LETTER YN
+ {0x10CD, 0x10CD, prAL, gcLu}, // GEORGIAN CAPITAL LETTER AEN
+ {0x10D0, 0x10FA, prAL, gcLl}, // [43] GEORGIAN LETTER AN..GEORGIAN LETTER AIN
+ {0x10FB, 0x10FB, prAL, gcPo}, // GEORGIAN PARAGRAPH SEPARATOR
+ {0x10FC, 0x10FC, prAL, gcLm}, // MODIFIER LETTER GEORGIAN NAR
+ {0x10FD, 0x10FF, prAL, gcLl}, // [3] GEORGIAN LETTER AEN..GEORGIAN LETTER LABIAL SIGN
+ {0x1100, 0x115F, prJL, gcLo}, // [96] HANGUL CHOSEONG KIYEOK..HANGUL CHOSEONG FILLER
+ {0x1160, 0x11A7, prJV, gcLo}, // [72] HANGUL JUNGSEONG FILLER..HANGUL JUNGSEONG O-YAE
+ {0x11A8, 0x11FF, prJT, gcLo}, // [88] HANGUL JONGSEONG KIYEOK..HANGUL JONGSEONG SSANGNIEUN
+ {0x1200, 0x1248, prAL, gcLo}, // [73] ETHIOPIC SYLLABLE HA..ETHIOPIC SYLLABLE QWA
+ {0x124A, 0x124D, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE QWI..ETHIOPIC SYLLABLE QWE
+ {0x1250, 0x1256, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE QHA..ETHIOPIC SYLLABLE QHO
+ {0x1258, 0x1258, prAL, gcLo}, // ETHIOPIC SYLLABLE QHWA
+ {0x125A, 0x125D, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE QHWI..ETHIOPIC SYLLABLE QHWE
+ {0x1260, 0x1288, prAL, gcLo}, // [41] ETHIOPIC SYLLABLE BA..ETHIOPIC SYLLABLE XWA
+ {0x128A, 0x128D, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE XWI..ETHIOPIC SYLLABLE XWE
+ {0x1290, 0x12B0, prAL, gcLo}, // [33] ETHIOPIC SYLLABLE NA..ETHIOPIC SYLLABLE KWA
+ {0x12B2, 0x12B5, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE KWI..ETHIOPIC SYLLABLE KWE
+ {0x12B8, 0x12BE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE KXA..ETHIOPIC SYLLABLE KXO
+ {0x12C0, 0x12C0, prAL, gcLo}, // ETHIOPIC SYLLABLE KXWA
+ {0x12C2, 0x12C5, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE KXWI..ETHIOPIC SYLLABLE KXWE
+ {0x12C8, 0x12D6, prAL, gcLo}, // [15] ETHIOPIC SYLLABLE WA..ETHIOPIC SYLLABLE PHARYNGEAL O
+ {0x12D8, 0x1310, prAL, gcLo}, // [57] ETHIOPIC SYLLABLE ZA..ETHIOPIC SYLLABLE GWA
+ {0x1312, 0x1315, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE GWI..ETHIOPIC SYLLABLE GWE
+ {0x1318, 0x135A, prAL, gcLo}, // [67] ETHIOPIC SYLLABLE GGA..ETHIOPIC SYLLABLE FYA
+ {0x135D, 0x135F, prCM, gcMn}, // [3] ETHIOPIC COMBINING GEMINATION AND VOWEL LENGTH MARK..ETHIOPIC COMBINING GEMINATION MARK
+ {0x1360, 0x1360, prAL, gcPo}, // ETHIOPIC SECTION MARK
+ {0x1361, 0x1361, prBA, gcPo}, // ETHIOPIC WORDSPACE
+ {0x1362, 0x1368, prAL, gcPo}, // [7] ETHIOPIC FULL STOP..ETHIOPIC PARAGRAPH SEPARATOR
+ {0x1369, 0x137C, prAL, gcNo}, // [20] ETHIOPIC DIGIT ONE..ETHIOPIC NUMBER TEN THOUSAND
+ {0x1380, 0x138F, prAL, gcLo}, // [16] ETHIOPIC SYLLABLE SEBATBEIT MWA..ETHIOPIC SYLLABLE PWE
+ {0x1390, 0x1399, prAL, gcSo}, // [10] ETHIOPIC TONAL MARK YIZET..ETHIOPIC TONAL MARK KURT
+ {0x13A0, 0x13F5, prAL, gcLu}, // [86] CHEROKEE LETTER A..CHEROKEE LETTER MV
+ {0x13F8, 0x13FD, prAL, gcLl}, // [6] CHEROKEE SMALL LETTER YE..CHEROKEE SMALL LETTER MV
+ {0x1400, 0x1400, prBA, gcPd}, // CANADIAN SYLLABICS HYPHEN
+ {0x1401, 0x166C, prAL, gcLo}, // [620] CANADIAN SYLLABICS E..CANADIAN SYLLABICS CARRIER TTSA
+ {0x166D, 0x166D, prAL, gcSo}, // CANADIAN SYLLABICS CHI SIGN
+ {0x166E, 0x166E, prAL, gcPo}, // CANADIAN SYLLABICS FULL STOP
+ {0x166F, 0x167F, prAL, gcLo}, // [17] CANADIAN SYLLABICS QAI..CANADIAN SYLLABICS BLACKFOOT W
+ {0x1680, 0x1680, prBA, gcZs}, // OGHAM SPACE MARK
+ {0x1681, 0x169A, prAL, gcLo}, // [26] OGHAM LETTER BEITH..OGHAM LETTER PEITH
+ {0x169B, 0x169B, prOP, gcPs}, // OGHAM FEATHER MARK
+ {0x169C, 0x169C, prCL, gcPe}, // OGHAM REVERSED FEATHER MARK
+ {0x16A0, 0x16EA, prAL, gcLo}, // [75] RUNIC LETTER FEHU FEOH FE F..RUNIC LETTER X
+ {0x16EB, 0x16ED, prBA, gcPo}, // [3] RUNIC SINGLE PUNCTUATION..RUNIC CROSS PUNCTUATION
+ {0x16EE, 0x16F0, prAL, gcNl}, // [3] RUNIC ARLAUG SYMBOL..RUNIC BELGTHOR SYMBOL
+ {0x16F1, 0x16F8, prAL, gcLo}, // [8] RUNIC LETTER K..RUNIC LETTER FRANKS CASKET AESC
+ {0x1700, 0x1711, prAL, gcLo}, // [18] TAGALOG LETTER A..TAGALOG LETTER HA
+ {0x1712, 0x1714, prCM, gcMn}, // [3] TAGALOG VOWEL SIGN I..TAGALOG SIGN VIRAMA
+ {0x1715, 0x1715, prCM, gcMc}, // TAGALOG SIGN PAMUDPOD
+ {0x171F, 0x171F, prAL, gcLo}, // TAGALOG LETTER ARCHAIC RA
+ {0x1720, 0x1731, prAL, gcLo}, // [18] HANUNOO LETTER A..HANUNOO LETTER HA
+ {0x1732, 0x1733, prCM, gcMn}, // [2] HANUNOO VOWEL SIGN I..HANUNOO VOWEL SIGN U
+ {0x1734, 0x1734, prCM, gcMc}, // HANUNOO SIGN PAMUDPOD
+ {0x1735, 0x1736, prBA, gcPo}, // [2] PHILIPPINE SINGLE PUNCTUATION..PHILIPPINE DOUBLE PUNCTUATION
+ {0x1740, 0x1751, prAL, gcLo}, // [18] BUHID LETTER A..BUHID LETTER HA
+ {0x1752, 0x1753, prCM, gcMn}, // [2] BUHID VOWEL SIGN I..BUHID VOWEL SIGN U
+ {0x1760, 0x176C, prAL, gcLo}, // [13] TAGBANWA LETTER A..TAGBANWA LETTER YA
+ {0x176E, 0x1770, prAL, gcLo}, // [3] TAGBANWA LETTER LA..TAGBANWA LETTER SA
+ {0x1772, 0x1773, prCM, gcMn}, // [2] TAGBANWA VOWEL SIGN I..TAGBANWA VOWEL SIGN U
+ {0x1780, 0x17B3, prSA, gcLo}, // [52] KHMER LETTER KA..KHMER INDEPENDENT VOWEL QAU
+ {0x17B4, 0x17B5, prSA, gcMn}, // [2] KHMER VOWEL INHERENT AQ..KHMER VOWEL INHERENT AA
+ {0x17B6, 0x17B6, prSA, gcMc}, // KHMER VOWEL SIGN AA
+ {0x17B7, 0x17BD, prSA, gcMn}, // [7] KHMER VOWEL SIGN I..KHMER VOWEL SIGN UA
+ {0x17BE, 0x17C5, prSA, gcMc}, // [8] KHMER VOWEL SIGN OE..KHMER VOWEL SIGN AU
+ {0x17C6, 0x17C6, prSA, gcMn}, // KHMER SIGN NIKAHIT
+ {0x17C7, 0x17C8, prSA, gcMc}, // [2] KHMER SIGN REAHMUK..KHMER SIGN YUUKALEAPINTU
+ {0x17C9, 0x17D3, prSA, gcMn}, // [11] KHMER SIGN MUUSIKATOAN..KHMER SIGN BATHAMASAT
+ {0x17D4, 0x17D5, prBA, gcPo}, // [2] KHMER SIGN KHAN..KHMER SIGN BARIYOOSAN
+ {0x17D6, 0x17D6, prNS, gcPo}, // KHMER SIGN CAMNUC PII KUUH
+ {0x17D7, 0x17D7, prSA, gcLm}, // KHMER SIGN LEK TOO
+ {0x17D8, 0x17D8, prBA, gcPo}, // KHMER SIGN BEYYAL
+ {0x17D9, 0x17D9, prAL, gcPo}, // KHMER SIGN PHNAEK MUAN
+ {0x17DA, 0x17DA, prBA, gcPo}, // KHMER SIGN KOOMUUT
+ {0x17DB, 0x17DB, prPR, gcSc}, // KHMER CURRENCY SYMBOL RIEL
+ {0x17DC, 0x17DC, prSA, gcLo}, // KHMER SIGN AVAKRAHASANYA
+ {0x17DD, 0x17DD, prSA, gcMn}, // KHMER SIGN ATTHACAN
+ {0x17E0, 0x17E9, prNU, gcNd}, // [10] KHMER DIGIT ZERO..KHMER DIGIT NINE
+ {0x17F0, 0x17F9, prAL, gcNo}, // [10] KHMER SYMBOL LEK ATTAK SON..KHMER SYMBOL LEK ATTAK PRAM-BUON
+ {0x1800, 0x1801, prAL, gcPo}, // [2] MONGOLIAN BIRGA..MONGOLIAN ELLIPSIS
+ {0x1802, 0x1803, prEX, gcPo}, // [2] MONGOLIAN COMMA..MONGOLIAN FULL STOP
+ {0x1804, 0x1805, prBA, gcPo}, // [2] MONGOLIAN COLON..MONGOLIAN FOUR DOTS
+ {0x1806, 0x1806, prBB, gcPd}, // MONGOLIAN TODO SOFT HYPHEN
+ {0x1807, 0x1807, prAL, gcPo}, // MONGOLIAN SIBE SYLLABLE BOUNDARY MARKER
+ {0x1808, 0x1809, prEX, gcPo}, // [2] MONGOLIAN MANCHU COMMA..MONGOLIAN MANCHU FULL STOP
+ {0x180A, 0x180A, prAL, gcPo}, // MONGOLIAN NIRUGU
+ {0x180B, 0x180D, prCM, gcMn}, // [3] MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE
+ {0x180E, 0x180E, prGL, gcCf}, // MONGOLIAN VOWEL SEPARATOR
+ {0x180F, 0x180F, prCM, gcMn}, // MONGOLIAN FREE VARIATION SELECTOR FOUR
+ {0x1810, 0x1819, prNU, gcNd}, // [10] MONGOLIAN DIGIT ZERO..MONGOLIAN DIGIT NINE
+ {0x1820, 0x1842, prAL, gcLo}, // [35] MONGOLIAN LETTER A..MONGOLIAN LETTER CHI
+ {0x1843, 0x1843, prAL, gcLm}, // MONGOLIAN LETTER TODO LONG VOWEL SIGN
+ {0x1844, 0x1878, prAL, gcLo}, // [53] MONGOLIAN LETTER TODO E..MONGOLIAN LETTER CHA WITH TWO DOTS
+ {0x1880, 0x1884, prAL, gcLo}, // [5] MONGOLIAN LETTER ALI GALI ANUSVARA ONE..MONGOLIAN LETTER ALI GALI INVERTED UBADAMA
+ {0x1885, 0x1886, prCM, gcMn}, // [2] MONGOLIAN LETTER ALI GALI BALUDA..MONGOLIAN LETTER ALI GALI THREE BALUDA
+ {0x1887, 0x18A8, prAL, gcLo}, // [34] MONGOLIAN LETTER ALI GALI A..MONGOLIAN LETTER MANCHU ALI GALI BHA
+ {0x18A9, 0x18A9, prCM, gcMn}, // MONGOLIAN LETTER ALI GALI DAGALGA
+ {0x18AA, 0x18AA, prAL, gcLo}, // MONGOLIAN LETTER MANCHU ALI GALI LHA
+ {0x18B0, 0x18F5, prAL, gcLo}, // [70] CANADIAN SYLLABICS OY..CANADIAN SYLLABICS CARRIER DENTAL S
+ {0x1900, 0x191E, prAL, gcLo}, // [31] LIMBU VOWEL-CARRIER LETTER..LIMBU LETTER TRA
+ {0x1920, 0x1922, prCM, gcMn}, // [3] LIMBU VOWEL SIGN A..LIMBU VOWEL SIGN U
+ {0x1923, 0x1926, prCM, gcMc}, // [4] LIMBU VOWEL SIGN EE..LIMBU VOWEL SIGN AU
+ {0x1927, 0x1928, prCM, gcMn}, // [2] LIMBU VOWEL SIGN E..LIMBU VOWEL SIGN O
+ {0x1929, 0x192B, prCM, gcMc}, // [3] LIMBU SUBJOINED LETTER YA..LIMBU SUBJOINED LETTER WA
+ {0x1930, 0x1931, prCM, gcMc}, // [2] LIMBU SMALL LETTER KA..LIMBU SMALL LETTER NGA
+ {0x1932, 0x1932, prCM, gcMn}, // LIMBU SMALL LETTER ANUSVARA
+ {0x1933, 0x1938, prCM, gcMc}, // [6] LIMBU SMALL LETTER TA..LIMBU SMALL LETTER LA
+ {0x1939, 0x193B, prCM, gcMn}, // [3] LIMBU SIGN MUKPHRENG..LIMBU SIGN SA-I
+ {0x1940, 0x1940, prAL, gcSo}, // LIMBU SIGN LOO
+ {0x1944, 0x1945, prEX, gcPo}, // [2] LIMBU EXCLAMATION MARK..LIMBU QUESTION MARK
+ {0x1946, 0x194F, prNU, gcNd}, // [10] LIMBU DIGIT ZERO..LIMBU DIGIT NINE
+ {0x1950, 0x196D, prSA, gcLo}, // [30] TAI LE LETTER KA..TAI LE LETTER AI
+ {0x1970, 0x1974, prSA, gcLo}, // [5] TAI LE LETTER TONE-2..TAI LE LETTER TONE-6
+ {0x1980, 0x19AB, prSA, gcLo}, // [44] NEW TAI LUE LETTER HIGH QA..NEW TAI LUE LETTER LOW SUA
+ {0x19B0, 0x19C9, prSA, gcLo}, // [26] NEW TAI LUE VOWEL SIGN VOWEL SHORTENER..NEW TAI LUE TONE MARK-2
+ {0x19D0, 0x19D9, prNU, gcNd}, // [10] NEW TAI LUE DIGIT ZERO..NEW TAI LUE DIGIT NINE
+ {0x19DA, 0x19DA, prSA, gcNo}, // NEW TAI LUE THAM DIGIT ONE
+ {0x19DE, 0x19DF, prSA, gcSo}, // [2] NEW TAI LUE SIGN LAE..NEW TAI LUE SIGN LAEV
+ {0x19E0, 0x19FF, prAL, gcSo}, // [32] KHMER SYMBOL PATHAMASAT..KHMER SYMBOL DAP-PRAM ROC
+ {0x1A00, 0x1A16, prAL, gcLo}, // [23] BUGINESE LETTER KA..BUGINESE LETTER HA
+ {0x1A17, 0x1A18, prCM, gcMn}, // [2] BUGINESE VOWEL SIGN I..BUGINESE VOWEL SIGN U
+ {0x1A19, 0x1A1A, prCM, gcMc}, // [2] BUGINESE VOWEL SIGN E..BUGINESE VOWEL SIGN O
+ {0x1A1B, 0x1A1B, prCM, gcMn}, // BUGINESE VOWEL SIGN AE
+ {0x1A1E, 0x1A1F, prAL, gcPo}, // [2] BUGINESE PALLAWA..BUGINESE END OF SECTION
+ {0x1A20, 0x1A54, prSA, gcLo}, // [53] TAI THAM LETTER HIGH KA..TAI THAM LETTER GREAT SA
+ {0x1A55, 0x1A55, prSA, gcMc}, // TAI THAM CONSONANT SIGN MEDIAL RA
+ {0x1A56, 0x1A56, prSA, gcMn}, // TAI THAM CONSONANT SIGN MEDIAL LA
+ {0x1A57, 0x1A57, prSA, gcMc}, // TAI THAM CONSONANT SIGN LA TANG LAI
+ {0x1A58, 0x1A5E, prSA, gcMn}, // [7] TAI THAM SIGN MAI KANG LAI..TAI THAM CONSONANT SIGN SA
+ {0x1A60, 0x1A60, prSA, gcMn}, // TAI THAM SIGN SAKOT
+ {0x1A61, 0x1A61, prSA, gcMc}, // TAI THAM VOWEL SIGN A
+ {0x1A62, 0x1A62, prSA, gcMn}, // TAI THAM VOWEL SIGN MAI SAT
+ {0x1A63, 0x1A64, prSA, gcMc}, // [2] TAI THAM VOWEL SIGN AA..TAI THAM VOWEL SIGN TALL AA
+ {0x1A65, 0x1A6C, prSA, gcMn}, // [8] TAI THAM VOWEL SIGN I..TAI THAM VOWEL SIGN OA BELOW
+ {0x1A6D, 0x1A72, prSA, gcMc}, // [6] TAI THAM VOWEL SIGN OY..TAI THAM VOWEL SIGN THAM AI
+ {0x1A73, 0x1A7C, prSA, gcMn}, // [10] TAI THAM VOWEL SIGN OA ABOVE..TAI THAM SIGN KHUEN-LUE KARAN
+ {0x1A7F, 0x1A7F, prCM, gcMn}, // TAI THAM COMBINING CRYPTOGRAMMIC DOT
+ {0x1A80, 0x1A89, prNU, gcNd}, // [10] TAI THAM HORA DIGIT ZERO..TAI THAM HORA DIGIT NINE
+ {0x1A90, 0x1A99, prNU, gcNd}, // [10] TAI THAM THAM DIGIT ZERO..TAI THAM THAM DIGIT NINE
+ {0x1AA0, 0x1AA6, prSA, gcPo}, // [7] TAI THAM SIGN WIANG..TAI THAM SIGN REVERSED ROTATED RANA
+ {0x1AA7, 0x1AA7, prSA, gcLm}, // TAI THAM SIGN MAI YAMOK
+ {0x1AA8, 0x1AAD, prSA, gcPo}, // [6] TAI THAM SIGN KAAN..TAI THAM SIGN CAANG
+ {0x1AB0, 0x1ABD, prCM, gcMn}, // [14] COMBINING DOUBLED CIRCUMFLEX ACCENT..COMBINING PARENTHESES BELOW
+ {0x1ABE, 0x1ABE, prCM, gcMe}, // COMBINING PARENTHESES OVERLAY
+ {0x1ABF, 0x1ACE, prCM, gcMn}, // [16] COMBINING LATIN SMALL LETTER W BELOW..COMBINING LATIN SMALL LETTER INSULAR T
+ {0x1B00, 0x1B03, prCM, gcMn}, // [4] BALINESE SIGN ULU RICEM..BALINESE SIGN SURANG
+ {0x1B04, 0x1B04, prCM, gcMc}, // BALINESE SIGN BISAH
+ {0x1B05, 0x1B33, prAL, gcLo}, // [47] BALINESE LETTER AKARA..BALINESE LETTER HA
+ {0x1B34, 0x1B34, prCM, gcMn}, // BALINESE SIGN REREKAN
+ {0x1B35, 0x1B35, prCM, gcMc}, // BALINESE VOWEL SIGN TEDUNG
+ {0x1B36, 0x1B3A, prCM, gcMn}, // [5] BALINESE VOWEL SIGN ULU..BALINESE VOWEL SIGN RA REPA
+ {0x1B3B, 0x1B3B, prCM, gcMc}, // BALINESE VOWEL SIGN RA REPA TEDUNG
+ {0x1B3C, 0x1B3C, prCM, gcMn}, // BALINESE VOWEL SIGN LA LENGA
+ {0x1B3D, 0x1B41, prCM, gcMc}, // [5] BALINESE VOWEL SIGN LA LENGA TEDUNG..BALINESE VOWEL SIGN TALING REPA TEDUNG
+ {0x1B42, 0x1B42, prCM, gcMn}, // BALINESE VOWEL SIGN PEPET
+ {0x1B43, 0x1B44, prCM, gcMc}, // [2] BALINESE VOWEL SIGN PEPET TEDUNG..BALINESE ADEG ADEG
+ {0x1B45, 0x1B4C, prAL, gcLo}, // [8] BALINESE LETTER KAF SASAK..BALINESE LETTER ARCHAIC JNYA
+ {0x1B50, 0x1B59, prNU, gcNd}, // [10] BALINESE DIGIT ZERO..BALINESE DIGIT NINE
+ {0x1B5A, 0x1B5B, prBA, gcPo}, // [2] BALINESE PANTI..BALINESE PAMADA
+ {0x1B5C, 0x1B5C, prAL, gcPo}, // BALINESE WINDU
+ {0x1B5D, 0x1B60, prBA, gcPo}, // [4] BALINESE CARIK PAMUNGKAH..BALINESE PAMENENG
+ {0x1B61, 0x1B6A, prAL, gcSo}, // [10] BALINESE MUSICAL SYMBOL DONG..BALINESE MUSICAL SYMBOL DANG GEDE
+ {0x1B6B, 0x1B73, prCM, gcMn}, // [9] BALINESE MUSICAL SYMBOL COMBINING TEGEH..BALINESE MUSICAL SYMBOL COMBINING GONG
+ {0x1B74, 0x1B7C, prAL, gcSo}, // [9] BALINESE MUSICAL SYMBOL RIGHT-HAND OPEN DUG..BALINESE MUSICAL SYMBOL LEFT-HAND OPEN PING
+ {0x1B7D, 0x1B7E, prBA, gcPo}, // [2] BALINESE PANTI LANTANG..BALINESE PAMADA LANTANG
+ {0x1B80, 0x1B81, prCM, gcMn}, // [2] SUNDANESE SIGN PANYECEK..SUNDANESE SIGN PANGLAYAR
+ {0x1B82, 0x1B82, prCM, gcMc}, // SUNDANESE SIGN PANGWISAD
+ {0x1B83, 0x1BA0, prAL, gcLo}, // [30] SUNDANESE LETTER A..SUNDANESE LETTER HA
+ {0x1BA1, 0x1BA1, prCM, gcMc}, // SUNDANESE CONSONANT SIGN PAMINGKAL
+ {0x1BA2, 0x1BA5, prCM, gcMn}, // [4] SUNDANESE CONSONANT SIGN PANYAKRA..SUNDANESE VOWEL SIGN PANYUKU
+ {0x1BA6, 0x1BA7, prCM, gcMc}, // [2] SUNDANESE VOWEL SIGN PANAELAENG..SUNDANESE VOWEL SIGN PANOLONG
+ {0x1BA8, 0x1BA9, prCM, gcMn}, // [2] SUNDANESE VOWEL SIGN PAMEPET..SUNDANESE VOWEL SIGN PANEULEUNG
+ {0x1BAA, 0x1BAA, prCM, gcMc}, // SUNDANESE SIGN PAMAAEH
+ {0x1BAB, 0x1BAD, prCM, gcMn}, // [3] SUNDANESE SIGN VIRAMA..SUNDANESE CONSONANT SIGN PASANGAN WA
+ {0x1BAE, 0x1BAF, prAL, gcLo}, // [2] SUNDANESE LETTER KHA..SUNDANESE LETTER SYA
+ {0x1BB0, 0x1BB9, prNU, gcNd}, // [10] SUNDANESE DIGIT ZERO..SUNDANESE DIGIT NINE
+ {0x1BBA, 0x1BBF, prAL, gcLo}, // [6] SUNDANESE AVAGRAHA..SUNDANESE LETTER FINAL M
+ {0x1BC0, 0x1BE5, prAL, gcLo}, // [38] BATAK LETTER A..BATAK LETTER U
+ {0x1BE6, 0x1BE6, prCM, gcMn}, // BATAK SIGN TOMPI
+ {0x1BE7, 0x1BE7, prCM, gcMc}, // BATAK VOWEL SIGN E
+ {0x1BE8, 0x1BE9, prCM, gcMn}, // [2] BATAK VOWEL SIGN PAKPAK E..BATAK VOWEL SIGN EE
+ {0x1BEA, 0x1BEC, prCM, gcMc}, // [3] BATAK VOWEL SIGN I..BATAK VOWEL SIGN O
+ {0x1BED, 0x1BED, prCM, gcMn}, // BATAK VOWEL SIGN KARO O
+ {0x1BEE, 0x1BEE, prCM, gcMc}, // BATAK VOWEL SIGN U
+ {0x1BEF, 0x1BF1, prCM, gcMn}, // [3] BATAK VOWEL SIGN U FOR SIMALUNGUN SA..BATAK CONSONANT SIGN H
+ {0x1BF2, 0x1BF3, prCM, gcMc}, // [2] BATAK PANGOLAT..BATAK PANONGONAN
+ {0x1BFC, 0x1BFF, prAL, gcPo}, // [4] BATAK SYMBOL BINDU NA METEK..BATAK SYMBOL BINDU PANGOLAT
+ {0x1C00, 0x1C23, prAL, gcLo}, // [36] LEPCHA LETTER KA..LEPCHA LETTER A
+ {0x1C24, 0x1C2B, prCM, gcMc}, // [8] LEPCHA SUBJOINED LETTER YA..LEPCHA VOWEL SIGN UU
+ {0x1C2C, 0x1C33, prCM, gcMn}, // [8] LEPCHA VOWEL SIGN E..LEPCHA CONSONANT SIGN T
+ {0x1C34, 0x1C35, prCM, gcMc}, // [2] LEPCHA CONSONANT SIGN NYIN-DO..LEPCHA CONSONANT SIGN KANG
+ {0x1C36, 0x1C37, prCM, gcMn}, // [2] LEPCHA SIGN RAN..LEPCHA SIGN NUKTA
+ {0x1C3B, 0x1C3F, prBA, gcPo}, // [5] LEPCHA PUNCTUATION TA-ROL..LEPCHA PUNCTUATION TSHOOK
+ {0x1C40, 0x1C49, prNU, gcNd}, // [10] LEPCHA DIGIT ZERO..LEPCHA DIGIT NINE
+ {0x1C4D, 0x1C4F, prAL, gcLo}, // [3] LEPCHA LETTER TTA..LEPCHA LETTER DDA
+ {0x1C50, 0x1C59, prNU, gcNd}, // [10] OL CHIKI DIGIT ZERO..OL CHIKI DIGIT NINE
+ {0x1C5A, 0x1C77, prAL, gcLo}, // [30] OL CHIKI LETTER LA..OL CHIKI LETTER OH
+ {0x1C78, 0x1C7D, prAL, gcLm}, // [6] OL CHIKI MU TTUDDAG..OL CHIKI AHAD
+ {0x1C7E, 0x1C7F, prBA, gcPo}, // [2] OL CHIKI PUNCTUATION MUCAAD..OL CHIKI PUNCTUATION DOUBLE MUCAAD
+ {0x1C80, 0x1C88, prAL, gcLl}, // [9] CYRILLIC SMALL LETTER ROUNDED VE..CYRILLIC SMALL LETTER UNBLENDED UK
+ {0x1C90, 0x1CBA, prAL, gcLu}, // [43] GEORGIAN MTAVRULI CAPITAL LETTER AN..GEORGIAN MTAVRULI CAPITAL LETTER AIN
+ {0x1CBD, 0x1CBF, prAL, gcLu}, // [3] GEORGIAN MTAVRULI CAPITAL LETTER AEN..GEORGIAN MTAVRULI CAPITAL LETTER LABIAL SIGN
+ {0x1CC0, 0x1CC7, prAL, gcPo}, // [8] SUNDANESE PUNCTUATION BINDU SURYA..SUNDANESE PUNCTUATION BINDU BA SATANGA
+ {0x1CD0, 0x1CD2, prCM, gcMn}, // [3] VEDIC TONE KARSHANA..VEDIC TONE PRENKHA
+ {0x1CD3, 0x1CD3, prAL, gcPo}, // VEDIC SIGN NIHSHVASA
+ {0x1CD4, 0x1CE0, prCM, gcMn}, // [13] VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC TONE RIGVEDIC KASHMIRI INDEPENDENT SVARITA
+ {0x1CE1, 0x1CE1, prCM, gcMc}, // VEDIC TONE ATHARVAVEDIC INDEPENDENT SVARITA
+ {0x1CE2, 0x1CE8, prCM, gcMn}, // [7] VEDIC SIGN VISARGA SVARITA..VEDIC SIGN VISARGA ANUDATTA WITH TAIL
+ {0x1CE9, 0x1CEC, prAL, gcLo}, // [4] VEDIC SIGN ANUSVARA ANTARGOMUKHA..VEDIC SIGN ANUSVARA VAMAGOMUKHA WITH TAIL
+ {0x1CED, 0x1CED, prCM, gcMn}, // VEDIC SIGN TIRYAK
+ {0x1CEE, 0x1CF3, prAL, gcLo}, // [6] VEDIC SIGN HEXIFORM LONG ANUSVARA..VEDIC SIGN ROTATED ARDHAVISARGA
+ {0x1CF4, 0x1CF4, prCM, gcMn}, // VEDIC TONE CANDRA ABOVE
+ {0x1CF5, 0x1CF6, prAL, gcLo}, // [2] VEDIC SIGN JIHVAMULIYA..VEDIC SIGN UPADHMANIYA
+ {0x1CF7, 0x1CF7, prCM, gcMc}, // VEDIC SIGN ATIKRAMA
+ {0x1CF8, 0x1CF9, prCM, gcMn}, // [2] VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE
+ {0x1CFA, 0x1CFA, prAL, gcLo}, // VEDIC SIGN DOUBLE ANUSVARA ANTARGOMUKHA
+ {0x1D00, 0x1D2B, prAL, gcLl}, // [44] LATIN LETTER SMALL CAPITAL A..CYRILLIC LETTER SMALL CAPITAL EL
+ {0x1D2C, 0x1D6A, prAL, gcLm}, // [63] MODIFIER LETTER CAPITAL A..GREEK SUBSCRIPT SMALL LETTER CHI
+ {0x1D6B, 0x1D77, prAL, gcLl}, // [13] LATIN SMALL LETTER UE..LATIN SMALL LETTER TURNED G
+ {0x1D78, 0x1D78, prAL, gcLm}, // MODIFIER LETTER CYRILLIC EN
+ {0x1D79, 0x1D7F, prAL, gcLl}, // [7] LATIN SMALL LETTER INSULAR G..LATIN SMALL LETTER UPSILON WITH STROKE
+ {0x1D80, 0x1D9A, prAL, gcLl}, // [27] LATIN SMALL LETTER B WITH PALATAL HOOK..LATIN SMALL LETTER EZH WITH RETROFLEX HOOK
+ {0x1D9B, 0x1DBF, prAL, gcLm}, // [37] MODIFIER LETTER SMALL TURNED ALPHA..MODIFIER LETTER SMALL THETA
+ {0x1DC0, 0x1DCC, prCM, gcMn}, // [13] COMBINING DOTTED GRAVE ACCENT..COMBINING MACRON-BREVE
+ {0x1DCD, 0x1DCD, prGL, gcMn}, // COMBINING DOUBLE CIRCUMFLEX ABOVE
+ {0x1DCE, 0x1DFB, prCM, gcMn}, // [46] COMBINING OGONEK ABOVE..COMBINING DELETION MARK
+ {0x1DFC, 0x1DFC, prGL, gcMn}, // COMBINING DOUBLE INVERTED BREVE BELOW
+ {0x1DFD, 0x1DFF, prCM, gcMn}, // [3] COMBINING ALMOST EQUAL TO BELOW..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW
+ {0x1E00, 0x1EFF, prAL, gcLC}, // [256] LATIN CAPITAL LETTER A WITH RING BELOW..LATIN SMALL LETTER Y WITH LOOP
+ {0x1F00, 0x1F15, prAL, gcLC}, // [22] GREEK SMALL LETTER ALPHA WITH PSILI..GREEK SMALL LETTER EPSILON WITH DASIA AND OXIA
+ {0x1F18, 0x1F1D, prAL, gcLu}, // [6] GREEK CAPITAL LETTER EPSILON WITH PSILI..GREEK CAPITAL LETTER EPSILON WITH DASIA AND OXIA
+ {0x1F20, 0x1F45, prAL, gcLC}, // [38] GREEK SMALL LETTER ETA WITH PSILI..GREEK SMALL LETTER OMICRON WITH DASIA AND OXIA
+ {0x1F48, 0x1F4D, prAL, gcLu}, // [6] GREEK CAPITAL LETTER OMICRON WITH PSILI..GREEK CAPITAL LETTER OMICRON WITH DASIA AND OXIA
+ {0x1F50, 0x1F57, prAL, gcLl}, // [8] GREEK SMALL LETTER UPSILON WITH PSILI..GREEK SMALL LETTER UPSILON WITH DASIA AND PERISPOMENI
+ {0x1F59, 0x1F59, prAL, gcLu}, // GREEK CAPITAL LETTER UPSILON WITH DASIA
+ {0x1F5B, 0x1F5B, prAL, gcLu}, // GREEK CAPITAL LETTER UPSILON WITH DASIA AND VARIA
+ {0x1F5D, 0x1F5D, prAL, gcLu}, // GREEK CAPITAL LETTER UPSILON WITH DASIA AND OXIA
+ {0x1F5F, 0x1F7D, prAL, gcLC}, // [31] GREEK CAPITAL LETTER UPSILON WITH DASIA AND PERISPOMENI..GREEK SMALL LETTER OMEGA WITH OXIA
+ {0x1F80, 0x1FB4, prAL, gcLC}, // [53] GREEK SMALL LETTER ALPHA WITH PSILI AND YPOGEGRAMMENI..GREEK SMALL LETTER ALPHA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FB6, 0x1FBC, prAL, gcLC}, // [7] GREEK SMALL LETTER ALPHA WITH PERISPOMENI..GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI
+ {0x1FBD, 0x1FBD, prAL, gcSk}, // GREEK KORONIS
+ {0x1FBE, 0x1FBE, prAL, gcLl}, // GREEK PROSGEGRAMMENI
+ {0x1FBF, 0x1FC1, prAL, gcSk}, // [3] GREEK PSILI..GREEK DIALYTIKA AND PERISPOMENI
+ {0x1FC2, 0x1FC4, prAL, gcLl}, // [3] GREEK SMALL LETTER ETA WITH VARIA AND YPOGEGRAMMENI..GREEK SMALL LETTER ETA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FC6, 0x1FCC, prAL, gcLC}, // [7] GREEK SMALL LETTER ETA WITH PERISPOMENI..GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI
+ {0x1FCD, 0x1FCF, prAL, gcSk}, // [3] GREEK PSILI AND VARIA..GREEK PSILI AND PERISPOMENI
+ {0x1FD0, 0x1FD3, prAL, gcLl}, // [4] GREEK SMALL LETTER IOTA WITH VRACHY..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
+ {0x1FD6, 0x1FDB, prAL, gcLC}, // [6] GREEK SMALL LETTER IOTA WITH PERISPOMENI..GREEK CAPITAL LETTER IOTA WITH OXIA
+ {0x1FDD, 0x1FDF, prAL, gcSk}, // [3] GREEK DASIA AND VARIA..GREEK DASIA AND PERISPOMENI
+ {0x1FE0, 0x1FEC, prAL, gcLC}, // [13] GREEK SMALL LETTER UPSILON WITH VRACHY..GREEK CAPITAL LETTER RHO WITH DASIA
+ {0x1FED, 0x1FEF, prAL, gcSk}, // [3] GREEK DIALYTIKA AND VARIA..GREEK VARIA
+ {0x1FF2, 0x1FF4, prAL, gcLl}, // [3] GREEK SMALL LETTER OMEGA WITH VARIA AND YPOGEGRAMMENI..GREEK SMALL LETTER OMEGA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FF6, 0x1FFC, prAL, gcLC}, // [7] GREEK SMALL LETTER OMEGA WITH PERISPOMENI..GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI
+ {0x1FFD, 0x1FFD, prBB, gcSk}, // GREEK OXIA
+ {0x1FFE, 0x1FFE, prAL, gcSk}, // GREEK DASIA
+ {0x2000, 0x2006, prBA, gcZs}, // [7] EN QUAD..SIX-PER-EM SPACE
+ {0x2007, 0x2007, prGL, gcZs}, // FIGURE SPACE
+ {0x2008, 0x200A, prBA, gcZs}, // [3] PUNCTUATION SPACE..HAIR SPACE
+ {0x200B, 0x200B, prZW, gcCf}, // ZERO WIDTH SPACE
+ {0x200C, 0x200C, prCM, gcCf}, // ZERO WIDTH NON-JOINER
+ {0x200D, 0x200D, prZWJ, gcCf}, // ZERO WIDTH JOINER
+ {0x200E, 0x200F, prCM, gcCf}, // [2] LEFT-TO-RIGHT MARK..RIGHT-TO-LEFT MARK
+ {0x2010, 0x2010, prBA, gcPd}, // HYPHEN
+ {0x2011, 0x2011, prGL, gcPd}, // NON-BREAKING HYPHEN
+ {0x2012, 0x2013, prBA, gcPd}, // [2] FIGURE DASH..EN DASH
+ {0x2014, 0x2014, prB2, gcPd}, // EM DASH
+ {0x2015, 0x2015, prAI, gcPd}, // HORIZONTAL BAR
+ {0x2016, 0x2016, prAI, gcPo}, // DOUBLE VERTICAL LINE
+ {0x2017, 0x2017, prAL, gcPo}, // DOUBLE LOW LINE
+ {0x2018, 0x2018, prQU, gcPi}, // LEFT SINGLE QUOTATION MARK
+ {0x2019, 0x2019, prQU, gcPf}, // RIGHT SINGLE QUOTATION MARK
+ {0x201A, 0x201A, prOP, gcPs}, // SINGLE LOW-9 QUOTATION MARK
+ {0x201B, 0x201C, prQU, gcPi}, // [2] SINGLE HIGH-REVERSED-9 QUOTATION MARK..LEFT DOUBLE QUOTATION MARK
+ {0x201D, 0x201D, prQU, gcPf}, // RIGHT DOUBLE QUOTATION MARK
+ {0x201E, 0x201E, prOP, gcPs}, // DOUBLE LOW-9 QUOTATION MARK
+ {0x201F, 0x201F, prQU, gcPi}, // DOUBLE HIGH-REVERSED-9 QUOTATION MARK
+ {0x2020, 0x2021, prAI, gcPo}, // [2] DAGGER..DOUBLE DAGGER
+ {0x2022, 0x2023, prAL, gcPo}, // [2] BULLET..TRIANGULAR BULLET
+ {0x2024, 0x2026, prIN, gcPo}, // [3] ONE DOT LEADER..HORIZONTAL ELLIPSIS
+ {0x2027, 0x2027, prBA, gcPo}, // HYPHENATION POINT
+ {0x2028, 0x2028, prBK, gcZl}, // LINE SEPARATOR
+ {0x2029, 0x2029, prBK, gcZp}, // PARAGRAPH SEPARATOR
+ {0x202A, 0x202E, prCM, gcCf}, // [5] LEFT-TO-RIGHT EMBEDDING..RIGHT-TO-LEFT OVERRIDE
+ {0x202F, 0x202F, prGL, gcZs}, // NARROW NO-BREAK SPACE
+ {0x2030, 0x2037, prPO, gcPo}, // [8] PER MILLE SIGN..REVERSED TRIPLE PRIME
+ {0x2038, 0x2038, prAL, gcPo}, // CARET
+ {0x2039, 0x2039, prQU, gcPi}, // SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+ {0x203A, 0x203A, prQU, gcPf}, // SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+ {0x203B, 0x203B, prAI, gcPo}, // REFERENCE MARK
+ {0x203C, 0x203D, prNS, gcPo}, // [2] DOUBLE EXCLAMATION MARK..INTERROBANG
+ {0x203E, 0x203E, prAL, gcPo}, // OVERLINE
+ {0x203F, 0x2040, prAL, gcPc}, // [2] UNDERTIE..CHARACTER TIE
+ {0x2041, 0x2043, prAL, gcPo}, // [3] CARET INSERTION POINT..HYPHEN BULLET
+ {0x2044, 0x2044, prIS, gcSm}, // FRACTION SLASH
+ {0x2045, 0x2045, prOP, gcPs}, // LEFT SQUARE BRACKET WITH QUILL
+ {0x2046, 0x2046, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH QUILL
+ {0x2047, 0x2049, prNS, gcPo}, // [3] DOUBLE QUESTION MARK..EXCLAMATION QUESTION MARK
+ {0x204A, 0x2051, prAL, gcPo}, // [8] TIRONIAN SIGN ET..TWO ASTERISKS ALIGNED VERTICALLY
+ {0x2052, 0x2052, prAL, gcSm}, // COMMERCIAL MINUS SIGN
+ {0x2053, 0x2053, prAL, gcPo}, // SWUNG DASH
+ {0x2054, 0x2054, prAL, gcPc}, // INVERTED UNDERTIE
+ {0x2055, 0x2055, prAL, gcPo}, // FLOWER PUNCTUATION MARK
+ {0x2056, 0x2056, prBA, gcPo}, // THREE DOT PUNCTUATION
+ {0x2057, 0x2057, prPO, gcPo}, // QUADRUPLE PRIME
+ {0x2058, 0x205B, prBA, gcPo}, // [4] FOUR DOT PUNCTUATION..FOUR DOT MARK
+ {0x205C, 0x205C, prAL, gcPo}, // DOTTED CROSS
+ {0x205D, 0x205E, prBA, gcPo}, // [2] TRICOLON..VERTICAL FOUR DOTS
+ {0x205F, 0x205F, prBA, gcZs}, // MEDIUM MATHEMATICAL SPACE
+ {0x2060, 0x2060, prWJ, gcCf}, // WORD JOINER
+ {0x2061, 0x2064, prAL, gcCf}, // [4] FUNCTION APPLICATION..INVISIBLE PLUS
+ {0x2066, 0x206F, prCM, gcCf}, // [10] LEFT-TO-RIGHT ISOLATE..NOMINAL DIGIT SHAPES
+ {0x2070, 0x2070, prAL, gcNo}, // SUPERSCRIPT ZERO
+ {0x2071, 0x2071, prAL, gcLm}, // SUPERSCRIPT LATIN SMALL LETTER I
+ {0x2074, 0x2074, prAI, gcNo}, // SUPERSCRIPT FOUR
+ {0x2075, 0x2079, prAL, gcNo}, // [5] SUPERSCRIPT FIVE..SUPERSCRIPT NINE
+ {0x207A, 0x207C, prAL, gcSm}, // [3] SUPERSCRIPT PLUS SIGN..SUPERSCRIPT EQUALS SIGN
+ {0x207D, 0x207D, prOP, gcPs}, // SUPERSCRIPT LEFT PARENTHESIS
+ {0x207E, 0x207E, prCL, gcPe}, // SUPERSCRIPT RIGHT PARENTHESIS
+ {0x207F, 0x207F, prAI, gcLm}, // SUPERSCRIPT LATIN SMALL LETTER N
+ {0x2080, 0x2080, prAL, gcNo}, // SUBSCRIPT ZERO
+ {0x2081, 0x2084, prAI, gcNo}, // [4] SUBSCRIPT ONE..SUBSCRIPT FOUR
+ {0x2085, 0x2089, prAL, gcNo}, // [5] SUBSCRIPT FIVE..SUBSCRIPT NINE
+ {0x208A, 0x208C, prAL, gcSm}, // [3] SUBSCRIPT PLUS SIGN..SUBSCRIPT EQUALS SIGN
+ {0x208D, 0x208D, prOP, gcPs}, // SUBSCRIPT LEFT PARENTHESIS
+ {0x208E, 0x208E, prCL, gcPe}, // SUBSCRIPT RIGHT PARENTHESIS
+ {0x2090, 0x209C, prAL, gcLm}, // [13] LATIN SUBSCRIPT SMALL LETTER A..LATIN SUBSCRIPT SMALL LETTER T
+ {0x20A0, 0x20A6, prPR, gcSc}, // [7] EURO-CURRENCY SIGN..NAIRA SIGN
+ {0x20A7, 0x20A7, prPO, gcSc}, // PESETA SIGN
+ {0x20A8, 0x20B5, prPR, gcSc}, // [14] RUPEE SIGN..CEDI SIGN
+ {0x20B6, 0x20B6, prPO, gcSc}, // LIVRE TOURNOIS SIGN
+ {0x20B7, 0x20BA, prPR, gcSc}, // [4] SPESMILO SIGN..TURKISH LIRA SIGN
+ {0x20BB, 0x20BB, prPO, gcSc}, // NORDIC MARK SIGN
+ {0x20BC, 0x20BD, prPR, gcSc}, // [2] MANAT SIGN..RUBLE SIGN
+ {0x20BE, 0x20BE, prPO, gcSc}, // LARI SIGN
+ {0x20BF, 0x20BF, prPR, gcSc}, // BITCOIN SIGN
+ {0x20C0, 0x20C0, prPO, gcSc}, // SOM SIGN
+ {0x20C1, 0x20CF, prPR, gcCn}, // [15] ..
+ {0x20D0, 0x20DC, prCM, gcMn}, // [13] COMBINING LEFT HARPOON ABOVE..COMBINING FOUR DOTS ABOVE
+ {0x20DD, 0x20E0, prCM, gcMe}, // [4] COMBINING ENCLOSING CIRCLE..COMBINING ENCLOSING CIRCLE BACKSLASH
+ {0x20E1, 0x20E1, prCM, gcMn}, // COMBINING LEFT RIGHT ARROW ABOVE
+ {0x20E2, 0x20E4, prCM, gcMe}, // [3] COMBINING ENCLOSING SCREEN..COMBINING ENCLOSING UPWARD POINTING TRIANGLE
+ {0x20E5, 0x20F0, prCM, gcMn}, // [12] COMBINING REVERSE SOLIDUS OVERLAY..COMBINING ASTERISK ABOVE
+ {0x2100, 0x2101, prAL, gcSo}, // [2] ACCOUNT OF..ADDRESSED TO THE SUBJECT
+ {0x2102, 0x2102, prAL, gcLu}, // DOUBLE-STRUCK CAPITAL C
+ {0x2103, 0x2103, prPO, gcSo}, // DEGREE CELSIUS
+ {0x2104, 0x2104, prAL, gcSo}, // CENTRE LINE SYMBOL
+ {0x2105, 0x2105, prAI, gcSo}, // CARE OF
+ {0x2106, 0x2106, prAL, gcSo}, // CADA UNA
+ {0x2107, 0x2107, prAL, gcLu}, // EULER CONSTANT
+ {0x2108, 0x2108, prAL, gcSo}, // SCRUPLE
+ {0x2109, 0x2109, prPO, gcSo}, // DEGREE FAHRENHEIT
+ {0x210A, 0x2112, prAL, gcLC}, // [9] SCRIPT SMALL G..SCRIPT CAPITAL L
+ {0x2113, 0x2113, prAI, gcLl}, // SCRIPT SMALL L
+ {0x2114, 0x2114, prAL, gcSo}, // L B BAR SYMBOL
+ {0x2115, 0x2115, prAL, gcLu}, // DOUBLE-STRUCK CAPITAL N
+ {0x2116, 0x2116, prPR, gcSo}, // NUMERO SIGN
+ {0x2117, 0x2117, prAL, gcSo}, // SOUND RECORDING COPYRIGHT
+ {0x2118, 0x2118, prAL, gcSm}, // SCRIPT CAPITAL P
+ {0x2119, 0x211D, prAL, gcLu}, // [5] DOUBLE-STRUCK CAPITAL P..DOUBLE-STRUCK CAPITAL R
+ {0x211E, 0x2120, prAL, gcSo}, // [3] PRESCRIPTION TAKE..SERVICE MARK
+ {0x2121, 0x2122, prAI, gcSo}, // [2] TELEPHONE SIGN..TRADE MARK SIGN
+ {0x2123, 0x2123, prAL, gcSo}, // VERSICLE
+ {0x2124, 0x2124, prAL, gcLu}, // DOUBLE-STRUCK CAPITAL Z
+ {0x2125, 0x2125, prAL, gcSo}, // OUNCE SIGN
+ {0x2126, 0x2126, prAL, gcLu}, // OHM SIGN
+ {0x2127, 0x2127, prAL, gcSo}, // INVERTED OHM SIGN
+ {0x2128, 0x2128, prAL, gcLu}, // BLACK-LETTER CAPITAL Z
+ {0x2129, 0x2129, prAL, gcSo}, // TURNED GREEK SMALL LETTER IOTA
+ {0x212A, 0x212A, prAL, gcLu}, // KELVIN SIGN
+ {0x212B, 0x212B, prAI, gcLu}, // ANGSTROM SIGN
+ {0x212C, 0x212D, prAL, gcLu}, // [2] SCRIPT CAPITAL B..BLACK-LETTER CAPITAL C
+ {0x212E, 0x212E, prAL, gcSo}, // ESTIMATED SYMBOL
+ {0x212F, 0x2134, prAL, gcLC}, // [6] SCRIPT SMALL E..SCRIPT SMALL O
+ {0x2135, 0x2138, prAL, gcLo}, // [4] ALEF SYMBOL..DALET SYMBOL
+ {0x2139, 0x2139, prAL, gcLl}, // INFORMATION SOURCE
+ {0x213A, 0x213B, prAL, gcSo}, // [2] ROTATED CAPITAL Q..FACSIMILE SIGN
+ {0x213C, 0x213F, prAL, gcLC}, // [4] DOUBLE-STRUCK SMALL PI..DOUBLE-STRUCK CAPITAL PI
+ {0x2140, 0x2144, prAL, gcSm}, // [5] DOUBLE-STRUCK N-ARY SUMMATION..TURNED SANS-SERIF CAPITAL Y
+ {0x2145, 0x2149, prAL, gcLC}, // [5] DOUBLE-STRUCK ITALIC CAPITAL D..DOUBLE-STRUCK ITALIC SMALL J
+ {0x214A, 0x214A, prAL, gcSo}, // PROPERTY LINE
+ {0x214B, 0x214B, prAL, gcSm}, // TURNED AMPERSAND
+ {0x214C, 0x214D, prAL, gcSo}, // [2] PER SIGN..AKTIESELSKAB
+ {0x214E, 0x214E, prAL, gcLl}, // TURNED SMALL F
+ {0x214F, 0x214F, prAL, gcSo}, // SYMBOL FOR SAMARITAN SOURCE
+ {0x2150, 0x2153, prAL, gcNo}, // [4] VULGAR FRACTION ONE SEVENTH..VULGAR FRACTION ONE THIRD
+ {0x2154, 0x2155, prAI, gcNo}, // [2] VULGAR FRACTION TWO THIRDS..VULGAR FRACTION ONE FIFTH
+ {0x2156, 0x215A, prAL, gcNo}, // [5] VULGAR FRACTION TWO FIFTHS..VULGAR FRACTION FIVE SIXTHS
+ {0x215B, 0x215B, prAI, gcNo}, // VULGAR FRACTION ONE EIGHTH
+ {0x215C, 0x215D, prAL, gcNo}, // [2] VULGAR FRACTION THREE EIGHTHS..VULGAR FRACTION FIVE EIGHTHS
+ {0x215E, 0x215E, prAI, gcNo}, // VULGAR FRACTION SEVEN EIGHTHS
+ {0x215F, 0x215F, prAL, gcNo}, // FRACTION NUMERATOR ONE
+ {0x2160, 0x216B, prAI, gcNl}, // [12] ROMAN NUMERAL ONE..ROMAN NUMERAL TWELVE
+ {0x216C, 0x216F, prAL, gcNl}, // [4] ROMAN NUMERAL FIFTY..ROMAN NUMERAL ONE THOUSAND
+ {0x2170, 0x2179, prAI, gcNl}, // [10] SMALL ROMAN NUMERAL ONE..SMALL ROMAN NUMERAL TEN
+ {0x217A, 0x2182, prAL, gcNl}, // [9] SMALL ROMAN NUMERAL ELEVEN..ROMAN NUMERAL TEN THOUSAND
+ {0x2183, 0x2184, prAL, gcLC}, // [2] ROMAN NUMERAL REVERSED ONE HUNDRED..LATIN SMALL LETTER REVERSED C
+ {0x2185, 0x2188, prAL, gcNl}, // [4] ROMAN NUMERAL SIX LATE FORM..ROMAN NUMERAL ONE HUNDRED THOUSAND
+ {0x2189, 0x2189, prAI, gcNo}, // VULGAR FRACTION ZERO THIRDS
+ {0x218A, 0x218B, prAL, gcSo}, // [2] TURNED DIGIT TWO..TURNED DIGIT THREE
+ {0x2190, 0x2194, prAI, gcSm}, // [5] LEFTWARDS ARROW..LEFT RIGHT ARROW
+ {0x2195, 0x2199, prAI, gcSo}, // [5] UP DOWN ARROW..SOUTH WEST ARROW
+ {0x219A, 0x219B, prAL, gcSm}, // [2] LEFTWARDS ARROW WITH STROKE..RIGHTWARDS ARROW WITH STROKE
+ {0x219C, 0x219F, prAL, gcSo}, // [4] LEFTWARDS WAVE ARROW..UPWARDS TWO HEADED ARROW
+ {0x21A0, 0x21A0, prAL, gcSm}, // RIGHTWARDS TWO HEADED ARROW
+ {0x21A1, 0x21A2, prAL, gcSo}, // [2] DOWNWARDS TWO HEADED ARROW..LEFTWARDS ARROW WITH TAIL
+ {0x21A3, 0x21A3, prAL, gcSm}, // RIGHTWARDS ARROW WITH TAIL
+ {0x21A4, 0x21A5, prAL, gcSo}, // [2] LEFTWARDS ARROW FROM BAR..UPWARDS ARROW FROM BAR
+ {0x21A6, 0x21A6, prAL, gcSm}, // RIGHTWARDS ARROW FROM BAR
+ {0x21A7, 0x21AD, prAL, gcSo}, // [7] DOWNWARDS ARROW FROM BAR..LEFT RIGHT WAVE ARROW
+ {0x21AE, 0x21AE, prAL, gcSm}, // LEFT RIGHT ARROW WITH STROKE
+ {0x21AF, 0x21CD, prAL, gcSo}, // [31] DOWNWARDS ZIGZAG ARROW..LEFTWARDS DOUBLE ARROW WITH STROKE
+ {0x21CE, 0x21CF, prAL, gcSm}, // [2] LEFT RIGHT DOUBLE ARROW WITH STROKE..RIGHTWARDS DOUBLE ARROW WITH STROKE
+ {0x21D0, 0x21D1, prAL, gcSo}, // [2] LEFTWARDS DOUBLE ARROW..UPWARDS DOUBLE ARROW
+ {0x21D2, 0x21D2, prAI, gcSm}, // RIGHTWARDS DOUBLE ARROW
+ {0x21D3, 0x21D3, prAL, gcSo}, // DOWNWARDS DOUBLE ARROW
+ {0x21D4, 0x21D4, prAI, gcSm}, // LEFT RIGHT DOUBLE ARROW
+ {0x21D5, 0x21F3, prAL, gcSo}, // [31] UP DOWN DOUBLE ARROW..UP DOWN WHITE ARROW
+ {0x21F4, 0x21FF, prAL, gcSm}, // [12] RIGHT ARROW WITH SMALL CIRCLE..LEFT RIGHT OPEN-HEADED ARROW
+ {0x2200, 0x2200, prAI, gcSm}, // FOR ALL
+ {0x2201, 0x2201, prAL, gcSm}, // COMPLEMENT
+ {0x2202, 0x2203, prAI, gcSm}, // [2] PARTIAL DIFFERENTIAL..THERE EXISTS
+ {0x2204, 0x2206, prAL, gcSm}, // [3] THERE DOES NOT EXIST..INCREMENT
+ {0x2207, 0x2208, prAI, gcSm}, // [2] NABLA..ELEMENT OF
+ {0x2209, 0x220A, prAL, gcSm}, // [2] NOT AN ELEMENT OF..SMALL ELEMENT OF
+ {0x220B, 0x220B, prAI, gcSm}, // CONTAINS AS MEMBER
+ {0x220C, 0x220E, prAL, gcSm}, // [3] DOES NOT CONTAIN AS MEMBER..END OF PROOF
+ {0x220F, 0x220F, prAI, gcSm}, // N-ARY PRODUCT
+ {0x2210, 0x2210, prAL, gcSm}, // N-ARY COPRODUCT
+ {0x2211, 0x2211, prAI, gcSm}, // N-ARY SUMMATION
+ {0x2212, 0x2213, prPR, gcSm}, // [2] MINUS SIGN..MINUS-OR-PLUS SIGN
+ {0x2214, 0x2214, prAL, gcSm}, // DOT PLUS
+ {0x2215, 0x2215, prAI, gcSm}, // DIVISION SLASH
+ {0x2216, 0x2219, prAL, gcSm}, // [4] SET MINUS..BULLET OPERATOR
+ {0x221A, 0x221A, prAI, gcSm}, // SQUARE ROOT
+ {0x221B, 0x221C, prAL, gcSm}, // [2] CUBE ROOT..FOURTH ROOT
+ {0x221D, 0x2220, prAI, gcSm}, // [4] PROPORTIONAL TO..ANGLE
+ {0x2221, 0x2222, prAL, gcSm}, // [2] MEASURED ANGLE..SPHERICAL ANGLE
+ {0x2223, 0x2223, prAI, gcSm}, // DIVIDES
+ {0x2224, 0x2224, prAL, gcSm}, // DOES NOT DIVIDE
+ {0x2225, 0x2225, prAI, gcSm}, // PARALLEL TO
+ {0x2226, 0x2226, prAL, gcSm}, // NOT PARALLEL TO
+ {0x2227, 0x222C, prAI, gcSm}, // [6] LOGICAL AND..DOUBLE INTEGRAL
+ {0x222D, 0x222D, prAL, gcSm}, // TRIPLE INTEGRAL
+ {0x222E, 0x222E, prAI, gcSm}, // CONTOUR INTEGRAL
+ {0x222F, 0x2233, prAL, gcSm}, // [5] SURFACE INTEGRAL..ANTICLOCKWISE CONTOUR INTEGRAL
+ {0x2234, 0x2237, prAI, gcSm}, // [4] THEREFORE..PROPORTION
+ {0x2238, 0x223B, prAL, gcSm}, // [4] DOT MINUS..HOMOTHETIC
+ {0x223C, 0x223D, prAI, gcSm}, // [2] TILDE OPERATOR..REVERSED TILDE
+ {0x223E, 0x2247, prAL, gcSm}, // [10] INVERTED LAZY S..NEITHER APPROXIMATELY NOR ACTUALLY EQUAL TO
+ {0x2248, 0x2248, prAI, gcSm}, // ALMOST EQUAL TO
+ {0x2249, 0x224B, prAL, gcSm}, // [3] NOT ALMOST EQUAL TO..TRIPLE TILDE
+ {0x224C, 0x224C, prAI, gcSm}, // ALL EQUAL TO
+ {0x224D, 0x2251, prAL, gcSm}, // [5] EQUIVALENT TO..GEOMETRICALLY EQUAL TO
+ {0x2252, 0x2252, prAI, gcSm}, // APPROXIMATELY EQUAL TO OR THE IMAGE OF
+ {0x2253, 0x225F, prAL, gcSm}, // [13] IMAGE OF OR APPROXIMATELY EQUAL TO..QUESTIONED EQUAL TO
+ {0x2260, 0x2261, prAI, gcSm}, // [2] NOT EQUAL TO..IDENTICAL TO
+ {0x2262, 0x2263, prAL, gcSm}, // [2] NOT IDENTICAL TO..STRICTLY EQUIVALENT TO
+ {0x2264, 0x2267, prAI, gcSm}, // [4] LESS-THAN OR EQUAL TO..GREATER-THAN OVER EQUAL TO
+ {0x2268, 0x2269, prAL, gcSm}, // [2] LESS-THAN BUT NOT EQUAL TO..GREATER-THAN BUT NOT EQUAL TO
+ {0x226A, 0x226B, prAI, gcSm}, // [2] MUCH LESS-THAN..MUCH GREATER-THAN
+ {0x226C, 0x226D, prAL, gcSm}, // [2] BETWEEN..NOT EQUIVALENT TO
+ {0x226E, 0x226F, prAI, gcSm}, // [2] NOT LESS-THAN..NOT GREATER-THAN
+ {0x2270, 0x2281, prAL, gcSm}, // [18] NEITHER LESS-THAN NOR EQUAL TO..DOES NOT SUCCEED
+ {0x2282, 0x2283, prAI, gcSm}, // [2] SUBSET OF..SUPERSET OF
+ {0x2284, 0x2285, prAL, gcSm}, // [2] NOT A SUBSET OF..NOT A SUPERSET OF
+ {0x2286, 0x2287, prAI, gcSm}, // [2] SUBSET OF OR EQUAL TO..SUPERSET OF OR EQUAL TO
+ {0x2288, 0x2294, prAL, gcSm}, // [13] NEITHER A SUBSET OF NOR EQUAL TO..SQUARE CUP
+ {0x2295, 0x2295, prAI, gcSm}, // CIRCLED PLUS
+ {0x2296, 0x2298, prAL, gcSm}, // [3] CIRCLED MINUS..CIRCLED DIVISION SLASH
+ {0x2299, 0x2299, prAI, gcSm}, // CIRCLED DOT OPERATOR
+ {0x229A, 0x22A4, prAL, gcSm}, // [11] CIRCLED RING OPERATOR..DOWN TACK
+ {0x22A5, 0x22A5, prAI, gcSm}, // UP TACK
+ {0x22A6, 0x22BE, prAL, gcSm}, // [25] ASSERTION..RIGHT ANGLE WITH ARC
+ {0x22BF, 0x22BF, prAI, gcSm}, // RIGHT TRIANGLE
+ {0x22C0, 0x22EE, prAL, gcSm}, // [47] N-ARY LOGICAL AND..VERTICAL ELLIPSIS
+ {0x22EF, 0x22EF, prIN, gcSm}, // MIDLINE HORIZONTAL ELLIPSIS
+ {0x22F0, 0x22FF, prAL, gcSm}, // [16] UP RIGHT DIAGONAL ELLIPSIS..Z NOTATION BAG MEMBERSHIP
+ {0x2300, 0x2307, prAL, gcSo}, // [8] DIAMETER SIGN..WAVY LINE
+ {0x2308, 0x2308, prOP, gcPs}, // LEFT CEILING
+ {0x2309, 0x2309, prCL, gcPe}, // RIGHT CEILING
+ {0x230A, 0x230A, prOP, gcPs}, // LEFT FLOOR
+ {0x230B, 0x230B, prCL, gcPe}, // RIGHT FLOOR
+ {0x230C, 0x2311, prAL, gcSo}, // [6] BOTTOM RIGHT CROP..SQUARE LOZENGE
+ {0x2312, 0x2312, prAI, gcSo}, // ARC
+ {0x2313, 0x2319, prAL, gcSo}, // [7] SEGMENT..TURNED NOT SIGN
+ {0x231A, 0x231B, prID, gcSo}, // [2] WATCH..HOURGLASS
+ {0x231C, 0x231F, prAL, gcSo}, // [4] TOP LEFT CORNER..BOTTOM RIGHT CORNER
+ {0x2320, 0x2321, prAL, gcSm}, // [2] TOP HALF INTEGRAL..BOTTOM HALF INTEGRAL
+ {0x2322, 0x2328, prAL, gcSo}, // [7] FROWN..KEYBOARD
+ {0x2329, 0x2329, prOP, gcPs}, // LEFT-POINTING ANGLE BRACKET
+ {0x232A, 0x232A, prCL, gcPe}, // RIGHT-POINTING ANGLE BRACKET
+ {0x232B, 0x237B, prAL, gcSo}, // [81] ERASE TO THE LEFT..NOT CHECK MARK
+ {0x237C, 0x237C, prAL, gcSm}, // RIGHT ANGLE WITH DOWNWARDS ZIGZAG ARROW
+ {0x237D, 0x239A, prAL, gcSo}, // [30] SHOULDERED OPEN BOX..CLEAR SCREEN SYMBOL
+ {0x239B, 0x23B3, prAL, gcSm}, // [25] LEFT PARENTHESIS UPPER HOOK..SUMMATION BOTTOM
+ {0x23B4, 0x23DB, prAL, gcSo}, // [40] TOP SQUARE BRACKET..FUSE
+ {0x23DC, 0x23E1, prAL, gcSm}, // [6] TOP PARENTHESIS..BOTTOM TORTOISE SHELL BRACKET
+ {0x23E2, 0x23EF, prAL, gcSo}, // [14] WHITE TRAPEZIUM..BLACK RIGHT-POINTING TRIANGLE WITH DOUBLE VERTICAL BAR
+ {0x23F0, 0x23F3, prID, gcSo}, // [4] ALARM CLOCK..HOURGLASS WITH FLOWING SAND
+ {0x23F4, 0x23FF, prAL, gcSo}, // [12] BLACK MEDIUM LEFT-POINTING TRIANGLE..OBSERVER EYE SYMBOL
+ {0x2400, 0x2426, prAL, gcSo}, // [39] SYMBOL FOR NULL..SYMBOL FOR SUBSTITUTE FORM TWO
+ {0x2440, 0x244A, prAL, gcSo}, // [11] OCR HOOK..OCR DOUBLE BACKSLASH
+ {0x2460, 0x249B, prAI, gcNo}, // [60] CIRCLED DIGIT ONE..NUMBER TWENTY FULL STOP
+ {0x249C, 0x24E9, prAI, gcSo}, // [78] PARENTHESIZED LATIN SMALL LETTER A..CIRCLED LATIN SMALL LETTER Z
+ {0x24EA, 0x24FE, prAI, gcNo}, // [21] CIRCLED DIGIT ZERO..DOUBLE CIRCLED NUMBER TEN
+ {0x24FF, 0x24FF, prAL, gcNo}, // NEGATIVE CIRCLED DIGIT ZERO
+ {0x2500, 0x254B, prAI, gcSo}, // [76] BOX DRAWINGS LIGHT HORIZONTAL..BOX DRAWINGS HEAVY VERTICAL AND HORIZONTAL
+ {0x254C, 0x254F, prAL, gcSo}, // [4] BOX DRAWINGS LIGHT DOUBLE DASH HORIZONTAL..BOX DRAWINGS HEAVY DOUBLE DASH VERTICAL
+ {0x2550, 0x2574, prAI, gcSo}, // [37] BOX DRAWINGS DOUBLE HORIZONTAL..BOX DRAWINGS LIGHT LEFT
+ {0x2575, 0x257F, prAL, gcSo}, // [11] BOX DRAWINGS LIGHT UP..BOX DRAWINGS HEAVY UP AND LIGHT DOWN
+ {0x2580, 0x258F, prAI, gcSo}, // [16] UPPER HALF BLOCK..LEFT ONE EIGHTH BLOCK
+ {0x2590, 0x2591, prAL, gcSo}, // [2] RIGHT HALF BLOCK..LIGHT SHADE
+ {0x2592, 0x2595, prAI, gcSo}, // [4] MEDIUM SHADE..RIGHT ONE EIGHTH BLOCK
+ {0x2596, 0x259F, prAL, gcSo}, // [10] QUADRANT LOWER LEFT..QUADRANT UPPER RIGHT AND LOWER LEFT AND LOWER RIGHT
+ {0x25A0, 0x25A1, prAI, gcSo}, // [2] BLACK SQUARE..WHITE SQUARE
+ {0x25A2, 0x25A2, prAL, gcSo}, // WHITE SQUARE WITH ROUNDED CORNERS
+ {0x25A3, 0x25A9, prAI, gcSo}, // [7] WHITE SQUARE CONTAINING BLACK SMALL SQUARE..SQUARE WITH DIAGONAL CROSSHATCH FILL
+ {0x25AA, 0x25B1, prAL, gcSo}, // [8] BLACK SMALL SQUARE..WHITE PARALLELOGRAM
+ {0x25B2, 0x25B3, prAI, gcSo}, // [2] BLACK UP-POINTING TRIANGLE..WHITE UP-POINTING TRIANGLE
+ {0x25B4, 0x25B5, prAL, gcSo}, // [2] BLACK UP-POINTING SMALL TRIANGLE..WHITE UP-POINTING SMALL TRIANGLE
+ {0x25B6, 0x25B6, prAI, gcSo}, // BLACK RIGHT-POINTING TRIANGLE
+ {0x25B7, 0x25B7, prAI, gcSm}, // WHITE RIGHT-POINTING TRIANGLE
+ {0x25B8, 0x25BB, prAL, gcSo}, // [4] BLACK RIGHT-POINTING SMALL TRIANGLE..WHITE RIGHT-POINTING POINTER
+ {0x25BC, 0x25BD, prAI, gcSo}, // [2] BLACK DOWN-POINTING TRIANGLE..WHITE DOWN-POINTING TRIANGLE
+ {0x25BE, 0x25BF, prAL, gcSo}, // [2] BLACK DOWN-POINTING SMALL TRIANGLE..WHITE DOWN-POINTING SMALL TRIANGLE
+ {0x25C0, 0x25C0, prAI, gcSo}, // BLACK LEFT-POINTING TRIANGLE
+ {0x25C1, 0x25C1, prAI, gcSm}, // WHITE LEFT-POINTING TRIANGLE
+ {0x25C2, 0x25C5, prAL, gcSo}, // [4] BLACK LEFT-POINTING SMALL TRIANGLE..WHITE LEFT-POINTING POINTER
+ {0x25C6, 0x25C8, prAI, gcSo}, // [3] BLACK DIAMOND..WHITE DIAMOND CONTAINING BLACK SMALL DIAMOND
+ {0x25C9, 0x25CA, prAL, gcSo}, // [2] FISHEYE..LOZENGE
+ {0x25CB, 0x25CB, prAI, gcSo}, // WHITE CIRCLE
+ {0x25CC, 0x25CD, prAL, gcSo}, // [2] DOTTED CIRCLE..CIRCLE WITH VERTICAL FILL
+ {0x25CE, 0x25D1, prAI, gcSo}, // [4] BULLSEYE..CIRCLE WITH RIGHT HALF BLACK
+ {0x25D2, 0x25E1, prAL, gcSo}, // [16] CIRCLE WITH LOWER HALF BLACK..LOWER HALF CIRCLE
+ {0x25E2, 0x25E5, prAI, gcSo}, // [4] BLACK LOWER RIGHT TRIANGLE..BLACK UPPER RIGHT TRIANGLE
+ {0x25E6, 0x25EE, prAL, gcSo}, // [9] WHITE BULLET..UP-POINTING TRIANGLE WITH RIGHT HALF BLACK
+ {0x25EF, 0x25EF, prAI, gcSo}, // LARGE CIRCLE
+ {0x25F0, 0x25F7, prAL, gcSo}, // [8] WHITE SQUARE WITH UPPER LEFT QUADRANT..WHITE CIRCLE WITH UPPER RIGHT QUADRANT
+ {0x25F8, 0x25FF, prAL, gcSm}, // [8] UPPER LEFT TRIANGLE..LOWER RIGHT TRIANGLE
+ {0x2600, 0x2603, prID, gcSo}, // [4] BLACK SUN WITH RAYS..SNOWMAN
+ {0x2604, 0x2604, prAL, gcSo}, // COMET
+ {0x2605, 0x2606, prAI, gcSo}, // [2] BLACK STAR..WHITE STAR
+ {0x2607, 0x2608, prAL, gcSo}, // [2] LIGHTNING..THUNDERSTORM
+ {0x2609, 0x2609, prAI, gcSo}, // SUN
+ {0x260A, 0x260D, prAL, gcSo}, // [4] ASCENDING NODE..OPPOSITION
+ {0x260E, 0x260F, prAI, gcSo}, // [2] BLACK TELEPHONE..WHITE TELEPHONE
+ {0x2610, 0x2613, prAL, gcSo}, // [4] BALLOT BOX..SALTIRE
+ {0x2614, 0x2615, prID, gcSo}, // [2] UMBRELLA WITH RAIN DROPS..HOT BEVERAGE
+ {0x2616, 0x2617, prAI, gcSo}, // [2] WHITE SHOGI PIECE..BLACK SHOGI PIECE
+ {0x2618, 0x2618, prID, gcSo}, // SHAMROCK
+ {0x2619, 0x2619, prAL, gcSo}, // REVERSED ROTATED FLORAL HEART BULLET
+ {0x261A, 0x261C, prID, gcSo}, // [3] BLACK LEFT POINTING INDEX..WHITE LEFT POINTING INDEX
+ {0x261D, 0x261D, prEB, gcSo}, // WHITE UP POINTING INDEX
+ {0x261E, 0x261F, prID, gcSo}, // [2] WHITE RIGHT POINTING INDEX..WHITE DOWN POINTING INDEX
+ {0x2620, 0x2638, prAL, gcSo}, // [25] SKULL AND CROSSBONES..WHEEL OF DHARMA
+ {0x2639, 0x263B, prID, gcSo}, // [3] WHITE FROWNING FACE..BLACK SMILING FACE
+ {0x263C, 0x263F, prAL, gcSo}, // [4] WHITE SUN WITH RAYS..MERCURY
+ {0x2640, 0x2640, prAI, gcSo}, // FEMALE SIGN
+ {0x2641, 0x2641, prAL, gcSo}, // EARTH
+ {0x2642, 0x2642, prAI, gcSo}, // MALE SIGN
+ {0x2643, 0x265F, prAL, gcSo}, // [29] JUPITER..BLACK CHESS PAWN
+ {0x2660, 0x2661, prAI, gcSo}, // [2] BLACK SPADE SUIT..WHITE HEART SUIT
+ {0x2662, 0x2662, prAL, gcSo}, // WHITE DIAMOND SUIT
+ {0x2663, 0x2665, prAI, gcSo}, // [3] BLACK CLUB SUIT..BLACK HEART SUIT
+ {0x2666, 0x2666, prAL, gcSo}, // BLACK DIAMOND SUIT
+ {0x2667, 0x2667, prAI, gcSo}, // WHITE CLUB SUIT
+ {0x2668, 0x2668, prID, gcSo}, // HOT SPRINGS
+ {0x2669, 0x266A, prAI, gcSo}, // [2] QUARTER NOTE..EIGHTH NOTE
+ {0x266B, 0x266B, prAL, gcSo}, // BEAMED EIGHTH NOTES
+ {0x266C, 0x266D, prAI, gcSo}, // [2] BEAMED SIXTEENTH NOTES..MUSIC FLAT SIGN
+ {0x266E, 0x266E, prAL, gcSo}, // MUSIC NATURAL SIGN
+ {0x266F, 0x266F, prAI, gcSm}, // MUSIC SHARP SIGN
+ {0x2670, 0x267E, prAL, gcSo}, // [15] WEST SYRIAC CROSS..PERMANENT PAPER SIGN
+ {0x267F, 0x267F, prID, gcSo}, // WHEELCHAIR SYMBOL
+ {0x2680, 0x269D, prAL, gcSo}, // [30] DIE FACE-1..OUTLINED WHITE STAR
+ {0x269E, 0x269F, prAI, gcSo}, // [2] THREE LINES CONVERGING RIGHT..THREE LINES CONVERGING LEFT
+ {0x26A0, 0x26BC, prAL, gcSo}, // [29] WARNING SIGN..SESQUIQUADRATE
+ {0x26BD, 0x26C8, prID, gcSo}, // [12] SOCCER BALL..THUNDER CLOUD AND RAIN
+ {0x26C9, 0x26CC, prAI, gcSo}, // [4] TURNED WHITE SHOGI PIECE..CROSSING LANES
+ {0x26CD, 0x26CD, prID, gcSo}, // DISABLED CAR
+ {0x26CE, 0x26CE, prAL, gcSo}, // OPHIUCHUS
+ {0x26CF, 0x26D1, prID, gcSo}, // [3] PICK..HELMET WITH WHITE CROSS
+ {0x26D2, 0x26D2, prAI, gcSo}, // CIRCLED CROSSING LANES
+ {0x26D3, 0x26D4, prID, gcSo}, // [2] CHAINS..NO ENTRY
+ {0x26D5, 0x26D7, prAI, gcSo}, // [3] ALTERNATE ONE-WAY LEFT WAY TRAFFIC..WHITE TWO-WAY LEFT WAY TRAFFIC
+ {0x26D8, 0x26D9, prID, gcSo}, // [2] BLACK LEFT LANE MERGE..WHITE LEFT LANE MERGE
+ {0x26DA, 0x26DB, prAI, gcSo}, // [2] DRIVE SLOW SIGN..HEAVY WHITE DOWN-POINTING TRIANGLE
+ {0x26DC, 0x26DC, prID, gcSo}, // LEFT CLOSED ENTRY
+ {0x26DD, 0x26DE, prAI, gcSo}, // [2] SQUARED SALTIRE..FALLING DIAGONAL IN WHITE CIRCLE IN BLACK SQUARE
+ {0x26DF, 0x26E1, prID, gcSo}, // [3] BLACK TRUCK..RESTRICTED LEFT ENTRY-2
+ {0x26E2, 0x26E2, prAL, gcSo}, // ASTRONOMICAL SYMBOL FOR URANUS
+ {0x26E3, 0x26E3, prAI, gcSo}, // HEAVY CIRCLE WITH STROKE AND TWO DOTS ABOVE
+ {0x26E4, 0x26E7, prAL, gcSo}, // [4] PENTAGRAM..INVERTED PENTAGRAM
+ {0x26E8, 0x26E9, prAI, gcSo}, // [2] BLACK CROSS ON SHIELD..SHINTO SHRINE
+ {0x26EA, 0x26EA, prID, gcSo}, // CHURCH
+ {0x26EB, 0x26F0, prAI, gcSo}, // [6] CASTLE..MOUNTAIN
+ {0x26F1, 0x26F5, prID, gcSo}, // [5] UMBRELLA ON GROUND..SAILBOAT
+ {0x26F6, 0x26F6, prAI, gcSo}, // SQUARE FOUR CORNERS
+ {0x26F7, 0x26F8, prID, gcSo}, // [2] SKIER..ICE SKATE
+ {0x26F9, 0x26F9, prEB, gcSo}, // PERSON WITH BALL
+ {0x26FA, 0x26FA, prID, gcSo}, // TENT
+ {0x26FB, 0x26FC, prAI, gcSo}, // [2] JAPANESE BANK SYMBOL..HEADSTONE GRAVEYARD SYMBOL
+ {0x26FD, 0x26FF, prID, gcSo}, // [3] FUEL PUMP..WHITE FLAG WITH HORIZONTAL MIDDLE BLACK STRIPE
+ {0x2700, 0x2704, prID, gcSo}, // [5] BLACK SAFETY SCISSORS..WHITE SCISSORS
+ {0x2705, 0x2707, prAL, gcSo}, // [3] WHITE HEAVY CHECK MARK..TAPE DRIVE
+ {0x2708, 0x2709, prID, gcSo}, // [2] AIRPLANE..ENVELOPE
+ {0x270A, 0x270D, prEB, gcSo}, // [4] RAISED FIST..WRITING HAND
+ {0x270E, 0x2756, prAL, gcSo}, // [73] LOWER RIGHT PENCIL..BLACK DIAMOND MINUS WHITE X
+ {0x2757, 0x2757, prAI, gcSo}, // HEAVY EXCLAMATION MARK SYMBOL
+ {0x2758, 0x275A, prAL, gcSo}, // [3] LIGHT VERTICAL BAR..HEAVY VERTICAL BAR
+ {0x275B, 0x2760, prQU, gcSo}, // [6] HEAVY SINGLE TURNED COMMA QUOTATION MARK ORNAMENT..HEAVY LOW DOUBLE COMMA QUOTATION MARK ORNAMENT
+ {0x2761, 0x2761, prAL, gcSo}, // CURVED STEM PARAGRAPH SIGN ORNAMENT
+ {0x2762, 0x2763, prEX, gcSo}, // [2] HEAVY EXCLAMATION MARK ORNAMENT..HEAVY HEART EXCLAMATION MARK ORNAMENT
+ {0x2764, 0x2764, prID, gcSo}, // HEAVY BLACK HEART
+ {0x2765, 0x2767, prAL, gcSo}, // [3] ROTATED HEAVY BLACK HEART BULLET..ROTATED FLORAL HEART BULLET
+ {0x2768, 0x2768, prOP, gcPs}, // MEDIUM LEFT PARENTHESIS ORNAMENT
+ {0x2769, 0x2769, prCL, gcPe}, // MEDIUM RIGHT PARENTHESIS ORNAMENT
+ {0x276A, 0x276A, prOP, gcPs}, // MEDIUM FLATTENED LEFT PARENTHESIS ORNAMENT
+ {0x276B, 0x276B, prCL, gcPe}, // MEDIUM FLATTENED RIGHT PARENTHESIS ORNAMENT
+ {0x276C, 0x276C, prOP, gcPs}, // MEDIUM LEFT-POINTING ANGLE BRACKET ORNAMENT
+ {0x276D, 0x276D, prCL, gcPe}, // MEDIUM RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ {0x276E, 0x276E, prOP, gcPs}, // HEAVY LEFT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ {0x276F, 0x276F, prCL, gcPe}, // HEAVY RIGHT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ {0x2770, 0x2770, prOP, gcPs}, // HEAVY LEFT-POINTING ANGLE BRACKET ORNAMENT
+ {0x2771, 0x2771, prCL, gcPe}, // HEAVY RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ {0x2772, 0x2772, prOP, gcPs}, // LIGHT LEFT TORTOISE SHELL BRACKET ORNAMENT
+ {0x2773, 0x2773, prCL, gcPe}, // LIGHT RIGHT TORTOISE SHELL BRACKET ORNAMENT
+ {0x2774, 0x2774, prOP, gcPs}, // MEDIUM LEFT CURLY BRACKET ORNAMENT
+ {0x2775, 0x2775, prCL, gcPe}, // MEDIUM RIGHT CURLY BRACKET ORNAMENT
+ {0x2776, 0x2793, prAI, gcNo}, // [30] DINGBAT NEGATIVE CIRCLED DIGIT ONE..DINGBAT NEGATIVE CIRCLED SANS-SERIF NUMBER TEN
+ {0x2794, 0x27BF, prAL, gcSo}, // [44] HEAVY WIDE-HEADED RIGHTWARDS ARROW..DOUBLE CURLY LOOP
+ {0x27C0, 0x27C4, prAL, gcSm}, // [5] THREE DIMENSIONAL ANGLE..OPEN SUPERSET
+ {0x27C5, 0x27C5, prOP, gcPs}, // LEFT S-SHAPED BAG DELIMITER
+ {0x27C6, 0x27C6, prCL, gcPe}, // RIGHT S-SHAPED BAG DELIMITER
+ {0x27C7, 0x27E5, prAL, gcSm}, // [31] OR WITH DOT INSIDE..WHITE SQUARE WITH RIGHTWARDS TICK
+ {0x27E6, 0x27E6, prOP, gcPs}, // MATHEMATICAL LEFT WHITE SQUARE BRACKET
+ {0x27E7, 0x27E7, prCL, gcPe}, // MATHEMATICAL RIGHT WHITE SQUARE BRACKET
+ {0x27E8, 0x27E8, prOP, gcPs}, // MATHEMATICAL LEFT ANGLE BRACKET
+ {0x27E9, 0x27E9, prCL, gcPe}, // MATHEMATICAL RIGHT ANGLE BRACKET
+ {0x27EA, 0x27EA, prOP, gcPs}, // MATHEMATICAL LEFT DOUBLE ANGLE BRACKET
+ {0x27EB, 0x27EB, prCL, gcPe}, // MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET
+ {0x27EC, 0x27EC, prOP, gcPs}, // MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET
+ {0x27ED, 0x27ED, prCL, gcPe}, // MATHEMATICAL RIGHT WHITE TORTOISE SHELL BRACKET
+ {0x27EE, 0x27EE, prOP, gcPs}, // MATHEMATICAL LEFT FLATTENED PARENTHESIS
+ {0x27EF, 0x27EF, prCL, gcPe}, // MATHEMATICAL RIGHT FLATTENED PARENTHESIS
+ {0x27F0, 0x27FF, prAL, gcSm}, // [16] UPWARDS QUADRUPLE ARROW..LONG RIGHTWARDS SQUIGGLE ARROW
+ {0x2800, 0x28FF, prAL, gcSo}, // [256] BRAILLE PATTERN BLANK..BRAILLE PATTERN DOTS-12345678
+ {0x2900, 0x297F, prAL, gcSm}, // [128] RIGHTWARDS TWO-HEADED ARROW WITH VERTICAL STROKE..DOWN FISH TAIL
+ {0x2980, 0x2982, prAL, gcSm}, // [3] TRIPLE VERTICAL BAR DELIMITER..Z NOTATION TYPE COLON
+ {0x2983, 0x2983, prOP, gcPs}, // LEFT WHITE CURLY BRACKET
+ {0x2984, 0x2984, prCL, gcPe}, // RIGHT WHITE CURLY BRACKET
+ {0x2985, 0x2985, prOP, gcPs}, // LEFT WHITE PARENTHESIS
+ {0x2986, 0x2986, prCL, gcPe}, // RIGHT WHITE PARENTHESIS
+ {0x2987, 0x2987, prOP, gcPs}, // Z NOTATION LEFT IMAGE BRACKET
+ {0x2988, 0x2988, prCL, gcPe}, // Z NOTATION RIGHT IMAGE BRACKET
+ {0x2989, 0x2989, prOP, gcPs}, // Z NOTATION LEFT BINDING BRACKET
+ {0x298A, 0x298A, prCL, gcPe}, // Z NOTATION RIGHT BINDING BRACKET
+ {0x298B, 0x298B, prOP, gcPs}, // LEFT SQUARE BRACKET WITH UNDERBAR
+ {0x298C, 0x298C, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH UNDERBAR
+ {0x298D, 0x298D, prOP, gcPs}, // LEFT SQUARE BRACKET WITH TICK IN TOP CORNER
+ {0x298E, 0x298E, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
+ {0x298F, 0x298F, prOP, gcPs}, // LEFT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
+ {0x2990, 0x2990, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH TICK IN TOP CORNER
+ {0x2991, 0x2991, prOP, gcPs}, // LEFT ANGLE BRACKET WITH DOT
+ {0x2992, 0x2992, prCL, gcPe}, // RIGHT ANGLE BRACKET WITH DOT
+ {0x2993, 0x2993, prOP, gcPs}, // LEFT ARC LESS-THAN BRACKET
+ {0x2994, 0x2994, prCL, gcPe}, // RIGHT ARC GREATER-THAN BRACKET
+ {0x2995, 0x2995, prOP, gcPs}, // DOUBLE LEFT ARC GREATER-THAN BRACKET
+ {0x2996, 0x2996, prCL, gcPe}, // DOUBLE RIGHT ARC LESS-THAN BRACKET
+ {0x2997, 0x2997, prOP, gcPs}, // LEFT BLACK TORTOISE SHELL BRACKET
+ {0x2998, 0x2998, prCL, gcPe}, // RIGHT BLACK TORTOISE SHELL BRACKET
+ {0x2999, 0x29D7, prAL, gcSm}, // [63] DOTTED FENCE..BLACK HOURGLASS
+ {0x29D8, 0x29D8, prOP, gcPs}, // LEFT WIGGLY FENCE
+ {0x29D9, 0x29D9, prCL, gcPe}, // RIGHT WIGGLY FENCE
+ {0x29DA, 0x29DA, prOP, gcPs}, // LEFT DOUBLE WIGGLY FENCE
+ {0x29DB, 0x29DB, prCL, gcPe}, // RIGHT DOUBLE WIGGLY FENCE
+ {0x29DC, 0x29FB, prAL, gcSm}, // [32] INCOMPLETE INFINITY..TRIPLE PLUS
+ {0x29FC, 0x29FC, prOP, gcPs}, // LEFT-POINTING CURVED ANGLE BRACKET
+ {0x29FD, 0x29FD, prCL, gcPe}, // RIGHT-POINTING CURVED ANGLE BRACKET
+ {0x29FE, 0x29FF, prAL, gcSm}, // [2] TINY..MINY
+ {0x2A00, 0x2AFF, prAL, gcSm}, // [256] N-ARY CIRCLED DOT OPERATOR..N-ARY WHITE VERTICAL BAR
+ {0x2B00, 0x2B2F, prAL, gcSo}, // [48] NORTH EAST WHITE ARROW..WHITE VERTICAL ELLIPSE
+ {0x2B30, 0x2B44, prAL, gcSm}, // [21] LEFT ARROW WITH SMALL CIRCLE..RIGHTWARDS ARROW THROUGH SUPERSET
+ {0x2B45, 0x2B46, prAL, gcSo}, // [2] LEFTWARDS QUADRUPLE ARROW..RIGHTWARDS QUADRUPLE ARROW
+ {0x2B47, 0x2B4C, prAL, gcSm}, // [6] REVERSE TILDE OPERATOR ABOVE RIGHTWARDS ARROW..RIGHTWARDS ARROW ABOVE REVERSE TILDE OPERATOR
+ {0x2B4D, 0x2B54, prAL, gcSo}, // [8] DOWNWARDS TRIANGLE-HEADED ZIGZAG ARROW..WHITE RIGHT-POINTING PENTAGON
+ {0x2B55, 0x2B59, prAI, gcSo}, // [5] HEAVY LARGE CIRCLE..HEAVY CIRCLED SALTIRE
+ {0x2B5A, 0x2B73, prAL, gcSo}, // [26] SLANTED NORTH ARROW WITH HOOKED HEAD..DOWNWARDS TRIANGLE-HEADED ARROW TO BAR
+ {0x2B76, 0x2B95, prAL, gcSo}, // [32] NORTH WEST TRIANGLE-HEADED ARROW TO BAR..RIGHTWARDS BLACK ARROW
+ {0x2B97, 0x2BFF, prAL, gcSo}, // [105] SYMBOL FOR TYPE A ELECTRONICS..HELLSCHREIBER PAUSE SYMBOL
+ {0x2C00, 0x2C5F, prAL, gcLC}, // [96] GLAGOLITIC CAPITAL LETTER AZU..GLAGOLITIC SMALL LETTER CAUDATE CHRIVI
+ {0x2C60, 0x2C7B, prAL, gcLC}, // [28] LATIN CAPITAL LETTER L WITH DOUBLE BAR..LATIN LETTER SMALL CAPITAL TURNED E
+ {0x2C7C, 0x2C7D, prAL, gcLm}, // [2] LATIN SUBSCRIPT SMALL LETTER J..MODIFIER LETTER CAPITAL V
+ {0x2C7E, 0x2C7F, prAL, gcLu}, // [2] LATIN CAPITAL LETTER S WITH SWASH TAIL..LATIN CAPITAL LETTER Z WITH SWASH TAIL
+ {0x2C80, 0x2CE4, prAL, gcLC}, // [101] COPTIC CAPITAL LETTER ALFA..COPTIC SYMBOL KAI
+ {0x2CE5, 0x2CEA, prAL, gcSo}, // [6] COPTIC SYMBOL MI RO..COPTIC SYMBOL SHIMA SIMA
+ {0x2CEB, 0x2CEE, prAL, gcLC}, // [4] COPTIC CAPITAL LETTER CRYPTOGRAMMIC SHEI..COPTIC SMALL LETTER CRYPTOGRAMMIC GANGIA
+ {0x2CEF, 0x2CF1, prCM, gcMn}, // [3] COPTIC COMBINING NI ABOVE..COPTIC COMBINING SPIRITUS LENIS
+ {0x2CF2, 0x2CF3, prAL, gcLC}, // [2] COPTIC CAPITAL LETTER BOHAIRIC KHEI..COPTIC SMALL LETTER BOHAIRIC KHEI
+ {0x2CF9, 0x2CF9, prEX, gcPo}, // COPTIC OLD NUBIAN FULL STOP
+ {0x2CFA, 0x2CFC, prBA, gcPo}, // [3] COPTIC OLD NUBIAN DIRECT QUESTION MARK..COPTIC OLD NUBIAN VERSE DIVIDER
+ {0x2CFD, 0x2CFD, prAL, gcNo}, // COPTIC FRACTION ONE HALF
+ {0x2CFE, 0x2CFE, prEX, gcPo}, // COPTIC FULL STOP
+ {0x2CFF, 0x2CFF, prBA, gcPo}, // COPTIC MORPHOLOGICAL DIVIDER
+ {0x2D00, 0x2D25, prAL, gcLl}, // [38] GEORGIAN SMALL LETTER AN..GEORGIAN SMALL LETTER HOE
+ {0x2D27, 0x2D27, prAL, gcLl}, // GEORGIAN SMALL LETTER YN
+ {0x2D2D, 0x2D2D, prAL, gcLl}, // GEORGIAN SMALL LETTER AEN
+ {0x2D30, 0x2D67, prAL, gcLo}, // [56] TIFINAGH LETTER YA..TIFINAGH LETTER YO
+ {0x2D6F, 0x2D6F, prAL, gcLm}, // TIFINAGH MODIFIER LETTER LABIALIZATION MARK
+ {0x2D70, 0x2D70, prBA, gcPo}, // TIFINAGH SEPARATOR MARK
+ {0x2D7F, 0x2D7F, prCM, gcMn}, // TIFINAGH CONSONANT JOINER
+ {0x2D80, 0x2D96, prAL, gcLo}, // [23] ETHIOPIC SYLLABLE LOA..ETHIOPIC SYLLABLE GGWE
+ {0x2DA0, 0x2DA6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE SSA..ETHIOPIC SYLLABLE SSO
+ {0x2DA8, 0x2DAE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE CCA..ETHIOPIC SYLLABLE CCO
+ {0x2DB0, 0x2DB6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE ZZA..ETHIOPIC SYLLABLE ZZO
+ {0x2DB8, 0x2DBE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE CCHA..ETHIOPIC SYLLABLE CCHO
+ {0x2DC0, 0x2DC6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE QYA..ETHIOPIC SYLLABLE QYO
+ {0x2DC8, 0x2DCE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE KYA..ETHIOPIC SYLLABLE KYO
+ {0x2DD0, 0x2DD6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE XYA..ETHIOPIC SYLLABLE XYO
+ {0x2DD8, 0x2DDE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE GYA..ETHIOPIC SYLLABLE GYO
+ {0x2DE0, 0x2DFF, prCM, gcMn}, // [32] COMBINING CYRILLIC LETTER BE..COMBINING CYRILLIC LETTER IOTIFIED BIG YUS
+ {0x2E00, 0x2E01, prQU, gcPo}, // [2] RIGHT ANGLE SUBSTITUTION MARKER..RIGHT ANGLE DOTTED SUBSTITUTION MARKER
+ {0x2E02, 0x2E02, prQU, gcPi}, // LEFT SUBSTITUTION BRACKET
+ {0x2E03, 0x2E03, prQU, gcPf}, // RIGHT SUBSTITUTION BRACKET
+ {0x2E04, 0x2E04, prQU, gcPi}, // LEFT DOTTED SUBSTITUTION BRACKET
+ {0x2E05, 0x2E05, prQU, gcPf}, // RIGHT DOTTED SUBSTITUTION BRACKET
+ {0x2E06, 0x2E08, prQU, gcPo}, // [3] RAISED INTERPOLATION MARKER..DOTTED TRANSPOSITION MARKER
+ {0x2E09, 0x2E09, prQU, gcPi}, // LEFT TRANSPOSITION BRACKET
+ {0x2E0A, 0x2E0A, prQU, gcPf}, // RIGHT TRANSPOSITION BRACKET
+ {0x2E0B, 0x2E0B, prQU, gcPo}, // RAISED SQUARE
+ {0x2E0C, 0x2E0C, prQU, gcPi}, // LEFT RAISED OMISSION BRACKET
+ {0x2E0D, 0x2E0D, prQU, gcPf}, // RIGHT RAISED OMISSION BRACKET
+ {0x2E0E, 0x2E15, prBA, gcPo}, // [8] EDITORIAL CORONIS..UPWARDS ANCORA
+ {0x2E16, 0x2E16, prAL, gcPo}, // DOTTED RIGHT-POINTING ANGLE
+ {0x2E17, 0x2E17, prBA, gcPd}, // DOUBLE OBLIQUE HYPHEN
+ {0x2E18, 0x2E18, prOP, gcPo}, // INVERTED INTERROBANG
+ {0x2E19, 0x2E19, prBA, gcPo}, // PALM BRANCH
+ {0x2E1A, 0x2E1A, prAL, gcPd}, // HYPHEN WITH DIAERESIS
+ {0x2E1B, 0x2E1B, prAL, gcPo}, // TILDE WITH RING ABOVE
+ {0x2E1C, 0x2E1C, prQU, gcPi}, // LEFT LOW PARAPHRASE BRACKET
+ {0x2E1D, 0x2E1D, prQU, gcPf}, // RIGHT LOW PARAPHRASE BRACKET
+ {0x2E1E, 0x2E1F, prAL, gcPo}, // [2] TILDE WITH DOT ABOVE..TILDE WITH DOT BELOW
+ {0x2E20, 0x2E20, prQU, gcPi}, // LEFT VERTICAL BAR WITH QUILL
+ {0x2E21, 0x2E21, prQU, gcPf}, // RIGHT VERTICAL BAR WITH QUILL
+ {0x2E22, 0x2E22, prOP, gcPs}, // TOP LEFT HALF BRACKET
+ {0x2E23, 0x2E23, prCL, gcPe}, // TOP RIGHT HALF BRACKET
+ {0x2E24, 0x2E24, prOP, gcPs}, // BOTTOM LEFT HALF BRACKET
+ {0x2E25, 0x2E25, prCL, gcPe}, // BOTTOM RIGHT HALF BRACKET
+ {0x2E26, 0x2E26, prOP, gcPs}, // LEFT SIDEWAYS U BRACKET
+ {0x2E27, 0x2E27, prCL, gcPe}, // RIGHT SIDEWAYS U BRACKET
+ {0x2E28, 0x2E28, prOP, gcPs}, // LEFT DOUBLE PARENTHESIS
+ {0x2E29, 0x2E29, prCL, gcPe}, // RIGHT DOUBLE PARENTHESIS
+ {0x2E2A, 0x2E2D, prBA, gcPo}, // [4] TWO DOTS OVER ONE DOT PUNCTUATION..FIVE DOT MARK
+ {0x2E2E, 0x2E2E, prEX, gcPo}, // REVERSED QUESTION MARK
+ {0x2E2F, 0x2E2F, prAL, gcLm}, // VERTICAL TILDE
+ {0x2E30, 0x2E31, prBA, gcPo}, // [2] RING POINT..WORD SEPARATOR MIDDLE DOT
+ {0x2E32, 0x2E32, prAL, gcPo}, // TURNED COMMA
+ {0x2E33, 0x2E34, prBA, gcPo}, // [2] RAISED DOT..RAISED COMMA
+ {0x2E35, 0x2E39, prAL, gcPo}, // [5] TURNED SEMICOLON..TOP HALF SECTION SIGN
+ {0x2E3A, 0x2E3B, prB2, gcPd}, // [2] TWO-EM DASH..THREE-EM DASH
+ {0x2E3C, 0x2E3E, prBA, gcPo}, // [3] STENOGRAPHIC FULL STOP..WIGGLY VERTICAL LINE
+ {0x2E3F, 0x2E3F, prAL, gcPo}, // CAPITULUM
+ {0x2E40, 0x2E40, prBA, gcPd}, // DOUBLE HYPHEN
+ {0x2E41, 0x2E41, prBA, gcPo}, // REVERSED COMMA
+ {0x2E42, 0x2E42, prOP, gcPs}, // DOUBLE LOW-REVERSED-9 QUOTATION MARK
+ {0x2E43, 0x2E4A, prBA, gcPo}, // [8] DASH WITH LEFT UPTURN..DOTTED SOLIDUS
+ {0x2E4B, 0x2E4B, prAL, gcPo}, // TRIPLE DAGGER
+ {0x2E4C, 0x2E4C, prBA, gcPo}, // MEDIEVAL COMMA
+ {0x2E4D, 0x2E4D, prAL, gcPo}, // PARAGRAPHUS MARK
+ {0x2E4E, 0x2E4F, prBA, gcPo}, // [2] PUNCTUS ELEVATUS MARK..CORNISH VERSE DIVIDER
+ {0x2E50, 0x2E51, prAL, gcSo}, // [2] CROSS PATTY WITH RIGHT CROSSBAR..CROSS PATTY WITH LEFT CROSSBAR
+ {0x2E52, 0x2E52, prAL, gcPo}, // TIRONIAN SIGN CAPITAL ET
+ {0x2E53, 0x2E54, prEX, gcPo}, // [2] MEDIEVAL EXCLAMATION MARK..MEDIEVAL QUESTION MARK
+ {0x2E55, 0x2E55, prOP, gcPs}, // LEFT SQUARE BRACKET WITH STROKE
+ {0x2E56, 0x2E56, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH STROKE
+ {0x2E57, 0x2E57, prOP, gcPs}, // LEFT SQUARE BRACKET WITH DOUBLE STROKE
+ {0x2E58, 0x2E58, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH DOUBLE STROKE
+ {0x2E59, 0x2E59, prOP, gcPs}, // TOP HALF LEFT PARENTHESIS
+ {0x2E5A, 0x2E5A, prCL, gcPe}, // TOP HALF RIGHT PARENTHESIS
+ {0x2E5B, 0x2E5B, prOP, gcPs}, // BOTTOM HALF LEFT PARENTHESIS
+ {0x2E5C, 0x2E5C, prCL, gcPe}, // BOTTOM HALF RIGHT PARENTHESIS
+ {0x2E5D, 0x2E5D, prBA, gcPd}, // OBLIQUE HYPHEN
+ {0x2E80, 0x2E99, prID, gcSo}, // [26] CJK RADICAL REPEAT..CJK RADICAL RAP
+ {0x2E9B, 0x2EF3, prID, gcSo}, // [89] CJK RADICAL CHOKE..CJK RADICAL C-SIMPLIFIED TURTLE
+ {0x2F00, 0x2FD5, prID, gcSo}, // [214] KANGXI RADICAL ONE..KANGXI RADICAL FLUTE
+ {0x2FF0, 0x2FFB, prID, gcSo}, // [12] IDEOGRAPHIC DESCRIPTION CHARACTER LEFT TO RIGHT..IDEOGRAPHIC DESCRIPTION CHARACTER OVERLAID
+ {0x3000, 0x3000, prBA, gcZs}, // IDEOGRAPHIC SPACE
+ {0x3001, 0x3002, prCL, gcPo}, // [2] IDEOGRAPHIC COMMA..IDEOGRAPHIC FULL STOP
+ {0x3003, 0x3003, prID, gcPo}, // DITTO MARK
+ {0x3004, 0x3004, prID, gcSo}, // JAPANESE INDUSTRIAL STANDARD SYMBOL
+ {0x3005, 0x3005, prNS, gcLm}, // IDEOGRAPHIC ITERATION MARK
+ {0x3006, 0x3006, prID, gcLo}, // IDEOGRAPHIC CLOSING MARK
+ {0x3007, 0x3007, prID, gcNl}, // IDEOGRAPHIC NUMBER ZERO
+ {0x3008, 0x3008, prOP, gcPs}, // LEFT ANGLE BRACKET
+ {0x3009, 0x3009, prCL, gcPe}, // RIGHT ANGLE BRACKET
+ {0x300A, 0x300A, prOP, gcPs}, // LEFT DOUBLE ANGLE BRACKET
+ {0x300B, 0x300B, prCL, gcPe}, // RIGHT DOUBLE ANGLE BRACKET
+ {0x300C, 0x300C, prOP, gcPs}, // LEFT CORNER BRACKET
+ {0x300D, 0x300D, prCL, gcPe}, // RIGHT CORNER BRACKET
+ {0x300E, 0x300E, prOP, gcPs}, // LEFT WHITE CORNER BRACKET
+ {0x300F, 0x300F, prCL, gcPe}, // RIGHT WHITE CORNER BRACKET
+ {0x3010, 0x3010, prOP, gcPs}, // LEFT BLACK LENTICULAR BRACKET
+ {0x3011, 0x3011, prCL, gcPe}, // RIGHT BLACK LENTICULAR BRACKET
+ {0x3012, 0x3013, prID, gcSo}, // [2] POSTAL MARK..GETA MARK
+ {0x3014, 0x3014, prOP, gcPs}, // LEFT TORTOISE SHELL BRACKET
+ {0x3015, 0x3015, prCL, gcPe}, // RIGHT TORTOISE SHELL BRACKET
+ {0x3016, 0x3016, prOP, gcPs}, // LEFT WHITE LENTICULAR BRACKET
+ {0x3017, 0x3017, prCL, gcPe}, // RIGHT WHITE LENTICULAR BRACKET
+ {0x3018, 0x3018, prOP, gcPs}, // LEFT WHITE TORTOISE SHELL BRACKET
+ {0x3019, 0x3019, prCL, gcPe}, // RIGHT WHITE TORTOISE SHELL BRACKET
+ {0x301A, 0x301A, prOP, gcPs}, // LEFT WHITE SQUARE BRACKET
+ {0x301B, 0x301B, prCL, gcPe}, // RIGHT WHITE SQUARE BRACKET
+ {0x301C, 0x301C, prNS, gcPd}, // WAVE DASH
+ {0x301D, 0x301D, prOP, gcPs}, // REVERSED DOUBLE PRIME QUOTATION MARK
+ {0x301E, 0x301F, prCL, gcPe}, // [2] DOUBLE PRIME QUOTATION MARK..LOW DOUBLE PRIME QUOTATION MARK
+ {0x3020, 0x3020, prID, gcSo}, // POSTAL MARK FACE
+ {0x3021, 0x3029, prID, gcNl}, // [9] HANGZHOU NUMERAL ONE..HANGZHOU NUMERAL NINE
+ {0x302A, 0x302D, prCM, gcMn}, // [4] IDEOGRAPHIC LEVEL TONE MARK..IDEOGRAPHIC ENTERING TONE MARK
+ {0x302E, 0x302F, prCM, gcMc}, // [2] HANGUL SINGLE DOT TONE MARK..HANGUL DOUBLE DOT TONE MARK
+ {0x3030, 0x3030, prID, gcPd}, // WAVY DASH
+ {0x3031, 0x3034, prID, gcLm}, // [4] VERTICAL KANA REPEAT MARK..VERTICAL KANA REPEAT WITH VOICED SOUND MARK UPPER HALF
+ {0x3035, 0x3035, prCM, gcLm}, // VERTICAL KANA REPEAT MARK LOWER HALF
+ {0x3036, 0x3037, prID, gcSo}, // [2] CIRCLED POSTAL MARK..IDEOGRAPHIC TELEGRAPH LINE FEED SEPARATOR SYMBOL
+ {0x3038, 0x303A, prID, gcNl}, // [3] HANGZHOU NUMERAL TEN..HANGZHOU NUMERAL THIRTY
+ {0x303B, 0x303B, prNS, gcLm}, // VERTICAL IDEOGRAPHIC ITERATION MARK
+ {0x303C, 0x303C, prNS, gcLo}, // MASU MARK
+ {0x303D, 0x303D, prID, gcPo}, // PART ALTERNATION MARK
+ {0x303E, 0x303F, prID, gcSo}, // [2] IDEOGRAPHIC VARIATION INDICATOR..IDEOGRAPHIC HALF FILL SPACE
+ {0x3041, 0x3041, prCJ, gcLo}, // HIRAGANA LETTER SMALL A
+ {0x3042, 0x3042, prID, gcLo}, // HIRAGANA LETTER A
+ {0x3043, 0x3043, prCJ, gcLo}, // HIRAGANA LETTER SMALL I
+ {0x3044, 0x3044, prID, gcLo}, // HIRAGANA LETTER I
+ {0x3045, 0x3045, prCJ, gcLo}, // HIRAGANA LETTER SMALL U
+ {0x3046, 0x3046, prID, gcLo}, // HIRAGANA LETTER U
+ {0x3047, 0x3047, prCJ, gcLo}, // HIRAGANA LETTER SMALL E
+ {0x3048, 0x3048, prID, gcLo}, // HIRAGANA LETTER E
+ {0x3049, 0x3049, prCJ, gcLo}, // HIRAGANA LETTER SMALL O
+ {0x304A, 0x3062, prID, gcLo}, // [25] HIRAGANA LETTER O..HIRAGANA LETTER DI
+ {0x3063, 0x3063, prCJ, gcLo}, // HIRAGANA LETTER SMALL TU
+ {0x3064, 0x3082, prID, gcLo}, // [31] HIRAGANA LETTER TU..HIRAGANA LETTER MO
+ {0x3083, 0x3083, prCJ, gcLo}, // HIRAGANA LETTER SMALL YA
+ {0x3084, 0x3084, prID, gcLo}, // HIRAGANA LETTER YA
+ {0x3085, 0x3085, prCJ, gcLo}, // HIRAGANA LETTER SMALL YU
+ {0x3086, 0x3086, prID, gcLo}, // HIRAGANA LETTER YU
+ {0x3087, 0x3087, prCJ, gcLo}, // HIRAGANA LETTER SMALL YO
+ {0x3088, 0x308D, prID, gcLo}, // [6] HIRAGANA LETTER YO..HIRAGANA LETTER RO
+ {0x308E, 0x308E, prCJ, gcLo}, // HIRAGANA LETTER SMALL WA
+ {0x308F, 0x3094, prID, gcLo}, // [6] HIRAGANA LETTER WA..HIRAGANA LETTER VU
+ {0x3095, 0x3096, prCJ, gcLo}, // [2] HIRAGANA LETTER SMALL KA..HIRAGANA LETTER SMALL KE
+ {0x3099, 0x309A, prCM, gcMn}, // [2] COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK..COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x309B, 0x309C, prNS, gcSk}, // [2] KATAKANA-HIRAGANA VOICED SOUND MARK..KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x309D, 0x309E, prNS, gcLm}, // [2] HIRAGANA ITERATION MARK..HIRAGANA VOICED ITERATION MARK
+ {0x309F, 0x309F, prID, gcLo}, // HIRAGANA DIGRAPH YORI
+ {0x30A0, 0x30A0, prNS, gcPd}, // KATAKANA-HIRAGANA DOUBLE HYPHEN
+ {0x30A1, 0x30A1, prCJ, gcLo}, // KATAKANA LETTER SMALL A
+ {0x30A2, 0x30A2, prID, gcLo}, // KATAKANA LETTER A
+ {0x30A3, 0x30A3, prCJ, gcLo}, // KATAKANA LETTER SMALL I
+ {0x30A4, 0x30A4, prID, gcLo}, // KATAKANA LETTER I
+ {0x30A5, 0x30A5, prCJ, gcLo}, // KATAKANA LETTER SMALL U
+ {0x30A6, 0x30A6, prID, gcLo}, // KATAKANA LETTER U
+ {0x30A7, 0x30A7, prCJ, gcLo}, // KATAKANA LETTER SMALL E
+ {0x30A8, 0x30A8, prID, gcLo}, // KATAKANA LETTER E
+ {0x30A9, 0x30A9, prCJ, gcLo}, // KATAKANA LETTER SMALL O
+ {0x30AA, 0x30C2, prID, gcLo}, // [25] KATAKANA LETTER O..KATAKANA LETTER DI
+ {0x30C3, 0x30C3, prCJ, gcLo}, // KATAKANA LETTER SMALL TU
+ {0x30C4, 0x30E2, prID, gcLo}, // [31] KATAKANA LETTER TU..KATAKANA LETTER MO
+ {0x30E3, 0x30E3, prCJ, gcLo}, // KATAKANA LETTER SMALL YA
+ {0x30E4, 0x30E4, prID, gcLo}, // KATAKANA LETTER YA
+ {0x30E5, 0x30E5, prCJ, gcLo}, // KATAKANA LETTER SMALL YU
+ {0x30E6, 0x30E6, prID, gcLo}, // KATAKANA LETTER YU
+ {0x30E7, 0x30E7, prCJ, gcLo}, // KATAKANA LETTER SMALL YO
+ {0x30E8, 0x30ED, prID, gcLo}, // [6] KATAKANA LETTER YO..KATAKANA LETTER RO
+ {0x30EE, 0x30EE, prCJ, gcLo}, // KATAKANA LETTER SMALL WA
+ {0x30EF, 0x30F4, prID, gcLo}, // [6] KATAKANA LETTER WA..KATAKANA LETTER VU
+ {0x30F5, 0x30F6, prCJ, gcLo}, // [2] KATAKANA LETTER SMALL KA..KATAKANA LETTER SMALL KE
+ {0x30F7, 0x30FA, prID, gcLo}, // [4] KATAKANA LETTER VA..KATAKANA LETTER VO
+ {0x30FB, 0x30FB, prNS, gcPo}, // KATAKANA MIDDLE DOT
+ {0x30FC, 0x30FC, prCJ, gcLm}, // KATAKANA-HIRAGANA PROLONGED SOUND MARK
+ {0x30FD, 0x30FE, prNS, gcLm}, // [2] KATAKANA ITERATION MARK..KATAKANA VOICED ITERATION MARK
+ {0x30FF, 0x30FF, prID, gcLo}, // KATAKANA DIGRAPH KOTO
+ {0x3105, 0x312F, prID, gcLo}, // [43] BOPOMOFO LETTER B..BOPOMOFO LETTER NN
+ {0x3131, 0x318E, prID, gcLo}, // [94] HANGUL LETTER KIYEOK..HANGUL LETTER ARAEAE
+ {0x3190, 0x3191, prID, gcSo}, // [2] IDEOGRAPHIC ANNOTATION LINKING MARK..IDEOGRAPHIC ANNOTATION REVERSE MARK
+ {0x3192, 0x3195, prID, gcNo}, // [4] IDEOGRAPHIC ANNOTATION ONE MARK..IDEOGRAPHIC ANNOTATION FOUR MARK
+ {0x3196, 0x319F, prID, gcSo}, // [10] IDEOGRAPHIC ANNOTATION TOP MARK..IDEOGRAPHIC ANNOTATION MAN MARK
+ {0x31A0, 0x31BF, prID, gcLo}, // [32] BOPOMOFO LETTER BU..BOPOMOFO LETTER AH
+ {0x31C0, 0x31E3, prID, gcSo}, // [36] CJK STROKE T..CJK STROKE Q
+ {0x31F0, 0x31FF, prCJ, gcLo}, // [16] KATAKANA LETTER SMALL KU..KATAKANA LETTER SMALL RO
+ {0x3200, 0x321E, prID, gcSo}, // [31] PARENTHESIZED HANGUL KIYEOK..PARENTHESIZED KOREAN CHARACTER O HU
+ {0x3220, 0x3229, prID, gcNo}, // [10] PARENTHESIZED IDEOGRAPH ONE..PARENTHESIZED IDEOGRAPH TEN
+ {0x322A, 0x3247, prID, gcSo}, // [30] PARENTHESIZED IDEOGRAPH MOON..CIRCLED IDEOGRAPH KOTO
+ {0x3248, 0x324F, prAI, gcNo}, // [8] CIRCLED NUMBER TEN ON BLACK SQUARE..CIRCLED NUMBER EIGHTY ON BLACK SQUARE
+ {0x3250, 0x3250, prID, gcSo}, // PARTNERSHIP SIGN
+ {0x3251, 0x325F, prID, gcNo}, // [15] CIRCLED NUMBER TWENTY ONE..CIRCLED NUMBER THIRTY FIVE
+ {0x3260, 0x327F, prID, gcSo}, // [32] CIRCLED HANGUL KIYEOK..KOREAN STANDARD SYMBOL
+ {0x3280, 0x3289, prID, gcNo}, // [10] CIRCLED IDEOGRAPH ONE..CIRCLED IDEOGRAPH TEN
+ {0x328A, 0x32B0, prID, gcSo}, // [39] CIRCLED IDEOGRAPH MOON..CIRCLED IDEOGRAPH NIGHT
+ {0x32B1, 0x32BF, prID, gcNo}, // [15] CIRCLED NUMBER THIRTY SIX..CIRCLED NUMBER FIFTY
+ {0x32C0, 0x32FF, prID, gcSo}, // [64] IDEOGRAPHIC TELEGRAPH SYMBOL FOR JANUARY..SQUARE ERA NAME REIWA
+ {0x3300, 0x33FF, prID, gcSo}, // [256] SQUARE APAATO..SQUARE GAL
+ {0x3400, 0x4DBF, prID, gcLo}, // [6592] CJK UNIFIED IDEOGRAPH-3400..CJK UNIFIED IDEOGRAPH-4DBF
+ {0x4DC0, 0x4DFF, prAL, gcSo}, // [64] HEXAGRAM FOR THE CREATIVE HEAVEN..HEXAGRAM FOR BEFORE COMPLETION
+ {0x4E00, 0x9FFF, prID, gcLo}, // [20992] CJK UNIFIED IDEOGRAPH-4E00..CJK UNIFIED IDEOGRAPH-9FFF
+ {0xA000, 0xA014, prID, gcLo}, // [21] YI SYLLABLE IT..YI SYLLABLE E
+ {0xA015, 0xA015, prNS, gcLm}, // YI SYLLABLE WU
+ {0xA016, 0xA48C, prID, gcLo}, // [1143] YI SYLLABLE BIT..YI SYLLABLE YYR
+ {0xA490, 0xA4C6, prID, gcSo}, // [55] YI RADICAL QOT..YI RADICAL KE
+ {0xA4D0, 0xA4F7, prAL, gcLo}, // [40] LISU LETTER BA..LISU LETTER OE
+ {0xA4F8, 0xA4FD, prAL, gcLm}, // [6] LISU LETTER TONE MYA TI..LISU LETTER TONE MYA JEU
+ {0xA4FE, 0xA4FF, prBA, gcPo}, // [2] LISU PUNCTUATION COMMA..LISU PUNCTUATION FULL STOP
+ {0xA500, 0xA60B, prAL, gcLo}, // [268] VAI SYLLABLE EE..VAI SYLLABLE NG
+ {0xA60C, 0xA60C, prAL, gcLm}, // VAI SYLLABLE LENGTHENER
+ {0xA60D, 0xA60D, prBA, gcPo}, // VAI COMMA
+ {0xA60E, 0xA60E, prEX, gcPo}, // VAI FULL STOP
+ {0xA60F, 0xA60F, prBA, gcPo}, // VAI QUESTION MARK
+ {0xA610, 0xA61F, prAL, gcLo}, // [16] VAI SYLLABLE NDOLE FA..VAI SYMBOL JONG
+ {0xA620, 0xA629, prNU, gcNd}, // [10] VAI DIGIT ZERO..VAI DIGIT NINE
+ {0xA62A, 0xA62B, prAL, gcLo}, // [2] VAI SYLLABLE NDOLE MA..VAI SYLLABLE NDOLE DO
+ {0xA640, 0xA66D, prAL, gcLC}, // [46] CYRILLIC CAPITAL LETTER ZEMLYA..CYRILLIC SMALL LETTER DOUBLE MONOCULAR O
+ {0xA66E, 0xA66E, prAL, gcLo}, // CYRILLIC LETTER MULTIOCULAR O
+ {0xA66F, 0xA66F, prCM, gcMn}, // COMBINING CYRILLIC VZMET
+ {0xA670, 0xA672, prCM, gcMe}, // [3] COMBINING CYRILLIC TEN MILLIONS SIGN..COMBINING CYRILLIC THOUSAND MILLIONS SIGN
+ {0xA673, 0xA673, prAL, gcPo}, // SLAVONIC ASTERISK
+ {0xA674, 0xA67D, prCM, gcMn}, // [10] COMBINING CYRILLIC LETTER UKRAINIAN IE..COMBINING CYRILLIC PAYEROK
+ {0xA67E, 0xA67E, prAL, gcPo}, // CYRILLIC KAVYKA
+ {0xA67F, 0xA67F, prAL, gcLm}, // CYRILLIC PAYEROK
+ {0xA680, 0xA69B, prAL, gcLC}, // [28] CYRILLIC CAPITAL LETTER DWE..CYRILLIC SMALL LETTER CROSSED O
+ {0xA69C, 0xA69D, prAL, gcLm}, // [2] MODIFIER LETTER CYRILLIC HARD SIGN..MODIFIER LETTER CYRILLIC SOFT SIGN
+ {0xA69E, 0xA69F, prCM, gcMn}, // [2] COMBINING CYRILLIC LETTER EF..COMBINING CYRILLIC LETTER IOTIFIED E
+ {0xA6A0, 0xA6E5, prAL, gcLo}, // [70] BAMUM LETTER A..BAMUM LETTER KI
+ {0xA6E6, 0xA6EF, prAL, gcNl}, // [10] BAMUM LETTER MO..BAMUM LETTER KOGHOM
+ {0xA6F0, 0xA6F1, prCM, gcMn}, // [2] BAMUM COMBINING MARK KOQNDON..BAMUM COMBINING MARK TUKWENTIS
+ {0xA6F2, 0xA6F2, prAL, gcPo}, // BAMUM NJAEMLI
+ {0xA6F3, 0xA6F7, prBA, gcPo}, // [5] BAMUM FULL STOP..BAMUM QUESTION MARK
+ {0xA700, 0xA716, prAL, gcSk}, // [23] MODIFIER LETTER CHINESE TONE YIN PING..MODIFIER LETTER EXTRA-LOW LEFT-STEM TONE BAR
+ {0xA717, 0xA71F, prAL, gcLm}, // [9] MODIFIER LETTER DOT VERTICAL BAR..MODIFIER LETTER LOW INVERTED EXCLAMATION MARK
+ {0xA720, 0xA721, prAL, gcSk}, // [2] MODIFIER LETTER STRESS AND HIGH TONE..MODIFIER LETTER STRESS AND LOW TONE
+ {0xA722, 0xA76F, prAL, gcLC}, // [78] LATIN CAPITAL LETTER EGYPTOLOGICAL ALEF..LATIN SMALL LETTER CON
+ {0xA770, 0xA770, prAL, gcLm}, // MODIFIER LETTER US
+ {0xA771, 0xA787, prAL, gcLC}, // [23] LATIN SMALL LETTER DUM..LATIN SMALL LETTER INSULAR T
+ {0xA788, 0xA788, prAL, gcLm}, // MODIFIER LETTER LOW CIRCUMFLEX ACCENT
+ {0xA789, 0xA78A, prAL, gcSk}, // [2] MODIFIER LETTER COLON..MODIFIER LETTER SHORT EQUALS SIGN
+ {0xA78B, 0xA78E, prAL, gcLC}, // [4] LATIN CAPITAL LETTER SALTILLO..LATIN SMALL LETTER L WITH RETROFLEX HOOK AND BELT
+ {0xA78F, 0xA78F, prAL, gcLo}, // LATIN LETTER SINOLOGICAL DOT
+ {0xA790, 0xA7CA, prAL, gcLC}, // [59] LATIN CAPITAL LETTER N WITH DESCENDER..LATIN SMALL LETTER S WITH SHORT STROKE OVERLAY
+ {0xA7D0, 0xA7D1, prAL, gcLC}, // [2] LATIN CAPITAL LETTER CLOSED INSULAR G..LATIN SMALL LETTER CLOSED INSULAR G
+ {0xA7D3, 0xA7D3, prAL, gcLl}, // LATIN SMALL LETTER DOUBLE THORN
+ {0xA7D5, 0xA7D9, prAL, gcLC}, // [5] LATIN SMALL LETTER DOUBLE WYNN..LATIN SMALL LETTER SIGMOID S
+ {0xA7F2, 0xA7F4, prAL, gcLm}, // [3] MODIFIER LETTER CAPITAL C..MODIFIER LETTER CAPITAL Q
+ {0xA7F5, 0xA7F6, prAL, gcLC}, // [2] LATIN CAPITAL LETTER REVERSED HALF H..LATIN SMALL LETTER REVERSED HALF H
+ {0xA7F7, 0xA7F7, prAL, gcLo}, // LATIN EPIGRAPHIC LETTER SIDEWAYS I
+ {0xA7F8, 0xA7F9, prAL, gcLm}, // [2] MODIFIER LETTER CAPITAL H WITH STROKE..MODIFIER LETTER SMALL LIGATURE OE
+ {0xA7FA, 0xA7FA, prAL, gcLl}, // LATIN LETTER SMALL CAPITAL TURNED M
+ {0xA7FB, 0xA7FF, prAL, gcLo}, // [5] LATIN EPIGRAPHIC LETTER REVERSED F..LATIN EPIGRAPHIC LETTER ARCHAIC M
+ {0xA800, 0xA801, prAL, gcLo}, // [2] SYLOTI NAGRI LETTER A..SYLOTI NAGRI LETTER I
+ {0xA802, 0xA802, prCM, gcMn}, // SYLOTI NAGRI SIGN DVISVARA
+ {0xA803, 0xA805, prAL, gcLo}, // [3] SYLOTI NAGRI LETTER U..SYLOTI NAGRI LETTER O
+ {0xA806, 0xA806, prCM, gcMn}, // SYLOTI NAGRI SIGN HASANTA
+ {0xA807, 0xA80A, prAL, gcLo}, // [4] SYLOTI NAGRI LETTER KO..SYLOTI NAGRI LETTER GHO
+ {0xA80B, 0xA80B, prCM, gcMn}, // SYLOTI NAGRI SIGN ANUSVARA
+ {0xA80C, 0xA822, prAL, gcLo}, // [23] SYLOTI NAGRI LETTER CO..SYLOTI NAGRI LETTER HO
+ {0xA823, 0xA824, prCM, gcMc}, // [2] SYLOTI NAGRI VOWEL SIGN A..SYLOTI NAGRI VOWEL SIGN I
+ {0xA825, 0xA826, prCM, gcMn}, // [2] SYLOTI NAGRI VOWEL SIGN U..SYLOTI NAGRI VOWEL SIGN E
+ {0xA827, 0xA827, prCM, gcMc}, // SYLOTI NAGRI VOWEL SIGN OO
+ {0xA828, 0xA82B, prAL, gcSo}, // [4] SYLOTI NAGRI POETRY MARK-1..SYLOTI NAGRI POETRY MARK-4
+ {0xA82C, 0xA82C, prCM, gcMn}, // SYLOTI NAGRI SIGN ALTERNATE HASANTA
+ {0xA830, 0xA835, prAL, gcNo}, // [6] NORTH INDIC FRACTION ONE QUARTER..NORTH INDIC FRACTION THREE SIXTEENTHS
+ {0xA836, 0xA837, prAL, gcSo}, // [2] NORTH INDIC QUARTER MARK..NORTH INDIC PLACEHOLDER MARK
+ {0xA838, 0xA838, prPO, gcSc}, // NORTH INDIC RUPEE MARK
+ {0xA839, 0xA839, prAL, gcSo}, // NORTH INDIC QUANTITY MARK
+ {0xA840, 0xA873, prAL, gcLo}, // [52] PHAGS-PA LETTER KA..PHAGS-PA LETTER CANDRABINDU
+ {0xA874, 0xA875, prBB, gcPo}, // [2] PHAGS-PA SINGLE HEAD MARK..PHAGS-PA DOUBLE HEAD MARK
+ {0xA876, 0xA877, prEX, gcPo}, // [2] PHAGS-PA MARK SHAD..PHAGS-PA MARK DOUBLE SHAD
+ {0xA880, 0xA881, prCM, gcMc}, // [2] SAURASHTRA SIGN ANUSVARA..SAURASHTRA SIGN VISARGA
+ {0xA882, 0xA8B3, prAL, gcLo}, // [50] SAURASHTRA LETTER A..SAURASHTRA LETTER LLA
+ {0xA8B4, 0xA8C3, prCM, gcMc}, // [16] SAURASHTRA CONSONANT SIGN HAARU..SAURASHTRA VOWEL SIGN AU
+ {0xA8C4, 0xA8C5, prCM, gcMn}, // [2] SAURASHTRA SIGN VIRAMA..SAURASHTRA SIGN CANDRABINDU
+ {0xA8CE, 0xA8CF, prBA, gcPo}, // [2] SAURASHTRA DANDA..SAURASHTRA DOUBLE DANDA
+ {0xA8D0, 0xA8D9, prNU, gcNd}, // [10] SAURASHTRA DIGIT ZERO..SAURASHTRA DIGIT NINE
+ {0xA8E0, 0xA8F1, prCM, gcMn}, // [18] COMBINING DEVANAGARI DIGIT ZERO..COMBINING DEVANAGARI SIGN AVAGRAHA
+ {0xA8F2, 0xA8F7, prAL, gcLo}, // [6] DEVANAGARI SIGN SPACING CANDRABINDU..DEVANAGARI SIGN CANDRABINDU AVAGRAHA
+ {0xA8F8, 0xA8FA, prAL, gcPo}, // [3] DEVANAGARI SIGN PUSHPIKA..DEVANAGARI CARET
+ {0xA8FB, 0xA8FB, prAL, gcLo}, // DEVANAGARI HEADSTROKE
+ {0xA8FC, 0xA8FC, prBB, gcPo}, // DEVANAGARI SIGN SIDDHAM
+ {0xA8FD, 0xA8FE, prAL, gcLo}, // [2] DEVANAGARI JAIN OM..DEVANAGARI LETTER AY
+ {0xA8FF, 0xA8FF, prCM, gcMn}, // DEVANAGARI VOWEL SIGN AY
+ {0xA900, 0xA909, prNU, gcNd}, // [10] KAYAH LI DIGIT ZERO..KAYAH LI DIGIT NINE
+ {0xA90A, 0xA925, prAL, gcLo}, // [28] KAYAH LI LETTER KA..KAYAH LI LETTER OO
+ {0xA926, 0xA92D, prCM, gcMn}, // [8] KAYAH LI VOWEL UE..KAYAH LI TONE CALYA PLOPHU
+ {0xA92E, 0xA92F, prBA, gcPo}, // [2] KAYAH LI SIGN CWI..KAYAH LI SIGN SHYA
+ {0xA930, 0xA946, prAL, gcLo}, // [23] REJANG LETTER KA..REJANG LETTER A
+ {0xA947, 0xA951, prCM, gcMn}, // [11] REJANG VOWEL SIGN I..REJANG CONSONANT SIGN R
+ {0xA952, 0xA953, prCM, gcMc}, // [2] REJANG CONSONANT SIGN H..REJANG VIRAMA
+ {0xA95F, 0xA95F, prAL, gcPo}, // REJANG SECTION MARK
+ {0xA960, 0xA97C, prJL, gcLo}, // [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANGUL CHOSEONG SSANGYEORINHIEUH
+ {0xA980, 0xA982, prCM, gcMn}, // [3] JAVANESE SIGN PANYANGGA..JAVANESE SIGN LAYAR
+ {0xA983, 0xA983, prCM, gcMc}, // JAVANESE SIGN WIGNYAN
+ {0xA984, 0xA9B2, prAL, gcLo}, // [47] JAVANESE LETTER A..JAVANESE LETTER HA
+ {0xA9B3, 0xA9B3, prCM, gcMn}, // JAVANESE SIGN CECAK TELU
+ {0xA9B4, 0xA9B5, prCM, gcMc}, // [2] JAVANESE VOWEL SIGN TARUNG..JAVANESE VOWEL SIGN TOLONG
+ {0xA9B6, 0xA9B9, prCM, gcMn}, // [4] JAVANESE VOWEL SIGN WULU..JAVANESE VOWEL SIGN SUKU MENDUT
+ {0xA9BA, 0xA9BB, prCM, gcMc}, // [2] JAVANESE VOWEL SIGN TALING..JAVANESE VOWEL SIGN DIRGA MURE
+ {0xA9BC, 0xA9BD, prCM, gcMn}, // [2] JAVANESE VOWEL SIGN PEPET..JAVANESE CONSONANT SIGN KERET
+ {0xA9BE, 0xA9C0, prCM, gcMc}, // [3] JAVANESE CONSONANT SIGN PENGKAL..JAVANESE PANGKON
+ {0xA9C1, 0xA9C6, prAL, gcPo}, // [6] JAVANESE LEFT RERENGGAN..JAVANESE PADA WINDU
+ {0xA9C7, 0xA9C9, prBA, gcPo}, // [3] JAVANESE PADA PANGKAT..JAVANESE PADA LUNGSI
+ {0xA9CA, 0xA9CD, prAL, gcPo}, // [4] JAVANESE PADA ADEG..JAVANESE TURNED PADA PISELEH
+ {0xA9CF, 0xA9CF, prAL, gcLm}, // JAVANESE PANGRANGKEP
+ {0xA9D0, 0xA9D9, prNU, gcNd}, // [10] JAVANESE DIGIT ZERO..JAVANESE DIGIT NINE
+ {0xA9DE, 0xA9DF, prAL, gcPo}, // [2] JAVANESE PADA TIRTA TUMETES..JAVANESE PADA ISEN-ISEN
+ {0xA9E0, 0xA9E4, prSA, gcLo}, // [5] MYANMAR LETTER SHAN GHA..MYANMAR LETTER SHAN BHA
+ {0xA9E5, 0xA9E5, prSA, gcMn}, // MYANMAR SIGN SHAN SAW
+ {0xA9E6, 0xA9E6, prSA, gcLm}, // MYANMAR MODIFIER LETTER SHAN REDUPLICATION
+ {0xA9E7, 0xA9EF, prSA, gcLo}, // [9] MYANMAR LETTER TAI LAING NYA..MYANMAR LETTER TAI LAING NNA
+ {0xA9F0, 0xA9F9, prNU, gcNd}, // [10] MYANMAR TAI LAING DIGIT ZERO..MYANMAR TAI LAING DIGIT NINE
+ {0xA9FA, 0xA9FE, prSA, gcLo}, // [5] MYANMAR LETTER TAI LAING LLA..MYANMAR LETTER TAI LAING BHA
+ {0xAA00, 0xAA28, prAL, gcLo}, // [41] CHAM LETTER A..CHAM LETTER HA
+ {0xAA29, 0xAA2E, prCM, gcMn}, // [6] CHAM VOWEL SIGN AA..CHAM VOWEL SIGN OE
+ {0xAA2F, 0xAA30, prCM, gcMc}, // [2] CHAM VOWEL SIGN O..CHAM VOWEL SIGN AI
+ {0xAA31, 0xAA32, prCM, gcMn}, // [2] CHAM VOWEL SIGN AU..CHAM VOWEL SIGN UE
+ {0xAA33, 0xAA34, prCM, gcMc}, // [2] CHAM CONSONANT SIGN YA..CHAM CONSONANT SIGN RA
+ {0xAA35, 0xAA36, prCM, gcMn}, // [2] CHAM CONSONANT SIGN LA..CHAM CONSONANT SIGN WA
+ {0xAA40, 0xAA42, prAL, gcLo}, // [3] CHAM LETTER FINAL K..CHAM LETTER FINAL NG
+ {0xAA43, 0xAA43, prCM, gcMn}, // CHAM CONSONANT SIGN FINAL NG
+ {0xAA44, 0xAA4B, prAL, gcLo}, // [8] CHAM LETTER FINAL CH..CHAM LETTER FINAL SS
+ {0xAA4C, 0xAA4C, prCM, gcMn}, // CHAM CONSONANT SIGN FINAL M
+ {0xAA4D, 0xAA4D, prCM, gcMc}, // CHAM CONSONANT SIGN FINAL H
+ {0xAA50, 0xAA59, prNU, gcNd}, // [10] CHAM DIGIT ZERO..CHAM DIGIT NINE
+ {0xAA5C, 0xAA5C, prAL, gcPo}, // CHAM PUNCTUATION SPIRAL
+ {0xAA5D, 0xAA5F, prBA, gcPo}, // [3] CHAM PUNCTUATION DANDA..CHAM PUNCTUATION TRIPLE DANDA
+ {0xAA60, 0xAA6F, prSA, gcLo}, // [16] MYANMAR LETTER KHAMTI GA..MYANMAR LETTER KHAMTI FA
+ {0xAA70, 0xAA70, prSA, gcLm}, // MYANMAR MODIFIER LETTER KHAMTI REDUPLICATION
+ {0xAA71, 0xAA76, prSA, gcLo}, // [6] MYANMAR LETTER KHAMTI XA..MYANMAR LOGOGRAM KHAMTI HM
+ {0xAA77, 0xAA79, prSA, gcSo}, // [3] MYANMAR SYMBOL AITON EXCLAMATION..MYANMAR SYMBOL AITON TWO
+ {0xAA7A, 0xAA7A, prSA, gcLo}, // MYANMAR LETTER AITON RA
+ {0xAA7B, 0xAA7B, prSA, gcMc}, // MYANMAR SIGN PAO KAREN TONE
+ {0xAA7C, 0xAA7C, prSA, gcMn}, // MYANMAR SIGN TAI LAING TONE-2
+ {0xAA7D, 0xAA7D, prSA, gcMc}, // MYANMAR SIGN TAI LAING TONE-5
+ {0xAA7E, 0xAA7F, prSA, gcLo}, // [2] MYANMAR LETTER SHWE PALAUNG CHA..MYANMAR LETTER SHWE PALAUNG SHA
+ {0xAA80, 0xAAAF, prSA, gcLo}, // [48] TAI VIET LETTER LOW KO..TAI VIET LETTER HIGH O
+ {0xAAB0, 0xAAB0, prSA, gcMn}, // TAI VIET MAI KANG
+ {0xAAB1, 0xAAB1, prSA, gcLo}, // TAI VIET VOWEL AA
+ {0xAAB2, 0xAAB4, prSA, gcMn}, // [3] TAI VIET VOWEL I..TAI VIET VOWEL U
+ {0xAAB5, 0xAAB6, prSA, gcLo}, // [2] TAI VIET VOWEL E..TAI VIET VOWEL O
+ {0xAAB7, 0xAAB8, prSA, gcMn}, // [2] TAI VIET MAI KHIT..TAI VIET VOWEL IA
+ {0xAAB9, 0xAABD, prSA, gcLo}, // [5] TAI VIET VOWEL UEA..TAI VIET VOWEL AN
+ {0xAABE, 0xAABF, prSA, gcMn}, // [2] TAI VIET VOWEL AM..TAI VIET TONE MAI EK
+ {0xAAC0, 0xAAC0, prSA, gcLo}, // TAI VIET TONE MAI NUENG
+ {0xAAC1, 0xAAC1, prSA, gcMn}, // TAI VIET TONE MAI THO
+ {0xAAC2, 0xAAC2, prSA, gcLo}, // TAI VIET TONE MAI SONG
+ {0xAADB, 0xAADC, prSA, gcLo}, // [2] TAI VIET SYMBOL KON..TAI VIET SYMBOL NUENG
+ {0xAADD, 0xAADD, prSA, gcLm}, // TAI VIET SYMBOL SAM
+ {0xAADE, 0xAADF, prSA, gcPo}, // [2] TAI VIET SYMBOL HO HOI..TAI VIET SYMBOL KOI KOI
+ {0xAAE0, 0xAAEA, prAL, gcLo}, // [11] MEETEI MAYEK LETTER E..MEETEI MAYEK LETTER SSA
+ {0xAAEB, 0xAAEB, prCM, gcMc}, // MEETEI MAYEK VOWEL SIGN II
+ {0xAAEC, 0xAAED, prCM, gcMn}, // [2] MEETEI MAYEK VOWEL SIGN UU..MEETEI MAYEK VOWEL SIGN AAI
+ {0xAAEE, 0xAAEF, prCM, gcMc}, // [2] MEETEI MAYEK VOWEL SIGN AU..MEETEI MAYEK VOWEL SIGN AAU
+ {0xAAF0, 0xAAF1, prBA, gcPo}, // [2] MEETEI MAYEK CHEIKHAN..MEETEI MAYEK AHANG KHUDAM
+ {0xAAF2, 0xAAF2, prAL, gcLo}, // MEETEI MAYEK ANJI
+ {0xAAF3, 0xAAF4, prAL, gcLm}, // [2] MEETEI MAYEK SYLLABLE REPETITION MARK..MEETEI MAYEK WORD REPETITION MARK
+ {0xAAF5, 0xAAF5, prCM, gcMc}, // MEETEI MAYEK VOWEL SIGN VISARGA
+ {0xAAF6, 0xAAF6, prCM, gcMn}, // MEETEI MAYEK VIRAMA
+ {0xAB01, 0xAB06, prAL, gcLo}, // [6] ETHIOPIC SYLLABLE TTHU..ETHIOPIC SYLLABLE TTHO
+ {0xAB09, 0xAB0E, prAL, gcLo}, // [6] ETHIOPIC SYLLABLE DDHU..ETHIOPIC SYLLABLE DDHO
+ {0xAB11, 0xAB16, prAL, gcLo}, // [6] ETHIOPIC SYLLABLE DZU..ETHIOPIC SYLLABLE DZO
+ {0xAB20, 0xAB26, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE CCHHA..ETHIOPIC SYLLABLE CCHHO
+ {0xAB28, 0xAB2E, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE BBA..ETHIOPIC SYLLABLE BBO
+ {0xAB30, 0xAB5A, prAL, gcLl}, // [43] LATIN SMALL LETTER BARRED ALPHA..LATIN SMALL LETTER Y WITH SHORT RIGHT LEG
+ {0xAB5B, 0xAB5B, prAL, gcSk}, // MODIFIER BREVE WITH INVERTED BREVE
+ {0xAB5C, 0xAB5F, prAL, gcLm}, // [4] MODIFIER LETTER SMALL HENG..MODIFIER LETTER SMALL U WITH LEFT HOOK
+ {0xAB60, 0xAB68, prAL, gcLl}, // [9] LATIN SMALL LETTER SAKHA YAT..LATIN SMALL LETTER TURNED R WITH MIDDLE TILDE
+ {0xAB69, 0xAB69, prAL, gcLm}, // MODIFIER LETTER SMALL TURNED W
+ {0xAB6A, 0xAB6B, prAL, gcSk}, // [2] MODIFIER LETTER LEFT TACK..MODIFIER LETTER RIGHT TACK
+ {0xAB70, 0xABBF, prAL, gcLl}, // [80] CHEROKEE SMALL LETTER A..CHEROKEE SMALL LETTER YA
+ {0xABC0, 0xABE2, prAL, gcLo}, // [35] MEETEI MAYEK LETTER KOK..MEETEI MAYEK LETTER I LONSUM
+ {0xABE3, 0xABE4, prCM, gcMc}, // [2] MEETEI MAYEK VOWEL SIGN ONAP..MEETEI MAYEK VOWEL SIGN INAP
+ {0xABE5, 0xABE5, prCM, gcMn}, // MEETEI MAYEK VOWEL SIGN ANAP
+ {0xABE6, 0xABE7, prCM, gcMc}, // [2] MEETEI MAYEK VOWEL SIGN YENAP..MEETEI MAYEK VOWEL SIGN SOUNAP
+ {0xABE8, 0xABE8, prCM, gcMn}, // MEETEI MAYEK VOWEL SIGN UNAP
+ {0xABE9, 0xABEA, prCM, gcMc}, // [2] MEETEI MAYEK VOWEL SIGN CHEINAP..MEETEI MAYEK VOWEL SIGN NUNG
+ {0xABEB, 0xABEB, prBA, gcPo}, // MEETEI MAYEK CHEIKHEI
+ {0xABEC, 0xABEC, prCM, gcMc}, // MEETEI MAYEK LUM IYEK
+ {0xABED, 0xABED, prCM, gcMn}, // MEETEI MAYEK APUN IYEK
+ {0xABF0, 0xABF9, prNU, gcNd}, // [10] MEETEI MAYEK DIGIT ZERO..MEETEI MAYEK DIGIT NINE
+ {0xAC00, 0xAC00, prH2, gcLo}, // HANGUL SYLLABLE GA
+ {0xAC01, 0xAC1B, prH3, gcLo}, // [27] HANGUL SYLLABLE GAG..HANGUL SYLLABLE GAH
+ {0xAC1C, 0xAC1C, prH2, gcLo}, // HANGUL SYLLABLE GAE
+ {0xAC1D, 0xAC37, prH3, gcLo}, // [27] HANGUL SYLLABLE GAEG..HANGUL SYLLABLE GAEH
+ {0xAC38, 0xAC38, prH2, gcLo}, // HANGUL SYLLABLE GYA
+ {0xAC39, 0xAC53, prH3, gcLo}, // [27] HANGUL SYLLABLE GYAG..HANGUL SYLLABLE GYAH
+ {0xAC54, 0xAC54, prH2, gcLo}, // HANGUL SYLLABLE GYAE
+ {0xAC55, 0xAC6F, prH3, gcLo}, // [27] HANGUL SYLLABLE GYAEG..HANGUL SYLLABLE GYAEH
+ {0xAC70, 0xAC70, prH2, gcLo}, // HANGUL SYLLABLE GEO
+ {0xAC71, 0xAC8B, prH3, gcLo}, // [27] HANGUL SYLLABLE GEOG..HANGUL SYLLABLE GEOH
+ {0xAC8C, 0xAC8C, prH2, gcLo}, // HANGUL SYLLABLE GE
+ {0xAC8D, 0xACA7, prH3, gcLo}, // [27] HANGUL SYLLABLE GEG..HANGUL SYLLABLE GEH
+ {0xACA8, 0xACA8, prH2, gcLo}, // HANGUL SYLLABLE GYEO
+ {0xACA9, 0xACC3, prH3, gcLo}, // [27] HANGUL SYLLABLE GYEOG..HANGUL SYLLABLE GYEOH
+ {0xACC4, 0xACC4, prH2, gcLo}, // HANGUL SYLLABLE GYE
+ {0xACC5, 0xACDF, prH3, gcLo}, // [27] HANGUL SYLLABLE GYEG..HANGUL SYLLABLE GYEH
+ {0xACE0, 0xACE0, prH2, gcLo}, // HANGUL SYLLABLE GO
+ {0xACE1, 0xACFB, prH3, gcLo}, // [27] HANGUL SYLLABLE GOG..HANGUL SYLLABLE GOH
+ {0xACFC, 0xACFC, prH2, gcLo}, // HANGUL SYLLABLE GWA
+ {0xACFD, 0xAD17, prH3, gcLo}, // [27] HANGUL SYLLABLE GWAG..HANGUL SYLLABLE GWAH
+ {0xAD18, 0xAD18, prH2, gcLo}, // HANGUL SYLLABLE GWAE
+ {0xAD19, 0xAD33, prH3, gcLo}, // [27] HANGUL SYLLABLE GWAEG..HANGUL SYLLABLE GWAEH
+ {0xAD34, 0xAD34, prH2, gcLo}, // HANGUL SYLLABLE GOE
+ {0xAD35, 0xAD4F, prH3, gcLo}, // [27] HANGUL SYLLABLE GOEG..HANGUL SYLLABLE GOEH
+ {0xAD50, 0xAD50, prH2, gcLo}, // HANGUL SYLLABLE GYO
+ {0xAD51, 0xAD6B, prH3, gcLo}, // [27] HANGUL SYLLABLE GYOG..HANGUL SYLLABLE GYOH
+ {0xAD6C, 0xAD6C, prH2, gcLo}, // HANGUL SYLLABLE GU
+ {0xAD6D, 0xAD87, prH3, gcLo}, // [27] HANGUL SYLLABLE GUG..HANGUL SYLLABLE GUH
+ {0xAD88, 0xAD88, prH2, gcLo}, // HANGUL SYLLABLE GWEO
+ {0xAD89, 0xADA3, prH3, gcLo}, // [27] HANGUL SYLLABLE GWEOG..HANGUL SYLLABLE GWEOH
+ {0xADA4, 0xADA4, prH2, gcLo}, // HANGUL SYLLABLE GWE
+ {0xADA5, 0xADBF, prH3, gcLo}, // [27] HANGUL SYLLABLE GWEG..HANGUL SYLLABLE GWEH
+ {0xADC0, 0xADC0, prH2, gcLo}, // HANGUL SYLLABLE GWI
+ {0xADC1, 0xADDB, prH3, gcLo}, // [27] HANGUL SYLLABLE GWIG..HANGUL SYLLABLE GWIH
+ {0xADDC, 0xADDC, prH2, gcLo}, // HANGUL SYLLABLE GYU
+ {0xADDD, 0xADF7, prH3, gcLo}, // [27] HANGUL SYLLABLE GYUG..HANGUL SYLLABLE GYUH
+ {0xADF8, 0xADF8, prH2, gcLo}, // HANGUL SYLLABLE GEU
+ {0xADF9, 0xAE13, prH3, gcLo}, // [27] HANGUL SYLLABLE GEUG..HANGUL SYLLABLE GEUH
+ {0xAE14, 0xAE14, prH2, gcLo}, // HANGUL SYLLABLE GYI
+ {0xAE15, 0xAE2F, prH3, gcLo}, // [27] HANGUL SYLLABLE GYIG..HANGUL SYLLABLE GYIH
+ {0xAE30, 0xAE30, prH2, gcLo}, // HANGUL SYLLABLE GI
+ {0xAE31, 0xAE4B, prH3, gcLo}, // [27] HANGUL SYLLABLE GIG..HANGUL SYLLABLE GIH
+ {0xAE4C, 0xAE4C, prH2, gcLo}, // HANGUL SYLLABLE GGA
+ {0xAE4D, 0xAE67, prH3, gcLo}, // [27] HANGUL SYLLABLE GGAG..HANGUL SYLLABLE GGAH
+ {0xAE68, 0xAE68, prH2, gcLo}, // HANGUL SYLLABLE GGAE
+ {0xAE69, 0xAE83, prH3, gcLo}, // [27] HANGUL SYLLABLE GGAEG..HANGUL SYLLABLE GGAEH
+ {0xAE84, 0xAE84, prH2, gcLo}, // HANGUL SYLLABLE GGYA
+ {0xAE85, 0xAE9F, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYAG..HANGUL SYLLABLE GGYAH
+ {0xAEA0, 0xAEA0, prH2, gcLo}, // HANGUL SYLLABLE GGYAE
+ {0xAEA1, 0xAEBB, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYAEG..HANGUL SYLLABLE GGYAEH
+ {0xAEBC, 0xAEBC, prH2, gcLo}, // HANGUL SYLLABLE GGEO
+ {0xAEBD, 0xAED7, prH3, gcLo}, // [27] HANGUL SYLLABLE GGEOG..HANGUL SYLLABLE GGEOH
+ {0xAED8, 0xAED8, prH2, gcLo}, // HANGUL SYLLABLE GGE
+ {0xAED9, 0xAEF3, prH3, gcLo}, // [27] HANGUL SYLLABLE GGEG..HANGUL SYLLABLE GGEH
+ {0xAEF4, 0xAEF4, prH2, gcLo}, // HANGUL SYLLABLE GGYEO
+ {0xAEF5, 0xAF0F, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYEOG..HANGUL SYLLABLE GGYEOH
+ {0xAF10, 0xAF10, prH2, gcLo}, // HANGUL SYLLABLE GGYE
+ {0xAF11, 0xAF2B, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYEG..HANGUL SYLLABLE GGYEH
+ {0xAF2C, 0xAF2C, prH2, gcLo}, // HANGUL SYLLABLE GGO
+ {0xAF2D, 0xAF47, prH3, gcLo}, // [27] HANGUL SYLLABLE GGOG..HANGUL SYLLABLE GGOH
+ {0xAF48, 0xAF48, prH2, gcLo}, // HANGUL SYLLABLE GGWA
+ {0xAF49, 0xAF63, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWAG..HANGUL SYLLABLE GGWAH
+ {0xAF64, 0xAF64, prH2, gcLo}, // HANGUL SYLLABLE GGWAE
+ {0xAF65, 0xAF7F, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWAEG..HANGUL SYLLABLE GGWAEH
+ {0xAF80, 0xAF80, prH2, gcLo}, // HANGUL SYLLABLE GGOE
+ {0xAF81, 0xAF9B, prH3, gcLo}, // [27] HANGUL SYLLABLE GGOEG..HANGUL SYLLABLE GGOEH
+ {0xAF9C, 0xAF9C, prH2, gcLo}, // HANGUL SYLLABLE GGYO
+ {0xAF9D, 0xAFB7, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYOG..HANGUL SYLLABLE GGYOH
+ {0xAFB8, 0xAFB8, prH2, gcLo}, // HANGUL SYLLABLE GGU
+ {0xAFB9, 0xAFD3, prH3, gcLo}, // [27] HANGUL SYLLABLE GGUG..HANGUL SYLLABLE GGUH
+ {0xAFD4, 0xAFD4, prH2, gcLo}, // HANGUL SYLLABLE GGWEO
+ {0xAFD5, 0xAFEF, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWEOG..HANGUL SYLLABLE GGWEOH
+ {0xAFF0, 0xAFF0, prH2, gcLo}, // HANGUL SYLLABLE GGWE
+ {0xAFF1, 0xB00B, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWEG..HANGUL SYLLABLE GGWEH
+ {0xB00C, 0xB00C, prH2, gcLo}, // HANGUL SYLLABLE GGWI
+ {0xB00D, 0xB027, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWIG..HANGUL SYLLABLE GGWIH
+ {0xB028, 0xB028, prH2, gcLo}, // HANGUL SYLLABLE GGYU
+ {0xB029, 0xB043, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYUG..HANGUL SYLLABLE GGYUH
+ {0xB044, 0xB044, prH2, gcLo}, // HANGUL SYLLABLE GGEU
+ {0xB045, 0xB05F, prH3, gcLo}, // [27] HANGUL SYLLABLE GGEUG..HANGUL SYLLABLE GGEUH
+ {0xB060, 0xB060, prH2, gcLo}, // HANGUL SYLLABLE GGYI
+ {0xB061, 0xB07B, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYIG..HANGUL SYLLABLE GGYIH
+ {0xB07C, 0xB07C, prH2, gcLo}, // HANGUL SYLLABLE GGI
+ {0xB07D, 0xB097, prH3, gcLo}, // [27] HANGUL SYLLABLE GGIG..HANGUL SYLLABLE GGIH
+ {0xB098, 0xB098, prH2, gcLo}, // HANGUL SYLLABLE NA
+ {0xB099, 0xB0B3, prH3, gcLo}, // [27] HANGUL SYLLABLE NAG..HANGUL SYLLABLE NAH
+ {0xB0B4, 0xB0B4, prH2, gcLo}, // HANGUL SYLLABLE NAE
+ {0xB0B5, 0xB0CF, prH3, gcLo}, // [27] HANGUL SYLLABLE NAEG..HANGUL SYLLABLE NAEH
+ {0xB0D0, 0xB0D0, prH2, gcLo}, // HANGUL SYLLABLE NYA
+ {0xB0D1, 0xB0EB, prH3, gcLo}, // [27] HANGUL SYLLABLE NYAG..HANGUL SYLLABLE NYAH
+ {0xB0EC, 0xB0EC, prH2, gcLo}, // HANGUL SYLLABLE NYAE
+ {0xB0ED, 0xB107, prH3, gcLo}, // [27] HANGUL SYLLABLE NYAEG..HANGUL SYLLABLE NYAEH
+ {0xB108, 0xB108, prH2, gcLo}, // HANGUL SYLLABLE NEO
+ {0xB109, 0xB123, prH3, gcLo}, // [27] HANGUL SYLLABLE NEOG..HANGUL SYLLABLE NEOH
+ {0xB124, 0xB124, prH2, gcLo}, // HANGUL SYLLABLE NE
+ {0xB125, 0xB13F, prH3, gcLo}, // [27] HANGUL SYLLABLE NEG..HANGUL SYLLABLE NEH
+ {0xB140, 0xB140, prH2, gcLo}, // HANGUL SYLLABLE NYEO
+ {0xB141, 0xB15B, prH3, gcLo}, // [27] HANGUL SYLLABLE NYEOG..HANGUL SYLLABLE NYEOH
+ {0xB15C, 0xB15C, prH2, gcLo}, // HANGUL SYLLABLE NYE
+ {0xB15D, 0xB177, prH3, gcLo}, // [27] HANGUL SYLLABLE NYEG..HANGUL SYLLABLE NYEH
+ {0xB178, 0xB178, prH2, gcLo}, // HANGUL SYLLABLE NO
+ {0xB179, 0xB193, prH3, gcLo}, // [27] HANGUL SYLLABLE NOG..HANGUL SYLLABLE NOH
+ {0xB194, 0xB194, prH2, gcLo}, // HANGUL SYLLABLE NWA
+ {0xB195, 0xB1AF, prH3, gcLo}, // [27] HANGUL SYLLABLE NWAG..HANGUL SYLLABLE NWAH
+ {0xB1B0, 0xB1B0, prH2, gcLo}, // HANGUL SYLLABLE NWAE
+ {0xB1B1, 0xB1CB, prH3, gcLo}, // [27] HANGUL SYLLABLE NWAEG..HANGUL SYLLABLE NWAEH
+ {0xB1CC, 0xB1CC, prH2, gcLo}, // HANGUL SYLLABLE NOE
+ {0xB1CD, 0xB1E7, prH3, gcLo}, // [27] HANGUL SYLLABLE NOEG..HANGUL SYLLABLE NOEH
+ {0xB1E8, 0xB1E8, prH2, gcLo}, // HANGUL SYLLABLE NYO
+ {0xB1E9, 0xB203, prH3, gcLo}, // [27] HANGUL SYLLABLE NYOG..HANGUL SYLLABLE NYOH
+ {0xB204, 0xB204, prH2, gcLo}, // HANGUL SYLLABLE NU
+ {0xB205, 0xB21F, prH3, gcLo}, // [27] HANGUL SYLLABLE NUG..HANGUL SYLLABLE NUH
+ {0xB220, 0xB220, prH2, gcLo}, // HANGUL SYLLABLE NWEO
+ {0xB221, 0xB23B, prH3, gcLo}, // [27] HANGUL SYLLABLE NWEOG..HANGUL SYLLABLE NWEOH
+ {0xB23C, 0xB23C, prH2, gcLo}, // HANGUL SYLLABLE NWE
+ {0xB23D, 0xB257, prH3, gcLo}, // [27] HANGUL SYLLABLE NWEG..HANGUL SYLLABLE NWEH
+ {0xB258, 0xB258, prH2, gcLo}, // HANGUL SYLLABLE NWI
+ {0xB259, 0xB273, prH3, gcLo}, // [27] HANGUL SYLLABLE NWIG..HANGUL SYLLABLE NWIH
+ {0xB274, 0xB274, prH2, gcLo}, // HANGUL SYLLABLE NYU
+ {0xB275, 0xB28F, prH3, gcLo}, // [27] HANGUL SYLLABLE NYUG..HANGUL SYLLABLE NYUH
+ {0xB290, 0xB290, prH2, gcLo}, // HANGUL SYLLABLE NEU
+ {0xB291, 0xB2AB, prH3, gcLo}, // [27] HANGUL SYLLABLE NEUG..HANGUL SYLLABLE NEUH
+ {0xB2AC, 0xB2AC, prH2, gcLo}, // HANGUL SYLLABLE NYI
+ {0xB2AD, 0xB2C7, prH3, gcLo}, // [27] HANGUL SYLLABLE NYIG..HANGUL SYLLABLE NYIH
+ {0xB2C8, 0xB2C8, prH2, gcLo}, // HANGUL SYLLABLE NI
+ {0xB2C9, 0xB2E3, prH3, gcLo}, // [27] HANGUL SYLLABLE NIG..HANGUL SYLLABLE NIH
+ {0xB2E4, 0xB2E4, prH2, gcLo}, // HANGUL SYLLABLE DA
+ {0xB2E5, 0xB2FF, prH3, gcLo}, // [27] HANGUL SYLLABLE DAG..HANGUL SYLLABLE DAH
+ {0xB300, 0xB300, prH2, gcLo}, // HANGUL SYLLABLE DAE
+ {0xB301, 0xB31B, prH3, gcLo}, // [27] HANGUL SYLLABLE DAEG..HANGUL SYLLABLE DAEH
+ {0xB31C, 0xB31C, prH2, gcLo}, // HANGUL SYLLABLE DYA
+ {0xB31D, 0xB337, prH3, gcLo}, // [27] HANGUL SYLLABLE DYAG..HANGUL SYLLABLE DYAH
+ {0xB338, 0xB338, prH2, gcLo}, // HANGUL SYLLABLE DYAE
+ {0xB339, 0xB353, prH3, gcLo}, // [27] HANGUL SYLLABLE DYAEG..HANGUL SYLLABLE DYAEH
+ {0xB354, 0xB354, prH2, gcLo}, // HANGUL SYLLABLE DEO
+ {0xB355, 0xB36F, prH3, gcLo}, // [27] HANGUL SYLLABLE DEOG..HANGUL SYLLABLE DEOH
+ {0xB370, 0xB370, prH2, gcLo}, // HANGUL SYLLABLE DE
+ {0xB371, 0xB38B, prH3, gcLo}, // [27] HANGUL SYLLABLE DEG..HANGUL SYLLABLE DEH
+ {0xB38C, 0xB38C, prH2, gcLo}, // HANGUL SYLLABLE DYEO
+ {0xB38D, 0xB3A7, prH3, gcLo}, // [27] HANGUL SYLLABLE DYEOG..HANGUL SYLLABLE DYEOH
+ {0xB3A8, 0xB3A8, prH2, gcLo}, // HANGUL SYLLABLE DYE
+ {0xB3A9, 0xB3C3, prH3, gcLo}, // [27] HANGUL SYLLABLE DYEG..HANGUL SYLLABLE DYEH
+ {0xB3C4, 0xB3C4, prH2, gcLo}, // HANGUL SYLLABLE DO
+ {0xB3C5, 0xB3DF, prH3, gcLo}, // [27] HANGUL SYLLABLE DOG..HANGUL SYLLABLE DOH
+ {0xB3E0, 0xB3E0, prH2, gcLo}, // HANGUL SYLLABLE DWA
+ {0xB3E1, 0xB3FB, prH3, gcLo}, // [27] HANGUL SYLLABLE DWAG..HANGUL SYLLABLE DWAH
+ {0xB3FC, 0xB3FC, prH2, gcLo}, // HANGUL SYLLABLE DWAE
+ {0xB3FD, 0xB417, prH3, gcLo}, // [27] HANGUL SYLLABLE DWAEG..HANGUL SYLLABLE DWAEH
+ {0xB418, 0xB418, prH2, gcLo}, // HANGUL SYLLABLE DOE
+ {0xB419, 0xB433, prH3, gcLo}, // [27] HANGUL SYLLABLE DOEG..HANGUL SYLLABLE DOEH
+ {0xB434, 0xB434, prH2, gcLo}, // HANGUL SYLLABLE DYO
+ {0xB435, 0xB44F, prH3, gcLo}, // [27] HANGUL SYLLABLE DYOG..HANGUL SYLLABLE DYOH
+ {0xB450, 0xB450, prH2, gcLo}, // HANGUL SYLLABLE DU
+ {0xB451, 0xB46B, prH3, gcLo}, // [27] HANGUL SYLLABLE DUG..HANGUL SYLLABLE DUH
+ {0xB46C, 0xB46C, prH2, gcLo}, // HANGUL SYLLABLE DWEO
+ {0xB46D, 0xB487, prH3, gcLo}, // [27] HANGUL SYLLABLE DWEOG..HANGUL SYLLABLE DWEOH
+ {0xB488, 0xB488, prH2, gcLo}, // HANGUL SYLLABLE DWE
+ {0xB489, 0xB4A3, prH3, gcLo}, // [27] HANGUL SYLLABLE DWEG..HANGUL SYLLABLE DWEH
+ {0xB4A4, 0xB4A4, prH2, gcLo}, // HANGUL SYLLABLE DWI
+ {0xB4A5, 0xB4BF, prH3, gcLo}, // [27] HANGUL SYLLABLE DWIG..HANGUL SYLLABLE DWIH
+ {0xB4C0, 0xB4C0, prH2, gcLo}, // HANGUL SYLLABLE DYU
+ {0xB4C1, 0xB4DB, prH3, gcLo}, // [27] HANGUL SYLLABLE DYUG..HANGUL SYLLABLE DYUH
+ {0xB4DC, 0xB4DC, prH2, gcLo}, // HANGUL SYLLABLE DEU
+ {0xB4DD, 0xB4F7, prH3, gcLo}, // [27] HANGUL SYLLABLE DEUG..HANGUL SYLLABLE DEUH
+ {0xB4F8, 0xB4F8, prH2, gcLo}, // HANGUL SYLLABLE DYI
+ {0xB4F9, 0xB513, prH3, gcLo}, // [27] HANGUL SYLLABLE DYIG..HANGUL SYLLABLE DYIH
+ {0xB514, 0xB514, prH2, gcLo}, // HANGUL SYLLABLE DI
+ {0xB515, 0xB52F, prH3, gcLo}, // [27] HANGUL SYLLABLE DIG..HANGUL SYLLABLE DIH
+ {0xB530, 0xB530, prH2, gcLo}, // HANGUL SYLLABLE DDA
+ {0xB531, 0xB54B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDAG..HANGUL SYLLABLE DDAH
+ {0xB54C, 0xB54C, prH2, gcLo}, // HANGUL SYLLABLE DDAE
+ {0xB54D, 0xB567, prH3, gcLo}, // [27] HANGUL SYLLABLE DDAEG..HANGUL SYLLABLE DDAEH
+ {0xB568, 0xB568, prH2, gcLo}, // HANGUL SYLLABLE DDYA
+ {0xB569, 0xB583, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYAG..HANGUL SYLLABLE DDYAH
+ {0xB584, 0xB584, prH2, gcLo}, // HANGUL SYLLABLE DDYAE
+ {0xB585, 0xB59F, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYAEG..HANGUL SYLLABLE DDYAEH
+ {0xB5A0, 0xB5A0, prH2, gcLo}, // HANGUL SYLLABLE DDEO
+ {0xB5A1, 0xB5BB, prH3, gcLo}, // [27] HANGUL SYLLABLE DDEOG..HANGUL SYLLABLE DDEOH
+ {0xB5BC, 0xB5BC, prH2, gcLo}, // HANGUL SYLLABLE DDE
+ {0xB5BD, 0xB5D7, prH3, gcLo}, // [27] HANGUL SYLLABLE DDEG..HANGUL SYLLABLE DDEH
+ {0xB5D8, 0xB5D8, prH2, gcLo}, // HANGUL SYLLABLE DDYEO
+ {0xB5D9, 0xB5F3, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYEOG..HANGUL SYLLABLE DDYEOH
+ {0xB5F4, 0xB5F4, prH2, gcLo}, // HANGUL SYLLABLE DDYE
+ {0xB5F5, 0xB60F, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYEG..HANGUL SYLLABLE DDYEH
+ {0xB610, 0xB610, prH2, gcLo}, // HANGUL SYLLABLE DDO
+ {0xB611, 0xB62B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDOG..HANGUL SYLLABLE DDOH
+ {0xB62C, 0xB62C, prH2, gcLo}, // HANGUL SYLLABLE DDWA
+ {0xB62D, 0xB647, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWAG..HANGUL SYLLABLE DDWAH
+ {0xB648, 0xB648, prH2, gcLo}, // HANGUL SYLLABLE DDWAE
+ {0xB649, 0xB663, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWAEG..HANGUL SYLLABLE DDWAEH
+ {0xB664, 0xB664, prH2, gcLo}, // HANGUL SYLLABLE DDOE
+ {0xB665, 0xB67F, prH3, gcLo}, // [27] HANGUL SYLLABLE DDOEG..HANGUL SYLLABLE DDOEH
+ {0xB680, 0xB680, prH2, gcLo}, // HANGUL SYLLABLE DDYO
+ {0xB681, 0xB69B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYOG..HANGUL SYLLABLE DDYOH
+ {0xB69C, 0xB69C, prH2, gcLo}, // HANGUL SYLLABLE DDU
+ {0xB69D, 0xB6B7, prH3, gcLo}, // [27] HANGUL SYLLABLE DDUG..HANGUL SYLLABLE DDUH
+ {0xB6B8, 0xB6B8, prH2, gcLo}, // HANGUL SYLLABLE DDWEO
+ {0xB6B9, 0xB6D3, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWEOG..HANGUL SYLLABLE DDWEOH
+ {0xB6D4, 0xB6D4, prH2, gcLo}, // HANGUL SYLLABLE DDWE
+ {0xB6D5, 0xB6EF, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWEG..HANGUL SYLLABLE DDWEH
+ {0xB6F0, 0xB6F0, prH2, gcLo}, // HANGUL SYLLABLE DDWI
+ {0xB6F1, 0xB70B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWIG..HANGUL SYLLABLE DDWIH
+ {0xB70C, 0xB70C, prH2, gcLo}, // HANGUL SYLLABLE DDYU
+ {0xB70D, 0xB727, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYUG..HANGUL SYLLABLE DDYUH
+ {0xB728, 0xB728, prH2, gcLo}, // HANGUL SYLLABLE DDEU
+ {0xB729, 0xB743, prH3, gcLo}, // [27] HANGUL SYLLABLE DDEUG..HANGUL SYLLABLE DDEUH
+ {0xB744, 0xB744, prH2, gcLo}, // HANGUL SYLLABLE DDYI
+ {0xB745, 0xB75F, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYIG..HANGUL SYLLABLE DDYIH
+ {0xB760, 0xB760, prH2, gcLo}, // HANGUL SYLLABLE DDI
+ {0xB761, 0xB77B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDIG..HANGUL SYLLABLE DDIH
+ {0xB77C, 0xB77C, prH2, gcLo}, // HANGUL SYLLABLE RA
+ {0xB77D, 0xB797, prH3, gcLo}, // [27] HANGUL SYLLABLE RAG..HANGUL SYLLABLE RAH
+ {0xB798, 0xB798, prH2, gcLo}, // HANGUL SYLLABLE RAE
+ {0xB799, 0xB7B3, prH3, gcLo}, // [27] HANGUL SYLLABLE RAEG..HANGUL SYLLABLE RAEH
+ {0xB7B4, 0xB7B4, prH2, gcLo}, // HANGUL SYLLABLE RYA
+ {0xB7B5, 0xB7CF, prH3, gcLo}, // [27] HANGUL SYLLABLE RYAG..HANGUL SYLLABLE RYAH
+ {0xB7D0, 0xB7D0, prH2, gcLo}, // HANGUL SYLLABLE RYAE
+ {0xB7D1, 0xB7EB, prH3, gcLo}, // [27] HANGUL SYLLABLE RYAEG..HANGUL SYLLABLE RYAEH
+ {0xB7EC, 0xB7EC, prH2, gcLo}, // HANGUL SYLLABLE REO
+ {0xB7ED, 0xB807, prH3, gcLo}, // [27] HANGUL SYLLABLE REOG..HANGUL SYLLABLE REOH
+ {0xB808, 0xB808, prH2, gcLo}, // HANGUL SYLLABLE RE
+ {0xB809, 0xB823, prH3, gcLo}, // [27] HANGUL SYLLABLE REG..HANGUL SYLLABLE REH
+ {0xB824, 0xB824, prH2, gcLo}, // HANGUL SYLLABLE RYEO
+ {0xB825, 0xB83F, prH3, gcLo}, // [27] HANGUL SYLLABLE RYEOG..HANGUL SYLLABLE RYEOH
+ {0xB840, 0xB840, prH2, gcLo}, // HANGUL SYLLABLE RYE
+ {0xB841, 0xB85B, prH3, gcLo}, // [27] HANGUL SYLLABLE RYEG..HANGUL SYLLABLE RYEH
+ {0xB85C, 0xB85C, prH2, gcLo}, // HANGUL SYLLABLE RO
+ {0xB85D, 0xB877, prH3, gcLo}, // [27] HANGUL SYLLABLE ROG..HANGUL SYLLABLE ROH
+ {0xB878, 0xB878, prH2, gcLo}, // HANGUL SYLLABLE RWA
+ {0xB879, 0xB893, prH3, gcLo}, // [27] HANGUL SYLLABLE RWAG..HANGUL SYLLABLE RWAH
+ {0xB894, 0xB894, prH2, gcLo}, // HANGUL SYLLABLE RWAE
+ {0xB895, 0xB8AF, prH3, gcLo}, // [27] HANGUL SYLLABLE RWAEG..HANGUL SYLLABLE RWAEH
+ {0xB8B0, 0xB8B0, prH2, gcLo}, // HANGUL SYLLABLE ROE
+ {0xB8B1, 0xB8CB, prH3, gcLo}, // [27] HANGUL SYLLABLE ROEG..HANGUL SYLLABLE ROEH
+ {0xB8CC, 0xB8CC, prH2, gcLo}, // HANGUL SYLLABLE RYO
+ {0xB8CD, 0xB8E7, prH3, gcLo}, // [27] HANGUL SYLLABLE RYOG..HANGUL SYLLABLE RYOH
+ {0xB8E8, 0xB8E8, prH2, gcLo}, // HANGUL SYLLABLE RU
+ {0xB8E9, 0xB903, prH3, gcLo}, // [27] HANGUL SYLLABLE RUG..HANGUL SYLLABLE RUH
+ {0xB904, 0xB904, prH2, gcLo}, // HANGUL SYLLABLE RWEO
+ {0xB905, 0xB91F, prH3, gcLo}, // [27] HANGUL SYLLABLE RWEOG..HANGUL SYLLABLE RWEOH
+ {0xB920, 0xB920, prH2, gcLo}, // HANGUL SYLLABLE RWE
+ {0xB921, 0xB93B, prH3, gcLo}, // [27] HANGUL SYLLABLE RWEG..HANGUL SYLLABLE RWEH
+ {0xB93C, 0xB93C, prH2, gcLo}, // HANGUL SYLLABLE RWI
+ {0xB93D, 0xB957, prH3, gcLo}, // [27] HANGUL SYLLABLE RWIG..HANGUL SYLLABLE RWIH
+ {0xB958, 0xB958, prH2, gcLo}, // HANGUL SYLLABLE RYU
+ {0xB959, 0xB973, prH3, gcLo}, // [27] HANGUL SYLLABLE RYUG..HANGUL SYLLABLE RYUH
+ {0xB974, 0xB974, prH2, gcLo}, // HANGUL SYLLABLE REU
+ {0xB975, 0xB98F, prH3, gcLo}, // [27] HANGUL SYLLABLE REUG..HANGUL SYLLABLE REUH
+ {0xB990, 0xB990, prH2, gcLo}, // HANGUL SYLLABLE RYI
+ {0xB991, 0xB9AB, prH3, gcLo}, // [27] HANGUL SYLLABLE RYIG..HANGUL SYLLABLE RYIH
+ {0xB9AC, 0xB9AC, prH2, gcLo}, // HANGUL SYLLABLE RI
+ {0xB9AD, 0xB9C7, prH3, gcLo}, // [27] HANGUL SYLLABLE RIG..HANGUL SYLLABLE RIH
+ {0xB9C8, 0xB9C8, prH2, gcLo}, // HANGUL SYLLABLE MA
+ {0xB9C9, 0xB9E3, prH3, gcLo}, // [27] HANGUL SYLLABLE MAG..HANGUL SYLLABLE MAH
+ {0xB9E4, 0xB9E4, prH2, gcLo}, // HANGUL SYLLABLE MAE
+ {0xB9E5, 0xB9FF, prH3, gcLo}, // [27] HANGUL SYLLABLE MAEG..HANGUL SYLLABLE MAEH
+ {0xBA00, 0xBA00, prH2, gcLo}, // HANGUL SYLLABLE MYA
+ {0xBA01, 0xBA1B, prH3, gcLo}, // [27] HANGUL SYLLABLE MYAG..HANGUL SYLLABLE MYAH
+ {0xBA1C, 0xBA1C, prH2, gcLo}, // HANGUL SYLLABLE MYAE
+ {0xBA1D, 0xBA37, prH3, gcLo}, // [27] HANGUL SYLLABLE MYAEG..HANGUL SYLLABLE MYAEH
+ {0xBA38, 0xBA38, prH2, gcLo}, // HANGUL SYLLABLE MEO
+ {0xBA39, 0xBA53, prH3, gcLo}, // [27] HANGUL SYLLABLE MEOG..HANGUL SYLLABLE MEOH
+ {0xBA54, 0xBA54, prH2, gcLo}, // HANGUL SYLLABLE ME
+ {0xBA55, 0xBA6F, prH3, gcLo}, // [27] HANGUL SYLLABLE MEG..HANGUL SYLLABLE MEH
+ {0xBA70, 0xBA70, prH2, gcLo}, // HANGUL SYLLABLE MYEO
+ {0xBA71, 0xBA8B, prH3, gcLo}, // [27] HANGUL SYLLABLE MYEOG..HANGUL SYLLABLE MYEOH
+ {0xBA8C, 0xBA8C, prH2, gcLo}, // HANGUL SYLLABLE MYE
+ {0xBA8D, 0xBAA7, prH3, gcLo}, // [27] HANGUL SYLLABLE MYEG..HANGUL SYLLABLE MYEH
+ {0xBAA8, 0xBAA8, prH2, gcLo}, // HANGUL SYLLABLE MO
+ {0xBAA9, 0xBAC3, prH3, gcLo}, // [27] HANGUL SYLLABLE MOG..HANGUL SYLLABLE MOH
+ {0xBAC4, 0xBAC4, prH2, gcLo}, // HANGUL SYLLABLE MWA
+ {0xBAC5, 0xBADF, prH3, gcLo}, // [27] HANGUL SYLLABLE MWAG..HANGUL SYLLABLE MWAH
+ {0xBAE0, 0xBAE0, prH2, gcLo}, // HANGUL SYLLABLE MWAE
+ {0xBAE1, 0xBAFB, prH3, gcLo}, // [27] HANGUL SYLLABLE MWAEG..HANGUL SYLLABLE MWAEH
+ {0xBAFC, 0xBAFC, prH2, gcLo}, // HANGUL SYLLABLE MOE
+ {0xBAFD, 0xBB17, prH3, gcLo}, // [27] HANGUL SYLLABLE MOEG..HANGUL SYLLABLE MOEH
+ {0xBB18, 0xBB18, prH2, gcLo}, // HANGUL SYLLABLE MYO
+ {0xBB19, 0xBB33, prH3, gcLo}, // [27] HANGUL SYLLABLE MYOG..HANGUL SYLLABLE MYOH
+ {0xBB34, 0xBB34, prH2, gcLo}, // HANGUL SYLLABLE MU
+ {0xBB35, 0xBB4F, prH3, gcLo}, // [27] HANGUL SYLLABLE MUG..HANGUL SYLLABLE MUH
+ {0xBB50, 0xBB50, prH2, gcLo}, // HANGUL SYLLABLE MWEO
+ {0xBB51, 0xBB6B, prH3, gcLo}, // [27] HANGUL SYLLABLE MWEOG..HANGUL SYLLABLE MWEOH
+ {0xBB6C, 0xBB6C, prH2, gcLo}, // HANGUL SYLLABLE MWE
+ {0xBB6D, 0xBB87, prH3, gcLo}, // [27] HANGUL SYLLABLE MWEG..HANGUL SYLLABLE MWEH
+ {0xBB88, 0xBB88, prH2, gcLo}, // HANGUL SYLLABLE MWI
+ {0xBB89, 0xBBA3, prH3, gcLo}, // [27] HANGUL SYLLABLE MWIG..HANGUL SYLLABLE MWIH
+ {0xBBA4, 0xBBA4, prH2, gcLo}, // HANGUL SYLLABLE MYU
+ {0xBBA5, 0xBBBF, prH3, gcLo}, // [27] HANGUL SYLLABLE MYUG..HANGUL SYLLABLE MYUH
+ {0xBBC0, 0xBBC0, prH2, gcLo}, // HANGUL SYLLABLE MEU
+ {0xBBC1, 0xBBDB, prH3, gcLo}, // [27] HANGUL SYLLABLE MEUG..HANGUL SYLLABLE MEUH
+ {0xBBDC, 0xBBDC, prH2, gcLo}, // HANGUL SYLLABLE MYI
+ {0xBBDD, 0xBBF7, prH3, gcLo}, // [27] HANGUL SYLLABLE MYIG..HANGUL SYLLABLE MYIH
+ {0xBBF8, 0xBBF8, prH2, gcLo}, // HANGUL SYLLABLE MI
+ {0xBBF9, 0xBC13, prH3, gcLo}, // [27] HANGUL SYLLABLE MIG..HANGUL SYLLABLE MIH
+ {0xBC14, 0xBC14, prH2, gcLo}, // HANGUL SYLLABLE BA
+ {0xBC15, 0xBC2F, prH3, gcLo}, // [27] HANGUL SYLLABLE BAG..HANGUL SYLLABLE BAH
+ {0xBC30, 0xBC30, prH2, gcLo}, // HANGUL SYLLABLE BAE
+ {0xBC31, 0xBC4B, prH3, gcLo}, // [27] HANGUL SYLLABLE BAEG..HANGUL SYLLABLE BAEH
+ {0xBC4C, 0xBC4C, prH2, gcLo}, // HANGUL SYLLABLE BYA
+ {0xBC4D, 0xBC67, prH3, gcLo}, // [27] HANGUL SYLLABLE BYAG..HANGUL SYLLABLE BYAH
+ {0xBC68, 0xBC68, prH2, gcLo}, // HANGUL SYLLABLE BYAE
+ {0xBC69, 0xBC83, prH3, gcLo}, // [27] HANGUL SYLLABLE BYAEG..HANGUL SYLLABLE BYAEH
+ {0xBC84, 0xBC84, prH2, gcLo}, // HANGUL SYLLABLE BEO
+ {0xBC85, 0xBC9F, prH3, gcLo}, // [27] HANGUL SYLLABLE BEOG..HANGUL SYLLABLE BEOH
+ {0xBCA0, 0xBCA0, prH2, gcLo}, // HANGUL SYLLABLE BE
+ {0xBCA1, 0xBCBB, prH3, gcLo}, // [27] HANGUL SYLLABLE BEG..HANGUL SYLLABLE BEH
+ {0xBCBC, 0xBCBC, prH2, gcLo}, // HANGUL SYLLABLE BYEO
+ {0xBCBD, 0xBCD7, prH3, gcLo}, // [27] HANGUL SYLLABLE BYEOG..HANGUL SYLLABLE BYEOH
+ {0xBCD8, 0xBCD8, prH2, gcLo}, // HANGUL SYLLABLE BYE
+ {0xBCD9, 0xBCF3, prH3, gcLo}, // [27] HANGUL SYLLABLE BYEG..HANGUL SYLLABLE BYEH
+ {0xBCF4, 0xBCF4, prH2, gcLo}, // HANGUL SYLLABLE BO
+ {0xBCF5, 0xBD0F, prH3, gcLo}, // [27] HANGUL SYLLABLE BOG..HANGUL SYLLABLE BOH
+ {0xBD10, 0xBD10, prH2, gcLo}, // HANGUL SYLLABLE BWA
+ {0xBD11, 0xBD2B, prH3, gcLo}, // [27] HANGUL SYLLABLE BWAG..HANGUL SYLLABLE BWAH
+ {0xBD2C, 0xBD2C, prH2, gcLo}, // HANGUL SYLLABLE BWAE
+ {0xBD2D, 0xBD47, prH3, gcLo}, // [27] HANGUL SYLLABLE BWAEG..HANGUL SYLLABLE BWAEH
+ {0xBD48, 0xBD48, prH2, gcLo}, // HANGUL SYLLABLE BOE
+ {0xBD49, 0xBD63, prH3, gcLo}, // [27] HANGUL SYLLABLE BOEG..HANGUL SYLLABLE BOEH
+ {0xBD64, 0xBD64, prH2, gcLo}, // HANGUL SYLLABLE BYO
+ {0xBD65, 0xBD7F, prH3, gcLo}, // [27] HANGUL SYLLABLE BYOG..HANGUL SYLLABLE BYOH
+ {0xBD80, 0xBD80, prH2, gcLo}, // HANGUL SYLLABLE BU
+ {0xBD81, 0xBD9B, prH3, gcLo}, // [27] HANGUL SYLLABLE BUG..HANGUL SYLLABLE BUH
+ {0xBD9C, 0xBD9C, prH2, gcLo}, // HANGUL SYLLABLE BWEO
+ {0xBD9D, 0xBDB7, prH3, gcLo}, // [27] HANGUL SYLLABLE BWEOG..HANGUL SYLLABLE BWEOH
+ {0xBDB8, 0xBDB8, prH2, gcLo}, // HANGUL SYLLABLE BWE
+ {0xBDB9, 0xBDD3, prH3, gcLo}, // [27] HANGUL SYLLABLE BWEG..HANGUL SYLLABLE BWEH
+ {0xBDD4, 0xBDD4, prH2, gcLo}, // HANGUL SYLLABLE BWI
+ {0xBDD5, 0xBDEF, prH3, gcLo}, // [27] HANGUL SYLLABLE BWIG..HANGUL SYLLABLE BWIH
+ {0xBDF0, 0xBDF0, prH2, gcLo}, // HANGUL SYLLABLE BYU
+ {0xBDF1, 0xBE0B, prH3, gcLo}, // [27] HANGUL SYLLABLE BYUG..HANGUL SYLLABLE BYUH
+ {0xBE0C, 0xBE0C, prH2, gcLo}, // HANGUL SYLLABLE BEU
+ {0xBE0D, 0xBE27, prH3, gcLo}, // [27] HANGUL SYLLABLE BEUG..HANGUL SYLLABLE BEUH
+ {0xBE28, 0xBE28, prH2, gcLo}, // HANGUL SYLLABLE BYI
+ {0xBE29, 0xBE43, prH3, gcLo}, // [27] HANGUL SYLLABLE BYIG..HANGUL SYLLABLE BYIH
+ {0xBE44, 0xBE44, prH2, gcLo}, // HANGUL SYLLABLE BI
+ {0xBE45, 0xBE5F, prH3, gcLo}, // [27] HANGUL SYLLABLE BIG..HANGUL SYLLABLE BIH
+ {0xBE60, 0xBE60, prH2, gcLo}, // HANGUL SYLLABLE BBA
+ {0xBE61, 0xBE7B, prH3, gcLo}, // [27] HANGUL SYLLABLE BBAG..HANGUL SYLLABLE BBAH
+ {0xBE7C, 0xBE7C, prH2, gcLo}, // HANGUL SYLLABLE BBAE
+ {0xBE7D, 0xBE97, prH3, gcLo}, // [27] HANGUL SYLLABLE BBAEG..HANGUL SYLLABLE BBAEH
+ {0xBE98, 0xBE98, prH2, gcLo}, // HANGUL SYLLABLE BBYA
+ {0xBE99, 0xBEB3, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYAG..HANGUL SYLLABLE BBYAH
+ {0xBEB4, 0xBEB4, prH2, gcLo}, // HANGUL SYLLABLE BBYAE
+ {0xBEB5, 0xBECF, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYAEG..HANGUL SYLLABLE BBYAEH
+ {0xBED0, 0xBED0, prH2, gcLo}, // HANGUL SYLLABLE BBEO
+ {0xBED1, 0xBEEB, prH3, gcLo}, // [27] HANGUL SYLLABLE BBEOG..HANGUL SYLLABLE BBEOH
+ {0xBEEC, 0xBEEC, prH2, gcLo}, // HANGUL SYLLABLE BBE
+ {0xBEED, 0xBF07, prH3, gcLo}, // [27] HANGUL SYLLABLE BBEG..HANGUL SYLLABLE BBEH
+ {0xBF08, 0xBF08, prH2, gcLo}, // HANGUL SYLLABLE BBYEO
+ {0xBF09, 0xBF23, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYEOG..HANGUL SYLLABLE BBYEOH
+ {0xBF24, 0xBF24, prH2, gcLo}, // HANGUL SYLLABLE BBYE
+ {0xBF25, 0xBF3F, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYEG..HANGUL SYLLABLE BBYEH
+ {0xBF40, 0xBF40, prH2, gcLo}, // HANGUL SYLLABLE BBO
+ {0xBF41, 0xBF5B, prH3, gcLo}, // [27] HANGUL SYLLABLE BBOG..HANGUL SYLLABLE BBOH
+ {0xBF5C, 0xBF5C, prH2, gcLo}, // HANGUL SYLLABLE BBWA
+ {0xBF5D, 0xBF77, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWAG..HANGUL SYLLABLE BBWAH
+ {0xBF78, 0xBF78, prH2, gcLo}, // HANGUL SYLLABLE BBWAE
+ {0xBF79, 0xBF93, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWAEG..HANGUL SYLLABLE BBWAEH
+ {0xBF94, 0xBF94, prH2, gcLo}, // HANGUL SYLLABLE BBOE
+ {0xBF95, 0xBFAF, prH3, gcLo}, // [27] HANGUL SYLLABLE BBOEG..HANGUL SYLLABLE BBOEH
+ {0xBFB0, 0xBFB0, prH2, gcLo}, // HANGUL SYLLABLE BBYO
+ {0xBFB1, 0xBFCB, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYOG..HANGUL SYLLABLE BBYOH
+ {0xBFCC, 0xBFCC, prH2, gcLo}, // HANGUL SYLLABLE BBU
+ {0xBFCD, 0xBFE7, prH3, gcLo}, // [27] HANGUL SYLLABLE BBUG..HANGUL SYLLABLE BBUH
+ {0xBFE8, 0xBFE8, prH2, gcLo}, // HANGUL SYLLABLE BBWEO
+ {0xBFE9, 0xC003, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWEOG..HANGUL SYLLABLE BBWEOH
+ {0xC004, 0xC004, prH2, gcLo}, // HANGUL SYLLABLE BBWE
+ {0xC005, 0xC01F, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWEG..HANGUL SYLLABLE BBWEH
+ {0xC020, 0xC020, prH2, gcLo}, // HANGUL SYLLABLE BBWI
+ {0xC021, 0xC03B, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWIG..HANGUL SYLLABLE BBWIH
+ {0xC03C, 0xC03C, prH2, gcLo}, // HANGUL SYLLABLE BBYU
+ {0xC03D, 0xC057, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYUG..HANGUL SYLLABLE BBYUH
+ {0xC058, 0xC058, prH2, gcLo}, // HANGUL SYLLABLE BBEU
+ {0xC059, 0xC073, prH3, gcLo}, // [27] HANGUL SYLLABLE BBEUG..HANGUL SYLLABLE BBEUH
+ {0xC074, 0xC074, prH2, gcLo}, // HANGUL SYLLABLE BBYI
+ {0xC075, 0xC08F, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYIG..HANGUL SYLLABLE BBYIH
+ {0xC090, 0xC090, prH2, gcLo}, // HANGUL SYLLABLE BBI
+ {0xC091, 0xC0AB, prH3, gcLo}, // [27] HANGUL SYLLABLE BBIG..HANGUL SYLLABLE BBIH
+ {0xC0AC, 0xC0AC, prH2, gcLo}, // HANGUL SYLLABLE SA
+ {0xC0AD, 0xC0C7, prH3, gcLo}, // [27] HANGUL SYLLABLE SAG..HANGUL SYLLABLE SAH
+ {0xC0C8, 0xC0C8, prH2, gcLo}, // HANGUL SYLLABLE SAE
+ {0xC0C9, 0xC0E3, prH3, gcLo}, // [27] HANGUL SYLLABLE SAEG..HANGUL SYLLABLE SAEH
+ {0xC0E4, 0xC0E4, prH2, gcLo}, // HANGUL SYLLABLE SYA
+ {0xC0E5, 0xC0FF, prH3, gcLo}, // [27] HANGUL SYLLABLE SYAG..HANGUL SYLLABLE SYAH
+ {0xC100, 0xC100, prH2, gcLo}, // HANGUL SYLLABLE SYAE
+ {0xC101, 0xC11B, prH3, gcLo}, // [27] HANGUL SYLLABLE SYAEG..HANGUL SYLLABLE SYAEH
+ {0xC11C, 0xC11C, prH2, gcLo}, // HANGUL SYLLABLE SEO
+ {0xC11D, 0xC137, prH3, gcLo}, // [27] HANGUL SYLLABLE SEOG..HANGUL SYLLABLE SEOH
+ {0xC138, 0xC138, prH2, gcLo}, // HANGUL SYLLABLE SE
+ {0xC139, 0xC153, prH3, gcLo}, // [27] HANGUL SYLLABLE SEG..HANGUL SYLLABLE SEH
+ {0xC154, 0xC154, prH2, gcLo}, // HANGUL SYLLABLE SYEO
+ {0xC155, 0xC16F, prH3, gcLo}, // [27] HANGUL SYLLABLE SYEOG..HANGUL SYLLABLE SYEOH
+ {0xC170, 0xC170, prH2, gcLo}, // HANGUL SYLLABLE SYE
+ {0xC171, 0xC18B, prH3, gcLo}, // [27] HANGUL SYLLABLE SYEG..HANGUL SYLLABLE SYEH
+ {0xC18C, 0xC18C, prH2, gcLo}, // HANGUL SYLLABLE SO
+ {0xC18D, 0xC1A7, prH3, gcLo}, // [27] HANGUL SYLLABLE SOG..HANGUL SYLLABLE SOH
+ {0xC1A8, 0xC1A8, prH2, gcLo}, // HANGUL SYLLABLE SWA
+ {0xC1A9, 0xC1C3, prH3, gcLo}, // [27] HANGUL SYLLABLE SWAG..HANGUL SYLLABLE SWAH
+ {0xC1C4, 0xC1C4, prH2, gcLo}, // HANGUL SYLLABLE SWAE
+ {0xC1C5, 0xC1DF, prH3, gcLo}, // [27] HANGUL SYLLABLE SWAEG..HANGUL SYLLABLE SWAEH
+ {0xC1E0, 0xC1E0, prH2, gcLo}, // HANGUL SYLLABLE SOE
+ {0xC1E1, 0xC1FB, prH3, gcLo}, // [27] HANGUL SYLLABLE SOEG..HANGUL SYLLABLE SOEH
+ {0xC1FC, 0xC1FC, prH2, gcLo}, // HANGUL SYLLABLE SYO
+ {0xC1FD, 0xC217, prH3, gcLo}, // [27] HANGUL SYLLABLE SYOG..HANGUL SYLLABLE SYOH
+ {0xC218, 0xC218, prH2, gcLo}, // HANGUL SYLLABLE SU
+ {0xC219, 0xC233, prH3, gcLo}, // [27] HANGUL SYLLABLE SUG..HANGUL SYLLABLE SUH
+ {0xC234, 0xC234, prH2, gcLo}, // HANGUL SYLLABLE SWEO
+ {0xC235, 0xC24F, prH3, gcLo}, // [27] HANGUL SYLLABLE SWEOG..HANGUL SYLLABLE SWEOH
+ {0xC250, 0xC250, prH2, gcLo}, // HANGUL SYLLABLE SWE
+ {0xC251, 0xC26B, prH3, gcLo}, // [27] HANGUL SYLLABLE SWEG..HANGUL SYLLABLE SWEH
+ {0xC26C, 0xC26C, prH2, gcLo}, // HANGUL SYLLABLE SWI
+ {0xC26D, 0xC287, prH3, gcLo}, // [27] HANGUL SYLLABLE SWIG..HANGUL SYLLABLE SWIH
+ {0xC288, 0xC288, prH2, gcLo}, // HANGUL SYLLABLE SYU
+ {0xC289, 0xC2A3, prH3, gcLo}, // [27] HANGUL SYLLABLE SYUG..HANGUL SYLLABLE SYUH
+ {0xC2A4, 0xC2A4, prH2, gcLo}, // HANGUL SYLLABLE SEU
+ {0xC2A5, 0xC2BF, prH3, gcLo}, // [27] HANGUL SYLLABLE SEUG..HANGUL SYLLABLE SEUH
+ {0xC2C0, 0xC2C0, prH2, gcLo}, // HANGUL SYLLABLE SYI
+ {0xC2C1, 0xC2DB, prH3, gcLo}, // [27] HANGUL SYLLABLE SYIG..HANGUL SYLLABLE SYIH
+ {0xC2DC, 0xC2DC, prH2, gcLo}, // HANGUL SYLLABLE SI
+ {0xC2DD, 0xC2F7, prH3, gcLo}, // [27] HANGUL SYLLABLE SIG..HANGUL SYLLABLE SIH
+ {0xC2F8, 0xC2F8, prH2, gcLo}, // HANGUL SYLLABLE SSA
+ {0xC2F9, 0xC313, prH3, gcLo}, // [27] HANGUL SYLLABLE SSAG..HANGUL SYLLABLE SSAH
+ {0xC314, 0xC314, prH2, gcLo}, // HANGUL SYLLABLE SSAE
+ {0xC315, 0xC32F, prH3, gcLo}, // [27] HANGUL SYLLABLE SSAEG..HANGUL SYLLABLE SSAEH
+ {0xC330, 0xC330, prH2, gcLo}, // HANGUL SYLLABLE SSYA
+ {0xC331, 0xC34B, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYAG..HANGUL SYLLABLE SSYAH
+ {0xC34C, 0xC34C, prH2, gcLo}, // HANGUL SYLLABLE SSYAE
+ {0xC34D, 0xC367, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYAEG..HANGUL SYLLABLE SSYAEH
+ {0xC368, 0xC368, prH2, gcLo}, // HANGUL SYLLABLE SSEO
+ {0xC369, 0xC383, prH3, gcLo}, // [27] HANGUL SYLLABLE SSEOG..HANGUL SYLLABLE SSEOH
+ {0xC384, 0xC384, prH2, gcLo}, // HANGUL SYLLABLE SSE
+ {0xC385, 0xC39F, prH3, gcLo}, // [27] HANGUL SYLLABLE SSEG..HANGUL SYLLABLE SSEH
+ {0xC3A0, 0xC3A0, prH2, gcLo}, // HANGUL SYLLABLE SSYEO
+ {0xC3A1, 0xC3BB, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYEOG..HANGUL SYLLABLE SSYEOH
+ {0xC3BC, 0xC3BC, prH2, gcLo}, // HANGUL SYLLABLE SSYE
+ {0xC3BD, 0xC3D7, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYEG..HANGUL SYLLABLE SSYEH
+ {0xC3D8, 0xC3D8, prH2, gcLo}, // HANGUL SYLLABLE SSO
+ {0xC3D9, 0xC3F3, prH3, gcLo}, // [27] HANGUL SYLLABLE SSOG..HANGUL SYLLABLE SSOH
+ {0xC3F4, 0xC3F4, prH2, gcLo}, // HANGUL SYLLABLE SSWA
+ {0xC3F5, 0xC40F, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWAG..HANGUL SYLLABLE SSWAH
+ {0xC410, 0xC410, prH2, gcLo}, // HANGUL SYLLABLE SSWAE
+ {0xC411, 0xC42B, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWAEG..HANGUL SYLLABLE SSWAEH
+ {0xC42C, 0xC42C, prH2, gcLo}, // HANGUL SYLLABLE SSOE
+ {0xC42D, 0xC447, prH3, gcLo}, // [27] HANGUL SYLLABLE SSOEG..HANGUL SYLLABLE SSOEH
+ {0xC448, 0xC448, prH2, gcLo}, // HANGUL SYLLABLE SSYO
+ {0xC449, 0xC463, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYOG..HANGUL SYLLABLE SSYOH
+ {0xC464, 0xC464, prH2, gcLo}, // HANGUL SYLLABLE SSU
+ {0xC465, 0xC47F, prH3, gcLo}, // [27] HANGUL SYLLABLE SSUG..HANGUL SYLLABLE SSUH
+ {0xC480, 0xC480, prH2, gcLo}, // HANGUL SYLLABLE SSWEO
+ {0xC481, 0xC49B, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWEOG..HANGUL SYLLABLE SSWEOH
+ {0xC49C, 0xC49C, prH2, gcLo}, // HANGUL SYLLABLE SSWE
+ {0xC49D, 0xC4B7, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWEG..HANGUL SYLLABLE SSWEH
+ {0xC4B8, 0xC4B8, prH2, gcLo}, // HANGUL SYLLABLE SSWI
+ {0xC4B9, 0xC4D3, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWIG..HANGUL SYLLABLE SSWIH
+ {0xC4D4, 0xC4D4, prH2, gcLo}, // HANGUL SYLLABLE SSYU
+ {0xC4D5, 0xC4EF, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYUG..HANGUL SYLLABLE SSYUH
+ {0xC4F0, 0xC4F0, prH2, gcLo}, // HANGUL SYLLABLE SSEU
+ {0xC4F1, 0xC50B, prH3, gcLo}, // [27] HANGUL SYLLABLE SSEUG..HANGUL SYLLABLE SSEUH
+ {0xC50C, 0xC50C, prH2, gcLo}, // HANGUL SYLLABLE SSYI
+ {0xC50D, 0xC527, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYIG..HANGUL SYLLABLE SSYIH
+ {0xC528, 0xC528, prH2, gcLo}, // HANGUL SYLLABLE SSI
+ {0xC529, 0xC543, prH3, gcLo}, // [27] HANGUL SYLLABLE SSIG..HANGUL SYLLABLE SSIH
+ {0xC544, 0xC544, prH2, gcLo}, // HANGUL SYLLABLE A
+ {0xC545, 0xC55F, prH3, gcLo}, // [27] HANGUL SYLLABLE AG..HANGUL SYLLABLE AH
+ {0xC560, 0xC560, prH2, gcLo}, // HANGUL SYLLABLE AE
+ {0xC561, 0xC57B, prH3, gcLo}, // [27] HANGUL SYLLABLE AEG..HANGUL SYLLABLE AEH
+ {0xC57C, 0xC57C, prH2, gcLo}, // HANGUL SYLLABLE YA
+ {0xC57D, 0xC597, prH3, gcLo}, // [27] HANGUL SYLLABLE YAG..HANGUL SYLLABLE YAH
+ {0xC598, 0xC598, prH2, gcLo}, // HANGUL SYLLABLE YAE
+ {0xC599, 0xC5B3, prH3, gcLo}, // [27] HANGUL SYLLABLE YAEG..HANGUL SYLLABLE YAEH
+ {0xC5B4, 0xC5B4, prH2, gcLo}, // HANGUL SYLLABLE EO
+ {0xC5B5, 0xC5CF, prH3, gcLo}, // [27] HANGUL SYLLABLE EOG..HANGUL SYLLABLE EOH
+ {0xC5D0, 0xC5D0, prH2, gcLo}, // HANGUL SYLLABLE E
+ {0xC5D1, 0xC5EB, prH3, gcLo}, // [27] HANGUL SYLLABLE EG..HANGUL SYLLABLE EH
+ {0xC5EC, 0xC5EC, prH2, gcLo}, // HANGUL SYLLABLE YEO
+ {0xC5ED, 0xC607, prH3, gcLo}, // [27] HANGUL SYLLABLE YEOG..HANGUL SYLLABLE YEOH
+ {0xC608, 0xC608, prH2, gcLo}, // HANGUL SYLLABLE YE
+ {0xC609, 0xC623, prH3, gcLo}, // [27] HANGUL SYLLABLE YEG..HANGUL SYLLABLE YEH
+ {0xC624, 0xC624, prH2, gcLo}, // HANGUL SYLLABLE O
+ {0xC625, 0xC63F, prH3, gcLo}, // [27] HANGUL SYLLABLE OG..HANGUL SYLLABLE OH
+ {0xC640, 0xC640, prH2, gcLo}, // HANGUL SYLLABLE WA
+ {0xC641, 0xC65B, prH3, gcLo}, // [27] HANGUL SYLLABLE WAG..HANGUL SYLLABLE WAH
+ {0xC65C, 0xC65C, prH2, gcLo}, // HANGUL SYLLABLE WAE
+ {0xC65D, 0xC677, prH3, gcLo}, // [27] HANGUL SYLLABLE WAEG..HANGUL SYLLABLE WAEH
+ {0xC678, 0xC678, prH2, gcLo}, // HANGUL SYLLABLE OE
+ {0xC679, 0xC693, prH3, gcLo}, // [27] HANGUL SYLLABLE OEG..HANGUL SYLLABLE OEH
+ {0xC694, 0xC694, prH2, gcLo}, // HANGUL SYLLABLE YO
+ {0xC695, 0xC6AF, prH3, gcLo}, // [27] HANGUL SYLLABLE YOG..HANGUL SYLLABLE YOH
+ {0xC6B0, 0xC6B0, prH2, gcLo}, // HANGUL SYLLABLE U
+ {0xC6B1, 0xC6CB, prH3, gcLo}, // [27] HANGUL SYLLABLE UG..HANGUL SYLLABLE UH
+ {0xC6CC, 0xC6CC, prH2, gcLo}, // HANGUL SYLLABLE WEO
+ {0xC6CD, 0xC6E7, prH3, gcLo}, // [27] HANGUL SYLLABLE WEOG..HANGUL SYLLABLE WEOH
+ {0xC6E8, 0xC6E8, prH2, gcLo}, // HANGUL SYLLABLE WE
+ {0xC6E9, 0xC703, prH3, gcLo}, // [27] HANGUL SYLLABLE WEG..HANGUL SYLLABLE WEH
+ {0xC704, 0xC704, prH2, gcLo}, // HANGUL SYLLABLE WI
+ {0xC705, 0xC71F, prH3, gcLo}, // [27] HANGUL SYLLABLE WIG..HANGUL SYLLABLE WIH
+ {0xC720, 0xC720, prH2, gcLo}, // HANGUL SYLLABLE YU
+ {0xC721, 0xC73B, prH3, gcLo}, // [27] HANGUL SYLLABLE YUG..HANGUL SYLLABLE YUH
+ {0xC73C, 0xC73C, prH2, gcLo}, // HANGUL SYLLABLE EU
+ {0xC73D, 0xC757, prH3, gcLo}, // [27] HANGUL SYLLABLE EUG..HANGUL SYLLABLE EUH
+ {0xC758, 0xC758, prH2, gcLo}, // HANGUL SYLLABLE YI
+ {0xC759, 0xC773, prH3, gcLo}, // [27] HANGUL SYLLABLE YIG..HANGUL SYLLABLE YIH
+ {0xC774, 0xC774, prH2, gcLo}, // HANGUL SYLLABLE I
+ {0xC775, 0xC78F, prH3, gcLo}, // [27] HANGUL SYLLABLE IG..HANGUL SYLLABLE IH
+ {0xC790, 0xC790, prH2, gcLo}, // HANGUL SYLLABLE JA
+ {0xC791, 0xC7AB, prH3, gcLo}, // [27] HANGUL SYLLABLE JAG..HANGUL SYLLABLE JAH
+ {0xC7AC, 0xC7AC, prH2, gcLo}, // HANGUL SYLLABLE JAE
+ {0xC7AD, 0xC7C7, prH3, gcLo}, // [27] HANGUL SYLLABLE JAEG..HANGUL SYLLABLE JAEH
+ {0xC7C8, 0xC7C8, prH2, gcLo}, // HANGUL SYLLABLE JYA
+ {0xC7C9, 0xC7E3, prH3, gcLo}, // [27] HANGUL SYLLABLE JYAG..HANGUL SYLLABLE JYAH
+ {0xC7E4, 0xC7E4, prH2, gcLo}, // HANGUL SYLLABLE JYAE
+ {0xC7E5, 0xC7FF, prH3, gcLo}, // [27] HANGUL SYLLABLE JYAEG..HANGUL SYLLABLE JYAEH
+ {0xC800, 0xC800, prH2, gcLo}, // HANGUL SYLLABLE JEO
+ {0xC801, 0xC81B, prH3, gcLo}, // [27] HANGUL SYLLABLE JEOG..HANGUL SYLLABLE JEOH
+ {0xC81C, 0xC81C, prH2, gcLo}, // HANGUL SYLLABLE JE
+ {0xC81D, 0xC837, prH3, gcLo}, // [27] HANGUL SYLLABLE JEG..HANGUL SYLLABLE JEH
+ {0xC838, 0xC838, prH2, gcLo}, // HANGUL SYLLABLE JYEO
+ {0xC839, 0xC853, prH3, gcLo}, // [27] HANGUL SYLLABLE JYEOG..HANGUL SYLLABLE JYEOH
+ {0xC854, 0xC854, prH2, gcLo}, // HANGUL SYLLABLE JYE
+ {0xC855, 0xC86F, prH3, gcLo}, // [27] HANGUL SYLLABLE JYEG..HANGUL SYLLABLE JYEH
+ {0xC870, 0xC870, prH2, gcLo}, // HANGUL SYLLABLE JO
+ {0xC871, 0xC88B, prH3, gcLo}, // [27] HANGUL SYLLABLE JOG..HANGUL SYLLABLE JOH
+ {0xC88C, 0xC88C, prH2, gcLo}, // HANGUL SYLLABLE JWA
+ {0xC88D, 0xC8A7, prH3, gcLo}, // [27] HANGUL SYLLABLE JWAG..HANGUL SYLLABLE JWAH
+ {0xC8A8, 0xC8A8, prH2, gcLo}, // HANGUL SYLLABLE JWAE
+ {0xC8A9, 0xC8C3, prH3, gcLo}, // [27] HANGUL SYLLABLE JWAEG..HANGUL SYLLABLE JWAEH
+ {0xC8C4, 0xC8C4, prH2, gcLo}, // HANGUL SYLLABLE JOE
+ {0xC8C5, 0xC8DF, prH3, gcLo}, // [27] HANGUL SYLLABLE JOEG..HANGUL SYLLABLE JOEH
+ {0xC8E0, 0xC8E0, prH2, gcLo}, // HANGUL SYLLABLE JYO
+ {0xC8E1, 0xC8FB, prH3, gcLo}, // [27] HANGUL SYLLABLE JYOG..HANGUL SYLLABLE JYOH
+ {0xC8FC, 0xC8FC, prH2, gcLo}, // HANGUL SYLLABLE JU
+ {0xC8FD, 0xC917, prH3, gcLo}, // [27] HANGUL SYLLABLE JUG..HANGUL SYLLABLE JUH
+ {0xC918, 0xC918, prH2, gcLo}, // HANGUL SYLLABLE JWEO
+ {0xC919, 0xC933, prH3, gcLo}, // [27] HANGUL SYLLABLE JWEOG..HANGUL SYLLABLE JWEOH
+ {0xC934, 0xC934, prH2, gcLo}, // HANGUL SYLLABLE JWE
+ {0xC935, 0xC94F, prH3, gcLo}, // [27] HANGUL SYLLABLE JWEG..HANGUL SYLLABLE JWEH
+ {0xC950, 0xC950, prH2, gcLo}, // HANGUL SYLLABLE JWI
+ {0xC951, 0xC96B, prH3, gcLo}, // [27] HANGUL SYLLABLE JWIG..HANGUL SYLLABLE JWIH
+ {0xC96C, 0xC96C, prH2, gcLo}, // HANGUL SYLLABLE JYU
+ {0xC96D, 0xC987, prH3, gcLo}, // [27] HANGUL SYLLABLE JYUG..HANGUL SYLLABLE JYUH
+ {0xC988, 0xC988, prH2, gcLo}, // HANGUL SYLLABLE JEU
+ {0xC989, 0xC9A3, prH3, gcLo}, // [27] HANGUL SYLLABLE JEUG..HANGUL SYLLABLE JEUH
+ {0xC9A4, 0xC9A4, prH2, gcLo}, // HANGUL SYLLABLE JYI
+ {0xC9A5, 0xC9BF, prH3, gcLo}, // [27] HANGUL SYLLABLE JYIG..HANGUL SYLLABLE JYIH
+ {0xC9C0, 0xC9C0, prH2, gcLo}, // HANGUL SYLLABLE JI
+ {0xC9C1, 0xC9DB, prH3, gcLo}, // [27] HANGUL SYLLABLE JIG..HANGUL SYLLABLE JIH
+ {0xC9DC, 0xC9DC, prH2, gcLo}, // HANGUL SYLLABLE JJA
+ {0xC9DD, 0xC9F7, prH3, gcLo}, // [27] HANGUL SYLLABLE JJAG..HANGUL SYLLABLE JJAH
+ {0xC9F8, 0xC9F8, prH2, gcLo}, // HANGUL SYLLABLE JJAE
+ {0xC9F9, 0xCA13, prH3, gcLo}, // [27] HANGUL SYLLABLE JJAEG..HANGUL SYLLABLE JJAEH
+ {0xCA14, 0xCA14, prH2, gcLo}, // HANGUL SYLLABLE JJYA
+ {0xCA15, 0xCA2F, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYAG..HANGUL SYLLABLE JJYAH
+ {0xCA30, 0xCA30, prH2, gcLo}, // HANGUL SYLLABLE JJYAE
+ {0xCA31, 0xCA4B, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYAEG..HANGUL SYLLABLE JJYAEH
+ {0xCA4C, 0xCA4C, prH2, gcLo}, // HANGUL SYLLABLE JJEO
+ {0xCA4D, 0xCA67, prH3, gcLo}, // [27] HANGUL SYLLABLE JJEOG..HANGUL SYLLABLE JJEOH
+ {0xCA68, 0xCA68, prH2, gcLo}, // HANGUL SYLLABLE JJE
+ {0xCA69, 0xCA83, prH3, gcLo}, // [27] HANGUL SYLLABLE JJEG..HANGUL SYLLABLE JJEH
+ {0xCA84, 0xCA84, prH2, gcLo}, // HANGUL SYLLABLE JJYEO
+ {0xCA85, 0xCA9F, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYEOG..HANGUL SYLLABLE JJYEOH
+ {0xCAA0, 0xCAA0, prH2, gcLo}, // HANGUL SYLLABLE JJYE
+ {0xCAA1, 0xCABB, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYEG..HANGUL SYLLABLE JJYEH
+ {0xCABC, 0xCABC, prH2, gcLo}, // HANGUL SYLLABLE JJO
+ {0xCABD, 0xCAD7, prH3, gcLo}, // [27] HANGUL SYLLABLE JJOG..HANGUL SYLLABLE JJOH
+ {0xCAD8, 0xCAD8, prH2, gcLo}, // HANGUL SYLLABLE JJWA
+ {0xCAD9, 0xCAF3, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWAG..HANGUL SYLLABLE JJWAH
+ {0xCAF4, 0xCAF4, prH2, gcLo}, // HANGUL SYLLABLE JJWAE
+ {0xCAF5, 0xCB0F, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWAEG..HANGUL SYLLABLE JJWAEH
+ {0xCB10, 0xCB10, prH2, gcLo}, // HANGUL SYLLABLE JJOE
+ {0xCB11, 0xCB2B, prH3, gcLo}, // [27] HANGUL SYLLABLE JJOEG..HANGUL SYLLABLE JJOEH
+ {0xCB2C, 0xCB2C, prH2, gcLo}, // HANGUL SYLLABLE JJYO
+ {0xCB2D, 0xCB47, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYOG..HANGUL SYLLABLE JJYOH
+ {0xCB48, 0xCB48, prH2, gcLo}, // HANGUL SYLLABLE JJU
+ {0xCB49, 0xCB63, prH3, gcLo}, // [27] HANGUL SYLLABLE JJUG..HANGUL SYLLABLE JJUH
+ {0xCB64, 0xCB64, prH2, gcLo}, // HANGUL SYLLABLE JJWEO
+ {0xCB65, 0xCB7F, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWEOG..HANGUL SYLLABLE JJWEOH
+ {0xCB80, 0xCB80, prH2, gcLo}, // HANGUL SYLLABLE JJWE
+ {0xCB81, 0xCB9B, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWEG..HANGUL SYLLABLE JJWEH
+ {0xCB9C, 0xCB9C, prH2, gcLo}, // HANGUL SYLLABLE JJWI
+ {0xCB9D, 0xCBB7, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWIG..HANGUL SYLLABLE JJWIH
+ {0xCBB8, 0xCBB8, prH2, gcLo}, // HANGUL SYLLABLE JJYU
+ {0xCBB9, 0xCBD3, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYUG..HANGUL SYLLABLE JJYUH
+ {0xCBD4, 0xCBD4, prH2, gcLo}, // HANGUL SYLLABLE JJEU
+ {0xCBD5, 0xCBEF, prH3, gcLo}, // [27] HANGUL SYLLABLE JJEUG..HANGUL SYLLABLE JJEUH
+ {0xCBF0, 0xCBF0, prH2, gcLo}, // HANGUL SYLLABLE JJYI
+ {0xCBF1, 0xCC0B, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYIG..HANGUL SYLLABLE JJYIH
+ {0xCC0C, 0xCC0C, prH2, gcLo}, // HANGUL SYLLABLE JJI
+ {0xCC0D, 0xCC27, prH3, gcLo}, // [27] HANGUL SYLLABLE JJIG..HANGUL SYLLABLE JJIH
+ {0xCC28, 0xCC28, prH2, gcLo}, // HANGUL SYLLABLE CA
+ {0xCC29, 0xCC43, prH3, gcLo}, // [27] HANGUL SYLLABLE CAG..HANGUL SYLLABLE CAH
+ {0xCC44, 0xCC44, prH2, gcLo}, // HANGUL SYLLABLE CAE
+ {0xCC45, 0xCC5F, prH3, gcLo}, // [27] HANGUL SYLLABLE CAEG..HANGUL SYLLABLE CAEH
+ {0xCC60, 0xCC60, prH2, gcLo}, // HANGUL SYLLABLE CYA
+ {0xCC61, 0xCC7B, prH3, gcLo}, // [27] HANGUL SYLLABLE CYAG..HANGUL SYLLABLE CYAH
+ {0xCC7C, 0xCC7C, prH2, gcLo}, // HANGUL SYLLABLE CYAE
+ {0xCC7D, 0xCC97, prH3, gcLo}, // [27] HANGUL SYLLABLE CYAEG..HANGUL SYLLABLE CYAEH
+ {0xCC98, 0xCC98, prH2, gcLo}, // HANGUL SYLLABLE CEO
+ {0xCC99, 0xCCB3, prH3, gcLo}, // [27] HANGUL SYLLABLE CEOG..HANGUL SYLLABLE CEOH
+ {0xCCB4, 0xCCB4, prH2, gcLo}, // HANGUL SYLLABLE CE
+ {0xCCB5, 0xCCCF, prH3, gcLo}, // [27] HANGUL SYLLABLE CEG..HANGUL SYLLABLE CEH
+ {0xCCD0, 0xCCD0, prH2, gcLo}, // HANGUL SYLLABLE CYEO
+ {0xCCD1, 0xCCEB, prH3, gcLo}, // [27] HANGUL SYLLABLE CYEOG..HANGUL SYLLABLE CYEOH
+ {0xCCEC, 0xCCEC, prH2, gcLo}, // HANGUL SYLLABLE CYE
+ {0xCCED, 0xCD07, prH3, gcLo}, // [27] HANGUL SYLLABLE CYEG..HANGUL SYLLABLE CYEH
+ {0xCD08, 0xCD08, prH2, gcLo}, // HANGUL SYLLABLE CO
+ {0xCD09, 0xCD23, prH3, gcLo}, // [27] HANGUL SYLLABLE COG..HANGUL SYLLABLE COH
+ {0xCD24, 0xCD24, prH2, gcLo}, // HANGUL SYLLABLE CWA
+ {0xCD25, 0xCD3F, prH3, gcLo}, // [27] HANGUL SYLLABLE CWAG..HANGUL SYLLABLE CWAH
+ {0xCD40, 0xCD40, prH2, gcLo}, // HANGUL SYLLABLE CWAE
+ {0xCD41, 0xCD5B, prH3, gcLo}, // [27] HANGUL SYLLABLE CWAEG..HANGUL SYLLABLE CWAEH
+ {0xCD5C, 0xCD5C, prH2, gcLo}, // HANGUL SYLLABLE COE
+ {0xCD5D, 0xCD77, prH3, gcLo}, // [27] HANGUL SYLLABLE COEG..HANGUL SYLLABLE COEH
+ {0xCD78, 0xCD78, prH2, gcLo}, // HANGUL SYLLABLE CYO
+ {0xCD79, 0xCD93, prH3, gcLo}, // [27] HANGUL SYLLABLE CYOG..HANGUL SYLLABLE CYOH
+ {0xCD94, 0xCD94, prH2, gcLo}, // HANGUL SYLLABLE CU
+ {0xCD95, 0xCDAF, prH3, gcLo}, // [27] HANGUL SYLLABLE CUG..HANGUL SYLLABLE CUH
+ {0xCDB0, 0xCDB0, prH2, gcLo}, // HANGUL SYLLABLE CWEO
+ {0xCDB1, 0xCDCB, prH3, gcLo}, // [27] HANGUL SYLLABLE CWEOG..HANGUL SYLLABLE CWEOH
+ {0xCDCC, 0xCDCC, prH2, gcLo}, // HANGUL SYLLABLE CWE
+ {0xCDCD, 0xCDE7, prH3, gcLo}, // [27] HANGUL SYLLABLE CWEG..HANGUL SYLLABLE CWEH
+ {0xCDE8, 0xCDE8, prH2, gcLo}, // HANGUL SYLLABLE CWI
+ {0xCDE9, 0xCE03, prH3, gcLo}, // [27] HANGUL SYLLABLE CWIG..HANGUL SYLLABLE CWIH
+ {0xCE04, 0xCE04, prH2, gcLo}, // HANGUL SYLLABLE CYU
+ {0xCE05, 0xCE1F, prH3, gcLo}, // [27] HANGUL SYLLABLE CYUG..HANGUL SYLLABLE CYUH
+ {0xCE20, 0xCE20, prH2, gcLo}, // HANGUL SYLLABLE CEU
+ {0xCE21, 0xCE3B, prH3, gcLo}, // [27] HANGUL SYLLABLE CEUG..HANGUL SYLLABLE CEUH
+ {0xCE3C, 0xCE3C, prH2, gcLo}, // HANGUL SYLLABLE CYI
+ {0xCE3D, 0xCE57, prH3, gcLo}, // [27] HANGUL SYLLABLE CYIG..HANGUL SYLLABLE CYIH
+ {0xCE58, 0xCE58, prH2, gcLo}, // HANGUL SYLLABLE CI
+ {0xCE59, 0xCE73, prH3, gcLo}, // [27] HANGUL SYLLABLE CIG..HANGUL SYLLABLE CIH
+ {0xCE74, 0xCE74, prH2, gcLo}, // HANGUL SYLLABLE KA
+ {0xCE75, 0xCE8F, prH3, gcLo}, // [27] HANGUL SYLLABLE KAG..HANGUL SYLLABLE KAH
+ {0xCE90, 0xCE90, prH2, gcLo}, // HANGUL SYLLABLE KAE
+ {0xCE91, 0xCEAB, prH3, gcLo}, // [27] HANGUL SYLLABLE KAEG..HANGUL SYLLABLE KAEH
+ {0xCEAC, 0xCEAC, prH2, gcLo}, // HANGUL SYLLABLE KYA
+ {0xCEAD, 0xCEC7, prH3, gcLo}, // [27] HANGUL SYLLABLE KYAG..HANGUL SYLLABLE KYAH
+ {0xCEC8, 0xCEC8, prH2, gcLo}, // HANGUL SYLLABLE KYAE
+ {0xCEC9, 0xCEE3, prH3, gcLo}, // [27] HANGUL SYLLABLE KYAEG..HANGUL SYLLABLE KYAEH
+ {0xCEE4, 0xCEE4, prH2, gcLo}, // HANGUL SYLLABLE KEO
+ {0xCEE5, 0xCEFF, prH3, gcLo}, // [27] HANGUL SYLLABLE KEOG..HANGUL SYLLABLE KEOH
+ {0xCF00, 0xCF00, prH2, gcLo}, // HANGUL SYLLABLE KE
+ {0xCF01, 0xCF1B, prH3, gcLo}, // [27] HANGUL SYLLABLE KEG..HANGUL SYLLABLE KEH
+ {0xCF1C, 0xCF1C, prH2, gcLo}, // HANGUL SYLLABLE KYEO
+ {0xCF1D, 0xCF37, prH3, gcLo}, // [27] HANGUL SYLLABLE KYEOG..HANGUL SYLLABLE KYEOH
+ {0xCF38, 0xCF38, prH2, gcLo}, // HANGUL SYLLABLE KYE
+ {0xCF39, 0xCF53, prH3, gcLo}, // [27] HANGUL SYLLABLE KYEG..HANGUL SYLLABLE KYEH
+ {0xCF54, 0xCF54, prH2, gcLo}, // HANGUL SYLLABLE KO
+ {0xCF55, 0xCF6F, prH3, gcLo}, // [27] HANGUL SYLLABLE KOG..HANGUL SYLLABLE KOH
+ {0xCF70, 0xCF70, prH2, gcLo}, // HANGUL SYLLABLE KWA
+ {0xCF71, 0xCF8B, prH3, gcLo}, // [27] HANGUL SYLLABLE KWAG..HANGUL SYLLABLE KWAH
+ {0xCF8C, 0xCF8C, prH2, gcLo}, // HANGUL SYLLABLE KWAE
+ {0xCF8D, 0xCFA7, prH3, gcLo}, // [27] HANGUL SYLLABLE KWAEG..HANGUL SYLLABLE KWAEH
+ {0xCFA8, 0xCFA8, prH2, gcLo}, // HANGUL SYLLABLE KOE
+ {0xCFA9, 0xCFC3, prH3, gcLo}, // [27] HANGUL SYLLABLE KOEG..HANGUL SYLLABLE KOEH
+ {0xCFC4, 0xCFC4, prH2, gcLo}, // HANGUL SYLLABLE KYO
+ {0xCFC5, 0xCFDF, prH3, gcLo}, // [27] HANGUL SYLLABLE KYOG..HANGUL SYLLABLE KYOH
+ {0xCFE0, 0xCFE0, prH2, gcLo}, // HANGUL SYLLABLE KU
+ {0xCFE1, 0xCFFB, prH3, gcLo}, // [27] HANGUL SYLLABLE KUG..HANGUL SYLLABLE KUH
+ {0xCFFC, 0xCFFC, prH2, gcLo}, // HANGUL SYLLABLE KWEO
+ {0xCFFD, 0xD017, prH3, gcLo}, // [27] HANGUL SYLLABLE KWEOG..HANGUL SYLLABLE KWEOH
+ {0xD018, 0xD018, prH2, gcLo}, // HANGUL SYLLABLE KWE
+ {0xD019, 0xD033, prH3, gcLo}, // [27] HANGUL SYLLABLE KWEG..HANGUL SYLLABLE KWEH
+ {0xD034, 0xD034, prH2, gcLo}, // HANGUL SYLLABLE KWI
+ {0xD035, 0xD04F, prH3, gcLo}, // [27] HANGUL SYLLABLE KWIG..HANGUL SYLLABLE KWIH
+ {0xD050, 0xD050, prH2, gcLo}, // HANGUL SYLLABLE KYU
+ {0xD051, 0xD06B, prH3, gcLo}, // [27] HANGUL SYLLABLE KYUG..HANGUL SYLLABLE KYUH
+ {0xD06C, 0xD06C, prH2, gcLo}, // HANGUL SYLLABLE KEU
+ {0xD06D, 0xD087, prH3, gcLo}, // [27] HANGUL SYLLABLE KEUG..HANGUL SYLLABLE KEUH
+ {0xD088, 0xD088, prH2, gcLo}, // HANGUL SYLLABLE KYI
+ {0xD089, 0xD0A3, prH3, gcLo}, // [27] HANGUL SYLLABLE KYIG..HANGUL SYLLABLE KYIH
+ {0xD0A4, 0xD0A4, prH2, gcLo}, // HANGUL SYLLABLE KI
+ {0xD0A5, 0xD0BF, prH3, gcLo}, // [27] HANGUL SYLLABLE KIG..HANGUL SYLLABLE KIH
+ {0xD0C0, 0xD0C0, prH2, gcLo}, // HANGUL SYLLABLE TA
+ {0xD0C1, 0xD0DB, prH3, gcLo}, // [27] HANGUL SYLLABLE TAG..HANGUL SYLLABLE TAH
+ {0xD0DC, 0xD0DC, prH2, gcLo}, // HANGUL SYLLABLE TAE
+ {0xD0DD, 0xD0F7, prH3, gcLo}, // [27] HANGUL SYLLABLE TAEG..HANGUL SYLLABLE TAEH
+ {0xD0F8, 0xD0F8, prH2, gcLo}, // HANGUL SYLLABLE TYA
+ {0xD0F9, 0xD113, prH3, gcLo}, // [27] HANGUL SYLLABLE TYAG..HANGUL SYLLABLE TYAH
+ {0xD114, 0xD114, prH2, gcLo}, // HANGUL SYLLABLE TYAE
+ {0xD115, 0xD12F, prH3, gcLo}, // [27] HANGUL SYLLABLE TYAEG..HANGUL SYLLABLE TYAEH
+ {0xD130, 0xD130, prH2, gcLo}, // HANGUL SYLLABLE TEO
+ {0xD131, 0xD14B, prH3, gcLo}, // [27] HANGUL SYLLABLE TEOG..HANGUL SYLLABLE TEOH
+ {0xD14C, 0xD14C, prH2, gcLo}, // HANGUL SYLLABLE TE
+ {0xD14D, 0xD167, prH3, gcLo}, // [27] HANGUL SYLLABLE TEG..HANGUL SYLLABLE TEH
+ {0xD168, 0xD168, prH2, gcLo}, // HANGUL SYLLABLE TYEO
+ {0xD169, 0xD183, prH3, gcLo}, // [27] HANGUL SYLLABLE TYEOG..HANGUL SYLLABLE TYEOH
+ {0xD184, 0xD184, prH2, gcLo}, // HANGUL SYLLABLE TYE
+ {0xD185, 0xD19F, prH3, gcLo}, // [27] HANGUL SYLLABLE TYEG..HANGUL SYLLABLE TYEH
+ {0xD1A0, 0xD1A0, prH2, gcLo}, // HANGUL SYLLABLE TO
+ {0xD1A1, 0xD1BB, prH3, gcLo}, // [27] HANGUL SYLLABLE TOG..HANGUL SYLLABLE TOH
+ {0xD1BC, 0xD1BC, prH2, gcLo}, // HANGUL SYLLABLE TWA
+ {0xD1BD, 0xD1D7, prH3, gcLo}, // [27] HANGUL SYLLABLE TWAG..HANGUL SYLLABLE TWAH
+ {0xD1D8, 0xD1D8, prH2, gcLo}, // HANGUL SYLLABLE TWAE
+ {0xD1D9, 0xD1F3, prH3, gcLo}, // [27] HANGUL SYLLABLE TWAEG..HANGUL SYLLABLE TWAEH
+ {0xD1F4, 0xD1F4, prH2, gcLo}, // HANGUL SYLLABLE TOE
+ {0xD1F5, 0xD20F, prH3, gcLo}, // [27] HANGUL SYLLABLE TOEG..HANGUL SYLLABLE TOEH
+ {0xD210, 0xD210, prH2, gcLo}, // HANGUL SYLLABLE TYO
+ {0xD211, 0xD22B, prH3, gcLo}, // [27] HANGUL SYLLABLE TYOG..HANGUL SYLLABLE TYOH
+ {0xD22C, 0xD22C, prH2, gcLo}, // HANGUL SYLLABLE TU
+ {0xD22D, 0xD247, prH3, gcLo}, // [27] HANGUL SYLLABLE TUG..HANGUL SYLLABLE TUH
+ {0xD248, 0xD248, prH2, gcLo}, // HANGUL SYLLABLE TWEO
+ {0xD249, 0xD263, prH3, gcLo}, // [27] HANGUL SYLLABLE TWEOG..HANGUL SYLLABLE TWEOH
+ {0xD264, 0xD264, prH2, gcLo}, // HANGUL SYLLABLE TWE
+ {0xD265, 0xD27F, prH3, gcLo}, // [27] HANGUL SYLLABLE TWEG..HANGUL SYLLABLE TWEH
+ {0xD280, 0xD280, prH2, gcLo}, // HANGUL SYLLABLE TWI
+ {0xD281, 0xD29B, prH3, gcLo}, // [27] HANGUL SYLLABLE TWIG..HANGUL SYLLABLE TWIH
+ {0xD29C, 0xD29C, prH2, gcLo}, // HANGUL SYLLABLE TYU
+ {0xD29D, 0xD2B7, prH3, gcLo}, // [27] HANGUL SYLLABLE TYUG..HANGUL SYLLABLE TYUH
+ {0xD2B8, 0xD2B8, prH2, gcLo}, // HANGUL SYLLABLE TEU
+ {0xD2B9, 0xD2D3, prH3, gcLo}, // [27] HANGUL SYLLABLE TEUG..HANGUL SYLLABLE TEUH
+ {0xD2D4, 0xD2D4, prH2, gcLo}, // HANGUL SYLLABLE TYI
+ {0xD2D5, 0xD2EF, prH3, gcLo}, // [27] HANGUL SYLLABLE TYIG..HANGUL SYLLABLE TYIH
+ {0xD2F0, 0xD2F0, prH2, gcLo}, // HANGUL SYLLABLE TI
+ {0xD2F1, 0xD30B, prH3, gcLo}, // [27] HANGUL SYLLABLE TIG..HANGUL SYLLABLE TIH
+ {0xD30C, 0xD30C, prH2, gcLo}, // HANGUL SYLLABLE PA
+ {0xD30D, 0xD327, prH3, gcLo}, // [27] HANGUL SYLLABLE PAG..HANGUL SYLLABLE PAH
+ {0xD328, 0xD328, prH2, gcLo}, // HANGUL SYLLABLE PAE
+ {0xD329, 0xD343, prH3, gcLo}, // [27] HANGUL SYLLABLE PAEG..HANGUL SYLLABLE PAEH
+ {0xD344, 0xD344, prH2, gcLo}, // HANGUL SYLLABLE PYA
+ {0xD345, 0xD35F, prH3, gcLo}, // [27] HANGUL SYLLABLE PYAG..HANGUL SYLLABLE PYAH
+ {0xD360, 0xD360, prH2, gcLo}, // HANGUL SYLLABLE PYAE
+ {0xD361, 0xD37B, prH3, gcLo}, // [27] HANGUL SYLLABLE PYAEG..HANGUL SYLLABLE PYAEH
+ {0xD37C, 0xD37C, prH2, gcLo}, // HANGUL SYLLABLE PEO
+ {0xD37D, 0xD397, prH3, gcLo}, // [27] HANGUL SYLLABLE PEOG..HANGUL SYLLABLE PEOH
+ {0xD398, 0xD398, prH2, gcLo}, // HANGUL SYLLABLE PE
+ {0xD399, 0xD3B3, prH3, gcLo}, // [27] HANGUL SYLLABLE PEG..HANGUL SYLLABLE PEH
+ {0xD3B4, 0xD3B4, prH2, gcLo}, // HANGUL SYLLABLE PYEO
+ {0xD3B5, 0xD3CF, prH3, gcLo}, // [27] HANGUL SYLLABLE PYEOG..HANGUL SYLLABLE PYEOH
+ {0xD3D0, 0xD3D0, prH2, gcLo}, // HANGUL SYLLABLE PYE
+ {0xD3D1, 0xD3EB, prH3, gcLo}, // [27] HANGUL SYLLABLE PYEG..HANGUL SYLLABLE PYEH
+ {0xD3EC, 0xD3EC, prH2, gcLo}, // HANGUL SYLLABLE PO
+ {0xD3ED, 0xD407, prH3, gcLo}, // [27] HANGUL SYLLABLE POG..HANGUL SYLLABLE POH
+ {0xD408, 0xD408, prH2, gcLo}, // HANGUL SYLLABLE PWA
+ {0xD409, 0xD423, prH3, gcLo}, // [27] HANGUL SYLLABLE PWAG..HANGUL SYLLABLE PWAH
+ {0xD424, 0xD424, prH2, gcLo}, // HANGUL SYLLABLE PWAE
+ {0xD425, 0xD43F, prH3, gcLo}, // [27] HANGUL SYLLABLE PWAEG..HANGUL SYLLABLE PWAEH
+ {0xD440, 0xD440, prH2, gcLo}, // HANGUL SYLLABLE POE
+ {0xD441, 0xD45B, prH3, gcLo}, // [27] HANGUL SYLLABLE POEG..HANGUL SYLLABLE POEH
+ {0xD45C, 0xD45C, prH2, gcLo}, // HANGUL SYLLABLE PYO
+ {0xD45D, 0xD477, prH3, gcLo}, // [27] HANGUL SYLLABLE PYOG..HANGUL SYLLABLE PYOH
+ {0xD478, 0xD478, prH2, gcLo}, // HANGUL SYLLABLE PU
+ {0xD479, 0xD493, prH3, gcLo}, // [27] HANGUL SYLLABLE PUG..HANGUL SYLLABLE PUH
+ {0xD494, 0xD494, prH2, gcLo}, // HANGUL SYLLABLE PWEO
+ {0xD495, 0xD4AF, prH3, gcLo}, // [27] HANGUL SYLLABLE PWEOG..HANGUL SYLLABLE PWEOH
+ {0xD4B0, 0xD4B0, prH2, gcLo}, // HANGUL SYLLABLE PWE
+ {0xD4B1, 0xD4CB, prH3, gcLo}, // [27] HANGUL SYLLABLE PWEG..HANGUL SYLLABLE PWEH
+ {0xD4CC, 0xD4CC, prH2, gcLo}, // HANGUL SYLLABLE PWI
+ {0xD4CD, 0xD4E7, prH3, gcLo}, // [27] HANGUL SYLLABLE PWIG..HANGUL SYLLABLE PWIH
+ {0xD4E8, 0xD4E8, prH2, gcLo}, // HANGUL SYLLABLE PYU
+ {0xD4E9, 0xD503, prH3, gcLo}, // [27] HANGUL SYLLABLE PYUG..HANGUL SYLLABLE PYUH
+ {0xD504, 0xD504, prH2, gcLo}, // HANGUL SYLLABLE PEU
+ {0xD505, 0xD51F, prH3, gcLo}, // [27] HANGUL SYLLABLE PEUG..HANGUL SYLLABLE PEUH
+ {0xD520, 0xD520, prH2, gcLo}, // HANGUL SYLLABLE PYI
+ {0xD521, 0xD53B, prH3, gcLo}, // [27] HANGUL SYLLABLE PYIG..HANGUL SYLLABLE PYIH
+ {0xD53C, 0xD53C, prH2, gcLo}, // HANGUL SYLLABLE PI
+ {0xD53D, 0xD557, prH3, gcLo}, // [27] HANGUL SYLLABLE PIG..HANGUL SYLLABLE PIH
+ {0xD558, 0xD558, prH2, gcLo}, // HANGUL SYLLABLE HA
+ {0xD559, 0xD573, prH3, gcLo}, // [27] HANGUL SYLLABLE HAG..HANGUL SYLLABLE HAH
+ {0xD574, 0xD574, prH2, gcLo}, // HANGUL SYLLABLE HAE
+ {0xD575, 0xD58F, prH3, gcLo}, // [27] HANGUL SYLLABLE HAEG..HANGUL SYLLABLE HAEH
+ {0xD590, 0xD590, prH2, gcLo}, // HANGUL SYLLABLE HYA
+ {0xD591, 0xD5AB, prH3, gcLo}, // [27] HANGUL SYLLABLE HYAG..HANGUL SYLLABLE HYAH
+ {0xD5AC, 0xD5AC, prH2, gcLo}, // HANGUL SYLLABLE HYAE
+ {0xD5AD, 0xD5C7, prH3, gcLo}, // [27] HANGUL SYLLABLE HYAEG..HANGUL SYLLABLE HYAEH
+ {0xD5C8, 0xD5C8, prH2, gcLo}, // HANGUL SYLLABLE HEO
+ {0xD5C9, 0xD5E3, prH3, gcLo}, // [27] HANGUL SYLLABLE HEOG..HANGUL SYLLABLE HEOH
+ {0xD5E4, 0xD5E4, prH2, gcLo}, // HANGUL SYLLABLE HE
+ {0xD5E5, 0xD5FF, prH3, gcLo}, // [27] HANGUL SYLLABLE HEG..HANGUL SYLLABLE HEH
+ {0xD600, 0xD600, prH2, gcLo}, // HANGUL SYLLABLE HYEO
+ {0xD601, 0xD61B, prH3, gcLo}, // [27] HANGUL SYLLABLE HYEOG..HANGUL SYLLABLE HYEOH
+ {0xD61C, 0xD61C, prH2, gcLo}, // HANGUL SYLLABLE HYE
+ {0xD61D, 0xD637, prH3, gcLo}, // [27] HANGUL SYLLABLE HYEG..HANGUL SYLLABLE HYEH
+ {0xD638, 0xD638, prH2, gcLo}, // HANGUL SYLLABLE HO
+ {0xD639, 0xD653, prH3, gcLo}, // [27] HANGUL SYLLABLE HOG..HANGUL SYLLABLE HOH
+ {0xD654, 0xD654, prH2, gcLo}, // HANGUL SYLLABLE HWA
+ {0xD655, 0xD66F, prH3, gcLo}, // [27] HANGUL SYLLABLE HWAG..HANGUL SYLLABLE HWAH
+ {0xD670, 0xD670, prH2, gcLo}, // HANGUL SYLLABLE HWAE
+ {0xD671, 0xD68B, prH3, gcLo}, // [27] HANGUL SYLLABLE HWAEG..HANGUL SYLLABLE HWAEH
+ {0xD68C, 0xD68C, prH2, gcLo}, // HANGUL SYLLABLE HOE
+ {0xD68D, 0xD6A7, prH3, gcLo}, // [27] HANGUL SYLLABLE HOEG..HANGUL SYLLABLE HOEH
+ {0xD6A8, 0xD6A8, prH2, gcLo}, // HANGUL SYLLABLE HYO
+ {0xD6A9, 0xD6C3, prH3, gcLo}, // [27] HANGUL SYLLABLE HYOG..HANGUL SYLLABLE HYOH
+ {0xD6C4, 0xD6C4, prH2, gcLo}, // HANGUL SYLLABLE HU
+ {0xD6C5, 0xD6DF, prH3, gcLo}, // [27] HANGUL SYLLABLE HUG..HANGUL SYLLABLE HUH
+ {0xD6E0, 0xD6E0, prH2, gcLo}, // HANGUL SYLLABLE HWEO
+ {0xD6E1, 0xD6FB, prH3, gcLo}, // [27] HANGUL SYLLABLE HWEOG..HANGUL SYLLABLE HWEOH
+ {0xD6FC, 0xD6FC, prH2, gcLo}, // HANGUL SYLLABLE HWE
+ {0xD6FD, 0xD717, prH3, gcLo}, // [27] HANGUL SYLLABLE HWEG..HANGUL SYLLABLE HWEH
+ {0xD718, 0xD718, prH2, gcLo}, // HANGUL SYLLABLE HWI
+ {0xD719, 0xD733, prH3, gcLo}, // [27] HANGUL SYLLABLE HWIG..HANGUL SYLLABLE HWIH
+ {0xD734, 0xD734, prH2, gcLo}, // HANGUL SYLLABLE HYU
+ {0xD735, 0xD74F, prH3, gcLo}, // [27] HANGUL SYLLABLE HYUG..HANGUL SYLLABLE HYUH
+ {0xD750, 0xD750, prH2, gcLo}, // HANGUL SYLLABLE HEU
+ {0xD751, 0xD76B, prH3, gcLo}, // [27] HANGUL SYLLABLE HEUG..HANGUL SYLLABLE HEUH
+ {0xD76C, 0xD76C, prH2, gcLo}, // HANGUL SYLLABLE HYI
+ {0xD76D, 0xD787, prH3, gcLo}, // [27] HANGUL SYLLABLE HYIG..HANGUL SYLLABLE HYIH
+ {0xD788, 0xD788, prH2, gcLo}, // HANGUL SYLLABLE HI
+ {0xD789, 0xD7A3, prH3, gcLo}, // [27] HANGUL SYLLABLE HIG..HANGUL SYLLABLE HIH
+ {0xD7B0, 0xD7C6, prJV, gcLo}, // [23] HANGUL JUNGSEONG O-YEO..HANGUL JUNGSEONG ARAEA-E
+ {0xD7CB, 0xD7FB, prJT, gcLo}, // [49] HANGUL JONGSEONG NIEUN-RIEUL..HANGUL JONGSEONG PHIEUPH-THIEUTH
+ {0xD800, 0xDB7F, prSG, gcCs}, // [896] <surrogate-D800>..<surrogate-DB7F>
+ {0xDB80, 0xDBFF, prSG, gcCs}, // [128] <surrogate-DB80>..<surrogate-DBFF>
+ {0xDC00, 0xDFFF, prSG, gcCs}, // [1024] <surrogate-DC00>..<surrogate-DFFF>
+ {0xE000, 0xF8FF, prXX, gcCo}, // [6400] <private-use-E000>..<private-use-F8FF>
+ {0xF900, 0xFA6D, prID, gcLo}, // [366] CJK COMPATIBILITY IDEOGRAPH-F900..CJK COMPATIBILITY IDEOGRAPH-FA6D
+ {0xFA6E, 0xFA6F, prID, gcCn}, // [2] <reserved-FA6E>..<reserved-FA6F>
+ {0xFA70, 0xFAD9, prID, gcLo}, // [106] CJK COMPATIBILITY IDEOGRAPH-FA70..CJK COMPATIBILITY IDEOGRAPH-FAD9
+ {0xFADA, 0xFAFF, prID, gcCn}, // [38] <reserved-FADA>..<reserved-FAFF>
+ {0xFB00, 0xFB06, prAL, gcLl}, // [7] LATIN SMALL LIGATURE FF..LATIN SMALL LIGATURE ST
+ {0xFB13, 0xFB17, prAL, gcLl}, // [5] ARMENIAN SMALL LIGATURE MEN NOW..ARMENIAN SMALL LIGATURE MEN XEH
+ {0xFB1D, 0xFB1D, prHL, gcLo}, // HEBREW LETTER YOD WITH HIRIQ
+ {0xFB1E, 0xFB1E, prCM, gcMn}, // HEBREW POINT JUDEO-SPANISH VARIKA
+ {0xFB1F, 0xFB28, prHL, gcLo}, // [10] HEBREW LIGATURE YIDDISH YOD YOD PATAH..HEBREW LETTER WIDE TAV
+ {0xFB29, 0xFB29, prAL, gcSm}, // HEBREW LETTER ALTERNATIVE PLUS SIGN
+ {0xFB2A, 0xFB36, prHL, gcLo}, // [13] HEBREW LETTER SHIN WITH SHIN DOT..HEBREW LETTER ZAYIN WITH DAGESH
+ {0xFB38, 0xFB3C, prHL, gcLo}, // [5] HEBREW LETTER TET WITH DAGESH..HEBREW LETTER LAMED WITH DAGESH
+ {0xFB3E, 0xFB3E, prHL, gcLo}, // HEBREW LETTER MEM WITH DAGESH
+ {0xFB40, 0xFB41, prHL, gcLo}, // [2] HEBREW LETTER NUN WITH DAGESH..HEBREW LETTER SAMEKH WITH DAGESH
+ {0xFB43, 0xFB44, prHL, gcLo}, // [2] HEBREW LETTER FINAL PE WITH DAGESH..HEBREW LETTER PE WITH DAGESH
+ {0xFB46, 0xFB4F, prHL, gcLo}, // [10] HEBREW LETTER TSADI WITH DAGESH..HEBREW LIGATURE ALEF LAMED
+ {0xFB50, 0xFBB1, prAL, gcLo}, // [98] ARABIC LETTER ALEF WASLA ISOLATED FORM..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE FINAL FORM
+ {0xFBB2, 0xFBC2, prAL, gcSk}, // [17] ARABIC SYMBOL DOT ABOVE..ARABIC SYMBOL WASLA ABOVE
+ {0xFBD3, 0xFD3D, prAL, gcLo}, // [363] ARABIC LETTER NG ISOLATED FORM..ARABIC LIGATURE ALEF WITH FATHATAN ISOLATED FORM
+ {0xFD3E, 0xFD3E, prCL, gcPe}, // ORNATE LEFT PARENTHESIS
+ {0xFD3F, 0xFD3F, prOP, gcPs}, // ORNATE RIGHT PARENTHESIS
+ {0xFD40, 0xFD4F, prAL, gcSo}, // [16] ARABIC LIGATURE RAHIMAHU ALLAAH..ARABIC LIGATURE RAHIMAHUM ALLAAH
+ {0xFD50, 0xFD8F, prAL, gcLo}, // [64] ARABIC LIGATURE TEH WITH JEEM WITH MEEM INITIAL FORM..ARABIC LIGATURE MEEM WITH KHAH WITH MEEM INITIAL FORM
+ {0xFD92, 0xFDC7, prAL, gcLo}, // [54] ARABIC LIGATURE MEEM WITH JEEM WITH KHAH INITIAL FORM..ARABIC LIGATURE NOON WITH JEEM WITH YEH FINAL FORM
+ {0xFDCF, 0xFDCF, prAL, gcSo}, // ARABIC LIGATURE SALAAMUHU ALAYNAA
+ {0xFDF0, 0xFDFB, prAL, gcLo}, // [12] ARABIC LIGATURE SALLA USED AS KORANIC STOP SIGN ISOLATED FORM..ARABIC LIGATURE JALLAJALALOUHOU
+ {0xFDFC, 0xFDFC, prPO, gcSc}, // RIAL SIGN
+ {0xFDFD, 0xFDFF, prAL, gcSo}, // [3] ARABIC LIGATURE BISMILLAH AR-RAHMAN AR-RAHEEM..ARABIC LIGATURE AZZA WA JALL
+ {0xFE00, 0xFE0F, prCM, gcMn}, // [16] VARIATION SELECTOR-1..VARIATION SELECTOR-16
+ {0xFE10, 0xFE10, prIS, gcPo}, // PRESENTATION FORM FOR VERTICAL COMMA
+ {0xFE11, 0xFE12, prCL, gcPo}, // [2] PRESENTATION FORM FOR VERTICAL IDEOGRAPHIC COMMA..PRESENTATION FORM FOR VERTICAL IDEOGRAPHIC FULL STOP
+ {0xFE13, 0xFE14, prIS, gcPo}, // [2] PRESENTATION FORM FOR VERTICAL COLON..PRESENTATION FORM FOR VERTICAL SEMICOLON
+ {0xFE15, 0xFE16, prEX, gcPo}, // [2] PRESENTATION FORM FOR VERTICAL EXCLAMATION MARK..PRESENTATION FORM FOR VERTICAL QUESTION MARK
+ {0xFE17, 0xFE17, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT WHITE LENTICULAR BRACKET
+ {0xFE18, 0xFE18, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT WHITE LENTICULAR BRAKCET
+ {0xFE19, 0xFE19, prIN, gcPo}, // PRESENTATION FORM FOR VERTICAL HORIZONTAL ELLIPSIS
+ {0xFE20, 0xFE2F, prCM, gcMn}, // [16] COMBINING LIGATURE LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF
+ {0xFE30, 0xFE30, prID, gcPo}, // PRESENTATION FORM FOR VERTICAL TWO DOT LEADER
+ {0xFE31, 0xFE32, prID, gcPd}, // [2] PRESENTATION FORM FOR VERTICAL EM DASH..PRESENTATION FORM FOR VERTICAL EN DASH
+ {0xFE33, 0xFE34, prID, gcPc}, // [2] PRESENTATION FORM FOR VERTICAL LOW LINE..PRESENTATION FORM FOR VERTICAL WAVY LOW LINE
+ {0xFE35, 0xFE35, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT PARENTHESIS
+ {0xFE36, 0xFE36, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT PARENTHESIS
+ {0xFE37, 0xFE37, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT CURLY BRACKET
+ {0xFE38, 0xFE38, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT CURLY BRACKET
+ {0xFE39, 0xFE39, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT TORTOISE SHELL BRACKET
+ {0xFE3A, 0xFE3A, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT TORTOISE SHELL BRACKET
+ {0xFE3B, 0xFE3B, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT BLACK LENTICULAR BRACKET
+ {0xFE3C, 0xFE3C, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT BLACK LENTICULAR BRACKET
+ {0xFE3D, 0xFE3D, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT DOUBLE ANGLE BRACKET
+ {0xFE3E, 0xFE3E, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT DOUBLE ANGLE BRACKET
+ {0xFE3F, 0xFE3F, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT ANGLE BRACKET
+ {0xFE40, 0xFE40, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT ANGLE BRACKET
+ {0xFE41, 0xFE41, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT CORNER BRACKET
+ {0xFE42, 0xFE42, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT CORNER BRACKET
+ {0xFE43, 0xFE43, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT WHITE CORNER BRACKET
+ {0xFE44, 0xFE44, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT WHITE CORNER BRACKET
+ {0xFE45, 0xFE46, prID, gcPo}, // [2] SESAME DOT..WHITE SESAME DOT
+ {0xFE47, 0xFE47, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT SQUARE BRACKET
+ {0xFE48, 0xFE48, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT SQUARE BRACKET
+ {0xFE49, 0xFE4C, prID, gcPo}, // [4] DASHED OVERLINE..DOUBLE WAVY OVERLINE
+ {0xFE4D, 0xFE4F, prID, gcPc}, // [3] DASHED LOW LINE..WAVY LOW LINE
+ {0xFE50, 0xFE50, prCL, gcPo}, // SMALL COMMA
+ {0xFE51, 0xFE51, prID, gcPo}, // SMALL IDEOGRAPHIC COMMA
+ {0xFE52, 0xFE52, prCL, gcPo}, // SMALL FULL STOP
+ {0xFE54, 0xFE55, prNS, gcPo}, // [2] SMALL SEMICOLON..SMALL COLON
+ {0xFE56, 0xFE57, prEX, gcPo}, // [2] SMALL QUESTION MARK..SMALL EXCLAMATION MARK
+ {0xFE58, 0xFE58, prID, gcPd}, // SMALL EM DASH
+ {0xFE59, 0xFE59, prOP, gcPs}, // SMALL LEFT PARENTHESIS
+ {0xFE5A, 0xFE5A, prCL, gcPe}, // SMALL RIGHT PARENTHESIS
+ {0xFE5B, 0xFE5B, prOP, gcPs}, // SMALL LEFT CURLY BRACKET
+ {0xFE5C, 0xFE5C, prCL, gcPe}, // SMALL RIGHT CURLY BRACKET
+ {0xFE5D, 0xFE5D, prOP, gcPs}, // SMALL LEFT TORTOISE SHELL BRACKET
+ {0xFE5E, 0xFE5E, prCL, gcPe}, // SMALL RIGHT TORTOISE SHELL BRACKET
+ {0xFE5F, 0xFE61, prID, gcPo}, // [3] SMALL NUMBER SIGN..SMALL ASTERISK
+ {0xFE62, 0xFE62, prID, gcSm}, // SMALL PLUS SIGN
+ {0xFE63, 0xFE63, prID, gcPd}, // SMALL HYPHEN-MINUS
+ {0xFE64, 0xFE66, prID, gcSm}, // [3] SMALL LESS-THAN SIGN..SMALL EQUALS SIGN
+ {0xFE68, 0xFE68, prID, gcPo}, // SMALL REVERSE SOLIDUS
+ {0xFE69, 0xFE69, prPR, gcSc}, // SMALL DOLLAR SIGN
+ {0xFE6A, 0xFE6A, prPO, gcPo}, // SMALL PERCENT SIGN
+ {0xFE6B, 0xFE6B, prID, gcPo}, // SMALL COMMERCIAL AT
+ {0xFE70, 0xFE74, prAL, gcLo}, // [5] ARABIC FATHATAN ISOLATED FORM..ARABIC KASRATAN ISOLATED FORM
+ {0xFE76, 0xFEFC, prAL, gcLo}, // [135] ARABIC FATHA ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF FINAL FORM
+ {0xFEFF, 0xFEFF, prWJ, gcCf}, // ZERO WIDTH NO-BREAK SPACE
+ {0xFF01, 0xFF01, prEX, gcPo}, // FULLWIDTH EXCLAMATION MARK
+ {0xFF02, 0xFF03, prID, gcPo}, // [2] FULLWIDTH QUOTATION MARK..FULLWIDTH NUMBER SIGN
+ {0xFF04, 0xFF04, prPR, gcSc}, // FULLWIDTH DOLLAR SIGN
+ {0xFF05, 0xFF05, prPO, gcPo}, // FULLWIDTH PERCENT SIGN
+ {0xFF06, 0xFF07, prID, gcPo}, // [2] FULLWIDTH AMPERSAND..FULLWIDTH APOSTROPHE
+ {0xFF08, 0xFF08, prOP, gcPs}, // FULLWIDTH LEFT PARENTHESIS
+ {0xFF09, 0xFF09, prCL, gcPe}, // FULLWIDTH RIGHT PARENTHESIS
+ {0xFF0A, 0xFF0A, prID, gcPo}, // FULLWIDTH ASTERISK
+ {0xFF0B, 0xFF0B, prID, gcSm}, // FULLWIDTH PLUS SIGN
+ {0xFF0C, 0xFF0C, prCL, gcPo}, // FULLWIDTH COMMA
+ {0xFF0D, 0xFF0D, prID, gcPd}, // FULLWIDTH HYPHEN-MINUS
+ {0xFF0E, 0xFF0E, prCL, gcPo}, // FULLWIDTH FULL STOP
+ {0xFF0F, 0xFF0F, prID, gcPo}, // FULLWIDTH SOLIDUS
+ {0xFF10, 0xFF19, prID, gcNd}, // [10] FULLWIDTH DIGIT ZERO..FULLWIDTH DIGIT NINE
+ {0xFF1A, 0xFF1B, prNS, gcPo}, // [2] FULLWIDTH COLON..FULLWIDTH SEMICOLON
+ {0xFF1C, 0xFF1E, prID, gcSm}, // [3] FULLWIDTH LESS-THAN SIGN..FULLWIDTH GREATER-THAN SIGN
+ {0xFF1F, 0xFF1F, prEX, gcPo}, // FULLWIDTH QUESTION MARK
+ {0xFF20, 0xFF20, prID, gcPo}, // FULLWIDTH COMMERCIAL AT
+ {0xFF21, 0xFF3A, prID, gcLu}, // [26] FULLWIDTH LATIN CAPITAL LETTER A..FULLWIDTH LATIN CAPITAL LETTER Z
+ {0xFF3B, 0xFF3B, prOP, gcPs}, // FULLWIDTH LEFT SQUARE BRACKET
+ {0xFF3C, 0xFF3C, prID, gcPo}, // FULLWIDTH REVERSE SOLIDUS
+ {0xFF3D, 0xFF3D, prCL, gcPe}, // FULLWIDTH RIGHT SQUARE BRACKET
+ {0xFF3E, 0xFF3E, prID, gcSk}, // FULLWIDTH CIRCUMFLEX ACCENT
+ {0xFF3F, 0xFF3F, prID, gcPc}, // FULLWIDTH LOW LINE
+ {0xFF40, 0xFF40, prID, gcSk}, // FULLWIDTH GRAVE ACCENT
+ {0xFF41, 0xFF5A, prID, gcLl}, // [26] FULLWIDTH LATIN SMALL LETTER A..FULLWIDTH LATIN SMALL LETTER Z
+ {0xFF5B, 0xFF5B, prOP, gcPs}, // FULLWIDTH LEFT CURLY BRACKET
+ {0xFF5C, 0xFF5C, prID, gcSm}, // FULLWIDTH VERTICAL LINE
+ {0xFF5D, 0xFF5D, prCL, gcPe}, // FULLWIDTH RIGHT CURLY BRACKET
+ {0xFF5E, 0xFF5E, prID, gcSm}, // FULLWIDTH TILDE
+ {0xFF5F, 0xFF5F, prOP, gcPs}, // FULLWIDTH LEFT WHITE PARENTHESIS
+ {0xFF60, 0xFF60, prCL, gcPe}, // FULLWIDTH RIGHT WHITE PARENTHESIS
+ {0xFF61, 0xFF61, prCL, gcPo}, // HALFWIDTH IDEOGRAPHIC FULL STOP
+ {0xFF62, 0xFF62, prOP, gcPs}, // HALFWIDTH LEFT CORNER BRACKET
+ {0xFF63, 0xFF63, prCL, gcPe}, // HALFWIDTH RIGHT CORNER BRACKET
+ {0xFF64, 0xFF64, prCL, gcPo}, // HALFWIDTH IDEOGRAPHIC COMMA
+ {0xFF65, 0xFF65, prNS, gcPo}, // HALFWIDTH KATAKANA MIDDLE DOT
+ {0xFF66, 0xFF66, prID, gcLo}, // HALFWIDTH KATAKANA LETTER WO
+ {0xFF67, 0xFF6F, prCJ, gcLo}, // [9] HALFWIDTH KATAKANA LETTER SMALL A..HALFWIDTH KATAKANA LETTER SMALL TU
+ {0xFF70, 0xFF70, prCJ, gcLm}, // HALFWIDTH KATAKANA-HIRAGANA PROLONGED SOUND MARK
+ {0xFF71, 0xFF9D, prID, gcLo}, // [45] HALFWIDTH KATAKANA LETTER A..HALFWIDTH KATAKANA LETTER N
+ {0xFF9E, 0xFF9F, prNS, gcLm}, // [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK
+ {0xFFA0, 0xFFBE, prID, gcLo}, // [31] HALFWIDTH HANGUL FILLER..HALFWIDTH HANGUL LETTER HIEUH
+ {0xFFC2, 0xFFC7, prID, gcLo}, // [6] HALFWIDTH HANGUL LETTER A..HALFWIDTH HANGUL LETTER E
+ {0xFFCA, 0xFFCF, prID, gcLo}, // [6] HALFWIDTH HANGUL LETTER YEO..HALFWIDTH HANGUL LETTER OE
+ {0xFFD2, 0xFFD7, prID, gcLo}, // [6] HALFWIDTH HANGUL LETTER YO..HALFWIDTH HANGUL LETTER YU
+ {0xFFDA, 0xFFDC, prID, gcLo}, // [3] HALFWIDTH HANGUL LETTER EU..HALFWIDTH HANGUL LETTER I
+ {0xFFE0, 0xFFE0, prPO, gcSc}, // FULLWIDTH CENT SIGN
+ {0xFFE1, 0xFFE1, prPR, gcSc}, // FULLWIDTH POUND SIGN
+ {0xFFE2, 0xFFE2, prID, gcSm}, // FULLWIDTH NOT SIGN
+ {0xFFE3, 0xFFE3, prID, gcSk}, // FULLWIDTH MACRON
+ {0xFFE4, 0xFFE4, prID, gcSo}, // FULLWIDTH BROKEN BAR
+ {0xFFE5, 0xFFE6, prPR, gcSc}, // [2] FULLWIDTH YEN SIGN..FULLWIDTH WON SIGN
+ {0xFFE8, 0xFFE8, prAL, gcSo}, // HALFWIDTH FORMS LIGHT VERTICAL
+ {0xFFE9, 0xFFEC, prAL, gcSm}, // [4] HALFWIDTH LEFTWARDS ARROW..HALFWIDTH DOWNWARDS ARROW
+ {0xFFED, 0xFFEE, prAL, gcSo}, // [2] HALFWIDTH BLACK SQUARE..HALFWIDTH WHITE CIRCLE
+ {0xFFF9, 0xFFFB, prCM, gcCf}, // [3] INTERLINEAR ANNOTATION ANCHOR..INTERLINEAR ANNOTATION TERMINATOR
+ {0xFFFC, 0xFFFC, prCB, gcSo}, // OBJECT REPLACEMENT CHARACTER
+ {0xFFFD, 0xFFFD, prAI, gcSo}, // REPLACEMENT CHARACTER
+ {0x10000, 0x1000B, prAL, gcLo}, // [12] LINEAR B SYLLABLE B008 A..LINEAR B SYLLABLE B046 JE
+ {0x1000D, 0x10026, prAL, gcLo}, // [26] LINEAR B SYLLABLE B036 JO..LINEAR B SYLLABLE B032 QO
+ {0x10028, 0x1003A, prAL, gcLo}, // [19] LINEAR B SYLLABLE B060 RA..LINEAR B SYLLABLE B042 WO
+ {0x1003C, 0x1003D, prAL, gcLo}, // [2] LINEAR B SYLLABLE B017 ZA..LINEAR B SYLLABLE B074 ZE
+ {0x1003F, 0x1004D, prAL, gcLo}, // [15] LINEAR B SYLLABLE B020 ZO..LINEAR B SYLLABLE B091 TWO
+ {0x10050, 0x1005D, prAL, gcLo}, // [14] LINEAR B SYMBOL B018..LINEAR B SYMBOL B089
+ {0x10080, 0x100FA, prAL, gcLo}, // [123] LINEAR B IDEOGRAM B100 MAN..LINEAR B IDEOGRAM VESSEL B305
+ {0x10100, 0x10102, prBA, gcPo}, // [3] AEGEAN WORD SEPARATOR LINE..AEGEAN CHECK MARK
+ {0x10107, 0x10133, prAL, gcNo}, // [45] AEGEAN NUMBER ONE..AEGEAN NUMBER NINETY THOUSAND
+ {0x10137, 0x1013F, prAL, gcSo}, // [9] AEGEAN WEIGHT BASE UNIT..AEGEAN MEASURE THIRD SUBUNIT
+ {0x10140, 0x10174, prAL, gcNl}, // [53] GREEK ACROPHONIC ATTIC ONE QUARTER..GREEK ACROPHONIC STRATIAN FIFTY MNAS
+ {0x10175, 0x10178, prAL, gcNo}, // [4] GREEK ONE HALF SIGN..GREEK THREE QUARTERS SIGN
+ {0x10179, 0x10189, prAL, gcSo}, // [17] GREEK YEAR SIGN..GREEK TRYBLION BASE SIGN
+ {0x1018A, 0x1018B, prAL, gcNo}, // [2] GREEK ZERO SIGN..GREEK ONE QUARTER SIGN
+ {0x1018C, 0x1018E, prAL, gcSo}, // [3] GREEK SINUSOID SIGN..NOMISMA SIGN
+ {0x10190, 0x1019C, prAL, gcSo}, // [13] ROMAN SEXTANS SIGN..ASCIA SYMBOL
+ {0x101A0, 0x101A0, prAL, gcSo}, // GREEK SYMBOL TAU RHO
+ {0x101D0, 0x101FC, prAL, gcSo}, // [45] PHAISTOS DISC SIGN PEDESTRIAN..PHAISTOS DISC SIGN WAVY BAND
+ {0x101FD, 0x101FD, prCM, gcMn}, // PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE
+ {0x10280, 0x1029C, prAL, gcLo}, // [29] LYCIAN LETTER A..LYCIAN LETTER X
+ {0x102A0, 0x102D0, prAL, gcLo}, // [49] CARIAN LETTER A..CARIAN LETTER UUU3
+ {0x102E0, 0x102E0, prCM, gcMn}, // COPTIC EPACT THOUSANDS MARK
+ {0x102E1, 0x102FB, prAL, gcNo}, // [27] COPTIC EPACT DIGIT ONE..COPTIC EPACT NUMBER NINE HUNDRED
+ {0x10300, 0x1031F, prAL, gcLo}, // [32] OLD ITALIC LETTER A..OLD ITALIC LETTER ESS
+ {0x10320, 0x10323, prAL, gcNo}, // [4] OLD ITALIC NUMERAL ONE..OLD ITALIC NUMERAL FIFTY
+ {0x1032D, 0x1032F, prAL, gcLo}, // [3] OLD ITALIC LETTER YE..OLD ITALIC LETTER SOUTHERN TSE
+ {0x10330, 0x10340, prAL, gcLo}, // [17] GOTHIC LETTER AHSA..GOTHIC LETTER PAIRTHRA
+ {0x10341, 0x10341, prAL, gcNl}, // GOTHIC LETTER NINETY
+ {0x10342, 0x10349, prAL, gcLo}, // [8] GOTHIC LETTER RAIDA..GOTHIC LETTER OTHAL
+ {0x1034A, 0x1034A, prAL, gcNl}, // GOTHIC LETTER NINE HUNDRED
+ {0x10350, 0x10375, prAL, gcLo}, // [38] OLD PERMIC LETTER AN..OLD PERMIC LETTER IA
+ {0x10376, 0x1037A, prCM, gcMn}, // [5] COMBINING OLD PERMIC LETTER AN..COMBINING OLD PERMIC LETTER SII
+ {0x10380, 0x1039D, prAL, gcLo}, // [30] UGARITIC LETTER ALPA..UGARITIC LETTER SSU
+ {0x1039F, 0x1039F, prBA, gcPo}, // UGARITIC WORD DIVIDER
+ {0x103A0, 0x103C3, prAL, gcLo}, // [36] OLD PERSIAN SIGN A..OLD PERSIAN SIGN HA
+ {0x103C8, 0x103CF, prAL, gcLo}, // [8] OLD PERSIAN SIGN AURAMAZDAA..OLD PERSIAN SIGN BUUMISH
+ {0x103D0, 0x103D0, prBA, gcPo}, // OLD PERSIAN WORD DIVIDER
+ {0x103D1, 0x103D5, prAL, gcNl}, // [5] OLD PERSIAN NUMBER ONE..OLD PERSIAN NUMBER HUNDRED
+ {0x10400, 0x1044F, prAL, gcLC}, // [80] DESERET CAPITAL LETTER LONG I..DESERET SMALL LETTER EW
+ {0x10450, 0x1047F, prAL, gcLo}, // [48] SHAVIAN LETTER PEEP..SHAVIAN LETTER YEW
+ {0x10480, 0x1049D, prAL, gcLo}, // [30] OSMANYA LETTER ALEF..OSMANYA LETTER OO
+ {0x104A0, 0x104A9, prNU, gcNd}, // [10] OSMANYA DIGIT ZERO..OSMANYA DIGIT NINE
+ {0x104B0, 0x104D3, prAL, gcLu}, // [36] OSAGE CAPITAL LETTER A..OSAGE CAPITAL LETTER ZHA
+ {0x104D8, 0x104FB, prAL, gcLl}, // [36] OSAGE SMALL LETTER A..OSAGE SMALL LETTER ZHA
+ {0x10500, 0x10527, prAL, gcLo}, // [40] ELBASAN LETTER A..ELBASAN LETTER KHE
+ {0x10530, 0x10563, prAL, gcLo}, // [52] CAUCASIAN ALBANIAN LETTER ALT..CAUCASIAN ALBANIAN LETTER KIW
+ {0x1056F, 0x1056F, prAL, gcPo}, // CAUCASIAN ALBANIAN CITATION MARK
+ {0x10570, 0x1057A, prAL, gcLu}, // [11] VITHKUQI CAPITAL LETTER A..VITHKUQI CAPITAL LETTER GA
+ {0x1057C, 0x1058A, prAL, gcLu}, // [15] VITHKUQI CAPITAL LETTER HA..VITHKUQI CAPITAL LETTER RE
+ {0x1058C, 0x10592, prAL, gcLu}, // [7] VITHKUQI CAPITAL LETTER SE..VITHKUQI CAPITAL LETTER XE
+ {0x10594, 0x10595, prAL, gcLu}, // [2] VITHKUQI CAPITAL LETTER Y..VITHKUQI CAPITAL LETTER ZE
+ {0x10597, 0x105A1, prAL, gcLl}, // [11] VITHKUQI SMALL LETTER A..VITHKUQI SMALL LETTER GA
+ {0x105A3, 0x105B1, prAL, gcLl}, // [15] VITHKUQI SMALL LETTER HA..VITHKUQI SMALL LETTER RE
+ {0x105B3, 0x105B9, prAL, gcLl}, // [7] VITHKUQI SMALL LETTER SE..VITHKUQI SMALL LETTER XE
+ {0x105BB, 0x105BC, prAL, gcLl}, // [2] VITHKUQI SMALL LETTER Y..VITHKUQI SMALL LETTER ZE
+ {0x10600, 0x10736, prAL, gcLo}, // [311] LINEAR A SIGN AB001..LINEAR A SIGN A664
+ {0x10740, 0x10755, prAL, gcLo}, // [22] LINEAR A SIGN A701 A..LINEAR A SIGN A732 JE
+ {0x10760, 0x10767, prAL, gcLo}, // [8] LINEAR A SIGN A800..LINEAR A SIGN A807
+ {0x10780, 0x10785, prAL, gcLm}, // [6] MODIFIER LETTER SMALL CAPITAL AA..MODIFIER LETTER SMALL B WITH HOOK
+ {0x10787, 0x107B0, prAL, gcLm}, // [42] MODIFIER LETTER SMALL DZ DIGRAPH..MODIFIER LETTER SMALL V WITH RIGHT HOOK
+ {0x107B2, 0x107BA, prAL, gcLm}, // [9] MODIFIER LETTER SMALL CAPITAL Y..MODIFIER LETTER SMALL S WITH CURL
+ {0x10800, 0x10805, prAL, gcLo}, // [6] CYPRIOT SYLLABLE A..CYPRIOT SYLLABLE JA
+ {0x10808, 0x10808, prAL, gcLo}, // CYPRIOT SYLLABLE JO
+ {0x1080A, 0x10835, prAL, gcLo}, // [44] CYPRIOT SYLLABLE KA..CYPRIOT SYLLABLE WO
+ {0x10837, 0x10838, prAL, gcLo}, // [2] CYPRIOT SYLLABLE XA..CYPRIOT SYLLABLE XE
+ {0x1083C, 0x1083C, prAL, gcLo}, // CYPRIOT SYLLABLE ZA
+ {0x1083F, 0x1083F, prAL, gcLo}, // CYPRIOT SYLLABLE ZO
+ {0x10840, 0x10855, prAL, gcLo}, // [22] IMPERIAL ARAMAIC LETTER ALEPH..IMPERIAL ARAMAIC LETTER TAW
+ {0x10857, 0x10857, prBA, gcPo}, // IMPERIAL ARAMAIC SECTION SIGN
+ {0x10858, 0x1085F, prAL, gcNo}, // [8] IMPERIAL ARAMAIC NUMBER ONE..IMPERIAL ARAMAIC NUMBER TEN THOUSAND
+ {0x10860, 0x10876, prAL, gcLo}, // [23] PALMYRENE LETTER ALEPH..PALMYRENE LETTER TAW
+ {0x10877, 0x10878, prAL, gcSo}, // [2] PALMYRENE LEFT-POINTING FLEURON..PALMYRENE RIGHT-POINTING FLEURON
+ {0x10879, 0x1087F, prAL, gcNo}, // [7] PALMYRENE NUMBER ONE..PALMYRENE NUMBER TWENTY
+ {0x10880, 0x1089E, prAL, gcLo}, // [31] NABATAEAN LETTER FINAL ALEPH..NABATAEAN LETTER TAW
+ {0x108A7, 0x108AF, prAL, gcNo}, // [9] NABATAEAN NUMBER ONE..NABATAEAN NUMBER ONE HUNDRED
+ {0x108E0, 0x108F2, prAL, gcLo}, // [19] HATRAN LETTER ALEPH..HATRAN LETTER QOPH
+ {0x108F4, 0x108F5, prAL, gcLo}, // [2] HATRAN LETTER SHIN..HATRAN LETTER TAW
+ {0x108FB, 0x108FF, prAL, gcNo}, // [5] HATRAN NUMBER ONE..HATRAN NUMBER ONE HUNDRED
+ {0x10900, 0x10915, prAL, gcLo}, // [22] PHOENICIAN LETTER ALF..PHOENICIAN LETTER TAU
+ {0x10916, 0x1091B, prAL, gcNo}, // [6] PHOENICIAN NUMBER ONE..PHOENICIAN NUMBER THREE
+ {0x1091F, 0x1091F, prBA, gcPo}, // PHOENICIAN WORD SEPARATOR
+ {0x10920, 0x10939, prAL, gcLo}, // [26] LYDIAN LETTER A..LYDIAN LETTER C
+ {0x1093F, 0x1093F, prAL, gcPo}, // LYDIAN TRIANGULAR MARK
+ {0x10980, 0x1099F, prAL, gcLo}, // [32] MEROITIC HIEROGLYPHIC LETTER A..MEROITIC HIEROGLYPHIC SYMBOL VIDJ-2
+ {0x109A0, 0x109B7, prAL, gcLo}, // [24] MEROITIC CURSIVE LETTER A..MEROITIC CURSIVE LETTER DA
+ {0x109BC, 0x109BD, prAL, gcNo}, // [2] MEROITIC CURSIVE FRACTION ELEVEN TWELFTHS..MEROITIC CURSIVE FRACTION ONE HALF
+ {0x109BE, 0x109BF, prAL, gcLo}, // [2] MEROITIC CURSIVE LOGOGRAM RMT..MEROITIC CURSIVE LOGOGRAM IMN
+ {0x109C0, 0x109CF, prAL, gcNo}, // [16] MEROITIC CURSIVE NUMBER ONE..MEROITIC CURSIVE NUMBER SEVENTY
+ {0x109D2, 0x109FF, prAL, gcNo}, // [46] MEROITIC CURSIVE NUMBER ONE HUNDRED..MEROITIC CURSIVE FRACTION TEN TWELFTHS
+ {0x10A00, 0x10A00, prAL, gcLo}, // KHAROSHTHI LETTER A
+ {0x10A01, 0x10A03, prCM, gcMn}, // [3] KHAROSHTHI VOWEL SIGN I..KHAROSHTHI VOWEL SIGN VOCALIC R
+ {0x10A05, 0x10A06, prCM, gcMn}, // [2] KHAROSHTHI VOWEL SIGN E..KHAROSHTHI VOWEL SIGN O
+ {0x10A0C, 0x10A0F, prCM, gcMn}, // [4] KHAROSHTHI VOWEL LENGTH MARK..KHAROSHTHI SIGN VISARGA
+ {0x10A10, 0x10A13, prAL, gcLo}, // [4] KHAROSHTHI LETTER KA..KHAROSHTHI LETTER GHA
+ {0x10A15, 0x10A17, prAL, gcLo}, // [3] KHAROSHTHI LETTER CA..KHAROSHTHI LETTER JA
+ {0x10A19, 0x10A35, prAL, gcLo}, // [29] KHAROSHTHI LETTER NYA..KHAROSHTHI LETTER VHA
+ {0x10A38, 0x10A3A, prCM, gcMn}, // [3] KHAROSHTHI SIGN BAR ABOVE..KHAROSHTHI SIGN DOT BELOW
+ {0x10A3F, 0x10A3F, prCM, gcMn}, // KHAROSHTHI VIRAMA
+ {0x10A40, 0x10A48, prAL, gcNo}, // [9] KHAROSHTHI DIGIT ONE..KHAROSHTHI FRACTION ONE HALF
+ {0x10A50, 0x10A57, prBA, gcPo}, // [8] KHAROSHTHI PUNCTUATION DOT..KHAROSHTHI PUNCTUATION DOUBLE DANDA
+ {0x10A58, 0x10A58, prAL, gcPo}, // KHAROSHTHI PUNCTUATION LINES
+ {0x10A60, 0x10A7C, prAL, gcLo}, // [29] OLD SOUTH ARABIAN LETTER HE..OLD SOUTH ARABIAN LETTER THETH
+ {0x10A7D, 0x10A7E, prAL, gcNo}, // [2] OLD SOUTH ARABIAN NUMBER ONE..OLD SOUTH ARABIAN NUMBER FIFTY
+ {0x10A7F, 0x10A7F, prAL, gcPo}, // OLD SOUTH ARABIAN NUMERIC INDICATOR
+ {0x10A80, 0x10A9C, prAL, gcLo}, // [29] OLD NORTH ARABIAN LETTER HEH..OLD NORTH ARABIAN LETTER ZAH
+ {0x10A9D, 0x10A9F, prAL, gcNo}, // [3] OLD NORTH ARABIAN NUMBER ONE..OLD NORTH ARABIAN NUMBER TWENTY
+ {0x10AC0, 0x10AC7, prAL, gcLo}, // [8] MANICHAEAN LETTER ALEPH..MANICHAEAN LETTER WAW
+ {0x10AC8, 0x10AC8, prAL, gcSo}, // MANICHAEAN SIGN UD
+ {0x10AC9, 0x10AE4, prAL, gcLo}, // [28] MANICHAEAN LETTER ZAYIN..MANICHAEAN LETTER TAW
+ {0x10AE5, 0x10AE6, prCM, gcMn}, // [2] MANICHAEAN ABBREVIATION MARK ABOVE..MANICHAEAN ABBREVIATION MARK BELOW
+ {0x10AEB, 0x10AEF, prAL, gcNo}, // [5] MANICHAEAN NUMBER ONE..MANICHAEAN NUMBER ONE HUNDRED
+ {0x10AF0, 0x10AF5, prBA, gcPo}, // [6] MANICHAEAN PUNCTUATION STAR..MANICHAEAN PUNCTUATION TWO DOTS
+ {0x10AF6, 0x10AF6, prIN, gcPo}, // MANICHAEAN PUNCTUATION LINE FILLER
+ {0x10B00, 0x10B35, prAL, gcLo}, // [54] AVESTAN LETTER A..AVESTAN LETTER HE
+ {0x10B39, 0x10B3F, prBA, gcPo}, // [7] AVESTAN ABBREVIATION MARK..LARGE ONE RING OVER TWO RINGS PUNCTUATION
+ {0x10B40, 0x10B55, prAL, gcLo}, // [22] INSCRIPTIONAL PARTHIAN LETTER ALEPH..INSCRIPTIONAL PARTHIAN LETTER TAW
+ {0x10B58, 0x10B5F, prAL, gcNo}, // [8] INSCRIPTIONAL PARTHIAN NUMBER ONE..INSCRIPTIONAL PARTHIAN NUMBER ONE THOUSAND
+ {0x10B60, 0x10B72, prAL, gcLo}, // [19] INSCRIPTIONAL PAHLAVI LETTER ALEPH..INSCRIPTIONAL PAHLAVI LETTER TAW
+ {0x10B78, 0x10B7F, prAL, gcNo}, // [8] INSCRIPTIONAL PAHLAVI NUMBER ONE..INSCRIPTIONAL PAHLAVI NUMBER ONE THOUSAND
+ {0x10B80, 0x10B91, prAL, gcLo}, // [18] PSALTER PAHLAVI LETTER ALEPH..PSALTER PAHLAVI LETTER TAW
+ {0x10B99, 0x10B9C, prAL, gcPo}, // [4] PSALTER PAHLAVI SECTION MARK..PSALTER PAHLAVI FOUR DOTS WITH DOT
+ {0x10BA9, 0x10BAF, prAL, gcNo}, // [7] PSALTER PAHLAVI NUMBER ONE..PSALTER PAHLAVI NUMBER ONE HUNDRED
+ {0x10C00, 0x10C48, prAL, gcLo}, // [73] OLD TURKIC LETTER ORKHON A..OLD TURKIC LETTER ORKHON BASH
+ {0x10C80, 0x10CB2, prAL, gcLu}, // [51] OLD HUNGARIAN CAPITAL LETTER A..OLD HUNGARIAN CAPITAL LETTER US
+ {0x10CC0, 0x10CF2, prAL, gcLl}, // [51] OLD HUNGARIAN SMALL LETTER A..OLD HUNGARIAN SMALL LETTER US
+ {0x10CFA, 0x10CFF, prAL, gcNo}, // [6] OLD HUNGARIAN NUMBER ONE..OLD HUNGARIAN NUMBER ONE THOUSAND
+ {0x10D00, 0x10D23, prAL, gcLo}, // [36] HANIFI ROHINGYA LETTER A..HANIFI ROHINGYA MARK NA KHONNA
+ {0x10D24, 0x10D27, prCM, gcMn}, // [4] HANIFI ROHINGYA SIGN HARBAHAY..HANIFI ROHINGYA SIGN TASSI
+ {0x10D30, 0x10D39, prNU, gcNd}, // [10] HANIFI ROHINGYA DIGIT ZERO..HANIFI ROHINGYA DIGIT NINE
+ {0x10E60, 0x10E7E, prAL, gcNo}, // [31] RUMI DIGIT ONE..RUMI FRACTION TWO THIRDS
+ {0x10E80, 0x10EA9, prAL, gcLo}, // [42] YEZIDI LETTER ELIF..YEZIDI LETTER ET
+ {0x10EAB, 0x10EAC, prCM, gcMn}, // [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
+ {0x10EAD, 0x10EAD, prBA, gcPd}, // YEZIDI HYPHENATION MARK
+ {0x10EB0, 0x10EB1, prAL, gcLo}, // [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE
+ {0x10EFD, 0x10EFF, prCM, gcMn}, // [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA
+ {0x10F00, 0x10F1C, prAL, gcLo}, // [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL
+ {0x10F1D, 0x10F26, prAL, gcNo}, // [10] OLD SOGDIAN NUMBER ONE..OLD SOGDIAN FRACTION ONE HALF
+ {0x10F27, 0x10F27, prAL, gcLo}, // OLD SOGDIAN LIGATURE AYIN-DALETH
+ {0x10F30, 0x10F45, prAL, gcLo}, // [22] SOGDIAN LETTER ALEPH..SOGDIAN INDEPENDENT SHIN
+ {0x10F46, 0x10F50, prCM, gcMn}, // [11] SOGDIAN COMBINING DOT BELOW..SOGDIAN COMBINING STROKE BELOW
+ {0x10F51, 0x10F54, prAL, gcNo}, // [4] SOGDIAN NUMBER ONE..SOGDIAN NUMBER ONE HUNDRED
+ {0x10F55, 0x10F59, prAL, gcPo}, // [5] SOGDIAN PUNCTUATION TWO VERTICAL BARS..SOGDIAN PUNCTUATION HALF CIRCLE WITH DOT
+ {0x10F70, 0x10F81, prAL, gcLo}, // [18] OLD UYGHUR LETTER ALEPH..OLD UYGHUR LETTER LESH
+ {0x10F82, 0x10F85, prCM, gcMn}, // [4] OLD UYGHUR COMBINING DOT ABOVE..OLD UYGHUR COMBINING TWO DOTS BELOW
+ {0x10F86, 0x10F89, prAL, gcPo}, // [4] OLD UYGHUR PUNCTUATION BAR..OLD UYGHUR PUNCTUATION FOUR DOTS
+ {0x10FB0, 0x10FC4, prAL, gcLo}, // [21] CHORASMIAN LETTER ALEPH..CHORASMIAN LETTER TAW
+ {0x10FC5, 0x10FCB, prAL, gcNo}, // [7] CHORASMIAN NUMBER ONE..CHORASMIAN NUMBER ONE HUNDRED
+ {0x10FE0, 0x10FF6, prAL, gcLo}, // [23] ELYMAIC LETTER ALEPH..ELYMAIC LIGATURE ZAYIN-YODH
+ {0x11000, 0x11000, prCM, gcMc}, // BRAHMI SIGN CANDRABINDU
+ {0x11001, 0x11001, prCM, gcMn}, // BRAHMI SIGN ANUSVARA
+ {0x11002, 0x11002, prCM, gcMc}, // BRAHMI SIGN VISARGA
+ {0x11003, 0x11037, prAL, gcLo}, // [53] BRAHMI SIGN JIHVAMULIYA..BRAHMI LETTER OLD TAMIL NNNA
+ {0x11038, 0x11046, prCM, gcMn}, // [15] BRAHMI VOWEL SIGN AA..BRAHMI VIRAMA
+ {0x11047, 0x11048, prBA, gcPo}, // [2] BRAHMI DANDA..BRAHMI DOUBLE DANDA
+ {0x11049, 0x1104D, prAL, gcPo}, // [5] BRAHMI PUNCTUATION DOT..BRAHMI PUNCTUATION LOTUS
+ {0x11052, 0x11065, prAL, gcNo}, // [20] BRAHMI NUMBER ONE..BRAHMI NUMBER ONE THOUSAND
+ {0x11066, 0x1106F, prNU, gcNd}, // [10] BRAHMI DIGIT ZERO..BRAHMI DIGIT NINE
+ {0x11070, 0x11070, prCM, gcMn}, // BRAHMI SIGN OLD TAMIL VIRAMA
+ {0x11071, 0x11072, prAL, gcLo}, // [2] BRAHMI LETTER OLD TAMIL SHORT E..BRAHMI LETTER OLD TAMIL SHORT O
+ {0x11073, 0x11074, prCM, gcMn}, // [2] BRAHMI VOWEL SIGN OLD TAMIL SHORT E..BRAHMI VOWEL SIGN OLD TAMIL SHORT O
+ {0x11075, 0x11075, prAL, gcLo}, // BRAHMI LETTER OLD TAMIL LLA
+ {0x1107F, 0x1107F, prCM, gcMn}, // BRAHMI NUMBER JOINER
+ {0x11080, 0x11081, prCM, gcMn}, // [2] KAITHI SIGN CANDRABINDU..KAITHI SIGN ANUSVARA
+ {0x11082, 0x11082, prCM, gcMc}, // KAITHI SIGN VISARGA
+ {0x11083, 0x110AF, prAL, gcLo}, // [45] KAITHI LETTER A..KAITHI LETTER HA
+ {0x110B0, 0x110B2, prCM, gcMc}, // [3] KAITHI VOWEL SIGN AA..KAITHI VOWEL SIGN II
+ {0x110B3, 0x110B6, prCM, gcMn}, // [4] KAITHI VOWEL SIGN U..KAITHI VOWEL SIGN AI
+ {0x110B7, 0x110B8, prCM, gcMc}, // [2] KAITHI VOWEL SIGN O..KAITHI VOWEL SIGN AU
+ {0x110B9, 0x110BA, prCM, gcMn}, // [2] KAITHI SIGN VIRAMA..KAITHI SIGN NUKTA
+ {0x110BB, 0x110BC, prAL, gcPo}, // [2] KAITHI ABBREVIATION SIGN..KAITHI ENUMERATION SIGN
+ {0x110BD, 0x110BD, prAL, gcCf}, // KAITHI NUMBER SIGN
+ {0x110BE, 0x110C1, prBA, gcPo}, // [4] KAITHI SECTION MARK..KAITHI DOUBLE DANDA
+ {0x110C2, 0x110C2, prCM, gcMn}, // KAITHI VOWEL SIGN VOCALIC R
+ {0x110CD, 0x110CD, prAL, gcCf}, // KAITHI NUMBER SIGN ABOVE
+ {0x110D0, 0x110E8, prAL, gcLo}, // [25] SORA SOMPENG LETTER SAH..SORA SOMPENG LETTER MAE
+ {0x110F0, 0x110F9, prNU, gcNd}, // [10] SORA SOMPENG DIGIT ZERO..SORA SOMPENG DIGIT NINE
+ {0x11100, 0x11102, prCM, gcMn}, // [3] CHAKMA SIGN CANDRABINDU..CHAKMA SIGN VISARGA
+ {0x11103, 0x11126, prAL, gcLo}, // [36] CHAKMA LETTER AA..CHAKMA LETTER HAA
+ {0x11127, 0x1112B, prCM, gcMn}, // [5] CHAKMA VOWEL SIGN A..CHAKMA VOWEL SIGN UU
+ {0x1112C, 0x1112C, prCM, gcMc}, // CHAKMA VOWEL SIGN E
+ {0x1112D, 0x11134, prCM, gcMn}, // [8] CHAKMA VOWEL SIGN AI..CHAKMA MAAYYAA
+ {0x11136, 0x1113F, prNU, gcNd}, // [10] CHAKMA DIGIT ZERO..CHAKMA DIGIT NINE
+ {0x11140, 0x11143, prBA, gcPo}, // [4] CHAKMA SECTION MARK..CHAKMA QUESTION MARK
+ {0x11144, 0x11144, prAL, gcLo}, // CHAKMA LETTER LHAA
+ {0x11145, 0x11146, prCM, gcMc}, // [2] CHAKMA VOWEL SIGN AA..CHAKMA VOWEL SIGN EI
+ {0x11147, 0x11147, prAL, gcLo}, // CHAKMA LETTER VAA
+ {0x11150, 0x11172, prAL, gcLo}, // [35] MAHAJANI LETTER A..MAHAJANI LETTER RRA
+ {0x11173, 0x11173, prCM, gcMn}, // MAHAJANI SIGN NUKTA
+ {0x11174, 0x11174, prAL, gcPo}, // MAHAJANI ABBREVIATION SIGN
+ {0x11175, 0x11175, prBB, gcPo}, // MAHAJANI SECTION MARK
+ {0x11176, 0x11176, prAL, gcLo}, // MAHAJANI LIGATURE SHRI
+ {0x11180, 0x11181, prCM, gcMn}, // [2] SHARADA SIGN CANDRABINDU..SHARADA SIGN ANUSVARA
+ {0x11182, 0x11182, prCM, gcMc}, // SHARADA SIGN VISARGA
+ {0x11183, 0x111B2, prAL, gcLo}, // [48] SHARADA LETTER A..SHARADA LETTER HA
+ {0x111B3, 0x111B5, prCM, gcMc}, // [3] SHARADA VOWEL SIGN AA..SHARADA VOWEL SIGN II
+ {0x111B6, 0x111BE, prCM, gcMn}, // [9] SHARADA VOWEL SIGN U..SHARADA VOWEL SIGN O
+ {0x111BF, 0x111C0, prCM, gcMc}, // [2] SHARADA VOWEL SIGN AU..SHARADA SIGN VIRAMA
+ {0x111C1, 0x111C4, prAL, gcLo}, // [4] SHARADA SIGN AVAGRAHA..SHARADA OM
+ {0x111C5, 0x111C6, prBA, gcPo}, // [2] SHARADA DANDA..SHARADA DOUBLE DANDA
+ {0x111C7, 0x111C7, prAL, gcPo}, // SHARADA ABBREVIATION SIGN
+ {0x111C8, 0x111C8, prBA, gcPo}, // SHARADA SEPARATOR
+ {0x111C9, 0x111CC, prCM, gcMn}, // [4] SHARADA SANDHI MARK..SHARADA EXTRA SHORT VOWEL MARK
+ {0x111CD, 0x111CD, prAL, gcPo}, // SHARADA SUTRA MARK
+ {0x111CE, 0x111CE, prCM, gcMc}, // SHARADA VOWEL SIGN PRISHTHAMATRA E
+ {0x111CF, 0x111CF, prCM, gcMn}, // SHARADA SIGN INVERTED CANDRABINDU
+ {0x111D0, 0x111D9, prNU, gcNd}, // [10] SHARADA DIGIT ZERO..SHARADA DIGIT NINE
+ {0x111DA, 0x111DA, prAL, gcLo}, // SHARADA EKAM
+ {0x111DB, 0x111DB, prBB, gcPo}, // SHARADA SIGN SIDDHAM
+ {0x111DC, 0x111DC, prAL, gcLo}, // SHARADA HEADSTROKE
+ {0x111DD, 0x111DF, prBA, gcPo}, // [3] SHARADA CONTINUATION SIGN..SHARADA SECTION MARK-2
+ {0x111E1, 0x111F4, prAL, gcNo}, // [20] SINHALA ARCHAIC DIGIT ONE..SINHALA ARCHAIC NUMBER ONE THOUSAND
+ {0x11200, 0x11211, prAL, gcLo}, // [18] KHOJKI LETTER A..KHOJKI LETTER JJA
+ {0x11213, 0x1122B, prAL, gcLo}, // [25] KHOJKI LETTER NYA..KHOJKI LETTER LLA
+ {0x1122C, 0x1122E, prCM, gcMc}, // [3] KHOJKI VOWEL SIGN AA..KHOJKI VOWEL SIGN II
+ {0x1122F, 0x11231, prCM, gcMn}, // [3] KHOJKI VOWEL SIGN U..KHOJKI VOWEL SIGN AI
+ {0x11232, 0x11233, prCM, gcMc}, // [2] KHOJKI VOWEL SIGN O..KHOJKI VOWEL SIGN AU
+ {0x11234, 0x11234, prCM, gcMn}, // KHOJKI SIGN ANUSVARA
+ {0x11235, 0x11235, prCM, gcMc}, // KHOJKI SIGN VIRAMA
+ {0x11236, 0x11237, prCM, gcMn}, // [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA
+ {0x11238, 0x11239, prBA, gcPo}, // [2] KHOJKI DANDA..KHOJKI DOUBLE DANDA
+ {0x1123A, 0x1123A, prAL, gcPo}, // KHOJKI WORD SEPARATOR
+ {0x1123B, 0x1123C, prBA, gcPo}, // [2] KHOJKI SECTION MARK..KHOJKI DOUBLE SECTION MARK
+ {0x1123D, 0x1123D, prAL, gcPo}, // KHOJKI ABBREVIATION SIGN
+ {0x1123E, 0x1123E, prCM, gcMn}, // KHOJKI SIGN SUKUN
+ {0x1123F, 0x11240, prAL, gcLo}, // [2] KHOJKI LETTER QA..KHOJKI LETTER SHORT I
+ {0x11241, 0x11241, prCM, gcMn}, // KHOJKI VOWEL SIGN VOCALIC R
+ {0x11280, 0x11286, prAL, gcLo}, // [7] MULTANI LETTER A..MULTANI LETTER GA
+ {0x11288, 0x11288, prAL, gcLo}, // MULTANI LETTER GHA
+ {0x1128A, 0x1128D, prAL, gcLo}, // [4] MULTANI LETTER CA..MULTANI LETTER JJA
+ {0x1128F, 0x1129D, prAL, gcLo}, // [15] MULTANI LETTER NYA..MULTANI LETTER BA
+ {0x1129F, 0x112A8, prAL, gcLo}, // [10] MULTANI LETTER BHA..MULTANI LETTER RHA
+ {0x112A9, 0x112A9, prBA, gcPo}, // MULTANI SECTION MARK
+ {0x112B0, 0x112DE, prAL, gcLo}, // [47] KHUDAWADI LETTER A..KHUDAWADI LETTER HA
+ {0x112DF, 0x112DF, prCM, gcMn}, // KHUDAWADI SIGN ANUSVARA
+ {0x112E0, 0x112E2, prCM, gcMc}, // [3] KHUDAWADI VOWEL SIGN AA..KHUDAWADI VOWEL SIGN II
+ {0x112E3, 0x112EA, prCM, gcMn}, // [8] KHUDAWADI VOWEL SIGN U..KHUDAWADI SIGN VIRAMA
+ {0x112F0, 0x112F9, prNU, gcNd}, // [10] KHUDAWADI DIGIT ZERO..KHUDAWADI DIGIT NINE
+ {0x11300, 0x11301, prCM, gcMn}, // [2] GRANTHA SIGN COMBINING ANUSVARA ABOVE..GRANTHA SIGN CANDRABINDU
+ {0x11302, 0x11303, prCM, gcMc}, // [2] GRANTHA SIGN ANUSVARA..GRANTHA SIGN VISARGA
+ {0x11305, 0x1130C, prAL, gcLo}, // [8] GRANTHA LETTER A..GRANTHA LETTER VOCALIC L
+ {0x1130F, 0x11310, prAL, gcLo}, // [2] GRANTHA LETTER EE..GRANTHA LETTER AI
+ {0x11313, 0x11328, prAL, gcLo}, // [22] GRANTHA LETTER OO..GRANTHA LETTER NA
+ {0x1132A, 0x11330, prAL, gcLo}, // [7] GRANTHA LETTER PA..GRANTHA LETTER RA
+ {0x11332, 0x11333, prAL, gcLo}, // [2] GRANTHA LETTER LA..GRANTHA LETTER LLA
+ {0x11335, 0x11339, prAL, gcLo}, // [5] GRANTHA LETTER VA..GRANTHA LETTER HA
+ {0x1133B, 0x1133C, prCM, gcMn}, // [2] COMBINING BINDU BELOW..GRANTHA SIGN NUKTA
+ {0x1133D, 0x1133D, prAL, gcLo}, // GRANTHA SIGN AVAGRAHA
+ {0x1133E, 0x1133F, prCM, gcMc}, // [2] GRANTHA VOWEL SIGN AA..GRANTHA VOWEL SIGN I
+ {0x11340, 0x11340, prCM, gcMn}, // GRANTHA VOWEL SIGN II
+ {0x11341, 0x11344, prCM, gcMc}, // [4] GRANTHA VOWEL SIGN U..GRANTHA VOWEL SIGN VOCALIC RR
+ {0x11347, 0x11348, prCM, gcMc}, // [2] GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI
+ {0x1134B, 0x1134D, prCM, gcMc}, // [3] GRANTHA VOWEL SIGN OO..GRANTHA SIGN VIRAMA
+ {0x11350, 0x11350, prAL, gcLo}, // GRANTHA OM
+ {0x11357, 0x11357, prCM, gcMc}, // GRANTHA AU LENGTH MARK
+ {0x1135D, 0x11361, prAL, gcLo}, // [5] GRANTHA SIGN PLUTA..GRANTHA LETTER VOCALIC LL
+ {0x11362, 0x11363, prCM, gcMc}, // [2] GRANTHA VOWEL SIGN VOCALIC L..GRANTHA VOWEL SIGN VOCALIC LL
+ {0x11366, 0x1136C, prCM, gcMn}, // [7] COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX
+ {0x11370, 0x11374, prCM, gcMn}, // [5] COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA
+ {0x11400, 0x11434, prAL, gcLo}, // [53] NEWA LETTER A..NEWA LETTER HA
+ {0x11435, 0x11437, prCM, gcMc}, // [3] NEWA VOWEL SIGN AA..NEWA VOWEL SIGN II
+ {0x11438, 0x1143F, prCM, gcMn}, // [8] NEWA VOWEL SIGN U..NEWA VOWEL SIGN AI
+ {0x11440, 0x11441, prCM, gcMc}, // [2] NEWA VOWEL SIGN O..NEWA VOWEL SIGN AU
+ {0x11442, 0x11444, prCM, gcMn}, // [3] NEWA SIGN VIRAMA..NEWA SIGN ANUSVARA
+ {0x11445, 0x11445, prCM, gcMc}, // NEWA SIGN VISARGA
+ {0x11446, 0x11446, prCM, gcMn}, // NEWA SIGN NUKTA
+ {0x11447, 0x1144A, prAL, gcLo}, // [4] NEWA SIGN AVAGRAHA..NEWA SIDDHI
+ {0x1144B, 0x1144E, prBA, gcPo}, // [4] NEWA DANDA..NEWA GAP FILLER
+ {0x1144F, 0x1144F, prAL, gcPo}, // NEWA ABBREVIATION SIGN
+ {0x11450, 0x11459, prNU, gcNd}, // [10] NEWA DIGIT ZERO..NEWA DIGIT NINE
+ {0x1145A, 0x1145B, prBA, gcPo}, // [2] NEWA DOUBLE COMMA..NEWA PLACEHOLDER MARK
+ {0x1145D, 0x1145D, prAL, gcPo}, // NEWA INSERTION SIGN
+ {0x1145E, 0x1145E, prCM, gcMn}, // NEWA SANDHI MARK
+ {0x1145F, 0x11461, prAL, gcLo}, // [3] NEWA LETTER VEDIC ANUSVARA..NEWA SIGN UPADHMANIYA
+ {0x11480, 0x114AF, prAL, gcLo}, // [48] TIRHUTA ANJI..TIRHUTA LETTER HA
+ {0x114B0, 0x114B2, prCM, gcMc}, // [3] TIRHUTA VOWEL SIGN AA..TIRHUTA VOWEL SIGN II
+ {0x114B3, 0x114B8, prCM, gcMn}, // [6] TIRHUTA VOWEL SIGN U..TIRHUTA VOWEL SIGN VOCALIC LL
+ {0x114B9, 0x114B9, prCM, gcMc}, // TIRHUTA VOWEL SIGN E
+ {0x114BA, 0x114BA, prCM, gcMn}, // TIRHUTA VOWEL SIGN SHORT E
+ {0x114BB, 0x114BE, prCM, gcMc}, // [4] TIRHUTA VOWEL SIGN AI..TIRHUTA VOWEL SIGN AU
+ {0x114BF, 0x114C0, prCM, gcMn}, // [2] TIRHUTA SIGN CANDRABINDU..TIRHUTA SIGN ANUSVARA
+ {0x114C1, 0x114C1, prCM, gcMc}, // TIRHUTA SIGN VISARGA
+ {0x114C2, 0x114C3, prCM, gcMn}, // [2] TIRHUTA SIGN VIRAMA..TIRHUTA SIGN NUKTA
+ {0x114C4, 0x114C5, prAL, gcLo}, // [2] TIRHUTA SIGN AVAGRAHA..TIRHUTA GVANG
+ {0x114C6, 0x114C6, prAL, gcPo}, // TIRHUTA ABBREVIATION SIGN
+ {0x114C7, 0x114C7, prAL, gcLo}, // TIRHUTA OM
+ {0x114D0, 0x114D9, prNU, gcNd}, // [10] TIRHUTA DIGIT ZERO..TIRHUTA DIGIT NINE
+ {0x11580, 0x115AE, prAL, gcLo}, // [47] SIDDHAM LETTER A..SIDDHAM LETTER HA
+ {0x115AF, 0x115B1, prCM, gcMc}, // [3] SIDDHAM VOWEL SIGN AA..SIDDHAM VOWEL SIGN II
+ {0x115B2, 0x115B5, prCM, gcMn}, // [4] SIDDHAM VOWEL SIGN U..SIDDHAM VOWEL SIGN VOCALIC RR
+ {0x115B8, 0x115BB, prCM, gcMc}, // [4] SIDDHAM VOWEL SIGN E..SIDDHAM VOWEL SIGN AU
+ {0x115BC, 0x115BD, prCM, gcMn}, // [2] SIDDHAM SIGN CANDRABINDU..SIDDHAM SIGN ANUSVARA
+ {0x115BE, 0x115BE, prCM, gcMc}, // SIDDHAM SIGN VISARGA
+ {0x115BF, 0x115C0, prCM, gcMn}, // [2] SIDDHAM SIGN VIRAMA..SIDDHAM SIGN NUKTA
+ {0x115C1, 0x115C1, prBB, gcPo}, // SIDDHAM SIGN SIDDHAM
+ {0x115C2, 0x115C3, prBA, gcPo}, // [2] SIDDHAM DANDA..SIDDHAM DOUBLE DANDA
+ {0x115C4, 0x115C5, prEX, gcPo}, // [2] SIDDHAM SEPARATOR DOT..SIDDHAM SEPARATOR BAR
+ {0x115C6, 0x115C8, prAL, gcPo}, // [3] SIDDHAM REPETITION MARK-1..SIDDHAM REPETITION MARK-3
+ {0x115C9, 0x115D7, prBA, gcPo}, // [15] SIDDHAM END OF TEXT MARK..SIDDHAM SECTION MARK WITH CIRCLES AND FOUR ENCLOSURES
+ {0x115D8, 0x115DB, prAL, gcLo}, // [4] SIDDHAM LETTER THREE-CIRCLE ALTERNATE I..SIDDHAM LETTER ALTERNATE U
+ {0x115DC, 0x115DD, prCM, gcMn}, // [2] SIDDHAM VOWEL SIGN ALTERNATE U..SIDDHAM VOWEL SIGN ALTERNATE UU
+ {0x11600, 0x1162F, prAL, gcLo}, // [48] MODI LETTER A..MODI LETTER LLA
+ {0x11630, 0x11632, prCM, gcMc}, // [3] MODI VOWEL SIGN AA..MODI VOWEL SIGN II
+ {0x11633, 0x1163A, prCM, gcMn}, // [8] MODI VOWEL SIGN U..MODI VOWEL SIGN AI
+ {0x1163B, 0x1163C, prCM, gcMc}, // [2] MODI VOWEL SIGN O..MODI VOWEL SIGN AU
+ {0x1163D, 0x1163D, prCM, gcMn}, // MODI SIGN ANUSVARA
+ {0x1163E, 0x1163E, prCM, gcMc}, // MODI SIGN VISARGA
+ {0x1163F, 0x11640, prCM, gcMn}, // [2] MODI SIGN VIRAMA..MODI SIGN ARDHACANDRA
+ {0x11641, 0x11642, prBA, gcPo}, // [2] MODI DANDA..MODI DOUBLE DANDA
+ {0x11643, 0x11643, prAL, gcPo}, // MODI ABBREVIATION SIGN
+ {0x11644, 0x11644, prAL, gcLo}, // MODI SIGN HUVA
+ {0x11650, 0x11659, prNU, gcNd}, // [10] MODI DIGIT ZERO..MODI DIGIT NINE
+ {0x11660, 0x1166C, prBB, gcPo}, // [13] MONGOLIAN BIRGA WITH ORNAMENT..MONGOLIAN TURNED SWIRL BIRGA WITH DOUBLE ORNAMENT
+ {0x11680, 0x116AA, prAL, gcLo}, // [43] TAKRI LETTER A..TAKRI LETTER RRA
+ {0x116AB, 0x116AB, prCM, gcMn}, // TAKRI SIGN ANUSVARA
+ {0x116AC, 0x116AC, prCM, gcMc}, // TAKRI SIGN VISARGA
+ {0x116AD, 0x116AD, prCM, gcMn}, // TAKRI VOWEL SIGN AA
+ {0x116AE, 0x116AF, prCM, gcMc}, // [2] TAKRI VOWEL SIGN I..TAKRI VOWEL SIGN II
+ {0x116B0, 0x116B5, prCM, gcMn}, // [6] TAKRI VOWEL SIGN U..TAKRI VOWEL SIGN AU
+ {0x116B6, 0x116B6, prCM, gcMc}, // TAKRI SIGN VIRAMA
+ {0x116B7, 0x116B7, prCM, gcMn}, // TAKRI SIGN NUKTA
+ {0x116B8, 0x116B8, prAL, gcLo}, // TAKRI LETTER ARCHAIC KHA
+ {0x116B9, 0x116B9, prAL, gcPo}, // TAKRI ABBREVIATION SIGN
+ {0x116C0, 0x116C9, prNU, gcNd}, // [10] TAKRI DIGIT ZERO..TAKRI DIGIT NINE
+ {0x11700, 0x1171A, prSA, gcLo}, // [27] AHOM LETTER KA..AHOM LETTER ALTERNATE BA
+ {0x1171D, 0x1171F, prSA, gcMn}, // [3] AHOM CONSONANT SIGN MEDIAL LA..AHOM CONSONANT SIGN MEDIAL LIGATING RA
+ {0x11720, 0x11721, prSA, gcMc}, // [2] AHOM VOWEL SIGN A..AHOM VOWEL SIGN AA
+ {0x11722, 0x11725, prSA, gcMn}, // [4] AHOM VOWEL SIGN I..AHOM VOWEL SIGN UU
+ {0x11726, 0x11726, prSA, gcMc}, // AHOM VOWEL SIGN E
+ {0x11727, 0x1172B, prSA, gcMn}, // [5] AHOM VOWEL SIGN AW..AHOM SIGN KILLER
+ {0x11730, 0x11739, prNU, gcNd}, // [10] AHOM DIGIT ZERO..AHOM DIGIT NINE
+ {0x1173A, 0x1173B, prSA, gcNo}, // [2] AHOM NUMBER TEN..AHOM NUMBER TWENTY
+ {0x1173C, 0x1173E, prBA, gcPo}, // [3] AHOM SIGN SMALL SECTION..AHOM SIGN RULAI
+ {0x1173F, 0x1173F, prSA, gcSo}, // AHOM SYMBOL VI
+ {0x11740, 0x11746, prSA, gcLo}, // [7] AHOM LETTER CA..AHOM LETTER LLA
+ {0x11800, 0x1182B, prAL, gcLo}, // [44] DOGRA LETTER A..DOGRA LETTER RRA
+ {0x1182C, 0x1182E, prCM, gcMc}, // [3] DOGRA VOWEL SIGN AA..DOGRA VOWEL SIGN II
+ {0x1182F, 0x11837, prCM, gcMn}, // [9] DOGRA VOWEL SIGN U..DOGRA SIGN ANUSVARA
+ {0x11838, 0x11838, prCM, gcMc}, // DOGRA SIGN VISARGA
+ {0x11839, 0x1183A, prCM, gcMn}, // [2] DOGRA SIGN VIRAMA..DOGRA SIGN NUKTA
+ {0x1183B, 0x1183B, prAL, gcPo}, // DOGRA ABBREVIATION SIGN
+ {0x118A0, 0x118DF, prAL, gcLC}, // [64] WARANG CITI CAPITAL LETTER NGAA..WARANG CITI SMALL LETTER VIYO
+ {0x118E0, 0x118E9, prNU, gcNd}, // [10] WARANG CITI DIGIT ZERO..WARANG CITI DIGIT NINE
+ {0x118EA, 0x118F2, prAL, gcNo}, // [9] WARANG CITI NUMBER TEN..WARANG CITI NUMBER NINETY
+ {0x118FF, 0x118FF, prAL, gcLo}, // WARANG CITI OM
+ {0x11900, 0x11906, prAL, gcLo}, // [7] DIVES AKURU LETTER A..DIVES AKURU LETTER E
+ {0x11909, 0x11909, prAL, gcLo}, // DIVES AKURU LETTER O
+ {0x1190C, 0x11913, prAL, gcLo}, // [8] DIVES AKURU LETTER KA..DIVES AKURU LETTER JA
+ {0x11915, 0x11916, prAL, gcLo}, // [2] DIVES AKURU LETTER NYA..DIVES AKURU LETTER TTA
+ {0x11918, 0x1192F, prAL, gcLo}, // [24] DIVES AKURU LETTER DDA..DIVES AKURU LETTER ZA
+ {0x11930, 0x11935, prCM, gcMc}, // [6] DIVES AKURU VOWEL SIGN AA..DIVES AKURU VOWEL SIGN E
+ {0x11937, 0x11938, prCM, gcMc}, // [2] DIVES AKURU VOWEL SIGN AI..DIVES AKURU VOWEL SIGN O
+ {0x1193B, 0x1193C, prCM, gcMn}, // [2] DIVES AKURU SIGN ANUSVARA..DIVES AKURU SIGN CANDRABINDU
+ {0x1193D, 0x1193D, prCM, gcMc}, // DIVES AKURU SIGN HALANTA
+ {0x1193E, 0x1193E, prCM, gcMn}, // DIVES AKURU VIRAMA
+ {0x1193F, 0x1193F, prAL, gcLo}, // DIVES AKURU PREFIXED NASAL SIGN
+ {0x11940, 0x11940, prCM, gcMc}, // DIVES AKURU MEDIAL YA
+ {0x11941, 0x11941, prAL, gcLo}, // DIVES AKURU INITIAL RA
+ {0x11942, 0x11942, prCM, gcMc}, // DIVES AKURU MEDIAL RA
+ {0x11943, 0x11943, prCM, gcMn}, // DIVES AKURU SIGN NUKTA
+ {0x11944, 0x11946, prBA, gcPo}, // [3] DIVES AKURU DOUBLE DANDA..DIVES AKURU END OF TEXT MARK
+ {0x11950, 0x11959, prNU, gcNd}, // [10] DIVES AKURU DIGIT ZERO..DIVES AKURU DIGIT NINE
+ {0x119A0, 0x119A7, prAL, gcLo}, // [8] NANDINAGARI LETTER A..NANDINAGARI LETTER VOCALIC RR
+ {0x119AA, 0x119D0, prAL, gcLo}, // [39] NANDINAGARI LETTER E..NANDINAGARI LETTER RRA
+ {0x119D1, 0x119D3, prCM, gcMc}, // [3] NANDINAGARI VOWEL SIGN AA..NANDINAGARI VOWEL SIGN II
+ {0x119D4, 0x119D7, prCM, gcMn}, // [4] NANDINAGARI VOWEL SIGN U..NANDINAGARI VOWEL SIGN VOCALIC RR
+ {0x119DA, 0x119DB, prCM, gcMn}, // [2] NANDINAGARI VOWEL SIGN E..NANDINAGARI VOWEL SIGN AI
+ {0x119DC, 0x119DF, prCM, gcMc}, // [4] NANDINAGARI VOWEL SIGN O..NANDINAGARI SIGN VISARGA
+ {0x119E0, 0x119E0, prCM, gcMn}, // NANDINAGARI SIGN VIRAMA
+ {0x119E1, 0x119E1, prAL, gcLo}, // NANDINAGARI SIGN AVAGRAHA
+ {0x119E2, 0x119E2, prBB, gcPo}, // NANDINAGARI SIGN SIDDHAM
+ {0x119E3, 0x119E3, prAL, gcLo}, // NANDINAGARI HEADSTROKE
+ {0x119E4, 0x119E4, prCM, gcMc}, // NANDINAGARI VOWEL SIGN PRISHTHAMATRA E
+ {0x11A00, 0x11A00, prAL, gcLo}, // ZANABAZAR SQUARE LETTER A
+ {0x11A01, 0x11A0A, prCM, gcMn}, // [10] ZANABAZAR SQUARE VOWEL SIGN I..ZANABAZAR SQUARE VOWEL LENGTH MARK
+ {0x11A0B, 0x11A32, prAL, gcLo}, // [40] ZANABAZAR SQUARE LETTER KA..ZANABAZAR SQUARE LETTER KSSA
+ {0x11A33, 0x11A38, prCM, gcMn}, // [6] ZANABAZAR SQUARE FINAL CONSONANT MARK..ZANABAZAR SQUARE SIGN ANUSVARA
+ {0x11A39, 0x11A39, prCM, gcMc}, // ZANABAZAR SQUARE SIGN VISARGA
+ {0x11A3A, 0x11A3A, prAL, gcLo}, // ZANABAZAR SQUARE CLUSTER-INITIAL LETTER RA
+ {0x11A3B, 0x11A3E, prCM, gcMn}, // [4] ZANABAZAR SQUARE CLUSTER-FINAL LETTER YA..ZANABAZAR SQUARE CLUSTER-FINAL LETTER VA
+ {0x11A3F, 0x11A3F, prBB, gcPo}, // ZANABAZAR SQUARE INITIAL HEAD MARK
+ {0x11A40, 0x11A40, prAL, gcPo}, // ZANABAZAR SQUARE CLOSING HEAD MARK
+ {0x11A41, 0x11A44, prBA, gcPo}, // [4] ZANABAZAR SQUARE MARK TSHEG..ZANABAZAR SQUARE MARK LONG TSHEG
+ {0x11A45, 0x11A45, prBB, gcPo}, // ZANABAZAR SQUARE INITIAL DOUBLE-LINED HEAD MARK
+ {0x11A46, 0x11A46, prAL, gcPo}, // ZANABAZAR SQUARE CLOSING DOUBLE-LINED HEAD MARK
+ {0x11A47, 0x11A47, prCM, gcMn}, // ZANABAZAR SQUARE SUBJOINER
+ {0x11A50, 0x11A50, prAL, gcLo}, // SOYOMBO LETTER A
+ {0x11A51, 0x11A56, prCM, gcMn}, // [6] SOYOMBO VOWEL SIGN I..SOYOMBO VOWEL SIGN OE
+ {0x11A57, 0x11A58, prCM, gcMc}, // [2] SOYOMBO VOWEL SIGN AI..SOYOMBO VOWEL SIGN AU
+ {0x11A59, 0x11A5B, prCM, gcMn}, // [3] SOYOMBO VOWEL SIGN VOCALIC R..SOYOMBO VOWEL LENGTH MARK
+ {0x11A5C, 0x11A89, prAL, gcLo}, // [46] SOYOMBO LETTER KA..SOYOMBO CLUSTER-INITIAL LETTER SA
+ {0x11A8A, 0x11A96, prCM, gcMn}, // [13] SOYOMBO FINAL CONSONANT SIGN G..SOYOMBO SIGN ANUSVARA
+ {0x11A97, 0x11A97, prCM, gcMc}, // SOYOMBO SIGN VISARGA
+ {0x11A98, 0x11A99, prCM, gcMn}, // [2] SOYOMBO GEMINATION MARK..SOYOMBO SUBJOINER
+ {0x11A9A, 0x11A9C, prBA, gcPo}, // [3] SOYOMBO MARK TSHEG..SOYOMBO MARK DOUBLE SHAD
+ {0x11A9D, 0x11A9D, prAL, gcLo}, // SOYOMBO MARK PLUTA
+ {0x11A9E, 0x11AA0, prBB, gcPo}, // [3] SOYOMBO HEAD MARK WITH MOON AND SUN AND TRIPLE FLAME..SOYOMBO HEAD MARK WITH MOON AND SUN
+ {0x11AA1, 0x11AA2, prBA, gcPo}, // [2] SOYOMBO TERMINAL MARK-1..SOYOMBO TERMINAL MARK-2
+ {0x11AB0, 0x11ABF, prAL, gcLo}, // [16] CANADIAN SYLLABICS NATTILIK HI..CANADIAN SYLLABICS SPA
+ {0x11AC0, 0x11AF8, prAL, gcLo}, // [57] PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL
+ {0x11B00, 0x11B09, prBB, gcPo}, // [10] DEVANAGARI HEAD MARK..DEVANAGARI SIGN MINDU
+ {0x11C00, 0x11C08, prAL, gcLo}, // [9] BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L
+ {0x11C0A, 0x11C2E, prAL, gcLo}, // [37] BHAIKSUKI LETTER E..BHAIKSUKI LETTER HA
+ {0x11C2F, 0x11C2F, prCM, gcMc}, // BHAIKSUKI VOWEL SIGN AA
+ {0x11C30, 0x11C36, prCM, gcMn}, // [7] BHAIKSUKI VOWEL SIGN I..BHAIKSUKI VOWEL SIGN VOCALIC L
+ {0x11C38, 0x11C3D, prCM, gcMn}, // [6] BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN ANUSVARA
+ {0x11C3E, 0x11C3E, prCM, gcMc}, // BHAIKSUKI SIGN VISARGA
+ {0x11C3F, 0x11C3F, prCM, gcMn}, // BHAIKSUKI SIGN VIRAMA
+ {0x11C40, 0x11C40, prAL, gcLo}, // BHAIKSUKI SIGN AVAGRAHA
+ {0x11C41, 0x11C45, prBA, gcPo}, // [5] BHAIKSUKI DANDA..BHAIKSUKI GAP FILLER-2
+ {0x11C50, 0x11C59, prNU, gcNd}, // [10] BHAIKSUKI DIGIT ZERO..BHAIKSUKI DIGIT NINE
+ {0x11C5A, 0x11C6C, prAL, gcNo}, // [19] BHAIKSUKI NUMBER ONE..BHAIKSUKI HUNDREDS UNIT MARK
+ {0x11C70, 0x11C70, prBB, gcPo}, // MARCHEN HEAD MARK
+ {0x11C71, 0x11C71, prEX, gcPo}, // MARCHEN MARK SHAD
+ {0x11C72, 0x11C8F, prAL, gcLo}, // [30] MARCHEN LETTER KA..MARCHEN LETTER A
+ {0x11C92, 0x11CA7, prCM, gcMn}, // [22] MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA
+ {0x11CA9, 0x11CA9, prCM, gcMc}, // MARCHEN SUBJOINED LETTER YA
+ {0x11CAA, 0x11CB0, prCM, gcMn}, // [7] MARCHEN SUBJOINED LETTER RA..MARCHEN VOWEL SIGN AA
+ {0x11CB1, 0x11CB1, prCM, gcMc}, // MARCHEN VOWEL SIGN I
+ {0x11CB2, 0x11CB3, prCM, gcMn}, // [2] MARCHEN VOWEL SIGN U..MARCHEN VOWEL SIGN E
+ {0x11CB4, 0x11CB4, prCM, gcMc}, // MARCHEN VOWEL SIGN O
+ {0x11CB5, 0x11CB6, prCM, gcMn}, // [2] MARCHEN SIGN ANUSVARA..MARCHEN SIGN CANDRABINDU
+ {0x11D00, 0x11D06, prAL, gcLo}, // [7] MASARAM GONDI LETTER A..MASARAM GONDI LETTER E
+ {0x11D08, 0x11D09, prAL, gcLo}, // [2] MASARAM GONDI LETTER AI..MASARAM GONDI LETTER O
+ {0x11D0B, 0x11D30, prAL, gcLo}, // [38] MASARAM GONDI LETTER AU..MASARAM GONDI LETTER TRA
+ {0x11D31, 0x11D36, prCM, gcMn}, // [6] MASARAM GONDI VOWEL SIGN AA..MASARAM GONDI VOWEL SIGN VOCALIC R
+ {0x11D3A, 0x11D3A, prCM, gcMn}, // MASARAM GONDI VOWEL SIGN E
+ {0x11D3C, 0x11D3D, prCM, gcMn}, // [2] MASARAM GONDI VOWEL SIGN AI..MASARAM GONDI VOWEL SIGN O
+ {0x11D3F, 0x11D45, prCM, gcMn}, // [7] MASARAM GONDI VOWEL SIGN AU..MASARAM GONDI VIRAMA
+ {0x11D46, 0x11D46, prAL, gcLo}, // MASARAM GONDI REPHA
+ {0x11D47, 0x11D47, prCM, gcMn}, // MASARAM GONDI RA-KARA
+ {0x11D50, 0x11D59, prNU, gcNd}, // [10] MASARAM GONDI DIGIT ZERO..MASARAM GONDI DIGIT NINE
+ {0x11D60, 0x11D65, prAL, gcLo}, // [6] GUNJALA GONDI LETTER A..GUNJALA GONDI LETTER UU
+ {0x11D67, 0x11D68, prAL, gcLo}, // [2] GUNJALA GONDI LETTER EE..GUNJALA GONDI LETTER AI
+ {0x11D6A, 0x11D89, prAL, gcLo}, // [32] GUNJALA GONDI LETTER OO..GUNJALA GONDI LETTER SA
+ {0x11D8A, 0x11D8E, prCM, gcMc}, // [5] GUNJALA GONDI VOWEL SIGN AA..GUNJALA GONDI VOWEL SIGN UU
+ {0x11D90, 0x11D91, prCM, gcMn}, // [2] GUNJALA GONDI VOWEL SIGN EE..GUNJALA GONDI VOWEL SIGN AI
+ {0x11D93, 0x11D94, prCM, gcMc}, // [2] GUNJALA GONDI VOWEL SIGN OO..GUNJALA GONDI VOWEL SIGN AU
+ {0x11D95, 0x11D95, prCM, gcMn}, // GUNJALA GONDI SIGN ANUSVARA
+ {0x11D96, 0x11D96, prCM, gcMc}, // GUNJALA GONDI SIGN VISARGA
+ {0x11D97, 0x11D97, prCM, gcMn}, // GUNJALA GONDI VIRAMA
+ {0x11D98, 0x11D98, prAL, gcLo}, // GUNJALA GONDI OM
+ {0x11DA0, 0x11DA9, prNU, gcNd}, // [10] GUNJALA GONDI DIGIT ZERO..GUNJALA GONDI DIGIT NINE
+ {0x11EE0, 0x11EF2, prAL, gcLo}, // [19] MAKASAR LETTER KA..MAKASAR ANGKA
+ {0x11EF3, 0x11EF4, prCM, gcMn}, // [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
+ {0x11EF5, 0x11EF6, prCM, gcMc}, // [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
+ {0x11EF7, 0x11EF8, prAL, gcPo}, // [2] MAKASAR PASSIMBANG..MAKASAR END OF SECTION
+ {0x11F00, 0x11F01, prCM, gcMn}, // [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA
+ {0x11F02, 0x11F02, prAL, gcLo}, // KAWI SIGN REPHA
+ {0x11F03, 0x11F03, prCM, gcMc}, // KAWI SIGN VISARGA
+ {0x11F04, 0x11F10, prAL, gcLo}, // [13] KAWI LETTER A..KAWI LETTER O
+ {0x11F12, 0x11F33, prAL, gcLo}, // [34] KAWI LETTER KA..KAWI LETTER JNYA
+ {0x11F34, 0x11F35, prCM, gcMc}, // [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA
+ {0x11F36, 0x11F3A, prCM, gcMn}, // [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R
+ {0x11F3E, 0x11F3F, prCM, gcMc}, // [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI
+ {0x11F40, 0x11F40, prCM, gcMn}, // KAWI VOWEL SIGN EU
+ {0x11F41, 0x11F41, prCM, gcMc}, // KAWI SIGN KILLER
+ {0x11F42, 0x11F42, prCM, gcMn}, // KAWI CONJOINER
+ {0x11F43, 0x11F44, prBA, gcPo}, // [2] KAWI DANDA..KAWI DOUBLE DANDA
+ {0x11F45, 0x11F4F, prID, gcPo}, // [11] KAWI PUNCTUATION SECTION MARKER..KAWI PUNCTUATION CLOSING SPIRAL
+ {0x11F50, 0x11F59, prNU, gcNd}, // [10] KAWI DIGIT ZERO..KAWI DIGIT NINE
+ {0x11FB0, 0x11FB0, prAL, gcLo}, // LISU LETTER YHA
+ {0x11FC0, 0x11FD4, prAL, gcNo}, // [21] TAMIL FRACTION ONE THREE-HUNDRED-AND-TWENTIETH..TAMIL FRACTION DOWNSCALING FACTOR KIIZH
+ {0x11FD5, 0x11FDC, prAL, gcSo}, // [8] TAMIL SIGN NEL..TAMIL SIGN MUKKURUNI
+ {0x11FDD, 0x11FE0, prPO, gcSc}, // [4] TAMIL SIGN KAACU..TAMIL SIGN VARAAKAN
+ {0x11FE1, 0x11FF1, prAL, gcSo}, // [17] TAMIL SIGN PAARAM..TAMIL SIGN VAKAIYARAA
+ {0x11FFF, 0x11FFF, prBA, gcPo}, // TAMIL PUNCTUATION END OF TEXT
+ {0x12000, 0x12399, prAL, gcLo}, // [922] CUNEIFORM SIGN A..CUNEIFORM SIGN U U
+ {0x12400, 0x1246E, prAL, gcNl}, // [111] CUNEIFORM NUMERIC SIGN TWO ASH..CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM
+ {0x12470, 0x12474, prBA, gcPo}, // [5] CUNEIFORM PUNCTUATION SIGN OLD ASSYRIAN WORD DIVIDER..CUNEIFORM PUNCTUATION SIGN DIAGONAL QUADCOLON
+ {0x12480, 0x12543, prAL, gcLo}, // [196] CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU
+ {0x12F90, 0x12FF0, prAL, gcLo}, // [97] CYPRO-MINOAN SIGN CM001..CYPRO-MINOAN SIGN CM114
+ {0x12FF1, 0x12FF2, prAL, gcPo}, // [2] CYPRO-MINOAN SIGN CM301..CYPRO-MINOAN SIGN CM302
+ {0x13000, 0x13257, prAL, gcLo}, // [600] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH O006
+ {0x13258, 0x1325A, prOP, gcLo}, // [3] EGYPTIAN HIEROGLYPH O006A..EGYPTIAN HIEROGLYPH O006C
+ {0x1325B, 0x1325D, prCL, gcLo}, // [3] EGYPTIAN HIEROGLYPH O006D..EGYPTIAN HIEROGLYPH O006F
+ {0x1325E, 0x13281, prAL, gcLo}, // [36] EGYPTIAN HIEROGLYPH O007..EGYPTIAN HIEROGLYPH O033
+ {0x13282, 0x13282, prCL, gcLo}, // EGYPTIAN HIEROGLYPH O033A
+ {0x13283, 0x13285, prAL, gcLo}, // [3] EGYPTIAN HIEROGLYPH O034..EGYPTIAN HIEROGLYPH O036
+ {0x13286, 0x13286, prOP, gcLo}, // EGYPTIAN HIEROGLYPH O036A
+ {0x13287, 0x13287, prCL, gcLo}, // EGYPTIAN HIEROGLYPH O036B
+ {0x13288, 0x13288, prOP, gcLo}, // EGYPTIAN HIEROGLYPH O036C
+ {0x13289, 0x13289, prCL, gcLo}, // EGYPTIAN HIEROGLYPH O036D
+ {0x1328A, 0x13378, prAL, gcLo}, // [239] EGYPTIAN HIEROGLYPH O037..EGYPTIAN HIEROGLYPH V011
+ {0x13379, 0x13379, prOP, gcLo}, // EGYPTIAN HIEROGLYPH V011A
+ {0x1337A, 0x1337B, prCL, gcLo}, // [2] EGYPTIAN HIEROGLYPH V011B..EGYPTIAN HIEROGLYPH V011C
+ {0x1337C, 0x1342F, prAL, gcLo}, // [180] EGYPTIAN HIEROGLYPH V012..EGYPTIAN HIEROGLYPH V011D
+ {0x13430, 0x13436, prGL, gcCf}, // [7] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH OVERLAY MIDDLE
+ {0x13437, 0x13437, prOP, gcCf}, // EGYPTIAN HIEROGLYPH BEGIN SEGMENT
+ {0x13438, 0x13438, prCL, gcCf}, // EGYPTIAN HIEROGLYPH END SEGMENT
+ {0x13439, 0x1343B, prGL, gcCf}, // [3] EGYPTIAN HIEROGLYPH INSERT AT MIDDLE..EGYPTIAN HIEROGLYPH INSERT AT BOTTOM
+ {0x1343C, 0x1343C, prOP, gcCf}, // EGYPTIAN HIEROGLYPH BEGIN ENCLOSURE
+ {0x1343D, 0x1343D, prCL, gcCf}, // EGYPTIAN HIEROGLYPH END ENCLOSURE
+ {0x1343E, 0x1343E, prOP, gcCf}, // EGYPTIAN HIEROGLYPH BEGIN WALLED ENCLOSURE
+ {0x1343F, 0x1343F, prCL, gcCf}, // EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE
+ {0x13440, 0x13440, prCM, gcMn}, // EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY
+ {0x13441, 0x13446, prAL, gcLo}, // [6] EGYPTIAN HIEROGLYPH FULL BLANK..EGYPTIAN HIEROGLYPH WIDE LOST SIGN
+ {0x13447, 0x13455, prCM, gcMn}, // [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED
+ {0x14400, 0x145CD, prAL, gcLo}, // [462] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A409
+ {0x145CE, 0x145CE, prOP, gcLo}, // ANATOLIAN HIEROGLYPH A410 BEGIN LOGOGRAM MARK
+ {0x145CF, 0x145CF, prCL, gcLo}, // ANATOLIAN HIEROGLYPH A410A END LOGOGRAM MARK
+ {0x145D0, 0x14646, prAL, gcLo}, // [119] ANATOLIAN HIEROGLYPH A411..ANATOLIAN HIEROGLYPH A530
+ {0x16800, 0x16A38, prAL, gcLo}, // [569] BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ
+ {0x16A40, 0x16A5E, prAL, gcLo}, // [31] MRO LETTER TA..MRO LETTER TEK
+ {0x16A60, 0x16A69, prNU, gcNd}, // [10] MRO DIGIT ZERO..MRO DIGIT NINE
+ {0x16A6E, 0x16A6F, prBA, gcPo}, // [2] MRO DANDA..MRO DOUBLE DANDA
+ {0x16A70, 0x16ABE, prAL, gcLo}, // [79] TANGSA LETTER OZ..TANGSA LETTER ZA
+ {0x16AC0, 0x16AC9, prNU, gcNd}, // [10] TANGSA DIGIT ZERO..TANGSA DIGIT NINE
+ {0x16AD0, 0x16AED, prAL, gcLo}, // [30] BASSA VAH LETTER ENNI..BASSA VAH LETTER I
+ {0x16AF0, 0x16AF4, prCM, gcMn}, // [5] BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE
+ {0x16AF5, 0x16AF5, prBA, gcPo}, // BASSA VAH FULL STOP
+ {0x16B00, 0x16B2F, prAL, gcLo}, // [48] PAHAWH HMONG VOWEL KEEB..PAHAWH HMONG CONSONANT CAU
+ {0x16B30, 0x16B36, prCM, gcMn}, // [7] PAHAWH HMONG MARK CIM TUB..PAHAWH HMONG MARK CIM TAUM
+ {0x16B37, 0x16B39, prBA, gcPo}, // [3] PAHAWH HMONG SIGN VOS THOM..PAHAWH HMONG SIGN CIM CHEEM
+ {0x16B3A, 0x16B3B, prAL, gcPo}, // [2] PAHAWH HMONG SIGN VOS THIAB..PAHAWH HMONG SIGN VOS FEEM
+ {0x16B3C, 0x16B3F, prAL, gcSo}, // [4] PAHAWH HMONG SIGN XYEEM NTXIV..PAHAWH HMONG SIGN XYEEM FAIB
+ {0x16B40, 0x16B43, prAL, gcLm}, // [4] PAHAWH HMONG SIGN VOS SEEV..PAHAWH HMONG SIGN IB YAM
+ {0x16B44, 0x16B44, prBA, gcPo}, // PAHAWH HMONG SIGN XAUS
+ {0x16B45, 0x16B45, prAL, gcSo}, // PAHAWH HMONG SIGN CIM TSOV ROG
+ {0x16B50, 0x16B59, prNU, gcNd}, // [10] PAHAWH HMONG DIGIT ZERO..PAHAWH HMONG DIGIT NINE
+ {0x16B5B, 0x16B61, prAL, gcNo}, // [7] PAHAWH HMONG NUMBER TENS..PAHAWH HMONG NUMBER TRILLIONS
+ {0x16B63, 0x16B77, prAL, gcLo}, // [21] PAHAWH HMONG SIGN VOS LUB..PAHAWH HMONG SIGN CIM NRES TOS
+ {0x16B7D, 0x16B8F, prAL, gcLo}, // [19] PAHAWH HMONG CLAN SIGN TSHEEJ..PAHAWH HMONG CLAN SIGN VWJ
+ {0x16E40, 0x16E7F, prAL, gcLC}, // [64] MEDEFAIDRIN CAPITAL LETTER M..MEDEFAIDRIN SMALL LETTER Y
+ {0x16E80, 0x16E96, prAL, gcNo}, // [23] MEDEFAIDRIN DIGIT ZERO..MEDEFAIDRIN DIGIT THREE ALTERNATE FORM
+ {0x16E97, 0x16E98, prBA, gcPo}, // [2] MEDEFAIDRIN COMMA..MEDEFAIDRIN FULL STOP
+ {0x16E99, 0x16E9A, prAL, gcPo}, // [2] MEDEFAIDRIN SYMBOL AIVA..MEDEFAIDRIN EXCLAMATION OH
+ {0x16F00, 0x16F4A, prAL, gcLo}, // [75] MIAO LETTER PA..MIAO LETTER RTE
+ {0x16F4F, 0x16F4F, prCM, gcMn}, // MIAO SIGN CONSONANT MODIFIER BAR
+ {0x16F50, 0x16F50, prAL, gcLo}, // MIAO LETTER NASALIZATION
+ {0x16F51, 0x16F87, prCM, gcMc}, // [55] MIAO SIGN ASPIRATION..MIAO VOWEL SIGN UI
+ {0x16F8F, 0x16F92, prCM, gcMn}, // [4] MIAO TONE RIGHT..MIAO TONE BELOW
+ {0x16F93, 0x16F9F, prAL, gcLm}, // [13] MIAO LETTER TONE-2..MIAO LETTER REFORMED TONE-8
+ {0x16FE0, 0x16FE1, prNS, gcLm}, // [2] TANGUT ITERATION MARK..NUSHU ITERATION MARK
+ {0x16FE2, 0x16FE2, prNS, gcPo}, // OLD CHINESE HOOK MARK
+ {0x16FE3, 0x16FE3, prNS, gcLm}, // OLD CHINESE ITERATION MARK
+ {0x16FE4, 0x16FE4, prGL, gcMn}, // KHITAN SMALL SCRIPT FILLER
+ {0x16FF0, 0x16FF1, prCM, gcMc}, // [2] VIETNAMESE ALTERNATE READING MARK CA..VIETNAMESE ALTERNATE READING MARK NHAY
+ {0x17000, 0x187F7, prID, gcLo}, // [6136] TANGUT IDEOGRAPH-17000..TANGUT IDEOGRAPH-187F7
+ {0x18800, 0x18AFF, prID, gcLo}, // [768] TANGUT COMPONENT-001..TANGUT COMPONENT-768
+ {0x18B00, 0x18CD5, prAL, gcLo}, // [470] KHITAN SMALL SCRIPT CHARACTER-18B00..KHITAN SMALL SCRIPT CHARACTER-18CD5
+ {0x18D00, 0x18D08, prID, gcLo}, // [9] TANGUT IDEOGRAPH-18D00..TANGUT IDEOGRAPH-18D08
+ {0x1AFF0, 0x1AFF3, prAL, gcLm}, // [4] KATAKANA LETTER MINNAN TONE-2..KATAKANA LETTER MINNAN TONE-5
+ {0x1AFF5, 0x1AFFB, prAL, gcLm}, // [7] KATAKANA LETTER MINNAN TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-5
+ {0x1AFFD, 0x1AFFE, prAL, gcLm}, // [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8
+ {0x1B000, 0x1B0FF, prID, gcLo}, // [256] KATAKANA LETTER ARCHAIC E..HENTAIGANA LETTER RE-2
+ {0x1B100, 0x1B122, prID, gcLo}, // [35] HENTAIGANA LETTER RE-3..KATAKANA LETTER ARCHAIC WU
+ {0x1B132, 0x1B132, prCJ, gcLo}, // HIRAGANA LETTER SMALL KO
+ {0x1B150, 0x1B152, prCJ, gcLo}, // [3] HIRAGANA LETTER SMALL WI..HIRAGANA LETTER SMALL WO
+ {0x1B155, 0x1B155, prCJ, gcLo}, // KATAKANA LETTER SMALL KO
+ {0x1B164, 0x1B167, prCJ, gcLo}, // [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N
+ {0x1B170, 0x1B2FB, prID, gcLo}, // [396] NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB
+ {0x1BC00, 0x1BC6A, prAL, gcLo}, // [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M
+ {0x1BC70, 0x1BC7C, prAL, gcLo}, // [13] DUPLOYAN AFFIX LEFT HORIZONTAL SECANT..DUPLOYAN AFFIX ATTACHED TANGENT HOOK
+ {0x1BC80, 0x1BC88, prAL, gcLo}, // [9] DUPLOYAN AFFIX HIGH ACUTE..DUPLOYAN AFFIX HIGH VERTICAL
+ {0x1BC90, 0x1BC99, prAL, gcLo}, // [10] DUPLOYAN AFFIX LOW ACUTE..DUPLOYAN AFFIX LOW ARROW
+ {0x1BC9C, 0x1BC9C, prAL, gcSo}, // DUPLOYAN SIGN O WITH CROSS
+ {0x1BC9D, 0x1BC9E, prCM, gcMn}, // [2] DUPLOYAN THICK LETTER SELECTOR..DUPLOYAN DOUBLE MARK
+ {0x1BC9F, 0x1BC9F, prBA, gcPo}, // DUPLOYAN PUNCTUATION CHINOOK FULL STOP
+ {0x1BCA0, 0x1BCA3, prCM, gcCf}, // [4] SHORTHAND FORMAT LETTER OVERLAP..SHORTHAND FORMAT UP STEP
+ {0x1CF00, 0x1CF2D, prCM, gcMn}, // [46] ZNAMENNY COMBINING MARK GORAZDO NIZKO S KRYZHEM ON LEFT..ZNAMENNY COMBINING MARK KRYZH ON LEFT
+ {0x1CF30, 0x1CF46, prCM, gcMn}, // [23] ZNAMENNY COMBINING TONAL RANGE MARK MRACHNO..ZNAMENNY PRIZNAK MODIFIER ROG
+ {0x1CF50, 0x1CFC3, prAL, gcSo}, // [116] ZNAMENNY NEUME KRYUK..ZNAMENNY NEUME PAUK
+ {0x1D000, 0x1D0F5, prAL, gcSo}, // [246] BYZANTINE MUSICAL SYMBOL PSILI..BYZANTINE MUSICAL SYMBOL GORGON NEO KATO
+ {0x1D100, 0x1D126, prAL, gcSo}, // [39] MUSICAL SYMBOL SINGLE BARLINE..MUSICAL SYMBOL DRUM CLEF-2
+ {0x1D129, 0x1D164, prAL, gcSo}, // [60] MUSICAL SYMBOL MULTIPLE MEASURE REST..MUSICAL SYMBOL ONE HUNDRED TWENTY-EIGHTH NOTE
+ {0x1D165, 0x1D166, prCM, gcMc}, // [2] MUSICAL SYMBOL COMBINING STEM..MUSICAL SYMBOL COMBINING SPRECHGESANG STEM
+ {0x1D167, 0x1D169, prCM, gcMn}, // [3] MUSICAL SYMBOL COMBINING TREMOLO-1..MUSICAL SYMBOL COMBINING TREMOLO-3
+ {0x1D16A, 0x1D16C, prAL, gcSo}, // [3] MUSICAL SYMBOL FINGERED TREMOLO-1..MUSICAL SYMBOL FINGERED TREMOLO-3
+ {0x1D16D, 0x1D172, prCM, gcMc}, // [6] MUSICAL SYMBOL COMBINING AUGMENTATION DOT..MUSICAL SYMBOL COMBINING FLAG-5
+ {0x1D173, 0x1D17A, prCM, gcCf}, // [8] MUSICAL SYMBOL BEGIN BEAM..MUSICAL SYMBOL END PHRASE
+ {0x1D17B, 0x1D182, prCM, gcMn}, // [8] MUSICAL SYMBOL COMBINING ACCENT..MUSICAL SYMBOL COMBINING LOURE
+ {0x1D183, 0x1D184, prAL, gcSo}, // [2] MUSICAL SYMBOL ARPEGGIATO UP..MUSICAL SYMBOL ARPEGGIATO DOWN
+ {0x1D185, 0x1D18B, prCM, gcMn}, // [7] MUSICAL SYMBOL COMBINING DOIT..MUSICAL SYMBOL COMBINING TRIPLE TONGUE
+ {0x1D18C, 0x1D1A9, prAL, gcSo}, // [30] MUSICAL SYMBOL RINFORZANDO..MUSICAL SYMBOL DEGREE SLASH
+ {0x1D1AA, 0x1D1AD, prCM, gcMn}, // [4] MUSICAL SYMBOL COMBINING DOWN BOW..MUSICAL SYMBOL COMBINING SNAP PIZZICATO
+ {0x1D1AE, 0x1D1EA, prAL, gcSo}, // [61] MUSICAL SYMBOL PEDAL MARK..MUSICAL SYMBOL KORON
+ {0x1D200, 0x1D241, prAL, gcSo}, // [66] GREEK VOCAL NOTATION SYMBOL-1..GREEK INSTRUMENTAL NOTATION SYMBOL-54
+ {0x1D242, 0x1D244, prCM, gcMn}, // [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME
+ {0x1D245, 0x1D245, prAL, gcSo}, // GREEK MUSICAL LEIMMA
+ {0x1D2C0, 0x1D2D3, prAL, gcNo}, // [20] KAKTOVIK NUMERAL ZERO..KAKTOVIK NUMERAL NINETEEN
+ {0x1D2E0, 0x1D2F3, prAL, gcNo}, // [20] MAYAN NUMERAL ZERO..MAYAN NUMERAL NINETEEN
+ {0x1D300, 0x1D356, prAL, gcSo}, // [87] MONOGRAM FOR EARTH..TETRAGRAM FOR FOSTERING
+ {0x1D360, 0x1D378, prAL, gcNo}, // [25] COUNTING ROD UNIT DIGIT ONE..TALLY MARK FIVE
+ {0x1D400, 0x1D454, prAL, gcLC}, // [85] MATHEMATICAL BOLD CAPITAL A..MATHEMATICAL ITALIC SMALL G
+ {0x1D456, 0x1D49C, prAL, gcLC}, // [71] MATHEMATICAL ITALIC SMALL I..MATHEMATICAL SCRIPT CAPITAL A
+ {0x1D49E, 0x1D49F, prAL, gcLu}, // [2] MATHEMATICAL SCRIPT CAPITAL C..MATHEMATICAL SCRIPT CAPITAL D
+ {0x1D4A2, 0x1D4A2, prAL, gcLu}, // MATHEMATICAL SCRIPT CAPITAL G
+ {0x1D4A5, 0x1D4A6, prAL, gcLu}, // [2] MATHEMATICAL SCRIPT CAPITAL J..MATHEMATICAL SCRIPT CAPITAL K
+ {0x1D4A9, 0x1D4AC, prAL, gcLu}, // [4] MATHEMATICAL SCRIPT CAPITAL N..MATHEMATICAL SCRIPT CAPITAL Q
+ {0x1D4AE, 0x1D4B9, prAL, gcLC}, // [12] MATHEMATICAL SCRIPT CAPITAL S..MATHEMATICAL SCRIPT SMALL D
+ {0x1D4BB, 0x1D4BB, prAL, gcLl}, // MATHEMATICAL SCRIPT SMALL F
+ {0x1D4BD, 0x1D4C3, prAL, gcLl}, // [7] MATHEMATICAL SCRIPT SMALL H..MATHEMATICAL SCRIPT SMALL N
+ {0x1D4C5, 0x1D505, prAL, gcLC}, // [65] MATHEMATICAL SCRIPT SMALL P..MATHEMATICAL FRAKTUR CAPITAL B
+ {0x1D507, 0x1D50A, prAL, gcLu}, // [4] MATHEMATICAL FRAKTUR CAPITAL D..MATHEMATICAL FRAKTUR CAPITAL G
+ {0x1D50D, 0x1D514, prAL, gcLu}, // [8] MATHEMATICAL FRAKTUR CAPITAL J..MATHEMATICAL FRAKTUR CAPITAL Q
+ {0x1D516, 0x1D51C, prAL, gcLu}, // [7] MATHEMATICAL FRAKTUR CAPITAL S..MATHEMATICAL FRAKTUR CAPITAL Y
+ {0x1D51E, 0x1D539, prAL, gcLC}, // [28] MATHEMATICAL FRAKTUR SMALL A..MATHEMATICAL DOUBLE-STRUCK CAPITAL B
+ {0x1D53B, 0x1D53E, prAL, gcLu}, // [4] MATHEMATICAL DOUBLE-STRUCK CAPITAL D..MATHEMATICAL DOUBLE-STRUCK CAPITAL G
+ {0x1D540, 0x1D544, prAL, gcLu}, // [5] MATHEMATICAL DOUBLE-STRUCK CAPITAL I..MATHEMATICAL DOUBLE-STRUCK CAPITAL M
+ {0x1D546, 0x1D546, prAL, gcLu}, // MATHEMATICAL DOUBLE-STRUCK CAPITAL O
+ {0x1D54A, 0x1D550, prAL, gcLu}, // [7] MATHEMATICAL DOUBLE-STRUCK CAPITAL S..MATHEMATICAL DOUBLE-STRUCK CAPITAL Y
+ {0x1D552, 0x1D6A5, prAL, gcLC}, // [340] MATHEMATICAL DOUBLE-STRUCK SMALL A..MATHEMATICAL ITALIC SMALL DOTLESS J
+ {0x1D6A8, 0x1D6C0, prAL, gcLu}, // [25] MATHEMATICAL BOLD CAPITAL ALPHA..MATHEMATICAL BOLD CAPITAL OMEGA
+ {0x1D6C1, 0x1D6C1, prAL, gcSm}, // MATHEMATICAL BOLD NABLA
+ {0x1D6C2, 0x1D6DA, prAL, gcLl}, // [25] MATHEMATICAL BOLD SMALL ALPHA..MATHEMATICAL BOLD SMALL OMEGA
+ {0x1D6DB, 0x1D6DB, prAL, gcSm}, // MATHEMATICAL BOLD PARTIAL DIFFERENTIAL
+ {0x1D6DC, 0x1D6FA, prAL, gcLC}, // [31] MATHEMATICAL BOLD EPSILON SYMBOL..MATHEMATICAL ITALIC CAPITAL OMEGA
+ {0x1D6FB, 0x1D6FB, prAL, gcSm}, // MATHEMATICAL ITALIC NABLA
+ {0x1D6FC, 0x1D714, prAL, gcLl}, // [25] MATHEMATICAL ITALIC SMALL ALPHA..MATHEMATICAL ITALIC SMALL OMEGA
+ {0x1D715, 0x1D715, prAL, gcSm}, // MATHEMATICAL ITALIC PARTIAL DIFFERENTIAL
+ {0x1D716, 0x1D734, prAL, gcLC}, // [31] MATHEMATICAL ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD ITALIC CAPITAL OMEGA
+ {0x1D735, 0x1D735, prAL, gcSm}, // MATHEMATICAL BOLD ITALIC NABLA
+ {0x1D736, 0x1D74E, prAL, gcLl}, // [25] MATHEMATICAL BOLD ITALIC SMALL ALPHA..MATHEMATICAL BOLD ITALIC SMALL OMEGA
+ {0x1D74F, 0x1D74F, prAL, gcSm}, // MATHEMATICAL BOLD ITALIC PARTIAL DIFFERENTIAL
+ {0x1D750, 0x1D76E, prAL, gcLC}, // [31] MATHEMATICAL BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD CAPITAL OMEGA
+ {0x1D76F, 0x1D76F, prAL, gcSm}, // MATHEMATICAL SANS-SERIF BOLD NABLA
+ {0x1D770, 0x1D788, prAL, gcLl}, // [25] MATHEMATICAL SANS-SERIF BOLD SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD SMALL OMEGA
+ {0x1D789, 0x1D789, prAL, gcSm}, // MATHEMATICAL SANS-SERIF BOLD PARTIAL DIFFERENTIAL
+ {0x1D78A, 0x1D7A8, prAL, gcLC}, // [31] MATHEMATICAL SANS-SERIF BOLD EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMEGA
+ {0x1D7A9, 0x1D7A9, prAL, gcSm}, // MATHEMATICAL SANS-SERIF BOLD ITALIC NABLA
+ {0x1D7AA, 0x1D7C2, prAL, gcLl}, // [25] MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMEGA
+ {0x1D7C3, 0x1D7C3, prAL, gcSm}, // MATHEMATICAL SANS-SERIF BOLD ITALIC PARTIAL DIFFERENTIAL
+ {0x1D7C4, 0x1D7CB, prAL, gcLC}, // [8] MATHEMATICAL SANS-SERIF BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD SMALL DIGAMMA
+ {0x1D7CE, 0x1D7FF, prNU, gcNd}, // [50] MATHEMATICAL BOLD DIGIT ZERO..MATHEMATICAL MONOSPACE DIGIT NINE
+ {0x1D800, 0x1D9FF, prAL, gcSo}, // [512] SIGNWRITING HAND-FIST INDEX..SIGNWRITING HEAD
+ {0x1DA00, 0x1DA36, prCM, gcMn}, // [55] SIGNWRITING HEAD RIM..SIGNWRITING AIR SUCKING IN
+ {0x1DA37, 0x1DA3A, prAL, gcSo}, // [4] SIGNWRITING AIR BLOW SMALL ROTATIONS..SIGNWRITING BREATH EXHALE
+ {0x1DA3B, 0x1DA6C, prCM, gcMn}, // [50] SIGNWRITING MOUTH CLOSED NEUTRAL..SIGNWRITING EXCITEMENT
+ {0x1DA6D, 0x1DA74, prAL, gcSo}, // [8] SIGNWRITING SHOULDER HIP SPINE..SIGNWRITING TORSO-FLOORPLANE TWISTING
+ {0x1DA75, 0x1DA75, prCM, gcMn}, // SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS
+ {0x1DA76, 0x1DA83, prAL, gcSo}, // [14] SIGNWRITING LIMB COMBINATION..SIGNWRITING LOCATION DEPTH
+ {0x1DA84, 0x1DA84, prCM, gcMn}, // SIGNWRITING LOCATION HEAD NECK
+ {0x1DA85, 0x1DA86, prAL, gcSo}, // [2] SIGNWRITING LOCATION TORSO..SIGNWRITING LOCATION LIMBS DIGITS
+ {0x1DA87, 0x1DA8A, prBA, gcPo}, // [4] SIGNWRITING COMMA..SIGNWRITING COLON
+ {0x1DA8B, 0x1DA8B, prAL, gcPo}, // SIGNWRITING PARENTHESIS
+ {0x1DA9B, 0x1DA9F, prCM, gcMn}, // [5] SIGNWRITING FILL MODIFIER-2..SIGNWRITING FILL MODIFIER-6
+ {0x1DAA1, 0x1DAAF, prCM, gcMn}, // [15] SIGNWRITING ROTATION MODIFIER-2..SIGNWRITING ROTATION MODIFIER-16
+ {0x1DF00, 0x1DF09, prAL, gcLl}, // [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK
+ {0x1DF0A, 0x1DF0A, prAL, gcLo}, // LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK
+ {0x1DF0B, 0x1DF1E, prAL, gcLl}, // [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL
+ {0x1DF25, 0x1DF2A, prAL, gcLl}, // [6] LATIN SMALL LETTER D WITH MID-HEIGHT LEFT HOOK..LATIN SMALL LETTER T WITH MID-HEIGHT LEFT HOOK
+ {0x1E000, 0x1E006, prCM, gcMn}, // [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
+ {0x1E008, 0x1E018, prCM, gcMn}, // [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
+ {0x1E01B, 0x1E021, prCM, gcMn}, // [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
+ {0x1E023, 0x1E024, prCM, gcMn}, // [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
+ {0x1E026, 0x1E02A, prCM, gcMn}, // [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E030, 0x1E06D, prAL, gcLm}, // [62] MODIFIER LETTER CYRILLIC SMALL A..MODIFIER LETTER CYRILLIC SMALL STRAIGHT U WITH STROKE
+ {0x1E08F, 0x1E08F, prCM, gcMn}, // COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
+ {0x1E100, 0x1E12C, prAL, gcLo}, // [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W
+ {0x1E130, 0x1E136, prCM, gcMn}, // [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
+ {0x1E137, 0x1E13D, prAL, gcLm}, // [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER
+ {0x1E140, 0x1E149, prNU, gcNd}, // [10] NYIAKENG PUACHUE HMONG DIGIT ZERO..NYIAKENG PUACHUE HMONG DIGIT NINE
+ {0x1E14E, 0x1E14E, prAL, gcLo}, // NYIAKENG PUACHUE HMONG LOGOGRAM NYAJ
+ {0x1E14F, 0x1E14F, prAL, gcSo}, // NYIAKENG PUACHUE HMONG CIRCLED CA
+ {0x1E290, 0x1E2AD, prAL, gcLo}, // [30] TOTO LETTER PA..TOTO LETTER A
+ {0x1E2AE, 0x1E2AE, prCM, gcMn}, // TOTO SIGN RISING TONE
+ {0x1E2C0, 0x1E2EB, prAL, gcLo}, // [44] WANCHO LETTER AA..WANCHO LETTER YIH
+ {0x1E2EC, 0x1E2EF, prCM, gcMn}, // [4] WANCHO TONE TUP..WANCHO TONE KOINI
+ {0x1E2F0, 0x1E2F9, prNU, gcNd}, // [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE
+ {0x1E2FF, 0x1E2FF, prPR, gcSc}, // WANCHO NGUN SIGN
+ {0x1E4D0, 0x1E4EA, prAL, gcLo}, // [27] NAG MUNDARI LETTER O..NAG MUNDARI LETTER ELL
+ {0x1E4EB, 0x1E4EB, prAL, gcLm}, // NAG MUNDARI SIGN OJOD
+ {0x1E4EC, 0x1E4EF, prCM, gcMn}, // [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH
+ {0x1E4F0, 0x1E4F9, prNU, gcNd}, // [10] NAG MUNDARI DIGIT ZERO..NAG MUNDARI DIGIT NINE
+ {0x1E7E0, 0x1E7E6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO
+ {0x1E7E8, 0x1E7EB, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE
+ {0x1E7ED, 0x1E7EE, prAL, gcLo}, // [2] ETHIOPIC SYLLABLE GURAGE MWI..ETHIOPIC SYLLABLE GURAGE MWEE
+ {0x1E7F0, 0x1E7FE, prAL, gcLo}, // [15] ETHIOPIC SYLLABLE GURAGE QWI..ETHIOPIC SYLLABLE GURAGE PWEE
+ {0x1E800, 0x1E8C4, prAL, gcLo}, // [197] MENDE KIKAKUI SYLLABLE M001 KI..MENDE KIKAKUI SYLLABLE M060 NYON
+ {0x1E8C7, 0x1E8CF, prAL, gcNo}, // [9] MENDE KIKAKUI DIGIT ONE..MENDE KIKAKUI DIGIT NINE
+ {0x1E8D0, 0x1E8D6, prCM, gcMn}, // [7] MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS
+ {0x1E900, 0x1E943, prAL, gcLC}, // [68] ADLAM CAPITAL LETTER ALIF..ADLAM SMALL LETTER SHA
+ {0x1E944, 0x1E94A, prCM, gcMn}, // [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA
+ {0x1E94B, 0x1E94B, prAL, gcLm}, // ADLAM NASALIZATION MARK
+ {0x1E950, 0x1E959, prNU, gcNd}, // [10] ADLAM DIGIT ZERO..ADLAM DIGIT NINE
+ {0x1E95E, 0x1E95F, prOP, gcPo}, // [2] ADLAM INITIAL EXCLAMATION MARK..ADLAM INITIAL QUESTION MARK
+ {0x1EC71, 0x1ECAB, prAL, gcNo}, // [59] INDIC SIYAQ NUMBER ONE..INDIC SIYAQ NUMBER PREFIXED NINE
+ {0x1ECAC, 0x1ECAC, prPO, gcSo}, // INDIC SIYAQ PLACEHOLDER
+ {0x1ECAD, 0x1ECAF, prAL, gcNo}, // [3] INDIC SIYAQ FRACTION ONE QUARTER..INDIC SIYAQ FRACTION THREE QUARTERS
+ {0x1ECB0, 0x1ECB0, prPO, gcSc}, // INDIC SIYAQ RUPEE MARK
+ {0x1ECB1, 0x1ECB4, prAL, gcNo}, // [4] INDIC SIYAQ NUMBER ALTERNATE ONE..INDIC SIYAQ ALTERNATE LAKH MARK
+ {0x1ED01, 0x1ED2D, prAL, gcNo}, // [45] OTTOMAN SIYAQ NUMBER ONE..OTTOMAN SIYAQ NUMBER NINETY THOUSAND
+ {0x1ED2E, 0x1ED2E, prAL, gcSo}, // OTTOMAN SIYAQ MARRATAN
+ {0x1ED2F, 0x1ED3D, prAL, gcNo}, // [15] OTTOMAN SIYAQ ALTERNATE NUMBER TWO..OTTOMAN SIYAQ FRACTION ONE SIXTH
+ {0x1EE00, 0x1EE03, prAL, gcLo}, // [4] ARABIC MATHEMATICAL ALEF..ARABIC MATHEMATICAL DAL
+ {0x1EE05, 0x1EE1F, prAL, gcLo}, // [27] ARABIC MATHEMATICAL WAW..ARABIC MATHEMATICAL DOTLESS QAF
+ {0x1EE21, 0x1EE22, prAL, gcLo}, // [2] ARABIC MATHEMATICAL INITIAL BEH..ARABIC MATHEMATICAL INITIAL JEEM
+ {0x1EE24, 0x1EE24, prAL, gcLo}, // ARABIC MATHEMATICAL INITIAL HEH
+ {0x1EE27, 0x1EE27, prAL, gcLo}, // ARABIC MATHEMATICAL INITIAL HAH
+ {0x1EE29, 0x1EE32, prAL, gcLo}, // [10] ARABIC MATHEMATICAL INITIAL YEH..ARABIC MATHEMATICAL INITIAL QAF
+ {0x1EE34, 0x1EE37, prAL, gcLo}, // [4] ARABIC MATHEMATICAL INITIAL SHEEN..ARABIC MATHEMATICAL INITIAL KHAH
+ {0x1EE39, 0x1EE39, prAL, gcLo}, // ARABIC MATHEMATICAL INITIAL DAD
+ {0x1EE3B, 0x1EE3B, prAL, gcLo}, // ARABIC MATHEMATICAL INITIAL GHAIN
+ {0x1EE42, 0x1EE42, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED JEEM
+ {0x1EE47, 0x1EE47, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED HAH
+ {0x1EE49, 0x1EE49, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED YEH
+ {0x1EE4B, 0x1EE4B, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED LAM
+ {0x1EE4D, 0x1EE4F, prAL, gcLo}, // [3] ARABIC MATHEMATICAL TAILED NOON..ARABIC MATHEMATICAL TAILED AIN
+ {0x1EE51, 0x1EE52, prAL, gcLo}, // [2] ARABIC MATHEMATICAL TAILED SAD..ARABIC MATHEMATICAL TAILED QAF
+ {0x1EE54, 0x1EE54, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED SHEEN
+ {0x1EE57, 0x1EE57, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED KHAH
+ {0x1EE59, 0x1EE59, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED DAD
+ {0x1EE5B, 0x1EE5B, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED GHAIN
+ {0x1EE5D, 0x1EE5D, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED DOTLESS NOON
+ {0x1EE5F, 0x1EE5F, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED DOTLESS QAF
+ {0x1EE61, 0x1EE62, prAL, gcLo}, // [2] ARABIC MATHEMATICAL STRETCHED BEH..ARABIC MATHEMATICAL STRETCHED JEEM
+ {0x1EE64, 0x1EE64, prAL, gcLo}, // ARABIC MATHEMATICAL STRETCHED HEH
+ {0x1EE67, 0x1EE6A, prAL, gcLo}, // [4] ARABIC MATHEMATICAL STRETCHED HAH..ARABIC MATHEMATICAL STRETCHED KAF
+ {0x1EE6C, 0x1EE72, prAL, gcLo}, // [7] ARABIC MATHEMATICAL STRETCHED MEEM..ARABIC MATHEMATICAL STRETCHED QAF
+ {0x1EE74, 0x1EE77, prAL, gcLo}, // [4] ARABIC MATHEMATICAL STRETCHED SHEEN..ARABIC MATHEMATICAL STRETCHED KHAH
+ {0x1EE79, 0x1EE7C, prAL, gcLo}, // [4] ARABIC MATHEMATICAL STRETCHED DAD..ARABIC MATHEMATICAL STRETCHED DOTLESS BEH
+ {0x1EE7E, 0x1EE7E, prAL, gcLo}, // ARABIC MATHEMATICAL STRETCHED DOTLESS FEH
+ {0x1EE80, 0x1EE89, prAL, gcLo}, // [10] ARABIC MATHEMATICAL LOOPED ALEF..ARABIC MATHEMATICAL LOOPED YEH
+ {0x1EE8B, 0x1EE9B, prAL, gcLo}, // [17] ARABIC MATHEMATICAL LOOPED LAM..ARABIC MATHEMATICAL LOOPED GHAIN
+ {0x1EEA1, 0x1EEA3, prAL, gcLo}, // [3] ARABIC MATHEMATICAL DOUBLE-STRUCK BEH..ARABIC MATHEMATICAL DOUBLE-STRUCK DAL
+ {0x1EEA5, 0x1EEA9, prAL, gcLo}, // [5] ARABIC MATHEMATICAL DOUBLE-STRUCK WAW..ARABIC MATHEMATICAL DOUBLE-STRUCK YEH
+ {0x1EEAB, 0x1EEBB, prAL, gcLo}, // [17] ARABIC MATHEMATICAL DOUBLE-STRUCK LAM..ARABIC MATHEMATICAL DOUBLE-STRUCK GHAIN
+ {0x1EEF0, 0x1EEF1, prAL, gcSm}, // [2] ARABIC MATHEMATICAL OPERATOR MEEM WITH HAH WITH TATWEEL..ARABIC MATHEMATICAL OPERATOR HAH WITH DAL
+ {0x1F000, 0x1F02B, prID, gcSo}, // [44] MAHJONG TILE EAST WIND..MAHJONG TILE BACK
+ {0x1F02C, 0x1F02F, prID, gcCn}, // [4] ..
+ {0x1F030, 0x1F093, prID, gcSo}, // [100] DOMINO TILE HORIZONTAL BACK..DOMINO TILE VERTICAL-06-06
+ {0x1F094, 0x1F09F, prID, gcCn}, // [12] ..
+ {0x1F0A0, 0x1F0AE, prID, gcSo}, // [15] PLAYING CARD BACK..PLAYING CARD KING OF SPADES
+ {0x1F0AF, 0x1F0B0, prID, gcCn}, // [2] ..
+ {0x1F0B1, 0x1F0BF, prID, gcSo}, // [15] PLAYING CARD ACE OF HEARTS..PLAYING CARD RED JOKER
+ {0x1F0C0, 0x1F0C0, prID, gcCn}, //
+ {0x1F0C1, 0x1F0CF, prID, gcSo}, // [15] PLAYING CARD ACE OF DIAMONDS..PLAYING CARD BLACK JOKER
+ {0x1F0D0, 0x1F0D0, prID, gcCn}, //
+ {0x1F0D1, 0x1F0F5, prID, gcSo}, // [37] PLAYING CARD ACE OF CLUBS..PLAYING CARD TRUMP-21
+ {0x1F0F6, 0x1F0FF, prID, gcCn}, // [10] ..
+ {0x1F100, 0x1F10C, prAI, gcNo}, // [13] DIGIT ZERO FULL STOP..DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT ZERO
+ {0x1F10D, 0x1F10F, prID, gcSo}, // [3] CIRCLED ZERO WITH SLASH..CIRCLED DOLLAR SIGN WITH OVERLAID BACKSLASH
+ {0x1F110, 0x1F12D, prAI, gcSo}, // [30] PARENTHESIZED LATIN CAPITAL LETTER A..CIRCLED CD
+ {0x1F12E, 0x1F12F, prAL, gcSo}, // [2] CIRCLED WZ..COPYLEFT SYMBOL
+ {0x1F130, 0x1F169, prAI, gcSo}, // [58] SQUARED LATIN CAPITAL LETTER A..NEGATIVE CIRCLED LATIN CAPITAL LETTER Z
+ {0x1F16A, 0x1F16C, prAL, gcSo}, // [3] RAISED MC SIGN..RAISED MR SIGN
+ {0x1F16D, 0x1F16F, prID, gcSo}, // [3] CIRCLED CC..CIRCLED HUMAN FIGURE
+ {0x1F170, 0x1F1AC, prAI, gcSo}, // [61] NEGATIVE SQUARED LATIN CAPITAL LETTER A..SQUARED VOD
+ {0x1F1AD, 0x1F1AD, prID, gcSo}, // MASK WORK SYMBOL
+ {0x1F1AE, 0x1F1E5, prID, gcCn}, // [56] ..
+ {0x1F1E6, 0x1F1FF, prRI, gcSo}, // [26] REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z
+ {0x1F200, 0x1F202, prID, gcSo}, // [3] SQUARE HIRAGANA HOKA..SQUARED KATAKANA SA
+ {0x1F203, 0x1F20F, prID, gcCn}, // [13] ..
+ {0x1F210, 0x1F23B, prID, gcSo}, // [44] SQUARED CJK UNIFIED IDEOGRAPH-624B..SQUARED CJK UNIFIED IDEOGRAPH-914D
+ {0x1F23C, 0x1F23F, prID, gcCn}, // [4] ..
+ {0x1F240, 0x1F248, prID, gcSo}, // [9] TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-672C..TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-6557
+ {0x1F249, 0x1F24F, prID, gcCn}, // [7] ..
+ {0x1F250, 0x1F251, prID, gcSo}, // [2] CIRCLED IDEOGRAPH ADVANTAGE..CIRCLED IDEOGRAPH ACCEPT
+ {0x1F252, 0x1F25F, prID, gcCn}, // [14] ..
+ {0x1F260, 0x1F265, prID, gcSo}, // [6] ROUNDED SYMBOL FOR FU..ROUNDED SYMBOL FOR CAI
+ {0x1F266, 0x1F2FF, prID, gcCn}, // [154] ..
+ {0x1F300, 0x1F384, prID, gcSo}, // [133] CYCLONE..CHRISTMAS TREE
+ {0x1F385, 0x1F385, prEB, gcSo}, // FATHER CHRISTMAS
+ {0x1F386, 0x1F39B, prID, gcSo}, // [22] FIREWORKS..CONTROL KNOBS
+ {0x1F39C, 0x1F39D, prAL, gcSo}, // [2] BEAMED ASCENDING MUSICAL NOTES..BEAMED DESCENDING MUSICAL NOTES
+ {0x1F39E, 0x1F3B4, prID, gcSo}, // [23] FILM FRAMES..FLOWER PLAYING CARDS
+ {0x1F3B5, 0x1F3B6, prAL, gcSo}, // [2] MUSICAL NOTE..MULTIPLE MUSICAL NOTES
+ {0x1F3B7, 0x1F3BB, prID, gcSo}, // [5] SAXOPHONE..VIOLIN
+ {0x1F3BC, 0x1F3BC, prAL, gcSo}, // MUSICAL SCORE
+ {0x1F3BD, 0x1F3C1, prID, gcSo}, // [5] RUNNING SHIRT WITH SASH..CHEQUERED FLAG
+ {0x1F3C2, 0x1F3C4, prEB, gcSo}, // [3] SNOWBOARDER..SURFER
+ {0x1F3C5, 0x1F3C6, prID, gcSo}, // [2] SPORTS MEDAL..TROPHY
+ {0x1F3C7, 0x1F3C7, prEB, gcSo}, // HORSE RACING
+ {0x1F3C8, 0x1F3C9, prID, gcSo}, // [2] AMERICAN FOOTBALL..RUGBY FOOTBALL
+ {0x1F3CA, 0x1F3CC, prEB, gcSo}, // [3] SWIMMER..GOLFER
+ {0x1F3CD, 0x1F3FA, prID, gcSo}, // [46] RACING MOTORCYCLE..AMPHORA
+ {0x1F3FB, 0x1F3FF, prEM, gcSk}, // [5] EMOJI MODIFIER FITZPATRICK TYPE-1-2..EMOJI MODIFIER FITZPATRICK TYPE-6
+ {0x1F400, 0x1F441, prID, gcSo}, // [66] RAT..EYE
+ {0x1F442, 0x1F443, prEB, gcSo}, // [2] EAR..NOSE
+ {0x1F444, 0x1F445, prID, gcSo}, // [2] MOUTH..TONGUE
+ {0x1F446, 0x1F450, prEB, gcSo}, // [11] WHITE UP POINTING BACKHAND INDEX..OPEN HANDS SIGN
+ {0x1F451, 0x1F465, prID, gcSo}, // [21] CROWN..BUSTS IN SILHOUETTE
+ {0x1F466, 0x1F478, prEB, gcSo}, // [19] BOY..PRINCESS
+ {0x1F479, 0x1F47B, prID, gcSo}, // [3] JAPANESE OGRE..GHOST
+ {0x1F47C, 0x1F47C, prEB, gcSo}, // BABY ANGEL
+ {0x1F47D, 0x1F480, prID, gcSo}, // [4] EXTRATERRESTRIAL ALIEN..SKULL
+ {0x1F481, 0x1F483, prEB, gcSo}, // [3] INFORMATION DESK PERSON..DANCER
+ {0x1F484, 0x1F484, prID, gcSo}, // LIPSTICK
+ {0x1F485, 0x1F487, prEB, gcSo}, // [3] NAIL POLISH..HAIRCUT
+ {0x1F488, 0x1F48E, prID, gcSo}, // [7] BARBER POLE..GEM STONE
+ {0x1F48F, 0x1F48F, prEB, gcSo}, // KISS
+ {0x1F490, 0x1F490, prID, gcSo}, // BOUQUET
+ {0x1F491, 0x1F491, prEB, gcSo}, // COUPLE WITH HEART
+ {0x1F492, 0x1F49F, prID, gcSo}, // [14] WEDDING..HEART DECORATION
+ {0x1F4A0, 0x1F4A0, prAL, gcSo}, // DIAMOND SHAPE WITH A DOT INSIDE
+ {0x1F4A1, 0x1F4A1, prID, gcSo}, // ELECTRIC LIGHT BULB
+ {0x1F4A2, 0x1F4A2, prAL, gcSo}, // ANGER SYMBOL
+ {0x1F4A3, 0x1F4A3, prID, gcSo}, // BOMB
+ {0x1F4A4, 0x1F4A4, prAL, gcSo}, // SLEEPING SYMBOL
+ {0x1F4A5, 0x1F4A9, prID, gcSo}, // [5] COLLISION SYMBOL..PILE OF POO
+ {0x1F4AA, 0x1F4AA, prEB, gcSo}, // FLEXED BICEPS
+ {0x1F4AB, 0x1F4AE, prID, gcSo}, // [4] DIZZY SYMBOL..WHITE FLOWER
+ {0x1F4AF, 0x1F4AF, prAL, gcSo}, // HUNDRED POINTS SYMBOL
+ {0x1F4B0, 0x1F4B0, prID, gcSo}, // MONEY BAG
+ {0x1F4B1, 0x1F4B2, prAL, gcSo}, // [2] CURRENCY EXCHANGE..HEAVY DOLLAR SIGN
+ {0x1F4B3, 0x1F4FF, prID, gcSo}, // [77] CREDIT CARD..PRAYER BEADS
+ {0x1F500, 0x1F506, prAL, gcSo}, // [7] TWISTED RIGHTWARDS ARROWS..HIGH BRIGHTNESS SYMBOL
+ {0x1F507, 0x1F516, prID, gcSo}, // [16] SPEAKER WITH CANCELLATION STROKE..BOOKMARK
+ {0x1F517, 0x1F524, prAL, gcSo}, // [14] LINK SYMBOL..INPUT SYMBOL FOR LATIN LETTERS
+ {0x1F525, 0x1F531, prID, gcSo}, // [13] FIRE..TRIDENT EMBLEM
+ {0x1F532, 0x1F549, prAL, gcSo}, // [24] BLACK SQUARE BUTTON..OM SYMBOL
+ {0x1F54A, 0x1F573, prID, gcSo}, // [42] DOVE OF PEACE..HOLE
+ {0x1F574, 0x1F575, prEB, gcSo}, // [2] MAN IN BUSINESS SUIT LEVITATING..SLEUTH OR SPY
+ {0x1F576, 0x1F579, prID, gcSo}, // [4] DARK SUNGLASSES..JOYSTICK
+ {0x1F57A, 0x1F57A, prEB, gcSo}, // MAN DANCING
+ {0x1F57B, 0x1F58F, prID, gcSo}, // [21] LEFT HAND TELEPHONE RECEIVER..TURNED OK HAND SIGN
+ {0x1F590, 0x1F590, prEB, gcSo}, // RAISED HAND WITH FINGERS SPLAYED
+ {0x1F591, 0x1F594, prID, gcSo}, // [4] REVERSED RAISED HAND WITH FINGERS SPLAYED..REVERSED VICTORY HAND
+ {0x1F595, 0x1F596, prEB, gcSo}, // [2] REVERSED HAND WITH MIDDLE FINGER EXTENDED..RAISED HAND WITH PART BETWEEN MIDDLE AND RING FINGERS
+ {0x1F597, 0x1F5D3, prID, gcSo}, // [61] WHITE DOWN POINTING LEFT HAND INDEX..SPIRAL CALENDAR PAD
+ {0x1F5D4, 0x1F5DB, prAL, gcSo}, // [8] DESKTOP WINDOW..DECREASE FONT SIZE SYMBOL
+ {0x1F5DC, 0x1F5F3, prID, gcSo}, // [24] COMPRESSION..BALLOT BOX WITH BALLOT
+ {0x1F5F4, 0x1F5F9, prAL, gcSo}, // [6] BALLOT SCRIPT X..BALLOT BOX WITH BOLD CHECK
+ {0x1F5FA, 0x1F5FF, prID, gcSo}, // [6] WORLD MAP..MOYAI
+ {0x1F600, 0x1F644, prID, gcSo}, // [69] GRINNING FACE..FACE WITH ROLLING EYES
+ {0x1F645, 0x1F647, prEB, gcSo}, // [3] FACE WITH NO GOOD GESTURE..PERSON BOWING DEEPLY
+ {0x1F648, 0x1F64A, prID, gcSo}, // [3] SEE-NO-EVIL MONKEY..SPEAK-NO-EVIL MONKEY
+ {0x1F64B, 0x1F64F, prEB, gcSo}, // [5] HAPPY PERSON RAISING ONE HAND..PERSON WITH FOLDED HANDS
+ {0x1F650, 0x1F675, prAL, gcSo}, // [38] NORTH WEST POINTING LEAF..SWASH AMPERSAND ORNAMENT
+ {0x1F676, 0x1F678, prQU, gcSo}, // [3] SANS-SERIF HEAVY DOUBLE TURNED COMMA QUOTATION MARK ORNAMENT..SANS-SERIF HEAVY LOW DOUBLE COMMA QUOTATION MARK ORNAMENT
+ {0x1F679, 0x1F67B, prNS, gcSo}, // [3] HEAVY INTERROBANG ORNAMENT..HEAVY SANS-SERIF INTERROBANG ORNAMENT
+ {0x1F67C, 0x1F67F, prAL, gcSo}, // [4] VERY HEAVY SOLIDUS..REVERSE CHECKER BOARD
+ {0x1F680, 0x1F6A2, prID, gcSo}, // [35] ROCKET..SHIP
+ {0x1F6A3, 0x1F6A3, prEB, gcSo}, // ROWBOAT
+ {0x1F6A4, 0x1F6B3, prID, gcSo}, // [16] SPEEDBOAT..NO BICYCLES
+ {0x1F6B4, 0x1F6B6, prEB, gcSo}, // [3] BICYCLIST..PEDESTRIAN
+ {0x1F6B7, 0x1F6BF, prID, gcSo}, // [9] NO PEDESTRIANS..SHOWER
+ {0x1F6C0, 0x1F6C0, prEB, gcSo}, // BATH
+ {0x1F6C1, 0x1F6CB, prID, gcSo}, // [11] BATHTUB..COUCH AND LAMP
+ {0x1F6CC, 0x1F6CC, prEB, gcSo}, // SLEEPING ACCOMMODATION
+ {0x1F6CD, 0x1F6D7, prID, gcSo}, // [11] SHOPPING BAGS..ELEVATOR
+ {0x1F6D8, 0x1F6DB, prID, gcCn}, // [4] ..
+ {0x1F6DC, 0x1F6EC, prID, gcSo}, // [17] WIRELESS..AIRPLANE ARRIVING
+ {0x1F6ED, 0x1F6EF, prID, gcCn}, // [3] ..
+ {0x1F6F0, 0x1F6FC, prID, gcSo}, // [13] SATELLITE..ROLLER SKATE
+ {0x1F6FD, 0x1F6FF, prID, gcCn}, // [3] ..
+ {0x1F700, 0x1F773, prAL, gcSo}, // [116] ALCHEMICAL SYMBOL FOR QUINTESSENCE..ALCHEMICAL SYMBOL FOR HALF OUNCE
+ {0x1F774, 0x1F776, prID, gcSo}, // [3] LOT OF FORTUNE..LUNAR ECLIPSE
+ {0x1F777, 0x1F77A, prID, gcCn}, // [4] ..
+ {0x1F77B, 0x1F77F, prID, gcSo}, // [5] HAUMEA..ORCUS
+ {0x1F780, 0x1F7D4, prAL, gcSo}, // [85] BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..HEAVY TWELVE POINTED PINWHEEL STAR
+ {0x1F7D5, 0x1F7D9, prID, gcSo}, // [5] CIRCLED TRIANGLE..NINE POINTED WHITE STAR
+ {0x1F7DA, 0x1F7DF, prID, gcCn}, // [6] ..
+ {0x1F7E0, 0x1F7EB, prID, gcSo}, // [12] LARGE ORANGE CIRCLE..LARGE BROWN SQUARE
+ {0x1F7EC, 0x1F7EF, prID, gcCn}, // [4] ..
+ {0x1F7F0, 0x1F7F0, prID, gcSo}, // HEAVY EQUALS SIGN
+ {0x1F7F1, 0x1F7FF, prID, gcCn}, // [15] ..
+ {0x1F800, 0x1F80B, prAL, gcSo}, // [12] LEFTWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD..DOWNWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD
+ {0x1F80C, 0x1F80F, prID, gcCn}, // [4] ..
+ {0x1F810, 0x1F847, prAL, gcSo}, // [56] LEFTWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD..DOWNWARDS HEAVY ARROW
+ {0x1F848, 0x1F84F, prID, gcCn}, // [8] ..
+ {0x1F850, 0x1F859, prAL, gcSo}, // [10] LEFTWARDS SANS-SERIF ARROW..UP DOWN SANS-SERIF ARROW
+ {0x1F85A, 0x1F85F, prID, gcCn}, // [6] ..
+ {0x1F860, 0x1F887, prAL, gcSo}, // [40] WIDE-HEADED LEFTWARDS LIGHT BARB ARROW..WIDE-HEADED SOUTH WEST VERY HEAVY BARB ARROW
+ {0x1F888, 0x1F88F, prID, gcCn}, // [8] ..
+ {0x1F890, 0x1F8AD, prAL, gcSo}, // [30] LEFTWARDS TRIANGLE ARROWHEAD..WHITE ARROW SHAFT WIDTH TWO THIRDS
+ {0x1F8AE, 0x1F8AF, prID, gcCn}, // [2] ..
+ {0x1F8B0, 0x1F8B1, prID, gcSo}, // [2] ARROW POINTING UPWARDS THEN NORTH WEST..ARROW POINTING RIGHTWARDS THEN CURVING SOUTH WEST
+ {0x1F8B2, 0x1F8FF, prID, gcCn}, // [78] ..
+ {0x1F900, 0x1F90B, prAL, gcSo}, // [12] CIRCLED CROSS FORMEE WITH FOUR DOTS..DOWNWARD FACING NOTCHED HOOK WITH DOT
+ {0x1F90C, 0x1F90C, prEB, gcSo}, // PINCHED FINGERS
+ {0x1F90D, 0x1F90E, prID, gcSo}, // [2] WHITE HEART..BROWN HEART
+ {0x1F90F, 0x1F90F, prEB, gcSo}, // PINCHING HAND
+ {0x1F910, 0x1F917, prID, gcSo}, // [8] ZIPPER-MOUTH FACE..HUGGING FACE
+ {0x1F918, 0x1F91F, prEB, gcSo}, // [8] SIGN OF THE HORNS..I LOVE YOU HAND SIGN
+ {0x1F920, 0x1F925, prID, gcSo}, // [6] FACE WITH COWBOY HAT..LYING FACE
+ {0x1F926, 0x1F926, prEB, gcSo}, // FACE PALM
+ {0x1F927, 0x1F92F, prID, gcSo}, // [9] SNEEZING FACE..SHOCKED FACE WITH EXPLODING HEAD
+ {0x1F930, 0x1F939, prEB, gcSo}, // [10] PREGNANT WOMAN..JUGGLING
+ {0x1F93A, 0x1F93B, prID, gcSo}, // [2] FENCER..MODERN PENTATHLON
+ {0x1F93C, 0x1F93E, prEB, gcSo}, // [3] WRESTLERS..HANDBALL
+ {0x1F93F, 0x1F976, prID, gcSo}, // [56] DIVING MASK..FREEZING FACE
+ {0x1F977, 0x1F977, prEB, gcSo}, // NINJA
+ {0x1F978, 0x1F9B4, prID, gcSo}, // [61] DISGUISED FACE..BONE
+ {0x1F9B5, 0x1F9B6, prEB, gcSo}, // [2] LEG..FOOT
+ {0x1F9B7, 0x1F9B7, prID, gcSo}, // TOOTH
+ {0x1F9B8, 0x1F9B9, prEB, gcSo}, // [2] SUPERHERO..SUPERVILLAIN
+ {0x1F9BA, 0x1F9BA, prID, gcSo}, // SAFETY VEST
+ {0x1F9BB, 0x1F9BB, prEB, gcSo}, // EAR WITH HEARING AID
+ {0x1F9BC, 0x1F9CC, prID, gcSo}, // [17] MOTORIZED WHEELCHAIR..TROLL
+ {0x1F9CD, 0x1F9CF, prEB, gcSo}, // [3] STANDING PERSON..DEAF PERSON
+ {0x1F9D0, 0x1F9D0, prID, gcSo}, // FACE WITH MONOCLE
+ {0x1F9D1, 0x1F9DD, prEB, gcSo}, // [13] ADULT..ELF
+ {0x1F9DE, 0x1F9FF, prID, gcSo}, // [34] GENIE..NAZAR AMULET
+ {0x1FA00, 0x1FA53, prAL, gcSo}, // [84] NEUTRAL CHESS KING..BLACK CHESS KNIGHT-BISHOP
+ {0x1FA54, 0x1FA5F, prID, gcCn}, // [12] ..
+ {0x1FA60, 0x1FA6D, prID, gcSo}, // [14] XIANGQI RED GENERAL..XIANGQI BLACK SOLDIER
+ {0x1FA6E, 0x1FA6F, prID, gcCn}, // [2] ..
+ {0x1FA70, 0x1FA7C, prID, gcSo}, // [13] BALLET SHOES..CRUTCH
+ {0x1FA7D, 0x1FA7F, prID, gcCn}, // [3] ..
+ {0x1FA80, 0x1FA88, prID, gcSo}, // [9] YO-YO..FLUTE
+ {0x1FA89, 0x1FA8F, prID, gcCn}, // [7] ..
+ {0x1FA90, 0x1FABD, prID, gcSo}, // [46] RINGED PLANET..WING
+ {0x1FABE, 0x1FABE, prID, gcCn}, //
+ {0x1FABF, 0x1FAC2, prID, gcSo}, // [4] GOOSE..PEOPLE HUGGING
+ {0x1FAC3, 0x1FAC5, prEB, gcSo}, // [3] PREGNANT MAN..PERSON WITH CROWN
+ {0x1FAC6, 0x1FACD, prID, gcCn}, // [8] ..
+ {0x1FACE, 0x1FADB, prID, gcSo}, // [14] MOOSE..PEA POD
+ {0x1FADC, 0x1FADF, prID, gcCn}, // [4] ..
+ {0x1FAE0, 0x1FAE8, prID, gcSo}, // [9] MELTING FACE..SHAKING FACE
+ {0x1FAE9, 0x1FAEF, prID, gcCn}, // [7] ..
+ {0x1FAF0, 0x1FAF8, prEB, gcSo}, // [9] HAND WITH INDEX FINGER AND THUMB CROSSED..RIGHTWARDS PUSHING HAND
+ {0x1FAF9, 0x1FAFF, prID, gcCn}, // [7] ..
+ {0x1FB00, 0x1FB92, prAL, gcSo}, // [147] BLOCK SEXTANT-1..UPPER HALF INVERSE MEDIUM SHADE AND LOWER HALF BLOCK
+ {0x1FB94, 0x1FBCA, prAL, gcSo}, // [55] LEFT HALF INVERSE MEDIUM SHADE AND RIGHT HALF BLOCK..WHITE UP-POINTING CHEVRON
+ {0x1FBF0, 0x1FBF9, prNU, gcNd}, // [10] SEGMENTED DIGIT ZERO..SEGMENTED DIGIT NINE
+ {0x1FC00, 0x1FFFD, prID, gcCn}, // [1022] ..