pax_global_header00006660000000000000000000000064150362567140014523gustar00rootroot0000000000000052 comment=029bf646f01f3fe61e9b519f6157b022b8193619 hx-0.3.0+20250717/000077500000000000000000000000001503625671400130455ustar00rootroot00000000000000hx-0.3.0+20250717/.envrc000066400000000000000000000001761503625671400141670ustar00rootroot00000000000000watch_file flake.lock # try to use flakes, if it fails use normal nix (ie. shell.nix) use flake || use nix eval "$shellHook" hx-0.3.0+20250717/.gitattributes000066400000000000000000000003771503625671400157470ustar00rootroot00000000000000# See test-grammars/*/*.scm linguist-vendored test-grammars/*/src/** linguist-vendored test-grammars/*/src/{parser.c,grammar.json,scanner.*} binary fixtures/** -linguist-detectable hx-0.3.0+20250717/.github/000077500000000000000000000000001503625671400144055ustar00rootroot00000000000000hx-0.3.0+20250717/.github/workflows/000077500000000000000000000000001503625671400164425ustar00rootroot00000000000000hx-0.3.0+20250717/.github/workflows/ci.yml000066400000000000000000000034571503625671400175710ustar00rootroot00000000000000name: CI on: pull_request: push: branches: - master env: MSRV: "1.82" jobs: check-msrv: name: Check strategy: matrix: toolchain: - MSRV - stable runs-on: ubuntu-latest steps: - name: Checkout sources uses: actions/checkout@v4 - name: Install toolchain uses: dtolnay/rust-toolchain@master with: toolchain: ${{ matrix.toolchain == 'MSRV' && env.MSRV || 'stable' }} - uses: Swatinem/rust-cache@v2 - name: Run cargo check run: cargo check - name: Run cargo check without default features run: cargo check --no-default-features test: name: Test runs-on: ubuntu-latest steps: - name: Checkout sources uses: actions/checkout@v4 - name: Install MSRV toolchain uses: dtolnay/rust-toolchain@master with: toolchain: "${{ env.MSRV }}" - uses: Swatinem/rust-cache@v2 - name: Run cargo test run: cargo test --workspace lints: name: Lints runs-on: ubuntu-latest steps: - name: Checkout sources uses: actions/checkout@v4 - name: Install MSRV toolchain uses: dtolnay/rust-toolchain@master with: toolchain: "${{ env.MSRV }}" components: rustfmt, clippy - uses: Swatinem/rust-cache@v2 - name: Run cargo fmt run: cargo fmt --all --check - name: Run cargo clippy run: cargo clippy --workspace --all-targets -- -D warnings - name: Run cargo clippy without default features run: cargo clippy --workspace --all-targets --no-default-features -- -D warnings - name: Run cargo doc run: cargo doc --no-deps --workspace --document-private-items env: RUSTDOCFLAGS: -D warnings hx-0.3.0+20250717/.gitignore000066400000000000000000000001131503625671400150300ustar00rootroot00000000000000target result .direnv /test-grammars/*/*.so /test-grammars/*/.BUILD_COOKIE hx-0.3.0+20250717/Cargo.lock000066400000000000000000000513411503625671400147560ustar00rootroot00000000000000# This file is automatically @generated by Cargo. # It is not intended for manual editing. 
version = 3 [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "allocator-api2" version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "anyhow" version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" [[package]] name = "arc-swap" version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" [[package]] name = "autocfg" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "bitflags" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" [[package]] name = "block-buffer" version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] [[package]] name = "bumpalo" version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" [[package]] name = "cc" version = "1.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "755717a7de9ec452bf7f3f1a3099085deabd7f2962b861dae91ecd7a365903d2" dependencies = [ "shlex", ] [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "console" version = "0.15.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ea3c6ecd8059b57859df5c69830340ed3c41d30e3da0c1cbed90a96ac853041b" dependencies = [ "encode_unicode", "libc", "once_cell", "unicode-width 0.2.0", "windows-sys", ] [[package]] name = "cpufeatures" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crypto-common" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", "typenum", ] [[package]] name = "diff" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", ] [[package]] name = "encode_unicode" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "equivalent" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", "windows-sys", ] [[package]] name = "fastrand" version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "foldhash" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" [[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", ] [[package]] name = "getrandom" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" dependencies = [ "cfg-if", "libc", "wasi", "windows-targets", ] [[package]] name = "hashbrown" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" dependencies = [ "allocator-api2", "equivalent", "foldhash", ] [[package]] name = "indexmap" version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" dependencies = [ "equivalent", "hashbrown", ] [[package]] name = "indicatif" version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235" dependencies = [ "console", "number_prefix", "portable-atomic", "unicode-width 0.2.0", "web-time", ] [[package]] name = "itoa" version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "js-sys" version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" dependencies = [ "once_cell", "wasm-bindgen", ] [[package]] name = "kstring" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "558bf9508a558512042d3095138b1f7b8fe90c5467d94f9f1da28b3731c5dbd1" dependencies = [ "static_assertions", ] [[package]] name = "libc" version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] name = "libloading" version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", "windows-targets", ] [[package]] name = "linux-raw-sys" version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "log" version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" [[package]] name = "memchr" version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "number_prefix" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "once_cell" version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "portable-atomic" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6" [[package]] name = "pretty_assertions" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ "diff", "yansi", ] [[package]] name = "proc-macro2" version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] [[package]] name = "quote" version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" dependencies = [ "proc-macro2", ] [[package]] name = "regex" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-cursor" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0497c781d2f982ae8284d2932aee6a877e58a4541daa5e8fadc18cc75c23a61d" dependencies = [ "log", "memchr", "regex-automata", "regex-syntax", "ropey", ] [[package]] name = "regex-syntax" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "ropey" version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93411e420bcd1a75ddd1dc3caf18c23155eda2c090631a85af21ba19e97093b5" dependencies = [ "smallvec", "str_indices", ] [[package]] name = "rustix" version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", "windows-sys", ] [[package]] name = "ruzstd" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fad02996bfc73da3e301efe90b1837be9ed8f4a462b6ed410aa35d00381de89f" dependencies = [ "twox-hash", ] [[package]] name = "ryu" version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "serde" version = "1.0.217" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "serde_json" version = "1.0.138" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" dependencies = [ "itoa", "memchr", "ryu", "serde", ] [[package]] name = "sha1" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", "digest", ] [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "skidder" version = "0.1.0" dependencies = [ "anyhow", "cc", "indicatif", "ruzstd", "serde", "serde_json", "sha1", "tempfile", "walkdir", ] [[package]] name = "skidder-cli" version = "0.1.0" dependencies = [ "anyhow", "indicatif", "libloading", "serde", "serde_json", "skidder", "tempfile", "walkdir", "xflags", ] [[package]] name = "slab" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ "autocfg", ] [[package]] name = "smallvec" version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "static_assertions" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "str_indices" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d08889ec5408683408db66ad89e0e1f93dff55c73a4ccc71c427d5b277ee47e6" [[package]] name = "syn" version = "2.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "tempfile" version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38c246215d7d24f48ae091a2902398798e05d978b24315d6efbc00ede9a8bb91" dependencies = [ "cfg-if", "fastrand", "getrandom", "once_cell", "rustix", "windows-sys", ] [[package]] name = "thiserror" version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "tree-house" version = "0.3.0" dependencies = [ "arc-swap", "hashbrown", "indexmap", "kstring", "once_cell", "pretty_assertions", "regex", "regex-cursor", "ropey", "skidder", "slab", "tree-house-bindings", "unicode-width 0.1.12", ] [[package]] name = "tree-house-bindings" version = "0.2.1" 
dependencies = [ "cc", "libloading", "regex-cursor", "ropey", "thiserror", ] [[package]] name = "twox-hash" version = "1.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ "cfg-if", "static_assertions", ] [[package]] name = "typenum" version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "unicode-ident" version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" [[package]] name = "unicode-width" version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6" [[package]] name = "unicode-width" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" [[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", ] [[package]] name = "wasi" version = "0.13.3+wasi-0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" dependencies = [ "wit-bindgen-rt", ] [[package]] name = "wasm-bindgen" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ "bumpalo", "log", "proc-macro2", "quote", "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" dependencies = [ "unicode-ident", ] [[package]] name = "web-time" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" dependencies = [ "js-sys", "wasm-bindgen", ] [[package]] name = "winapi-util" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ "windows-sys", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", "windows_i686_gnullvm", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "wit-bindgen-rt" version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" dependencies = [ "bitflags", ] [[package]] name = "xflags" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d9e15fbb3de55454b0106e314b28e671279009b363e6f1d8e39fdc3bf048944" dependencies = [ "xflags-macros", ] [[package]] name = "xflags-macros" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "672423d4fea7ffa2f6c25ba60031ea13dc6258070556f125cc4d790007d4a155" [[package]] name = "yansi" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" hx-0.3.0+20250717/Cargo.toml000066400000000000000000000001231503625671400147710ustar00rootroot00000000000000[workspace] resolver = "2" members = ["bindings", "cli", "highlighter", "skidder"] hx-0.3.0+20250717/LICENSE000066400000000000000000000405251503625671400140600ustar00rootroot00000000000000Mozilla Public License Version 2.0 
================================== 1. Definitions -------------- 1.1. "Contributor" means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software. 1.2. "Contributor Version" means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor's Contribution. 1.3. "Contribution" means Covered Software of a particular Contributor. 1.4. "Covered Software" means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof. 1.5. "Incompatible With Secondary Licenses" means (a) that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or (b) that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License. 1.6. "Executable Form" means any form of the work other than Source Code Form. 1.7. "Larger Work" means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software. 1.8. "License" means this document. 1.9. "Licensable" means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License. 1.10. "Modifications" means any of the following: (a) any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or (b) any new file in Source Code Form that contains any Covered Software. 1.11. "Patent Claims" of a Contributor means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version. 1.12. "Secondary License" means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. 1.13. "Source Code Form" means the form of the work preferred for making modifications. 1.14. "You" (or "Your") means an individual or a legal entity exercising rights under this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, "control" means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. 2. License Grants and Conditions -------------------------------- 2.1. Grants Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: (a) under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and (b) under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. 2.2. 
Effective Date The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. 2.3. Limitations on Grant Scope The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: (a) for any code that a Contributor has removed from Covered Software; or (b) for infringements caused by: (i) Your and any other third party's modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or (c) under Patent Claims infringed by Covered Software in the absence of its Contributions. This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). 2.4. Subsequent Licenses No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). 2.5. Representation Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. 2.6. Fair Use This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents. 2.7. Conditions Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1. 3. Responsibilities ------------------- 3.1. Distribution of Source Form All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under the terms of this License. You must inform recipients that the Source Code Form of the Covered Software is governed by the terms of this License, and how they can obtain a copy of this License. You may not attempt to alter or restrict the recipients' rights in the Source Code Form. 3.2. Distribution of Executable Form If You distribute Covered Software in Executable Form then: (a) such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and (b) You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients' rights in the Source Code Form under this License. 3.3. Distribution of a Larger Work You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for the Covered Software. 
If the Larger Work is a combination of Covered Software with a work governed by one or more Secondary Licenses, and the Covered Software is not Incompatible With Secondary Licenses, this License permits You to additionally distribute such Covered Software under the terms of such Secondary License(s), so that the recipient of the Larger Work may, at their option, further distribute the Covered Software under the terms of either this License or such Secondary License(s). 3.4. Notices You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, or limitations of liability) contained within the Source Code Form of the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies. 3.5. Application of Additional Terms You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, You may do so only on Your own behalf, and not on behalf of any Contributor. You must make it absolutely clear that any such warranty, support, indemnity, or liability obligation is offered by You alone, and You hereby agree to indemnify every Contributor for any liability incurred by such Contributor as a result of warranty, support, indemnity or liability terms You offer. You may include additional disclaimers of warranty and limitations of liability specific to any jurisdiction. 4. Inability to Comply Due to Statute or Regulation --------------------------------------------------- If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to statute, judicial order, or regulation then You must: (a) comply with the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. 5. Termination -------------- 5.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. However, if You become compliant, then the rights granted under this License from a particular Contributor are reinstated (a) provisionally, unless and until such Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. Moreover, Your grants from a particular Contributor are reinstated on an ongoing basis if such Contributor notifies You of the non-compliance by some reasonable means, this is the first time You have received notice of non-compliance with this License from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. 5.2. If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. 5.3. 
In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. ************************************************************************ * * * 6. Disclaimer of Warranty * * ------------------------- * * * * Covered Software is provided under this License on an "as is" * * basis, without warranty of any kind, either expressed, implied, or * * statutory, including, without limitation, warranties that the * * Covered Software is free of defects, merchantable, fit for a * * particular purpose or non-infringing. The entire risk as to the * * quality and performance of the Covered Software is with You. * * Should any Covered Software prove defective in any respect, You * * (not any Contributor) assume the cost of any necessary servicing, * * repair, or correction. This disclaimer of warranty constitutes an * * essential part of this License. No use of any Covered Software is * * authorized under this License except under this disclaimer. * * * ************************************************************************ ************************************************************************ * * * 7. Limitation of Liability * * -------------------------- * * * * Under no circumstances and under no legal theory, whether tort * * (including negligence), contract, or otherwise, shall any * * Contributor, or anyone who distributes Covered Software as * * permitted above, be liable to You for any direct, indirect, * * special, incidental, or consequential damages of any character * * including, without limitation, damages for lost profits, loss of * * goodwill, work stoppage, computer failure or malfunction, or any * * and all other commercial damages or losses, even if such party * * shall have been informed of the possibility of such damages. This * * limitation of liability shall not apply to liability for death or * * personal injury resulting from such party's negligence to the * * extent applicable law prohibits such limitation. Some * * jurisdictions do not allow the exclusion or limitation of * * incidental or consequential damages, so this exclusion and * * limitation may not apply to You. * * * ************************************************************************ 8. Litigation ------------- Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal place of business and such litigation shall be governed by laws of that jurisdiction, without reference to its conflict-of-law provisions. Nothing in this Section shall prevent a party's ability to bring cross-claims or counter-claims. 9. Miscellaneous ---------------- This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. 10. Versions of the License --------------------------- 10.1. New Versions Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. 
10.2. Effect of New Versions You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. 10.3. Modified Versions If you create software not governed by this License, and you want to create a new license for such software, you may create and use a modified version of this License if you rename the license and remove any references to the name of the license steward (except to note that such modified license differs from this License). 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. Exhibit A - Source Code Form License Notice ------------------------------------------- This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. You may add additional accurate notices of copyright ownership. Exhibit B - "Incompatible With Secondary Licenses" Notice --------------------------------------------------------- This Source Code Form is "Incompatible With Secondary Licenses", as defined by the Mozilla Public License, v. 2.0. hx-0.3.0+20250717/README.md000066400000000000000000000024001503625671400143200ustar00rootroot00000000000000# `tree-house` This repository contains a number of crates used by the [Helix editor](https://github.com/helix-editor/helix) for integration with the [tree-sitter](https://github.com/tree-sitter/tree-sitter) C library. Most notably the highlighter crate [`tree-house`](https://crates.io/crates/tree-house) provides Helix's syntax highlighting and all other tree-sitter features since the 25.07 release. The highlighter was rewritten from scratch for simplification and to fix a number of bugs. Read more in the [25.07 release highlights](https://helix-editor.com/news/release-25-07-highlights/#tree-house). Documentation is a work-in-progress and these crates may see breaking changes as we expand our use of Tree-sitter in Helix. * `bindings/` contains the `tree-house-bindings` crate which provides Rust bindings over the C library and optional integration with the [Ropey](https://github.com/cessen/ropey) rope crate. * `highlighter/` contains the `tree-house` crate which exposes a robust highlighter and query iterator for working across [injections]. * `skidder/` contains the `skidder` crate which exposes utilities for building a package repository for tree-sitter grammars. * `cli/` contains the `skidder-cli` crate which wraps `skidder` in a command line interface. hx-0.3.0+20250717/bindings/000077500000000000000000000000001503625671400146425ustar00rootroot00000000000000hx-0.3.0+20250717/bindings/CHANGELOG.md000066400000000000000000000013731503625671400164570ustar00rootroot00000000000000# Changelog All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
## [v0.2.1] - 2025-07-12 ### Added * Added `Node::is_extra` ### Updated * Updated the tree-sitter C library to v0.25.7 ## [v0.2.0] - 2025-06-06 ### Added * Added `TreeCursor::reset` * Added an iterator for recursively walking over the nodes in a `TreeCursor`: `TreeRecursiveWalker` ### Updated * Updated the tree-sitter C library to v0.25.6 ## [v0.1.1] - 2025-05-14 ### Fixed * Patched `endian.h` to include IllumOS ## [v0.1.0] - 2025-05-13 ### Added * Initial publish hx-0.3.0+20250717/bindings/Cargo.toml000066400000000000000000000010151503625671400165670ustar00rootroot00000000000000[package] name = "tree-house-bindings" description = "Homey Rust bindings for the tree-sitter C library" authors = ["Pascal Kuthe "] version = "0.2.1" edition = "2021" license = "MPL-2.0" repository = "https://github.com/helix-editor/tree-house" readme = "../README.md" rust-version = "1.76.0" [features] ropey = ["dep:ropey"] [dependencies] ropey = { version = "1.6", default-features = false, optional=true } regex-cursor = "0.1.5" libloading = "0.8" thiserror = "2.0" [build-dependencies] cc = "1.0" hx-0.3.0+20250717/bindings/LICENSE000077700000000000000000000000001503625671400170622../LICENSEustar00rootroot00000000000000hx-0.3.0+20250717/bindings/build.rs000066400000000000000000000020461503625671400163110ustar00rootroot00000000000000use std::path::Path; use std::{env, fs}; fn main() { if env::var_os("DISABLED_TS_BUILD").is_some() { return; } let mut config = cc::Build::new(); let manifest_path = Path::new(env!("CARGO_MANIFEST_DIR")); let include_path = manifest_path.join("vendor/include"); let src_path = manifest_path.join("vendor/src"); for entry in fs::read_dir(&src_path).unwrap() { let entry = entry.unwrap(); let path = src_path.join(entry.file_name()); println!("cargo:rerun-if-changed={}", path.to_str().unwrap()); } config .flag_if_supported("-std=c11") .flag_if_supported("-fvisibility=hidden") .flag_if_supported("-Wshadow") .flag_if_supported("-Wno-unused-parameter") .flag_if_supported("-Wno-incompatible-pointer-types") .include(&src_path) .include(&include_path) .define("_POSIX_C_SOURCE", "200112L") .define("_DEFAULT_SOURCE", None) .warnings(false) .file(src_path.join("lib.c")) .compile("tree-sitter"); } hx-0.3.0+20250717/bindings/src/000077500000000000000000000000001503625671400154315ustar00rootroot00000000000000hx-0.3.0+20250717/bindings/src/grammar.rs000066400000000000000000000075461503625671400174410ustar00rootroot00000000000000use std::fmt; use std::path::{Path, PathBuf}; use std::ptr::NonNull; use libloading::{Library, Symbol}; /// Lowest supported ABI version of a grammar. // WARNING: update when updating vendored c sources // `TREE_SITTER_MIN_COMPATIBLE_LANGUAGE_VERSION` pub const MIN_COMPATIBLE_ABI_VERSION: u32 = 13; // `TREE_SITTER_LANGUAGE_VERSION` pub const ABI_VERSION: u32 = 15; // opaque pointer enum GrammarData {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Grammar { ptr: NonNull, } unsafe impl Send for Grammar {} unsafe impl Sync for Grammar {} impl std::fmt::Debug for Grammar { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Grammar").finish_non_exhaustive() } } impl Grammar { /// Loads a shared library containing a tree sitter grammar with name `name` // from `library_path`. 
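    ///
    /// A hedged usage sketch (the grammar name `"rust"` and the library path
    /// below are hypothetical examples, not something shipped with this crate):
    ///
    /// ```no_run
    /// use std::path::Path;
    /// use tree_house_bindings::Grammar;
    ///
    /// // SAFETY: the shared library at this path must be a valid tree-sitter grammar.
    /// let grammar = unsafe { Grammar::new("rust", Path::new("libtree-sitter-rust.so")) }
    ///     .expect("failed to load grammar");
    /// println!("grammar ABI version: {}", grammar.abi_version());
    /// ```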
/// /// # Safety /// /// `library_path` must be a valid tree sitter grammar pub unsafe fn new(name: &str, library_path: &Path) -> Result { let library = unsafe { Library::new(library_path).map_err(|err| Error::DlOpen { err, path: library_path.to_owned(), })? }; let language_fn_name = format!("tree_sitter_{}", name.replace('-', "_")); let grammar = unsafe { let language_fn: Symbol NonNull> = library .get(language_fn_name.as_bytes()) .map_err(|err| Error::DlSym { err, symbol: name.to_owned(), })?; Grammar { ptr: language_fn() } }; let version = grammar.abi_version(); if (MIN_COMPATIBLE_ABI_VERSION..=ABI_VERSION).contains(&version) { std::mem::forget(library); Ok(grammar) } else { Err(Error::IncompatibleVersion { version }) } } pub fn abi_version(self) -> u32 { unsafe { ts_language_abi_version(self) } } pub fn node_kind_is_visible(self, kind_id: u16) -> bool { let symbol_type = unsafe { ts_language_symbol_type(self, kind_id) }; symbol_type <= (SymbolType::Anonymous as u32) } } #[derive(thiserror::Error, Debug)] pub enum Error { #[error("Error opening dynamic library {path:?}")] DlOpen { #[source] err: libloading::Error, path: PathBuf, }, #[error("Failed to load symbol {symbol}")] DlSym { #[source] err: libloading::Error, symbol: String, }, #[error("Tried to load grammar with incompatible ABI {version}.")] IncompatibleVersion { version: u32 }, } /// An error that occurred when trying to assign an incompatible [`Grammar`] to /// a [`crate::parser::Parser`]. #[derive(Debug, PartialEq, Eq)] pub struct IncompatibleGrammarError { pub abi_version: u32, } impl fmt::Display for IncompatibleGrammarError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "Tried to load grammar with incompatible ABI version {}.", self.abi_version, ) } } impl std::error::Error for IncompatibleGrammarError {} #[repr(u32)] #[allow(dead_code)] pub enum SymbolType { Regular, Anonymous, Supertype, Auxiliary, } extern "C" { /// Get the ABI version number for this language. This version number /// is used to ensure that languages were generated by a compatible version of /// Tree-sitter. See also `ts_parser_set_language`. pub fn ts_language_abi_version(grammar: Grammar) -> u32; /// Checks whether the given node type belongs to named nodes, anonymous nodes, or hidden /// nodes. /// /// See also `ts_node_is_named`. Hidden nodes are never returned from the API. 
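    ///
    /// The safe [`Grammar::node_kind_is_visible`] helper above wraps this call: a
    /// kind is considered visible when its symbol type is `Regular` or `Anonymous`.
    /// A small sketch (assumes `grammar` and `node` come from a parsed tree elsewhere):
    ///
    /// ```ignore
    /// let visible = grammar.node_kind_is_visible(node.kind_id());
    /// ```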
pub fn ts_language_symbol_type(grammar: Grammar, symbol: u16) -> u32; } hx-0.3.0+20250717/bindings/src/lib.rs000066400000000000000000000026221503625671400165470ustar00rootroot00000000000000mod grammar; mod node; mod parser; pub mod query; mod query_cursor; mod tree; mod tree_cursor; #[cfg(feature = "ropey")] mod ropey; #[cfg(feature = "ropey")] pub use ropey::RopeInput; use std::ops; pub use grammar::{Grammar, IncompatibleGrammarError}; pub use node::Node; pub use parser::{Parser, ParserInputRaw}; pub use query::{Capture, Pattern, Query, QueryStr}; pub use query_cursor::{InactiveQueryCursor, MatchedNode, MatchedNodeIdx, QueryCursor, QueryMatch}; pub use tree::{InputEdit, Tree}; pub use tree_cursor::TreeCursor; #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Point { pub row: u32, pub col: u32, } impl Point { pub const ZERO: Self = Self { row: 0, col: 0 }; pub const MAX: Self = Self { row: u32::MAX, col: u32::MAX, }; } #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Range { pub start_point: Point, pub end_point: Point, pub start_byte: u32, pub end_byte: u32, } pub trait Input { type Cursor: regex_cursor::Cursor; fn cursor_at(&mut self, offset: u32) -> &mut Self::Cursor; fn eq(&mut self, range1: ops::Range, range2: ops::Range) -> bool; } pub trait IntoInput { type Input: Input; fn into_input(self) -> Self::Input; } impl IntoInput for T { type Input = T; fn into_input(self) -> T { self } } hx-0.3.0+20250717/bindings/src/node.rs000066400000000000000000000252651503625671400167360ustar00rootroot00000000000000use std::ffi::{c_char, c_void, CStr}; use std::fmt; use std::marker::PhantomData; use std::ops::Range; use std::ptr::NonNull; use crate::tree::Tree; use crate::tree_cursor::TreeCursor; use crate::Grammar; #[repr(C)] #[derive(Debug, Clone, Copy)] pub(super) struct NodeRaw { context: [u32; 4], id: *const c_void, tree: *const c_void, } impl From> for NodeRaw { fn from(node: Node) -> NodeRaw { NodeRaw { context: node.context, id: node.id.as_ptr(), tree: node.tree.as_ptr(), } } } #[derive(Clone)] #[repr(C)] pub struct Node<'tree> { context: [u32; 4], id: NonNull, tree: NonNull, _phantom: PhantomData<&'tree Tree>, } impl fmt::Debug for Node<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let range = self.byte_range(); write!(f, "{{Node {} {range:?}}}", self.kind()) } } impl<'tree> Node<'tree> { #[inline] pub(super) unsafe fn from_raw(raw: NodeRaw) -> Option { Some(Node { context: raw.context, id: NonNull::new(raw.id as *mut _)?, tree: unsafe { NonNull::new_unchecked(raw.tree as *mut _) }, _phantom: PhantomData, }) } #[inline] pub(crate) fn as_raw(&self) -> NodeRaw { NodeRaw { context: self.context, id: self.id.as_ptr(), tree: self.tree.as_ptr(), } } pub fn id(&self) -> usize { self.id.as_ptr() as usize } /// Get this node's type as a string #[inline] pub fn kind(&self) -> &'tree str { unsafe { CStr::from_ptr(ts_node_type(self.as_raw())) } .to_str() .unwrap() } /// Get this node's type as a numerical id. #[inline] pub fn kind_id(&self) -> u16 { unsafe { ts_node_symbol(self.as_raw()) } } /// Get the [`Grammar`] that was used to parse this node's syntax tree. #[inline] pub fn grammar(&self) -> Grammar { unsafe { ts_node_language(self.as_raw()) } } /// Check if this node is *named*. /// /// Named nodes correspond to named rules in the grammar, whereas /// *anonymous* nodes correspond to string literals in the grammar. 
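    ///
    /// A small sketch (assumes `node` is a [`Node`] obtained from a previously
    /// parsed tree): count how many children are named rather than anonymous.
    ///
    /// ```ignore
    /// let named = node.children().filter(|child| child.is_named()).count();
    /// println!("{named} of {} children are named", node.child_count());
    /// ```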
#[inline] pub fn is_named(&self) -> bool { unsafe { ts_node_is_named(self.as_raw()) } } /// Returns true if and only if this node is contained "inside" the given /// input range, i.e. either start_new > start_old and end_new <= end_old OR /// start_new >= start_old and end_new < end_old pub fn is_contained_within(&self, range: Range) -> bool { (self.start_byte() > range.start && self.end_byte() <= range.end) || (self.start_byte() >= range.start && self.end_byte() < range.end) } /// Check if this node is *missing*. /// /// Missing nodes are inserted by the parser in order to recover from /// certain kinds of syntax errors. #[inline] pub fn is_missing(&self) -> bool { unsafe { ts_node_is_missing(self.as_raw()) } } /// Check if this node is *extra*. /// /// Extra nodes represent things like comments, which are not required by the /// grammar, but can appear anywhere. #[inline] pub fn is_extra(&self) -> bool { unsafe { ts_node_is_extra(self.as_raw()) } } /// Get the byte offsets where this node starts. #[inline(always)] pub fn start_byte(&self) -> u32 { // Normally we would implement this method like so: // // extern "C" { // /// Get the node's start byte. // fn ts_node_start_byte(self_: NodeRaw) -> u32; // } // unsafe { ts_node_start_byte(self.as_raw()) } // // However this method has a trivial implementation which is unlikely to change (though // there is no guarantee) and this method can be called often, in tight loops, on a hot // code path (for example the highlighter's `next_event_offset` method). So we inline the // implementation directly from `node.c` in the C library to minimize overhead: self.context[0] } /// Get the byte offsets where this node end. #[inline] pub fn end_byte(&self) -> u32 { unsafe { ts_node_end_byte(self.as_raw()) } } /// Get the byte range of source code that this node represents. #[inline] pub fn byte_range(&self) -> Range { self.start_byte()..self.end_byte() } /// Get the node's child at the given index, where zero represents the first /// child. /// /// This method is fairly fast, but its cost is technically log(i), so if /// you might be iterating over a long list of children, you should use /// [`Node::children`] instead. #[inline] pub fn child(&self, i: u32) -> Option> { unsafe { Node::from_raw(ts_node_child(self.as_raw(), i)) } } /// Get this node's number of children. #[inline] pub fn child_count(&self) -> u32 { unsafe { ts_node_child_count(self.as_raw()) } } /// Get this node's *named* child at the given index. /// /// See also [`Node::is_named`]. /// This method is fairly fast, but its cost is technically log(i), so if /// you might be iterating over a long list of children, you should use /// `Node::named_children` instead. #[inline] pub fn named_child(&self, i: u32) -> Option> { unsafe { Node::from_raw(ts_node_named_child(self.as_raw(), i)) } } /// Get this node's number of *named* children. /// /// See also [`Node::is_named`]. #[inline] pub fn named_child_count(&self) -> u32 { unsafe { ts_node_named_child_count(self.as_raw()) } } #[inline] unsafe fn map(&self, f: unsafe extern "C" fn(NodeRaw) -> NodeRaw) -> Option> { Node::from_raw(f(self.as_raw())) } /// Get this node's immediate parent. #[inline] pub fn parent(&self) -> Option { unsafe { self.map(ts_node_parent) } } /// Get this node's next sibling. #[inline] pub fn next_sibling(&self) -> Option { unsafe { self.map(ts_node_next_sibling) } } /// Get this node's previous sibling. 
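    ///
    /// A small sketch (assumes `node` is a [`Node`] from a parsed tree): collect
    /// the siblings that precede `node`, nearest first.
    ///
    /// ```ignore
    /// let mut earlier = Vec::new();
    /// let mut current = node.clone();
    /// while let Some(sibling) = current.prev_sibling() {
    ///     earlier.push(sibling.clone());
    ///     current = sibling;
    /// }
    /// ```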
#[inline] pub fn prev_sibling(&self) -> Option { unsafe { self.map(ts_node_prev_sibling) } } /// Get this node's next named sibling. #[inline] pub fn next_named_sibling(&self) -> Option { unsafe { self.map(ts_node_next_named_sibling) } } /// Get this node's previous named sibling. #[inline] pub fn prev_named_sibling(&self) -> Option { unsafe { self.map(ts_node_prev_named_sibling) } } /// Get the smallest node within this node that spans the given range. #[inline] pub fn descendant_for_byte_range(&self, start: u32, end: u32) -> Option { unsafe { Self::from_raw(ts_node_descendant_for_byte_range(self.as_raw(), start, end)) } } /// Get the smallest named node within this node that spans the given range. #[inline] pub fn named_descendant_for_byte_range(&self, start: u32, end: u32) -> Option { unsafe { Self::from_raw(ts_node_named_descendant_for_byte_range( self.as_raw(), start, end, )) } } /// Iterate over this node's children. /// /// A [`TreeCursor`] is used to retrieve the children efficiently. Obtain /// a [`TreeCursor`] by calling [`Tree::walk`] or [`Node::walk`]. To avoid /// unnecessary allocations, you should reuse the same cursor for /// subsequent calls to this method. /// /// If you're walking the tree recursively, you may want to use the /// [`TreeCursor`] APIs directly instead. pub fn children(&self) -> impl ExactSizeIterator> { let mut cursor = TreeCursor::new(self); cursor.goto_first_child(); (0..self.child_count()).map(move |_| { let result = cursor.node(); cursor.goto_next_sibling(); result }) } pub fn walk(&self) -> TreeCursor<'tree> { TreeCursor::new(self) } } impl PartialEq for Node<'_> { fn eq(&self, other: &Self) -> bool { self.id == other.id } } impl Eq for Node<'_> {} unsafe impl Send for Node<'_> {} unsafe impl Sync for Node<'_> {} extern "C" { /// Get the node's type as a null-terminated string. fn ts_node_type(node: NodeRaw) -> *const c_char; /// Get the node's type as a numerical id. fn ts_node_symbol(node: NodeRaw) -> u16; /// Get the node's language. fn ts_node_language(node: NodeRaw) -> Grammar; /// Check if the node is *named*. Named nodes correspond to named rules in /// the grammar, whereas *anonymous* nodes correspond to string literals in /// the grammar fn ts_node_is_named(node: NodeRaw) -> bool; /// Check if the node is *missing*. Missing nodes are inserted by the parser /// in order to recover from certain kinds of syntax errors fn ts_node_is_missing(node: NodeRaw) -> bool; /// Check if this node is *extra*. /// /// Extra nodes represent things like comments, which are not required by the /// grammar, but can appear anywhere. fn ts_node_is_extra(node: NodeRaw) -> bool; /// Get the node's immediate parent fn ts_node_parent(node: NodeRaw) -> NodeRaw; /// Get the node's child at the given index, where zero represents the first /// child fn ts_node_child(node: NodeRaw, child_index: u32) -> NodeRaw; /// Get the node's number of children fn ts_node_child_count(node: NodeRaw) -> u32; /// Get the node's *named* child at the given index. See also /// [`ts_node_is_named`] fn ts_node_named_child(node: NodeRaw, child_index: u32) -> NodeRaw; /// Get the node's number of *named* children. 
See also [`ts_node_is_named`] fn ts_node_named_child_count(node: NodeRaw) -> u32; /// Get the node's next sibling fn ts_node_next_sibling(node: NodeRaw) -> NodeRaw; fn ts_node_prev_sibling(node: NodeRaw) -> NodeRaw; /// Get the node's next *named* sibling fn ts_node_next_named_sibling(node: NodeRaw) -> NodeRaw; fn ts_node_prev_named_sibling(node: NodeRaw) -> NodeRaw; /// Get the smallest node within this node that spans the given range of /// bytes or (row, column) positions fn ts_node_descendant_for_byte_range(node: NodeRaw, start: u32, end: u32) -> NodeRaw; /// Get the smallest named node within this node that spans the given range /// of bytes or (row, column) positions fn ts_node_named_descendant_for_byte_range(node: NodeRaw, start: u32, end: u32) -> NodeRaw; /// Get the node's end byte. fn ts_node_end_byte(node: NodeRaw) -> u32; } hx-0.3.0+20250717/bindings/src/parser.rs000066400000000000000000000230061503625671400172740ustar00rootroot00000000000000use std::cell::Cell; use std::os::raw::c_void; use std::panic::{catch_unwind, AssertUnwindSafe}; use std::ptr::NonNull; use std::time::Duration; use std::{fmt, mem, ptr}; use regex_cursor::Cursor; use crate::grammar::IncompatibleGrammarError; use crate::tree::{SyntaxTreeData, Tree}; use crate::{Grammar, Input, IntoInput, Point, Range}; // opaque data enum ParserData {} #[clippy::msrv = "1.76.0"] thread_local! { static PARSER_CACHE: Cell> = const { Cell::new(None) }; } struct RawParser { ptr: NonNull, } impl Drop for RawParser { fn drop(&mut self) { unsafe { ts_parser_delete(self.ptr) } } } /// A stateful object that this is used to produce a [`Tree`] based on some /// source code. pub struct Parser { ptr: NonNull, } impl Parser { /// Create a new parser. #[must_use] pub fn new() -> Parser { let ptr = match PARSER_CACHE.take() { Some(cached) => { let ptr = cached.ptr; mem::forget(cached); ptr } None => unsafe { ts_parser_new() }, }; Parser { ptr } } /// Set the language that the parser should use for parsing. pub fn set_grammar(&mut self, grammar: Grammar) -> Result<(), IncompatibleGrammarError> { if unsafe { ts_parser_set_language(self.ptr, grammar) } { Ok(()) } else { Err(IncompatibleGrammarError { abi_version: grammar.abi_version(), }) } } pub fn set_timeout(&mut self, duration: Duration) { #[allow(deprecated)] unsafe { ts_parser_set_timeout_micros(self.ptr, duration.as_micros().try_into().unwrap()); } } /// Set the ranges of text that the parser should include when parsing. By default, the parser /// will always include entire documents. This function allows you to parse only a *portion* /// of a document but still return a syntax tree whose ranges match up with the document as a /// whole. You can also pass multiple disjoint ranges. /// /// `ranges` must be non-overlapping and sorted. pub fn set_included_ranges(&mut self, ranges: &[Range]) -> Result<(), InvalidRangesError> { // TODO: save some memory by only storing byte ranges and converting them to TS ranges in an // internal buffer here. Points are not used by TS. 
Alternatively we can patch the TS C code // to accept a simple pair (struct with two fields) of byte positions here instead of a full // tree sitter range let success = unsafe { ts_parser_set_included_ranges(self.ptr, ranges.as_ptr(), ranges.len() as u32) }; if success { Ok(()) } else { Err(InvalidRangesError) } } #[must_use] pub fn parse( &mut self, input: impl IntoInput, old_tree: Option<&Tree>, ) -> Option { let mut input = input.into_input(); unsafe extern "C" fn read( payload: NonNull, byte_index: u32, _position: Point, bytes_read: *mut u32, ) -> *const u8 { let cursor = catch_unwind(AssertUnwindSafe(move || { let input: &mut C = payload.cast().as_mut(); let cursor = input.cursor_at(byte_index); let slice = cursor.chunk(); let offset: u32 = cursor.offset().try_into().unwrap(); let len: u32 = slice.len().try_into().unwrap(); (byte_index - offset, slice.as_ptr(), len) })); match cursor { Ok((chunk_offset, ptr, len)) if chunk_offset < len => { *bytes_read = len - chunk_offset; ptr.add(chunk_offset as usize) } _ => { *bytes_read = 0; ptr::null() } } } let input = ParserInputRaw { payload: NonNull::from(&mut input).cast(), read: read::, encoding: InputEncoding::Utf8, decode: None, }; unsafe { let old_tree = old_tree.map(|tree| tree.as_raw()); let new_tree = ts_parser_parse(self.ptr, old_tree, input); new_tree.map(|raw| Tree::from_raw(raw)) } } } impl Default for Parser { fn default() -> Self { Self::new() } } unsafe impl Sync for Parser {} unsafe impl Send for Parser {} impl Drop for Parser { fn drop(&mut self) { PARSER_CACHE.set(Some(RawParser { ptr: self.ptr })); } } /// An error that occurred when trying to assign an incompatible [`Grammar`] to /// a [`Parser`]. #[derive(Debug, PartialEq, Eq)] pub struct InvalidRangesError; impl fmt::Display for InvalidRangesError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "include ranges overlap or are not sorted",) } } impl std::error::Error for InvalidRangesError {} type TreeSitterReadFn = unsafe extern "C" fn( payload: NonNull, byte_index: u32, position: Point, bytes_read: *mut u32, ) -> *const u8; /// A function that reads one code point from the given string, returning the number of bytes /// consumed. type DecodeInputFn = unsafe extern "C" fn(string: *const u8, length: u32, code_point: *const i32) -> u32; #[repr(C)] #[derive(Debug)] pub struct ParserInputRaw { pub payload: NonNull, pub read: TreeSitterReadFn, pub encoding: InputEncoding, /// A function to decode the the input. /// /// This function is only used if the encoding is `InputEncoding::Custom`. pub decode: Option, } // `TSInputEncoding` #[repr(u32)] #[derive(Debug, Clone, Copy)] pub enum InputEncoding { Utf8, Utf16LE, Utf16BE, Custom, } #[allow(unused)] #[repr(C)] #[derive(Debug)] struct ParseState { /// The payload passed via `ParseOptions`' `payload` field. payload: NonNull, current_byte_offset: u32, has_error: bool, } /// A function that accepts the current parser state and returns `true` when the parse should be /// cancelled. #[allow(unused)] type ProgressCallback = unsafe extern "C" fn(state: NonNull) -> bool; #[allow(unused)] #[repr(C)] #[derive(Debug, Default)] struct ParseOptions { payload: Option>, progress_callback: Option, } extern "C" { /// Create a new parser fn ts_parser_new() -> NonNull; /// Delete the parser, freeing all of the memory that it used. fn ts_parser_delete(parser: NonNull); /// Set the language that the parser should use for parsing. Returns a boolean indicating /// whether or not the language was successfully assigned. 
True means assignment /// succeeded. False means there was a version mismatch: the language was generated with /// an incompatible version of the Tree-sitter CLI. Check the language's version using /// `ts_language_version` and compare it to this library's `TREE_SITTER_LANGUAGE_VERSION` /// and `TREE_SITTER_MIN_COMPATIBLE_LANGUAGE_VERSION` constants. fn ts_parser_set_language(parser: NonNull, language: Grammar) -> bool; /// Set the ranges of text that the parser should include when parsing. By default, the parser /// will always include entire documents. This function allows you to parse only a *portion* /// of a document but still return a syntax tree whose ranges match up with the document as a /// whole. You can also pass multiple disjoint ranges. The second and third parameters specify /// the location and length of an array of ranges. The parser does *not* take ownership of /// these ranges; it copies the data, so it doesn't matter how these ranges are allocated. /// If `count` is zero, then the entire document will be parsed. Otherwise, the given ranges /// must be ordered from earliest to latest in the document, and they must not overlap. That /// is, the following must hold for all: `i < count - 1`: `ranges[i].end_byte <= ranges[i + /// 1].start_byte` If this requirement is not satisfied, the operation will fail, the ranges /// will not be assigned, and this function will return `false`. On success, this function /// returns `true` fn ts_parser_set_included_ranges( parser: NonNull, ranges: *const Range, count: u32, ) -> bool; fn ts_parser_parse( parser: NonNull, old_tree: Option>, input: ParserInputRaw, ) -> Option>; /// Set the maximum duration in microseconds that parsing should be allowed to /// take before halting. /// /// If parsing takes longer than this, it will halt early, returning NULL. /// See [`ts_parser_parse`] for more information. #[deprecated = "use ts_parser_parse_with_options and pass in a calback instead, this will be removed in 0.26"] fn ts_parser_set_timeout_micros(self_: NonNull, timeout_micros: u64); /// Use the parser to parse some source code and create a syntax tree, with some options. /// /// See `ts_parser_parse` for more details. /// /// See `TSParseOptions` for more details on the options. #[allow(unused)] fn ts_parser_parse_with_options( parser: NonNull, old_tree: Option>, input: ParserInputRaw, parse_options: ParseOptions, ) -> Option>; } hx-0.3.0+20250717/bindings/src/query.rs000066400000000000000000000457721503625671400171630ustar00rootroot00000000000000use std::fmt::{self, Display}; use std::ops::Range; use std::ptr::NonNull; use std::{slice, str}; use crate::query::predicate::TextPredicate; pub use crate::query::predicate::{InvalidPredicateError, Predicate}; use crate::Grammar; mod predicate; mod property; #[derive(Debug)] pub enum UserPredicate<'a> { IsPropertySet { negate: bool, key: &'a str, val: Option<&'a str>, }, SetProperty { key: &'a str, val: Option<&'a str>, }, Other(Predicate<'a>), } impl Display for UserPredicate<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { UserPredicate::IsPropertySet { negate, key, val } => { let predicate = if negate { "is-not?" } else { "is?" }; let spacer = if val.is_some() { " " } else { "" }; write!(f, " (#{predicate} {key}{spacer}{})", val.unwrap_or("")) } UserPredicate::SetProperty { key, val } => { let spacer = if val.is_some() { " " } else { "" }; write!(f, "(#set! 
{key}{spacer}{})", val.unwrap_or("")) } UserPredicate::Other(ref predicate) => { write!(f, "#{}", predicate.name()) } } } } #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Pattern(pub(crate) u32); impl Pattern { pub const SENTINEL: Pattern = Pattern(u32::MAX); pub fn idx(&self) -> usize { self.0 as usize } } pub enum QueryData {} #[derive(Debug)] pub(super) struct PatternData { text_predicates: Range, } #[derive(Debug)] pub struct Query { pub(crate) raw: NonNull, num_captures: u32, num_strings: u32, text_predicates: Vec, patterns: Box<[PatternData]>, } unsafe impl Send for Query {} unsafe impl Sync for Query {} impl Query { /// Create a new query from a string containing one or more S-expression /// patterns. /// /// The query is associated with a particular grammar, and can only be run /// on syntax nodes parsed with that grammar. References to Queries can be /// shared between multiple threads. pub fn new( grammar: Grammar, source: &str, mut custom_predicate: impl FnMut(Pattern, UserPredicate) -> Result<(), InvalidPredicateError>, ) -> Result { assert!( source.len() <= i32::MAX as usize, "TreeSitter queries must be smaller then 2 GiB (is {})", source.len() as f64 / 1024.0 / 1024.0 / 1024.0 ); let mut error_offset = 0u32; let mut error_kind = RawQueryError::None; let bytes = source.as_bytes(); // Compile the query. let ptr = unsafe { ts_query_new( grammar, bytes.as_ptr(), bytes.len() as u32, &mut error_offset, &mut error_kind, ) }; let Some(raw) = ptr else { let offset = error_offset as usize; let error_word = || { source[offset..] .chars() .take_while(|&c| c.is_alphanumeric() || matches!(c, '_' | '-')) .collect() }; let err = match error_kind { RawQueryError::NodeType => { let node: String = error_word(); ParseError::InvalidNodeType { location: ParserErrorLocation::new(source, offset, node.chars().count()), node, } } RawQueryError::Field => { let field = error_word(); ParseError::InvalidFieldName { location: ParserErrorLocation::new(source, offset, field.chars().count()), field, } } RawQueryError::Capture => { let capture = error_word(); ParseError::InvalidCaptureName { location: ParserErrorLocation::new(source, offset, capture.chars().count()), capture, } } RawQueryError::Syntax => { ParseError::SyntaxError(ParserErrorLocation::new(source, offset, 0)) } RawQueryError::Structure => { ParseError::ImpossiblePattern(ParserErrorLocation::new(source, offset, 0)) } RawQueryError::None => { unreachable!("tree-sitter returned a null pointer but did not set an error") } RawQueryError::Language => unreachable!("should be handled at grammar load"), }; return Err(err); }; // I am not going to bother with safety comments here, all of these are // safe as long as TS is not buggy because raw is a properly constructed query let num_captures = unsafe { ts_query_capture_count(raw) }; let num_strings = unsafe { ts_query_string_count(raw) }; let num_patterns = unsafe { ts_query_pattern_count(raw) }; let mut query = Query { raw, num_captures, num_strings, text_predicates: Vec::new(), patterns: Box::default(), }; let patterns: Result<_, ParseError> = (0..num_patterns) .map(|pattern| { query .parse_pattern_predicates(Pattern(pattern), &mut custom_predicate) .map_err(|err| { let pattern_start = unsafe { ts_query_start_byte_for_pattern(query.raw, pattern) as usize }; match err { InvalidPredicateError::UnknownPredicate { name } => { let offset = source[pattern_start..] 
.find(&*name) .expect("predicate name is a substring of the query text") + pattern_start // Subtract a byte for b'#'. - 1; ParseError::InvalidPredicate { message: format!("unknown predicate #{name}"), location: ParserErrorLocation::new( source, offset, // Add one char for the '#'. name.chars().count() + 1, ), } } InvalidPredicateError::UnknownProperty { property } => { // TODO: this is naive. We should ensure that it is within a // `#set!` or `#is(-not)?`. let offset = source[pattern_start..] .find(&*property) .expect("property name is a substring of the query text") + pattern_start; ParseError::InvalidPredicate { message: format!("unknown property '{property}'"), location: ParserErrorLocation::new( source, offset, property.chars().count(), ), } } InvalidPredicateError::Other { msg } => ParseError::InvalidPredicate { message: msg.into(), location: ParserErrorLocation::new(source, pattern_start, 0), }, } }) }) .collect(); query.patterns = patterns?; Ok(query) } #[inline] fn get_string(&self, str: QueryStr) -> &str { let value_id = str.0; // need an assertions because the ts c api does not do bounds check assert!(value_id <= self.num_strings, "invalid value index"); unsafe { let mut len = 0; let ptr = ts_query_string_value_for_id(self.raw, value_id, &mut len); let data = slice::from_raw_parts(ptr, len as usize); // safety: we only allow passing valid str(ings) as arguments to query::new // name is always a substring of that. Treesitter does proper utf8 segmentation // so any substrings it produces are codepoint aligned and therefore valid utf8 str::from_utf8_unchecked(data) } } #[inline] pub fn capture_name(&self, capture_idx: Capture) -> &str { let capture_idx = capture_idx.0; // need an assertions because the ts c api does not do bounds check assert!(capture_idx <= self.num_captures, "invalid capture index"); let mut length = 0; unsafe { let ptr = ts_query_capture_name_for_id(self.raw, capture_idx, &mut length); let name = slice::from_raw_parts(ptr, length as usize); // safety: we only allow passing valid str(ings) as arguments to query::new // name is always a substring of that. Treesitter does proper utf8 segmentation // so any substrings it produces are codepoint aligned and therefore valid utf8 str::from_utf8_unchecked(name) } } #[inline] pub fn captures(&self) -> impl ExactSizeIterator { (0..self.num_captures).map(|cap| (Capture(cap), self.capture_name(Capture(cap)))) } #[inline] pub fn num_captures(&self) -> u32 { self.num_captures } #[inline] pub fn get_capture(&self, capture_name: &str) -> Option { for capture in 0..self.num_captures { if capture_name == self.capture_name(Capture(capture)) { return Some(Capture(capture)); } } None } pub(crate) fn pattern_text_predicates(&self, pattern_idx: u16) -> &[TextPredicate] { let range = self.patterns[pattern_idx as usize].text_predicates.clone(); &self.text_predicates[range.start as usize..range.end as usize] } /// Get the byte offset where the given pattern starts in the query's /// source. #[doc(alias = "ts_query_start_byte_for_pattern")] #[must_use] pub fn start_byte_for_pattern(&self, pattern: Pattern) -> usize { assert!( pattern.0 < self.text_predicates.len() as u32, "Pattern index is {pattern:?} but the pattern count is {}", self.text_predicates.len(), ); unsafe { ts_query_start_byte_for_pattern(self.raw, pattern.0) as usize } } /// Get the number of patterns in the query. #[must_use] pub fn pattern_count(&self) -> usize { unsafe { ts_query_pattern_count(self.raw) as usize } } /// Get the number of patterns in the query. 
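///
/// Returns an iterator over the [`Pattern`] indices `0..pattern_count()`; use
/// [`Query::pattern_count`] if only the count itself is needed.
///
/// A minimal sketch (the `query` binding is assumed to be a compiled [`Query`]):
///
/// ```ignore
/// for pattern in query.patterns() {
///     println!("pattern #{}", pattern.idx());
/// }
/// ```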
#[must_use] pub fn patterns(&self) -> impl ExactSizeIterator { (0..self.pattern_count() as u32).map(Pattern) } /// Disable a certain capture within a query. /// /// This prevents the capture from being returned in matches, and also avoids /// any resource usage associated with recording the capture. Currently, there /// is no way to undo this. pub fn disable_capture(&mut self, name: &str) { let bytes = name.as_bytes(); unsafe { ts_query_disable_capture(self.raw, bytes.as_ptr(), bytes.len() as u32); } } } impl Drop for Query { fn drop(&mut self) { unsafe { ts_query_delete(self.raw) } } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[repr(transparent)] pub struct Capture(u32); impl Capture { pub fn name(self, query: &Query) -> &str { query.capture_name(self) } pub fn idx(self) -> usize { self.0 as usize } } /// A reference to a string stored in a query #[derive(Clone, Copy, Debug)] pub struct QueryStr(u32); impl QueryStr { pub fn get(self, query: &Query) -> &str { query.get_string(self) } } #[derive(Debug, PartialEq, Eq)] pub struct ParserErrorLocation { /// at which line the error occurred pub line: u32, /// at which codepoints/columns the errors starts in the line pub column: u32, /// how many codepoints/columns the error takes up pub len: u32, line_content: String, line_before: Option, line_after: Option, } impl ParserErrorLocation { pub fn new(source: &str, start: usize, len: usize) -> ParserErrorLocation { let mut line = 0; let mut column = 0; let mut line_content = String::new(); let mut line_before = None; let mut line_after = None; let mut byte_offset = 0; for (this_line_no, this_line) in source.split('\n').enumerate() { let line_start = byte_offset; let line_end = line_start + this_line.len(); if line_start <= start && start <= line_end { line = this_line_no; line_content = this_line .strip_suffix('\r') .unwrap_or(this_line) .to_string(); column = source[line_start..start].chars().count(); line_before = source[..line_start] .lines() .next_back() .filter(|s| !s.is_empty()) .map(ToOwned::to_owned); line_after = source .get(line_end + 1..) .and_then(|rest| rest.lines().next()) .filter(|s| !s.is_empty()) .map(ToOwned::to_owned); break; } byte_offset += this_line.len() + 1; } ParserErrorLocation { line: line as u32, column: column as u32, len: len as u32, line_content: line_content.to_owned(), line_before, line_after, } } } impl Display for ParserErrorLocation { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { writeln!(f, " --> {}:{}", self.line + 1, self.column + 1)?; let max_line_number = if self.line_after.is_some() { self.line + 2 } else { self.line + 1 }; let line_number_column_len = max_line_number.to_string().len(); let line = (self.line + 1).to_string(); let prefix = format!(" {:width$} |", "", width = line_number_column_len); writeln!(f, "{prefix}")?; if let Some(before) = self.line_before.as_ref() { writeln!(f, " {} | {}", self.line, before)?; } writeln!(f, " {line} | {}", self.line_content)?; writeln!( f, "{prefix}{:width$} {:^ Option>; /// Delete a query, freeing all of the memory that it used. fn ts_query_delete(query: NonNull); /// Get the number of patterns, captures, or string literals in the query. fn ts_query_pattern_count(query: NonNull) -> u32; fn ts_query_capture_count(query: NonNull) -> u32; fn ts_query_string_count(query: NonNull) -> u32; /// Get the byte offset where the given pattern starts in the query's /// source. This can be useful when combining queries by concatenating their /// source code strings. 
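    /// The safe wrapper around this binding is [`Query::start_byte_for_pattern`].
    /// As an illustration (not taken from this crate): if two query files are
    /// concatenated into a single source string before compilation, comparing this
    /// offset against the length of the first file tells which original file a
    /// given pattern came from.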
fn ts_query_start_byte_for_pattern(query: NonNull, pattern_index: u32) -> u32; // fn ts_query_is_pattern_rooted(query: NonNull, pattern_index: u32) -> bool; // fn ts_query_is_pattern_non_local(query: NonNull, pattern_index: u32) -> bool; // fn ts_query_is_pattern_guaranteed_at_step(query: NonNull, byte_offset: u32) -> bool; /// Get the name and length of one of the query's captures, or one of the /// query's string literals. Each capture and string is associated with a /// numeric id based on the order that it appeared in the query's source. fn ts_query_capture_name_for_id( query: NonNull, index: u32, length: &mut u32, ) -> *const u8; fn ts_query_string_value_for_id( self_: NonNull, index: u32, length: &mut u32, ) -> *const u8; /// Disable a certain capture within a query. /// /// This prevents the capture from being returned in matches, and also avoids /// any resource usage associated with recording the capture. Currently, there /// is no way to undo this. fn ts_query_disable_capture(self_: NonNull, name: *const u8, length: u32); } hx-0.3.0+20250717/bindings/src/query/000077500000000000000000000000001503625671400165765ustar00rootroot00000000000000hx-0.3.0+20250717/bindings/src/query/predicate.rs000066400000000000000000000373121503625671400211120ustar00rootroot00000000000000use std::error::Error; use std::iter::zip; use std::ops::Range; use std::ptr::NonNull; use std::{fmt, slice}; use crate::query::property::QueryProperty; use crate::query::{Capture, Pattern, PatternData, Query, QueryData, QueryStr, UserPredicate}; use crate::query_cursor::MatchedNode; use crate::Input; use regex_cursor::engines::meta::Regex; use regex_cursor::Cursor; macro_rules! bail { ($($args:tt)*) => {{ return Err(InvalidPredicateError::Other {msg: format!($($args)*).into() }) }} } macro_rules! ensure { ($cond: expr, $($args:tt)*) => {{ if !$cond { return Err(InvalidPredicateError::Other { msg: format!($($args)*).into() }) } }} } #[derive(Debug)] pub(super) enum TextPredicateKind { EqString(QueryStr), EqCapture(Capture), MatchString(Regex), AnyString(Box<[QueryStr]>), } #[derive(Debug)] pub(crate) struct TextPredicate { capture: Capture, kind: TextPredicateKind, negated: bool, match_all: bool, } fn input_matches_str(str: &str, range: Range, input: &mut I) -> bool { if str.len() != range.len() { return false; } let mut str = str.as_bytes(); let cursor = input.cursor_at(range.start); let range = range.start as usize..range.end as usize; let start_in_chunk = range.start - cursor.offset(); if range.end - cursor.offset() <= cursor.chunk().len() { // hotpath return &cursor.chunk()[start_in_chunk..range.end - cursor.offset()] == str; } if cursor.chunk()[start_in_chunk..] != str[..cursor.chunk().len() - start_in_chunk] { return false; } str = &str[..cursor.chunk().len() - start_in_chunk]; while cursor.advance() { if str.len() <= cursor.chunk().len() { return &cursor.chunk()[..range.end - cursor.offset()] == str; } if &str[..cursor.chunk().len()] != cursor.chunk() { return false; } str = &str[cursor.chunk().len()..] 
} // buggy cursor/invalid range false } impl TextPredicate { /// handlers match_all and negated fn satisfied_helper(&self, mut nodes: impl Iterator) -> bool { if self.match_all { nodes.all(|matched| matched != self.negated) } else { nodes.any(|matched| matched != self.negated) } } pub fn satisfied( &self, input: &mut I, matched_nodes: &[MatchedNode], query: &Query, ) -> bool { let mut capture_nodes = matched_nodes .iter() .filter(|matched_node| matched_node.capture == self.capture); match self.kind { TextPredicateKind::EqString(str) => self.satisfied_helper(capture_nodes.map(|node| { let range = node.node.byte_range(); input_matches_str(query.get_string(str), range.clone(), input) })), TextPredicateKind::EqCapture(other_capture) => { let mut other_nodes = matched_nodes .iter() .filter(|matched_node| matched_node.capture == other_capture); let res = self.satisfied_helper(zip(&mut capture_nodes, &mut other_nodes).map( |(node1, node2)| { let range1 = node1.node.byte_range(); let range2 = node2.node.byte_range(); input.eq(range1, range2) }, )); let consumed_all = capture_nodes.next().is_none() && other_nodes.next().is_none(); res && (!self.match_all || consumed_all) } TextPredicateKind::MatchString(ref regex) => { self.satisfied_helper(capture_nodes.map(|node| { let range = node.node.byte_range(); let mut input = regex_cursor::Input::new(input.cursor_at(range.start)); input.slice(range.start as usize..range.end as usize); regex.is_match(input) })) } TextPredicateKind::AnyString(ref strings) => { let strings = strings.iter().map(|&str| query.get_string(str)); self.satisfied_helper(capture_nodes.map(|node| { let range = node.node.byte_range(); strings .clone() .filter(|str| str.len() == range.len()) .any(|str| input_matches_str(str, range.clone(), input)) })) } } } } impl Query { pub(super) fn parse_pattern_predicates( &mut self, pattern: Pattern, mut custom_predicate: impl FnMut(Pattern, UserPredicate) -> Result<(), InvalidPredicateError>, ) -> Result { let text_predicate_start = self.text_predicates.len() as u32; let predicate_steps = unsafe { let mut len = 0u32; let raw_predicates = ts_query_predicates_for_pattern(self.raw, pattern.0, &mut len); (len != 0) .then(|| slice::from_raw_parts(raw_predicates, len as usize)) .unwrap_or_default() }; let predicates = predicate_steps .split(|step| step.kind == PredicateStepKind::Done) .filter(|predicate| !predicate.is_empty()); for predicate in predicates { let predicate = unsafe { Predicate::new(self, predicate)? }; match predicate.name() { "eq?" | "not-eq?" | "any-eq?" | "any-not-eq?" => { predicate.check_arg_count(2)?; let capture_idx = predicate.capture_arg(0)?; let arg2 = predicate.arg(1); let negated = matches!(predicate.name(), "not-eq?" | "not-any-eq?"); let match_all = matches!(predicate.name(), "eq?" | "not-eq?"); let kind = match arg2 { PredicateArg::Capture(capture) => TextPredicateKind::EqCapture(capture), PredicateArg::String(str) => TextPredicateKind::EqString(str), }; self.text_predicates.push(TextPredicate { capture: capture_idx, kind, negated, match_all, }); } "match?" | "not-match?" | "any-match?" | "any-not-match?" => { predicate.check_arg_count(2)?; let capture_idx = predicate.capture_arg(0)?; let regex = predicate.query_str_arg(1)?.get(self); let negated = matches!(predicate.name(), "not-match?" | "any-not-match?"); let match_all = matches!(predicate.name(), "match?" 
| "not-match?"); let regex = match Regex::builder().build(regex) { Ok(regex) => regex, Err(err) => bail!("invalid regex '{regex}', {err}"), }; self.text_predicates.push(TextPredicate { capture: capture_idx, kind: TextPredicateKind::MatchString(regex), negated, match_all, }); } "set!" => { let property = QueryProperty::parse(&predicate)?; custom_predicate( pattern, UserPredicate::SetProperty { key: property.key.get(self), val: property.val.map(|val| val.get(self)), }, )? } "is-not?" | "is?" => { let property = QueryProperty::parse(&predicate)?; custom_predicate( pattern, UserPredicate::IsPropertySet { negate: predicate.name() == "is-not?", key: property.key.get(self), val: property.val.map(|val| val.get(self)), }, )? } "any-of?" | "not-any-of?" => { predicate.check_min_arg_count(1)?; let capture = predicate.capture_arg(0)?; let negated = predicate.name() == "not-any-of?"; let values: Result<_, InvalidPredicateError> = (1..predicate.num_args()) .map(|i| predicate.query_str_arg(i)) .collect(); self.text_predicates.push(TextPredicate { capture, kind: TextPredicateKind::AnyString(values?), negated, match_all: false, }); } // is and is-not are better handled as custom predicates since interpreting is context dependent // "is?" => property_predicates.push((QueryProperty::parse(&predicate), false)), // "is-not?" => property_predicates.push((QueryProperty::parse(&predicate), true)), _ => custom_predicate(pattern, UserPredicate::Other(predicate))?, } } Ok(PatternData { text_predicates: text_predicate_start..self.text_predicates.len() as u32, }) } } pub enum PredicateArg { Capture(Capture), String(QueryStr), } #[derive(Debug, Clone, Copy)] pub struct Predicate<'a> { pub name: QueryStr, args: &'a [PredicateStep], query: &'a Query, } impl<'a> Predicate<'a> { unsafe fn new( query: &'a Query, predicate: &'a [PredicateStep], ) -> Result, InvalidPredicateError> { ensure!( predicate[0].kind == PredicateStepKind::String, "expected predicate to start with a function name. Got @{}.", Capture(predicate[0].value_id).name(query) ); let operator_name = QueryStr(predicate[0].value_id); Ok(Predicate { name: operator_name, args: &predicate[1..], query, }) } pub fn name(&self) -> &str { self.name.get(self.query) } pub fn check_arg_count(&self, n: usize) -> Result<(), InvalidPredicateError> { ensure!( self.args.len() == n, "expected {n} arguments for #{}, got {}", self.name(), self.args.len() ); Ok(()) } pub fn check_min_arg_count(&self, n: usize) -> Result<(), InvalidPredicateError> { ensure!( n <= self.args.len(), "expected at least {n} arguments for #{}, got {}", self.name(), self.args.len() ); Ok(()) } pub fn check_max_arg_count(&self, n: usize) -> Result<(), InvalidPredicateError> { ensure!( self.args.len() <= n, "expected at most {n} arguments for #{}, got {}", self.name(), self.args.len() ); Ok(()) } pub fn query_str_arg(&self, i: usize) -> Result { match self.arg(i) { PredicateArg::String(str) => Ok(str), PredicateArg::Capture(capture) => bail!( "{i}. argument to #{} must be a literal, got capture @{:?}", self.name(), capture.name(self.query) ), } } pub fn str_arg(&self, i: usize) -> Result<&str, InvalidPredicateError> { Ok(self.query_str_arg(i)?.get(self.query)) } pub fn num_args(&self) -> usize { self.args.len() } pub fn capture_arg(&self, i: usize) -> Result { match self.arg(i) { PredicateArg::Capture(capture) => Ok(capture), PredicateArg::String(str) => bail!( "{i}. 
argument to #{} expected a capture, got literal {:?}", self.name(), str.get(self.query) ), } } pub fn arg(&self, i: usize) -> PredicateArg { self.args[i].try_into().unwrap() } pub fn args(&self) -> impl Iterator + '_ { self.args.iter().map(|&arg| arg.try_into().unwrap()) } } #[derive(Debug)] pub enum InvalidPredicateError { /// The property specified in `#set! ` is not known. UnknownProperty { property: Box, }, /// Predicate is unknown/unsupported by this query. UnknownPredicate { name: Box, }, Other { msg: Box, }, } impl InvalidPredicateError { pub fn unknown(predicate: UserPredicate) -> Self { match predicate { UserPredicate::IsPropertySet { key, .. } => Self::UnknownProperty { property: key.into(), }, UserPredicate::SetProperty { key, .. } => Self::UnknownProperty { property: key.into(), }, UserPredicate::Other(predicate) => Self::UnknownPredicate { name: predicate.name().into(), }, } } } impl From for InvalidPredicateError { fn from(value: String) -> Self { InvalidPredicateError::Other { msg: value.into_boxed_str(), } } } impl<'a> From<&'a str> for InvalidPredicateError { fn from(value: &'a str) -> Self { InvalidPredicateError::Other { msg: value.into() } } } impl fmt::Display for InvalidPredicateError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::UnknownProperty { property } => write!(f, "unknown property '{property}'"), Self::UnknownPredicate { name } => write!(f, "unknown predicate #{name}"), Self::Other { msg } => f.write_str(msg), } } } impl Error for InvalidPredicateError {} #[repr(C)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] // warns about never being constructed but it's constructed by C code // and written into a mutable reference #[allow(dead_code)] enum PredicateStepKind { Done = 0, Capture = 1, String = 2, } #[repr(C)] #[derive(Debug, Clone, Copy)] struct PredicateStep { kind: PredicateStepKind, value_id: u32, } impl TryFrom for PredicateArg { type Error = (); fn try_from(step: PredicateStep) -> Result { match step.kind { PredicateStepKind::String => Ok(PredicateArg::String(QueryStr(step.value_id))), PredicateStepKind::Capture => Ok(PredicateArg::Capture(Capture(step.value_id))), PredicateStepKind::Done => Err(()), } } } extern "C" { /// Get all of the predicates for the given pattern in the query. The /// predicates are represented as a single array of steps. There are three /// types of steps in this array, which correspond to the three legal values /// for the `type` field: /// /// - `TSQueryPredicateStepTypeCapture` - Steps with this type represent names of captures. /// Their `value_id` can be used with the `ts_query_capture_name_for_id` function to /// obtain the name of the capture. /// - `TSQueryPredicateStepTypeString` - Steps with this type represent literal strings. /// Their `value_id` can be used with the `ts_query_string_value_for_id` function to /// obtain their string value. /// - `TSQueryPredicateStepTypeDone` - Steps with this type are *sentinels* that represent the /// end of an individual predicate. If a pattern has two predicates, then there will be two /// steps with this `type` in the array. 
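    ///
    /// As an illustration, a pattern like `((identifier) @id (#eq? @id "main"))`
    /// produces four steps: `String("eq?")`, `Capture(@id)`, `String("main")`, and a
    /// trailing `Done` sentinel. The safe wrapper in this module splits the step
    /// array on `Done` (see `Query::parse_pattern_predicates` above) to recover the
    /// individual predicates.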
fn ts_query_predicates_for_pattern( query: NonNull, pattern_index: u32, step_count: &mut u32, ) -> *const PredicateStep; } hx-0.3.0+20250717/bindings/src/query/property.rs000066400000000000000000000011271503625671400210310ustar00rootroot00000000000000use crate::query::predicate::{InvalidPredicateError, Predicate}; use crate::query::QueryStr; #[derive(Debug)] pub struct QueryProperty { pub key: QueryStr, pub val: Option, } impl QueryProperty { pub fn parse(predicate: &Predicate) -> Result { predicate.check_min_arg_count(1)?; predicate.check_max_arg_count(2)?; let key = predicate.query_str_arg(0)?; let val = (predicate.num_args() == 2) .then(|| predicate.query_str_arg(1)) .transpose()?; Ok(QueryProperty { key, val }) } } hx-0.3.0+20250717/bindings/src/query_cursor.rs000066400000000000000000000274151503625671400205520ustar00rootroot00000000000000use core::slice; use std::cell::UnsafeCell; use std::marker::PhantomData; use std::mem; use std::ops::Range; use std::ptr::{self, NonNull}; use crate::node::NodeRaw; use crate::query::{Capture, Pattern, Query, QueryData}; use crate::{Input, IntoInput, Node, Tree}; enum QueryCursorData {} thread_local! { static CURSOR_CACHE: UnsafeCell> = UnsafeCell::new(Vec::with_capacity(8)); } /// SAFETY: must not call itself recursively unsafe fn with_cache(f: impl FnOnce(&mut Vec) -> T) -> T { CURSOR_CACHE.with(|cache| f(&mut *cache.get())) } pub struct QueryCursor<'a, 'tree, I: Input> { query: &'a Query, ptr: NonNull, tree: PhantomData<&'tree Tree>, input: I, } impl<'tree, I: Input> QueryCursor<'_, 'tree, I> { pub fn next_match(&mut self) -> Option> { let mut query_match = TSQueryMatch { id: 0, pattern_index: 0, capture_count: 0, captures: ptr::null(), }; loop { let success = unsafe { ts_query_cursor_next_match(self.ptr.as_ptr(), &mut query_match) }; if !success { return None; } let matched_nodes = unsafe { slice::from_raw_parts( query_match.captures.cast(), query_match.capture_count as usize, ) }; let satisfies_predicates = self .query .pattern_text_predicates(query_match.pattern_index) .iter() .all(|predicate| predicate.satisfied(&mut self.input, matched_nodes, self.query)); if satisfies_predicates { let res = QueryMatch { id: query_match.id, pattern: Pattern(query_match.pattern_index as u32), matched_nodes, query_cursor: unsafe { self.ptr.as_mut() }, _tree: PhantomData, }; return Some(res); } } } pub fn next_matched_node(&mut self) -> Option<(QueryMatch<'_, 'tree>, MatchedNodeIdx)> { let mut query_match = TSQueryMatch { id: 0, pattern_index: 0, capture_count: 0, captures: ptr::null(), }; let mut capture_idx = 0; loop { let success = unsafe { ts_query_cursor_next_capture(self.ptr.as_ptr(), &mut query_match, &mut capture_idx) }; if !success { return None; } let matched_nodes = unsafe { slice::from_raw_parts( query_match.captures.cast(), query_match.capture_count as usize, ) }; let satisfies_predicates = self .query .pattern_text_predicates(query_match.pattern_index) .iter() .all(|predicate| predicate.satisfied(&mut self.input, matched_nodes, self.query)); if satisfies_predicates { let res = QueryMatch { id: query_match.id, pattern: Pattern(query_match.pattern_index as u32), matched_nodes, query_cursor: unsafe { self.ptr.as_mut() }, _tree: PhantomData, }; return Some((res, capture_idx)); } else { unsafe { ts_query_cursor_remove_match(self.ptr.as_ptr(), query_match.id); } } } } pub fn set_byte_range(&mut self, range: Range) { unsafe { ts_query_cursor_set_byte_range(self.ptr.as_ptr(), range.start, range.end); } } pub fn reuse(self) -> InactiveQueryCursor { let res 
= InactiveQueryCursor { ptr: self.ptr }; mem::forget(self); res } } impl Drop for QueryCursor<'_, '_, I> { fn drop(&mut self) { unsafe { with_cache(|cache| cache.push(InactiveQueryCursor { ptr: self.ptr })) } } } /// A query cursor that is not actively associated with a query pub struct InactiveQueryCursor { ptr: NonNull, } impl InactiveQueryCursor { #[must_use] pub fn new(range: Range, limit: u32) -> Self { let mut this = unsafe { with_cache(|cache| { cache.pop().unwrap_or_else(|| InactiveQueryCursor { ptr: NonNull::new_unchecked(ts_query_cursor_new()), }) }) }; this.set_byte_range(range); this.set_match_limit(limit); this } /// Return the maximum number of in-progress matches for this cursor. #[doc(alias = "ts_query_cursor_match_limit")] #[must_use] pub fn match_limit(&self) -> u32 { unsafe { ts_query_cursor_match_limit(self.ptr.as_ptr()) } } /// Set the maximum number of in-progress matches for this cursor. The /// limit must be > 0 and <= 65536. #[doc(alias = "ts_query_cursor_set_match_limit")] pub fn set_match_limit(&mut self, limit: u32) { unsafe { ts_query_cursor_set_match_limit(self.ptr.as_ptr(), limit); } } /// Check if, on its last execution, this cursor exceeded its maximum number /// of in-progress matches. #[doc(alias = "ts_query_cursor_did_exceed_match_limit")] #[must_use] pub fn did_exceed_match_limit(&self) -> bool { unsafe { ts_query_cursor_did_exceed_match_limit(self.ptr.as_ptr()) } } pub fn set_byte_range(&mut self, range: Range) { unsafe { ts_query_cursor_set_byte_range(self.ptr.as_ptr(), range.start, range.end); } } pub fn execute_query<'a, 'tree, I: IntoInput>( self, query: &'a Query, node: &Node<'tree>, input: I, ) -> QueryCursor<'a, 'tree, I::Input> { let ptr = self.ptr; unsafe { ts_query_cursor_exec(ptr.as_ptr(), query.raw.as_ref(), node.as_raw()) }; mem::forget(self); QueryCursor { query, ptr, tree: PhantomData, input: input.into_input(), } } } impl Default for InactiveQueryCursor { fn default() -> Self { Self::new(0..u32::MAX, u32::MAX) } } impl Drop for InactiveQueryCursor { fn drop(&mut self) { unsafe { ts_query_cursor_delete(self.ptr.as_ptr()) } } } pub type MatchedNodeIdx = u32; #[repr(C)] #[derive(Debug, Clone)] pub struct MatchedNode<'tree> { pub node: Node<'tree>, pub capture: Capture, } pub struct QueryMatch<'cursor, 'tree> { id: u32, pattern: Pattern, matched_nodes: &'cursor [MatchedNode<'tree>], query_cursor: &'cursor mut QueryCursorData, _tree: PhantomData<&'tree super::Tree>, } impl std::fmt::Debug for QueryMatch<'_, '_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("QueryMatch") .field("id", &self.id) .field("pattern", &self.pattern) .field("matched_nodes", &self.matched_nodes) .finish_non_exhaustive() } } impl<'tree> QueryMatch<'_, 'tree> { pub fn matched_nodes(&self) -> impl Iterator> { self.matched_nodes.iter() } pub fn nodes_for_capture(&self, capture: Capture) -> impl Iterator> { self.matched_nodes .iter() .filter(move |mat| mat.capture == capture) .map(|mat| &mat.node) } pub fn matched_node(&self, i: MatchedNodeIdx) -> &MatchedNode<'tree> { &self.matched_nodes[i as usize] } #[must_use] pub const fn id(&self) -> u32 { self.id } #[must_use] pub const fn pattern(&self) -> Pattern { self.pattern } #[doc(alias = "ts_query_cursor_remove_match")] /// removes this match from the cursor so that further captures /// from its cursor so that future captures that belong to this match /// are no longer returned by capture iterators pub fn remove(self) { unsafe { ts_query_cursor_remove_match(self.query_cursor, self.id); 
} } } #[repr(C)] #[derive(Debug)] struct TSQueryCapture { node: NodeRaw, index: u32, } #[repr(C)] #[derive(Debug)] struct TSQueryMatch { id: u32, pattern_index: u16, capture_count: u16, captures: *const TSQueryCapture, } extern "C" { /// Advance to the next capture of the currently running query. /// If there is a capture, write its match to `*match` and its index within /// the matche's capture list to `*capture_index`. Otherwise, return `false`. fn ts_query_cursor_next_capture( self_: *mut QueryCursorData, match_: &mut TSQueryMatch, capture_index: &mut u32, ) -> bool; /// Advance to the next match of the currently running query. /// /// If there is a match, write it to `*match` and return `true`. /// Otherwise, return `false`. fn ts_query_cursor_next_match(self_: *mut QueryCursorData, match_: &mut TSQueryMatch) -> bool; fn ts_query_cursor_remove_match(self_: *mut QueryCursorData, match_id: u32); /// Delete a query cursor, freeing all of the memory that it used fn ts_query_cursor_delete(self_: *mut QueryCursorData); /// Create a new cursor for executing a given query. /// The cursor stores the state that is needed to iteratively search /// for matches. To use the query cursor, first call [`ts_query_cursor_exec`] /// to start running a given query on a given syntax node. Then, there are /// two options for consuming the results of the query: /// /// 1. Repeatedly call [`ts_query_cursor_next_match`] to iterate over all of the /// *matches* in the order that they were found. Each match contains the /// index of the pattern that matched, and an array of captures. Because /// multiple patterns can match the same set of nodes, one match may contain /// captures that appear *before* some of the captures from a previous match. /// 2. Repeatedly call [`ts_query_cursor_next_capture`] to iterate over all of the /// individual *captures* in the order that they appear. This is useful if /// don't care about which pattern matched, and just want a single ordered /// sequence of captures. /// /// If you don't care about consuming all of the results, you can stop calling /// [`ts_query_cursor_next_match`] or [`ts_query_cursor_next_capture`] at any point. /// You can then start executing another query on another node by calling /// [`ts_query_cursor_exec`] again."] fn ts_query_cursor_new() -> *mut QueryCursorData; /// Start running a given query on a given node. fn ts_query_cursor_exec(self_: *mut QueryCursorData, query: &QueryData, node: NodeRaw); /// Manage the maximum number of in-progress matches allowed by this query /// cursor. /// /// Query cursors have an optional maximum capacity for storing lists of /// in-progress captures. If this capacity is exceeded, then the /// earliest-starting match will silently be dropped to make room for further /// matches. This maximum capacity is optional — by default, query cursors allow /// any number of pending matches, dynamically allocating new space for them as /// needed as the query is executed. fn ts_query_cursor_did_exceed_match_limit(self_: *const QueryCursorData) -> bool; fn ts_query_cursor_match_limit(self_: *const QueryCursorData) -> u32; fn ts_query_cursor_set_match_limit(self_: *mut QueryCursorData, limit: u32); /// Set the range of bytes or (row, column) positions in which the query /// will be executed. 
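    ///
    /// The safe wrappers are [`QueryCursor::set_byte_range`] and
    /// [`InactiveQueryCursor::set_byte_range`]. A minimal sketch of restricting a
    /// query to part of a document; the `query`, `tree`, and `src` bindings are
    /// assumptions, not part of this crate:
    ///
    /// ```ignore
    /// let mut cursor = InactiveQueryCursor::new(0..u32::MAX, u32::MAX)
    ///     .execute_query(&query, &tree.root_node(), src);
    /// cursor.set_byte_range(128..512); // restrict the query to this byte range
    /// while let Some(m) = cursor.next_match() {
    ///     // inspect m.matched_nodes() here
    /// }
    /// ```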
fn ts_query_cursor_set_byte_range(self_: *mut QueryCursorData, start_byte: u32, end_byte: u32); } hx-0.3.0+20250717/bindings/src/ropey.rs000066400000000000000000000036271503625671400171450ustar00rootroot00000000000000use std::ops; use regex_cursor::{Cursor, RopeyCursor}; use ropey::RopeSlice; use crate::{Input, IntoInput}; pub struct RopeInput<'a> { src: RopeSlice<'a>, cursor: regex_cursor::RopeyCursor<'a>, } impl<'a> RopeInput<'a> { pub fn new(src: RopeSlice<'a>) -> Self { RopeInput { src, cursor: regex_cursor::RopeyCursor::new(src), } } } impl<'a> IntoInput for RopeSlice<'a> { type Input = RopeInput<'a>; fn into_input(self) -> Self::Input { RopeInput { src: self, cursor: RopeyCursor::new(self), } } } impl<'a> Input for RopeInput<'a> { type Cursor = RopeyCursor<'a>; fn cursor_at(&mut self, offset: u32) -> &mut RopeyCursor<'a> { let offset = offset as usize; debug_assert!( offset <= self.src.len_bytes(), "parser offset out of bounds: {offset} > {}", self.src.len_bytes() ); // this cursor is optimized for contiguous reads which are by far the most common during parsing // very far jumps (like injections at the other end of the document) are handled // by starting a new cursor (new chunks iterator) if offset < self.cursor.offset() || offset - self.cursor.offset() > 4906 { self.cursor = regex_cursor::RopeyCursor::at(self.src, offset); } else { while self.cursor.offset() + self.cursor.chunk().len() <= offset { if !self.cursor.advance() { break; } } } &mut self.cursor } fn eq(&mut self, range1: ops::Range, range2: ops::Range) -> bool { let range1 = self .src .byte_slice(range1.start as usize..range1.end as usize); let range2 = self .src .byte_slice(range2.start as usize..range2.end as usize); range1 == range2 } } hx-0.3.0+20250717/bindings/src/tree.rs000066400000000000000000000052241503625671400167410ustar00rootroot00000000000000use std::fmt; use std::ptr::NonNull; use crate::node::{Node, NodeRaw}; use crate::{Point, TreeCursor}; // opaque pointers pub(super) enum SyntaxTreeData {} pub struct Tree { ptr: NonNull, } impl Tree { pub(super) unsafe fn from_raw(raw: NonNull) -> Tree { Tree { ptr: raw } } pub(super) fn as_raw(&self) -> NonNull { self.ptr } pub fn root_node(&self) -> Node<'_> { unsafe { Node::from_raw(ts_tree_root_node(self.ptr)).unwrap() } } pub fn edit(&mut self, edit: &InputEdit) { unsafe { ts_tree_edit(self.ptr, edit) } } pub fn walk(&self) -> TreeCursor<'_> { self.root_node().walk() } } impl fmt::Debug for Tree { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{{Tree {:?}}}", self.root_node()) } } impl Drop for Tree { fn drop(&mut self) { unsafe { ts_tree_delete(self.ptr) } } } impl Clone for Tree { fn clone(&self) -> Self { unsafe { Tree { ptr: ts_tree_copy(self.ptr), } } } } unsafe impl Send for Tree {} unsafe impl Sync for Tree {} #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct InputEdit { pub start_byte: u32, pub old_end_byte: u32, pub new_end_byte: u32, pub start_point: Point, pub old_end_point: Point, pub new_end_point: Point, } impl InputEdit { /// returns the offset between the old end of the edit and the new end of /// the edit. This offset needs to be added to every position that occurs /// after `self.old_end_byte` to may it to its old position /// /// This function assumes that the the source-file is smaller than 2GiB pub fn offset(&self) -> i32 { self.new_end_byte as i32 - self.old_end_byte as i32 } } extern "C" { /// Create a shallow copy of the syntax tree. This is very fast. 
You need to /// copy a syntax tree in order to use it on more than one thread at a time, /// as syntax trees are not thread safe. fn ts_tree_copy(self_: NonNull) -> NonNull; /// Delete the syntax tree, freeing all of the memory that it used. fn ts_tree_delete(self_: NonNull); /// Get the root node of the syntax tree. fn ts_tree_root_node<'tree>(self_: NonNull) -> NodeRaw; /// Edit the syntax tree to keep it in sync with source code that has been /// edited. /// /// You must describe the edit both in terms of byte offsets and in terms of /// row/column coordinates. fn ts_tree_edit(self_: NonNull, edit: &InputEdit); } hx-0.3.0+20250717/bindings/src/tree_cursor.rs000066400000000000000000000221201503625671400203300ustar00rootroot00000000000000use ::std::os::raw; use std::cell::Cell; use std::collections::VecDeque; use std::ffi::{c_char, CStr}; use std::marker::PhantomData; use std::{fmt, mem}; use crate::node::NodeRaw; use crate::{Node, Tree}; thread_local! { static CACHE: Cell> = const { Cell::new(None) }; } #[repr(C)] #[derive(Clone)] struct TreeCursorRaw { tree: *const raw::c_void, id: *const raw::c_void, context: [u32; 3usize], } #[repr(C)] struct TreeCursorGuard(TreeCursorRaw); impl Drop for TreeCursorGuard { fn drop(&mut self) { unsafe { ts_tree_cursor_delete(&mut self.0) } } } pub struct TreeCursor<'a> { inner: TreeCursorRaw, tree: PhantomData<&'a Tree>, } impl<'tree> TreeCursor<'tree> { pub(crate) fn new(node: &Node<'tree>) -> Self { Self { inner: match CACHE.take() { Some(guard) => unsafe { let mut cursor = guard.0.clone(); mem::forget(guard); ts_tree_cursor_reset(&mut cursor, node.as_raw()); cursor }, None => unsafe { ts_tree_cursor_new(node.as_raw()) }, }, tree: PhantomData, } } pub fn goto_parent(&mut self) -> bool { unsafe { ts_tree_cursor_goto_parent(&mut self.inner) } } pub fn goto_next_sibling(&mut self) -> bool { unsafe { ts_tree_cursor_goto_next_sibling(&mut self.inner) } } pub fn goto_previous_sibling(&mut self) -> bool { unsafe { ts_tree_cursor_goto_previous_sibling(&mut self.inner) } } pub fn goto_first_child(&mut self) -> bool { unsafe { ts_tree_cursor_goto_first_child(&mut self.inner) } } pub fn goto_last_child(&mut self) -> bool { unsafe { ts_tree_cursor_goto_last_child(&mut self.inner) } } pub fn goto_first_child_for_byte(&mut self, byte_idx: u32) -> Option { match unsafe { ts_tree_cursor_goto_first_child_for_byte(&mut self.inner, byte_idx) } { -1 => None, n => Some(n as u32), } } pub fn reset(&mut self, node: &Node<'tree>) { unsafe { ts_tree_cursor_reset(&mut self.inner, node.as_raw()) } } pub fn node(&self) -> Node<'tree> { unsafe { Node::from_raw(ts_tree_cursor_current_node(&self.inner)).unwrap_unchecked() } } pub fn field_name(&self) -> Option<&'tree str> { unsafe { let ptr = ts_tree_cursor_current_field_name(&self.inner); (!ptr.is_null()).then(|| CStr::from_ptr(ptr).to_str().unwrap()) } } } impl fmt::Debug for TreeCursorRaw { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("InactiveTreeCursor").finish_non_exhaustive() } } impl Drop for TreeCursor<'_> { fn drop(&mut self) { CACHE.set(Some(TreeCursorGuard(self.inner.clone()))) } } impl Clone for TreeCursor<'_> { fn clone(&self) -> Self { TreeCursor { inner: unsafe { ts_tree_cursor_copy(&self.inner) }, tree: PhantomData, } } } impl<'tree> IntoIterator for &'tree mut TreeCursor<'tree> { type Item = Node<'tree>; type IntoIter = TreeRecursiveWalker<'tree>; fn into_iter(self) -> Self::IntoIter { let mut queue = VecDeque::new(); let root = self.node(); queue.push_back(root.clone()); 
TreeRecursiveWalker { cursor: self, queue, root, } } } pub struct TreeRecursiveWalker<'tree> { cursor: &'tree mut TreeCursor<'tree>, queue: VecDeque>, root: Node<'tree>, } impl<'tree> Iterator for TreeRecursiveWalker<'tree> { type Item = Node<'tree>; fn next(&mut self) -> Option { let current = self.cursor.node(); if current != self.root && self.cursor.goto_next_sibling() { self.queue.push_back(current); return Some(self.cursor.node()); } while let Some(queued) = self.queue.pop_front() { self.cursor.reset(&queued); if !self.cursor.goto_first_child() { continue; } return Some(self.cursor.node()); } None } } extern "C" { /// Create a new tree cursor starting from the given node. /// /// A tree cursor allows you to walk a syntax tree more efficiently than is /// possible using the `TSNode` functions. It is a mutable object that is always /// on a certain syntax node, and can be moved imperatively to different nodes. /// /// Note that the given node is considered the root of the cursor, /// and the cursor cannot walk outside this node. fn ts_tree_cursor_new(node: NodeRaw) -> TreeCursorRaw; /// Delete a tree cursor, freeing all of the memory that it used. fn ts_tree_cursor_delete(self_: *mut TreeCursorRaw); /// Re-initialize a tree cursor to start at a different node. fn ts_tree_cursor_reset(self_: *mut TreeCursorRaw, node: NodeRaw); // /// Re-initialize a tree cursor to the same position as another cursor. // /// Unlike [`ts_tree_cursor_reset`], this will not lose parent information and // /// allows reusing already created cursors. // fn ts_tree_cursor_reset_to(dst: *mut TreeCursorRaw, src: *const TreeCursorRaw); /// Get the tree cursor's current node. fn ts_tree_cursor_current_node(self_: *const TreeCursorRaw) -> NodeRaw; // /// Get the field name of the tree cursor's current node. // /// This returns `NULL` if the current node doesn't have a field. // /// See also [`ts_node_child_by_field_name`]. // fn ts_tree_cursor_current_field_name(self_: *const TreeCursorRaw) -> *const raw::c_char; // /// Get the field id of the tree cursor's current node. // /// This returns zero if the current node doesn't have a field. // /// See also [`ts_node_child_by_field_id`], [`ts_language_field_id_for_name`]. // fn ts_tree_cursor_current_field_id(self_: *const TreeCursorRaw) -> TSFieldId; /// Move the cursor to the parent of its current node. /// This returns `true` if the cursor successfully moved, and returns `false` /// if there was no parent node (the cursor was already on the root node). fn ts_tree_cursor_goto_parent(self_: *mut TreeCursorRaw) -> bool; /// Move the cursor to the next sibling of its current node. /// This returns `true` if the cursor successfully moved, and returns `false` /// if there was no next sibling node. fn ts_tree_cursor_goto_next_sibling(self_: *mut TreeCursorRaw) -> bool; /// Move the cursor to the previous sibling of its current node. /// This returns `true` if the cursor successfully moved, and returns `false` if /// there was no previous sibling node. /// Note, that this function may be slower than /// [`ts_tree_cursor_goto_next_sibling`] due to how node positions are stored. In /// the worst case, this will need to iterate through all the children upto the /// previous sibling node to recalculate its position. fn ts_tree_cursor_goto_previous_sibling(self_: *mut TreeCursorRaw) -> bool; /// Move the cursor to the first child of its current node. /// This returns `true` if the cursor successfully moved, and returns `false` /// if there were no children. 
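    ///
    /// The safe wrapper is [`TreeCursor::goto_first_child`]. A minimal, illustrative
    /// sketch of a manual pre-order walk using the safe cursor API; the `tree`
    /// binding and the `visit` callback are assumptions:
    ///
    /// ```ignore
    /// let mut cursor = tree.walk();
    /// 'walk: loop {
    ///     visit(&cursor.node()); // hypothetical callback
    ///     if cursor.goto_first_child() || cursor.goto_next_sibling() {
    ///         continue;
    ///     }
    ///     // No child or sibling: climb until a sibling exists or the walk is done.
    ///     loop {
    ///         if !cursor.goto_parent() {
    ///             break 'walk;
    ///         }
    ///         if cursor.goto_next_sibling() {
    ///             continue 'walk;
    ///         }
    ///     }
    /// }
    /// ```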
fn ts_tree_cursor_goto_first_child(self_: *mut TreeCursorRaw) -> bool; /// Move the cursor to the last child of its current node. /// This returns `true` if the cursor successfully moved, and returns `false` if /// there were no children. /// Note that this function may be slower than [`ts_tree_cursor_goto_first_child`] /// because it needs to iterate through all the children to compute the child's /// position. fn ts_tree_cursor_goto_last_child(self_: *mut TreeCursorRaw) -> bool; /* /// Move the cursor to the node that is the nth descendant of /// the original node that the cursor was constructed with, where /// zero represents the original node itself. fn ts_tree_cursor_goto_descendant(self_: *mut TreeCursorRaw, goal_descendant_index: u32); /// Get the index of the cursor's current node out of all of the /// descendants of the original node that the cursor was constructed with. fn ts_tree_cursor_current_descendant_index(self_: *const TreeCursorRaw) -> u32; /// Get the depth of the cursor's current node relative to the original /// node that the cursor was constructed with. fn ts_tree_cursor_current_depth(self_: *const TreeCursorRaw) -> u32; */ /// Move the cursor to the first child of its current node that extends beyond /// the given byte offset or point. /// This returns the index of the child node if one was found, and returns -1 /// if no such child was found. fn ts_tree_cursor_goto_first_child_for_byte(self_: *mut TreeCursorRaw, goal_byte: u32) -> i64; fn ts_tree_cursor_copy(cursor: *const TreeCursorRaw) -> TreeCursorRaw; /// Get the field name of the tree cursor's curren tnode. /// /// This returns `NULL` if the current node doesn't have a field. See also /// `ts_node_child_by_field_name`. fn ts_tree_cursor_current_field_name(cursor: *const TreeCursorRaw) -> *const c_char; } hx-0.3.0+20250717/bindings/vendor.sh000077500000000000000000000004031503625671400164730ustar00rootroot00000000000000#!/usr/bin/env bash REMOTE=https://github.com/tree-sitter/tree-sitter.git BRANCH=v0.25.8 rm -rf vendor rm -rf tmp git clone --depth 1 --branch $BRANCH $REMOTE tmp mkdir vendor mv tmp/lib/src vendor mv tmp/lib/include vendor mv tmp/LICENSE vendor rm -rf tmp hx-0.3.0+20250717/bindings/vendor/000077500000000000000000000000001503625671400161375ustar00rootroot00000000000000hx-0.3.0+20250717/bindings/vendor/LICENSE000066400000000000000000000020751503625671400171500ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2018-2024 Max Brunsfeld Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
hx-0.3.0+20250717/bindings/vendor/include/000077500000000000000000000000001503625671400175625ustar00rootroot00000000000000hx-0.3.0+20250717/bindings/vendor/include/tree_sitter/000077500000000000000000000000001503625671400221135ustar00rootroot00000000000000hx-0.3.0+20250717/bindings/vendor/include/tree_sitter/api.h000066400000000000000000001345341503625671400230470ustar00rootroot00000000000000#ifndef TREE_SITTER_API_H_ #define TREE_SITTER_API_H_ #ifndef TREE_SITTER_HIDE_SYMBOLS #if defined(__GNUC__) || defined(__clang__) #pragma GCC visibility push(default) #endif #endif #include #include #include #ifdef __cplusplus extern "C" { #endif /****************************/ /* Section - ABI Versioning */ /****************************/ /** * The latest ABI version that is supported by the current version of the * library. When Languages are generated by the Tree-sitter CLI, they are * assigned an ABI version number that corresponds to the current CLI version. * The Tree-sitter library is generally backwards-compatible with languages * generated using older CLI versions, but is not forwards-compatible. */ #define TREE_SITTER_LANGUAGE_VERSION 15 /** * The earliest ABI version that is supported by the current version of the * library. */ #define TREE_SITTER_MIN_COMPATIBLE_LANGUAGE_VERSION 13 /*******************/ /* Section - Types */ /*******************/ typedef uint16_t TSStateId; typedef uint16_t TSSymbol; typedef uint16_t TSFieldId; typedef struct TSLanguage TSLanguage; typedef struct TSParser TSParser; typedef struct TSTree TSTree; typedef struct TSQuery TSQuery; typedef struct TSQueryCursor TSQueryCursor; typedef struct TSLookaheadIterator TSLookaheadIterator; // This function signature reads one code point from the given string, // returning the number of bytes consumed. It should write the code point // to the `code_point` pointer, or write -1 if the input is invalid. 
typedef uint32_t (*DecodeFunction)( const uint8_t *string, uint32_t length, int32_t *code_point ); typedef enum TSInputEncoding { TSInputEncodingUTF8, TSInputEncodingUTF16LE, TSInputEncodingUTF16BE, TSInputEncodingCustom } TSInputEncoding; typedef enum TSSymbolType { TSSymbolTypeRegular, TSSymbolTypeAnonymous, TSSymbolTypeSupertype, TSSymbolTypeAuxiliary, } TSSymbolType; typedef struct TSPoint { uint32_t row; uint32_t column; } TSPoint; typedef struct TSRange { TSPoint start_point; TSPoint end_point; uint32_t start_byte; uint32_t end_byte; } TSRange; typedef struct TSInput { void *payload; const char *(*read)(void *payload, uint32_t byte_index, TSPoint position, uint32_t *bytes_read); TSInputEncoding encoding; DecodeFunction decode; } TSInput; typedef struct TSParseState { void *payload; uint32_t current_byte_offset; bool has_error; } TSParseState; typedef struct TSParseOptions { void *payload; bool (*progress_callback)(TSParseState *state); } TSParseOptions; typedef enum TSLogType { TSLogTypeParse, TSLogTypeLex, } TSLogType; typedef struct TSLogger { void *payload; void (*log)(void *payload, TSLogType log_type, const char *buffer); } TSLogger; typedef struct TSInputEdit { uint32_t start_byte; uint32_t old_end_byte; uint32_t new_end_byte; TSPoint start_point; TSPoint old_end_point; TSPoint new_end_point; } TSInputEdit; typedef struct TSNode { uint32_t context[4]; const void *id; const TSTree *tree; } TSNode; typedef struct TSTreeCursor { const void *tree; const void *id; uint32_t context[3]; } TSTreeCursor; typedef struct TSQueryCapture { TSNode node; uint32_t index; } TSQueryCapture; typedef enum TSQuantifier { TSQuantifierZero = 0, // must match the array initialization value TSQuantifierZeroOrOne, TSQuantifierZeroOrMore, TSQuantifierOne, TSQuantifierOneOrMore, } TSQuantifier; typedef struct TSQueryMatch { uint32_t id; uint16_t pattern_index; uint16_t capture_count; const TSQueryCapture *captures; } TSQueryMatch; typedef enum TSQueryPredicateStepType { TSQueryPredicateStepTypeDone, TSQueryPredicateStepTypeCapture, TSQueryPredicateStepTypeString, } TSQueryPredicateStepType; typedef struct TSQueryPredicateStep { TSQueryPredicateStepType type; uint32_t value_id; } TSQueryPredicateStep; typedef enum TSQueryError { TSQueryErrorNone = 0, TSQueryErrorSyntax, TSQueryErrorNodeType, TSQueryErrorField, TSQueryErrorCapture, TSQueryErrorStructure, TSQueryErrorLanguage, } TSQueryError; typedef struct TSQueryCursorState { void *payload; uint32_t current_byte_offset; } TSQueryCursorState; typedef struct TSQueryCursorOptions { void *payload; bool (*progress_callback)(TSQueryCursorState *state); } TSQueryCursorOptions; /** * The metadata associated with a language. * * Currently, this metadata can be used to check the [Semantic Version](https://semver.org/) * of the language. This version information should be used to signal if a given parser might * be incompatible with existing queries when upgrading between major versions, or minor versions * if it's in zerover. */ typedef struct TSLanguageMetadata { uint8_t major_version; uint8_t minor_version; uint8_t patch_version; } TSLanguageMetadata; /********************/ /* Section - Parser */ /********************/ /** * Create a new parser. */ TSParser *ts_parser_new(void); /** * Delete the parser, freeing all of the memory that it used. */ void ts_parser_delete(TSParser *self); /** * Get the parser's current language. */ const TSLanguage *ts_parser_language(const TSParser *self); /** * Set the language that the parser should use for parsing. 
* * Returns a boolean indicating whether or not the language was successfully * assigned. True means assignment succeeded. False means there was a version * mismatch: the language was generated with an incompatible version of the * Tree-sitter CLI. Check the language's ABI version using [`ts_language_abi_version`] * and compare it to this library's [`TREE_SITTER_LANGUAGE_VERSION`] and * [`TREE_SITTER_MIN_COMPATIBLE_LANGUAGE_VERSION`] constants. */ bool ts_parser_set_language(TSParser *self, const TSLanguage *language); /** * Set the ranges of text that the parser should include when parsing. * * By default, the parser will always include entire documents. This function * allows you to parse only a *portion* of a document but still return a syntax * tree whose ranges match up with the document as a whole. You can also pass * multiple disjoint ranges. * * The second and third parameters specify the location and length of an array * of ranges. The parser does *not* take ownership of these ranges; it copies * the data, so it doesn't matter how these ranges are allocated. * * If `count` is zero, then the entire document will be parsed. Otherwise, * the given ranges must be ordered from earliest to latest in the document, * and they must not overlap. That is, the following must hold for all: * * `i < count - 1`: `ranges[i].end_byte <= ranges[i + 1].start_byte` * * If this requirement is not satisfied, the operation will fail, the ranges * will not be assigned, and this function will return `false`. On success, * this function returns `true` */ bool ts_parser_set_included_ranges( TSParser *self, const TSRange *ranges, uint32_t count ); /** * Get the ranges of text that the parser will include when parsing. * * The returned pointer is owned by the parser. The caller should not free it * or write to it. The length of the array will be written to the given * `count` pointer. */ const TSRange *ts_parser_included_ranges( const TSParser *self, uint32_t *count ); /** * Use the parser to parse some source code and create a syntax tree. * * If you are parsing this document for the first time, pass `NULL` for the * `old_tree` parameter. Otherwise, if you have already parsed an earlier * version of this document and the document has since been edited, pass the * previous syntax tree so that the unchanged parts of it can be reused. * This will save time and memory. For this to work correctly, you must have * already edited the old syntax tree using the [`ts_tree_edit`] function in a * way that exactly matches the source code changes. * * The [`TSInput`] parameter lets you specify how to read the text. It has the * following three fields: * 1. [`read`]: A function to retrieve a chunk of text at a given byte offset * and (row, column) position. The function should return a pointer to the * text and write its length to the [`bytes_read`] pointer. The parser does * not take ownership of this buffer; it just borrows it until it has * finished reading it. The function should write a zero value to the * [`bytes_read`] pointer to indicate the end of the document. * 2. [`payload`]: An arbitrary pointer that will be passed to each invocation * of the [`read`] function. * 3. [`encoding`]: An indication of how the text is encoded. Either * `TSInputEncodingUTF8` or `TSInputEncodingUTF16`. * * This function returns a syntax tree on success, and `NULL` on failure. There * are four possible reasons for failure: * 1. The parser does not have a language assigned. Check for this using the [`ts_parser_language`] function. * 2. 
Parsing was cancelled due to a timeout that was set by an earlier call to * the [`ts_parser_set_timeout_micros`] function. You can resume parsing from * where the parser left out by calling [`ts_parser_parse`] again with the * same arguments. Or you can start parsing from scratch by first calling * [`ts_parser_reset`]. * 3. Parsing was cancelled using a cancellation flag that was set by an * earlier call to [`ts_parser_set_cancellation_flag`]. You can resume parsing * from where the parser left out by calling [`ts_parser_parse`] again with * the same arguments. * 4. Parsing was cancelled due to the progress callback returning true. This callback * is passed in [`ts_parser_parse_with_options`] inside the [`TSParseOptions`] struct. * * [`read`]: TSInput::read * [`payload`]: TSInput::payload * [`encoding`]: TSInput::encoding * [`bytes_read`]: TSInput::read */ TSTree *ts_parser_parse( TSParser *self, const TSTree *old_tree, TSInput input ); /** * Use the parser to parse some source code and create a syntax tree, with some options. * * See [`ts_parser_parse`] for more details. * * See [`TSParseOptions`] for more details on the options. */ TSTree* ts_parser_parse_with_options( TSParser *self, const TSTree *old_tree, TSInput input, TSParseOptions parse_options ); /** * Use the parser to parse some source code stored in one contiguous buffer. * The first two parameters are the same as in the [`ts_parser_parse`] function * above. The second two parameters indicate the location of the buffer and its * length in bytes. */ TSTree *ts_parser_parse_string( TSParser *self, const TSTree *old_tree, const char *string, uint32_t length ); /** * Use the parser to parse some source code stored in one contiguous buffer with * a given encoding. The first four parameters work the same as in the * [`ts_parser_parse_string`] method above. The final parameter indicates whether * the text is encoded as UTF8 or UTF16. */ TSTree *ts_parser_parse_string_encoding( TSParser *self, const TSTree *old_tree, const char *string, uint32_t length, TSInputEncoding encoding ); /** * Instruct the parser to start the next parse from the beginning. * * If the parser previously failed because of a timeout or a cancellation, then * by default, it will resume where it left off on the next call to * [`ts_parser_parse`] or other parsing functions. If you don't want to resume, * and instead intend to use this parser to parse some other document, you must * call [`ts_parser_reset`] first. */ void ts_parser_reset(TSParser *self); /** * @deprecated use [`ts_parser_parse_with_options`] and pass in a callback instead, this will be removed in 0.26. * * Set the maximum duration in microseconds that parsing should be allowed to * take before halting. * * If parsing takes longer than this, it will halt early, returning NULL. * See [`ts_parser_parse`] for more information. */ void ts_parser_set_timeout_micros(TSParser *self, uint64_t timeout_micros); /** * @deprecated use [`ts_parser_parse_with_options`] and pass in a callback instead, this will be removed in 0.26. * * Get the duration in microseconds that parsing is allowed to take. */ uint64_t ts_parser_timeout_micros(const TSParser *self); /** * @deprecated use [`ts_parser_parse_with_options`] and pass in a callback instead, this will be removed in 0.26. * * Set the parser's current cancellation flag pointer. * * If a non-null pointer is assigned, then the parser will periodically read * from this pointer during parsing. If it reads a non-zero value, it will * halt early, returning NULL. 
See [`ts_parser_parse`] for more information. */ void ts_parser_set_cancellation_flag(TSParser *self, const size_t *flag); /** * @deprecated use [`ts_parser_parse_with_options`] and pass in a callback instead, this will be removed in 0.26. * * Get the parser's current cancellation flag pointer. */ const size_t *ts_parser_cancellation_flag(const TSParser *self); /** * Set the logger that a parser should use during parsing. * * The parser does not take ownership over the logger payload. If a logger was * previously assigned, the caller is responsible for releasing any memory * owned by the previous logger. */ void ts_parser_set_logger(TSParser *self, TSLogger logger); /** * Get the parser's current logger. */ TSLogger ts_parser_logger(const TSParser *self); /** * Set the file descriptor to which the parser should write debugging graphs * during parsing. The graphs are formatted in the DOT language. You may want * to pipe these graphs directly to a `dot(1)` process in order to generate * SVG output. You can turn off this logging by passing a negative number. */ void ts_parser_print_dot_graphs(TSParser *self, int fd); /******************/ /* Section - Tree */ /******************/ /** * Create a shallow copy of the syntax tree. This is very fast. * * You need to copy a syntax tree in order to use it on more than one thread at * a time, as syntax trees are not thread safe. */ TSTree *ts_tree_copy(const TSTree *self); /** * Delete the syntax tree, freeing all of the memory that it used. */ void ts_tree_delete(TSTree *self); /** * Get the root node of the syntax tree. */ TSNode ts_tree_root_node(const TSTree *self); /** * Get the root node of the syntax tree, but with its position * shifted forward by the given offset. */ TSNode ts_tree_root_node_with_offset( const TSTree *self, uint32_t offset_bytes, TSPoint offset_extent ); /** * Get the language that was used to parse the syntax tree. */ const TSLanguage *ts_tree_language(const TSTree *self); /** * Get the array of included ranges that was used to parse the syntax tree. * * The returned pointer must be freed by the caller. */ TSRange *ts_tree_included_ranges(const TSTree *self, uint32_t *length); /** * Edit the syntax tree to keep it in sync with source code that has been * edited. * * You must describe the edit both in terms of byte offsets and in terms of * (row, column) coordinates. */ void ts_tree_edit(TSTree *self, const TSInputEdit *edit); /** * Compare an old edited syntax tree to a new syntax tree representing the same * document, returning an array of ranges whose syntactic structure has changed. * * For this to work correctly, the old syntax tree must have been edited such * that its ranges match up to the new tree. Generally, you'll want to call * this function right after calling one of the [`ts_parser_parse`] functions. * You need to pass the old tree that was passed to parse, as well as the new * tree that was returned from that function. * * The returned ranges indicate areas where the hierarchical structure of syntax * nodes (from root to leaf) has changed between the old and new trees. Characters * outside these ranges have identical ancestor nodes in both trees. * * Note that the returned ranges may be slightly larger than the exact changed areas, * but Tree-sitter attempts to make them as small as possible. * * The returned array is allocated using `malloc` and the caller is responsible * for freeing it using `free`. The length of the array will be written to the * given `length` pointer. 
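 *
 * As an illustrative sketch (not part of the upstream header), a typical
 * incremental-update sequence looks roughly like this; `parser`, `old_tree`,
 * `edit`, and `input` are assumed to exist already:
 *
 *     ts_tree_edit(old_tree, &edit);
 *     TSTree *new_tree = ts_parser_parse(parser, old_tree, input);
 *     uint32_t count = 0;
 *     TSRange *ranges = ts_tree_get_changed_ranges(old_tree, new_tree, &count);
 *     // ... refresh only the returned ranges, then ...
 *     free(ranges);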
*/ TSRange *ts_tree_get_changed_ranges( const TSTree *old_tree, const TSTree *new_tree, uint32_t *length ); /** * Write a DOT graph describing the syntax tree to the given file. */ void ts_tree_print_dot_graph(const TSTree *self, int file_descriptor); /******************/ /* Section - Node */ /******************/ /** * Get the node's type as a null-terminated string. */ const char *ts_node_type(TSNode self); /** * Get the node's type as a numerical id. */ TSSymbol ts_node_symbol(TSNode self); /** * Get the node's language. */ const TSLanguage *ts_node_language(TSNode self); /** * Get the node's type as it appears in the grammar ignoring aliases as a * null-terminated string. */ const char *ts_node_grammar_type(TSNode self); /** * Get the node's type as a numerical id as it appears in the grammar ignoring * aliases. This should be used in [`ts_language_next_state`] instead of * [`ts_node_symbol`]. */ TSSymbol ts_node_grammar_symbol(TSNode self); /** * Get the node's start byte. */ uint32_t ts_node_start_byte(TSNode self); /** * Get the node's start position in terms of rows and columns. */ TSPoint ts_node_start_point(TSNode self); /** * Get the node's end byte. */ uint32_t ts_node_end_byte(TSNode self); /** * Get the node's end position in terms of rows and columns. */ TSPoint ts_node_end_point(TSNode self); /** * Get an S-expression representing the node as a string. * * This string is allocated with `malloc` and the caller is responsible for * freeing it using `free`. */ char *ts_node_string(TSNode self); /** * Check if the node is null. Functions like [`ts_node_child`] and * [`ts_node_next_sibling`] will return a null node to indicate that no such node * was found. */ bool ts_node_is_null(TSNode self); /** * Check if the node is *named*. Named nodes correspond to named rules in the * grammar, whereas *anonymous* nodes correspond to string literals in the * grammar. */ bool ts_node_is_named(TSNode self); /** * Check if the node is *missing*. Missing nodes are inserted by the parser in * order to recover from certain kinds of syntax errors. */ bool ts_node_is_missing(TSNode self); /** * Check if the node is *extra*. Extra nodes represent things like comments, * which are not required the grammar, but can appear anywhere. */ bool ts_node_is_extra(TSNode self); /** * Check if a syntax node has been edited. */ bool ts_node_has_changes(TSNode self); /** * Check if the node is a syntax error or contains any syntax errors. */ bool ts_node_has_error(TSNode self); /** * Check if the node is a syntax error. */ bool ts_node_is_error(TSNode self); /** * Get this node's parse state. */ TSStateId ts_node_parse_state(TSNode self); /** * Get the parse state after this node. */ TSStateId ts_node_next_parse_state(TSNode self); /** * Get the node's immediate parent. * Prefer [`ts_node_child_with_descendant`] for * iterating over the node's ancestors. */ TSNode ts_node_parent(TSNode self); /** * Get the node that contains `descendant`. * * Note that this can return `descendant` itself. */ TSNode ts_node_child_with_descendant(TSNode self, TSNode descendant); /** * Get the node's child at the given index, where zero represents the first * child. */ TSNode ts_node_child(TSNode self, uint32_t child_index); /** * Get the field name for node's child at the given index, where zero represents * the first child. Returns NULL, if no field is found. 
*/ const char *ts_node_field_name_for_child(TSNode self, uint32_t child_index); /** * Get the field name for node's named child at the given index, where zero * represents the first named child. Returns NULL, if no field is found. */ const char *ts_node_field_name_for_named_child(TSNode self, uint32_t named_child_index); /** * Get the node's number of children. */ uint32_t ts_node_child_count(TSNode self); /** * Get the node's *named* child at the given index. * * See also [`ts_node_is_named`]. */ TSNode ts_node_named_child(TSNode self, uint32_t child_index); /** * Get the node's number of *named* children. * * See also [`ts_node_is_named`]. */ uint32_t ts_node_named_child_count(TSNode self); /** * Get the node's child with the given field name. */ TSNode ts_node_child_by_field_name( TSNode self, const char *name, uint32_t name_length ); /** * Get the node's child with the given numerical field id. * * You can convert a field name to an id using the * [`ts_language_field_id_for_name`] function. */ TSNode ts_node_child_by_field_id(TSNode self, TSFieldId field_id); /** * Get the node's next / previous sibling. */ TSNode ts_node_next_sibling(TSNode self); TSNode ts_node_prev_sibling(TSNode self); /** * Get the node's next / previous *named* sibling. */ TSNode ts_node_next_named_sibling(TSNode self); TSNode ts_node_prev_named_sibling(TSNode self); /** * Get the node's first child that contains or starts after the given byte offset. */ TSNode ts_node_first_child_for_byte(TSNode self, uint32_t byte); /** * Get the node's first named child that contains or starts after the given byte offset. */ TSNode ts_node_first_named_child_for_byte(TSNode self, uint32_t byte); /** * Get the node's number of descendants, including one for the node itself. */ uint32_t ts_node_descendant_count(TSNode self); /** * Get the smallest node within this node that spans the given range of bytes * or (row, column) positions. */ TSNode ts_node_descendant_for_byte_range(TSNode self, uint32_t start, uint32_t end); TSNode ts_node_descendant_for_point_range(TSNode self, TSPoint start, TSPoint end); /** * Get the smallest named node within this node that spans the given range of * bytes or (row, column) positions. */ TSNode ts_node_named_descendant_for_byte_range(TSNode self, uint32_t start, uint32_t end); TSNode ts_node_named_descendant_for_point_range(TSNode self, TSPoint start, TSPoint end); /** * Edit the node to keep it in-sync with source code that has been edited. * * This function is only rarely needed. When you edit a syntax tree with the * [`ts_tree_edit`] function, all of the nodes that you retrieve from the tree * afterward will already reflect the edit. You only need to use [`ts_node_edit`] * when you have a [`TSNode`] instance that you want to keep and continue to use * after an edit. */ void ts_node_edit(TSNode *self, const TSInputEdit *edit); /** * Check if two nodes are identical. */ bool ts_node_eq(TSNode self, TSNode other); /************************/ /* Section - TreeCursor */ /************************/ /** * Create a new tree cursor starting from the given node. * * A tree cursor allows you to walk a syntax tree more efficiently than is * possible using the [`TSNode`] functions. It is a mutable object that is always * on a certain syntax node, and can be moved imperatively to different nodes. * * Note that the given node is considered the root of the cursor, * and the cursor cannot walk outside this node. 
*/ TSTreeCursor ts_tree_cursor_new(TSNode node); /** * Delete a tree cursor, freeing all of the memory that it used. */ void ts_tree_cursor_delete(TSTreeCursor *self); /** * Re-initialize a tree cursor to start at the original node that the cursor was * constructed with. */ void ts_tree_cursor_reset(TSTreeCursor *self, TSNode node); /** * Re-initialize a tree cursor to the same position as another cursor. * * Unlike [`ts_tree_cursor_reset`], this will not lose parent information and * allows reusing already created cursors. */ void ts_tree_cursor_reset_to(TSTreeCursor *dst, const TSTreeCursor *src); /** * Get the tree cursor's current node. */ TSNode ts_tree_cursor_current_node(const TSTreeCursor *self); /** * Get the field name of the tree cursor's current node. * * This returns `NULL` if the current node doesn't have a field. * See also [`ts_node_child_by_field_name`]. */ const char *ts_tree_cursor_current_field_name(const TSTreeCursor *self); /** * Get the field id of the tree cursor's current node. * * This returns zero if the current node doesn't have a field. * See also [`ts_node_child_by_field_id`], [`ts_language_field_id_for_name`]. */ TSFieldId ts_tree_cursor_current_field_id(const TSTreeCursor *self); /** * Move the cursor to the parent of its current node. * * This returns `true` if the cursor successfully moved, and returns `false` * if there was no parent node (the cursor was already on the root node). * * Note that the node the cursor was constructed with is considered the root * of the cursor, and the cursor cannot walk outside this node. */ bool ts_tree_cursor_goto_parent(TSTreeCursor *self); /** * Move the cursor to the next sibling of its current node. * * This returns `true` if the cursor successfully moved, and returns `false` * if there was no next sibling node. * * Note that the node the cursor was constructed with is considered the root * of the cursor, and the cursor cannot walk outside this node. */ bool ts_tree_cursor_goto_next_sibling(TSTreeCursor *self); /** * Move the cursor to the previous sibling of its current node. * * This returns `true` if the cursor successfully moved, and returns `false` if * there was no previous sibling node. * * Note, that this function may be slower than * [`ts_tree_cursor_goto_next_sibling`] due to how node positions are stored. In * the worst case, this will need to iterate through all the children up to the * previous sibling node to recalculate its position. Also note that the node the cursor * was constructed with is considered the root of the cursor, and the cursor cannot * walk outside this node. */ bool ts_tree_cursor_goto_previous_sibling(TSTreeCursor *self); /** * Move the cursor to the first child of its current node. * * This returns `true` if the cursor successfully moved, and returns `false` * if there were no children. */ bool ts_tree_cursor_goto_first_child(TSTreeCursor *self); /** * Move the cursor to the last child of its current node. * * This returns `true` if the cursor successfully moved, and returns `false` if * there were no children. * * Note that this function may be slower than [`ts_tree_cursor_goto_first_child`] * because it needs to iterate through all the children to compute the child's * position. */ bool ts_tree_cursor_goto_last_child(TSTreeCursor *self); /** * Move the cursor to the node that is the nth descendant of * the original node that the cursor was constructed with, where * zero represents the original node itself. 
*/ void ts_tree_cursor_goto_descendant(TSTreeCursor *self, uint32_t goal_descendant_index); /** * Get the index of the cursor's current node out of all of the * descendants of the original node that the cursor was constructed with. */ uint32_t ts_tree_cursor_current_descendant_index(const TSTreeCursor *self); /** * Get the depth of the cursor's current node relative to the original * node that the cursor was constructed with. */ uint32_t ts_tree_cursor_current_depth(const TSTreeCursor *self); /** * Move the cursor to the first child of its current node that contains or starts after * the given byte offset or point. * * This returns the index of the child node if one was found, and returns -1 * if no such child was found. */ int64_t ts_tree_cursor_goto_first_child_for_byte(TSTreeCursor *self, uint32_t goal_byte); int64_t ts_tree_cursor_goto_first_child_for_point(TSTreeCursor *self, TSPoint goal_point); TSTreeCursor ts_tree_cursor_copy(const TSTreeCursor *cursor); /*******************/ /* Section - Query */ /*******************/ /** * Create a new query from a string containing one or more S-expression * patterns. The query is associated with a particular language, and can * only be run on syntax nodes parsed with that language. * * If all of the given patterns are valid, this returns a [`TSQuery`]. * If a pattern is invalid, this returns `NULL`, and provides two pieces * of information about the problem: * 1. The byte offset of the error is written to the `error_offset` parameter. * 2. The type of error is written to the `error_type` parameter. */ TSQuery *ts_query_new( const TSLanguage *language, const char *source, uint32_t source_len, uint32_t *error_offset, TSQueryError *error_type ); /** * Delete a query, freeing all of the memory that it used. */ void ts_query_delete(TSQuery *self); /** * Get the number of patterns, captures, or string literals in the query. */ uint32_t ts_query_pattern_count(const TSQuery *self); uint32_t ts_query_capture_count(const TSQuery *self); uint32_t ts_query_string_count(const TSQuery *self); /** * Get the byte offset where the given pattern starts in the query's source. * * This can be useful when combining queries by concatenating their source * code strings. */ uint32_t ts_query_start_byte_for_pattern(const TSQuery *self, uint32_t pattern_index); /** * Get the byte offset where the given pattern ends in the query's source. * * This can be useful when combining queries by concatenating their source * code strings. */ uint32_t ts_query_end_byte_for_pattern(const TSQuery *self, uint32_t pattern_index); /** * Get all of the predicates for the given pattern in the query. * * The predicates are represented as a single array of steps. There are three * types of steps in this array, which correspond to the three legal values for * the `type` field: * - `TSQueryPredicateStepTypeCapture` - Steps with this type represent names * of captures. Their `value_id` can be used with the * [`ts_query_capture_name_for_id`] function to obtain the name of the capture. * - `TSQueryPredicateStepTypeString` - Steps with this type represent literal * strings. Their `value_id` can be used with the * [`ts_query_string_value_for_id`] function to obtain their string value. * - `TSQueryPredicateStepTypeDone` - Steps with this type are *sentinels* * that represent the end of an individual predicate. If a pattern has two * predicates, then there will be two steps with this `type` in the array. 
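 *
 * As an illustrative sketch (not part of the upstream header), the predicate
 * steps of a pattern can be walked roughly like this:
 *
 *     uint32_t step_count = 0;
 *     const TSQueryPredicateStep *steps =
 *       ts_query_predicates_for_pattern(query, pattern_index, &step_count);
 *     for (uint32_t i = 0; i < step_count; i++) {
 *       switch (steps[i].type) {
 *         case TSQueryPredicateStepTypeCapture: break; // resolve name via ts_query_capture_name_for_id
 *         case TSQueryPredicateStepTypeString:  break; // resolve text via ts_query_string_value_for_id
 *         case TSQueryPredicateStepTypeDone:    break; // end of one predicate
 *       }
 *     }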
*/ const TSQueryPredicateStep *ts_query_predicates_for_pattern( const TSQuery *self, uint32_t pattern_index, uint32_t *step_count ); /* * Check if the given pattern in the query has a single root node. */ bool ts_query_is_pattern_rooted(const TSQuery *self, uint32_t pattern_index); /* * Check if the given pattern in the query is 'non local'. * * A non-local pattern has multiple root nodes and can match within a * repeating sequence of nodes, as specified by the grammar. Non-local * patterns disable certain optimizations that would otherwise be possible * when executing a query on a specific range of a syntax tree. */ bool ts_query_is_pattern_non_local(const TSQuery *self, uint32_t pattern_index); /* * Check if a given pattern is guaranteed to match once a given step is reached. * The step is specified by its byte offset in the query's source code. */ bool ts_query_is_pattern_guaranteed_at_step(const TSQuery *self, uint32_t byte_offset); /** * Get the name and length of one of the query's captures, or one of the * query's string literals. Each capture and string is associated with a * numeric id based on the order that it appeared in the query's source. */ const char *ts_query_capture_name_for_id( const TSQuery *self, uint32_t index, uint32_t *length ); /** * Get the quantifier of the query's captures. Each capture is * associated * with a numeric id based on the order that it appeared in the query's source. */ TSQuantifier ts_query_capture_quantifier_for_id( const TSQuery *self, uint32_t pattern_index, uint32_t capture_index ); const char *ts_query_string_value_for_id( const TSQuery *self, uint32_t index, uint32_t *length ); /** * Disable a certain capture within a query. * * This prevents the capture from being returned in matches, and also avoids * any resource usage associated with recording the capture. Currently, there * is no way to undo this. */ void ts_query_disable_capture(TSQuery *self, const char *name, uint32_t length); /** * Disable a certain pattern within a query. * * This prevents the pattern from matching and removes most of the overhead * associated with the pattern. Currently, there is no way to undo this. */ void ts_query_disable_pattern(TSQuery *self, uint32_t pattern_index); /** * Create a new cursor for executing a given query. * * The cursor stores the state that is needed to iteratively search * for matches. To use the query cursor, first call [`ts_query_cursor_exec`] * to start running a given query on a given syntax node. Then, there are * two options for consuming the results of the query: * 1. Repeatedly call [`ts_query_cursor_next_match`] to iterate over all of the * *matches* in the order that they were found. Each match contains the * index of the pattern that matched, and an array of captures. Because * multiple patterns can match the same set of nodes, one match may contain * captures that appear *before* some of the captures from a previous match. * 2. Repeatedly call [`ts_query_cursor_next_capture`] to iterate over all of the * individual *captures* in the order that they appear. This is useful if * don't care about which pattern matched, and just want a single ordered * sequence of captures. * * If you don't care about consuming all of the results, you can stop calling * [`ts_query_cursor_next_match`] or [`ts_query_cursor_next_capture`] at any point. * You can then start executing another query on another node by calling * [`ts_query_cursor_exec`] again. 
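 *
 * As an illustrative sketch (not part of the upstream header), the whole
 * workflow looks roughly like this; `language`, `tree`, and the query text
 * `source` are assumed to exist already:
 *
 *     uint32_t error_offset; TSQueryError error_type;
 *     TSQuery *query = ts_query_new(language, source, (uint32_t)strlen(source),
 *                                   &error_offset, &error_type);
 *     TSQueryCursor *cursor = ts_query_cursor_new();
 *     ts_query_cursor_exec(cursor, query, ts_tree_root_node(tree));
 *     TSQueryMatch match;
 *     while (ts_query_cursor_next_match(cursor, &match)) {
 *       // inspect match.pattern_index and match.captures[0 .. match.capture_count]
 *     }
 *     ts_query_cursor_delete(cursor);
 *     ts_query_delete(query);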
*/ TSQueryCursor *ts_query_cursor_new(void); /** * Delete a query cursor, freeing all of the memory that it used. */ void ts_query_cursor_delete(TSQueryCursor *self); /** * Start running a given query on a given node. */ void ts_query_cursor_exec(TSQueryCursor *self, const TSQuery *query, TSNode node); /** * Start running a given query on a given node, with some options. */ void ts_query_cursor_exec_with_options( TSQueryCursor *self, const TSQuery *query, TSNode node, const TSQueryCursorOptions *query_options ); /** * Manage the maximum number of in-progress matches allowed by this query * cursor. * * Query cursors have an optional maximum capacity for storing lists of * in-progress captures. If this capacity is exceeded, then the * earliest-starting match will silently be dropped to make room for further * matches. This maximum capacity is optional — by default, query cursors allow * any number of pending matches, dynamically allocating new space for them as * needed as the query is executed. */ bool ts_query_cursor_did_exceed_match_limit(const TSQueryCursor *self); uint32_t ts_query_cursor_match_limit(const TSQueryCursor *self); void ts_query_cursor_set_match_limit(TSQueryCursor *self, uint32_t limit); /** * @deprecated use [`ts_query_cursor_exec_with_options`] and pass in a callback instead, this will be removed in 0.26. * * Set the maximum duration in microseconds that query execution should be allowed to * take before halting. * * If query execution takes longer than this, it will halt early, returning NULL. * See [`ts_query_cursor_next_match`] or [`ts_query_cursor_next_capture`] for more information. */ void ts_query_cursor_set_timeout_micros(TSQueryCursor *self, uint64_t timeout_micros); /** * @deprecated use [`ts_query_cursor_exec_with_options`] and pass in a callback instead, this will be removed in 0.26. * * Get the duration in microseconds that query execution is allowed to take. * * This is set via [`ts_query_cursor_set_timeout_micros`]. */ uint64_t ts_query_cursor_timeout_micros(const TSQueryCursor *self); /** * Set the range of bytes in which the query will be executed. * * The query cursor will return matches that intersect with the given point range. * This means that a match may be returned even if some of its captures fall * outside the specified range, as long as at least part of the match * overlaps with the range. * * For example, if a query pattern matches a node that spans a larger area * than the specified range, but part of that node intersects with the range, * the entire match will be returned. * * This will return `false` if the start byte is greater than the end byte, otherwise * it will return `true`. */ bool ts_query_cursor_set_byte_range(TSQueryCursor *self, uint32_t start_byte, uint32_t end_byte); /** * Set the range of (row, column) positions in which the query will be executed. * * The query cursor will return matches that intersect with the given point range. * This means that a match may be returned even if some of its captures fall * outside the specified range, as long as at least part of the match * overlaps with the range. * * For example, if a query pattern matches a node that spans a larger area * than the specified range, but part of that node intersects with the range, * the entire match will be returned. * * This will return `false` if the start point is greater than the end point, otherwise * it will return `true`. 
*/ bool ts_query_cursor_set_point_range(TSQueryCursor *self, TSPoint start_point, TSPoint end_point); /** * Advance to the next match of the currently running query. * * If there is a match, write it to `*match` and return `true`. * Otherwise, return `false`. */ bool ts_query_cursor_next_match(TSQueryCursor *self, TSQueryMatch *match); void ts_query_cursor_remove_match(TSQueryCursor *self, uint32_t match_id); /** * Advance to the next capture of the currently running query. * * If there is a capture, write its match to `*match` and its index within * the match's capture list to `*capture_index`. Otherwise, return `false`. */ bool ts_query_cursor_next_capture( TSQueryCursor *self, TSQueryMatch *match, uint32_t *capture_index ); /** * Set the maximum start depth for a query cursor. * * This prevents cursors from exploring children nodes at a certain depth. * Note if a pattern includes many children, then they will still be checked. * * The zero max start depth value can be used as a special behavior and * it helps to destructure a subtree by staying on a node and using captures * for interested parts. Note that the zero max start depth only limit a search * depth for a pattern's root node but other nodes that are parts of the pattern * may be searched at any depth what defined by the pattern structure. * * Set to `UINT32_MAX` to remove the maximum start depth. */ void ts_query_cursor_set_max_start_depth(TSQueryCursor *self, uint32_t max_start_depth); /**********************/ /* Section - Language */ /**********************/ /** * Get another reference to the given language. */ const TSLanguage *ts_language_copy(const TSLanguage *self); /** * Free any dynamically-allocated resources for this language, if * this is the last reference. */ void ts_language_delete(const TSLanguage *self); /** * Get the number of distinct node types in the language. */ uint32_t ts_language_symbol_count(const TSLanguage *self); /** * Get the number of valid states in this language. */ uint32_t ts_language_state_count(const TSLanguage *self); /** * Get the numerical id for the given node type string. */ TSSymbol ts_language_symbol_for_name( const TSLanguage *self, const char *string, uint32_t length, bool is_named ); /** * Get the number of distinct field names in the language. */ uint32_t ts_language_field_count(const TSLanguage *self); /** * Get the field name string for the given numerical id. */ const char *ts_language_field_name_for_id(const TSLanguage *self, TSFieldId id); /** * Get the numerical id for the given field name string. */ TSFieldId ts_language_field_id_for_name(const TSLanguage *self, const char *name, uint32_t name_length); /** * Get a list of all supertype symbols for the language. */ const TSSymbol *ts_language_supertypes(const TSLanguage *self, uint32_t *length); /** * Get a list of all subtype symbol ids for a given supertype symbol. * * See [`ts_language_supertypes`] for fetching all supertype symbols. */ const TSSymbol *ts_language_subtypes( const TSLanguage *self, TSSymbol supertype, uint32_t *length ); /** * Get a node type string for the given numerical id. */ const char *ts_language_symbol_name(const TSLanguage *self, TSSymbol symbol); /** * Check whether the given node type id belongs to named nodes, anonymous nodes, * or a hidden nodes. * * See also [`ts_node_is_named`]. Hidden nodes are never returned from the API. */ TSSymbolType ts_language_symbol_type(const TSLanguage *self, TSSymbol symbol); /** * @deprecated use [`ts_language_abi_version`] instead, this will be removed in 0.26. 
* * Get the ABI version number for this language. This version number is used * to ensure that languages were generated by a compatible version of * Tree-sitter. * * See also [`ts_parser_set_language`]. */ uint32_t ts_language_version(const TSLanguage *self); /** * Get the ABI version number for this language. This version number is used * to ensure that languages were generated by a compatible version of * Tree-sitter. * * See also [`ts_parser_set_language`]. */ uint32_t ts_language_abi_version(const TSLanguage *self); /** * Get the metadata for this language. This information is generated by the * CLI, and relies on the language author providing the correct metadata in * the language's `tree-sitter.json` file. * * See also [`TSMetadata`]. */ const TSLanguageMetadata *ts_language_metadata(const TSLanguage *self); /** * Get the next parse state. Combine this with lookahead iterators to generate * completion suggestions or valid symbols in error nodes. Use * [`ts_node_grammar_symbol`] for valid symbols. */ TSStateId ts_language_next_state(const TSLanguage *self, TSStateId state, TSSymbol symbol); /** * Get the name of this language. This returns `NULL` in older parsers. */ const char *ts_language_name(const TSLanguage *self); /********************************/ /* Section - Lookahead Iterator */ /********************************/ /** * Create a new lookahead iterator for the given language and parse state. * * This returns `NULL` if state is invalid for the language. * * Repeatedly using [`ts_lookahead_iterator_next`] and * [`ts_lookahead_iterator_current_symbol`] will generate valid symbols in the * given parse state. Newly created lookahead iterators will contain the `ERROR` * symbol. * * Lookahead iterators can be useful to generate suggestions and improve syntax * error diagnostics. To get symbols valid in an ERROR node, use the lookahead * iterator on its first leaf node state. For `MISSING` nodes, a lookahead * iterator created on the previous non-extra leaf node may be appropriate. */ TSLookaheadIterator *ts_lookahead_iterator_new(const TSLanguage *self, TSStateId state); /** * Delete a lookahead iterator freeing all the memory used. */ void ts_lookahead_iterator_delete(TSLookaheadIterator *self); /** * Reset the lookahead iterator to another state. * * This returns `true` if the iterator was reset to the given state and `false` * otherwise. */ bool ts_lookahead_iterator_reset_state(TSLookaheadIterator *self, TSStateId state); /** * Reset the lookahead iterator. * * This returns `true` if the language was set successfully and `false` * otherwise. */ bool ts_lookahead_iterator_reset(TSLookaheadIterator *self, const TSLanguage *language, TSStateId state); /** * Get the current language of the lookahead iterator. */ const TSLanguage *ts_lookahead_iterator_language(const TSLookaheadIterator *self); /** * Advance the lookahead iterator to the next symbol. * * This returns `true` if there is a new symbol and `false` otherwise. */ bool ts_lookahead_iterator_next(TSLookaheadIterator *self); /** * Get the current symbol of the lookahead iterator; */ TSSymbol ts_lookahead_iterator_current_symbol(const TSLookaheadIterator *self); /** * Get the current symbol type of the lookahead iterator as a null terminated * string. 
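 *
 * As an illustrative sketch (not part of the upstream header), the symbols
 * that are valid in a given parse state can be enumerated roughly like this:
 *
 *     TSLookaheadIterator *it = ts_lookahead_iterator_new(language, state);
 *     if (it != NULL) {
 *       while (ts_lookahead_iterator_next(it)) {
 *         const char *name = ts_lookahead_iterator_current_symbol_name(it);
 *         // e.g. offer `name` as a completion suggestion
 *       }
 *       ts_lookahead_iterator_delete(it);
 *     }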
*/ const char *ts_lookahead_iterator_current_symbol_name(const TSLookaheadIterator *self); /*************************************/ /* Section - WebAssembly Integration */ /************************************/ typedef struct wasm_engine_t TSWasmEngine; typedef struct TSWasmStore TSWasmStore; typedef enum { TSWasmErrorKindNone = 0, TSWasmErrorKindParse, TSWasmErrorKindCompile, TSWasmErrorKindInstantiate, TSWasmErrorKindAllocate, } TSWasmErrorKind; typedef struct { TSWasmErrorKind kind; char *message; } TSWasmError; /** * Create a Wasm store. */ TSWasmStore *ts_wasm_store_new( TSWasmEngine *engine, TSWasmError *error ); /** * Free the memory associated with the given Wasm store. */ void ts_wasm_store_delete(TSWasmStore *); /** * Create a language from a buffer of Wasm. The resulting language behaves * like any other Tree-sitter language, except that in order to use it with * a parser, that parser must have a Wasm store. Note that the language * can be used with any Wasm store, it doesn't need to be the same store that * was used to originally load it. */ const TSLanguage *ts_wasm_store_load_language( TSWasmStore *, const char *name, const char *wasm, uint32_t wasm_len, TSWasmError *error ); /** * Get the number of languages instantiated in the given wasm store. */ size_t ts_wasm_store_language_count(const TSWasmStore *); /** * Check if the language came from a Wasm module. If so, then in order to use * this language with a Parser, that parser must have a Wasm store assigned. */ bool ts_language_is_wasm(const TSLanguage *); /** * Assign the given Wasm store to the parser. A parser must have a Wasm store * in order to use Wasm languages. */ void ts_parser_set_wasm_store(TSParser *, TSWasmStore *); /** * Remove the parser's current Wasm store and return it. This returns NULL if * the parser doesn't have a Wasm store. */ TSWasmStore *ts_parser_take_wasm_store(TSParser *); /**********************************/ /* Section - Global Configuration */ /**********************************/ /** * Set the allocation functions used by the library. * * By default, Tree-sitter uses the standard libc allocation functions, * but aborts the process when an allocation fails. This function lets * you supply alternative allocation functions at runtime. * * If you pass `NULL` for any parameter, Tree-sitter will switch back to * its default implementation of that function. * * If you call this function after the library has already been used, then * you must ensure that either: * 1. All the existing objects have been freed. * 2. The new allocator shares its state with the old one, so it is capable * of freeing memory that was allocated by the old allocator. 
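 *
 * As an illustrative sketch (not part of the upstream header), a custom
 * allocator could be installed roughly like this; `tracing_malloc` and friends
 * are hypothetical wrappers around the libc functions:
 *
 *     ts_set_allocator(tracing_malloc, tracing_calloc, tracing_realloc, tracing_free);
 *     // ... use the library ...
 *     ts_set_allocator(NULL, NULL, NULL, NULL);  // switch back to the defaults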
*/ void ts_set_allocator( void *(*new_malloc)(size_t), void *(*new_calloc)(size_t, size_t), void *(*new_realloc)(void *, size_t), void (*new_free)(void *) ); #ifdef __cplusplus } #endif #ifndef TREE_SITTER_HIDE_SYMBOLS #if defined(__GNUC__) || defined(__clang__) #pragma GCC visibility pop #endif #endif #endif // TREE_SITTER_API_H_ hx-0.3.0+20250717/bindings/vendor/src/000077500000000000000000000000001503625671400167265ustar00rootroot00000000000000hx-0.3.0+20250717/bindings/vendor/src/alloc.c000066400000000000000000000030061503625671400201630ustar00rootroot00000000000000#include "alloc.h" #include "tree_sitter/api.h" #include static void *ts_malloc_default(size_t size) { void *result = malloc(size); if (size > 0 && !result) { fprintf(stderr, "tree-sitter failed to allocate %zu bytes", size); abort(); } return result; } static void *ts_calloc_default(size_t count, size_t size) { void *result = calloc(count, size); if (count > 0 && !result) { fprintf(stderr, "tree-sitter failed to allocate %zu bytes", count * size); abort(); } return result; } static void *ts_realloc_default(void *buffer, size_t size) { void *result = realloc(buffer, size); if (size > 0 && !result) { fprintf(stderr, "tree-sitter failed to reallocate %zu bytes", size); abort(); } return result; } // Allow clients to override allocation functions dynamically TS_PUBLIC void *(*ts_current_malloc)(size_t) = ts_malloc_default; TS_PUBLIC void *(*ts_current_calloc)(size_t, size_t) = ts_calloc_default; TS_PUBLIC void *(*ts_current_realloc)(void *, size_t) = ts_realloc_default; TS_PUBLIC void (*ts_current_free)(void *) = free; void ts_set_allocator( void *(*new_malloc)(size_t size), void *(*new_calloc)(size_t count, size_t size), void *(*new_realloc)(void *ptr, size_t size), void (*new_free)(void *ptr) ) { ts_current_malloc = new_malloc ? new_malloc : ts_malloc_default; ts_current_calloc = new_calloc ? new_calloc : ts_calloc_default; ts_current_realloc = new_realloc ? new_realloc : ts_realloc_default; ts_current_free = new_free ? 
new_free : free; } hx-0.3.0+20250717/bindings/vendor/src/alloc.h000066400000000000000000000016271503625671400201770ustar00rootroot00000000000000#ifndef TREE_SITTER_ALLOC_H_ #define TREE_SITTER_ALLOC_H_ #ifdef __cplusplus extern "C" { #endif #include #include #include #if defined(TREE_SITTER_HIDDEN_SYMBOLS) || defined(_WIN32) #define TS_PUBLIC #else #define TS_PUBLIC __attribute__((visibility("default"))) #endif TS_PUBLIC extern void *(*ts_current_malloc)(size_t size); TS_PUBLIC extern void *(*ts_current_calloc)(size_t count, size_t size); TS_PUBLIC extern void *(*ts_current_realloc)(void *ptr, size_t size); TS_PUBLIC extern void (*ts_current_free)(void *ptr); // Allow clients to override allocation functions #ifndef ts_malloc #define ts_malloc ts_current_malloc #endif #ifndef ts_calloc #define ts_calloc ts_current_calloc #endif #ifndef ts_realloc #define ts_realloc ts_current_realloc #endif #ifndef ts_free #define ts_free ts_current_free #endif #ifdef __cplusplus } #endif #endif // TREE_SITTER_ALLOC_H_ hx-0.3.0+20250717/bindings/vendor/src/array.h000066400000000000000000000243151503625671400202220ustar00rootroot00000000000000#ifndef TREE_SITTER_ARRAY_H_ #define TREE_SITTER_ARRAY_H_ #ifdef __cplusplus extern "C" { #endif #include "./alloc.h" #include "./ts_assert.h" #include #include #include #include #ifdef _MSC_VER #pragma warning(push) #pragma warning(disable : 4101) #elif defined(__GNUC__) || defined(__clang__) #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wunused-variable" #endif #define Array(T) \ struct { \ T *contents; \ uint32_t size; \ uint32_t capacity; \ } /// Initialize an array. #define array_init(self) \ ((self)->size = 0, (self)->capacity = 0, (self)->contents = NULL) /// Create an empty array. #define array_new() \ { NULL, 0, 0 } /// Get a pointer to the element at a given `index` in the array. #define array_get(self, _index) \ (ts_assert((uint32_t)(_index) < (self)->size), &(self)->contents[_index]) /// Get a pointer to the first element in the array. #define array_front(self) array_get(self, 0) /// Get a pointer to the last element in the array. #define array_back(self) array_get(self, (self)->size - 1) /// Clear the array, setting its size to zero. Note that this does not free any /// memory allocated for the array's contents. #define array_clear(self) ((self)->size = 0) /// Reserve `new_capacity` elements of space in the array. If `new_capacity` is /// less than the array's current capacity, this function has no effect. #define array_reserve(self, new_capacity) \ _array__reserve((Array *)(self), array_elem_size(self), new_capacity) /// Free any memory allocated for this array. Note that this does not free any /// memory allocated for the array's contents. #define array_delete(self) _array__delete((Array *)(self)) /// Push a new `element` onto the end of the array. #define array_push(self, element) \ (_array__grow((Array *)(self), 1, array_elem_size(self)), \ (self)->contents[(self)->size++] = (element)) /// Increase the array's size by `count` elements. /// New elements are zero-initialized. #define array_grow_by(self, count) \ do { \ if ((count) == 0) break; \ _array__grow((Array *)(self), count, array_elem_size(self)); \ memset((self)->contents + (self)->size, 0, (count) * array_elem_size(self)); \ (self)->size += (count); \ } while (0) /// Append all elements from one array to the end of another. 
#define array_push_all(self, other) \ array_extend((self), (other)->size, (other)->contents) /// Append `count` elements to the end of the array, reading their values from the /// `contents` pointer. #define array_extend(self, count, contents) \ _array__splice( \ (Array *)(self), array_elem_size(self), (self)->size, \ 0, count, contents \ ) /// Remove `old_count` elements from the array starting at the given `index`. At /// the same index, insert `new_count` new elements, reading their values from the /// `new_contents` pointer. #define array_splice(self, _index, old_count, new_count, new_contents) \ _array__splice( \ (Array *)(self), array_elem_size(self), _index, \ old_count, new_count, new_contents \ ) /// Insert one `element` into the array at the given `index`. #define array_insert(self, _index, element) \ _array__splice((Array *)(self), array_elem_size(self), _index, 0, 1, &(element)) /// Remove one element from the array at the given `index`. #define array_erase(self, _index) \ _array__erase((Array *)(self), array_elem_size(self), _index) /// Pop the last element off the array, returning the element by value. #define array_pop(self) ((self)->contents[--(self)->size]) /// Assign the contents of one array to another, reallocating if necessary. #define array_assign(self, other) \ _array__assign((Array *)(self), (const Array *)(other), array_elem_size(self)) /// Swap one array with another #define array_swap(self, other) \ _array__swap((Array *)(self), (Array *)(other)) /// Get the size of the array contents #define array_elem_size(self) (sizeof *(self)->contents) /// Search a sorted array for a given `needle` value, using the given `compare` /// callback to determine the order. /// /// If an existing element is found to be equal to `needle`, then the `index` /// out-parameter is set to the existing value's index, and the `exists` /// out-parameter is set to true. Otherwise, `index` is set to an index where /// `needle` should be inserted in order to preserve the sorting, and `exists` /// is set to false. #define array_search_sorted_with(self, compare, needle, _index, _exists) \ _array__search_sorted(self, 0, compare, , needle, _index, _exists) /// Search a sorted array for a given `needle` value, using integer comparisons /// of a given struct field (specified with a leading dot) to determine the order. /// /// See also `array_search_sorted_with`. #define array_search_sorted_by(self, field, needle, _index, _exists) \ _array__search_sorted(self, 0, _compare_int, field, needle, _index, _exists) /// Insert a given `value` into a sorted array, using the given `compare` /// callback to determine the order. #define array_insert_sorted_with(self, compare, value) \ do { \ unsigned _index, _exists; \ array_search_sorted_with(self, compare, &(value), &_index, &_exists); \ if (!_exists) array_insert(self, _index, value); \ } while (0) /// Insert a given `value` into a sorted array, using integer comparisons of /// a given struct field (specified with a leading dot) to determine the order. /// /// See also `array_search_sorted_by`. #define array_insert_sorted_by(self, field, value) \ do { \ unsigned _index, _exists; \ array_search_sorted_by(self, field, (value) field, &_index, &_exists); \ if (!_exists) array_insert(self, _index, value); \ } while (0) // Private typedef Array(void) Array; /// This is not what you're looking for, see `array_delete`. 
static inline void _array__delete(Array *self) { if (self->contents) { ts_free(self->contents); self->contents = NULL; self->size = 0; self->capacity = 0; } } /// This is not what you're looking for, see `array_erase`. static inline void _array__erase(Array *self, size_t element_size, uint32_t index) { ts_assert(index < self->size); char *contents = (char *)self->contents; memmove(contents + index * element_size, contents + (index + 1) * element_size, (self->size - index - 1) * element_size); self->size--; } /// This is not what you're looking for, see `array_reserve`. static inline void _array__reserve(Array *self, size_t element_size, uint32_t new_capacity) { if (new_capacity > self->capacity) { if (self->contents) { self->contents = ts_realloc(self->contents, new_capacity * element_size); } else { self->contents = ts_malloc(new_capacity * element_size); } self->capacity = new_capacity; } } /// This is not what you're looking for, see `array_assign`. static inline void _array__assign(Array *self, const Array *other, size_t element_size) { _array__reserve(self, element_size, other->size); self->size = other->size; memcpy(self->contents, other->contents, self->size * element_size); } /// This is not what you're looking for, see `array_swap`. static inline void _array__swap(Array *self, Array *other) { Array swap = *other; *other = *self; *self = swap; } /// This is not what you're looking for, see `array_push` or `array_grow_by`. static inline void _array__grow(Array *self, uint32_t count, size_t element_size) { uint32_t new_size = self->size + count; if (new_size > self->capacity) { uint32_t new_capacity = self->capacity * 2; if (new_capacity < 8) new_capacity = 8; if (new_capacity < new_size) new_capacity = new_size; _array__reserve(self, element_size, new_capacity); } } /// This is not what you're looking for, see `array_splice`. static inline void _array__splice(Array *self, size_t element_size, uint32_t index, uint32_t old_count, uint32_t new_count, const void *elements) { uint32_t new_size = self->size + new_count - old_count; uint32_t old_end = index + old_count; uint32_t new_end = index + new_count; ts_assert(old_end <= self->size); _array__reserve(self, element_size, new_size); char *contents = (char *)self->contents; if (self->size > old_end) { memmove( contents + new_end * element_size, contents + old_end * element_size, (self->size - old_end) * element_size ); } if (new_count > 0) { if (elements) { memcpy( (contents + index * element_size), elements, new_count * element_size ); } else { memset( (contents + index * element_size), 0, new_count * element_size ); } } self->size += new_count - old_count; } /// A binary search routine, based on Rust's `std::slice::binary_search_by`. /// This is not what you're looking for, see `array_search_sorted_with` or `array_search_sorted_by`. #define _array__search_sorted(self, start, compare, suffix, needle, _index, _exists) \ do { \ *(_index) = start; \ *(_exists) = false; \ uint32_t size = (self)->size - *(_index); \ if (size == 0) break; \ int comparison; \ while (size > 1) { \ uint32_t half_size = size / 2; \ uint32_t mid_index = *(_index) + half_size; \ comparison = compare(&((self)->contents[mid_index] suffix), (needle)); \ if (comparison <= 0) *(_index) = mid_index; \ size -= half_size; \ } \ comparison = compare(&((self)->contents[*(_index)] suffix), (needle)); \ if (comparison == 0) *(_exists) = true; \ else if (comparison < 0) *(_index) += 1; \ } while (0) /// Helper macro for the `_sorted_by` routines below. 
This takes the left (existing) /// parameter by reference in order to work with the generic sorting function above. #define _compare_int(a, b) ((int)*(a) - (int)(b)) #ifdef _MSC_VER #pragma warning(pop) #elif defined(__GNUC__) || defined(__clang__) #pragma GCC diagnostic pop #endif #ifdef __cplusplus } #endif #endif // TREE_SITTER_ARRAY_H_ hx-0.3.0+20250717/bindings/vendor/src/atomic.h000066400000000000000000000025501503625671400203550ustar00rootroot00000000000000#ifndef TREE_SITTER_ATOMIC_H_ #define TREE_SITTER_ATOMIC_H_ #include #include #include #ifdef __TINYC__ static inline size_t atomic_load(const volatile size_t *p) { return *p; } static inline uint32_t atomic_inc(volatile uint32_t *p) { *p += 1; return *p; } static inline uint32_t atomic_dec(volatile uint32_t *p) { *p-= 1; return *p; } #elif defined(_WIN32) #include static inline size_t atomic_load(const volatile size_t *p) { return *p; } static inline uint32_t atomic_inc(volatile uint32_t *p) { return InterlockedIncrement((long volatile *)p); } static inline uint32_t atomic_dec(volatile uint32_t *p) { return InterlockedDecrement((long volatile *)p); } #else static inline size_t atomic_load(const volatile size_t *p) { #ifdef __ATOMIC_RELAXED return __atomic_load_n(p, __ATOMIC_RELAXED); #else return __sync_fetch_and_add((volatile size_t *)p, 0); #endif } static inline uint32_t atomic_inc(volatile uint32_t *p) { #ifdef __ATOMIC_RELAXED return __atomic_add_fetch(p, 1U, __ATOMIC_SEQ_CST); #else return __sync_add_and_fetch(p, 1U); #endif } static inline uint32_t atomic_dec(volatile uint32_t *p) { #ifdef __ATOMIC_RELAXED return __atomic_sub_fetch(p, 1U, __ATOMIC_SEQ_CST); #else return __sync_sub_and_fetch(p, 1U); #endif } #endif #endif // TREE_SITTER_ATOMIC_H_ hx-0.3.0+20250717/bindings/vendor/src/clock.h000066400000000000000000000067271503625671400202060ustar00rootroot00000000000000#ifndef TREE_SITTER_CLOCK_H_ #define TREE_SITTER_CLOCK_H_ #include #include typedef uint64_t TSDuration; #ifdef _WIN32 // Windows: // * Represent a time as a performance counter value. // * Represent a duration as a number of performance counter ticks. #include typedef uint64_t TSClock; static inline TSDuration duration_from_micros(uint64_t micros) { LARGE_INTEGER frequency; QueryPerformanceFrequency(&frequency); return micros * (uint64_t)frequency.QuadPart / 1000000; } static inline uint64_t duration_to_micros(TSDuration self) { LARGE_INTEGER frequency; QueryPerformanceFrequency(&frequency); return self * 1000000 / (uint64_t)frequency.QuadPart; } static inline TSClock clock_null(void) { return 0; } static inline TSClock clock_now(void) { LARGE_INTEGER result; QueryPerformanceCounter(&result); return (uint64_t)result.QuadPart; } static inline TSClock clock_after(TSClock base, TSDuration duration) { return base + duration; } static inline bool clock_is_null(TSClock self) { return !self; } static inline bool clock_is_gt(TSClock self, TSClock other) { return self > other; } #elif defined(CLOCK_MONOTONIC) // POSIX with monotonic clock support (Linux, macOS) // * Represent a time as a monotonic (seconds, nanoseconds) pair. // * Represent a duration as a number of microseconds. // // On these platforms, parse timeouts will correspond accurately to // real time, regardless of what other processes are running. 
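//
// Illustrative usage sketch (not part of the vendored sources): the library
// composes these primitives into a deadline check roughly as follows, where
// `timeout_micros` and `do_more_work` are hypothetical placeholders:
//
//   TSClock end_time = clock_after(clock_now(), duration_from_micros(timeout_micros));
//   while (do_more_work()) {
//     if (clock_is_gt(clock_now(), end_time)) break;  // time budget exhausted
//   }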
#include typedef struct timespec TSClock; static inline TSDuration duration_from_micros(uint64_t micros) { return micros; } static inline uint64_t duration_to_micros(TSDuration self) { return self; } static inline TSClock clock_now(void) { TSClock result; clock_gettime(CLOCK_MONOTONIC, &result); return result; } static inline TSClock clock_null(void) { return (TSClock) {0, 0}; } static inline TSClock clock_after(TSClock base, TSDuration duration) { TSClock result = base; result.tv_sec += duration / 1000000; result.tv_nsec += (duration % 1000000) * 1000; if (result.tv_nsec >= 1000000000) { result.tv_nsec -= 1000000000; ++(result.tv_sec); } return result; } static inline bool clock_is_null(TSClock self) { return !self.tv_sec && !self.tv_nsec; } static inline bool clock_is_gt(TSClock self, TSClock other) { if (self.tv_sec > other.tv_sec) return true; if (self.tv_sec < other.tv_sec) return false; return self.tv_nsec > other.tv_nsec; } #else // POSIX without monotonic clock support // * Represent a time as a process clock value. // * Represent a duration as a number of process clock ticks. // // On these platforms, parse timeouts may be affected by other processes, // which is not ideal, but is better than using a non-monotonic time API // like `gettimeofday`. #include typedef uint64_t TSClock; static inline TSDuration duration_from_micros(uint64_t micros) { return micros * (uint64_t)CLOCKS_PER_SEC / 1000000; } static inline uint64_t duration_to_micros(TSDuration self) { return self * 1000000 / (uint64_t)CLOCKS_PER_SEC; } static inline TSClock clock_null(void) { return 0; } static inline TSClock clock_now(void) { return (uint64_t)clock(); } static inline TSClock clock_after(TSClock base, TSDuration duration) { return base + duration; } static inline bool clock_is_null(TSClock self) { return !self; } static inline bool clock_is_gt(TSClock self, TSClock other) { return self > other; } #endif #endif // TREE_SITTER_CLOCK_H_ hx-0.3.0+20250717/bindings/vendor/src/error_costs.h000066400000000000000000000004461503625671400214470ustar00rootroot00000000000000#ifndef TREE_SITTER_ERROR_COSTS_H_ #define TREE_SITTER_ERROR_COSTS_H_ #define ERROR_STATE 0 #define ERROR_COST_PER_RECOVERY 500 #define ERROR_COST_PER_MISSING_TREE 110 #define ERROR_COST_PER_SKIPPED_TREE 100 #define ERROR_COST_PER_SKIPPED_LINE 30 #define ERROR_COST_PER_SKIPPED_CHAR 1 #endif hx-0.3.0+20250717/bindings/vendor/src/get_changed_ranges.c000066400000000000000000000402441503625671400226650ustar00rootroot00000000000000#include "./get_changed_ranges.h" #include "./subtree.h" #include "./language.h" #include "./error_costs.h" #include "./tree_cursor.h" #include "./ts_assert.h" // #define DEBUG_GET_CHANGED_RANGES static void ts_range_array_add( TSRangeArray *self, Length start, Length end ) { if (self->size > 0) { TSRange *last_range = array_back(self); if (start.bytes <= last_range->end_byte) { last_range->end_byte = end.bytes; last_range->end_point = end.extent; return; } } if (start.bytes < end.bytes) { TSRange range = { start.extent, end.extent, start.bytes, end.bytes }; array_push(self, range); } } bool ts_range_array_intersects( const TSRangeArray *self, unsigned start_index, uint32_t start_byte, uint32_t end_byte ) { for (unsigned i = start_index; i < self->size; i++) { TSRange *range = array_get(self, i); if (range->end_byte > start_byte) { if (range->start_byte >= end_byte) break; return true; } } return false; } void ts_range_array_get_changed_ranges( const TSRange *old_ranges, unsigned old_range_count, const TSRange *new_ranges, 
unsigned new_range_count, TSRangeArray *differences ) { unsigned new_index = 0; unsigned old_index = 0; Length current_position = length_zero(); bool in_old_range = false; bool in_new_range = false; while (old_index < old_range_count || new_index < new_range_count) { const TSRange *old_range = &old_ranges[old_index]; const TSRange *new_range = &new_ranges[new_index]; Length next_old_position; if (in_old_range) { next_old_position = (Length) {old_range->end_byte, old_range->end_point}; } else if (old_index < old_range_count) { next_old_position = (Length) {old_range->start_byte, old_range->start_point}; } else { next_old_position = LENGTH_MAX; } Length next_new_position; if (in_new_range) { next_new_position = (Length) {new_range->end_byte, new_range->end_point}; } else if (new_index < new_range_count) { next_new_position = (Length) {new_range->start_byte, new_range->start_point}; } else { next_new_position = LENGTH_MAX; } if (next_old_position.bytes < next_new_position.bytes) { if (in_old_range != in_new_range) { ts_range_array_add(differences, current_position, next_old_position); } if (in_old_range) old_index++; current_position = next_old_position; in_old_range = !in_old_range; } else if (next_new_position.bytes < next_old_position.bytes) { if (in_old_range != in_new_range) { ts_range_array_add(differences, current_position, next_new_position); } if (in_new_range) new_index++; current_position = next_new_position; in_new_range = !in_new_range; } else { if (in_old_range != in_new_range) { ts_range_array_add(differences, current_position, next_new_position); } if (in_old_range) old_index++; if (in_new_range) new_index++; in_old_range = !in_old_range; in_new_range = !in_new_range; current_position = next_new_position; } } } typedef struct { TreeCursor cursor; const TSLanguage *language; unsigned visible_depth; bool in_padding; Subtree prev_external_token; } Iterator; static Iterator iterator_new( TreeCursor *cursor, const Subtree *tree, const TSLanguage *language ) { array_clear(&cursor->stack); array_push(&cursor->stack, ((TreeCursorEntry) { .subtree = tree, .position = length_zero(), .child_index = 0, .structural_child_index = 0, })); return (Iterator) { .cursor = *cursor, .language = language, .visible_depth = 1, .in_padding = false, .prev_external_token = NULL_SUBTREE, }; } static bool iterator_done(Iterator *self) { return self->cursor.stack.size == 0; } static Length iterator_start_position(Iterator *self) { TreeCursorEntry entry = *array_back(&self->cursor.stack); if (self->in_padding) { return entry.position; } else { return length_add(entry.position, ts_subtree_padding(*entry.subtree)); } } static Length iterator_end_position(Iterator *self) { TreeCursorEntry entry = *array_back(&self->cursor.stack); Length result = length_add(entry.position, ts_subtree_padding(*entry.subtree)); if (self->in_padding) { return result; } else { return length_add(result, ts_subtree_size(*entry.subtree)); } } static bool iterator_tree_is_visible(const Iterator *self) { TreeCursorEntry entry = *array_back(&self->cursor.stack); if (ts_subtree_visible(*entry.subtree)) return true; if (self->cursor.stack.size > 1) { Subtree parent = *array_get(&self->cursor.stack, self->cursor.stack.size - 2)->subtree; return ts_language_alias_at( self->language, parent.ptr->production_id, entry.structural_child_index ) != 0; } return false; } static void iterator_get_visible_state( const Iterator *self, Subtree *tree, TSSymbol *alias_symbol, uint32_t *start_byte ) { uint32_t i = self->cursor.stack.size - 1; if 
(self->in_padding) { if (i == 0) return; i--; } for (; i + 1 > 0; i--) { TreeCursorEntry entry = *array_get(&self->cursor.stack, i); if (i > 0) { const Subtree *parent = array_get(&self->cursor.stack, i - 1)->subtree; *alias_symbol = ts_language_alias_at( self->language, parent->ptr->production_id, entry.structural_child_index ); } if (ts_subtree_visible(*entry.subtree) || *alias_symbol) { *tree = *entry.subtree; *start_byte = entry.position.bytes; break; } } } static void iterator_ascend(Iterator *self) { if (iterator_done(self)) return; if (iterator_tree_is_visible(self) && !self->in_padding) self->visible_depth--; if (array_back(&self->cursor.stack)->child_index > 0) self->in_padding = false; self->cursor.stack.size--; } static bool iterator_descend(Iterator *self, uint32_t goal_position) { if (self->in_padding) return false; bool did_descend = false; do { did_descend = false; TreeCursorEntry entry = *array_back(&self->cursor.stack); Length position = entry.position; uint32_t structural_child_index = 0; for (uint32_t i = 0, n = ts_subtree_child_count(*entry.subtree); i < n; i++) { const Subtree *child = &ts_subtree_children(*entry.subtree)[i]; Length child_left = length_add(position, ts_subtree_padding(*child)); Length child_right = length_add(child_left, ts_subtree_size(*child)); if (child_right.bytes > goal_position) { array_push(&self->cursor.stack, ((TreeCursorEntry) { .subtree = child, .position = position, .child_index = i, .structural_child_index = structural_child_index, })); if (iterator_tree_is_visible(self)) { if (child_left.bytes > goal_position) { self->in_padding = true; } else { self->visible_depth++; } return true; } did_descend = true; break; } position = child_right; if (!ts_subtree_extra(*child)) structural_child_index++; Subtree last_external_token = ts_subtree_last_external_token(*child); if (last_external_token.ptr) { self->prev_external_token = last_external_token; } } } while (did_descend); return false; } static void iterator_advance(Iterator *self) { if (self->in_padding) { self->in_padding = false; if (iterator_tree_is_visible(self)) { self->visible_depth++; } else { iterator_descend(self, 0); } return; } for (;;) { if (iterator_tree_is_visible(self)) self->visible_depth--; TreeCursorEntry entry = array_pop(&self->cursor.stack); if (iterator_done(self)) return; const Subtree *parent = array_back(&self->cursor.stack)->subtree; uint32_t child_index = entry.child_index + 1; Subtree last_external_token = ts_subtree_last_external_token(*entry.subtree); if (last_external_token.ptr) { self->prev_external_token = last_external_token; } if (ts_subtree_child_count(*parent) > child_index) { Length position = length_add(entry.position, ts_subtree_total_size(*entry.subtree)); uint32_t structural_child_index = entry.structural_child_index; if (!ts_subtree_extra(*entry.subtree)) structural_child_index++; const Subtree *next_child = &ts_subtree_children(*parent)[child_index]; array_push(&self->cursor.stack, ((TreeCursorEntry) { .subtree = next_child, .position = position, .child_index = child_index, .structural_child_index = structural_child_index, })); if (iterator_tree_is_visible(self)) { if (ts_subtree_padding(*next_child).bytes > 0) { self->in_padding = true; } else { self->visible_depth++; } } else { iterator_descend(self, 0); } break; } } } typedef enum { IteratorDiffers, IteratorMayDiffer, IteratorMatches, } IteratorComparison; static IteratorComparison iterator_compare( const Iterator *old_iter, const Iterator *new_iter ) { Subtree old_tree = NULL_SUBTREE; Subtree 
new_tree = NULL_SUBTREE; uint32_t old_start = 0; uint32_t new_start = 0; TSSymbol old_alias_symbol = 0; TSSymbol new_alias_symbol = 0; iterator_get_visible_state(old_iter, &old_tree, &old_alias_symbol, &old_start); iterator_get_visible_state(new_iter, &new_tree, &new_alias_symbol, &new_start); TSSymbol old_symbol = ts_subtree_symbol(old_tree); TSSymbol new_symbol = ts_subtree_symbol(new_tree); if (!old_tree.ptr && !new_tree.ptr) return IteratorMatches; if (!old_tree.ptr || !new_tree.ptr) return IteratorDiffers; if (old_alias_symbol != new_alias_symbol || old_symbol != new_symbol) return IteratorDiffers; uint32_t old_size = ts_subtree_size(old_tree).bytes; uint32_t new_size = ts_subtree_size(new_tree).bytes; TSStateId old_state = ts_subtree_parse_state(old_tree); TSStateId new_state = ts_subtree_parse_state(new_tree); bool old_has_external_tokens = ts_subtree_has_external_tokens(old_tree); bool new_has_external_tokens = ts_subtree_has_external_tokens(new_tree); uint32_t old_error_cost = ts_subtree_error_cost(old_tree); uint32_t new_error_cost = ts_subtree_error_cost(new_tree); if ( old_start != new_start || old_symbol == ts_builtin_sym_error || old_size != new_size || old_state == TS_TREE_STATE_NONE || new_state == TS_TREE_STATE_NONE || ((old_state == ERROR_STATE) != (new_state == ERROR_STATE)) || old_error_cost != new_error_cost || old_has_external_tokens != new_has_external_tokens || ts_subtree_has_changes(old_tree) || ( old_has_external_tokens && !ts_subtree_external_scanner_state_eq(old_iter->prev_external_token, new_iter->prev_external_token) ) ) { return IteratorMayDiffer; } return IteratorMatches; } #ifdef DEBUG_GET_CHANGED_RANGES static inline void iterator_print_state(Iterator *self) { TreeCursorEntry entry = *array_back(&self->cursor.stack); TSPoint start = iterator_start_position(self).extent; TSPoint end = iterator_end_position(self).extent; const char *name = ts_language_symbol_name(self->language, ts_subtree_symbol(*entry.subtree)); printf( "(%-25s %s\t depth:%u [%u, %u] - [%u, %u])", name, self->in_padding ? "(p)" : " ", self->visible_depth, start.row, start.column, end.row, end.column ); } #endif unsigned ts_subtree_get_changed_ranges( const Subtree *old_tree, const Subtree *new_tree, TreeCursor *cursor1, TreeCursor *cursor2, const TSLanguage *language, const TSRangeArray *included_range_differences, TSRange **ranges ) { TSRangeArray results = array_new(); Iterator old_iter = iterator_new(cursor1, old_tree, language); Iterator new_iter = iterator_new(cursor2, new_tree, language); unsigned included_range_difference_index = 0; Length position = iterator_start_position(&old_iter); Length next_position = iterator_start_position(&new_iter); if (position.bytes < next_position.bytes) { ts_range_array_add(&results, position, next_position); position = next_position; } else if (position.bytes > next_position.bytes) { ts_range_array_add(&results, next_position, position); next_position = position; } do { #ifdef DEBUG_GET_CHANGED_RANGES printf("At [%-2u, %-2u] Compare ", position.extent.row, position.extent.column); iterator_print_state(&old_iter); printf("\tvs\t"); iterator_print_state(&new_iter); puts(""); #endif // Compare the old and new subtrees. IteratorComparison comparison = iterator_compare(&old_iter, &new_iter); // Even if the two subtrees appear to be identical, they could differ // internally if they contain a range of text that was previously // excluded from the parse, and is now included, or vice-versa. 
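//
// Illustrative sketch (not part of the vendored sources) of how such ranges
// arise: a host document can be parsed with explicit included ranges, e.g.
// only the bytes of one embedded script (the coordinates here are hypothetical):
//
//   TSRange script_range = {
//     .start_point = {1, 8}, .end_point = {3, 0},
//     .start_byte = 12, .end_byte = 40,
//   };
//   ts_parser_set_included_ranges(parser, &script_range, 1);
//
// If a later edit changes which bytes are included, a subtree covering those
// bytes can be affected even though its own text did not change, which is
// what the check below accounts for.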
if (comparison == IteratorMatches && ts_range_array_intersects( included_range_differences, included_range_difference_index, position.bytes, iterator_end_position(&old_iter).bytes )) { comparison = IteratorMayDiffer; } bool is_changed = false; switch (comparison) { // If the subtrees are definitely identical, move to the end // of both subtrees. case IteratorMatches: next_position = iterator_end_position(&old_iter); break; // If the subtrees might differ internally, descend into both // subtrees, finding the first child that spans the current position. case IteratorMayDiffer: if (iterator_descend(&old_iter, position.bytes)) { if (!iterator_descend(&new_iter, position.bytes)) { is_changed = true; next_position = iterator_end_position(&old_iter); } } else if (iterator_descend(&new_iter, position.bytes)) { is_changed = true; next_position = iterator_end_position(&new_iter); } else { next_position = length_min( iterator_end_position(&old_iter), iterator_end_position(&new_iter) ); } break; // If the subtrees are different, record a change and then move // to the end of both subtrees. case IteratorDiffers: is_changed = true; next_position = length_min( iterator_end_position(&old_iter), iterator_end_position(&new_iter) ); break; } // Ensure that both iterators are caught up to the current position. while ( !iterator_done(&old_iter) && iterator_end_position(&old_iter).bytes <= next_position.bytes ) iterator_advance(&old_iter); while ( !iterator_done(&new_iter) && iterator_end_position(&new_iter).bytes <= next_position.bytes ) iterator_advance(&new_iter); // Ensure that both iterators are at the same depth in the tree. while (old_iter.visible_depth > new_iter.visible_depth) { iterator_ascend(&old_iter); } while (new_iter.visible_depth > old_iter.visible_depth) { iterator_ascend(&new_iter); } if (is_changed) { #ifdef DEBUG_GET_CHANGED_RANGES printf( " change: [[%u, %u] - [%u, %u]]\n", position.extent.row + 1, position.extent.column, next_position.extent.row + 1, next_position.extent.column ); #endif ts_range_array_add(&results, position, next_position); } position = next_position; // Keep track of the current position in the included range differences // array in order to avoid scanning the entire array on each iteration. 
while (included_range_difference_index < included_range_differences->size) { const TSRange *range = array_get(included_range_differences, included_range_difference_index ); if (range->end_byte <= position.bytes) { included_range_difference_index++; } else { break; } } } while (!iterator_done(&old_iter) && !iterator_done(&new_iter)); Length old_size = ts_subtree_total_size(*old_tree); Length new_size = ts_subtree_total_size(*new_tree); if (old_size.bytes < new_size.bytes) { ts_range_array_add(&results, old_size, new_size); } else if (new_size.bytes < old_size.bytes) { ts_range_array_add(&results, new_size, old_size); } *cursor1 = old_iter.cursor; *cursor2 = new_iter.cursor; *ranges = results.contents; return results.size; } hx-0.3.0+20250717/bindings/vendor/src/get_changed_ranges.h000066400000000000000000000015041503625671400226660ustar00rootroot00000000000000#ifndef TREE_SITTER_GET_CHANGED_RANGES_H_ #define TREE_SITTER_GET_CHANGED_RANGES_H_ #ifdef __cplusplus extern "C" { #endif #include "./tree_cursor.h" #include "./subtree.h" typedef Array(TSRange) TSRangeArray; void ts_range_array_get_changed_ranges( const TSRange *old_ranges, unsigned old_range_count, const TSRange *new_ranges, unsigned new_range_count, TSRangeArray *differences ); bool ts_range_array_intersects( const TSRangeArray *self, unsigned start_index, uint32_t start_byte, uint32_t end_byte ); unsigned ts_subtree_get_changed_ranges( const Subtree *old_tree, const Subtree *new_tree, TreeCursor *cursor1, TreeCursor *cursor2, const TSLanguage *language, const TSRangeArray *included_range_differences, TSRange **ranges ); #ifdef __cplusplus } #endif #endif // TREE_SITTER_GET_CHANGED_RANGES_H_ hx-0.3.0+20250717/bindings/vendor/src/host.h000066400000000000000000000010101503625671400200440ustar00rootroot00000000000000 // Determine endian and pointer size based on known defines. // TS_BIG_ENDIAN and TS_PTR_SIZE can be set as -D compiler arguments // to override this. 
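//
// Illustrative override (not part of the vendored sources): when a toolchain
// defines neither __BYTE_ORDER__ nor the Apple PowerPC macros, the detection
// below can be bypassed explicitly, e.g. when cross-compiling for a 32-bit
// big-endian target:
//
//   cc -DTS_BIG_ENDIAN=1 -DTS_PTR_SIZE=32 -c lib.c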
#if !defined(TS_BIG_ENDIAN) #if (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__) \ || (defined( __APPLE_CC__) && (defined(__ppc__) || defined(__ppc64__))) #define TS_BIG_ENDIAN 1 #else #define TS_BIG_ENDIAN 0 #endif #endif #if !defined(TS_PTR_SIZE) #if UINTPTR_MAX == 0xFFFFFFFF #define TS_PTR_SIZE 32 #else #define TS_PTR_SIZE 64 #endif #endif hx-0.3.0+20250717/bindings/vendor/src/language.c000066400000000000000000000205021503625671400206540ustar00rootroot00000000000000#include "./language.h" #include "./wasm_store.h" #include "tree_sitter/api.h" #include const TSLanguage *ts_language_copy(const TSLanguage *self) { if (self && ts_language_is_wasm(self)) { ts_wasm_language_retain(self); } return self; } void ts_language_delete(const TSLanguage *self) { if (self && ts_language_is_wasm(self)) { ts_wasm_language_release(self); } } uint32_t ts_language_symbol_count(const TSLanguage *self) { return self->symbol_count + self->alias_count; } uint32_t ts_language_state_count(const TSLanguage *self) { return self->state_count; } const TSSymbol *ts_language_supertypes(const TSLanguage *self, uint32_t *length) { if (self->abi_version >= LANGUAGE_VERSION_WITH_RESERVED_WORDS) { *length = self->supertype_count; return self->supertype_symbols; } else { *length = 0; return NULL; } } const TSSymbol *ts_language_subtypes( const TSLanguage *self, TSSymbol supertype, uint32_t *length ) { if (self->abi_version < LANGUAGE_VERSION_WITH_RESERVED_WORDS || !ts_language_symbol_metadata(self, supertype).supertype) { *length = 0; return NULL; } TSMapSlice slice = self->supertype_map_slices[supertype]; *length = slice.length; return &self->supertype_map_entries[slice.index]; } uint32_t ts_language_version(const TSLanguage *self) { return self->abi_version; } uint32_t ts_language_abi_version(const TSLanguage *self) { return self->abi_version; } const TSLanguageMetadata *ts_language_metadata(const TSLanguage *self) { return self->abi_version >= LANGUAGE_VERSION_WITH_RESERVED_WORDS ? &self->metadata : NULL; } const char *ts_language_name(const TSLanguage *self) { return self->abi_version >= LANGUAGE_VERSION_WITH_RESERVED_WORDS ? 
self->name : NULL; } uint32_t ts_language_field_count(const TSLanguage *self) { return self->field_count; } void ts_language_table_entry( const TSLanguage *self, TSStateId state, TSSymbol symbol, TableEntry *result ) { if (symbol == ts_builtin_sym_error || symbol == ts_builtin_sym_error_repeat) { result->action_count = 0; result->is_reusable = false; result->actions = NULL; } else { ts_assert(symbol < self->token_count); uint32_t action_index = ts_language_lookup(self, state, symbol); const TSParseActionEntry *entry = &self->parse_actions[action_index]; result->action_count = entry->entry.count; result->is_reusable = entry->entry.reusable; result->actions = (const TSParseAction *)(entry + 1); } } TSLexerMode ts_language_lex_mode_for_state( const TSLanguage *self, TSStateId state ) { if (self->abi_version < 15) { TSLexMode mode = ((const TSLexMode *)self->lex_modes)[state]; return (TSLexerMode) { .lex_state = mode.lex_state, .external_lex_state = mode.external_lex_state, .reserved_word_set_id = 0, }; } else { return self->lex_modes[state]; } } bool ts_language_is_reserved_word( const TSLanguage *self, TSStateId state, TSSymbol symbol ) { TSLexerMode lex_mode = ts_language_lex_mode_for_state(self, state); if (lex_mode.reserved_word_set_id > 0) { unsigned start = lex_mode.reserved_word_set_id * self->max_reserved_word_set_size; unsigned end = start + self->max_reserved_word_set_size; for (unsigned i = start; i < end; i++) { if (self->reserved_words[i] == symbol) return true; if (self->reserved_words[i] == 0) break; } } return false; } TSSymbolMetadata ts_language_symbol_metadata( const TSLanguage *self, TSSymbol symbol ) { if (symbol == ts_builtin_sym_error) { return (TSSymbolMetadata) {.visible = true, .named = true}; } else if (symbol == ts_builtin_sym_error_repeat) { return (TSSymbolMetadata) {.visible = false, .named = false}; } else { return self->symbol_metadata[symbol]; } } TSSymbol ts_language_public_symbol( const TSLanguage *self, TSSymbol symbol ) { if (symbol == ts_builtin_sym_error) return symbol; return self->public_symbol_map[symbol]; } TSStateId ts_language_next_state( const TSLanguage *self, TSStateId state, TSSymbol symbol ) { if (symbol == ts_builtin_sym_error || symbol == ts_builtin_sym_error_repeat) { return 0; } else if (symbol < self->token_count) { uint32_t count; const TSParseAction *actions = ts_language_actions(self, state, symbol, &count); if (count > 0) { TSParseAction action = actions[count - 1]; if (action.type == TSParseActionTypeShift) { return action.shift.extra ? 
state : action.shift.state; } } return 0; } else { return ts_language_lookup(self, state, symbol); } } const char *ts_language_symbol_name( const TSLanguage *self, TSSymbol symbol ) { if (symbol == ts_builtin_sym_error) { return "ERROR"; } else if (symbol == ts_builtin_sym_error_repeat) { return "_ERROR"; } else if (symbol < ts_language_symbol_count(self)) { return self->symbol_names[symbol]; } else { return NULL; } } TSSymbol ts_language_symbol_for_name( const TSLanguage *self, const char *string, uint32_t length, bool is_named ) { if (!strncmp(string, "ERROR", length)) return ts_builtin_sym_error; uint16_t count = (uint16_t)ts_language_symbol_count(self); for (TSSymbol i = 0; i < count; i++) { TSSymbolMetadata metadata = ts_language_symbol_metadata(self, i); if ((!metadata.visible && !metadata.supertype) || metadata.named != is_named) continue; const char *symbol_name = self->symbol_names[i]; if (!strncmp(symbol_name, string, length) && !symbol_name[length]) { return self->public_symbol_map[i]; } } return 0; } TSSymbolType ts_language_symbol_type( const TSLanguage *self, TSSymbol symbol ) { TSSymbolMetadata metadata = ts_language_symbol_metadata(self, symbol); if (metadata.named && metadata.visible) { return TSSymbolTypeRegular; } else if (metadata.visible) { return TSSymbolTypeAnonymous; } else if (metadata.supertype) { return TSSymbolTypeSupertype; } else { return TSSymbolTypeAuxiliary; } } const char *ts_language_field_name_for_id( const TSLanguage *self, TSFieldId id ) { uint32_t count = ts_language_field_count(self); if (count && id <= count) { return self->field_names[id]; } else { return NULL; } } TSFieldId ts_language_field_id_for_name( const TSLanguage *self, const char *name, uint32_t name_length ) { uint16_t count = (uint16_t)ts_language_field_count(self); for (TSSymbol i = 1; i < count + 1; i++) { switch (strncmp(name, self->field_names[i], name_length)) { case 0: if (self->field_names[i][name_length] == 0) return i; break; case -1: return 0; default: break; } } return 0; } TSLookaheadIterator *ts_lookahead_iterator_new(const TSLanguage *self, TSStateId state) { if (state >= self->state_count) return NULL; LookaheadIterator *iterator = ts_malloc(sizeof(LookaheadIterator)); *iterator = ts_language_lookaheads(self, state); return (TSLookaheadIterator *)iterator; } void ts_lookahead_iterator_delete(TSLookaheadIterator *self) { ts_free(self); } bool ts_lookahead_iterator_reset_state(TSLookaheadIterator * self, TSStateId state) { LookaheadIterator *iterator = (LookaheadIterator *)self; if (state >= iterator->language->state_count) return false; *iterator = ts_language_lookaheads(iterator->language, state); return true; } const TSLanguage *ts_lookahead_iterator_language(const TSLookaheadIterator *self) { const LookaheadIterator *iterator = (const LookaheadIterator *)self; return iterator->language; } bool ts_lookahead_iterator_reset(TSLookaheadIterator *self, const TSLanguage *language, TSStateId state) { if (state >= language->state_count) return false; LookaheadIterator *iterator = (LookaheadIterator *)self; *iterator = ts_language_lookaheads(language, state); return true; } bool ts_lookahead_iterator_next(TSLookaheadIterator *self) { LookaheadIterator *iterator = (LookaheadIterator *)self; return ts_lookahead_iterator__next(iterator); } TSSymbol ts_lookahead_iterator_current_symbol(const TSLookaheadIterator *self) { const LookaheadIterator *iterator = (const LookaheadIterator *)self; return iterator->symbol; } const char *ts_lookahead_iterator_current_symbol_name(const 
TSLookaheadIterator *self) { const LookaheadIterator *iterator = (const LookaheadIterator *)self; return ts_language_symbol_name(iterator->language, iterator->symbol); } hx-0.3.0+20250717/bindings/vendor/src/language.h000066400000000000000000000202231503625671400206610ustar00rootroot00000000000000#ifndef TREE_SITTER_LANGUAGE_H_ #define TREE_SITTER_LANGUAGE_H_ #ifdef __cplusplus extern "C" { #endif #include "./subtree.h" #include "./parser.h" #define ts_builtin_sym_error_repeat (ts_builtin_sym_error - 1) #define LANGUAGE_VERSION_WITH_RESERVED_WORDS 15 #define LANGUAGE_VERSION_WITH_PRIMARY_STATES 14 typedef struct { const TSParseAction *actions; uint32_t action_count; bool is_reusable; } TableEntry; typedef struct { const TSLanguage *language; const uint16_t *data; const uint16_t *group_end; TSStateId state; uint16_t table_value; uint16_t section_index; uint16_t group_count; bool is_small_state; const TSParseAction *actions; TSSymbol symbol; TSStateId next_state; uint16_t action_count; } LookaheadIterator; void ts_language_table_entry(const TSLanguage *self, TSStateId state, TSSymbol symbol, TableEntry *result); TSLexerMode ts_language_lex_mode_for_state(const TSLanguage *self, TSStateId state); bool ts_language_is_reserved_word(const TSLanguage *self, TSStateId state, TSSymbol symbol); TSSymbolMetadata ts_language_symbol_metadata(const TSLanguage *self, TSSymbol symbol); TSSymbol ts_language_public_symbol(const TSLanguage *self, TSSymbol symbol); static inline const TSParseAction *ts_language_actions( const TSLanguage *self, TSStateId state, TSSymbol symbol, uint32_t *count ) { TableEntry entry; ts_language_table_entry(self, state, symbol, &entry); *count = entry.action_count; return entry.actions; } static inline bool ts_language_has_reduce_action( const TSLanguage *self, TSStateId state, TSSymbol symbol ) { TableEntry entry; ts_language_table_entry(self, state, symbol, &entry); return entry.action_count > 0 && entry.actions[0].type == TSParseActionTypeReduce; } // Lookup the table value for a given symbol and state. // // For non-terminal symbols, the table value represents a successor state. // For terminal symbols, it represents an index in the actions table. // For 'large' parse states, this is a direct lookup. For 'small' parse // states, this requires searching through the symbol groups to find // the given symbol. static inline uint16_t ts_language_lookup( const TSLanguage *self, TSStateId state, TSSymbol symbol ) { if (state >= self->large_state_count) { uint32_t index = self->small_parse_table_map[state - self->large_state_count]; const uint16_t *data = &self->small_parse_table[index]; uint16_t group_count = *(data++); for (unsigned i = 0; i < group_count; i++) { uint16_t section_value = *(data++); uint16_t symbol_count = *(data++); for (unsigned j = 0; j < symbol_count; j++) { if (*(data++) == symbol) return section_value; } } return 0; } else { return self->parse_table[state * self->symbol_count + symbol]; } } static inline bool ts_language_has_actions( const TSLanguage *self, TSStateId state, TSSymbol symbol ) { return ts_language_lookup(self, state, symbol) != 0; } // Iterate over all of the symbols that are valid in the given state. // // For 'large' parse states, this just requires iterating through // all possible symbols and checking the parse table for each one. // For 'small' parse states, this exploits the structure of the // table to only visit the valid symbols. 
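//
// Illustrative usage sketch (not part of the vendored sources): internal
// callers drive the iterator roughly like this, and the public
// `ts_lookahead_iterator_*` wrappers in language.c expose the same loop:
//
//   LookaheadIterator iter = ts_language_lookaheads(language, state);
//   while (ts_lookahead_iterator__next(&iter)) {
//     const char *name = ts_language_symbol_name(language, iter.symbol);
//     // iter.actions / iter.next_state describe the table entry for that symbol.
//   }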
static inline LookaheadIterator ts_language_lookaheads( const TSLanguage *self, TSStateId state ) { bool is_small_state = state >= self->large_state_count; const uint16_t *data; const uint16_t *group_end = NULL; uint16_t group_count = 0; if (is_small_state) { uint32_t index = self->small_parse_table_map[state - self->large_state_count]; data = &self->small_parse_table[index]; group_end = data + 1; group_count = *data; } else { data = &self->parse_table[state * self->symbol_count] - 1; } return (LookaheadIterator) { .language = self, .data = data, .group_end = group_end, .group_count = group_count, .is_small_state = is_small_state, .symbol = UINT16_MAX, .next_state = 0, }; } static inline bool ts_lookahead_iterator__next(LookaheadIterator *self) { // For small parse states, valid symbols are listed explicitly, // grouped by their value. There's no need to look up the actions // again until moving to the next group. if (self->is_small_state) { self->data++; if (self->data == self->group_end) { if (self->group_count == 0) return false; self->group_count--; self->table_value = *(self->data++); unsigned symbol_count = *(self->data++); self->group_end = self->data + symbol_count; self->symbol = *self->data; } else { self->symbol = *self->data; return true; } } // For large parse states, iterate through every symbol until one // is found that has valid actions. else { do { self->data++; self->symbol++; if (self->symbol >= self->language->symbol_count) return false; self->table_value = *self->data; } while (!self->table_value); } // Depending on if the symbols is terminal or non-terminal, the table value either // represents a list of actions or a successor state. if (self->symbol < self->language->token_count) { const TSParseActionEntry *entry = &self->language->parse_actions[self->table_value]; self->action_count = entry->entry.count; self->actions = (const TSParseAction *)(entry + 1); self->next_state = 0; } else { self->action_count = 0; self->next_state = self->table_value; } return true; } // Whether the state is a "primary state". If this returns false, it indicates that there exists // another state that behaves identically to this one with respect to query analysis. static inline bool ts_language_state_is_primary( const TSLanguage *self, TSStateId state ) { if (self->abi_version >= LANGUAGE_VERSION_WITH_PRIMARY_STATES) { return state == self->primary_state_ids[state]; } else { return true; } } static inline const bool *ts_language_enabled_external_tokens( const TSLanguage *self, unsigned external_scanner_state ) { if (external_scanner_state == 0) { return NULL; } else { return self->external_scanner.states + self->external_token_count * external_scanner_state; } } static inline const TSSymbol *ts_language_alias_sequence( const TSLanguage *self, uint32_t production_id ) { return production_id ? &self->alias_sequences[production_id * self->max_alias_sequence_length] : NULL; } static inline TSSymbol ts_language_alias_at( const TSLanguage *self, uint32_t production_id, uint32_t child_index ) { return production_id ? 
self->alias_sequences[production_id * self->max_alias_sequence_length + child_index] : 0; } static inline void ts_language_field_map( const TSLanguage *self, uint32_t production_id, const TSFieldMapEntry **start, const TSFieldMapEntry **end ) { if (self->field_count == 0) { *start = NULL; *end = NULL; return; } TSMapSlice slice = self->field_map_slices[production_id]; *start = &self->field_map_entries[slice.index]; *end = &self->field_map_entries[slice.index] + slice.length; } static inline void ts_language_aliases_for_symbol( const TSLanguage *self, TSSymbol original_symbol, const TSSymbol **start, const TSSymbol **end ) { *start = &self->public_symbol_map[original_symbol]; *end = *start + 1; unsigned idx = 0; for (;;) { TSSymbol symbol = self->alias_map[idx++]; if (symbol == 0 || symbol > original_symbol) break; uint16_t count = self->alias_map[idx++]; if (symbol == original_symbol) { *start = &self->alias_map[idx]; *end = &self->alias_map[idx + count]; break; } idx += count; } } static inline void ts_language_write_symbol_as_dot_string( const TSLanguage *self, FILE *f, TSSymbol symbol ) { const char *name = ts_language_symbol_name(self, symbol); for (const char *chr = name; *chr; chr++) { switch (*chr) { case '"': case '\\': fputc('\\', f); fputc(*chr, f); break; case '\n': fputs("\\n", f); break; case '\t': fputs("\\t", f); break; default: fputc(*chr, f); break; } } } #ifdef __cplusplus } #endif #endif // TREE_SITTER_LANGUAGE_H_ hx-0.3.0+20250717/bindings/vendor/src/length.h000066400000000000000000000023731503625671400203650ustar00rootroot00000000000000#ifndef TREE_SITTER_LENGTH_H_ #define TREE_SITTER_LENGTH_H_ #include #include #include "./point.h" #include "tree_sitter/api.h" typedef struct { uint32_t bytes; TSPoint extent; } Length; static const Length LENGTH_UNDEFINED = {0, {0, 1}}; static const Length LENGTH_MAX = {UINT32_MAX, {UINT32_MAX, UINT32_MAX}}; static inline bool length_is_undefined(Length length) { return length.bytes == 0 && length.extent.column != 0; } static inline Length length_min(Length len1, Length len2) { return (len1.bytes < len2.bytes) ? len1 : len2; } static inline Length length_add(Length len1, Length len2) { Length result; result.bytes = len1.bytes + len2.bytes; result.extent = point_add(len1.extent, len2.extent); return result; } static inline Length length_sub(Length len1, Length len2) { Length result; result.bytes = (len1.bytes >= len2.bytes) ? len1.bytes - len2.bytes : 0; result.extent = point_sub(len1.extent, len2.extent); return result; } static inline Length length_zero(void) { Length result = {0, {0, 0}}; return result; } static inline Length length_saturating_sub(Length len1, Length len2) { if (len1.bytes > len2.bytes) { return length_sub(len1, len2); } else { return length_zero(); } } #endif hx-0.3.0+20250717/bindings/vendor/src/lexer.c000066400000000000000000000354241503625671400202210ustar00rootroot00000000000000#include "./length.h" #include "./lexer.h" #include "./unicode.h" #include "tree_sitter/api.h" #include #include #define LOG(message, character) \ if (self->logger.log) { \ snprintf( \ self->debug_buffer, \ TREE_SITTER_SERIALIZATION_BUFFER_SIZE, \ 32 <= character && character < 127 ? 
\ message " character:'%c'" : \ message " character:%d", \ character \ ); \ self->logger.log( \ self->logger.payload, \ TSLogTypeLex, \ self->debug_buffer \ ); \ } static const int32_t BYTE_ORDER_MARK = 0xFEFF; static const TSRange DEFAULT_RANGE = { .start_point = { .row = 0, .column = 0, }, .end_point = { .row = UINT32_MAX, .column = UINT32_MAX, }, .start_byte = 0, .end_byte = UINT32_MAX }; /** * Sets the column data to the given value and marks it valid. * @param self The lexer state. * @param val The new value of the column data. */ static void ts_lexer__set_column_data(Lexer *self, uint32_t val) { self->column_data.valid = true; self->column_data.value = val; } /** * Increments the value of the column data; no-op if invalid. * @param self The lexer state. */ static void ts_lexer__increment_column_data(Lexer *self) { if (self->column_data.valid) { self->column_data.value++; } } /** * Marks the column data as invalid. * @param self The lexer state. */ static void ts_lexer__invalidate_column_data(Lexer *self) { self->column_data.valid = false; self->column_data.value = 0; } // Check if the lexer has reached EOF. This state is stored // by setting the lexer's `current_included_range_index` such that // it has consumed all of its available ranges. static bool ts_lexer__eof(const TSLexer *_self) { Lexer *self = (Lexer *)_self; return self->current_included_range_index == self->included_range_count; } // Clear the currently stored chunk of source code, because the lexer's // position has changed. static void ts_lexer__clear_chunk(Lexer *self) { self->chunk = NULL; self->chunk_size = 0; self->chunk_start = 0; } // Call the lexer's input callback to obtain a new chunk of source code // for the current position. static void ts_lexer__get_chunk(Lexer *self) { self->chunk_start = self->current_position.bytes; self->chunk = self->input.read( self->input.payload, self->current_position.bytes, self->current_position.extent, &self->chunk_size ); if (!self->chunk_size) { self->current_included_range_index = self->included_range_count; self->chunk = NULL; } } // Decode the next unicode character in the current chunk of source code. // This assumes that the lexer has already retrieved a chunk of source // code that spans the current position. static void ts_lexer__get_lookahead(Lexer *self) { uint32_t position_in_chunk = self->current_position.bytes - self->chunk_start; uint32_t size = self->chunk_size - position_in_chunk; if (size == 0) { self->lookahead_size = 1; self->data.lookahead = '\0'; return; } const uint8_t *chunk = (const uint8_t *)self->chunk + position_in_chunk; DecodeFunction decode = self->input.encoding == TSInputEncodingUTF8 ? ts_decode_utf8 : self->input.encoding == TSInputEncodingUTF16LE ? ts_decode_utf16_le : self->input.encoding == TSInputEncodingUTF16BE ? ts_decode_utf16_be : self->input.decode; self->lookahead_size = decode(chunk, size, &self->data.lookahead); // If this chunk ended in the middle of a multi-byte character, // try again with a fresh chunk. 
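//
// Concrete illustration (not part of the vendored sources): if the chunk
// handed back by the read callback ends with the lone lead byte 0xC3 of the
// two-byte UTF-8 sequence 0xC3 0xA9 ("é"), the decode call above reports
// TS_DECODE_ERROR with fewer than four bytes remaining, so the retry below
// fetches a fresh chunk at the current position and decodes the full character.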
if (self->data.lookahead == TS_DECODE_ERROR && size < 4) { ts_lexer__get_chunk(self); chunk = (const uint8_t *)self->chunk; size = self->chunk_size; self->lookahead_size = decode(chunk, size, &self->data.lookahead); } if (self->data.lookahead == TS_DECODE_ERROR) { self->lookahead_size = 1; } } static void ts_lexer_goto(Lexer *self, Length position) { if (position.bytes != self->current_position.bytes) { ts_lexer__invalidate_column_data(self); } self->current_position = position; // Move to the first valid position at or after the given position. bool found_included_range = false; for (unsigned i = 0; i < self->included_range_count; i++) { TSRange *included_range = &self->included_ranges[i]; if ( included_range->end_byte > self->current_position.bytes && included_range->end_byte > included_range->start_byte ) { if (included_range->start_byte >= self->current_position.bytes) { self->current_position = (Length) { .bytes = included_range->start_byte, .extent = included_range->start_point, }; } self->current_included_range_index = i; found_included_range = true; break; } } if (found_included_range) { // If the current position is outside of the current chunk of text, // then clear out the current chunk of text. if (self->chunk && ( self->current_position.bytes < self->chunk_start || self->current_position.bytes >= self->chunk_start + self->chunk_size )) { ts_lexer__clear_chunk(self); } self->lookahead_size = 0; self->data.lookahead = '\0'; } // If the given position is beyond any of included ranges, move to the EOF // state - past the end of the included ranges. else { self->current_included_range_index = self->included_range_count; TSRange *last_included_range = &self->included_ranges[self->included_range_count - 1]; self->current_position = (Length) { .bytes = last_included_range->end_byte, .extent = last_included_range->end_point, }; ts_lexer__clear_chunk(self); self->lookahead_size = 1; self->data.lookahead = '\0'; } } /** * Actually advances the lexer. Does not log anything. * @param self The lexer state. * @param skip Whether to mark the consumed codepoint as whitespace. 
*/ static void ts_lexer__do_advance(Lexer *self, bool skip) { if (self->lookahead_size) { if (self->data.lookahead == '\n') { self->current_position.extent.row++; self->current_position.extent.column = 0; ts_lexer__set_column_data(self, 0); } else { bool is_bom = self->current_position.bytes == 0 && self->data.lookahead == BYTE_ORDER_MARK; if (!is_bom) ts_lexer__increment_column_data(self); self->current_position.extent.column += self->lookahead_size; } self->current_position.bytes += self->lookahead_size; } const TSRange *current_range = &self->included_ranges[self->current_included_range_index]; while ( self->current_position.bytes >= current_range->end_byte || current_range->end_byte == current_range->start_byte ) { if (self->current_included_range_index < self->included_range_count) { self->current_included_range_index++; } if (self->current_included_range_index < self->included_range_count) { current_range++; self->current_position = (Length) { current_range->start_byte, current_range->start_point, }; } else { current_range = NULL; break; } } if (skip) self->token_start_position = self->current_position; if (current_range) { if ( self->current_position.bytes < self->chunk_start || self->current_position.bytes >= self->chunk_start + self->chunk_size ) { ts_lexer__get_chunk(self); } ts_lexer__get_lookahead(self); } else { ts_lexer__clear_chunk(self); self->data.lookahead = '\0'; self->lookahead_size = 1; } } // Advance to the next character in the source code, retrieving a new // chunk of source code if needed. static void ts_lexer__advance(TSLexer *_self, bool skip) { Lexer *self = (Lexer *)_self; if (!self->chunk) return; if (skip) { LOG("skip", self->data.lookahead) } else { LOG("consume", self->data.lookahead) } ts_lexer__do_advance(self, skip); } // Mark that a token match has completed. This can be called multiple // times if a longer match is found later. static void ts_lexer__mark_end(TSLexer *_self) { Lexer *self = (Lexer *)_self; if (!ts_lexer__eof(&self->data)) { // If the lexer is right at the beginning of included range, // then the token should be considered to end at the *end* of the // previous included range, rather than here. TSRange *current_included_range = &self->included_ranges[ self->current_included_range_index ]; if ( self->current_included_range_index > 0 && self->current_position.bytes == current_included_range->start_byte ) { TSRange *previous_included_range = current_included_range - 1; self->token_end_position = (Length) { previous_included_range->end_byte, previous_included_range->end_point, }; return; } } self->token_end_position = self->current_position; } static uint32_t ts_lexer__get_column(TSLexer *_self) { Lexer *self = (Lexer *)_self; self->did_get_column = true; if (!self->column_data.valid) { // Record current position uint32_t goal_byte = self->current_position.bytes; // Back up to the beginning of the line Length start_of_col = { self->current_position.bytes - self->current_position.extent.column, {self->current_position.extent.row, 0}, }; ts_lexer_goto(self, start_of_col); ts_lexer__set_column_data(self, 0); ts_lexer__get_chunk(self); if (!ts_lexer__eof(_self)) { ts_lexer__get_lookahead(self); // Advance to the recorded position while (self->current_position.bytes < goal_byte && !ts_lexer__eof(_self) && self->chunk) { ts_lexer__do_advance(self, false); if (ts_lexer__eof(_self)) break; } } } return self->column_data.value; } // Is the lexer at a boundary between two disjoint included ranges of // source code? 
This is exposed as an API because some languages' external // scanners need to perform custom actions at these boundaries. static bool ts_lexer__is_at_included_range_start(const TSLexer *_self) { const Lexer *self = (const Lexer *)_self; if (self->current_included_range_index < self->included_range_count) { TSRange *current_range = &self->included_ranges[self->current_included_range_index]; return self->current_position.bytes == current_range->start_byte; } else { return false; } } static void ts_lexer__log(const TSLexer *_self, const char *fmt, ...) { Lexer *self = (Lexer *)_self; va_list args; va_start(args, fmt); if (self->logger.log) { vsnprintf(self->debug_buffer, TREE_SITTER_SERIALIZATION_BUFFER_SIZE, fmt, args); self->logger.log(self->logger.payload, TSLogTypeLex, self->debug_buffer); } va_end(args); } void ts_lexer_init(Lexer *self) { *self = (Lexer) { .data = { // The lexer's methods are stored as struct fields so that generated // parsers can call them without needing to be linked against this // library. .advance = ts_lexer__advance, .mark_end = ts_lexer__mark_end, .get_column = ts_lexer__get_column, .is_at_included_range_start = ts_lexer__is_at_included_range_start, .eof = ts_lexer__eof, .log = ts_lexer__log, .lookahead = 0, .result_symbol = 0, }, .chunk = NULL, .chunk_size = 0, .chunk_start = 0, .current_position = {0, {0, 0}}, .logger = { .payload = NULL, .log = NULL }, .included_ranges = NULL, .included_range_count = 0, .current_included_range_index = 0, .did_get_column = false, .column_data = { .valid = false, .value = 0 } }; ts_lexer_set_included_ranges(self, NULL, 0); } void ts_lexer_delete(Lexer *self) { ts_free(self->included_ranges); } void ts_lexer_set_input(Lexer *self, TSInput input) { self->input = input; ts_lexer__clear_chunk(self); ts_lexer_goto(self, self->current_position); } // Move the lexer to the given position. This doesn't do any work // if the parser is already at the given position. void ts_lexer_reset(Lexer *self, Length position) { if (position.bytes != self->current_position.bytes) { ts_lexer_goto(self, position); } } void ts_lexer_start(Lexer *self) { self->token_start_position = self->current_position; self->token_end_position = LENGTH_UNDEFINED; self->data.result_symbol = 0; self->did_get_column = false; if (!ts_lexer__eof(&self->data)) { if (!self->chunk_size) ts_lexer__get_chunk(self); if (!self->lookahead_size) ts_lexer__get_lookahead(self); if (self->current_position.bytes == 0) { if (self->data.lookahead == BYTE_ORDER_MARK) { ts_lexer__advance(&self->data, true); } ts_lexer__set_column_data(self, 0); } } } void ts_lexer_finish(Lexer *self, uint32_t *lookahead_end_byte) { if (length_is_undefined(self->token_end_position)) { ts_lexer__mark_end(&self->data); } // If the token ended at an included range boundary, then its end position // will have been reset to the end of the preceding range. Reset the start // position to match. if (self->token_end_position.bytes < self->token_start_position.bytes) { self->token_start_position = self->token_end_position; } uint32_t current_lookahead_end_byte = self->current_position.bytes + 1; // In order to determine that a byte sequence is invalid UTF8 or UTF16, // the character decoding algorithm may have looked at the following byte. // Therefore, the next byte *after* the current (invalid) character // affects the interpretation of the current character. 
if (self->data.lookahead == TS_DECODE_ERROR) { current_lookahead_end_byte += 4; // the maximum number of bytes read to identify an invalid code point } if (current_lookahead_end_byte > *lookahead_end_byte) { *lookahead_end_byte = current_lookahead_end_byte; } } void ts_lexer_mark_end(Lexer *self) { ts_lexer__mark_end(&self->data); } bool ts_lexer_set_included_ranges( Lexer *self, const TSRange *ranges, uint32_t count ) { if (count == 0 || !ranges) { ranges = &DEFAULT_RANGE; count = 1; } else { uint32_t previous_byte = 0; for (unsigned i = 0; i < count; i++) { const TSRange *range = &ranges[i]; if ( range->start_byte < previous_byte || range->end_byte < range->start_byte ) return false; previous_byte = range->end_byte; } } size_t size = count * sizeof(TSRange); self->included_ranges = ts_realloc(self->included_ranges, size); memcpy(self->included_ranges, ranges, size); self->included_range_count = count; ts_lexer_goto(self, self->current_position); return true; } TSRange *ts_lexer_included_ranges(const Lexer *self, uint32_t *count) { *count = self->included_range_count; return self->included_ranges; } #undef LOG hx-0.3.0+20250717/bindings/vendor/src/lexer.h000066400000000000000000000023571503625671400202250ustar00rootroot00000000000000#ifndef TREE_SITTER_LEXER_H_ #define TREE_SITTER_LEXER_H_ #ifdef __cplusplus extern "C" { #endif #include "./length.h" #include "./subtree.h" #include "tree_sitter/api.h" #include "./parser.h" typedef struct { uint32_t value; bool valid; } ColumnData; typedef struct { TSLexer data; Length current_position; Length token_start_position; Length token_end_position; TSRange *included_ranges; const char *chunk; TSInput input; TSLogger logger; uint32_t included_range_count; uint32_t current_included_range_index; uint32_t chunk_start; uint32_t chunk_size; uint32_t lookahead_size; bool did_get_column; ColumnData column_data; char debug_buffer[TREE_SITTER_SERIALIZATION_BUFFER_SIZE]; } Lexer; void ts_lexer_init(Lexer *self); void ts_lexer_delete(Lexer *self); void ts_lexer_set_input(Lexer *self, TSInput input); void ts_lexer_reset(Lexer *self, Length position); void ts_lexer_start(Lexer *self); void ts_lexer_finish(Lexer *self, uint32_t *lookahead_end_byte); void ts_lexer_mark_end(Lexer *self); bool ts_lexer_set_included_ranges(Lexer *self, const TSRange *ranges, uint32_t count); TSRange *ts_lexer_included_ranges(const Lexer *self, uint32_t *count); #ifdef __cplusplus } #endif #endif // TREE_SITTER_LEXER_H_ hx-0.3.0+20250717/bindings/vendor/src/lib.c000066400000000000000000000004301503625671400176350ustar00rootroot00000000000000#include "./alloc.c" #include "./get_changed_ranges.c" #include "./language.c" #include "./lexer.c" #include "./node.c" #include "./parser.c" #include "./query.c" #include "./stack.c" #include "./subtree.c" #include "./tree_cursor.c" #include "./tree.c" #include "./wasm_store.c" hx-0.3.0+20250717/bindings/vendor/src/node.c000066400000000000000000000615251503625671400200300ustar00rootroot00000000000000#include #include "./point.h" #include "./subtree.h" #include "./tree.h" #include "./language.h" typedef struct { Subtree parent; const TSTree *tree; Length position; uint32_t child_index; uint32_t structural_child_index; const TSSymbol *alias_sequence; } NodeChildIterator; static inline bool ts_node__is_relevant(TSNode self, bool include_anonymous); // TSNode - constructors TSNode ts_node_new( const TSTree *tree, const Subtree *subtree, Length position, TSSymbol alias ) { return (TSNode) { {position.bytes, position.extent.row, position.extent.column, 
alias}, subtree, tree, }; } static inline TSNode ts_node__null(void) { return ts_node_new(NULL, NULL, length_zero(), 0); } // TSNode - accessors uint32_t ts_node_start_byte(TSNode self) { return self.context[0]; } TSPoint ts_node_start_point(TSNode self) { return (TSPoint) {self.context[1], self.context[2]}; } static inline uint32_t ts_node__alias(const TSNode *self) { return self->context[3]; } static inline Subtree ts_node__subtree(TSNode self) { return *(const Subtree *)self.id; } // NodeChildIterator static inline NodeChildIterator ts_node_iterate_children(const TSNode *node) { Subtree subtree = ts_node__subtree(*node); if (ts_subtree_child_count(subtree) == 0) { return (NodeChildIterator) {NULL_SUBTREE, node->tree, length_zero(), 0, 0, NULL}; } const TSSymbol *alias_sequence = ts_language_alias_sequence( node->tree->language, subtree.ptr->production_id ); return (NodeChildIterator) { .tree = node->tree, .parent = subtree, .position = {ts_node_start_byte(*node), ts_node_start_point(*node)}, .child_index = 0, .structural_child_index = 0, .alias_sequence = alias_sequence, }; } static inline bool ts_node_child_iterator_done(NodeChildIterator *self) { return self->child_index == self->parent.ptr->child_count; } static inline bool ts_node_child_iterator_next( NodeChildIterator *self, TSNode *result ) { if (!self->parent.ptr || ts_node_child_iterator_done(self)) return false; const Subtree *child = &ts_subtree_children(self->parent)[self->child_index]; TSSymbol alias_symbol = 0; if (!ts_subtree_extra(*child)) { if (self->alias_sequence) { alias_symbol = self->alias_sequence[self->structural_child_index]; } self->structural_child_index++; } if (self->child_index > 0) { self->position = length_add(self->position, ts_subtree_padding(*child)); } *result = ts_node_new( self->tree, child, self->position, alias_symbol ); self->position = length_add(self->position, ts_subtree_size(*child)); self->child_index++; return true; } // TSNode - private static inline bool ts_node__is_relevant(TSNode self, bool include_anonymous) { Subtree tree = ts_node__subtree(self); if (include_anonymous) { return ts_subtree_visible(tree) || ts_node__alias(&self); } else { TSSymbol alias = ts_node__alias(&self); if (alias) { return ts_language_symbol_metadata(self.tree->language, alias).named; } else { return ts_subtree_visible(tree) && ts_subtree_named(tree); } } } static inline uint32_t ts_node__relevant_child_count( TSNode self, bool include_anonymous ) { Subtree tree = ts_node__subtree(self); if (ts_subtree_child_count(tree) > 0) { if (include_anonymous) { return tree.ptr->visible_child_count; } else { return tree.ptr->named_child_count; } } else { return 0; } } static inline TSNode ts_node__child( TSNode self, uint32_t child_index, bool include_anonymous ) { TSNode result = self; bool did_descend = true; while (did_descend) { did_descend = false; TSNode child; uint32_t index = 0; NodeChildIterator iterator = ts_node_iterate_children(&result); while (ts_node_child_iterator_next(&iterator, &child)) { if (ts_node__is_relevant(child, include_anonymous)) { if (index == child_index) { return child; } index++; } else { uint32_t grandchild_index = child_index - index; uint32_t grandchild_count = ts_node__relevant_child_count(child, include_anonymous); if (grandchild_index < grandchild_count) { did_descend = true; result = child; child_index = grandchild_index; break; } index += grandchild_count; } } } return ts_node__null(); } static bool ts_subtree_has_trailing_empty_descendant( Subtree self, Subtree other ) { for 
(unsigned i = ts_subtree_child_count(self) - 1; i + 1 > 0; i--) { Subtree child = ts_subtree_children(self)[i]; if (ts_subtree_total_bytes(child) > 0) break; if (child.ptr == other.ptr || ts_subtree_has_trailing_empty_descendant(child, other)) { return true; } } return false; } static inline TSNode ts_node__prev_sibling(TSNode self, bool include_anonymous) { Subtree self_subtree = ts_node__subtree(self); bool self_is_empty = ts_subtree_total_bytes(self_subtree) == 0; uint32_t target_end_byte = ts_node_end_byte(self); TSNode node = ts_node_parent(self); TSNode earlier_node = ts_node__null(); bool earlier_node_is_relevant = false; while (!ts_node_is_null(node)) { TSNode earlier_child = ts_node__null(); bool earlier_child_is_relevant = false; bool found_child_containing_target = false; TSNode child; NodeChildIterator iterator = ts_node_iterate_children(&node); while (ts_node_child_iterator_next(&iterator, &child)) { if (child.id == self.id) break; if (iterator.position.bytes > target_end_byte) { found_child_containing_target = true; break; } if (iterator.position.bytes == target_end_byte && (!self_is_empty || ts_subtree_has_trailing_empty_descendant(ts_node__subtree(child), self_subtree))) { found_child_containing_target = true; break; } if (ts_node__is_relevant(child, include_anonymous)) { earlier_child = child; earlier_child_is_relevant = true; } else if (ts_node__relevant_child_count(child, include_anonymous) > 0) { earlier_child = child; earlier_child_is_relevant = false; } } if (found_child_containing_target) { if (!ts_node_is_null(earlier_child)) { earlier_node = earlier_child; earlier_node_is_relevant = earlier_child_is_relevant; } node = child; } else if (earlier_child_is_relevant) { return earlier_child; } else if (!ts_node_is_null(earlier_child)) { node = earlier_child; } else if (earlier_node_is_relevant) { return earlier_node; } else { node = earlier_node; earlier_node = ts_node__null(); earlier_node_is_relevant = false; } } return ts_node__null(); } static inline TSNode ts_node__next_sibling(TSNode self, bool include_anonymous) { uint32_t target_end_byte = ts_node_end_byte(self); TSNode node = ts_node_parent(self); TSNode later_node = ts_node__null(); bool later_node_is_relevant = false; while (!ts_node_is_null(node)) { TSNode later_child = ts_node__null(); bool later_child_is_relevant = false; TSNode child_containing_target = ts_node__null(); TSNode child; NodeChildIterator iterator = ts_node_iterate_children(&node); while (ts_node_child_iterator_next(&iterator, &child)) { if (iterator.position.bytes <= target_end_byte) continue; uint32_t start_byte = ts_node_start_byte(self); uint32_t child_start_byte = ts_node_start_byte(child); bool is_empty = start_byte == target_end_byte; bool contains_target = is_empty ? 
child_start_byte < start_byte : child_start_byte <= start_byte; if (contains_target) { if (ts_node__subtree(child).ptr != ts_node__subtree(self).ptr) { child_containing_target = child; } } else if (ts_node__is_relevant(child, include_anonymous)) { later_child = child; later_child_is_relevant = true; break; } else if (ts_node__relevant_child_count(child, include_anonymous) > 0) { later_child = child; later_child_is_relevant = false; break; } } if (!ts_node_is_null(child_containing_target)) { if (!ts_node_is_null(later_child)) { later_node = later_child; later_node_is_relevant = later_child_is_relevant; } node = child_containing_target; } else if (later_child_is_relevant) { return later_child; } else if (!ts_node_is_null(later_child)) { node = later_child; } else if (later_node_is_relevant) { return later_node; } else { node = later_node; } } return ts_node__null(); } static inline TSNode ts_node__first_child_for_byte( TSNode self, uint32_t goal, bool include_anonymous ) { TSNode node = self; bool did_descend = true; NodeChildIterator last_iterator; bool has_last_iterator = false; while (did_descend) { did_descend = false; TSNode child; NodeChildIterator iterator = ts_node_iterate_children(&node); loop: while (ts_node_child_iterator_next(&iterator, &child)) { if (ts_node_end_byte(child) > goal) { if (ts_node__is_relevant(child, include_anonymous)) { return child; } else if (ts_node_child_count(child) > 0) { if (iterator.child_index < ts_subtree_child_count(ts_node__subtree(child))) { last_iterator = iterator; has_last_iterator = true; } did_descend = true; node = child; break; } } } if (!did_descend && has_last_iterator) { iterator = last_iterator; has_last_iterator = false; goto loop; } } return ts_node__null(); } static inline TSNode ts_node__descendant_for_byte_range( TSNode self, uint32_t range_start, uint32_t range_end, bool include_anonymous ) { if (range_start > range_end) { return ts_node__null(); } TSNode node = self; TSNode last_visible_node = self; bool did_descend = true; while (did_descend) { did_descend = false; TSNode child; NodeChildIterator iterator = ts_node_iterate_children(&node); while (ts_node_child_iterator_next(&iterator, &child)) { uint32_t node_end = iterator.position.bytes; // The end of this node must extend far enough forward to touch // the end of the range if (node_end < range_end) continue; // ...and exceed the start of the range, unless the node itself is // empty, in which case it must at least be equal to the start of the range. bool is_empty = ts_node_start_byte(child) == node_end; if (is_empty ? node_end < range_start : node_end <= range_start) continue; // The start of this node must extend far enough backward to // touch the start of the range. 
if (range_start < ts_node_start_byte(child)) break; node = child; if (ts_node__is_relevant(node, include_anonymous)) { last_visible_node = node; } did_descend = true; break; } } return last_visible_node; } static inline TSNode ts_node__descendant_for_point_range( TSNode self, TSPoint range_start, TSPoint range_end, bool include_anonymous ) { if (point_gt(range_start, range_end)) { return ts_node__null(); } TSNode node = self; TSNode last_visible_node = self; bool did_descend = true; while (did_descend) { did_descend = false; TSNode child; NodeChildIterator iterator = ts_node_iterate_children(&node); while (ts_node_child_iterator_next(&iterator, &child)) { TSPoint node_end = iterator.position.extent; // The end of this node must extend far enough forward to touch // the end of the range if (point_lt(node_end, range_end)) continue; // ...and exceed the start of the range, unless the node itself is // empty, in which case it must at least be equal to the start of the range. bool is_empty = point_eq(ts_node_start_point(child), node_end); if (is_empty ? point_lt(node_end, range_start) : point_lte(node_end, range_start)) { continue; } // The start of this node must extend far enough backward to // touch the start of the range. if (point_lt(range_start, ts_node_start_point(child))) break; node = child; if (ts_node__is_relevant(node, include_anonymous)) { last_visible_node = node; } did_descend = true; break; } } return last_visible_node; } // TSNode - public uint32_t ts_node_end_byte(TSNode self) { return ts_node_start_byte(self) + ts_subtree_size(ts_node__subtree(self)).bytes; } TSPoint ts_node_end_point(TSNode self) { return point_add(ts_node_start_point(self), ts_subtree_size(ts_node__subtree(self)).extent); } TSSymbol ts_node_symbol(TSNode self) { TSSymbol symbol = ts_node__alias(&self); if (!symbol) symbol = ts_subtree_symbol(ts_node__subtree(self)); return ts_language_public_symbol(self.tree->language, symbol); } const char *ts_node_type(TSNode self) { TSSymbol symbol = ts_node__alias(&self); if (!symbol) symbol = ts_subtree_symbol(ts_node__subtree(self)); return ts_language_symbol_name(self.tree->language, symbol); } const TSLanguage *ts_node_language(TSNode self) { return self.tree->language; } TSSymbol ts_node_grammar_symbol(TSNode self) { return ts_subtree_symbol(ts_node__subtree(self)); } const char *ts_node_grammar_type(TSNode self) { TSSymbol symbol = ts_subtree_symbol(ts_node__subtree(self)); return ts_language_symbol_name(self.tree->language, symbol); } char *ts_node_string(TSNode self) { TSSymbol alias_symbol = ts_node__alias(&self); return ts_subtree_string( ts_node__subtree(self), alias_symbol, ts_language_symbol_metadata(self.tree->language, alias_symbol).visible, self.tree->language, false ); } bool ts_node_eq(TSNode self, TSNode other) { return self.tree == other.tree && self.id == other.id; } bool ts_node_is_null(TSNode self) { return self.id == 0; } bool ts_node_is_extra(TSNode self) { return ts_subtree_extra(ts_node__subtree(self)); } bool ts_node_is_named(TSNode self) { TSSymbol alias = ts_node__alias(&self); return alias ? 
ts_language_symbol_metadata(self.tree->language, alias).named : ts_subtree_named(ts_node__subtree(self)); } bool ts_node_is_missing(TSNode self) { return ts_subtree_missing(ts_node__subtree(self)); } bool ts_node_has_changes(TSNode self) { return ts_subtree_has_changes(ts_node__subtree(self)); } bool ts_node_has_error(TSNode self) { return ts_subtree_error_cost(ts_node__subtree(self)) > 0; } bool ts_node_is_error(TSNode self) { TSSymbol symbol = ts_node_symbol(self); return symbol == ts_builtin_sym_error; } uint32_t ts_node_descendant_count(TSNode self) { return ts_subtree_visible_descendant_count(ts_node__subtree(self)) + 1; } TSStateId ts_node_parse_state(TSNode self) { return ts_subtree_parse_state(ts_node__subtree(self)); } TSStateId ts_node_next_parse_state(TSNode self) { const TSLanguage *language = self.tree->language; uint16_t state = ts_node_parse_state(self); if (state == TS_TREE_STATE_NONE) { return TS_TREE_STATE_NONE; } uint16_t symbol = ts_node_grammar_symbol(self); return ts_language_next_state(language, state, symbol); } TSNode ts_node_parent(TSNode self) { TSNode node = ts_tree_root_node(self.tree); if (node.id == self.id) return ts_node__null(); while (true) { TSNode next_node = ts_node_child_with_descendant(node, self); if (next_node.id == self.id || ts_node_is_null(next_node)) break; node = next_node; } return node; } TSNode ts_node_child_with_descendant(TSNode self, TSNode descendant) { uint32_t start_byte = ts_node_start_byte(descendant); uint32_t end_byte = ts_node_end_byte(descendant); bool is_empty = start_byte == end_byte; do { NodeChildIterator iter = ts_node_iterate_children(&self); do { if ( !ts_node_child_iterator_next(&iter, &self) || ts_node_start_byte(self) > start_byte ) { return ts_node__null(); } if (self.id == descendant.id) { return self; } // If the descendant is empty, and the end byte is within `self`, // we check whether `self` contains it or not. if (is_empty && iter.position.bytes >= end_byte && ts_node_child_count(self) > 0) { TSNode child = ts_node_child_with_descendant(self, descendant); // If the child is not null, return self if it's relevant, else return the child if (!ts_node_is_null(child)) { return ts_node__is_relevant(self, true) ? self : child; } } } while ((is_empty ? iter.position.bytes <= end_byte : iter.position.bytes < end_byte) || ts_node_child_count(self) == 0); } while (!ts_node__is_relevant(self, true)); return self; } TSNode ts_node_child(TSNode self, uint32_t child_index) { return ts_node__child(self, child_index, true); } TSNode ts_node_named_child(TSNode self, uint32_t child_index) { return ts_node__child(self, child_index, false); } TSNode ts_node_child_by_field_id(TSNode self, TSFieldId field_id) { recur: if (!field_id || ts_node_child_count(self) == 0) return ts_node__null(); const TSFieldMapEntry *field_map, *field_map_end; ts_language_field_map( self.tree->language, ts_node__subtree(self).ptr->production_id, &field_map, &field_map_end ); if (field_map == field_map_end) return ts_node__null(); // The field mappings are sorted by their field id. Scan all // the mappings to find the ones for the given field id. 
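// First narrow [field_map, field_map_end) down to the entries whose field id
// matches, returning a null node if no mapping exists for this field.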
while (field_map->field_id < field_id) { field_map++; if (field_map == field_map_end) return ts_node__null(); } while (field_map_end[-1].field_id > field_id) { field_map_end--; if (field_map == field_map_end) return ts_node__null(); } TSNode child; NodeChildIterator iterator = ts_node_iterate_children(&self); while (ts_node_child_iterator_next(&iterator, &child)) { if (!ts_subtree_extra(ts_node__subtree(child))) { uint32_t index = iterator.structural_child_index - 1; if (index < field_map->child_index) continue; // Hidden nodes' fields are "inherited" by their visible parent. if (field_map->inherited) { // If this is the *last* possible child node for this field, // then perform a tail call to avoid recursion. if (field_map + 1 == field_map_end) { self = child; goto recur; } // Otherwise, descend into this child, but if it doesn't contain // the field, continue searching subsequent children. else { TSNode result = ts_node_child_by_field_id(child, field_id); if (result.id) return result; field_map++; if (field_map == field_map_end) return ts_node__null(); } } else if (ts_node__is_relevant(child, true)) { return child; } // If the field refers to a hidden node with visible children, // return the first visible child. else if (ts_node_child_count(child) > 0 ) { return ts_node_child(child, 0); } // Otherwise, continue searching subsequent children. else { field_map++; if (field_map == field_map_end) return ts_node__null(); } } } return ts_node__null(); } static inline const char *ts_node__field_name_from_language(TSNode self, uint32_t structural_child_index) { const TSFieldMapEntry *field_map, *field_map_end; ts_language_field_map( self.tree->language, ts_node__subtree(self).ptr->production_id, &field_map, &field_map_end ); for (; field_map != field_map_end; field_map++) { if (!field_map->inherited && field_map->child_index == structural_child_index) { return self.tree->language->field_names[field_map->field_id]; } } return NULL; } const char *ts_node_field_name_for_child(TSNode self, uint32_t child_index) { TSNode result = self; bool did_descend = true; const char *inherited_field_name = NULL; while (did_descend) { did_descend = false; TSNode child; uint32_t index = 0; NodeChildIterator iterator = ts_node_iterate_children(&result); while (ts_node_child_iterator_next(&iterator, &child)) { if (ts_node__is_relevant(child, true)) { if (index == child_index) { if (ts_node_is_extra(child)) { return NULL; } const char *field_name = ts_node__field_name_from_language(result, iterator.structural_child_index - 1); if (field_name) return field_name; return inherited_field_name; } index++; } else { uint32_t grandchild_index = child_index - index; uint32_t grandchild_count = ts_node__relevant_child_count(child, true); if (grandchild_index < grandchild_count) { const char *field_name = ts_node__field_name_from_language(result, iterator.structural_child_index - 1); if (field_name) inherited_field_name = field_name; did_descend = true; result = child; child_index = grandchild_index; break; } index += grandchild_count; } } } return NULL; } const char *ts_node_field_name_for_named_child(TSNode self, uint32_t named_child_index) { TSNode result = self; bool did_descend = true; const char *inherited_field_name = NULL; while (did_descend) { did_descend = false; TSNode child; uint32_t index = 0; NodeChildIterator iterator = ts_node_iterate_children(&result); while (ts_node_child_iterator_next(&iterator, &child)) { if (ts_node__is_relevant(child, false)) { if (index == named_child_index) { if 
(ts_node_is_extra(child)) { return NULL; } const char *field_name = ts_node__field_name_from_language(result, iterator.structural_child_index - 1); if (field_name) return field_name; return inherited_field_name; } index++; } else { uint32_t named_grandchild_index = named_child_index - index; uint32_t grandchild_count = ts_node__relevant_child_count(child, false); if (named_grandchild_index < grandchild_count) { const char *field_name = ts_node__field_name_from_language(result, iterator.structural_child_index - 1); if (field_name) inherited_field_name = field_name; did_descend = true; result = child; named_child_index = named_grandchild_index; break; } index += grandchild_count; } } } return NULL; } TSNode ts_node_child_by_field_name( TSNode self, const char *name, uint32_t name_length ) { TSFieldId field_id = ts_language_field_id_for_name( self.tree->language, name, name_length ); return ts_node_child_by_field_id(self, field_id); } uint32_t ts_node_child_count(TSNode self) { Subtree tree = ts_node__subtree(self); if (ts_subtree_child_count(tree) > 0) { return tree.ptr->visible_child_count; } else { return 0; } } uint32_t ts_node_named_child_count(TSNode self) { Subtree tree = ts_node__subtree(self); if (ts_subtree_child_count(tree) > 0) { return tree.ptr->named_child_count; } else { return 0; } } TSNode ts_node_next_sibling(TSNode self) { return ts_node__next_sibling(self, true); } TSNode ts_node_next_named_sibling(TSNode self) { return ts_node__next_sibling(self, false); } TSNode ts_node_prev_sibling(TSNode self) { return ts_node__prev_sibling(self, true); } TSNode ts_node_prev_named_sibling(TSNode self) { return ts_node__prev_sibling(self, false); } TSNode ts_node_first_child_for_byte(TSNode self, uint32_t byte) { return ts_node__first_child_for_byte(self, byte, true); } TSNode ts_node_first_named_child_for_byte(TSNode self, uint32_t byte) { return ts_node__first_child_for_byte(self, byte, false); } TSNode ts_node_descendant_for_byte_range( TSNode self, uint32_t start, uint32_t end ) { return ts_node__descendant_for_byte_range(self, start, end, true); } TSNode ts_node_named_descendant_for_byte_range( TSNode self, uint32_t start, uint32_t end ) { return ts_node__descendant_for_byte_range(self, start, end, false); } TSNode ts_node_descendant_for_point_range( TSNode self, TSPoint start, TSPoint end ) { return ts_node__descendant_for_point_range(self, start, end, true); } TSNode ts_node_named_descendant_for_point_range( TSNode self, TSPoint start, TSPoint end ) { return ts_node__descendant_for_point_range(self, start, end, false); } void ts_node_edit(TSNode *self, const TSInputEdit *edit) { uint32_t start_byte = ts_node_start_byte(*self); TSPoint start_point = ts_node_start_point(*self); if (start_byte >= edit->old_end_byte) { start_byte = edit->new_end_byte + (start_byte - edit->old_end_byte); start_point = point_add(edit->new_end_point, point_sub(start_point, edit->old_end_point)); } else if (start_byte > edit->start_byte) { start_byte = edit->new_end_byte; start_point = edit->new_end_point; } self->context[0] = start_byte; self->context[1] = start_point.row; self->context[2] = start_point.column; } hx-0.3.0+20250717/bindings/vendor/src/parser.c000066400000000000000000002321421503625671400203720ustar00rootroot00000000000000#include #include #include #include #include #include "tree_sitter/api.h" #include "./alloc.h" #include "./array.h" #include "./atomic.h" #include "./clock.h" #include "./error_costs.h" #include "./get_changed_ranges.h" #include "./language.h" #include "./length.h" 
#include "./lexer.h" #include "./reduce_action.h" #include "./reusable_node.h" #include "./stack.h" #include "./subtree.h" #include "./tree.h" #include "./ts_assert.h" #include "./wasm_store.h" #define LOG(...) \ if (self->lexer.logger.log || self->dot_graph_file) { \ snprintf(self->lexer.debug_buffer, TREE_SITTER_SERIALIZATION_BUFFER_SIZE, __VA_ARGS__); \ ts_parser__log(self); \ } #define LOG_LOOKAHEAD(symbol_name, size) \ if (self->lexer.logger.log || self->dot_graph_file) { \ char *buf = self->lexer.debug_buffer; \ const char *symbol = symbol_name; \ int off = snprintf( \ buf, \ TREE_SITTER_SERIALIZATION_BUFFER_SIZE, \ "lexed_lookahead sym:" \ ); \ for ( \ int i = 0; \ symbol[i] != '\0' \ && off < TREE_SITTER_SERIALIZATION_BUFFER_SIZE; \ i++ \ ) { \ switch (symbol[i]) { \ case '\t': buf[off++] = '\\'; buf[off++] = 't'; break; \ case '\n': buf[off++] = '\\'; buf[off++] = 'n'; break; \ case '\v': buf[off++] = '\\'; buf[off++] = 'v'; break; \ case '\f': buf[off++] = '\\'; buf[off++] = 'f'; break; \ case '\r': buf[off++] = '\\'; buf[off++] = 'r'; break; \ case '\\': buf[off++] = '\\'; buf[off++] = '\\'; break; \ default: buf[off++] = symbol[i]; break; \ } \ } \ snprintf( \ buf + off, \ TREE_SITTER_SERIALIZATION_BUFFER_SIZE - off, \ ", size:%u", \ size \ ); \ ts_parser__log(self); \ } #define LOG_STACK() \ if (self->dot_graph_file) { \ ts_stack_print_dot_graph(self->stack, self->language, self->dot_graph_file); \ fputs("\n\n", self->dot_graph_file); \ } #define LOG_TREE(tree) \ if (self->dot_graph_file) { \ ts_subtree_print_dot_graph(tree, self->language, self->dot_graph_file); \ fputs("\n", self->dot_graph_file); \ } #define SYM_NAME(symbol) ts_language_symbol_name(self->language, symbol) #define TREE_NAME(tree) SYM_NAME(ts_subtree_symbol(tree)) static const unsigned MAX_VERSION_COUNT = 6; static const unsigned MAX_VERSION_COUNT_OVERFLOW = 4; static const unsigned MAX_SUMMARY_DEPTH = 16; static const unsigned MAX_COST_DIFFERENCE = 18 * ERROR_COST_PER_SKIPPED_TREE; static const unsigned OP_COUNT_PER_PARSER_TIMEOUT_CHECK = 100; typedef struct { Subtree token; Subtree last_external_token; uint32_t byte_index; } TokenCache; struct TSParser { Lexer lexer; Stack *stack; SubtreePool tree_pool; const TSLanguage *language; TSWasmStore *wasm_store; ReduceActionSet reduce_actions; Subtree finished_tree; SubtreeArray trailing_extras; SubtreeArray trailing_extras2; SubtreeArray scratch_trees; TokenCache token_cache; ReusableNode reusable_node; void *external_scanner_payload; FILE *dot_graph_file; TSClock end_clock; TSDuration timeout_duration; unsigned accept_count; unsigned operation_count; const volatile size_t *cancellation_flag; Subtree old_tree; TSRangeArray included_range_differences; TSParseOptions parse_options; TSParseState parse_state; unsigned included_range_difference_index; bool has_scanner_error; bool canceled_balancing; bool has_error; }; typedef struct { unsigned cost; unsigned node_count; int dynamic_precedence; bool is_in_error; } ErrorStatus; typedef enum { ErrorComparisonTakeLeft, ErrorComparisonPreferLeft, ErrorComparisonNone, ErrorComparisonPreferRight, ErrorComparisonTakeRight, } ErrorComparison; typedef struct { const char *string; uint32_t length; } TSStringInput; // StringInput static const char *ts_string_input_read( void *_self, uint32_t byte, TSPoint point, uint32_t *length ) { (void)point; TSStringInput *self = (TSStringInput *)_self; if (byte >= self->length) { *length = 0; return ""; } else { *length = self->length - byte; return self->string + byte; } } // Parser - 
Private static void ts_parser__log(TSParser *self) { if (self->lexer.logger.log) { self->lexer.logger.log( self->lexer.logger.payload, TSLogTypeParse, self->lexer.debug_buffer ); } if (self->dot_graph_file) { fprintf(self->dot_graph_file, "graph {\nlabel=\""); for (char *chr = &self->lexer.debug_buffer[0]; *chr != 0; chr++) { if (*chr == '"' || *chr == '\\') fputc('\\', self->dot_graph_file); fputc(*chr, self->dot_graph_file); } fprintf(self->dot_graph_file, "\"\n}\n\n"); } } static bool ts_parser__breakdown_top_of_stack( TSParser *self, StackVersion version ) { bool did_break_down = false; bool pending = false; do { StackSliceArray pop = ts_stack_pop_pending(self->stack, version); if (!pop.size) break; did_break_down = true; pending = false; for (uint32_t i = 0; i < pop.size; i++) { StackSlice slice = *array_get(&pop, i); TSStateId state = ts_stack_state(self->stack, slice.version); Subtree parent = *array_front(&slice.subtrees); for (uint32_t j = 0, n = ts_subtree_child_count(parent); j < n; j++) { Subtree child = ts_subtree_children(parent)[j]; pending = ts_subtree_child_count(child) > 0; if (ts_subtree_is_error(child)) { state = ERROR_STATE; } else if (!ts_subtree_extra(child)) { state = ts_language_next_state(self->language, state, ts_subtree_symbol(child)); } ts_subtree_retain(child); ts_stack_push(self->stack, slice.version, child, pending, state); } for (uint32_t j = 1; j < slice.subtrees.size; j++) { Subtree tree = *array_get(&slice.subtrees, j); ts_stack_push(self->stack, slice.version, tree, false, state); } ts_subtree_release(&self->tree_pool, parent); array_delete(&slice.subtrees); LOG("breakdown_top_of_stack tree:%s", TREE_NAME(parent)); LOG_STACK(); } } while (pending); return did_break_down; } static void ts_parser__breakdown_lookahead( TSParser *self, Subtree *lookahead, TSStateId state, ReusableNode *reusable_node ) { bool did_descend = false; Subtree tree = reusable_node_tree(reusable_node); while (ts_subtree_child_count(tree) > 0 && ts_subtree_parse_state(tree) != state) { LOG("state_mismatch sym:%s", TREE_NAME(tree)); reusable_node_descend(reusable_node); tree = reusable_node_tree(reusable_node); did_descend = true; } if (did_descend) { ts_subtree_release(&self->tree_pool, *lookahead); *lookahead = tree; ts_subtree_retain(*lookahead); } } static ErrorComparison ts_parser__compare_versions( TSParser *self, ErrorStatus a, ErrorStatus b ) { (void)self; if (!a.is_in_error && b.is_in_error) { if (a.cost < b.cost) { return ErrorComparisonTakeLeft; } else { return ErrorComparisonPreferLeft; } } if (a.is_in_error && !b.is_in_error) { if (b.cost < a.cost) { return ErrorComparisonTakeRight; } else { return ErrorComparisonPreferRight; } } if (a.cost < b.cost) { if ((b.cost - a.cost) * (1 + a.node_count) > MAX_COST_DIFFERENCE) { return ErrorComparisonTakeLeft; } else { return ErrorComparisonPreferLeft; } } if (b.cost < a.cost) { if ((a.cost - b.cost) * (1 + b.node_count) > MAX_COST_DIFFERENCE) { return ErrorComparisonTakeRight; } else { return ErrorComparisonPreferRight; } } if (a.dynamic_precedence > b.dynamic_precedence) return ErrorComparisonPreferLeft; if (b.dynamic_precedence > a.dynamic_precedence) return ErrorComparisonPreferRight; return ErrorComparisonNone; } static ErrorStatus ts_parser__version_status( TSParser *self, StackVersion version ) { unsigned cost = ts_stack_error_cost(self->stack, version); bool is_paused = ts_stack_is_paused(self->stack, version); if (is_paused) cost += ERROR_COST_PER_SKIPPED_TREE; return (ErrorStatus) { .cost = cost, .node_count = 
ts_stack_node_count_since_error(self->stack, version), .dynamic_precedence = ts_stack_dynamic_precedence(self->stack, version), .is_in_error = is_paused || ts_stack_state(self->stack, version) == ERROR_STATE }; } static bool ts_parser__better_version_exists( TSParser *self, StackVersion version, bool is_in_error, unsigned cost ) { if (self->finished_tree.ptr && ts_subtree_error_cost(self->finished_tree) <= cost) { return true; } Length position = ts_stack_position(self->stack, version); ErrorStatus status = { .cost = cost, .is_in_error = is_in_error, .dynamic_precedence = ts_stack_dynamic_precedence(self->stack, version), .node_count = ts_stack_node_count_since_error(self->stack, version), }; for (StackVersion i = 0, n = ts_stack_version_count(self->stack); i < n; i++) { if (i == version || !ts_stack_is_active(self->stack, i) || ts_stack_position(self->stack, i).bytes < position.bytes) continue; ErrorStatus status_i = ts_parser__version_status(self, i); switch (ts_parser__compare_versions(self, status, status_i)) { case ErrorComparisonTakeRight: return true; case ErrorComparisonPreferRight: if (ts_stack_can_merge(self->stack, i, version)) return true; break; default: break; } } return false; } static bool ts_parser__call_main_lex_fn(TSParser *self, TSLexerMode lex_mode) { if (ts_language_is_wasm(self->language)) { return ts_wasm_store_call_lex_main(self->wasm_store, lex_mode.lex_state); } else { return self->language->lex_fn(&self->lexer.data, lex_mode.lex_state); } } static bool ts_parser__call_keyword_lex_fn(TSParser *self) { if (ts_language_is_wasm(self->language)) { return ts_wasm_store_call_lex_keyword(self->wasm_store, 0); } else { return self->language->keyword_lex_fn(&self->lexer.data, 0); } } static void ts_parser__external_scanner_create( TSParser *self ) { if (self->language && self->language->external_scanner.states) { if (ts_language_is_wasm(self->language)) { self->external_scanner_payload = (void *)(uintptr_t)ts_wasm_store_call_scanner_create( self->wasm_store ); if (ts_wasm_store_has_error(self->wasm_store)) { self->has_scanner_error = true; } } else if (self->language->external_scanner.create) { self->external_scanner_payload = self->language->external_scanner.create(); } } } static void ts_parser__external_scanner_destroy( TSParser *self ) { if ( self->language && self->external_scanner_payload && self->language->external_scanner.destroy && !ts_language_is_wasm(self->language) ) { self->language->external_scanner.destroy( self->external_scanner_payload ); } self->external_scanner_payload = NULL; } static unsigned ts_parser__external_scanner_serialize( TSParser *self ) { if (ts_language_is_wasm(self->language)) { return ts_wasm_store_call_scanner_serialize( self->wasm_store, (uintptr_t)self->external_scanner_payload, self->lexer.debug_buffer ); } else { uint32_t length = self->language->external_scanner.serialize( self->external_scanner_payload, self->lexer.debug_buffer ); ts_assert(length <= TREE_SITTER_SERIALIZATION_BUFFER_SIZE); return length; } } static void ts_parser__external_scanner_deserialize( TSParser *self, Subtree external_token ) { const char *data = NULL; uint32_t length = 0; if (external_token.ptr) { data = ts_external_scanner_state_data(&external_token.ptr->external_scanner_state); length = external_token.ptr->external_scanner_state.length; } if (ts_language_is_wasm(self->language)) { ts_wasm_store_call_scanner_deserialize( self->wasm_store, (uintptr_t)self->external_scanner_payload, data, length ); if (ts_wasm_store_has_error(self->wasm_store)) { 
self->has_scanner_error = true; } } else { self->language->external_scanner.deserialize( self->external_scanner_payload, data, length ); } } static bool ts_parser__external_scanner_scan( TSParser *self, TSStateId external_lex_state ) { if (ts_language_is_wasm(self->language)) { bool result = ts_wasm_store_call_scanner_scan( self->wasm_store, (uintptr_t)self->external_scanner_payload, external_lex_state * self->language->external_token_count ); if (ts_wasm_store_has_error(self->wasm_store)) { self->has_scanner_error = true; } return result; } else { const bool *valid_external_tokens = ts_language_enabled_external_tokens( self->language, external_lex_state ); return self->language->external_scanner.scan( self->external_scanner_payload, &self->lexer.data, valid_external_tokens ); } } static bool ts_parser__can_reuse_first_leaf( TSParser *self, TSStateId state, Subtree tree, TableEntry *table_entry ) { TSSymbol leaf_symbol = ts_subtree_leaf_symbol(tree); TSStateId leaf_state = ts_subtree_leaf_parse_state(tree); TSLexerMode current_lex_mode = ts_language_lex_mode_for_state(self->language, state); TSLexerMode leaf_lex_mode = ts_language_lex_mode_for_state(self->language, leaf_state); // At the end of a non-terminal extra node, the lexer normally returns // NULL, which indicates that the parser should look for a reduce action // at symbol `0`. Avoid reusing tokens in this situation to ensure that // the same thing happens when incrementally reparsing. if (current_lex_mode.lex_state == (uint16_t)(-1)) return false; // If the token was created in a state with the same set of lookaheads, it is reusable. if ( table_entry->action_count > 0 && memcmp(&leaf_lex_mode, ¤t_lex_mode, sizeof(TSLexerMode)) == 0 && ( leaf_symbol != self->language->keyword_capture_token || (!ts_subtree_is_keyword(tree) && ts_subtree_parse_state(tree) == state) ) ) return true; // Empty tokens are not reusable in states with different lookaheads. if (ts_subtree_size(tree).bytes == 0 && leaf_symbol != ts_builtin_sym_end) return false; // If the current state allows external tokens or other tokens that conflict with this // token, this token is not reusable. 
return current_lex_mode.external_lex_state == 0 && table_entry->is_reusable; } static Subtree ts_parser__lex( TSParser *self, StackVersion version, TSStateId parse_state ) { TSLexerMode lex_mode = ts_language_lex_mode_for_state(self->language, parse_state); if (lex_mode.lex_state == (uint16_t)-1) { LOG("no_lookahead_after_non_terminal_extra"); return NULL_SUBTREE; } const Length start_position = ts_stack_position(self->stack, version); const Subtree external_token = ts_stack_last_external_token(self->stack, version); bool found_external_token = false; bool error_mode = parse_state == ERROR_STATE; bool skipped_error = false; bool called_get_column = false; int32_t first_error_character = 0; Length error_start_position = length_zero(); Length error_end_position = length_zero(); uint32_t lookahead_end_byte = 0; uint32_t external_scanner_state_len = 0; bool external_scanner_state_changed = false; ts_lexer_reset(&self->lexer, start_position); for (;;) { bool found_token = false; Length current_position = self->lexer.current_position; ColumnData column_data = self->lexer.column_data; if (lex_mode.external_lex_state != 0) { LOG( "lex_external state:%d, row:%u, column:%u", lex_mode.external_lex_state, current_position.extent.row, current_position.extent.column ); ts_lexer_start(&self->lexer); ts_parser__external_scanner_deserialize(self, external_token); found_token = ts_parser__external_scanner_scan(self, lex_mode.external_lex_state); if (self->has_scanner_error) return NULL_SUBTREE; ts_lexer_finish(&self->lexer, &lookahead_end_byte); if (found_token) { external_scanner_state_len = ts_parser__external_scanner_serialize(self); external_scanner_state_changed = !ts_external_scanner_state_eq( ts_subtree_external_scanner_state(external_token), self->lexer.debug_buffer, external_scanner_state_len ); // Avoid infinite loops caused by the external scanner returning empty tokens. // Empty tokens are needed in some circumstances, e.g. indent/dedent tokens // in Python. Ignore the following classes of empty tokens: // // * Tokens produced during error recovery. When recovering from an error, // all tokens are allowed, so it's easy to accidentally return unwanted // empty tokens. // * Tokens that are marked as 'extra' in the grammar. These don't change // the parse state, so they would definitely cause an infinite loop. 
if ( self->lexer.token_end_position.bytes <= current_position.bytes && !external_scanner_state_changed ) { TSSymbol symbol = self->language->external_scanner.symbol_map[self->lexer.data.result_symbol]; TSStateId next_parse_state = ts_language_next_state(self->language, parse_state, symbol); bool token_is_extra = (next_parse_state == parse_state); if (error_mode || !ts_stack_has_advanced_since_error(self->stack, version) || token_is_extra) { LOG( "ignore_empty_external_token symbol:%s", SYM_NAME(self->language->external_scanner.symbol_map[self->lexer.data.result_symbol]) ); found_token = false; } } } if (found_token) { found_external_token = true; called_get_column = self->lexer.did_get_column; break; } ts_lexer_reset(&self->lexer, current_position); self->lexer.column_data = column_data; } LOG( "lex_internal state:%d, row:%u, column:%u", lex_mode.lex_state, current_position.extent.row, current_position.extent.column ); ts_lexer_start(&self->lexer); found_token = ts_parser__call_main_lex_fn(self, lex_mode); ts_lexer_finish(&self->lexer, &lookahead_end_byte); if (found_token) break; if (!error_mode) { error_mode = true; lex_mode = ts_language_lex_mode_for_state(self->language, ERROR_STATE); ts_lexer_reset(&self->lexer, start_position); continue; } if (!skipped_error) { LOG("skip_unrecognized_character"); skipped_error = true; error_start_position = self->lexer.token_start_position; error_end_position = self->lexer.token_start_position; first_error_character = self->lexer.data.lookahead; } if (self->lexer.current_position.bytes == error_end_position.bytes) { if (self->lexer.data.eof(&self->lexer.data)) { self->lexer.data.result_symbol = ts_builtin_sym_error; break; } self->lexer.data.advance(&self->lexer.data, false); } error_end_position = self->lexer.current_position; } Subtree result; if (skipped_error) { Length padding = length_sub(error_start_position, start_position); Length size = length_sub(error_end_position, error_start_position); uint32_t lookahead_bytes = lookahead_end_byte - error_end_position.bytes; result = ts_subtree_new_error( &self->tree_pool, first_error_character, padding, size, lookahead_bytes, parse_state, self->language ); } else { bool is_keyword = false; TSSymbol symbol = self->lexer.data.result_symbol; Length padding = length_sub(self->lexer.token_start_position, start_position); Length size = length_sub(self->lexer.token_end_position, self->lexer.token_start_position); uint32_t lookahead_bytes = lookahead_end_byte - self->lexer.token_end_position.bytes; if (found_external_token) { symbol = self->language->external_scanner.symbol_map[symbol]; } else if (symbol == self->language->keyword_capture_token && symbol != 0) { uint32_t end_byte = self->lexer.token_end_position.bytes; ts_lexer_reset(&self->lexer, self->lexer.token_start_position); ts_lexer_start(&self->lexer); is_keyword = ts_parser__call_keyword_lex_fn(self); if ( is_keyword && self->lexer.token_end_position.bytes == end_byte && ( ts_language_has_actions(self->language, parse_state, self->lexer.data.result_symbol) || ts_language_is_reserved_word(self->language, parse_state, self->lexer.data.result_symbol) ) ) { symbol = self->lexer.data.result_symbol; } } result = ts_subtree_new_leaf( &self->tree_pool, symbol, padding, size, lookahead_bytes, parse_state, found_external_token, called_get_column, is_keyword, self->language ); if (found_external_token) { MutableSubtree mut_result = ts_subtree_to_mut_unsafe(result); ts_external_scanner_state_init( &mut_result.ptr->external_scanner_state, self->lexer.debug_buffer, 
external_scanner_state_len ); mut_result.ptr->has_external_scanner_state_change = external_scanner_state_changed; } } LOG_LOOKAHEAD( SYM_NAME(ts_subtree_symbol(result)), ts_subtree_total_size(result).bytes ); return result; } static Subtree ts_parser__get_cached_token( TSParser *self, TSStateId state, size_t position, Subtree last_external_token, TableEntry *table_entry ) { TokenCache *cache = &self->token_cache; if ( cache->token.ptr && cache->byte_index == position && ts_subtree_external_scanner_state_eq(cache->last_external_token, last_external_token) ) { ts_language_table_entry(self->language, state, ts_subtree_symbol(cache->token), table_entry); if (ts_parser__can_reuse_first_leaf(self, state, cache->token, table_entry)) { ts_subtree_retain(cache->token); return cache->token; } } return NULL_SUBTREE; } static void ts_parser__set_cached_token( TSParser *self, uint32_t byte_index, Subtree last_external_token, Subtree token ) { TokenCache *cache = &self->token_cache; if (token.ptr) ts_subtree_retain(token); if (last_external_token.ptr) ts_subtree_retain(last_external_token); if (cache->token.ptr) ts_subtree_release(&self->tree_pool, cache->token); if (cache->last_external_token.ptr) ts_subtree_release(&self->tree_pool, cache->last_external_token); cache->token = token; cache->byte_index = byte_index; cache->last_external_token = last_external_token; } static bool ts_parser__has_included_range_difference( const TSParser *self, uint32_t start_position, uint32_t end_position ) { return ts_range_array_intersects( &self->included_range_differences, self->included_range_difference_index, start_position, end_position ); } static Subtree ts_parser__reuse_node( TSParser *self, StackVersion version, TSStateId *state, uint32_t position, Subtree last_external_token, TableEntry *table_entry ) { Subtree result; while ((result = reusable_node_tree(&self->reusable_node)).ptr) { uint32_t byte_offset = reusable_node_byte_offset(&self->reusable_node); uint32_t end_byte_offset = byte_offset + ts_subtree_total_bytes(result); // Do not reuse an EOF node if the included ranges array has changes // later on in the file. 
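// (Extending end_byte_offset to UINT32_MAX makes the included-range-difference
// check further below treat the EOF node as overlapping any later change.)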
if (ts_subtree_is_eof(result)) end_byte_offset = UINT32_MAX; if (byte_offset > position) { LOG("before_reusable_node symbol:%s", TREE_NAME(result)); break; } if (byte_offset < position) { LOG("past_reusable_node symbol:%s", TREE_NAME(result)); if (end_byte_offset <= position || !reusable_node_descend(&self->reusable_node)) { reusable_node_advance(&self->reusable_node); } continue; } if (!ts_subtree_external_scanner_state_eq(self->reusable_node.last_external_token, last_external_token)) { LOG("reusable_node_has_different_external_scanner_state symbol:%s", TREE_NAME(result)); reusable_node_advance(&self->reusable_node); continue; } const char *reason = NULL; if (ts_subtree_has_changes(result)) { reason = "has_changes"; } else if (ts_subtree_is_error(result)) { reason = "is_error"; } else if (ts_subtree_missing(result)) { reason = "is_missing"; } else if (ts_subtree_is_fragile(result)) { reason = "is_fragile"; } else if (ts_parser__has_included_range_difference(self, byte_offset, end_byte_offset)) { reason = "contains_different_included_range"; } if (reason) { LOG("cant_reuse_node_%s tree:%s", reason, TREE_NAME(result)); if (!reusable_node_descend(&self->reusable_node)) { reusable_node_advance(&self->reusable_node); ts_parser__breakdown_top_of_stack(self, version); *state = ts_stack_state(self->stack, version); } continue; } TSSymbol leaf_symbol = ts_subtree_leaf_symbol(result); ts_language_table_entry(self->language, *state, leaf_symbol, table_entry); if (!ts_parser__can_reuse_first_leaf(self, *state, result, table_entry)) { LOG( "cant_reuse_node symbol:%s, first_leaf_symbol:%s", TREE_NAME(result), SYM_NAME(leaf_symbol) ); reusable_node_advance_past_leaf(&self->reusable_node); break; } LOG("reuse_node symbol:%s", TREE_NAME(result)); ts_subtree_retain(result); return result; } return NULL_SUBTREE; } // Determine if a given tree should be replaced by an alternative tree. // // The decision is based on the trees' error costs (if any), their dynamic precedence, // and finally, as a default, by a recursive comparison of the trees' symbols. 
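// Returns true when the `right` tree should be preferred over `left`.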
static bool ts_parser__select_tree(TSParser *self, Subtree left, Subtree right) { if (!left.ptr) return true; if (!right.ptr) return false; if (ts_subtree_error_cost(right) < ts_subtree_error_cost(left)) { LOG("select_smaller_error symbol:%s, over_symbol:%s", TREE_NAME(right), TREE_NAME(left)); return true; } if (ts_subtree_error_cost(left) < ts_subtree_error_cost(right)) { LOG("select_smaller_error symbol:%s, over_symbol:%s", TREE_NAME(left), TREE_NAME(right)); return false; } if (ts_subtree_dynamic_precedence(right) > ts_subtree_dynamic_precedence(left)) { LOG("select_higher_precedence symbol:%s, prec:%" PRId32 ", over_symbol:%s, other_prec:%" PRId32, TREE_NAME(right), ts_subtree_dynamic_precedence(right), TREE_NAME(left), ts_subtree_dynamic_precedence(left)); return true; } if (ts_subtree_dynamic_precedence(left) > ts_subtree_dynamic_precedence(right)) { LOG("select_higher_precedence symbol:%s, prec:%" PRId32 ", over_symbol:%s, other_prec:%" PRId32, TREE_NAME(left), ts_subtree_dynamic_precedence(left), TREE_NAME(right), ts_subtree_dynamic_precedence(right)); return false; } if (ts_subtree_error_cost(left) > 0) return true; int comparison = ts_subtree_compare(left, right, &self->tree_pool); switch (comparison) { case -1: LOG("select_earlier symbol:%s, over_symbol:%s", TREE_NAME(left), TREE_NAME(right)); return false; break; case 1: LOG("select_earlier symbol:%s, over_symbol:%s", TREE_NAME(right), TREE_NAME(left)); return true; default: LOG("select_existing symbol:%s, over_symbol:%s", TREE_NAME(left), TREE_NAME(right)); return false; } } // Determine if a given tree's children should be replaced by an alternative // array of children. static bool ts_parser__select_children( TSParser *self, Subtree left, const SubtreeArray *children ) { array_assign(&self->scratch_trees, children); // Create a temporary subtree using the scratch trees array. This node does // not perform any allocation except for possibly growing the array to make // room for its own heap data. The scratch tree is never explicitly released, // so the same 'scratch trees' array can be reused again later. MutableSubtree scratch_tree = ts_subtree_new_node( ts_subtree_symbol(left), &self->scratch_trees, 0, self->language ); return ts_parser__select_tree( self, left, ts_subtree_from_mut(scratch_tree) ); } static void ts_parser__shift( TSParser *self, StackVersion version, TSStateId state, Subtree lookahead, bool extra ) { bool is_leaf = ts_subtree_child_count(lookahead) == 0; Subtree subtree_to_push = lookahead; if (extra != ts_subtree_extra(lookahead) && is_leaf) { MutableSubtree result = ts_subtree_make_mut(&self->tree_pool, lookahead); ts_subtree_set_extra(&result, extra); subtree_to_push = ts_subtree_from_mut(result); } ts_stack_push(self->stack, version, subtree_to_push, !is_leaf, state); if (ts_subtree_has_external_tokens(subtree_to_push)) { ts_stack_set_last_external_token( self->stack, version, ts_subtree_last_external_token(subtree_to_push) ); } } static StackVersion ts_parser__reduce( TSParser *self, StackVersion version, TSSymbol symbol, uint32_t count, int dynamic_precedence, uint16_t production_id, bool is_fragile, bool end_of_non_terminal_extra ) { uint32_t initial_version_count = ts_stack_version_count(self->stack); // Pop the given number of nodes from the given version of the parse stack. // If stack versions have previously merged, then there may be more than one // path back through the stack. 
For each path, create a new parent node to // contain the popped children, and push it onto the stack in place of the // children. StackSliceArray pop = ts_stack_pop_count(self->stack, version, count); uint32_t removed_version_count = 0; uint32_t halted_version_count = ts_stack_halted_version_count(self->stack); for (uint32_t i = 0; i < pop.size; i++) { StackSlice slice = *array_get(&pop, i); StackVersion slice_version = slice.version - removed_version_count; // This is where new versions are added to the parse stack. The versions // will all be sorted and truncated at the end of the outer parsing loop. // Allow the maximum version count to be temporarily exceeded, but only // by a limited threshold. if (slice_version > MAX_VERSION_COUNT + MAX_VERSION_COUNT_OVERFLOW + halted_version_count) { ts_stack_remove_version(self->stack, slice_version); ts_subtree_array_delete(&self->tree_pool, &slice.subtrees); removed_version_count++; while (i + 1 < pop.size) { LOG("aborting reduce with too many versions") StackSlice next_slice = *array_get(&pop, i + 1); if (next_slice.version != slice.version) break; ts_subtree_array_delete(&self->tree_pool, &next_slice.subtrees); i++; } continue; } // Extra tokens on top of the stack should not be included in this new parent // node. They will be re-pushed onto the stack after the parent node is // created and pushed. SubtreeArray children = slice.subtrees; ts_subtree_array_remove_trailing_extras(&children, &self->trailing_extras); MutableSubtree parent = ts_subtree_new_node( symbol, &children, production_id, self->language ); // This pop operation may have caused multiple stack versions to collapse // into one, because they all diverged from a common state. In that case, // choose one of the arrays of trees to be the parent node's children, and // delete the rest of the tree arrays. while (i + 1 < pop.size) { StackSlice next_slice = *array_get(&pop, i + 1); if (next_slice.version != slice.version) break; i++; SubtreeArray next_slice_children = next_slice.subtrees; ts_subtree_array_remove_trailing_extras(&next_slice_children, &self->trailing_extras2); if (ts_parser__select_children( self, ts_subtree_from_mut(parent), &next_slice_children )) { ts_subtree_array_clear(&self->tree_pool, &self->trailing_extras); ts_subtree_release(&self->tree_pool, ts_subtree_from_mut(parent)); array_swap(&self->trailing_extras, &self->trailing_extras2); parent = ts_subtree_new_node( symbol, &next_slice_children, production_id, self->language ); } else { array_clear(&self->trailing_extras2); ts_subtree_array_delete(&self->tree_pool, &next_slice.subtrees); } } TSStateId state = ts_stack_state(self->stack, slice_version); TSStateId next_state = ts_language_next_state(self->language, state, symbol); if (end_of_non_terminal_extra && next_state == state) { parent.ptr->extra = true; } if (is_fragile || pop.size > 1 || initial_version_count > 1) { parent.ptr->fragile_left = true; parent.ptr->fragile_right = true; parent.ptr->parse_state = TS_TREE_STATE_NONE; } else { parent.ptr->parse_state = state; } parent.ptr->dynamic_precedence += dynamic_precedence; // Push the parent node onto the stack, along with any extra tokens that // were previously on top of the stack. 
ts_stack_push(self->stack, slice_version, ts_subtree_from_mut(parent), false, next_state); for (uint32_t j = 0; j < self->trailing_extras.size; j++) { ts_stack_push(self->stack, slice_version, *array_get(&self->trailing_extras, j), false, next_state); } for (StackVersion j = 0; j < slice_version; j++) { if (j == version) continue; if (ts_stack_merge(self->stack, j, slice_version)) { removed_version_count++; break; } } } // Return the first new stack version that was created. return ts_stack_version_count(self->stack) > initial_version_count ? initial_version_count : STACK_VERSION_NONE; } static void ts_parser__accept( TSParser *self, StackVersion version, Subtree lookahead ) { ts_assert(ts_subtree_is_eof(lookahead)); ts_stack_push(self->stack, version, lookahead, false, 1); StackSliceArray pop = ts_stack_pop_all(self->stack, version); for (uint32_t i = 0; i < pop.size; i++) { SubtreeArray trees = array_get(&pop, i)->subtrees; Subtree root = NULL_SUBTREE; for (uint32_t j = trees.size - 1; j + 1 > 0; j--) { Subtree tree = *array_get(&trees, j); if (!ts_subtree_extra(tree)) { ts_assert(!tree.data.is_inline); uint32_t child_count = ts_subtree_child_count(tree); const Subtree *children = ts_subtree_children(tree); for (uint32_t k = 0; k < child_count; k++) { ts_subtree_retain(children[k]); } array_splice(&trees, j, 1, child_count, children); root = ts_subtree_from_mut(ts_subtree_new_node( ts_subtree_symbol(tree), &trees, tree.ptr->production_id, self->language )); ts_subtree_release(&self->tree_pool, tree); break; } } ts_assert(root.ptr); self->accept_count++; if (self->finished_tree.ptr) { if (ts_parser__select_tree(self, self->finished_tree, root)) { ts_subtree_release(&self->tree_pool, self->finished_tree); self->finished_tree = root; } else { ts_subtree_release(&self->tree_pool, root); } } else { self->finished_tree = root; } } ts_stack_remove_version(self->stack, array_get(&pop, 0)->version); ts_stack_halt(self->stack, version); } static bool ts_parser__do_all_potential_reductions( TSParser *self, StackVersion starting_version, TSSymbol lookahead_symbol ) { uint32_t initial_version_count = ts_stack_version_count(self->stack); bool can_shift_lookahead_symbol = false; StackVersion version = starting_version; for (unsigned i = 0; true; i++) { uint32_t version_count = ts_stack_version_count(self->stack); if (version >= version_count) break; bool merged = false; for (StackVersion j = initial_version_count; j < version; j++) { if (ts_stack_merge(self->stack, j, version)) { merged = true; break; } } if (merged) continue; TSStateId state = ts_stack_state(self->stack, version); bool has_shift_action = false; array_clear(&self->reduce_actions); TSSymbol first_symbol, end_symbol; if (lookahead_symbol != 0) { first_symbol = lookahead_symbol; end_symbol = lookahead_symbol + 1; } else { first_symbol = 1; end_symbol = self->language->token_count; } for (TSSymbol symbol = first_symbol; symbol < end_symbol; symbol++) { TableEntry entry; ts_language_table_entry(self->language, state, symbol, &entry); for (uint32_t j = 0; j < entry.action_count; j++) { TSParseAction action = entry.actions[j]; switch (action.type) { case TSParseActionTypeShift: case TSParseActionTypeRecover: if (!action.shift.extra && !action.shift.repetition) has_shift_action = true; break; case TSParseActionTypeReduce: if (action.reduce.child_count > 0) ts_reduce_action_set_add(&self->reduce_actions, (ReduceAction) { .symbol = action.reduce.symbol, .count = action.reduce.child_count, .dynamic_precedence = action.reduce.dynamic_precedence, 
.production_id = action.reduce.production_id, }); break; default: break; } } } StackVersion reduction_version = STACK_VERSION_NONE; for (uint32_t j = 0; j < self->reduce_actions.size; j++) { ReduceAction action = *array_get(&self->reduce_actions, j); reduction_version = ts_parser__reduce( self, version, action.symbol, action.count, action.dynamic_precedence, action.production_id, true, false ); } if (has_shift_action) { can_shift_lookahead_symbol = true; } else if (reduction_version != STACK_VERSION_NONE && i < MAX_VERSION_COUNT) { ts_stack_renumber_version(self->stack, reduction_version, version); continue; } else if (lookahead_symbol != 0) { ts_stack_remove_version(self->stack, version); } if (version == starting_version) { version = version_count; } else { version++; } } return can_shift_lookahead_symbol; } static bool ts_parser__recover_to_state( TSParser *self, StackVersion version, unsigned depth, TSStateId goal_state ) { StackSliceArray pop = ts_stack_pop_count(self->stack, version, depth); StackVersion previous_version = STACK_VERSION_NONE; for (unsigned i = 0; i < pop.size; i++) { StackSlice slice = *array_get(&pop, i); if (slice.version == previous_version) { ts_subtree_array_delete(&self->tree_pool, &slice.subtrees); array_erase(&pop, i--); continue; } if (ts_stack_state(self->stack, slice.version) != goal_state) { ts_stack_halt(self->stack, slice.version); ts_subtree_array_delete(&self->tree_pool, &slice.subtrees); array_erase(&pop, i--); continue; } SubtreeArray error_trees = ts_stack_pop_error(self->stack, slice.version); if (error_trees.size > 0) { ts_assert(error_trees.size == 1); Subtree error_tree = *array_get(&error_trees, 0); uint32_t error_child_count = ts_subtree_child_count(error_tree); if (error_child_count > 0) { array_splice(&slice.subtrees, 0, 0, error_child_count, ts_subtree_children(error_tree)); for (unsigned j = 0; j < error_child_count; j++) { ts_subtree_retain(*array_get(&slice.subtrees, j)); } } ts_subtree_array_delete(&self->tree_pool, &error_trees); } ts_subtree_array_remove_trailing_extras(&slice.subtrees, &self->trailing_extras); if (slice.subtrees.size > 0) { Subtree error = ts_subtree_new_error_node(&slice.subtrees, true, self->language); ts_stack_push(self->stack, slice.version, error, false, goal_state); } else { array_delete(&slice.subtrees); } for (unsigned j = 0; j < self->trailing_extras.size; j++) { Subtree tree = *array_get(&self->trailing_extras, j); ts_stack_push(self->stack, slice.version, tree, false, goal_state); } previous_version = slice.version; } return previous_version != STACK_VERSION_NONE; } static void ts_parser__recover( TSParser *self, StackVersion version, Subtree lookahead ) { bool did_recover = false; unsigned previous_version_count = ts_stack_version_count(self->stack); Length position = ts_stack_position(self->stack, version); StackSummary *summary = ts_stack_get_summary(self->stack, version); unsigned node_count_since_error = ts_stack_node_count_since_error(self->stack, version); unsigned current_error_cost = ts_stack_error_cost(self->stack, version); // When the parser is in the error state, there are two strategies for recovering with a // given lookahead token: // 1. Find a previous state on the stack in which that lookahead token would be valid. Then, // create a new stack version that is in that state again. This entails popping all of the // subtrees that have been pushed onto the stack since that previous state, and wrapping // them in an ERROR node. // 2. 
Wrap the lookahead token in an ERROR node, push that ERROR node onto the stack, and // move on to the next lookahead token, remaining in the error state. // // First, try the strategy 1. Upon entering the error state, the parser recorded a summary // of the previous parse states and their depths. Look at each state in the summary, to see // if the current lookahead token would be valid in that state. if (summary && !ts_subtree_is_error(lookahead)) { for (unsigned i = 0; i < summary->size; i++) { StackSummaryEntry entry = *array_get(summary, i); if (entry.state == ERROR_STATE) continue; if (entry.position.bytes == position.bytes) continue; unsigned depth = entry.depth; if (node_count_since_error > 0) depth++; // Do not recover in ways that create redundant stack versions. bool would_merge = false; for (unsigned j = 0; j < previous_version_count; j++) { if ( ts_stack_state(self->stack, j) == entry.state && ts_stack_position(self->stack, j).bytes == position.bytes ) { would_merge = true; break; } } if (would_merge) continue; // Do not recover if the result would clearly be worse than some existing stack version. unsigned new_cost = current_error_cost + entry.depth * ERROR_COST_PER_SKIPPED_TREE + (position.bytes - entry.position.bytes) * ERROR_COST_PER_SKIPPED_CHAR + (position.extent.row - entry.position.extent.row) * ERROR_COST_PER_SKIPPED_LINE; if (ts_parser__better_version_exists(self, version, false, new_cost)) break; // If the current lookahead token is valid in some previous state, recover to that state. // Then stop looking for further recoveries. if (ts_language_has_actions(self->language, entry.state, ts_subtree_symbol(lookahead))) { if (ts_parser__recover_to_state(self, version, depth, entry.state)) { did_recover = true; LOG("recover_to_previous state:%u, depth:%u", entry.state, depth); LOG_STACK(); break; } } } } // In the process of attempting to recover, some stack versions may have been created // and subsequently halted. Remove those versions. for (unsigned i = previous_version_count; i < ts_stack_version_count(self->stack); i++) { if (!ts_stack_is_active(self->stack, i)) { LOG("removed paused version:%u", i); ts_stack_remove_version(self->stack, i--); LOG_STACK(); } } // If the parser is still in the error state at the end of the file, just wrap everything // in an ERROR node and terminate. if (ts_subtree_is_eof(lookahead)) { LOG("recover_eof"); SubtreeArray children = array_new(); Subtree parent = ts_subtree_new_error_node(&children, false, self->language); ts_stack_push(self->stack, version, parent, false, 1); ts_parser__accept(self, version, lookahead); return; } // If strategy 1 succeeded, a new stack version will have been created which is able to handle // the current lookahead token. Now, in addition, try strategy 2 described above: skip the // current lookahead token by wrapping it in an ERROR node. // Don't pursue this additional strategy if there are already too many stack versions. if (did_recover && ts_stack_version_count(self->stack) > MAX_VERSION_COUNT) { ts_stack_halt(self->stack, version); ts_subtree_release(&self->tree_pool, lookahead); return; } if ( did_recover && ts_subtree_has_external_scanner_state_change(lookahead) ) { ts_stack_halt(self->stack, version); ts_subtree_release(&self->tree_pool, lookahead); return; } // Do not recover if the result would clearly be worse than some existing stack version. 
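// The projected cost of skipping this token combines one skipped-tree penalty
// with per-byte and per-row penalties for the token's total size.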
unsigned new_cost = current_error_cost + ERROR_COST_PER_SKIPPED_TREE + ts_subtree_total_bytes(lookahead) * ERROR_COST_PER_SKIPPED_CHAR + ts_subtree_total_size(lookahead).extent.row * ERROR_COST_PER_SKIPPED_LINE; if (ts_parser__better_version_exists(self, version, false, new_cost)) { ts_stack_halt(self->stack, version); ts_subtree_release(&self->tree_pool, lookahead); return; } // If the current lookahead token is an extra token, mark it as extra. This means it won't // be counted in error cost calculations. unsigned n; const TSParseAction *actions = ts_language_actions(self->language, 1, ts_subtree_symbol(lookahead), &n); if (n > 0 && actions[n - 1].type == TSParseActionTypeShift && actions[n - 1].shift.extra) { MutableSubtree mutable_lookahead = ts_subtree_make_mut(&self->tree_pool, lookahead); ts_subtree_set_extra(&mutable_lookahead, true); lookahead = ts_subtree_from_mut(mutable_lookahead); } // Wrap the lookahead token in an ERROR. LOG("skip_token symbol:%s", TREE_NAME(lookahead)); SubtreeArray children = array_new(); array_reserve(&children, 1); array_push(&children, lookahead); MutableSubtree error_repeat = ts_subtree_new_node( ts_builtin_sym_error_repeat, &children, 0, self->language ); // If other tokens have already been skipped, so there is already an ERROR at the top of the // stack, then pop that ERROR off the stack and wrap the two ERRORs together into one larger // ERROR. if (node_count_since_error > 0) { StackSliceArray pop = ts_stack_pop_count(self->stack, version, 1); // TODO: Figure out how to make this condition occur. // See https://github.com/atom/atom/issues/18450#issuecomment-439579778 // If multiple stack versions have merged at this point, just pick one of the errors // arbitrarily and discard the rest. if (pop.size > 1) { for (unsigned i = 1; i < pop.size; i++) { ts_subtree_array_delete(&self->tree_pool, &array_get(&pop, i)->subtrees); } while (ts_stack_version_count(self->stack) > array_get(&pop, 0)->version + 1) { ts_stack_remove_version(self->stack, array_get(&pop, 0)->version + 1); } } ts_stack_renumber_version(self->stack, array_get(&pop, 0)->version, version); array_push(&array_get(&pop, 0)->subtrees, ts_subtree_from_mut(error_repeat)); error_repeat = ts_subtree_new_node( ts_builtin_sym_error_repeat, &array_get(&pop, 0)->subtrees, 0, self->language ); } // Push the new ERROR onto the stack. ts_stack_push(self->stack, version, ts_subtree_from_mut(error_repeat), false, ERROR_STATE); if (ts_subtree_has_external_tokens(lookahead)) { ts_stack_set_last_external_token( self->stack, version, ts_subtree_last_external_token(lookahead) ); } bool has_error = true; for (unsigned i = 0; i < ts_stack_version_count(self->stack); i++) { ErrorStatus status = ts_parser__version_status(self, i); if (!status.is_in_error) { has_error = false; break; } } self->has_error = has_error; } static void ts_parser__handle_error( TSParser *self, StackVersion version, Subtree lookahead ) { uint32_t previous_version_count = ts_stack_version_count(self->stack); // Perform any reductions that can happen in this state, regardless of the lookahead. After // skipping one or more invalid tokens, the parser might find a token that would have allowed // a reduction to take place. ts_parser__do_all_potential_reductions(self, version, 0); uint32_t version_count = ts_stack_version_count(self->stack); Length position = ts_stack_position(self->stack, version); // Push a discontinuity onto the stack. Merge all of the stack versions that // were created in the previous step. 
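// Before pushing that discontinuity, the loop below also tries one cheaper
// repair: inserting a single zero-width "missing" token. In sketch form
// (simplified, ignoring padding and lookahead-byte bookkeeping), it attempts
// the following for successive versions until one attempt succeeds:
//
//   for each token symbol `missing` in the language:
//     next = ts_language_next_state(language, current_state, missing);
//     if (next == 0 || next == current_state) continue;     // no progress
//     if (the real lookahead has a reduce action in `next`) {
//       copy this stack version;
//       push a zero-width missing leaf for `missing` onto the copy;
//       if (ts_parser__do_all_potential_reductions(...) succeeds) stop;
//     }
//
// At most one missing-token version is created per error, which keeps the
// number of speculative stack versions bounded.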
bool did_insert_missing_token = false; for (StackVersion v = version; v < version_count;) { if (!did_insert_missing_token) { TSStateId state = ts_stack_state(self->stack, v); for ( TSSymbol missing_symbol = 1; missing_symbol < (uint16_t)self->language->token_count; missing_symbol++ ) { TSStateId state_after_missing_symbol = ts_language_next_state( self->language, state, missing_symbol ); if (state_after_missing_symbol == 0 || state_after_missing_symbol == state) { continue; } if (ts_language_has_reduce_action( self->language, state_after_missing_symbol, ts_subtree_leaf_symbol(lookahead) )) { // In case the parser is currently outside of any included range, the lexer will // snap to the beginning of the next included range. The missing token's padding // must be assigned to position it within the next included range. ts_lexer_reset(&self->lexer, position); ts_lexer_mark_end(&self->lexer); Length padding = length_sub(self->lexer.token_end_position, position); uint32_t lookahead_bytes = ts_subtree_total_bytes(lookahead) + ts_subtree_lookahead_bytes(lookahead); StackVersion version_with_missing_tree = ts_stack_copy_version(self->stack, v); Subtree missing_tree = ts_subtree_new_missing_leaf( &self->tree_pool, missing_symbol, padding, lookahead_bytes, self->language ); ts_stack_push( self->stack, version_with_missing_tree, missing_tree, false, state_after_missing_symbol ); if (ts_parser__do_all_potential_reductions( self, version_with_missing_tree, ts_subtree_leaf_symbol(lookahead) )) { LOG( "recover_with_missing symbol:%s, state:%u", SYM_NAME(missing_symbol), ts_stack_state(self->stack, version_with_missing_tree) ); did_insert_missing_token = true; break; } } } } ts_stack_push(self->stack, v, NULL_SUBTREE, false, ERROR_STATE); v = (v == version) ? previous_version_count : v + 1; } for (unsigned i = previous_version_count; i < version_count; i++) { bool did_merge = ts_stack_merge(self->stack, version, previous_version_count); ts_assert(did_merge); } ts_stack_record_summary(self->stack, version, MAX_SUMMARY_DEPTH); // Begin recovery with the current lookahead node, rather than waiting for the // next turn of the parse loop. This ensures that the tree accounts for the // current lookahead token's "lookahead bytes" value, which describes how far // the lexer needed to look ahead beyond the content of the token in order to // recognize it. 
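// When ts_parser__recover (called just below) ends up skipping the lookahead
// token, it folds it into the error_repeat chain described earlier in this
// file. The resulting shape after skipping tokens t1, t2, t3 in a row looks
// roughly like this (token names are illustrative):
//
//   after t1:  (error_repeat t1)
//   after t2:  (error_repeat (error_repeat t1) (error_repeat t2))
//   after t3:  (error_repeat (error_repeat (error_repeat t1) (error_repeat t2))
//                            (error_repeat t3))
//
// Each newly skipped token is wrapped on its own and then combined with the
// error_repeat already on top of the stack, so the stack keeps exactly one
// ERROR-flavored subtree while the parser stays in the ERROR_STATE.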
if (ts_subtree_child_count(lookahead) > 0) { ts_parser__breakdown_lookahead(self, &lookahead, ERROR_STATE, &self->reusable_node); } ts_parser__recover(self, version, lookahead); LOG_STACK(); } static bool ts_parser__check_progress(TSParser *self, Subtree *lookahead, const uint32_t *position, unsigned operations) { self->operation_count += operations; if (self->operation_count >= OP_COUNT_PER_PARSER_TIMEOUT_CHECK) { self->operation_count = 0; } if (position != NULL) { self->parse_state.current_byte_offset = *position; self->parse_state.has_error = self->has_error; } if ( self->operation_count == 0 && ( // TODO(amaanq): remove cancellation flag & clock checks before 0.26 (self->cancellation_flag && atomic_load(self->cancellation_flag)) || (!clock_is_null(self->end_clock) && clock_is_gt(clock_now(), self->end_clock)) || (self->parse_options.progress_callback && self->parse_options.progress_callback(&self->parse_state)) ) ) { if (lookahead && lookahead->ptr) { ts_subtree_release(&self->tree_pool, *lookahead); } return false; } return true; } static bool ts_parser__advance( TSParser *self, StackVersion version, bool allow_node_reuse ) { TSStateId state = ts_stack_state(self->stack, version); uint32_t position = ts_stack_position(self->stack, version).bytes; Subtree last_external_token = ts_stack_last_external_token(self->stack, version); bool did_reuse = true; Subtree lookahead = NULL_SUBTREE; TableEntry table_entry = {.action_count = 0}; // If possible, reuse a node from the previous syntax tree. if (allow_node_reuse) { lookahead = ts_parser__reuse_node( self, version, &state, position, last_external_token, &table_entry ); } // If no node from the previous syntax tree could be reused, then try to // reuse the token previously returned by the lexer. if (!lookahead.ptr) { did_reuse = false; lookahead = ts_parser__get_cached_token( self, state, position, last_external_token, &table_entry ); } bool needs_lex = !lookahead.ptr; for (;;) { // Otherwise, re-run the lexer. if (needs_lex) { needs_lex = false; lookahead = ts_parser__lex(self, version, state); if (self->has_scanner_error) return false; if (lookahead.ptr) { ts_parser__set_cached_token(self, position, last_external_token, lookahead); ts_language_table_entry(self->language, state, ts_subtree_symbol(lookahead), &table_entry); } // When parsing a non-terminal extra, a null lookahead indicates the // end of the rule. The reduction is stored in the EOF table entry. // After the reduction, the lexer needs to be run again. else { ts_language_table_entry(self->language, state, ts_builtin_sym_end, &table_entry); } } // If a cancellation flag, timeout, or progress callback was provided, then check every // time a fixed number of parse actions has been processed. if (!ts_parser__check_progress(self, &lookahead, &position, 1)) { return false; } // Process each parse action for the current lookahead token in // the current state. If there are multiple actions, then this is // an ambiguous state. REDUCE actions always create a new stack // version, whereas SHIFT actions update the existing stack version // and terminate this loop. 
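// In sketch form, the dispatch below has the classic LR shape, specialized
// for a stack that can hold several versions at once (simplified; the real
// cases also handle extras, repetitions, and subtree breakdown):
//
//   for each action valid for (state, lookahead):
//     case SHIFT:   push the lookahead, move to the action's next state, return;
//     case REDUCE:  pop child_count nodes and push the new nonterminal onto a
//                   separate reduction version of the stack, remember it;
//     case ACCEPT:  record a finished tree for this version, return;
//     case RECOVER: enter error recovery with this lookahead, return;
//
// If only reductions happened, the loop continues with the renumbered
// reduction version and the same lookahead token.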
bool did_reduce = false; StackVersion last_reduction_version = STACK_VERSION_NONE; for (uint32_t i = 0; i < table_entry.action_count; i++) { TSParseAction action = table_entry.actions[i]; switch (action.type) { case TSParseActionTypeShift: { if (action.shift.repetition) break; TSStateId next_state; if (action.shift.extra) { next_state = state; LOG("shift_extra"); } else { next_state = action.shift.state; LOG("shift state:%u", next_state); } if (ts_subtree_child_count(lookahead) > 0) { ts_parser__breakdown_lookahead(self, &lookahead, state, &self->reusable_node); next_state = ts_language_next_state(self->language, state, ts_subtree_symbol(lookahead)); } ts_parser__shift(self, version, next_state, lookahead, action.shift.extra); if (did_reuse) reusable_node_advance(&self->reusable_node); return true; } case TSParseActionTypeReduce: { bool is_fragile = table_entry.action_count > 1; bool end_of_non_terminal_extra = lookahead.ptr == NULL; LOG("reduce sym:%s, child_count:%u", SYM_NAME(action.reduce.symbol), action.reduce.child_count); StackVersion reduction_version = ts_parser__reduce( self, version, action.reduce.symbol, action.reduce.child_count, action.reduce.dynamic_precedence, action.reduce.production_id, is_fragile, end_of_non_terminal_extra ); did_reduce = true; if (reduction_version != STACK_VERSION_NONE) { last_reduction_version = reduction_version; } break; } case TSParseActionTypeAccept: { LOG("accept"); ts_parser__accept(self, version, lookahead); return true; } case TSParseActionTypeRecover: { if (ts_subtree_child_count(lookahead) > 0) { ts_parser__breakdown_lookahead(self, &lookahead, ERROR_STATE, &self->reusable_node); } ts_parser__recover(self, version, lookahead); if (did_reuse) reusable_node_advance(&self->reusable_node); return true; } } } // If a reduction was performed, then replace the current stack version // with one of the stack versions created by a reduction, and continue // processing this version of the stack with the same lookahead symbol. if (last_reduction_version != STACK_VERSION_NONE) { ts_stack_renumber_version(self->stack, last_reduction_version, version); LOG_STACK(); state = ts_stack_state(self->stack, version); // At the end of a non-terminal extra rule, the lexer will return a // null subtree, because the parser needs to perform a fixed reduction // regardless of the lookahead node. After performing that reduction, // (and completing the non-terminal extra rule) run the lexer again based // on the current parse state. if (!lookahead.ptr) { needs_lex = true; } else { ts_language_table_entry( self->language, state, ts_subtree_leaf_symbol(lookahead), &table_entry ); } continue; } // A reduction was performed, but was merged into an existing stack version. // This version can be discarded. if (did_reduce) { if (lookahead.ptr) { ts_subtree_release(&self->tree_pool, lookahead); } ts_stack_halt(self->stack, version); return true; } // If the current lookahead token is a keyword that is not valid, but the // default word token *is* valid, then treat the lookahead token as the word // token instead. 
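// This covers grammars that use "keyword extraction": keywords are lexed via
// a shared word rule (the language's keyword_capture_token, typically an
// identifier-like token). A sketch of the situation, using an invented
// grammar purely for illustration:
//
//   source:          foo.if = 1
//   lexed token:     "if"  (keyword symbol)
//   state accepts:   identifier, but not the "if" keyword
//   result:          the check below re-tags the same token with the word
//                    symbol and parsing continues without an error.
//
// The re-tagging is only attempted when the word token actually has parse
// actions in the current state, and reserved words are never re-tagged.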
if ( ts_subtree_is_keyword(lookahead) && ts_subtree_symbol(lookahead) != self->language->keyword_capture_token && !ts_language_is_reserved_word(self->language, state, ts_subtree_symbol(lookahead)) ) { ts_language_table_entry( self->language, state, self->language->keyword_capture_token, &table_entry ); if (table_entry.action_count > 0) { LOG( "switch from_keyword:%s, to_word_token:%s", TREE_NAME(lookahead), SYM_NAME(self->language->keyword_capture_token) ); MutableSubtree mutable_lookahead = ts_subtree_make_mut(&self->tree_pool, lookahead); ts_subtree_set_symbol(&mutable_lookahead, self->language->keyword_capture_token, self->language); lookahead = ts_subtree_from_mut(mutable_lookahead); continue; } } // If the current lookahead token is not valid and the previous subtree on // the stack was reused from an old tree, then it wasn't actually valid to // reuse that previous subtree. Remove it from the stack, and in its place, // push each of its children. Then try again to process the current lookahead. if (ts_parser__breakdown_top_of_stack(self, version)) { state = ts_stack_state(self->stack, version); ts_subtree_release(&self->tree_pool, lookahead); needs_lex = true; continue; } // Otherwise, there is definitely an error in this version of the parse stack. // Mark this version as paused and continue processing any other stack // versions that exist. If some other version advances successfully, then // this version can simply be removed. But if all versions end up paused, // then error recovery is needed. LOG("detect_error lookahead:%s", TREE_NAME(lookahead)); ts_stack_pause(self->stack, version, lookahead); return true; } } static unsigned ts_parser__condense_stack(TSParser *self) { bool made_changes = false; unsigned min_error_cost = UINT_MAX; for (StackVersion i = 0; i < ts_stack_version_count(self->stack); i++) { // Prune any versions that have been marked for removal. if (ts_stack_is_halted(self->stack, i)) { ts_stack_remove_version(self->stack, i); i--; continue; } // Keep track of the minimum error cost of any stack version so // that it can be returned. ErrorStatus status_i = ts_parser__version_status(self, i); if (!status_i.is_in_error && status_i.cost < min_error_cost) { min_error_cost = status_i.cost; } // Examine each pair of stack versions, removing any versions that // are clearly worse than another version. Ensure that the versions // are ordered from most promising to least promising. for (StackVersion j = 0; j < i; j++) { ErrorStatus status_j = ts_parser__version_status(self, j); switch (ts_parser__compare_versions(self, status_j, status_i)) { case ErrorComparisonTakeLeft: made_changes = true; ts_stack_remove_version(self->stack, i); i--; j = i; break; case ErrorComparisonPreferLeft: case ErrorComparisonNone: if (ts_stack_merge(self->stack, j, i)) { made_changes = true; i--; j = i; } break; case ErrorComparisonPreferRight: made_changes = true; if (ts_stack_merge(self->stack, j, i)) { i--; j = i; } else { ts_stack_swap_versions(self->stack, i, j); } break; case ErrorComparisonTakeRight: made_changes = true; ts_stack_remove_version(self->stack, j); i--; j--; break; } } } // Enforce a hard upper bound on the number of stack versions by // discarding the least promising versions. 
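// Summarizing the pairwise comparison above: for versions j (earlier, more
// promising) and i (later), each outcome maps to one concrete stack edit:
//
//   ErrorComparisonTakeLeft     -> drop version i entirely
//   ErrorComparisonPreferLeft,
//   ErrorComparisonNone         -> try to merge i into j (i disappears on success)
//   ErrorComparisonPreferRight  -> merge if possible, otherwise swap i and j so
//                                  the better version comes first
//   ErrorComparisonTakeRight    -> drop version j entirely
//
// Because the version list stays ordered from most to least promising, the
// hard cap below can simply trim from index MAX_VERSION_COUNT onward.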
while (ts_stack_version_count(self->stack) > MAX_VERSION_COUNT) { ts_stack_remove_version(self->stack, MAX_VERSION_COUNT); made_changes = true; } // If the best-performing stack version is currently paused, or all // versions are paused, then resume the best paused version and begin // the error recovery process. Otherwise, remove the paused versions. if (ts_stack_version_count(self->stack) > 0) { bool has_unpaused_version = false; for (StackVersion i = 0, n = ts_stack_version_count(self->stack); i < n; i++) { if (ts_stack_is_paused(self->stack, i)) { if (!has_unpaused_version && self->accept_count < MAX_VERSION_COUNT) { LOG("resume version:%u", i); min_error_cost = ts_stack_error_cost(self->stack, i); Subtree lookahead = ts_stack_resume(self->stack, i); ts_parser__handle_error(self, i, lookahead); has_unpaused_version = true; } else { ts_stack_remove_version(self->stack, i); made_changes = true; i--; n--; } } else { has_unpaused_version = true; } } } if (made_changes) { LOG("condense"); LOG_STACK(); } return min_error_cost; } static bool ts_parser__balance_subtree(TSParser *self) { Subtree finished_tree = self->finished_tree; // If we haven't canceled balancing in progress before, then we want to clear the tree stack and // push the initial finished tree onto it. Otherwise, if we're resuming balancing after a // cancellation, we don't want to clear the tree stack. if (!self->canceled_balancing) { array_clear(&self->tree_pool.tree_stack); if (ts_subtree_child_count(finished_tree) > 0 && finished_tree.ptr->ref_count == 1) { array_push(&self->tree_pool.tree_stack, ts_subtree_to_mut_unsafe(finished_tree)); } } while (self->tree_pool.tree_stack.size > 0) { if (!ts_parser__check_progress(self, NULL, NULL, 1)) { return false; } MutableSubtree tree = *array_get(&self->tree_pool.tree_stack, self->tree_pool.tree_stack.size - 1 ); if (tree.ptr->repeat_depth > 0) { Subtree child1 = ts_subtree_children(tree)[0]; Subtree child2 = ts_subtree_children(tree)[tree.ptr->child_count - 1]; long repeat_delta = (long)ts_subtree_repeat_depth(child1) - (long)ts_subtree_repeat_depth(child2); if (repeat_delta > 0) { unsigned n = (unsigned)repeat_delta; for (unsigned i = n / 2; i > 0; i /= 2) { ts_subtree_compress(tree, i, self->language, &self->tree_pool.tree_stack); n -= i; // We scale the operation count increment in `ts_parser__check_progress` proportionately to the compression // size since larger values of i take longer to process. Shifting by 4 empirically provides good check // intervals (e.g. 193 operations when i=3100) to prevent blocking during large compressions. uint8_t operations = i >> 4 > 0 ? 
i >> 4 : 1; if (!ts_parser__check_progress(self, NULL, NULL, operations)) { return false; } } } } (void)array_pop(&self->tree_pool.tree_stack); for (uint32_t i = 0; i < tree.ptr->child_count; i++) { Subtree child = ts_subtree_children(tree)[i]; if (ts_subtree_child_count(child) > 0 && child.ptr->ref_count == 1) { array_push(&self->tree_pool.tree_stack, ts_subtree_to_mut_unsafe(child)); } } } return true; } static bool ts_parser_has_outstanding_parse(TSParser *self) { return ( self->canceled_balancing || self->external_scanner_payload || ts_stack_state(self->stack, 0) != 1 || ts_stack_node_count_since_error(self->stack, 0) != 0 ); } // Parser - Public TSParser *ts_parser_new(void) { TSParser *self = ts_calloc(1, sizeof(TSParser)); ts_lexer_init(&self->lexer); array_init(&self->reduce_actions); array_reserve(&self->reduce_actions, 4); self->tree_pool = ts_subtree_pool_new(32); self->stack = ts_stack_new(&self->tree_pool); self->finished_tree = NULL_SUBTREE; self->reusable_node = reusable_node_new(); self->dot_graph_file = NULL; self->cancellation_flag = NULL; self->timeout_duration = 0; self->language = NULL; self->has_scanner_error = false; self->has_error = false; self->canceled_balancing = false; self->external_scanner_payload = NULL; self->end_clock = clock_null(); self->operation_count = 0; self->old_tree = NULL_SUBTREE; self->included_range_differences = (TSRangeArray) array_new(); self->included_range_difference_index = 0; ts_parser__set_cached_token(self, 0, NULL_SUBTREE, NULL_SUBTREE); return self; } void ts_parser_delete(TSParser *self) { if (!self) return; ts_parser_set_language(self, NULL); ts_stack_delete(self->stack); if (self->reduce_actions.contents) { array_delete(&self->reduce_actions); } if (self->included_range_differences.contents) { array_delete(&self->included_range_differences); } if (self->old_tree.ptr) { ts_subtree_release(&self->tree_pool, self->old_tree); self->old_tree = NULL_SUBTREE; } ts_wasm_store_delete(self->wasm_store); ts_lexer_delete(&self->lexer); ts_parser__set_cached_token(self, 0, NULL_SUBTREE, NULL_SUBTREE); ts_subtree_pool_delete(&self->tree_pool); reusable_node_delete(&self->reusable_node); array_delete(&self->trailing_extras); array_delete(&self->trailing_extras2); array_delete(&self->scratch_trees); ts_free(self); } const TSLanguage *ts_parser_language(const TSParser *self) { return self->language; } bool ts_parser_set_language(TSParser *self, const TSLanguage *language) { ts_parser_reset(self); ts_language_delete(self->language); self->language = NULL; if (language) { if ( language->abi_version > TREE_SITTER_LANGUAGE_VERSION || language->abi_version < TREE_SITTER_MIN_COMPATIBLE_LANGUAGE_VERSION ) return false; if (ts_language_is_wasm(language)) { if ( !self->wasm_store || !ts_wasm_store_start(self->wasm_store, &self->lexer.data, language) ) return false; } } self->language = ts_language_copy(language); return true; } TSLogger ts_parser_logger(const TSParser *self) { return self->lexer.logger; } void ts_parser_set_logger(TSParser *self, TSLogger logger) { self->lexer.logger = logger; } void ts_parser_print_dot_graphs(TSParser *self, int fd) { if (self->dot_graph_file) { fclose(self->dot_graph_file); } if (fd >= 0) { #ifdef _WIN32 self->dot_graph_file = _fdopen(fd, "a"); #else self->dot_graph_file = fdopen(fd, "a"); #endif } else { self->dot_graph_file = NULL; } } const size_t *ts_parser_cancellation_flag(const TSParser *self) { return (const size_t *)self->cancellation_flag; } void ts_parser_set_cancellation_flag(TSParser *self, const size_t 
*flag) { self->cancellation_flag = (const volatile size_t *)flag; } uint64_t ts_parser_timeout_micros(const TSParser *self) { return duration_to_micros(self->timeout_duration); } void ts_parser_set_timeout_micros(TSParser *self, uint64_t timeout_micros) { self->timeout_duration = duration_from_micros(timeout_micros); } bool ts_parser_set_included_ranges( TSParser *self, const TSRange *ranges, uint32_t count ) { return ts_lexer_set_included_ranges(&self->lexer, ranges, count); } const TSRange *ts_parser_included_ranges(const TSParser *self, uint32_t *count) { return ts_lexer_included_ranges(&self->lexer, count); } void ts_parser_reset(TSParser *self) { ts_parser__external_scanner_destroy(self); if (self->wasm_store) { ts_wasm_store_reset(self->wasm_store); } if (self->old_tree.ptr) { ts_subtree_release(&self->tree_pool, self->old_tree); self->old_tree = NULL_SUBTREE; } reusable_node_clear(&self->reusable_node); ts_lexer_reset(&self->lexer, length_zero()); ts_stack_clear(self->stack); ts_parser__set_cached_token(self, 0, NULL_SUBTREE, NULL_SUBTREE); if (self->finished_tree.ptr) { ts_subtree_release(&self->tree_pool, self->finished_tree); self->finished_tree = NULL_SUBTREE; } self->accept_count = 0; self->has_scanner_error = false; self->has_error = false; self->canceled_balancing = false; self->parse_options = (TSParseOptions) {0}; self->parse_state = (TSParseState) {0}; } TSTree *ts_parser_parse( TSParser *self, const TSTree *old_tree, TSInput input ) { TSTree *result = NULL; if (!self->language || !input.read) return NULL; if (ts_language_is_wasm(self->language)) { if (!self->wasm_store) return NULL; ts_wasm_store_start(self->wasm_store, &self->lexer.data, self->language); } ts_lexer_set_input(&self->lexer, input); array_clear(&self->included_range_differences); self->included_range_difference_index = 0; self->operation_count = 0; if (self->timeout_duration) { self->end_clock = clock_after(clock_now(), self->timeout_duration); } else { self->end_clock = clock_null(); } if (ts_parser_has_outstanding_parse(self)) { LOG("resume_parsing"); if (self->canceled_balancing) goto balance; } else { ts_parser__external_scanner_create(self); if (self->has_scanner_error) goto exit; if (old_tree) { ts_subtree_retain(old_tree->root); self->old_tree = old_tree->root; ts_range_array_get_changed_ranges( old_tree->included_ranges, old_tree->included_range_count, self->lexer.included_ranges, self->lexer.included_range_count, &self->included_range_differences ); reusable_node_reset(&self->reusable_node, old_tree->root); LOG("parse_after_edit"); LOG_TREE(self->old_tree); for (unsigned i = 0; i < self->included_range_differences.size; i++) { TSRange *range = array_get(&self->included_range_differences, i); LOG("different_included_range %u - %u", range->start_byte, range->end_byte); } } else { reusable_node_clear(&self->reusable_node); LOG("new_parse"); } } uint32_t position = 0, last_position = 0, version_count = 0; do { for ( StackVersion version = 0; version_count = ts_stack_version_count(self->stack), version < version_count; version++ ) { bool allow_node_reuse = version_count == 1; while (ts_stack_is_active(self->stack, version)) { LOG( "process version:%u, version_count:%u, state:%d, row:%u, col:%u", version, ts_stack_version_count(self->stack), ts_stack_state(self->stack, version), ts_stack_position(self->stack, version).extent.row, ts_stack_position(self->stack, version).extent.column ); if (!ts_parser__advance(self, version, allow_node_reuse)) { if (self->has_scanner_error) goto exit; return NULL; } 
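// The overall shape of this driver loop, in sketch form (simplified; the real
// loop also tracks included-range differences and node reuse):
//
//   do {
//     for each stack version:
//       while the version is active:
//         advance it by one token (may lex, shift, reduce, or recover);
//         stop once its byte position moves past the last recorded position;
//     condense the stack: drop or merge versions that are clearly worse;
//     if a finished tree already beats every remaining version, stop;
//   } while (any versions remain);
//
// ts_parser__advance returns false only when parsing must stop early, either
// because of a scanner error or because a cancellation flag, timeout, or
// progress callback asked it to; that check runs once every
// OP_COUNT_PER_PARSER_TIMEOUT_CHECK parse operations inside
// ts_parser__check_progress.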
LOG_STACK(); position = ts_stack_position(self->stack, version).bytes; if (position > last_position || (version > 0 && position == last_position)) { last_position = position; break; } } } // After advancing each version of the stack, re-sort the versions by their cost, // removing any versions that are no longer worth pursuing. unsigned min_error_cost = ts_parser__condense_stack(self); // If there's already a finished parse tree that's better than any in-progress version, // then terminate parsing. Clear the parse stack to remove any extra references to subtrees // within the finished tree, ensuring that these subtrees can be safely mutated in-place // for rebalancing. if (self->finished_tree.ptr && ts_subtree_error_cost(self->finished_tree) < min_error_cost) { ts_stack_clear(self->stack); break; } while (self->included_range_difference_index < self->included_range_differences.size) { TSRange *range = array_get(&self->included_range_differences, self->included_range_difference_index); if (range->end_byte <= position) { self->included_range_difference_index++; } else { break; } } } while (version_count != 0); balance: ts_assert(self->finished_tree.ptr); if (!ts_parser__balance_subtree(self)) { self->canceled_balancing = true; return false; } self->canceled_balancing = false; LOG("done"); LOG_TREE(self->finished_tree); result = ts_tree_new( self->finished_tree, self->language, self->lexer.included_ranges, self->lexer.included_range_count ); self->finished_tree = NULL_SUBTREE; exit: ts_parser_reset(self); return result; } TSTree *ts_parser_parse_with_options( TSParser *self, const TSTree *old_tree, TSInput input, TSParseOptions parse_options ) { self->parse_options = parse_options; self->parse_state.payload = parse_options.payload; TSTree *result = ts_parser_parse(self, old_tree, input); // Reset parser options before further parse calls. self->parse_options = (TSParseOptions) {0}; return result; } TSTree *ts_parser_parse_string( TSParser *self, const TSTree *old_tree, const char *string, uint32_t length ) { return ts_parser_parse_string_encoding(self, old_tree, string, length, TSInputEncodingUTF8); } TSTree *ts_parser_parse_string_encoding( TSParser *self, const TSTree *old_tree, const char *string, uint32_t length, TSInputEncoding encoding ) { TSStringInput input = {string, length}; return ts_parser_parse(self, old_tree, (TSInput) { &input, ts_string_input_read, encoding, NULL, }); } void ts_parser_set_wasm_store(TSParser *self, TSWasmStore *store) { if (self->language && ts_language_is_wasm(self->language)) { // Copy the assigned language into the new store. 
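// Taken together, the public entry points defined above are typically used in
// this order (a minimal sketch; `tree_sitter_mylang` stands in for a generated
// grammar function and is not defined in this library, and the TSTree helpers
// are declared in the public API header rather than in this file):
//
//   #include <tree_sitter/api.h>
//
//   const TSLanguage *tree_sitter_mylang(void);   // hypothetical grammar
//
//   void parse_example(const char *source, uint32_t length) {
//     TSParser *parser = ts_parser_new();
//     if (ts_parser_set_language(parser, tree_sitter_mylang())) {
//       TSTree *tree = ts_parser_parse_string(parser, NULL, source, length);
//       if (tree) ts_tree_delete(tree);
//     }
//     ts_parser_delete(parser);
//   }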
const TSLanguage *copy = ts_language_copy(self->language); ts_parser_set_language(self, copy); ts_language_delete(copy); } ts_wasm_store_delete(self->wasm_store); self->wasm_store = store; } TSWasmStore *ts_parser_take_wasm_store(TSParser *self) { if (self->language && ts_language_is_wasm(self->language)) { ts_parser_set_language(self, NULL); } TSWasmStore *result = self->wasm_store; self->wasm_store = NULL; return result; } #undef LOG hx-0.3.0+20250717/bindings/vendor/src/parser.h000066400000000000000000000167101503625671400204000ustar00rootroot00000000000000#ifndef TREE_SITTER_PARSER_H_ #define TREE_SITTER_PARSER_H_ #ifdef __cplusplus extern "C" { #endif #include #include #include #define ts_builtin_sym_error ((TSSymbol)-1) #define ts_builtin_sym_end 0 #define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 #ifndef TREE_SITTER_API_H_ typedef uint16_t TSStateId; typedef uint16_t TSSymbol; typedef uint16_t TSFieldId; typedef struct TSLanguage TSLanguage; typedef struct TSLanguageMetadata { uint8_t major_version; uint8_t minor_version; uint8_t patch_version; } TSLanguageMetadata; #endif typedef struct { TSFieldId field_id; uint8_t child_index; bool inherited; } TSFieldMapEntry; // Used to index the field and supertype maps. typedef struct { uint16_t index; uint16_t length; } TSMapSlice; typedef struct { bool visible; bool named; bool supertype; } TSSymbolMetadata; typedef struct TSLexer TSLexer; struct TSLexer { int32_t lookahead; TSSymbol result_symbol; void (*advance)(TSLexer *, bool); void (*mark_end)(TSLexer *); uint32_t (*get_column)(TSLexer *); bool (*is_at_included_range_start)(const TSLexer *); bool (*eof)(const TSLexer *); void (*log)(const TSLexer *, const char *, ...); }; typedef enum { TSParseActionTypeShift, TSParseActionTypeReduce, TSParseActionTypeAccept, TSParseActionTypeRecover, } TSParseActionType; typedef union { struct { uint8_t type; TSStateId state; bool extra; bool repetition; } shift; struct { uint8_t type; uint8_t child_count; TSSymbol symbol; int16_t dynamic_precedence; uint16_t production_id; } reduce; uint8_t type; } TSParseAction; typedef struct { uint16_t lex_state; uint16_t external_lex_state; } TSLexMode; typedef struct { uint16_t lex_state; uint16_t external_lex_state; uint16_t reserved_word_set_id; } TSLexerMode; typedef union { TSParseAction action; struct { uint8_t count; bool reusable; } entry; } TSParseActionEntry; typedef struct { int32_t start; int32_t end; } TSCharacterRange; struct TSLanguage { uint32_t abi_version; uint32_t symbol_count; uint32_t alias_count; uint32_t token_count; uint32_t external_token_count; uint32_t state_count; uint32_t large_state_count; uint32_t production_id_count; uint32_t field_count; uint16_t max_alias_sequence_length; const uint16_t *parse_table; const uint16_t *small_parse_table; const uint32_t *small_parse_table_map; const TSParseActionEntry *parse_actions; const char * const *symbol_names; const char * const *field_names; const TSMapSlice *field_map_slices; const TSFieldMapEntry *field_map_entries; const TSSymbolMetadata *symbol_metadata; const TSSymbol *public_symbol_map; const uint16_t *alias_map; const TSSymbol *alias_sequences; const TSLexerMode *lex_modes; bool (*lex_fn)(TSLexer *, TSStateId); bool (*keyword_lex_fn)(TSLexer *, TSStateId); TSSymbol keyword_capture_token; struct { const bool *states; const TSSymbol *symbol_map; void *(*create)(void); void (*destroy)(void *); bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist); unsigned (*serialize)(void *, char *); void (*deserialize)(void *, const char *, 
unsigned); } external_scanner; const TSStateId *primary_state_ids; const char *name; const TSSymbol *reserved_words; uint16_t max_reserved_word_set_size; uint32_t supertype_count; const TSSymbol *supertype_symbols; const TSMapSlice *supertype_map_slices; const TSSymbol *supertype_map_entries; TSLanguageMetadata metadata; }; static inline bool set_contains(const TSCharacterRange *ranges, uint32_t len, int32_t lookahead) { uint32_t index = 0; uint32_t size = len - index; while (size > 1) { uint32_t half_size = size / 2; uint32_t mid_index = index + half_size; const TSCharacterRange *range = &ranges[mid_index]; if (lookahead >= range->start && lookahead <= range->end) { return true; } else if (lookahead > range->end) { index = mid_index; } size -= half_size; } const TSCharacterRange *range = &ranges[index]; return (lookahead >= range->start && lookahead <= range->end); } /* * Lexer Macros */ #ifdef _MSC_VER #define UNUSED __pragma(warning(suppress : 4101)) #else #define UNUSED __attribute__((unused)) #endif #define START_LEXER() \ bool result = false; \ bool skip = false; \ UNUSED \ bool eof = false; \ int32_t lookahead; \ goto start; \ next_state: \ lexer->advance(lexer, skip); \ start: \ skip = false; \ lookahead = lexer->lookahead; #define ADVANCE(state_value) \ { \ state = state_value; \ goto next_state; \ } #define ADVANCE_MAP(...) \ { \ static const uint16_t map[] = { __VA_ARGS__ }; \ for (uint32_t i = 0; i < sizeof(map) / sizeof(map[0]); i += 2) { \ if (map[i] == lookahead) { \ state = map[i + 1]; \ goto next_state; \ } \ } \ } #define SKIP(state_value) \ { \ skip = true; \ state = state_value; \ goto next_state; \ } #define ACCEPT_TOKEN(symbol_value) \ result = true; \ lexer->result_symbol = symbol_value; \ lexer->mark_end(lexer); #define END_STATE() return result; /* * Parse Table Macros */ #define SMALL_STATE(id) ((id) - LARGE_STATE_COUNT) #define STATE(id) id #define ACTIONS(id) id #define SHIFT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = (state_value) \ } \ }} #define SHIFT_REPEAT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = (state_value), \ .repetition = true \ } \ }} #define SHIFT_EXTRA() \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .extra = true \ } \ }} #define REDUCE(symbol_name, children, precedence, prod_id) \ {{ \ .reduce = { \ .type = TSParseActionTypeReduce, \ .symbol = symbol_name, \ .child_count = children, \ .dynamic_precedence = precedence, \ .production_id = prod_id \ }, \ }} #define RECOVER() \ {{ \ .type = TSParseActionTypeRecover \ }} #define ACCEPT_INPUT() \ {{ \ .type = TSParseActionTypeAccept \ }} #ifdef __cplusplus } #endif #endif // TREE_SITTER_PARSER_H_ hx-0.3.0+20250717/bindings/vendor/src/point.h000066400000000000000000000024331503625671400202320ustar00rootroot00000000000000#ifndef TREE_SITTER_POINT_H_ #define TREE_SITTER_POINT_H_ #include "tree_sitter/api.h" #define POINT_ZERO ((TSPoint) {0, 0}) #define POINT_MAX ((TSPoint) {UINT32_MAX, UINT32_MAX}) static inline TSPoint point__new(unsigned row, unsigned column) { TSPoint result = {row, column}; return result; } static inline TSPoint point_add(TSPoint a, TSPoint b) { if (b.row > 0) return point__new(a.row + b.row, b.column); else return point__new(a.row, a.column + b.column); } static inline TSPoint point_sub(TSPoint a, TSPoint b) { if (a.row > b.row) return point__new(a.row - b.row, a.column); else return point__new(0, (a.column >= b.column) ? 
a.column - b.column : 0); } static inline bool point_lte(TSPoint a, TSPoint b) { return (a.row < b.row) || (a.row == b.row && a.column <= b.column); } static inline bool point_lt(TSPoint a, TSPoint b) { return (a.row < b.row) || (a.row == b.row && a.column < b.column); } static inline bool point_gt(TSPoint a, TSPoint b) { return (a.row > b.row) || (a.row == b.row && a.column > b.column); } static inline bool point_gte(TSPoint a, TSPoint b) { return (a.row > b.row) || (a.row == b.row && a.column >= b.column); } static inline bool point_eq(TSPoint a, TSPoint b) { return a.row == b.row && a.column == b.column; } #endif hx-0.3.0+20250717/bindings/vendor/src/portable/000077500000000000000000000000001503625671400205365ustar00rootroot00000000000000hx-0.3.0+20250717/bindings/vendor/src/portable/endian.h000066400000000000000000000152211503625671400221460ustar00rootroot00000000000000// "License": Public Domain // I, Mathias Panzenböck, place this file hereby into the public domain. Use it at your own risk for whatever you like. // In case there are jurisdictions that don't support putting things in the public domain you can also consider it to // be "dual licensed" under the BSD, MIT and Apache licenses, if you want to. This code is trivial anyway. Consider it // an example on how to get the endian conversion functions on different platforms. // updates from https://github.com/mikepb/endian.h/issues/4 #ifndef ENDIAN_H #define ENDIAN_H #if (defined(_WIN16) || defined(_WIN32) || defined(_WIN64)) && !defined(__WINDOWS__) # define __WINDOWS__ #endif #if defined(HAVE_ENDIAN_H) || \ defined(__linux__) || \ defined(__GNU__) || \ defined(__illumos__) || \ defined(__NetBSD__) || \ defined(__OpenBSD__) || \ defined(__CYGWIN__) || \ defined(__MSYS__) || \ defined(__EMSCRIPTEN__) || \ defined(__wasi__) #if defined(__NetBSD__) #define _NETBSD_SOURCE 1 #endif # include #elif defined(HAVE_SYS_ENDIAN_H) || \ defined(__FreeBSD__) || \ defined(__DragonFly__) # include #elif defined(__APPLE__) # define __BYTE_ORDER BYTE_ORDER # define __BIG_ENDIAN BIG_ENDIAN # define __LITTLE_ENDIAN LITTLE_ENDIAN # define __PDP_ENDIAN PDP_ENDIAN # if !defined(_POSIX_C_SOURCE) # include # define htobe16(x) OSSwapHostToBigInt16(x) # define htole16(x) OSSwapHostToLittleInt16(x) # define be16toh(x) OSSwapBigToHostInt16(x) # define le16toh(x) OSSwapLittleToHostInt16(x) # define htobe32(x) OSSwapHostToBigInt32(x) # define htole32(x) OSSwapHostToLittleInt32(x) # define be32toh(x) OSSwapBigToHostInt32(x) # define le32toh(x) OSSwapLittleToHostInt32(x) # define htobe64(x) OSSwapHostToBigInt64(x) # define htole64(x) OSSwapHostToLittleInt64(x) # define be64toh(x) OSSwapBigToHostInt64(x) # define le64toh(x) OSSwapLittleToHostInt64(x) # else # if BYTE_ORDER == LITTLE_ENDIAN # define htobe16(x) __builtin_bswap16(x) # define htole16(x) (x) # define be16toh(x) __builtin_bswap16(x) # define le16toh(x) (x) # define htobe32(x) __builtin_bswap32(x) # define htole32(x) (x) # define be32toh(x) __builtin_bswap32(x) # define le32toh(x) (x) # define htobe64(x) __builtin_bswap64(x) # define htole64(x) (x) # define be64toh(x) __builtin_bswap64(x) # define le64toh(x) (x) # elif BYTE_ORDER == BIG_ENDIAN # define htobe16(x) (x) # define htole16(x) __builtin_bswap16(x) # define be16toh(x) (x) # define le16toh(x) __builtin_bswap16(x) # define htobe32(x) (x) # define htole32(x) __builtin_bswap32(x) # define be32toh(x) (x) # define le32toh(x) __builtin_bswap32(x) # define htobe64(x) (x) # define htole64(x) __builtin_bswap64(x) # define be64toh(x) (x) # define 
le64toh(x) __builtin_bswap64(x) # else # error byte order not supported # endif # endif #elif defined(__WINDOWS__) # if defined(_MSC_VER) && !defined(__clang__) # include # define B_SWAP_16(x) _byteswap_ushort(x) # define B_SWAP_32(x) _byteswap_ulong(x) # define B_SWAP_64(x) _byteswap_uint64(x) # else # define B_SWAP_16(x) __builtin_bswap16(x) # define B_SWAP_32(x) __builtin_bswap32(x) # define B_SWAP_64(x) __builtin_bswap64(x) # endif # if defined(__MINGW32__) || defined(HAVE_SYS_PARAM_H) # include # endif # ifndef BIG_ENDIAN # ifdef __BIG_ENDIAN # define BIG_ENDIAN __BIG_ENDIAN # elif defined(__ORDER_BIG_ENDIAN__) # define BIG_ENDIAN __ORDER_BIG_ENDIAN__ # else # define BIG_ENDIAN 4321 # endif # endif # ifndef LITTLE_ENDIAN # ifdef __LITTLE_ENDIAN # define LITTLE_ENDIAN __LITTLE_ENDIAN # elif defined(__ORDER_LITTLE_ENDIAN__) # define LITTLE_ENDIAN __ORDER_LITTLE_ENDIAN__ # else # define LITTLE_ENDIAN 1234 # endif # endif # ifndef BYTE_ORDER # ifdef __BYTE_ORDER # define BYTE_ORDER __BYTE_ORDER # elif defined(__BYTE_ORDER__) # define BYTE_ORDER __BYTE_ORDER__ # else /* assume LE on Windows if nothing was defined */ # define BYTE_ORDER LITTLE_ENDIAN # endif # endif # if BYTE_ORDER == LITTLE_ENDIAN # define htobe16(x) B_SWAP_16(x) # define htole16(x) (x) # define be16toh(x) B_SWAP_16(x) # define le16toh(x) (x) # define htobe32(x) B_SWAP_32(x) # define htole32(x) (x) # define be32toh(x) B_SWAP_32(x) # define le32toh(x) (x) # define htobe64(x) B_SWAP_64(x) # define htole64(x) (x) # define be64toh(x) B_SWAP_64(x) # define le64toh(x) (x) # elif BYTE_ORDER == BIG_ENDIAN # define htobe16(x) (x) # define htole16(x) B_SWAP_16(x) # define be16toh(x) (x) # define le16toh(x) B_SWAP_16(x) # define htobe32(x) (x) # define htole32(x) B_SWAP_32(x) # define be32toh(x) (x) # define le32toh(x) B_SWAP_32(x) # define htobe64(x) (x) # define htole64(x) B_SWAP_64(x) # define be64toh(x) (x) # define le64toh(x) B_SWAP_64(x) # else # error byte order not supported # endif #elif defined(__QNXNTO__) # include # define __LITTLE_ENDIAN 1234 # define __BIG_ENDIAN 4321 # define __PDP_ENDIAN 3412 # if defined(__BIGENDIAN__) # define __BYTE_ORDER __BIG_ENDIAN # define htobe16(x) (x) # define htobe32(x) (x) # define htobe64(x) (x) # define htole16(x) ENDIAN_SWAP16(x) # define htole32(x) ENDIAN_SWAP32(x) # define htole64(x) ENDIAN_SWAP64(x) # elif defined(__LITTLEENDIAN__) # define __BYTE_ORDER __LITTLE_ENDIAN # define htole16(x) (x) # define htole32(x) (x) # define htole64(x) (x) # define htobe16(x) ENDIAN_SWAP16(x) # define htobe32(x) ENDIAN_SWAP32(x) # define htobe64(x) ENDIAN_SWAP64(x) # else # error byte order not supported # endif # define be16toh(x) ENDIAN_BE16(x) # define be32toh(x) ENDIAN_BE32(x) # define be64toh(x) ENDIAN_BE64(x) # define le16toh(x) ENDIAN_LE16(x) # define le32toh(x) ENDIAN_LE32(x) # define le64toh(x) ENDIAN_LE64(x) #else # error platform not supported #endif #endif hx-0.3.0+20250717/bindings/vendor/src/query.c000066400000000000000000004441321503625671400202470ustar00rootroot00000000000000/* * On NetBSD, defining standard requirements like this removes symbols * from the namespace; however, we need non-standard symbols for * endian.h. 
*/ #if defined(__NetBSD__) && defined(_POSIX_C_SOURCE) #undef _POSIX_C_SOURCE #endif #include "tree_sitter/api.h" #include "./alloc.h" #include "./array.h" #include "./clock.h" #include "./language.h" #include "./point.h" #include "./tree_cursor.h" #include "./unicode.h" #include // #define DEBUG_ANALYZE_QUERY // #define DEBUG_EXECUTE_QUERY #define MAX_STEP_CAPTURE_COUNT 3 #define MAX_NEGATED_FIELD_COUNT 8 #define MAX_STATE_PREDECESSOR_COUNT 256 #define MAX_ANALYSIS_STATE_DEPTH 8 #define MAX_ANALYSIS_ITERATION_COUNT 256 /* * Stream - A sequence of unicode characters derived from a UTF8 string. * This struct is used in parsing queries from S-expressions. */ typedef struct { const char *input; const char *start; const char *end; int32_t next; uint8_t next_size; } Stream; /* * QueryStep - A step in the process of matching a query. Each node within * a query S-expression corresponds to one of these steps. An entire pattern * is represented as a sequence of these steps. The basic properties of a * node are represented by these fields: * - `symbol` - The grammar symbol to match. A zero value represents the * wildcard symbol, '_'. * - `field` - The field name to match. A zero value means that a field name * was not specified. * - `capture_ids` - An array of integers representing the names of captures * associated with this node in the pattern, terminated by a `NONE` value. * - `depth` - The depth where this node occurs in the pattern. The root node * of the pattern has depth zero. * - `negated_field_list_id` - An id representing a set of fields that must * not be present on a node matching this step. * * Steps have some additional fields in order to handle the `.` (or "anchor") operator, * which forbids additional child nodes: * - `is_immediate` - Indicates that the node matching this step cannot be preceded * by other sibling nodes that weren't specified in the pattern. * - `is_last_child` - Indicates that the node matching this step cannot have any * subsequent named siblings. * * For simple patterns, steps are matched in sequential order. But in order to * handle alternative/repeated/optional sub-patterns, query steps are not always * structured as a linear sequence; they sometimes need to split and merge. This * is done using the following fields: * - `alternative_index` - The index of a different query step that serves as * an alternative to this step. A `NONE` value represents no alternative. * When a query state reaches a step with an alternative index, the state * is duplicated, with one copy remaining at the original step, and one copy * moving to the alternative step. The alternative may have its own alternative * step, so this splitting is an iterative process. * - `is_dead_end` - Indicates that this state cannot be passed directly, and * exists only in order to redirect to an alternative index, with no splitting. * - `is_pass_through` - Indicates that state has no matching logic of its own, * and exists only to split a state. One copy of the state advances immediately * to the next step, and one moves to the alternative step. * - `alternative_is_immediate` - Indicates that this step's alternative step * should be treated as if `is_immediate` is true. * * Steps also store some derived state that summarizes how they relate to other * steps within the same pattern. This is used to optimize the matching process: * - `contains_captures` - Indicates that this step or one of its child steps * has a non-empty `capture_ids` list. 
* - `parent_pattern_guaranteed` - Indicates that if this step is reached, then * it and all of its subsequent sibling steps within the same parent pattern * are guaranteed to match. * - `root_pattern_guaranteed` - Similar to `parent_pattern_guaranteed`, but * for the entire top-level pattern. When iterating through a query's * captures using `ts_query_cursor_next_capture`, this field is used to * detect that a capture can safely be returned from a match that has not * even completed yet. */ typedef struct { TSSymbol symbol; TSSymbol supertype_symbol; TSFieldId field; uint16_t capture_ids[MAX_STEP_CAPTURE_COUNT]; uint16_t depth; uint16_t alternative_index; uint16_t negated_field_list_id; bool is_named: 1; bool is_immediate: 1; bool is_last_child: 1; bool is_pass_through: 1; bool is_dead_end: 1; bool alternative_is_immediate: 1; bool contains_captures: 1; bool root_pattern_guaranteed: 1; bool parent_pattern_guaranteed: 1; bool is_missing: 1; } QueryStep; /* * Slice - A slice of an external array. Within a query, capture names, * literal string values, and predicate step information are stored in three * contiguous arrays. Individual captures, string values, and predicates are * represented as slices of these three arrays. */ typedef struct { uint32_t offset; uint32_t length; } Slice; /* * SymbolTable - a two-way mapping of strings to ids. */ typedef struct { Array(char) characters; Array(Slice) slices; } SymbolTable; /** * CaptureQuantifiers - a data structure holding the quantifiers of pattern captures. */ typedef Array(uint8_t) CaptureQuantifiers; /* * PatternEntry - Information about the starting point for matching a particular * pattern. These entries are stored in a 'pattern map' - a sorted array that * makes it possible to efficiently lookup patterns based on the symbol for their * first step. The entry consists of the following fields: * - `pattern_index` - the index of the pattern within the query * - `step_index` - the index of the pattern's first step in the shared `steps` array * - `is_rooted` - whether or not the pattern has a single root node. This property * affects decisions about whether or not to start the pattern for nodes outside * of a QueryCursor's range restriction. */ typedef struct { uint16_t step_index; uint16_t pattern_index; bool is_rooted; } PatternEntry; typedef struct { Slice steps; Slice predicate_steps; uint32_t start_byte; uint32_t end_byte; bool is_non_local; } QueryPattern; typedef struct { uint32_t byte_offset; uint16_t step_index; } StepOffset; /* * QueryState - The state of an in-progress match of a particular pattern * in a query. While executing, a `TSQueryCursor` must keep track of a number * of possible in-progress matches. Each of those possible matches is * represented as one of these states. Fields: * - `id` - A numeric id that is exposed to the public API. This allows the * caller to remove a given match, preventing any more of its captures * from being returned. * - `start_depth` - The depth in the tree where the first step of the state's * pattern was matched. * - `pattern_index` - The pattern that the state is matching. * - `consumed_capture_count` - The number of captures from this match that * have already been returned. * - `capture_list_id` - A numeric id that can be used to retrieve the state's * list of captures from the `CaptureListPool`. * - `seeking_immediate_match` - A flag that indicates that the state's next * step must be matched by the very next sibling. 
This is used when * processing repetitions, or when processing a wildcard node followed by * an anchor. * - `has_in_progress_alternatives` - A flag that indicates that there is are * other states that have the same captures as this state, but are at * different steps in their pattern. This means that in order to obey the * 'longest-match' rule, this state should not be returned as a match until * it is clear that there can be no other alternative match with more captures. */ typedef struct { uint32_t id; uint32_t capture_list_id; uint16_t start_depth; uint16_t step_index; uint16_t pattern_index; uint16_t consumed_capture_count: 12; bool seeking_immediate_match: 1; bool has_in_progress_alternatives: 1; bool dead: 1; bool needs_parent: 1; } QueryState; typedef Array(TSQueryCapture) CaptureList; /* * CaptureListPool - A collection of *lists* of captures. Each query state needs * to maintain its own list of captures. To avoid repeated allocations, this struct * maintains a fixed set of capture lists, and keeps track of which ones are * currently in use by a query state. */ typedef struct { Array(CaptureList) list; CaptureList empty_list; // The maximum number of capture lists that we are allowed to allocate. We // never allow `list` to allocate more entries than this, dropping pending // matches if needed to stay under the limit. uint32_t max_capture_list_count; // The number of capture lists allocated in `list` that are not currently in // use. We reuse those existing-but-unused capture lists before trying to // allocate any new ones. We use an invalid value (UINT32_MAX) for a capture // list's length to indicate that it's not in use. uint32_t free_capture_list_count; } CaptureListPool; /* * AnalysisState - The state needed for walking the parse table when analyzing * a query pattern, to determine at which steps the pattern might fail to match. */ typedef struct { TSStateId parse_state; TSSymbol parent_symbol; uint16_t child_index; TSFieldId field_id: 15; bool done: 1; } AnalysisStateEntry; typedef struct { AnalysisStateEntry stack[MAX_ANALYSIS_STATE_DEPTH]; uint16_t depth; uint16_t step_index; TSSymbol root_symbol; } AnalysisState; typedef Array(AnalysisState *) AnalysisStateSet; typedef struct { AnalysisStateSet states; AnalysisStateSet next_states; AnalysisStateSet deeper_states; AnalysisStateSet state_pool; Array(uint16_t) final_step_indices; Array(TSSymbol) finished_parent_symbols; bool did_abort; } QueryAnalysis; /* * AnalysisSubgraph - A subset of the states in the parse table that are used * in constructing nodes with a certain symbol. Each state is accompanied by * some information about the possible node that could be produced in * downstream states. */ typedef struct { TSStateId state; uint16_t production_id; uint8_t child_index: 7; bool done: 1; } AnalysisSubgraphNode; typedef struct { TSSymbol symbol; Array(TSStateId) start_states; Array(AnalysisSubgraphNode) nodes; } AnalysisSubgraph; typedef Array(AnalysisSubgraph) AnalysisSubgraphArray; /* * StatePredecessorMap - A map that stores the predecessors of each parse state. * This is used during query analysis to determine which parse states can lead * to which reduce actions. */ typedef struct { TSStateId *contents; } StatePredecessorMap; /* * TSQuery - A tree query, compiled from a string of S-expressions. The query * itself is immutable. The mutable state used in the process of executing the * query is stored in a `TSQueryCursor`. 
*/ struct TSQuery { SymbolTable captures; SymbolTable predicate_values; Array(CaptureQuantifiers) capture_quantifiers; Array(QueryStep) steps; Array(PatternEntry) pattern_map; Array(TSQueryPredicateStep) predicate_steps; Array(QueryPattern) patterns; Array(StepOffset) step_offsets; Array(TSFieldId) negated_fields; Array(char) string_buffer; Array(TSSymbol) repeat_symbols_with_rootless_patterns; const TSLanguage *language; uint16_t wildcard_root_pattern_count; }; /* * TSQueryCursor - A stateful struct used to execute a query on a tree. */ struct TSQueryCursor { const TSQuery *query; TSTreeCursor cursor; Array(QueryState) states; Array(QueryState) finished_states; CaptureListPool capture_list_pool; uint32_t depth; uint32_t max_start_depth; uint32_t start_byte; uint32_t end_byte; TSPoint start_point; TSPoint end_point; uint32_t next_state_id; TSClock end_clock; TSDuration timeout_duration; const TSQueryCursorOptions *query_options; TSQueryCursorState query_state; unsigned operation_count; bool on_visible_node; bool ascending; bool halted; bool did_exceed_match_limit; }; static const TSQueryError PARENT_DONE = -1; static const uint16_t PATTERN_DONE_MARKER = UINT16_MAX; static const uint16_t NONE = UINT16_MAX; static const TSSymbol WILDCARD_SYMBOL = 0; static const unsigned OP_COUNT_PER_QUERY_TIMEOUT_CHECK = 100; /********** * Stream **********/ // Advance to the next unicode code point in the stream. static bool stream_advance(Stream *self) { self->input += self->next_size; if (self->input < self->end) { uint32_t size = ts_decode_utf8( (const uint8_t *)self->input, (uint32_t)(self->end - self->input), &self->next ); if (size > 0) { self->next_size = size; return true; } } else { self->next_size = 0; self->next = '\0'; } return false; } // Reset the stream to the given input position, represented as a pointer // into the input string. static void stream_reset(Stream *self, const char *input) { self->input = input; self->next_size = 0; stream_advance(self); } static Stream stream_new(const char *string, uint32_t length) { Stream self = { .next = 0, .input = string, .start = string, .end = string + length, }; stream_advance(&self); return self; } static void stream_skip_whitespace(Stream *self) { for (;;) { if (iswspace(self->next)) { stream_advance(self); } else if (self->next == ';') { // skip over comments stream_advance(self); while (self->next && self->next != '\n') { if (!stream_advance(self)) break; } } else { break; } } } static bool stream_is_ident_start(Stream *self) { return iswalnum(self->next) || self->next == '_' || self->next == '-'; } static void stream_scan_identifier(Stream *stream) { do { stream_advance(stream); } while ( iswalnum(stream->next) || stream->next == '_' || stream->next == '-' || stream->next == '.' || stream->next == '?' || stream->next == '!' ); } static uint32_t stream_offset(Stream *self) { return (uint32_t)(self->input - self->start); } /****************** * CaptureListPool ******************/ static CaptureListPool capture_list_pool_new(void) { return (CaptureListPool) { .list = array_new(), .empty_list = array_new(), .max_capture_list_count = UINT32_MAX, .free_capture_list_count = 0, }; } static void capture_list_pool_reset(CaptureListPool *self) { for (uint16_t i = 0; i < (uint16_t)self->list.size; i++) { // This invalid size means that the list is not in use. 
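// In other words, a size of UINT32_MAX acts as a "free slot" sentinel, which
// avoids keeping a separate free list. The same pattern in miniature, with a
// made-up element type purely for illustration:
//
//   typedef struct { unsigned size; /* ... */ } Bucket;
//
//   static int pool_acquire(Bucket *buckets, unsigned count) {
//     for (unsigned i = 0; i < count; i++) {
//       if (buckets[i].size == UINT32_MAX) {  // sentinel: slot is free
//         buckets[i].size = 0;                // reset and hand it out
//         return (int)i;
//       }
//     }
//     return -1;  // caller must allocate a new bucket (or give up)
//   }
//
// capture_list_pool_acquire below follows the same shape, plus a counter of
// free lists and a configurable upper bound on total allocations.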
array_get(&self->list, i)->size = UINT32_MAX; } self->free_capture_list_count = self->list.size; } static void capture_list_pool_delete(CaptureListPool *self) { for (uint16_t i = 0; i < (uint16_t)self->list.size; i++) { array_delete(array_get(&self->list, i)); } array_delete(&self->list); } static const CaptureList *capture_list_pool_get(const CaptureListPool *self, uint16_t id) { if (id >= self->list.size) return &self->empty_list; return array_get(&self->list, id); } static CaptureList *capture_list_pool_get_mut(CaptureListPool *self, uint16_t id) { ts_assert(id < self->list.size); return array_get(&self->list, id); } static bool capture_list_pool_is_empty(const CaptureListPool *self) { // The capture list pool is empty if all allocated lists are in use, and we // have reached the maximum allowed number of allocated lists. return self->free_capture_list_count == 0 && self->list.size >= self->max_capture_list_count; } static uint16_t capture_list_pool_acquire(CaptureListPool *self) { // First see if any already allocated capture list is currently unused. if (self->free_capture_list_count > 0) { for (uint16_t i = 0; i < (uint16_t)self->list.size; i++) { if (array_get(&self->list, i)->size == UINT32_MAX) { array_clear(array_get(&self->list, i)); self->free_capture_list_count--; return i; } } } // Otherwise allocate and initialize a new capture list, as long as that // doesn't put us over the requested maximum. uint32_t i = self->list.size; if (i >= self->max_capture_list_count) { return NONE; } CaptureList list; array_init(&list); array_push(&self->list, list); return i; } static void capture_list_pool_release(CaptureListPool *self, uint16_t id) { if (id >= self->list.size) return; array_get(&self->list, id)->size = UINT32_MAX; self->free_capture_list_count++; } /************** * Quantifiers **************/ static TSQuantifier quantifier_mul( TSQuantifier left, TSQuantifier right ) { switch (left) { case TSQuantifierZero: return TSQuantifierZero; case TSQuantifierZeroOrOne: switch (right) { case TSQuantifierZero: return TSQuantifierZero; case TSQuantifierZeroOrOne: case TSQuantifierOne: return TSQuantifierZeroOrOne; case TSQuantifierZeroOrMore: case TSQuantifierOneOrMore: return TSQuantifierZeroOrMore; }; break; case TSQuantifierZeroOrMore: switch (right) { case TSQuantifierZero: return TSQuantifierZero; case TSQuantifierZeroOrOne: case TSQuantifierZeroOrMore: case TSQuantifierOne: case TSQuantifierOneOrMore: return TSQuantifierZeroOrMore; }; break; case TSQuantifierOne: return right; case TSQuantifierOneOrMore: switch (right) { case TSQuantifierZero: return TSQuantifierZero; case TSQuantifierZeroOrOne: case TSQuantifierZeroOrMore: return TSQuantifierZeroOrMore; case TSQuantifierOne: case TSQuantifierOneOrMore: return TSQuantifierOneOrMore; }; break; } return TSQuantifierZero; // to make compiler happy, but all cases should be covered above! 
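// A few concrete values of these three operators, as implemented by
// quantifier_mul above and quantifier_join / quantifier_add below (roughly:
// combining quantifiers for nesting under a quantifier, for alternation, and
// for sequencing of captures, respectively):
//
//   quantifier_mul(TSQuantifierOne,       TSQuantifierZeroOrMore) == TSQuantifierZeroOrMore
//   quantifier_mul(TSQuantifierOneOrMore, TSQuantifierZeroOrOne)  == TSQuantifierZeroOrMore
//   quantifier_join(TSQuantifierZero,     TSQuantifierOne)        == TSQuantifierZeroOrOne
//   quantifier_add(TSQuantifierZeroOrOne, TSQuantifierOne)        == TSQuantifierOneOrMore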
} static TSQuantifier quantifier_join( TSQuantifier left, TSQuantifier right ) { switch (left) { case TSQuantifierZero: switch (right) { case TSQuantifierZero: return TSQuantifierZero; case TSQuantifierZeroOrOne: case TSQuantifierOne: return TSQuantifierZeroOrOne; case TSQuantifierZeroOrMore: case TSQuantifierOneOrMore: return TSQuantifierZeroOrMore; }; break; case TSQuantifierZeroOrOne: switch (right) { case TSQuantifierZero: case TSQuantifierZeroOrOne: case TSQuantifierOne: return TSQuantifierZeroOrOne; break; case TSQuantifierZeroOrMore: case TSQuantifierOneOrMore: return TSQuantifierZeroOrMore; break; }; break; case TSQuantifierZeroOrMore: return TSQuantifierZeroOrMore; case TSQuantifierOne: switch (right) { case TSQuantifierZero: case TSQuantifierZeroOrOne: return TSQuantifierZeroOrOne; case TSQuantifierZeroOrMore: return TSQuantifierZeroOrMore; case TSQuantifierOne: return TSQuantifierOne; case TSQuantifierOneOrMore: return TSQuantifierOneOrMore; }; break; case TSQuantifierOneOrMore: switch (right) { case TSQuantifierZero: case TSQuantifierZeroOrOne: case TSQuantifierZeroOrMore: return TSQuantifierZeroOrMore; case TSQuantifierOne: case TSQuantifierOneOrMore: return TSQuantifierOneOrMore; }; break; } return TSQuantifierZero; // to make compiler happy, but all cases should be covered above! } static TSQuantifier quantifier_add( TSQuantifier left, TSQuantifier right ) { switch (left) { case TSQuantifierZero: return right; case TSQuantifierZeroOrOne: switch (right) { case TSQuantifierZero: return TSQuantifierZeroOrOne; case TSQuantifierZeroOrOne: case TSQuantifierZeroOrMore: return TSQuantifierZeroOrMore; case TSQuantifierOne: case TSQuantifierOneOrMore: return TSQuantifierOneOrMore; }; break; case TSQuantifierZeroOrMore: switch (right) { case TSQuantifierZero: return TSQuantifierZeroOrMore; case TSQuantifierZeroOrOne: case TSQuantifierZeroOrMore: return TSQuantifierZeroOrMore; case TSQuantifierOne: case TSQuantifierOneOrMore: return TSQuantifierOneOrMore; }; break; case TSQuantifierOne: switch (right) { case TSQuantifierZero: return TSQuantifierOne; case TSQuantifierZeroOrOne: case TSQuantifierZeroOrMore: case TSQuantifierOne: case TSQuantifierOneOrMore: return TSQuantifierOneOrMore; }; break; case TSQuantifierOneOrMore: return TSQuantifierOneOrMore; } return TSQuantifierZero; // to make compiler happy, but all cases should be covered above! } // Create new capture quantifiers structure static CaptureQuantifiers capture_quantifiers_new(void) { return (CaptureQuantifiers) array_new(); } // Delete capture quantifiers structure static void capture_quantifiers_delete( CaptureQuantifiers *self ) { array_delete(self); } // Clear capture quantifiers structure static void capture_quantifiers_clear( CaptureQuantifiers *self ) { array_clear(self); } // Replace capture quantifiers with the given quantifiers static void capture_quantifiers_replace( CaptureQuantifiers *self, CaptureQuantifiers *quantifiers ) { array_clear(self); array_push_all(self, quantifiers); } // Return capture quantifier for the given capture id static TSQuantifier capture_quantifier_for_id( const CaptureQuantifiers *self, uint16_t id ) { return (self->size <= id) ? 
TSQuantifierZero : (TSQuantifier) *array_get(self, id); } // Add the given quantifier to the current value for id static void capture_quantifiers_add_for_id( CaptureQuantifiers *self, uint16_t id, TSQuantifier quantifier ) { if (self->size <= id) { array_grow_by(self, id + 1 - self->size); } uint8_t *own_quantifier = array_get(self, id); *own_quantifier = (uint8_t) quantifier_add((TSQuantifier) *own_quantifier, quantifier); } // Point-wise add the given quantifiers to the current values static void capture_quantifiers_add_all( CaptureQuantifiers *self, CaptureQuantifiers *quantifiers ) { if (self->size < quantifiers->size) { array_grow_by(self, quantifiers->size - self->size); } for (uint16_t id = 0; id < (uint16_t)quantifiers->size; id++) { uint8_t *quantifier = array_get(quantifiers, id); uint8_t *own_quantifier = array_get(self, id); *own_quantifier = (uint8_t) quantifier_add((TSQuantifier) *own_quantifier, (TSQuantifier) *quantifier); } } // Join the given quantifier with the current values static void capture_quantifiers_mul( CaptureQuantifiers *self, TSQuantifier quantifier ) { for (uint16_t id = 0; id < (uint16_t)self->size; id++) { uint8_t *own_quantifier = array_get(self, id); *own_quantifier = (uint8_t) quantifier_mul((TSQuantifier) *own_quantifier, quantifier); } } // Point-wise join the quantifiers from a list of alternatives with the current values static void capture_quantifiers_join_all( CaptureQuantifiers *self, CaptureQuantifiers *quantifiers ) { if (self->size < quantifiers->size) { array_grow_by(self, quantifiers->size - self->size); } for (uint32_t id = 0; id < quantifiers->size; id++) { uint8_t *quantifier = array_get(quantifiers, id); uint8_t *own_quantifier = array_get(self, id); *own_quantifier = (uint8_t) quantifier_join((TSQuantifier) *own_quantifier, (TSQuantifier) *quantifier); } for (uint32_t id = quantifiers->size; id < self->size; id++) { uint8_t *own_quantifier = array_get(self, id); *own_quantifier = (uint8_t) quantifier_join((TSQuantifier) *own_quantifier, TSQuantifierZero); } } /************** * SymbolTable **************/ static SymbolTable symbol_table_new(void) { return (SymbolTable) { .characters = array_new(), .slices = array_new(), }; } static void symbol_table_delete(SymbolTable *self) { array_delete(&self->characters); array_delete(&self->slices); } static int symbol_table_id_for_name( const SymbolTable *self, const char *name, uint32_t length ) { for (unsigned i = 0; i < self->slices.size; i++) { Slice slice = *array_get(&self->slices, i); if ( slice.length == length && !strncmp(array_get(&self->characters, slice.offset), name, length) ) return i; } return -1; } static const char *symbol_table_name_for_id( const SymbolTable *self, uint16_t id, uint32_t *length ) { Slice slice = *(array_get(&self->slices,id)); *length = slice.length; return array_get(&self->characters, slice.offset); } static uint16_t symbol_table_insert_name( SymbolTable *self, const char *name, uint32_t length ) { int id = symbol_table_id_for_name(self, name, length); if (id >= 0) return (uint16_t)id; Slice slice = { .offset = self->characters.size, .length = length, }; array_grow_by(&self->characters, length + 1); memcpy(array_get(&self->characters, slice.offset), name, length); *array_get(&self->characters, self->characters.size - 1) = 0; array_push(&self->slices, slice); return self->slices.size - 1; } /************ * QueryStep ************/ static QueryStep query_step__new( TSSymbol symbol, uint16_t depth, bool is_immediate ) { QueryStep step = { .symbol = symbol, .depth = 
depth, .field = 0, .alternative_index = NONE, .negated_field_list_id = 0, .contains_captures = false, .is_last_child = false, .is_named = false, .is_pass_through = false, .is_dead_end = false, .root_pattern_guaranteed = false, .is_immediate = is_immediate, .alternative_is_immediate = false, }; for (unsigned i = 0; i < MAX_STEP_CAPTURE_COUNT; i++) { step.capture_ids[i] = NONE; } return step; } static void query_step__add_capture(QueryStep *self, uint16_t capture_id) { for (unsigned i = 0; i < MAX_STEP_CAPTURE_COUNT; i++) { if (self->capture_ids[i] == NONE) { self->capture_ids[i] = capture_id; break; } } } static void query_step__remove_capture(QueryStep *self, uint16_t capture_id) { for (unsigned i = 0; i < MAX_STEP_CAPTURE_COUNT; i++) { if (self->capture_ids[i] == capture_id) { self->capture_ids[i] = NONE; while (i + 1 < MAX_STEP_CAPTURE_COUNT) { if (self->capture_ids[i + 1] == NONE) break; self->capture_ids[i] = self->capture_ids[i + 1]; self->capture_ids[i + 1] = NONE; i++; } break; } } } /********************** * StatePredecessorMap **********************/ static inline StatePredecessorMap state_predecessor_map_new( const TSLanguage *language ) { return (StatePredecessorMap) { .contents = ts_calloc( (size_t)language->state_count * (MAX_STATE_PREDECESSOR_COUNT + 1), sizeof(TSStateId) ), }; } static inline void state_predecessor_map_delete(StatePredecessorMap *self) { ts_free(self->contents); } static inline void state_predecessor_map_add( StatePredecessorMap *self, TSStateId state, TSStateId predecessor ) { size_t index = (size_t)state * (MAX_STATE_PREDECESSOR_COUNT + 1); TSStateId *count = &self->contents[index]; if ( *count == 0 || (*count < MAX_STATE_PREDECESSOR_COUNT && self->contents[index + *count] != predecessor) ) { (*count)++; self->contents[index + *count] = predecessor; } } static inline const TSStateId *state_predecessor_map_get( const StatePredecessorMap *self, TSStateId state, unsigned *count ) { size_t index = (size_t)state * (MAX_STATE_PREDECESSOR_COUNT + 1); *count = self->contents[index]; return &self->contents[index + 1]; } /**************** * AnalysisState ****************/ static unsigned analysis_state__recursion_depth(const AnalysisState *self) { unsigned result = 0; for (unsigned i = 0; i < self->depth; i++) { TSSymbol symbol = self->stack[i].parent_symbol; for (unsigned j = 0; j < i; j++) { if (self->stack[j].parent_symbol == symbol) { result++; break; } } } return result; } static inline int analysis_state__compare_position( AnalysisState *const *self, AnalysisState *const *other ) { for (unsigned i = 0; i < (*self)->depth; i++) { if (i >= (*other)->depth) return -1; if ((*self)->stack[i].child_index < (*other)->stack[i].child_index) return -1; if ((*self)->stack[i].child_index > (*other)->stack[i].child_index) return 1; } if ((*self)->depth < (*other)->depth) return 1; if ((*self)->step_index < (*other)->step_index) return -1; if ((*self)->step_index > (*other)->step_index) return 1; return 0; } static inline int analysis_state__compare( AnalysisState *const *self, AnalysisState *const *other ) { int result = analysis_state__compare_position(self, other); if (result != 0) return result; for (unsigned i = 0; i < (*self)->depth; i++) { if ((*self)->stack[i].parent_symbol < (*other)->stack[i].parent_symbol) return -1; if ((*self)->stack[i].parent_symbol > (*other)->stack[i].parent_symbol) return 1; if ((*self)->stack[i].parse_state < (*other)->stack[i].parse_state) return -1; if ((*self)->stack[i].parse_state > (*other)->stack[i].parse_state) return 1; if 
((*self)->stack[i].field_id < (*other)->stack[i].field_id) return -1; if ((*self)->stack[i].field_id > (*other)->stack[i].field_id) return 1; } return 0; } static inline AnalysisStateEntry *analysis_state__top(AnalysisState *self) { if (self->depth == 0) { return &self->stack[0]; } return &self->stack[self->depth - 1]; } static inline bool analysis_state__has_supertype(AnalysisState *self, TSSymbol symbol) { for (unsigned i = 0; i < self->depth; i++) { if (self->stack[i].parent_symbol == symbol) return true; } return false; } /****************** * AnalysisStateSet ******************/ // Obtains an `AnalysisState` instance, either by consuming one from this set's object pool, or by // cloning one from scratch. static inline AnalysisState *analysis_state_pool__clone_or_reuse( AnalysisStateSet *self, AnalysisState *borrowed_item ) { AnalysisState *new_item; if (self->size) { new_item = array_pop(self); } else { new_item = ts_malloc(sizeof(AnalysisState)); } *new_item = *borrowed_item; return new_item; } // Inserts a clone of the passed-in item at the appropriate position to maintain ordering in this // set. The set does not contain duplicates, so if the item is already present, it will not be // inserted, and no clone will be made. // // The caller retains ownership of the passed-in memory. However, the clone that is created by this // function will be managed by the state set. static inline void analysis_state_set__insert_sorted( AnalysisStateSet *self, AnalysisStateSet *pool, AnalysisState *borrowed_item ) { unsigned index, exists; array_search_sorted_with(self, analysis_state__compare, &borrowed_item, &index, &exists); if (!exists) { AnalysisState *new_item = analysis_state_pool__clone_or_reuse(pool, borrowed_item); array_insert(self, index, new_item); } } // Inserts a clone of the passed-in item at the end position of this list. // // IMPORTANT: The caller MUST ENSURE that this item is larger (by the comparison function // `analysis_state__compare`) than largest item already in this set. If items are inserted in the // wrong order, the set will not function properly for future use. // // The caller retains ownership of the passed-in memory. However, the clone that is created by this // function will be managed by the state set. static inline void analysis_state_set__push( AnalysisStateSet *self, AnalysisStateSet *pool, AnalysisState *borrowed_item ) { AnalysisState *new_item = analysis_state_pool__clone_or_reuse(pool, borrowed_item); array_push(self, new_item); } // Removes all items from this set, returning it to an empty state. static inline void analysis_state_set__clear(AnalysisStateSet *self, AnalysisStateSet *pool) { array_push_all(pool, self); array_clear(self); } // Releases all memory that is managed with this state set, including any items currently present. // After calling this function, the set is no longer suitable for use. 
static inline void analysis_state_set__delete(AnalysisStateSet *self) { for (unsigned i = 0; i < self->size; i++) { ts_free(self->contents[i]); } array_delete(self); } /**************** * QueryAnalyzer ****************/ static inline QueryAnalysis query_analysis__new(void) { return (QueryAnalysis) { .states = array_new(), .next_states = array_new(), .deeper_states = array_new(), .state_pool = array_new(), .final_step_indices = array_new(), .finished_parent_symbols = array_new(), .did_abort = false, }; } static inline void query_analysis__delete(QueryAnalysis *self) { analysis_state_set__delete(&self->states); analysis_state_set__delete(&self->next_states); analysis_state_set__delete(&self->deeper_states); analysis_state_set__delete(&self->state_pool); array_delete(&self->final_step_indices); array_delete(&self->finished_parent_symbols); } /*********************** * AnalysisSubgraphNode ***********************/ static inline int analysis_subgraph_node__compare(const AnalysisSubgraphNode *self, const AnalysisSubgraphNode *other) { if (self->state < other->state) return -1; if (self->state > other->state) return 1; if (self->child_index < other->child_index) return -1; if (self->child_index > other->child_index) return 1; if (self->done < other->done) return -1; if (self->done > other->done) return 1; if (self->production_id < other->production_id) return -1; if (self->production_id > other->production_id) return 1; return 0; } /********* * Query *********/ // The `pattern_map` contains a mapping from TSSymbol values to indices in the // `steps` array. For a given syntax node, the `pattern_map` makes it possible // to quickly find the starting steps of all of the patterns whose root matches // that node. Each entry has two fields: a `pattern_index`, which identifies one // of the patterns in the query, and a `step_index`, which indicates the start // offset of that pattern's steps within the `steps` array. // // The entries are sorted by the patterns' root symbols, and lookups use a // binary search. This ensures that the cost of this initial lookup step // scales logarithmically with the number of patterns in the query. // // This returns `true` if the symbol is present and `false` otherwise. // If the symbol is not present `*result` is set to the index where the // symbol should be inserted. static inline bool ts_query__pattern_map_search( const TSQuery *self, TSSymbol needle, uint32_t *result ) { uint32_t base_index = self->wildcard_root_pattern_count; uint32_t size = self->pattern_map.size - base_index; if (size == 0) { *result = base_index; return false; } while (size > 1) { uint32_t half_size = size / 2; uint32_t mid_index = base_index + half_size; TSSymbol mid_symbol = array_get(&self->steps, array_get(&self->pattern_map, mid_index)->step_index )->symbol; if (needle > mid_symbol) base_index = mid_index; size -= half_size; } TSSymbol symbol = array_get(&self->steps, array_get(&self->pattern_map, base_index)->step_index )->symbol; if (needle > symbol) { base_index++; if (base_index < self->pattern_map.size) { symbol = array_get(&self->steps, array_get(&self->pattern_map, base_index)->step_index )->symbol; } } *result = base_index; return needle == symbol; } // Insert a new pattern's start index into the pattern map, maintaining // the pattern map's ordering invariant. 
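// For example, a query with one wildcard-rooted pattern and two patterns rooted at
// `identifier` and `string` would have its pattern_map entries' root symbols appear in
// the order [_, identifier, string], with wildcard_root_pattern_count == 1, so that
// ts_query__pattern_map_search only bisects the two symbol-rooted entries.
// (The node types here are illustrative, not taken from any particular grammar.)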
static inline void ts_query__pattern_map_insert( TSQuery *self, TSSymbol symbol, PatternEntry new_entry ) { uint32_t index; ts_query__pattern_map_search(self, symbol, &index); // Ensure that the entries are sorted not only by symbol, but also // by pattern_index. This way, states for earlier patterns will be // initiated first, which allows the ordering of the states array // to be maintained more efficiently. while (index < self->pattern_map.size) { PatternEntry *entry = array_get(&self->pattern_map, index); if ( array_get(&self->steps, entry->step_index)->symbol == symbol && entry->pattern_index < new_entry.pattern_index ) { index++; } else { break; } } array_insert(&self->pattern_map, index, new_entry); } // Walk the subgraph for this non-terminal, tracking all of the possible // sequences of progress within the pattern. static void ts_query__perform_analysis( TSQuery *self, const AnalysisSubgraphArray *subgraphs, QueryAnalysis *analysis ) { unsigned recursion_depth_limit = 0; unsigned prev_final_step_count = 0; array_clear(&analysis->final_step_indices); array_clear(&analysis->finished_parent_symbols); for (unsigned iteration = 0;; iteration++) { if (iteration == MAX_ANALYSIS_ITERATION_COUNT) { analysis->did_abort = true; break; } #ifdef DEBUG_ANALYZE_QUERY printf("Iteration: %u. Final step indices:", iteration); for (unsigned j = 0; j < analysis->final_step_indices.size; j++) { printf(" %4u", *array_get(&analysis->final_step_indices, j)); } printf("\n"); for (unsigned j = 0; j < analysis->states.size; j++) { AnalysisState *state = *array_get(&analysis->states, j); printf(" %3u: step: %u, stack: [", j, state->step_index); for (unsigned k = 0; k < state->depth; k++) { printf( " {%s, child: %u, state: %4u", self->language->symbol_names[state->stack[k].parent_symbol], state->stack[k].child_index, state->stack[k].parse_state ); if (state->stack[k].field_id) printf(", field: %s", self->language->field_names[state->stack[k].field_id]); if (state->stack[k].done) printf(", DONE"); printf("}"); } printf(" ]\n"); } #endif // If no further progress can be made within the current recursion depth limit, then // bump the depth limit by one, and continue to process the states the exceeded the // limit. But only allow this if progress has been made since the last time the depth // limit was increased. if (analysis->states.size == 0) { if ( analysis->deeper_states.size > 0 && analysis->final_step_indices.size > prev_final_step_count ) { #ifdef DEBUG_ANALYZE_QUERY printf("Increase recursion depth limit to %u\n", recursion_depth_limit + 1); #endif prev_final_step_count = analysis->final_step_indices.size; recursion_depth_limit++; AnalysisStateSet _states = analysis->states; analysis->states = analysis->deeper_states; analysis->deeper_states = _states; continue; } break; } analysis_state_set__clear(&analysis->next_states, &analysis->state_pool); for (unsigned j = 0; j < analysis->states.size; j++) { AnalysisState * const state = *array_get(&analysis->states, j); // For efficiency, it's important to avoid processing the same analysis state more // than once. To achieve this, keep the states in order of ascending position within // their hypothetical syntax trees. In each iteration of this loop, start by advancing // the states that have made the least progress. Avoid advancing states that have already // made more progress. 
if (analysis->next_states.size > 0) { int comparison = analysis_state__compare_position( &state, array_back(&analysis->next_states) ); if (comparison == 0) { analysis_state_set__insert_sorted(&analysis->next_states, &analysis->state_pool, state); continue; } else if (comparison > 0) { #ifdef DEBUG_ANALYZE_QUERY printf("Terminate iteration at state %u\n", j); #endif while (j < analysis->states.size) { analysis_state_set__push( &analysis->next_states, &analysis->state_pool, *array_get(&analysis->states, j) ); j++; } break; } } const TSStateId parse_state = analysis_state__top(state)->parse_state; const TSSymbol parent_symbol = analysis_state__top(state)->parent_symbol; const TSFieldId parent_field_id = analysis_state__top(state)->field_id; const unsigned child_index = analysis_state__top(state)->child_index; const QueryStep * const step = array_get(&self->steps, state->step_index); unsigned subgraph_index, exists; array_search_sorted_by(subgraphs, .symbol, parent_symbol, &subgraph_index, &exists); if (!exists) continue; const AnalysisSubgraph *subgraph = array_get(subgraphs, subgraph_index); // Follow every possible path in the parse table, but only visit states that // are part of the subgraph for the current symbol. LookaheadIterator lookahead_iterator = ts_language_lookaheads(self->language, parse_state); while (ts_lookahead_iterator__next(&lookahead_iterator)) { TSSymbol sym = lookahead_iterator.symbol; AnalysisSubgraphNode successor = { .state = parse_state, .child_index = child_index, }; if (lookahead_iterator.action_count) { const TSParseAction *action = &lookahead_iterator.actions[lookahead_iterator.action_count - 1]; if (action->type == TSParseActionTypeShift) { if (!action->shift.extra) { successor.state = action->shift.state; successor.child_index++; } } else { continue; } } else if (lookahead_iterator.next_state != 0) { successor.state = lookahead_iterator.next_state; successor.child_index++; } else { continue; } unsigned node_index; array_search_sorted_with( &subgraph->nodes, analysis_subgraph_node__compare, &successor, &node_index, &exists ); while (node_index < subgraph->nodes.size) { AnalysisSubgraphNode *node = array_get(&subgraph->nodes, node_index); node_index++; if (node->state != successor.state || node->child_index != successor.child_index) break; // Use the subgraph to determine what alias and field will eventually be applied // to this child node. TSSymbol alias = ts_language_alias_at(self->language, node->production_id, child_index); TSSymbol visible_symbol = alias ? alias : self->language->symbol_metadata[sym].visible ? self->language->public_symbol_map[sym] : 0; TSFieldId field_id = parent_field_id; if (!field_id) { const TSFieldMapEntry *field_map, *field_map_end; ts_language_field_map(self->language, node->production_id, &field_map, &field_map_end); for (; field_map != field_map_end; field_map++) { if (!field_map->inherited && field_map->child_index == child_index) { field_id = field_map->field_id; break; } } } // Create a new state that has advanced past this hypothetical subtree. AnalysisState next_state = *state; AnalysisStateEntry *next_state_top = analysis_state__top(&next_state); next_state_top->child_index = successor.child_index; next_state_top->parse_state = successor.state; if (node->done) next_state_top->done = true; // Determine if this hypothetical child node would match the current step // of the query pattern. 
bool does_match = false; if (visible_symbol) { does_match = true; if (step->symbol == WILDCARD_SYMBOL) { if ( step->is_named && !self->language->symbol_metadata[visible_symbol].named ) does_match = false; } else if (step->symbol != visible_symbol) { does_match = false; } if (step->field && step->field != field_id) { does_match = false; } if ( step->supertype_symbol && !analysis_state__has_supertype(state, step->supertype_symbol) ) does_match = false; } // If this child is hidden, then descend into it and walk through its children. // If the top entry of the stack is at the end of its rule, then that entry can // be replaced. Otherwise, push a new entry onto the stack. else if (sym >= self->language->token_count) { if (!next_state_top->done) { if (next_state.depth + 1 >= MAX_ANALYSIS_STATE_DEPTH) { #ifdef DEBUG_ANALYZE_QUERY printf("Exceeded depth limit for state %u\n", j); #endif analysis->did_abort = true; continue; } next_state.depth++; next_state_top = analysis_state__top(&next_state); } *next_state_top = (AnalysisStateEntry) { .parse_state = parse_state, .parent_symbol = sym, .child_index = 0, .field_id = field_id, .done = false, }; if (analysis_state__recursion_depth(&next_state) > recursion_depth_limit) { analysis_state_set__insert_sorted( &analysis->deeper_states, &analysis->state_pool, &next_state ); continue; } } // Pop from the stack when this state reached the end of its current syntax node. while (next_state.depth > 0 && next_state_top->done) { next_state.depth--; next_state_top = analysis_state__top(&next_state); } // If this hypothetical child did match the current step of the query pattern, // then advance to the next step at the current depth. This involves skipping // over any descendant steps of the current child. const QueryStep *next_step = step; if (does_match) { for (;;) { next_state.step_index++; next_step = array_get(&self->steps, next_state.step_index); if ( next_step->depth == PATTERN_DONE_MARKER || next_step->depth <= step->depth ) break; } } else if (successor.state == parse_state) { continue; } for (;;) { // Skip pass-through states. Although these states have alternatives, they are only // used to implement repetitions, and query analysis does not need to process // repetitions in order to determine whether steps are possible and definite. if (next_step->is_pass_through) { next_state.step_index++; next_step++; continue; } // If the pattern is finished or hypothetical parent node is complete, then // record that matching can terminate at this step of the pattern. Otherwise, // add this state to the list of states to process on the next iteration. if (!next_step->is_dead_end) { bool did_finish_pattern = array_get(&self->steps, next_state.step_index)->depth != step->depth; if (did_finish_pattern) { array_insert_sorted_by(&analysis->finished_parent_symbols, , state->root_symbol); } else if (next_state.depth == 0) { array_insert_sorted_by(&analysis->final_step_indices, , next_state.step_index); } else { analysis_state_set__insert_sorted(&analysis->next_states, &analysis->state_pool, &next_state); } } // If the state has advanced to a step with an alternative step, then add another state // at that alternative step. This process is simpler than the process of actually matching a // pattern during query execution, because for the purposes of query analysis, there is no // need to process repetitions. 
if ( does_match && next_step->alternative_index != NONE && next_step->alternative_index > next_state.step_index ) { next_state.step_index = next_step->alternative_index; next_step = array_get(&self->steps, next_state.step_index); } else { break; } } } } } AnalysisStateSet _states = analysis->states; analysis->states = analysis->next_states; analysis->next_states = _states; } } static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { Array(uint16_t) non_rooted_pattern_start_steps = array_new(); for (unsigned i = 0; i < self->pattern_map.size; i++) { PatternEntry *pattern = array_get(&self->pattern_map, i); if (!pattern->is_rooted) { QueryStep *step = array_get(&self->steps, pattern->step_index); if (step->symbol != WILDCARD_SYMBOL) { array_push(&non_rooted_pattern_start_steps, i); } } } // Walk forward through all of the steps in the query, computing some // basic information about each step. Mark all of the steps that contain // captures, and record the indices of all of the steps that have child steps. Array(uint32_t) parent_step_indices = array_new(); for (unsigned i = 0; i < self->steps.size; i++) { QueryStep *step = array_get(&self->steps, i); if (step->depth == PATTERN_DONE_MARKER) { step->parent_pattern_guaranteed = true; step->root_pattern_guaranteed = true; continue; } bool has_children = false; bool is_wildcard = step->symbol == WILDCARD_SYMBOL; step->contains_captures = step->capture_ids[0] != NONE; for (unsigned j = i + 1; j < self->steps.size; j++) { QueryStep *next_step = array_get(&self->steps, j); if ( next_step->depth == PATTERN_DONE_MARKER || next_step->depth <= step->depth ) break; if (next_step->capture_ids[0] != NONE) { step->contains_captures = true; } if (!is_wildcard) { next_step->root_pattern_guaranteed = true; next_step->parent_pattern_guaranteed = true; } has_children = true; } if (has_children && !is_wildcard) { array_push(&parent_step_indices, i); } } // For every parent symbol in the query, initialize an 'analysis subgraph'. // This subgraph lists all of the states in the parse table that are directly // involved in building subtrees for this symbol. // // In addition to the parent symbols in the query, construct subgraphs for all // of the hidden symbols in the grammar, because these might occur within // one of the parent nodes, such that their children appear to belong to the // parent. AnalysisSubgraphArray subgraphs = array_new(); for (unsigned i = 0; i < parent_step_indices.size; i++) { uint32_t parent_step_index = *array_get(&parent_step_indices, i); TSSymbol parent_symbol = array_get(&self->steps, parent_step_index)->symbol; AnalysisSubgraph subgraph = { .symbol = parent_symbol }; array_insert_sorted_by(&subgraphs, .symbol, subgraph); } for (TSSymbol sym = (uint16_t)self->language->token_count; sym < (uint16_t)self->language->symbol_count; sym++) { if (!ts_language_symbol_metadata(self->language, sym).visible) { AnalysisSubgraph subgraph = { .symbol = sym }; array_insert_sorted_by(&subgraphs, .symbol, subgraph); } } // Scan the parse table to find the data needed to populate these subgraphs. // Collect three things during this scan: // 1) All of the parse states where one of these symbols can start. // 2) All of the parse states where one of these symbols can end, along // with information about the node that would be created. // 3) A list of predecessor states for each state. 
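// The predecessor map built below is stored as one flat row of
// (MAX_STATE_PREDECESSOR_COUNT + 1) TSStateId slots per parse state: slot 0 of a row
// holds the number of predecessors recorded so far, and the following slots hold the
// predecessor state ids themselves (see state_predecessor_map_add above).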
StatePredecessorMap predecessor_map = state_predecessor_map_new(self->language); for (TSStateId state = 1; state < (uint16_t)self->language->state_count; state++) { unsigned subgraph_index, exists; LookaheadIterator lookahead_iterator = ts_language_lookaheads(self->language, state); while (ts_lookahead_iterator__next(&lookahead_iterator)) { if (lookahead_iterator.action_count) { for (unsigned i = 0; i < lookahead_iterator.action_count; i++) { const TSParseAction *action = &lookahead_iterator.actions[i]; if (action->type == TSParseActionTypeReduce) { const TSSymbol *aliases, *aliases_end; ts_language_aliases_for_symbol( self->language, action->reduce.symbol, &aliases, &aliases_end ); for (const TSSymbol *symbol = aliases; symbol < aliases_end; symbol++) { array_search_sorted_by( &subgraphs, .symbol, *symbol, &subgraph_index, &exists ); if (exists) { AnalysisSubgraph *subgraph = array_get(&subgraphs, subgraph_index); if (subgraph->nodes.size == 0 || array_back(&subgraph->nodes)->state != state) { array_push(&subgraph->nodes, ((AnalysisSubgraphNode) { .state = state, .production_id = action->reduce.production_id, .child_index = action->reduce.child_count, .done = true, })); } } } } else if (action->type == TSParseActionTypeShift && !action->shift.extra) { TSStateId next_state = action->shift.state; state_predecessor_map_add(&predecessor_map, next_state, state); } } } else if (lookahead_iterator.next_state != 0) { if (lookahead_iterator.next_state != state) { state_predecessor_map_add(&predecessor_map, lookahead_iterator.next_state, state); } if (ts_language_state_is_primary(self->language, state)) { const TSSymbol *aliases, *aliases_end; ts_language_aliases_for_symbol( self->language, lookahead_iterator.symbol, &aliases, &aliases_end ); for (const TSSymbol *symbol = aliases; symbol < aliases_end; symbol++) { array_search_sorted_by( &subgraphs, .symbol, *symbol, &subgraph_index, &exists ); if (exists) { AnalysisSubgraph *subgraph = array_get(&subgraphs, subgraph_index); if ( subgraph->start_states.size == 0 || *array_back(&subgraph->start_states) != state ) array_push(&subgraph->start_states, state); } } } } } } // For each subgraph, compute the preceding states by walking backward // from the end states using the predecessor map. 
Array(AnalysisSubgraphNode) next_nodes = array_new(); for (unsigned i = 0; i < subgraphs.size; i++) { AnalysisSubgraph *subgraph = array_get(&subgraphs, i); if (subgraph->nodes.size == 0) { array_delete(&subgraph->start_states); array_erase(&subgraphs, i); i--; continue; } array_assign(&next_nodes, &subgraph->nodes); while (next_nodes.size > 0) { AnalysisSubgraphNode node = array_pop(&next_nodes); if (node.child_index > 1) { unsigned predecessor_count; const TSStateId *predecessors = state_predecessor_map_get( &predecessor_map, node.state, &predecessor_count ); for (unsigned j = 0; j < predecessor_count; j++) { AnalysisSubgraphNode predecessor_node = { .state = predecessors[j], .child_index = node.child_index - 1, .production_id = node.production_id, .done = false, }; unsigned index, exists; array_search_sorted_with( &subgraph->nodes, analysis_subgraph_node__compare, &predecessor_node, &index, &exists ); if (!exists) { array_insert(&subgraph->nodes, index, predecessor_node); array_push(&next_nodes, predecessor_node); } } } } } #ifdef DEBUG_ANALYZE_QUERY printf("\nSubgraphs:\n"); for (unsigned i = 0; i < subgraphs.size; i++) { AnalysisSubgraph *subgraph = array_get(&subgraphs, i); printf(" %u, %s:\n", subgraph->symbol, ts_language_symbol_name(self->language, subgraph->symbol)); for (unsigned j = 0; j < subgraph->start_states.size; j++) { printf( " {state: %u}\n", *array_get(&subgraph->start_states, j) ); } for (unsigned j = 0; j < subgraph->nodes.size; j++) { AnalysisSubgraphNode *node = array_get(&subgraph->nodes, j); printf( " {state: %u, child_index: %u, production_id: %u, done: %d}\n", node->state, node->child_index, node->production_id, node->done ); } printf("\n"); } #endif // For each non-terminal pattern, determine if the pattern can successfully match, // and identify all of the possible children within the pattern where matching could fail. bool all_patterns_are_valid = true; QueryAnalysis analysis = query_analysis__new(); for (unsigned i = 0; i < parent_step_indices.size; i++) { uint16_t parent_step_index = *array_get(&parent_step_indices, i); uint16_t parent_depth = array_get(&self->steps, parent_step_index)->depth; TSSymbol parent_symbol = array_get(&self->steps, parent_step_index)->symbol; if (parent_symbol == ts_builtin_sym_error) continue; // Find the subgraph that corresponds to this pattern's root symbol. If the pattern's // root symbol is a terminal, then return an error. unsigned subgraph_index, exists; array_search_sorted_by(&subgraphs, .symbol, parent_symbol, &subgraph_index, &exists); if (!exists) { unsigned first_child_step_index = parent_step_index + 1; uint32_t j, child_exists; array_search_sorted_by(&self->step_offsets, .step_index, first_child_step_index, &j, &child_exists); ts_assert(child_exists); *error_offset = array_get(&self->step_offsets, j)->byte_offset; all_patterns_are_valid = false; break; } // Initialize an analysis state at every parse state in the table where // this parent symbol can occur. 
AnalysisSubgraph *subgraph = array_get(&subgraphs, subgraph_index); analysis_state_set__clear(&analysis.states, &analysis.state_pool); analysis_state_set__clear(&analysis.deeper_states, &analysis.state_pool); for (unsigned j = 0; j < subgraph->start_states.size; j++) { TSStateId parse_state = *array_get(&subgraph->start_states, j); analysis_state_set__push(&analysis.states, &analysis.state_pool, &((AnalysisState) { .step_index = parent_step_index + 1, .stack = { [0] = { .parse_state = parse_state, .parent_symbol = parent_symbol, .child_index = 0, .field_id = 0, .done = false, }, }, .depth = 1, .root_symbol = parent_symbol, })); } #ifdef DEBUG_ANALYZE_QUERY printf( "\nWalk states for %s:\n", ts_language_symbol_name(self->language, (*array_get(&analysis.states, 0))->stack[0].parent_symbol) ); #endif analysis.did_abort = false; ts_query__perform_analysis(self, &subgraphs, &analysis); // If this pattern could not be fully analyzed, then every step should // be considered fallible. if (analysis.did_abort) { for (unsigned j = parent_step_index + 1; j < self->steps.size; j++) { QueryStep *step = array_get(&self->steps, j); if ( step->depth <= parent_depth || step->depth == PATTERN_DONE_MARKER ) break; if (!step->is_dead_end) { step->parent_pattern_guaranteed = false; step->root_pattern_guaranteed = false; } } continue; } // If this pattern cannot match, store the pattern index so that it can be // returned to the caller. if (analysis.finished_parent_symbols.size == 0) { ts_assert(analysis.final_step_indices.size > 0); uint16_t impossible_step_index = *array_back(&analysis.final_step_indices); uint32_t j, impossible_exists; array_search_sorted_by(&self->step_offsets, .step_index, impossible_step_index, &j, &impossible_exists); if (j >= self->step_offsets.size) j = self->step_offsets.size - 1; *error_offset = array_get(&self->step_offsets, j)->byte_offset; all_patterns_are_valid = false; break; } // Mark as fallible any step where a match terminated. // Later, this property will be propagated to all of the step's predecessors. for (unsigned j = 0; j < analysis.final_step_indices.size; j++) { uint32_t final_step_index = *array_get(&analysis.final_step_indices, j); QueryStep *step = array_get(&self->steps, final_step_index); if ( step->depth != PATTERN_DONE_MARKER && step->depth > parent_depth && !step->is_dead_end ) { step->parent_pattern_guaranteed = false; step->root_pattern_guaranteed = false; } } } // Mark as indefinite any step with captures that are used in predicates. Array(uint16_t) predicate_capture_ids = array_new(); for (unsigned i = 0; i < self->patterns.size; i++) { QueryPattern *pattern = array_get(&self->patterns, i); // Gather all of the captures that are used in predicates for this pattern. array_clear(&predicate_capture_ids); for ( unsigned start = pattern->predicate_steps.offset, end = start + pattern->predicate_steps.length, j = start; j < end; j++ ) { TSQueryPredicateStep *step = array_get(&self->predicate_steps, j); if (step->type == TSQueryPredicateStepTypeCapture) { uint16_t value_id = step->value_id; array_insert_sorted_by(&predicate_capture_ids, , value_id); } } // Find all of the steps that have these captures. 
for ( unsigned start = pattern->steps.offset, end = start + pattern->steps.length, j = start; j < end; j++ ) { QueryStep *step = array_get(&self->steps, j); for (unsigned k = 0; k < MAX_STEP_CAPTURE_COUNT; k++) { uint16_t capture_id = step->capture_ids[k]; if (capture_id == NONE) break; unsigned index, exists; array_search_sorted_by(&predicate_capture_ids, , capture_id, &index, &exists); if (exists) { step->root_pattern_guaranteed = false; break; } } } } // Propagate fallibility. If a pattern is fallible at a given step, then it is // fallible at all of its preceding steps. bool done = self->steps.size == 0; while (!done) { done = true; for (unsigned i = self->steps.size - 1; i > 0; i--) { QueryStep *step = array_get(&self->steps, i); if (step->depth == PATTERN_DONE_MARKER) continue; // Determine if this step is definite or has definite alternatives. bool parent_pattern_guaranteed = false; for (;;) { if (step->root_pattern_guaranteed) { parent_pattern_guaranteed = true; break; } if (step->alternative_index == NONE || step->alternative_index < i) { break; } step = array_get(&self->steps, step->alternative_index); } // If not, mark its predecessor as indefinite. if (!parent_pattern_guaranteed) { QueryStep *prev_step = array_get(&self->steps, i - 1); if ( !prev_step->is_dead_end && prev_step->depth != PATTERN_DONE_MARKER && prev_step->root_pattern_guaranteed ) { prev_step->root_pattern_guaranteed = false; done = false; } } } } #ifdef DEBUG_ANALYZE_QUERY printf("Steps:\n"); for (unsigned i = 0; i < self->steps.size; i++) { QueryStep *step = array_get(&self->steps, i); if (step->depth == PATTERN_DONE_MARKER) { printf(" %u: DONE\n", i); } else { printf( " %u: {symbol: %s, field: %s, depth: %u, parent_pattern_guaranteed: %d, root_pattern_guaranteed: %d}\n", i, (step->symbol == WILDCARD_SYMBOL) ? "ANY" : ts_language_symbol_name(self->language, step->symbol), (step->field ? ts_language_field_name_for_id(self->language, step->field) : "-"), step->depth, step->parent_pattern_guaranteed, step->root_pattern_guaranteed ); } } #endif // Determine which repetition symbols in this language have the possibility // of matching non-rooted patterns in this query. These repetition symbols // prevent certain optimizations with range restrictions. 
analysis.did_abort = false; for (uint32_t i = 0; i < non_rooted_pattern_start_steps.size; i++) { uint16_t pattern_entry_index = *array_get(&non_rooted_pattern_start_steps, i); PatternEntry *pattern_entry = array_get(&self->pattern_map, pattern_entry_index); analysis_state_set__clear(&analysis.states, &analysis.state_pool); analysis_state_set__clear(&analysis.deeper_states, &analysis.state_pool); for (unsigned j = 0; j < subgraphs.size; j++) { AnalysisSubgraph *subgraph = array_get(&subgraphs, j); TSSymbolMetadata metadata = ts_language_symbol_metadata(self->language, subgraph->symbol); if (metadata.visible || metadata.named) continue; for (uint32_t k = 0; k < subgraph->start_states.size; k++) { TSStateId parse_state = *array_get(&subgraph->start_states, k); analysis_state_set__push(&analysis.states, &analysis.state_pool, &((AnalysisState) { .step_index = pattern_entry->step_index, .stack = { [0] = { .parse_state = parse_state, .parent_symbol = subgraph->symbol, .child_index = 0, .field_id = 0, .done = false, }, }, .root_symbol = subgraph->symbol, .depth = 1, })); } } #ifdef DEBUG_ANALYZE_QUERY printf("\nWalk states for rootless pattern step %u:\n", pattern_entry->step_index); #endif ts_query__perform_analysis( self, &subgraphs, &analysis ); if (analysis.finished_parent_symbols.size > 0) { array_get(&self->patterns, pattern_entry->pattern_index)->is_non_local = true; } for (unsigned k = 0; k < analysis.finished_parent_symbols.size; k++) { TSSymbol symbol = *array_get(&analysis.finished_parent_symbols, k); array_insert_sorted_by(&self->repeat_symbols_with_rootless_patterns, , symbol); } } #ifdef DEBUG_ANALYZE_QUERY if (self->repeat_symbols_with_rootless_patterns.size > 0) { printf("\nRepetition symbols with rootless patterns:\n"); printf("aborted analysis: %d\n", analysis.did_abort); for (unsigned i = 0; i < self->repeat_symbols_with_rootless_patterns.size; i++) { TSSymbol symbol = *array_get(&self->repeat_symbols_with_rootless_patterns, i); printf(" %u, %s\n", symbol, ts_language_symbol_name(self->language, symbol)); } printf("\n"); } #endif // Cleanup for (unsigned i = 0; i < subgraphs.size; i++) { array_delete(&array_get(&subgraphs, i)->start_states); array_delete(&array_get(&subgraphs, i)->nodes); } array_delete(&subgraphs); query_analysis__delete(&analysis); array_delete(&next_nodes); array_delete(&non_rooted_pattern_start_steps); array_delete(&parent_step_indices); array_delete(&predicate_capture_ids); state_predecessor_map_delete(&predecessor_map); return all_patterns_are_valid; } static void ts_query__add_negated_fields( TSQuery *self, uint16_t step_index, TSFieldId *field_ids, uint16_t field_count ) { QueryStep *step = array_get(&self->steps, step_index); // The negated field array stores a list of field lists, separated by zeros. // Try to find the start index of an existing list that matches this new list. bool failed_match = false; unsigned match_count = 0; unsigned start_i = 0; for (unsigned i = 0; i < self->negated_fields.size; i++) { TSFieldId existing_field_id = *array_get(&self->negated_fields, i); // At each zero value, terminate the match attempt. If we've exactly // matched the new field list, then reuse this index. Otherwise, // start over the matching process. if (existing_field_id == 0) { if (match_count == field_count) { step->negated_field_list_id = start_i; return; } else { start_i = i + 1; match_count = 0; failed_match = false; } } // If the existing list matches our new list so far, then advance // to the next element of the new list. 
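// Concretely, the `negated_fields` array is the concatenation of each distinct field
// list followed by a 0 terminator. Two patterns negating the field sets {3, 7} and {5}
// could be stored as [..., 3, 7, 0, 5, 0], with each step's negated_field_list_id
// holding the offset of its list's first element. (The field ids are made up for
// illustration.)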
else if ( match_count < field_count && existing_field_id == field_ids[match_count] && !failed_match ) { match_count++; } // Otherwise, this existing list has failed to match. else { match_count = 0; failed_match = true; } } step->negated_field_list_id = self->negated_fields.size; array_extend(&self->negated_fields, field_count, field_ids); array_push(&self->negated_fields, 0); } static TSQueryError ts_query__parse_string_literal( TSQuery *self, Stream *stream ) { const char *string_start = stream->input; if (stream->next != '"') return TSQueryErrorSyntax; stream_advance(stream); const char *prev_position = stream->input; bool is_escaped = false; array_clear(&self->string_buffer); for (;;) { if (is_escaped) { is_escaped = false; switch (stream->next) { case 'n': array_push(&self->string_buffer, '\n'); break; case 'r': array_push(&self->string_buffer, '\r'); break; case 't': array_push(&self->string_buffer, '\t'); break; case '0': array_push(&self->string_buffer, '\0'); break; default: array_extend(&self->string_buffer, stream->next_size, stream->input); break; } prev_position = stream->input + stream->next_size; } else { if (stream->next == '\\') { array_extend(&self->string_buffer, (uint32_t)(stream->input - prev_position), prev_position); prev_position = stream->input + 1; is_escaped = true; } else if (stream->next == '"') { array_extend(&self->string_buffer, (uint32_t)(stream->input - prev_position), prev_position); stream_advance(stream); return TSQueryErrorNone; } else if (stream->next == '\n') { stream_reset(stream, string_start); return TSQueryErrorSyntax; } } if (!stream_advance(stream)) { stream_reset(stream, string_start); return TSQueryErrorSyntax; } } } // Parse a single predicate associated with a pattern, adding it to the // query's internal `predicate_steps` array. Predicates are arbitrary // S-expressions associated with a pattern which are meant to be handled at // a higher level of abstraction, such as the Rust/JavaScript bindings. They // can contain '@'-prefixed capture names, double-quoted strings, and bare // symbols, which also represent strings. 
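// For example, the predicate `(#eq? @name "main")` is encoded as four predicate steps:
//   String("eq?"), Capture(@name), String("main"), Done
// (the leading '#' or '.' has already been consumed by the caller). This only
// illustrates the step layout; what `eq?` actually means is decided by the
// higher-level bindings.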
static TSQueryError ts_query__parse_predicate( TSQuery *self, Stream *stream ) { if (!stream_is_ident_start(stream)) return TSQueryErrorSyntax; const char *predicate_name = stream->input; stream_scan_identifier(stream); uint32_t length = (uint32_t)(stream->input - predicate_name); uint16_t id = symbol_table_insert_name( &self->predicate_values, predicate_name, length ); array_push(&self->predicate_steps, ((TSQueryPredicateStep) { .type = TSQueryPredicateStepTypeString, .value_id = id, })); stream_skip_whitespace(stream); for (;;) { if (stream->next == ')') { stream_advance(stream); stream_skip_whitespace(stream); array_push(&self->predicate_steps, ((TSQueryPredicateStep) { .type = TSQueryPredicateStepTypeDone, .value_id = 0, })); break; } // Parse an '@'-prefixed capture name else if (stream->next == '@') { stream_advance(stream); // Parse the capture name if (!stream_is_ident_start(stream)) return TSQueryErrorSyntax; const char *capture_name = stream->input; stream_scan_identifier(stream); uint32_t capture_length = (uint32_t)(stream->input - capture_name); // Add the capture id to the first step of the pattern int capture_id = symbol_table_id_for_name( &self->captures, capture_name, capture_length ); if (capture_id == -1) { stream_reset(stream, capture_name); return TSQueryErrorCapture; } array_push(&self->predicate_steps, ((TSQueryPredicateStep) { .type = TSQueryPredicateStepTypeCapture, .value_id = capture_id, })); } // Parse a string literal else if (stream->next == '"') { TSQueryError e = ts_query__parse_string_literal(self, stream); if (e) return e; uint16_t query_id = symbol_table_insert_name( &self->predicate_values, self->string_buffer.contents, self->string_buffer.size ); array_push(&self->predicate_steps, ((TSQueryPredicateStep) { .type = TSQueryPredicateStepTypeString, .value_id = query_id, })); } // Parse a bare symbol else if (stream_is_ident_start(stream)) { const char *symbol_start = stream->input; stream_scan_identifier(stream); uint32_t symbol_length = (uint32_t)(stream->input - symbol_start); uint16_t query_id = symbol_table_insert_name( &self->predicate_values, symbol_start, symbol_length ); array_push(&self->predicate_steps, ((TSQueryPredicateStep) { .type = TSQueryPredicateStepTypeString, .value_id = query_id, })); } else { return TSQueryErrorSyntax; } stream_skip_whitespace(stream); } return 0; } // Read one S-expression pattern from the stream, and incorporate it into // the query's internal state machine representation. For nested patterns, // this function calls itself recursively. // // The caller is responsible for passing in a dedicated CaptureQuantifiers. // These should not be shared between different calls to ts_query__parse_pattern! static TSQueryError ts_query__parse_pattern( TSQuery *self, Stream *stream, uint32_t depth, bool is_immediate, CaptureQuantifiers *capture_quantifiers ) { if (stream->next == 0) return TSQueryErrorSyntax; if (stream->next == ')' || stream->next == ']') return PARENT_DONE; const uint32_t starting_step_index = self->steps.size; // Store the byte offset of each step in the query. if ( self->step_offsets.size == 0 || array_back(&self->step_offsets)->step_index != starting_step_index ) { array_push(&self->step_offsets, ((StepOffset) { .step_index = starting_step_index, .byte_offset = stream_offset(stream), })); } // An open bracket is the start of an alternation. if (stream->next == '[') { stream_advance(stream); stream_skip_whitespace(stream); // Parse each branch, and add a placeholder step in between the branches. 
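// For a query like `[ (true) (false) ] @literal`, each branch is parsed as its own run
// of steps. A placeholder step is appended after every branch; for each branch except
// the last, that placeholder becomes a dead-end step whose alternative_index jumps past
// the whole alternation, while the branch's first step gets an alternative_index
// pointing at the start of the next branch. (The node names are only illustrative.)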
Array(uint32_t) branch_step_indices = array_new(); CaptureQuantifiers branch_capture_quantifiers = capture_quantifiers_new(); for (;;) { uint32_t start_index = self->steps.size; TSQueryError e = ts_query__parse_pattern( self, stream, depth, is_immediate, &branch_capture_quantifiers ); if (e == PARENT_DONE) { if (stream->next == ']' && branch_step_indices.size > 0) { stream_advance(stream); break; } e = TSQueryErrorSyntax; } if (e) { capture_quantifiers_delete(&branch_capture_quantifiers); array_delete(&branch_step_indices); return e; } if (start_index == starting_step_index) { capture_quantifiers_replace(capture_quantifiers, &branch_capture_quantifiers); } else { capture_quantifiers_join_all(capture_quantifiers, &branch_capture_quantifiers); } array_push(&branch_step_indices, start_index); array_push(&self->steps, query_step__new(0, depth, false)); capture_quantifiers_clear(&branch_capture_quantifiers); } (void)array_pop(&self->steps); // For all of the branches except for the last one, add the subsequent branch as an // alternative, and link the end of the branch to the current end of the steps. for (unsigned i = 0; i < branch_step_indices.size - 1; i++) { uint32_t step_index = *array_get(&branch_step_indices, i); uint32_t next_step_index = *array_get(&branch_step_indices, i + 1); QueryStep *start_step = array_get(&self->steps, step_index); QueryStep *end_step = array_get(&self->steps, next_step_index - 1); start_step->alternative_index = next_step_index; end_step->alternative_index = self->steps.size; end_step->is_dead_end = true; } capture_quantifiers_delete(&branch_capture_quantifiers); array_delete(&branch_step_indices); } // An open parenthesis can be the start of three possible constructs: // * A grouped sequence // * A predicate // * A named node else if (stream->next == '(') { stream_advance(stream); stream_skip_whitespace(stream); // If this parenthesis is followed by a node, then it represents a grouped sequence. if (stream->next == '(' || stream->next == '"' || stream->next == '[') { bool child_is_immediate = is_immediate; CaptureQuantifiers child_capture_quantifiers = capture_quantifiers_new(); for (;;) { if (stream->next == '.') { child_is_immediate = true; stream_advance(stream); stream_skip_whitespace(stream); } TSQueryError e = ts_query__parse_pattern( self, stream, depth, child_is_immediate, &child_capture_quantifiers ); if (e == PARENT_DONE) { if (stream->next == ')') { stream_advance(stream); break; } e = TSQueryErrorSyntax; } if (e) { capture_quantifiers_delete(&child_capture_quantifiers); return e; } capture_quantifiers_add_all(capture_quantifiers, &child_capture_quantifiers); capture_quantifiers_clear(&child_capture_quantifiers); child_is_immediate = false; } capture_quantifiers_delete(&child_capture_quantifiers); } // A dot/pound character indicates the start of a predicate. else if (stream->next == '.' || stream->next == '#') { stream_advance(stream); return ts_query__parse_predicate(self, stream); } // Otherwise, this parenthesis is the start of a named node. 
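// For example, `(binary_expression left: (identifier) @lhs)` is handled by this branch:
// `binary_expression` is resolved to a grammar symbol, and its children (including the
// field-prefixed `left:` sub-pattern) are parsed by the recursive calls further down.
// (The node and field names are illustrative.)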
else { TSSymbol symbol; bool is_missing = false; const char *node_name = stream->input; // Parse a normal node name if (stream_is_ident_start(stream)) { stream_scan_identifier(stream); uint32_t length = (uint32_t)(stream->input - node_name); // Parse the wildcard symbol if (length == 1 && node_name[0] == '_') { symbol = WILDCARD_SYMBOL; } else if (!strncmp(node_name, "MISSING", length)) { is_missing = true; stream_skip_whitespace(stream); if (stream_is_ident_start(stream)) { const char *missing_node_name = stream->input; stream_scan_identifier(stream); uint32_t missing_node_length = (uint32_t)(stream->input - missing_node_name); symbol = ts_language_symbol_for_name( self->language, missing_node_name, missing_node_length, true ); if (!symbol) { stream_reset(stream, missing_node_name); return TSQueryErrorNodeType; } } else if (stream->next == '"') { const char *string_start = stream->input; TSQueryError e = ts_query__parse_string_literal(self, stream); if (e) return e; symbol = ts_language_symbol_for_name( self->language, self->string_buffer.contents, self->string_buffer.size, false ); if (!symbol) { stream_reset(stream, string_start + 1); return TSQueryErrorNodeType; } } else if (stream->next == ')') { symbol = WILDCARD_SYMBOL; } else { stream_reset(stream, stream->input); return TSQueryErrorSyntax; } } else { symbol = ts_language_symbol_for_name( self->language, node_name, length, true ); if (!symbol) { stream_reset(stream, node_name); return TSQueryErrorNodeType; } } } else { return TSQueryErrorSyntax; } // Add a step for the node. array_push(&self->steps, query_step__new(symbol, depth, is_immediate)); QueryStep *step = array_back(&self->steps); if (ts_language_symbol_metadata(self->language, symbol).supertype) { step->supertype_symbol = step->symbol; step->symbol = WILDCARD_SYMBOL; } if (is_missing) { step->is_missing = true; } if (symbol == WILDCARD_SYMBOL) { step->is_named = true; } stream_skip_whitespace(stream); if (stream->next == '/') { if (!step->supertype_symbol) { stream_reset(stream, node_name - 1); // reset to the start of the node return TSQueryErrorStructure; } stream_advance(stream); if (!stream_is_ident_start(stream)) { return TSQueryErrorSyntax; } const char *subtype_node_name = stream->input; stream_scan_identifier(stream); uint32_t length = (uint32_t)(stream->input - subtype_node_name); step->symbol = ts_language_symbol_for_name( self->language, subtype_node_name, length, true ); if (!step->symbol) { stream_reset(stream, subtype_node_name); return TSQueryErrorNodeType; } // Get all the possible subtypes for the given supertype, // and check if the given subtype is valid. if (self->language->abi_version >= LANGUAGE_VERSION_WITH_RESERVED_WORDS) { uint32_t subtype_length; const TSSymbol *subtypes = ts_language_subtypes( self->language, step->supertype_symbol, &subtype_length ); bool subtype_is_valid = false; for (uint32_t i = 0; i < subtype_length; i++) { if (subtypes[i] == step->symbol) { subtype_is_valid = true; break; } } // This subtype is not valid for the given supertype. 
if (!subtype_is_valid) { stream_reset(stream, node_name - 1); // reset to the start of the node return TSQueryErrorStructure; } } stream_skip_whitespace(stream); } // Parse the child patterns bool child_is_immediate = false; uint16_t last_child_step_index = 0; uint16_t negated_field_count = 0; TSFieldId negated_field_ids[MAX_NEGATED_FIELD_COUNT]; CaptureQuantifiers child_capture_quantifiers = capture_quantifiers_new(); for (;;) { // Parse a negated field assertion if (stream->next == '!') { stream_advance(stream); stream_skip_whitespace(stream); if (!stream_is_ident_start(stream)) { capture_quantifiers_delete(&child_capture_quantifiers); return TSQueryErrorSyntax; } const char *field_name = stream->input; stream_scan_identifier(stream); uint32_t length = (uint32_t)(stream->input - field_name); stream_skip_whitespace(stream); TSFieldId field_id = ts_language_field_id_for_name( self->language, field_name, length ); if (!field_id) { stream->input = field_name; capture_quantifiers_delete(&child_capture_quantifiers); return TSQueryErrorField; } // Keep the field ids sorted. if (negated_field_count < MAX_NEGATED_FIELD_COUNT) { negated_field_ids[negated_field_count] = field_id; negated_field_count++; } continue; } // Parse a sibling anchor if (stream->next == '.') { child_is_immediate = true; stream_advance(stream); stream_skip_whitespace(stream); } uint16_t step_index = self->steps.size; TSQueryError e = ts_query__parse_pattern( self, stream, depth + 1, child_is_immediate, &child_capture_quantifiers ); // In the event we only parsed a predicate, meaning no new steps were added, // then subtract one so we're not indexing past the end of the array if (step_index == self->steps.size) step_index--; if (e == PARENT_DONE) { if (stream->next == ')') { if (child_is_immediate) { if (last_child_step_index == 0) { capture_quantifiers_delete(&child_capture_quantifiers); return TSQueryErrorSyntax; } // Mark this step *and* its alternatives as the last child of the parent. 
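// For example, in `(block (statement) .)` the trailing anchor requires the (statement)
// step, and any alternatives it may have, to match the last child of the block; that
// requirement is recorded by the is_last_child flag set below. (The node names are
// illustrative.)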
QueryStep *last_child_step = array_get(&self->steps, last_child_step_index); last_child_step->is_last_child = true; if ( last_child_step->alternative_index != NONE && last_child_step->alternative_index < self->steps.size ) { QueryStep *alternative_step = array_get(&self->steps, last_child_step->alternative_index); alternative_step->is_last_child = true; while ( alternative_step->alternative_index != NONE && alternative_step->alternative_index < self->steps.size ) { alternative_step = array_get(&self->steps, alternative_step->alternative_index); alternative_step->is_last_child = true; } } } if (negated_field_count) { ts_query__add_negated_fields( self, starting_step_index, negated_field_ids, negated_field_count ); } stream_advance(stream); break; } e = TSQueryErrorSyntax; } if (e) { capture_quantifiers_delete(&child_capture_quantifiers); return e; } capture_quantifiers_add_all(capture_quantifiers, &child_capture_quantifiers); last_child_step_index = step_index; child_is_immediate = false; capture_quantifiers_clear(&child_capture_quantifiers); } capture_quantifiers_delete(&child_capture_quantifiers); } } // Parse a wildcard pattern else if (stream->next == '_') { stream_advance(stream); stream_skip_whitespace(stream); // Add a step that matches any kind of node array_push(&self->steps, query_step__new(WILDCARD_SYMBOL, depth, is_immediate)); } // Parse a double-quoted anonymous leaf node expression else if (stream->next == '"') { const char *string_start = stream->input; TSQueryError e = ts_query__parse_string_literal(self, stream); if (e) return e; // Add a step for the node TSSymbol symbol = ts_language_symbol_for_name( self->language, self->string_buffer.contents, self->string_buffer.size, false ); if (!symbol) { stream_reset(stream, string_start + 1); return TSQueryErrorNodeType; } array_push(&self->steps, query_step__new(symbol, depth, is_immediate)); } // Parse a field-prefixed pattern else if (stream_is_ident_start(stream)) { // Parse the field name const char *field_name = stream->input; stream_scan_identifier(stream); uint32_t length = (uint32_t)(stream->input - field_name); stream_skip_whitespace(stream); if (stream->next != ':') { stream_reset(stream, field_name); return TSQueryErrorSyntax; } stream_advance(stream); stream_skip_whitespace(stream); // Parse the pattern CaptureQuantifiers field_capture_quantifiers = capture_quantifiers_new(); TSQueryError e = ts_query__parse_pattern( self, stream, depth, is_immediate, &field_capture_quantifiers ); if (e) { capture_quantifiers_delete(&field_capture_quantifiers); if (e == PARENT_DONE) e = TSQueryErrorSyntax; return e; } // Add the field name to the first step of the pattern TSFieldId field_id = ts_language_field_id_for_name( self->language, field_name, length ); if (!field_id) { stream->input = field_name; return TSQueryErrorField; } uint32_t step_index = starting_step_index; QueryStep *step = array_get(&self->steps, step_index); for (;;) { step->field = field_id; if ( step->alternative_index != NONE && step->alternative_index > step_index && step->alternative_index < self->steps.size ) { step_index = step->alternative_index; step = array_get(&self->steps, step_index); } else { break; } } capture_quantifiers_add_all(capture_quantifiers, &field_capture_quantifiers); capture_quantifiers_delete(&field_capture_quantifiers); } else { return TSQueryErrorSyntax; } stream_skip_whitespace(stream); // Parse suffixes modifiers for this pattern TSQuantifier quantifier = TSQuantifierOne; for (;;) { // Parse the one-or-more operator. 
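// (Illustrative note, not part of the original source: a `+` suffix, as in
// `(comment)+`, is implemented by appending a pass-through step whose
// alternative_index loops back to the start of the repeated pattern.)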
if (stream->next == '+') { quantifier = quantifier_join(TSQuantifierOneOrMore, quantifier); stream_advance(stream); stream_skip_whitespace(stream); QueryStep repeat_step = query_step__new(WILDCARD_SYMBOL, depth, false); repeat_step.alternative_index = starting_step_index; repeat_step.is_pass_through = true; repeat_step.alternative_is_immediate = true; array_push(&self->steps, repeat_step); } // Parse the zero-or-more repetition operator. else if (stream->next == '*') { quantifier = quantifier_join(TSQuantifierZeroOrMore, quantifier); stream_advance(stream); stream_skip_whitespace(stream); QueryStep repeat_step = query_step__new(WILDCARD_SYMBOL, depth, false); repeat_step.alternative_index = starting_step_index; repeat_step.is_pass_through = true; repeat_step.alternative_is_immediate = true; array_push(&self->steps, repeat_step); // Stop when `step->alternative_index` is `NONE` or it points to // `repeat_step` or beyond. Note that having just been pushed, // `repeat_step` occupies slot `self->steps.size - 1`. QueryStep *step = array_get(&self->steps, starting_step_index); while (step->alternative_index != NONE && step->alternative_index < self->steps.size - 1) { step = array_get(&self->steps, step->alternative_index); } step->alternative_index = self->steps.size; } // Parse the optional operator. else if (stream->next == '?') { quantifier = quantifier_join(TSQuantifierZeroOrOne, quantifier); stream_advance(stream); stream_skip_whitespace(stream); QueryStep *step = array_get(&self->steps, starting_step_index); while (step->alternative_index != NONE && step->alternative_index < self->steps.size) { step = array_get(&self->steps, step->alternative_index); } step->alternative_index = self->steps.size; } // Parse an '@'-prefixed capture pattern else if (stream->next == '@') { stream_advance(stream); if (!stream_is_ident_start(stream)) return TSQueryErrorSyntax; const char *capture_name = stream->input; stream_scan_identifier(stream); uint32_t length = (uint32_t)(stream->input - capture_name); stream_skip_whitespace(stream); // Add the capture id to the first step of the pattern uint16_t capture_id = symbol_table_insert_name( &self->captures, capture_name, length ); // Add the capture quantifier capture_quantifiers_add_for_id(capture_quantifiers, capture_id, TSQuantifierOne); uint32_t step_index = starting_step_index; for (;;) { QueryStep *step = array_get(&self->steps, step_index); query_step__add_capture(step, capture_id); if ( step->alternative_index != NONE && step->alternative_index > step_index && step->alternative_index < self->steps.size ) { step_index = step->alternative_index; } else { break; } } } // No more suffix modifiers else { break; } } capture_quantifiers_mul(capture_quantifiers, quantifier); return 0; } TSQuery *ts_query_new( const TSLanguage *language, const char *source, uint32_t source_len, uint32_t *error_offset, TSQueryError *error_type ) { if ( !language || language->abi_version > TREE_SITTER_LANGUAGE_VERSION || language->abi_version < TREE_SITTER_MIN_COMPATIBLE_LANGUAGE_VERSION ) { *error_type = TSQueryErrorLanguage; return NULL; } TSQuery *self = ts_malloc(sizeof(TSQuery)); *self = (TSQuery) { .steps = array_new(), .pattern_map = array_new(), .captures = symbol_table_new(), .capture_quantifiers = array_new(), .predicate_values = symbol_table_new(), .predicate_steps = array_new(), .patterns = array_new(), .step_offsets = array_new(), .string_buffer = array_new(), .negated_fields = array_new(), .repeat_symbols_with_rootless_patterns = array_new(), 
.wildcard_root_pattern_count = 0, .language = ts_language_copy(language), }; array_push(&self->negated_fields, 0); // Parse all of the S-expressions in the given string. Stream stream = stream_new(source, source_len); stream_skip_whitespace(&stream); while (stream.input < stream.end) { uint32_t pattern_index = self->patterns.size; uint32_t start_step_index = self->steps.size; uint32_t start_predicate_step_index = self->predicate_steps.size; array_push(&self->patterns, ((QueryPattern) { .steps = (Slice) {.offset = start_step_index}, .predicate_steps = (Slice) {.offset = start_predicate_step_index}, .start_byte = stream_offset(&stream), .is_non_local = false, })); CaptureQuantifiers capture_quantifiers = capture_quantifiers_new(); *error_type = ts_query__parse_pattern(self, &stream, 0, false, &capture_quantifiers); array_push(&self->steps, query_step__new(0, PATTERN_DONE_MARKER, false)); QueryPattern *pattern = array_back(&self->patterns); pattern->steps.length = self->steps.size - start_step_index; pattern->predicate_steps.length = self->predicate_steps.size - start_predicate_step_index; pattern->end_byte = stream_offset(&stream); // If any pattern could not be parsed, then report the error information // and terminate. if (*error_type) { if (*error_type == PARENT_DONE) *error_type = TSQueryErrorSyntax; *error_offset = stream_offset(&stream); capture_quantifiers_delete(&capture_quantifiers); ts_query_delete(self); return NULL; } // Maintain a list of capture quantifiers for each pattern array_push(&self->capture_quantifiers, capture_quantifiers); // Maintain a map that can look up patterns for a given root symbol. uint16_t wildcard_root_alternative_index = NONE; for (;;) { QueryStep *step = array_get(&self->steps, start_step_index); // If a pattern has a wildcard at its root, but it has a non-wildcard child, // then optimize the matching process by skipping matching the wildcard. // Later, during the matching process, the query cursor will check that // there is a parent node, and capture it if necessary. if (step->symbol == WILDCARD_SYMBOL && step->depth == 0 && !step->field) { QueryStep *second_step = array_get(&self->steps, start_step_index + 1); if (second_step->symbol != WILDCARD_SYMBOL && second_step->depth == 1 && !second_step->is_immediate) { wildcard_root_alternative_index = step->alternative_index; start_step_index += 1; step = second_step; } } // Determine whether the pattern has a single root node. This affects // decisions about whether or not to start matching the pattern when // a query cursor has a range restriction or when immediately within an // error node. uint32_t start_depth = step->depth; bool is_rooted = start_depth == 0; for (uint32_t step_index = start_step_index + 1; step_index < self->steps.size; step_index++) { QueryStep *child_step = array_get(&self->steps, step_index); if (child_step->is_dead_end) break; if (child_step->depth == start_depth) { is_rooted = false; break; } } ts_query__pattern_map_insert(self, step->symbol, (PatternEntry) { .step_index = start_step_index, .pattern_index = pattern_index, .is_rooted = is_rooted }); if (step->symbol == WILDCARD_SYMBOL) { self->wildcard_root_pattern_count++; } // If there are alternatives or options at the root of the pattern, // then add multiple entries to the pattern map. 
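// (Illustrative note, not part of the original source: a root-level
// alternation such as `[(identifier) (string)] @name` yields one pattern-map
// entry per alternative, so a match can begin at either root symbol; the node
// names are hypothetical.)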
if (step->alternative_index != NONE) { start_step_index = step->alternative_index; } else if (wildcard_root_alternative_index != NONE) { start_step_index = wildcard_root_alternative_index; wildcard_root_alternative_index = NONE; } else { break; } } } if (!ts_query__analyze_patterns(self, error_offset)) { *error_type = TSQueryErrorStructure; ts_query_delete(self); return NULL; } array_delete(&self->string_buffer); return self; } void ts_query_delete(TSQuery *self) { if (self) { array_delete(&self->steps); array_delete(&self->pattern_map); array_delete(&self->predicate_steps); array_delete(&self->patterns); array_delete(&self->step_offsets); array_delete(&self->string_buffer); array_delete(&self->negated_fields); array_delete(&self->repeat_symbols_with_rootless_patterns); ts_language_delete(self->language); symbol_table_delete(&self->captures); symbol_table_delete(&self->predicate_values); for (uint32_t index = 0; index < self->capture_quantifiers.size; index++) { CaptureQuantifiers *capture_quantifiers = array_get(&self->capture_quantifiers, index); capture_quantifiers_delete(capture_quantifiers); } array_delete(&self->capture_quantifiers); ts_free(self); } } uint32_t ts_query_pattern_count(const TSQuery *self) { return self->patterns.size; } uint32_t ts_query_capture_count(const TSQuery *self) { return self->captures.slices.size; } uint32_t ts_query_string_count(const TSQuery *self) { return self->predicate_values.slices.size; } const char *ts_query_capture_name_for_id( const TSQuery *self, uint32_t index, uint32_t *length ) { return symbol_table_name_for_id(&self->captures, index, length); } TSQuantifier ts_query_capture_quantifier_for_id( const TSQuery *self, uint32_t pattern_index, uint32_t capture_index ) { CaptureQuantifiers *capture_quantifiers = array_get(&self->capture_quantifiers, pattern_index); return capture_quantifier_for_id(capture_quantifiers, capture_index); } const char *ts_query_string_value_for_id( const TSQuery *self, uint32_t index, uint32_t *length ) { return symbol_table_name_for_id(&self->predicate_values, index, length); } const TSQueryPredicateStep *ts_query_predicates_for_pattern( const TSQuery *self, uint32_t pattern_index, uint32_t *step_count ) { Slice slice = array_get(&self->patterns, pattern_index)->predicate_steps; *step_count = slice.length; if (slice.length == 0) return NULL; return array_get(&self->predicate_steps, slice.offset); } uint32_t ts_query_start_byte_for_pattern( const TSQuery *self, uint32_t pattern_index ) { return array_get(&self->patterns, pattern_index)->start_byte; } uint32_t ts_query_end_byte_for_pattern( const TSQuery *self, uint32_t pattern_index ) { return array_get(&self->patterns, pattern_index)->end_byte; } bool ts_query_is_pattern_rooted( const TSQuery *self, uint32_t pattern_index ) { for (unsigned i = 0; i < self->pattern_map.size; i++) { PatternEntry *entry = array_get(&self->pattern_map, i); if (entry->pattern_index == pattern_index) { if (!entry->is_rooted) return false; } } return true; } bool ts_query_is_pattern_non_local( const TSQuery *self, uint32_t pattern_index ) { if (pattern_index < self->patterns.size) { return array_get(&self->patterns, pattern_index)->is_non_local; } else { return false; } } bool ts_query_is_pattern_guaranteed_at_step( const TSQuery *self, uint32_t byte_offset ) { uint32_t step_index = UINT32_MAX; for (unsigned i = 0; i < self->step_offsets.size; i++) { StepOffset *step_offset = array_get(&self->step_offsets, i); if (step_offset->byte_offset > byte_offset) break; step_index = 
step_offset->step_index; } if (step_index < self->steps.size) { return array_get(&self->steps, step_index)->root_pattern_guaranteed; } else { return false; } } bool ts_query__step_is_fallible( const TSQuery *self, uint16_t step_index ) { ts_assert((uint32_t)step_index + 1 < self->steps.size); QueryStep *step = array_get(&self->steps, step_index); QueryStep *next_step = array_get(&self->steps, step_index + 1); return ( next_step->depth != PATTERN_DONE_MARKER && next_step->depth > step->depth && (!next_step->parent_pattern_guaranteed || step->symbol == WILDCARD_SYMBOL) ); } void ts_query_disable_capture( TSQuery *self, const char *name, uint32_t length ) { // Remove capture information for any pattern step that previously // captured with the given name. int id = symbol_table_id_for_name(&self->captures, name, length); if (id != -1) { for (unsigned i = 0; i < self->steps.size; i++) { QueryStep *step = array_get(&self->steps, i); query_step__remove_capture(step, id); } } } void ts_query_disable_pattern( TSQuery *self, uint32_t pattern_index ) { // Remove the given pattern from the pattern map. Its steps will still // be in the `steps` array, but they will never be read. for (unsigned i = 0; i < self->pattern_map.size; i++) { PatternEntry *pattern = array_get(&self->pattern_map, i); if (pattern->pattern_index == pattern_index) { array_erase(&self->pattern_map, i); i--; } } } /*************** * QueryCursor ***************/ TSQueryCursor *ts_query_cursor_new(void) { TSQueryCursor *self = ts_malloc(sizeof(TSQueryCursor)); *self = (TSQueryCursor) { .did_exceed_match_limit = false, .ascending = false, .halted = false, .states = array_new(), .finished_states = array_new(), .capture_list_pool = capture_list_pool_new(), .start_byte = 0, .end_byte = UINT32_MAX, .start_point = {0, 0}, .end_point = POINT_MAX, .max_start_depth = UINT32_MAX, .timeout_duration = 0, .end_clock = clock_null(), .operation_count = 0, }; array_reserve(&self->states, 8); array_reserve(&self->finished_states, 8); return self; } void ts_query_cursor_delete(TSQueryCursor *self) { array_delete(&self->states); array_delete(&self->finished_states); ts_tree_cursor_delete(&self->cursor); capture_list_pool_delete(&self->capture_list_pool); ts_free(self); } bool ts_query_cursor_did_exceed_match_limit(const TSQueryCursor *self) { return self->did_exceed_match_limit; } uint32_t ts_query_cursor_match_limit(const TSQueryCursor *self) { return self->capture_list_pool.max_capture_list_count; } void ts_query_cursor_set_match_limit(TSQueryCursor *self, uint32_t limit) { self->capture_list_pool.max_capture_list_count = limit; } uint64_t ts_query_cursor_timeout_micros(const TSQueryCursor *self) { return duration_to_micros(self->timeout_duration); } void ts_query_cursor_set_timeout_micros(TSQueryCursor *self, uint64_t timeout_micros) { self->timeout_duration = duration_from_micros(timeout_micros); } #ifdef DEBUG_EXECUTE_QUERY #define LOG(...) fprintf(stderr, __VA_ARGS__) #else #define LOG(...) 
#endif void ts_query_cursor_exec( TSQueryCursor *self, const TSQuery *query, TSNode node ) { if (query) { LOG("query steps:\n"); for (unsigned i = 0; i < query->steps.size; i++) { QueryStep *step = array_get(&query->steps, i); LOG(" %u: {", i); if (step->depth == PATTERN_DONE_MARKER) { LOG("DONE"); } else if (step->is_dead_end) { LOG("dead_end"); } else if (step->is_pass_through) { LOG("pass_through"); } else if (step->symbol != WILDCARD_SYMBOL) { LOG("symbol: %s", query->language->symbol_names[step->symbol]); } else { LOG("symbol: *"); } if (step->field) { LOG(", field: %s", query->language->field_names[step->field]); } if (step->alternative_index != NONE) { LOG(", alternative: %u", step->alternative_index); } LOG("},\n"); } } array_clear(&self->states); array_clear(&self->finished_states); ts_tree_cursor_reset(&self->cursor, node); capture_list_pool_reset(&self->capture_list_pool); self->on_visible_node = true; self->next_state_id = 0; self->depth = 0; self->ascending = false; self->halted = false; self->query = query; self->did_exceed_match_limit = false; self->operation_count = 0; if (self->timeout_duration) { self->end_clock = clock_after(clock_now(), self->timeout_duration); } else { self->end_clock = clock_null(); } self->query_options = NULL; self->query_state = (TSQueryCursorState) {0}; } void ts_query_cursor_exec_with_options( TSQueryCursor *self, const TSQuery *query, TSNode node, const TSQueryCursorOptions *query_options ) { ts_query_cursor_exec(self, query, node); if (query_options) { self->query_options = query_options; self->query_state = (TSQueryCursorState) { .payload = query_options->payload }; } } bool ts_query_cursor_set_byte_range( TSQueryCursor *self, uint32_t start_byte, uint32_t end_byte ) { if (end_byte == 0) { end_byte = UINT32_MAX; } if (start_byte > end_byte) { return false; } self->start_byte = start_byte; self->end_byte = end_byte; return true; } bool ts_query_cursor_set_point_range( TSQueryCursor *self, TSPoint start_point, TSPoint end_point ) { if (end_point.row == 0 && end_point.column == 0) { end_point = POINT_MAX; } if (point_gt(start_point, end_point)) { return false; } self->start_point = start_point; self->end_point = end_point; return true; } // Search through all of the in-progress states, and find the captured // node that occurs earliest in the document. 
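// (Illustrative note, not part of the original source: this helper is used
// both to order captures for ts_query_cursor_next_capture and, when the
// capture list pool is exhausted, to pick which in-progress state to
// abandon.)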
static bool ts_query_cursor__first_in_progress_capture( TSQueryCursor *self, uint32_t *state_index, uint32_t *byte_offset, uint32_t *pattern_index, bool *is_definite ) { bool result = false; *state_index = UINT32_MAX; *byte_offset = UINT32_MAX; *pattern_index = UINT32_MAX; for (unsigned i = 0; i < self->states.size; i++) { QueryState *state = array_get(&self->states, i); if (state->dead) continue; const CaptureList *captures = capture_list_pool_get( &self->capture_list_pool, state->capture_list_id ); if (state->consumed_capture_count >= captures->size) { continue; } TSNode node = array_get(captures, state->consumed_capture_count)->node; if ( ts_node_end_byte(node) <= self->start_byte || point_lte(ts_node_end_point(node), self->start_point) ) { state->consumed_capture_count++; i--; continue; } uint32_t node_start_byte = ts_node_start_byte(node); if ( !result || node_start_byte < *byte_offset || (node_start_byte == *byte_offset && state->pattern_index < *pattern_index) ) { QueryStep *step = array_get(&self->query->steps, state->step_index); if (is_definite) { // We're being a bit conservative here by asserting that the following step // is not immediate, because this capture might end up being discarded if the // following symbol in the tree isn't the required symbol for this step. *is_definite = step->root_pattern_guaranteed && !step->is_immediate; } else if (step->root_pattern_guaranteed) { continue; } result = true; *state_index = i; *byte_offset = node_start_byte; *pattern_index = state->pattern_index; } } return result; } // Determine which node is first in a depth-first traversal int ts_query_cursor__compare_nodes(TSNode left, TSNode right) { if (left.id != right.id) { uint32_t left_start = ts_node_start_byte(left); uint32_t right_start = ts_node_start_byte(right); if (left_start < right_start) return -1; if (left_start > right_start) return 1; uint32_t left_node_count = ts_node_end_byte(left); uint32_t right_node_count = ts_node_end_byte(right); if (left_node_count > right_node_count) return -1; if (left_node_count < right_node_count) return 1; } return 0; } // Determine if either state contains a superset of the other state's captures. 
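// (Illustrative note, not part of the original source: the result feeds the
// longest-match criteria applied after each node, which can drop a state
// whose captures are a subset of another state's captures for the same
// pattern and start depth.)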
void ts_query_cursor__compare_captures( TSQueryCursor *self, QueryState *left_state, QueryState *right_state, bool *left_contains_right, bool *right_contains_left ) { const CaptureList *left_captures = capture_list_pool_get( &self->capture_list_pool, left_state->capture_list_id ); const CaptureList *right_captures = capture_list_pool_get( &self->capture_list_pool, right_state->capture_list_id ); *left_contains_right = true; *right_contains_left = true; unsigned i = 0, j = 0; for (;;) { if (i < left_captures->size) { if (j < right_captures->size) { TSQueryCapture *left = array_get(left_captures, i); TSQueryCapture *right = array_get(right_captures, j); if (left->node.id == right->node.id && left->index == right->index) { i++; j++; } else { switch (ts_query_cursor__compare_nodes(left->node, right->node)) { case -1: *right_contains_left = false; i++; break; case 1: *left_contains_right = false; j++; break; default: *right_contains_left = false; *left_contains_right = false; i++; j++; break; } } } else { *right_contains_left = false; break; } } else { if (j < right_captures->size) { *left_contains_right = false; } break; } } } static void ts_query_cursor__add_state( TSQueryCursor *self, const PatternEntry *pattern ) { QueryStep *step = array_get(&self->query->steps, pattern->step_index); uint32_t start_depth = self->depth - step->depth; // Keep the states array in ascending order of start_depth and pattern_index, // so that it can be processed more efficiently elsewhere. Usually, there is // no work to do here because of two facts: // * States with lower start_depth are naturally added first due to the // order in which nodes are visited. // * Earlier patterns are naturally added first because of the ordering of the // pattern_map data structure that's used to initiate matches. // // This loop is only needed in cases where two conditions hold: // * A pattern consists of more than one sibling node, so that its states // remain in progress after exiting the node that started the match. // * The first node in the pattern matches against multiple nodes at the // same depth. // // An example of this is the pattern '((comment)* (function))'. If multiple // `comment` nodes appear in a row, then we may initiate a new state for this // pattern while another state for the same pattern is already in progress. // If there are multiple patterns like this in a query, then this loop will // need to execute in order to keep the states ordered by pattern_index. uint32_t index = self->states.size; while (index > 0) { QueryState *prev_state = array_get(&self->states, index - 1); if (prev_state->start_depth < start_depth) break; if (prev_state->start_depth == start_depth) { // Avoid inserting an unnecessary duplicate state, which would be // immediately pruned by the longest-match criteria. if ( prev_state->pattern_index == pattern->pattern_index && prev_state->step_index == pattern->step_index ) return; if (prev_state->pattern_index <= pattern->pattern_index) break; } index--; } LOG( " start state. pattern:%u, step:%u\n", pattern->pattern_index, pattern->step_index ); array_insert(&self->states, index, ((QueryState) { .id = UINT32_MAX, .capture_list_id = NONE, .step_index = pattern->step_index, .pattern_index = pattern->pattern_index, .start_depth = start_depth, .consumed_capture_count = 0, .seeking_immediate_match = true, .has_in_progress_alternatives = false, .needs_parent = step->depth == 1, .dead = false, })); } // Acquire a capture list for this state. 
If there are no capture lists left in the // pool, this will steal the capture list from another existing state, and mark that // other state as 'dead'. static CaptureList *ts_query_cursor__prepare_to_capture( TSQueryCursor *self, QueryState *state, unsigned state_index_to_preserve ) { if (state->capture_list_id == NONE) { state->capture_list_id = capture_list_pool_acquire(&self->capture_list_pool); // If there are no capture lists left in the pool, then terminate whichever // state has captured the earliest node in the document, and steal its // capture list. if (state->capture_list_id == NONE) { self->did_exceed_match_limit = true; uint32_t state_index, byte_offset, pattern_index; if ( ts_query_cursor__first_in_progress_capture( self, &state_index, &byte_offset, &pattern_index, NULL ) && state_index != state_index_to_preserve ) { LOG( " abandon state. index:%u, pattern:%u, offset:%u.\n", state_index, pattern_index, byte_offset ); QueryState *other_state = array_get(&self->states, state_index); state->capture_list_id = other_state->capture_list_id; other_state->capture_list_id = NONE; other_state->dead = true; CaptureList *list = capture_list_pool_get_mut( &self->capture_list_pool, state->capture_list_id ); array_clear(list); return list; } else { LOG(" ran out of capture lists"); return NULL; } } } return capture_list_pool_get_mut(&self->capture_list_pool, state->capture_list_id); } static void ts_query_cursor__capture( TSQueryCursor *self, QueryState *state, QueryStep *step, TSNode node ) { if (state->dead) return; CaptureList *capture_list = ts_query_cursor__prepare_to_capture(self, state, UINT32_MAX); if (!capture_list) { state->dead = true; return; } for (unsigned j = 0; j < MAX_STEP_CAPTURE_COUNT; j++) { uint16_t capture_id = step->capture_ids[j]; if (step->capture_ids[j] == NONE) break; array_push(capture_list, ((TSQueryCapture) { node, capture_id })); LOG( " capture node. type:%s, pattern:%u, capture_id:%u, capture_count:%u\n", ts_node_type(node), state->pattern_index, capture_id, capture_list->size ); } } // Duplicate the given state and insert the newly-created state immediately after // the given state in the `states` array. Ensures that the given state reference is // still valid, even if the states array is reallocated. static QueryState *ts_query_cursor__copy_state( TSQueryCursor *self, QueryState **state_ref ) { const QueryState *state = *state_ref; uint32_t state_index = (uint32_t)(state - self->states.contents); QueryState copy = *state; copy.capture_list_id = NONE; // If the state has captures, copy its capture list. if (state->capture_list_id != NONE) { CaptureList *new_captures = ts_query_cursor__prepare_to_capture(self, &copy, state_index); if (!new_captures) return NULL; const CaptureList *old_captures = capture_list_pool_get( &self->capture_list_pool, state->capture_list_id ); array_push_all(new_captures, old_captures); } array_insert(&self->states, state_index + 1, copy); *state_ref = array_get(&self->states, state_index); return array_get(&self->states, state_index + 1); } static inline bool ts_query_cursor__should_descend( TSQueryCursor *self, bool node_intersects_range ) { if (node_intersects_range && self->depth < self->max_start_depth) { return true; } // If there are in-progress matches whose remaining steps occur // deeper in the tree, then descend.
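// (Illustrative note, not part of the original source: the loop below asks
// whether any in-progress state still needs to match a step deeper than the
// current depth; if so, the cursor descends even when the node itself lies
// outside the requested byte or point range.)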
for (unsigned i = 0; i < self->states.size; i++) { QueryState *state = array_get(&self->states, i); QueryStep *next_step = array_get(&self->query->steps, state->step_index); if ( next_step->depth != PATTERN_DONE_MARKER && state->start_depth + next_step->depth > self->depth ) { return true; } } if (self->depth >= self->max_start_depth) { return false; } // If the current node is hidden, then a non-rooted pattern might match // one if its roots inside of this node, and match another of its roots // as part of a sibling node, so we may need to descend. if (!self->on_visible_node) { // Descending into a repetition node outside of the range can be // expensive, because these nodes can have many visible children. // Avoid descending into repetition nodes unless we have already // determined that this query can match rootless patterns inside // of this type of repetition node. Subtree subtree = ts_tree_cursor_current_subtree(&self->cursor); if (ts_subtree_is_repetition(subtree)) { bool exists; uint32_t index; array_search_sorted_by( &self->query->repeat_symbols_with_rootless_patterns,, ts_subtree_symbol(subtree), &index, &exists ); return exists; } return true; } return false; } // Walk the tree, processing patterns until at least one pattern finishes, // If one or more patterns finish, return `true` and store their states in the // `finished_states` array. Multiple patterns can finish on the same node. If // there are no more matches, return `false`. static inline bool ts_query_cursor__advance( TSQueryCursor *self, bool stop_on_definite_step ) { bool did_match = false; for (;;) { if (self->halted) { while (self->states.size > 0) { QueryState state = array_pop(&self->states); capture_list_pool_release( &self->capture_list_pool, state.capture_list_id ); } } if (++self->operation_count == OP_COUNT_PER_QUERY_TIMEOUT_CHECK) { self->operation_count = 0; } if (self->query_options && self->query_options->progress_callback) { self->query_state.current_byte_offset = ts_node_start_byte(ts_tree_cursor_current_node(&self->cursor)); } if ( did_match || self->halted || ( self->operation_count == 0 && ( (!clock_is_null(self->end_clock) && clock_is_gt(clock_now(), self->end_clock)) || (self->query_options && self->query_options->progress_callback && self->query_options->progress_callback(&self->query_state)) ) ) ) { return did_match; } // Exit the current node. if (self->ascending) { if (self->on_visible_node) { LOG( "leave node. depth:%u, type:%s\n", self->depth, ts_node_type(ts_tree_cursor_current_node(&self->cursor)) ); // After leaving a node, remove any states that cannot make further progress. uint32_t deleted_count = 0; for (unsigned i = 0, n = self->states.size; i < n; i++) { QueryState *state = array_get(&self->states, i); QueryStep *step = array_get(&self->query->steps, state->step_index); // If a state completed its pattern inside of this node, but was deferred from finishing // in order to search for longer matches, mark it as finished. if ( step->depth == PATTERN_DONE_MARKER && (state->start_depth > self->depth || self->depth == 0) ) { LOG(" finish pattern %u\n", state->pattern_index); array_push(&self->finished_states, *state); did_match = true; deleted_count++; } // If a state needed to match something within this node, then remove that state // as it has failed to match. else if ( step->depth != PATTERN_DONE_MARKER && (uint32_t)state->start_depth + (uint32_t)step->depth > self->depth ) { LOG( " failed to match. 
pattern:%u, step:%u\n", state->pattern_index, state->step_index ); capture_list_pool_release( &self->capture_list_pool, state->capture_list_id ); deleted_count++; } else if (deleted_count > 0) { *array_get(&self->states, i - deleted_count) = *state; } } self->states.size -= deleted_count; } // Leave this node by stepping to its next sibling or to its parent. switch (ts_tree_cursor_goto_next_sibling_internal(&self->cursor)) { case TreeCursorStepVisible: if (!self->on_visible_node) { self->depth++; self->on_visible_node = true; } self->ascending = false; break; case TreeCursorStepHidden: if (self->on_visible_node) { self->depth--; self->on_visible_node = false; } self->ascending = false; break; default: if (ts_tree_cursor_goto_parent(&self->cursor)) { self->depth--; } else { LOG("halt at root\n"); self->halted = true; } } } // Enter a new node. else { // Get the properties of the current node. TSNode node = ts_tree_cursor_current_node(&self->cursor); TSNode parent_node = ts_tree_cursor_parent_node(&self->cursor); uint32_t start_byte = ts_node_start_byte(node); uint32_t end_byte = ts_node_end_byte(node); TSPoint start_point = ts_node_start_point(node); TSPoint end_point = ts_node_end_point(node); bool is_empty = start_byte == end_byte; bool parent_precedes_range = !ts_node_is_null(parent_node) && ( ts_node_end_byte(parent_node) <= self->start_byte || point_lte(ts_node_end_point(parent_node), self->start_point) ); bool parent_follows_range = !ts_node_is_null(parent_node) && ( ts_node_start_byte(parent_node) >= self->end_byte || point_gte(ts_node_start_point(parent_node), self->end_point) ); bool node_precedes_range = parent_precedes_range || end_byte < self->start_byte || point_lt(end_point, self->start_point) || (!is_empty && end_byte == self->start_byte) || (!is_empty && point_eq(end_point, self->start_point)); bool node_follows_range = parent_follows_range || ( start_byte >= self->end_byte || point_gte(start_point, self->end_point) ); bool parent_intersects_range = !parent_precedes_range && !parent_follows_range; bool node_intersects_range = !node_precedes_range && !node_follows_range; if (self->on_visible_node) { TSSymbol symbol = ts_node_symbol(node); bool is_named = ts_node_is_named(node); bool is_missing = ts_node_is_missing(node); bool has_later_siblings; bool has_later_named_siblings; bool can_have_later_siblings_with_this_field; TSFieldId field_id = 0; TSSymbol supertypes[8] = {0}; unsigned supertype_count = 8; ts_tree_cursor_current_status( &self->cursor, &field_id, &has_later_siblings, &has_later_named_siblings, &can_have_later_siblings_with_this_field, supertypes, &supertype_count ); LOG( "enter node. depth:%u, type:%s, field:%s, row:%u state_count:%u, finished_state_count:%u\n", self->depth, ts_node_type(node), ts_language_field_name_for_id(self->query->language, field_id), ts_node_start_point(node).row, self->states.size, self->finished_states.size ); bool node_is_error = symbol == ts_builtin_sym_error; bool parent_is_error = !ts_node_is_null(parent_node) && ts_node_symbol(parent_node) == ts_builtin_sym_error; // Add new states for any patterns whose root node is a wildcard. if (!node_is_error) { for (unsigned i = 0; i < self->query->wildcard_root_pattern_count; i++) { PatternEntry *pattern = array_get(&self->query->pattern_map, i); // If this node matches the first step of the pattern, then add a new // state at the start of this pattern. 
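// (Illustrative note, not part of the original source: wildcard-rooted
// patterns, e.g. a bare `(_) @node`, cannot be looked up by symbol, so the
// first wildcard_root_pattern_count entries of the pattern map are tried
// against every non-error node the cursor enters.)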
QueryStep *step = array_get(&self->query->steps, pattern->step_index); uint32_t start_depth = self->depth - step->depth; if ( (pattern->is_rooted ? node_intersects_range : (parent_intersects_range && !parent_is_error)) && (!step->field || field_id == step->field) && (!step->supertype_symbol || supertype_count > 0) && (start_depth <= self->max_start_depth) ) { ts_query_cursor__add_state(self, pattern); } } } // Add new states for any patterns whose root node matches this node. unsigned i; if (ts_query__pattern_map_search(self->query, symbol, &i)) { PatternEntry *pattern = array_get(&self->query->pattern_map, i); QueryStep *step = array_get(&self->query->steps, pattern->step_index); uint32_t start_depth = self->depth - step->depth; do { // If this node matches the first step of the pattern, then add a new // state at the start of this pattern. if ( (pattern->is_rooted ? node_intersects_range : (parent_intersects_range && !parent_is_error)) && (!step->field || field_id == step->field) && (start_depth <= self->max_start_depth) ) { ts_query_cursor__add_state(self, pattern); } // Advance to the next pattern whose root node matches this node. i++; if (i == self->query->pattern_map.size) break; pattern = array_get(&self->query->pattern_map, i); step = array_get(&self->query->steps, pattern->step_index); } while (step->symbol == symbol); } // Update all of the in-progress states with current node. for (unsigned j = 0, copy_count = 0; j < self->states.size; j += 1 + copy_count) { QueryState *state = array_get(&self->states, j); QueryStep *step = array_get(&self->query->steps, state->step_index); state->has_in_progress_alternatives = false; copy_count = 0; // Check that the node matches all of the criteria for the next // step of the pattern. if ((uint32_t)state->start_depth + (uint32_t)step->depth != self->depth) continue; // Determine if this node matches this step of the pattern, and also // if this node can have later siblings that match this step of the // pattern. bool node_does_match = false; if (step->symbol == WILDCARD_SYMBOL) { if (step->is_missing) { node_does_match = is_missing; } else { node_does_match = !node_is_error && (is_named || !step->is_named); } } else { node_does_match = symbol == step->symbol && (!step->is_missing || is_missing); } bool later_sibling_can_match = has_later_siblings; if ((step->is_immediate && is_named) || state->seeking_immediate_match) { later_sibling_can_match = false; } if (step->is_last_child && has_later_named_siblings) { node_does_match = false; } if (step->supertype_symbol) { bool has_supertype = false; for (unsigned k = 0; k < supertype_count; k++) { if (supertypes[k] == step->supertype_symbol) { has_supertype = true; break; } } if (!has_supertype) node_does_match = false; } if (step->field) { if (step->field == field_id) { if (!can_have_later_siblings_with_this_field) { later_sibling_can_match = false; } } else { node_does_match = false; } } if (step->negated_field_list_id) { TSFieldId *negated_field_ids = array_get(&self->query->negated_fields, step->negated_field_list_id); for (;;) { TSFieldId negated_field_id = *negated_field_ids; if (negated_field_id) { negated_field_ids++; if (ts_node_child_by_field_id(node, negated_field_id).id) { node_does_match = false; break; } } else { break; } } } // Remove states immediately if it is ever clear that they cannot match. if (!node_does_match) { if (!later_sibling_can_match) { LOG( " discard state. 
pattern:%u, step:%u\n", state->pattern_index, state->step_index ); capture_list_pool_release( &self->capture_list_pool, state->capture_list_id ); array_erase(&self->states, j); j--; } continue; } // Some patterns can match their root node in multiple ways, capturing different // children. If this pattern step could match later children within the same // parent, then this query state cannot simply be updated in place. It must be // split into two states: one that matches this node, and one which skips over // this node, to preserve the possibility of matching later siblings. if (later_sibling_can_match && ( step->contains_captures || ts_query__step_is_fallible(self->query, state->step_index) )) { if (ts_query_cursor__copy_state(self, &state)) { LOG( " split state for capture. pattern:%u, step:%u\n", state->pattern_index, state->step_index ); copy_count++; } } // If this pattern started with a wildcard, such that the pattern map // actually points to the *second* step of the pattern, then check // that the node has a parent, and capture the parent node if necessary. if (state->needs_parent) { TSNode parent = ts_tree_cursor_parent_node(&self->cursor); if (ts_node_is_null(parent)) { LOG(" missing parent node\n"); state->dead = true; } else { state->needs_parent = false; QueryStep *skipped_wildcard_step = step; do { skipped_wildcard_step--; } while ( skipped_wildcard_step->is_dead_end || skipped_wildcard_step->is_pass_through || skipped_wildcard_step->depth > 0 ); if (skipped_wildcard_step->capture_ids[0] != NONE) { LOG(" capture wildcard parent\n"); ts_query_cursor__capture( self, state, skipped_wildcard_step, parent ); } } } // If the current node is captured in this pattern, add it to the capture list. if (step->capture_ids[0] != NONE) { ts_query_cursor__capture(self, state, step, node); } if (state->dead) { array_erase(&self->states, j); j--; continue; } // Advance this state to the next step of its pattern. state->step_index++; LOG( " advance state. pattern:%u, step:%u\n", state->pattern_index, state->step_index ); QueryStep *next_step = array_get(&self->query->steps, state->step_index); // For a given step, if the current symbol is the wildcard symbol, `_`, and it is **not** // named, meaning it should capture anonymous nodes, **and** the next step is immediate, // we reuse the `seeking_immediate_match` flag to indicate that we are looking for an // immediate match due to an unnamed wildcard symbol. // // The reason for this is that typically, anchors will not consider anonymous nodes, // but we're special casing the wildcard symbol to allow for any immediate matches, // regardless of whether they are named or not. if (step->symbol == WILDCARD_SYMBOL && !step->is_named && next_step->is_immediate) { state->seeking_immediate_match = true; } else { state->seeking_immediate_match = false; } if (stop_on_definite_step && next_step->root_pattern_guaranteed) did_match = true; // If this state's next step has an alternative step, then copy the state in order // to pursue both alternatives. The alternative step itself may have an alternative, // so this is an interactive process. unsigned end_index = j + 1; for (unsigned k = j; k < end_index; k++) { QueryState *child_state = array_get(&self->states, k); QueryStep *child_step = array_get(&self->query->steps, child_state->step_index); if (child_step->alternative_index != NONE) { // A "dead-end" step exists only to add a non-sequential jump into the step sequence, // via its alternative index. 
When a state reaches a dead-end step, it jumps straight // to the step's alternative. if (child_step->is_dead_end) { child_state->step_index = child_step->alternative_index; k--; continue; } // A "pass-through" step exists only to add a branch into the step sequence, // via its alternative_index. When a state reaches a pass-through step, it splits // in order to process the alternative step, and then it advances to the next step. if (child_step->is_pass_through) { child_state->step_index++; k--; } QueryState *copy = ts_query_cursor__copy_state(self, &child_state); if (copy) { LOG( " split state for branch. pattern:%u, from_step:%u, to_step:%u, immediate:%d, capture_count: %u\n", copy->pattern_index, copy->step_index, next_step->alternative_index, next_step->alternative_is_immediate, capture_list_pool_get(&self->capture_list_pool, copy->capture_list_id)->size ); end_index++; copy_count++; copy->step_index = child_step->alternative_index; if (child_step->alternative_is_immediate) { copy->seeking_immediate_match = true; } } } } } for (unsigned j = 0; j < self->states.size; j++) { QueryState *state = array_get(&self->states, j); if (state->dead) { array_erase(&self->states, j); j--; continue; } // Enforce the longest-match criteria. When a query pattern contains optional or // repeated nodes, this is necessary to avoid multiple redundant states, where // one state has a strict subset of another state's captures. bool did_remove = false; for (unsigned k = j + 1; k < self->states.size; k++) { QueryState *other_state = array_get(&self->states, k); // Query states are kept in ascending order of start_depth and pattern_index. // Since the longest-match criteria is only used for deduping matches of the same // pattern and root node, we only need to perform pairwise comparisons within a // small slice of the states array. if ( other_state->start_depth != state->start_depth || other_state->pattern_index != state->pattern_index ) break; bool left_contains_right, right_contains_left; ts_query_cursor__compare_captures( self, state, other_state, &left_contains_right, &right_contains_left ); if (left_contains_right) { if (state->step_index == other_state->step_index) { LOG( " drop shorter state. pattern: %u, step_index: %u\n", state->pattern_index, state->step_index ); capture_list_pool_release(&self->capture_list_pool, other_state->capture_list_id); array_erase(&self->states, k); k--; continue; } other_state->has_in_progress_alternatives = true; } if (right_contains_left) { if (state->step_index == other_state->step_index) { LOG( " drop shorter state. pattern: %u, step_index: %u\n", state->pattern_index, state->step_index ); capture_list_pool_release(&self->capture_list_pool, state->capture_list_id); array_erase(&self->states, j); j--; did_remove = true; break; } state->has_in_progress_alternatives = true; } } // If the state is at the end of its pattern, remove it from the list // of in-progress states and add it to the list of finished states. if (!did_remove) { LOG( " keep state. 
pattern: %u, start_depth: %u, step_index: %u, capture_count: %u\n", state->pattern_index, state->start_depth, state->step_index, capture_list_pool_get(&self->capture_list_pool, state->capture_list_id)->size ); QueryStep *next_step = array_get(&self->query->steps, state->step_index); if (next_step->depth == PATTERN_DONE_MARKER) { if (state->has_in_progress_alternatives) { LOG(" defer finishing pattern %u\n", state->pattern_index); } else { LOG(" finish pattern %u\n", state->pattern_index); array_push(&self->finished_states, *state); array_erase(&self->states, (uint32_t)(state - self->states.contents)); did_match = true; j--; } } } } } if (ts_query_cursor__should_descend(self, node_intersects_range)) { switch (ts_tree_cursor_goto_first_child_internal(&self->cursor)) { case TreeCursorStepVisible: self->depth++; self->on_visible_node = true; continue; case TreeCursorStepHidden: self->on_visible_node = false; continue; default: break; } } self->ascending = true; } } } bool ts_query_cursor_next_match( TSQueryCursor *self, TSQueryMatch *match ) { if (self->finished_states.size == 0) { if (!ts_query_cursor__advance(self, false)) { return false; } } QueryState *state = array_get(&self->finished_states, 0); if (state->id == UINT32_MAX) state->id = self->next_state_id++; match->id = state->id; match->pattern_index = state->pattern_index; const CaptureList *captures = capture_list_pool_get( &self->capture_list_pool, state->capture_list_id ); match->captures = captures->contents; match->capture_count = captures->size; capture_list_pool_release(&self->capture_list_pool, state->capture_list_id); array_erase(&self->finished_states, 0); return true; } void ts_query_cursor_remove_match( TSQueryCursor *self, uint32_t match_id ) { for (unsigned i = 0; i < self->finished_states.size; i++) { const QueryState *state = array_get(&self->finished_states, i); if (state->id == match_id) { capture_list_pool_release( &self->capture_list_pool, state->capture_list_id ); array_erase(&self->finished_states, i); return; } } // Remove unfinished query states as well to prevent future // captures for a match being removed. for (unsigned i = 0; i < self->states.size; i++) { const QueryState *state = array_get(&self->states, i); if (state->id == match_id) { capture_list_pool_release( &self->capture_list_pool, state->capture_list_id ); array_erase(&self->states, i); return; } } } bool ts_query_cursor_next_capture( TSQueryCursor *self, TSQueryMatch *match, uint32_t *capture_index ) { // The goal here is to return captures in order, even though they may not // be discovered in order, because patterns can overlap. Search for matches // until there is a finished capture that is before any unfinished capture. for (;;) { // First, find the earliest capture in an unfinished match. uint32_t first_unfinished_capture_byte; uint32_t first_unfinished_pattern_index; uint32_t first_unfinished_state_index; bool first_unfinished_state_is_definite = false; bool found_unfinished_state = ts_query_cursor__first_in_progress_capture( self, &first_unfinished_state_index, &first_unfinished_capture_byte, &first_unfinished_pattern_index, &first_unfinished_state_is_definite ); // Then find the earliest capture in a finished match. It must occur // before the first capture in an *unfinished* match. 
QueryState *first_finished_state = NULL; uint32_t first_finished_capture_byte = first_unfinished_capture_byte; uint32_t first_finished_pattern_index = first_unfinished_pattern_index; for (unsigned i = 0; i < self->finished_states.size;) { QueryState *state = array_get(&self->finished_states, i); const CaptureList *captures = capture_list_pool_get( &self->capture_list_pool, state->capture_list_id ); // Remove states whose captures are all consumed. if (state->consumed_capture_count >= captures->size) { capture_list_pool_release( &self->capture_list_pool, state->capture_list_id ); array_erase(&self->finished_states, i); continue; } TSNode node = array_get(captures, state->consumed_capture_count)->node; bool node_precedes_range = ( ts_node_end_byte(node) <= self->start_byte || point_lte(ts_node_end_point(node), self->start_point) ); bool node_follows_range = ( ts_node_start_byte(node) >= self->end_byte || point_gte(ts_node_start_point(node), self->end_point) ); bool node_outside_of_range = node_precedes_range || node_follows_range; // Skip captures that are outside of the cursor's range. if (node_outside_of_range) { state->consumed_capture_count++; continue; } uint32_t node_start_byte = ts_node_start_byte(node); if ( node_start_byte < first_finished_capture_byte || ( node_start_byte == first_finished_capture_byte && state->pattern_index < first_finished_pattern_index ) ) { first_finished_state = state; first_finished_capture_byte = node_start_byte; first_finished_pattern_index = state->pattern_index; } i++; } // If there is finished capture that is clearly before any unfinished // capture, then return its match, and its capture index. Internally // record the fact that the capture has been 'consumed'. QueryState *state; if (first_finished_state) { state = first_finished_state; } else if (first_unfinished_state_is_definite) { state = array_get(&self->states, first_unfinished_state_index); } else { state = NULL; } if (state) { if (state->id == UINT32_MAX) state->id = self->next_state_id++; match->id = state->id; match->pattern_index = state->pattern_index; const CaptureList *captures = capture_list_pool_get( &self->capture_list_pool, state->capture_list_id ); match->captures = captures->contents; match->capture_count = captures->size; *capture_index = state->consumed_capture_count; state->consumed_capture_count++; return true; } if (capture_list_pool_is_empty(&self->capture_list_pool) && found_unfinished_state) { LOG( " abandon state. index:%u, pattern:%u, offset:%u.\n", first_unfinished_state_index, first_unfinished_pattern_index, first_unfinished_capture_byte ); capture_list_pool_release( &self->capture_list_pool, array_get(&self->states, first_unfinished_state_index)->capture_list_id ); array_erase(&self->states, first_unfinished_state_index); } // If there are no finished matches that are ready to be returned, then // continue finding more matches. 
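// (Illustrative note, not part of the original source: `true` here is the
// stop_on_definite_step flag, which lets the advance loop return as soon as a
// state reaches a step whose match is known to be guaranteed, so captures can
// be yielded in document order without fully finishing every overlapping
// match first.)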
if ( !ts_query_cursor__advance(self, true) && self->finished_states.size == 0 ) return false; } } void ts_query_cursor_set_max_start_depth( TSQueryCursor *self, uint32_t max_start_depth ) { self->max_start_depth = max_start_depth; } #undef LOG hx-0.3.0+20250717/bindings/vendor/src/reduce_action.h000066400000000000000000000014041503625671400217020ustar00rootroot00000000000000#ifndef TREE_SITTER_REDUCE_ACTION_H_ #define TREE_SITTER_REDUCE_ACTION_H_ #ifdef __cplusplus extern "C" { #endif #include "./array.h" #include "tree_sitter/api.h" typedef struct { uint32_t count; TSSymbol symbol; int dynamic_precedence; unsigned short production_id; } ReduceAction; typedef Array(ReduceAction) ReduceActionSet; static inline void ts_reduce_action_set_add(ReduceActionSet *self, ReduceAction new_action) { for (uint32_t i = 0; i < self->size; i++) { ReduceAction action = self->contents[i]; if (action.symbol == new_action.symbol && action.count == new_action.count) return; } array_push(self, new_action); } #ifdef __cplusplus } #endif #endif // TREE_SITTER_REDUCE_ACTION_H_ hx-0.3.0+20250717/bindings/vendor/src/reusable_node.h000066400000000000000000000052731503625671400217150ustar00rootroot00000000000000#include "./subtree.h" typedef struct { Subtree tree; uint32_t child_index; uint32_t byte_offset; } StackEntry; typedef struct { Array(StackEntry) stack; Subtree last_external_token; } ReusableNode; static inline ReusableNode reusable_node_new(void) { return (ReusableNode) {array_new(), NULL_SUBTREE}; } static inline void reusable_node_clear(ReusableNode *self) { array_clear(&self->stack); self->last_external_token = NULL_SUBTREE; } static inline Subtree reusable_node_tree(ReusableNode *self) { return self->stack.size > 0 ? self->stack.contents[self->stack.size - 1].tree : NULL_SUBTREE; } static inline uint32_t reusable_node_byte_offset(ReusableNode *self) { return self->stack.size > 0 ? 
self->stack.contents[self->stack.size - 1].byte_offset : UINT32_MAX; } static inline void reusable_node_delete(ReusableNode *self) { array_delete(&self->stack); } static inline void reusable_node_advance(ReusableNode *self) { StackEntry last_entry = *array_back(&self->stack); uint32_t byte_offset = last_entry.byte_offset + ts_subtree_total_bytes(last_entry.tree); if (ts_subtree_has_external_tokens(last_entry.tree)) { self->last_external_token = ts_subtree_last_external_token(last_entry.tree); } Subtree tree; uint32_t next_index; do { StackEntry popped_entry = array_pop(&self->stack); next_index = popped_entry.child_index + 1; if (self->stack.size == 0) return; tree = array_back(&self->stack)->tree; } while (ts_subtree_child_count(tree) <= next_index); array_push(&self->stack, ((StackEntry) { .tree = ts_subtree_children(tree)[next_index], .child_index = next_index, .byte_offset = byte_offset, })); } static inline bool reusable_node_descend(ReusableNode *self) { StackEntry last_entry = *array_back(&self->stack); if (ts_subtree_child_count(last_entry.tree) > 0) { array_push(&self->stack, ((StackEntry) { .tree = ts_subtree_children(last_entry.tree)[0], .child_index = 0, .byte_offset = last_entry.byte_offset, })); return true; } else { return false; } } static inline void reusable_node_advance_past_leaf(ReusableNode *self) { while (reusable_node_descend(self)) {} reusable_node_advance(self); } static inline void reusable_node_reset(ReusableNode *self, Subtree tree) { reusable_node_clear(self); array_push(&self->stack, ((StackEntry) { .tree = tree, .child_index = 0, .byte_offset = 0, })); // Never reuse the root node, because it has a non-standard internal structure // due to transformations that are applied when it is accepted: adding the EOF // child and any extra children. 
if (!reusable_node_descend(self)) { reusable_node_clear(self); } } hx-0.3.0+20250717/bindings/vendor/src/stack.c000066400000000000000000000677341503625671400202200ustar00rootroot00000000000000#include "./alloc.h" #include "./language.h" #include "./subtree.h" #include "./array.h" #include "./stack.h" #include "./length.h" #include #include #include #define MAX_LINK_COUNT 8 #define MAX_NODE_POOL_SIZE 50 #define MAX_ITERATOR_COUNT 64 #if defined _WIN32 && !defined __GNUC__ #define forceinline __forceinline #else #define forceinline static inline __attribute__((always_inline)) #endif typedef struct StackNode StackNode; typedef struct { StackNode *node; Subtree subtree; bool is_pending; } StackLink; struct StackNode { TSStateId state; Length position; StackLink links[MAX_LINK_COUNT]; short unsigned int link_count; uint32_t ref_count; unsigned error_cost; unsigned node_count; int dynamic_precedence; }; typedef struct { StackNode *node; SubtreeArray subtrees; uint32_t subtree_count; bool is_pending; } StackIterator; typedef Array(StackNode *) StackNodeArray; typedef enum { StackStatusActive, StackStatusPaused, StackStatusHalted, } StackStatus; typedef struct { StackNode *node; StackSummary *summary; unsigned node_count_at_last_error; Subtree last_external_token; Subtree lookahead_when_paused; StackStatus status; } StackHead; struct Stack { Array(StackHead) heads; StackSliceArray slices; Array(StackIterator) iterators; StackNodeArray node_pool; StackNode *base_node; SubtreePool *subtree_pool; }; typedef unsigned StackAction; enum { StackActionNone, StackActionStop = 1, StackActionPop = 2, }; typedef StackAction (*StackCallback)(void *, const StackIterator *); static void stack_node_retain(StackNode *self) { if (!self) return; ts_assert(self->ref_count > 0); self->ref_count++; ts_assert(self->ref_count != 0); } static void stack_node_release( StackNode *self, StackNodeArray *pool, SubtreePool *subtree_pool ) { recur: ts_assert(self->ref_count != 0); self->ref_count--; if (self->ref_count > 0) return; StackNode *first_predecessor = NULL; if (self->link_count > 0) { for (unsigned i = self->link_count - 1; i > 0; i--) { StackLink link = self->links[i]; if (link.subtree.ptr) ts_subtree_release(subtree_pool, link.subtree); stack_node_release(link.node, pool, subtree_pool); } StackLink link = self->links[0]; if (link.subtree.ptr) ts_subtree_release(subtree_pool, link.subtree); first_predecessor = self->links[0].node; } if (pool->size < MAX_NODE_POOL_SIZE) { array_push(pool, self); } else { ts_free(self); } if (first_predecessor) { self = first_predecessor; goto recur; } } /// Get the number of nodes in the subtree, for the purpose of measuring /// how much progress has been made by a given version of the stack. static uint32_t stack__subtree_node_count(Subtree subtree) { uint32_t count = ts_subtree_visible_descendant_count(subtree); if (ts_subtree_visible(subtree)) count++; // Count intermediate error nodes even though they are not visible, // because a stack version's node count is used to check whether it // has made any progress since the last time it encountered an error. if (ts_subtree_symbol(subtree) == ts_builtin_sym_error_repeat) count++; return count; } static StackNode *stack_node_new( StackNode *previous_node, Subtree subtree, bool is_pending, TSStateId state, StackNodeArray *pool ) { StackNode *node = pool->size > 0 ? 
array_pop(pool) : ts_malloc(sizeof(StackNode)); *node = (StackNode) { .ref_count = 1, .link_count = 0, .state = state }; if (previous_node) { node->link_count = 1; node->links[0] = (StackLink) { .node = previous_node, .subtree = subtree, .is_pending = is_pending, }; node->position = previous_node->position; node->error_cost = previous_node->error_cost; node->dynamic_precedence = previous_node->dynamic_precedence; node->node_count = previous_node->node_count; if (subtree.ptr) { node->error_cost += ts_subtree_error_cost(subtree); node->position = length_add(node->position, ts_subtree_total_size(subtree)); node->node_count += stack__subtree_node_count(subtree); node->dynamic_precedence += ts_subtree_dynamic_precedence(subtree); } } else { node->position = length_zero(); node->error_cost = 0; } return node; } static bool stack__subtree_is_equivalent(Subtree left, Subtree right) { if (left.ptr == right.ptr) return true; if (!left.ptr || !right.ptr) return false; // Symbols must match if (ts_subtree_symbol(left) != ts_subtree_symbol(right)) return false; // If both have errors, don't bother keeping both. if (ts_subtree_error_cost(left) > 0 && ts_subtree_error_cost(right) > 0) return true; return ( ts_subtree_padding(left).bytes == ts_subtree_padding(right).bytes && ts_subtree_size(left).bytes == ts_subtree_size(right).bytes && ts_subtree_child_count(left) == ts_subtree_child_count(right) && ts_subtree_extra(left) == ts_subtree_extra(right) && ts_subtree_external_scanner_state_eq(left, right) ); } static void stack_node_add_link( StackNode *self, StackLink link, SubtreePool *subtree_pool ) { if (link.node == self) return; for (int i = 0; i < self->link_count; i++) { StackLink *existing_link = &self->links[i]; if (stack__subtree_is_equivalent(existing_link->subtree, link.subtree)) { // In general, we preserve ambiguities until they are removed from the stack // during a pop operation where multiple paths lead to the same node. But in // the special case where two links directly connect the same pair of nodes, // we can safely remove the ambiguity ahead of time without changing behavior. if (existing_link->node == link.node) { if ( ts_subtree_dynamic_precedence(link.subtree) > ts_subtree_dynamic_precedence(existing_link->subtree) ) { ts_subtree_retain(link.subtree); ts_subtree_release(subtree_pool, existing_link->subtree); existing_link->subtree = link.subtree; self->dynamic_precedence = link.node->dynamic_precedence + ts_subtree_dynamic_precedence(link.subtree); } return; } // If the previous nodes are mergeable, merge them recursively. 
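// Here "mergeable" means the two predecessor nodes share the same parse state, byte
// position, and error cost, as checked below.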
if ( existing_link->node->state == link.node->state && existing_link->node->position.bytes == link.node->position.bytes && existing_link->node->error_cost == link.node->error_cost ) { for (int j = 0; j < link.node->link_count; j++) { stack_node_add_link(existing_link->node, link.node->links[j], subtree_pool); } int32_t dynamic_precedence = link.node->dynamic_precedence; if (link.subtree.ptr) { dynamic_precedence += ts_subtree_dynamic_precedence(link.subtree); } if (dynamic_precedence > self->dynamic_precedence) { self->dynamic_precedence = dynamic_precedence; } return; } } } if (self->link_count == MAX_LINK_COUNT) return; stack_node_retain(link.node); unsigned node_count = link.node->node_count; int dynamic_precedence = link.node->dynamic_precedence; self->links[self->link_count++] = link; if (link.subtree.ptr) { ts_subtree_retain(link.subtree); node_count += stack__subtree_node_count(link.subtree); dynamic_precedence += ts_subtree_dynamic_precedence(link.subtree); } if (node_count > self->node_count) self->node_count = node_count; if (dynamic_precedence > self->dynamic_precedence) self->dynamic_precedence = dynamic_precedence; } static void stack_head_delete( StackHead *self, StackNodeArray *pool, SubtreePool *subtree_pool ) { if (self->node) { if (self->last_external_token.ptr) { ts_subtree_release(subtree_pool, self->last_external_token); } if (self->lookahead_when_paused.ptr) { ts_subtree_release(subtree_pool, self->lookahead_when_paused); } if (self->summary) { array_delete(self->summary); ts_free(self->summary); } stack_node_release(self->node, pool, subtree_pool); } } static StackVersion ts_stack__add_version( Stack *self, StackVersion original_version, StackNode *node ) { StackHead head = { .node = node, .node_count_at_last_error = array_get(&self->heads, original_version)->node_count_at_last_error, .last_external_token = array_get(&self->heads, original_version)->last_external_token, .status = StackStatusActive, .lookahead_when_paused = NULL_SUBTREE, }; array_push(&self->heads, head); stack_node_retain(node); if (head.last_external_token.ptr) ts_subtree_retain(head.last_external_token); return (StackVersion)(self->heads.size - 1); } static void ts_stack__add_slice( Stack *self, StackVersion original_version, StackNode *node, SubtreeArray *subtrees ) { for (uint32_t i = self->slices.size - 1; i + 1 > 0; i--) { StackVersion version = array_get(&self->slices, i)->version; if (array_get(&self->heads, version)->node == node) { StackSlice slice = {*subtrees, version}; array_insert(&self->slices, i + 1, slice); return; } } StackVersion version = ts_stack__add_version(self, original_version, node); StackSlice slice = { *subtrees, version }; array_push(&self->slices, slice); } static StackSliceArray stack__iter( Stack *self, StackVersion version, StackCallback callback, void *payload, int goal_subtree_count ) { array_clear(&self->slices); array_clear(&self->iterators); StackHead *head = array_get(&self->heads, version); StackIterator new_iterator = { .node = head->node, .subtrees = array_new(), .subtree_count = 0, .is_pending = true, }; bool include_subtrees = false; if (goal_subtree_count >= 0) { include_subtrees = true; array_reserve(&new_iterator.subtrees, (uint32_t)ts_subtree_alloc_size(goal_subtree_count) / sizeof(Subtree)); } array_push(&self->iterators, new_iterator); while (self->iterators.size > 0) { for (uint32_t i = 0, size = self->iterators.size; i < size; i++) { StackIterator *iterator = array_get(&self->iterators, i); StackNode *node = iterator->node; StackAction action = 
callback(payload, iterator); bool should_pop = action & StackActionPop; bool should_stop = action & StackActionStop || node->link_count == 0; if (should_pop) { SubtreeArray subtrees = iterator->subtrees; if (!should_stop) { ts_subtree_array_copy(subtrees, &subtrees); } ts_subtree_array_reverse(&subtrees); ts_stack__add_slice( self, version, node, &subtrees ); } if (should_stop) { if (!should_pop) { ts_subtree_array_delete(self->subtree_pool, &iterator->subtrees); } array_erase(&self->iterators, i); i--, size--; continue; } for (uint32_t j = 1; j <= node->link_count; j++) { StackIterator *next_iterator; StackLink link; if (j == node->link_count) { link = node->links[0]; next_iterator = array_get(&self->iterators, i); } else { if (self->iterators.size >= MAX_ITERATOR_COUNT) continue; link = node->links[j]; StackIterator current_iterator = *array_get(&self->iterators, i); array_push(&self->iterators, current_iterator); next_iterator = array_back(&self->iterators); ts_subtree_array_copy(next_iterator->subtrees, &next_iterator->subtrees); } next_iterator->node = link.node; if (link.subtree.ptr) { if (include_subtrees) { array_push(&next_iterator->subtrees, link.subtree); ts_subtree_retain(link.subtree); } if (!ts_subtree_extra(link.subtree)) { next_iterator->subtree_count++; if (!link.is_pending) { next_iterator->is_pending = false; } } } else { next_iterator->subtree_count++; next_iterator->is_pending = false; } } } } return self->slices; } Stack *ts_stack_new(SubtreePool *subtree_pool) { Stack *self = ts_calloc(1, sizeof(Stack)); array_init(&self->heads); array_init(&self->slices); array_init(&self->iterators); array_init(&self->node_pool); array_reserve(&self->heads, 4); array_reserve(&self->slices, 4); array_reserve(&self->iterators, 4); array_reserve(&self->node_pool, MAX_NODE_POOL_SIZE); self->subtree_pool = subtree_pool; self->base_node = stack_node_new(NULL, NULL_SUBTREE, false, 1, &self->node_pool); ts_stack_clear(self); return self; } void ts_stack_delete(Stack *self) { if (self->slices.contents) array_delete(&self->slices); if (self->iterators.contents) array_delete(&self->iterators); stack_node_release(self->base_node, &self->node_pool, self->subtree_pool); for (uint32_t i = 0; i < self->heads.size; i++) { stack_head_delete(array_get(&self->heads, i), &self->node_pool, self->subtree_pool); } array_clear(&self->heads); if (self->node_pool.contents) { for (uint32_t i = 0; i < self->node_pool.size; i++) ts_free(*array_get(&self->node_pool, i)); array_delete(&self->node_pool); } array_delete(&self->heads); ts_free(self); } uint32_t ts_stack_version_count(const Stack *self) { return self->heads.size; } uint32_t ts_stack_halted_version_count(Stack *self) { uint32_t count = 0; for (uint32_t i = 0; i < self->heads.size; i++) { StackHead *head = array_get(&self->heads, i); if (head->status == StackStatusHalted) { count++; } } return count; } TSStateId ts_stack_state(const Stack *self, StackVersion version) { return array_get(&self->heads, version)->node->state; } Length ts_stack_position(const Stack *self, StackVersion version) { return array_get(&self->heads, version)->node->position; } Subtree ts_stack_last_external_token(const Stack *self, StackVersion version) { return array_get(&self->heads, version)->last_external_token; } void ts_stack_set_last_external_token(Stack *self, StackVersion version, Subtree token) { StackHead *head = array_get(&self->heads, version); if (token.ptr) ts_subtree_retain(token); if (head->last_external_token.ptr) ts_subtree_release(self->subtree_pool, 
head->last_external_token); head->last_external_token = token; } unsigned ts_stack_error_cost(const Stack *self, StackVersion version) { StackHead *head = array_get(&self->heads, version); unsigned result = head->node->error_cost; if ( head->status == StackStatusPaused || (head->node->state == ERROR_STATE && !head->node->links[0].subtree.ptr)) { result += ERROR_COST_PER_RECOVERY; } return result; } unsigned ts_stack_node_count_since_error(const Stack *self, StackVersion version) { StackHead *head = array_get(&self->heads, version); if (head->node->node_count < head->node_count_at_last_error) { head->node_count_at_last_error = head->node->node_count; } return head->node->node_count - head->node_count_at_last_error; } void ts_stack_push( Stack *self, StackVersion version, Subtree subtree, bool pending, TSStateId state ) { StackHead *head = array_get(&self->heads, version); StackNode *new_node = stack_node_new(head->node, subtree, pending, state, &self->node_pool); if (!subtree.ptr) head->node_count_at_last_error = new_node->node_count; head->node = new_node; } forceinline StackAction pop_count_callback(void *payload, const StackIterator *iterator) { unsigned *goal_subtree_count = payload; if (iterator->subtree_count == *goal_subtree_count) { return StackActionPop | StackActionStop; } else { return StackActionNone; } } StackSliceArray ts_stack_pop_count(Stack *self, StackVersion version, uint32_t count) { return stack__iter(self, version, pop_count_callback, &count, (int)count); } forceinline StackAction pop_pending_callback(void *payload, const StackIterator *iterator) { (void)payload; if (iterator->subtree_count >= 1) { if (iterator->is_pending) { return StackActionPop | StackActionStop; } else { return StackActionStop; } } else { return StackActionNone; } } StackSliceArray ts_stack_pop_pending(Stack *self, StackVersion version) { StackSliceArray pop = stack__iter(self, version, pop_pending_callback, NULL, 0); if (pop.size > 0) { ts_stack_renumber_version(self, array_get(&pop, 0)->version, version); array_get(&pop, 0)->version = version; } return pop; } forceinline StackAction pop_error_callback(void *payload, const StackIterator *iterator) { if (iterator->subtrees.size > 0) { bool *found_error = payload; if (!*found_error && ts_subtree_is_error(*array_get(&iterator->subtrees, 0))) { *found_error = true; return StackActionPop | StackActionStop; } else { return StackActionStop; } } else { return StackActionNone; } } SubtreeArray ts_stack_pop_error(Stack *self, StackVersion version) { StackNode *node = array_get(&self->heads, version)->node; for (unsigned i = 0; i < node->link_count; i++) { if (node->links[i].subtree.ptr && ts_subtree_is_error(node->links[i].subtree)) { bool found_error = false; StackSliceArray pop = stack__iter(self, version, pop_error_callback, &found_error, 1); if (pop.size > 0) { ts_assert(pop.size == 1); ts_stack_renumber_version(self, array_get(&pop, 0)->version, version); return array_get(&pop, 0)->subtrees; } break; } } return (SubtreeArray) {.size = 0}; } forceinline StackAction pop_all_callback(void *payload, const StackIterator *iterator) { (void)payload; return iterator->node->link_count == 0 ? 
StackActionPop : StackActionNone; } StackSliceArray ts_stack_pop_all(Stack *self, StackVersion version) { return stack__iter(self, version, pop_all_callback, NULL, 0); } typedef struct { StackSummary *summary; unsigned max_depth; } SummarizeStackSession; forceinline StackAction summarize_stack_callback(void *payload, const StackIterator *iterator) { SummarizeStackSession *session = payload; TSStateId state = iterator->node->state; unsigned depth = iterator->subtree_count; if (depth > session->max_depth) return StackActionStop; for (unsigned i = session->summary->size - 1; i + 1 > 0; i--) { StackSummaryEntry entry = *array_get(session->summary, i); if (entry.depth < depth) break; if (entry.depth == depth && entry.state == state) return StackActionNone; } array_push(session->summary, ((StackSummaryEntry) { .position = iterator->node->position, .depth = depth, .state = state, })); return StackActionNone; } void ts_stack_record_summary(Stack *self, StackVersion version, unsigned max_depth) { SummarizeStackSession session = { .summary = ts_malloc(sizeof(StackSummary)), .max_depth = max_depth }; array_init(session.summary); stack__iter(self, version, summarize_stack_callback, &session, -1); StackHead *head = array_get(&self->heads, version); if (head->summary) { array_delete(head->summary); ts_free(head->summary); } head->summary = session.summary; } StackSummary *ts_stack_get_summary(Stack *self, StackVersion version) { return array_get(&self->heads, version)->summary; } int ts_stack_dynamic_precedence(Stack *self, StackVersion version) { return array_get(&self->heads, version)->node->dynamic_precedence; } bool ts_stack_has_advanced_since_error(const Stack *self, StackVersion version) { const StackHead *head = array_get(&self->heads, version); const StackNode *node = head->node; if (node->error_cost == 0) return true; while (node) { if (node->link_count > 0) { Subtree subtree = node->links[0].subtree; if (subtree.ptr) { if (ts_subtree_total_bytes(subtree) > 0) { return true; } else if ( node->node_count > head->node_count_at_last_error && ts_subtree_error_cost(subtree) == 0 ) { node = node->links[0].node; continue; } } } break; } return false; } void ts_stack_remove_version(Stack *self, StackVersion version) { stack_head_delete(array_get(&self->heads, version), &self->node_pool, self->subtree_pool); array_erase(&self->heads, version); } void ts_stack_renumber_version(Stack *self, StackVersion v1, StackVersion v2) { if (v1 == v2) return; ts_assert(v2 < v1); ts_assert((uint32_t)v1 < self->heads.size); StackHead *source_head = array_get(&self->heads, v1); StackHead *target_head = array_get(&self->heads, v2); if (target_head->summary && !source_head->summary) { source_head->summary = target_head->summary; target_head->summary = NULL; } stack_head_delete(target_head, &self->node_pool, self->subtree_pool); *target_head = *source_head; array_erase(&self->heads, v1); } void ts_stack_swap_versions(Stack *self, StackVersion v1, StackVersion v2) { StackHead temporary_head = *array_get(&self->heads, v1); *array_get(&self->heads, v1) = *array_get(&self->heads, v2); *array_get(&self->heads, v2) = temporary_head; } StackVersion ts_stack_copy_version(Stack *self, StackVersion version) { ts_assert(version < self->heads.size); StackHead version_head = *array_get(&self->heads, version); array_push(&self->heads, version_head); StackHead *head = array_back(&self->heads); stack_node_retain(head->node); if (head->last_external_token.ptr) ts_subtree_retain(head->last_external_token); head->summary = NULL; return 
self->heads.size - 1; } bool ts_stack_merge(Stack *self, StackVersion version1, StackVersion version2) { if (!ts_stack_can_merge(self, version1, version2)) return false; StackHead *head1 = array_get(&self->heads, version1); StackHead *head2 = array_get(&self->heads, version2); for (uint32_t i = 0; i < head2->node->link_count; i++) { stack_node_add_link(head1->node, head2->node->links[i], self->subtree_pool); } if (head1->node->state == ERROR_STATE) { head1->node_count_at_last_error = head1->node->node_count; } ts_stack_remove_version(self, version2); return true; } bool ts_stack_can_merge(Stack *self, StackVersion version1, StackVersion version2) { StackHead *head1 = array_get(&self->heads, version1); StackHead *head2 = array_get(&self->heads, version2); return head1->status == StackStatusActive && head2->status == StackStatusActive && head1->node->state == head2->node->state && head1->node->position.bytes == head2->node->position.bytes && head1->node->error_cost == head2->node->error_cost && ts_subtree_external_scanner_state_eq(head1->last_external_token, head2->last_external_token); } void ts_stack_halt(Stack *self, StackVersion version) { array_get(&self->heads, version)->status = StackStatusHalted; } void ts_stack_pause(Stack *self, StackVersion version, Subtree lookahead) { StackHead *head = array_get(&self->heads, version); head->status = StackStatusPaused; head->lookahead_when_paused = lookahead; head->node_count_at_last_error = head->node->node_count; } bool ts_stack_is_active(const Stack *self, StackVersion version) { return array_get(&self->heads, version)->status == StackStatusActive; } bool ts_stack_is_halted(const Stack *self, StackVersion version) { return array_get(&self->heads, version)->status == StackStatusHalted; } bool ts_stack_is_paused(const Stack *self, StackVersion version) { return array_get(&self->heads, version)->status == StackStatusPaused; } Subtree ts_stack_resume(Stack *self, StackVersion version) { StackHead *head = array_get(&self->heads, version); ts_assert(head->status == StackStatusPaused); Subtree result = head->lookahead_when_paused; head->status = StackStatusActive; head->lookahead_when_paused = NULL_SUBTREE; return result; } void ts_stack_clear(Stack *self) { stack_node_retain(self->base_node); for (uint32_t i = 0; i < self->heads.size; i++) { stack_head_delete(array_get(&self->heads, i), &self->node_pool, self->subtree_pool); } array_clear(&self->heads); array_push(&self->heads, ((StackHead) { .node = self->base_node, .status = StackStatusActive, .last_external_token = NULL_SUBTREE, .lookahead_when_paused = NULL_SUBTREE, })); } bool ts_stack_print_dot_graph(Stack *self, const TSLanguage *language, FILE *f) { array_reserve(&self->iterators, 32); if (!f) f = stderr; fprintf(f, "digraph stack {\n"); fprintf(f, "rankdir=\"RL\";\n"); fprintf(f, "edge [arrowhead=none]\n"); Array(StackNode *) visited_nodes = array_new(); array_clear(&self->iterators); for (uint32_t i = 0; i < self->heads.size; i++) { StackHead *head = array_get(&self->heads, i); if (head->status == StackStatusHalted) continue; fprintf(f, "node_head_%u [shape=none, label=\"\"]\n", i); fprintf(f, "node_head_%u -> node_%p [", i, (void *)head->node); if (head->status == StackStatusPaused) { fprintf(f, "color=red "); } fprintf(f, "label=%u, fontcolor=blue, weight=10000, labeltooltip=\"node_count: %u\nerror_cost: %u", i, ts_stack_node_count_since_error(self, i), ts_stack_error_cost(self, i) ); if (head->summary) { fprintf(f, "\nsummary:"); for (uint32_t j = 0; j < head->summary->size; j++) 
fprintf(f, " %u", array_get(head->summary, j)->state); } if (head->last_external_token.ptr) { const ExternalScannerState *state = &head->last_external_token.ptr->external_scanner_state; const char *data = ts_external_scanner_state_data(state); fprintf(f, "\nexternal_scanner_state:"); for (uint32_t j = 0; j < state->length; j++) fprintf(f, " %2X", data[j]); } fprintf(f, "\"]\n"); array_push(&self->iterators, ((StackIterator) { .node = head->node })); } bool all_iterators_done = false; while (!all_iterators_done) { all_iterators_done = true; for (uint32_t i = 0; i < self->iterators.size; i++) { StackIterator iterator = *array_get(&self->iterators, i); StackNode *node = iterator.node; for (uint32_t j = 0; j < visited_nodes.size; j++) { if (*array_get(&visited_nodes, j) == node) { node = NULL; break; } } if (!node) continue; all_iterators_done = false; fprintf(f, "node_%p [", (void *)node); if (node->state == ERROR_STATE) { fprintf(f, "label=\"?\""); } else if ( node->link_count == 1 && node->links[0].subtree.ptr && ts_subtree_extra(node->links[0].subtree) ) { fprintf(f, "shape=point margin=0 label=\"\""); } else { fprintf(f, "label=\"%d\"", node->state); } fprintf( f, " tooltip=\"position: %u,%u\nnode_count:%u\nerror_cost: %u\ndynamic_precedence: %d\"];\n", node->position.extent.row + 1, node->position.extent.column, node->node_count, node->error_cost, node->dynamic_precedence ); for (int j = 0; j < node->link_count; j++) { StackLink link = node->links[j]; fprintf(f, "node_%p -> node_%p [", (void *)node, (void *)link.node); if (link.is_pending) fprintf(f, "style=dashed "); if (link.subtree.ptr && ts_subtree_extra(link.subtree)) fprintf(f, "fontcolor=gray "); if (!link.subtree.ptr) { fprintf(f, "color=red"); } else { fprintf(f, "label=\""); bool quoted = ts_subtree_visible(link.subtree) && !ts_subtree_named(link.subtree); if (quoted) fprintf(f, "'"); ts_language_write_symbol_as_dot_string(language, f, ts_subtree_symbol(link.subtree)); if (quoted) fprintf(f, "'"); fprintf(f, "\""); fprintf( f, "labeltooltip=\"error_cost: %u\ndynamic_precedence: %" PRId32 "\"", ts_subtree_error_cost(link.subtree), ts_subtree_dynamic_precedence(link.subtree) ); } fprintf(f, "];\n"); StackIterator *next_iterator; if (j == 0) { next_iterator = array_get(&self->iterators, i); } else { array_push(&self->iterators, iterator); next_iterator = array_back(&self->iterators); } next_iterator->node = link.node; } array_push(&visited_nodes, node); } } fprintf(f, "}\n"); array_delete(&visited_nodes); return true; } #undef forceinline hx-0.3.0+20250717/bindings/vendor/src/stack.h000066400000000000000000000114101503625671400202010ustar00rootroot00000000000000#ifndef TREE_SITTER_PARSE_STACK_H_ #define TREE_SITTER_PARSE_STACK_H_ #ifdef __cplusplus extern "C" { #endif #include "./array.h" #include "./subtree.h" #include typedef struct Stack Stack; typedef unsigned StackVersion; #define STACK_VERSION_NONE ((StackVersion)-1) typedef struct { SubtreeArray subtrees; StackVersion version; } StackSlice; typedef Array(StackSlice) StackSliceArray; typedef struct { Length position; unsigned depth; TSStateId state; } StackSummaryEntry; typedef Array(StackSummaryEntry) StackSummary; // Create a stack. Stack *ts_stack_new(SubtreePool *subtree_pool); // Release the memory reserved for a given stack. void ts_stack_delete(Stack *self); // Get the stack's current number of versions. uint32_t ts_stack_version_count(const Stack *self); // Get the stack's current number of halted versions. 
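// Versions are marked halted via `ts_stack_halt` and are counted here until they are removed.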
uint32_t ts_stack_halted_version_count(Stack *self); // Get the state at the top of the given version of the stack. If the stack is // empty, this returns the initial state, 0. TSStateId ts_stack_state(const Stack *self, StackVersion version); // Get the last external token associated with a given version of the stack. Subtree ts_stack_last_external_token(const Stack *self, StackVersion version); // Set the last external token associated with a given version of the stack. void ts_stack_set_last_external_token(Stack *self, StackVersion version, Subtree token); // Get the position of the given version of the stack within the document. Length ts_stack_position(const Stack *, StackVersion); // Push a tree and state onto the given version of the stack. // // This transfers ownership of the tree to the Stack. Callers that // need to retain ownership of the tree for their own purposes should // first retain the tree. void ts_stack_push(Stack *self, StackVersion version, Subtree subtree, bool pending, TSStateId state); // Pop the given number of entries from the given version of the stack. This // operation can increase the number of stack versions by revealing multiple // versions which had previously been merged. It returns an array that // specifies the index of each revealed version and the trees that were // removed from that version. StackSliceArray ts_stack_pop_count(Stack *self, StackVersion version, uint32_t count); // Remove an error at the top of the given version of the stack. SubtreeArray ts_stack_pop_error(Stack *self, StackVersion version); // Remove any pending trees from the top of the given version of the stack. StackSliceArray ts_stack_pop_pending(Stack *self, StackVersion version); // Remove all trees from the given version of the stack. StackSliceArray ts_stack_pop_all(Stack *self, StackVersion version); // Get the maximum number of tree nodes reachable from this version of the stack // since the last error was detected. unsigned ts_stack_node_count_since_error(const Stack *self, StackVersion version); int ts_stack_dynamic_precedence(Stack *self, StackVersion version); bool ts_stack_has_advanced_since_error(const Stack *self, StackVersion version); // Compute a summary of all the parse states near the top of the given // version of the stack and store the summary for later retrieval. void ts_stack_record_summary(Stack *self, StackVersion version, unsigned max_depth); // Retrieve a summary of all the parse states near the top of the // given version of the stack. StackSummary *ts_stack_get_summary(Stack *self, StackVersion version); // Get the total cost of all errors on the given version of the stack. unsigned ts_stack_error_cost(const Stack *self, StackVersion version); // Merge the given two stack versions if possible, returning true // if they were successfully merged and false otherwise. bool ts_stack_merge(Stack *self, StackVersion version1, StackVersion version2); // Determine whether the given two stack versions can be merged. 
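// Per the implementation in stack.c, both versions must be active and their head nodes must
// share the same parse state, byte position, error cost, and external scanner state.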
bool ts_stack_can_merge(Stack *self, StackVersion version1, StackVersion version2); Subtree ts_stack_resume(Stack *self, StackVersion version); void ts_stack_pause(Stack *self, StackVersion version, Subtree lookahead); void ts_stack_halt(Stack *self, StackVersion version); bool ts_stack_is_active(const Stack *self, StackVersion version); bool ts_stack_is_paused(const Stack *self, StackVersion version); bool ts_stack_is_halted(const Stack *self, StackVersion version); void ts_stack_renumber_version(Stack *self, StackVersion v1, StackVersion v2); void ts_stack_swap_versions(Stack *, StackVersion v1, StackVersion v2); StackVersion ts_stack_copy_version(Stack *self, StackVersion version); // Remove the given version from the stack. void ts_stack_remove_version(Stack *self, StackVersion version); void ts_stack_clear(Stack *self); bool ts_stack_print_dot_graph(Stack *self, const TSLanguage *language, FILE *f); #ifdef __cplusplus } #endif #endif // TREE_SITTER_PARSE_STACK_H_ hx-0.3.0+20250717/bindings/vendor/src/subtree.c000066400000000000000000001033541503625671400205510ustar00rootroot00000000000000#include #include #include #include #include #include "./alloc.h" #include "./array.h" #include "./atomic.h" #include "./subtree.h" #include "./length.h" #include "./language.h" #include "./error_costs.h" #include "./ts_assert.h" #include typedef struct { Length start; Length old_end; Length new_end; } Edit; #define TS_MAX_INLINE_TREE_LENGTH UINT8_MAX #define TS_MAX_TREE_POOL_SIZE 32 // ExternalScannerState void ts_external_scanner_state_init(ExternalScannerState *self, const char *data, unsigned length) { self->length = length; if (length > sizeof(self->short_data)) { self->long_data = ts_malloc(length); memcpy(self->long_data, data, length); } else { memcpy(self->short_data, data, length); } } ExternalScannerState ts_external_scanner_state_copy(const ExternalScannerState *self) { ExternalScannerState result = *self; if (self->length > sizeof(self->short_data)) { result.long_data = ts_malloc(self->length); memcpy(result.long_data, self->long_data, self->length); } return result; } void ts_external_scanner_state_delete(ExternalScannerState *self) { if (self->length > sizeof(self->short_data)) { ts_free(self->long_data); } } const char *ts_external_scanner_state_data(const ExternalScannerState *self) { if (self->length > sizeof(self->short_data)) { return self->long_data; } else { return self->short_data; } } bool ts_external_scanner_state_eq(const ExternalScannerState *self, const char *buffer, unsigned length) { return self->length == length && memcmp(ts_external_scanner_state_data(self), buffer, length) == 0; } // SubtreeArray void ts_subtree_array_copy(SubtreeArray self, SubtreeArray *dest) { dest->size = self.size; dest->capacity = self.capacity; dest->contents = self.contents; if (self.capacity > 0) { dest->contents = ts_calloc(self.capacity, sizeof(Subtree)); memcpy(dest->contents, self.contents, self.size * sizeof(Subtree)); for (uint32_t i = 0; i < self.size; i++) { ts_subtree_retain(*array_get(dest, i)); } } } void ts_subtree_array_clear(SubtreePool *pool, SubtreeArray *self) { for (uint32_t i = 0; i < self->size; i++) { ts_subtree_release(pool, *array_get(self, i)); } array_clear(self); } void ts_subtree_array_delete(SubtreePool *pool, SubtreeArray *self) { ts_subtree_array_clear(pool, self); array_delete(self); } void ts_subtree_array_remove_trailing_extras( SubtreeArray *self, SubtreeArray *destination ) { array_clear(destination); while (self->size > 0) { Subtree last = *array_get(self, 
self->size - 1); if (ts_subtree_extra(last)) { self->size--; array_push(destination, last); } else { break; } } ts_subtree_array_reverse(destination); } void ts_subtree_array_reverse(SubtreeArray *self) { for (uint32_t i = 0, limit = self->size / 2; i < limit; i++) { size_t reverse_index = self->size - 1 - i; Subtree swap = *array_get(self, i); *array_get(self, i) = *array_get(self, reverse_index); *array_get(self, reverse_index) = swap; } } // SubtreePool SubtreePool ts_subtree_pool_new(uint32_t capacity) { SubtreePool self = {array_new(), array_new()}; array_reserve(&self.free_trees, capacity); return self; } void ts_subtree_pool_delete(SubtreePool *self) { if (self->free_trees.contents) { for (unsigned i = 0; i < self->free_trees.size; i++) { ts_free(array_get(&self->free_trees, i)->ptr); } array_delete(&self->free_trees); } if (self->tree_stack.contents) array_delete(&self->tree_stack); } static SubtreeHeapData *ts_subtree_pool_allocate(SubtreePool *self) { if (self->free_trees.size > 0) { return array_pop(&self->free_trees).ptr; } else { return ts_malloc(sizeof(SubtreeHeapData)); } } static void ts_subtree_pool_free(SubtreePool *self, SubtreeHeapData *tree) { if (self->free_trees.capacity > 0 && self->free_trees.size + 1 <= TS_MAX_TREE_POOL_SIZE) { array_push(&self->free_trees, (MutableSubtree) {.ptr = tree}); } else { ts_free(tree); } } // Subtree static inline bool ts_subtree_can_inline(Length padding, Length size, uint32_t lookahead_bytes) { return padding.bytes < TS_MAX_INLINE_TREE_LENGTH && padding.extent.row < 16 && padding.extent.column < TS_MAX_INLINE_TREE_LENGTH && size.bytes < TS_MAX_INLINE_TREE_LENGTH && size.extent.row == 0 && size.extent.column < TS_MAX_INLINE_TREE_LENGTH && lookahead_bytes < 16; } Subtree ts_subtree_new_leaf( SubtreePool *pool, TSSymbol symbol, Length padding, Length size, uint32_t lookahead_bytes, TSStateId parse_state, bool has_external_tokens, bool depends_on_column, bool is_keyword, const TSLanguage *language ) { TSSymbolMetadata metadata = ts_language_symbol_metadata(language, symbol); bool extra = symbol == ts_builtin_sym_end; bool is_inline = ( symbol <= UINT8_MAX && !has_external_tokens && ts_subtree_can_inline(padding, size, lookahead_bytes) ); if (is_inline) { return (Subtree) {{ .parse_state = parse_state, .symbol = symbol, .padding_bytes = padding.bytes, .padding_rows = padding.extent.row, .padding_columns = padding.extent.column, .size_bytes = size.bytes, .lookahead_bytes = lookahead_bytes, .visible = metadata.visible, .named = metadata.named, .extra = extra, .has_changes = false, .is_missing = false, .is_keyword = is_keyword, .is_inline = true, }}; } else { SubtreeHeapData *data = ts_subtree_pool_allocate(pool); *data = (SubtreeHeapData) { .ref_count = 1, .padding = padding, .size = size, .lookahead_bytes = lookahead_bytes, .error_cost = 0, .child_count = 0, .symbol = symbol, .parse_state = parse_state, .visible = metadata.visible, .named = metadata.named, .extra = extra, .fragile_left = false, .fragile_right = false, .has_changes = false, .has_external_tokens = has_external_tokens, .has_external_scanner_state_change = false, .depends_on_column = depends_on_column, .is_missing = false, .is_keyword = is_keyword, {{.first_leaf = {.symbol = 0, .parse_state = 0}}} }; return (Subtree) {.ptr = data}; } } void ts_subtree_set_symbol( MutableSubtree *self, TSSymbol symbol, const TSLanguage *language ) { TSSymbolMetadata metadata = ts_language_symbol_metadata(language, symbol); if (self->data.is_inline) { ts_assert(symbol < UINT8_MAX); 
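// Inline subtrees store their symbol in a single byte, hence the assertion above.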
self->data.symbol = symbol; self->data.named = metadata.named; self->data.visible = metadata.visible; } else { self->ptr->symbol = symbol; self->ptr->named = metadata.named; self->ptr->visible = metadata.visible; } } Subtree ts_subtree_new_error( SubtreePool *pool, int32_t lookahead_char, Length padding, Length size, uint32_t bytes_scanned, TSStateId parse_state, const TSLanguage *language ) { Subtree result = ts_subtree_new_leaf( pool, ts_builtin_sym_error, padding, size, bytes_scanned, parse_state, false, false, false, language ); SubtreeHeapData *data = (SubtreeHeapData *)result.ptr; data->fragile_left = true; data->fragile_right = true; data->lookahead_char = lookahead_char; return result; } // Clone a subtree. MutableSubtree ts_subtree_clone(Subtree self) { size_t alloc_size = ts_subtree_alloc_size(self.ptr->child_count); Subtree *new_children = ts_malloc(alloc_size); Subtree *old_children = ts_subtree_children(self); memcpy(new_children, old_children, alloc_size); SubtreeHeapData *result = (SubtreeHeapData *)&new_children[self.ptr->child_count]; if (self.ptr->child_count > 0) { for (uint32_t i = 0; i < self.ptr->child_count; i++) { ts_subtree_retain(new_children[i]); } } else if (self.ptr->has_external_tokens) { result->external_scanner_state = ts_external_scanner_state_copy( &self.ptr->external_scanner_state ); } result->ref_count = 1; return (MutableSubtree) {.ptr = result}; } // Get mutable version of a subtree. // // This takes ownership of the subtree. If the subtree has only one owner, // this will directly convert it into a mutable version. Otherwise, it will // perform a copy. MutableSubtree ts_subtree_make_mut(SubtreePool *pool, Subtree self) { if (self.data.is_inline) return (MutableSubtree) {self.data}; if (self.ptr->ref_count == 1) return ts_subtree_to_mut_unsafe(self); MutableSubtree result = ts_subtree_clone(self); ts_subtree_release(pool, self); return result; } void ts_subtree_compress( MutableSubtree self, unsigned count, const TSLanguage *language, MutableSubtreeArray *stack ) { unsigned initial_stack_size = stack->size; MutableSubtree tree = self; TSSymbol symbol = tree.ptr->symbol; for (unsigned i = 0; i < count; i++) { if (tree.ptr->ref_count > 1 || tree.ptr->child_count < 2) break; MutableSubtree child = ts_subtree_to_mut_unsafe(ts_subtree_children(tree)[0]); if ( child.data.is_inline || child.ptr->child_count < 2 || child.ptr->ref_count > 1 || child.ptr->symbol != symbol ) break; MutableSubtree grandchild = ts_subtree_to_mut_unsafe(ts_subtree_children(child)[0]); if ( grandchild.data.is_inline || grandchild.ptr->child_count < 2 || grandchild.ptr->ref_count > 1 || grandchild.ptr->symbol != symbol ) break; ts_subtree_children(tree)[0] = ts_subtree_from_mut(grandchild); ts_subtree_children(child)[0] = ts_subtree_children(grandchild)[grandchild.ptr->child_count - 1]; ts_subtree_children(grandchild)[grandchild.ptr->child_count - 1] = ts_subtree_from_mut(child); array_push(stack, tree); tree = grandchild; } while (stack->size > initial_stack_size) { tree = array_pop(stack); MutableSubtree child = ts_subtree_to_mut_unsafe(ts_subtree_children(tree)[0]); MutableSubtree grandchild = ts_subtree_to_mut_unsafe(ts_subtree_children(child)[child.ptr->child_count - 1]); ts_subtree_summarize_children(grandchild, language); ts_subtree_summarize_children(child, language); ts_subtree_summarize_children(tree, language); } } // Assign all of the node's properties that depend on its children. 
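// This recomputes the node's padding, size, lookahead bytes, error cost, child and descendant
// counts, dynamic precedence, repeat depth, and fragility flags from its current children.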
void ts_subtree_summarize_children( MutableSubtree self, const TSLanguage *language ) { ts_assert(!self.data.is_inline); self.ptr->named_child_count = 0; self.ptr->visible_child_count = 0; self.ptr->error_cost = 0; self.ptr->repeat_depth = 0; self.ptr->visible_descendant_count = 0; self.ptr->has_external_tokens = false; self.ptr->depends_on_column = false; self.ptr->has_external_scanner_state_change = false; self.ptr->dynamic_precedence = 0; uint32_t structural_index = 0; const TSSymbol *alias_sequence = ts_language_alias_sequence(language, self.ptr->production_id); uint32_t lookahead_end_byte = 0; const Subtree *children = ts_subtree_children(self); for (uint32_t i = 0; i < self.ptr->child_count; i++) { Subtree child = children[i]; if ( self.ptr->size.extent.row == 0 && ts_subtree_depends_on_column(child) ) { self.ptr->depends_on_column = true; } if (ts_subtree_has_external_scanner_state_change(child)) { self.ptr->has_external_scanner_state_change = true; } if (i == 0) { self.ptr->padding = ts_subtree_padding(child); self.ptr->size = ts_subtree_size(child); } else { self.ptr->size = length_add(self.ptr->size, ts_subtree_total_size(child)); } uint32_t child_lookahead_end_byte = self.ptr->padding.bytes + self.ptr->size.bytes + ts_subtree_lookahead_bytes(child); if (child_lookahead_end_byte > lookahead_end_byte) { lookahead_end_byte = child_lookahead_end_byte; } if (ts_subtree_symbol(child) != ts_builtin_sym_error_repeat) { self.ptr->error_cost += ts_subtree_error_cost(child); } uint32_t grandchild_count = ts_subtree_child_count(child); if ( self.ptr->symbol == ts_builtin_sym_error || self.ptr->symbol == ts_builtin_sym_error_repeat ) { if (!ts_subtree_extra(child) && !(ts_subtree_is_error(child) && grandchild_count == 0)) { if (ts_subtree_visible(child)) { self.ptr->error_cost += ERROR_COST_PER_SKIPPED_TREE; } else if (grandchild_count > 0) { self.ptr->error_cost += ERROR_COST_PER_SKIPPED_TREE * child.ptr->visible_child_count; } } } self.ptr->dynamic_precedence += ts_subtree_dynamic_precedence(child); self.ptr->visible_descendant_count += ts_subtree_visible_descendant_count(child); if ( !ts_subtree_extra(child) && ts_subtree_symbol(child) != 0 && alias_sequence && alias_sequence[structural_index] != 0 ) { self.ptr->visible_descendant_count++; self.ptr->visible_child_count++; if (ts_language_symbol_metadata(language, alias_sequence[structural_index]).named) { self.ptr->named_child_count++; } } else if (ts_subtree_visible(child)) { self.ptr->visible_descendant_count++; self.ptr->visible_child_count++; if (ts_subtree_named(child)) self.ptr->named_child_count++; } else if (grandchild_count > 0) { self.ptr->visible_child_count += child.ptr->visible_child_count; self.ptr->named_child_count += child.ptr->named_child_count; } if (ts_subtree_has_external_tokens(child)) self.ptr->has_external_tokens = true; if (ts_subtree_is_error(child)) { self.ptr->fragile_left = self.ptr->fragile_right = true; self.ptr->parse_state = TS_TREE_STATE_NONE; } if (!ts_subtree_extra(child)) structural_index++; } self.ptr->lookahead_bytes = lookahead_end_byte - self.ptr->size.bytes - self.ptr->padding.bytes; if ( self.ptr->symbol == ts_builtin_sym_error || self.ptr->symbol == ts_builtin_sym_error_repeat ) { self.ptr->error_cost += ERROR_COST_PER_RECOVERY + ERROR_COST_PER_SKIPPED_CHAR * self.ptr->size.bytes + ERROR_COST_PER_SKIPPED_LINE * self.ptr->size.extent.row; } if (self.ptr->child_count > 0) { Subtree first_child = children[0]; Subtree last_child = children[self.ptr->child_count - 1]; self.ptr->first_leaf.symbol = 
ts_subtree_leaf_symbol(first_child); self.ptr->first_leaf.parse_state = ts_subtree_leaf_parse_state(first_child); if (ts_subtree_fragile_left(first_child)) self.ptr->fragile_left = true; if (ts_subtree_fragile_right(last_child)) self.ptr->fragile_right = true; if ( self.ptr->child_count >= 2 && !self.ptr->visible && !self.ptr->named && ts_subtree_symbol(first_child) == self.ptr->symbol ) { if (ts_subtree_repeat_depth(first_child) > ts_subtree_repeat_depth(last_child)) { self.ptr->repeat_depth = ts_subtree_repeat_depth(first_child) + 1; } else { self.ptr->repeat_depth = ts_subtree_repeat_depth(last_child) + 1; } } } } // Create a new parent node with the given children. // // This takes ownership of the children array. MutableSubtree ts_subtree_new_node( TSSymbol symbol, SubtreeArray *children, unsigned production_id, const TSLanguage *language ) { TSSymbolMetadata metadata = ts_language_symbol_metadata(language, symbol); bool fragile = symbol == ts_builtin_sym_error || symbol == ts_builtin_sym_error_repeat; // Allocate the node's data at the end of the array of children. size_t new_byte_size = ts_subtree_alloc_size(children->size); if (children->capacity * sizeof(Subtree) < new_byte_size) { children->contents = ts_realloc(children->contents, new_byte_size); children->capacity = (uint32_t)(new_byte_size / sizeof(Subtree)); } SubtreeHeapData *data = (SubtreeHeapData *)&children->contents[children->size]; *data = (SubtreeHeapData) { .ref_count = 1, .symbol = symbol, .child_count = children->size, .visible = metadata.visible, .named = metadata.named, .has_changes = false, .has_external_scanner_state_change = false, .fragile_left = fragile, .fragile_right = fragile, .is_keyword = false, {{ .visible_descendant_count = 0, .production_id = production_id, .first_leaf = {.symbol = 0, .parse_state = 0}, }} }; MutableSubtree result = {.ptr = data}; ts_subtree_summarize_children(result, language); return result; } // Create a new error node containing the given children. // // This node is treated as 'extra'. Its children are prevented from having // any effect on the parse state. Subtree ts_subtree_new_error_node( SubtreeArray *children, bool extra, const TSLanguage *language ) { MutableSubtree result = ts_subtree_new_node( ts_builtin_sym_error, children, 0, language ); result.ptr->extra = extra; return ts_subtree_from_mut(result); } // Create a new 'missing leaf' node. // // This node is treated as 'extra'. Its children are prevented from having // any effect on the parse state.
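// The resulting leaf has zero size: it consumes no source bytes, carries only the padding and
// lookahead bytes passed in, and is flagged as missing.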
Subtree ts_subtree_new_missing_leaf( SubtreePool *pool, TSSymbol symbol, Length padding, uint32_t lookahead_bytes, const TSLanguage *language ) { Subtree result = ts_subtree_new_leaf( pool, symbol, padding, length_zero(), lookahead_bytes, 0, false, false, false, language ); if (result.data.is_inline) { result.data.is_missing = true; } else { ((SubtreeHeapData *)result.ptr)->is_missing = true; } return result; } void ts_subtree_retain(Subtree self) { if (self.data.is_inline) return; ts_assert(self.ptr->ref_count > 0); atomic_inc((volatile uint32_t *)&self.ptr->ref_count); ts_assert(self.ptr->ref_count != 0); } void ts_subtree_release(SubtreePool *pool, Subtree self) { if (self.data.is_inline) return; array_clear(&pool->tree_stack); ts_assert(self.ptr->ref_count > 0); if (atomic_dec((volatile uint32_t *)&self.ptr->ref_count) == 0) { array_push(&pool->tree_stack, ts_subtree_to_mut_unsafe(self)); } while (pool->tree_stack.size > 0) { MutableSubtree tree = array_pop(&pool->tree_stack); if (tree.ptr->child_count > 0) { Subtree *children = ts_subtree_children(tree); for (uint32_t i = 0; i < tree.ptr->child_count; i++) { Subtree child = children[i]; if (child.data.is_inline) continue; ts_assert(child.ptr->ref_count > 0); if (atomic_dec((volatile uint32_t *)&child.ptr->ref_count) == 0) { array_push(&pool->tree_stack, ts_subtree_to_mut_unsafe(child)); } } ts_free(children); } else { if (tree.ptr->has_external_tokens) { ts_external_scanner_state_delete(&tree.ptr->external_scanner_state); } ts_subtree_pool_free(pool, tree.ptr); } } } int ts_subtree_compare(Subtree left, Subtree right, SubtreePool *pool) { array_push(&pool->tree_stack, ts_subtree_to_mut_unsafe(left)); array_push(&pool->tree_stack, ts_subtree_to_mut_unsafe(right)); while (pool->tree_stack.size > 0) { right = ts_subtree_from_mut(array_pop(&pool->tree_stack)); left = ts_subtree_from_mut(array_pop(&pool->tree_stack)); int result = 0; if (ts_subtree_symbol(left) < ts_subtree_symbol(right)) result = -1; else if (ts_subtree_symbol(right) < ts_subtree_symbol(left)) result = 1; else if (ts_subtree_child_count(left) < ts_subtree_child_count(right)) result = -1; else if (ts_subtree_child_count(right) < ts_subtree_child_count(left)) result = 1; if (result != 0) { array_clear(&pool->tree_stack); return result; } for (uint32_t i = ts_subtree_child_count(left); i > 0; i--) { Subtree left_child = ts_subtree_children(left)[i - 1]; Subtree right_child = ts_subtree_children(right)[i - 1]; array_push(&pool->tree_stack, ts_subtree_to_mut_unsafe(left_child)); array_push(&pool->tree_stack, ts_subtree_to_mut_unsafe(right_child)); } } return 0; } static inline void ts_subtree_set_has_changes(MutableSubtree *self) { if (self->data.is_inline) { self->data.has_changes = true; } else { self->ptr->has_changes = true; } } Subtree ts_subtree_edit(Subtree self, const TSInputEdit *input_edit, SubtreePool *pool) { typedef struct { Subtree *tree; Edit edit; } EditEntry; Array(EditEntry) stack = array_new(); array_push(&stack, ((EditEntry) { .tree = &self, .edit = (Edit) { .start = {input_edit->start_byte, input_edit->start_point}, .old_end = {input_edit->old_end_byte, input_edit->old_end_point}, .new_end = {input_edit->new_end_byte, input_edit->new_end_point}, }, })); while (stack.size) { EditEntry entry = array_pop(&stack); Edit edit = entry.edit; bool is_noop = edit.old_end.bytes == edit.start.bytes && edit.new_end.bytes == edit.start.bytes; bool is_pure_insertion = edit.old_end.bytes == edit.start.bytes; bool parent_depends_on_column = 
ts_subtree_depends_on_column(*entry.tree); bool column_shifted = edit.new_end.extent.column != edit.old_end.extent.column; Length size = ts_subtree_size(*entry.tree); Length padding = ts_subtree_padding(*entry.tree); Length total_size = length_add(padding, size); uint32_t lookahead_bytes = ts_subtree_lookahead_bytes(*entry.tree); uint32_t end_byte = total_size.bytes + lookahead_bytes; if (edit.start.bytes > end_byte || (is_noop && edit.start.bytes == end_byte)) continue; // If the edit is entirely within the space before this subtree, then shift this // subtree over according to the edit without changing its size. if (edit.old_end.bytes <= padding.bytes) { padding = length_add(edit.new_end, length_sub(padding, edit.old_end)); } // If the edit starts in the space before this subtree and extends into this subtree, // shrink the subtree's content to compensate for the change in the space before it. else if (edit.start.bytes < padding.bytes) { size = length_saturating_sub(size, length_sub(edit.old_end, padding)); padding = edit.new_end; } // If the edit is within this subtree, resize the subtree to reflect the edit. else if ( edit.start.bytes < total_size.bytes || (edit.start.bytes == total_size.bytes && is_pure_insertion) ) { size = length_add( length_sub(edit.new_end, padding), length_saturating_sub(total_size, edit.old_end) ); } MutableSubtree result = ts_subtree_make_mut(pool, *entry.tree); if (result.data.is_inline) { if (ts_subtree_can_inline(padding, size, lookahead_bytes)) { result.data.padding_bytes = padding.bytes; result.data.padding_rows = padding.extent.row; result.data.padding_columns = padding.extent.column; result.data.size_bytes = size.bytes; } else { SubtreeHeapData *data = ts_subtree_pool_allocate(pool); data->ref_count = 1; data->padding = padding; data->size = size; data->lookahead_bytes = lookahead_bytes; data->error_cost = 0; data->child_count = 0; data->symbol = result.data.symbol; data->parse_state = result.data.parse_state; data->visible = result.data.visible; data->named = result.data.named; data->extra = result.data.extra; data->fragile_left = false; data->fragile_right = false; data->has_changes = false; data->has_external_tokens = false; data->depends_on_column = false; data->is_missing = result.data.is_missing; data->is_keyword = result.data.is_keyword; result.ptr = data; } } else { result.ptr->padding = padding; result.ptr->size = size; } ts_subtree_set_has_changes(&result); *entry.tree = ts_subtree_from_mut(result); Length child_left, child_right = length_zero(); for (uint32_t i = 0, n = ts_subtree_child_count(*entry.tree); i < n; i++) { Subtree *child = &ts_subtree_children(*entry.tree)[i]; Length child_size = ts_subtree_total_size(*child); child_left = child_right; child_right = length_add(child_left, child_size); // If this child ends before the edit, it is not affected. if (child_right.bytes + ts_subtree_lookahead_bytes(*child) < edit.start.bytes) continue; // Keep editing child nodes until a node is reached that starts after the edit. // Also, if this node's validity depends on its column position, then continue // invalidating child nodes until reaching a line break. if (( (child_left.bytes > edit.old_end.bytes) || (child_left.bytes == edit.old_end.bytes && child_size.bytes > 0 && i > 0) ) && ( !parent_depends_on_column || child_left.extent.row > padding.extent.row ) && ( !ts_subtree_depends_on_column(*child) || !column_shifted || child_left.extent.row > edit.old_end.extent.row )) { break; } // Transform edit into the child's coordinate space. 
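// Each edit boundary is shifted by the child's start offset using saturating subtraction, so
// positions that fall before the child clamp to zero rather than underflowing.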
Edit child_edit = { .start = length_saturating_sub(edit.start, child_left), .old_end = length_saturating_sub(edit.old_end, child_left), .new_end = length_saturating_sub(edit.new_end, child_left), }; // Interpret all inserted text as applying to the *first* child that touches the edit. // Subsequent children never have any text inserted into them; they are only // shrunk to compensate for the edit. if ( child_right.bytes > edit.start.bytes || (child_right.bytes == edit.start.bytes && is_pure_insertion) ) { edit.new_end = edit.start; } // Children that occur before the edit are not reshaped by the edit. else { child_edit.old_end = child_edit.start; child_edit.new_end = child_edit.start; } // Queue processing of this child's subtree. array_push(&stack, ((EditEntry) { .tree = child, .edit = child_edit, })); } } array_delete(&stack); return self; } Subtree ts_subtree_last_external_token(Subtree tree) { if (!ts_subtree_has_external_tokens(tree)) return NULL_SUBTREE; while (tree.ptr->child_count > 0) { for (uint32_t i = tree.ptr->child_count - 1; i + 1 > 0; i--) { Subtree child = ts_subtree_children(tree)[i]; if (ts_subtree_has_external_tokens(child)) { tree = child; break; } } } return tree; } static size_t ts_subtree__write_char_to_string(char *str, size_t n, int32_t chr) { if (chr == -1) return snprintf(str, n, "INVALID"); else if (chr == '\0') return snprintf(str, n, "'\\0'"); else if (chr == '\n') return snprintf(str, n, "'\\n'"); else if (chr == '\t') return snprintf(str, n, "'\\t'"); else if (chr == '\r') return snprintf(str, n, "'\\r'"); else if (0 < chr && chr < 128 && isprint(chr)) return snprintf(str, n, "'%c'", chr); else return snprintf(str, n, "%d", chr); } static const char *const ROOT_FIELD = "__ROOT__"; static size_t ts_subtree__write_to_string( Subtree self, char *string, size_t limit, const TSLanguage *language, bool include_all, TSSymbol alias_symbol, bool alias_is_named, const char *field_name ) { if (!self.ptr) return snprintf(string, limit, "(NULL)"); char *cursor = string; char **writer = (limit > 1) ? &cursor : &string; bool is_root = field_name == ROOT_FIELD; bool is_visible = include_all || ts_subtree_missing(self) || ( alias_symbol ? alias_is_named : ts_subtree_visible(self) && ts_subtree_named(self) ); if (is_visible) { if (!is_root) { cursor += snprintf(*writer, limit, " "); if (field_name) { cursor += snprintf(*writer, limit, "%s: ", field_name); } } if (ts_subtree_is_error(self) && ts_subtree_child_count(self) == 0 && self.ptr->size.bytes > 0) { cursor += snprintf(*writer, limit, "(UNEXPECTED "); cursor += ts_subtree__write_char_to_string(*writer, limit, self.ptr->lookahead_char); } else { TSSymbol symbol = alias_symbol ? alias_symbol : ts_subtree_symbol(self); const char *symbol_name = ts_language_symbol_name(language, symbol); if (ts_subtree_missing(self)) { cursor += snprintf(*writer, limit, "(MISSING "); if (alias_is_named || ts_subtree_named(self)) { cursor += snprintf(*writer, limit, "%s", symbol_name); } else { cursor += snprintf(*writer, limit, "\"%s\"", symbol_name); } } else { cursor += snprintf(*writer, limit, "(%s", symbol_name); } } } else if (is_root) { TSSymbol symbol = alias_symbol ?
alias_symbol : ts_subtree_symbol(self); const char *symbol_name = ts_language_symbol_name(language, symbol); if (ts_subtree_child_count(self) > 0) { cursor += snprintf(*writer, limit, "(%s", symbol_name); } else if (ts_subtree_named(self)) { cursor += snprintf(*writer, limit, "(%s)", symbol_name); } else { cursor += snprintf(*writer, limit, "(\"%s\")", symbol_name); } } if (ts_subtree_child_count(self)) { const TSSymbol *alias_sequence = ts_language_alias_sequence(language, self.ptr->production_id); const TSFieldMapEntry *field_map, *field_map_end; ts_language_field_map( language, self.ptr->production_id, &field_map, &field_map_end ); uint32_t structural_child_index = 0; for (uint32_t i = 0; i < self.ptr->child_count; i++) { Subtree child = ts_subtree_children(self)[i]; if (ts_subtree_extra(child)) { cursor += ts_subtree__write_to_string( child, *writer, limit, language, include_all, 0, false, NULL ); } else { TSSymbol subtree_alias_symbol = alias_sequence ? alias_sequence[structural_child_index] : 0; bool subtree_alias_is_named = subtree_alias_symbol ? ts_language_symbol_metadata(language, subtree_alias_symbol).named : false; const char *child_field_name = is_visible ? NULL : field_name; for (const TSFieldMapEntry *map = field_map; map < field_map_end; map++) { if (!map->inherited && map->child_index == structural_child_index) { child_field_name = language->field_names[map->field_id]; break; } } cursor += ts_subtree__write_to_string( child, *writer, limit, language, include_all, subtree_alias_symbol, subtree_alias_is_named, child_field_name ); structural_child_index++; } } } if (is_visible) cursor += snprintf(*writer, limit, ")"); return cursor - string; } char *ts_subtree_string( Subtree self, TSSymbol alias_symbol, bool alias_is_named, const TSLanguage *language, bool include_all ) { char scratch_string[1]; size_t size = ts_subtree__write_to_string( self, scratch_string, 1, language, include_all, alias_symbol, alias_is_named, ROOT_FIELD ) + 1; char *result = ts_malloc(size * sizeof(char)); ts_subtree__write_to_string( self, result, size, language, include_all, alias_symbol, alias_is_named, ROOT_FIELD ); return result; } void ts_subtree__print_dot_graph(const Subtree *self, uint32_t start_offset, const TSLanguage *language, TSSymbol alias_symbol, FILE *f) { TSSymbol subtree_symbol = ts_subtree_symbol(*self); TSSymbol symbol = alias_symbol ? 
alias_symbol : subtree_symbol; uint32_t end_offset = start_offset + ts_subtree_total_bytes(*self); fprintf(f, "tree_%p [label=\"", (void *)self); ts_language_write_symbol_as_dot_string(language, f, symbol); fprintf(f, "\""); if (ts_subtree_child_count(*self) == 0) fprintf(f, ", shape=plaintext"); if (ts_subtree_extra(*self)) fprintf(f, ", fontcolor=gray"); if (ts_subtree_has_changes(*self)) fprintf(f, ", color=green, penwidth=2"); fprintf(f, ", tooltip=\"" "range: %u - %u\n" "state: %d\n" "error-cost: %u\n" "has-changes: %u\n" "depends-on-column: %u\n" "descendant-count: %u\n" "repeat-depth: %u\n" "lookahead-bytes: %u", start_offset, end_offset, ts_subtree_parse_state(*self), ts_subtree_error_cost(*self), ts_subtree_has_changes(*self), ts_subtree_depends_on_column(*self), ts_subtree_visible_descendant_count(*self), ts_subtree_repeat_depth(*self), ts_subtree_lookahead_bytes(*self) ); if (ts_subtree_is_error(*self) && ts_subtree_child_count(*self) == 0 && self->ptr->lookahead_char != 0) { fprintf(f, "\ncharacter: '%c'", self->ptr->lookahead_char); } fprintf(f, "\"]\n"); uint32_t child_start_offset = start_offset; uint32_t child_info_offset = language->max_alias_sequence_length * ts_subtree_production_id(*self); for (uint32_t i = 0, n = ts_subtree_child_count(*self); i < n; i++) { const Subtree *child = &ts_subtree_children(*self)[i]; TSSymbol subtree_alias_symbol = 0; if (!ts_subtree_extra(*child) && child_info_offset) { subtree_alias_symbol = language->alias_sequences[child_info_offset]; child_info_offset++; } ts_subtree__print_dot_graph(child, child_start_offset, language, subtree_alias_symbol, f); fprintf(f, "tree_%p -> tree_%p [tooltip=%u]\n", (void *)self, (void *)child, i); child_start_offset += ts_subtree_total_bytes(*child); } } void ts_subtree_print_dot_graph(Subtree self, const TSLanguage *language, FILE *f) { fprintf(f, "digraph tree {\n"); fprintf(f, "edge [arrowhead=none]\n"); ts_subtree__print_dot_graph(&self, 0, language, 0, f); fprintf(f, "}\n"); } const ExternalScannerState *ts_subtree_external_scanner_state(Subtree self) { static const ExternalScannerState empty_state = {{.short_data = {0}}, .length = 0}; if ( self.ptr && !self.data.is_inline && self.ptr->has_external_tokens && self.ptr->child_count == 0 ) { return &self.ptr->external_scanner_state; } else { return &empty_state; } } bool ts_subtree_external_scanner_state_eq(Subtree self, Subtree other) { const ExternalScannerState *state_self = ts_subtree_external_scanner_state(self); const ExternalScannerState *state_other = ts_subtree_external_scanner_state(other); return ts_external_scanner_state_eq( state_self, ts_external_scanner_state_data(state_other), state_other->length ); } hx-0.3.0+20250717/bindings/vendor/src/subtree.h000066400000000000000000000303461503625671400205560ustar00rootroot00000000000000#ifndef TREE_SITTER_SUBTREE_H_ #define TREE_SITTER_SUBTREE_H_ #ifdef __cplusplus extern "C" { #endif #include #include #include #include "./length.h" #include "./array.h" #include "./error_costs.h" #include "./host.h" #include "tree_sitter/api.h" #include "./parser.h" #define TS_TREE_STATE_NONE USHRT_MAX #define NULL_SUBTREE ((Subtree) {.ptr = NULL}) // The serialized state of an external scanner. // // Every time an external token subtree is created after a call to an // external scanner, the scanner's `serialize` function is called to // retrieve a serialized copy of its state. The bytes are then copied // onto the subtree itself so that the scanner's state can later be // restored using its `deserialize` function. 
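// The stored bytes are also compared via `ts_subtree_external_scanner_state_eq`, for example
// when deciding whether two parse stack versions can be merged.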
// // Small byte arrays are stored inline, and long ones are allocated // separately on the heap. typedef struct { union { char *long_data; char short_data[24]; }; uint32_t length; } ExternalScannerState; // A compact representation of a subtree. // // This representation is used for small leaf nodes that are not // errors, and were not created by an external scanner. // // The idea behind the layout of this struct is that the `is_inline` // bit will fall exactly into the same location as the least significant // bit of the pointer in `Subtree` or `MutableSubtree`, respectively. // Because of alignment, for any valid pointer this will be 0, giving // us the opportunity to make use of this bit to signify whether to use // the pointer or the inline struct. typedef struct SubtreeInlineData SubtreeInlineData; #define SUBTREE_BITS \ bool visible : 1; \ bool named : 1; \ bool extra : 1; \ bool has_changes : 1; \ bool is_missing : 1; \ bool is_keyword : 1; #define SUBTREE_SIZE \ uint8_t padding_columns; \ uint8_t padding_rows : 4; \ uint8_t lookahead_bytes : 4; \ uint8_t padding_bytes; \ uint8_t size_bytes; #if TS_BIG_ENDIAN #if TS_PTR_SIZE == 32 struct SubtreeInlineData { uint16_t parse_state; uint8_t symbol; SUBTREE_BITS bool unused : 1; bool is_inline : 1; SUBTREE_SIZE }; #else struct SubtreeInlineData { SUBTREE_SIZE uint16_t parse_state; uint8_t symbol; SUBTREE_BITS bool unused : 1; bool is_inline : 1; }; #endif #else struct SubtreeInlineData { bool is_inline : 1; SUBTREE_BITS uint8_t symbol; uint16_t parse_state; SUBTREE_SIZE }; #endif #undef SUBTREE_BITS #undef SUBTREE_SIZE // A heap-allocated representation of a subtree. // // This representation is used for parent nodes, external tokens, // errors, and other leaf nodes whose data is too large to fit into // the inline representation. typedef struct { volatile uint32_t ref_count; Length padding; Length size; uint32_t lookahead_bytes; uint32_t error_cost; uint32_t child_count; TSSymbol symbol; TSStateId parse_state; bool visible : 1; bool named : 1; bool extra : 1; bool fragile_left : 1; bool fragile_right : 1; bool has_changes : 1; bool has_external_tokens : 1; bool has_external_scanner_state_change : 1; bool depends_on_column: 1; bool is_missing : 1; bool is_keyword : 1; union { // Non-terminal subtrees (`child_count > 0`) struct { uint32_t visible_child_count; uint32_t named_child_count; uint32_t visible_descendant_count; int32_t dynamic_precedence; uint16_t repeat_depth; uint16_t production_id; struct { TSSymbol symbol; TSStateId parse_state; } first_leaf; }; // External terminal subtrees (`child_count == 0 && has_external_tokens`) ExternalScannerState external_scanner_state; // Error terminal subtrees (`child_count == 0 && symbol == ts_builtin_sym_error`) int32_t lookahead_char; }; } SubtreeHeapData; // The fundamental building block of a syntax tree. typedef union { SubtreeInlineData data; const SubtreeHeapData *ptr; } Subtree; // Like Subtree, but mutable. 
typedef union { SubtreeInlineData data; SubtreeHeapData *ptr; } MutableSubtree; typedef Array(Subtree) SubtreeArray; typedef Array(MutableSubtree) MutableSubtreeArray; typedef struct { MutableSubtreeArray free_trees; MutableSubtreeArray tree_stack; } SubtreePool; void ts_external_scanner_state_init(ExternalScannerState *self, const char *data, unsigned length); const char *ts_external_scanner_state_data(const ExternalScannerState *self); bool ts_external_scanner_state_eq(const ExternalScannerState *self, const char *buffer, unsigned length); void ts_external_scanner_state_delete(ExternalScannerState *self); void ts_subtree_array_copy(SubtreeArray self, SubtreeArray *dest); void ts_subtree_array_clear(SubtreePool *pool, SubtreeArray *self); void ts_subtree_array_delete(SubtreePool *pool, SubtreeArray *self); void ts_subtree_array_remove_trailing_extras(SubtreeArray *self, SubtreeArray *destination); void ts_subtree_array_reverse(SubtreeArray *self); SubtreePool ts_subtree_pool_new(uint32_t capacity); void ts_subtree_pool_delete(SubtreePool *self); Subtree ts_subtree_new_leaf( SubtreePool *pool, TSSymbol symbol, Length padding, Length size, uint32_t lookahead_bytes, TSStateId parse_state, bool has_external_tokens, bool depends_on_column, bool is_keyword, const TSLanguage *language ); Subtree ts_subtree_new_error( SubtreePool *pool, int32_t lookahead_char, Length padding, Length size, uint32_t bytes_scanned, TSStateId parse_state, const TSLanguage *language ); MutableSubtree ts_subtree_new_node( TSSymbol symbol, SubtreeArray *children, unsigned production_id, const TSLanguage *language ); Subtree ts_subtree_new_error_node( SubtreeArray *children, bool extra, const TSLanguage * language ); Subtree ts_subtree_new_missing_leaf( SubtreePool *pool, TSSymbol symbol, Length padding, uint32_t lookahead_bytes, const TSLanguage *language ); MutableSubtree ts_subtree_make_mut(SubtreePool *pool, Subtree self); void ts_subtree_retain(Subtree self); void ts_subtree_release(SubtreePool *pool, Subtree self); int ts_subtree_compare(Subtree left, Subtree right, SubtreePool *pool); void ts_subtree_set_symbol(MutableSubtree *self, TSSymbol symbol, const TSLanguage *language); void ts_subtree_compress(MutableSubtree self, unsigned count, const TSLanguage *language, MutableSubtreeArray *stack); void ts_subtree_summarize_children(MutableSubtree self, const TSLanguage *language); Subtree ts_subtree_edit(Subtree self, const TSInputEdit *edit, SubtreePool *pool); char *ts_subtree_string(Subtree self, TSSymbol alias_symbol, bool alias_is_named, const TSLanguage *language, bool include_all); void ts_subtree_print_dot_graph(Subtree self, const TSLanguage *language, FILE *f); Subtree ts_subtree_last_external_token(Subtree tree); const ExternalScannerState *ts_subtree_external_scanner_state(Subtree self); bool ts_subtree_external_scanner_state_eq(Subtree self, Subtree other); #define SUBTREE_GET(self, name) ((self).data.is_inline ?
(self).data.name : (self).ptr->name) static inline TSSymbol ts_subtree_symbol(Subtree self) { return SUBTREE_GET(self, symbol); } static inline bool ts_subtree_visible(Subtree self) { return SUBTREE_GET(self, visible); } static inline bool ts_subtree_named(Subtree self) { return SUBTREE_GET(self, named); } static inline bool ts_subtree_extra(Subtree self) { return SUBTREE_GET(self, extra); } static inline bool ts_subtree_has_changes(Subtree self) { return SUBTREE_GET(self, has_changes); } static inline bool ts_subtree_missing(Subtree self) { return SUBTREE_GET(self, is_missing); } static inline bool ts_subtree_is_keyword(Subtree self) { return SUBTREE_GET(self, is_keyword); } static inline TSStateId ts_subtree_parse_state(Subtree self) { return SUBTREE_GET(self, parse_state); } static inline uint32_t ts_subtree_lookahead_bytes(Subtree self) { return SUBTREE_GET(self, lookahead_bytes); } #undef SUBTREE_GET // Get the size needed to store a heap-allocated subtree with the given // number of children. static inline size_t ts_subtree_alloc_size(uint32_t child_count) { return child_count * sizeof(Subtree) + sizeof(SubtreeHeapData); } // Get a subtree's children, which are allocated immediately before the // tree's own heap data. #define ts_subtree_children(self) \ ((self).data.is_inline ? NULL : (Subtree *)((self).ptr) - (self).ptr->child_count) static inline void ts_subtree_set_extra(MutableSubtree *self, bool is_extra) { if (self->data.is_inline) { self->data.extra = is_extra; } else { self->ptr->extra = is_extra; } } static inline TSSymbol ts_subtree_leaf_symbol(Subtree self) { if (self.data.is_inline) return self.data.symbol; if (self.ptr->child_count == 0) return self.ptr->symbol; return self.ptr->first_leaf.symbol; } static inline TSStateId ts_subtree_leaf_parse_state(Subtree self) { if (self.data.is_inline) return self.data.parse_state; if (self.ptr->child_count == 0) return self.ptr->parse_state; return self.ptr->first_leaf.parse_state; } static inline Length ts_subtree_padding(Subtree self) { if (self.data.is_inline) { Length result = {self.data.padding_bytes, {self.data.padding_rows, self.data.padding_columns}}; return result; } else { return self.ptr->padding; } } static inline Length ts_subtree_size(Subtree self) { if (self.data.is_inline) { Length result = {self.data.size_bytes, {0, self.data.size_bytes}}; return result; } else { return self.ptr->size; } } static inline Length ts_subtree_total_size(Subtree self) { return length_add(ts_subtree_padding(self), ts_subtree_size(self)); } static inline uint32_t ts_subtree_total_bytes(Subtree self) { return ts_subtree_total_size(self).bytes; } static inline uint32_t ts_subtree_child_count(Subtree self) { return self.data.is_inline ? 0 : self.ptr->child_count; } static inline uint32_t ts_subtree_repeat_depth(Subtree self) { return self.data.is_inline ? 0 : self.ptr->repeat_depth; } static inline uint32_t ts_subtree_is_repetition(Subtree self) { return self.data.is_inline ? 0 : !self.ptr->named && !self.ptr->visible && self.ptr->child_count != 0; } static inline uint32_t ts_subtree_visible_descendant_count(Subtree self) { return (self.data.is_inline || self.ptr->child_count == 0) ? 
0 : self.ptr->visible_descendant_count; } static inline uint32_t ts_subtree_visible_child_count(Subtree self) { if (ts_subtree_child_count(self) > 0) { return self.ptr->visible_child_count; } else { return 0; } } static inline uint32_t ts_subtree_error_cost(Subtree self) { if (ts_subtree_missing(self)) { return ERROR_COST_PER_MISSING_TREE + ERROR_COST_PER_RECOVERY; } else { return self.data.is_inline ? 0 : self.ptr->error_cost; } } static inline int32_t ts_subtree_dynamic_precedence(Subtree self) { return (self.data.is_inline || self.ptr->child_count == 0) ? 0 : self.ptr->dynamic_precedence; } static inline uint16_t ts_subtree_production_id(Subtree self) { if (ts_subtree_child_count(self) > 0) { return self.ptr->production_id; } else { return 0; } } static inline bool ts_subtree_fragile_left(Subtree self) { return self.data.is_inline ? false : self.ptr->fragile_left; } static inline bool ts_subtree_fragile_right(Subtree self) { return self.data.is_inline ? false : self.ptr->fragile_right; } static inline bool ts_subtree_has_external_tokens(Subtree self) { return self.data.is_inline ? false : self.ptr->has_external_tokens; } static inline bool ts_subtree_has_external_scanner_state_change(Subtree self) { return self.data.is_inline ? false : self.ptr->has_external_scanner_state_change; } static inline bool ts_subtree_depends_on_column(Subtree self) { return self.data.is_inline ? false : self.ptr->depends_on_column; } static inline bool ts_subtree_is_fragile(Subtree self) { return self.data.is_inline ? false : (self.ptr->fragile_left || self.ptr->fragile_right); } static inline bool ts_subtree_is_error(Subtree self) { return ts_subtree_symbol(self) == ts_builtin_sym_error; } static inline bool ts_subtree_is_eof(Subtree self) { return ts_subtree_symbol(self) == ts_builtin_sym_end; } static inline Subtree ts_subtree_from_mut(MutableSubtree self) { Subtree result; result.data = self.data; return result; } static inline MutableSubtree ts_subtree_to_mut_unsafe(Subtree self) { MutableSubtree result; result.data = self.data; return result; } #ifdef __cplusplus } #endif #endif // TREE_SITTER_SUBTREE_H_ hx-0.3.0+20250717/bindings/vendor/src/tree.c000066400000000000000000000122301503625671400200270ustar00rootroot00000000000000#include "tree_sitter/api.h" #include "./array.h" #include "./get_changed_ranges.h" #include "./length.h" #include "./subtree.h" #include "./tree_cursor.h" #include "./tree.h" TSTree *ts_tree_new( Subtree root, const TSLanguage *language, const TSRange *included_ranges, unsigned included_range_count ) { TSTree *result = ts_malloc(sizeof(TSTree)); result->root = root; result->language = ts_language_copy(language); result->included_ranges = ts_calloc(included_range_count, sizeof(TSRange)); memcpy(result->included_ranges, included_ranges, included_range_count * sizeof(TSRange)); result->included_range_count = included_range_count; return result; } TSTree *ts_tree_copy(const TSTree *self) { ts_subtree_retain(self->root); return ts_tree_new(self->root, self->language, self->included_ranges, self->included_range_count); } void ts_tree_delete(TSTree *self) { if (!self) return; SubtreePool pool = ts_subtree_pool_new(0); ts_subtree_release(&pool, self->root); ts_subtree_pool_delete(&pool); ts_language_delete(self->language); ts_free(self->included_ranges); ts_free(self); } TSNode ts_tree_root_node(const TSTree *self) { return ts_node_new(self, &self->root, ts_subtree_padding(self->root), 0); } TSNode ts_tree_root_node_with_offset( const TSTree *self, uint32_t offset_bytes, TSPoint 
offset_extent ) { Length offset = {offset_bytes, offset_extent}; return ts_node_new(self, &self->root, length_add(offset, ts_subtree_padding(self->root)), 0); } const TSLanguage *ts_tree_language(const TSTree *self) { return self->language; } void ts_tree_edit(TSTree *self, const TSInputEdit *edit) { for (unsigned i = 0; i < self->included_range_count; i++) { TSRange *range = &self->included_ranges[i]; if (range->end_byte >= edit->old_end_byte) { if (range->end_byte != UINT32_MAX) { range->end_byte = edit->new_end_byte + (range->end_byte - edit->old_end_byte); range->end_point = point_add( edit->new_end_point, point_sub(range->end_point, edit->old_end_point) ); if (range->end_byte < edit->new_end_byte) { range->end_byte = UINT32_MAX; range->end_point = POINT_MAX; } } } else if (range->end_byte > edit->start_byte) { range->end_byte = edit->start_byte; range->end_point = edit->start_point; } if (range->start_byte >= edit->old_end_byte) { range->start_byte = edit->new_end_byte + (range->start_byte - edit->old_end_byte); range->start_point = point_add( edit->new_end_point, point_sub(range->start_point, edit->old_end_point) ); if (range->start_byte < edit->new_end_byte) { range->start_byte = UINT32_MAX; range->start_point = POINT_MAX; } } else if (range->start_byte > edit->start_byte) { range->start_byte = edit->start_byte; range->start_point = edit->start_point; } } SubtreePool pool = ts_subtree_pool_new(0); self->root = ts_subtree_edit(self->root, edit, &pool); ts_subtree_pool_delete(&pool); } TSRange *ts_tree_included_ranges(const TSTree *self, uint32_t *length) { *length = self->included_range_count; TSRange *ranges = ts_calloc(self->included_range_count, sizeof(TSRange)); memcpy(ranges, self->included_ranges, self->included_range_count * sizeof(TSRange)); return ranges; } TSRange *ts_tree_get_changed_ranges(const TSTree *old_tree, const TSTree *new_tree, uint32_t *length) { TreeCursor cursor1 = {NULL, array_new(), 0}; TreeCursor cursor2 = {NULL, array_new(), 0}; ts_tree_cursor_init(&cursor1, ts_tree_root_node(old_tree)); ts_tree_cursor_init(&cursor2, ts_tree_root_node(new_tree)); TSRangeArray included_range_differences = array_new(); ts_range_array_get_changed_ranges( old_tree->included_ranges, old_tree->included_range_count, new_tree->included_ranges, new_tree->included_range_count, &included_range_differences ); TSRange *result; *length = ts_subtree_get_changed_ranges( &old_tree->root, &new_tree->root, &cursor1, &cursor2, old_tree->language, &included_range_differences, &result ); array_delete(&included_range_differences); array_delete(&cursor1.stack); array_delete(&cursor2.stack); return result; } #ifdef _WIN32 #include #include int _ts_dup(HANDLE handle) { HANDLE dup_handle; if (!DuplicateHandle( GetCurrentProcess(), handle, GetCurrentProcess(), &dup_handle, 0, FALSE, DUPLICATE_SAME_ACCESS )) return -1; return _open_osfhandle((intptr_t)dup_handle, 0); } void ts_tree_print_dot_graph(const TSTree *self, int fd) { FILE *file = _fdopen(_ts_dup((HANDLE)_get_osfhandle(fd)), "a"); ts_subtree_print_dot_graph(self->root, self->language, file); fclose(file); } #elif !defined(__wasi__) // WASI doesn't support dup #include int _ts_dup(int file_descriptor) { return dup(file_descriptor); } void ts_tree_print_dot_graph(const TSTree *self, int file_descriptor) { FILE *file = fdopen(_ts_dup(file_descriptor), "a"); ts_subtree_print_dot_graph(self->root, self->language, file); fclose(file); } #else void ts_tree_print_dot_graph(const TSTree *self, int file_descriptor) { (void)self; (void)file_descriptor; 
} #endif hx-0.3.0+20250717/bindings/vendor/src/tree.h000066400000000000000000000012271503625671400200400ustar00rootroot00000000000000#ifndef TREE_SITTER_TREE_H_ #define TREE_SITTER_TREE_H_ #include "./subtree.h" #ifdef __cplusplus extern "C" { #endif typedef struct { const Subtree *child; const Subtree *parent; Length position; TSSymbol alias_symbol; } ParentCacheEntry; struct TSTree { Subtree root; const TSLanguage *language; TSRange *included_ranges; unsigned included_range_count; }; TSTree *ts_tree_new(Subtree root, const TSLanguage *language, const TSRange *included_ranges, unsigned included_range_count); TSNode ts_node_new(const TSTree *tree, const Subtree *subtree, Length position, TSSymbol alias); #ifdef __cplusplus } #endif #endif // TREE_SITTER_TREE_H_ hx-0.3.0+20250717/bindings/vendor/src/tree_cursor.c000066400000000000000000000546651503625671400214460ustar00rootroot00000000000000#include "tree_sitter/api.h" #include "./tree_cursor.h" #include "./language.h" #include "./tree.h" typedef struct { Subtree parent; const TSTree *tree; Length position; uint32_t child_index; uint32_t structural_child_index; uint32_t descendant_index; const TSSymbol *alias_sequence; } CursorChildIterator; // CursorChildIterator static inline bool ts_tree_cursor_is_entry_visible(const TreeCursor *self, uint32_t index) { TreeCursorEntry *entry = array_get(&self->stack, index); if (index == 0 || ts_subtree_visible(*entry->subtree)) { return true; } else if (!ts_subtree_extra(*entry->subtree)) { TreeCursorEntry *parent_entry = array_get(&self->stack, index - 1); return ts_language_alias_at( self->tree->language, parent_entry->subtree->ptr->production_id, entry->structural_child_index ); } else { return false; } } static inline CursorChildIterator ts_tree_cursor_iterate_children(const TreeCursor *self) { TreeCursorEntry *last_entry = array_back(&self->stack); if (ts_subtree_child_count(*last_entry->subtree) == 0) { return (CursorChildIterator) {NULL_SUBTREE, self->tree, length_zero(), 0, 0, 0, NULL}; } const TSSymbol *alias_sequence = ts_language_alias_sequence( self->tree->language, last_entry->subtree->ptr->production_id ); uint32_t descendant_index = last_entry->descendant_index; if (ts_tree_cursor_is_entry_visible(self, self->stack.size - 1)) { descendant_index += 1; } return (CursorChildIterator) { .tree = self->tree, .parent = *last_entry->subtree, .position = last_entry->position, .child_index = 0, .structural_child_index = 0, .descendant_index = descendant_index, .alias_sequence = alias_sequence, }; } static inline bool ts_tree_cursor_child_iterator_next( CursorChildIterator *self, TreeCursorEntry *result, bool *visible ) { if (!self->parent.ptr || self->child_index == self->parent.ptr->child_count) return false; const Subtree *child = &ts_subtree_children(self->parent)[self->child_index]; *result = (TreeCursorEntry) { .subtree = child, .position = self->position, .child_index = self->child_index, .structural_child_index = self->structural_child_index, .descendant_index = self->descendant_index, }; *visible = ts_subtree_visible(*child); bool extra = ts_subtree_extra(*child); if (!extra) { if (self->alias_sequence) { *visible |= self->alias_sequence[self->structural_child_index]; } self->structural_child_index++; } self->descendant_index += ts_subtree_visible_descendant_count(*child); if (*visible) { self->descendant_index += 1; } self->position = length_add(self->position, ts_subtree_size(*child)); self->child_index++; if (self->child_index < self->parent.ptr->child_count) { Subtree next_child = 
ts_subtree_children(self->parent)[self->child_index]; self->position = length_add(self->position, ts_subtree_padding(next_child)); } return true; } // Return a position that, when `b` is added to it, yields `a`. This // can only be computed if `b` has zero rows. Otherwise, this function // returns `LENGTH_UNDEFINED`, and the caller needs to recompute // the position some other way. static inline Length length_backtrack(Length a, Length b) { if (length_is_undefined(a) || b.extent.row != 0) { return LENGTH_UNDEFINED; } Length result; result.bytes = a.bytes - b.bytes; result.extent.row = a.extent.row; result.extent.column = a.extent.column - b.extent.column; return result; } static inline bool ts_tree_cursor_child_iterator_previous( CursorChildIterator *self, TreeCursorEntry *result, bool *visible ) { // this is mostly a reverse `ts_tree_cursor_child_iterator_next` taking into // account unsigned underflow if (!self->parent.ptr || (int8_t)self->child_index == -1) return false; const Subtree *child = &ts_subtree_children(self->parent)[self->child_index]; *result = (TreeCursorEntry) { .subtree = child, .position = self->position, .child_index = self->child_index, .structural_child_index = self->structural_child_index, }; *visible = ts_subtree_visible(*child); bool extra = ts_subtree_extra(*child); self->position = length_backtrack(self->position, ts_subtree_padding(*child)); self->child_index--; if (!extra && self->alias_sequence) { *visible |= self->alias_sequence[self->structural_child_index]; if (self->structural_child_index > 0) { self->structural_child_index--; } } // unsigned can underflow so compare it to child_count if (self->child_index < self->parent.ptr->child_count) { Subtree previous_child = ts_subtree_children(self->parent)[self->child_index]; Length size = ts_subtree_size(previous_child); self->position = length_backtrack(self->position, size); } return true; } // TSTreeCursor - lifecycle TSTreeCursor ts_tree_cursor_new(TSNode node) { TSTreeCursor self = {NULL, NULL, {0, 0, 0}}; ts_tree_cursor_init((TreeCursor *)&self, node); return self; } void ts_tree_cursor_reset(TSTreeCursor *_self, TSNode node) { ts_tree_cursor_init((TreeCursor *)_self, node); } void ts_tree_cursor_init(TreeCursor *self, TSNode node) { self->tree = node.tree; self->root_alias_symbol = node.context[3]; array_clear(&self->stack); array_push(&self->stack, ((TreeCursorEntry) { .subtree = (const Subtree *)node.id, .position = { ts_node_start_byte(node), ts_node_start_point(node) }, .child_index = 0, .structural_child_index = 0, .descendant_index = 0, })); } void ts_tree_cursor_delete(TSTreeCursor *_self) { TreeCursor *self = (TreeCursor *)_self; array_delete(&self->stack); } // TSTreeCursor - walking the tree TreeCursorStep ts_tree_cursor_goto_first_child_internal(TSTreeCursor *_self) { TreeCursor *self = (TreeCursor *)_self; bool visible; TreeCursorEntry entry; CursorChildIterator iterator = ts_tree_cursor_iterate_children(self); while (ts_tree_cursor_child_iterator_next(&iterator, &entry, &visible)) { if (visible) { array_push(&self->stack, entry); return TreeCursorStepVisible; } if (ts_subtree_visible_child_count(*entry.subtree) > 0) { array_push(&self->stack, entry); return TreeCursorStepHidden; } } return TreeCursorStepNone; } bool ts_tree_cursor_goto_first_child(TSTreeCursor *self) { for (;;) { switch (ts_tree_cursor_goto_first_child_internal(self)) { case TreeCursorStepHidden: continue; case TreeCursorStepVisible: return true; default: return false; } } } TreeCursorStep 
ts_tree_cursor_goto_last_child_internal(TSTreeCursor *_self) { TreeCursor *self = (TreeCursor *)_self; bool visible; TreeCursorEntry entry; CursorChildIterator iterator = ts_tree_cursor_iterate_children(self); if (!iterator.parent.ptr || iterator.parent.ptr->child_count == 0) return TreeCursorStepNone; TreeCursorEntry last_entry = {0}; TreeCursorStep last_step = TreeCursorStepNone; while (ts_tree_cursor_child_iterator_next(&iterator, &entry, &visible)) { if (visible) { last_entry = entry; last_step = TreeCursorStepVisible; } else if (ts_subtree_visible_child_count(*entry.subtree) > 0) { last_entry = entry; last_step = TreeCursorStepHidden; } } if (last_entry.subtree) { array_push(&self->stack, last_entry); return last_step; } return TreeCursorStepNone; } bool ts_tree_cursor_goto_last_child(TSTreeCursor *self) { for (;;) { switch (ts_tree_cursor_goto_last_child_internal(self)) { case TreeCursorStepHidden: continue; case TreeCursorStepVisible: return true; default: return false; } } } static inline int64_t ts_tree_cursor_goto_first_child_for_byte_and_point( TSTreeCursor *_self, uint32_t goal_byte, TSPoint goal_point ) { TreeCursor *self = (TreeCursor *)_self; uint32_t initial_size = self->stack.size; uint32_t visible_child_index = 0; bool did_descend; do { did_descend = false; bool visible; TreeCursorEntry entry; CursorChildIterator iterator = ts_tree_cursor_iterate_children(self); while (ts_tree_cursor_child_iterator_next(&iterator, &entry, &visible)) { Length entry_end = length_add(entry.position, ts_subtree_size(*entry.subtree)); bool at_goal = entry_end.bytes > goal_byte && point_gt(entry_end.extent, goal_point); uint32_t visible_child_count = ts_subtree_visible_child_count(*entry.subtree); if (at_goal) { if (visible) { array_push(&self->stack, entry); return visible_child_index; } if (visible_child_count > 0) { array_push(&self->stack, entry); did_descend = true; break; } } else if (visible) { visible_child_index++; } else { visible_child_index += visible_child_count; } } } while (did_descend); self->stack.size = initial_size; return -1; } int64_t ts_tree_cursor_goto_first_child_for_byte(TSTreeCursor *self, uint32_t goal_byte) { return ts_tree_cursor_goto_first_child_for_byte_and_point(self, goal_byte, POINT_ZERO); } int64_t ts_tree_cursor_goto_first_child_for_point(TSTreeCursor *self, TSPoint goal_point) { return ts_tree_cursor_goto_first_child_for_byte_and_point(self, 0, goal_point); } TreeCursorStep ts_tree_cursor_goto_sibling_internal( TSTreeCursor *_self, bool (*advance)(CursorChildIterator *, TreeCursorEntry *, bool *) ) { TreeCursor *self = (TreeCursor *)_self; uint32_t initial_size = self->stack.size; while (self->stack.size > 1) { TreeCursorEntry entry = array_pop(&self->stack); CursorChildIterator iterator = ts_tree_cursor_iterate_children(self); iterator.child_index = entry.child_index; iterator.structural_child_index = entry.structural_child_index; iterator.position = entry.position; iterator.descendant_index = entry.descendant_index; bool visible = false; advance(&iterator, &entry, &visible); if (visible && self->stack.size + 1 < initial_size) break; while (advance(&iterator, &entry, &visible)) { if (visible) { array_push(&self->stack, entry); return TreeCursorStepVisible; } if (ts_subtree_visible_child_count(*entry.subtree)) { array_push(&self->stack, entry); return TreeCursorStepHidden; } } } self->stack.size = initial_size; return TreeCursorStepNone; } TreeCursorStep ts_tree_cursor_goto_next_sibling_internal(TSTreeCursor *_self) { return 
ts_tree_cursor_goto_sibling_internal(_self, ts_tree_cursor_child_iterator_next); } bool ts_tree_cursor_goto_next_sibling(TSTreeCursor *self) { switch (ts_tree_cursor_goto_next_sibling_internal(self)) { case TreeCursorStepHidden: ts_tree_cursor_goto_first_child(self); return true; case TreeCursorStepVisible: return true; default: return false; } } TreeCursorStep ts_tree_cursor_goto_previous_sibling_internal(TSTreeCursor *_self) { // since subtracting across row loses column information, we may have to // restore it TreeCursor *self = (TreeCursor *)_self; // for that, save current position before traversing TreeCursorStep step = ts_tree_cursor_goto_sibling_internal( _self, ts_tree_cursor_child_iterator_previous); if (step == TreeCursorStepNone) return step; // if length is already valid, there's no need to recompute it if (!length_is_undefined(array_back(&self->stack)->position)) return step; // restore position from the parent node const TreeCursorEntry *parent = array_get(&self->stack, self->stack.size - 2); Length position = parent->position; uint32_t child_index = array_back(&self->stack)->child_index; const Subtree *children = ts_subtree_children((*(parent->subtree))); if (child_index > 0) { // skip first child padding since its position should match the position of the parent position = length_add(position, ts_subtree_size(children[0])); for (uint32_t i = 1; i < child_index; ++i) { position = length_add(position, ts_subtree_total_size(children[i])); } position = length_add(position, ts_subtree_padding(children[child_index])); } array_back(&self->stack)->position = position; return step; } bool ts_tree_cursor_goto_previous_sibling(TSTreeCursor *self) { switch (ts_tree_cursor_goto_previous_sibling_internal(self)) { case TreeCursorStepHidden: ts_tree_cursor_goto_last_child(self); return true; case TreeCursorStepVisible: return true; default: return false; } } bool ts_tree_cursor_goto_parent(TSTreeCursor *_self) { TreeCursor *self = (TreeCursor *)_self; for (unsigned i = self->stack.size - 2; i + 1 > 0; i--) { if (ts_tree_cursor_is_entry_visible(self, i)) { self->stack.size = i + 1; return true; } } return false; } void ts_tree_cursor_goto_descendant( TSTreeCursor *_self, uint32_t goal_descendant_index ) { TreeCursor *self = (TreeCursor *)_self; // Ascend to the lowest ancestor that contains the goal node. for (;;) { uint32_t i = self->stack.size - 1; TreeCursorEntry *entry = array_get(&self->stack, i); uint32_t next_descendant_index = entry->descendant_index + (ts_tree_cursor_is_entry_visible(self, i) ? 1 : 0) + ts_subtree_visible_descendant_count(*entry->subtree); if ( (entry->descendant_index <= goal_descendant_index) && (next_descendant_index > goal_descendant_index) ) { break; } else if (self->stack.size <= 1) { return; } else { self->stack.size--; } } // Descend to the goal node. 
bool did_descend = true; do { did_descend = false; bool visible; TreeCursorEntry entry; CursorChildIterator iterator = ts_tree_cursor_iterate_children(self); if (iterator.descendant_index > goal_descendant_index) { return; } while (ts_tree_cursor_child_iterator_next(&iterator, &entry, &visible)) { if (iterator.descendant_index > goal_descendant_index) { array_push(&self->stack, entry); if (visible && entry.descendant_index == goal_descendant_index) { return; } else { did_descend = true; break; } } } } while (did_descend); } uint32_t ts_tree_cursor_current_descendant_index(const TSTreeCursor *_self) { const TreeCursor *self = (const TreeCursor *)_self; TreeCursorEntry *last_entry = array_back(&self->stack); return last_entry->descendant_index; } TSNode ts_tree_cursor_current_node(const TSTreeCursor *_self) { const TreeCursor *self = (const TreeCursor *)_self; TreeCursorEntry *last_entry = array_back(&self->stack); bool is_extra = ts_subtree_extra(*last_entry->subtree); TSSymbol alias_symbol = is_extra ? 0 : self->root_alias_symbol; if (self->stack.size > 1 && !is_extra) { TreeCursorEntry *parent_entry = array_get(&self->stack, self->stack.size - 2); alias_symbol = ts_language_alias_at( self->tree->language, parent_entry->subtree->ptr->production_id, last_entry->structural_child_index ); } return ts_node_new( self->tree, last_entry->subtree, last_entry->position, alias_symbol ); } // Private - Get various facts about the current node that are needed // when executing tree queries. void ts_tree_cursor_current_status( const TSTreeCursor *_self, TSFieldId *field_id, bool *has_later_siblings, bool *has_later_named_siblings, bool *can_have_later_siblings_with_this_field, TSSymbol *supertypes, unsigned *supertype_count ) { const TreeCursor *self = (const TreeCursor *)_self; unsigned max_supertypes = *supertype_count; *field_id = 0; *supertype_count = 0; *has_later_siblings = false; *has_later_named_siblings = false; *can_have_later_siblings_with_this_field = false; // Walk up the tree, visiting the current node and its invisible ancestors, // because fields can refer to nodes through invisible *wrapper* nodes, for (unsigned i = self->stack.size - 1; i > 0; i--) { TreeCursorEntry *entry = array_get(&self->stack, i); TreeCursorEntry *parent_entry = array_get(&self->stack, i - 1); const TSSymbol *alias_sequence = ts_language_alias_sequence( self->tree->language, parent_entry->subtree->ptr->production_id ); #define subtree_symbol(subtree, structural_child_index) \ (( \ !ts_subtree_extra(subtree) && \ alias_sequence && \ alias_sequence[structural_child_index] \ ) ? \ alias_sequence[structural_child_index] : \ ts_subtree_symbol(subtree)) // Stop walking up when a visible ancestor is found. TSSymbol entry_symbol = subtree_symbol( *entry->subtree, entry->structural_child_index ); TSSymbolMetadata entry_metadata = ts_language_symbol_metadata( self->tree->language, entry_symbol ); if (i != self->stack.size - 1 && entry_metadata.visible) break; // Record any supertypes if (entry_metadata.supertype && *supertype_count < max_supertypes) { supertypes[*supertype_count] = entry_symbol; (*supertype_count)++; } // Determine if the current node has later siblings. 
if (!*has_later_siblings) { unsigned sibling_count = parent_entry->subtree->ptr->child_count; unsigned structural_child_index = entry->structural_child_index; if (!ts_subtree_extra(*entry->subtree)) structural_child_index++; for (unsigned j = entry->child_index + 1; j < sibling_count; j++) { Subtree sibling = ts_subtree_children(*parent_entry->subtree)[j]; TSSymbolMetadata sibling_metadata = ts_language_symbol_metadata( self->tree->language, subtree_symbol(sibling, structural_child_index) ); if (sibling_metadata.visible) { *has_later_siblings = true; if (*has_later_named_siblings) break; if (sibling_metadata.named) { *has_later_named_siblings = true; break; } } else if (ts_subtree_visible_child_count(sibling) > 0) { *has_later_siblings = true; if (*has_later_named_siblings) break; if (sibling.ptr->named_child_count > 0) { *has_later_named_siblings = true; break; } } if (!ts_subtree_extra(sibling)) structural_child_index++; } } #undef subtree_symbol if (!ts_subtree_extra(*entry->subtree)) { const TSFieldMapEntry *field_map, *field_map_end; ts_language_field_map( self->tree->language, parent_entry->subtree->ptr->production_id, &field_map, &field_map_end ); // Look for a field name associated with the current node. if (!*field_id) { for (const TSFieldMapEntry *map = field_map; map < field_map_end; map++) { if (!map->inherited && map->child_index == entry->structural_child_index) { *field_id = map->field_id; break; } } } // Determine if the current node can have later siblings with the same field name. if (*field_id) { for (const TSFieldMapEntry *map = field_map; map < field_map_end; map++) { if ( map->field_id == *field_id && map->child_index > entry->structural_child_index ) { *can_have_later_siblings_with_this_field = true; break; } } } } } } uint32_t ts_tree_cursor_current_depth(const TSTreeCursor *_self) { const TreeCursor *self = (const TreeCursor *)_self; uint32_t depth = 0; for (unsigned i = 1; i < self->stack.size; i++) { if (ts_tree_cursor_is_entry_visible(self, i)) { depth++; } } return depth; } TSNode ts_tree_cursor_parent_node(const TSTreeCursor *_self) { const TreeCursor *self = (const TreeCursor *)_self; for (int i = (int)self->stack.size - 2; i >= 0; i--) { TreeCursorEntry *entry = array_get(&self->stack, i); bool is_visible = true; TSSymbol alias_symbol = 0; if (i > 0) { TreeCursorEntry *parent_entry = array_get(&self->stack, i - 1); alias_symbol = ts_language_alias_at( self->tree->language, parent_entry->subtree->ptr->production_id, entry->structural_child_index ); is_visible = (alias_symbol != 0) || ts_subtree_visible(*entry->subtree); } if (is_visible) { return ts_node_new( self->tree, entry->subtree, entry->position, alias_symbol ); } } return ts_node_new(NULL, NULL, length_zero(), 0); } TSFieldId ts_tree_cursor_current_field_id(const TSTreeCursor *_self) { const TreeCursor *self = (const TreeCursor *)_self; // Walk up the tree, visiting the current node and its invisible ancestors. for (unsigned i = self->stack.size - 1; i > 0; i--) { TreeCursorEntry *entry = array_get(&self->stack, i); TreeCursorEntry *parent_entry = array_get(&self->stack, i - 1); // Stop walking up when another visible node is found. 
if ( i != self->stack.size - 1 && ts_tree_cursor_is_entry_visible(self, i) ) break; if (ts_subtree_extra(*entry->subtree)) break; const TSFieldMapEntry *field_map, *field_map_end; ts_language_field_map( self->tree->language, parent_entry->subtree->ptr->production_id, &field_map, &field_map_end ); for (const TSFieldMapEntry *map = field_map; map < field_map_end; map++) { if (!map->inherited && map->child_index == entry->structural_child_index) { return map->field_id; } } } return 0; } const char *ts_tree_cursor_current_field_name(const TSTreeCursor *_self) { TSFieldId id = ts_tree_cursor_current_field_id(_self); if (id) { const TreeCursor *self = (const TreeCursor *)_self; return self->tree->language->field_names[id]; } else { return NULL; } } TSTreeCursor ts_tree_cursor_copy(const TSTreeCursor *_cursor) { const TreeCursor *cursor = (const TreeCursor *)_cursor; TSTreeCursor res = {NULL, NULL, {0, 0}}; TreeCursor *copy = (TreeCursor *)&res; copy->tree = cursor->tree; copy->root_alias_symbol = cursor->root_alias_symbol; array_init(©->stack); array_push_all(©->stack, &cursor->stack); return res; } void ts_tree_cursor_reset_to(TSTreeCursor *_dst, const TSTreeCursor *_src) { const TreeCursor *cursor = (const TreeCursor *)_src; TreeCursor *copy = (TreeCursor *)_dst; copy->tree = cursor->tree; copy->root_alias_symbol = cursor->root_alias_symbol; array_clear(©->stack); array_push_all(©->stack, &cursor->stack); } hx-0.3.0+20250717/bindings/vendor/src/tree_cursor.h000066400000000000000000000024051503625671400214340ustar00rootroot00000000000000#ifndef TREE_SITTER_TREE_CURSOR_H_ #define TREE_SITTER_TREE_CURSOR_H_ #include "./subtree.h" typedef struct { const Subtree *subtree; Length position; uint32_t child_index; uint32_t structural_child_index; uint32_t descendant_index; } TreeCursorEntry; typedef struct { const TSTree *tree; Array(TreeCursorEntry) stack; TSSymbol root_alias_symbol; } TreeCursor; typedef enum { TreeCursorStepNone, TreeCursorStepHidden, TreeCursorStepVisible, } TreeCursorStep; void ts_tree_cursor_init(TreeCursor *self, TSNode node); void ts_tree_cursor_current_status( const TSTreeCursor *_self, TSFieldId *field_id, bool *has_later_siblings, bool *has_later_named_siblings, bool *can_have_later_siblings_with_this_field, TSSymbol *supertypes, unsigned *supertype_count ); TreeCursorStep ts_tree_cursor_goto_first_child_internal(TSTreeCursor *_self); TreeCursorStep ts_tree_cursor_goto_next_sibling_internal(TSTreeCursor *_self); static inline Subtree ts_tree_cursor_current_subtree(const TSTreeCursor *_self) { const TreeCursor *self = (const TreeCursor *)_self; TreeCursorEntry *last_entry = array_back(&self->stack); return *last_entry->subtree; } TSNode ts_tree_cursor_parent_node(const TSTreeCursor *_self); #endif // TREE_SITTER_TREE_CURSOR_H_ hx-0.3.0+20250717/bindings/vendor/src/ts_assert.h000066400000000000000000000003151503625671400211050ustar00rootroot00000000000000#ifndef TREE_SITTER_ASSERT_H_ #define TREE_SITTER_ASSERT_H_ #ifdef NDEBUG #define ts_assert(e) ((void)(e)) #else #include #define ts_assert(e) assert(e) #endif #endif // TREE_SITTER_ASSERT_H_ hx-0.3.0+20250717/bindings/vendor/src/unicode.h000066400000000000000000000031421503625671400205250ustar00rootroot00000000000000#ifndef TREE_SITTER_UNICODE_H_ #define TREE_SITTER_UNICODE_H_ #ifdef __cplusplus extern "C" { #endif #include #include #define U_EXPORT #define U_EXPORT2 #include "unicode/utf8.h" #include "unicode/utf16.h" #include "portable/endian.h" #define U16_NEXT_LE(s, i, length, c) UPRV_BLOCK_MACRO_BEGIN { \ 
(c)=le16toh((s)[(i)++]); \ if(U16_IS_LEAD(c)) { \ uint16_t __c2; \ if((i)!=(length) && U16_IS_TRAIL(__c2=(s)[(i)])) { \ ++(i); \ (c)=U16_GET_SUPPLEMENTARY((c), __c2); \ } \ } \ } UPRV_BLOCK_MACRO_END #define U16_NEXT_BE(s, i, length, c) UPRV_BLOCK_MACRO_BEGIN { \ (c)=be16toh((s)[(i)++]); \ if(U16_IS_LEAD(c)) { \ uint16_t __c2; \ if((i)!=(length) && U16_IS_TRAIL(__c2=(s)[(i)])) { \ ++(i); \ (c)=U16_GET_SUPPLEMENTARY((c), __c2); \ } \ } \ } UPRV_BLOCK_MACRO_END static const int32_t TS_DECODE_ERROR = U_SENTINEL; static inline uint32_t ts_decode_utf8( const uint8_t *string, uint32_t length, int32_t *code_point ) { uint32_t i = 0; U8_NEXT(string, i, length, *code_point); return i; } static inline uint32_t ts_decode_utf16_le( const uint8_t *string, uint32_t length, int32_t *code_point ) { uint32_t i = 0; U16_NEXT_LE(((uint16_t *)string), i, length, *code_point); return i * 2; } static inline uint32_t ts_decode_utf16_be( const uint8_t *string, uint32_t length, int32_t *code_point ) { uint32_t i = 0; U16_NEXT_BE(((uint16_t *)string), i, length, *code_point); return i * 2; } #ifdef __cplusplus } #endif #endif // TREE_SITTER_UNICODE_H_ hx-0.3.0+20250717/bindings/vendor/src/unicode/000077500000000000000000000000001503625671400203545ustar00rootroot00000000000000hx-0.3.0+20250717/bindings/vendor/src/unicode/ICU_SHA000066400000000000000000000000511503625671400214060ustar00rootroot00000000000000552b01f61127d30d6589aa4bf99468224979b661 hx-0.3.0+20250717/bindings/vendor/src/unicode/LICENSE000066400000000000000000000510111503625671400213570ustar00rootroot00000000000000COPYRIGHT AND PERMISSION NOTICE (ICU 58 and later) Copyright © 1991-2019 Unicode, Inc. All rights reserved. Distributed under the Terms of Use in https://www.unicode.org/copyright.html. Permission is hereby granted, free of charge, to any person obtaining a copy of the Unicode data files and any associated documentation (the "Data Files") or Unicode software and any associated documentation (the "Software") to deal in the Data Files or Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, and/or sell copies of the Data Files or Software, and to permit persons to whom the Data Files or Software are furnished to do so, provided that either (a) this copyright and permission notice appear with all copies of the Data Files or Software, or (b) this copyright and permission notice appear in associated Documentation. THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THE DATA FILES OR SOFTWARE. Except as contained in this notice, the name of a copyright holder shall not be used in advertising or otherwise to promote the sale, use or other dealings in these Data Files or Software without prior written authorization of the copyright holder. --------------------- Third-Party Software Licenses This section contains third-party software notices and/or additional terms for licensed third-party software components included within ICU libraries. 1. 
ICU License - ICU 1.8.1 to ICU 57.1 COPYRIGHT AND PERMISSION NOTICE Copyright (c) 1995-2016 International Business Machines Corporation and others All rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, provided that the above copyright notice(s) and this permission notice appear in all copies of the Software and that both the above copyright notice(s) and this permission notice appear in supporting documentation. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. Except as contained in this notice, the name of a copyright holder shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization of the copyright holder. All trademarks and registered trademarks mentioned herein are the property of their respective owners. 2. Chinese/Japanese Word Break Dictionary Data (cjdict.txt) # The Google Chrome software developed by Google is licensed under # the BSD license. Other software included in this distribution is # provided under other licenses, as set forth below. # # The BSD License # http://opensource.org/licenses/bsd-license.php # Copyright (C) 2006-2008, Google Inc. # # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided with # the distribution. # Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# # # The word list in cjdict.txt are generated by combining three word lists # listed below with further processing for compound word breaking. The # frequency is generated with an iterative training against Google web # corpora. # # * Libtabe (Chinese) # - https://sourceforge.net/project/?group_id=1519 # - Its license terms and conditions are shown below. # # * IPADIC (Japanese) # - http://chasen.aist-nara.ac.jp/chasen/distribution.html # - Its license terms and conditions are shown below. # # ---------COPYING.libtabe ---- BEGIN-------------------- # # /* # * Copyright (c) 1999 TaBE Project. # * Copyright (c) 1999 Pai-Hsiang Hsiao. # * All rights reserved. # * # * Redistribution and use in source and binary forms, with or without # * modification, are permitted provided that the following conditions # * are met: # * # * . Redistributions of source code must retain the above copyright # * notice, this list of conditions and the following disclaimer. # * . Redistributions in binary form must reproduce the above copyright # * notice, this list of conditions and the following disclaimer in # * the documentation and/or other materials provided with the # * distribution. # * . Neither the name of the TaBE Project nor the names of its # * contributors may be used to endorse or promote products derived # * from this software without specific prior written permission. # * # * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) # * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, # * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED # * OF THE POSSIBILITY OF SUCH DAMAGE. # */ # # /* # * Copyright (c) 1999 Computer Systems and Communication Lab, # * Institute of Information Science, Academia # * Sinica. All rights reserved. # * # * Redistribution and use in source and binary forms, with or without # * modification, are permitted provided that the following conditions # * are met: # * # * . Redistributions of source code must retain the above copyright # * notice, this list of conditions and the following disclaimer. # * . Redistributions in binary form must reproduce the above copyright # * notice, this list of conditions and the following disclaimer in # * the documentation and/or other materials provided with the # * distribution. # * . Neither the name of the Computer Systems and Communication Lab # * nor the names of its contributors may be used to endorse or # * promote products derived from this software without specific # * prior written permission. # * # * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE # * REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) # * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, # * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED # * OF THE POSSIBILITY OF SUCH DAMAGE. # */ # # Copyright 1996 Chih-Hao Tsai @ Beckman Institute, # University of Illinois # c-tsai4@uiuc.edu http://casper.beckman.uiuc.edu/~c-tsai4 # # ---------------COPYING.libtabe-----END-------------------------------- # # # ---------------COPYING.ipadic-----BEGIN------------------------------- # # Copyright 2000, 2001, 2002, 2003 Nara Institute of Science # and Technology. All Rights Reserved. # # Use, reproduction, and distribution of this software is permitted. # Any copy of this software, whether in its original form or modified, # must include both the above copyright notice and the following # paragraphs. # # Nara Institute of Science and Technology (NAIST), # the copyright holders, disclaims all warranties with regard to this # software, including all implied warranties of merchantability and # fitness, in no event shall NAIST be liable for # any special, indirect or consequential damages or any damages # whatsoever resulting from loss of use, data or profits, whether in an # action of contract, negligence or other tortuous action, arising out # of or in connection with the use or performance of this software. # # A large portion of the dictionary entries # originate from ICOT Free Software. The following conditions for ICOT # Free Software applies to the current dictionary as well. # # Each User may also freely distribute the Program, whether in its # original form or modified, to any third party or parties, PROVIDED # that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear # on, or be attached to, the Program, which is distributed substantially # in the same form as set out herein and that such intended # distribution, if actually made, will neither violate or otherwise # contravene any of the laws and regulations of the countries having # jurisdiction over the User or the intended distribution itself. # # NO WARRANTY # # The program was produced on an experimental basis in the course of the # research and development conducted during the project and is provided # to users as so produced on an experimental basis. Accordingly, the # program is provided without any warranty whatsoever, whether express, # implied, statutory or otherwise. The term "warranty" used herein # includes, but is not limited to, any warranty of the quality, # performance, merchantability and fitness for a particular purpose of # the program and the nonexistence of any infringement or violation of # any right of any third party. # # Each user of the program will agree and understand, and be deemed to # have agreed and understood, that there is no warranty whatsoever for # the program and, accordingly, the entire risk arising from or # otherwise connected with the program is assumed by the user. 
# # Therefore, neither ICOT, the copyright holder, or any other # organization that participated in or was otherwise related to the # development of the program and their respective officials, directors, # officers and other employees shall be held liable for any and all # damages, including, without limitation, general, special, incidental # and consequential damages, arising out of or otherwise in connection # with the use or inability to use the program or any product, material # or result produced or otherwise obtained by using the program, # regardless of whether they have been advised of, or otherwise had # knowledge of, the possibility of such damages at any time during the # project or thereafter. Each user will be deemed to have agreed to the # foregoing by his or her commencement of use of the program. The term # "use" as used herein includes, but is not limited to, the use, # modification, copying and distribution of the program and the # production of secondary products from the program. # # In the case where the program, whether in its original form or # modified, was distributed or delivered to or received by a user from # any person, organization or entity other than ICOT, unless it makes or # grants independently of ICOT any specific warranty to the user in # writing, such person, organization or entity, will also be exempted # from and not be held liable to the user for any such damages as noted # above as far as the program is concerned. # # ---------------COPYING.ipadic-----END---------------------------------- 3. Lao Word Break Dictionary Data (laodict.txt) # Copyright (c) 2013 International Business Machines Corporation # and others. All Rights Reserved. # # Project: http://code.google.com/p/lao-dictionary/ # Dictionary: http://lao-dictionary.googlecode.com/git/Lao-Dictionary.txt # License: http://lao-dictionary.googlecode.com/git/Lao-Dictionary-LICENSE.txt # (copied below) # # This file is derived from the above dictionary, with slight # modifications. # ---------------------------------------------------------------------- # Copyright (C) 2013 Brian Eugene Wilson, Robert Martin Campbell. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, # are permitted provided that the following conditions are met: # # # Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. Redistributions in # binary form must reproduce the above copyright notice, this list of # conditions and the following disclaimer in the documentation and/or # other materials provided with the distribution. # # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED # OF THE POSSIBILITY OF SUCH DAMAGE. # -------------------------------------------------------------------------- 4. 
Burmese Word Break Dictionary Data (burmesedict.txt) # Copyright (c) 2014 International Business Machines Corporation # and others. All Rights Reserved. # # This list is part of a project hosted at: # github.com/kanyawtech/myanmar-karen-word-lists # # -------------------------------------------------------------------------- # Copyright (c) 2013, LeRoy Benjamin Sharon # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: Redistributions of source code must retain the above # copyright notice, this list of conditions and the following # disclaimer. Redistributions in binary form must reproduce the # above copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # Neither the name Myanmar Karen Word Lists, nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED # TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF # THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF # SUCH DAMAGE. # -------------------------------------------------------------------------- 5. Time Zone Database ICU uses the public domain data and code derived from Time Zone Database for its time zone support. The ownership of the TZ database is explained in BCP 175: Procedure for Maintaining the Time Zone Database section 7. # 7. Database Ownership # # The TZ database itself is not an IETF Contribution or an IETF # document. Rather it is a pre-existing and regularly updated work # that is in the public domain, and is intended to remain in the # public domain. Therefore, BCPs 78 [RFC5378] and 79 [RFC3979] do # not apply to the TZ Database or contributions that individuals make # to it. Should any claims be made and substantiated against the TZ # Database, the organization that is providing the IANA # Considerations defined in this RFC, under the memorandum of # understanding with the IETF, currently ICANN, may act in accordance # with all competent court orders. No ownership claims will be made # by ICANN or the IETF Trust on the database or the code. Any person # making a contribution to the database or code waives all rights to # future claims in that contribution or in the TZ Database. 6. Google double-conversion Copyright 2006-2011, the V8 project authors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. hx-0.3.0+20250717/bindings/vendor/src/unicode/README.md000066400000000000000000000023661503625671400216420ustar00rootroot00000000000000# ICU Parts This directory contains a small subset of files from the Unicode organization's [ICU repository](https://github.com/unicode-org/icu). ### License The license for these files is contained in the `LICENSE` file within this directory. ### Contents * Source files taken from the [`icu4c/source/common/unicode`](https://github.com/unicode-org/icu/tree/552b01f61127d30d6589aa4bf99468224979b661/icu4c/source/common/unicode) directory: * `utf8.h` * `utf16.h` * `umachine.h` * Empty source files that are referenced by the above source files, but whose original contents in `libicu` are not needed: * `ptypes.h` * `urename.h` * `utf.h` * `ICU_SHA` - File containing the Git SHA of the commit in the `icu` repository from which the files were obtained. * `LICENSE` - The license file from the [`icu4c`](https://github.com/unicode-org/icu/tree/552b01f61127d30d6589aa4bf99468224979b661/icu4c) directory of the `icu` repository. * `README.md` - This text file. ### Updating ICU To incorporate changes from the upstream `icu` repository: * Update `ICU_SHA` with the new Git SHA. * Update `LICENSE` with the license text from the directory mentioned above. * Update `utf8.h`, `utf16.h`, and `umachine.h` with their new contents in the `icu` repository. hx-0.3.0+20250717/bindings/vendor/src/unicode/ptypes.h000066400000000000000000000001101503625671400220410ustar00rootroot00000000000000// This file must exist in order for `utf8.h` and `utf16.h` to be used. hx-0.3.0+20250717/bindings/vendor/src/unicode/umachine.h000066400000000000000000000350211503625671400223170ustar00rootroot00000000000000// © 2016 and later: Unicode, Inc. and others. // License & terms of use: http://www.unicode.org/copyright.html /* ****************************************************************************** * * Copyright (C) 1999-2015, International Business Machines * Corporation and others. All Rights Reserved. * ****************************************************************************** * file name: umachine.h * encoding: UTF-8 * tab size: 8 (not used) * indentation:4 * * created on: 1999sep13 * created by: Markus W. Scherer * * This file defines basic types and constants for ICU to be * platform-independent. 
umachine.h and utf.h are included into * utypes.h to provide all the general definitions for ICU. * All of these definitions used to be in utypes.h before * the UTF-handling macros made this unmaintainable. */ #ifndef __UMACHINE_H__ #define __UMACHINE_H__ /** * \file * \brief Basic types and constants for UTF * *

<h2> Basic types and constants for UTF </h2>

* This file defines basic types and constants for utf.h to be * platform-independent. umachine.h and utf.h are included into * utypes.h to provide all the general definitions for ICU. * All of these definitions used to be in utypes.h before * the UTF-handling macros made this unmaintainable. * */ /*==========================================================================*/ /* Include platform-dependent definitions */ /* which are contained in the platform-specific file platform.h */ /*==========================================================================*/ #include "unicode/ptypes.h" /* platform.h is included in ptypes.h */ /* * ANSI C headers: * stddef.h defines wchar_t */ #include <stddef.h> /*==========================================================================*/ /* For C wrappers, we use the symbol U_STABLE. */ /* This works properly if the includer is C or C++. */ /* Functions are declared U_STABLE return-type U_EXPORT2 function-name()... */ /*==========================================================================*/ /** * \def U_CFUNC * This is used in a declaration of a library private ICU C function. * @stable ICU 2.4 */ /** * \def U_CDECL_BEGIN * This is used to begin a declaration of a library private ICU C API. * @stable ICU 2.4 */ /** * \def U_CDECL_END * This is used to end a declaration of a library private ICU C API * @stable ICU 2.4 */ #ifdef __cplusplus # define U_CFUNC extern "C" # define U_CDECL_BEGIN extern "C" { # define U_CDECL_END } #else # define U_CFUNC extern # define U_CDECL_BEGIN # define U_CDECL_END #endif #ifndef U_ATTRIBUTE_DEPRECATED /** * \def U_ATTRIBUTE_DEPRECATED * This is used for GCC specific attributes * @internal */ #if U_GCC_MAJOR_MINOR >= 302 # define U_ATTRIBUTE_DEPRECATED __attribute__ ((deprecated)) /** * \def U_ATTRIBUTE_DEPRECATED * This is used for Visual C++ specific attributes * @internal */ #elif defined(_MSC_VER) && (_MSC_VER >= 1400) # define U_ATTRIBUTE_DEPRECATED __declspec(deprecated) #else # define U_ATTRIBUTE_DEPRECATED #endif #endif /** This is used to declare a function as a public ICU C API @stable ICU 2.0*/ #define U_CAPI U_CFUNC U_EXPORT /** This is used to declare a function as a stable public ICU C API*/ #define U_STABLE U_CAPI /** This is used to declare a function as a draft public ICU C API */ #define U_DRAFT U_CAPI /** This is used to declare a function as a deprecated public ICU C API */ #define U_DEPRECATED U_CAPI U_ATTRIBUTE_DEPRECATED /** This is used to declare a function as an obsolete public ICU C API */ #define U_OBSOLETE U_CAPI /** This is used to declare a function as an internal ICU C API */ #define U_INTERNAL U_CAPI /** * \def U_OVERRIDE * Defined to the C++11 "override" keyword if available. * Denotes a class or member which is an override of the base class. * May result in an error if it applied to something not an override. * @internal */ #ifndef U_OVERRIDE #define U_OVERRIDE override #endif /** * \def U_FINAL * Defined to the C++11 "final" keyword if available. * Denotes a class or member which may not be overridden in subclasses. * May result in an error if subclasses attempt to override.
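 *
 * Illustrative use (not part of the original ICU header; Base and Derived are
 * made-up class names):
 *
 *     class Base { public: virtual ~Base(); virtual void handle(); };
 *     class Derived U_FINAL : public Base {      // Derived cannot be subclassed further
 *     public:
 *         void handle() U_OVERRIDE;              // rejected if Base::handle() were not virtual
 *     };
 *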
* @internal */ #if !defined(U_FINAL) || defined(U_IN_DOXYGEN) #define U_FINAL final #endif // Before ICU 65, function-like, multi-statement ICU macros were just defined as // series of statements wrapped in { } blocks and the caller could choose to // either treat them as if they were actual functions and end the invocation // with a trailing ; creating an empty statement after the block or else omit // this trailing ; using the knowledge that the macro would expand to { }. // // But doing so doesn't work well with macros that look like functions and // compiler warnings about empty statements (ICU-20601) and ICU 65 therefore // switches to the standard solution of wrapping such macros in do { } while. // // This will however break existing code that depends on being able to invoke // these macros without a trailing ; so to be able to remain compatible with // such code the wrapper is itself defined as macros so that it's possible to // build ICU 65 and later with the old macro behaviour, like this: // // CPPFLAGS='-DUPRV_BLOCK_MACRO_BEGIN="" -DUPRV_BLOCK_MACRO_END=""' // runConfigureICU ... /** * \def UPRV_BLOCK_MACRO_BEGIN * Defined as the "do" keyword by default. * @internal */ #ifndef UPRV_BLOCK_MACRO_BEGIN #define UPRV_BLOCK_MACRO_BEGIN do #endif /** * \def UPRV_BLOCK_MACRO_END * Defined as "while (FALSE)" by default. * @internal */ #ifndef UPRV_BLOCK_MACRO_END #define UPRV_BLOCK_MACRO_END while (FALSE) #endif /*==========================================================================*/ /* limits for int32_t etc., like in POSIX inttypes.h */ /*==========================================================================*/ #ifndef INT8_MIN /** The smallest value an 8 bit signed integer can hold @stable ICU 2.0 */ # define INT8_MIN ((int8_t)(-128)) #endif #ifndef INT16_MIN /** The smallest value a 16 bit signed integer can hold @stable ICU 2.0 */ # define INT16_MIN ((int16_t)(-32767-1)) #endif #ifndef INT32_MIN /** The smallest value a 32 bit signed integer can hold @stable ICU 2.0 */ # define INT32_MIN ((int32_t)(-2147483647-1)) #endif #ifndef INT8_MAX /** The largest value an 8 bit signed integer can hold @stable ICU 2.0 */ # define INT8_MAX ((int8_t)(127)) #endif #ifndef INT16_MAX /** The largest value a 16 bit signed integer can hold @stable ICU 2.0 */ # define INT16_MAX ((int16_t)(32767)) #endif #ifndef INT32_MAX /** The largest value a 32 bit signed integer can hold @stable ICU 2.0 */ # define INT32_MAX ((int32_t)(2147483647)) #endif #ifndef UINT8_MAX /** The largest value an 8 bit unsigned integer can hold @stable ICU 2.0 */ # define UINT8_MAX ((uint8_t)(255U)) #endif #ifndef UINT16_MAX /** The largest value a 16 bit unsigned integer can hold @stable ICU 2.0 */ # define UINT16_MAX ((uint16_t)(65535U)) #endif #ifndef UINT32_MAX /** The largest value a 32 bit unsigned integer can hold @stable ICU 2.0 */ # define UINT32_MAX ((uint32_t)(4294967295U)) #endif #if defined(U_INT64_T_UNAVAILABLE) # error int64_t is required for decimal format and rule-based number format. #else # ifndef INT64_C /** * Provides a platform independent way to specify a signed 64-bit integer constant. * note: may be wrong for some 64 bit platforms - ensure your compiler provides INT64_C * @stable ICU 2.8 */ # define INT64_C(c) c ## LL # endif # ifndef UINT64_C /** * Provides a platform independent way to specify an unsigned 64-bit integer constant. 
* note: may be wrong for some 64 bit platforms - ensure your compiler provides UINT64_C * @stable ICU 2.8 */ # define UINT64_C(c) c ## ULL # endif # ifndef U_INT64_MIN /** The smallest value a 64 bit signed integer can hold @stable ICU 2.8 */ # define U_INT64_MIN ((int64_t)(INT64_C(-9223372036854775807)-1)) # endif # ifndef U_INT64_MAX /** The largest value a 64 bit signed integer can hold @stable ICU 2.8 */ # define U_INT64_MAX ((int64_t)(INT64_C(9223372036854775807))) # endif # ifndef U_UINT64_MAX /** The largest value a 64 bit unsigned integer can hold @stable ICU 2.8 */ # define U_UINT64_MAX ((uint64_t)(UINT64_C(18446744073709551615))) # endif #endif /*==========================================================================*/ /* Boolean data type */ /*==========================================================================*/ /** The ICU boolean type @stable ICU 2.0 */ typedef int8_t UBool; #ifndef TRUE /** The TRUE value of a UBool @stable ICU 2.0 */ # define TRUE 1 #endif #ifndef FALSE /** The FALSE value of a UBool @stable ICU 2.0 */ # define FALSE 0 #endif /*==========================================================================*/ /* Unicode data types */ /*==========================================================================*/ /* wchar_t-related definitions -------------------------------------------- */ /* * \def U_WCHAR_IS_UTF16 * Defined if wchar_t uses UTF-16. * * @stable ICU 2.0 */ /* * \def U_WCHAR_IS_UTF32 * Defined if wchar_t uses UTF-32. * * @stable ICU 2.0 */ #if !defined(U_WCHAR_IS_UTF16) && !defined(U_WCHAR_IS_UTF32) # ifdef __STDC_ISO_10646__ # if (U_SIZEOF_WCHAR_T==2) # define U_WCHAR_IS_UTF16 # elif (U_SIZEOF_WCHAR_T==4) # define U_WCHAR_IS_UTF32 # endif # elif defined __UCS2__ # if (U_PF_OS390 <= U_PLATFORM && U_PLATFORM <= U_PF_OS400) && (U_SIZEOF_WCHAR_T==2) # define U_WCHAR_IS_UTF16 # endif # elif defined(__UCS4__) || (U_PLATFORM == U_PF_OS400 && defined(__UTF32__)) # if (U_SIZEOF_WCHAR_T==4) # define U_WCHAR_IS_UTF32 # endif # elif U_PLATFORM_IS_DARWIN_BASED || (U_SIZEOF_WCHAR_T==4 && U_PLATFORM_IS_LINUX_BASED) # define U_WCHAR_IS_UTF32 # elif U_PLATFORM_HAS_WIN32_API # define U_WCHAR_IS_UTF16 # endif #endif /* UChar and UChar32 definitions -------------------------------------------- */ /** Number of bytes in a UChar. @stable ICU 2.0 */ #define U_SIZEOF_UCHAR 2 /** * \def U_CHAR16_IS_TYPEDEF * If 1, then char16_t is a typedef and not a real type (yet) * @internal */ #if (U_PLATFORM == U_PF_AIX) && defined(__cplusplus) &&(U_CPLUSPLUS_VERSION < 11) // for AIX, uchar.h needs to be included # include <uchar.h> # define U_CHAR16_IS_TYPEDEF 1 #elif defined(_MSC_VER) && (_MSC_VER < 1900) // Versions of Visual Studio/MSVC below 2015 do not support char16_t as a real type, // and instead use a typedef. https://msdn.microsoft.com/library/bb531344.aspx # define U_CHAR16_IS_TYPEDEF 1 #else # define U_CHAR16_IS_TYPEDEF 0 #endif /** * \var UChar * * The base type for UTF-16 code units and pointers. * Unsigned 16-bit integer. * Starting with ICU 59, C++ API uses char16_t directly, while C API continues to use UChar. * * UChar is configurable by defining the macro UCHAR_TYPE * on the preprocessor or compiler command line: * -DUCHAR_TYPE=uint16_t or -DUCHAR_TYPE=wchar_t (if U_SIZEOF_WCHAR_T==2) etc. * (The UCHAR_TYPE can also be \#defined earlier in this file, for outside the ICU library code.) * This is for transitional use from application code that uses uint16_t or wchar_t for UTF-16. * * The default is UChar=char16_t.
* * C++11 defines char16_t as bit-compatible with uint16_t, but as a distinct type. * * In C, char16_t is a simple typedef of uint_least16_t. * ICU requires uint_least16_t=uint16_t for data memory mapping. * On macOS, char16_t is not available because the uchar.h standard header is missing. * * @stable ICU 4.4 */ #if 1 // #if 1 is normal. UChar defaults to char16_t in C++. // For configuration testing of UChar=uint16_t temporarily change this to #if 0. // The intltest Makefile #defines UCHAR_TYPE=char16_t, // so we only #define it to uint16_t if it is undefined so far. #elif !defined(UCHAR_TYPE) # define UCHAR_TYPE uint16_t #endif #if defined(U_COMBINED_IMPLEMENTATION) || defined(U_COMMON_IMPLEMENTATION) || \ defined(U_I18N_IMPLEMENTATION) || defined(U_IO_IMPLEMENTATION) // Inside the ICU library code, never configurable. typedef char16_t UChar; #elif defined(UCHAR_TYPE) typedef UCHAR_TYPE UChar; #elif defined(__cplusplus) typedef char16_t UChar; #else typedef uint16_t UChar; #endif /** * \var OldUChar * Default ICU 58 definition of UChar. * A base type for UTF-16 code units and pointers. * Unsigned 16-bit integer. * * Define OldUChar to be wchar_t if that is 16 bits wide. * If wchar_t is not 16 bits wide, then define UChar to be uint16_t. * * This makes the definition of OldUChar platform-dependent * but allows direct string type compatibility with platforms with * 16-bit wchar_t types. * * This is how UChar was defined in ICU 58, for transition convenience. * Exception: ICU 58 UChar was defined to UCHAR_TYPE if that macro was defined. * The current UChar responds to UCHAR_TYPE but OldUChar does not. * * @stable ICU 59 */ #if U_SIZEOF_WCHAR_T==2 typedef wchar_t OldUChar; #elif defined(__CHAR16_TYPE__) typedef __CHAR16_TYPE__ OldUChar; #else typedef uint16_t OldUChar; #endif /** * Define UChar32 as a type for single Unicode code points. * UChar32 is a signed 32-bit integer (same as int32_t). * * The Unicode code point range is 0..0x10ffff. * All other values (negative or >=0x110000) are illegal as Unicode code points. * They may be used as sentinel values to indicate "done", "error" * or similar non-code point conditions. * * Before ICU 2.4 (Jitterbug 2146), UChar32 was defined * to be wchar_t if that is 32 bits wide (wchar_t may be signed or unsigned) * or else to be uint32_t. * That is, the definition of UChar32 was platform-dependent. * * @see U_SENTINEL * @stable ICU 2.4 */ typedef int32_t UChar32; /** * This value is intended for sentinel values for APIs that * (take or) return single code points (UChar32). * It is outside of the Unicode code point range 0..0x10ffff. * * For example, a "done" or "error" value in a new API * could be indicated with U_SENTINEL. * * ICU APIs designed before ICU 2.4 usually define service-specific "done" * values, mostly 0xffff. * Those may need to be distinguished from * actual U+ffff text contents by calling functions like * CharacterIterator::hasNext() or UnicodeString::length(). * * @return -1 * @see UChar32 * @stable ICU 2.4 */ #define U_SENTINEL (-1) #include "unicode/urename.h" #endif hx-0.3.0+20250717/bindings/vendor/src/unicode/urename.h000066400000000000000000000001101503625671400221510ustar00rootroot00000000000000// This file must exist in order for `utf8.h` and `utf16.h` to be used. hx-0.3.0+20250717/bindings/vendor/src/unicode/utf.h000066400000000000000000000001101503625671400213130ustar00rootroot00000000000000// This file must exist in order for `utf8.h` and `utf16.h` to be used. 
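The do/while wrapper that umachine.h describes for UPRV_BLOCK_MACRO_BEGIN / UPRV_BLOCK_MACRO_END is easiest to see in a small standalone sketch. The following example is illustrative only (it is not part of the vendored ICU sources, and MY_SWAP is a made-up macro); it uses the same default do { ... } while (FALSE) shape to show why a function-like, multi-statement macro can be invoked with a trailing semicolon inside an unbraced if/else without producing an empty statement or a mis-bound else.

#include <stdio.h>

/* Hypothetical macro using the same do/while(0) shape that
 * UPRV_BLOCK_MACRO_BEGIN / UPRV_BLOCK_MACRO_END expand to by default. */
#define MY_SWAP(a, b, tmp) do { \
        (tmp) = (a);            \
        (a) = (b);              \
        (b) = (tmp);            \
    } while (0)

int main(void) {
    int x = 1, y = 2, t;
    if (x < y)
        MY_SWAP(x, y, t);      /* expands to one statement, so the else still binds to the if */
    else
        printf("already ordered\n");
    printf("%d %d\n", x, y);   /* prints "2 1" */
    return 0;
}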
hx-0.3.0+20250717/bindings/vendor/src/unicode/utf16.h000066400000000000000000000565061503625671400215060ustar00rootroot00000000000000// © 2016 and later: Unicode, Inc. and others. // License & terms of use: http://www.unicode.org/copyright.html /* ******************************************************************************* * * Copyright (C) 1999-2012, International Business Machines * Corporation and others. All Rights Reserved. * ******************************************************************************* * file name: utf16.h * encoding: UTF-8 * tab size: 8 (not used) * indentation:4 * * created on: 1999sep09 * created by: Markus W. Scherer */ /** * \file * \brief C API: 16-bit Unicode handling macros * * This file defines macros to deal with 16-bit Unicode (UTF-16) code units and strings. * * For more information see utf.h and the ICU User Guide Strings chapter * (http://userguide.icu-project.org/strings). * * Usage: * ICU coding guidelines for if() statements should be followed when using these macros. * Compound statements (curly braces {}) must be used for if-else-while... * bodies and all macro statements should be terminated with semicolon. */ #ifndef __UTF16_H__ #define __UTF16_H__ #include "unicode/umachine.h" #ifndef __UTF_H__ # include "unicode/utf.h" #endif /* single-code point definitions -------------------------------------------- */ /** * Does this code unit alone encode a code point (BMP, not a surrogate)? * @param c 16-bit code unit * @return TRUE or FALSE * @stable ICU 2.4 */ #define U16_IS_SINGLE(c) !U_IS_SURROGATE(c) /** * Is this code unit a lead surrogate (U+d800..U+dbff)? * @param c 16-bit code unit * @return TRUE or FALSE * @stable ICU 2.4 */ #define U16_IS_LEAD(c) (((c)&0xfffffc00)==0xd800) /** * Is this code unit a trail surrogate (U+dc00..U+dfff)? * @param c 16-bit code unit * @return TRUE or FALSE * @stable ICU 2.4 */ #define U16_IS_TRAIL(c) (((c)&0xfffffc00)==0xdc00) /** * Is this code unit a surrogate (U+d800..U+dfff)? * @param c 16-bit code unit * @return TRUE or FALSE * @stable ICU 2.4 */ #define U16_IS_SURROGATE(c) U_IS_SURROGATE(c) /** * Assuming c is a surrogate code point (U16_IS_SURROGATE(c)), * is it a lead surrogate? * @param c 16-bit code unit * @return TRUE or FALSE * @stable ICU 2.4 */ #define U16_IS_SURROGATE_LEAD(c) (((c)&0x400)==0) /** * Assuming c is a surrogate code point (U16_IS_SURROGATE(c)), * is it a trail surrogate? * @param c 16-bit code unit * @return TRUE or FALSE * @stable ICU 4.2 */ #define U16_IS_SURROGATE_TRAIL(c) (((c)&0x400)!=0) /** * Helper constant for U16_GET_SUPPLEMENTARY. * @internal */ #define U16_SURROGATE_OFFSET ((0xd800<<10UL)+0xdc00-0x10000) /** * Get a supplementary code point value (U+10000..U+10ffff) * from its lead and trail surrogates. * The result is undefined if the input values are not * lead and trail surrogates. * * @param lead lead surrogate (U+d800..U+dbff) * @param trail trail surrogate (U+dc00..U+dfff) * @return supplementary code point (U+10000..U+10ffff) * @stable ICU 2.4 */ #define U16_GET_SUPPLEMENTARY(lead, trail) \ (((UChar32)(lead)<<10UL)+(UChar32)(trail)-U16_SURROGATE_OFFSET) /** * Get the lead surrogate (0xd800..0xdbff) for a * supplementary code point (0x10000..0x10ffff). * @param supplementary 32-bit code point (U+10000..U+10ffff) * @return lead surrogate (U+d800..U+dbff) for supplementary * @stable ICU 2.4 */ #define U16_LEAD(supplementary) (UChar)(((supplementary)>>10)+0xd7c0) /** * Get the trail surrogate (0xdc00..0xdfff) for a * supplementary code point (0x10000..0x10ffff). 
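 *
 * Illustrative round trip (not part of the original ICU header), using
 * U+1F600 as an example supplementary code point:
 *
 *     UChar32 cp = 0x1F600;
 *     UChar lead = U16_LEAD(cp);    // 0xD83D
 *     UChar trail = U16_TRAIL(cp);  // 0xDE00
 *     // U16_GET_SUPPLEMENTARY(lead, trail) == 0x1F600
 *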
* @param supplementary 32-bit code point (U+10000..U+10ffff) * @return trail surrogate (U+dc00..U+dfff) for supplementary * @stable ICU 2.4 */ #define U16_TRAIL(supplementary) (UChar)(((supplementary)&0x3ff)|0xdc00) /** * How many 16-bit code units are used to encode this Unicode code point? (1 or 2) * The result is not defined if c is not a Unicode code point (U+0000..U+10ffff). * @param c 32-bit code point * @return 1 or 2 * @stable ICU 2.4 */ #define U16_LENGTH(c) ((uint32_t)(c)<=0xffff ? 1 : 2) /** * The maximum number of 16-bit code units per Unicode code point (U+0000..U+10ffff). * @return 2 * @stable ICU 2.4 */ #define U16_MAX_LENGTH 2 /** * Get a code point from a string at a random-access offset, * without changing the offset. * "Unsafe" macro, assumes well-formed UTF-16. * * The offset may point to either the lead or trail surrogate unit * for a supplementary code point, in which case the macro will read * the adjacent matching surrogate as well. * The result is undefined if the offset points to a single, unpaired surrogate. * Iteration through a string is more efficient with U16_NEXT_UNSAFE or U16_NEXT. * * @param s const UChar * string * @param i string offset * @param c output UChar32 variable * @see U16_GET * @stable ICU 2.4 */ #define U16_GET_UNSAFE(s, i, c) UPRV_BLOCK_MACRO_BEGIN { \ (c)=(s)[i]; \ if(U16_IS_SURROGATE(c)) { \ if(U16_IS_SURROGATE_LEAD(c)) { \ (c)=U16_GET_SUPPLEMENTARY((c), (s)[(i)+1]); \ } else { \ (c)=U16_GET_SUPPLEMENTARY((s)[(i)-1], (c)); \ } \ } \ } UPRV_BLOCK_MACRO_END /** * Get a code point from a string at a random-access offset, * without changing the offset. * "Safe" macro, handles unpaired surrogates and checks for string boundaries. * * The offset may point to either the lead or trail surrogate unit * for a supplementary code point, in which case the macro will read * the adjacent matching surrogate as well. * * The length can be negative for a NUL-terminated string. * * If the offset points to a single, unpaired surrogate, then * c is set to that unpaired surrogate. * Iteration through a string is more efficient with U16_NEXT_UNSAFE or U16_NEXT. * * @param s const UChar * string * @param start starting string offset (usually 0) * @param i string offset, must be start<=i(start) && U16_IS_LEAD(__c2=(s)[(i)-1])) { \ (c)=U16_GET_SUPPLEMENTARY(__c2, (c)); \ } \ } \ } \ } UPRV_BLOCK_MACRO_END /** * Get a code point from a string at a random-access offset, * without changing the offset. * "Safe" macro, handles unpaired surrogates and checks for string boundaries. * * The offset may point to either the lead or trail surrogate unit * for a supplementary code point, in which case the macro will read * the adjacent matching surrogate as well. * * The length can be negative for a NUL-terminated string. * * If the offset points to a single, unpaired surrogate, then * c is set to U+FFFD. * Iteration through a string is more efficient with U16_NEXT_UNSAFE or U16_NEXT_OR_FFFD. * * @param s const UChar * string * @param start starting string offset (usually 0) * @param i string offset, must be start<=i(start) && U16_IS_LEAD(__c2=(s)[(i)-1])) { \ (c)=U16_GET_SUPPLEMENTARY(__c2, (c)); \ } else { \ (c)=0xfffd; \ } \ } \ } \ } UPRV_BLOCK_MACRO_END /* definitions with forward iteration --------------------------------------- */ /** * Get a code point from a string at a code point boundary offset, * and advance the offset to the next code point boundary. * (Post-incrementing forward iteration.) * "Unsafe" macro, assumes well-formed UTF-16. 
* * The offset may point to the lead surrogate unit * for a supplementary code point, in which case the macro will read * the following trail surrogate as well. * If the offset points to a trail surrogate, then that itself * will be returned as the code point. * The result is undefined if the offset points to a single, unpaired lead surrogate. * * @param s const UChar * string * @param i string offset * @param c output UChar32 variable * @see U16_NEXT * @stable ICU 2.4 */ #define U16_NEXT_UNSAFE(s, i, c) UPRV_BLOCK_MACRO_BEGIN { \ (c)=(s)[(i)++]; \ if(U16_IS_LEAD(c)) { \ (c)=U16_GET_SUPPLEMENTARY((c), (s)[(i)++]); \ } \ } UPRV_BLOCK_MACRO_END /** * Get a code point from a string at a code point boundary offset, * and advance the offset to the next code point boundary. * (Post-incrementing forward iteration.) * "Safe" macro, handles unpaired surrogates and checks for string boundaries. * * The length can be negative for a NUL-terminated string. * * The offset may point to the lead surrogate unit * for a supplementary code point, in which case the macro will read * the following trail surrogate as well. * If the offset points to a trail surrogate or * to a single, unpaired lead surrogate, then c is set to that unpaired surrogate. * * @param s const UChar * string * @param i string offset, must be i>10)+0xd7c0); \ (s)[(i)++]=(uint16_t)(((c)&0x3ff)|0xdc00); \ } \ } UPRV_BLOCK_MACRO_END /** * Append a code point to a string, overwriting 1 or 2 code units. * The offset points to the current end of the string contents * and is advanced (post-increment). * "Safe" macro, checks for a valid code point. * If a surrogate pair is written, checks for sufficient space in the string. * If the code point is not valid or a trail surrogate does not fit, * then isError is set to TRUE. * * @param s const UChar * string buffer * @param i string offset, must be i>10)+0xd7c0); \ (s)[(i)++]=(uint16_t)(((c)&0x3ff)|0xdc00); \ } else /* c>0x10ffff or not enough space */ { \ (isError)=TRUE; \ } \ } UPRV_BLOCK_MACRO_END /** * Advance the string offset from one code point boundary to the next. * (Post-incrementing iteration.) * "Unsafe" macro, assumes well-formed UTF-16. * * @param s const UChar * string * @param i string offset * @see U16_FWD_1 * @stable ICU 2.4 */ #define U16_FWD_1_UNSAFE(s, i) UPRV_BLOCK_MACRO_BEGIN { \ if(U16_IS_LEAD((s)[(i)++])) { \ ++(i); \ } \ } UPRV_BLOCK_MACRO_END /** * Advance the string offset from one code point boundary to the next. * (Post-incrementing iteration.) * "Safe" macro, handles unpaired surrogates and checks for string boundaries. * * The length can be negative for a NUL-terminated string. * * @param s const UChar * string * @param i string offset, must be i0) { \ U16_FWD_1_UNSAFE(s, i); \ --__N; \ } \ } UPRV_BLOCK_MACRO_END /** * Advance the string offset from one code point boundary to the n-th next one, * i.e., move forward by n code points. * (Post-incrementing iteration.) * "Safe" macro, handles unpaired surrogates and checks for string boundaries. * * The length can be negative for a NUL-terminated string. * * @param s const UChar * string * @param i int32_t string offset, must be i0 && ((i)<(length) || ((length)<0 && (s)[i]!=0))) { \ U16_FWD_1(s, i, length); \ --__N; \ } \ } UPRV_BLOCK_MACRO_END /** * Adjust a random-access offset to a code point boundary * at the start of a code point. * If the offset points to the trail surrogate of a surrogate pair, * then the offset is decremented. * Otherwise, it is not modified. * "Unsafe" macro, assumes well-formed UTF-16. 
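 *
 * Illustrative example (not part of the original ICU header): with
 * s = { 0xD83D, 0xDE00, 0x0041 } and i == 1 (pointing at the trail surrogate),
 * this macro moves i back to 0; with i == 2 (pointing at 0x0041) it leaves i
 * unchanged.
 *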
* * @param s const UChar * string * @param i string offset * @see U16_SET_CP_START * @stable ICU 2.4 */ #define U16_SET_CP_START_UNSAFE(s, i) UPRV_BLOCK_MACRO_BEGIN { \ if(U16_IS_TRAIL((s)[i])) { \ --(i); \ } \ } UPRV_BLOCK_MACRO_END /** * Adjust a random-access offset to a code point boundary * at the start of a code point. * If the offset points to the trail surrogate of a surrogate pair, * then the offset is decremented. * Otherwise, it is not modified. * "Safe" macro, handles unpaired surrogates and checks for string boundaries. * * @param s const UChar * string * @param start starting string offset (usually 0) * @param i string offset, must be start<=i * @see U16_SET_CP_START_UNSAFE * @stable ICU 2.4 */ #define U16_SET_CP_START(s, start, i) UPRV_BLOCK_MACRO_BEGIN { \ if(U16_IS_TRAIL((s)[i]) && (i)>(start) && U16_IS_LEAD((s)[(i)-1])) { \ --(i); \ } \ } UPRV_BLOCK_MACRO_END /* definitions with backward iteration -------------------------------------- */ /** * Move the string offset from one code point boundary to the previous one * and get the code point between them. * (Pre-decrementing backward iteration.) * "Unsafe" macro, assumes well-formed UTF-16. * * The input offset may be the same as the string length. * If the offset is behind a trail surrogate unit * for a supplementary code point, then the macro will read * the preceding lead surrogate as well. * If the offset is behind a lead surrogate, then that itself * will be returned as the code point. * The result is undefined if the offset is behind a single, unpaired trail surrogate. * * @param s const UChar * string * @param i string offset * @param c output UChar32 variable * @see U16_PREV * @stable ICU 2.4 */ #define U16_PREV_UNSAFE(s, i, c) UPRV_BLOCK_MACRO_BEGIN { \ (c)=(s)[--(i)]; \ if(U16_IS_TRAIL(c)) { \ (c)=U16_GET_SUPPLEMENTARY((s)[--(i)], (c)); \ } \ } UPRV_BLOCK_MACRO_END /** * Move the string offset from one code point boundary to the previous one * and get the code point between them. * (Pre-decrementing backward iteration.) * "Safe" macro, handles unpaired surrogates and checks for string boundaries. * * The input offset may be the same as the string length. * If the offset is behind a trail surrogate unit * for a supplementary code point, then the macro will read * the preceding lead surrogate as well. * If the offset is behind a lead surrogate or behind a single, unpaired * trail surrogate, then c is set to that unpaired surrogate. * * @param s const UChar * string * @param start starting string offset (usually 0) * @param i string offset, must be start(start) && U16_IS_LEAD(__c2=(s)[(i)-1])) { \ --(i); \ (c)=U16_GET_SUPPLEMENTARY(__c2, (c)); \ } \ } \ } UPRV_BLOCK_MACRO_END /** * Move the string offset from one code point boundary to the previous one * and get the code point between them. * (Pre-decrementing backward iteration.) * "Safe" macro, handles unpaired surrogates and checks for string boundaries. * * The input offset may be the same as the string length. * If the offset is behind a trail surrogate unit * for a supplementary code point, then the macro will read * the preceding lead surrogate as well. * If the offset is behind a lead surrogate or behind a single, unpaired * trail surrogate, then c is set to U+FFFD. 
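 *
 * Illustrative example (not part of the original ICU header): with
 * s = { 0x0041, 0xDE00 } and i == 2, the trail surrogate at s[1] is unpaired,
 * so this macro sets c to 0xFFFD and leaves i == 1; a well-formed surrogate
 * pair would instead yield the supplementary code point.
 *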
* * @param s const UChar * string * @param start starting string offset (usually 0) * @param i string offset, must be start(start) && U16_IS_LEAD(__c2=(s)[(i)-1])) { \ --(i); \ (c)=U16_GET_SUPPLEMENTARY(__c2, (c)); \ } else { \ (c)=0xfffd; \ } \ } \ } UPRV_BLOCK_MACRO_END /** * Move the string offset from one code point boundary to the previous one. * (Pre-decrementing backward iteration.) * The input offset may be the same as the string length. * "Unsafe" macro, assumes well-formed UTF-16. * * @param s const UChar * string * @param i string offset * @see U16_BACK_1 * @stable ICU 2.4 */ #define U16_BACK_1_UNSAFE(s, i) UPRV_BLOCK_MACRO_BEGIN { \ if(U16_IS_TRAIL((s)[--(i)])) { \ --(i); \ } \ } UPRV_BLOCK_MACRO_END /** * Move the string offset from one code point boundary to the previous one. * (Pre-decrementing backward iteration.) * The input offset may be the same as the string length. * "Safe" macro, handles unpaired surrogates and checks for string boundaries. * * @param s const UChar * string * @param start starting string offset (usually 0) * @param i string offset, must be start(start) && U16_IS_LEAD((s)[(i)-1])) { \ --(i); \ } \ } UPRV_BLOCK_MACRO_END /** * Move the string offset from one code point boundary to the n-th one before it, * i.e., move backward by n code points. * (Pre-decrementing backward iteration.) * The input offset may be the same as the string length. * "Unsafe" macro, assumes well-formed UTF-16. * * @param s const UChar * string * @param i string offset * @param n number of code points to skip * @see U16_BACK_N * @stable ICU 2.4 */ #define U16_BACK_N_UNSAFE(s, i, n) UPRV_BLOCK_MACRO_BEGIN { \ int32_t __N=(n); \ while(__N>0) { \ U16_BACK_1_UNSAFE(s, i); \ --__N; \ } \ } UPRV_BLOCK_MACRO_END /** * Move the string offset from one code point boundary to the n-th one before it, * i.e., move backward by n code points. * (Pre-decrementing backward iteration.) * The input offset may be the same as the string length. * "Safe" macro, handles unpaired surrogates and checks for string boundaries. * * @param s const UChar * string * @param start start of string * @param i string offset, must be start0 && (i)>(start)) { \ U16_BACK_1(s, start, i); \ --__N; \ } \ } UPRV_BLOCK_MACRO_END /** * Adjust a random-access offset to a code point boundary after a code point. * If the offset is behind the lead surrogate of a surrogate pair, * then the offset is incremented. * Otherwise, it is not modified. * The input offset may be the same as the string length. * "Unsafe" macro, assumes well-formed UTF-16. * * @param s const UChar * string * @param i string offset * @see U16_SET_CP_LIMIT * @stable ICU 2.4 */ #define U16_SET_CP_LIMIT_UNSAFE(s, i) UPRV_BLOCK_MACRO_BEGIN { \ if(U16_IS_LEAD((s)[(i)-1])) { \ ++(i); \ } \ } UPRV_BLOCK_MACRO_END /** * Adjust a random-access offset to a code point boundary after a code point. * If the offset is behind the lead surrogate of a surrogate pair, * then the offset is incremented. * Otherwise, it is not modified. * The input offset may be the same as the string length. * "Safe" macro, handles unpaired surrogates and checks for string boundaries. * * The length can be negative for a NUL-terminated string. 
* * @param s const UChar * string * @param start int32_t starting string offset (usually 0) * @param i int32_t string offset, start<=i<=length * @param length int32_t string length * @see U16_SET_CP_LIMIT_UNSAFE * @stable ICU 2.4 */ #define U16_SET_CP_LIMIT(s, start, i, length) UPRV_BLOCK_MACRO_BEGIN { \ if((start)<(i) && ((i)<(length) || (length)<0) && U16_IS_LEAD((s)[(i)-1]) && U16_IS_TRAIL((s)[i])) { \ ++(i); \ } \ } UPRV_BLOCK_MACRO_END #endif hx-0.3.0+20250717/bindings/vendor/src/unicode/utf8.h000066400000000000000000000757221503625671400214300ustar00rootroot00000000000000// © 2016 and later: Unicode, Inc. and others. // License & terms of use: http://www.unicode.org/copyright.html /* ******************************************************************************* * * Copyright (C) 1999-2015, International Business Machines * Corporation and others. All Rights Reserved. * ******************************************************************************* * file name: utf8.h * encoding: UTF-8 * tab size: 8 (not used) * indentation:4 * * created on: 1999sep13 * created by: Markus W. Scherer */ /** * \file * \brief C API: 8-bit Unicode handling macros * * This file defines macros to deal with 8-bit Unicode (UTF-8) code units (bytes) and strings. * * For more information see utf.h and the ICU User Guide Strings chapter * (http://userguide.icu-project.org/strings). * * Usage: * ICU coding guidelines for if() statements should be followed when using these macros. * Compound statements (curly braces {}) must be used for if-else-while... * bodies and all macro statements should be terminated with semicolon. */ #ifndef __UTF8_H__ #define __UTF8_H__ #include "unicode/umachine.h" #ifndef __UTF_H__ # include "unicode/utf.h" #endif /* internal definitions ----------------------------------------------------- */ /** * Counts the trail bytes for a UTF-8 lead byte. * Returns 0 for 0..0xc1 as well as for 0xf5..0xff. * leadByte might be evaluated multiple times. * * This is internal since it is not meant to be called directly by external clients; * however it is called by public macros in this file and thus must remain stable. * * @param leadByte The first byte of a UTF-8 sequence. Must be 0..0xff. * @internal */ #define U8_COUNT_TRAIL_BYTES(leadByte) \ (U8_IS_LEAD(leadByte) ? \ ((uint8_t)(leadByte)>=0xe0)+((uint8_t)(leadByte)>=0xf0)+1 : 0) /** * Counts the trail bytes for a UTF-8 lead byte of a valid UTF-8 sequence. * Returns 0 for 0..0xc1. Undefined for 0xf5..0xff. * leadByte might be evaluated multiple times. * * This is internal since it is not meant to be called directly by external clients; * however it is called by public macros in this file and thus must remain stable. * * @param leadByte The first byte of a UTF-8 sequence. Must be 0..0xff. * @internal */ #define U8_COUNT_TRAIL_BYTES_UNSAFE(leadByte) \ (((uint8_t)(leadByte)>=0xc2)+((uint8_t)(leadByte)>=0xe0)+((uint8_t)(leadByte)>=0xf0)) /** * Mask a UTF-8 lead byte, leave only the lower bits that form part of the code point value. * * This is internal since it is not meant to be called directly by external clients; * however it is called by public macros in this file and thus must remain stable. * @internal */ #define U8_MASK_LEAD_BYTE(leadByte, countTrailBytes) ((leadByte)&=(1<<(6-(countTrailBytes)))-1) /** * Internal bit vector for 3-byte UTF-8 validity check, for use in U8_IS_VALID_LEAD3_AND_T1. * Each bit indicates whether one lead byte + first trail byte pair starts a valid sequence. 
* Lead byte E0..EF bits 3..0 are used as byte index, * first trail byte bits 7..5 are used as bit index into that byte. * @see U8_IS_VALID_LEAD3_AND_T1 * @internal */ #define U8_LEAD3_T1_BITS "\x20\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x10\x30\x30" /** * Internal 3-byte UTF-8 validity check. * Non-zero if lead byte E0..EF and first trail byte 00..FF start a valid sequence. * @internal */ #define U8_IS_VALID_LEAD3_AND_T1(lead, t1) (U8_LEAD3_T1_BITS[(lead)&0xf]&(1<<((uint8_t)(t1)>>5))) /** * Internal bit vector for 4-byte UTF-8 validity check, for use in U8_IS_VALID_LEAD4_AND_T1. * Each bit indicates whether one lead byte + first trail byte pair starts a valid sequence. * First trail byte bits 7..4 are used as byte index, * lead byte F0..F4 bits 2..0 are used as bit index into that byte. * @see U8_IS_VALID_LEAD4_AND_T1 * @internal */ #define U8_LEAD4_T1_BITS "\x00\x00\x00\x00\x00\x00\x00\x00\x1E\x0F\x0F\x0F\x00\x00\x00\x00" /** * Internal 4-byte UTF-8 validity check. * Non-zero if lead byte F0..F4 and first trail byte 00..FF start a valid sequence. * @internal */ #define U8_IS_VALID_LEAD4_AND_T1(lead, t1) (U8_LEAD4_T1_BITS[(uint8_t)(t1)>>4]&(1<<((lead)&7))) /** * Function for handling "next code point" with error-checking. * * This is internal since it is not meant to be called directly by external clients; * however it is U_STABLE (not U_INTERNAL) since it is called by public macros in this * file and thus must remain stable, and should not be hidden when other internal * functions are hidden (otherwise public macros would fail to compile). * @internal */ U_STABLE UChar32 U_EXPORT2 utf8_nextCharSafeBody(const uint8_t *s, int32_t *pi, int32_t length, UChar32 c, UBool strict); /** * Function for handling "append code point" with error-checking. * * This is internal since it is not meant to be called directly by external clients; * however it is U_STABLE (not U_INTERNAL) since it is called by public macros in this * file and thus must remain stable, and should not be hidden when other internal * functions are hidden (otherwise public macros would fail to compile). * @internal */ U_STABLE int32_t U_EXPORT2 utf8_appendCharSafeBody(uint8_t *s, int32_t i, int32_t length, UChar32 c, UBool *pIsError); /** * Function for handling "previous code point" with error-checking. * * This is internal since it is not meant to be called directly by external clients; * however it is U_STABLE (not U_INTERNAL) since it is called by public macros in this * file and thus must remain stable, and should not be hidden when other internal * functions are hidden (otherwise public macros would fail to compile). * @internal */ U_STABLE UChar32 U_EXPORT2 utf8_prevCharSafeBody(const uint8_t *s, int32_t start, int32_t *pi, UChar32 c, UBool strict); /** * Function for handling "skip backward one code point" with error-checking. * * This is internal since it is not meant to be called directly by external clients; * however it is U_STABLE (not U_INTERNAL) since it is called by public macros in this * file and thus must remain stable, and should not be hidden when other internal * functions are hidden (otherwise public macros would fail to compile). * @internal */ U_STABLE int32_t U_EXPORT2 utf8_back1SafeBody(const uint8_t *s, int32_t start, int32_t i); /* single-code point definitions -------------------------------------------- */ /** * Does this code unit (byte) encode a code point by itself (US-ASCII 0..0x7f)? 
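 *
 * Illustrative values (not part of the original ICU header): the Euro sign
 * U+20AC encodes as the three bytes E2 82 AC, so U8_LENGTH(0x20AC) is 3,
 * U8_IS_LEAD(0xE2) and U8_IS_TRAIL(0x82) are true, and U8_IS_SINGLE(0xE2) is
 * false, while U8_IS_SINGLE(0x41) is true for the ASCII letter 'A'.
 *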
* @param c 8-bit code unit (byte) * @return TRUE or FALSE * @stable ICU 2.4 */ #define U8_IS_SINGLE(c) (((c)&0x80)==0) /** * Is this code unit (byte) a UTF-8 lead byte? (0xC2..0xF4) * @param c 8-bit code unit (byte) * @return TRUE or FALSE * @stable ICU 2.4 */ #define U8_IS_LEAD(c) ((uint8_t)((c)-0xc2)<=0x32) // 0x32=0xf4-0xc2 /** * Is this code unit (byte) a UTF-8 trail byte? (0x80..0xBF) * @param c 8-bit code unit (byte) * @return TRUE or FALSE * @stable ICU 2.4 */ #define U8_IS_TRAIL(c) ((int8_t)(c)<-0x40) /** * How many code units (bytes) are used for the UTF-8 encoding * of this Unicode code point? * @param c 32-bit code point * @return 1..4, or 0 if c is a surrogate or not a Unicode code point * @stable ICU 2.4 */ #define U8_LENGTH(c) \ ((uint32_t)(c)<=0x7f ? 1 : \ ((uint32_t)(c)<=0x7ff ? 2 : \ ((uint32_t)(c)<=0xd7ff ? 3 : \ ((uint32_t)(c)<=0xdfff || (uint32_t)(c)>0x10ffff ? 0 : \ ((uint32_t)(c)<=0xffff ? 3 : 4)\ ) \ ) \ ) \ ) /** * The maximum number of UTF-8 code units (bytes) per Unicode code point (U+0000..U+10ffff). * @return 4 * @stable ICU 2.4 */ #define U8_MAX_LENGTH 4 /** * Get a code point from a string at a random-access offset, * without changing the offset. * The offset may point to either the lead byte or one of the trail bytes * for a code point, in which case the macro will read all of the bytes * for the code point. * The result is undefined if the offset points to an illegal UTF-8 * byte sequence. * Iteration through a string is more efficient with U8_NEXT_UNSAFE or U8_NEXT. * * @param s const uint8_t * string * @param i string offset * @param c output UChar32 variable * @see U8_GET * @stable ICU 2.4 */ #define U8_GET_UNSAFE(s, i, c) UPRV_BLOCK_MACRO_BEGIN { \ int32_t _u8_get_unsafe_index=(int32_t)(i); \ U8_SET_CP_START_UNSAFE(s, _u8_get_unsafe_index); \ U8_NEXT_UNSAFE(s, _u8_get_unsafe_index, c); \ } UPRV_BLOCK_MACRO_END /** * Get a code point from a string at a random-access offset, * without changing the offset. * The offset may point to either the lead byte or one of the trail bytes * for a code point, in which case the macro will read all of the bytes * for the code point. * * The length can be negative for a NUL-terminated string. * * If the offset points to an illegal UTF-8 byte sequence, then * c is set to a negative value. * Iteration through a string is more efficient with U8_NEXT_UNSAFE or U8_NEXT. * * @param s const uint8_t * string * @param start int32_t starting string offset * @param i int32_t string offset, must be start<=i=0xe0 ? \ ((c)<0xf0 ? /* U+0800..U+FFFF except surrogates */ \ U8_LEAD3_T1_BITS[(c)&=0xf]&(1<<((__t=(s)[i])>>5)) && \ (__t&=0x3f, 1) \ : /* U+10000..U+10FFFF */ \ ((c)-=0xf0)<=4 && \ U8_LEAD4_T1_BITS[(__t=(s)[i])>>4]&(1<<(c)) && \ ((c)=((c)<<6)|(__t&0x3f), ++(i)!=(length)) && \ (__t=(s)[i]-0x80)<=0x3f) && \ /* valid second-to-last trail byte */ \ ((c)=((c)<<6)|__t, ++(i)!=(length)) \ : /* U+0080..U+07FF */ \ (c)>=0xc2 && ((c)&=0x1f, 1)) && \ /* last trail byte */ \ (__t=(s)[i]-0x80)<=0x3f && \ ((c)=((c)<<6)|__t, ++(i), 1)) { \ } else { \ (c)=(sub); /* ill-formed*/ \ } \ } \ } UPRV_BLOCK_MACRO_END /** * Append a code point to a string, overwriting 1 to 4 bytes. * The offset points to the current end of the string contents * and is advanced (post-increment). * "Unsafe" macro, assumes a valid code point and sufficient space in the string. * Otherwise, the result is undefined. 
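 *
 * Illustrative example (not part of the original ICU header), assuming the
 * buffer is known to be large enough:
 *
 *     uint8_t buf[8];
 *     int32_t i = 0;
 *     U8_APPEND_UNSAFE(buf, i, 0x00E9);   // writes C3 A9, i becomes 2
 *     U8_APPEND_UNSAFE(buf, i, 0x1F600);  // writes F0 9F 98 80, i becomes 6
 *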
* * @param s const uint8_t * string buffer * @param i string offset * @param c code point to append * @see U8_APPEND * @stable ICU 2.4 */ #define U8_APPEND_UNSAFE(s, i, c) UPRV_BLOCK_MACRO_BEGIN { \ uint32_t __uc=(c); \ if(__uc<=0x7f) { \ (s)[(i)++]=(uint8_t)__uc; \ } else { \ if(__uc<=0x7ff) { \ (s)[(i)++]=(uint8_t)((__uc>>6)|0xc0); \ } else { \ if(__uc<=0xffff) { \ (s)[(i)++]=(uint8_t)((__uc>>12)|0xe0); \ } else { \ (s)[(i)++]=(uint8_t)((__uc>>18)|0xf0); \ (s)[(i)++]=(uint8_t)(((__uc>>12)&0x3f)|0x80); \ } \ (s)[(i)++]=(uint8_t)(((__uc>>6)&0x3f)|0x80); \ } \ (s)[(i)++]=(uint8_t)((__uc&0x3f)|0x80); \ } \ } UPRV_BLOCK_MACRO_END /** * Append a code point to a string, overwriting 1 to 4 bytes. * The offset points to the current end of the string contents * and is advanced (post-increment). * "Safe" macro, checks for a valid code point. * If a non-ASCII code point is written, checks for sufficient space in the string. * If the code point is not valid or trail bytes do not fit, * then isError is set to TRUE. * * @param s const uint8_t * string buffer * @param i int32_t string offset, must be i>6)|0xc0); \ (s)[(i)++]=(uint8_t)((__uc&0x3f)|0x80); \ } else if((__uc<=0xd7ff || (0xe000<=__uc && __uc<=0xffff)) && (i)+2<(capacity)) { \ (s)[(i)++]=(uint8_t)((__uc>>12)|0xe0); \ (s)[(i)++]=(uint8_t)(((__uc>>6)&0x3f)|0x80); \ (s)[(i)++]=(uint8_t)((__uc&0x3f)|0x80); \ } else if(0xffff<__uc && __uc<=0x10ffff && (i)+3<(capacity)) { \ (s)[(i)++]=(uint8_t)((__uc>>18)|0xf0); \ (s)[(i)++]=(uint8_t)(((__uc>>12)&0x3f)|0x80); \ (s)[(i)++]=(uint8_t)(((__uc>>6)&0x3f)|0x80); \ (s)[(i)++]=(uint8_t)((__uc&0x3f)|0x80); \ } else { \ (isError)=TRUE; \ } \ } UPRV_BLOCK_MACRO_END /** * Advance the string offset from one code point boundary to the next. * (Post-incrementing iteration.) * "Unsafe" macro, assumes well-formed UTF-8. * * @param s const uint8_t * string * @param i string offset * @see U8_FWD_1 * @stable ICU 2.4 */ #define U8_FWD_1_UNSAFE(s, i) UPRV_BLOCK_MACRO_BEGIN { \ (i)+=1+U8_COUNT_TRAIL_BYTES_UNSAFE((s)[i]); \ } UPRV_BLOCK_MACRO_END /** * Advance the string offset from one code point boundary to the next. * (Post-incrementing iteration.) * "Safe" macro, checks for illegal sequences and for string boundaries. * * The length can be negative for a NUL-terminated string. * * @param s const uint8_t * string * @param i int32_t string offset, must be i=0xf0 */ { \ if(U8_IS_VALID_LEAD4_AND_T1(__b, __t1) && \ ++(i)!=(length) && U8_IS_TRAIL((s)[i]) && \ ++(i)!=(length) && U8_IS_TRAIL((s)[i])) { \ ++(i); \ } \ } \ } \ } UPRV_BLOCK_MACRO_END /** * Advance the string offset from one code point boundary to the n-th next one, * i.e., move forward by n code points. * (Post-incrementing iteration.) * "Unsafe" macro, assumes well-formed UTF-8. * * @param s const uint8_t * string * @param i string offset * @param n number of code points to skip * @see U8_FWD_N * @stable ICU 2.4 */ #define U8_FWD_N_UNSAFE(s, i, n) UPRV_BLOCK_MACRO_BEGIN { \ int32_t __N=(n); \ while(__N>0) { \ U8_FWD_1_UNSAFE(s, i); \ --__N; \ } \ } UPRV_BLOCK_MACRO_END /** * Advance the string offset from one code point boundary to the n-th next one, * i.e., move forward by n code points. * (Post-incrementing iteration.) * "Safe" macro, checks for illegal sequences and for string boundaries. * * The length can be negative for a NUL-terminated string. 
* * @param s const uint8_t * string * @param i int32_t string offset, must be i0 && ((i)<(length) || ((length)<0 && (s)[i]!=0))) { \ U8_FWD_1(s, i, length); \ --__N; \ } \ } UPRV_BLOCK_MACRO_END /** * Adjust a random-access offset to a code point boundary * at the start of a code point. * If the offset points to a UTF-8 trail byte, * then the offset is moved backward to the corresponding lead byte. * Otherwise, it is not modified. * "Unsafe" macro, assumes well-formed UTF-8. * * @param s const uint8_t * string * @param i string offset * @see U8_SET_CP_START * @stable ICU 2.4 */ #define U8_SET_CP_START_UNSAFE(s, i) UPRV_BLOCK_MACRO_BEGIN { \ while(U8_IS_TRAIL((s)[i])) { --(i); } \ } UPRV_BLOCK_MACRO_END /** * Adjust a random-access offset to a code point boundary * at the start of a code point. * If the offset points to a UTF-8 trail byte, * then the offset is moved backward to the corresponding lead byte. * Otherwise, it is not modified. * * "Safe" macro, checks for illegal sequences and for string boundaries. * Unlike U8_TRUNCATE_IF_INCOMPLETE(), this macro always reads s[i]. * * @param s const uint8_t * string * @param start int32_t starting string offset (usually 0) * @param i int32_t string offset, must be start<=i * @see U8_SET_CP_START_UNSAFE * @see U8_TRUNCATE_IF_INCOMPLETE * @stable ICU 2.4 */ #define U8_SET_CP_START(s, start, i) UPRV_BLOCK_MACRO_BEGIN { \ if(U8_IS_TRAIL((s)[(i)])) { \ (i)=utf8_back1SafeBody(s, start, (i)); \ } \ } UPRV_BLOCK_MACRO_END /** * If the string ends with a UTF-8 byte sequence that is valid so far * but incomplete, then reduce the length of the string to end before * the lead byte of that incomplete sequence. * For example, if the string ends with E1 80, the length is reduced by 2. * * In all other cases (the string ends with a complete sequence, or it is not * possible for any further trail byte to extend the trailing sequence) * the length remains unchanged. * * Useful for processing text split across multiple buffers * (save the incomplete sequence for later) * and for optimizing iteration * (check for string length only once per character). * * "Safe" macro, checks for illegal sequences and for string boundaries. * Unlike U8_SET_CP_START(), this macro never reads s[length]. * * (In UTF-16, simply check for U16_IS_LEAD(last code unit).) * * @param s const uint8_t * string * @param start int32_t starting string offset (usually 0) * @param length int32_t string length (usually start<=length) * @see U8_SET_CP_START * @stable ICU 61 */ #define U8_TRUNCATE_IF_INCOMPLETE(s, start, length) UPRV_BLOCK_MACRO_BEGIN { \ if((length)>(start)) { \ uint8_t __b1=s[(length)-1]; \ if(U8_IS_SINGLE(__b1)) { \ /* common ASCII character */ \ } else if(U8_IS_LEAD(__b1)) { \ --(length); \ } else if(U8_IS_TRAIL(__b1) && ((length)-2)>=(start)) { \ uint8_t __b2=s[(length)-2]; \ if(0xe0<=__b2 && __b2<=0xf4) { \ if(__b2<0xf0 ? U8_IS_VALID_LEAD3_AND_T1(__b2, __b1) : \ U8_IS_VALID_LEAD4_AND_T1(__b2, __b1)) { \ (length)-=2; \ } \ } else if(U8_IS_TRAIL(__b2) && ((length)-3)>=(start)) { \ uint8_t __b3=s[(length)-3]; \ if(0xf0<=__b3 && __b3<=0xf4 && U8_IS_VALID_LEAD4_AND_T1(__b3, __b2)) { \ (length)-=3; \ } \ } \ } \ } \ } UPRV_BLOCK_MACRO_END /* definitions with backward iteration -------------------------------------- */ /** * Move the string offset from one code point boundary to the previous one * and get the code point between them. * (Pre-decrementing backward iteration.) * "Unsafe" macro, assumes well-formed UTF-8. * * The input offset may be the same as the string length. 
* If the offset is behind a multi-byte sequence, then the macro will read * the whole sequence. * If the offset is behind a lead byte, then that itself * will be returned as the code point. * The result is undefined if the offset is behind an illegal UTF-8 sequence. * * @param s const uint8_t * string * @param i string offset * @param c output UChar32 variable * @see U8_PREV * @stable ICU 2.4 */ #define U8_PREV_UNSAFE(s, i, c) UPRV_BLOCK_MACRO_BEGIN { \ (c)=(uint8_t)(s)[--(i)]; \ if(U8_IS_TRAIL(c)) { \ uint8_t __b, __count=1, __shift=6; \ \ /* c is a trail byte */ \ (c)&=0x3f; \ for(;;) { \ __b=(s)[--(i)]; \ if(__b>=0xc0) { \ U8_MASK_LEAD_BYTE(__b, __count); \ (c)|=(UChar32)__b<<__shift; \ break; \ } else { \ (c)|=(UChar32)(__b&0x3f)<<__shift; \ ++__count; \ __shift+=6; \ } \ } \ } \ } UPRV_BLOCK_MACRO_END /** * Move the string offset from one code point boundary to the previous one * and get the code point between them. * (Pre-decrementing backward iteration.) * "Safe" macro, checks for illegal sequences and for string boundaries. * * The input offset may be the same as the string length. * If the offset is behind a multi-byte sequence, then the macro will read * the whole sequence. * If the offset is behind a lead byte, then that itself * will be returned as the code point. * If the offset is behind an illegal UTF-8 sequence, then c is set to a negative value. * * @param s const uint8_t * string * @param start int32_t starting string offset (usually 0) * @param i int32_t string offset, must be start0) { \ U8_BACK_1_UNSAFE(s, i); \ --__N; \ } \ } UPRV_BLOCK_MACRO_END /** * Move the string offset from one code point boundary to the n-th one before it, * i.e., move backward by n code points. * (Pre-decrementing backward iteration.) * The input offset may be the same as the string length. * "Safe" macro, checks for illegal sequences and for string boundaries. * * @param s const uint8_t * string * @param start int32_t index of the start of the string * @param i int32_t string offset, must be start0 && (i)>(start)) { \ U8_BACK_1(s, start, i); \ --__N; \ } \ } UPRV_BLOCK_MACRO_END /** * Adjust a random-access offset to a code point boundary after a code point. * If the offset is behind a partial multi-byte sequence, * then the offset is incremented to behind the whole sequence. * Otherwise, it is not modified. * The input offset may be the same as the string length. * "Unsafe" macro, assumes well-formed UTF-8. * * @param s const uint8_t * string * @param i string offset * @see U8_SET_CP_LIMIT * @stable ICU 2.4 */ #define U8_SET_CP_LIMIT_UNSAFE(s, i) UPRV_BLOCK_MACRO_BEGIN { \ U8_BACK_1_UNSAFE(s, i); \ U8_FWD_1_UNSAFE(s, i); \ } UPRV_BLOCK_MACRO_END /** * Adjust a random-access offset to a code point boundary after a code point. * If the offset is behind a partial multi-byte sequence, * then the offset is incremented to behind the whole sequence. * Otherwise, it is not modified. * The input offset may be the same as the string length. * "Safe" macro, checks for illegal sequences and for string boundaries. * * The length can be negative for a NUL-terminated string. 
* * @param s const uint8_t * string * @param start int32_t starting string offset (usually 0) * @param i int32_t string offset, must be start<=i<=length * @param length int32_t string length * @see U8_SET_CP_LIMIT_UNSAFE * @stable ICU 2.4 */ #define U8_SET_CP_LIMIT(s, start, i, length) UPRV_BLOCK_MACRO_BEGIN { \ if((start)<(i) && ((i)<(length) || (length)<0)) { \ U8_BACK_1(s, start, i); \ U8_FWD_1(s, i, length); \ } \ } UPRV_BLOCK_MACRO_END #endif hx-0.3.0+20250717/bindings/vendor/src/wasm/000077500000000000000000000000001503625671400176755ustar00rootroot00000000000000hx-0.3.0+20250717/bindings/vendor/src/wasm/stdlib-symbols.txt000066400000000000000000000004101503625671400234000ustar00rootroot00000000000000"calloc", "free", "iswalnum", "iswalpha", "iswblank", "iswdigit", "iswlower", "iswspace", "iswupper", "iswxdigit", "malloc", "memchr", "memcmp", "memcpy", "memmove", "memset", "realloc", "strcmp", "strlen", "strncat", "strncmp", "strncpy", "towlower", "towupper", hx-0.3.0+20250717/bindings/vendor/src/wasm/stdlib.c000066400000000000000000000054671503625671400213360ustar00rootroot00000000000000// This file implements a very simple allocator for external scanners running // in WASM. Allocation is just bumping a static pointer and growing the heap // as needed, and freeing is mostly a noop. But in the special case of freeing // the last-allocated pointer, we'll reuse that pointer again. #ifdef TREE_SITTER_FEATURE_WASM #include #include #include #include extern void tree_sitter_debug_message(const char *, size_t); #define PAGESIZE 0x10000 #define MAX_HEAP_SIZE (4 * 1024 * 1024) typedef struct { size_t size; char data[0]; } Region; static Region *heap_end = NULL; static Region *heap_start = NULL; static Region *next = NULL; // Get the region metadata for the given heap pointer. static inline Region *region_for_ptr(void *ptr) { return ((Region *)ptr) - 1; } // Get the location of the next region after the given region, // if the given region had the given size. static inline Region *region_after(Region *self, size_t len) { char *address = self->data + len; char *aligned = (char *)((uintptr_t)(address + 3) & ~0x3); return (Region *)aligned; } static void *get_heap_end() { return (void *)(__builtin_wasm_memory_size(0) * PAGESIZE); } static int grow_heap(size_t size) { size_t new_page_count = ((size - 1) / PAGESIZE) + 1; return __builtin_wasm_memory_grow(0, new_page_count) != SIZE_MAX; } // Clear out the heap, and move it to the given address. void reset_heap(void *new_heap_start) { heap_start = new_heap_start; next = new_heap_start; heap_end = get_heap_end(); } void *malloc(size_t size) { Region *region_end = region_after(next, size); if (region_end > heap_end) { if ((char *)region_end - (char *)heap_start > MAX_HEAP_SIZE) { return NULL; } if (!grow_heap(size)) return NULL; heap_end = get_heap_end(); } void *result = &next->data; next->size = size; next = region_end; return result; } void free(void *ptr) { if (ptr == NULL) return; Region *region = region_for_ptr(ptr); Region *region_end = region_after(region, region->size); // When freeing the last allocated pointer, re-use that // pointer for the next allocation. 
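  // Illustrative trace (not from the original source): after p1 = malloc(8)
  // and then p2 = malloc(16), `next` sits just past p2's region, so free(p2)
  // rewinds `next` to p2's header and that space is handed out again by the
  // next allocation, while free(p1) is a no-op because p1 is not the most
  // recent allocation.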
if (region_end == next) { next = region; } } void *calloc(size_t count, size_t size) { void *result = malloc(count * size); memset(result, 0, count * size); return result; } void *realloc(void *ptr, size_t new_size) { if (ptr == NULL) { return malloc(new_size); } Region *region = region_for_ptr(ptr); Region *region_end = region_after(region, region->size); // When reallocating the last allocated region, return // the same pointer, and skip copying the data. if (region_end == next) { next = region; return malloc(new_size); } void *result = malloc(new_size); memcpy(result, ®ion->data, region->size); return result; } #endif hx-0.3.0+20250717/bindings/vendor/src/wasm/wasm-stdlib.h000066400000000000000000002747751503625671400223220ustar00rootroot00000000000000#ifdef TREE_SITTER_FEATURE_WASM unsigned char STDLIB_WASM[] = { 0x00, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00, 0x01, 0x1e, 0x06, 0x60, 0x02, 0x7f, 0x7f, 0x01, 0x7f, 0x60, 0x01, 0x7f, 0x00, 0x60, 0x00, 0x00, 0x60, 0x01, 0x7f, 0x01, 0x7f, 0x60, 0x00, 0x01, 0x7f, 0x60, 0x03, 0x7f, 0x7f, 0x7f, 0x01, 0x7f, 0x02, 0x9e, 0x01, 0x05, 0x03, 0x65, 0x6e, 0x76, 0x06, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x02, 0x00, 0x02, 0x03, 0x65, 0x6e, 0x76, 0x19, 0x5f, 0x5f, 0x69, 0x6e, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x01, 0x70, 0x00, 0x01, 0x16, 0x77, 0x61, 0x73, 0x69, 0x5f, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x70, 0x72, 0x65, 0x76, 0x69, 0x65, 0x77, 0x31, 0x08, 0x61, 0x72, 0x67, 0x73, 0x5f, 0x67, 0x65, 0x74, 0x00, 0x00, 0x16, 0x77, 0x61, 0x73, 0x69, 0x5f, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x70, 0x72, 0x65, 0x76, 0x69, 0x65, 0x77, 0x31, 0x0e, 0x61, 0x72, 0x67, 0x73, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x73, 0x5f, 0x67, 0x65, 0x74, 0x00, 0x00, 0x16, 0x77, 0x61, 0x73, 0x69, 0x5f, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x70, 0x72, 0x65, 0x76, 0x69, 0x65, 0x77, 0x31, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x00, 0x01, 0x03, 0x2a, 0x29, 0x02, 0x00, 0x02, 0x02, 0x01, 0x03, 0x01, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x02, 0x02, 0x05, 0x05, 0x03, 0x03, 0x05, 0x05, 0x00, 0x03, 0x00, 0x03, 0x05, 0x03, 0x05, 0x03, 0x03, 0x03, 0x03, 0x05, 0x05, 0x05, 0x03, 0x03, 0x00, 0x03, 0x03, 0x06, 0x0d, 0x02, 0x7f, 0x01, 0x41, 0x80, 0x80, 0x04, 0x0b, 0x7f, 0x00, 0x41, 0x00, 0x0b, 0x07, 0xad, 0x02, 0x1c, 0x11, 0x5f, 0x5f, 0x77, 0x61, 0x73, 0x6d, 0x5f, 0x63, 0x61, 0x6c, 0x6c, 0x5f, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x00, 0x03, 0x0f, 0x5f, 0x5f, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x5f, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x03, 0x00, 0x06, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x00, 0x06, 0x0a, 0x72, 0x65, 0x73, 0x65, 0x74, 0x5f, 0x68, 0x65, 0x61, 0x70, 0x00, 0x07, 0x06, 0x6d, 0x61, 0x6c, 0x6c, 0x6f, 0x63, 0x00, 0x08, 0x04, 0x66, 0x72, 0x65, 0x65, 0x00, 0x09, 0x06, 0x63, 0x61, 0x6c, 0x6c, 0x6f, 0x63, 0x00, 0x0a, 0x06, 0x6d, 0x65, 0x6d, 0x73, 0x65, 0x74, 0x00, 0x14, 0x07, 0x72, 0x65, 0x61, 0x6c, 0x6c, 0x6f, 0x63, 0x00, 0x0b, 0x06, 0x6d, 0x65, 0x6d, 0x63, 0x70, 0x79, 0x00, 0x13, 0x06, 0x73, 0x74, 0x72, 0x6c, 0x65, 0x6e, 0x00, 0x15, 0x08, 0x69, 0x73, 0x77, 0x61, 0x6c, 0x6e, 0x75, 0x6d, 0x00, 0x2b, 0x08, 0x69, 0x73, 0x77, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x00, 0x16, 0x08, 0x69, 0x73, 0x77, 0x62, 0x6c, 0x61, 0x6e, 0x6b, 0x00, 0x22, 0x08, 0x69, 0x73, 0x77, 0x64, 0x69, 0x67, 0x69, 0x74, 0x00, 0x23, 0x08, 0x69, 0x73, 0x77, 0x6c, 0x6f, 0x77, 0x65, 0x72, 0x00, 0x20, 0x08, 0x69, 0x73, 0x77, 0x73, 0x70, 0x61, 0x63, 0x65, 0x00, 0x2a, 0x08, 0x69, 
0x73, 0x77, 0x75, 0x70, 0x70, 0x65, 0x72, 0x00, 0x1e, 0x09, 0x69, 0x73, 0x77, 0x78, 0x64, 0x69, 0x67, 0x69, 0x74, 0x00, 0x27, 0x08, 0x74, 0x6f, 0x77, 0x6c, 0x6f, 0x77, 0x65, 0x72, 0x00, 0x1a, 0x08, 0x74, 0x6f, 0x77, 0x75, 0x70, 0x70, 0x65, 0x72, 0x00, 0x1c, 0x06, 0x6d, 0x65, 0x6d, 0x63, 0x68, 0x72, 0x00, 0x18, 0x06, 0x6d, 0x65, 0x6d, 0x63, 0x6d, 0x70, 0x00, 0x17, 0x07, 0x6d, 0x65, 0x6d, 0x6d, 0x6f, 0x76, 0x65, 0x00, 0x1f, 0x06, 0x73, 0x74, 0x72, 0x63, 0x6d, 0x70, 0x00, 0x19, 0x07, 0x73, 0x74, 0x72, 0x6e, 0x63, 0x61, 0x74, 0x00, 0x24, 0x07, 0x73, 0x74, 0x72, 0x6e, 0x63, 0x6d, 0x70, 0x00, 0x1d, 0x07, 0x73, 0x74, 0x72, 0x6e, 0x63, 0x70, 0x79, 0x00, 0x26, 0x08, 0x01, 0x05, 0x0a, 0xff, 0x2b, 0x29, 0x02, 0x00, 0x0b, 0x03, 0x00, 0x00, 0x0b, 0x0d, 0x00, 0x41, 0xe8, 0xc2, 0x04, 0x41, 0x00, 0x41, 0x10, 0xfc, 0x0b, 0x00, 0x0b, 0x51, 0x01, 0x01, 0x7f, 0x02, 0x40, 0x02, 0x40, 0x23, 0x81, 0x80, 0x80, 0x80, 0x00, 0x41, 0xe8, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x28, 0x02, 0x00, 0x0d, 0x00, 0x23, 0x81, 0x80, 0x80, 0x80, 0x00, 0x41, 0xe8, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x41, 0x01, 0x36, 0x02, 0x00, 0x10, 0x83, 0x80, 0x80, 0x80, 0x00, 0x10, 0x8d, 0x80, 0x80, 0x80, 0x00, 0x21, 0x00, 0x10, 0x92, 0x80, 0x80, 0x80, 0x00, 0x20, 0x00, 0x0d, 0x01, 0x0f, 0x0b, 0x00, 0x0b, 0x20, 0x00, 0x10, 0x90, 0x80, 0x80, 0x80, 0x00, 0x00, 0x0b, 0x37, 0x01, 0x01, 0x7f, 0x23, 0x81, 0x80, 0x80, 0x80, 0x00, 0x22, 0x01, 0x41, 0xf0, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x20, 0x00, 0x36, 0x02, 0x00, 0x20, 0x01, 0x41, 0xec, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x20, 0x00, 0x36, 0x02, 0x00, 0x20, 0x01, 0x41, 0xf4, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x3f, 0x00, 0x41, 0x10, 0x74, 0x36, 0x02, 0x00, 0x0b, 0xb4, 0x01, 0x01, 0x03, 0x7f, 0x02, 0x40, 0x02, 0x40, 0x23, 0x81, 0x80, 0x80, 0x80, 0x00, 0x22, 0x01, 0x41, 0xf4, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x28, 0x02, 0x00, 0x20, 0x01, 0x41, 0xf0, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x28, 0x02, 0x00, 0x22, 0x01, 0x20, 0x00, 0x6a, 0x41, 0x07, 0x6a, 0x41, 0x7c, 0x71, 0x22, 0x02, 0x4f, 0x0d, 0x00, 0x41, 0x00, 0x21, 0x01, 0x20, 0x02, 0x23, 0x81, 0x80, 0x80, 0x80, 0x00, 0x41, 0xec, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x28, 0x02, 0x00, 0x6b, 0x41, 0x80, 0x80, 0x80, 0x02, 0x4a, 0x0d, 0x01, 0x20, 0x00, 0x41, 0x7f, 0x6a, 0x41, 0x10, 0x76, 0x41, 0x01, 0x6a, 0x40, 0x00, 0x41, 0x7f, 0x46, 0x0d, 0x01, 0x3f, 0x00, 0x21, 0x01, 0x23, 0x81, 0x80, 0x80, 0x80, 0x00, 0x22, 0x03, 0x41, 0xf4, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x20, 0x01, 0x41, 0x10, 0x74, 0x36, 0x02, 0x00, 0x20, 0x03, 0x41, 0xf0, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x28, 0x02, 0x00, 0x21, 0x01, 0x0b, 0x20, 0x01, 0x20, 0x00, 0x36, 0x02, 0x00, 0x23, 0x81, 0x80, 0x80, 0x80, 0x00, 0x41, 0xf0, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x20, 0x02, 0x36, 0x02, 0x00, 0x20, 0x01, 0x41, 0x04, 0x6a, 0x21, 0x01, 0x0b, 0x20, 0x01, 0x0b, 0x48, 0x01, 0x02, 0x7f, 0x02, 0x40, 0x20, 0x00, 0x45, 0x0d, 0x00, 0x20, 0x00, 0x41, 0x7c, 0x6a, 0x22, 0x01, 0x28, 0x02, 0x00, 0x21, 0x02, 0x23, 0x81, 0x80, 0x80, 0x80, 0x00, 0x41, 0xf0, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x28, 0x02, 0x00, 0x20, 0x00, 0x20, 0x02, 0x6a, 0x41, 0x03, 0x6a, 0x41, 0x7c, 0x71, 0x47, 0x0d, 0x00, 0x23, 0x81, 0x80, 0x80, 0x80, 0x00, 0x41, 0xf0, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x20, 0x01, 0x36, 0x02, 0x00, 0x0b, 0x0b, 0x19, 0x00, 0x20, 0x01, 0x20, 0x00, 0x6c, 0x22, 0x00, 0x10, 0x88, 0x80, 0x80, 0x80, 0x00, 0x41, 0x00, 0x20, 0x00, 0x10, 0x94, 0x80, 0x80, 0x80, 0x00, 0x0b, 0x6b, 0x01, 0x02, 0x7f, 0x02, 0x40, 0x20, 0x00, 0x45, 0x0d, 0x00, 0x20, 0x00, 0x41, 0x7c, 0x6a, 0x22, 0x02, 0x28, 0x02, 0x00, 0x21, 0x03, 0x02, 0x40, 0x23, 0x81, 0x80, 0x80, 0x80, 0x00, 0x41, 0xf0, 0xc2, 0x84, 
0x80, 0x00, 0x6a, 0x28, 0x02, 0x00, 0x20, 0x00, 0x20, 0x03, 0x6a, 0x41, 0x03, 0x6a, 0x41, 0x7c, 0x71, 0x47, 0x0d, 0x00, 0x23, 0x81, 0x80, 0x80, 0x80, 0x00, 0x41, 0xf0, 0xc2, 0x84, 0x80, 0x00, 0x6a, 0x20, 0x02, 0x36, 0x02, 0x00, 0x0c, 0x01, 0x0b, 0x20, 0x01, 0x10, 0x88, 0x80, 0x80, 0x80, 0x00, 0x20, 0x00, 0x20, 0x02, 0x28, 0x02, 0x00, 0x10, 0x93, 0x80, 0x80, 0x80, 0x00, 0x0f, 0x0b, 0x20, 0x01, 0x10, 0x88, 0x80, 0x80, 0x80, 0x00, 0x0b, 0x0b, 0x00, 0x20, 0x00, 0x10, 0x90, 0x80, 0x80, 0x80, 0x00, 0x00, 0x0b, 0xd5, 0x01, 0x01, 0x03, 0x7f, 0x23, 0x80, 0x80, 0x80, 0x80, 0x00, 0x41, 0x10, 0x6b, 0x22, 0x00, 0x24, 0x80, 0x80, 0x80, 0x80, 0x00, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x20, 0x00, 0x41, 0x08, 0x6a, 0x20, 0x00, 0x41, 0x0c, 0x6a, 0x10, 0x8f, 0x80, 0x80, 0x80, 0x00, 0x0d, 0x00, 0x20, 0x00, 0x28, 0x02, 0x08, 0x41, 0x01, 0x6a, 0x22, 0x01, 0x45, 0x0d, 0x01, 0x20, 0x00, 0x28, 0x02, 0x0c, 0x10, 0x88, 0x80, 0x80, 0x80, 0x00, 0x22, 0x02, 0x45, 0x0d, 0x02, 0x20, 0x01, 0x41, 0x04, 0x10, 0x8a, 0x80, 0x80, 0x80, 0x00, 0x22, 0x01, 0x45, 0x0d, 0x03, 0x20, 0x01, 0x20, 0x02, 0x10, 0x8e, 0x80, 0x80, 0x80, 0x00, 0x0d, 0x04, 0x20, 0x00, 0x28, 0x02, 0x08, 0x20, 0x01, 0x10, 0x84, 0x80, 0x80, 0x80, 0x00, 0x21, 0x01, 0x20, 0x00, 0x41, 0x10, 0x6a, 0x24, 0x80, 0x80, 0x80, 0x80, 0x00, 0x20, 0x01, 0x0f, 0x0b, 0x41, 0xc7, 0x00, 0x10, 0x8c, 0x80, 0x80, 0x80, 0x00, 0x00, 0x0b, 0x41, 0xc6, 0x00, 0x10, 0x8c, 0x80, 0x80, 0x80, 0x00, 0x00, 0x0b, 0x41, 0xc6, 0x00, 0x10, 0x8c, 0x80, 0x80, 0x80, 0x00, 0x00, 0x0b, 0x20, 0x02, 0x10, 0x89, 0x80, 0x80, 0x80, 0x00, 0x41, 0xc6, 0x00, 0x10, 0x8c, 0x80, 0x80, 0x80, 0x00, 0x00, 0x0b, 0x20, 0x02, 0x10, 0x89, 0x80, 0x80, 0x80, 0x00, 0x20, 0x01, 0x10, 0x89, 0x80, 0x80, 0x80, 0x00, 0x41, 0xc7, 0x00, 0x10, 0x8c, 0x80, 0x80, 0x80, 0x00, 0x00, 0x0b, 0x11, 0x00, 0x20, 0x00, 0x20, 0x01, 0x10, 0x80, 0x80, 0x80, 0x80, 0x00, 0x41, 0xff, 0xff, 0x03, 0x71, 0x0b, 0x11, 0x00, 0x20, 0x00, 0x20, 0x01, 0x10, 0x81, 0x80, 0x80, 0x80, 0x00, 0x41, 0xff, 0xff, 0x03, 0x71, 0x0b, 0x0b, 0x00, 0x20, 0x00, 0x10, 0x82, 0x80, 0x80, 0x80, 0x00, 0x00, 0x0b, 0x02, 0x00, 0x0b, 0x0e, 0x00, 0x10, 0x91, 0x80, 0x80, 0x80, 0x00, 0x10, 0x91, 0x80, 0x80, 0x80, 0x00, 0x0b, 0xee, 0x07, 0x01, 0x04, 0x7f, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x20, 0x02, 0x41, 0x20, 0x4b, 0x0d, 0x00, 0x20, 0x01, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x01, 0x20, 0x02, 0x45, 0x0d, 0x01, 0x20, 0x00, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x20, 0x02, 0x41, 0x7f, 0x6a, 0x21, 0x03, 0x20, 0x00, 0x41, 0x01, 0x6a, 0x21, 0x04, 0x20, 0x01, 0x41, 0x01, 0x6a, 0x22, 0x05, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x02, 0x20, 0x03, 0x45, 0x0d, 0x02, 0x20, 0x00, 0x20, 0x01, 0x2d, 0x00, 0x01, 0x3a, 0x00, 0x01, 0x20, 0x02, 0x41, 0x7e, 0x6a, 0x21, 0x03, 0x20, 0x00, 0x41, 0x02, 0x6a, 0x21, 0x04, 0x20, 0x01, 0x41, 0x02, 0x6a, 0x22, 0x05, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x02, 0x20, 0x03, 0x45, 0x0d, 0x02, 0x20, 0x00, 0x20, 0x01, 0x2d, 0x00, 0x02, 0x3a, 0x00, 0x02, 0x20, 0x02, 0x41, 0x7d, 0x6a, 0x21, 0x03, 0x20, 0x00, 0x41, 0x03, 0x6a, 0x21, 0x04, 0x20, 0x01, 0x41, 0x03, 0x6a, 0x22, 0x05, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x02, 0x20, 0x03, 0x45, 0x0d, 0x02, 0x20, 0x00, 0x20, 0x01, 0x2d, 0x00, 0x03, 0x3a, 0x00, 0x03, 0x20, 0x02, 0x41, 0x7c, 0x6a, 0x21, 0x03, 0x20, 0x00, 0x41, 0x04, 0x6a, 0x21, 0x04, 0x20, 0x01, 0x41, 0x04, 0x6a, 0x21, 0x05, 0x0c, 0x02, 0x0b, 0x20, 0x00, 0x20, 0x01, 0x20, 0x02, 0xfc, 0x0a, 0x00, 0x00, 0x20, 0x00, 0x0f, 0x0b, 0x20, 0x02, 0x21, 0x03, 0x20, 0x00, 0x21, 0x04, 0x20, 0x01, 0x21, 0x05, 0x0b, 0x02, 0x40, 0x02, 0x40, 0x20, 0x04, 0x41, 0x03, 
0x71, 0x22, 0x02, 0x0d, 0x00, 0x02, 0x40, 0x02, 0x40, 0x20, 0x03, 0x41, 0x10, 0x4f, 0x0d, 0x00, 0x20, 0x03, 0x21, 0x02, 0x0c, 0x01, 0x0b, 0x02, 0x40, 0x20, 0x03, 0x41, 0x70, 0x6a, 0x22, 0x02, 0x41, 0x10, 0x71, 0x0d, 0x00, 0x20, 0x04, 0x20, 0x05, 0x29, 0x02, 0x00, 0x37, 0x02, 0x00, 0x20, 0x04, 0x20, 0x05, 0x29, 0x02, 0x08, 0x37, 0x02, 0x08, 0x20, 0x04, 0x41, 0x10, 0x6a, 0x21, 0x04, 0x20, 0x05, 0x41, 0x10, 0x6a, 0x21, 0x05, 0x20, 0x02, 0x21, 0x03, 0x0b, 0x20, 0x02, 0x41, 0x10, 0x49, 0x0d, 0x00, 0x20, 0x03, 0x21, 0x02, 0x03, 0x40, 0x20, 0x04, 0x20, 0x05, 0x29, 0x02, 0x00, 0x37, 0x02, 0x00, 0x20, 0x04, 0x20, 0x05, 0x29, 0x02, 0x08, 0x37, 0x02, 0x08, 0x20, 0x04, 0x20, 0x05, 0x29, 0x02, 0x10, 0x37, 0x02, 0x10, 0x20, 0x04, 0x20, 0x05, 0x29, 0x02, 0x18, 0x37, 0x02, 0x18, 0x20, 0x04, 0x41, 0x20, 0x6a, 0x21, 0x04, 0x20, 0x05, 0x41, 0x20, 0x6a, 0x21, 0x05, 0x20, 0x02, 0x41, 0x60, 0x6a, 0x22, 0x02, 0x41, 0x0f, 0x4b, 0x0d, 0x00, 0x0b, 0x0b, 0x02, 0x40, 0x20, 0x02, 0x41, 0x08, 0x49, 0x0d, 0x00, 0x20, 0x04, 0x20, 0x05, 0x29, 0x02, 0x00, 0x37, 0x02, 0x00, 0x20, 0x05, 0x41, 0x08, 0x6a, 0x21, 0x05, 0x20, 0x04, 0x41, 0x08, 0x6a, 0x21, 0x04, 0x0b, 0x02, 0x40, 0x20, 0x02, 0x41, 0x04, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x04, 0x20, 0x05, 0x28, 0x02, 0x00, 0x36, 0x02, 0x00, 0x20, 0x05, 0x41, 0x04, 0x6a, 0x21, 0x05, 0x20, 0x04, 0x41, 0x04, 0x6a, 0x21, 0x04, 0x0b, 0x02, 0x40, 0x20, 0x02, 0x41, 0x02, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x04, 0x20, 0x05, 0x2f, 0x00, 0x00, 0x3b, 0x00, 0x00, 0x20, 0x04, 0x41, 0x02, 0x6a, 0x21, 0x04, 0x20, 0x05, 0x41, 0x02, 0x6a, 0x21, 0x05, 0x0b, 0x20, 0x02, 0x41, 0x01, 0x71, 0x45, 0x0d, 0x01, 0x20, 0x04, 0x20, 0x05, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x20, 0x00, 0x0f, 0x0b, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x20, 0x03, 0x41, 0x20, 0x49, 0x0d, 0x00, 0x20, 0x04, 0x20, 0x05, 0x28, 0x02, 0x00, 0x22, 0x03, 0x3a, 0x00, 0x00, 0x02, 0x40, 0x02, 0x40, 0x20, 0x02, 0x41, 0x7f, 0x6a, 0x0e, 0x03, 0x03, 0x00, 0x01, 0x03, 0x0b, 0x20, 0x04, 0x20, 0x03, 0x41, 0x08, 0x76, 0x3a, 0x00, 0x01, 0x20, 0x04, 0x20, 0x05, 0x41, 0x06, 0x6a, 0x29, 0x01, 0x00, 0x37, 0x02, 0x06, 0x20, 0x04, 0x20, 0x05, 0x28, 0x02, 0x04, 0x41, 0x10, 0x74, 0x20, 0x03, 0x41, 0x10, 0x76, 0x72, 0x36, 0x02, 0x02, 0x20, 0x04, 0x41, 0x12, 0x6a, 0x21, 0x02, 0x20, 0x05, 0x41, 0x12, 0x6a, 0x21, 0x01, 0x41, 0x0e, 0x21, 0x06, 0x20, 0x05, 0x41, 0x0e, 0x6a, 0x28, 0x01, 0x00, 0x21, 0x05, 0x41, 0x0e, 0x21, 0x03, 0x0c, 0x03, 0x0b, 0x20, 0x04, 0x20, 0x05, 0x41, 0x05, 0x6a, 0x29, 0x00, 0x00, 0x37, 0x02, 0x05, 0x20, 0x04, 0x20, 0x05, 0x28, 0x02, 0x04, 0x41, 0x18, 0x74, 0x20, 0x03, 0x41, 0x08, 0x76, 0x72, 0x36, 0x02, 0x01, 0x20, 0x04, 0x41, 0x11, 0x6a, 0x21, 0x02, 0x20, 0x05, 0x41, 0x11, 0x6a, 0x21, 0x01, 0x41, 0x0d, 0x21, 0x06, 0x20, 0x05, 0x41, 0x0d, 0x6a, 0x28, 0x00, 0x00, 0x21, 0x05, 0x41, 0x0f, 0x21, 0x03, 0x0c, 0x02, 0x0b, 0x02, 0x40, 0x02, 0x40, 0x20, 0x03, 0x41, 0x10, 0x4f, 0x0d, 0x00, 0x20, 0x04, 0x21, 0x02, 0x20, 0x05, 0x21, 0x01, 0x0c, 0x01, 0x0b, 0x20, 0x04, 0x20, 0x05, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x20, 0x04, 0x20, 0x05, 0x28, 0x00, 0x01, 0x36, 0x00, 0x01, 0x20, 0x04, 0x20, 0x05, 0x29, 0x00, 0x05, 0x37, 0x00, 0x05, 0x20, 0x04, 0x20, 0x05, 0x2f, 0x00, 0x0d, 0x3b, 0x00, 0x0d, 0x20, 0x04, 0x20, 0x05, 0x2d, 0x00, 0x0f, 0x3a, 0x00, 0x0f, 0x20, 0x04, 0x41, 0x10, 0x6a, 0x21, 0x02, 0x20, 0x05, 0x41, 0x10, 0x6a, 0x21, 0x01, 0x0b, 0x20, 0x03, 0x41, 0x08, 0x71, 0x0d, 0x02, 0x0c, 0x03, 0x0b, 0x20, 0x04, 0x20, 0x03, 0x41, 0x10, 0x76, 0x3a, 0x00, 0x02, 0x20, 0x04, 0x20, 0x03, 0x41, 0x08, 0x76, 0x3a, 0x00, 0x01, 0x20, 0x04, 0x20, 
0x05, 0x41, 0x07, 0x6a, 0x29, 0x00, 0x00, 0x37, 0x02, 0x07, 0x20, 0x04, 0x20, 0x05, 0x28, 0x02, 0x04, 0x41, 0x08, 0x74, 0x20, 0x03, 0x41, 0x18, 0x76, 0x72, 0x36, 0x02, 0x03, 0x20, 0x04, 0x41, 0x13, 0x6a, 0x21, 0x02, 0x20, 0x05, 0x41, 0x13, 0x6a, 0x21, 0x01, 0x41, 0x0f, 0x21, 0x06, 0x20, 0x05, 0x41, 0x0f, 0x6a, 0x28, 0x00, 0x00, 0x21, 0x05, 0x41, 0x0d, 0x21, 0x03, 0x0b, 0x20, 0x04, 0x20, 0x06, 0x6a, 0x20, 0x05, 0x36, 0x02, 0x00, 0x0b, 0x20, 0x02, 0x20, 0x01, 0x29, 0x00, 0x00, 0x37, 0x00, 0x00, 0x20, 0x02, 0x41, 0x08, 0x6a, 0x21, 0x02, 0x20, 0x01, 0x41, 0x08, 0x6a, 0x21, 0x01, 0x0b, 0x02, 0x40, 0x20, 0x03, 0x41, 0x04, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x02, 0x20, 0x01, 0x28, 0x00, 0x00, 0x36, 0x00, 0x00, 0x20, 0x02, 0x41, 0x04, 0x6a, 0x21, 0x02, 0x20, 0x01, 0x41, 0x04, 0x6a, 0x21, 0x01, 0x0b, 0x02, 0x40, 0x20, 0x03, 0x41, 0x02, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x02, 0x20, 0x01, 0x2f, 0x00, 0x00, 0x3b, 0x00, 0x00, 0x20, 0x02, 0x41, 0x02, 0x6a, 0x21, 0x02, 0x20, 0x01, 0x41, 0x02, 0x6a, 0x21, 0x01, 0x0b, 0x20, 0x03, 0x41, 0x01, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x02, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x0b, 0x20, 0x00, 0x0b, 0x88, 0x03, 0x02, 0x03, 0x7f, 0x01, 0x7e, 0x02, 0x40, 0x20, 0x02, 0x41, 0x21, 0x49, 0x0d, 0x00, 0x20, 0x00, 0x20, 0x01, 0x20, 0x02, 0xfc, 0x0b, 0x00, 0x20, 0x00, 0x0f, 0x0b, 0x02, 0x40, 0x20, 0x02, 0x45, 0x0d, 0x00, 0x20, 0x00, 0x20, 0x01, 0x3a, 0x00, 0x00, 0x20, 0x00, 0x20, 0x02, 0x6a, 0x22, 0x03, 0x41, 0x7f, 0x6a, 0x20, 0x01, 0x3a, 0x00, 0x00, 0x20, 0x02, 0x41, 0x03, 0x49, 0x0d, 0x00, 0x20, 0x00, 0x20, 0x01, 0x3a, 0x00, 0x02, 0x20, 0x00, 0x20, 0x01, 0x3a, 0x00, 0x01, 0x20, 0x03, 0x41, 0x7d, 0x6a, 0x20, 0x01, 0x3a, 0x00, 0x00, 0x20, 0x03, 0x41, 0x7e, 0x6a, 0x20, 0x01, 0x3a, 0x00, 0x00, 0x20, 0x02, 0x41, 0x07, 0x49, 0x0d, 0x00, 0x20, 0x00, 0x20, 0x01, 0x3a, 0x00, 0x03, 0x20, 0x03, 0x41, 0x7c, 0x6a, 0x20, 0x01, 0x3a, 0x00, 0x00, 0x20, 0x02, 0x41, 0x09, 0x49, 0x0d, 0x00, 0x20, 0x00, 0x41, 0x00, 0x20, 0x00, 0x6b, 0x41, 0x03, 0x71, 0x22, 0x04, 0x6a, 0x22, 0x05, 0x20, 0x01, 0x41, 0xff, 0x01, 0x71, 0x41, 0x81, 0x82, 0x84, 0x08, 0x6c, 0x22, 0x03, 0x36, 0x02, 0x00, 0x20, 0x05, 0x20, 0x02, 0x20, 0x04, 0x6b, 0x41, 0x3c, 0x71, 0x22, 0x01, 0x6a, 0x22, 0x02, 0x41, 0x7c, 0x6a, 0x20, 0x03, 0x36, 0x02, 0x00, 0x20, 0x01, 0x41, 0x09, 0x49, 0x0d, 0x00, 0x20, 0x05, 0x20, 0x03, 0x36, 0x02, 0x08, 0x20, 0x05, 0x20, 0x03, 0x36, 0x02, 0x04, 0x20, 0x02, 0x41, 0x78, 0x6a, 0x20, 0x03, 0x36, 0x02, 0x00, 0x20, 0x02, 0x41, 0x74, 0x6a, 0x20, 0x03, 0x36, 0x02, 0x00, 0x20, 0x01, 0x41, 0x19, 0x49, 0x0d, 0x00, 0x20, 0x05, 0x20, 0x03, 0x36, 0x02, 0x18, 0x20, 0x05, 0x20, 0x03, 0x36, 0x02, 0x14, 0x20, 0x05, 0x20, 0x03, 0x36, 0x02, 0x10, 0x20, 0x05, 0x20, 0x03, 0x36, 0x02, 0x0c, 0x20, 0x02, 0x41, 0x70, 0x6a, 0x20, 0x03, 0x36, 0x02, 0x00, 0x20, 0x02, 0x41, 0x6c, 0x6a, 0x20, 0x03, 0x36, 0x02, 0x00, 0x20, 0x02, 0x41, 0x68, 0x6a, 0x20, 0x03, 0x36, 0x02, 0x00, 0x20, 0x02, 0x41, 0x64, 0x6a, 0x20, 0x03, 0x36, 0x02, 0x00, 0x20, 0x01, 0x20, 0x05, 0x41, 0x04, 0x71, 0x41, 0x18, 0x72, 0x22, 0x02, 0x6b, 0x22, 0x01, 0x41, 0x20, 0x49, 0x0d, 0x00, 0x20, 0x03, 0xad, 0x42, 0x81, 0x80, 0x80, 0x80, 0x10, 0x7e, 0x21, 0x06, 0x20, 0x05, 0x20, 0x02, 0x6a, 0x21, 0x02, 0x03, 0x40, 0x20, 0x02, 0x20, 0x06, 0x37, 0x03, 0x18, 0x20, 0x02, 0x20, 0x06, 0x37, 0x03, 0x10, 0x20, 0x02, 0x20, 0x06, 0x37, 0x03, 0x08, 0x20, 0x02, 0x20, 0x06, 0x37, 0x03, 0x00, 0x20, 0x02, 0x41, 0x20, 0x6a, 0x21, 0x02, 0x20, 0x01, 0x41, 0x60, 0x6a, 0x22, 0x01, 0x41, 0x1f, 0x4b, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x00, 0x0b, 0xcf, 0x01, 0x01, 0x03, 0x7f, 0x20, 0x00, 0x21, 
0x01, 0x02, 0x40, 0x02, 0x40, 0x20, 0x00, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x00, 0x02, 0x40, 0x20, 0x00, 0x2d, 0x00, 0x00, 0x0d, 0x00, 0x20, 0x00, 0x20, 0x00, 0x6b, 0x0f, 0x0b, 0x20, 0x00, 0x41, 0x01, 0x6a, 0x22, 0x01, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x45, 0x0d, 0x01, 0x20, 0x00, 0x41, 0x02, 0x6a, 0x22, 0x01, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x45, 0x0d, 0x01, 0x20, 0x00, 0x41, 0x03, 0x6a, 0x22, 0x01, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x45, 0x0d, 0x01, 0x20, 0x00, 0x41, 0x04, 0x6a, 0x22, 0x01, 0x41, 0x03, 0x71, 0x0d, 0x01, 0x0b, 0x20, 0x01, 0x41, 0x7c, 0x6a, 0x21, 0x02, 0x20, 0x01, 0x41, 0x7b, 0x6a, 0x21, 0x01, 0x03, 0x40, 0x20, 0x01, 0x41, 0x04, 0x6a, 0x21, 0x01, 0x41, 0x80, 0x82, 0x84, 0x08, 0x20, 0x02, 0x41, 0x04, 0x6a, 0x22, 0x02, 0x28, 0x02, 0x00, 0x22, 0x03, 0x6b, 0x20, 0x03, 0x72, 0x41, 0x80, 0x81, 0x82, 0x84, 0x78, 0x71, 0x41, 0x80, 0x81, 0x82, 0x84, 0x78, 0x46, 0x0d, 0x00, 0x0b, 0x03, 0x40, 0x20, 0x01, 0x41, 0x01, 0x6a, 0x21, 0x01, 0x20, 0x02, 0x2d, 0x00, 0x00, 0x21, 0x03, 0x20, 0x02, 0x41, 0x01, 0x6a, 0x21, 0x02, 0x20, 0x03, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x01, 0x20, 0x00, 0x6b, 0x0b, 0x44, 0x00, 0x02, 0x40, 0x20, 0x00, 0x41, 0xff, 0xff, 0x07, 0x4b, 0x0d, 0x00, 0x20, 0x00, 0x41, 0x08, 0x76, 0x41, 0x80, 0x80, 0x84, 0x80, 0x00, 0x6a, 0x2d, 0x00, 0x00, 0x41, 0x05, 0x74, 0x20, 0x00, 0x41, 0x03, 0x76, 0x41, 0x1f, 0x71, 0x72, 0x41, 0x80, 0x80, 0x84, 0x80, 0x00, 0x6a, 0x2d, 0x00, 0x00, 0x20, 0x00, 0x41, 0x07, 0x71, 0x76, 0x41, 0x01, 0x71, 0x0f, 0x0b, 0x20, 0x00, 0x41, 0xfe, 0xff, 0x0b, 0x49, 0x0b, 0x49, 0x01, 0x03, 0x7f, 0x41, 0x00, 0x21, 0x03, 0x02, 0x40, 0x20, 0x02, 0x45, 0x0d, 0x00, 0x02, 0x40, 0x03, 0x40, 0x20, 0x00, 0x2d, 0x00, 0x00, 0x22, 0x04, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x22, 0x05, 0x47, 0x0d, 0x01, 0x20, 0x01, 0x41, 0x01, 0x6a, 0x21, 0x01, 0x20, 0x00, 0x41, 0x01, 0x6a, 0x21, 0x00, 0x20, 0x02, 0x41, 0x7f, 0x6a, 0x22, 0x02, 0x0d, 0x00, 0x0c, 0x02, 0x0b, 0x0b, 0x20, 0x04, 0x20, 0x05, 0x6b, 0x21, 0x03, 0x0b, 0x20, 0x03, 0x0b, 0xf6, 0x02, 0x01, 0x03, 0x7f, 0x20, 0x02, 0x41, 0x00, 0x47, 0x21, 0x03, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x20, 0x00, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x02, 0x45, 0x0d, 0x00, 0x02, 0x40, 0x20, 0x00, 0x2d, 0x00, 0x00, 0x20, 0x01, 0x41, 0xff, 0x01, 0x71, 0x47, 0x0d, 0x00, 0x20, 0x00, 0x21, 0x04, 0x20, 0x02, 0x21, 0x05, 0x0c, 0x03, 0x0b, 0x20, 0x02, 0x41, 0x7f, 0x6a, 0x22, 0x05, 0x41, 0x00, 0x47, 0x21, 0x03, 0x20, 0x00, 0x41, 0x01, 0x6a, 0x22, 0x04, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x01, 0x20, 0x05, 0x45, 0x0d, 0x01, 0x20, 0x04, 0x2d, 0x00, 0x00, 0x20, 0x01, 0x41, 0xff, 0x01, 0x71, 0x46, 0x0d, 0x02, 0x20, 0x02, 0x41, 0x7e, 0x6a, 0x22, 0x05, 0x41, 0x00, 0x47, 0x21, 0x03, 0x20, 0x00, 0x41, 0x02, 0x6a, 0x22, 0x04, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x01, 0x20, 0x05, 0x45, 0x0d, 0x01, 0x20, 0x04, 0x2d, 0x00, 0x00, 0x20, 0x01, 0x41, 0xff, 0x01, 0x71, 0x46, 0x0d, 0x02, 0x20, 0x02, 0x41, 0x7d, 0x6a, 0x22, 0x05, 0x41, 0x00, 0x47, 0x21, 0x03, 0x20, 0x00, 0x41, 0x03, 0x6a, 0x22, 0x04, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x01, 0x20, 0x05, 0x45, 0x0d, 0x01, 0x20, 0x04, 0x2d, 0x00, 0x00, 0x20, 0x01, 0x41, 0xff, 0x01, 0x71, 0x46, 0x0d, 0x02, 0x20, 0x00, 0x41, 0x04, 0x6a, 0x21, 0x04, 0x20, 0x02, 0x41, 0x7c, 0x6a, 0x22, 0x05, 0x41, 0x00, 0x47, 0x21, 0x03, 0x0c, 0x01, 0x0b, 0x20, 0x02, 0x21, 0x05, 0x20, 0x00, 0x21, 0x04, 0x0b, 0x20, 0x03, 0x45, 0x0d, 0x01, 0x02, 0x40, 0x20, 0x04, 0x2d, 0x00, 0x00, 0x20, 0x01, 0x41, 0xff, 0x01, 0x71, 0x46, 0x0d, 0x00, 0x20, 0x05, 0x41, 0x04, 
0x49, 0x0d, 0x00, 0x20, 0x01, 0x41, 0xff, 0x01, 0x71, 0x41, 0x81, 0x82, 0x84, 0x08, 0x6c, 0x21, 0x00, 0x03, 0x40, 0x41, 0x80, 0x82, 0x84, 0x08, 0x20, 0x04, 0x28, 0x02, 0x00, 0x20, 0x00, 0x73, 0x22, 0x02, 0x6b, 0x20, 0x02, 0x72, 0x41, 0x80, 0x81, 0x82, 0x84, 0x78, 0x71, 0x41, 0x80, 0x81, 0x82, 0x84, 0x78, 0x47, 0x0d, 0x02, 0x20, 0x04, 0x41, 0x04, 0x6a, 0x21, 0x04, 0x20, 0x05, 0x41, 0x7c, 0x6a, 0x22, 0x05, 0x41, 0x03, 0x4b, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x05, 0x45, 0x0d, 0x01, 0x0b, 0x20, 0x01, 0x41, 0xff, 0x01, 0x71, 0x21, 0x02, 0x03, 0x40, 0x02, 0x40, 0x20, 0x04, 0x2d, 0x00, 0x00, 0x20, 0x02, 0x47, 0x0d, 0x00, 0x20, 0x04, 0x0f, 0x0b, 0x20, 0x04, 0x41, 0x01, 0x6a, 0x21, 0x04, 0x20, 0x05, 0x41, 0x7f, 0x6a, 0x22, 0x05, 0x0d, 0x00, 0x0b, 0x0b, 0x41, 0x00, 0x0b, 0x67, 0x01, 0x02, 0x7f, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x21, 0x02, 0x02, 0x40, 0x20, 0x00, 0x2d, 0x00, 0x00, 0x22, 0x03, 0x45, 0x0d, 0x00, 0x20, 0x03, 0x20, 0x02, 0x41, 0xff, 0x01, 0x71, 0x47, 0x0d, 0x00, 0x20, 0x00, 0x41, 0x01, 0x6a, 0x21, 0x00, 0x20, 0x01, 0x41, 0x01, 0x6a, 0x21, 0x01, 0x03, 0x40, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x21, 0x02, 0x20, 0x00, 0x2d, 0x00, 0x00, 0x22, 0x03, 0x45, 0x0d, 0x01, 0x20, 0x00, 0x41, 0x01, 0x6a, 0x21, 0x00, 0x20, 0x01, 0x41, 0x01, 0x6a, 0x21, 0x01, 0x20, 0x03, 0x20, 0x02, 0x41, 0xff, 0x01, 0x71, 0x46, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x03, 0x20, 0x02, 0x41, 0xff, 0x01, 0x71, 0x6b, 0x0b, 0x0c, 0x00, 0x20, 0x00, 0x41, 0x00, 0x10, 0x9b, 0x80, 0x80, 0x80, 0x00, 0x0b, 0xb4, 0x02, 0x01, 0x07, 0x7f, 0x02, 0x40, 0x20, 0x00, 0x41, 0xff, 0xff, 0x07, 0x4b, 0x0d, 0x00, 0x20, 0x00, 0x20, 0x00, 0x41, 0xff, 0x01, 0x71, 0x22, 0x02, 0x41, 0x03, 0x6e, 0x22, 0x03, 0x41, 0x03, 0x6c, 0x6b, 0x41, 0xff, 0x01, 0x71, 0x41, 0x02, 0x74, 0x41, 0xc0, 0x9e, 0x84, 0x80, 0x00, 0x6a, 0x28, 0x02, 0x00, 0x20, 0x00, 0x41, 0x08, 0x76, 0x22, 0x04, 0x41, 0xa0, 0xa9, 0x84, 0x80, 0x00, 0x6a, 0x2d, 0x00, 0x00, 0x41, 0xd6, 0x00, 0x6c, 0x20, 0x03, 0x6a, 0x41, 0xa0, 0xa9, 0x84, 0x80, 0x00, 0x6a, 0x2d, 0x00, 0x00, 0x6c, 0x41, 0x0b, 0x76, 0x41, 0x06, 0x70, 0x20, 0x04, 0x41, 0x90, 0xbe, 0x84, 0x80, 0x00, 0x6a, 0x2d, 0x00, 0x00, 0x6a, 0x41, 0x02, 0x74, 0x41, 0xd0, 0x9e, 0x84, 0x80, 0x00, 0x6a, 0x28, 0x02, 0x00, 0x22, 0x03, 0x41, 0x08, 0x75, 0x21, 0x04, 0x02, 0x40, 0x20, 0x03, 0x41, 0xff, 0x01, 0x71, 0x22, 0x03, 0x41, 0x01, 0x4b, 0x0d, 0x00, 0x20, 0x04, 0x41, 0x00, 0x20, 0x03, 0x20, 0x01, 0x73, 0x6b, 0x71, 0x20, 0x00, 0x6a, 0x0f, 0x0b, 0x20, 0x04, 0x41, 0xff, 0x01, 0x71, 0x22, 0x03, 0x45, 0x0d, 0x00, 0x20, 0x04, 0x41, 0x08, 0x76, 0x21, 0x04, 0x03, 0x40, 0x02, 0x40, 0x20, 0x02, 0x20, 0x03, 0x41, 0x01, 0x76, 0x22, 0x05, 0x20, 0x04, 0x6a, 0x22, 0x06, 0x41, 0x01, 0x74, 0x41, 0x90, 0xa6, 0x84, 0x80, 0x00, 0x6a, 0x22, 0x07, 0x2d, 0x00, 0x00, 0x22, 0x08, 0x47, 0x0d, 0x00, 0x02, 0x40, 0x20, 0x07, 0x2d, 0x00, 0x01, 0x41, 0x02, 0x74, 0x41, 0xd0, 0x9e, 0x84, 0x80, 0x00, 0x6a, 0x28, 0x02, 0x00, 0x22, 0x03, 0x41, 0xff, 0x01, 0x71, 0x22, 0x04, 0x41, 0x01, 0x4b, 0x0d, 0x00, 0x20, 0x03, 0x41, 0x08, 0x75, 0x41, 0x00, 0x20, 0x04, 0x20, 0x01, 0x73, 0x6b, 0x71, 0x20, 0x00, 0x6a, 0x0f, 0x0b, 0x41, 0x7f, 0x41, 0x01, 0x20, 0x01, 0x1b, 0x20, 0x00, 0x6a, 0x0f, 0x0b, 0x20, 0x04, 0x20, 0x06, 0x20, 0x02, 0x20, 0x08, 0x49, 0x22, 0x08, 0x1b, 0x21, 0x04, 0x20, 0x05, 0x20, 0x03, 0x20, 0x05, 0x6b, 0x20, 0x08, 0x1b, 0x22, 0x03, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x00, 0x0b, 0x0c, 0x00, 0x20, 0x00, 0x41, 0x01, 0x10, 0x9b, 0x80, 0x80, 0x80, 0x00, 0x0b, 0x87, 0x01, 0x01, 0x02, 0x7f, 0x02, 0x40, 0x20, 0x02, 0x0d, 0x00, 0x41, 0x00, 0x0f, 0x0b, 0x02, 0x40, 0x02, 0x40, 0x20, 0x00, 0x2d, 0x00, 0x00, 
0x22, 0x03, 0x0d, 0x00, 0x41, 0x00, 0x21, 0x03, 0x0c, 0x01, 0x0b, 0x20, 0x00, 0x41, 0x01, 0x6a, 0x21, 0x00, 0x20, 0x02, 0x41, 0x7f, 0x6a, 0x21, 0x02, 0x02, 0x40, 0x03, 0x40, 0x20, 0x03, 0x41, 0xff, 0x01, 0x71, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x22, 0x04, 0x47, 0x0d, 0x01, 0x20, 0x04, 0x45, 0x0d, 0x01, 0x20, 0x02, 0x41, 0x00, 0x46, 0x0d, 0x01, 0x20, 0x02, 0x41, 0x7f, 0x6a, 0x21, 0x02, 0x20, 0x01, 0x41, 0x01, 0x6a, 0x21, 0x01, 0x20, 0x00, 0x2d, 0x00, 0x00, 0x21, 0x03, 0x20, 0x00, 0x41, 0x01, 0x6a, 0x21, 0x00, 0x20, 0x03, 0x0d, 0x00, 0x0b, 0x41, 0x00, 0x21, 0x03, 0x0b, 0x20, 0x03, 0x41, 0xff, 0x01, 0x71, 0x21, 0x03, 0x0b, 0x20, 0x03, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x6b, 0x0b, 0x0d, 0x00, 0x20, 0x00, 0x10, 0x9a, 0x80, 0x80, 0x80, 0x00, 0x20, 0x00, 0x47, 0x0b, 0xbf, 0x09, 0x01, 0x04, 0x7f, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x20, 0x02, 0x41, 0x21, 0x4f, 0x0d, 0x00, 0x20, 0x00, 0x20, 0x01, 0x46, 0x0d, 0x02, 0x20, 0x01, 0x20, 0x00, 0x20, 0x02, 0x6a, 0x22, 0x03, 0x6b, 0x41, 0x00, 0x20, 0x02, 0x41, 0x01, 0x74, 0x6b, 0x4b, 0x0d, 0x01, 0x0b, 0x20, 0x00, 0x20, 0x01, 0x20, 0x02, 0xfc, 0x0a, 0x00, 0x00, 0x0c, 0x01, 0x0b, 0x20, 0x01, 0x20, 0x00, 0x73, 0x41, 0x03, 0x71, 0x21, 0x04, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x20, 0x00, 0x20, 0x01, 0x4f, 0x0d, 0x00, 0x02, 0x40, 0x20, 0x04, 0x45, 0x0d, 0x00, 0x20, 0x02, 0x21, 0x05, 0x20, 0x00, 0x21, 0x03, 0x0c, 0x03, 0x0b, 0x02, 0x40, 0x20, 0x00, 0x41, 0x03, 0x71, 0x0d, 0x00, 0x20, 0x02, 0x21, 0x05, 0x20, 0x00, 0x21, 0x03, 0x0c, 0x02, 0x0b, 0x20, 0x02, 0x45, 0x0d, 0x03, 0x20, 0x00, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x20, 0x02, 0x41, 0x7f, 0x6a, 0x21, 0x05, 0x02, 0x40, 0x20, 0x00, 0x41, 0x01, 0x6a, 0x22, 0x03, 0x41, 0x03, 0x71, 0x0d, 0x00, 0x20, 0x01, 0x41, 0x01, 0x6a, 0x21, 0x01, 0x0c, 0x02, 0x0b, 0x20, 0x05, 0x45, 0x0d, 0x03, 0x20, 0x00, 0x20, 0x01, 0x2d, 0x00, 0x01, 0x3a, 0x00, 0x01, 0x20, 0x02, 0x41, 0x7e, 0x6a, 0x21, 0x05, 0x02, 0x40, 0x20, 0x00, 0x41, 0x02, 0x6a, 0x22, 0x03, 0x41, 0x03, 0x71, 0x0d, 0x00, 0x20, 0x01, 0x41, 0x02, 0x6a, 0x21, 0x01, 0x0c, 0x02, 0x0b, 0x20, 0x05, 0x45, 0x0d, 0x03, 0x20, 0x00, 0x20, 0x01, 0x2d, 0x00, 0x02, 0x3a, 0x00, 0x02, 0x20, 0x02, 0x41, 0x7d, 0x6a, 0x21, 0x05, 0x02, 0x40, 0x20, 0x00, 0x41, 0x03, 0x6a, 0x22, 0x03, 0x41, 0x03, 0x71, 0x0d, 0x00, 0x20, 0x01, 0x41, 0x03, 0x6a, 0x21, 0x01, 0x0c, 0x02, 0x0b, 0x20, 0x05, 0x45, 0x0d, 0x03, 0x20, 0x00, 0x20, 0x01, 0x2d, 0x00, 0x03, 0x3a, 0x00, 0x03, 0x20, 0x00, 0x41, 0x04, 0x6a, 0x21, 0x03, 0x20, 0x01, 0x41, 0x04, 0x6a, 0x21, 0x01, 0x20, 0x02, 0x41, 0x7c, 0x6a, 0x21, 0x05, 0x0c, 0x01, 0x0b, 0x02, 0x40, 0x20, 0x04, 0x0d, 0x00, 0x02, 0x40, 0x20, 0x03, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x02, 0x45, 0x0d, 0x04, 0x20, 0x00, 0x20, 0x02, 0x41, 0x7f, 0x6a, 0x22, 0x03, 0x6a, 0x22, 0x04, 0x20, 0x01, 0x20, 0x03, 0x6a, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x02, 0x40, 0x20, 0x04, 0x41, 0x03, 0x71, 0x0d, 0x00, 0x20, 0x03, 0x21, 0x02, 0x0c, 0x01, 0x0b, 0x20, 0x03, 0x45, 0x0d, 0x04, 0x20, 0x00, 0x20, 0x02, 0x41, 0x7e, 0x6a, 0x22, 0x03, 0x6a, 0x22, 0x04, 0x20, 0x01, 0x20, 0x03, 0x6a, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x02, 0x40, 0x20, 0x04, 0x41, 0x03, 0x71, 0x0d, 0x00, 0x20, 0x03, 0x21, 0x02, 0x0c, 0x01, 0x0b, 0x20, 0x03, 0x45, 0x0d, 0x04, 0x20, 0x00, 0x20, 0x02, 0x41, 0x7d, 0x6a, 0x22, 0x03, 0x6a, 0x22, 0x04, 0x20, 0x01, 0x20, 0x03, 0x6a, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x02, 0x40, 0x20, 0x04, 0x41, 0x03, 0x71, 0x0d, 0x00, 0x20, 0x03, 0x21, 0x02, 0x0c, 0x01, 0x0b, 0x20, 0x03, 0x45, 0x0d, 0x04, 0x20, 0x00, 0x20, 0x02, 0x41, 0x7c, 0x6a, 0x22, 0x02, 0x6a, 0x20, 0x01, 0x20, 0x02, 
0x6a, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x0b, 0x20, 0x02, 0x41, 0x04, 0x49, 0x0d, 0x00, 0x02, 0x40, 0x20, 0x02, 0x41, 0x7c, 0x6a, 0x22, 0x06, 0x41, 0x02, 0x76, 0x41, 0x01, 0x6a, 0x41, 0x03, 0x71, 0x22, 0x03, 0x45, 0x0d, 0x00, 0x20, 0x01, 0x41, 0x7c, 0x6a, 0x21, 0x04, 0x20, 0x00, 0x41, 0x7c, 0x6a, 0x21, 0x05, 0x03, 0x40, 0x20, 0x05, 0x20, 0x02, 0x6a, 0x20, 0x04, 0x20, 0x02, 0x6a, 0x28, 0x02, 0x00, 0x36, 0x02, 0x00, 0x20, 0x02, 0x41, 0x7c, 0x6a, 0x21, 0x02, 0x20, 0x03, 0x41, 0x7f, 0x6a, 0x22, 0x03, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x06, 0x41, 0x0c, 0x49, 0x0d, 0x00, 0x20, 0x01, 0x41, 0x70, 0x6a, 0x21, 0x05, 0x20, 0x00, 0x41, 0x70, 0x6a, 0x21, 0x06, 0x03, 0x40, 0x20, 0x06, 0x20, 0x02, 0x6a, 0x22, 0x03, 0x41, 0x0c, 0x6a, 0x20, 0x05, 0x20, 0x02, 0x6a, 0x22, 0x04, 0x41, 0x0c, 0x6a, 0x28, 0x02, 0x00, 0x36, 0x02, 0x00, 0x20, 0x03, 0x41, 0x08, 0x6a, 0x20, 0x04, 0x41, 0x08, 0x6a, 0x28, 0x02, 0x00, 0x36, 0x02, 0x00, 0x20, 0x03, 0x41, 0x04, 0x6a, 0x20, 0x04, 0x41, 0x04, 0x6a, 0x28, 0x02, 0x00, 0x36, 0x02, 0x00, 0x20, 0x03, 0x20, 0x04, 0x28, 0x02, 0x00, 0x36, 0x02, 0x00, 0x20, 0x02, 0x41, 0x70, 0x6a, 0x22, 0x02, 0x41, 0x03, 0x4b, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x02, 0x45, 0x0d, 0x02, 0x20, 0x02, 0x21, 0x03, 0x02, 0x40, 0x20, 0x02, 0x41, 0x03, 0x71, 0x22, 0x04, 0x45, 0x0d, 0x00, 0x20, 0x01, 0x41, 0x7f, 0x6a, 0x21, 0x05, 0x20, 0x00, 0x41, 0x7f, 0x6a, 0x21, 0x06, 0x20, 0x02, 0x21, 0x03, 0x03, 0x40, 0x20, 0x06, 0x20, 0x03, 0x6a, 0x20, 0x05, 0x20, 0x03, 0x6a, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x20, 0x03, 0x41, 0x7f, 0x6a, 0x21, 0x03, 0x20, 0x04, 0x41, 0x7f, 0x6a, 0x22, 0x04, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x02, 0x41, 0x04, 0x49, 0x0d, 0x02, 0x20, 0x01, 0x41, 0x7c, 0x6a, 0x21, 0x04, 0x20, 0x00, 0x41, 0x7c, 0x6a, 0x21, 0x05, 0x03, 0x40, 0x20, 0x05, 0x20, 0x03, 0x6a, 0x22, 0x01, 0x41, 0x03, 0x6a, 0x20, 0x04, 0x20, 0x03, 0x6a, 0x22, 0x02, 0x41, 0x03, 0x6a, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x20, 0x01, 0x41, 0x02, 0x6a, 0x20, 0x02, 0x41, 0x02, 0x6a, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x20, 0x01, 0x41, 0x01, 0x6a, 0x20, 0x02, 0x41, 0x01, 0x6a, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x20, 0x01, 0x20, 0x02, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x20, 0x03, 0x41, 0x7c, 0x6a, 0x22, 0x03, 0x0d, 0x00, 0x0c, 0x03, 0x0b, 0x0b, 0x20, 0x05, 0x41, 0x04, 0x49, 0x0d, 0x00, 0x02, 0x40, 0x20, 0x05, 0x41, 0x7c, 0x6a, 0x22, 0x04, 0x41, 0x02, 0x76, 0x41, 0x01, 0x6a, 0x41, 0x07, 0x71, 0x22, 0x02, 0x45, 0x0d, 0x00, 0x20, 0x05, 0x20, 0x02, 0x41, 0x02, 0x74, 0x6b, 0x21, 0x05, 0x03, 0x40, 0x20, 0x03, 0x20, 0x01, 0x28, 0x02, 0x00, 0x36, 0x02, 0x00, 0x20, 0x01, 0x41, 0x04, 0x6a, 0x21, 0x01, 0x20, 0x03, 0x41, 0x04, 0x6a, 0x21, 0x03, 0x20, 0x02, 0x41, 0x7f, 0x6a, 0x22, 0x02, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x04, 0x41, 0x1c, 0x49, 0x0d, 0x00, 0x03, 0x40, 0x20, 0x03, 0x20, 0x01, 0x28, 0x02, 0x00, 0x36, 0x02, 0x00, 0x20, 0x03, 0x20, 0x01, 0x28, 0x02, 0x04, 0x36, 0x02, 0x04, 0x20, 0x03, 0x20, 0x01, 0x28, 0x02, 0x08, 0x36, 0x02, 0x08, 0x20, 0x03, 0x20, 0x01, 0x28, 0x02, 0x0c, 0x36, 0x02, 0x0c, 0x20, 0x03, 0x20, 0x01, 0x28, 0x02, 0x10, 0x36, 0x02, 0x10, 0x20, 0x03, 0x20, 0x01, 0x28, 0x02, 0x14, 0x36, 0x02, 0x14, 0x20, 0x03, 0x20, 0x01, 0x28, 0x02, 0x18, 0x36, 0x02, 0x18, 0x20, 0x03, 0x20, 0x01, 0x28, 0x02, 0x1c, 0x36, 0x02, 0x1c, 0x20, 0x01, 0x41, 0x20, 0x6a, 0x21, 0x01, 0x20, 0x03, 0x41, 0x20, 0x6a, 0x21, 0x03, 0x20, 0x05, 0x41, 0x60, 0x6a, 0x22, 0x05, 0x41, 0x03, 0x4b, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x05, 0x45, 0x0d, 0x00, 0x02, 0x40, 0x02, 0x40, 0x20, 0x05, 0x41, 0x07, 0x71, 0x22, 0x02, 0x0d, 0x00, 0x20, 0x05, 0x21, 0x04, 0x0c, 0x01, 0x0b, 0x20, 
0x05, 0x41, 0x78, 0x71, 0x21, 0x04, 0x03, 0x40, 0x20, 0x03, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x20, 0x03, 0x41, 0x01, 0x6a, 0x21, 0x03, 0x20, 0x01, 0x41, 0x01, 0x6a, 0x21, 0x01, 0x20, 0x02, 0x41, 0x7f, 0x6a, 0x22, 0x02, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x05, 0x41, 0x08, 0x49, 0x0d, 0x00, 0x03, 0x40, 0x20, 0x03, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x3a, 0x00, 0x00, 0x20, 0x03, 0x20, 0x01, 0x2d, 0x00, 0x01, 0x3a, 0x00, 0x01, 0x20, 0x03, 0x20, 0x01, 0x2d, 0x00, 0x02, 0x3a, 0x00, 0x02, 0x20, 0x03, 0x20, 0x01, 0x2d, 0x00, 0x03, 0x3a, 0x00, 0x03, 0x20, 0x03, 0x20, 0x01, 0x2d, 0x00, 0x04, 0x3a, 0x00, 0x04, 0x20, 0x03, 0x20, 0x01, 0x2d, 0x00, 0x05, 0x3a, 0x00, 0x05, 0x20, 0x03, 0x20, 0x01, 0x2d, 0x00, 0x06, 0x3a, 0x00, 0x06, 0x20, 0x03, 0x20, 0x01, 0x2d, 0x00, 0x07, 0x3a, 0x00, 0x07, 0x20, 0x03, 0x41, 0x08, 0x6a, 0x21, 0x03, 0x20, 0x01, 0x41, 0x08, 0x6a, 0x21, 0x01, 0x20, 0x04, 0x41, 0x78, 0x6a, 0x22, 0x04, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x00, 0x0b, 0x0d, 0x00, 0x20, 0x00, 0x10, 0x9c, 0x80, 0x80, 0x80, 0x00, 0x20, 0x00, 0x47, 0x0b, 0x0d, 0x00, 0x20, 0x00, 0x41, 0x20, 0x46, 0x20, 0x00, 0x41, 0x09, 0x46, 0x72, 0x0b, 0x0a, 0x00, 0x20, 0x00, 0x10, 0xa1, 0x80, 0x80, 0x80, 0x00, 0x0b, 0x0a, 0x00, 0x20, 0x00, 0x41, 0x50, 0x6a, 0x41, 0x0a, 0x49, 0x0b, 0x4d, 0x01, 0x02, 0x7f, 0x20, 0x00, 0x20, 0x00, 0x10, 0x95, 0x80, 0x80, 0x80, 0x00, 0x6a, 0x21, 0x03, 0x02, 0x40, 0x20, 0x02, 0x45, 0x0d, 0x00, 0x03, 0x40, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x22, 0x04, 0x45, 0x0d, 0x01, 0x20, 0x03, 0x20, 0x04, 0x3a, 0x00, 0x00, 0x20, 0x03, 0x41, 0x01, 0x6a, 0x21, 0x03, 0x20, 0x01, 0x41, 0x01, 0x6a, 0x21, 0x01, 0x20, 0x02, 0x41, 0x7f, 0x6a, 0x22, 0x02, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x03, 0x41, 0x00, 0x3a, 0x00, 0x00, 0x20, 0x00, 0x0b, 0xf3, 0x03, 0x01, 0x04, 0x7f, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x02, 0x40, 0x20, 0x01, 0x20, 0x00, 0x73, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x00, 0x21, 0x03, 0x0c, 0x01, 0x0b, 0x20, 0x02, 0x41, 0x00, 0x47, 0x21, 0x04, 0x02, 0x40, 0x02, 0x40, 0x20, 0x01, 0x41, 0x03, 0x71, 0x0d, 0x00, 0x20, 0x00, 0x21, 0x03, 0x0c, 0x01, 0x0b, 0x02, 0x40, 0x20, 0x02, 0x0d, 0x00, 0x20, 0x00, 0x21, 0x03, 0x0c, 0x01, 0x0b, 0x20, 0x00, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x22, 0x03, 0x3a, 0x00, 0x00, 0x02, 0x40, 0x20, 0x03, 0x0d, 0x00, 0x20, 0x00, 0x21, 0x03, 0x20, 0x02, 0x21, 0x05, 0x0c, 0x05, 0x0b, 0x20, 0x00, 0x41, 0x01, 0x6a, 0x21, 0x03, 0x20, 0x02, 0x41, 0x7f, 0x6a, 0x22, 0x05, 0x41, 0x00, 0x47, 0x21, 0x04, 0x02, 0x40, 0x20, 0x01, 0x41, 0x01, 0x6a, 0x22, 0x06, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x05, 0x45, 0x0d, 0x00, 0x20, 0x03, 0x20, 0x06, 0x2d, 0x00, 0x00, 0x22, 0x04, 0x3a, 0x00, 0x00, 0x20, 0x04, 0x45, 0x0d, 0x05, 0x20, 0x00, 0x41, 0x02, 0x6a, 0x21, 0x03, 0x20, 0x02, 0x41, 0x7e, 0x6a, 0x22, 0x05, 0x41, 0x00, 0x47, 0x21, 0x04, 0x02, 0x40, 0x20, 0x01, 0x41, 0x02, 0x6a, 0x22, 0x06, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x05, 0x45, 0x0d, 0x00, 0x20, 0x03, 0x20, 0x06, 0x2d, 0x00, 0x00, 0x22, 0x04, 0x3a, 0x00, 0x00, 0x20, 0x04, 0x45, 0x0d, 0x06, 0x20, 0x00, 0x41, 0x03, 0x6a, 0x21, 0x03, 0x20, 0x02, 0x41, 0x7d, 0x6a, 0x22, 0x05, 0x41, 0x00, 0x47, 0x21, 0x04, 0x02, 0x40, 0x20, 0x01, 0x41, 0x03, 0x6a, 0x22, 0x06, 0x41, 0x03, 0x71, 0x45, 0x0d, 0x00, 0x20, 0x05, 0x45, 0x0d, 0x00, 0x20, 0x03, 0x20, 0x06, 0x2d, 0x00, 0x00, 0x22, 0x04, 0x3a, 0x00, 0x00, 0x20, 0x04, 0x45, 0x0d, 0x07, 0x20, 0x00, 0x41, 0x04, 0x6a, 0x21, 0x03, 0x20, 0x01, 0x41, 0x04, 0x6a, 0x21, 0x01, 0x20, 0x02, 0x41, 0x7c, 0x6a, 0x22, 0x02, 0x41, 0x00, 0x47, 0x21, 0x04, 0x0c, 0x03, 0x0b, 0x20, 0x06, 0x21, 0x01, 0x20, 0x05, 0x21, 
0x02, 0x0c, 0x02, 0x0b, 0x20, 0x06, 0x21, 0x01, 0x20, 0x05, 0x21, 0x02, 0x0c, 0x01, 0x0b, 0x20, 0x06, 0x21, 0x01, 0x20, 0x05, 0x21, 0x02, 0x0b, 0x20, 0x04, 0x45, 0x0d, 0x02, 0x02, 0x40, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x0d, 0x00, 0x20, 0x02, 0x21, 0x05, 0x0c, 0x04, 0x0b, 0x20, 0x02, 0x41, 0x04, 0x49, 0x0d, 0x00, 0x03, 0x40, 0x41, 0x80, 0x82, 0x84, 0x08, 0x20, 0x01, 0x28, 0x02, 0x00, 0x22, 0x00, 0x6b, 0x20, 0x00, 0x72, 0x41, 0x80, 0x81, 0x82, 0x84, 0x78, 0x71, 0x41, 0x80, 0x81, 0x82, 0x84, 0x78, 0x47, 0x0d, 0x02, 0x20, 0x03, 0x20, 0x00, 0x36, 0x02, 0x00, 0x20, 0x03, 0x41, 0x04, 0x6a, 0x21, 0x03, 0x20, 0x01, 0x41, 0x04, 0x6a, 0x21, 0x01, 0x20, 0x02, 0x41, 0x7c, 0x6a, 0x22, 0x02, 0x41, 0x03, 0x4b, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x02, 0x45, 0x0d, 0x01, 0x0b, 0x03, 0x40, 0x20, 0x03, 0x20, 0x01, 0x2d, 0x00, 0x00, 0x22, 0x00, 0x3a, 0x00, 0x00, 0x02, 0x40, 0x20, 0x00, 0x0d, 0x00, 0x20, 0x02, 0x21, 0x05, 0x0c, 0x03, 0x0b, 0x20, 0x03, 0x41, 0x01, 0x6a, 0x21, 0x03, 0x20, 0x01, 0x41, 0x01, 0x6a, 0x21, 0x01, 0x20, 0x02, 0x41, 0x7f, 0x6a, 0x22, 0x02, 0x0d, 0x00, 0x0b, 0x0b, 0x41, 0x00, 0x21, 0x05, 0x0b, 0x20, 0x03, 0x41, 0x00, 0x20, 0x05, 0x10, 0x94, 0x80, 0x80, 0x80, 0x00, 0x0b, 0x11, 0x00, 0x20, 0x00, 0x20, 0x01, 0x20, 0x02, 0x10, 0xa5, 0x80, 0x80, 0x80, 0x00, 0x1a, 0x20, 0x00, 0x0b, 0x17, 0x00, 0x20, 0x00, 0x41, 0x50, 0x6a, 0x41, 0x0a, 0x49, 0x20, 0x00, 0x41, 0x20, 0x72, 0x41, 0x9f, 0x7f, 0x6a, 0x41, 0x06, 0x49, 0x72, 0x0b, 0x2a, 0x01, 0x03, 0x7f, 0x41, 0x00, 0x21, 0x01, 0x03, 0x40, 0x20, 0x00, 0x20, 0x01, 0x6a, 0x21, 0x02, 0x20, 0x01, 0x41, 0x04, 0x6a, 0x22, 0x03, 0x21, 0x01, 0x20, 0x02, 0x28, 0x02, 0x00, 0x0d, 0x00, 0x0b, 0x20, 0x03, 0x41, 0x7c, 0x6a, 0x41, 0x02, 0x75, 0x0b, 0x45, 0x01, 0x01, 0x7f, 0x02, 0x40, 0x20, 0x01, 0x45, 0x0d, 0x00, 0x20, 0x00, 0x41, 0x7c, 0x6a, 0x21, 0x00, 0x02, 0x40, 0x03, 0x40, 0x20, 0x00, 0x41, 0x04, 0x6a, 0x22, 0x00, 0x28, 0x02, 0x00, 0x22, 0x02, 0x45, 0x0d, 0x01, 0x20, 0x02, 0x20, 0x01, 0x47, 0x0d, 0x00, 0x0b, 0x0b, 0x20, 0x00, 0x41, 0x00, 0x20, 0x02, 0x1b, 0x0f, 0x0b, 0x20, 0x00, 0x20, 0x00, 0x10, 0xa8, 0x80, 0x80, 0x80, 0x00, 0x41, 0x02, 0x74, 0x6a, 0x0b, 0x1d, 0x00, 0x02, 0x40, 0x20, 0x00, 0x0d, 0x00, 0x41, 0x00, 0x0f, 0x0b, 0x41, 0x90, 0xc2, 0x84, 0x80, 0x00, 0x20, 0x00, 0x10, 0xa9, 0x80, 0x80, 0x80, 0x00, 0x41, 0x00, 0x47, 0x0b, 0x24, 0x01, 0x01, 0x7f, 0x41, 0x01, 0x21, 0x01, 0x02, 0x40, 0x20, 0x00, 0x41, 0x50, 0x6a, 0x41, 0x0a, 0x49, 0x0d, 0x00, 0x20, 0x00, 0x10, 0x96, 0x80, 0x80, 0x80, 0x00, 0x41, 0x00, 0x47, 0x21, 0x01, 0x0b, 0x20, 0x01, 0x0b, 0x0b, 0xf1, 0x42, 0x01, 0x00, 0x41, 0x80, 0x80, 0x04, 0x0b, 0xe8, 0x42, 0x12, 0x11, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x11, 0x22, 0x23, 0x24, 0x11, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x11, 0x2d, 0x2e, 0x2f, 0x10, 0x10, 0x30, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x31, 0x32, 0x33, 0x10, 0x34, 0x35, 0x10, 0x10, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x36, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x37, 0x11, 
0x11, 0x11, 0x11, 0x38, 0x11, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x3f, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x11, 0x40, 0x41, 0x11, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a, 0x11, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, 0x50, 0x51, 0x10, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x10, 0x5e, 0x5f, 0x60, 0x10, 0x11, 0x11, 0x11, 0x61, 0x62, 0x63, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x11, 0x11, 0x11, 0x11, 0x64, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x11, 0x11, 0x65, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x11, 0x11, 0x66, 0x67, 0x10, 0x10, 0x68, 0x69, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x6a, 0x11, 0x11, 0x6b, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x11, 0x6c, 0x6d, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x6e, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x6f, 0x70, 0x71, 0x72, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x73, 0x74, 0x75, 0x10, 0x10, 0x10, 0x10, 0x10, 0x76, 0x77, 0x10, 0x10, 0x10, 0x10, 0x78, 0x10, 0x10, 0x79, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xfe, 0xff, 0xff, 0x07, 0xfe, 0xff, 0xff, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x20, 0x04, 0xff, 0xff, 0x7f, 0xff, 0xff, 0xff, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc3, 0xff, 0x03, 0x00, 0x1f, 0x50, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0xdf, 0xbc, 0x40, 0xd7, 0xff, 0xff, 0xfb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x03, 0xfc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xff, 0xff, 0xff, 0x7f, 0x02, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0x00, 0x00, 0x00, 0xff, 0xbf, 0xb6, 0x00, 0xff, 0xff, 0xff, 0x87, 0x07, 0x00, 0x00, 0x00, 0xff, 0x07, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xff, 0xc3, 0xff, 0xff, 0xff, 
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xef, 0x1f, 0xfe, 0xe1, 0xff, 0x9f, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0xe0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x03, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0x07, 0x30, 0x04, 0xff, 0xff, 0xff, 0xfc, 0xff, 0x1f, 0x00, 0x00, 0xff, 0xff, 0xff, 0x01, 0xff, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xdf, 0x3f, 0x00, 0x00, 0xf0, 0xff, 0xf8, 0x03, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xef, 0xff, 0xdf, 0xe1, 0xff, 0xcf, 0xff, 0xfe, 0xff, 0xef, 0x9f, 0xf9, 0xff, 0xff, 0xfd, 0xc5, 0xe3, 0x9f, 0x59, 0x80, 0xb0, 0xcf, 0xff, 0x03, 0x10, 0xee, 0x87, 0xf9, 0xff, 0xff, 0xfd, 0x6d, 0xc3, 0x87, 0x19, 0x02, 0x5e, 0xc0, 0xff, 0x3f, 0x00, 0xee, 0xbf, 0xfb, 0xff, 0xff, 0xfd, 0xed, 0xe3, 0xbf, 0x1b, 0x01, 0x00, 0xcf, 0xff, 0x00, 0x1e, 0xee, 0x9f, 0xf9, 0xff, 0xff, 0xfd, 0xed, 0xe3, 0x9f, 0x19, 0xc0, 0xb0, 0xcf, 0xff, 0x02, 0x00, 0xec, 0xc7, 0x3d, 0xd6, 0x18, 0xc7, 0xff, 0xc3, 0xc7, 0x1d, 0x81, 0x00, 0xc0, 0xff, 0x00, 0x00, 0xef, 0xdf, 0xfd, 0xff, 0xff, 0xfd, 0xff, 0xe3, 0xdf, 0x1d, 0x60, 0x07, 0xcf, 0xff, 0x00, 0x00, 0xef, 0xdf, 0xfd, 0xff, 0xff, 0xfd, 0xef, 0xe3, 0xdf, 0x1d, 0x60, 0x40, 0xcf, 0xff, 0x06, 0x00, 0xef, 0xdf, 0xfd, 0xff, 0xff, 0xff, 0xff, 0xe7, 0xdf, 0x5d, 0xf0, 0x80, 0xcf, 0xff, 0x00, 0xfc, 0xec, 0xff, 0x7f, 0xfc, 0xff, 0xff, 0xfb, 0x2f, 0x7f, 0x80, 0x5f, 0xff, 0xc0, 0xff, 0x0c, 0x00, 0xfe, 0xff, 0xff, 0xff, 0xff, 0x7f, 0xff, 0x07, 0x3f, 0x20, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0xd6, 0xf7, 0xff, 0xff, 0xaf, 0xff, 0xff, 0x3b, 0x5f, 0x20, 0xff, 0xf3, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xff, 0x03, 0x00, 0x00, 0xff, 0xfe, 0xff, 0xff, 0xff, 0x1f, 0xfe, 0xff, 0x03, 0xff, 0xff, 0xfe, 0xff, 0xff, 0xff, 0x1f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0xf9, 0xff, 0x03, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0xff, 0xff, 0xff, 0xff, 0xbf, 0x20, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3d, 0x7f, 0x3d, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3d, 0xff, 0xff, 0xff, 0xff, 0x3d, 0x7f, 0x3d, 0xff, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3d, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x07, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0x3f, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x9f, 0xff, 0xff, 0xfe, 0xff, 0xff, 0x07, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc7, 0xff, 0x01, 0xff, 0xdf, 0x0f, 0x00, 0xff, 0xff, 0x0f, 0x00, 0xff, 0xff, 0x0f, 0x00, 0xff, 0xdf, 0x0d, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xcf, 0xff, 0xff, 0x01, 0x80, 0x10, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0xff, 0x03, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0xff, 0xff, 0xff, 0xff, 0xff, 0x07, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0x00, 0xff, 0xff, 0xff, 0x7f, 0xff, 0x0f, 0xff, 0x01, 0xc0, 0xff, 0xff, 0xff, 0xff, 0x3f, 0x1f, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0f, 0xff, 0xff, 0xff, 0x03, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x0f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0xfe, 0xff, 0x1f, 0x00, 0xff, 0x03, 0xff, 0x03, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 
0xff, 0xff, 0xff, 0xff, 0xff, 0xef, 0xff, 0xef, 0x0f, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf3, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xbf, 0xff, 0x03, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0x00, 0xff, 0xe3, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0xff, 0x01, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe7, 0x00, 0x00, 0x00, 0x00, 0x00, 0xde, 0x6f, 0x04, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x80, 0xff, 0x1f, 0x00, 0xff, 0xff, 0x3f, 0x3f, 0xff, 0xff, 0xff, 0xff, 0x3f, 0x3f, 0xff, 0xaa, 0xff, 0xff, 0xff, 0x3f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xdf, 0x5f, 0xdc, 0x1f, 0xcf, 0x0f, 0xff, 0x1f, 0xdc, 0x1f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x80, 0x00, 0x00, 0xff, 0x1f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x84, 0xfc, 0x2f, 0x3e, 0x50, 0xbd, 0xff, 0xf3, 0xe0, 0x43, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x03, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1f, 0x78, 0x0c, 0x00, 0xff, 0xff, 0xff, 0xff, 0xbf, 0x20, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x80, 0x00, 0x00, 0xff, 0xff, 0x7f, 0x00, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe0, 0x00, 0x00, 0x00, 0xfe, 0x03, 0x3e, 0x1f, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0xe0, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0xe0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0x00, 0x00, 0xff, 0xff, 0xff, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0xff, 0x1f, 0xff, 0xff, 0xff, 0x0f, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0xf0, 0x8f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x80, 0xff, 0xfc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf9, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0xff, 0xbf, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0f, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x2f, 0x00, 0xff, 0x03, 0x00, 0x00, 0xfc, 0xe8, 0xff, 0xff, 0xff, 0xff, 0xff, 0x07, 0xff, 0xff, 0xff, 0xff, 0x07, 0x00, 0xff, 0xff, 0xff, 0x1f, 0xff, 
0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0xff, 0x00, 0x80, 0xff, 0x03, 0xff, 0xff, 0xff, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0x00, 0xff, 0x3f, 0xff, 0x03, 0xff, 0xff, 0x7f, 0xfc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0x05, 0x00, 0x00, 0x38, 0xff, 0xff, 0x3c, 0x00, 0x7e, 0x7e, 0x7e, 0x00, 0x7f, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0xff, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x07, 0xff, 0x03, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0f, 0x00, 0xff, 0xff, 0x7f, 0xf8, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x00, 0xf8, 0xe0, 0xff, 0xfd, 0x7f, 0x5f, 0xdb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x03, 0x00, 0x00, 0x00, 0xf8, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0x0f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xdf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1f, 0x00, 0x00, 0xff, 0x03, 0xfe, 0xff, 0xff, 0x07, 0xfe, 0xff, 0xff, 0x07, 0xc0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0xfc, 0xfc, 0xfc, 0x1c, 0x00, 0x00, 0x00, 0x00, 0xff, 0xef, 0xff, 0xff, 0x7f, 0xff, 0xff, 0xb7, 0xff, 0x3f, 0xff, 0x3f, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x1f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0x00, 0xe0, 0xff, 0xff, 0xff, 0x07, 0xff, 0xff, 0xff, 0xff, 0xff, 0x07, 0xff, 0xff, 0xff, 0x3f, 0xff, 0xff, 0xff, 0xff, 0x0f, 0xff, 0x3e, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0xff, 0x03, 0xff, 0xff, 0xff, 0xff, 0x0f, 0xff, 0xff, 0xff, 0xff, 0x0f, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0x00, 0xff, 0xff, 0x3f, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3f, 0xfd, 0xff, 0xff, 0xff, 0xff, 0xbf, 0x91, 0xff, 0xff, 0x3f, 0x00, 0xff, 0xff, 0x7f, 0x00, 0xff, 0xff, 0xff, 0x7f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0x37, 0x00, 0xff, 0xff, 0x3f, 0x00, 0xff, 0xff, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x6f, 0xf0, 0xef, 0xfe, 0xff, 0xff, 0x3f, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x1f, 0xff, 0xff, 0xff, 0x1f, 0x00, 0x00, 0x00, 0x00, 0xff, 0xfe, 0xff, 0xff, 0x1f, 0x00, 0x00, 0x00, 0xff, 
0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0x00, 0xff, 0xff, 0x3f, 0x00, 0xff, 0xff, 0x07, 0x00, 0xff, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x07, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x07, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x1f, 0x80, 0x00, 0xff, 0xff, 0x3f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7f, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0x00, 0x00, 0x00, 0xc0, 0xff, 0x00, 0x00, 0xfc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0xff, 0x01, 0xff, 0x03, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc7, 0xff, 0x70, 0x00, 0xff, 0xff, 0xff, 0xff, 0x47, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1e, 0x00, 0xff, 0x17, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xfb, 0xff, 0xff, 0xff, 0x9f, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0xbd, 0xff, 0xbf, 0xff, 0x01, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0xff, 0x03, 0xef, 0x9f, 0xf9, 0xff, 0xff, 0xfd, 0xed, 0xe3, 0x9f, 0x19, 0x81, 0xe0, 0x0f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xbb, 0x07, 0xff, 0x83, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xb3, 0x00, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0x7f, 0x00, 0x00, 0x00, 0x3f, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0x11, 0x00, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0x01, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xe7, 0xff, 0x07, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x03, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xfc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfc, 0x1a, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe7, 0x7f, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x20, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0xff, 0xfd, 0xff, 0xff, 0xff, 0xff, 0x7f, 0x7f, 0x01, 0x00, 0xff, 0x03, 0x00, 0x00, 0xfc, 0xff, 0xff, 0xff, 0xfc, 0xff, 0xff, 0xfe, 0x7f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0xfb, 0xff, 0xff, 0xff, 0xff, 0x7f, 0xb4, 0xcb, 0x00, 0xff, 0x03, 0xbf, 0xfd, 0xff, 0xff, 0xff, 0x7f, 0x7b, 0x01, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7f, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 
0xff, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0xff, 0xff, 0xff, 0x7f, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x3f, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x0f, 0x00, 0xff, 0x03, 0xf8, 0xff, 0xff, 0xe0, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x87, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x80, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x07, 0x00, 0xff, 0xff, 0xff, 0x7f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0xf0, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x07, 0xff, 0x1f, 0xff, 0x01, 0xff, 0x43, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xdf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xdf, 0x64, 0xde, 0xff, 0xeb, 0xef, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xbf, 0xe7, 0xdf, 0xdf, 0xff, 0xff, 0xff, 0x7b, 0x5f, 0xfc, 0xfd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f, 0xff, 0xff, 0xff, 0xfd, 0xff, 0xff, 0xf7, 0xff, 0xff, 0xff, 0xf7, 0xff, 0xff, 0xdf, 0xff, 0xff, 0xff, 0xdf, 0xff, 0xff, 0x7f, 0xff, 0xff, 0xff, 0x7f, 0xff, 0xff, 0xff, 0xfd, 0xff, 0xff, 0xff, 0xfd, 0xff, 0xff, 0xf7, 0xcf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0xff, 0xff, 0xf9, 0xdb, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 
0xff, 0xff, 0xff, 0xff, 0x1f, 0x80, 0x3f, 0xff, 0x43, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0f, 0xff, 0x03, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x8f, 0x08, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xef, 0xff, 0xff, 0xff, 0x96, 0xfe, 0xf7, 0x0a, 0x84, 0xea, 0x96, 0xaa, 0x96, 0xf7, 0xf7, 0x5e, 0xff, 0xfb, 0xff, 0x0f, 0xee, 0xfb, 0xff, 0x0f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x03, 0xff, 0xff, 0xff, 0x03, 0xff, 0xff, 0xff, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x56, 0x01, 0x00, 0x00, 0x39, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x20, 0x00, 0x00, 0x00, 0xe0, 0xff, 0xff, 0x00, 0xbf, 0x1d, 0x00, 0x00, 0xe7, 0x02, 0x00, 0x00, 0x79, 0x00, 0x00, 0x02, 0x24, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x00, 0x00, 0x00, 0xfe, 0xff, 0xff, 0x01, 0x39, 0xff, 0xff, 0x00, 0x18, 0xff, 0xff, 0x01, 0x87, 0xff, 0xff, 0x00, 0xd4, 0xfe, 0xff, 0x00, 0xc3, 0x00, 0x00, 0x01, 0xd2, 0x00, 0x00, 0x01, 0xce, 0x00, 0x00, 0x01, 0xcd, 0x00, 0x00, 0x01, 0x4f, 0x00, 0x00, 0x01, 0xca, 0x00, 0x00, 0x01, 0xcb, 0x00, 0x00, 0x01, 0xcf, 0x00, 0x00, 0x00, 0x61, 0x00, 0x00, 0x01, 0xd3, 0x00, 0x00, 0x01, 0xd1, 0x00, 0x00, 0x00, 0xa3, 0x00, 0x00, 0x01, 0xd5, 0x00, 0x00, 0x00, 0x82, 0x00, 0x00, 0x01, 0xd6, 0x00, 0x00, 0x01, 0xda, 0x00, 0x00, 0x01, 0xd9, 0x00, 0x00, 0x01, 0xdb, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0xb1, 0xff, 0xff, 0x01, 0x9f, 0xff, 0xff, 0x01, 0xc8, 0xff, 0xff, 0x02, 0x28, 0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x00, 0x33, 0xff, 0xff, 0x00, 0x26, 0xff, 0xff, 0x01, 0x7e, 0xff, 0xff, 0x01, 0x2b, 0x2a, 0x00, 0x01, 0x5d, 0xff, 0xff, 0x01, 0x28, 0x2a, 0x00, 0x00, 0x3f, 0x2a, 0x00, 0x01, 0x3d, 0xff, 0xff, 0x01, 0x45, 0x00, 0x00, 0x01, 0x47, 0x00, 0x00, 0x00, 0x1f, 0x2a, 0x00, 0x00, 0x1c, 0x2a, 0x00, 0x00, 0x1e, 0x2a, 0x00, 0x00, 0x2e, 0xff, 0xff, 0x00, 0x32, 0xff, 0xff, 0x00, 0x36, 0xff, 0xff, 0x00, 0x35, 0xff, 0xff, 0x00, 0x4f, 0xa5, 0x00, 0x00, 0x4b, 0xa5, 0x00, 0x00, 0x31, 0xff, 0xff, 0x00, 0x28, 0xa5, 0x00, 0x00, 0x44, 0xa5, 0x00, 0x00, 0x2f, 0xff, 0xff, 0x00, 0x2d, 0xff, 0xff, 0x00, 0xf7, 0x29, 0x00, 0x00, 0x41, 0xa5, 0x00, 0x00, 0xfd, 0x29, 0x00, 0x00, 0x2b, 0xff, 0xff, 0x00, 0x2a, 0xff, 0xff, 0x00, 0xe7, 0x29, 0x00, 0x00, 0x43, 0xa5, 0x00, 0x00, 0x2a, 0xa5, 0x00, 0x00, 0xbb, 0xff, 0xff, 0x00, 0x27, 0xff, 0xff, 0x00, 0xb9, 0xff, 0xff, 0x00, 0x25, 0xff, 0xff, 0x00, 0x15, 0xa5, 0x00, 0x00, 0x12, 0xa5, 0x00, 0x02, 0x24, 0x4c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x20, 0x00, 0x00, 0x00, 0xe0, 0xff, 0xff, 0x01, 0x01, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x00, 0x54, 0x00, 0x00, 0x01, 0x74, 0x00, 0x00, 0x01, 0x26, 0x00, 0x00, 0x01, 0x25, 0x00, 0x00, 0x01, 0x40, 0x00, 0x00, 0x01, 0x3f, 0x00, 0x00, 0x00, 0xda, 0xff, 0xff, 0x00, 0xdb, 0xff, 0xff, 0x00, 0xe1, 0xff, 0xff, 0x00, 
0xc0, 0xff, 0xff, 0x00, 0xc1, 0xff, 0xff, 0x01, 0x08, 0x00, 0x00, 0x00, 0xc2, 0xff, 0xff, 0x00, 0xc7, 0xff, 0xff, 0x00, 0xd1, 0xff, 0xff, 0x00, 0xca, 0xff, 0xff, 0x00, 0xf8, 0xff, 0xff, 0x00, 0xaa, 0xff, 0xff, 0x00, 0xb0, 0xff, 0xff, 0x00, 0x07, 0x00, 0x00, 0x00, 0x8c, 0xff, 0xff, 0x01, 0xc4, 0xff, 0xff, 0x00, 0xa0, 0xff, 0xff, 0x01, 0xf9, 0xff, 0xff, 0x02, 0x1a, 0x70, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x01, 0x20, 0x00, 0x00, 0x00, 0xe0, 0xff, 0xff, 0x01, 0x50, 0x00, 0x00, 0x01, 0x0f, 0x00, 0x00, 0x00, 0xf1, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x01, 0x30, 0x00, 0x00, 0x00, 0xd0, 0xff, 0xff, 0x01, 0x01, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc0, 0x0b, 0x00, 0x01, 0x60, 0x1c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xd0, 0x97, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0xf8, 0xff, 0xff, 0x02, 0x05, 0x8a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x40, 0xf4, 0xff, 0x00, 0x9e, 0xe7, 0xff, 0x00, 0xc2, 0x89, 0x00, 0x00, 0xdb, 0xe7, 0xff, 0x00, 0x92, 0xe7, 0xff, 0x00, 0x93, 0xe7, 0xff, 0x00, 0x9c, 0xe7, 0xff, 0x00, 0x9d, 0xe7, 0xff, 0x00, 0xa4, 0xe7, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38, 0x8a, 0x00, 0x00, 0x04, 0x8a, 0x00, 0x00, 0xe6, 0x0e, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc5, 0xff, 0xff, 0x01, 0x41, 0xe2, 0xff, 0x02, 0x1d, 0x8f, 0x00, 0x00, 0x08, 0x00, 0x00, 0x01, 0xf8, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x56, 0x00, 0x00, 0x01, 0xaa, 0xff, 0xff, 0x00, 0x4a, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x7e, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x01, 0xb6, 0xff, 0xff, 0x01, 0xf7, 0xff, 0xff, 0x00, 0xdb, 0xe3, 0xff, 0x01, 0x9c, 0xff, 0xff, 0x01, 0x90, 0xff, 0xff, 0x01, 0x80, 0xff, 0xff, 0x01, 0x82, 0xff, 0xff, 0x02, 0x05, 0xac, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x10, 0x00, 0x00, 0x00, 0xf0, 0xff, 0xff, 0x01, 0x1c, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0xa3, 0xe2, 0xff, 0x01, 0x41, 0xdf, 0xff, 0x01, 0xba, 0xdf, 0xff, 0x00, 0xe4, 0xff, 0xff, 0x02, 0x0b, 0xb1, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x01, 0x30, 0x00, 0x00, 0x00, 0xd0, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x01, 0x09, 0xd6, 0xff, 0x01, 0x1a, 0xf1, 0xff, 0x01, 0x19, 0xd6, 0xff, 0x00, 0xd5, 0xd5, 0xff, 0x00, 0xd8, 0xd5, 0xff, 0x01, 0xe4, 0xd5, 0xff, 0x01, 0x03, 0xd6, 0xff, 0x01, 0xe1, 0xd5, 0xff, 0x01, 0xe2, 0xd5, 0xff, 0x01, 0xc1, 0xd5, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa0, 0xe3, 0xff, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x02, 0x0c, 0xbc, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0x01, 0xbc, 0x5a, 0xff, 0x01, 0xa0, 0x03, 0x00, 0x01, 0xfc, 0x75, 0xff, 0x01, 0xd8, 0x5a, 0xff, 0x00, 0x30, 0x00, 0x00, 0x01, 0xb1, 0x5a, 0xff, 0x01, 0xb5, 0x5a, 0xff, 0x01, 0xbf, 0x5a, 0xff, 0x01, 0xee, 0x5a, 0xff, 0x01, 0xd6, 0x5a, 0xff, 0x01, 0xeb, 0x5a, 0xff, 0x01, 0xd0, 0xff, 0xff, 0x01, 0xbd, 0x5a, 0xff, 0x01, 0xc8, 0x75, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0x68, 0xff, 0x00, 0x60, 0xfc, 0xff, 0x00, 0x00, 0x00, 0x00, 0x01, 0x20, 0x00, 0x00, 0x00, 0xe0, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x01, 0x28, 0x00, 0x00, 0x00, 0xd8, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x01, 0x40, 0x00, 0x00, 0x00, 0xc0, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x01, 0x20, 0x00, 0x00, 0x00, 0xe0, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x01, 0x20, 0x00, 0x00, 0x00, 0xe0, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x01, 0x22, 0x00, 0x00, 0x00, 0xde, 0xff, 0xff, 0x30, 0x0c, 0x31, 0x0d, 0x78, 0x0e, 0x7f, 0x0f, 0x80, 0x10, 0x81, 0x11, 0x86, 0x12, 0x89, 0x13, 0x8a, 
0x13, 0x8e, 0x14, 0x8f, 0x15, 0x90, 0x16, 0x93, 0x13, 0x94, 0x17, 0x95, 0x18, 0x96, 0x19, 0x97, 0x1a, 0x9a, 0x1b, 0x9c, 0x19, 0x9d, 0x1c, 0x9e, 0x1d, 0x9f, 0x1e, 0xa6, 0x1f, 0xa9, 0x1f, 0xae, 0x1f, 0xb1, 0x20, 0xb2, 0x20, 0xb7, 0x21, 0xbf, 0x22, 0xc5, 0x23, 0xc8, 0x23, 0xcb, 0x23, 0xdd, 0x24, 0xf2, 0x23, 0xf6, 0x25, 0xf7, 0x26, 0x20, 0x2d, 0x3a, 0x2e, 0x3d, 0x2f, 0x3e, 0x30, 0x3f, 0x31, 0x40, 0x31, 0x43, 0x32, 0x44, 0x33, 0x45, 0x34, 0x50, 0x35, 0x51, 0x36, 0x52, 0x37, 0x53, 0x38, 0x54, 0x39, 0x59, 0x3a, 0x5b, 0x3b, 0x5c, 0x3c, 0x61, 0x3d, 0x63, 0x3e, 0x65, 0x3f, 0x66, 0x40, 0x68, 0x41, 0x69, 0x42, 0x6a, 0x40, 0x6b, 0x43, 0x6c, 0x44, 0x6f, 0x42, 0x71, 0x45, 0x72, 0x46, 0x75, 0x47, 0x7d, 0x48, 0x82, 0x49, 0x87, 0x4a, 0x89, 0x4b, 0x8a, 0x4c, 0x8b, 0x4c, 0x8c, 0x4d, 0x92, 0x4e, 0x9d, 0x4f, 0x9e, 0x50, 0x45, 0x57, 0x7b, 0x1d, 0x7c, 0x1d, 0x7d, 0x1d, 0x7f, 0x58, 0x86, 0x59, 0x88, 0x5a, 0x89, 0x5a, 0x8a, 0x5a, 0x8c, 0x5b, 0x8e, 0x5c, 0x8f, 0x5c, 0xac, 0x5d, 0xad, 0x5e, 0xae, 0x5e, 0xaf, 0x5e, 0xc2, 0x5f, 0xcc, 0x60, 0xcd, 0x61, 0xce, 0x61, 0xcf, 0x62, 0xd0, 0x63, 0xd1, 0x64, 0xd5, 0x65, 0xd6, 0x66, 0xd7, 0x67, 0xf0, 0x68, 0xf1, 0x69, 0xf2, 0x6a, 0xf3, 0x6b, 0xf4, 0x6c, 0xf5, 0x6d, 0xf9, 0x6e, 0xfd, 0x2d, 0xfe, 0x2d, 0xff, 0x2d, 0x50, 0x69, 0x51, 0x69, 0x52, 0x69, 0x53, 0x69, 0x54, 0x69, 0x55, 0x69, 0x56, 0x69, 0x57, 0x69, 0x58, 0x69, 0x59, 0x69, 0x5a, 0x69, 0x5b, 0x69, 0x5c, 0x69, 0x5d, 0x69, 0x5e, 0x69, 0x5f, 0x69, 0x82, 0x00, 0x83, 0x00, 0x84, 0x00, 0x85, 0x00, 0x86, 0x00, 0x87, 0x00, 0x88, 0x00, 0x89, 0x00, 0xc0, 0x75, 0xcf, 0x76, 0x80, 0x89, 0x81, 0x8a, 0x82, 0x8b, 0x85, 0x8c, 0x86, 0x8d, 0x70, 0x9d, 0x71, 0x9d, 0x76, 0x9e, 0x77, 0x9e, 0x78, 0x9f, 0x79, 0x9f, 0x7a, 0xa0, 0x7b, 0xa0, 0x7c, 0xa1, 0x7d, 0xa1, 0xb3, 0xa2, 0xba, 0xa3, 0xbb, 0xa3, 0xbc, 0xa4, 0xbe, 0xa5, 0xc3, 0xa2, 0xcc, 0xa4, 0xda, 0xa6, 0xdb, 0xa6, 0xe5, 0x6a, 0xea, 0xa7, 0xeb, 0xa7, 0xec, 0x6e, 0xf3, 0xa2, 0xf8, 0xa8, 0xf9, 0xa8, 0xfa, 0xa9, 0xfb, 0xa9, 0xfc, 0xa4, 0x26, 0xb0, 0x2a, 0xb1, 0x2b, 0xb2, 0x4e, 0xb3, 0x84, 0x08, 0x62, 0xba, 0x63, 0xbb, 0x64, 0xbc, 0x65, 0xbd, 0x66, 0xbe, 0x6d, 0xbf, 0x6e, 0xc0, 0x6f, 0xc1, 0x70, 0xc2, 0x7e, 0xc3, 0x7f, 0xc3, 0x7d, 0xcf, 0x8d, 0xd0, 0x94, 0xd1, 0xab, 0xd2, 0xac, 0xd3, 0xad, 0xd4, 0xb0, 0xd5, 0xb1, 0xd6, 0xb2, 0xd7, 0xc4, 0xd8, 0xc5, 0xd9, 0xc6, 0xda, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x0d, 0x06, 0x06, 0x0e, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x0f, 0x10, 0x11, 0x12, 0x06, 0x13, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x14, 0x15, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x16, 0x17, 0x06, 0x06, 0x06, 0x18, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 
0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x19, 0x06, 0x06, 0x06, 0x06, 0x1a, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x1b, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x1c, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x1d, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x1e, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x24, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x01, 0x00, 0x54, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x07, 0x2b, 0x2b, 0x5b, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x4a, 0x56, 0x56, 0x05, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x24, 0x50, 0x79, 0x31, 0x50, 0x31, 0x50, 0x31, 0x38, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x4e, 0x31, 0x02, 0x4e, 0x0d, 0x0d, 0x4e, 0x03, 0x4e, 0x00, 0x24, 0x6e, 0x00, 0x4e, 0x31, 0x26, 0x6e, 0x51, 0x4e, 0x24, 0x50, 0x4e, 0x39, 0x14, 0x81, 0x1b, 0x1d, 0x1d, 0x53, 0x31, 0x50, 0x31, 0x50, 0x0d, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x1b, 0x53, 0x24, 0x50, 0x31, 0x02, 0x5c, 0x7b, 0x5c, 0x7b, 0x5c, 0x7b, 0x5c, 0x7b, 0x5c, 0x7b, 0x14, 0x79, 0x5c, 0x7b, 0x5c, 0x7b, 0x5c, 0x2d, 0x2b, 0x49, 0x03, 0x48, 0x03, 0x78, 0x5c, 0x7b, 0x14, 
0x00, 0x96, 0x0a, 0x01, 0x2b, 0x28, 0x06, 0x06, 0x00, 0x2a, 0x06, 0x2a, 0x2a, 0x2b, 0x07, 0xbb, 0xb5, 0x2b, 0x1e, 0x00, 0x2b, 0x07, 0x2b, 0x2b, 0x2b, 0x01, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x01, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2a, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0xcd, 0x46, 0xcd, 0x2b, 0x00, 0x25, 0x2b, 0x07, 0x01, 0x06, 0x01, 0x55, 0x56, 0x56, 0x56, 0x56, 0x56, 0x55, 0x56, 0x56, 0x02, 0x24, 0x81, 0x81, 0x81, 0x81, 0x81, 0x15, 0x81, 0x81, 0x81, 0x00, 0x00, 0x2b, 0x00, 0xb2, 0xd1, 0xb2, 0xd1, 0xb2, 0xd1, 0xb2, 0xd1, 0x00, 0x00, 0xcd, 0xcc, 0x01, 0x00, 0xd7, 0xd7, 0xd7, 0xd7, 0xd7, 0x83, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0x1c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x02, 0x00, 0x00, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x4e, 0x31, 0x50, 0x31, 0x50, 0x4e, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x02, 0x87, 0xa6, 0x87, 0xa6, 0x87, 0xa6, 0x87, 0xa6, 0x87, 0xa6, 0x87, 0xa6, 0x87, 0xa6, 0x87, 0xa6, 0x2a, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x00, 0x00, 0x00, 0x54, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x54, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x0c, 0x00, 0x0c, 0x2a, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x07, 0x2a, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2a, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x56, 0x56, 0x6c, 0x81, 0x15, 0x00, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x07, 0x6c, 0x03, 0x41, 0x2b, 0x2b, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x2c, 0x56, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x6c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x56, 0x7a, 0x9e, 0x26, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x25, 0x06, 0x01, 0x2b, 0x2b, 0x4f, 0x56, 0x56, 0x2c, 0x2b, 0x7f, 0x56, 0x56, 0x39, 0x2b, 0x2b, 0x55, 0x56, 0x56, 0x2b, 0x2b, 0x4f, 0x56, 0x56, 0x2c, 0x2b, 0x7f, 0x56, 0x56, 0x81, 0x37, 0x75, 0x5b, 0x7b, 0x5c, 0x2b, 0x2b, 0x4f, 0x56, 0x56, 0x02, 0xac, 0x04, 0x00, 0x00, 0x39, 0x2b, 0x2b, 0x55, 0x56, 0x56, 0x2b, 0x2b, 0x4f, 0x56, 0x56, 0x2c, 0x2b, 0x2b, 0x56, 0x56, 0x32, 0x13, 0x81, 0x57, 0x00, 0x6f, 0x81, 0x7e, 0xc9, 0xd7, 0x7e, 0x2d, 0x81, 0x81, 0x0e, 0x7e, 0x39, 0x7f, 0x6f, 0x57, 0x00, 0x81, 0x81, 0x7e, 0x15, 0x00, 0x7e, 0x03, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x07, 0x2b, 0x24, 0x2b, 0x97, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2a, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x56, 0x56, 0x56, 0x56, 0x56, 0x80, 0x81, 0x81, 0x81, 0x81, 0x39, 0xbb, 0x2a, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x01, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0xc9, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xd0, 0x0d, 0x00, 0x4e, 0x31, 0x02, 0xb4, 0xc1, 0xc1, 0xd7, 0xd7, 0x24, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0xd7, 0xd7, 0x53, 0xc1, 0x47, 0xd4, 0xd7, 0xd7, 0xd7, 0x05, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x07, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4e, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x0d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x24, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x31, 0x50, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 
0x2b, 0x2b, 0x2b, 0x2b, 0x79, 0x5c, 0x7b, 0x5c, 0x7b, 0x4f, 0x7b, 0x5c, 0x7b, 0x5c, 0x7b, 0x5c, 0x7b, 0x5c, 0x7b, 0x5c, 0x7b, 0x5c, 0x7b, 0x5c, 0x7b, 0x5c, 0x7b, 0x5c, 0x7b, 0x5c, 0x2d, 0x2b, 0x2b, 0x79, 0x14, 0x5c, 0x7b, 0x5c, 0x2d, 0x79, 0x2a, 0x5c, 0x27, 0x5c, 0x7b, 0x5c, 0x7b, 0x5c, 0x7b, 0xa4, 0x00, 0x0a, 0xb4, 0x5c, 0x7b, 0x5c, 0x7b, 0x4f, 0x03, 0x2a, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2a, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x07, 0x00, 0x48, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x55, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x24, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x07, 0x00, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x24, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x07, 0x00, 0x00, 0x00, 0x00, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2a, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2a, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x2b, 0x55, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x27, 0x51, 0x6f, 0x77, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7c, 0x00, 0x00, 0x7f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x83, 0x8e, 0x92, 0x97, 0x00, 0xaa, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xb4, 0xc4, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc6, 0xc9, 0x00, 0x00, 0x00, 0xdb, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xde, 0x00, 0x00, 0x00, 0x00, 0xe1, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe4, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe7, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xea, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xed, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x85, 0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x01, 0x20, 0x00, 0x00, 0x02, 0x20, 0x00, 0x00, 0x03, 0x20, 0x00, 0x00, 0x04, 0x20, 0x00, 0x00, 0x05, 0x20, 0x00, 0x00, 0x06, 0x20, 0x00, 0x00, 0x08, 0x20, 0x00, 0x00, 0x09, 0x20, 0x00, 0x00, 0x0a, 0x20, 0x00, 0x00, 0x28, 0x20, 0x00, 0x00, 0x29, 0x20, 0x00, 0x00, 0x5f, 0x20, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa0, 0x05, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x00, 0x0c, 0x0b, 0x73, 0x74, 0x64, 0x6c, 0x69, 0x62, 0x2e, 0x77, 0x61, 0x73, 0x6d, 0x01, 0xc9, 0x04, 0x2c, 0x00, 0x2a, 0x5f, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x77, 0x61, 0x73, 0x69, 0x5f, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x70, 0x72, 0x65, 0x76, 0x69, 0x65, 0x77, 0x31, 0x5f, 0x61, 0x72, 0x67, 0x73, 0x5f, 0x67, 0x65, 0x74, 0x01, 0x30, 0x5f, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x77, 0x61, 0x73, 0x69, 0x5f, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x70, 0x72, 0x65, 0x76, 0x69, 0x65, 0x77, 0x31, 0x5f, 0x61, 0x72, 0x67, 0x73, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x73, 0x5f, 0x67, 0x65, 0x74, 0x02, 0x2b, 0x5f, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x77, 0x61, 0x73, 0x69, 0x5f, 0x73, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x5f, 0x70, 0x72, 0x65, 0x76, 0x69, 0x65, 0x77, 0x31, 0x5f, 0x70, 0x72, 0x6f, 0x63, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x03, 0x11, 0x5f, 0x5f, 0x77, 0x61, 0x73, 0x6d, 0x5f, 0x63, 0x61, 0x6c, 0x6c, 0x5f, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x04, 0x13, 0x75, 0x6e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x64, 0x5f, 0x77, 0x65, 0x61, 0x6b, 0x3a, 0x6d, 0x61, 0x69, 0x6e, 0x05, 0x12, 0x5f, 0x5f, 0x77, 0x61, 0x73, 0x6d, 0x5f, 0x69, 0x6e, 0x69, 0x74, 0x5f, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x06, 0x06, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x07, 0x0a, 0x72, 0x65, 0x73, 0x65, 0x74, 0x5f, 0x68, 0x65, 0x61, 0x70, 0x08, 0x06, 0x6d, 0x61, 0x6c, 0x6c, 0x6f, 0x63, 0x09, 0x04, 0x66, 0x72, 0x65, 0x65, 0x0a, 0x06, 0x63, 0x61, 0x6c, 0x6c, 0x6f, 0x63, 0x0b, 0x07, 0x72, 0x65, 0x61, 0x6c, 0x6c, 0x6f, 0x63, 0x0c, 0x05, 0x5f, 0x45, 0x78, 0x69, 0x74, 0x0d, 0x0b, 0x5f, 0x5f, 0x6d, 0x61, 0x69, 0x6e, 0x5f, 0x76, 0x6f, 0x69, 0x64, 0x0e, 0x0f, 0x5f, 0x5f, 0x77, 0x61, 0x73, 0x69, 0x5f, 0x61, 0x72, 0x67, 0x73, 0x5f, 0x67, 0x65, 0x74, 0x0f, 0x15, 0x5f, 0x5f, 0x77, 0x61, 0x73, 0x69, 0x5f, 0x61, 0x72, 0x67, 0x73, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x73, 0x5f, 0x67, 0x65, 0x74, 0x10, 0x10, 0x5f, 0x5f, 0x77, 0x61, 0x73, 0x69, 0x5f, 0x70, 0x72, 0x6f, 0x63, 0x5f, 0x65, 0x78, 0x69, 0x74, 0x11, 0x05, 0x64, 0x75, 0x6d, 0x6d, 0x79, 0x12, 0x11, 0x5f, 0x5f, 0x77, 0x61, 0x73, 0x6d, 0x5f, 0x63, 0x61, 0x6c, 0x6c, 0x5f, 0x64, 0x74, 0x6f, 0x72, 0x73, 0x13, 0x06, 0x6d, 0x65, 0x6d, 0x63, 0x70, 0x79, 0x14, 0x06, 0x6d, 0x65, 0x6d, 0x73, 0x65, 0x74, 0x15, 0x06, 0x73, 0x74, 0x72, 0x6c, 0x65, 0x6e, 0x16, 0x08, 0x69, 0x73, 0x77, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x17, 0x06, 0x6d, 0x65, 0x6d, 0x63, 0x6d, 0x70, 0x18, 0x06, 0x6d, 0x65, 0x6d, 0x63, 0x68, 0x72, 0x19, 0x06, 0x73, 0x74, 0x72, 0x63, 0x6d, 0x70, 0x1a, 0x08, 0x74, 0x6f, 0x77, 0x6c, 0x6f, 0x77, 0x65, 0x72, 0x1b, 0x07, 0x63, 0x61, 0x73, 0x65, 0x6d, 0x61, 0x70, 0x1c, 0x08, 0x74, 0x6f, 0x77, 0x75, 0x70, 0x70, 0x65, 0x72, 0x1d, 0x07, 0x73, 0x74, 0x72, 0x6e, 0x63, 0x6d, 0x70, 0x1e, 0x08, 0x69, 0x73, 0x77, 0x75, 0x70, 0x70, 0x65, 0x72, 0x1f, 0x07, 0x6d, 
0x65, 0x6d, 0x6d, 0x6f, 0x76, 0x65, 0x20, 0x08, 0x69, 0x73, 0x77, 0x6c, 0x6f, 0x77, 0x65, 0x72, 0x21, 0x07, 0x69, 0x73, 0x62, 0x6c, 0x61, 0x6e, 0x6b, 0x22, 0x08, 0x69, 0x73, 0x77, 0x62, 0x6c, 0x61, 0x6e, 0x6b, 0x23, 0x08, 0x69, 0x73, 0x77, 0x64, 0x69, 0x67, 0x69, 0x74, 0x24, 0x07, 0x73, 0x74, 0x72, 0x6e, 0x63, 0x61, 0x74, 0x25, 0x09, 0x5f, 0x5f, 0x73, 0x74, 0x70, 0x6e, 0x63, 0x70, 0x79, 0x26, 0x07, 0x73, 0x74, 0x72, 0x6e, 0x63, 0x70, 0x79, 0x27, 0x09, 0x69, 0x73, 0x77, 0x78, 0x64, 0x69, 0x67, 0x69, 0x74, 0x28, 0x06, 0x77, 0x63, 0x73, 0x6c, 0x65, 0x6e, 0x29, 0x06, 0x77, 0x63, 0x73, 0x63, 0x68, 0x72, 0x2a, 0x08, 0x69, 0x73, 0x77, 0x73, 0x70, 0x61, 0x63, 0x65, 0x2b, 0x08, 0x69, 0x73, 0x77, 0x61, 0x6c, 0x6e, 0x75, 0x6d, 0x07, 0x33, 0x02, 0x00, 0x0f, 0x5f, 0x5f, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x5f, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x01, 0x1f, 0x47, 0x4f, 0x54, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x5f, 0x5f, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x5f, 0x62, 0x61, 0x73, 0x65, 0x09, 0x0a, 0x01, 0x00, 0x07, 0x2e, 0x72, 0x6f, 0x64, 0x61, 0x74, 0x61, 0x00, 0x8e, 0x01, 0x09, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x72, 0x73, 0x02, 0x08, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x01, 0x03, 0x43, 0x31, 0x31, 0x00, 0x0c, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x2d, 0x62, 0x79, 0x01, 0x05, 0x63, 0x6c, 0x61, 0x6e, 0x67, 0x5f, 0x31, 0x39, 0x2e, 0x31, 0x2e, 0x35, 0x2d, 0x77, 0x61, 0x73, 0x69, 0x2d, 0x73, 0x64, 0x6b, 0x20, 0x28, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6c, 0x6c, 0x76, 0x6d, 0x2f, 0x6c, 0x6c, 0x76, 0x6d, 0x2d, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x20, 0x61, 0x62, 0x34, 0x62, 0x35, 0x61, 0x32, 0x64, 0x62, 0x35, 0x38, 0x32, 0x39, 0x35, 0x38, 0x61, 0x66, 0x31, 0x65, 0x65, 0x33, 0x30, 0x38, 0x61, 0x37, 0x39, 0x30, 0x63, 0x66, 0x64, 0x62, 0x34, 0x32, 0x62, 0x64, 0x32, 0x34, 0x37, 0x32, 0x30, 0x29, 0x00, 0x56, 0x0f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x05, 0x2b, 0x0b, 0x62, 0x75, 0x6c, 0x6b, 0x2d, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x2b, 0x0a, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2b, 0x0f, 0x6d, 0x75, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x2d, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x73, 0x2b, 0x0f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2d, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2b, 0x08, 0x73, 0x69, 0x67, 0x6e, 0x2d, 0x65, 0x78, 0x74 }; unsigned int STDLIB_WASM_LEN = 15673; #endif hx-0.3.0+20250717/bindings/vendor/src/wasm_store.c000066400000000000000000001714701503625671400212670ustar00rootroot00000000000000#include "tree_sitter/api.h" #include "./parser.h" #include #ifdef TREE_SITTER_FEATURE_WASM #include "./alloc.h" #include "./array.h" #include "./atomic.h" #include "./language.h" #include "./lexer.h" #include "./wasm/wasm-stdlib.h" #include "./wasm_store.h" #include #include #include #ifdef _MSC_VER #pragma warning(push) #pragma warning(disable : 4100) #elif defined(__GNUC__) || defined(__clang__) #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wunused-parameter" #endif #define array_len(a) (sizeof(a) / sizeof(a[0])) // The following symbols from the C and C++ standard libraries are available // for external scanners to use. const char *STDLIB_SYMBOLS[] = { #include "./stdlib-symbols.txt" }; // The contents of the `dylink.0` custom section of a wasm module, // as specified by the current WebAssembly dynamic linking ABI proposal. 
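// Illustrative note (not part of the upstream file): the `dylink.0` custom
// section starts with a WASM_DYLINK_MEM_INFO (0x01) subsection whose payload
// is four unsigned LEB128 values, in this order:
//
//   memory_size | memory_align | table_size | table_align
//
// Each value is decoded exactly as `read_uleb128` below does it: seven bits
// per byte, least-significant group first, with the high bit of each byte as
// a continuation flag. For example, the byte sequence 0xE5 0x8E 0x26 decodes
// to 0x65 | (0x0E << 7) | (0x26 << 14) = 624485.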
typedef struct { uint32_t memory_size; uint32_t memory_align; uint32_t table_size; uint32_t table_align; } WasmDylinkInfo; // WasmLanguageId - A pointer used to identify a language. This language id is // reference-counted, so that its ownership can be shared between the language // itself and the instances of the language that are held in wasm stores. typedef struct { volatile uint32_t ref_count; volatile uint32_t is_language_deleted; } WasmLanguageId; // LanguageWasmModule - Additional data associated with a wasm-backed // `TSLanguage`. This data is read-only and does not reference a particular // wasm store, so it can be shared by all users of a `TSLanguage`. A pointer to // this is stored on the language itself. typedef struct { volatile uint32_t ref_count; WasmLanguageId *language_id; wasmtime_module_t *module; const char *name; char *symbol_name_buffer; char *field_name_buffer; WasmDylinkInfo dylink_info; } LanguageWasmModule; // LanguageWasmInstance - Additional data associated with an instantiation of // a `TSLanguage` in a particular wasm store. The wasm store holds one of // these structs for each language that it has instantiated. typedef struct { WasmLanguageId *language_id; wasmtime_instance_t instance; int32_t external_states_address; int32_t lex_main_fn_index; int32_t lex_keyword_fn_index; int32_t scanner_create_fn_index; int32_t scanner_destroy_fn_index; int32_t scanner_serialize_fn_index; int32_t scanner_deserialize_fn_index; int32_t scanner_scan_fn_index; } LanguageWasmInstance; typedef struct { uint32_t reset_heap; uint32_t proc_exit; uint32_t abort; uint32_t assert_fail; uint32_t notify_memory_growth; uint32_t debug_message; uint32_t at_exit; uint32_t args_get; uint32_t args_sizes_get; } BuiltinFunctionIndices; // TSWasmStore - A struct that allows a given `Parser` to use wasm-backed // languages. This struct is mutable, and can only be used by one parser at a // time. struct TSWasmStore { wasm_engine_t *engine; wasmtime_store_t *store; wasmtime_table_t function_table; wasmtime_memory_t memory; TSLexer *current_lexer; LanguageWasmInstance *current_instance; Array(LanguageWasmInstance) language_instances; uint32_t current_memory_offset; uint32_t current_function_table_offset; uint32_t *stdlib_fn_indices; BuiltinFunctionIndices builtin_fn_indices; wasmtime_global_t stack_pointer_global; wasm_globaltype_t *const_i32_type; bool has_error; uint32_t lexer_address; }; typedef Array(char) StringData; // LanguageInWasmMemory - The memory layout of a `TSLanguage` when compiled to // wasm32. This is used to copy static language data out of the wasm memory. 
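// Illustrative note: in a wasm32 module every pointer is just a 32-bit offset
// into the module's linear memory, so the pointer-typed fields of the native
// TSLanguage appear in the struct below as plain int32_t addresses. The host
// turns such an address back into readable bytes by indexing the memory
// directly, roughly (sketch only, `language` being a hypothetical local of
// this struct type):
//
//   uint8_t *memory = wasmtime_memory_data(context, &store->memory);
//   const char *name = (const char *)&memory[language.name];
//
// which is what the copy_string/copy_strings helpers further down do.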
typedef struct { uint32_t abi_version; uint32_t symbol_count; uint32_t alias_count; uint32_t token_count; uint32_t external_token_count; uint32_t state_count; uint32_t large_state_count; uint32_t production_id_count; uint32_t field_count; uint16_t max_alias_sequence_length; int32_t parse_table; int32_t small_parse_table; int32_t small_parse_table_map; int32_t parse_actions; int32_t symbol_names; int32_t field_names; int32_t field_map_slices; int32_t field_map_entries; int32_t symbol_metadata; int32_t public_symbol_map; int32_t alias_map; int32_t alias_sequences; int32_t lex_modes; int32_t lex_fn; int32_t keyword_lex_fn; TSSymbol keyword_capture_token; struct { int32_t states; int32_t symbol_map; int32_t create; int32_t destroy; int32_t scan; int32_t serialize; int32_t deserialize; } external_scanner; int32_t primary_state_ids; int32_t name; int32_t reserved_words; uint16_t max_reserved_word_set_size; uint32_t supertype_count; int32_t supertype_symbols; int32_t supertype_map_slices; int32_t supertype_map_entries; TSLanguageMetadata metadata; } LanguageInWasmMemory; // LexerInWasmMemory - The memory layout of a `TSLexer` when compiled to wasm32. // This is used to copy mutable lexing state in and out of the wasm memory. typedef struct { int32_t lookahead; TSSymbol result_symbol; int32_t advance; int32_t mark_end; int32_t get_column; int32_t is_at_included_range_start; int32_t eof; } LexerInWasmMemory; // Linear memory layout: // [ <-- stack | stdlib statics | lexer | language statics --> | serialization_buffer | heap --> ] #define MAX_MEMORY_SIZE (128 * 1024 * 1024 / MEMORY_PAGE_SIZE) /************************ * WasmDylinkMemoryInfo ***********************/ static uint8_t read_u8(const uint8_t **p) { return *(*p)++; } static inline uint64_t read_uleb128(const uint8_t **p, const uint8_t *end) { uint64_t value = 0; unsigned shift = 0; do { if (*p == end) return UINT64_MAX; value += (uint64_t)(**p & 0x7f) << shift; shift += 7; } while (*((*p)++) >= 128); return value; } static bool wasm_dylink_info__parse( const uint8_t *bytes, size_t length, WasmDylinkInfo *info ) { const uint8_t WASM_MAGIC_NUMBER[4] = {0, 'a', 's', 'm'}; const uint8_t WASM_VERSION[4] = {1, 0, 0, 0}; const uint8_t WASM_CUSTOM_SECTION = 0x0; const uint8_t WASM_DYLINK_MEM_INFO = 0x1; const uint8_t *p = bytes; const uint8_t *end = bytes + length; if (length < 8) return false; if (memcmp(p, WASM_MAGIC_NUMBER, 4) != 0) return false; p += 4; if (memcmp(p, WASM_VERSION, 4) != 0) return false; p += 4; while (p < end) { uint8_t section_id = read_u8(&p); uint32_t section_length = read_uleb128(&p, end); const uint8_t *section_end = p + section_length; if (section_end > end) return false; if (section_id == WASM_CUSTOM_SECTION) { uint32_t name_length = read_uleb128(&p, section_end); const uint8_t *name_end = p + name_length; if (name_end > section_end) return false; if (name_length == 8 && memcmp(p, "dylink.0", 8) == 0) { p = name_end; while (p < section_end) { uint8_t subsection_type = read_u8(&p); uint32_t subsection_size = read_uleb128(&p, section_end); const uint8_t *subsection_end = p + subsection_size; if (subsection_end > section_end) return false; if (subsection_type == WASM_DYLINK_MEM_INFO) { info->memory_size = read_uleb128(&p, subsection_end); info->memory_align = read_uleb128(&p, subsection_end); info->table_size = read_uleb128(&p, subsection_end); info->table_align = read_uleb128(&p, subsection_end); return true; } p = subsection_end; } } } p = section_end; } return false; } /******************************************* * 
Native callbacks exposed to wasm modules *******************************************/ static wasm_trap_t *callback__abort( void *env, wasmtime_caller_t* caller, wasmtime_val_raw_t *args_and_results, size_t args_and_results_len ) { return wasmtime_trap_new("wasm module called abort", 24); } static wasm_trap_t *callback__debug_message( void *env, wasmtime_caller_t* caller, wasmtime_val_raw_t *args_and_results, size_t args_and_results_len ) { wasmtime_context_t *context = wasmtime_caller_context(caller); TSWasmStore *store = env; ts_assert(args_and_results_len == 2); uint32_t string_address = args_and_results[0].i32; uint32_t value = args_and_results[1].i32; uint8_t *memory = wasmtime_memory_data(context, &store->memory); printf("DEBUG: %s %u\n", &memory[string_address], value); return NULL; } static wasm_trap_t *callback__noop( void *env, wasmtime_caller_t* caller, wasmtime_val_raw_t *args_and_results, size_t args_and_results_len ) { return NULL; } static wasm_trap_t *callback__lexer_advance( void *env, wasmtime_caller_t* caller, wasmtime_val_raw_t *args_and_results, size_t args_and_results_len ) { wasmtime_context_t *context = wasmtime_caller_context(caller); ts_assert(args_and_results_len == 2); TSWasmStore *store = env; TSLexer *lexer = store->current_lexer; bool skip = args_and_results[1].i32; lexer->advance(lexer, skip); uint8_t *memory = wasmtime_memory_data(context, &store->memory); memcpy(&memory[store->lexer_address], &lexer->lookahead, sizeof(lexer->lookahead)); return NULL; } static wasm_trap_t *callback__lexer_mark_end( void *env, wasmtime_caller_t* caller, wasmtime_val_raw_t *args_and_results, size_t args_and_results_len ) { TSWasmStore *store = env; TSLexer *lexer = store->current_lexer; lexer->mark_end(lexer); return NULL; } static wasm_trap_t *callback__lexer_get_column( void *env, wasmtime_caller_t* caller, wasmtime_val_raw_t *args_and_results, size_t args_and_results_len ) { TSWasmStore *store = env; TSLexer *lexer = store->current_lexer; uint32_t result = lexer->get_column(lexer); args_and_results[0].i32 = result; return NULL; } static wasm_trap_t *callback__lexer_is_at_included_range_start( void *env, wasmtime_caller_t* caller, wasmtime_val_raw_t *args_and_results, size_t args_and_results_len ) { TSWasmStore *store = env; TSLexer *lexer = store->current_lexer; bool result = lexer->is_at_included_range_start(lexer); args_and_results[0].i32 = result; return NULL; } static wasm_trap_t *callback__lexer_eof( void *env, wasmtime_caller_t* caller, wasmtime_val_raw_t *args_and_results, size_t args_and_results_len ) { TSWasmStore *store = env; TSLexer *lexer = store->current_lexer; bool result = lexer->eof(lexer); args_and_results[0].i32 = result; return NULL; } typedef struct { uint32_t *storage_location; wasmtime_func_unchecked_callback_t callback; wasm_functype_t *type; } FunctionDefinition; static void *copy(const void *data, size_t size) { void *result = ts_malloc(size); memcpy(result, data, size); return result; } static void *copy_unsized_static_array( const uint8_t *data, int32_t start_address, const int32_t all_addresses[], size_t address_count ) { int32_t end_address = 0; for (unsigned i = 0; i < address_count; i++) { if (all_addresses[i] > start_address) { if (!end_address || all_addresses[i] < end_address) { end_address = all_addresses[i]; } } } if (!end_address) return NULL; size_t size = end_address - start_address; void *result = ts_malloc(size); memcpy(result, &data[start_address], size); return result; } static void *copy_strings( const uint8_t *data, int32_t 
array_address, size_t count, StringData *string_data ) { const char **result = ts_malloc(count * sizeof(char *)); for (unsigned i = 0; i < count; i++) { int32_t address; memcpy(&address, &data[array_address + i * sizeof(address)], sizeof(address)); if (address == 0) { result[i] = (const char *)-1; } else { const uint8_t *string = &data[address]; uint32_t len = strlen((const char *)string); result[i] = (const char *)(uintptr_t)string_data->size; array_extend(string_data, len + 1, string); } } for (unsigned i = 0; i < count; i++) { if (result[i] == (const char *)-1) { result[i] = NULL; } else { result[i] = string_data->contents + (uintptr_t)result[i]; } } return result; } static void *copy_string( const uint8_t *data, int32_t address ) { const char *string = (const char *)&data[address]; size_t len = strlen(string); char *result = ts_malloc(len + 1); memcpy(result, string, len + 1); return result; } static bool name_eq(const wasm_name_t *name, const char *string) { return strncmp(string, name->data, name->size) == 0; } static inline wasm_functype_t* wasm_functype_new_4_0( wasm_valtype_t* p1, wasm_valtype_t* p2, wasm_valtype_t* p3, wasm_valtype_t* p4 ) { wasm_valtype_t* ps[4] = {p1, p2, p3, p4}; wasm_valtype_vec_t params, results; wasm_valtype_vec_new(¶ms, 4, ps); wasm_valtype_vec_new_empty(&results); return wasm_functype_new(¶ms, &results); } #define format(output, ...) \ do { \ size_t message_length = snprintf((char *)NULL, 0, __VA_ARGS__); \ *output = ts_malloc(message_length + 1); \ snprintf(*output, message_length + 1, __VA_ARGS__); \ } while (0) WasmLanguageId *language_id_new(void) { WasmLanguageId *self = ts_malloc(sizeof(WasmLanguageId)); self->is_language_deleted = false; self->ref_count = 1; return self; } WasmLanguageId *language_id_clone(WasmLanguageId *self) { atomic_inc(&self->ref_count); return self; } void language_id_delete(WasmLanguageId *self) { if (atomic_dec(&self->ref_count) == 0) { ts_free(self); } } static wasmtime_extern_t get_builtin_extern( wasmtime_table_t *table, unsigned index ) { return (wasmtime_extern_t) { .kind = WASMTIME_EXTERN_FUNC, .of.func = (wasmtime_func_t) { .store_id = table->store_id, .__private = index } }; } static bool ts_wasm_store__provide_builtin_import( TSWasmStore *self, const wasm_name_t *import_name, wasmtime_extern_t *import ) { wasmtime_error_t *error = NULL; wasmtime_context_t *context = wasmtime_store_context(self->store); // Dynamic linking parameters if (name_eq(import_name, "__memory_base")) { wasmtime_val_t value = WASM_I32_VAL(self->current_memory_offset); wasmtime_global_t global; error = wasmtime_global_new(context, self->const_i32_type, &value, &global); ts_assert(!error); *import = (wasmtime_extern_t) {.kind = WASMTIME_EXTERN_GLOBAL, .of.global = global}; } else if (name_eq(import_name, "__table_base")) { wasmtime_val_t value = WASM_I32_VAL(self->current_function_table_offset); wasmtime_global_t global; error = wasmtime_global_new(context, self->const_i32_type, &value, &global); ts_assert(!error); *import = (wasmtime_extern_t) {.kind = WASMTIME_EXTERN_GLOBAL, .of.global = global}; } else if (name_eq(import_name, "__stack_pointer")) { *import = (wasmtime_extern_t) {.kind = WASMTIME_EXTERN_GLOBAL, .of.global = self->stack_pointer_global}; } else if (name_eq(import_name, "__indirect_function_table")) { *import = (wasmtime_extern_t) {.kind = WASMTIME_EXTERN_TABLE, .of.table = self->function_table}; } else if (name_eq(import_name, "memory")) { *import = (wasmtime_extern_t) {.kind = WASMTIME_EXTERN_MEMORY, .of.memory = 
self->memory}; } // Builtin functions else if (name_eq(import_name, "__assert_fail")) { *import = get_builtin_extern(&self->function_table, self->builtin_fn_indices.assert_fail); } else if (name_eq(import_name, "__cxa_atexit")) { *import = get_builtin_extern(&self->function_table, self->builtin_fn_indices.at_exit); } else if (name_eq(import_name, "args_get")) { *import = get_builtin_extern(&self->function_table, self->builtin_fn_indices.args_get); } else if (name_eq(import_name, "args_sizes_get")) { *import = get_builtin_extern(&self->function_table, self->builtin_fn_indices.args_sizes_get); } else if (name_eq(import_name, "abort")) { *import = get_builtin_extern(&self->function_table, self->builtin_fn_indices.abort); } else if (name_eq(import_name, "proc_exit")) { *import = get_builtin_extern(&self->function_table, self->builtin_fn_indices.proc_exit); } else if (name_eq(import_name, "emscripten_notify_memory_growth")) { *import = get_builtin_extern(&self->function_table, self->builtin_fn_indices.notify_memory_growth); } else if (name_eq(import_name, "tree_sitter_debug_message")) { *import = get_builtin_extern(&self->function_table, self->builtin_fn_indices.debug_message); } else { return false; } return true; } static bool ts_wasm_store__call_module_initializer( TSWasmStore *self, const wasm_name_t *export_name, wasmtime_extern_t *export, wasm_trap_t **trap ) { if ( name_eq(export_name, "_initialize") || name_eq(export_name, "__wasm_apply_data_relocs") || name_eq(export_name, "__wasm_call_ctors") ) { wasmtime_context_t *context = wasmtime_store_context(self->store); wasmtime_func_t initialization_func = export->of.func; wasmtime_error_t *error = wasmtime_func_call(context, &initialization_func, NULL, 0, NULL, 0, trap); ts_assert(!error); return true; } else { return false; } } TSWasmStore *ts_wasm_store_new(TSWasmEngine *engine, TSWasmError *wasm_error) { TSWasmStore *self = ts_calloc(1, sizeof(TSWasmStore)); wasmtime_store_t *store = wasmtime_store_new(engine, self, NULL); wasmtime_context_t *context = wasmtime_store_context(store); wasmtime_error_t *error = NULL; wasm_trap_t *trap = NULL; wasm_message_t message = WASM_EMPTY_VEC; wasm_exporttype_vec_t export_types = WASM_EMPTY_VEC; wasm_importtype_vec_t import_types = WASM_EMPTY_VEC; wasmtime_extern_t *imports = NULL; wasmtime_module_t *stdlib_module = NULL; wasm_memorytype_t *memory_type = NULL; wasm_tabletype_t *table_type = NULL; // Define functions called by scanners via function pointers on the lexer. LexerInWasmMemory lexer = { .lookahead = 0, .result_symbol = 0, }; FunctionDefinition lexer_definitions[] = { { (uint32_t *)&lexer.advance, callback__lexer_advance, wasm_functype_new_2_0(wasm_valtype_new_i32(), wasm_valtype_new_i32()) }, { (uint32_t *)&lexer.mark_end, callback__lexer_mark_end, wasm_functype_new_1_0(wasm_valtype_new_i32()) }, { (uint32_t *)&lexer.get_column, callback__lexer_get_column, wasm_functype_new_1_1(wasm_valtype_new_i32(), wasm_valtype_new_i32()) }, { (uint32_t *)&lexer.is_at_included_range_start, callback__lexer_is_at_included_range_start, wasm_functype_new_1_1(wasm_valtype_new_i32(), wasm_valtype_new_i32()) }, { (uint32_t *)&lexer.eof, callback__lexer_eof, wasm_functype_new_1_1(wasm_valtype_new_i32(), wasm_valtype_new_i32()) }, }; // Define builtin functions that can be imported by scanners. 
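// Illustrative note: each FunctionDefinition row below binds one host
// callback to the exact wasm signature the module expects to import (for
// example, __assert_fail takes four i32 parameters, hence
// wasm_functype_new_4_0 with four i32 valtypes). The callbacks are registered
// with wasmtime further down, and their raw function indices are written
// through `storage_location` so they can be handed out later as imports via
// ts_wasm_store__provide_builtin_import above.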
BuiltinFunctionIndices builtin_fn_indices; FunctionDefinition builtin_definitions[] = { { &builtin_fn_indices.proc_exit, callback__abort, wasm_functype_new_1_0(wasm_valtype_new_i32()) }, { &builtin_fn_indices.abort, callback__abort, wasm_functype_new_0_0() }, { &builtin_fn_indices.assert_fail, callback__abort, wasm_functype_new_4_0(wasm_valtype_new_i32(), wasm_valtype_new_i32(), wasm_valtype_new_i32(), wasm_valtype_new_i32()) }, { &builtin_fn_indices.notify_memory_growth, callback__noop, wasm_functype_new_1_0(wasm_valtype_new_i32()) }, { &builtin_fn_indices.debug_message, callback__debug_message, wasm_functype_new_2_0(wasm_valtype_new_i32(), wasm_valtype_new_i32()) }, { &builtin_fn_indices.at_exit, callback__noop, wasm_functype_new_3_1(wasm_valtype_new_i32(), wasm_valtype_new_i32(), wasm_valtype_new_i32(), wasm_valtype_new_i32()) }, { &builtin_fn_indices.args_get, callback__noop, wasm_functype_new_2_1(wasm_valtype_new_i32(), wasm_valtype_new_i32(), wasm_valtype_new_i32()) }, { &builtin_fn_indices.args_sizes_get, callback__noop, wasm_functype_new_2_1(wasm_valtype_new_i32(), wasm_valtype_new_i32(), wasm_valtype_new_i32()) }, }; // Create all of the wasm functions. unsigned builtin_definitions_len = array_len(builtin_definitions); unsigned lexer_definitions_len = array_len(lexer_definitions); for (unsigned i = 0; i < builtin_definitions_len; i++) { FunctionDefinition *definition = &builtin_definitions[i]; wasmtime_func_t func; wasmtime_func_new_unchecked(context, definition->type, definition->callback, self, NULL, &func); *definition->storage_location = func.__private; wasm_functype_delete(definition->type); } for (unsigned i = 0; i < lexer_definitions_len; i++) { FunctionDefinition *definition = &lexer_definitions[i]; wasmtime_func_t func; wasmtime_func_new_unchecked(context, definition->type, definition->callback, self, NULL, &func); *definition->storage_location = func.__private; wasm_functype_delete(definition->type); } // Compile the stdlib module. error = wasmtime_module_new(engine, STDLIB_WASM, STDLIB_WASM_LEN, &stdlib_module); if (error) { wasmtime_error_message(error, &message); wasm_error->kind = TSWasmErrorKindCompile; format( &wasm_error->message, "failed to compile wasm stdlib: %.*s", (int)message.size, message.data ); goto error; } // Retrieve the stdlib module's imports. wasmtime_module_imports(stdlib_module, &import_types); // Find the initial number of memory pages needed by the stdlib. 
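// Illustrative note (assumes MEMORY_PAGE_SIZE is the standard 64 KiB wasm
// page size, defined elsewhere in this bundle): memory limits here are
// expressed in whole pages, so MAX_MEMORY_SIZE above works out to
// 128 MiB / 64 KiB = 2048 pages, and `initial_memory_pages` below is simply
// the minimum the stdlib module declares for its imported "memory".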
const wasm_memorytype_t *stdlib_memory_type = NULL; for (unsigned i = 0; i < import_types.size; i++) { wasm_importtype_t *import_type = import_types.data[i]; const wasm_name_t *import_name = wasm_importtype_name(import_type); if (name_eq(import_name, "memory")) { const wasm_externtype_t *type = wasm_importtype_type(import_type); stdlib_memory_type = wasm_externtype_as_memorytype_const(type); } } if (!stdlib_memory_type) { wasm_error->kind = TSWasmErrorKindCompile; format( &wasm_error->message, "wasm stdlib is missing the 'memory' import" ); goto error; } // Initialize store's memory uint64_t initial_memory_pages = wasmtime_memorytype_minimum(stdlib_memory_type); wasm_limits_t memory_limits = {.min = initial_memory_pages, .max = MAX_MEMORY_SIZE}; memory_type = wasm_memorytype_new(&memory_limits); wasmtime_memory_t memory; error = wasmtime_memory_new(context, memory_type, &memory); if (error) { wasmtime_error_message(error, &message); wasm_error->kind = TSWasmErrorKindAllocate; format( &wasm_error->message, "failed to allocate wasm memory: %.*s", (int)message.size, message.data ); goto error; } wasm_memorytype_delete(memory_type); memory_type = NULL; // Initialize store's function table wasm_limits_t table_limits = {.min = 1, .max = wasm_limits_max_default}; table_type = wasm_tabletype_new(wasm_valtype_new(WASM_FUNCREF), &table_limits); wasmtime_val_t initializer = {.kind = WASMTIME_FUNCREF}; wasmtime_table_t function_table; error = wasmtime_table_new(context, table_type, &initializer, &function_table); if (error) { wasmtime_error_message(error, &message); wasm_error->kind = TSWasmErrorKindAllocate; format( &wasm_error->message, "failed to allocate wasm table: %.*s", (int)message.size, message.data ); goto error; } wasm_tabletype_delete(table_type); table_type = NULL; unsigned stdlib_symbols_len = array_len(STDLIB_SYMBOLS); // Define globals for the stack and heap start addresses. wasm_globaltype_t *const_i32_type = wasm_globaltype_new(wasm_valtype_new_i32(), WASM_CONST); wasm_globaltype_t *var_i32_type = wasm_globaltype_new(wasm_valtype_new_i32(), WASM_VAR); wasmtime_val_t stack_pointer_value = WASM_I32_VAL(0); wasmtime_global_t stack_pointer_global; error = wasmtime_global_new(context, var_i32_type, &stack_pointer_value, &stack_pointer_global); ts_assert(!error); *self = (TSWasmStore) { .engine = wasmtime_engine_clone(engine), .store = store, .memory = memory, .function_table = function_table, .language_instances = array_new(), .stdlib_fn_indices = ts_calloc(stdlib_symbols_len, sizeof(uint32_t)), .builtin_fn_indices = builtin_fn_indices, .stack_pointer_global = stack_pointer_global, .current_memory_offset = 0, .current_function_table_offset = 0, .const_i32_type = const_i32_type, }; // Set up the imports for the stdlib module. imports = ts_calloc(import_types.size, sizeof(wasmtime_extern_t)); for (unsigned i = 0; i < import_types.size; i++) { wasm_importtype_t *type = import_types.data[i]; const wasm_name_t *import_name = wasm_importtype_name(type); if (!ts_wasm_store__provide_builtin_import(self, import_name, &imports[i])) { wasm_error->kind = TSWasmErrorKindInstantiate; format( &wasm_error->message, "unexpected import in wasm stdlib: %.*s\n", (int)import_name->size, import_name->data ); goto error; } } // Instantiate the stdlib module. 
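// Illustrative note: the `imports` array built above is positional - entry i
// answers import i as reported by wasmtime_module_imports() - which is the
// ordering wasmtime_instance_new expects. Instantiation can then fail in two
// distinct ways, and both are reported as TSWasmErrorKindInstantiate below:
// an `error` (the module could not be instantiated at all) or a `trap` (a
// start/constructor function aborted while running).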
wasmtime_instance_t instance; error = wasmtime_instance_new(context, stdlib_module, imports, import_types.size, &instance, &trap); ts_free(imports); imports = NULL; if (error) { wasmtime_error_message(error, &message); wasm_error->kind = TSWasmErrorKindInstantiate; format( &wasm_error->message, "failed to instantiate wasm stdlib module: %.*s", (int)message.size, message.data ); goto error; } if (trap) { wasm_trap_message(trap, &message); wasm_error->kind = TSWasmErrorKindInstantiate; format( &wasm_error->message, "trapped when instantiating wasm stdlib module: %.*s", (int)message.size, message.data ); goto error; } wasm_importtype_vec_delete(&import_types); // Process the stdlib module's exports. for (unsigned i = 0; i < stdlib_symbols_len; i++) { self->stdlib_fn_indices[i] = UINT32_MAX; } wasmtime_module_exports(stdlib_module, &export_types); for (unsigned i = 0; i < export_types.size; i++) { wasm_exporttype_t *export_type = export_types.data[i]; const wasm_name_t *name = wasm_exporttype_name(export_type); char *export_name; size_t name_len; wasmtime_extern_t export = {.kind = WASM_EXTERN_GLOBAL}; bool exists = wasmtime_instance_export_nth(context, &instance, i, &export_name, &name_len, &export); ts_assert(exists); if (export.kind == WASMTIME_EXTERN_GLOBAL) { if (name_eq(name, "__stack_pointer")) { self->stack_pointer_global = export.of.global; } } if (export.kind == WASMTIME_EXTERN_FUNC) { if (ts_wasm_store__call_module_initializer(self, name, &export, &trap)) { if (trap) { wasm_trap_message(trap, &message); wasm_error->kind = TSWasmErrorKindInstantiate; format( &wasm_error->message, "trap when calling stdlib relocation function: %.*s\n", (int)message.size, message.data ); goto error; } continue; } if (name_eq(name, "reset_heap")) { self->builtin_fn_indices.reset_heap = export.of.func.__private; continue; } for (unsigned j = 0; j < stdlib_symbols_len; j++) { if (name_eq(name, STDLIB_SYMBOLS[j])) { self->stdlib_fn_indices[j] = export.of.func.__private; break; } } } } if (self->builtin_fn_indices.reset_heap == UINT32_MAX) { wasm_error->kind = TSWasmErrorKindInstantiate; format( &wasm_error->message, "missing malloc reset function in wasm stdlib" ); goto error; } for (unsigned i = 0; i < stdlib_symbols_len; i++) { if (self->stdlib_fn_indices[i] == UINT32_MAX) { wasm_error->kind = TSWasmErrorKindInstantiate; format( &wasm_error->message, "missing exported symbol in wasm stdlib: %s", STDLIB_SYMBOLS[i] ); goto error; } } wasm_exporttype_vec_delete(&export_types); wasmtime_module_delete(stdlib_module); // Add all of the lexer callback functions to the function table. Store their function table // indices on the in-memory lexer. 
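// Rough idea of the step below: the host-side lexer callbacks (created earlier from
// `lexer_definitions`) cannot be handed to wasm code as raw pointers, so each one is
// appended to the shared funcref table and its table index is written back over
// `definition->storage_location`. Wasm-compiled scanners then reach the host lexer
// indirectly through those indices, which are also copied into linear memory as part
// of the in-memory lexer struct.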
uint64_t table_index; error = wasmtime_table_grow(context, &function_table, lexer_definitions_len, &initializer, &table_index); if (error) { wasmtime_error_message(error, &message); wasm_error->kind = TSWasmErrorKindAllocate; format( &wasm_error->message, "failed to grow wasm table to initial size: %.*s", (int)message.size, message.data ); goto error; } for (unsigned i = 0; i < lexer_definitions_len; i++) { FunctionDefinition *definition = &lexer_definitions[i]; wasmtime_func_t func = {function_table.store_id, *definition->storage_location}; wasmtime_val_t func_val = {.kind = WASMTIME_FUNCREF, .of.funcref = func}; error = wasmtime_table_set(context, &function_table, table_index, &func_val); ts_assert(!error); *(int32_t *)(definition->storage_location) = table_index; table_index++; } self->current_function_table_offset = table_index; self->lexer_address = initial_memory_pages * MEMORY_PAGE_SIZE; self->current_memory_offset = self->lexer_address + sizeof(LexerInWasmMemory); // Grow the memory enough to hold the builtin lexer and serialization buffer. uint32_t new_pages_needed = (self->current_memory_offset - self->lexer_address - 1) / MEMORY_PAGE_SIZE + 1; uint64_t prev_memory_size; wasmtime_memory_grow(context, &memory, new_pages_needed, &prev_memory_size); uint8_t *memory_data = wasmtime_memory_data(context, &memory); memcpy(&memory_data[self->lexer_address], &lexer, sizeof(lexer)); return self; error: ts_free(self); if (stdlib_module) wasmtime_module_delete(stdlib_module); if (store) wasmtime_store_delete(store); if (import_types.size) wasm_importtype_vec_delete(&import_types); if (memory_type) wasm_memorytype_delete(memory_type); if (table_type) wasm_tabletype_delete(table_type); if (trap) wasm_trap_delete(trap); if (error) wasmtime_error_delete(error); if (message.size) wasm_byte_vec_delete(&message); if (export_types.size) wasm_exporttype_vec_delete(&export_types); if (imports) ts_free(imports); return NULL; } void ts_wasm_store_delete(TSWasmStore *self) { if (!self) return; ts_free(self->stdlib_fn_indices); wasm_globaltype_delete(self->const_i32_type); wasmtime_store_delete(self->store); wasm_engine_delete(self->engine); for (unsigned i = 0; i < self->language_instances.size; i++) { LanguageWasmInstance *instance = array_get(&self->language_instances, i); language_id_delete(instance->language_id); } array_delete(&self->language_instances); ts_free(self); } size_t ts_wasm_store_language_count(const TSWasmStore *self) { size_t result = 0; for (unsigned i = 0; i < self->language_instances.size; i++) { const WasmLanguageId *id = array_get(&self->language_instances, i)->language_id; if (!id->is_language_deleted) { result++; } } return result; } static uint32_t ts_wasm_store__heap_address(TSWasmStore *self) { return self->current_memory_offset + TREE_SITTER_SERIALIZATION_BUFFER_SIZE; } static uint32_t ts_wasm_store__serialization_buffer_address(TSWasmStore *self) { return self->current_memory_offset; } static bool ts_wasm_store__instantiate( TSWasmStore *self, wasmtime_module_t *module, const char *language_name, const WasmDylinkInfo *dylink_info, wasmtime_instance_t *result, int32_t *language_address, char **error_message ) { wasmtime_error_t *error = NULL; wasm_trap_t *trap = NULL; wasm_message_t message = WASM_EMPTY_VEC; char *language_function_name = NULL; wasmtime_extern_t *imports = NULL; wasmtime_context_t *context = wasmtime_store_context(self->store); // Grow the function table to make room for the new functions. 
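// `dylink_info->table_size` / `memory_size` come from the module's dylink custom
// section (see wasm_dylink_info__parse), the mechanism emscripten-style dynamic
// libraries use to advertise how many function-table slots and how many bytes of
// linear memory they need; the shared table and memory are grown accordingly before
// instantiation.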
wasmtime_val_t initializer = {.kind = WASMTIME_FUNCREF}; uint64_t prev_table_size; error = wasmtime_table_grow(context, &self->function_table, dylink_info->table_size, &initializer, &prev_table_size); if (error) { format(error_message, "invalid function table size %u", dylink_info->table_size); goto error; } // Grow the memory to make room for the new data. uint32_t needed_memory_size = ts_wasm_store__heap_address(self) + dylink_info->memory_size; uint32_t current_memory_size = wasmtime_memory_data_size(context, &self->memory); if (needed_memory_size > current_memory_size) { uint32_t pages_to_grow = ( needed_memory_size - current_memory_size + MEMORY_PAGE_SIZE - 1) / MEMORY_PAGE_SIZE; uint64_t prev_memory_size; error = wasmtime_memory_grow(context, &self->memory, pages_to_grow, &prev_memory_size); if (error) { format(error_message, "invalid memory size %u", dylink_info->memory_size); goto error; } } // Construct the language function name as string. format(&language_function_name, "tree_sitter_%s", language_name); const uint64_t store_id = self->function_table.store_id; // Build the imports list for the module. wasm_importtype_vec_t import_types = WASM_EMPTY_VEC; wasmtime_module_imports(module, &import_types); imports = ts_calloc(import_types.size, sizeof(wasmtime_extern_t)); for (unsigned i = 0; i < import_types.size; i++) { const wasm_importtype_t *import_type = import_types.data[i]; const wasm_name_t *import_name = wasm_importtype_name(import_type); if (import_name->size == 0) { format(error_message, "empty import name"); goto error; } if (ts_wasm_store__provide_builtin_import(self, import_name, &imports[i])) { continue; } bool defined_in_stdlib = false; for (unsigned j = 0; j < array_len(STDLIB_SYMBOLS); j++) { if (name_eq(import_name, STDLIB_SYMBOLS[j])) { uint16_t address = self->stdlib_fn_indices[j]; imports[i] = (wasmtime_extern_t) {.kind = WASMTIME_EXTERN_FUNC, .of.func = {store_id, address}}; defined_in_stdlib = true; break; } } if (!defined_in_stdlib) { format( error_message, "invalid import '%.*s'\n", (int)import_name->size, import_name->data ); goto error; } } wasmtime_instance_t instance; error = wasmtime_instance_new(context, module, imports, import_types.size, &instance, &trap); wasm_importtype_vec_delete(&import_types); ts_free(imports); imports = NULL; if (error) { wasmtime_error_message(error, &message); format( error_message, "error instantiating wasm module: %.*s\n", (int)message.size, message.data ); goto error; } if (trap) { wasm_trap_message(trap, &message); format( error_message, "trap when instantiating wasm module: %.*s\n", (int)message.size, message.data ); goto error; } self->current_memory_offset += dylink_info->memory_size; self->current_function_table_offset += dylink_info->table_size; // Process the module's exports. bool found_language = false; wasmtime_extern_t language_extern; wasm_exporttype_vec_t export_types = WASM_EMPTY_VEC; wasmtime_module_exports(module, &export_types); for (unsigned i = 0; i < export_types.size; i++) { wasm_exporttype_t *export_type = export_types.data[i]; const wasm_name_t *name = wasm_exporttype_name(export_type); size_t name_len; char *export_name; wasmtime_extern_t export = {.kind = WASM_EXTERN_GLOBAL}; bool exists = wasmtime_instance_export_nth(context, &instance, i, &export_name, &name_len, &export); ts_assert(exists); // If the module exports an initialization or data-relocation function, call it. 
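// Presumably this covers the conventional wasm-ld/emscripten initializer exports
// (e.g. `__wasm_call_ctors`, `__wasm_apply_data_relocs`), which have to run once after
// instantiation so the module's static data is relocated to its final addresses within
// the shared memory.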
if (ts_wasm_store__call_module_initializer(self, name, &export, &trap)) { if (trap) { wasm_trap_message(trap, &message); format( error_message, "trap when calling data relocation function: %.*s\n", (int)message.size, message.data ); goto error; } } // Find the main language function for the module. else if (name_eq(name, language_function_name)) { language_extern = export; found_language = true; } } wasm_exporttype_vec_delete(&export_types); if (!found_language) { format( error_message, "module did not contain language function: %s", language_function_name ); goto error; } // Invoke the language function to get the static address of the language object. wasmtime_func_t language_func = language_extern.of.func; wasmtime_val_t language_address_val; error = wasmtime_func_call(context, &language_func, NULL, 0, &language_address_val, 1, &trap); ts_assert(!error); if (trap) { wasm_trap_message(trap, &message); format( error_message, "trapped when calling language function: %s: %.*s\n", language_function_name, (int)message.size, message.data ); goto error; } if (language_address_val.kind != WASMTIME_I32) { format( error_message, "language function did not return an integer: %s\n", language_function_name ); goto error; } ts_free(language_function_name); *result = instance; *language_address = language_address_val.of.i32; return true; error: if (language_function_name) ts_free(language_function_name); if (message.size) wasm_byte_vec_delete(&message); if (error) wasmtime_error_delete(error); if (trap) wasm_trap_delete(trap); if (imports) ts_free(imports); return false; } static bool ts_wasm_store__sentinel_lex_fn(TSLexer *_lexer, TSStateId state) { return false; } const TSLanguage *ts_wasm_store_load_language( TSWasmStore *self, const char *language_name, const char *wasm, uint32_t wasm_len, TSWasmError *wasm_error ) { WasmDylinkInfo dylink_info; wasmtime_module_t *module = NULL; wasmtime_error_t *error = NULL; wasm_error->kind = TSWasmErrorKindNone; if (!wasm_dylink_info__parse((const unsigned char *)wasm, wasm_len, &dylink_info)) { wasm_error->kind = TSWasmErrorKindParse; format(&wasm_error->message, "failed to parse dylink section of wasm module"); goto error; } // Compile the wasm code. error = wasmtime_module_new(self->engine, (const uint8_t *)wasm, wasm_len, &module); if (error) { wasm_message_t message; wasmtime_error_message(error, &message); wasm_error->kind = TSWasmErrorKindCompile; format(&wasm_error->message, "error compiling wasm module: %.*s", (int)message.size, message.data); wasm_byte_vec_delete(&message); goto error; } // Instantiate the module in this store. wasmtime_instance_t instance; int32_t language_address; if (!ts_wasm_store__instantiate( self, module, language_name, &dylink_info, &instance, &language_address, &wasm_error->message )) { wasm_error->kind = TSWasmErrorKindInstantiate; goto error; } // Copy all of the static data out of the language object in wasm memory, // constructing a native language object. 
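// Sketch of the copy below: `addresses[]` gathers every pointer-valued field of the
// wasm-side language struct, plus the struct's own address and the end of the store's
// data region. For arrays whose length is not recorded anywhere,
// copy_unsized_static_array appears to use the next-largest address in this list as an
// upper bound for how many bytes to copy out of wasm memory.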
LanguageInWasmMemory wasm_language; wasmtime_context_t *context = wasmtime_store_context(self->store); const uint8_t *memory = wasmtime_memory_data(context, &self->memory); memcpy(&wasm_language, &memory[language_address], sizeof(LanguageInWasmMemory)); bool has_supertypes = wasm_language.abi_version > LANGUAGE_VERSION_WITH_RESERVED_WORDS && wasm_language.supertype_count > 0; int32_t addresses[] = { wasm_language.parse_table, wasm_language.small_parse_table, wasm_language.small_parse_table_map, wasm_language.parse_actions, wasm_language.symbol_names, wasm_language.field_names, wasm_language.field_map_slices, wasm_language.field_map_entries, wasm_language.symbol_metadata, wasm_language.public_symbol_map, wasm_language.alias_map, wasm_language.alias_sequences, wasm_language.lex_modes, wasm_language.lex_fn, wasm_language.keyword_lex_fn, wasm_language.primary_state_ids, wasm_language.name, wasm_language.reserved_words, has_supertypes ? wasm_language.supertype_symbols : 0, has_supertypes ? wasm_language.supertype_map_entries : 0, has_supertypes ? wasm_language.supertype_map_slices : 0, wasm_language.external_token_count > 0 ? wasm_language.external_scanner.states : 0, wasm_language.external_token_count > 0 ? wasm_language.external_scanner.symbol_map : 0, wasm_language.external_token_count > 0 ? wasm_language.external_scanner.create : 0, wasm_language.external_token_count > 0 ? wasm_language.external_scanner.destroy : 0, wasm_language.external_token_count > 0 ? wasm_language.external_scanner.scan : 0, wasm_language.external_token_count > 0 ? wasm_language.external_scanner.serialize : 0, wasm_language.external_token_count > 0 ? wasm_language.external_scanner.deserialize : 0, language_address, self->current_memory_offset, }; uint32_t address_count = array_len(addresses); TSLanguage *language = ts_calloc(1, sizeof(TSLanguage)); StringData symbol_name_buffer = array_new(); StringData field_name_buffer = array_new(); *language = (TSLanguage) { .abi_version = wasm_language.abi_version, .symbol_count = wasm_language.symbol_count, .alias_count = wasm_language.alias_count, .token_count = wasm_language.token_count, .external_token_count = wasm_language.external_token_count, .state_count = wasm_language.state_count, .large_state_count = wasm_language.large_state_count, .production_id_count = wasm_language.production_id_count, .field_count = wasm_language.field_count, .supertype_count = wasm_language.supertype_count, .max_alias_sequence_length = wasm_language.max_alias_sequence_length, .keyword_capture_token = wasm_language.keyword_capture_token, .metadata = wasm_language.metadata, .parse_table = copy( &memory[wasm_language.parse_table], wasm_language.large_state_count * wasm_language.symbol_count * sizeof(uint16_t) ), .parse_actions = copy_unsized_static_array( memory, wasm_language.parse_actions, addresses, address_count ), .symbol_names = copy_strings( memory, wasm_language.symbol_names, wasm_language.symbol_count + wasm_language.alias_count, &symbol_name_buffer ), .symbol_metadata = copy( &memory[wasm_language.symbol_metadata], (wasm_language.symbol_count + wasm_language.alias_count) * sizeof(TSSymbolMetadata) ), .public_symbol_map = copy( &memory[wasm_language.public_symbol_map], (wasm_language.symbol_count + wasm_language.alias_count) * sizeof(TSSymbol) ), .lex_modes = copy( &memory[wasm_language.lex_modes], wasm_language.state_count * sizeof(TSLexerMode) ), }; if (language->field_count > 0 && language->production_id_count > 0) { language->field_map_slices = copy( 
&memory[wasm_language.field_map_slices], wasm_language.production_id_count * sizeof(TSMapSlice) ); // Determine the number of field map entries by finding the greatest index // in any of the slices. uint32_t field_map_entry_count = 0; for (uint32_t i = 0; i < wasm_language.production_id_count; i++) { TSMapSlice slice = language->field_map_slices[i]; uint32_t slice_end = slice.index + slice.length; if (slice_end > field_map_entry_count) { field_map_entry_count = slice_end; } } language->field_map_entries = copy( &memory[wasm_language.field_map_entries], field_map_entry_count * sizeof(TSFieldMapEntry) ); language->field_names = copy_strings( memory, wasm_language.field_names, wasm_language.field_count + 1, &field_name_buffer ); } if (has_supertypes) { language->supertype_symbols = copy( &memory[wasm_language.supertype_symbols], wasm_language.supertype_count * sizeof(TSSymbol) ); // Determine the number of supertype map slices by finding the greatest // supertype ID. int largest_supertype = 0; for (unsigned i = 0; i < language->supertype_count; i++) { TSSymbol supertype = language->supertype_symbols[i]; if (supertype > largest_supertype) { largest_supertype = supertype; } } language->supertype_map_slices = copy( &memory[wasm_language.supertype_map_slices], (largest_supertype + 1) * sizeof(TSMapSlice) ); TSSymbol last_supertype = language->supertype_symbols[language->supertype_count - 1]; TSMapSlice last_slice = language->supertype_map_slices[last_supertype]; uint32_t supertype_map_entry_count = last_slice.index + last_slice.length; language->supertype_map_entries = copy( &memory[wasm_language.supertype_map_entries], supertype_map_entry_count * sizeof(char *) ); } if (language->max_alias_sequence_length > 0 && language->production_id_count > 0) { // The alias map contains symbols, alias counts, and aliases, terminated by a null symbol. 
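// Illustrative layout, matching the description above (each element is one 16-bit
// TSSymbol slot):
//
//   [symbol, value_count, value_1, ..., value_n, symbol, value_count, ..., 0]
//
// The loop below walks entries until it reads the terminating zero symbol and then
// copies the table out of wasm memory.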
int32_t alias_map_size = 0; for (;;) { TSSymbol symbol; memcpy(&symbol, &memory[wasm_language.alias_map + alias_map_size], sizeof(symbol)); alias_map_size += sizeof(TSSymbol); if (symbol == 0) break; uint16_t value_count; memcpy(&value_count, &memory[wasm_language.alias_map + alias_map_size], sizeof(value_count)); alias_map_size += value_count * sizeof(TSSymbol); } language->alias_map = copy( &memory[wasm_language.alias_map], alias_map_size * sizeof(TSSymbol) ); language->alias_sequences = copy( &memory[wasm_language.alias_sequences], wasm_language.production_id_count * wasm_language.max_alias_sequence_length * sizeof(TSSymbol) ); } if (language->state_count > language->large_state_count) { uint32_t small_state_count = wasm_language.state_count - wasm_language.large_state_count; language->small_parse_table_map = copy( &memory[wasm_language.small_parse_table_map], small_state_count * sizeof(uint32_t) ); language->small_parse_table = copy_unsized_static_array( memory, wasm_language.small_parse_table, addresses, address_count ); } if (language->abi_version >= LANGUAGE_VERSION_WITH_PRIMARY_STATES) { language->primary_state_ids = copy( &memory[wasm_language.primary_state_ids], wasm_language.state_count * sizeof(TSStateId) ); } if (language->abi_version >= LANGUAGE_VERSION_WITH_RESERVED_WORDS) { language->name = copy_string(memory, wasm_language.name); language->reserved_words = copy( &memory[wasm_language.reserved_words], wasm_language.max_reserved_word_set_size * sizeof(TSSymbol) ); language->max_reserved_word_set_size = wasm_language.max_reserved_word_set_size; } if (language->external_token_count > 0) { language->external_scanner.symbol_map = copy( &memory[wasm_language.external_scanner.symbol_map], wasm_language.external_token_count * sizeof(TSSymbol) ); language->external_scanner.states = (void *)(uintptr_t)wasm_language.external_scanner.states; } unsigned name_len = strlen(language_name); char *name = ts_malloc(name_len + 1); memcpy(name, language_name, name_len); name[name_len] = '\0'; LanguageWasmModule *language_module = ts_malloc(sizeof(LanguageWasmModule)); *language_module = (LanguageWasmModule) { .language_id = language_id_new(), .module = module, .name = name, .symbol_name_buffer = symbol_name_buffer.contents, .field_name_buffer = field_name_buffer.contents, .dylink_info = dylink_info, .ref_count = 1, }; // The lex functions are not used for wasm languages. Use those two fields // to mark this language as WASM-based and to store the language's // WASM-specific data. language->lex_fn = ts_wasm_store__sentinel_lex_fn; language->keyword_lex_fn = (bool (*)(TSLexer *, TSStateId))language_module; // Clear out any instances of languages that have been deleted. for (unsigned i = 0; i < self->language_instances.size; i++) { WasmLanguageId *id = array_get(&self->language_instances, i)->language_id; if (id->is_language_deleted) { language_id_delete(id); array_erase(&self->language_instances, i); i--; } } // Store this store's instance of this language module. 
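// The LanguageWasmInstance pushed below caches, per store, the function-table indices
// of this language's lex and external-scanner entry points. Later calls (see
// ts_wasm_store__call) only need such an index: they resolve it with
// wasmtime_table_get and invoke the function, without re-instantiating the module.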
array_push(&self->language_instances, ((LanguageWasmInstance) { .language_id = language_id_clone(language_module->language_id), .instance = instance, .external_states_address = wasm_language.external_scanner.states, .lex_main_fn_index = wasm_language.lex_fn, .lex_keyword_fn_index = wasm_language.keyword_lex_fn, .scanner_create_fn_index = wasm_language.external_scanner.create, .scanner_destroy_fn_index = wasm_language.external_scanner.destroy, .scanner_serialize_fn_index = wasm_language.external_scanner.serialize, .scanner_deserialize_fn_index = wasm_language.external_scanner.deserialize, .scanner_scan_fn_index = wasm_language.external_scanner.scan, })); return language; error: if (module) wasmtime_module_delete(module); return NULL; } bool ts_wasm_store_add_language( TSWasmStore *self, const TSLanguage *language, uint32_t *index ) { wasmtime_context_t *context = wasmtime_store_context(self->store); const LanguageWasmModule *language_module = (void *)language->keyword_lex_fn; // Search for this store's instance of the language module. Also clear out any // instances of languages that have been deleted. bool exists = false; for (unsigned i = 0; i < self->language_instances.size; i++) { WasmLanguageId *id = array_get(&self->language_instances, i)->language_id; if (id->is_language_deleted) { language_id_delete(id); array_erase(&self->language_instances, i); i--; } else if (id == language_module->language_id) { exists = true; *index = i; } } // If the language module has not been instantiated in this store, then add // it to this store. if (!exists) { *index = self->language_instances.size; char *message; wasmtime_instance_t instance; int32_t language_address; if (!ts_wasm_store__instantiate( self, language_module->module, language_module->name, &language_module->dylink_info, &instance, &language_address, &message )) { ts_free(message); return false; } LanguageInWasmMemory wasm_language; const uint8_t *memory = wasmtime_memory_data(context, &self->memory); memcpy(&wasm_language, &memory[language_address], sizeof(LanguageInWasmMemory)); array_push(&self->language_instances, ((LanguageWasmInstance) { .language_id = language_id_clone(language_module->language_id), .instance = instance, .external_states_address = wasm_language.external_scanner.states, .lex_main_fn_index = wasm_language.lex_fn, .lex_keyword_fn_index = wasm_language.keyword_lex_fn, .scanner_create_fn_index = wasm_language.external_scanner.create, .scanner_destroy_fn_index = wasm_language.external_scanner.destroy, .scanner_serialize_fn_index = wasm_language.external_scanner.serialize, .scanner_deserialize_fn_index = wasm_language.external_scanner.deserialize, .scanner_scan_fn_index = wasm_language.external_scanner.scan, })); } return true; } void ts_wasm_store_reset_heap(TSWasmStore *self) { wasmtime_context_t *context = wasmtime_store_context(self->store); wasmtime_func_t func = { self->function_table.store_id, self->builtin_fn_indices.reset_heap }; wasm_trap_t *trap = NULL; wasmtime_val_t args[1] = { {.of.i32 = ts_wasm_store__heap_address(self), .kind = WASMTIME_I32}, }; wasmtime_error_t *error = wasmtime_func_call(context, &func, args, 1, NULL, 0, &trap); ts_assert(!error); ts_assert(!trap); } bool ts_wasm_store_start(TSWasmStore *self, TSLexer *lexer, const TSLanguage *language) { uint32_t instance_index; if (!ts_wasm_store_add_language(self, language, &instance_index)) return false; self->current_lexer = lexer; self->current_instance = array_get(&self->language_instances, instance_index); self->has_error = false; 
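// Presumably resetting the stdlib heap here discards any allocations left over from a
// previous parse (e.g. external scanner state), so repeated parses do not grow the
// wasm-side heap indefinitely.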
ts_wasm_store_reset_heap(self); return true; } void ts_wasm_store_reset(TSWasmStore *self) { self->current_lexer = NULL; self->current_instance = NULL; self->has_error = false; ts_wasm_store_reset_heap(self); } static void ts_wasm_store__call( TSWasmStore *self, int32_t function_index, wasmtime_val_raw_t *args_and_results, size_t args_and_results_len ) { wasmtime_context_t *context = wasmtime_store_context(self->store); wasmtime_val_t value; bool succeeded = wasmtime_table_get(context, &self->function_table, function_index, &value); ts_assert(succeeded); ts_assert(value.kind == WASMTIME_FUNCREF); wasmtime_func_t func = value.of.funcref; wasm_trap_t *trap = NULL; wasmtime_error_t *error = wasmtime_func_call_unchecked(context, &func, args_and_results, args_and_results_len, &trap); if (error) { // wasm_message_t message; // wasmtime_error_message(error, &message); // fprintf( // stderr, // "error in wasm module: %.*s\n", // (int)message.size, message.data // ); wasmtime_error_delete(error); self->has_error = true; } else if (trap) { // wasm_message_t message; // wasm_trap_message(trap, &message); // fprintf( // stderr, // "trap in wasm module: %.*s\n", // (int)message.size, message.data // ); wasm_trap_delete(trap); self->has_error = true; } } // The data fields of TSLexer, without the function pointers. // // This portion of the struct needs to be copied in and out // of wasm memory before and after calling a scan function. typedef struct { int32_t lookahead; TSSymbol result_symbol; } TSLexerDataPrefix; static bool ts_wasm_store__call_lex_function(TSWasmStore *self, unsigned function_index, TSStateId state) { wasmtime_context_t *context = wasmtime_store_context(self->store); uint8_t *memory_data = wasmtime_memory_data(context, &self->memory); memcpy( &memory_data[self->lexer_address], self->current_lexer, sizeof(TSLexerDataPrefix) ); wasmtime_val_raw_t args[2] = { {.i32 = self->lexer_address}, {.i32 = state}, }; ts_wasm_store__call(self, function_index, args, 2); if (self->has_error) return false; bool result = args[0].i32; memcpy( self->current_lexer, &memory_data[self->lexer_address], sizeof(TSLexerDataPrefix) ); return result; } bool ts_wasm_store_call_lex_main(TSWasmStore *self, TSStateId state) { return ts_wasm_store__call_lex_function( self, self->current_instance->lex_main_fn_index, state ); } bool ts_wasm_store_call_lex_keyword(TSWasmStore *self, TSStateId state) { return ts_wasm_store__call_lex_function( self, self->current_instance->lex_keyword_fn_index, state ); } uint32_t ts_wasm_store_call_scanner_create(TSWasmStore *self) { wasmtime_val_raw_t args[1] = {{.i32 = 0}}; ts_wasm_store__call(self, self->current_instance->scanner_create_fn_index, args, 1); if (self->has_error) return 0; return args[0].i32; } void ts_wasm_store_call_scanner_destroy(TSWasmStore *self, uint32_t scanner_address) { if (self->current_instance) { wasmtime_val_raw_t args[1] = {{.i32 = scanner_address}}; ts_wasm_store__call(self, self->current_instance->scanner_destroy_fn_index, args, 1); } } bool ts_wasm_store_call_scanner_scan( TSWasmStore *self, uint32_t scanner_address, uint32_t valid_tokens_ix ) { wasmtime_context_t *context = wasmtime_store_context(self->store); uint8_t *memory_data = wasmtime_memory_data(context, &self->memory); memcpy( &memory_data[self->lexer_address], self->current_lexer, sizeof(TSLexerDataPrefix) ); uint32_t valid_tokens_address = self->current_instance->external_states_address + (valid_tokens_ix * sizeof(bool)); wasmtime_val_raw_t args[3] = { {.i32 = scanner_address}, {.i32 = 
self->lexer_address}, {.i32 = valid_tokens_address} }; ts_wasm_store__call(self, self->current_instance->scanner_scan_fn_index, args, 3); if (self->has_error) return false; memcpy( self->current_lexer, &memory_data[self->lexer_address], sizeof(TSLexerDataPrefix) ); return args[0].i32; } uint32_t ts_wasm_store_call_scanner_serialize( TSWasmStore *self, uint32_t scanner_address, char *buffer ) { wasmtime_context_t *context = wasmtime_store_context(self->store); uint8_t *memory_data = wasmtime_memory_data(context, &self->memory); uint32_t serialization_buffer_address = ts_wasm_store__serialization_buffer_address(self); wasmtime_val_raw_t args[2] = { {.i32 = scanner_address}, {.i32 = serialization_buffer_address}, }; ts_wasm_store__call(self, self->current_instance->scanner_serialize_fn_index, args, 2); if (self->has_error) return 0; uint32_t length = args[0].i32; if (length > TREE_SITTER_SERIALIZATION_BUFFER_SIZE) { self->has_error = true; return 0; } if (length > 0) { memcpy( ((Lexer *)self->current_lexer)->debug_buffer, &memory_data[serialization_buffer_address], length ); } return length; } void ts_wasm_store_call_scanner_deserialize( TSWasmStore *self, uint32_t scanner_address, const char *buffer, unsigned length ) { wasmtime_context_t *context = wasmtime_store_context(self->store); uint8_t *memory_data = wasmtime_memory_data(context, &self->memory); uint32_t serialization_buffer_address = ts_wasm_store__serialization_buffer_address(self); if (length > 0) { memcpy( &memory_data[serialization_buffer_address], buffer, length ); } wasmtime_val_raw_t args[3] = { {.i32 = scanner_address}, {.i32 = serialization_buffer_address}, {.i32 = length}, }; ts_wasm_store__call(self, self->current_instance->scanner_deserialize_fn_index, args, 3); } bool ts_wasm_store_has_error(const TSWasmStore *self) { return self->has_error; } bool ts_language_is_wasm(const TSLanguage *self) { return self->lex_fn == ts_wasm_store__sentinel_lex_fn; } static inline LanguageWasmModule *ts_language__wasm_module(const TSLanguage *self) { return (LanguageWasmModule *)self->keyword_lex_fn; } void ts_wasm_language_retain(const TSLanguage *self) { LanguageWasmModule *module = ts_language__wasm_module(self); ts_assert(module->ref_count > 0); atomic_inc(&module->ref_count); } void ts_wasm_language_release(const TSLanguage *self) { LanguageWasmModule *module = ts_language__wasm_module(self); ts_assert(module->ref_count > 0); if (atomic_dec(&module->ref_count) == 0) { // Update the language id to reflect that the language is deleted. This allows any wasm stores // that hold wasm instances for this language to delete those instances. 
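// Lifecycle sketch: every TSWasmStore that instantiated this language holds a
// language_id_clone()'d reference to this id, and both ts_wasm_store_load_language and
// ts_wasm_store_add_language sweep their language_instances arrays for ids with
// `is_language_deleted` set, dropping the stale instances. Setting the flag here is
// what triggers that cleanup once the last native reference to the language is gone.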
atomic_inc(&module->language_id->is_language_deleted); language_id_delete(module->language_id); ts_free((void *)module->field_name_buffer); ts_free((void *)module->symbol_name_buffer); ts_free((void *)module->name); wasmtime_module_delete(module->module); ts_free(module); ts_free((void *)self->alias_map); ts_free((void *)self->alias_sequences); ts_free((void *)self->external_scanner.symbol_map); ts_free((void *)self->field_map_entries); ts_free((void *)self->field_map_slices); ts_free((void *)self->supertype_symbols); ts_free((void *)self->supertype_map_entries); ts_free((void *)self->supertype_map_slices); ts_free((void *)self->field_names); ts_free((void *)self->lex_modes); ts_free((void *)self->name); ts_free((void *)self->reserved_words); ts_free((void *)self->parse_actions); ts_free((void *)self->parse_table); ts_free((void *)self->primary_state_ids); ts_free((void *)self->public_symbol_map); ts_free((void *)self->small_parse_table); ts_free((void *)self->small_parse_table_map); ts_free((void *)self->symbol_metadata); ts_free((void *)self->symbol_names); ts_free((void *)self); } } #ifdef _MSC_VER #pragma warning(pop) #elif defined(__GNUC__) || defined(__clang__) #pragma GCC diagnostic pop #endif #else // If the WASM feature is not enabled, define dummy versions of all of the // wasm-related functions. void ts_wasm_store_delete(TSWasmStore *self) { (void)self; } bool ts_wasm_store_start( TSWasmStore *self, TSLexer *lexer, const TSLanguage *language ) { (void)self; (void)lexer; (void)language; return false; } void ts_wasm_store_reset(TSWasmStore *self) { (void)self; } bool ts_wasm_store_call_lex_main(TSWasmStore *self, TSStateId state) { (void)self; (void)state; return false; } bool ts_wasm_store_call_lex_keyword(TSWasmStore *self, TSStateId state) { (void)self; (void)state; return false; } uint32_t ts_wasm_store_call_scanner_create(TSWasmStore *self) { (void)self; return 0; } void ts_wasm_store_call_scanner_destroy( TSWasmStore *self, uint32_t scanner_address ) { (void)self; (void)scanner_address; } bool ts_wasm_store_call_scanner_scan( TSWasmStore *self, uint32_t scanner_address, uint32_t valid_tokens_ix ) { (void)self; (void)scanner_address; (void)valid_tokens_ix; return false; } uint32_t ts_wasm_store_call_scanner_serialize( TSWasmStore *self, uint32_t scanner_address, char *buffer ) { (void)self; (void)scanner_address; (void)buffer; return 0; } void ts_wasm_store_call_scanner_deserialize( TSWasmStore *self, uint32_t scanner_address, const char *buffer, unsigned length ) { (void)self; (void)scanner_address; (void)buffer; (void)length; } bool ts_wasm_store_has_error(const TSWasmStore *self) { (void)self; return false; } bool ts_language_is_wasm(const TSLanguage *self) { (void)self; return false; } void ts_wasm_language_retain(const TSLanguage *self) { (void)self; } void ts_wasm_language_release(const TSLanguage *self) { (void)self; } #endif hx-0.3.0+20250717/bindings/vendor/src/wasm_store.h000066400000000000000000000021611503625671400212620ustar00rootroot00000000000000#ifndef TREE_SITTER_WASM_H_ #define TREE_SITTER_WASM_H_ #ifdef __cplusplus extern "C" { #endif #include "tree_sitter/api.h" #include "./parser.h" bool ts_wasm_store_start(TSWasmStore *self, TSLexer *lexer, const TSLanguage *language); void ts_wasm_store_reset(TSWasmStore *self); bool ts_wasm_store_has_error(const TSWasmStore *self); bool ts_wasm_store_call_lex_main(TSWasmStore *self, TSStateId state); bool ts_wasm_store_call_lex_keyword(TSWasmStore *self, TSStateId state); uint32_t 
ts_wasm_store_call_scanner_create(TSWasmStore *self); void ts_wasm_store_call_scanner_destroy(TSWasmStore *self, uint32_t scanner_address); bool ts_wasm_store_call_scanner_scan(TSWasmStore *self, uint32_t scanner_address, uint32_t valid_tokens_ix); uint32_t ts_wasm_store_call_scanner_serialize(TSWasmStore *self, uint32_t scanner_address, char *buffer); void ts_wasm_store_call_scanner_deserialize(TSWasmStore *self, uint32_t scanner, const char *buffer, unsigned length); void ts_wasm_language_retain(const TSLanguage *self); void ts_wasm_language_release(const TSLanguage *self); #ifdef __cplusplus } #endif #endif // TREE_SITTER_WASM_H_ hx-0.3.0+20250717/cli/000077500000000000000000000000001503625671400136145ustar00rootroot00000000000000hx-0.3.0+20250717/cli/Cargo.toml000066400000000000000000000007341503625671400155500ustar00rootroot00000000000000[package] name = "skidder-cli" version = "0.1.0" edition = "2021" description = "A package manager for tree-sitter" authors = ["Pascal Kuthe "] license = "MPL-2.0" repository = "https://github.com/helix-editor/tree-house" readme = "../README.md" rust-version = "1.76.0" [dependencies] anyhow = "1.0" serde = "1.0" serde_json = "1.0" walkdir = "2.5" xflags = "0.3" skidder = { path = "../skidder" } libloading = "0.8" tempfile = "3.12" indicatif = "0.17" hx-0.3.0+20250717/cli/LICENSE000077700000000000000000000000001503625671400160342../LICENSEustar00rootroot00000000000000hx-0.3.0+20250717/cli/import.sh000077500000000000000000000031321503625671400154640ustar00rootroot00000000000000#!/bin/env bash set -e cargo build --release ../target/release/skidder-cli import --metadata -r ../../../tree-sitter-grammars/ ../../../master/runtime/grammars/sources/* ../target/release/skidder-cli import --metadata -r ../../../tree-sitter-grammars ../../../master/runtime/grammars/sources/markdown/tree-sitter-markdown:markdown ../target/release/skidder-cli import --metadata -r ../../../tree-sitter-grammars ../../../master/runtime/grammars/sources/markdown/tree-sitter-markdown-inline:markdown-inline ../target/release/skidder-cli import --metadata -r ../../../tree-sitter-grammars ../../../master/runtime/grammars/sources/v/tree_sitter_v:v ../target/release/skidder-cli import --metadata -r ../../../tree-sitter-grammars ../../../master/runtime/grammars/sources/wat/wat ../target/release/skidder-cli import --metadata -r ../../../tree-sitter-grammars ../../../master/runtime/grammars/sources/wat/wast ../target/release/skidder-cli import --metadata -r ../../../tree-sitter-grammars ../../../master/runtime/grammars/sources/typescript/typescript ../target/release/skidder-cli import --metadata -r ../../../tree-sitter-grammars ../../../master/runtime/grammars/sources/typescript/tsx ../target/release/skidder-cli import --metadata -r ../../../tree-sitter-grammars ../../../master/runtime/grammars/sources/php-only/php_only:php-only ../target/release/skidder-cli import --metadata -r ../../../tree-sitter-grammars ../../../master/runtime/grammars/sources/ocaml/ocaml ../target/release/skidder-cli import --metadata -r ../../../tree-sitter-grammars ../../../master/runtime/grammars/sources/ocaml/interface:ocaml-interface hx-0.3.0+20250717/cli/src/000077500000000000000000000000001503625671400144035ustar00rootroot00000000000000hx-0.3.0+20250717/cli/src/build.rs000066400000000000000000000014731503625671400160550ustar00rootroot00000000000000use std::num::NonZeroUsize; use std::path::PathBuf; use anyhow::Context; use crate::flags; impl flags::Build { pub fn run(self) -> anyhow::Result<()> { let repo = self 
.repo .canonicalize() .with_context(|| format!("failed to access {}", self.repo.display()))?; let config = skidder::Config { repos: vec![skidder::Repo::Local { path: repo }], index: PathBuf::new(), verbose: self.verbose, }; if let Some(grammar) = self.grammar { skidder::build_grammar(&config, &grammar, self.force)?; } else { skidder::build_all_grammars( &config, self.force, self.threads.and_then(NonZeroUsize::new), )?; } Ok(()) } } hx-0.3.0+20250717/cli/src/flags.rs000066400000000000000000000053111503625671400160470ustar00rootroot00000000000000use std::path::PathBuf; xflags::xflags! { src "./src/flags.rs" cmd skidder { cmd import { /// Whether to import queries optional --import-queries /// Whether to (re)generate metadata optional --metadata /// The repository/directory where repos are copied into. /// Defaults to the current working directory optional -r,--repo repo: PathBuf /// The path of the grammars to import. The name of the directory /// will be used as the grammar name. To overwrite you can append /// the grammar name with a colon repeated path: PathBuf } cmd build { optional --verbose optional -j, --threads threads: usize optional -f, --force required repo: PathBuf optional grammar: String } cmd init-repo { required repo: PathBuf } cmd load-grammar { optional -r, --recursive required path: PathBuf } cmd regenerate-parser { optional -r, --recursive required path: PathBuf } default cmd version { optional --version } } } // generated start // The following code is generated by `xflags` macro. // Run `env UPDATE_XFLAGS=1 cargo build` to regenerate. #[derive(Debug)] pub struct Skidder { pub subcommand: SkidderCmd, } #[derive(Debug)] pub enum SkidderCmd { Version(Version), Import(Import), Build(Build), InitRepo(InitRepo), LoadGrammar(LoadGrammar), RegenerateParser(RegenerateParser), } #[derive(Debug)] pub struct Version { pub version: bool, } #[derive(Debug)] pub struct Import { pub path: Vec<PathBuf>, pub import_queries: bool, pub metadata: bool, pub repo: Option<PathBuf>, } #[derive(Debug)] pub struct Build { pub repo: PathBuf, pub grammar: Option<String>, pub verbose: bool, pub threads: Option<usize>, pub force: bool, } #[derive(Debug)] pub struct InitRepo { pub repo: PathBuf, } #[derive(Debug)] pub struct LoadGrammar { pub path: PathBuf, pub recursive: bool, } #[derive(Debug)] pub struct RegenerateParser { pub path: PathBuf, pub recursive: bool, } impl Skidder { #[allow(dead_code)] pub fn from_env_or_exit() -> Self { Self::from_env_or_exit_() } #[allow(dead_code)] pub fn from_env() -> xflags::Result<Self> { Self::from_env_() } #[allow(dead_code)] pub fn from_vec(args: Vec<std::ffi::OsString>) -> xflags::Result<Self> { Self::from_vec_(args) } } // generated end impl Skidder { pub const HELP: &str = Self::HELP_; } hx-0.3.0+20250717/cli/src/generate_parser.rs000066400000000000000000000114521503625671400201220ustar00rootroot00000000000000use std::fs::{self, File}; use std::io::Write; use std::path::Path; use std::process::Command; use anyhow::{ensure, Context, Result}; use skidder::{decompress, Metadata}; use tempfile::TempDir; use crate::collect_grammars; use crate::flags::RegenerateParser; use crate::import::import_compressed; impl RegenerateParser { pub fn run(self) -> Result<()> { let paths = if self.recursive { collect_grammars(&self.path)?
} else { vec![self.path.clone()] }; let temp_dir = TempDir::new().context("failed to create temporary directory for decompression")?; // create dummy file to prevent TS CLI from creating a full skeleton File::create(temp_dir.path().join("grammar.js")) .context("failed to create dummy grammar.js in the temporary directory")?; let mut failed = Vec::new(); for grammar_dir in paths { let grammar_name = grammar_dir.file_name().unwrap().to_str().unwrap(); if grammar_name <= "dart" { continue; } println!("checking {grammar_name}"); let compressed = Metadata::read(&grammar_dir.join("metadata.json")) .with_context(|| format!("failed to read metadata for {grammar_name}"))? .parser_definition() .unwrap() .compressed; let src_path = grammar_dir.join("src"); let src_grammar_path = src_path.join("grammar.json"); let grammar_path = temp_dir.path().join("grammar.json"); if !src_grammar_path.exists() { eprintln!("grammar.json not found for {grammar_name}"); failed.push(grammar_name.to_owned()); continue; } if compressed { let dst = File::create(&grammar_path).with_context(|| { format!( "failed to create grammar.json file in temporary build directory {}", temp_dir.path().display() ) })?; decompress_file(&src_grammar_path, dst).with_context(|| { format!("failed to decompress grammar.json for {grammar_name}") })?; } else { fs::copy(src_grammar_path, &grammar_path) .with_context(|| format!("failed to copy grammar.json for {grammar_name}"))?; } println!("running tree-sitter generate {}", grammar_path.display()); let res = Command::new("tree-sitter") .arg("generate") .arg("--no-bindings") .arg(&grammar_path) .current_dir(temp_dir.path()) .status() .with_context(|| { format!( "failed to execute tree-sitter generate {}", grammar_path.display() ) })? .success(); if !res { eprintln!( "failed to execute tree-sitter generate {}", grammar_path.display() ); failed.push(grammar_name.to_owned()); continue; } let new_parser_path = temp_dir.path().join("src").join("parser.c"); let old_parser_path = src_path.join("parser.c"); let mut old_parser = Vec::new(); decompress_file(&old_parser_path, &mut old_parser) .with_context(|| format!("failed to decompress parser for {grammar_name}"))?; let old_parser = String::from_utf8_lossy(&old_parser); let new_parser = fs::read_to_string(&new_parser_path) .context("tree-sitter cli did not generate parser.c")?; if old_parser.trim() == new_parser.trim() { continue; } failed.push(grammar_name.to_owned()); eprintln!("existing parser.c was outdated, updating..."); if compressed { import_compressed(&new_parser_path, &old_parser_path).with_context(|| { format!("failed to compress new parser.c for {grammar_name}") })?; } else { fs::copy(&new_parser_path, &old_parser_path) .with_context(|| format!("failed to copy new parser.c for {grammar_name}"))?; } } ensure!( failed.is_empty(), "parser.c files are not up to date for {failed:?}!"
); Ok(()) } } fn decompress_file(src: &Path, dst: impl Write) -> Result<()> { File::open(src) .map_err(anyhow::Error::from) .and_then(|mut reader| decompress(&mut reader, dst)) .with_context(|| format!("failed to decompress {}", src.display()))?; Ok(()) } hx-0.3.0+20250717/cli/src/import.rs000066400000000000000000000234161503625671400162700ustar00rootroot00000000000000use std::env::current_dir; use std::io::Write; use std::path::{Path, PathBuf}; use std::process::Command; use std::{fs, io}; use anyhow::{bail, ensure, Context, Result}; use serde::Deserialize; use skidder::{Metadata, ParserDefinition}; use walkdir::WalkDir; use crate::flags::Import; const LICENSE_FILE_NAMES: &[&str] = &["LICENSE", "LICENSE.txt", "LICENCE", "LICENCE", "COPYING"]; const LICENSE_SEARCH: &[(&str, &str)] = &[ ("unlicense", "unlicense"), ("EUROPEAN UNION PUBLIC LICENCE v. 1.2", "EUPL-1.2"), ("The Artistic License 2.0", "Artistic-2.0"), ("Apache License", "Apache-2.0"), ("GNU GENERAL PUBLIC LICENSE", "GPL-3.0"), ("MIT License", "MIT"), ("DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE", "WTFPL"), ("BSD 3-Clause License", "BSD-3-Clause"), ]; impl Import { fn repo(&self) -> Result<PathBuf> { match &self.repo { Some(path) => Ok(path.clone()), None => Ok(current_dir()?), } } pub fn run(self) -> Result<()> { let repo = self.repo()?; for path in &self.path { let Some(dir_name) = path.file_name().and_then(|file_name| file_name.to_str()) else { bail!("invalid path {path:?}"); }; let mut src_path = path.to_owned(); let grammar_name = match dir_name.rsplit_once(':') { Some((dir_name, grammar_name)) => { src_path.set_file_name(dir_name); grammar_name } None => dir_name, }; src_path.push("src"); let mut dst_path = repo.join(grammar_name); fs::create_dir_all(&dst_path) .with_context(|| format!("failed to create {}", dst_path.display()))?; if !src_path.join("parser.c").exists() { eprintln!( "skipping grammar {grammar_name}: no parser.c found at {}!", src_path.display() ); continue; } src_path.pop(); println!("importing {grammar_name}"); for dir in ["src", "../common"] { let src_path = src_path.join(dir); if !src_path.exists() { continue; } dst_path.push(dir.strip_prefix("../").unwrap_or(dir)); for file in WalkDir::new(&src_path) { let file = file?; if !file.file_type().is_file() { continue; } let Some(file_name) = file.file_name().to_str() else { continue; }; let Some((_, extension)) = file_name.rsplit_once('.') else { continue; }; if !(matches!(extension, "h" | "c" | "cc") || extension == "scm" && self.import_queries || file_name == "grammar.json") || file_name.starts_with("parser_abi") && extension == "c" { continue; } let relative_path = file.path().strip_prefix(&src_path).unwrap(); let dst_path = dst_path.join(relative_path); fs::create_dir_all(dst_path.parent().unwrap()).with_context(|| { format!("failed to create {}", dst_path.parent().unwrap().display()) })?; let res = if matches!(file_name, "parser.c" | "grammar.json") && file.path().parent() == Some(&src_path) && dir == "src" { import_compressed(file.path(), &dst_path)?; continue; } else if matches!(extension, "h" | "c" | "cc") && src_path.join("../../common").exists() { fs::read_to_string(file.path()).and_then(|contents| { let contents = contents.replace("../../common/", "../common/"); fs::write(&dst_path, contents) }) } else { fs::copy(file.path(), &dst_path).map(|_| ()) }; res.with_context(|| { format!( "failed to copy {} to {}", file.path().display(), dst_path.display() ) })?; } dst_path.pop(); } let license_file = LICENSE_FILE_NAMES .iter() .map(|name| src_path.join(name))
.find(|src_path| src_path.exists()); let mut license = None; if let Some(license_file) = license_file { let license_file_content = fs::read_to_string(&license_file) .with_context(|| format!("failed to read {}", license_file.display()))?; fs::write(dst_path.join("LICENSE"), &license_file_content).with_context(|| { format!("failed to write {}", dst_path.join("LICENSE").display()) })?; license = LICENSE_SEARCH .iter() .find(|(needle, _)| license_file_content.contains(needle)) .map(|(_, license)| (*license).to_owned()); if license.is_none() { eprintln!("failed to identify license in {}", license_file.display()); } } else { eprintln!("warning: {grammar_name} does not have a LICENSE file!"); } if self.metadata { let metadata_path = dst_path.join("metadata.json"); let rev = git_output(&["rev-parse", "HEAD"], &src_path, false).with_context(|| { format!("failed to obtain git revision at {}", src_path.display()) })?; let repo = git_output(&["remote", "get-url", "origin"], &src_path, false) .with_context(|| { format!("failed to obtain git remote at {}", src_path.display()) })?; let package_metadata: Option<PackageJson> = fs::read_to_string(src_path.join("package.json")) .ok() .and_then(|json| serde_json::from_str(&json).ok()); if let Some(package_metadata) = package_metadata { match &license { Some(license) if license != &package_metadata.license => eprintln!("warning: license in package.json differs from detected license {license} != {}", &package_metadata.license), _ => license = Some(package_metadata.license), } } let old_metadata = Metadata::read(&metadata_path) .ok() .and_then(Metadata::parser_definition) .filter(|old_meta| old_meta.repo == repo && !old_meta.license.is_empty()); if let Some(old_metadata) = &old_metadata { match &license { Some(license) => { if license != &old_metadata.license { eprintln!( "warning: license has changed {} => {license}", old_metadata.license ); } } None => { eprintln!( "warning: couldn't determine license for {grammar_name}, keeping {:?}", old_metadata.license ); license = Some(old_metadata.license.clone()) } } } if license.is_none() { eprintln!("warning: couldn't determine license for {grammar_name}"); } let metadata = Metadata::ParserDefinition(ParserDefinition { repo, rev, license: license.unwrap_or_default(), compressed: true, }); metadata.write(&metadata_path).with_context(|| { format!( "failed to write metadata.json to {}", metadata_path.display() ) })? } } Ok(()) } } #[derive(Deserialize)] struct PackageJson { license: String, } fn git_output(args: &[&str], dir: &Path, verbose: bool) -> Result<String> { let mut cmd = Command::new("git"); cmd.args(args).current_dir(dir); if verbose { println!("{}: git {}", dir.display(), args.join(" ")) } let res = cmd.output().context("failed to invoke git")?; if !res.status.success() { let _ = io::stdout().write_all(&res.stdout); let _ = io::stderr().write_all(&res.stderr); bail!("git returned non-zero exit-code: {}", res.status); } String::from_utf8(res.stdout) .context("git returned invalid utf8") .map(|output| output.trim_end().to_string()) } pub fn import_compressed(src: &Path, dst: &Path) -> anyhow::Result<()> { let success = Command::new("zstd") .args(["--ultra", "-22", "-f", "-o"]) .arg(dst) .arg(src) .status() .with_context(|| format!("failed to compress {}", src.display()))?
.success(); ensure!(success, "failed to compress {}", src.display()); Ok(()) } hx-0.3.0+20250717/cli/src/init.rs000066400000000000000000000012331503625671400157130ustar00rootroot00000000000000use std::io::Write; use std::path::Path; use anyhow::Context; use crate::flags::InitRepo; impl InitRepo { pub fn run(self) -> anyhow::Result<()> { append(&self.repo.join(".gitignore"), "*/*.so\n*/.BUILD_COOKIE\n")?; append( &self.repo.join(".gitattributes"), "*/src/parser.c binary\n*/src/grammar.json binary\n", )?; Ok(()) } } fn append(path: &Path, contents: &str) -> anyhow::Result<()> { std::fs::File::options() .create(true) .append(true) .open(path)? .write_all(contents.as_bytes()) .with_context(|| format!("failed to write {}", path.display())) } hx-0.3.0+20250717/cli/src/load.rs000066400000000000000000000020441503625671400156700ustar00rootroot00000000000000use std::ffi::c_void; use anyhow::{Context, Result}; use libloading::Symbol; use crate::collect_grammars; use crate::flags::LoadGrammar; impl LoadGrammar { pub fn run(self) -> Result<()> { let paths = if self.recursive { collect_grammars(&self.path)? } else { vec![self.path.clone()] }; for path in paths { let Some(name) = path.file_stem().unwrap().to_str() else { continue; }; println!("loading {}", path.display()); unsafe { let lib = libloading::Library::new(&path) .with_context(|| format!("failed to load {}", path.display()))?; let language_fn_name = format!("tree_sitter_{}", name.replace('-', "_")); let _language_fn: Symbol<unsafe extern "C" fn() -> *mut c_void> = lib .get(language_fn_name.as_bytes()) .with_context(|| format!("failed to load {}", path.display()))?; } } Ok(()) } } hx-0.3.0+20250717/cli/src/main.rs000066400000000000000000000031651503625671400157020ustar00rootroot00000000000000use std::path::{Path, PathBuf}; use std::process::exit; use ::skidder::list_grammars; use anyhow::Result; mod build; mod flags; mod generate_parser; mod import; mod init; mod load; fn get_version() -> String { const GIT_HASH: Option<&str> = option_env!("GIT_HASH"); const CARGO_VERSION: &str = env!("CARGO_PKG_VERSION"); let owned = CARGO_VERSION.to_string(); if let Some(hash) = GIT_HASH { owned + "-" + hash } else { owned } } fn wrapped_main() -> Result<()> { let flags = flags::Skidder::from_env_or_exit(); match flags.subcommand { flags::SkidderCmd::Import(import_cmd) => import_cmd.run(), flags::SkidderCmd::Build(build_cmd) => build_cmd.run(), flags::SkidderCmd::InitRepo(init_cmd) => init_cmd.run(), flags::SkidderCmd::LoadGrammar(load_cmd) => load_cmd.run(), flags::SkidderCmd::RegenerateParser(generate_cmd) => generate_cmd.run(), flags::SkidderCmd::Version(flags::Version { version }) => { if version { println!("skidder-cli {}", get_version()); } else { println!("{}", flags::Skidder::HELP); } Ok(()) } } } pub fn main() { if let Err(err) = wrapped_main() { for error in err.chain() { eprintln!("error: {error}") } exit(1) } } fn collect_grammars(repo: &Path) -> Result<Vec<PathBuf>> { let config = skidder::Config { repos: vec![skidder::Repo::Local { path: repo.to_owned(), }], index: PathBuf::new(), verbose: false, }; list_grammars(&config) } hx-0.3.0+20250717/default.nix000066400000000000000000000015361503625671400152160ustar00rootroot00000000000000{ lib, rustPlatform, gitRev ? null, }: let fs = lib.fileset; files = fs.difference (fs.gitTracked ./.) ( fs.unions [ ./.github ./.envrc ./flake.lock (fs.fileFilter (file: lib.strings.hasInfix ".git" file.name) ./.) (fs.fileFilter (file: file.hasExt "md") ./.) (fs.fileFilter (file: file.hasExt "nix") ./.)
] ); in rustPlatform.buildRustPackage { strictDeps = true; pname = with builtins; (fromTOML (readFile ./cli/Cargo.toml)).package.name; version = with builtins; (fromTOML (readFile ./cli/Cargo.toml)).package.version; src = fs.toSource { root = ./.; fileset = files; }; cargoLock = { lockFile = ./Cargo.lock; allowBuiltinFetchGit = true; }; cargoBuildFlags = [ "-p skidder-cli" ]; doCheck = false; env.GIT_HASH = gitRev; meta.mainProgram = "skidder-cli"; } hx-0.3.0+20250717/fixtures/000077500000000000000000000000001503625671400147165ustar00rootroot00000000000000hx-0.3.0+20250717/fixtures/highlighter/000077500000000000000000000000001503625671400172145ustar00rootroot00000000000000hx-0.3.0+20250717/fixtures/highlighter/codefence_rust_doc_comments.md000066400000000000000000000015451503625671400252650ustar00rootroot00000000000000 ```rust // ┡━┛┡━━┛╰─ markup.raw.block // │ ╰─ markup.raw.block label // ╰─ markup.raw.block punctuation.bracket /// `Something` // ┡┛╿╿╿┡━━━━━━━┛╿╰─ markup.raw.block comment // │ ││││ ╰─ markup.raw.block comment markup.raw.inline punctuation.bracket // │ │││╰─ markup.raw.block comment markup.raw.inline // │ ││╰─ markup.raw.block comment markup.raw.inline punctuation.bracket // │ │╰─ markup.raw.block comment // │ ╰─ markup.raw.block comment comment // ╰─ markup.raw.block comment /// Anything // ┡┛╿┗━━━━━━━┹─ markup.raw.block comment // │ ╰─ markup.raw.block comment comment // ╰─ markup.raw.block comment ``` // ┡━┛╰─ markup.raw.block // ╰─ markup.raw.block punctuation.bracket hx-0.3.0+20250717/fixtures/highlighter/comment.html000066400000000000000000000007611503625671400215500ustar00rootroot00000000000000
// ╿┡━┛╿┡━━━━━━━━━━┛┡━━┛┡━━━┛┡┛┡━┛╰─ punctuation.bracket // ││ ││ │ │ │ ╰─ tag // ││ ││ │ │ ╰─ punctuation.bracket // ││ ││ │ ╰─ comment // ││ ││ ╰─ comment warning // ││ │╰─ comment // ││ ╰─ punctuation.bracket // │╰─ tag // ╰─ punctuation.bracket hx-0.3.0+20250717/fixtures/highlighter/edoc_code_combined_injection.erl000066400000000000000000000003241503625671400255250ustar00rootroot00000000000000 %% ` // ┡━┛╰─ comment markup.raw.inline // ╰─ comment foo // ┗━┹─ string.special.symbol %% ' // ┡┛┡┛╰─ comment // │ ╰─ comment markup.raw.inline // ╰─ comment hx-0.3.0+20250717/fixtures/highlighter/edoc_code_combined_injection_in_markdown.md000066400000000000000000000011121503625671400277270ustar00rootroot00000000000000 ```erlang // ┡━┛┡━━━━┛╰─ markup.raw.block // │ ╰─ markup.raw.block label // ╰─ markup.raw.block punctuation.bracket %% ` // ┡━┛╰─ markup.raw.block comment markup.raw.inline // ╰─ markup.raw.block comment foo // ┡━┛╰─ markup.raw.block // ╰─ markup.raw.block string.special.symbol %% ' // ┡┛┡┛╰─ markup.raw.block comment // │ ╰─ markup.raw.block comment markup.raw.inline // ╰─ markup.raw.block comment ``` // ┡━┛╰─ markup.raw.block // ╰─ markup.raw.block punctuation.bracket hx-0.3.0+20250717/fixtures/highlighter/hello_world.rs000066400000000000000000000006641503625671400221020ustar00rootroot00000000000000 fn main() { // ┡┛ ┡━━┛┡┛ ╰─ punctuation.bracket // │ │ ╰─ punctuation.bracket // │ ╰─ function // ╰─ keyword.function println!("hello world") // ┡━━━━━━┛╿┡━━━━━━━━━━━┛╰─ punctuation.bracket // │ │╰─ string // │ ╰─ punctuation.bracket // ╰─ function.macro } // ╰─ punctuation.bracket hx-0.3.0+20250717/fixtures/highlighter/html_in_edoc_in_erlang.erl000066400000000000000000000006421503625671400243640ustar00rootroot00000000000000 %%
  • foo // ┡━┛╿┡┛╿┗━┹─ comment // │ ││ ╰─ comment punctuation.bracket // │ │╰─ comment tag // │ ╰─ comment punctuation.bracket // ╰─ comment %% bar
  • // ┡━━━━┛┡┛┡┛╿╰─ comment // │ │ │ ╰─ comment punctuation.bracket // │ │ ╰─ comment tag // │ ╰─ comment punctuation.bracket // ╰─ comment hx-0.3.0+20250717/fixtures/highlighter/injectionception.rs000066400000000000000000000102501503625671400231240ustar00rootroot00000000000000 /// Says hello. // ┡┛╿┗━━━━━━━━━━┹─ comment // │ ╰─ comment comment // ╰─ comment /// // ┡┛╿╰─ comment // │ ╰─ comment comment // ╰─ comment /// # Example // ┡┛╿┡┛┗━━━━━━┹─ comment markup.heading.1 // │ │╰─ comment markup.heading.1 markup.heading.marker // │ ╰─ comment comment // ╰─ comment /// // ┡┛╿╰─ comment // │ ╰─ comment comment // ╰─ comment /// ```rust // ┡┛╿┡━━┛┡━━┛╰─ comment markup.raw.block // │ ││ ╰─ comment markup.raw.block label // │ │╰─ comment markup.raw.block punctuation.bracket // │ ╰─ comment comment // ╰─ comment /// fn add(left: usize, right: usize) -> usize { // ┡┛╿╿┡┛╿┡━┛╿┡━━┛╿╿┡━━━┛╿╿┡━━━┛╿╿┡━━━┛╿╿┡┛╿┡━━━┛╿╿╰─ comment markup.raw.block // │ │││ ││ ││ │││ │││ │││ │││ ││ │╰─ comment markup.raw.block punctuation.bracket // │ │││ ││ ││ │││ │││ │││ │││ ││ ╰─ comment markup.raw.block // │ │││ ││ ││ │││ │││ │││ │││ │╰─ comment markup.raw.block type.builtin // │ │││ ││ ││ │││ │││ │││ │││ ╰─ comment markup.raw.block // │ │││ ││ ││ │││ │││ │││ ││╰─ comment markup.raw.block operator // │ │││ ││ ││ │││ │││ │││ │╰─ comment markup.raw.block // │ │││ ││ ││ │││ │││ │││ ╰─ comment markup.raw.block punctuation.bracket // │ │││ ││ ││ │││ │││ ││╰─ comment markup.raw.block type.builtin // │ │││ ││ ││ │││ │││ │╰─ comment markup.raw.block // │ │││ ││ ││ │││ │││ ╰─ comment markup.raw.block punctuation.delimiter // │ │││ ││ ││ │││ ││╰─ comment markup.raw.block variable.parameter // │ │││ ││ ││ │││ │╰─ comment markup.raw.block // │ │││ ││ ││ │││ ╰─ comment markup.raw.block punctuation.delimiter // │ │││ ││ ││ ││╰─ comment markup.raw.block type.builtin // │ │││ ││ ││ │╰─ comment markup.raw.block // │ │││ ││ ││ ╰─ comment markup.raw.block punctuation.delimiter // │ │││ ││ │╰─ comment markup.raw.block variable.parameter // │ │││ ││ ╰─ comment markup.raw.block punctuation.bracket // │ │││ │╰─ comment markup.raw.block function // │ │││ ╰─ comment markup.raw.block // │ ││╰─ comment markup.raw.block keyword.function // │ │╰─ comment markup.raw.block // │ ╰─ comment comment // ╰─ comment /// left + right // ┡┛╿┡━━━┛┡━━┛╿╿╿┡━━━┛╰─ comment markup.raw.block // │ ││ │ │││╰─ comment markup.raw.block variable.parameter // │ ││ │ ││╰─ comment markup.raw.block // │ ││ │ │╰─ comment markup.raw.block operator // │ ││ │ ╰─ comment markup.raw.block // │ ││ ╰─ comment markup.raw.block variable.parameter // │ │╰─ comment markup.raw.block // │ ╰─ comment comment // ╰─ comment /// } // ┡┛╿╿╿╰─ comment markup.raw.block // │ ││╰─ comment markup.raw.block punctuation.bracket // │ │╰─ comment markup.raw.block // │ ╰─ comment comment // ╰─ comment /// ``` // ┡┛╿┡━━┛╰─ comment markup.raw.block // │ │╰─ comment markup.raw.block punctuation.bracket // │ ╰─ comment comment // ╰─ comment pub fn hello() {} // ┡━┛ ┡┛ ┡━━━┛┡┛ ┗┹─ punctuation.bracket // │ │ │ ╰─ punctuation.bracket // │ │ ╰─ function // │ ╰─ keyword.function // ╰─ keyword hx-0.3.0+20250717/fixtures/highlighter/markdown_bold.md000066400000000000000000000014621503625671400223630ustar00rootroot00000000000000 This is a **bold** comment. // ┡┛┡━━┛┗┹─ markup.bold punctuation.bracket // │ ╰─ markup.bold // ╰─ markup.bold punctuation.bracket This is a [link containing a `code` snippet]. 
// ╿┡━━━━━━━━━━━━━━━━┛╿┡━━┛╿┡━━━━━━┛╰─ punctuation.bracket // ││ ││ │╰─ markup.link.text // ││ ││ ╰─ markup.link.text markup.raw.inline punctuation.bracket // ││ │╰─ markup.link.text markup.raw.inline // ││ ╰─ markup.link.text markup.raw.inline punctuation.bracket // │╰─ markup.link.text // ╰─ punctuation.bracket hx-0.3.0+20250717/fixtures/highlighter/nested_vec.rs000066400000000000000000000002561503625671400217040ustar00rootroot00000000000000 vec![vec![]] // ┡━━┛╿┡━━┛┗━┹─ punctuation.bracket // │ │╰─ function.macro // │ ╰─ punctuation.bracket // ╰─ function.macro hx-0.3.0+20250717/fixtures/highlighter/non_local.rs000066400000000000000000000017031503625671400215270ustar00rootroot00000000000000 fn foo(this: &Thing) { // ┡┛ ┡━┛╿┡━━┛╿ ╿┡━━━┛╿ ╰─ punctuation.bracket // │ │ ││ │ ││ ╰─ punctuation.bracket // │ │ ││ │ │╰─ type // │ │ ││ │ ╰─ keyword.storage.modifier.ref // │ │ ││ ╰─ punctuation.delimiter // │ │ │╰─ variable.parameter // │ │ ╰─ punctuation.bracket // │ ╰─ function // ╰─ keyword.function this // ┗━━┹─ variable.parameter } // ╰─ punctuation.bracket fn bar() { // ┡┛ ┡━┛┡┛ ╰─ punctuation.bracket // │ │ ╰─ punctuation.bracket // │ ╰─ function // ╰─ keyword.function this.foo(); // ┡━━┛╿┡━┛┡┛╰─ punctuation.delimiter // │ ││ ╰─ punctuation.bracket // │ │╰─ function // │ ╰─ punctuation.delimiter // ╰─ variable.builtin } // ╰─ punctuation.bracket hx-0.3.0+20250717/fixtures/highlighter/parent_child_highlight_precedence.css000066400000000000000000000003561503625671400265720ustar00rootroot00000000000000 --color: #fff; // ┡━━━━━┛╿ ╿┡━┛╰─ punctuation.delimiter // │ │ │╰─ string.special // │ │ ╰─ string.special punctuation // │ ╰─ punctuation.delimiter // ╰─ variable hx-0.3.0+20250717/fixtures/highlighter/reference_highlight_starts_after_definition_ends.rs000066400000000000000000000024071503625671400315540ustar00rootroot00000000000000 fn event(tx: &Sender, event: MyEvent) { // ┡┛ ┡━━━┛╿┡┛╿ ╿┡━━━━┛╿ ┡━━━┛╿ ┡━━━━━┛╿ ╰─ punctuation.bracket // │ │ ││ │ ││ │ │ │ │ ╰─ punctuation.bracket // │ │ ││ │ ││ │ │ │ ╰─ type // │ │ ││ │ ││ │ │ ╰─ punctuation.delimiter // │ │ ││ │ ││ │ ╰─ variable.parameter // │ │ ││ │ ││ ╰─ punctuation.delimiter // │ │ ││ │ │╰─ type // │ │ ││ │ ╰─ keyword.storage.modifier.ref // │ │ ││ ╰─ punctuation.delimiter // │ │ │╰─ variable.parameter // │ │ ╰─ punctuation.bracket // │ ╰─ function // ╰─ keyword.function send_blocking(tx, event); // ┡━━━━━━━━━━━┛╿┡┛╿ ┡━━━┛╿╰─ punctuation.delimiter // │ ││ │ │ ╰─ punctuation.bracket // │ ││ │ ╰─ variable.parameter // │ ││ ╰─ punctuation.delimiter // │ │╰─ variable.parameter // │ ╰─ punctuation.bracket // ╰─ function } // ╰─ punctuation.bracket hx-0.3.0+20250717/fixtures/highlighter/rust_doc_comment.rs000066400000000000000000000031251503625671400231270ustar00rootroot00000000000000 /// **hello-world** // ┡┛╿╿┡┛┡━━━━━━━━━┛┡┛╰─ comment // │ │││ │ ╰─ comment markup.bold punctuation.bracket // │ │││ ╰─ comment markup.bold // │ ││╰─ comment markup.bold punctuation.bracket // │ │╰─ comment // │ ╰─ comment comment // ╰─ comment /// **foo // ┡┛╿╿┡┛┗━┹─ comment markup.bold // │ ││╰─ comment markup.bold punctuation.bracket // │ │╰─ comment // │ ╰─ comment comment // ╰─ comment fn foo() { // ┡┛ ┡━┛┡┛ ╰─ punctuation.bracket // │ │ ╰─ punctuation.bracket // │ ╰─ function // ╰─ keyword.function println!("hello world") // ┡━━━━━━┛╿┡━━━━━━━━━━━┛╰─ punctuation.bracket // │ │╰─ string // │ ╰─ punctuation.bracket // ╰─ function.macro } // ╰─ punctuation.bracket /// bar** // ┡┛╿┡━━┛┡┛╰─ comment // │ ││ ╰─ comment markup.bold punctuation.bracket // │ │╰─ comment 
markup.bold // │ ╰─ comment comment // ╰─ comment fn bar() { // ┡┛ ┡━┛┡┛ ╰─ punctuation.bracket // │ │ ╰─ punctuation.bracket // │ ╰─ function // ╰─ keyword.function println!("hello world") // ┡━━━━━━┛╿┡━━━━━━━━━━━┛╰─ punctuation.bracket // │ │╰─ string // │ ╰─ punctuation.bracket // ╰─ function.macro } // ╰─ punctuation.bracket hx-0.3.0+20250717/fixtures/highlighter/rust_no_doc_comment.rs000066400000000000000000000020661503625671400236260ustar00rootroot00000000000000 // TODO **hello-world** // ┡━┛┡━━┛┗━━━━━━━━━━━━━━━┹─ comment // │ ╰─ comment info // ╰─ comment fn foo() { // ┡┛ ┡━┛┡┛ ╰─ punctuation.bracket // │ │ ╰─ punctuation.bracket // │ ╰─ function // ╰─ keyword.function println!("hello world") // ┡━━━━━━┛╿┡━━━━━━━━━━━┛╰─ punctuation.bracket // │ │╰─ string // │ ╰─ punctuation.bracket // ╰─ function.macro } // ╰─ punctuation.bracket /// bar** // ┗━━━━━━━┹─ comment fn bar() { // ┡┛ ┡━┛┡┛ ╰─ punctuation.bracket // │ │ ╰─ punctuation.bracket // │ ╰─ function // ╰─ keyword.function println!("hello world") // ┡━━━━━━┛╿┡━━━━━━━━━━━┛╰─ punctuation.bracket // │ │╰─ string // │ ╰─ punctuation.bracket // ╰─ function.macro } // ╰─ punctuation.bracket hx-0.3.0+20250717/fixtures/highlighter/rust_parameter_locals.rs000066400000000000000000000042301503625671400241530ustar00rootroot00000000000000 fn add(left: usize, right: usize) -> usize { // ┡┛ ┡━┛╿┡━━┛╿ ┡━━━┛╿ ┡━━━┛╿ ┡━━━┛╿ ┡┛ ┡━━━┛ ╰─ punctuation.bracket // │ │ ││ │ │ │ │ │ │ │ │ ╰─ type.builtin // │ │ ││ │ │ │ │ │ │ │ ╰─ operator // │ │ ││ │ │ │ │ │ │ ╰─ punctuation.bracket // │ │ ││ │ │ │ │ │ ╰─ type.builtin // │ │ ││ │ │ │ │ ╰─ punctuation.delimiter // │ │ ││ │ │ │ ╰─ variable.parameter // │ │ ││ │ │ ╰─ punctuation.delimiter // │ │ ││ │ ╰─ type.builtin // │ │ ││ ╰─ punctuation.delimiter // │ │ │╰─ variable.parameter // │ │ ╰─ punctuation.bracket // │ ╰─ function // ╰─ keyword.function left + right // ┡━━┛ ╿ ┗━━━┹─ variable.parameter // │ ╰─ operator // ╰─ variable.parameter } // ╰─ punctuation.bracket fn add(left: usize, right: usize) -> usize { // ┡┛ ┡━┛╿┡━━┛╿ ┡━━━┛╿ ┡━━━┛╿ ┡━━━┛╿ ┡┛ ┡━━━┛ ╰─ punctuation.bracket // │ │ ││ │ │ │ │ │ │ │ │ ╰─ type.builtin // │ │ ││ │ │ │ │ │ │ │ ╰─ operator // │ │ ││ │ │ │ │ │ │ ╰─ punctuation.bracket // │ │ ││ │ │ │ │ │ ╰─ type.builtin // │ │ ││ │ │ │ │ ╰─ punctuation.delimiter // │ │ ││ │ │ │ ╰─ variable.parameter // │ │ ││ │ │ ╰─ punctuation.delimiter // │ │ ││ │ ╰─ type.builtin // │ │ ││ ╰─ punctuation.delimiter // │ │ │╰─ variable.parameter // │ │ ╰─ punctuation.bracket // │ ╰─ function // ╰─ keyword.function left + right // ┡━━┛ ╿ ┗━━━┹─ variable.parameter // │ ╰─ operator // ╰─ variable.parameter } // ╰─ punctuation.bracket hx-0.3.0+20250717/fixtures/injections/000077500000000000000000000000001503625671400170635ustar00rootroot00000000000000hx-0.3.0+20250717/fixtures/injections/edoc_code_combined_injection.erl000066400000000000000000000000771503625671400254010ustar00rootroot00000000000000 %% ` // ┗┹─ edoc foo %% ' // ┗┹─ edoc hx-0.3.0+20250717/fixtures/injections/html_in_edoc_in_erlang.erl000066400000000000000000000003061503625671400242300ustar00rootroot00000000000000 %%
  • foo // ╿┡━━┛┗━┹─ edoc // │╰─ edoc html // ╰─ edoc %% bar
  • // ┡━━┛┡━━━┛╰─ edoc // │ ╰─ edoc html // ╰─ edoc hx-0.3.0+20250717/fixtures/injections/injectionception.rs000066400000000000000000000014111503625671400227720ustar00rootroot00000000000000 /// Says hello. // ┡━━━━━━━━━━┛╰─ markdown // ╰─ markdown markdown-inline /// // ╰─ markdown /// # Example // ┡━┛┡━━━━━┛╰─ markdown // │ ╰─ markdown markdown-inline // ╰─ markdown /// // ╰─ markdown /// ```rust // ┗━━━━━━┹─ markdown /// fn add(left: usize, right: usize) -> usize { // ┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┹─ markdown rust /// left + right // ┗━━━━━━━━━━━━━━━┹─ markdown rust /// } // ┗┹─ markdown rust /// ``` // ┗━━┹─ markdown pub fn hello() {} hx-0.3.0+20250717/fixtures/injections/nested_vec.rs000066400000000000000000000001521503625671400215460ustar00rootroot00000000000000 vec![vec![]] // ┡━━━┛┡┛╰─ rust // │ ╰─ rust rust // ╰─ rust hx-0.3.0+20250717/fixtures/injections/overlapping_injection.rs000066400000000000000000000002661503625671400240250ustar00rootroot00000000000000 some_macro!((), (), "`rust` injection happens here"); // ┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┹─ rust hx-0.3.0+20250717/fixtures/injections/rust_doc_comment.rs000066400000000000000000000007721503625671400230030ustar00rootroot00000000000000 /// **hello-world** // ┗━━━━━━━━━━━━━━━┹─ markdown markdown-inline /// **foo // ┗━━━━┹─ markdown markdown-inline fn foo() { println!("hello world") // ┗━━━━━━━━━━━━━┹─ rust } /// bar** // ┡━━━━┛╰─ markdown // ╰─ markdown markdown-inline fn bar() { println!("hello world") // ┗━━━━━━━━━━━━━┹─ rust } hx-0.3.0+20250717/fixtures/injections/rust_no_doc_comment.rs000066400000000000000000000007231503625671400234730ustar00rootroot00000000000000 /// TODO **hello-world** // ┗━━━━━━━━━━━━━━━━━━━━━━━┹─ comment /// **foo // ┗━━━━━━━┹─ comment fn foo() { println!("hello world") // ┗━━━━━━━━━━━━━┹─ rust } /// bar** // ┗━━━━━━━┹─ comment fn bar() { println!("hello world") // ┗━━━━━━━━━━━━━┹─ rust } hx-0.3.0+20250717/flake.lock000066400000000000000000000021241503625671400150000ustar00rootroot00000000000000{ "nodes": { "nixpkgs": { "locked": { "lastModified": 1746904237, "narHash": "sha256-3e+AVBczosP5dCLQmMoMEogM57gmZ2qrVSrmq9aResQ=", "owner": "nixos", "repo": "nixpkgs", "rev": "d89fc19e405cb2d55ce7cc114356846a0ee5e956", "type": "github" }, "original": { "owner": "nixos", "ref": "nixos-unstable", "repo": "nixpkgs", "type": "github" } }, "root": { "inputs": { "nixpkgs": "nixpkgs", "rust-overlay": "rust-overlay" } }, "rust-overlay": { "inputs": { "nixpkgs": [ "nixpkgs" ] }, "locked": { "lastModified": 1747190175, "narHash": "sha256-s33mQ2s5L/2nyllhRTywgECNZyCqyF4MJeM3vG/GaRo=", "owner": "oxalica", "repo": "rust-overlay", "rev": "58160be7abad81f6f8cb53120d5b88c16e01c06d", "type": "github" }, "original": { "owner": "oxalica", "repo": "rust-overlay", "type": "github" } } }, "root": "root", "version": 7 } hx-0.3.0+20250717/flake.nix000066400000000000000000000031561503625671400146540ustar00rootroot00000000000000{ description = "A package manager for tree-sitter grammars"; inputs = { nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable"; rust-overlay = { url = "github:oxalica/rust-overlay"; inputs.nixpkgs.follows = "nixpkgs"; }; }; outputs = { self, nixpkgs, rust-overlay, }: let inherit (nixpkgs) lib; forEachSystem = lib.genAttrs lib.systems.flakeExposed; in { packages = forEachSystem ( system: let pkgs = import nixpkgs { inherit system; overlays = [ (import rust-overlay) ]; }; toolchain = pkgs.rust-bin.stable.latest.default; in { skidder-cli = pkgs.callPackage ./. 
{ gitRev = self.rev or self.dirtyRev; }; default = self.packages.${system}.skidder-cli; } ); devShell = forEachSystem ( system: let pkgs = import nixpkgs { inherit system; overlays = [ (import rust-overlay) ]; }; toolchain = pkgs.rust-bin.stable.latest.default; in pkgs.mkShell { nativeBuildInputs = with pkgs; [ (toolchain.override { extensions = [ "rust-src" "clippy" "llvm-tools-preview" ]; }) rust-analyzer ] ++ (lib.optionals stdenv.isLinux [ cargo-llvm-cov cargo-flamegraph valgrind ]); RUST_BACKTRACE = "1"; } ); }; } hx-0.3.0+20250717/highlighter/000077500000000000000000000000001503625671400153435ustar00rootroot00000000000000hx-0.3.0+20250717/highlighter/CHANGELOG.md000066400000000000000000000025361503625671400171620ustar00rootroot00000000000000# Changelog All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ## [v0.3.0] - 2025-06-16 ### Fixed * Fixed a bug where a parent node's first child being captured before the parent node caused the list of active highlights to become out-of-order. * Fixed an issue where a combined injection would not have its active highlights retained until the next injection range if that injection range did not have any captures or injections itself. ### Updated * The minimum required Rust version has been increased to 1.82. ## [v0.2.0] - 2025-06-06 ### Added * Added `Syntax::layers_for_byte_range` * Added `TreeCursor::reset` * Added an iterator for recursively walking over the nodes in a `TreeCursor`: `TreeRecursiveWalker` ### Changed * `InactiveQueryCursor::new` now takes the byte range and match limit as parameters ### Fixed * Included `LICENSE` in the crate package * Fixed an issue where a combined injection layer could be queried multiple times by `QueryIter` * Fixed an issue where a combined injection layer would not be re-parsed when an injection for the layer was removed by an edit ## [v0.1.0] - 2025-05-13 ### Added * Initial publish hx-0.3.0+20250717/highlighter/Cargo.toml000066400000000000000000000020101503625671400172640ustar00rootroot00000000000000[package] name = "tree-house" description = "A robust and cozy highlighter library for tree-sitter" authors = [ "Pascal Kuthe ", "Michael Davis " ] version = "0.3.0" edition = "2021" license = "MPL-2.0" repository = "https://github.com/helix-editor/tree-house" readme = "../README.md" rust-version = "1.82.0" [features] default = ["fixtures"] fixtures = ["dep:unicode-width", "dep:pretty_assertions"] [dependencies] ropey = { version = "1.6", default-features = false } arc-swap = "1" hashbrown = { version = "0.15" } regex = "1" regex-cursor = "0.1" slab = "0.4" unicode-width = { version = "=0.1.12", optional = true } pretty_assertions = { version = "1.4.0", optional = true } kstring = "2.0" [dev-dependencies] indexmap = "2.5.0" skidder = { path = "../skidder" } [dependencies.tree-sitter] version = "0.2" package = "tree-house-bindings" path = "../bindings" features = ["ropey"] [dependencies.once_cell] version = "1.19" default-features = false features = ["std", "alloc"] hx-0.3.0+20250717/highlighter/LICENSE000077700000000000000000000000001503625671400175632../LICENSEustar00rootroot00000000000000hx-0.3.0+20250717/highlighter/src/000077500000000000000000000000001503625671400161325ustar00rootroot00000000000000hx-0.3.0+20250717/highlighter/src/config.rs000066400000000000000000000060521503625671400177500ustar00rootroot00000000000000use 
once_cell::sync::Lazy; use regex::Regex; use tree_sitter::{query, Grammar}; use crate::highlighter::{Highlight, HighlightQuery}; use crate::injections_query::{InjectionLanguageMarker, InjectionsQuery}; use crate::Language; use std::fmt::Write; #[derive(Debug)] pub struct LanguageConfig { pub grammar: Grammar, pub highlight_query: HighlightQuery, pub injection_query: InjectionsQuery, } impl LanguageConfig { pub fn new( grammar: Grammar, highlight_query_text: &str, injection_query_text: &str, local_query_text: &str, ) -> Result { // NOTE: the injection queries are parsed first since the local query is passed as-is // to `Query::new` in `InjectionsQuery::new`. This ensures that the more readable error // bubbles up first if the locals queries have an issue. let injection_query = InjectionsQuery::new(grammar, injection_query_text, local_query_text)?; let highlight_query = HighlightQuery::new(grammar, highlight_query_text, local_query_text)?; Ok(Self { grammar, highlight_query, injection_query, }) } pub fn configure(&self, mut f: impl FnMut(&str) -> Option) { self.highlight_query.configure(&mut f); self.injection_query.configure(&mut f); } } static INHERITS_REGEX: Lazy = Lazy::new(|| Regex::new(r";+\s*inherits\s*:?\s*([a-z_,()-]+)\s*").unwrap()); /// reads a query by invoking `read_query_text`, handles any `inherits` directives pub fn read_query(language: &str, mut read_query_text: impl FnMut(&str) -> String) -> String { fn read_query_impl(language: &str, read_query_text: &mut impl FnMut(&str) -> String) -> String { let query = read_query_text(language); // replaces all "; inherits (,)*" with the queries of the given language(s) INHERITS_REGEX .replace_all(&query, |captures: ®ex::Captures| { captures[1] .split(',') .fold(String::new(), |mut output, language| { // `write!` to a String cannot fail. write!( output, "\n{}\n", read_query_impl(language, &mut *read_query_text) ) .unwrap(); output }) }) .into_owned() } read_query_impl(language, &mut read_query_text) } pub trait LanguageLoader { fn language_for_marker(&self, marker: InjectionLanguageMarker) -> Option; fn get_config(&self, lang: Language) -> Option<&LanguageConfig>; } impl LanguageLoader for &'_ T where T: LanguageLoader, { fn language_for_marker(&self, marker: InjectionLanguageMarker) -> Option { T::language_for_marker(self, marker) } fn get_config(&self, lang: Language) -> Option<&LanguageConfig> { T::get_config(self, lang) } } hx-0.3.0+20250717/highlighter/src/fixtures.rs000066400000000000000000000376301503625671400203620ustar00rootroot00000000000000use pretty_assertions::StrComparison; use ropey::{Rope, RopeSlice}; use std::fmt::Write; use std::fs; use std::ops::{Bound, RangeBounds}; use std::path::Path; use std::time::Duration; use tree_sitter::Query; use unicode_width::{UnicodeWidthChar, UnicodeWidthStr}; use crate::config::LanguageLoader; use crate::highlighter::{Highlight, HighlightEvent, Highlighter}; use crate::query_iter::{QueryIter, QueryIterEvent}; use crate::{Language, Range, Syntax}; macro_rules! w { ($dst: expr$(, $($args: tt)*)?) => {{ let _ = write!($dst$(, $($args)*)?); }}; } macro_rules! wln { ($dst: expr$(, $($args: tt)*)?) 
=> {{ let _ = writeln!($dst$(, $($args)*)?); }}; } pub fn check_fixture(path: impl AsRef, roundtrip: impl FnOnce(&str) -> String) { let path = path.as_ref(); let snapshot = match fs::read_to_string(path) { Ok(content) => content.replace("\r\n", "\n"), Err(err) => panic!("Failed to read fixture {path:?}: {err}"), }; let snapshot = snapshot.trim_end(); let roundtrip = roundtrip(snapshot); if snapshot != roundtrip.trim_end() { if std::env::var_os("UPDATE_EXPECT").is_some_and(|it| it == "1") { println!("\x1b[1m\x1b[92mupdating\x1b[0m: {}", path.display()); fs::write(path, roundtrip).unwrap(); } else { println!( "\n \x1b[1mCurrent\x1b[0m: ---- {} ---- \x1b[1mGenerated\x1b[0m: ---- {} ---- \x1b[1mDiff\x1b[0m: ---- {} ---- \x1b[1m\x1b[91merror\x1b[97m: fixture test failed\x1b[0m \x1b[1m\x1b[34m-->\x1b[0m {} You can update all fixtures by running: env UPDATE_EXPECT=1 cargo test ", snapshot, roundtrip, StrComparison::new(snapshot, &roundtrip.trim_end()), path.display(), ); } std::panic::resume_unwind(Box::new(())); } } pub fn strip_annotations(src: &str, comment_prefix: &str) -> Rope { let ident = " ".repeat(comment_prefix.width()); let escape = src.lines().all(|line| { line.chars().all(|c| c.is_whitespace()) || line.starts_with(&ident) || line.starts_with(comment_prefix) }); if !escape { Rope::from_str(src); } let mut raw = String::new(); for mut line in src.split_inclusive('\n') { if line.starts_with(comment_prefix) { continue; } line = line.strip_prefix(&ident).unwrap_or(line); raw.push_str(line); } Rope::from_str(&raw) } pub fn check_highlighter_fixture>( path: impl AsRef, comment_prefix: &str, language: Language, loader: &impl LanguageLoader, get_highlight_name: impl Fn(Highlight) -> String, range: impl Fn(RopeSlice) -> R, ) { check_fixture(path, move |src| { roundtrip_highlighter_fixture( comment_prefix, language, loader, get_highlight_name, src, range, ) }) } pub fn check_injection_fixture>( path: impl AsRef, comment_prefix: &str, language: Language, loader: &impl LanguageLoader, get_language_name: impl Fn(Language) -> String, range: impl Fn(RopeSlice) -> R, ) { check_fixture(path, move |src| { roundtrip_injection_fixture( comment_prefix, language, loader, get_language_name, src, range, ) }) } pub fn roundtrip_highlighter_fixture>( comment_prefix: &str, language: Language, loader: &impl LanguageLoader, get_highlight_name: impl Fn(Highlight) -> String, src: &str, range: impl Fn(RopeSlice) -> R, ) -> String { let raw = strip_annotations(src, comment_prefix); let syntax = Syntax::new(raw.slice(..), language, Duration::from_secs(60), loader).unwrap(); let range = range(raw.slice(..)); highlighter_fixture( comment_prefix, loader, get_highlight_name, &syntax, raw.slice(..), range, ) } pub fn roundtrip_injection_fixture>( comment_prefix: &str, language: Language, loader: &impl LanguageLoader, get_language_name: impl Fn(Language) -> String, src: &str, range: impl Fn(RopeSlice) -> R, ) -> String { let raw = strip_annotations(src, comment_prefix); let syntax = Syntax::new(raw.slice(..), language, Duration::from_secs(60), loader).unwrap(); let range = range(raw.slice(..)); injections_fixture( comment_prefix, loader, get_language_name, &syntax, raw.slice(..), range, ) } pub fn highlighter_fixture( comment_prefix: &str, loader: &impl LanguageLoader, get_highlight_name: impl Fn(Highlight) -> String, syntax: &Syntax, src: RopeSlice<'_>, range: impl RangeBounds, ) -> String { let start = match range.start_bound() { Bound::Included(&i) => i, Bound::Excluded(&i) => i + 1, Bound::Unbounded => 0, }; let end = 
match range.end_bound() { Bound::Included(&i) => i - 1, Bound::Excluded(&i) => i, Bound::Unbounded => src.len_bytes(), }; let ident = " ".repeat(comment_prefix.width()); let mut highlighter = Highlighter::new(syntax, src, &loader, start as u32..); let mut pos = highlighter.next_event_offset(); let mut highlight_stack = Vec::new(); let mut line_idx = src.byte_to_line(pos as usize); let mut line_start = src.line_to_byte(line_idx) as u32; let mut line_end = src.line_to_byte(line_idx + 1) as u32; let mut line_highlights = Vec::new(); let mut res = String::new(); for line in src.byte_slice(..line_start as usize).lines() { if line.len_bytes() != 0 { wln!(res, "{ident}{line}") } } while pos < end as u32 { let (event, new_highlights) = highlighter.advance(); if event == HighlightEvent::Refresh { highlight_stack.clear(); } highlight_stack.extend(new_highlights.map(&get_highlight_name)); let start = pos; pos = highlighter.next_event_offset(); if pos == u32::MAX { pos = src.len_bytes() as u32 } if pos <= start { panic!( "INVALID HIGHLIGHT RANGE: {start}..{pos} '{}' {:?}", src.byte_slice(pos as usize..start as usize), highlight_stack ); } while start >= line_end { res.push_str(&ident); res.extend( src.byte_slice(line_start as usize..line_end as usize) .chunks(), ); annotate_line( comment_prefix, src, line_start, &mut line_highlights, &mut res, false, ); line_highlights.clear(); line_idx += 1; line_start = line_end; line_end = src .try_line_to_byte(line_idx + 1) .unwrap_or(src.len_bytes()) as u32; } if !highlight_stack.is_empty() { let range = start..pos.min(line_end); if !range.is_empty() { line_highlights.push((range, highlight_stack.clone())) } } while pos > line_end { res.push_str(&ident); res.extend( src.byte_slice(line_start as usize..line_end as usize) .chunks(), ); annotate_line( comment_prefix, src, line_start, &mut line_highlights, &mut res, !highlight_stack.is_empty(), ); line_highlights.clear(); line_idx += 1; line_start = line_end; line_end = src .try_line_to_byte(line_idx + 1) .unwrap_or(src.len_bytes()) as u32; line_highlights.is_empty(); if pos > line_start && !highlight_stack.is_empty() { line_highlights.push((line_start..pos.min(line_end), Vec::new())) } } } if !line_highlights.is_empty() { res.push_str(&ident); res.extend( src.byte_slice(line_start as usize..line_end as usize) .chunks(), ); if !res.ends_with('\n') { res.push('\n'); } annotate_line( comment_prefix, src, line_start, &mut line_highlights, &mut res, false, ); line_start = line_end; } for line in src.byte_slice(line_start as usize..).lines() { if line.len_bytes() != 0 { wln!(res, "{ident}{line}") } } res } pub fn injections_fixture( comment_prefix: &str, loader: &impl LanguageLoader, get_language_name: impl Fn(Language) -> String, syntax: &Syntax, src: RopeSlice<'_>, range: impl RangeBounds, ) -> String { let start = match range.start_bound() { Bound::Included(&i) => i, Bound::Excluded(&i) => i + 1, Bound::Unbounded => 0, }; let end = match range.end_bound() { Bound::Included(&i) => i - 1, Bound::Excluded(&i) => i, Bound::Unbounded => src.len_bytes(), }; let ident = " ".repeat(comment_prefix.width()); let lang = syntax.layer(syntax.root).language; let language_config = loader.get_config(lang).unwrap(); let query = Query::new(language_config.grammar, "", |_, _| unreachable!()).unwrap(); let mut query_iter = QueryIter::<_, ()>::new(syntax, src, |_| Some(&query), start as u32..); let event = query_iter.next(); let mut injection_stack = Vec::new(); let mut pos = if let Some(QueryIterEvent::EnterInjection(injection)) = event { 
let language = syntax.layer(injection.layer).language; injection_stack.push(get_language_name(language)); injection.range.start } else { end as u32 }; let mut line_idx = src.byte_to_line(pos as usize); let mut line_start = src.line_to_byte(line_idx) as u32; let mut line_end = src.line_to_byte(line_idx + 1) as u32; let mut line_labels = Vec::new(); let mut res = String::new(); for line in src.byte_slice(..line_start as usize).lines() { if line.len_bytes() != 0 { wln!(res, "{ident}{line}") } } let mut errors = String::new(); while pos < end as u32 { let Some(event) = query_iter.next() else { break; }; let mut start = pos; pos = event.start_byte(); if pos == u32::MAX { pos = src.len_bytes() as u32 } if pos <= start { wln!( errors, "INVALID RANGE: {start}..{pos} {:?} {:?}", src.byte_slice(pos as usize..start as usize), injection_stack ); start = pos; } if !injection_stack.is_empty() { let range = start..pos.min(line_end); if !range.is_empty() { line_labels.push((range, injection_stack.clone())) } } if start != pos { while pos >= line_end { res.push_str(&ident); res.extend( src.byte_slice(line_start as usize..line_end as usize) .chunks(), ); annotate_line( comment_prefix, src, line_start, &mut line_labels, &mut res, !injection_stack.is_empty() && pos > line_end, ); line_labels.clear(); line_idx += 1; line_start = line_end; line_end = src .try_line_to_byte(line_idx + 1) .unwrap_or(src.len_bytes()) as u32; if line_start == line_end { break; } if pos > line_start && !injection_stack.is_empty() { line_labels.push((line_start..pos.min(line_end), Vec::new())) } } } match event { QueryIterEvent::EnterInjection(injection) => { injection_stack.push(get_language_name(syntax.layer(injection.layer).language)); } QueryIterEvent::ExitInjection { .. } => { injection_stack.pop(); } QueryIterEvent::Match(_) => unreachable!(), } } if !line_labels.is_empty() { res.push_str(&ident); res.extend( src.byte_slice(line_start as usize..line_end as usize) .chunks(), ); if !res.ends_with('\n') { res.push('\n'); } annotate_line( comment_prefix, src, line_start, &mut line_labels, &mut res, false, ); line_start = line_end; } for line in src.byte_slice(line_start as usize..).lines() { if line.len_bytes() != 0 { wln!(res, "{ident}{line}") } } res } fn annotate_line( comment_prefix: &str, src: RopeSlice<'_>, line_start: u32, annotations: &mut Vec<(Range, Vec)>, dst: &mut String, continued: bool, ) { if annotations.is_empty() { return; } annotations.dedup_by(|(src_range, src_scopes), (dst_range, dst_scopes)| { if dst_scopes == src_scopes && dst_range.end == src_range.start { dst_range.end = src_range.end; true } else { false } }); w!(dst, "{comment_prefix}"); let mut prev_pos = line_start; let mut offsets = Vec::with_capacity(annotations.len()); for (i, (range, labels)) in annotations.iter().enumerate() { let offset = src .byte_slice(prev_pos as usize..range.start as usize) .chars() .map(|c| c.width().unwrap_or(0)) .sum(); let mut width: usize = src .byte_slice(range.start as usize..range.end as usize) .chars() .map(|c| c.width().unwrap_or(0)) .sum(); width = width.saturating_sub(1); offsets.push((offset, width)); let first_char = if labels.is_empty() { "━" } else if width == 0 { if i == annotations.len() - 1 { "╰" } else { "╿" } } else if i == annotations.len() - 1 { "┗" } else { "┡" }; let last_char = if i == annotations.len() - 1 && !labels.is_empty() { "┹" } else if continued && i == annotations.len() - 1 { "━" } else { "┛" }; if width == 0 { w!(dst, "{0:^offset$}{first_char}", ""); } else { width -= 1; w!(dst, 
"{0:^offset$}{first_char}{0:━^width$}{last_char}", ""); } prev_pos = range.end; } let Some(i) = annotations .iter() .position(|(_, scopes)| !scopes.is_empty()) else { wln!(dst); return; }; let highlights = &annotations[i..]; let offset: usize = offsets .drain(..i) .map(|(offset, width)| offset + width + 1) .sum(); offsets[0].0 += offset; w!(dst, "─"); for highlight in &highlights.last().unwrap().1 { w!(dst, " {highlight}") } wln!(dst); for depth in (0..highlights.len().saturating_sub(1)).rev() { w!(dst, "{comment_prefix}"); for &(offset, width) in &offsets[..depth] { w!(dst, "{0:^offset$}│{0:^width$}", ""); } let offset = offsets[depth].0; w!(dst, "{:>offset$}╰─", ""); for highlight in &highlights[depth].1 { w!(dst, " {highlight}") } wln!(dst); } } hx-0.3.0+20250717/highlighter/src/highlighter.rs000066400000000000000000000477331503625671400210140ustar00rootroot00000000000000use std::borrow::Cow; use std::cmp; use std::fmt; use std::mem::replace; use std::num::NonZeroU32; use std::ops::RangeBounds; use std::slice; use std::sync::Arc; use crate::config::{LanguageConfig, LanguageLoader}; use crate::locals::ScopeCursor; use crate::query_iter::{MatchedNode, QueryIter, QueryIterEvent, QueryLoader}; use crate::{Injection, Language, Layer, Syntax}; use arc_swap::ArcSwap; use hashbrown::{HashMap, HashSet}; use ropey::RopeSlice; use tree_sitter::{ query::{self, InvalidPredicateError, Query, UserPredicate}, Capture, Grammar, }; use tree_sitter::{Pattern, QueryMatch}; /// Contains the data needed to highlight code written in a particular language. /// /// This struct is immutable and can be shared between threads. #[derive(Debug)] pub struct HighlightQuery { pub query: Query, highlight_indices: ArcSwap>>, #[allow(dead_code)] /// Patterns that do not match when the node is a local. non_local_patterns: HashSet, local_reference_capture: Option, } impl HighlightQuery { pub(crate) fn new( grammar: Grammar, highlight_query_text: &str, local_query_text: &str, ) -> Result { // Concatenate the highlights and locals queries. let mut query_source = String::with_capacity(highlight_query_text.len() + local_query_text.len()); query_source.push_str(highlight_query_text); query_source.push_str(local_query_text); let mut non_local_patterns = HashSet::new(); let mut query = Query::new(grammar, &query_source, |pattern, predicate| { match predicate { // Allow the `(#set! local.scope-inherits )` property to be parsed. // This information is not used by this query though, it's used in the // injection query instead. UserPredicate::SetProperty { key: "local.scope-inherits", .. } => (), // TODO: `(#is(-not)? local)` applies to the entire pattern. Ideally you // should be able to supply capture(s?) which are each checked. UserPredicate::IsPropertySet { negate: true, key: "local", val: None, } => { non_local_patterns.insert(pattern); } _ => return Err(InvalidPredicateError::unknown(predicate)), } Ok(()) })?; // The highlight query only cares about local.reference captures. All scope and definition // captures can be disabled. 
query.disable_capture("local.scope"); let local_definition_captures: Vec<_> = query .captures() .filter(|&(_, name)| name.starts_with("local.definition.")) .map(|(_, name)| Box::::from(name)) .collect(); for name in local_definition_captures { query.disable_capture(&name); } Ok(Self { highlight_indices: ArcSwap::from_pointee(vec![None; query.num_captures() as usize]), non_local_patterns, local_reference_capture: query.get_capture("local.reference"), query, }) } /// Configures the list of recognized highlight names. /// /// Tree-sitter syntax-highlighting queries specify highlights in the form of dot-separated /// highlight names like `punctuation.bracket` and `function.method.builtin`. Consumers of /// these queries can choose to recognize highlights with different levels of specificity. /// For example, the string `function.builtin` will match against `function.builtin.constructor` /// but will not match `function.method.builtin` and `function.method`. /// /// The closure provided to this function should therefore try to first lookup the full /// name. If no highlight was found for that name it should [`rsplit_once('.')`](str::rsplit_once) /// and retry until a highlight has been found. If none of the parent scopes are defined /// then `Highlight::NONE` should be returned. /// /// When highlighting, results are returned as `Highlight` values, configured by this function. /// The meaning of these indices is up to the user of the implementation. The highlighter /// treats the indices as entirely opaque. pub(crate) fn configure(&self, f: &mut impl FnMut(&str) -> Option) { let highlight_indices = self .query .captures() .map(|(_, capture_name)| f(capture_name)) .collect(); self.highlight_indices.store(Arc::new(highlight_indices)); } } /// Indicates which highlight should be applied to a region of source code. /// /// This type is represented as a non-max u32 - a u32 which cannot be `u32::MAX`. This is checked /// at runtime with assertions in `Highlight::new`. #[derive(Copy, Clone, PartialEq, Eq)] pub struct Highlight(NonZeroU32); impl Highlight { pub const MAX: u32 = u32::MAX - 1; pub const fn new(inner: u32) -> Self { assert!(inner != u32::MAX); // SAFETY: must be non-zero because `inner` is not `u32::MAX`. Self(unsafe { NonZeroU32::new_unchecked(inner ^ u32::MAX) }) } pub const fn get(&self) -> u32 { self.0.get() ^ u32::MAX } pub const fn idx(&self) -> usize { self.get() as usize } } impl fmt::Debug for Highlight { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Highlight").field(&self.get()).finish() } } #[derive(Debug)] struct HighlightedNode { end: u32, highlight: Highlight, } #[derive(Debug, Default)] pub struct LayerData { parent_highlights: usize, dormant_highlights: Vec, } pub struct Highlighter<'a, 'tree, Loader: LanguageLoader> { query: QueryIter<'a, 'tree, HighlightQueryLoader<&'a Loader>, ()>, next_query_event: Option>, /// The stack of currently active highlights. /// The ranges of the highlights stack, so each highlight in the Vec must have a starting /// point `>=` the starting point of the next highlight in the Vec and and ending point `<=` /// the ending point of the next highlight in the Vec. /// /// For a visual: /// /// ```text /// | C | /// | B | /// | A | /// ``` /// /// would be `vec![A, B, C]`. 
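    /// In the sketch above that means `A.end >= B.end >= C.end`: the outermost, longest-lived
    /// highlight sits at the front of the Vec and the innermost one at the back, which is why
    /// `next_highlight_end` can always be read off the last element.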
active_highlights: Vec, next_highlight_end: u32, next_highlight_start: u32, active_config: Option<&'a LanguageConfig>, // The current layer and per-layer state could be tracked on the QueryIter itself (see // `QueryIter::current_layer` and `QueryIter::layer_state`) however the highlighter peeks the // query iter. The query iter is always one event ahead, so it will enter/exit injections // before we get a chance to in the highlighter. So instead we track these on the highlighter. // Also see `Self::advance_query_iter`. current_layer: Layer, layer_states: HashMap, } pub struct HighlightList<'a>(slice::Iter<'a, HighlightedNode>); impl Iterator for HighlightList<'_> { type Item = Highlight; fn next(&mut self) -> Option { self.0.next().map(|node| node.highlight) } fn size_hint(&self) -> (usize, Option) { self.0.size_hint() } } impl DoubleEndedIterator for HighlightList<'_> { fn next_back(&mut self) -> Option { self.0.next_back().map(|node| node.highlight) } } impl ExactSizeIterator for HighlightList<'_> { fn len(&self) -> usize { self.0.len() } } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum HighlightEvent { /// Reset the active set of highlights to the given ones. Refresh, /// Add more highlights which build on the existing highlights. Push, } impl<'a, 'tree: 'a, Loader: LanguageLoader> Highlighter<'a, 'tree, Loader> { pub fn new( syntax: &'tree Syntax, src: RopeSlice<'a>, loader: &'a Loader, range: impl RangeBounds, ) -> Self { let mut query = QueryIter::new(syntax, src, HighlightQueryLoader(loader), range); let active_language = query.current_language(); let mut res = Highlighter { active_config: query.loader().0.get_config(active_language), next_query_event: None, current_layer: query.current_layer(), layer_states: Default::default(), active_highlights: Vec::new(), next_highlight_end: u32::MAX, next_highlight_start: 0, query, }; res.advance_query_iter(); res } pub fn active_highlights(&self) -> HighlightList<'_> { HighlightList(self.active_highlights.iter()) } pub fn next_event_offset(&self) -> u32 { self.next_highlight_start.min(self.next_highlight_end) } pub fn advance(&mut self) -> (HighlightEvent, HighlightList<'_>) { let mut refresh = false; let prev_stack_size = self.active_highlights.len(); let pos = self.next_event_offset(); if self.next_highlight_end == pos { self.process_highlight_end(pos); refresh = true; } let mut first_highlight = true; while self.next_highlight_start == pos { let Some(query_event) = self.advance_query_iter() else { break; }; match query_event { QueryIterEvent::EnterInjection(injection) => self.enter_injection(injection.layer), QueryIterEvent::Match(node) => self.start_highlight(node, &mut first_highlight), QueryIterEvent::ExitInjection { injection, state } => { // `state` is returned if the layer is finished according to the `QueryIter`. // The highlighter should only consider a layer finished, though, when it also // has no remaining ranges to highlight. If the injection is combined and has // highlight(s) past this injection's range then we should deactivate it // (saving the highlights for the layer's next injection range) rather than // removing it. 
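                    // (For example, consecutive `///` doc comments are usually one combined
                    // markdown injection: a `**bold**` span opened in one comment line may only
                    // be closed in a later injection range of the same layer, so its highlight
                    // must be kept dormant rather than dropped here.)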
let layer_is_finished = state.is_some() && self .current_layer_highlights() .iter() .all(|h| h.end <= injection.range.end); if layer_is_finished { self.layer_states.remove(&injection.layer); } else { self.deactivate_layer(injection); refresh = true; } let active_language = self.query.syntax().layer(self.current_layer).language; self.active_config = self.query.loader().0.get_config(active_language); } } } self.next_highlight_end = self .active_highlights .last() .map_or(u32::MAX, |node| node.end); if refresh { ( HighlightEvent::Refresh, HighlightList(self.active_highlights.iter()), ) } else { ( HighlightEvent::Push, HighlightList(self.active_highlights[prev_stack_size..].iter()), ) } } fn advance_query_iter(&mut self) -> Option> { // Track the current layer **before** calling `QueryIter::next`. The QueryIter moves // to the next event with `QueryIter::next` but we're treating that event as peeked - it // hasn't occurred yet - so the current layer is the one the query iter was on _before_ // `QueryIter::next`. self.current_layer = self.query.current_layer(); let event = replace(&mut self.next_query_event, self.query.next()); self.next_highlight_start = self .next_query_event .as_ref() .map_or(u32::MAX, |event| event.start_byte()); event } fn process_highlight_end(&mut self, pos: u32) { let i = self .active_highlights .iter() .rposition(|highlight| highlight.end != pos) .map_or(0, |i| i + 1); self.active_highlights.truncate(i); } fn current_layer_highlights(&self) -> &[HighlightedNode] { let parent_start = self .layer_states .get(&self.current_layer) .map(|layer| layer.parent_highlights) .unwrap_or_default() .min(self.active_highlights.len()); &self.active_highlights[parent_start..] } fn enter_injection(&mut self, layer: Layer) { debug_assert_eq!(layer, self.current_layer); let active_language = self.query.syntax().layer(layer).language; self.active_config = self.query.loader().0.get_config(active_language); let state = self.layer_states.entry(layer).or_default(); state.parent_highlights = self.active_highlights.len(); self.active_highlights.append(&mut state.dormant_highlights); } fn deactivate_layer(&mut self, injection: Injection) { let LayerData { mut parent_highlights, ref mut dormant_highlights, .. } = self.layer_states.get_mut(&injection.layer).unwrap(); parent_highlights = parent_highlights.min(self.active_highlights.len()); dormant_highlights.extend(self.active_highlights.drain(parent_highlights..)); self.process_highlight_end(injection.range.end); } fn start_highlight(&mut self, node: MatchedNode, first_highlight: &mut bool) { let range = node.node.byte_range(); // `::next` skips matches with empty ranges. debug_assert!( !range.is_empty(), "QueryIter should not emit matches with empty ranges" ); let config = self .active_config .expect("must have an active config to emit matches"); let highlight = if Some(node.capture) == config.highlight_query.local_reference_capture { // If this capture was a `@local.reference` from the locals queries, look up the // text of the node in the current locals cursor and use that highlight. 
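            // (A concrete example, assuming a locals query that captures parameters as
            // `@local.definition.parameter`: a later use of `left` inside the function body is
            // captured as `@local.reference` and resolved back to the `variable.parameter`
            // highlight recorded for its definition.)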
let text: Cow = self .query .source() .byte_slice(range.start as usize..range.end as usize) .into(); let Some(definition) = self .query .syntax() .layer(self.current_layer) .locals .lookup_reference(node.scope, &text) .filter(|def| range.start >= def.range.end) else { return; }; config .injection_query .local_definition_captures .load() .get(&definition.capture) .copied() } else { config.highlight_query.highlight_indices.load()[node.capture.idx()] }; let highlight = highlight.map(|highlight| HighlightedNode { end: range.end, highlight, }); // If multiple patterns match this exact node, prefer the last one which matched. // This matches the precedence of Neovim, Zed, and tree-sitter-cli. if !*first_highlight { // NOTE: `!*first_highlight` implies that the start positions are the same. let insert_position = self .active_highlights .iter() .rposition(|h| h.end <= range.end); if let Some(idx) = insert_position { match self.active_highlights[idx].end.cmp(&range.end) { // If there is a prior highlight for this start..end range, replace it. cmp::Ordering::Equal => { if let Some(highlight) = highlight { self.active_highlights[idx] = highlight; } else { self.active_highlights.remove(idx); } } // Captures are emitted in the order that they are finished. Insert any // highlights which start at the same position into the active highlights so // that the ordering invariant remains satisfied. cmp::Ordering::Less => { if let Some(highlight) = highlight { self.active_highlights.insert(idx, highlight) } } // By definition of our `rposition` predicate: cmp::Ordering::Greater => unreachable!(), } } else { self.active_highlights.extend(highlight); } } else if let Some(highlight) = highlight { self.active_highlights.push(highlight); *first_highlight = false; } // `active_highlights` must be a stack of highlight events the highlights stack on the // prior highlights in the Vec. Each highlight's range must be a subset of the highlight's // range before it. debug_assert!( // The assertion is actually true for the entire stack but combined injections // throw a wrench in things: the highlight can end after the current injection. // The highlight is removed from `active_highlights` as the injection layer ends // so the wider assertion would be true in practice. We don't track the injection // end right here though so we can't assert on it. self.current_layer_highlights().is_sorted_by_key(|h| cmp::Reverse(h.end)), "unsorted highlights on layer {:?}: {:?}\nall active highlights must be sorted by `end` descending", self.current_layer, self.active_highlights, ); } } pub(crate) struct HighlightQueryLoader(T); impl<'a, T: LanguageLoader> QueryLoader<'a> for HighlightQueryLoader<&'a T> { fn get_query(&mut self, lang: Language) -> Option<&'a Query> { self.0 .get_config(lang) .map(|config| &config.highlight_query.query) } fn are_predicates_satisfied( &self, lang: Language, mat: &QueryMatch<'_, '_>, source: RopeSlice<'_>, locals_cursor: &ScopeCursor<'_>, ) -> bool { let highlight_query = &self .0 .get_config(lang) .expect("must have a config to emit matches") .highlight_query; // Highlight queries should reject the match when a pattern is marked with // `(#is-not? local)` and any capture in the pattern matches a definition in scope. // // TODO: in the future we should propose that `#is-not? local` takes one or more // captures as arguments. Ideally we would check that the captured node is also captured // by a `local.reference` capture from the locals query but that's really messy to pass // around that information. 
For now we assume that all matches in the pattern are also // captured as `local.reference` in the locals, which covers most cases. if highlight_query.local_reference_capture.is_some() && highlight_query.non_local_patterns.contains(&mat.pattern()) { let has_local_reference = mat.matched_nodes().any(|n| { let range = n.node.byte_range(); let text: Cow = source .byte_slice(range.start as usize..range.end as usize) .into(); locals_cursor .locals .lookup_reference(locals_cursor.current_scope(), &text) .is_some_and(|def| range.start >= def.range.start) }); if has_local_reference { return false; } } true } } hx-0.3.0+20250717/highlighter/src/injections_query.rs000066400000000000000000000703651503625671400221050ustar00rootroot00000000000000use std::cmp::Reverse; use std::iter::{self, Peekable}; use std::mem::take; use std::sync::Arc; use arc_swap::ArcSwap; use hashbrown::{HashMap, HashSet}; use once_cell::sync::Lazy; use regex_cursor::engines::meta::Regex; use ropey::RopeSlice; use crate::config::{LanguageConfig, LanguageLoader}; use crate::highlighter::Highlight; use crate::locals::Locals; use crate::parse::LayerUpdateFlags; use crate::{Injection, Language, Layer, LayerData, Range, Syntax, TREE_SITTER_MATCH_LIMIT}; use tree_sitter::{ query::{self, InvalidPredicateError, UserPredicate}, Capture, Grammar, InactiveQueryCursor, MatchedNodeIdx, Node, Pattern, Query, QueryMatch, }; const SHEBANG: &str = r"#!\s*(?:\S*[/\\](?:env\s+(?:\-\S+\s+)*)?)?([^\s\.\d]+)"; static SHEBANG_REGEX: Lazy = Lazy::new(|| Regex::new(SHEBANG).unwrap()); #[derive(Clone, Default, Debug)] pub struct InjectionProperties { include_children: IncludedChildren, language: Option>, combined: bool, } /// An indicator in the document or query source file which used by the loader to know which /// language an injection should use. /// /// For example if a query sets a property `(#set! injection.language "rust")` then the loader /// should load the Rust language. Alternatively the loader might be asked to load a language /// based on some text in the document, for example a markdown code fence language name. #[derive(Debug, Clone, Copy)] pub enum InjectionLanguageMarker<'a> { /// The language is specified by name in the injection query itself. /// /// For example `(#set! injection.language "rust")`. These names should match exactly and so /// they can be looked up by equality - very efficiently. Name(&'a str), /// The language is specified by name - or similar - within the parsed document. /// /// This is slightly different than the `ExactName` variant: within a document you might /// specify Markdown as "md" or "markdown" for example. The loader should look up the language /// name by longest matching regex. Match(RopeSlice<'a>), Filename(RopeSlice<'a>), Shebang(RopeSlice<'a>), } #[derive(Clone, Debug)] pub struct InjectionQueryMatch<'tree> { include_children: IncludedChildren, language: Language, scope: Option, node: Node<'tree>, last_match: bool, pattern: Pattern, } #[derive(Clone, Debug, Hash, PartialEq, Eq)] enum InjectionScope { Match { id: u32, }, Pattern { pattern: Pattern, language: Language, }, } #[derive(Clone, Copy, Default, Debug, PartialEq, Eq)] enum IncludedChildren { #[default] None, All, Unnamed, } #[derive(Debug)] pub struct InjectionsQuery { injection_query: Query, injection_properties: HashMap, injection_content_capture: Option, injection_language_capture: Option, injection_filename_capture: Option, injection_shebang_capture: Option, // Note that the injections query is concatenated with the locals query. 
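    // (The `local.scope` / `local.definition.*` captures extracted from this query are what
    // `@local.reference` captures in the highlight query are eventually resolved against;
    // see `Highlighter::start_highlight`.)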
pub(crate) local_query: Query, // TODO: Use a Vec instead? pub(crate) not_scope_inherits: HashSet, pub(crate) local_scope_capture: Option, pub(crate) local_definition_captures: ArcSwap>, } impl InjectionsQuery { pub fn new( grammar: Grammar, injection_query_text: &str, local_query_text: &str, ) -> Result { let mut query_source = String::with_capacity(injection_query_text.len() + local_query_text.len()); query_source.push_str(injection_query_text); query_source.push_str(local_query_text); let mut injection_properties: HashMap = HashMap::new(); let mut not_scope_inherits = HashSet::new(); let injection_query = Query::new(grammar, injection_query_text, |pattern, predicate| { match predicate { // injections UserPredicate::SetProperty { key: "injection.include-unnamed-children", val: None, } => { injection_properties .entry(pattern) .or_default() .include_children = IncludedChildren::Unnamed } UserPredicate::SetProperty { key: "injection.include-children", val: None, } => { injection_properties .entry(pattern) .or_default() .include_children = IncludedChildren::All } UserPredicate::SetProperty { key: "injection.language", val: Some(lang), } => injection_properties.entry(pattern).or_default().language = Some(lang.into()), UserPredicate::SetProperty { key: "injection.combined", val: None, } => injection_properties.entry(pattern).or_default().combined = true, predicate => { return Err(InvalidPredicateError::unknown(predicate)); } } Ok(()) })?; let mut local_query = Query::new(grammar, local_query_text, |pattern, predicate| { match predicate { UserPredicate::SetProperty { key: "local.scope-inherits", val, } => { if val.is_some_and(|val| val != "true") { not_scope_inherits.insert(pattern); } } predicate => { return Err(InvalidPredicateError::unknown(predicate)); } } Ok(()) })?; // The injection queries do not track references - these are read by the highlight // query instead. 
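        // (The highlighter resolves `@local.reference` itself in `Highlighter::start_highlight`,
        // so the locals query kept here only needs scope and definition captures.)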
local_query.disable_capture("local.reference"); Ok(InjectionsQuery { injection_properties, injection_content_capture: injection_query.get_capture("injection.content"), injection_language_capture: injection_query.get_capture("injection.language"), injection_filename_capture: injection_query.get_capture("injection.filename"), injection_shebang_capture: injection_query.get_capture("injection.shebang"), injection_query, not_scope_inherits, local_scope_capture: local_query.get_capture("local.scope"), local_definition_captures: ArcSwap::from_pointee(HashMap::new()), local_query, }) } pub(crate) fn configure(&self, f: &mut impl FnMut(&str) -> Option) { let local_definition_captures = self .local_query .captures() .filter_map(|(capture, name)| { let suffix = name.strip_prefix("local.definition.")?; Some((capture, f(suffix)?)) }) .collect(); self.local_definition_captures .store(Arc::new(local_definition_captures)); } fn process_match<'a, 'tree>( &self, query_match: &QueryMatch<'a, 'tree>, node_idx: MatchedNodeIdx, source: RopeSlice<'a>, loader: impl LanguageLoader, ) -> Option> { let properties = self.injection_properties.get(&query_match.pattern()); let mut marker = None; let mut last_content_node = 0; let mut content_nodes = 0; for (i, matched_node) in query_match.matched_nodes().enumerate() { let capture = Some(matched_node.capture); if capture == self.injection_language_capture { let range = matched_node.node.byte_range(); marker = Some(InjectionLanguageMarker::Match( source.byte_slice(range.start as usize..range.end as usize), )); } else if capture == self.injection_filename_capture { let range = matched_node.node.byte_range(); marker = Some(InjectionLanguageMarker::Filename( source.byte_slice(range.start as usize..range.end as usize), )); } else if capture == self.injection_shebang_capture { let range = matched_node.node.byte_range(); let node_slice = source.byte_slice(range.start as usize..range.end as usize); // some languages allow space and newlines before the actual string content // so a shebang could be on either the first or second line let lines = if let Ok(end) = node_slice.try_line_to_byte(2) { node_slice.byte_slice(..end) } else { node_slice }; marker = SHEBANG_REGEX .captures_iter(regex_cursor::Input::new(lines)) .map(|cap| { let cap = lines.byte_slice(cap.get_group(1).unwrap().range()); InjectionLanguageMarker::Shebang(cap) }) .next() } else if capture == self.injection_content_capture { content_nodes += 1; last_content_node = i as u32; } } let marker = marker.or(properties .and_then(|p| p.language.as_deref()) .map(InjectionLanguageMarker::Name))?; let language = loader.language_for_marker(marker)?; let scope = if properties.is_some_and(|p| p.combined) { Some(InjectionScope::Pattern { pattern: query_match.pattern(), language, }) } else if content_nodes != 1 { Some(InjectionScope::Match { id: query_match.id(), }) } else { None }; Some(InjectionQueryMatch { language, scope, include_children: properties.map(|p| p.include_children).unwrap_or_default(), node: query_match.matched_node(node_idx).node.clone(), last_match: last_content_node == node_idx, pattern: query_match.pattern(), }) } /// Executes the query on the given input and return an iterator of /// injection ranges together with their injection properties /// /// The ranges yielded by the iterator have an ascending start range. /// The ranges do not overlap exactly (matches of the exact same node are /// resolved with normal precedence rules). However, ranges can be nested. 
/// For example: /// /// ``` no-compile /// | range 2 | /// | range 1 | /// ``` /// is possible and will always result in iteration order [range1, range2]. /// This case should be handled by the calling function fn execute<'a>( &'a self, node: &Node<'a>, source: RopeSlice<'a>, loader: &'a impl LanguageLoader, ) -> impl Iterator> + 'a { let mut cursor = InactiveQueryCursor::new(0..u32::MAX, TREE_SITTER_MATCH_LIMIT) .execute_query(&self.injection_query, node, source); let injection_content_capture = self.injection_content_capture.unwrap(); let iter = iter::from_fn(move || loop { let (query_match, node_idx) = cursor.next_matched_node()?; if query_match.matched_node(node_idx).capture != injection_content_capture { continue; } let Some(mat) = self.process_match(&query_match, node_idx, source, loader) else { query_match.remove(); continue; }; let range = query_match.matched_node(node_idx).node.byte_range(); if mat.last_match { query_match.remove(); } if range.is_empty() { continue; } break Some(mat); }); let mut buf = Vec::new(); let mut iter = iter.peekable(); // handle identical/overlapping matches to correctly account for precedence iter::from_fn(move || { if let Some(mat) = buf.pop() { return Some(mat); } let mut res = iter.next()?; // if children are not included then nested injections don't // interfere with each other unless exactly identical. Since // this is the default setting we have a fastpath for it if res.include_children == IncludedChildren::None { let mut fast_return = true; while let Some(overlap) = iter.next_if(|mat| mat.node.byte_range() == res.node.byte_range()) { if overlap.include_children != IncludedChildren::None { buf.push(overlap); fast_return = false; break; } // Prefer the last capture which matches this exact node. res = overlap; } if fast_return { return Some(res); } } // we if can't use the fastpath we accumulate all overlapping matches // and then sort them according to precedence rules... while let Some(overlap) = iter.next_if(|mat| mat.node.end_byte() <= res.node.end_byte()) { buf.push(overlap) } if buf.is_empty() { return Some(res); } buf.push(res); buf.sort_unstable_by_key(|mat| (mat.pattern, Reverse(mat.node.start_byte()))); buf.pop() }) } } impl Syntax { pub(crate) fn run_injection_query( &mut self, layer: Layer, edits: &[tree_sitter::InputEdit], source: RopeSlice<'_>, loader: &impl LanguageLoader, mut parse_layer: impl FnMut(Layer), ) { self.map_injections(layer, None, edits); let layer_data = &mut self.layer_mut(layer); let Some(LanguageConfig { injection_query: ref injections_query, .. }) = loader.get_config(layer_data.language) else { return; }; if injections_query.injection_content_capture.is_none() { return; } // work around borrow checker let parent_ranges = take(&mut layer_data.ranges); let parse_tree = layer_data.parse_tree.take().unwrap(); let mut injections: Vec = Vec::with_capacity(layer_data.injections.len()); let mut old_injections = take(&mut layer_data.injections).into_iter().peekable(); let injection_query = injections_query.execute(&parse_tree.root_node(), source, loader); let mut combined_injections: HashMap = HashMap::with_capacity(32); for mat in injection_query { let matched_node_range = mat.node.byte_range(); let mut insert_position = injections.len(); // if a parent node already has an injection ignore this injection // in theory the first condition would be enough to detect that // however in case the parent node does not include children it // is possible that one of these children is another separate // injection. 
In these cases we cannot skip the injection // // also the precedence sorting (and rare intersection) means that // overlapping injections may be sorted not by position but by // precedence (highest precedence first). the code here ensures // that injections get sorted to the correct position if let Some(last_injection) = injections .last() .filter(|injection| ranges_intersect(&injection.range, &matched_node_range)) { // this condition is not needed but serves as fast path // for common cases if last_injection.range.start <= matched_node_range.start { continue; } else { insert_position = injections.partition_point(|injection| { injection.range.end <= matched_node_range.start }); if injections[insert_position].range.start < matched_node_range.end { continue; } } } let language = mat.language; let reused_injection = self.reuse_injection(language, matched_node_range.clone(), &mut old_injections); let layer = match mat.scope { Some(scope @ InjectionScope::Match { .. }) if mat.last_match => { combined_injections.remove(&scope).unwrap_or_else(|| { self.init_injection(layer, mat.language, reused_injection.clone()) }) } Some(scope) => *combined_injections.entry(scope).or_insert_with(|| { self.init_injection(layer, mat.language, reused_injection.clone()) }), None => self.init_injection(layer, mat.language, reused_injection.clone()), }; let mut layer_data = self.layer_mut(layer); if !layer_data.flags.touched { layer_data.flags.touched = true; parse_layer(layer) } if layer_data.flags.reused { layer_data.flags.modified |= reused_injection.as_ref().is_none_or(|injection| { injection.matched_node_range != matched_node_range || injection.layer != layer }); } else if let Some(reused_injection) = reused_injection { layer_data.flags.reused = true; layer_data.flags.modified = true; let reused_parse_tree = self.layer(reused_injection.layer).tree().cloned(); layer_data = self.layer_mut(layer); layer_data.parse_tree = reused_parse_tree; } let old_len = injections.len(); intersect_ranges(mat.include_children, mat.node, &parent_ranges, |range| { layer_data.ranges.push(tree_sitter::Range { start_point: tree_sitter::Point::ZERO, end_point: tree_sitter::Point::ZERO, start_byte: range.start, end_byte: range.end, }); injections.push(Injection { range, layer, matched_node_range: matched_node_range.clone(), }); }); if old_len != insert_position { let inserted = injections.len() - old_len; injections[insert_position..].rotate_right(inserted); layer_data.ranges[insert_position..].rotate_right(inserted); } } // Any remaining injections which were not reused should have their layers marked as // modified. These layers might have a new set of ranges (if they were visited) and so // their trees need to be re-parsed. for old_injection in old_injections { self.layer_mut(old_injection.layer).flags.modified = true; } let layer_data = &mut self.layer_mut(layer); layer_data.ranges = parent_ranges; layer_data.parse_tree = Some(parse_tree); layer_data.injections = injections; } /// Maps the layers injection ranges through edits to enable incremental re-parsing. fn map_injections( &mut self, layer: Layer, // TODO: drop this parameter? 
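// (the only call site in this file, `run_injection_query` above, currently passes `None`)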
offset: Option, mut edits: &[tree_sitter::InputEdit], ) { if edits.is_empty() && offset.unwrap_or(0) == 0 { return; } let layer_data = self.layer_mut(layer); let first_relevant_injection = layer_data .injections .partition_point(|injection| injection.range.end < edits[0].start_byte); if first_relevant_injection == layer_data.injections.len() { return; } let mut offset = if let Some(offset) = offset { let first_relevant_edit = edits.partition_point(|edit| { (edit.old_end_byte as i32) < (layer_data.ranges[0].end_byte as i32 - offset) }); edits = &edits[first_relevant_edit..]; offset } else { 0 }; // injections and edits are non-overlapping and sorted so we can // apply edits in O(M+N) instead of O(NM) let mut edits = edits.iter().peekable(); let mut injections = take(&mut layer_data.injections); for injection in &mut injections[first_relevant_injection..] { let injection_range = &mut injection.range; let matched_node_range = &mut injection.matched_node_range; let flags = &mut self.layer_mut(injection.layer).flags; debug_assert!(matched_node_range.start <= injection_range.start); debug_assert!(matched_node_range.end >= injection_range.end); while let Some(edit) = edits.next_if(|edit| edit.old_end_byte < matched_node_range.start) { offset += edit.offset(); } let mut mapped_node_range_start = (matched_node_range.start as i32 + offset) as u32; if let Some(edit) = edits .peek() .filter(|edit| edit.start_byte <= matched_node_range.start) { mapped_node_range_start = (edit.new_end_byte as i32 + offset) as u32; } while let Some(edit) = edits.next_if(|edit| edit.old_end_byte < injection_range.start) { offset += edit.offset(); } flags.moved = offset != 0; let mut mapped_start = (injection_range.start as i32 + offset) as u32; if let Some(edit) = edits.next_if(|edit| edit.old_end_byte <= injection_range.end) { if edit.start_byte < injection_range.start { flags.moved = true; mapped_start = (edit.new_end_byte as i32 + offset) as u32; } else { flags.modified = true; } offset += edit.offset(); while let Some(edit) = edits.next_if(|edit| edit.old_end_byte <= injection_range.end) { offset += edit.offset(); } } let mut mapped_end = (injection_range.end as i32 + offset) as u32; if let Some(edit) = edits .peek() .filter(|edit| edit.start_byte <= injection_range.end) { flags.modified = true; if edit.start_byte < injection_range.start { mapped_start = (edit.new_end_byte as i32 + offset) as u32; mapped_end = mapped_start; } } let mut mapped_node_range_end = (matched_node_range.end as i32 + offset) as u32; if let Some(edit) = edits .peek() .filter(|edit| edit.start_byte <= matched_node_range.end) { if edit.start_byte < matched_node_range.start { mapped_node_range_start = (edit.new_end_byte as i32 + offset) as u32; mapped_node_range_end = mapped_node_range_start; } } *injection_range = mapped_start..mapped_end; *matched_node_range = mapped_node_range_start..mapped_node_range_end; } self.layer_mut(layer).injections = injections; } fn init_injection( &mut self, parent: Layer, language: Language, reuse: Option, ) -> Layer { match reuse { Some(old_injection) => { let layer_data = self.layer_mut(old_injection.layer); debug_assert_eq!(layer_data.parent, Some(parent)); layer_data.flags.reused = true; layer_data.ranges.clear(); old_injection.layer } None => { let layer = self.layers.insert(LayerData { language, parse_tree: None, ranges: Vec::new(), injections: Vec::new(), flags: LayerUpdateFlags::default(), parent: Some(parent), locals: Locals::default(), }); Layer(layer as u32) } } } // TODO: only reuse if same pattern is 
matched fn reuse_injection( &mut self, language: Language, new_range: Range, injections: &mut Peekable>, ) -> Option { while let Some(skipped) = injections.next_if(|injection| injection.range.end <= new_range.start) { // If the layer had an injection and now does not have the injection, consider the // skipped layer to be modified so that its tree is re-parsed. It must be re-parsed // since the skipped layer now has a different set of ranges than it used to. Note // that the layer isn't marked as `touched` so it could be discarded if the layer // is not ever visited. self.layer_mut(skipped.layer).flags.modified = true; } injections .next_if(|injection| { injection.range.start < new_range.end && self.layer(injection.layer).language == language && !self.layer(injection.layer).flags.reused }) .clone() } } fn intersect_ranges( include_children: IncludedChildren, node: Node, parent_ranges: &[tree_sitter::Range], push_range: impl FnMut(Range), ) { let range = node.byte_range(); let i = parent_ranges.partition_point(|parent_range| parent_range.end_byte <= range.start); let parent_ranges = parent_ranges[i..] .iter() .map(|range| range.start_byte..range.end_byte); match include_children { IncludedChildren::None => intersect_ranges_impl( range, node.children().map(|node| node.byte_range()), parent_ranges, push_range, ), IncludedChildren::All => { intersect_ranges_impl(range, [].into_iter(), parent_ranges, push_range) } IncludedChildren::Unnamed => intersect_ranges_impl( range, node.children() .filter(|node| node.is_named()) .map(|node| node.byte_range()), parent_ranges, push_range, ), } } fn intersect_ranges_impl( range: Range, excluded_ranges: impl Iterator, parent_ranges: impl Iterator, mut push_range: impl FnMut(Range), ) { let mut start = range.start; let mut excluded_ranges = excluded_ranges.filter(|range| !range.is_empty()).peekable(); let mut parent_ranges = parent_ranges.peekable(); loop { let parent_range = parent_ranges.peek().unwrap().clone(); if let Some(excluded_range) = excluded_ranges.next_if(|range| range.start <= parent_range.end) { if excluded_range.start >= range.end { break; } if start != excluded_range.start { push_range(start..excluded_range.start) } start = excluded_range.end; } else { parent_ranges.next(); if parent_range.end >= range.end { break; } if start != parent_range.end { push_range(start..parent_range.end) } let Some(next_parent_range) = parent_ranges.peek() else { return; }; start = next_parent_range.start; } } if start != range.end { push_range(start..range.end) } } fn ranges_intersect(a: &Range, b: &Range) -> bool { // Adapted from a.start == b.start || (a.end > b.start && b.end > a.start) } hx-0.3.0+20250717/highlighter/src/lib.rs000066400000000000000000000272501503625671400172540ustar00rootroot00000000000000use locals::Locals; use ropey::RopeSlice; use slab::Slab; use std::fmt; use std::hash::{Hash, Hasher}; use std::time::Duration; use tree_sitter::{IncompatibleGrammarError, Node, Tree}; pub use crate::config::{read_query, LanguageConfig, LanguageLoader}; pub use crate::injections_query::{InjectionLanguageMarker, InjectionsQuery}; use crate::parse::LayerUpdateFlags; pub use crate::tree_cursor::TreeCursor; pub use tree_sitter; // pub use pretty_print::pretty_print_tree; // pub use tree_cursor::TreeCursor; mod config; pub mod highlighter; mod injections_query; mod parse; #[cfg(all(test, feature = "fixtures"))] mod tests; // mod pretty_print; #[cfg(feature = "fixtures")] pub mod fixtures; pub mod locals; pub mod query_iter; pub mod text_object; mod tree_cursor; 
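// A minimal usage sketch (mirroring the calls made in `tests.rs`; here `loader` stands for any
// `LanguageLoader` implementation and `language` for one of the `Language` ids it knows about):
//
// ``` no-compile
// let syntax = Syntax::new(source, language, std::time::Duration::from_secs(1), &loader)?;
// let tree = syntax.tree();
// // `layer_for_byte_range` picks the smallest layer fully covering the byte range.
// let innermost = syntax.layer_for_byte_range(start, end);
// ```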
/// A layer represents a single a single syntax tree that represents (part of) /// a file parsed with a tree-sitter grammar. See [`Syntax`]. #[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] pub struct Layer(u32); impl Layer { fn idx(self) -> usize { self.0 as usize } } #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Language(pub u32); impl Language { pub fn new(idx: u32) -> Language { Language(idx) } pub fn idx(self) -> usize { self.0 as usize } } /// The Tree sitter syntax tree for a single language. /// /// This is really multiple (nested) different syntax trees due to tree sitter /// injections. A single syntax tree/parser is called layer. Each layer /// is parsed as a single "file" by tree sitter. There can be multiple layers /// for the same language. A layer corresponds to one of three things: /// * the root layer /// * a singular injection limited to a single node in its parent layer /// * Multiple injections (multiple disjoint nodes in parent layer) that are /// parsed as though they are a single uninterrupted file. /// /// An injection always refer to a single node into which another layer is /// injected. As injections only correspond to syntax tree nodes injections in /// the same layer do not intersect. However, the syntax tree in a an injected /// layer can have nodes that intersect with nodes from the parent layer. For /// example: /// /// ``` no-compile /// layer2: | Sibling A | Sibling B (layer3) | Sibling C | /// layer1: | Sibling A (layer2) | Sibling B | Sibling C (layer2) | /// ```` /// /// In this case Sibling B really spans across a "GAP" in layer2. While the syntax /// node can not be split up by tree sitter directly, we can treat Sibling B as two /// separate injections. That is done while parsing/running the query capture. As /// a result the injections form a tree. Note that such other queries must account for /// such multi injection nodes. #[derive(Debug)] pub struct Syntax { layers: Slab, root: Layer, } impl Syntax { pub fn new( source: RopeSlice, language: Language, timeout: Duration, loader: &impl LanguageLoader, ) -> Result { let root_layer = LayerData { parse_tree: None, language, flags: LayerUpdateFlags::default(), ranges: vec![tree_sitter::Range { start_byte: 0, end_byte: u32::MAX, start_point: tree_sitter::Point::ZERO, end_point: tree_sitter::Point::MAX, }], injections: Vec::new(), parent: None, locals: Locals::default(), }; let mut layers = Slab::with_capacity(32); let root = layers.insert(root_layer); let mut syntax = Self { root: Layer(root as u32), layers, }; syntax.update(source, timeout, &[], loader).map(|_| syntax) } pub fn layer(&self, layer: Layer) -> &LayerData { &self.layers[layer.idx()] } fn layer_mut(&mut self, layer: Layer) -> &mut LayerData { &mut self.layers[layer.idx()] } pub fn root(&self) -> Layer { self.root } pub fn tree(&self) -> &Tree { self.layer(self.root) .tree() .expect("`Syntax::new` would err if the root layer's tree could not be parsed") } #[inline] pub fn tree_for_byte_range(&self, start: u32, end: u32) -> &Tree { self.layer_and_tree_for_byte_range(start, end).1 } /// Finds the smallest layer which has a parse tree and covers the given range. pub(crate) fn layer_and_tree_for_byte_range(&self, start: u32, end: u32) -> (Layer, &Tree) { let mut layer = self.layer_for_byte_range(start, end); loop { // NOTE: this loop is guaranteed to terminate because the root layer always has a // tree. 
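// (`Syntax::new`/`Syntax::update` return `Error::NoRootConfig` when the root layer could not
// be parsed, so a successfully constructed `Syntax` always has a root tree.)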
if let Some(tree) = self.layer(layer).tree() { return (layer, tree); } if let Some(parent) = self.layer(layer).parent { layer = parent; } } } #[inline] pub fn named_descendant_for_byte_range(&self, start: u32, end: u32) -> Option> { self.tree_for_byte_range(start, end) .root_node() .named_descendant_for_byte_range(start, end) } #[inline] pub fn descendant_for_byte_range(&self, start: u32, end: u32) -> Option> { self.tree_for_byte_range(start, end) .root_node() .descendant_for_byte_range(start, end) } /// Finds the smallest injection layer that fully includes the range `start..=end`. pub fn layer_for_byte_range(&self, start: u32, end: u32) -> Layer { self.layers_for_byte_range(start, end) .last() .expect("always includes the root layer") } /// Returns an iterator of layers which **fully include** the byte range `start..=end`, /// in decreasing order based on the size of each layer. /// /// The first layer is always the `root` layer. pub fn layers_for_byte_range(&self, start: u32, end: u32) -> impl Iterator + '_ { let mut parent_injection_layer = self.root; std::iter::once(self.root).chain(std::iter::from_fn(move || { let layer = &self.layers[parent_injection_layer.idx()]; let injection_at_start = layer.injection_at_byte_idx(start)?; // +1 because the end is exclusive. let injection_at_end = layer.injection_at_byte_idx(end + 1)?; (injection_at_start.layer == injection_at_end.layer).then(|| { parent_injection_layer = injection_at_start.layer; injection_at_start.layer }) })) } pub fn walk(&self) -> TreeCursor { TreeCursor::new(self) } } #[derive(Debug, Clone)] pub struct Injection { pub range: Range, pub layer: Layer, matched_node_range: Range, } #[derive(Debug)] pub struct LayerData { pub language: Language, parse_tree: Option, ranges: Vec, /// a list of **sorted** non-overlapping injection ranges. Note that /// injection ranges are not relative to the start of this layer but the /// start of the root layer injections: Vec, /// internal flags used during parsing to track incremental invalidation flags: LayerUpdateFlags, parent: Option, locals: Locals, } /// This PartialEq implementation only checks if that /// two layers are theoretically identical (meaning they highlight the same text range with the same language). /// It does not check whether the layers have the same internal tree-sitter /// state. impl PartialEq for LayerData { fn eq(&self, other: &Self) -> bool { self.parent == other.parent && self.language == other.language && self.ranges == other.ranges } } /// Hash implementation belongs to PartialEq implementation above. /// See its documentation for details. impl Hash for LayerData { fn hash(&self, state: &mut H) { self.parent.hash(state); self.language.hash(state); self.ranges.hash(state); } } impl LayerData { /// Returns the parsed `Tree` for this layer. /// /// This `Option` will always be `Some` when the `LanguageLoader` passed to `Syntax::new` /// returns `Some` when passed the layer's language in `LanguageLoader::get_config`. pub fn tree(&self) -> Option<&Tree> { self.parse_tree.as_ref() } /// Returns the injection range **within this layers** that contains `idx`. /// This function will not descend into nested injections pub fn injection_at_byte_idx(&self, idx: u32) -> Option<&Injection> { self.injections_at_byte_idx(idx) .next() .filter(|injection| injection.range.start <= idx) } /// Returns the injection ranges **within this layers** that contain /// `idx` or start after idx. This function will not descend into nested /// injections. 
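///
/// A hypothetical illustration (not taken from the test suite): with injections covering
/// `5..10` and `20..30`, the first injection yielded for `idx = 12` is the one at `20..30`.
///
/// ``` no-compile
/// let mut iter = layer.injections_at_byte_idx(12);
/// assert_eq!(iter.next().unwrap().range, 20..30);
/// ```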
pub fn injections_at_byte_idx(&self, idx: u32) -> impl Iterator { let i = self .injections .partition_point(|range| range.range.end < idx); self.injections[i..].iter() } } /// Represents the reason why syntax highlighting failed. #[derive(Debug, PartialEq, Eq)] pub enum Error { Timeout, ExceededMaximumSize, InvalidRanges, Unknown, NoRootConfig, IncompatibleGrammar(Language, IncompatibleGrammarError), } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Timeout => f.write_str("configured timeout was exceeded"), Self::ExceededMaximumSize => f.write_str("input text exceeds the maximum allowed size"), Self::InvalidRanges => f.write_str("invalid ranges"), Self::Unknown => f.write_str("an unknown error occurred"), Self::NoRootConfig => f.write_str( "`LanguageLoader::get_config` for the root layer language returned `None`", ), Self::IncompatibleGrammar(language, IncompatibleGrammarError { abi_version }) => { write!( f, "failed to load grammar for language {language:?} with ABI version {abi_version}" ) } } } } /// The maximum number of in-progress matches a TS cursor can consider at once. /// This is set to a constant in order to avoid performance problems for medium to large files. Set with `set_match_limit`. /// Using such a limit means that we lose valid captures, so there is fundamentally a tradeoff here. /// /// /// Old tree sitter versions used a limit of 32 by default until this limit was removed in version `0.19.5` (must now be set manually). /// However, this causes performance issues for medium to large files. /// In Helix, this problem caused tree-sitter motions to take multiple seconds to complete in medium-sized rust files (3k loc). /// /// /// Neovim also encountered this problem and reintroduced this limit after it was removed upstream /// (see and ). /// The number used here is fundamentally a tradeoff between breaking some obscure edge cases and performance. /// /// /// Neovim chose 64 for this value somewhat arbitrarily (). /// 64 is too low for some languages though. In particular, it breaks some highlighting for record fields in Erlang record definitions. /// This number can be increased if new syntax highlight breakages are found, as long as the performance penalty is not too high. 
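///
/// Within this crate the limit is handed to each query cursor, e.g. (a sketch of the pattern
/// used in `injections_query.rs` and `locals.rs`):
///
/// ``` no-compile
/// let cursor = InactiveQueryCursor::new(0..u32::MAX, TREE_SITTER_MATCH_LIMIT)
///     .execute_query(&query, &root_node, source);
/// ```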
pub const TREE_SITTER_MATCH_LIMIT: u32 = 256; // use 32 bit ranges since TS doesn't support files larger than 2GiB anyway // and it allows us to save a lot memory/improve cache efficiency type Range = std::ops::Range; hx-0.3.0+20250717/highlighter/src/locals.rs000066400000000000000000000207711503625671400177640ustar00rootroot00000000000000use std::{ borrow::Cow, ops::{Index, IndexMut}, }; use hashbrown::HashMap; use kstring::KString; use ropey::RopeSlice; use tree_sitter::{Capture, InactiveQueryCursor}; use crate::{LanguageConfig, LanguageLoader, Layer, Range, Syntax, TREE_SITTER_MATCH_LIMIT}; #[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] pub struct Scope(u32); impl Scope { const ROOT: Scope = Scope(0); fn idx(self) -> usize { self.0 as usize } } #[derive(Debug)] pub struct Locals { scopes: Vec, } impl Default for Locals { fn default() -> Self { let mut scopes = Vec::with_capacity(4); scopes.push(ScopeData { definitions: HashMap::new(), range: 0..u32::MAX, inherit: false, children: Vec::new(), parent: None, }); Self { scopes } } } impl Locals { fn push(&mut self, scope: ScopeData) -> Scope { let new_scope_id = Scope(self.scopes.len() as u32); let parent = scope .parent .expect("push cannot be used for the root layer"); self[parent].children.push(new_scope_id); self.scopes.push(scope); new_scope_id } pub fn lookup_reference(&self, mut scope: Scope, name: &str) -> Option<&Definition> { loop { let scope_data = &self[scope]; if let Some(def) = scope_data.definitions.get(name) { return Some(def); } if !scope_data.inherit { break; } scope = scope_data.parent?; } None } pub fn scope_cursor(&self, pos: u32) -> ScopeCursor<'_> { let mut scope = Scope::ROOT; let mut scope_stack = Vec::with_capacity(8); loop { let scope_data = &self[scope]; let child_idx = scope_data .children .partition_point(|&child| self[child].range.end < pos); scope_stack.push((scope, child_idx as u32)); let Some(&child) = scope_data.children.get(child_idx) else { break; }; if pos < self[child].range.start { break; } scope = child; } ScopeCursor { locals: self, scope_stack, } } } impl Index for Locals { type Output = ScopeData; fn index(&self, scope: Scope) -> &Self::Output { &self.scopes[scope.idx()] } } impl IndexMut for Locals { fn index_mut(&mut self, scope: Scope) -> &mut Self::Output { &mut self.scopes[scope.idx()] } } #[derive(Debug)] pub struct ScopeCursor<'a> { pub locals: &'a Locals, scope_stack: Vec<(Scope, u32)>, } impl ScopeCursor<'_> { pub fn advance(&mut self, to: u32) -> Scope { let (mut active_scope, mut child_idx) = self.scope_stack.pop().unwrap(); loop { let scope_data = &self.locals[active_scope]; if to < scope_data.range.end { break; } (active_scope, child_idx) = self.scope_stack.pop().unwrap(); child_idx += 1; } 'outer: loop { let scope_data = &self.locals[active_scope]; loop { let Some(&child) = scope_data.children.get(child_idx as usize) else { break 'outer; }; if self.locals[child].range.start > to { break 'outer; } if to < self.locals[child].range.end { self.scope_stack.push((active_scope, child_idx)); active_scope = child; child_idx = 0; break; } child_idx += 1; } } self.scope_stack.push((active_scope, child_idx)); active_scope } pub fn current_scope(&self) -> Scope { // The root scope is always active so `scope_stack` is never empty. self.scope_stack.last().unwrap().0 } } #[derive(Debug)] pub struct Definition { pub capture: Capture, pub range: Range, } #[derive(Debug)] pub struct ScopeData { definitions: HashMap, range: Range, inherit: bool, /// A list of sorted, non-overlapping child scopes. 
/// /// See the docs of the `Locals` type: locals information is laid out like a tree - similar /// to injections - per injection layer. children: Vec, parent: Option, } impl Syntax { pub(crate) fn run_local_query( &mut self, layer: Layer, source: RopeSlice<'_>, loader: &impl LanguageLoader, ) { let layer_data = &mut self.layer_mut(layer); let Some(LanguageConfig { ref injection_query, .. }) = loader.get_config(layer_data.language) else { return; }; let definition_captures = injection_query.local_definition_captures.load(); if definition_captures.is_empty() { return; } let root = layer_data.parse_tree.as_ref().unwrap().root_node(); let mut cursor = InactiveQueryCursor::new(0..u32::MAX, TREE_SITTER_MATCH_LIMIT) .execute_query(&injection_query.local_query, &root, source); let mut locals = Locals::default(); let mut scope = Scope::ROOT; while let Some((query_match, node_idx)) = cursor.next_matched_node() { let matched_node = query_match.matched_node(node_idx); let range = matched_node.node.byte_range(); let capture = matched_node.capture; while range.start >= locals[scope].range.end { scope = locals[scope].parent.expect("root node covers entire range"); } if Some(capture) == injection_query.local_scope_capture { scope = locals.push(ScopeData { definitions: HashMap::new(), range: matched_node.node.byte_range(), inherit: !injection_query .not_scope_inherits .contains(&query_match.pattern()), children: Vec::new(), parent: Some(scope), }); } else if definition_captures.contains_key(&capture) { let text = match source .byte_slice(range.start as usize..range.end as usize) .into() { Cow::Borrowed(inner) => KString::from_ref(inner), Cow::Owned(inner) => KString::from_string(inner), }; locals[scope] .definitions .insert(text, Definition { capture, range }); } // NOTE: `local.reference` captures are handled by the highlighter and are not // considered during parsing. } layer_data.locals = locals; } } #[cfg(test)] mod test { use super::*; #[test] fn cursor() { let mut locals = Locals::default(); let scope1 = locals.push(ScopeData { definitions: Default::default(), range: 5..105, inherit: true, // NOTE: the subsequent call to `push` below will add scope2 to scope1's children. 
children: Default::default(), parent: Some(Scope::ROOT), }); let scope2 = locals.push(ScopeData { definitions: Default::default(), range: 10..100, inherit: true, children: Default::default(), parent: Some(scope1), }); let mut cursor = locals.scope_cursor(0); assert_eq!(cursor.current_scope(), Scope::ROOT); assert_eq!(cursor.advance(3), Scope::ROOT); assert_eq!(cursor.advance(5), scope1); assert_eq!(cursor.advance(8), scope1); assert_eq!(cursor.advance(10), scope2); assert_eq!(cursor.advance(50), scope2); assert_eq!(cursor.advance(100), scope1); assert_eq!(cursor.advance(105), Scope::ROOT); assert_eq!(cursor.advance(110), Scope::ROOT); let mut cursor = locals.scope_cursor(8); assert_eq!(cursor.current_scope(), scope1); assert_eq!(cursor.advance(10), scope2); assert_eq!(cursor.advance(100), scope1); assert_eq!(cursor.advance(110), Scope::ROOT); let mut cursor = locals.scope_cursor(10); assert_eq!(cursor.current_scope(), scope2); assert_eq!(cursor.advance(100), scope1); assert_eq!(cursor.advance(110), Scope::ROOT); } } hx-0.3.0+20250717/highlighter/src/parse.rs000066400000000000000000000115271503625671400176200ustar00rootroot00000000000000use std::mem::take; use std::time::Duration; use ropey::RopeSlice; use tree_sitter::Parser; use crate::config::LanguageLoader; use crate::{Error, LayerData, Syntax}; impl Syntax { pub fn update( &mut self, source: RopeSlice, timeout: Duration, edits: &[tree_sitter::InputEdit], loader: &impl LanguageLoader, ) -> Result<(), Error> { // size limit of 512MiB, TS just cannot handle files this big (too // slow). Furthermore, TS uses 32 (signed) bit indices so this limit // must never be raised above 2GiB if source.len_bytes() >= 512 * 1024 * 1024 { return Err(Error::ExceededMaximumSize); } let mut queue = Vec::with_capacity(32); let root_flags = &mut self.layer_mut(self.root).flags; // The root layer is always considered. root_flags.touched = true; // If there was an edit then the root layer must've been modified. root_flags.modified = true; queue.push(self.root); let mut parser = Parser::new(); parser.set_timeout(timeout); while let Some(layer) = queue.pop() { let layer_data = self.layer_mut(layer); if layer_data.ranges.is_empty() { // Skip re-parsing and querying layers without any ranges. continue; } if let Some(tree) = &mut layer_data.parse_tree { if layer_data.flags.moved || layer_data.flags.modified { for edit in edits.iter().rev() { // Apply the edits in reverse. // If we applied them in order then edit 1 would disrupt the positioning // of edit 2. tree.edit(edit); } } if layer_data.flags.modified { // Re-parse the tree. 
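// `LayerData::parse` passes the edited old tree back to the parser, so this re-parse is
// incremental (unless the markdown-related workaround in `parse` discards the old tree).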
layer_data.parse(&mut parser, source, loader)?; } } else { // always parse if this layer has never been parsed before layer_data.parse(&mut parser, source, loader)?; } self.run_injection_query(layer, edits, source, loader, |layer| queue.push(layer)); self.run_local_query(layer, source, loader); } if self.layer(self.root).parse_tree.is_none() { return Err(Error::NoRootConfig); } self.prune_dead_layers(); Ok(()) } /// Reset all `LayerUpdateFlags` and remove all untouched layers fn prune_dead_layers(&mut self) { self.layers .retain(|_, layer| take(&mut layer.flags).touched); } } impl LayerData { fn parse( &mut self, parser: &mut Parser, source: RopeSlice, loader: &impl LanguageLoader, ) -> Result<(), Error> { let Some(config) = loader.get_config(self.language) else { return Ok(()); }; if let Err(err) = parser.set_grammar(config.grammar) { return Err(Error::IncompatibleGrammar(self.language, err)); } parser .set_included_ranges(&self.ranges) .map_err(|_| Error::InvalidRanges)?; // HACK: // This is a workaround for a bug within the lexer (in the C library) or maybe within // tree-sitter-markdown which needs more debugging. When adding a new range to a combined // injection and passing the old tree, if the old tree doesn't already cover a wider range // than the newly added range, some assumptions are violated in the lexer and it tries to // access some invalid memory, resulting in a segfault. This workaround avoids that // situation by avoiding passing the old tree when the old tree's range doesn't cover the // total range of `self.ranges`. // // See . let tree = self.parse_tree.as_ref().filter(|tree| { let included_ranges_range = self.ranges.first().map(|r| r.start_byte).unwrap_or(0) ..self.ranges.last().map(|r| r.end_byte).unwrap_or(u32::MAX); // Allow re-parsing the root layer even though the range is larger. The root always // covers `0..u32::MAX`: if included_ranges_range == (0..u32::MAX) { return true; } let tree_range = tree.root_node().byte_range(); tree_range.start <= included_ranges_range.start && tree_range.end >= included_ranges_range.end }); let tree = parser.parse(source, tree).ok_or(Error::Timeout)?; self.parse_tree = Some(tree); Ok(()) } } #[derive(Debug, PartialEq, Eq, Default, Clone)] pub(crate) struct LayerUpdateFlags { pub reused: bool, pub modified: bool, pub moved: bool, pub touched: bool, } hx-0.3.0+20250717/highlighter/src/pretty_print.rs000066400000000000000000000031611503625671400212440ustar00rootroot00000000000000use std::fmt; use tree_sitter::{SyntaxTreeNode, TreeCursor}; pub fn pretty_print_tree(fmt: &mut W, node: SyntaxTreeNode) -> fmt::Result { if node.child_count() == 0 { if node_is_visible(&node) { write!(fmt, "({})", node.kind()) } else { write!(fmt, "\"{}\"", node.kind()) } } else { pretty_print_tree_impl(fmt, &mut node.walk(), 0) } } fn pretty_print_tree_impl( fmt: &mut W, cursor: &mut TreeCursor, depth: usize, ) -> fmt::Result { let node = cursor.node(); let visible = node_is_visible(&node); if visible { let indentation_columns = depth * 2; write!(fmt, "{:indentation_columns$}", "")?; if let Some(field_name) = cursor.field_name() { write!(fmt, "{}: ", field_name)?; } write!(fmt, "({}", node.kind())?; } // Handle children. if cursor.goto_first_child() { loop { if node_is_visible(&cursor.node()) { fmt.write_char('\n')?; } pretty_print_tree_impl(fmt, cursor, depth + 1)?; if !cursor.goto_next_sibling() { break; } } let moved = cursor.goto_parent(); // The parent of the first child must exist, and must be `node`. 
debug_assert!(moved); debug_assert!(cursor.node() == node); } if visible { fmt.write_char(')')?; } Ok(()) } fn node_is_visible(node: &SyntaxTreeNode) -> bool { node.is_missing() || (node.is_named() && node.language().node_kind_is_visible(node.kind_id())) } hx-0.3.0+20250717/highlighter/src/query_iter.rs000066400000000000000000000306221503625671400206730ustar00rootroot00000000000000use core::slice; use std::iter::Peekable; use std::mem::replace; use std::ops::RangeBounds; use hashbrown::{HashMap, HashSet}; use ropey::RopeSlice; use crate::{ locals::{Scope, ScopeCursor}, Injection, Language, Layer, Range, Syntax, TREE_SITTER_MATCH_LIMIT, }; use tree_sitter::{ Capture, InactiveQueryCursor, Node, Pattern, Query, QueryCursor, QueryMatch, RopeInput, }; #[derive(Debug, Clone)] pub struct MatchedNode<'tree> { pub match_id: u32, pub pattern: Pattern, pub node: Node<'tree>, pub capture: Capture, pub scope: Scope, } struct LayerQueryIter<'a, 'tree> { cursor: Option>>, peeked: Option>, language: Language, scope_cursor: ScopeCursor<'tree>, } impl<'a, 'tree> LayerQueryIter<'a, 'tree> { fn peek>( &mut self, source: RopeSlice<'_>, loader: &Loader, ) -> Option<&MatchedNode<'tree>> { if self.peeked.is_none() { loop { // NOTE: we take the cursor here so that if `next_matched_node` is None the // cursor is dropped and returned to the cache eagerly. let mut cursor = self.cursor.take()?; let (query_match, node_idx) = cursor.next_matched_node()?; let node = query_match.matched_node(node_idx); let match_id = query_match.id(); let pattern = query_match.pattern(); let range = node.node.byte_range(); let scope = self.scope_cursor.advance(range.start); if !loader.are_predicates_satisfied( self.language, &query_match, source, &self.scope_cursor, ) { query_match.remove(); self.cursor = Some(cursor); continue; } self.peeked = Some(MatchedNode { match_id, pattern, // NOTE: `Node` is cheap to clone, it's essentially Copy. node: node.node.clone(), capture: node.capture, scope, }); self.cursor = Some(cursor); break; } } self.peeked.as_ref() } fn consume(&mut self) -> MatchedNode<'tree> { self.peeked.take().unwrap() } } struct ActiveLayer<'a, 'tree, S> { state: S, query_iter: LayerQueryIter<'a, 'tree>, injections: Peekable>, } // data only needed when entering and exiting injections // separate struck to keep the QueryIter reasonably small struct QueryIterLayerManager<'a, 'tree, Loader, S> { range: Range, loader: Loader, src: RopeSlice<'a>, syntax: &'tree Syntax, active_layers: HashMap>>, active_injections: Vec, /// Layers which are known to have no more captures. finished_layers: HashSet, } impl<'a, 'tree: 'a, Loader, S> QueryIterLayerManager<'a, 'tree, Loader, S> where Loader: QueryLoader<'a>, S: Default, { fn init_layer(&mut self, injection: Injection) -> Box> { self.active_layers .remove(&injection.layer) .unwrap_or_else(|| { let layer = self.syntax.layer(injection.layer); let start_point = injection.range.start.max(self.range.start); let injection_start = layer .injections .partition_point(|child| child.range.end < start_point); let cursor = if self.finished_layers.contains(&injection.layer) { // If the layer has no more captures, skip creating a cursor. 
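// (`finished_layers` is filled in `exit_injection` once a layer's query iterator and its
// injection list are both exhausted.)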
None } else { self.loader .get_query(layer.language) .and_then(|query| Some((query, layer.tree()?.root_node()))) .map(|(query, node)| { InactiveQueryCursor::new(self.range.clone(), TREE_SITTER_MATCH_LIMIT) .execute_query(query, &node, RopeInput::new(self.src)) }) }; Box::new(ActiveLayer { state: S::default(), query_iter: LayerQueryIter { language: layer.language, cursor, peeked: None, scope_cursor: layer.locals.scope_cursor(self.range.start), }, injections: layer.injections[injection_start..].iter().peekable(), }) }) } } pub struct QueryIter<'a, 'tree, Loader: QueryLoader<'a>, LayerState = ()> { layer_manager: Box>, current_layer: Box>, current_injection: Injection, } impl<'a, 'tree: 'a, Loader, LayerState> QueryIter<'a, 'tree, Loader, LayerState> where Loader: QueryLoader<'a>, LayerState: Default, { pub fn new( syntax: &'tree Syntax, src: RopeSlice<'a>, loader: Loader, range: impl RangeBounds, ) -> Self { let start = match range.start_bound() { std::ops::Bound::Included(&i) => i, std::ops::Bound::Excluded(&i) => i + 1, std::ops::Bound::Unbounded => 0, }; let end = match range.end_bound() { std::ops::Bound::Included(&i) => i + 1, std::ops::Bound::Excluded(&i) => i, std::ops::Bound::Unbounded => src.len_bytes() as u32, }; let range = start..end; let node = syntax.tree().root_node(); // create fake injection for query root let injection = Injection { range: node.byte_range(), layer: syntax.root, matched_node_range: node.byte_range(), }; let mut layer_manager = Box::new(QueryIterLayerManager { range, loader, src, syntax, // TODO: reuse allocations with an allocation pool active_layers: HashMap::with_capacity(8), active_injections: Vec::with_capacity(8), finished_layers: HashSet::with_capacity(8), }); Self { current_layer: layer_manager.init_layer(injection.clone()), current_injection: injection, layer_manager, } } #[inline] pub fn source(&self) -> RopeSlice<'a> { self.layer_manager.src } #[inline] pub fn syntax(&self) -> &'tree Syntax { self.layer_manager.syntax } #[inline] pub fn loader(&mut self) -> &mut Loader { &mut self.layer_manager.loader } #[inline] pub fn current_layer(&self) -> Layer { self.current_injection.layer } #[inline] pub fn current_injection(&mut self) -> (Injection, &mut LayerState) { ( self.current_injection.clone(), &mut self.current_layer.state, ) } #[inline] pub fn current_language(&self) -> Language { self.layer_manager .syntax .layer(self.current_injection.layer) .language } pub fn layer_state(&mut self, layer: Layer) -> &mut LayerState { if layer == self.current_injection.layer { &mut self.current_layer.state } else { &mut self .layer_manager .active_layers .get_mut(&layer) .unwrap() .state } } fn enter_injection(&mut self, injection: Injection) { let active_layer = self.layer_manager.init_layer(injection.clone()); let old_injection = replace(&mut self.current_injection, injection); let old_layer = replace(&mut self.current_layer, active_layer); self.layer_manager .active_layers .insert(old_injection.layer, old_layer); self.layer_manager.active_injections.push(old_injection); } fn exit_injection(&mut self) -> Option<(Injection, Option)> { let injection = replace( &mut self.current_injection, self.layer_manager.active_injections.pop()?, ); let mut layer = replace( &mut self.current_layer, self.layer_manager .active_layers .remove(&self.current_injection.layer)?, ); let layer_unfinished = layer.query_iter.peeked.is_some() || layer.injections.peek().is_some(); if layer_unfinished { self.layer_manager .active_layers .insert(injection.layer, layer); Some((injection, 
None)) } else { self.layer_manager.finished_layers.insert(injection.layer); Some((injection, Some(layer.state))) } } } impl<'a, 'tree: 'a, Loader, S> Iterator for QueryIter<'a, 'tree, Loader, S> where Loader: QueryLoader<'a>, S: Default, { type Item = QueryIterEvent<'tree, S>; fn next(&mut self) -> Option { loop { let next_injection = self .current_layer .injections .peek() .filter(|injection| injection.range.start <= self.current_injection.range.end); let next_match = self .current_layer .query_iter .peek(self.layer_manager.src, &self.layer_manager.loader) .filter(|matched_node| { matched_node.node.start_byte() <= self.current_injection.range.end }); match (next_match, next_injection) { (None, None) => { return self.exit_injection().map(|(injection, state)| { QueryIterEvent::ExitInjection { injection, state } }); } (Some(mat), _) if mat.node.byte_range().is_empty() => { self.current_layer.query_iter.consume(); continue; } (Some(_), None) => { // consume match let matched_node = self.current_layer.query_iter.consume(); return Some(QueryIterEvent::Match(matched_node)); } (Some(matched_node), Some(injection)) if matched_node.node.start_byte() < injection.range.end => { // consume match let matched_node = self.current_layer.query_iter.consume(); // ignore nodes that are overlapped by the injection if matched_node.node.start_byte() <= injection.range.start || injection.range.end < matched_node.node.end_byte() { return Some(QueryIterEvent::Match(matched_node)); } } (Some(_), Some(_)) | (None, Some(_)) => { // consume injection let injection = self.current_layer.injections.next().unwrap(); self.enter_injection(injection.clone()); return Some(QueryIterEvent::EnterInjection(injection.clone())); } } } } } #[derive(Debug)] pub enum QueryIterEvent<'tree, State = ()> { EnterInjection(Injection), Match(MatchedNode<'tree>), ExitInjection { injection: Injection, state: Option, }, } impl QueryIterEvent<'_, S> { pub fn start_byte(&self) -> u32 { match self { QueryIterEvent::EnterInjection(injection) => injection.range.start, QueryIterEvent::Match(mat) => mat.node.start_byte(), QueryIterEvent::ExitInjection { injection, .. 
} => injection.range.end, } } } pub trait QueryLoader<'a> { fn get_query(&mut self, lang: Language) -> Option<&'a Query>; fn are_predicates_satisfied( &self, _lang: Language, _match: &QueryMatch<'_, '_>, _source: RopeSlice<'_>, _locals_cursor: &ScopeCursor<'_>, ) -> bool { true } } impl<'a, F> QueryLoader<'a> for F where F: FnMut(Language) -> Option<&'a Query>, { fn get_query(&mut self, lang: Language) -> Option<&'a Query> { (self)(lang) } } hx-0.3.0+20250717/highlighter/src/tests.rs000066400000000000000000000452251503625671400176520ustar00rootroot00000000000000use std::borrow::Cow; use std::cell::RefCell; use std::fs; use std::path::{Path, PathBuf}; use indexmap::{IndexMap, IndexSet}; use once_cell::sync::Lazy; use once_cell::unsync::OnceCell; use skidder::Repo; use tree_sitter::{Grammar, InputEdit, Point}; use crate::config::{LanguageConfig, LanguageLoader}; use crate::fixtures::{check_highlighter_fixture, check_injection_fixture}; use crate::highlighter::Highlight; use crate::injections_query::InjectionLanguageMarker; use crate::{Language, Layer, Syntax}; const PARSE_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(1); static GRAMMARS: Lazy> = Lazy::new(|| { let skidder_config = skidder_config(); skidder::fetch(&skidder_config, false).unwrap(); skidder::build_all_grammars(&skidder_config, false, None).unwrap(); let grammars = skidder::list_grammars(&skidder_config).unwrap(); assert!(!grammars.is_empty()); grammars }); fn skidder_config() -> skidder::Config { skidder::Config { repos: vec![Repo::Local { // `./test-grammars` in the root of the repo. path: Path::new("../test-grammars").canonicalize().unwrap(), }], index: PathBuf::new(), verbose: true, } } #[derive(Debug, Clone, Default)] struct Overwrites { highlights: Option, locals: Option, injections: Option, } fn get_grammar(lang_name: &str, overwrites: &Overwrites) -> LanguageConfig { let skidder_config = skidder_config(); let grammar_dir = skidder_config.grammar_dir(lang_name).unwrap(); let parser_path = skidder::build_grammar(&skidder_config, lang_name, false).unwrap(); let grammar = unsafe { Grammar::new(lang_name, &parser_path).unwrap() }; let highlights_query_path = grammar_dir.join("highlights.scm"); let injections_query_path = grammar_dir.join("injections.scm"); if !injections_query_path.exists() { println!("\x1b[36mskipping loading of injections for {lang_name:?} since {injections_query_path:?} does not exist\x1b[0m"); } let locals_query_path = grammar_dir.join("locals.scm"); if !locals_query_path.exists() { println!("\x1b[36mskipping loading of locals for {lang_name:?} since {locals_query_path:?} does not exist\x1b[0m"); } LanguageConfig::new( grammar, &overwrites.highlights.clone().unwrap_or_else(|| { fs::read_to_string(&highlights_query_path) .map_err(|err| { format!( "failed to read highlights in {}: {err}", highlights_query_path.display() ) }) .unwrap() }), &overwrites .injections .clone() .unwrap_or_else(|| fs::read_to_string(&injections_query_path).unwrap_or_default()), &overwrites .locals .clone() .unwrap_or_else(|| fs::read_to_string(&locals_query_path).unwrap_or_default()), ) .unwrap() } #[derive(Debug)] struct TestLanguageLoader { languages: IndexMap, lang_config: Box<[OnceCell]>, overwrites: Box<[Overwrites]>, test_theme: RefCell>, } impl TestLanguageLoader { fn new() -> Self { let grammars = &GRAMMARS; Self { lang_config: (0..grammars.len()).map(|_| OnceCell::new()).collect(), overwrites: vec![Overwrites::default(); grammars.len()].into_boxed_slice(), test_theme: RefCell::default(), languages: grammars 
.iter() .enumerate() .map(|(i, grammar)| { ( grammar.file_name().unwrap().to_str().unwrap().to_owned(), Language::new(i as u32), ) }) .collect(), } } fn get(&self, name: &str) -> Language { self.languages[name] } // TODO: remove on first use. #[allow(dead_code)] fn overwrite_injections(&mut self, lang: &str, content: String) { let lang = self.get(lang); self.overwrites[lang.idx()].injections = Some(content); self.lang_config[lang.idx()] = OnceCell::new(); } fn overwrite_highlights(&mut self, lang: &str, content: String) { let lang = self.get(lang); self.overwrites[lang.idx()].highlights = Some(content); self.lang_config[lang.idx()] = OnceCell::new(); } fn shadow_injections(&mut self, lang: &str, content: &str) { let lang = self.get(lang); let skidder_config = skidder_config(); let grammar = self.languages.get_index(lang.idx()).unwrap().0; let grammar_dir = skidder_config.grammar_dir(grammar).unwrap(); let mut injections = fs::read_to_string(grammar_dir.join("injections.scm")).unwrap_or_default(); injections.push('\n'); injections.push_str(content); self.overwrites[lang.idx()].injections = Some(injections); self.lang_config[lang.idx()] = OnceCell::new(); } fn shadow_highlights(&mut self, lang: &str, content: &str) { let lang = self.get(lang); let skidder_config = skidder_config(); let grammar = self.languages.get_index(lang.idx()).unwrap().0; let grammar_dir = skidder_config.grammar_dir(grammar).unwrap(); let mut highlights = fs::read_to_string(grammar_dir.join("highlights.scm")).unwrap(); highlights.push('\n'); highlights.push_str(content); self.overwrites[lang.idx()].highlights = Some(highlights); self.lang_config[lang.idx()] = OnceCell::new(); } } impl LanguageLoader for TestLanguageLoader { fn language_for_marker(&self, marker: InjectionLanguageMarker) -> Option { match marker { InjectionLanguageMarker::Name(name) => self.languages.get(name).copied(), InjectionLanguageMarker::Match(text) => { let name: Cow = text.into(); self.languages.get(name.as_ref()).copied() } _ => unimplemented!(), } } fn get_config(&self, lang: Language) -> Option<&LanguageConfig> { let config = self.lang_config[lang.idx()].get_or_init(|| { let config = get_grammar( self.languages.get_index(lang.idx()).unwrap().0, &self.overwrites[lang.idx()], ); let mut theme = self.test_theme.borrow_mut(); config.configure(|scope| { Some(Highlight::new(theme.insert_full(scope.to_owned()).0 as u32)) }); config }); Some(config) } } fn lang_for_path(path: &Path, loader: &TestLanguageLoader) -> Language { match path .extension() .and_then(|it| it.to_str()) .unwrap_or_default() { "rs" => loader.get("rust"), "html" => loader.get("html"), "css" => loader.get("css"), "erl" => loader.get("erlang"), "md" => loader.get("markdown"), extension => panic!("unknown file type .{extension}"), } } fn highlight_fixture(loader: &TestLanguageLoader, fixture: impl AsRef) { let path = Path::new("../fixtures").join(fixture); let lang = lang_for_path(&path, loader); check_highlighter_fixture( path, "// ", lang, loader, |highlight| loader.test_theme.borrow()[highlight.idx()].clone(), |_| .., ) } fn injection_fixture(loader: &TestLanguageLoader, fixture: impl AsRef) { let path = Path::new("../fixtures").join(fixture); let lang = lang_for_path(&path, loader); check_injection_fixture( path, "// ", lang, loader, |lang| loader.languages.get_index(lang.idx()).unwrap().0.clone(), |_| .., ) } #[test] fn highlight() { let loader = TestLanguageLoader::new(); highlight_fixture(&loader, "highlighter/hello_world.rs"); } #[test] fn layers() { let loader = 
TestLanguageLoader::new(); let input = "/// Says hello. /// /// this is *markdown-inline* markdown /// /// # Example /// /// ```rust /// fn add(left: usize, right: usize) -> usize { /// left + right /// } /// ``` pub fn hello() {}"; let syntax = Syntax::new(input.into(), loader.get("rust"), PARSE_TIMEOUT, &loader).unwrap(); let assert_injection = |snippet: &str, expected: &[&str]| { assert!(!expected.is_empty(), "all layers have at least 1 injection"); let layer_lang_name = |layer: Layer| { loader .languages .get_index(syntax.layer(layer).language.idx()) .unwrap() .0 .clone() }; let snippet_start = input.find(snippet).unwrap() as u32; let snippet_end = snippet_start + snippet.len() as u32; let layers = syntax .layers_for_byte_range(snippet_start, snippet_end) .map(layer_lang_name) .collect::>(); assert_eq!(&layers, expected, r#"snippet: "{snippet}""#); let layer = syntax.layer_for_byte_range(snippet_start, snippet_end); assert_eq!( &layer_lang_name(layer), expected.last().unwrap(), "last layer is the smallest fully encompassing layer" ); }; // Rust function in a code block in the rust documentation assert_injection("fn add(left: usize, ri", &["rust", "markdown", "rust"]); // Markdown heading `# Example` assert_injection("# Example", &["rust", "markdown"]); // Outer-most Rust function `hello` assert_injection("pub fn hello() {}", &["rust"]); // Paragraph in the rust documentation assert_injection("markdown-inline", &["rust", "markdown", "markdown-inline"]); } #[test] fn highlight_overlaps_with_injection() { let loader = TestLanguageLoader::new(); // The comment node is highlighted both by the comment capture and as an injection for the // comment grammar. highlight_fixture(&loader, "highlighter/comment.html"); } #[test] fn rust_parameter_locals() { let loader = TestLanguageLoader::new(); highlight_fixture(&loader, "highlighter/rust_parameter_locals.rs"); } #[test] fn codefence_rust_doc_comments() { let loader = TestLanguageLoader::new(); highlight_fixture(&loader, "highlighter/codefence_rust_doc_comments.md"); } #[test] fn parameters_within_injections_within_injections() { let loader = TestLanguageLoader::new(); // The root language is Rust. Then markdown is injected in a doc comment. Then within that // we have a code fence which is Rust again. Within that block we check that locals are // highlighted as expected. highlight_fixture(&loader, "highlighter/injectionception.rs"); injection_fixture(&loader, "injections/injectionception.rs"); } #[test] fn html_in_edoc_in_erlang() { let loader = TestLanguageLoader::new(); // This fixture exhibited a bug (which has been fixed) where a combined injection became // dormant at the same time as a new highlight started, causing a total reset of all // highlights (incorrectly). highlight_fixture(&loader, "highlighter/html_in_edoc_in_erlang.erl"); injection_fixture(&loader, "injections/html_in_edoc_in_erlang.erl"); } #[test] fn non_local_pattern() { let mut loader = TestLanguageLoader::new(); // Pretend that `this` is a builtin like `self`, but only when it is not a parameter. loader.shadow_highlights( "rust", r#" ((identifier) @variable.builtin (#eq? @variable.builtin "this") (#is-not? local)) "#, ); highlight_fixture(&loader, "highlighter/non_local.rs"); } #[test] fn reference_highlight_starts_after_definition_ends() { let loader = TestLanguageLoader::new(); // In this example the function name matches one of the parameters. 
The function name can be // a reference but since the definition occurs after the function name it, the function name // should not be highlighted as a parameter. highlight_fixture( &loader, "highlighter/reference_highlight_starts_after_definition_ends.rs", ); } #[test] fn combined_injection() { let mut loader = TestLanguageLoader::new(); loader.shadow_injections( "rust", r#" ((doc_comment) @injection.content (#set! injection.language "markdown") (#set! injection.combined))"#, ); highlight_fixture(&loader, "highlighter/rust_doc_comment.rs"); } #[test] fn injection_in_child() { let mut loader = TestLanguageLoader::new(); // here doc_comment is a child of line_comment which has higher precedence // however since it doesn't include children the doc_comment injection is // still active here. This could probably use a more real world use case (maybe nix?) loader.shadow_injections( "rust", r#" ([(line_comment) (block_comment)] @injection.content (#set! injection.language "comment")) ([(line_comment (doc_comment) @injection.content) (block_comment (doc_comment) @injection.content)] (#set! injection.language "markdown") (#set! injection.combined)) "#, ); highlight_fixture(&loader, "highlighter/rust_doc_comment.rs"); injection_fixture(&loader, "injections/rust_doc_comment.rs"); } #[test] fn injection_precedence() { let mut loader = TestLanguageLoader::new(); loader.shadow_injections( "rust", r#" ([(line_comment) (block_comment)] @injection.content (#set! injection.language "comment") (#set! injection.include-children)) ([(line_comment (doc_comment) @injection.content) (block_comment (doc_comment) @injection.content)] (#set! injection.language "markdown") (#set! injection.combined)) "#, ); highlight_fixture(&loader, "highlighter/rust_doc_comment.rs"); loader.shadow_injections( "rust", r#" ([(line_comment (doc_comment) @injection.content) (block_comment (doc_comment) @injection.content)] (#set! injection.language "markdown") (#set! injection.combined)) ([(line_comment) (block_comment)] @injection.content (#set! injection.language "comment") (#set! injection.include-children)) "#, ); highlight_fixture(&loader, "highlighter/rust_no_doc_comment.rs"); injection_fixture(&loader, "injections/rust_no_doc_comment.rs"); loader.shadow_injections( "rust", r#" ((macro_invocation macro: [ (scoped_identifier name: (_) @_macro_name) (identifier) @_macro_name ] (token_tree . (_) . (_) . (string_literal) @injection.content)) (#any-of? @_macro_name ; std "some_macro") (#set! injection.language "rust") (#set! injection.include-children)) "#, ); injection_fixture(&loader, "injections/overlapping_injection.rs"); } #[test] fn rust_nested_vec() { let loader = TestLanguageLoader::new(); highlight_fixture(&loader, "highlighter/nested_vec.rs"); injection_fixture(&loader, "injections/nested_vec.rs"); } #[test] fn edit_remove_and_add_injection_layer() { let loader = TestLanguageLoader::new(); // Add another backtick, causing the double old backtick to become a codefence and the second // HTML comment to become the codefence's body. // When we reuse the injection for the HTML comments, we need to be sure to re-parse the HTML // layer so that it recognizes that the second comment is no longer valid. 
let before_text = "\n``\n"; let after_text = "\n```\n"; let edit = InputEdit { start_byte: 10, old_end_byte: 10, new_end_byte: 11, start_point: Point::ZERO, old_end_point: Point::ZERO, new_end_point: Point::ZERO, }; let mut syntax = Syntax::new( before_text.into(), loader.get("markdown"), PARSE_TIMEOUT, &loader, ) .unwrap(); // The test here is that `Syntax::update` can apply the edit `Ok(_)` without panicking. syntax .update(after_text.into(), PARSE_TIMEOUT, &[edit], &loader) .unwrap(); // Now test the inverse. Start with the after text and edit it to be the before text. In this // case an injection is added for the HTML comment. let edit = InputEdit { start_byte: 10, old_end_byte: 11, new_end_byte: 10, start_point: Point::ZERO, old_end_point: Point::ZERO, new_end_point: Point::ZERO, }; let mut syntax = Syntax::new( after_text.into(), loader.get("markdown"), PARSE_TIMEOUT, &loader, ) .unwrap(); // The test here is that `Syntax::update` can apply the edit `Ok(_)` without panicking. syntax .update(before_text.into(), PARSE_TIMEOUT, &[edit], &loader) .unwrap(); } #[test] fn markdown_bold_highlight() { let loader = TestLanguageLoader::new(); // This is a very simple case to check that adjacent equivalent highlights are merged // properly: the `punctuation.bracket` highlight on the consecutive `*`s should be combined // into one span. highlight_fixture(&loader, "highlighter/markdown_bold.md"); } #[test] fn css_parent_child_highlight_precedence() { let mut loader = TestLanguageLoader::new(); // NOTE: the pattern being tested here `((color_value) "#") @string.special` is odd and should // be rewritten to `(color_value "#" @string.special)` - that was probably the original intent // of the pattern. We overwrite the highlights to take the parts we need for this case so that // if/when we fix that pattern in the future it does not break this test case. loader.overwrite_highlights( "css", r##" ((property_name) @variable (#match? @variable "^--")) "#" @punctuation ((color_value) "#") @string.special (color_value) @string.special [";" ":"] @punctuation.delimiter "## .to_string(), ); // In this case two patterns fight over the `#` character and both should actually highlight // it. Because of the odd way that the pattern `((color_value) "#") @string.special` is // written, the QueryIter yields the captures in the opposite order it would normally: // first the child pattern `{Node # 9..10}` and then `{Node color_value 9..13}`. // // This case checks the invariant that "`active_highlights` ends are sorted descending" is // preserved. highlight_fixture(&loader, "highlighter/parent_child_highlight_precedence.css"); } #[test] fn edoc_code_combined_injection() { let loader = TestLanguageLoader::new(); highlight_fixture(&loader, "highlighter/edoc_code_combined_injection.erl"); injection_fixture(&loader, "injections/edoc_code_combined_injection.erl"); } #[test] fn edoc_code_combined_injection_in_markdown() { let loader = TestLanguageLoader::new(); // Same as the above but within markdown to add extra layers. 
highlight_fixture( &loader, "highlighter/edoc_code_combined_injection_in_markdown.md", ); } hx-0.3.0+20250717/highlighter/src/text_object.rs000066400000000000000000000051661503625671400210220ustar00rootroot00000000000000// TODO: rework using query iter use std::iter; use ropey::RopeSlice; use crate::TREE_SITTER_MATCH_LIMIT; use tree_sitter::{InactiveQueryCursor, Node, Query, RopeInput}; #[derive(Debug)] pub enum CapturedNode<'a> { Single(Node<'a>), /// Guaranteed to be not empty Grouped(Vec>), } impl CapturedNode<'_> { pub fn start_byte(&self) -> usize { match self { Self::Single(n) => n.start_byte() as usize, Self::Grouped(ns) => ns[0].start_byte() as usize, } } pub fn end_byte(&self) -> usize { match self { Self::Single(n) => n.end_byte() as usize, Self::Grouped(ns) => ns.last().unwrap().end_byte() as usize, } } } #[derive(Debug)] pub struct TextObjectQuery { pub query: Query, } impl TextObjectQuery { /// Run the query on the given node and return sub nodes which match given /// capture ("function.inside", "class.around", etc). /// /// Captures may contain multiple nodes by using quantifiers (+, *, etc), /// and support for this is partial and could use improvement. /// /// ```query /// (comment)+ @capture /// /// ; OR /// ( /// (comment)* /// . /// (function) /// ) @capture /// ``` pub fn capture_nodes<'a>( &'a self, capture_name: &str, node: Node<'a>, slice: RopeSlice<'a>, cursor: InactiveQueryCursor, ) -> Option>> { self.capture_nodes_any(&[capture_name], node, slice, cursor) } /// Find the first capture that exists out of all given `capture_names` /// and return sub nodes that match this capture. pub fn capture_nodes_any<'a>( &'a self, capture_names: &[&str], node: Node<'a>, slice: RopeSlice<'a>, mut cursor: InactiveQueryCursor, ) -> Option>> { let capture = capture_names .iter() .find_map(|cap| self.query.get_capture(cap))?; cursor.set_match_limit(TREE_SITTER_MATCH_LIMIT); let mut cursor = cursor.execute_query(&self.query, &node, RopeInput::new(slice)); let capture_node = iter::from_fn(move || { let (mat, _) = cursor.next_matched_node()?; Some(mat.nodes_for_capture(capture).cloned().collect()) }) .filter_map(move |nodes: Vec<_>| { if nodes.len() > 1 { Some(CapturedNode::Grouped(nodes)) } else { nodes.into_iter().map(CapturedNode::Single).next() } }); Some(capture_node) } } hx-0.3.0+20250717/highlighter/src/tree_cursor.rs000066400000000000000000000111721503625671400210360ustar00rootroot00000000000000use std::collections::VecDeque; use crate::tree_sitter::Node; use crate::{Layer, Syntax}; pub struct TreeCursor<'tree> { syntax: &'tree Syntax, current: Layer, cursor: tree_sitter::TreeCursor<'tree>, } impl<'tree> TreeCursor<'tree> { pub(crate) fn new(syntax: &'tree Syntax) -> Self { let cursor = syntax.tree().walk(); Self { syntax, current: syntax.root, cursor, } } pub fn node(&self) -> Node<'tree> { self.cursor.node() } pub fn goto_parent(&mut self) -> bool { if self.cursor.goto_parent() { return true; }; loop { // Ascend to the parent layer if one exists. let Some(parent) = self.syntax.layer(self.current).parent else { return false; }; self.current = parent; if let Some(tree) = self.syntax.layer(self.current).tree() { self.cursor = tree.walk(); break; } } true } pub fn goto_parent_with
<P>
    (&mut self, predicate: P) -> bool where P: Fn(&Node) -> bool, { while self.goto_parent() { if predicate(&self.node()) { return true; } } false } pub fn goto_first_child(&mut self) -> bool { let range = self.cursor.node().byte_range(); let layer = self.syntax.layer(self.current); if let Some((layer, tree)) = layer .injection_at_byte_idx(range.start) .filter(|injection| injection.range.end >= range.end) .and_then(|injection| { Some((injection.layer, self.syntax.layer(injection.layer).tree()?)) }) { // Switch to the child layer. self.current = layer; self.cursor = tree.walk(); return true; } self.cursor.goto_first_child() } pub fn goto_next_sibling(&mut self) -> bool { self.cursor.goto_next_sibling() } pub fn goto_previous_sibling(&mut self) -> bool { self.cursor.goto_previous_sibling() } pub fn reset_to_byte_range(&mut self, start: u32, end: u32) { let (layer, tree) = self.syntax.layer_and_tree_for_byte_range(start, end); self.current = layer; self.cursor = tree.walk(); loop { let node = self.cursor.node(); if start < node.start_byte() || end > node.end_byte() { self.cursor.goto_parent(); break; } if self.cursor.goto_first_child_for_byte(start).is_none() { break; } } } /// Returns an iterator over the children of the node the TreeCursor is on /// at the time this is called. pub fn children<'a>(&'a mut self) -> ChildIter<'a, 'tree> { let parent = self.node(); ChildIter { cursor: self, parent, } } } pub struct ChildIter<'a, 'tree> { cursor: &'a mut TreeCursor<'tree>, parent: Node<'tree>, } impl<'tree> Iterator for ChildIter<'_, 'tree> { type Item = Node<'tree>; fn next(&mut self) -> Option { // first iteration, just visit the first child if self.cursor.node() == self.parent { self.cursor.goto_first_child().then(|| self.cursor.node()) } else { self.cursor.goto_next_sibling().then(|| self.cursor.node()) } } } impl<'cursor, 'tree> IntoIterator for &'cursor mut TreeCursor<'tree> { type Item = Node<'tree>; type IntoIter = TreeRecursiveWalker<'cursor, 'tree>; fn into_iter(self) -> Self::IntoIter { let mut queue = VecDeque::new(); let root = self.node(); queue.push_back(root.clone()); TreeRecursiveWalker { cursor: self, queue, root, } } } pub struct TreeRecursiveWalker<'cursor, 'tree> { cursor: &'cursor mut TreeCursor<'tree>, queue: VecDeque>, root: Node<'tree>, } impl<'tree> Iterator for TreeRecursiveWalker<'_, 'tree> { type Item = Node<'tree>; fn next(&mut self) -> Option { let current = self.cursor.node(); if current != self.root && self.cursor.goto_next_sibling() { self.queue.push_back(current); return Some(self.cursor.node()); } while let Some(queued) = self.queue.pop_front() { self.cursor.cursor.reset(&queued); if !self.cursor.goto_first_child() { continue; } return Some(self.cursor.node()); } None } } hx-0.3.0+20250717/skidder/000077500000000000000000000000001503625671400144725ustar00rootroot00000000000000hx-0.3.0+20250717/skidder/Cargo.toml000066400000000000000000000007401503625671400164230ustar00rootroot00000000000000[package] name = "skidder" version = "0.1.0" edition = "2021" description = "A package manager for tree-sitter" authors = ["Pascal Kuthe "] license = "MPL-2.0" repository = "https://github.com/helix-editor/tree-house" readme = "../README.md" rust-version = "1.74.0" [dependencies] anyhow = "1.0" cc = "1.1" indicatif = "0.17" ruzstd = "0.7" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" sha1 = "0.10" tempfile = "3.10" walkdir = "2.5" 
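The manifest above covers everything needed to drive skidder as a library. As a rough, hedged sketch (not shipped with the crate: the repository URL, branch, index path, and grammar name below are hypothetical placeholders), a consumer uses the API from src/lib.rs further down — `Config`, `Repo::Git`, `fetch`, `build_all_grammars`, and `Config::compiled_parser_path` — roughly like this:

use std::path::PathBuf;

fn sync_and_compile_grammars() -> anyhow::Result<()> {
    let config = skidder::Config {
        repos: vec![skidder::Repo::Git {
            // Hypothetical grammar collection; substitute a real repository.
            name: "example-grammars".into(),
            remote: "https://example.com/example-grammars.git".into(),
            branch: "main".into(),
        }],
        // Local directory where repositories are cloned and parsers are built.
        index: PathBuf::from("/tmp/skidder-index"),
        verbose: false,
    };
    // Clone or update every configured repository, then compile each grammar it
    // contains (None = pick a default level of build parallelism).
    skidder::fetch(&config, true)?;
    skidder::build_all_grammars(&config, false, None)?;
    // Look up the compiled shared library for one grammar, if it exists.
    if let Some((name, parser)) = config.compiled_parser_path("rust") {
        println!("{name}: {}", parser.display());
    }
    Ok(())
}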
hx-0.3.0+20250717/skidder/LICENSE000077700000000000000000000000001503625671400167122../LICENSEustar00rootroot00000000000000hx-0.3.0+20250717/skidder/build.rs000066400000000000000000000001671503625671400161430ustar00rootroot00000000000000fn main() { println!( "cargo:rustc-env=BUILD_TARGET={}", std::env::var("TARGET").unwrap() ); } hx-0.3.0+20250717/skidder/src/000077500000000000000000000000001503625671400152615ustar00rootroot00000000000000hx-0.3.0+20250717/skidder/src/build.rs000066400000000000000000000174341503625671400167370ustar00rootroot00000000000000use std::fs::{self, File}; use std::io; use std::path::Path; use std::process::Command; use std::sync::OnceLock; use anyhow::{bail, ensure, Context, Result}; use sha1::{Digest, Sha1}; use tempfile::TempDir; use walkdir::WalkDir; use crate::{decompress, Metadata, LIB_EXTENSION}; type Checksum = [u8; 20]; fn is_fresh(grammar_dir: &Path, force: bool) -> Result<(Checksum, bool)> { let src_dir = grammar_dir.join("src"); let cookie = grammar_dir.join(".BUILD_COOKIE"); let mut hasher = Sha1::new(); for file in WalkDir::new(src_dir) { let file = file?; let file_type = file.file_type(); // Hash any .c, .cc or .h file if !file_type.is_file() { continue; } let file_name = file.file_name(); let Some(file_name) = file_name.to_str() else { continue; }; let Some((_, extension)) = file_name.rsplit_once('.') else { continue; }; if matches!(extension, "h" | "c" | "cc") { continue; } let path = file.path(); hasher.update(file_name.as_bytes()); hasher.update([0, 0, 0, 0]); File::open(path) .and_then(|mut file| io::copy(&mut file, &mut hasher)) .with_context(|| format!("failed to read {}", path.display()))?; hasher.update([0, 0, 0, 0]); } let checksum = hasher.finalize(); if force { return Ok((checksum.into(), false)); } let Ok(prev_checksum) = fs::read(cookie) else { return Ok((checksum.into(), false)); }; Ok((checksum.into(), prev_checksum == checksum[..])) } #[cfg(not(windows))] const SCANNER_OBJECT: &str = "scanner.o"; #[cfg(windows)] const SCANNER_OBJECT: &str = "scanner.obj"; const BUILD_TARGET: &str = env!("BUILD_TARGET"); static CPP_COMPILER: OnceLock = OnceLock::new(); static C_COMPILER: OnceLock = OnceLock::new(); enum CompilerCommand { Build, BuildAndLink { obj_files: Vec<&'static str> }, } impl CompilerCommand { pub fn setup(self, build_dir: &Path, src_dir: &Path, file: &Path, out_file: &str) -> Command { let cpp = file.extension().is_some_and(|ext| ext == "cc"); let compiler = if cpp { CPP_COMPILER.get_or_init(|| { cc::Build::new() .cpp(true) .opt_level(3) .std("c++14") .debug(false) .cargo_metadata(false) .host(BUILD_TARGET) .target(BUILD_TARGET) .get_compiler() }) } else { C_COMPILER.get_or_init(|| { cc::Build::new() // Note that we use a C++ compiler but force C mode below // with "-xc". This is important for compilation of grammars // that have C++ scanners. If we used `cpp(false)` then the // scanner might miss symbols from the C++ standard library. 
.cpp(true) .debug(false) .opt_level(3) .std("c11") .cargo_metadata(false) .host(BUILD_TARGET) .target(BUILD_TARGET) .get_compiler() }) }; let mut cmd = compiler.to_command(); cmd.current_dir(build_dir); if compiler.is_like_msvc() { cmd.args(["/nologo", "/LD", "/utf-8", "/I"]).arg(src_dir); match self { CompilerCommand::Build => { cmd.arg(format!("/Fo{out_file}")).arg("/c").arg(file); } CompilerCommand::BuildAndLink { obj_files } => { cmd.args(obj_files) .arg(file) .arg("/link") .arg(format!("/out:{out_file}")); } } } else { #[cfg(not(windows))] cmd.arg("-fPIC"); cmd.args(["-shared", "-fno-exceptions", "-o", out_file, "-I"]) .arg(src_dir); if cfg!(all( unix, not(any(target_os = "macos", target_os = "illumos")) )) { cmd.arg("-Wl,-z,relro,-z,now"); } match self { CompilerCommand::Build => { cmd.arg("-c"); } CompilerCommand::BuildAndLink { obj_files } => { cmd.args(obj_files); } } if !cpp { cmd.arg("-xc"); } cmd.arg(file); }; cmd } } pub fn build_grammar(grammar_name: &str, grammar_dir: &Path, force: bool) -> Result<()> { let src_dir = grammar_dir.join("src"); let mut parser = src_dir.join("parser.c"); ensure!( parser.exists(), "failed to compile {grammar_name}: {} not found!", parser.display() ); let (hash, fresh) = is_fresh(grammar_dir, force)?; if fresh { return Ok(()); } let build_dir = TempDir::new().context("failed to create temporary build directory")?; let metadata = Metadata::read(&grammar_dir.join("metadata.json")) .with_context(|| format!("failed to read metadata for {grammar_name}"))?; let Some(parser_definition) = metadata.parser_definition() else { bail!("source directories with parser.c files must have parser definition metadata"); }; if parser_definition.compressed { let decompressed_parser = build_dir.path().join(format!("{grammar_name}.c")); let mut dst = File::create(&decompressed_parser).with_context(|| { format!( "failed to create parser.c file in temporary build directory {}", build_dir.path().display() ) })?; File::open(&parser) .map_err(anyhow::Error::from) .and_then(|mut reader| decompress(&mut reader, &mut dst)) .with_context(|| { format!("failed to decompress parser {}", build_dir.path().display()) })?; parser = decompressed_parser; } let mut commands = Vec::new(); let mut obj_files = Vec::new(); if src_dir.join("scanner.c").exists() { let scanner_cmd = CompilerCommand::Build.setup( build_dir.path(), &src_dir, &src_dir.join("scanner.c"), SCANNER_OBJECT, ); obj_files.push(SCANNER_OBJECT); commands.push(scanner_cmd) } else if src_dir.join("scanner.cc").exists() { let scanner_cmd = CompilerCommand::Build.setup( build_dir.path(), &src_dir, &src_dir.join("scanner.cc"), SCANNER_OBJECT, ); obj_files.push(SCANNER_OBJECT); commands.push(scanner_cmd) } let lib_name = format!("{grammar_name}.{LIB_EXTENSION}"); let parser_cmd = CompilerCommand::BuildAndLink { obj_files }.setup( build_dir.path(), &src_dir, &parser, &lib_name, ); commands.push(parser_cmd); for mut cmd in commands { let output = cmd.output().context("Failed to execute compiler")?; if !output.status.success() { bail!( "Parser compilation failed.\nStdout: {}\nStderr: {}", String::from_utf8_lossy(&output.stdout), String::from_utf8_lossy(&output.stderr) ); } } let from = build_dir.path().join(lib_name); let to = grammar_dir.join(grammar_name).with_extension(LIB_EXTENSION); fs::copy(&from, &to).with_context(|| { format!( "failed to copy compiled library from {} to {}", from.display(), to.display() ) })?; let _ = fs::write(grammar_dir.join(".BUILD_COOKIE"), hash); Ok(()) } 
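// A minimal sketch (not part of skidder itself) of driving this module's
// `build_grammar` directly; the crate root exposes a `build_grammar` wrapper that
// resolves the grammar directory from a `Config` instead. The path below is a
// hypothetical placeholder: the directory is expected to hold `metadata.json` plus
// `src/parser.c` (optionally zstd-compressed) and an optional external scanner.
#[allow(dead_code)]
fn rebuild_one_grammar() -> Result<()> {
    let grammar_dir = Path::new("/tmp/skidder-index/example/rust");
    // On success this leaves `rust.so` (or `rust.dll` on Windows) next to the
    // grammar sources and writes a `.BUILD_COOKIE` checksum so that unchanged
    // sources are skipped on the next run.
    build_grammar("rust", grammar_dir, /* force */ false)
}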
hx-0.3.0+20250717/skidder/src/lib.rs000066400000000000000000000330401503625671400163750ustar00rootroot00000000000000use std::fs::File; use std::io::{BufReader, Read, Seek, SeekFrom, Write}; use std::num::NonZeroUsize; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::sync::atomic::{self, AtomicUsize}; use std::sync::Mutex; use std::time::Duration; use std::{fs, io, thread}; use anyhow::{bail, ensure, Context, Result}; use indicatif::{ProgressBar, ProgressStyle}; use ruzstd::frame::ReadFrameHeaderError; use ruzstd::frame_decoder::FrameDecoderError; use ruzstd::{BlockDecodingStrategy, FrameDecoder}; use serde::{Deserialize, Serialize}; #[cfg(not(windows))] const LIB_EXTENSION: &str = "so"; #[cfg(windows)] const LIB_EXTENSION: &str = "dll"; mod build; #[derive(Debug, Serialize, Deserialize)] pub struct Config { pub repos: Vec, pub index: PathBuf, pub verbose: bool, } impl Config { pub fn compiled_parser_path(&self, grammar: &str) -> Option<(String, PathBuf)> { let (repo, metadata) = self.repos.iter().find_map(|repo| { let metadata = repo.read_metadata(self, grammar).ok()?; Some((repo, metadata)) })?; let grammar = match metadata { Metadata::ReuseParser { name, .. } => name, Metadata::ParserDefinition { .. } => grammar.to_string(), }; let parser = repo .dir(self) .join(&grammar) .join(&grammar) .with_extension(LIB_EXTENSION); parser.exists().then_some((grammar, parser)) } pub fn grammar_dir(&self, grammar: &str) -> Option { self.repos.iter().find_map(|repo| { repo.has_grammar(self, grammar) .then(|| repo.dir(self).join(grammar)) }) } fn git(&self, args: &[&str], dir: &Path) -> Result<()> { let mut cmd = Command::new("git"); cmd.args(args).current_dir(dir); if self.verbose { println!("{}: git {}", dir.display(), args.join(" ")) } let status = if self.verbose { cmd.status().context("failed to invoke git")? } else { let res = cmd.output().context("failed to invoke git")?; if !res.status.success() { let _ = io::stdout().write_all(&res.stdout); let _ = io::stderr().write_all(&res.stderr); } res.status }; if !status.success() { bail!("git returned non-zero exit-code: {status}"); } Ok(()) } // TODO: remove? #[allow(dead_code)] fn git_exit_with(&self, args: &[&str], dir: &Path, exitcode: i32) -> Result { let mut cmd = Command::new("git"); cmd.args(args).current_dir(dir); if self.verbose { println!("{}: git {}", dir.display(), args.join(" ")) } if !self.verbose { cmd.stdout(Stdio::piped()); cmd.stderr(Stdio::piped()); } let status = cmd.status().context("failed to invoke git")?; if status.code() == Some(exitcode) { return Ok(true); } if !status.success() { bail!("git returned unexpected exit-code: {status}"); } Ok(false) } fn git_output(&self, args: &[&str], dir: &Path) -> Result { let mut cmd = Command::new("git"); cmd.args(args).current_dir(dir); if self.verbose { println!("{}: git {}", dir.display(), args.join(" ")) } let res = cmd.output().context("failed to invoke git")?; if !res.status.success() { let _ = io::stdout().write_all(&res.stdout); let _ = io::stderr().write_all(&res.stderr); bail!("git returned non-zero exit-code: {}", res.status); } String::from_utf8(res.stdout).context("git returned invalid utf8") } } #[derive(Clone, Debug, Serialize, Deserialize)] pub enum Repo { Git { name: String, remote: String, branch: String, }, Local { path: PathBuf, }, } impl Repo { pub fn dir(&self, config: &Config) -> PathBuf { match self { Repo::Git { name, .. 
} => config.index.join(name), Repo::Local { path } => path.clone(), } } pub fn has_grammar(&self, config: &Config, grammar: &str) -> bool { self.dir(config) .join(grammar) .join("metadata.json") .exists() } pub fn read_metadata(&self, config: &Config, grammar: &str) -> Result { let path = self.dir(config).join(grammar).join("metadata.json"); Metadata::read(&path).with_context(|| format!("failed to read metadata for {grammar}")) } pub fn list_grammars(&self, config: &Config) -> Result> { let dir = self.dir(config); if !dir.exists() { return Ok(vec![]); } fs::read_dir(&dir) .with_context(|| format!("failed to access repository {}", dir.display()))? .map(|dent| { let dent = dent.with_context(|| format!("failed to access repository {}", dir.display()))?; if !dent.file_type()?.is_dir() || dent.file_name().to_str().is_none() { return Ok(None); } let path = dent.path(); let metadata_file = path.join("metadata.json"); if !metadata_file.exists() { return Ok(None); } let metadata = Metadata::read(&metadata_file).with_context(|| { format!("failed to read metadata file {}", metadata_file.display()) })?; Ok(metadata.parser_definition().map(|_| dent.path())) }) .filter_map(|res| res.transpose()) .collect() } pub fn fetch(&self, config: &Config, update: bool) -> Result<()> { let Repo::Git { remote, branch, .. } = self else { return Ok(()); }; let dir = self.dir(config); if dir.join(".git").exists() { let current_branch = config.git_output(&["rev-parse", "--abbrev-ref", "HEAD"], &dir)?; let switch_branch = current_branch != *branch; if !update && !switch_branch { return Ok(()); } if switch_branch { config.git(&["reset", "--hard"], &dir)?; // Cloning with `--single-branch` sets the `remote.origin.fetch` // spec to only fetch the desired branch. Switch this branch to // the new desired branch. config.git( &[ "config", "remote.origin.fetch", &format!("+refs/heads/{branch}:refs/remotes/origin/{branch}"), ], &dir, )?; } config.git(&["fetch", "origin", branch], &dir)?; if switch_branch { // Note that `git switch ` exists but is marked as experimental // at time of writing. `git checkout ` is the tried and // true alternative. config.git(&["checkout", branch], &dir)?; } config.git(&["reset", "--hard", &format!("origin/{branch}")], &dir)?; return Ok(()); } let _ = fs::create_dir_all(&dir); ensure!(dir.exists(), "failed to create directory {}", dir.display()); // intentionally not doing a shallow clone since that makes // incremental updates more exensive, however partial clones are a great // fit since that avoids fetching old parsers (which are not very useful) config.git( &[ "clone", "--single-branch", "--filter=blob:none", "--branch", branch, remote, ".", ], &dir, ) } } pub fn fetch(config: &Config, update_existing_grammar: bool) -> Result<()> { for repo in &config.repos { repo.fetch(config, update_existing_grammar)? } Ok(()) } pub fn build_grammar(config: &Config, grammar: &str, force_rebuild: bool) -> Result { for repo in &config.repos { if repo.has_grammar(config, grammar) { build::build_grammar(grammar, &repo.dir(config).join(grammar), force_rebuild)?; return Ok(repo .dir(config) .join(grammar) .join(grammar) .with_extension(LIB_EXTENSION)); } } bail!("grammar not found in any configured repository") } pub fn list_grammars(config: &Config) -> Result> { let mut res = Vec::new(); for repo in &config.repos { res.append(&mut repo.list_grammars(config)?) 
} res.sort_by(|path1, path2| path1.file_name().cmp(&path2.file_name())); res.dedup_by(|path1, path2| path1.file_name() == path2.file_name()); Ok(res) } pub fn build_all_grammars( config: &Config, force_rebuild: bool, concurrency: Option, ) -> Result { let grammars = list_grammars(config)?; let bar = ProgressBar::new(grammars.len() as u64).with_style( ProgressStyle::with_template("{spinner} {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}") .unwrap(), ); bar.set_message("Compiling"); bar.enable_steady_tick(Duration::from_millis(100)); let i = AtomicUsize::new(0); let concurrency = concurrency .or_else(|| thread::available_parallelism().ok()) .map_or(4, usize::from); let failed = Mutex::new(Vec::new()); thread::scope(|scope| { for _ in 0..concurrency { scope.spawn(|| loop { let Some(grammar) = grammars.get(i.fetch_add(1, atomic::Ordering::Relaxed)) else { break; }; let name = grammar.file_name().unwrap().to_str().unwrap(); if let Err(err) = build::build_grammar(name, grammar, force_rebuild) { for err in err.chain() { bar.println(format!("error: {err}")) } failed.lock().unwrap().push(name.to_owned()) } bar.inc(1); }); } }); let failed = failed.into_inner().unwrap(); if !failed.is_empty() { bail!("failed to build grammars {failed:?}") } Ok(grammars.len()) } // TODO: version the metadata? Or allow unknown fields but warn on them? #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "kebab-case", untagged)] pub enum Metadata { ParserDefinition(ParserDefinition), ReuseParser { /// The name of the grammar to reuse. /// Grammars should only be reused from the same `Repo`. #[serde(rename = "reuse-parser")] name: String, }, } impl Metadata { pub fn parser_definition(self) -> Option { match self { Self::ParserDefinition(parser_definition) => Some(parser_definition), Self::ReuseParser { .. } => None, } } pub fn read(path: &Path) -> Result { let json = fs::read_to_string(path) .with_context(|| format!("couldn't read {}", path.display()))?; serde_json::from_str(&json) .with_context(|| format!("invalid metadata.json file at {}", path.display())) } pub fn write(&self, path: &Path) -> Result<()> { let json = serde_json::to_string_pretty(&self).unwrap(); fs::write(path, json).with_context(|| format!("failed to write {}", path.display())) } } #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "kebab-case", deny_unknown_fields)] pub struct ParserDefinition { /// The git remote of the upstream grammar repository pub repo: String, /// The revision of the git remote when the files were imported pub rev: String, /// The SPDX license identifier of the upstream grammar repository #[serde(default)] pub license: String, /// Whether the `parser.c` file is compressed #[serde(default)] pub compressed: bool, } // ruzstd is a bit manual, if they provided a better Reader implementation this // wouldn't be necessary... they don't do that because using zstd efficiently // apparently requires a seekable reader. Most readers are seekable so just // adding an extra trait bound would help... oh well /// decompresses a file compressed by skidder pub fn decompress(src: &mut File, mut dst: impl Write) -> Result<()> { const BATCH_SIZE: usize = 8 * 1024; let size = src.metadata()?.len(); let mut src = BufReader::new(src); let mut decoder = FrameDecoder::new(); let mut copy_buffer = [0; BATCH_SIZE]; while src.stream_position()? < size { match decoder.reset(&mut src) { Err(FrameDecoderError::ReadFrameHeaderError(ReadFrameHeaderError::SkipFrame { length: skip_size, .. 
})) => { src.seek(SeekFrom::Current(skip_size as i64)).unwrap(); continue; } other => other?, } while !decoder.is_finished() { decoder.decode_blocks(&mut src, BlockDecodingStrategy::UptoBytes(BATCH_SIZE))?; while decoder.can_collect() > BATCH_SIZE { let read = decoder.read(&mut copy_buffer).unwrap(); assert_eq!(read, BATCH_SIZE); dst.write_all(©_buffer)?; } } while decoder.can_collect() != 0 { let read = decoder.read(&mut copy_buffer).unwrap(); dst.write_all(©_buffer[..read])?; } } Ok(()) } hx-0.3.0+20250717/test-grammars/000077500000000000000000000000001503625671400156335ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/comment/000077500000000000000000000000001503625671400172755ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/comment/LICENSE000066400000000000000000000020601503625671400203000ustar00rootroot00000000000000MIT License Copyright (c) 2021 Santos Gallegos Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. hx-0.3.0+20250717/test-grammars/comment/highlights.scm000066400000000000000000000021111503625671400221260ustar00rootroot00000000000000(tag (name) @ui.text (user)? @constant) ; Hint level tags ((tag (name) @hint) (#any-of? @hint "HINT" "MARK" "PASSED" "STUB" "MOCK")) ("text" @hint (#any-of? @hint "HINT" "MARK" "PASSED" "STUB" "MOCK")) ; Info level tags ((tag (name) @info) (#any-of? @info "INFO" "NOTE" "TODO" "PERF" "OPTIMIZE" "PERFORMANCE" "QUESTION" "ASK")) ("text" @info (#any-of? @info "INFO" "NOTE" "TODO" "PERF" "OPTIMIZE" "PERFORMANCE" "QUESTION" "ASK")) ; Warning level tags ((tag (name) @warning) (#any-of? @warning "HACK" "WARN" "WARNING" "TEST" "TEMP")) ("text" @warning (#any-of? @warning "HACK" "WARN" "WARNING" "TEST" "TEMP")) ; Error level tags ((tag (name) @error) (#any-of? @error "BUG" "FIXME" "ISSUE" "XXX" "FIX" "SAFETY" "FIXIT" "FAILED" "DEBUG" "INVARIANT" "COMPLIANCE")) ("text" @error (#any-of? @error "BUG" "FIXME" "ISSUE" "XXX" "FIX" "SAFETY" "FIXIT" "FAILED" "DEBUG" "INVARIANT" "COMPLIANCE")) ; Issue number (#123) ("text" @constant.numeric (#match? @constant.numeric "^#[0-9]+$")) ; User mention (@user) ("text" @tag (#match? 
@tag "^[@][a-zA-Z0-9_-]+$")) (uri) @markup.link.url hx-0.3.0+20250717/test-grammars/comment/metadata.json000066400000000000000000000002341503625671400217470ustar00rootroot00000000000000{ "repo": "https://github.com/stsewd/tree-sitter-comment", "rev": "aefcc2813392eb6ffe509aa0fc8b4e9b57413ee1", "license": "MIT", "compressed": true }hx-0.3.0+20250717/test-grammars/comment/src/000077500000000000000000000000001503625671400200645ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/comment/src/grammar.json000066400000000000000000000012231503625671400224030ustar00rootroot00000000000000(/d-FC!6k+#CW? -[`X.:<7xe!h˿oL~ݰ]J//8öVJ z3o& g'l۰te.dEY%=M=C5lvhr* ֝P`26<+~Owww;pUw&y4iǡCrw`C_(r0]/[j%{+ a=@\6!ҤPȌU @X$ !" '$X8 =l""Kv4.H0S#xrJMtA[s ۩ڀ3pgDuE@& N(Tה6Wb/_`Ep"Ϭ\ r ƚ3T9 -^' >6\لSϚNXc"en$A Rݐ4H s?t|pTs BT “<C$ǩdv53!qq`!_u_6*?ڇ8$چEp,2&pF"s'$qr= 2gсt42mBB` "h:Yh=xm8x .vu&<qdx6Mt0N m w=~*(}s=Ey{1<N xHah0LIZ{'E9d5_JR,'Sʭb'.Bdd 2JUꔕrZ2h;%kX1FEAwR`1J: ,JpY*IM\fB ]`)A]S#)bnF%9䑷q聝O[8}ןP3;H M&ô4mCKL"`^{j?}G#Id(qÏa1^˒7)m^^jQZJzĴmwF+Ze&2mYhk8|(<m}!i"swXUK5+sV d u'ZdTe[kZkZk/Z{ @38djfD$I Rsu4<׹|H$EV`@ DB p"#4`lD1بFF,JK3!8klگ7 ;Zx:3A%.j!?OxOGxߊ-FYNC]lUo ˖TV _Jb5$n$t3*hDOFbZ=4oKXMF:^fz>0PLX qvfOy,GZ8|@aA.? 7eI,F$e}Fsjd۰X)t(9K]12N/`a7]*伈F6OK#ἱ٨2GIj6@Wy-@46B90@)/w,ܲ"nੀ0*gvn6BQl7f9P|@=/fgblb+j3h Xw.'M`h3_`^, 7 j,:9V]Lj!~x:J&⃊m'H@jѽӉKW3!S^͵n̫%"}ɞm҉V-uYόэB~w5e3^rlŞd1C&h?r%%IsF}z7\M$MT#.i;F(ÞEJŋnfGم#f+o l Z~On1Cli<TnͶ}`$F0{TF}|ax[ FdErDg(?dės9s&{5Q6 4âm8PAᶀD@J=qCE) Bi]Jf"<Y6Fdk|z|R{.GR>[K4Z'wKHePAgG2.,9;v 0/V%mNLnfY¹_?@wlgT,5_9y'C厐xOH5aͳ?1(~יƥY04f ZTDQ|D;Y)ߡi2.&ߞֵk׾bIYc!aR~LlH'zQ,Bu՛MvhDqX0>,)0&t2 ^I;; *Mc E=W85\M +B͖x?z\rvy~.<0Qz6^Q6@kVz * <]ȿג`X`*"uUtB sE{jngTSDLRw?6t}AFV֙")&ϮPo0.ҷ.Qi*!z ?]';C`Q,_V*lju! ۤC=@ eT&8T#*-luaa@oW W E)6wmEՒ*Wy<20U^dk)5 Df<78_*k&K|r٫hVuHkn󛤠ME&c;S[3Gd5 K*v66*ͧ(VC_*>1K~y|nMnE/k r vP5Z8̉eu_+Wȇ O.Ҵ)D i:ʨ;C|*-|fYP8{$?O")se7fYZy!DDIJNNIc^\ 47_wAhkmfbŒq%$"Hw Af5D3b![O F(=$c68gE xIOMkHo+JeSgdG65=Yԙ]O飱 -!)ٲ!zKYw#YK"-HG6YW,ݨo|wq[N1,ebeuTC߃=0 ʳEnVP} gx3({ȏ~ 0rA@(jY|pRXrt(Ԕr5m̲M(jٺᰵ aav\>l{)SG7GʃS.=!d_"8Z9ޑ(R 7Mg˱ϿmP9EFجV (W7CAd>!uZ]g@BPs񍄁^Y@o^hx-0.3.0+20250717/test-grammars/comment/src/scanner.c000066400000000000000000000012131503625671400216560ustar00rootroot00000000000000#include #include "tree_sitter_comment/parser.c" #include "tree_sitter_comment/tokens.h" void* tree_sitter_comment_external_scanner_create() { return NULL; } void tree_sitter_comment_external_scanner_destroy(void* payload) { } unsigned tree_sitter_comment_external_scanner_serialize( void* payload, char* buffer) { return 0; } void tree_sitter_comment_external_scanner_deserialize( void* payload, const char* buffer, unsigned length) { } bool tree_sitter_comment_external_scanner_scan( void* payload, TSLexer* lexer, const bool* valid_symbols) { return parse(lexer, valid_symbols); } hx-0.3.0+20250717/test-grammars/comment/src/tree_sitter/000077500000000000000000000000001503625671400224155ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/comment/src/tree_sitter/parser.h000066400000000000000000000124021503625671400240610ustar00rootroot00000000000000#ifndef TREE_SITTER_PARSER_H_ #define TREE_SITTER_PARSER_H_ #ifdef __cplusplus extern "C" { #endif #include #include #include #define ts_builtin_sym_error ((TSSymbol)-1) #define ts_builtin_sym_end 0 #define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 typedef uint16_t TSStateId; #ifndef TREE_SITTER_API_H_ typedef uint16_t TSSymbol; typedef uint16_t TSFieldId; typedef struct TSLanguage TSLanguage; #endif typedef struct { TSFieldId field_id; uint8_t child_index; bool inherited; } 
TSFieldMapEntry; typedef struct { uint16_t index; uint16_t length; } TSFieldMapSlice; typedef struct { bool visible; bool named; bool supertype; } TSSymbolMetadata; typedef struct TSLexer TSLexer; struct TSLexer { int32_t lookahead; TSSymbol result_symbol; void (*advance)(TSLexer *, bool); void (*mark_end)(TSLexer *); uint32_t (*get_column)(TSLexer *); bool (*is_at_included_range_start)(const TSLexer *); bool (*eof)(const TSLexer *); }; typedef enum { TSParseActionTypeShift, TSParseActionTypeReduce, TSParseActionTypeAccept, TSParseActionTypeRecover, } TSParseActionType; typedef union { struct { uint8_t type; TSStateId state; bool extra; bool repetition; } shift; struct { uint8_t type; uint8_t child_count; TSSymbol symbol; int16_t dynamic_precedence; uint16_t production_id; } reduce; uint8_t type; } TSParseAction; typedef struct { uint16_t lex_state; uint16_t external_lex_state; } TSLexMode; typedef union { TSParseAction action; struct { uint8_t count; bool reusable; } entry; } TSParseActionEntry; struct TSLanguage { uint32_t version; uint32_t symbol_count; uint32_t alias_count; uint32_t token_count; uint32_t external_token_count; uint32_t state_count; uint32_t large_state_count; uint32_t production_id_count; uint32_t field_count; uint16_t max_alias_sequence_length; const uint16_t *parse_table; const uint16_t *small_parse_table; const uint32_t *small_parse_table_map; const TSParseActionEntry *parse_actions; const char * const *symbol_names; const char * const *field_names; const TSFieldMapSlice *field_map_slices; const TSFieldMapEntry *field_map_entries; const TSSymbolMetadata *symbol_metadata; const TSSymbol *public_symbol_map; const uint16_t *alias_map; const TSSymbol *alias_sequences; const TSLexMode *lex_modes; bool (*lex_fn)(TSLexer *, TSStateId); bool (*keyword_lex_fn)(TSLexer *, TSStateId); TSSymbol keyword_capture_token; struct { const bool *states; const TSSymbol *symbol_map; void *(*create)(void); void (*destroy)(void *); bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist); unsigned (*serialize)(void *, char *); void (*deserialize)(void *, const char *, unsigned); } external_scanner; const TSStateId *primary_state_ids; }; /* * Lexer Macros */ #define START_LEXER() \ bool result = false; \ bool skip = false; \ bool eof = false; \ int32_t lookahead; \ goto start; \ next_state: \ lexer->advance(lexer, skip); \ start: \ skip = false; \ lookahead = lexer->lookahead; #define ADVANCE(state_value) \ { \ state = state_value; \ goto next_state; \ } #define SKIP(state_value) \ { \ skip = true; \ state = state_value; \ goto next_state; \ } #define ACCEPT_TOKEN(symbol_value) \ result = true; \ lexer->result_symbol = symbol_value; \ lexer->mark_end(lexer); #define END_STATE() return result; /* * Parse Table Macros */ #define SMALL_STATE(id) id - LARGE_STATE_COUNT #define STATE(id) id #define ACTIONS(id) id #define SHIFT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = state_value \ } \ }} #define SHIFT_REPEAT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = state_value, \ .repetition = true \ } \ }} #define SHIFT_EXTRA() \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .extra = true \ } \ }} #define REDUCE(symbol_val, child_count_val, ...) 
\ {{ \ .reduce = { \ .type = TSParseActionTypeReduce, \ .symbol = symbol_val, \ .child_count = child_count_val, \ __VA_ARGS__ \ }, \ }} #define RECOVER() \ {{ \ .type = TSParseActionTypeRecover \ }} #define ACCEPT_INPUT() \ {{ \ .type = TSParseActionTypeAccept \ }} #ifdef __cplusplus } #endif #endif // TREE_SITTER_PARSER_H_ hx-0.3.0+20250717/test-grammars/comment/src/tree_sitter_comment/000077500000000000000000000000001503625671400241375ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/comment/src/tree_sitter_comment/chars.c000066400000000000000000000024041503625671400254030ustar00rootroot00000000000000#include "chars.h" bool is_upper(int32_t c) { const int32_t upper = 65; const int32_t lower = 90; return c >= upper && c <= lower; } bool is_digit(int32_t c) { const int32_t upper = 48; const int32_t lower = 57; return c >= upper && c <= lower; } bool is_newline(int32_t c) { const int32_t newline_chars[] = { CHAR_EOF, CHAR_NEWLINE, CHAR_CARRIAGE_RETURN, }; const int length = sizeof(newline_chars) / sizeof(int32_t); for (int i = 0; i < length; i++) { if (c == newline_chars[i]) { return true; } } return false; } bool is_space(int32_t c) { const int32_t space_chars[] = { CHAR_SPACE, CHAR_FORM_FEED, CHAR_TAB, CHAR_VERTICAL_TAB, }; const int length = sizeof(space_chars) / sizeof(int32_t); bool is_space_char = false; for (int i = 0; i < length; i++) { if (c == space_chars[i]) { is_space_char = true; break; } } return is_space_char || is_newline(c); } /// Check if the character is allowed inside the name. bool is_internal_char(int32_t c) { const int32_t valid_chars[] = { '-', '_', }; const int length = sizeof(valid_chars) / sizeof(int32_t); for (int i = 0; i < length; i++) { if (c == valid_chars[i]) { return true; } } return false; } hx-0.3.0+20250717/test-grammars/comment/src/tree_sitter_comment/chars.h000066400000000000000000000007451503625671400254160ustar00rootroot00000000000000#ifndef TREE_SITTER_COMMENT_CHARS_H #define TREE_SITTER_COMMENT_CHARS_H #include #include #define CHAR_EOF 0 #define CHAR_NEWLINE 10 #define CHAR_CARRIAGE_RETURN 13 #define CHAR_SPACE ' ' #define CHAR_FORM_FEED '\f' #define CHAR_TAB '\t' #define CHAR_VERTICAL_TAB '\v' bool is_internal_char(int32_t c); bool is_newline(int32_t c); bool is_space(int32_t c); bool is_upper(int32_t c); bool is_digit(int32_t c); #endif /* ifndef TREE_SITTER_COMMENT_CHARS_H */ hx-0.3.0+20250717/test-grammars/comment/src/tree_sitter_comment/parser.c000066400000000000000000000044511503625671400256030ustar00rootroot00000000000000#include "parser.h" #include "chars.c" #include "tokens.h" #include #include /// Parse the name of the tag. /// /// They can be of the form: /// - TODO: /// - TODO: text /// - TODO(stsewd): /// - TODO(stsewd): text /// - TODO (stsewd): text bool parse_tagname(TSLexer* lexer, const bool* valid_symbols) { if (!is_upper(lexer->lookahead) || !valid_symbols[T_TAGNAME]) { return false; } int32_t previous = lexer->lookahead; lexer->advance(lexer, false); while (is_upper(lexer->lookahead) || is_digit(lexer->lookahead) || is_internal_char(lexer->lookahead)) { previous = lexer->lookahead; lexer->advance(lexer, false); } // The tag name ends here. // But we keep parsing to see if it's a valid tag name. lexer->mark_end(lexer); // It can't end with an internal char. if (is_internal_char(previous)) { return false; } // For the user component this is `\s*(`. // We don't parse that part, we just need to be sure it ends with `:\s`. 
if ((is_space(lexer->lookahead) && !is_newline(lexer->lookahead)) || lexer->lookahead == '(') { // Skip white spaces. while (is_space(lexer->lookahead) && !is_newline(lexer->lookahead)) { lexer->advance(lexer, false); } // Checking aperture. if (lexer->lookahead != '(') { return false; } lexer->advance(lexer, false); // Checking closure. int user_length = 0; while (lexer->lookahead != ')') { if (is_newline(lexer->lookahead)) { return false; } lexer->advance(lexer, false); user_length++; } if (user_length <= 0) { return false; } lexer->advance(lexer, false); } // It should end with `:`... if (lexer->lookahead != ':') { return false; } // ... and be followed by one space. lexer->advance(lexer, false); if (!is_space(lexer->lookahead)) { return false; } lexer->result_symbol = T_TAGNAME; return true; } bool parse(TSLexer* lexer, const bool* valid_symbols) { // If all valid symbols are true, tree-sitter is in correction mode. // We don't want to parse anything in that case. if (valid_symbols[T_INVALID_TOKEN]) { return false; } if (is_upper(lexer->lookahead) && valid_symbols[T_TAGNAME]) { return parse_tagname(lexer, valid_symbols); } return false; } hx-0.3.0+20250717/test-grammars/comment/src/tree_sitter_comment/parser.h000066400000000000000000000004241503625671400256040ustar00rootroot00000000000000#ifndef TREE_SITTER_COMMENT_PARSER_H #define TREE_SITTER_COMMENT_PARSER_H #include bool parse_tagname(TSLexer* lexer, const bool* valid_symbols); bool parse(TSLexer* lexer, const bool* valid_symbols); #endif /* ifndef TREE_SITTER_COMMENT_PARSER_H */ hx-0.3.0+20250717/test-grammars/comment/src/tree_sitter_comment/tokens.h000066400000000000000000000002611503625671400256120ustar00rootroot00000000000000#ifndef TREE_SITTER_COMMENT_TOKENS_H #define TREE_SITTER_COMMENT_TOKENS_H enum TokenType { T_TAGNAME, T_INVALID_TOKEN, }; #endif /* ifndef TREE_SITTER_COMMENT_TOKENS_H */ hx-0.3.0+20250717/test-grammars/css/000077500000000000000000000000001503625671400164235ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/css/LICENSE000066400000000000000000000020701503625671400174270ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2018 Max Brunsfeld Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
hx-0.3.0+20250717/test-grammars/css/highlights.scm000066400000000000000000000017751503625671400212730ustar00rootroot00000000000000(comment) @comment [ (tag_name) (nesting_selector) (universal_selector) ] @tag [ "~" ">" "+" "-" "*" "/" "=" "^=" "|=" "~=" "$=" "*=" ] @operator [ "and" "not" "only" "or" ] @keyword.operator (property_name) @variable.other.member (plain_value) @constant ((property_name) @variable (#match? @variable "^--")) ((plain_value) @variable (#match? @variable "^--")) (attribute_name) @attribute (class_name) @label (feature_name) @variable.other.member (function_name) @function (id_name) @label (namespace_name) @namespace [ "@charset" "@import" "@keyframes" "@media" "@namespace" "@supports" (at_keyword) (from) (important) (to) (keyword_query) (keyframes_name) (unit) ] @keyword [ "#" "." ] @punctuation (string_value) @string ((color_value) "#") @string.special (color_value) @string.special (integer_value) @constant.numeric.integer (float_value) @constant.numeric.float [ ")" "(" "[" "]" "{" "}" ] @punctuation.bracket [ "," ";" ":" "::" ] @punctuation.delimiter hx-0.3.0+20250717/test-grammars/css/injections.scm000066400000000000000000000001051503625671400212700ustar00rootroot00000000000000((comment) @injection.content (#set! injection.language "comment")) hx-0.3.0+20250717/test-grammars/css/metadata.json000066400000000000000000000002351503625671400210760ustar00rootroot00000000000000{ "repo": "https://github.com/tree-sitter/tree-sitter-css", "rev": "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51", "license": "MIT", "compressed": true }hx-0.3.0+20250717/test-grammars/css/src/000077500000000000000000000000001503625671400172125ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/css/src/grammar.json000066400000000000000000000042531503625671400215370ustar00rootroot00000000000000(/dDV~(jaU٘1߄ P#zFĜO|xOnm:qow"磑EuJ⳹lf}6w2v[No󧲺Wݗ o22Ww^K%? ^Q2ϷBLw"Z#Ys&E'ճ;ţ3RjK,uGF$tB=P:L|/"dT`UDskgv-us]vK-Ӻ1:cr2h a+̥UlUQ^(.X(t\XVdv bbP&BXm3S3IRi3A("cA RaaAAA"D2GXﰕcIKG367p Ƞp݅iD̼E#Z+TSĠgcAوx%,Ih2sٳSmD">[|pZ~K 2*RW*0ʐ_)u@DMS/4xcA `Nj͘v?4nVMZ<[ `j2CbSI&|Sij"+0I n@]@X2ܫOozt[ rO:-DA>)ARf}:XhDX߁W/6=T)Tcp$O xZYU[h_<{"Vz™FdBm1'Qܔ5o5;'jĵ-9nvT,X,vjʝdD+U9a :쀖łoQsN/]ʉj+Qm6]UcVx}$6m\ə!'-GZ+G p!q,nCy Ă^k@ьRt+owQJ0EuUj DKy[Ts7 ^htY;* Iˍ`9:8#ہNXeK$#Lk+.B\@A] ||d:-'bWaF-'"3Ν`&Whӳ6_խ>`Ś?4a$unL]+0-[[XͶ8'de-}☲p rXg݄VDKč/PEK~-)Y~$6ƲƐg#m# .c47UDS<3J 0G$ 6A9W0n| 3PŨ$HJ+<{0=ƫ=}GKA[6="/ҷθɆwcjn #c2`b`үr g>P@TS$?r``P{Y!g\` E+yhx-0.3.0+20250717/test-grammars/css/src/parser.c000066400000000000000000000350041503625671400206540ustar00rootroot00000000000000(/-(6(RU&`cg|abmU;-LYMQ,3[m/hN&qXx%ɯ`&tO3Ma,o[ET:kK zͷ⿝Oߐ@7o:{r2z%'W HJ'Bi|;oeJ !`?U<wv`%]> ԓo'.\}zweD8lН9`5W U&eHB0[4~8:W)!M6$W6nnHlZ]" Y&۾v|.^[vTeMܢ#+tԘC/?EM{6?*ߊ, fZhث=sG\;Xل}p֗T\)iJ2\p *gq\ƒ&F,ߔ")o@wGv|G֜> eYsJԞ}+Ve/q((]VEMY/ٌ2O1`]VQ(}v͗3\{;gA~e}->zvzհԅ9wGΕa:z]fD$%)p(H$ 1l4I `DJY;rB#V ^c.Ib IVedrXHJEJo7ڜ1kT=Cj4M08bv 8M|Xd(n1 N?A ͳj&_x| +J@zXl~%wAA $0<,ZafTY+ortg8e;~-;@+X;LH4pEsrx J}98F>Ђ`In z$w8btgj@C9Abo@Vu? d<@e<uMy {ELQO 7ze-gĆv;ښm鬒"u7 ULc6ީ5Cށ4>ZADFұ "__g̻럈Fd kIbkp vN> ߁p-,6 $?=h C Nz5,Vn.0nZL/^u$V#%o`YHޭl#ًhb&& 1r.nl^nv`Z6܎Zَ[ ?}jN):@Y6iic(yXQͱ8T,d[EQ=~^|*$R]GT>+(R)2aanono-L/su2"TSSmapzqp"@(d`a,j2H͐%13?, H3kef*=qhlQF 'w$'2a"$'7*`}FpD$YlHBkʁ$,E\:sLUsˡ77-UmlIj$07g-ƤԸCܼ9 \ZSm!e^5s]E} |X`%m#QG!CB'~,pM.2,Q  R(zL7_BF /; H#Վ5*ބ"IHQγW7VgnTK*{jX55%i; ,*hoexFeC281 /9X蘑?w#K=ӣTcN:[ܠ-;Bi1>-&oP^/r|>EdfYC<Wx!U,%%Exڙtr*0z px"1$( BSĸ5b. 
y4Mxsd2,c /^#ŲK \s҃Y^٠?.bL2~p+*T:w} 4)([?-#U&7:h% LgʈK6ҢItG0[K:01G "0%VŁ<7%cZ:uQ ;PUE-YFԐ4uV~(Ғ O K#^F0WHiG]Tĕ2"[٣M}JA9DOKR1fhTB108B ċ'˦ӡ@tݘO=byGIPs>w|AG$>ttH\y_}[8CF7;|f?Zg36ÅtkFXήQQ565f>7MtYIiu($}glgə#73dj&*fTzeBet^ޓlzJ$jdԃEȤct$n ꨟUKxbx=Gn~D9⅑_u=Ճ97S*tW&EOwɡ؅p\6`![ Bز!D"$[6J;2N 3;N0MDPXYQ2>uJ @!`0@@~h)SU"_⛏'9BWO*T,Ss$_1Ec(% {|ݩymߋ]sB#zHHZ`A\F V1 2!8Y# .Uu';g+̘lččﻵks(Q~ʎ^pʦr}[>M>0 r0ϜHtռup[^V(Z{Ub![9V֞__-O.Y>pm;o]oP4VϘFK'15V?)6^{aek q !F1 W80c<0B00@B"Mp oil6óקuo#%۬ PM,Edp8Dq%gotUZWbf-cEyDwܯqXB.pg]JOPH^lv܉M {8Vyl@ E;K_}qj#ĬOãJ)gY32IҒya"2d04JQ #WUq^0׻Jܰ#ifw?i`Pݟ_m\2p0"*ݶạ]?aTV d=W`fLZ9}JQʅ9wa'B) Rh#uV,>tl4n@_-cbM  MHtIN6\qNʂt.PO.3YC^po! ^ O9%5T4 ԌES4̶JV4q}N;ucKNYMkճw 0 Qr UqgK~gC ɋޗJIŒ5\i睑0^X,V "(XLa;R\ nҀ)lxٿ,mNDL y[po]_*=QN5CCg}twjQ0 ! z|9* rA`e< 8 $M-&#ǟt82gsjo3eyql%+=Hx<c(n9A(4!}\ $0(ɝ}¸;+*D``q$*ݒt,H3ED4BѵA>fo:ca72Tԑ7=kȃ.NUN#-bxd86>88 >8`ƒ}<5=;Fiw2+8?vՌAg4J6#}}jb5$q*H',*4{V&PnH/ߵATG-Nu<3+#hcrbuA7Ͱ;A{HQY9^!Si B}C&":+mZqgxVqMۊ>ʊ>.IYI?c)N>TYD$˭>.eΊq)>+mId)<+K}\Y?6KK3}\ϊ>.eΊ<.m+j]Vڳ2}\ ϊ>nD_2 [vUlٻ]$B)VUs fL4kl%CC˰跥!Hx.+( ?]{찢S7?U(wI<z\0pLہ ݶݥT84F+߅yf',B:홤q0$+} bbs|^wsXQs^c%I;NM?G~Y " Og}iPHAݼ2O}?#O7K>T$|zGƂ/tG:K]@&0ptY ٌkWIVÙK|ecj L.:uT8=xœLtșx2cbs"u1ekn8ĉfhK!s73%M!tLmҜk"plXLQ] LdÏ 0D+]PMj*B5;yڬyvrW>֜4fKqCԌKn+Ԥ %Q Wӻ$X ވLw?`f:a4nҷ,d"@W8#r~vC=;A3sqM^}7jN_{o`.B΅(dXbaP%]e/i5C|%ɉkö|ɢx dx}cI`)zBbRwO*ZYT%/scwų@TX{@tYS[0U]OV3BZS9-BEHS[]PҵIi +Դə*K7+kZFjyv%#@2{ YLAQ1P$D&gCǒ0朠-@7 ڨ|ܡ| 6恁o@Z` rL\4[f9WFb @u i@c8[)Mցԏ Xϫft;1ΰBmL8rCQ0*d-'8}jw%$dӌ& 5EV~ie&Q t b-zVv+Sl1aPi^dhH+OSYu@u%tB׼?qvȻrthHp@r>:̯}Ҡr)4J6Vfe=4 P|4/X=mwUbfF #}bpúGWECP9s.̬!Zѷ_<> |84< un{77OCqSoi1lgnOYG›68B8|;D#݌h%'Ǩc7zgdX훢/HTAppV_W{cqΒ9QxďBV&E q'0_5IҘ Ԩg 2ҹ[ƿoF}]& n?lJidYQ.o'EH _5lAe}e] *#뫳t Zǭ22)?߄"J[4Lˤp=MKf)W0,4 ;#83or<غ8軼.+;n1׳$sCalVl MM/w0x3@'!0R>" cq'+03hp<`5q'GT lk3(&hژlIhRq!HW$H?zN|1GEy"}/ Fgt7U_lv})\s ӻUBLtm{5ġ'|PlpkTm#}hh<{N;} A3#uk+ h(j'u%C.aRQ^sU9c8?Fi}.iK.>(^P),`"ot$^&&","iҴ`d>r@!fh r'uigJٲ"lfD^4s)Z^P꟠p󌈑GDgX 034"J~9_,`3ƭT:.>0:6(Oz~7l3v9AqpD.[J2z7@rd=j>#28Bqb͈a _&5aɼgy#1a mX/GdbSF|W88U5e lp&MMۻa7"W 1i7 MTKxl{g/hm,gsbxM"4dA0D"`w̲@ }`w{4gxȴ+N8h8W%fƴ=-LB%pY&:V,H:e2Q9(GLaPĒ B6_7:vL&HY=UpE'Iո%F5Գjpn˳LtaVUv:lL!26όoX( ck 1].]CcX,S@ @0Aa 4+%~Yb8ܷBE'Q!맸\rb!u껬ì%ؐY{j-a#)0 #| ~lA3XQ"DMRotAo(;6&b8HKC ;<&Mt31$ c~0:*&I`j",J:l"c6K>Q `;; ̨-ef hg`@p/ /=Q*k1m9((T( ўWƼ( ᡹0vd(S=\7>o̼8p%/|ɅʴP gAMyjx9@~b7.yd-Fl7ȗI@>/NƆCSl?f?C TCNY!&(IGB!t`aAF *% YU^܇"s 4P=V(HM~Fc}g  ^(c݄D$;% m vA[:*gբ xJoe}l!̨iR@7V9gn0ߤB=lFbAE>@  3NLnK0psx4pKo@BO[XD7N SG*D6c6{g PJySqAqۈxwA#T3:`5BizR/i9ffv(} g1Fn/3ܼ d.u${Wz$XƎ?PE TF-vpBFZe1:CHMFy,J4>=M$h\s*$?fJ7*,Q1]#BH3284Їq'I/b?u:IE( ? 9F.FR*FW6+[fHX7}+i ȂKi;k|/dً{9;5%7H1"2(='1N6mCSqOǚqp0FAx.VX&^>:֝~gp&&]֬TwVygk ѱja7Y$ {FěscdeFLЁKN &!p; 9Q&{em_|fRq$:pylK'R5۷D 1yO 'cVF[k%r&} &ж\ Ax$v݊`rMB0 Oע /N L$O=2s )~c_5s ́+ H.5N=Hv6qzQzx~G`l#f :$.釕-s<3q4!x@:Nd}2c 8 6bOޔv?ͮ>ʉoEf1Bȝw(ND܋sm[PXuBI(䡕@ U ,WCsYЍH*(! 
eJ^ rܒo|g_!B)hˎsHc&z*7/Bڮ9>εv zd[ARm\d<,#"o;) 4jcȞ4'__CGdzOFsl%Β={L1;{dL![@=gpGTbov!qh06+/W`>3M  ۦpb̃B84A8l?l"uz8j!Ňk7`BSX<7/P0m ͇m{>V.#ۙkWO<{NĘ+CEFqQ-MR dq0L?"&`@s^f +\+|YaQ'ۉoԍfW:\?;,8x^$~4J.$Zzv'~6؜E-|T Nj;ɣ+3ڝ#>]jY 4&Bo0L^)F5o 2Qou$RhER cvF(vpt8s݂ U$L/%dgeShwĂS- [HbZiXRu:TKrwVuȫ;-9UC7Qf wb.5HӪ!]Չ,Q{D\ln.BBG+\+~SXT_<7pxTc=j#lg&!8gDL)q&ѠqA$ 96W!b$U  U=ckLx蟱EZr W53vE(!kECCwMU&&ueLbUDCu)8AK4BႢ'q (AJ  ox3S ݨmeM.z{kRɜEf=HD\\xX @x 6` <,2lA (Ta ,< PiȻ VyoȋTϠ13UD2DBM@DFt@0A!@hS"A(#] *UKT7|wz=M}HDˊ.L5,M p.7k~ȱJDcgY[ώ7ES]gG`.dZmDc&, -}vxR1MZH[ҡ-ެi }P9YzN2:iӊcudzjML>iɯ*c']$X>lD 놬3E+jMJ!o&45gg!]ޓ U1γWQhqh3FZ%70,~aLING%Je 2gќdB^w8 ?|޺u TȢGR1sX7 ORPϊ&mgL#?w4AxqP-Z[0~>**  >4{ E5>Jqp cGb1،d)^MCH[`Jy~5Qt]kKzQeK{61e[w+"7MwƢ/m~$HzxoF.ÒLg =,+ \[c74'"lgT1}[ z71 U֊JI˵,, : kyJ|d- EG@g.r=万)f BΥ#gq=7 ~uSxdS5$28pB>BWI @CE,q6%4Y5x^Uͣl"rdns>} vK2.*ѽ51Y|x`0"Ub5x HG%Z *,_Nl0e"g5߅]Y+avhQ<\s &#Ob$.ZLH++l=P;pD?ڃW2 ސuc$wu^( Y_Q &\xtM)D_9 r-@y(`&&V+hn5/i (aENvX6f^(azVWf縋:(RHE=~u/D{V#R%ʩI\F2:tz_t\~ RǁA]XK ·/=Fb^>%k0hce1biKdUϴV_+DPM!L( hӕ&WN }Z2bT$ ᤦHi ɳ^S8(M"G34F'ACq2I̪ܕ~eK$^;ϒth% kOgshx-0.3.0+20250717/test-grammars/css/src/scanner.c000066400000000000000000000026411503625671400210120ustar00rootroot00000000000000#include #include enum TokenType { DESCENDANT_OP, }; void *tree_sitter_css_external_scanner_create() { return NULL; } void tree_sitter_css_external_scanner_destroy(void *p) {} void tree_sitter_css_external_scanner_reset(void *p) {} unsigned tree_sitter_css_external_scanner_serialize(void *p, char *buffer) { return 0; } void tree_sitter_css_external_scanner_deserialize(void *p, const char *b, unsigned n) {} bool tree_sitter_css_external_scanner_scan(void *payload, TSLexer *lexer, const bool *valid_symbols) { if (iswspace(lexer->lookahead) && valid_symbols[DESCENDANT_OP]) { lexer->result_symbol = DESCENDANT_OP; lexer->advance(lexer, true); while (iswspace(lexer->lookahead)) { lexer->advance(lexer, true); } lexer->mark_end(lexer); if ( lexer->lookahead == '#' || lexer->lookahead == '.' 
|| lexer->lookahead == '[' || lexer->lookahead == '-' || iswalnum(lexer->lookahead) ) { return true; } if (lexer->lookahead == ':') { lexer->advance(lexer, false); if (iswspace(lexer->lookahead)) return false; for (;;) { if ( lexer->lookahead == ';' || lexer->lookahead == '}' || lexer->eof(lexer) ) return false; if (lexer->lookahead == '{') { return true; } lexer->advance(lexer, false); } } } return false; } hx-0.3.0+20250717/test-grammars/css/src/tree_sitter/000077500000000000000000000000001503625671400215435ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/css/src/tree_sitter/parser.h000066400000000000000000000123341503625671400232130ustar00rootroot00000000000000#ifndef TREE_SITTER_PARSER_H_ #define TREE_SITTER_PARSER_H_ #ifdef __cplusplus extern "C" { #endif #include #include #include #define ts_builtin_sym_error ((TSSymbol)-1) #define ts_builtin_sym_end 0 #define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 typedef uint16_t TSStateId; #ifndef TREE_SITTER_API_H_ typedef uint16_t TSSymbol; typedef uint16_t TSFieldId; typedef struct TSLanguage TSLanguage; #endif typedef struct { TSFieldId field_id; uint8_t child_index; bool inherited; } TSFieldMapEntry; typedef struct { uint16_t index; uint16_t length; } TSFieldMapSlice; typedef struct { bool visible; bool named; bool supertype; } TSSymbolMetadata; typedef struct TSLexer TSLexer; struct TSLexer { int32_t lookahead; TSSymbol result_symbol; void (*advance)(TSLexer *, bool); void (*mark_end)(TSLexer *); uint32_t (*get_column)(TSLexer *); bool (*is_at_included_range_start)(const TSLexer *); bool (*eof)(const TSLexer *); }; typedef enum { TSParseActionTypeShift, TSParseActionTypeReduce, TSParseActionTypeAccept, TSParseActionTypeRecover, } TSParseActionType; typedef union { struct { uint8_t type; TSStateId state; bool extra; bool repetition; } shift; struct { uint8_t type; uint8_t child_count; TSSymbol symbol; int16_t dynamic_precedence; uint16_t production_id; } reduce; uint8_t type; } TSParseAction; typedef struct { uint16_t lex_state; uint16_t external_lex_state; } TSLexMode; typedef union { TSParseAction action; struct { uint8_t count; bool reusable; } entry; } TSParseActionEntry; struct TSLanguage { uint32_t version; uint32_t symbol_count; uint32_t alias_count; uint32_t token_count; uint32_t external_token_count; uint32_t state_count; uint32_t large_state_count; uint32_t production_id_count; uint32_t field_count; uint16_t max_alias_sequence_length; const uint16_t *parse_table; const uint16_t *small_parse_table; const uint32_t *small_parse_table_map; const TSParseActionEntry *parse_actions; const char * const *symbol_names; const char * const *field_names; const TSFieldMapSlice *field_map_slices; const TSFieldMapEntry *field_map_entries; const TSSymbolMetadata *symbol_metadata; const TSSymbol *public_symbol_map; const uint16_t *alias_map; const TSSymbol *alias_sequences; const TSLexMode *lex_modes; bool (*lex_fn)(TSLexer *, TSStateId); bool (*keyword_lex_fn)(TSLexer *, TSStateId); TSSymbol keyword_capture_token; struct { const bool *states; const TSSymbol *symbol_map; void *(*create)(void); void (*destroy)(void *); bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist); unsigned (*serialize)(void *, char *); void (*deserialize)(void *, const char *, unsigned); } external_scanner; }; /* * Lexer Macros */ #define START_LEXER() \ bool result = false; \ bool skip = false; \ bool eof = false; \ int32_t lookahead; \ goto start; \ next_state: \ lexer->advance(lexer, skip); \ start: \ skip = false; \ lookahead = lexer->lookahead; 
#define ADVANCE(state_value) \ { \ state = state_value; \ goto next_state; \ } #define SKIP(state_value) \ { \ skip = true; \ state = state_value; \ goto next_state; \ } #define ACCEPT_TOKEN(symbol_value) \ result = true; \ lexer->result_symbol = symbol_value; \ lexer->mark_end(lexer); #define END_STATE() return result; /* * Parse Table Macros */ #define SMALL_STATE(id) id - LARGE_STATE_COUNT #define STATE(id) id #define ACTIONS(id) id #define SHIFT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = state_value \ } \ }} #define SHIFT_REPEAT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = state_value, \ .repetition = true \ } \ }} #define SHIFT_EXTRA() \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .extra = true \ } \ }} #define REDUCE(symbol_val, child_count_val, ...) \ {{ \ .reduce = { \ .type = TSParseActionTypeReduce, \ .symbol = symbol_val, \ .child_count = child_count_val, \ __VA_ARGS__ \ }, \ }} #define RECOVER() \ {{ \ .type = TSParseActionTypeRecover \ }} #define ACCEPT_INPUT() \ {{ \ .type = TSParseActionTypeAccept \ }} #ifdef __cplusplus } #endif #endif // TREE_SITTER_PARSER_H_ hx-0.3.0+20250717/test-grammars/edoc/000077500000000000000000000000001503625671400165455ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/edoc/LICENSE000066400000000000000000000020561503625671400175550ustar00rootroot00000000000000MIT License Copyright (c) 2022 Michael Davis Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. hx-0.3.0+20250717/test-grammars/edoc/highlights.scm000066400000000000000000000023551503625671400214100ustar00rootroot00000000000000((section (section_marker) @markup.heading.marker (section_content) @markup.heading.1 (section_marker) @markup.heading.marker) (#eq? @markup.heading.marker "==")) ((section (section_marker) @markup.heading.marker (section_content) @markup.heading.2 (section_marker) @markup.heading.marker) (#eq? @markup.heading.marker "===")) ((section (section_marker) @markup.heading.marker (section_content) @markup.heading.3 (section_marker) @markup.heading.marker) (#eq? 
@markup.heading.marker "====")) (tag) @keyword (macro (tag) @function.macro) (macro_escape) @constant.character.escape (inline_quote) @markup.raw.inline (email_address) @markup.link.url (em_xhtml_tag (open_xhtml_tag) @tag (xhtml_tag_content) @markup.italic (close_xhtml_tag) @tag) (strong_xhtml_tag (open_xhtml_tag) @tag (xhtml_tag_content) @markup.bold (close_xhtml_tag) @tag) (module) @namespace (function) @function (type) @type ; could be @constant.numeric.integer but this looks similar to a capture (arity) @operator (expression [":" "/"] @operator) (expression ["(" ")"] @punctuation.delimiter) (macro ["{" "}"] @function.macro) [ (quote_marker) (language_identifier) (quote_content) ] @markup.raw.block (parameter) @variable.parameter hx-0.3.0+20250717/test-grammars/edoc/injections.scm000066400000000000000000000007621503625671400214230ustar00rootroot00000000000000((xhtml_tag) @injection.content (#set! injection.combined) (#set! injection.include-children) (#set! injection.language "html")) ((block_quote !language (quote_content) @injection.content) (#set! injection.language "erlang")) (block_quote language: (language_identifier) @injection.language (quote_content) @injection.content) ((macro (tag) @_tag (argument) @injection.content) (#eq? @_tag "@type") (#set! injection.language "erlang") (#set! injection.include-children)) hx-0.3.0+20250717/test-grammars/edoc/metadata.json000066400000000000000000000002401503625671400212140ustar00rootroot00000000000000{ "repo": "https://github.com/the-mikedavis/tree-sitter-edoc", "rev": "74774af7b45dd9cefbf9510328fc6ff2374afc50", "license": "MIT", "compressed": true }hx-0.3.0+20250717/test-grammars/edoc/src/000077500000000000000000000000001503625671400173345ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/edoc/src/grammar.json000066400000000000000000000031231503625671400216540ustar00rootroot00000000000000(/dM-2bs10qۀ`K lEv$YoM-~T>}…^*gcfg|? 
.&E&:^$%x( `歶UOWKU:r<ھw~.{ԯip$'76yK髴b $Z#vMo*ix:2j]| juA* XY89Gy7wD]YEpu0 ԑ& N N7}i&44wPAd/TgI|m!HA̅`e2{&Xw/yf0t`a :w{nPeWp!%DI+:nt!iEzM jЈkR0<[Af +*$gidPq6ˊ@99;&i /m`jT)%Af9̧ lb <:K@xH쭥G'!0`('Il=MRpO p{A 83*N`|ޘ9(xND3P2hx-0.3.0+20250717/test-grammars/edoc/src/parser.c000066400000000000000000000306101503625671400207740ustar00rootroot00000000000000(/((|,B[Q!Hwn+()SDbUQE-Ny}we:JվP *f2.G`3.MA1x(sxINwshe13 aqF$D#*F1wtE7Zg\(AU֤UPZȤ9f&a+&icȨk1c,iÐo$ ݅}a0I?^,:Ʋ[p# %AldsZ\>Kp-ZJgVP8Śx!UMB)m2Q,vRUKC,J]77D0&&xS~|l" aMPI-"U"}H'piщ+(mmKBvxq#Zzs+6<#LJv۱uC s+ ۰iq3u**lbp>V:> ӑI!5Si>1~  uXYxMaf^e( ae^){Mn*]f Fwk fe&3M$/ej=8Wp|:w!U/I` l ߌl%t3{{@0.6֯' C0*K,epǨ-X% ñ_|7%vaݧy)gx\-7awMk^7ΚOeՏ 8;l6硛k8KwDtrOR tekm_22"TSSmap22Metadata .visibfalse,ndtru}}/,cA!jd" Ba 1dAPFFZݑ~ɯo~TBǢH)2KĹrjѶCr E9DϿ Hm@E2$$HtmD$4VBSsr5w"9xGPA m0,7 e83sBYG汰 AZAynWSSM)Zl#U7'| (ey湄UBb'4HGaS5€EF"2xM>{{-SDJpJ Fn` uQnO$xGV&xjp xѷ mU =] VZ~%ڣz}Ӱ42,6}v~@yW| $ b8[]ls Gյ|&.Y4ɯq+ Ǹ8pyﺪ[S>gin(=L]F,<ӚX6ӼqBSD̯c/,iL<ݺ.pPAbc [.r_GlYXoX+qdkP:ULf݂]&%crNHUǵ,#!FM"H;fG+zI9DL%Ga7?Q`M enP3 ,8l%~LY3Q͢-)h%@1 ˍ D}4xY5rw Z̮7mq^fA8@hC)M 7@1m<!`O\G{seaz`nULfU3*+yW{ͻ"ޙu3wfeݬO<ЊHTчHjELǀ hYfLF%)T 4"h $ ]|(&&2*fb@x<LĻf^g❕v3xgVw3gYټD3¾-^.ʪ5gee~OWŽZ*oj Iʄx60$%%ah`8$I%M,IH&"Ib[@T;B(<F@8"B!0BB"[)b/b_Qc+]rn?H~CaZ.]=䖗TE;۔xlHG:;hAOR6dHu<0b #w`>kS2 TLEb[ap\b=Sm AзtWAZ]g.Rk73[/(r7򄈸c#x x9F!&Sb2+Vr[y:"ErPC]M~`}"vƘ ĘR^8`3kOS| zP:]O=+^Jfʍ=_"j>qGoN0l!NCZ!vP _tJ6Z럗(R**5rJobl0UZi\,!p<_B^J e3=oU]Xz*Z }#麹tpJA2 j㒚Uo;KVP'R\cֳ!U/ 8P&7$M\ۤM.J$Kd@u~ISMc50ү9Шuh36uIZ-F+:هXc([jժN Xn!ߞV8#^+y=$ytԓ'g|BoSIiɪs дAhm4!JXѴhU~hHVO~70xeV%Vi x/jРB?j̮/N8a;T-gqhYT}ɸmN)G<΂a=?@ ]ʛkH⎦ԳC2\URK۰d[Ѐ>~X LYcaSxxX W!X*Gr(h2({tL -s=|uXc _"$ s<礚[ޠ).wтw`΀KGvf̳ rYN)N%!?9_N};RU/x@h )1t8xѓ47Me:x@z *d!bDN̉?D!nF 87]fP mW47S.PytZhT 0ua9c~Ep–q2j1"A <zshhhq$z |w'u5V4rD4DEPBd@&84ZPE0rq cFr0 (@ )IȈQ@ E5Ѐ#x8:3 p0C_ d8 !a*@_S#t¬"D' K`z$ƀSľݖ![L#Z4'\Rʮl>2FrQˑ{bE#HSrWE.FAE#p&h(p򢑻3/m.\4rwl]rXL#UUE#w9 *{$hwŋFUTzwh?v9FFvb'(7ɡ2XpN4a.hp@f&s`AnXk eZ`mBT>߾clNNcEU㻮N߶A'IYrd31+` VׄXX b%c?F"'?DnJCslaX\TuU$4!P>>wU^ Go\:/kޭf0\ B߶Y5okB$BX!T@ .lƔIHM]t* a\Z2>>)cHa]HrQsfrx%4Roda5r \bLyhďXcPo"DR&inbL!7[D#bߞIb|Ø"PEҔ&AE'-f.)uUKnǍqXn̑hD*F\jVvD:1.S3 0Hy300`@, b  D1 b(ɡ&Qs9)mi%7t'x t4N#|nܞg*: 挼,G@8d !F۫g[~r?pSUOƨgr^X:NE6$6͟9*YeܦTq$,bPT`4ctzMt) Ql@,t)Ws`.)h~oH$0.#h8 `<>`j.' JMouL@D ز9@^?m(.ğݞ[{ i^: Ps#sx`~Ɖ=n\4rJz|sF@#,Ȁ-!7@H4,D9UNch\Ss.cIJEPDH5 W#:+=)ɥPJ)jdQA3FS ;+t)L, 5EI*CV|ܲ^%#RNa7+6=ټԢBanMf'z Ϥ']f]/ aP!ÎGWaϵ ˌoQZs} (PAhy~'zޢJȓZ%&XUt3-;d+h_jA̟zXWs|'f.ltc<ɜˆW >41 get%T\o!Z +1sc2i5-Iᄅ M9>"'xyp']X946! fq EEH)C]qLoҘJir6H5G) Me* Vat@sR.䈩/fj0 X ;1-ő~v `vNj{ཆ޵TVFK;/]-txݼAXܹo4[J\K%z؇=g4ڲ}M^*)$ yP0sd *4XLAf [H 8y HcUCdd˹T3~pQq?lc ouuU$*O( daK$lkO Et,}fC[!p#5|.ŷH_`D5出<_Dza#$ M"Mx&dHi[Gu :ҖcFj5u.Y5-lB?)Tܓ KNσP c HƟ)?O3~sϓ)k1 ci[;ly`@M?(L_9JJza(:$r5ݲ]^q#X-gMHEP+~e9MlsY"*Y|s<X| I~cˁ! 
X%6W6KCaO+) ~+~JVlt:5WΠ%E-~Ѐ?R( 8 Owq F5*\Abg`TPA)c(u+&MP>~BzN>nAxm/ϨD͈S06,ݝ_XreϽ֍ eDLI:ɟ7UQObuT#X}1ǾJg7y5!)6tHzsN9?R5ɲu2ô~ui="":KJzTRnڭlh7dZX/L> C~O~;:RC+Ž METqcvF79n5#⬩1q-:pqf{j݉ڡB_ncoѱT2/:yK@ A稣?+n,T(g5 ۦwӯe>͟ODu}g*n|AYDFGp|' :M eSk,aݎ6.oڌP&a7[Ƈ,+SgY} P׬y~YXj;V!ƿIEP^8H35/Q<`60/JLl"ǏJS/ͥ8hUB&mY?wWUu= l\ ESB+2~ZXByM ?{XMA=8ۺm i^͌p(CXS)sbnFLLJ( Aoa\|*4h/,x n'Cý`IA*@Ֆ%}>O1 Ƞ.Mi`LT {'vRثA瑒1SR93AB) ɔ R*Ttv.Em#T#R|mH2u+j#T趤՗Ap"Ԉfx2H$T0yTx;3k6'6FWrpB%;7e CF?#Vu).00࠘(qf3psi6KqO.zAB{SZƔaA25.<fB1AF~M8(* s!cɂwXlHC"ҡ"F\ȑCFCThNo3Y$7{:zƿw:c:]ۯP5OF|## ح[G^h|$q=+Oj;Q~ p>%z|'WYuE/"h/̒M{ bt!Qэ TL" `0 B0 b2F)$;3?܁7N0nKg|eBPQ,=ki.M l9Rۇ+?l=sEC ЛKhcDZ1}1K Jvξ`}3Z\b*d\YIO9Mq:a6wP^B}/x;zI;C6 ؾMj Qm<ŕ+q'Y Wj~ v }oK#lZ=0Y^|shDzD2, c׳ɘ_+% lm3lcr I^?!D5:yA$s@C8b{FZə?@RymPB~^?$ޣz6Al^s{'NVdYn@vx1=ՎYN(/I.ݎKEhv҂LOc!UŻnQBԛ=\ϰ=xӫb؃FX~/̀-(N?I9y:_Lc{y\N/<" 7^f%4 SW/? t3'00`L L:SϓƬVEKKwh~ʂ:#(˛i$(V9xwhW,j8?1EVE  PZ3nhҪ4EtbM+,G}3pN"YFE2hz ِ7/ecsa)xnװu:n,D3C$!P';n%KX_[C7 / F"/"ݕMmL@ɿ饉kL"windq՝8~+ 7\7?Ls\WQ3PZIR3 -f;amS̲G}ϟi 0c;m9\D''-**X,)mxkRNLPŧ"0@aB#~ךqY}ƭ v(KU1E*R%eKDX)cKN\Qe"ڟB'(:c BQNQ -ApQ?D13(Uŧ-nSpzSbVv0zRƽUE0\1!K42A@p``I.(*#i"ޟ>AX+V,f[I|l1cotvVQ3-}bh[E؉(E nD0a~H*d, 1C@@C  4@"I:KT ! /J@ CNEM;;щ!X#-Iw"mĬcHL& PcCYFסcMg,xg] ,@␘@0!FƇG`xd|谁Q񰠁ν{w]gtFfv2K']ƞqhbbbwo̯eV~FOC+Vg$ϙw̼ބn͊t^1DV8NQ\ɍcYʨ"r))2TfSh"(AAc!CA!v` 0C0D\!=ɢ㦢)燵ʍּG|<# WvsY_Wg7p +IT.N4]}܈&HRyL.$VrLAb搨[f, U]7t$J*e~-$e0 QҶDdMp 5[7Ԗ۲ojk`˜ _[sI"p򃦝aa/ʲMN%}0S vuʻp/EYM2Q̛@F!8@b+./%AOLžE|93x ԢkNx]zBڐEŎ-o`wE{I4}#F䛼j$[0 G]͉hp^rK"~kbqoh]]UW1S"AfE32'n4U [v+uF k/3 ͋mS>4 I<ۃS2ᨐ C\}l3ّFha.)W`msH|L,<پꄥ8w(('& 0NxZdNHsa'm3vؖ:m3J8"&#M QdI}1Z3O4mMYEG 8"h!r{%ǽ^([nݙ:Up]91v'.f@zN xw.ez\)sܹޑ[,Ԭ>R![E0!eJ\ ᆄaÂHL(6s4u'=ශjzBI8t|\=N;q&qXba#T sƎYQVM;M7ytj*V"5j\=~}-Ǭ=C-|OW^y L?~d3kLt%Vhx-0.3.0+20250717/test-grammars/edoc/src/tree_sitter/000077500000000000000000000000001503625671400216655ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/edoc/src/tree_sitter/parser.h000066400000000000000000000124021503625671400233310ustar00rootroot00000000000000#ifndef TREE_SITTER_PARSER_H_ #define TREE_SITTER_PARSER_H_ #ifdef __cplusplus extern "C" { #endif #include #include #include #define ts_builtin_sym_error ((TSSymbol)-1) #define ts_builtin_sym_end 0 #define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 typedef uint16_t TSStateId; #ifndef TREE_SITTER_API_H_ typedef uint16_t TSSymbol; typedef uint16_t TSFieldId; typedef struct TSLanguage TSLanguage; #endif typedef struct { TSFieldId field_id; uint8_t child_index; bool inherited; } TSFieldMapEntry; typedef struct { uint16_t index; uint16_t length; } TSFieldMapSlice; typedef struct { bool visible; bool named; bool supertype; } TSSymbolMetadata; typedef struct TSLexer TSLexer; struct TSLexer { int32_t lookahead; TSSymbol result_symbol; void (*advance)(TSLexer *, bool); void (*mark_end)(TSLexer *); uint32_t (*get_column)(TSLexer *); bool (*is_at_included_range_start)(const TSLexer *); bool (*eof)(const TSLexer *); }; typedef enum { TSParseActionTypeShift, TSParseActionTypeReduce, TSParseActionTypeAccept, TSParseActionTypeRecover, } TSParseActionType; typedef union { struct { uint8_t type; TSStateId state; bool extra; bool repetition; } shift; struct { uint8_t type; uint8_t child_count; TSSymbol symbol; int16_t dynamic_precedence; uint16_t production_id; } reduce; uint8_t type; } TSParseAction; typedef struct { uint16_t lex_state; uint16_t external_lex_state; } TSLexMode; typedef union { TSParseAction action; struct { uint8_t count; bool reusable; } entry; } TSParseActionEntry; struct TSLanguage { uint32_t version; uint32_t symbol_count; uint32_t 
alias_count; uint32_t token_count; uint32_t external_token_count; uint32_t state_count; uint32_t large_state_count; uint32_t production_id_count; uint32_t field_count; uint16_t max_alias_sequence_length; const uint16_t *parse_table; const uint16_t *small_parse_table; const uint32_t *small_parse_table_map; const TSParseActionEntry *parse_actions; const char * const *symbol_names; const char * const *field_names; const TSFieldMapSlice *field_map_slices; const TSFieldMapEntry *field_map_entries; const TSSymbolMetadata *symbol_metadata; const TSSymbol *public_symbol_map; const uint16_t *alias_map; const TSSymbol *alias_sequences; const TSLexMode *lex_modes; bool (*lex_fn)(TSLexer *, TSStateId); bool (*keyword_lex_fn)(TSLexer *, TSStateId); TSSymbol keyword_capture_token; struct { const bool *states; const TSSymbol *symbol_map; void *(*create)(void); void (*destroy)(void *); bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist); unsigned (*serialize)(void *, char *); void (*deserialize)(void *, const char *, unsigned); } external_scanner; const TSStateId *primary_state_ids; }; /* * Lexer Macros */ #define START_LEXER() \ bool result = false; \ bool skip = false; \ bool eof = false; \ int32_t lookahead; \ goto start; \ next_state: \ lexer->advance(lexer, skip); \ start: \ skip = false; \ lookahead = lexer->lookahead; #define ADVANCE(state_value) \ { \ state = state_value; \ goto next_state; \ } #define SKIP(state_value) \ { \ skip = true; \ state = state_value; \ goto next_state; \ } #define ACCEPT_TOKEN(symbol_value) \ result = true; \ lexer->result_symbol = symbol_value; \ lexer->mark_end(lexer); #define END_STATE() return result; /* * Parse Table Macros */ #define SMALL_STATE(id) id - LARGE_STATE_COUNT #define STATE(id) id #define ACTIONS(id) id #define SHIFT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = state_value \ } \ }} #define SHIFT_REPEAT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = state_value, \ .repetition = true \ } \ }} #define SHIFT_EXTRA() \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .extra = true \ } \ }} #define REDUCE(symbol_val, child_count_val, ...) \ {{ \ .reduce = { \ .type = TSParseActionTypeReduce, \ .symbol = symbol_val, \ .child_count = child_count_val, \ __VA_ARGS__ \ }, \ }} #define RECOVER() \ {{ \ .type = TSParseActionTypeRecover \ }} #define ACCEPT_INPUT() \ {{ \ .type = TSParseActionTypeAccept \ }} #ifdef __cplusplus } #endif #endif // TREE_SITTER_PARSER_H_ hx-0.3.0+20250717/test-grammars/erlang/000077500000000000000000000000001503625671400171035ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/erlang/LICENSE000066400000000000000000000236751503625671400201250ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS hx-0.3.0+20250717/test-grammars/erlang/highlights.scm000066400000000000000000000073371503625671400217530ustar00rootroot00000000000000; Comments (tripledot) @comment.discard [(comment) (line_comment) (shebang)] @comment ; Basic types (variable) @variable (atom) @string.special.symbol ((atom) @constant.builtin.boolean (#match? @constant.builtin.boolean "^(true|false)$")) [(string) (sigil)] @string (character) @constant.character (escape_sequence) @constant.character.escape (integer) @constant.numeric.integer (float) @constant.numeric.float ; Punctuation ["," "." "-" ";"] @punctuation.delimiter ["(" ")" "#" "{" "}" "[" "]" "<<" ">>"] @punctuation.bracket ; Operators (binary_operator operator: _ @operator) (unary_operator operator: _ @operator) ["/" ":" "->"] @operator (binary_operator left: (atom) @function operator: "/" right: (integer) @constant.numeric.integer) ((binary_operator operator: _ @keyword.operator) (#match? @keyword.operator "^\\w+$")) ((unary_operator operator: _ @keyword.operator) (#match? @keyword.operator "^\\w+$")) ; Keywords (attribute name: (atom) @keyword) ["case" "fun" "if" "of" "when" "end" "receive" "try" "catch" "after" "begin" "maybe"] @keyword ; Attributes ; module declaration (attribute name: (atom) @keyword (arguments (atom) @namespace) (#any-of? @keyword "module" "behaviour" "behavior")) (attribute name: (atom) @keyword (arguments . (atom) @namespace) (#eq? @keyword "import")) (attribute name: (atom) @keyword (arguments . [(atom) @type (macro)] [ (tuple (atom)? @variable.other.member) (tuple (binary_operator left: (atom) @variable.other.member operator: ["=" "::"])) (tuple (binary_operator left: (binary_operator left: (atom) @variable.other.member operator: "=") operator: "::")) ]) (#eq? @keyword "record")) (attribute name: (atom) @keyword (arguments . [ (atom) @constant (variable) @constant (call function: [(variable) (atom)] @keyword.directive) ]) (#eq? 
@keyword "define")) (attribute name: (atom) @keyword (arguments (_) @keyword.directive) (#any-of? @keyword "ifndef" "ifdef")) (attribute name: (atom) @keyword module: (atom) @namespace (#any-of? @keyword "spec" "callback")) (attribute name: (atom) @keyword (arguments [ (string) (sigil) ] @comment.block.documentation) (#any-of? @keyword "doc" "moduledoc")) ; Functions (function_clause name: (atom) @function) (call module: (atom) @namespace) (call function: (atom) @function) (stab_clause name: (atom) @function) (function_capture module: (atom) @namespace) (function_capture function: (atom) @function) ; Ignored variables ((variable) @comment.discard (#match? @comment.discard "^_")) ; Macros (macro "?"+ @constant name: (_) @constant !arguments) (macro "?"+ @keyword.directive name: (_) @keyword.directive) ; Parameters ; specs ((attribute name: (atom) @keyword (stab_clause pattern: (arguments (variable)? @variable.parameter) body: (variable)? @variable.parameter)) (#match? @keyword "(spec|callback)")) ; functions (function_clause pattern: (arguments (variable) @variable.parameter)) ; anonymous functions (stab_clause pattern: (arguments (variable) @variable.parameter)) ; parametric types ((attribute name: (atom) @keyword (arguments (binary_operator left: (call (arguments (variable) @variable.parameter)) operator: "::"))) (#match? @keyword "(type|opaque)")) ; macros ((attribute name: (atom) @keyword (arguments (call (arguments (variable) @variable.parameter)))) (#eq? @keyword "define")) ; Records (record_content (binary_operator left: (atom) @variable.other.member operator: "=")) (record field: (atom) @variable.other.member) (record name: (atom) @type) hx-0.3.0+20250717/test-grammars/erlang/injections.scm000066400000000000000000000010201503625671400217450ustar00rootroot00000000000000((line_comment (comment_content) @injection.content) (#set! injection.language "edoc") (#set! injection.include-children) (#set! injection.combined)) ((comment (comment_content) @injection.content) (#set! injection.language "comment")) ; EEP-59 doc attributes use markdown by default. (attribute name: (atom) @_attribute (arguments [ (string (quoted_content) @injection.content) (sigil (quoted_content) @injection.content) ]) (#set! injection.language "markdown") (#any-of? @_attribute "doc" "moduledoc")) hx-0.3.0+20250717/test-grammars/erlang/locals.scm000066400000000000000000000014611503625671400210660ustar00rootroot00000000000000; Specs and Callbacks (attribute (stab_clause pattern: (arguments (variable)? @local.definition.variable.parameter) ; If a spec uses a variable as the return type (and later a `when` clause to type it): body: (variable)? @local.definition.variable.parameter)) @local.scope ; parametric `-type`s ((attribute name: (atom) @_type (arguments (binary_operator left: (call (arguments (variable) @local.definition.variable.parameter)) operator: "::") @local.scope)) (#match? 
@_type "(type|opaque)")) ; `fun`s (anonymous_function (stab_clause pattern: (arguments (variable) @local.definition.variable.parameter))) @local.scope ; Ordinary functions (function_clause pattern: (arguments (variable) @local.definition.variable.parameter)) @local.scope (variable) @local.reference hx-0.3.0+20250717/test-grammars/erlang/metadata.json000066400000000000000000000002511503625671400215540ustar00rootroot00000000000000{ "repo": "https://github.com/the-mikedavis/tree-sitter-erlang", "rev": "33a3e4f1fa77a3e1a2736813f4b27c358f6c0b63", "license": "Apache-2.0", "compressed": true }hx-0.3.0+20250717/test-grammars/erlang/src/000077500000000000000000000000001503625671400176725ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/erlang/src/grammar.json000066400000000000000000000106351503625671400222200ustar00rootroot00000000000000(/m5P8< iV~J. $g<*{qwhUUUpJSfl",6D6.魝$6bY9G5.JÎɢ+N\tP?~)݅ac{1?ǹJƁ2+0J`,2.<6SJ~ؑ[_u[b0HWĈK gݷR^E1aȄ&jtHr4e_q^$SЊ|_w.np4s_]KS?k޿bŽJ5]Uy#_QnZWܖtsb>Mj%UH-ٿ!h#E*u:^jruJ&tS!P 3HNc8u۩d..LݽH N?yoЂتH-n?-{]G: 眊RzjJe"(DWǣ_[P ɼ/=1F ,p7KgOk |2+բj#Ї=x/XN^L%h jzy4' slH8İL&¦FM)D"qt9Qm2L&J4FJɽ^/{}8oSO?Ikqz30ԋ35 "dR&5DV3qqk҂,-JJK"0x8-S5I3A  A]$Sp00  @F@  0t &8g3otی" ;lH`KWaݷy56𵒃 ͋!|]:`ۯvʴ QY/cы;F G tV;E(`%^p,}jǪfXJ1Q%xJ!KVhAG<NsIjE?{ZI}^غ:&t_!uՔU[zL8lHS2,J(7'*(Y;=jVңFKSw0K)8Ud%T]zϵ\HXKk޹d[\kl/e hq D hdW.WWG t\]K3¨xik[I0/]+k-KO5l0/-)3[( KcseM /YJPP K?(sw:]5ZyH9ş=H`c"]i]yɢIbc((R롲2y]HeY@{WOI,x/mHT`F\!Ot$_"9E"UNūEd0|絮Hh90xׄoY%x4 `Ҙ=/0jY8٩ަ؅is(5_<"C#RlSCnlMs(f[F(l *-3{j5 v40:Eħ2M7fW:ޏ py$v-,bG1' G!qtڅlo*ƙA^9muYYL8Hy 'qN]#=FA>F!T5Ddع8=Aa ңi!`vd/[ Mw-(#G7^I8"EY4[9}';?@C )֥L;OMq,{ǨZ:Ј scJ$ҭ5SƔ:4*.b=(Z3\:–A 'dP>3YD֘#9y3#k4l19&"]]cNY@KGp&va WZχ Ϋbacߡ,mtOX5g'?)Ĭ}U#zaJs0"Kן-n7UñN,77DmE vr!}vNt2N}.Z<Cz)rvq%`RکYoΙɁf|g$cX */NRpc֧Fv{hiЁaJAO\15sZyK"Hq8p Gƒ5GWF-;gGzw`q+R},z GjoHr{`U܁"ȬɤbGɤJk` S?Yz-B-䢐MshVS1`S_TYtC*3炶_aq /Pe(.#9,*SP01,}mc0*R1)ٽ[tX4q"ȕm 2_i;mbfg>+i_t 勲۴MҶPx01׀5I2zFzd{=1Yo/qmBuM# w1$i!^'j׵ހɷ./-a0^%#TGYu{X)y2!mA0(|'vD V|WuOYfEmW9-TyiYÁBd|% x=v-pnmfRh\z][B摸qc!6 AX"Nߺnl Rڝ؝)fYQR8"gqKǘRap9]j)GrQ)q)_xLucorU0fAaZ4x "DGt>$[aho{Tnr)qOzc 0ʽ09|{P9y]S,8ڨfL ok53n`54ۢ펜;[ך{[_ Xܫ7M7m+ !||-4TDVW2q=<`w_dd٨h&z VR;n-҉s tqÁ5/94ui'jd \ٷl"pwo\}a+HvQ)Gp=^ck/y)?|qar4·~vS[1I'NyAot=0ui6M|> C;ˠ4Kcq!F_TŹmW<|Zܧsy%p]m6uED4:;j?*gGgsnޢX*Ԝi,"k=n9q?F-TY쩓|=ĵbrϿ&Q"g|"ޯZs^TPI~%Oc<4Ϣ4 Dȡc4[~.?w4<6s!T8;S3>DZT3muX֪֚hH_ڸm{bou-NvΠ"NW}CiWR;I fX*@Y058~JM|=T&x$$TQT8;-oLL%r .95@ $9 ;H@-č\&׹3{_oM`aY_ıriN;D" 2kXP WK< }ǚAksֆU[$ 4Nz,[1\NsQ)15`YrڪhOLM%?FȢ@9IqgMHϢUYXJd J.ДL* Ang S Ap$ƎpmQɝ"9S%nDr)ɂ:Q`?zxsq:Q{sy9ׅ,3I2U>co^]+]j"5\ s(;ưTDԌ|Of: w G]68{pйԙLE83N)eU:jIFH{k1*js߾ܴgV*xD}]%Ƴ~gM{<~li|{"m~M οqWQG]Zx2;1Ve^=ps+۷j\{9sy٨!QD*a!*rxT2B+""8rP UoGXoTԴOdoFBI"9e(;8QBk@Q!M + n(%ZꥈDAAh&ΩL#ސ)DG>oã׶ sA:;8\&c3u3BYL=:Nm&>]d\JS3u#*9Y)aReه$nKUv)tgN!Cvxax,U ޳˅-y6|/59{ яyxR"^ }5'.})ڔvPb77Ux}u\9Dg7Rrr~JF M-0{G]P.S^W!] zGC[?sh뜟?um=nS4mbb$ʩ*%MKT ' `ײ(j$GRB4Xa גdC pK7 B=74sPw&⚬vjVE‚u FnoXp-tж[ Ⱥm7m"~g/K;adYbٹР*}]=\iVPQ#گ-KڒkGZ"l~! }.=m[xL3듓`B+c֐݇c- YVH=`wg\IgXմ0dǚxu!:K7̎*)ص;q iz uZ&l et09tJpaٺ,e.jբ|lX篥,{mfSra})ux~les7_pQ[9͂:ι eXkZ1_?h. fr')ޝ19WWߒ":ӲwPKpQ83[#WA0 R俊X#Ic1J/ YHA-%YIJH)W#zi"\J)FuIuXC>O\a*pt>ʦԎIДipcwS$i'3O0WW) ahD;Q~#`^\ɜt>*R<(`ŽY+ڛkŽY+We!H!0O>g`V \b㬃L oˇrJ@B $Yq_A~:o@ȌWicw,MMQYqPB,ؓW0XlMLC~ON{=HZ|&RB9p^iӯ_HI)=}У֣\V<[.@2O $ *f=yf< ԌzA-Pz$yxJAA+&@E; {,:;".W; FW&̷@lpCvLٹؼdw@(_ړN:iU<"I ,bY(>td&jЊѶTNւv`@itTBfv|J(6A/K5S8QJ '@ Gsm1MH6ۀ `e\$n[#rnhMet@mĚiގ[Iϼ*Ϝ8sUy<13/Q.s#KW:Q!3_c\c I9Vwˆc؝i/.ngCǥO/NRvr-s?-$䘥ne Dս+?Bd5,+!/S)T^*SIUD%((5#PXb"'ũa'*5ʼnTH4L0KJTͨ$DG ϑ⦑HQHTNde"9aX WfQȣculV Hv@` Jt ǀQ;桳G(t` ͱqqg &\X|1ά?%iiiki|^,_C/[u:< .] 
uTL1OCS3dHiSI2x*0Kq+e%!TR*lR1֘sgZV\4FQQHBRgU8jH f&F*YcR.Gs¥͉( auMMi K$Vu M J3)sk7!0zLQS,㫪MͥJƚʤ?6O/L61-]8*>i J҇iȤtB~lX$ |YXkpCƏHclw%܍sRE5E\,U&wTs,2%C;Q^MþJfWDycS=LNCBî+Iͪԥr0`xI:YF//JU5fԢ4-Vv$FDQAv#6ܐey.5z 4!*UxVk=BW|4 ^Ǧ{Z5Gz40f_/й ({zS硄MEvu(U%CDPJPq<2'KrI#,a g^f^TME3jzqX&0k J˫s҂VjuIY1 w`jW> ʀ\PT`%mJ}5Wv]^٠LmZ(̲asaTfavA twj' f 6ov#]ku}\ hSڃiY&ѵ[[\59p"t+(3h^rwElvALA1?eƗ;@sq.bl42*1p_J 6)P Pcmx|W&nBP>&EuѨs*2%Zڏ p vW'N C?qެD4r@7YS+9Kr0؎qM+(H.m~bQ7@Ȥ5b _\ٮ)AᒖGh|yc՘wa̅0ؖBPuӦ0i=TGt`#%z9C_&QxwCsc<3mϊN⎫$& r?QNjد:`C6B&ES!~1tVL&; CNo1^˹\s9+ MW3@zb/"ǗlQ(\L3S8bpm;=A0򬱡Y+KuFZ%ҍca{v-F|QqGWןNeR6L΁Q  Laʍ~𠆡[!@[u2ͮ˫#D4D R/jةoV$0@95*ȶgIη߬nIl#% lQUYr`oP&=ºb*%(X)De@١Q1;m3)Aڞ[?c }u5\m9BQ>V,Ȗ3@ot 3ԸJJ #y;{彑t2Q) #)nO`cQ/eәF&z [7fu cm<Ĺk5Eamc.^k~G6EAE_T9ͩ@A?G"6گf_ l^jي%{egpAe2 n'$>yKhzMrS D-CNR3XPr_#GDMt2_X!x?~}~Hଯ: JKpT\) XeډL͖O0Yj$1諏_܏'wqk}&Iػ׍鰋,_ f?J:Am7ݖ875pRP$[FTϸήn)ᒛy51<.ڪI"#!L$vq R SɧB&= ^9fMjJҕE ;T.c.J;5ٝtZN  Tގ%]q(*ॵUrś,iH=]r7mzE1l*gVoᣡBqBuacމAZ#Q\V[on]*ͭсUX픩5S H}L\ǽKR t93G}([ej~K4Z=7[R ~= Ρ'L<'BeH/jLU#8j'2yNO\hIkT35ӒfxLT/[SvgEN{J k 4siغ#HqXrc4b8"ի+¢i)oKdͺPI=COC|T!M-SW_탵JcO.x'-kzff"]l> ҺFDؔQlIXAS)5GUMe-OC'FeG,D`mYX,M'p"vW ȣRHt2yІ1gL@B,&i6vbƒB03>R7mƞYY/sd͆qler'~efy9x%jN8-f,Nz72:pIVI]<_U Ρp:<2/DOw` r!kJEY,n`s>r(ڞ@yPi#I0uz8<|Y/h,(Dȱ-/Fx Hf/smYK/G*YlFY< UӮֆvQ iCϩJgɤuJIPaPC r 00㟴;& teL쭑I(|qt:Mj>Vͅi\'bJI%*_Rë\{gg\z#fS.KspiEkS&>ʀPUPEs) Ζy6ҵ#~J?e2FpLPZ2X%,:5X(N5veD$'m áȈvӺ"*(:Tu<sR: Qe^DST>-I]Xli ڍKj :t*Qa|o Y-Vs~cOfiapX'QBg>:Y%UxbqYv8vuDiLȵj;sdJC5\P{/ SEM󕾴TM|Ğ)09j%A5T=mk VLտ]ao*뼏4i+W-،P0|(*3MM3ϏY=<8gV|O uav}^Wx 8=-kM=rQx F|y2RߡSd':bڊ5pc}dPS:840Sh#`(*#5HuY ٌx;p qod1 Y~c7MJ-4B@3vH^h#u:0b-ڄ&{(2jdK}AbH-{CLժk)q9u^w$K2b|jةx߮BuZhcp>YhM~nh8VO/"Q!~V <._dT+FGH\'`z n8o$7q#r V\I1^BLN n!ov3`:т͞r@ ;* } #include #include #define ts_builtin_sym_error ((TSSymbol)-1) #define ts_builtin_sym_end 0 #define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 typedef uint16_t TSStateId; #ifndef TREE_SITTER_API_H_ typedef uint16_t TSSymbol; typedef uint16_t TSFieldId; typedef struct TSLanguage TSLanguage; #endif typedef struct { TSFieldId field_id; uint8_t child_index; bool inherited; } TSFieldMapEntry; typedef struct { uint16_t index; uint16_t length; } TSFieldMapSlice; typedef struct { bool visible; bool named; bool supertype; } TSSymbolMetadata; typedef struct TSLexer TSLexer; struct TSLexer { int32_t lookahead; TSSymbol result_symbol; void (*advance)(TSLexer *, bool); void (*mark_end)(TSLexer *); uint32_t (*get_column)(TSLexer *); bool (*is_at_included_range_start)(const TSLexer *); bool (*eof)(const TSLexer *); }; typedef enum { TSParseActionTypeShift, TSParseActionTypeReduce, TSParseActionTypeAccept, TSParseActionTypeRecover, } TSParseActionType; typedef union { struct { uint8_t type; TSStateId state; bool extra; bool repetition; } shift; struct { uint8_t type; uint8_t child_count; TSSymbol symbol; int16_t dynamic_precedence; uint16_t production_id; } reduce; uint8_t type; } TSParseAction; typedef struct { uint16_t lex_state; uint16_t external_lex_state; } TSLexMode; typedef union { TSParseAction action; struct { uint8_t count; bool reusable; } entry; } TSParseActionEntry; struct TSLanguage { uint32_t version; uint32_t symbol_count; uint32_t alias_count; uint32_t token_count; uint32_t external_token_count; uint32_t state_count; uint32_t large_state_count; uint32_t production_id_count; uint32_t field_count; uint16_t max_alias_sequence_length; const uint16_t *parse_table; const uint16_t *small_parse_table; const uint32_t *small_parse_table_map; const TSParseActionEntry *parse_actions; const char * const *symbol_names; const char * const 
*field_names; const TSFieldMapSlice *field_map_slices; const TSFieldMapEntry *field_map_entries; const TSSymbolMetadata *symbol_metadata; const TSSymbol *public_symbol_map; const uint16_t *alias_map; const TSSymbol *alias_sequences; const TSLexMode *lex_modes; bool (*lex_fn)(TSLexer *, TSStateId); bool (*keyword_lex_fn)(TSLexer *, TSStateId); TSSymbol keyword_capture_token; struct { const bool *states; const TSSymbol *symbol_map; void *(*create)(void); void (*destroy)(void *); bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist); unsigned (*serialize)(void *, char *); void (*deserialize)(void *, const char *, unsigned); } external_scanner; const TSStateId *primary_state_ids; }; /* * Lexer Macros */ #define START_LEXER() \ bool result = false; \ bool skip = false; \ bool eof = false; \ int32_t lookahead; \ goto start; \ next_state: \ lexer->advance(lexer, skip); \ start: \ skip = false; \ lookahead = lexer->lookahead; #define ADVANCE(state_value) \ { \ state = state_value; \ goto next_state; \ } #define SKIP(state_value) \ { \ skip = true; \ state = state_value; \ goto next_state; \ } #define ACCEPT_TOKEN(symbol_value) \ result = true; \ lexer->result_symbol = symbol_value; \ lexer->mark_end(lexer); #define END_STATE() return result; /* * Parse Table Macros */ #define SMALL_STATE(id) id - LARGE_STATE_COUNT #define STATE(id) id #define ACTIONS(id) id #define SHIFT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = state_value \ } \ }} #define SHIFT_REPEAT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = state_value, \ .repetition = true \ } \ }} #define SHIFT_EXTRA() \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .extra = true \ } \ }} #define REDUCE(symbol_val, child_count_val, ...) \ {{ \ .reduce = { \ .type = TSParseActionTypeReduce, \ .symbol = symbol_val, \ .child_count = child_count_val, \ __VA_ARGS__ \ }, \ }} #define RECOVER() \ {{ \ .type = TSParseActionTypeRecover \ }} #define ACCEPT_INPUT() \ {{ \ .type = TSParseActionTypeAccept \ }} #ifdef __cplusplus } #endif #endif // TREE_SITTER_PARSER_H_ hx-0.3.0+20250717/test-grammars/erlang/textobjects.scm000066400000000000000000000007511503625671400221500ustar00rootroot00000000000000(function_clause pattern: (arguments (_)? @parameter.inside) body: (_) @function.inside) @function.around (anonymous_function (stab_clause body: (_) @function.inside)) @function.around (comment (comment_content) @comment.inside) @comment.around ; EUnit test names. ; (CommonTest cases are not recognizable by syntax alone.) ((function_clause name: (atom) @_name pattern: (arguments (_)? @parameter.inside) body: (_) @test.inside) @test.around (#match? 
@_name "_test$")) hx-0.3.0+20250717/test-grammars/html/000077500000000000000000000000001503625671400165775ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/html/LICENSE000066400000000000000000000020701503625671400176030ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2014 Max Brunsfeld Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. hx-0.3.0+20250717/test-grammars/html/highlights.scm000066400000000000000000000014611503625671400214370ustar00rootroot00000000000000(tag_name) @tag (erroneous_end_tag_name) @error (doctype) @constant (attribute_name) @attribute (attribute [(attribute_value) (quoted_attribute_value)] @string) ((attribute (attribute_name) @attribute (quoted_attribute_value (attribute_value) @markup.link.url)) (#any-of? @attribute "href" "src")) ((element (start_tag (tag_name) @_tag) (text) @markup.link.label) (#eq? @_tag "a")) ((element (start_tag (tag_name) @_tag) (text) @markup.bold) (#any-of? @_tag "strong" "b")) ((element (start_tag (tag_name) @_tag) (text) @markup.italic) (#any-of? @_tag "em" "i")) ((element (start_tag (tag_name) @_tag) (text) @markup.strikethrough) (#any-of? @_tag "s" "del")) [ "<" ">" "" "#u/0j@0 㞝$ b !A~S=XaKf=ʯڎ̗u}/ݖ^~W3 uw[ٝ^לzVM5c@: t 2!"щR_̤ Bs D@ab(PaJA8>!%Hm|:FS1qHW!(!eEȥ>'4X6:J68 ,r vSdzGL+'6&A(%y7[K_!hdP,CoPX=;f-.}C@@8>;=`":5gXÛ H<$]>3v,ǻb>g;XWuѥo\ݓuʙʵ __9JLR-_={c 1v'~gVo&q%ٷ)RtNCōeKX-#d2|;HѶ %[(%|[VN%lհS !n4ǜ &@J~kM\zϨt=f>0aRx/6(HR{@קEa Y[`TS+*".m6K085~L m} RHA0KyʍA1QNMCܨ (SAm}[LyZ.ohx-0.3.0+20250717/test-grammars/html/src/parser.c000066400000000000000000000125041503625671400210300ustar00rootroot00000000000000(/d׵t,8~wϫ5N;ݩ7zbwܫ ;o\lΛWSUyj|ͫpyf."BSDU"yD{>, 2%Tչ^uM(e4f'\Q51_D.  T41;"1 'M6.XXEQ\f@QR`&89BEF" 8@PXĄH!hY \Rk@MDL: XILXH2!yM%iw]P;gMٌ[):ovΪض5wnݱ-nrV]ctͨ۲;mVY3ivޟVW%Ay`IEt5*:jKniy.>"[R6lν7oȎ:|Nۡ{_MV\'mv٭.s<ͩ뾍~bkn-k;mٌȭyu}W9]cdDLY6lm:6[tX=%B,4t*4lDu2=*MO$PQUm8$o2XҌLzGHՑ` ^f"bUIs@XF` * ؃ՙ8b%_9ǺPUY' ~O5z6odUtv+dnzv-ocuָ錊o|Uguљ.zF_wm),YӳYYb{c£t0". 
.X:ե4*%yEu6aEzta(e{Ns2K\\mοSnϿN=Qt֐7~k dЈ$)((d8s KPV:sp ilG@( b0aA b5J`.(bymf*&ð._t&O4Yk%nl9-BjK]Y/܉[zHl[K+dۑԪ ^b_i!StIN;/s|B"{`4Wί@8,]v=O= 9SYEXoS% A}L5o5kZMc<3b,5/#uA5M J9ا'k4JΆ!+Qr*^J1 e4!J e*y&M~3Fk[4 AWWvT: }+'3DŲ t؉ޅFBfkh/HzxƷ7q0b7$uɧ/a&xY>aH¦LxAn =aAھw$&a^<7#iY&x>6|3V|҃Y;;.>|ZטȤJqV)S]ggHf\bUp\؝Ӫkz!o41LfB)ۄ6Ӵő +m0W1 nEcE82׉7$du,E>PۦC ~A_ ]^=$z)W YP!b:d#0;57c wZqfGh,t#FY[} ԑ/D8PDMn1 }!_\qMcݟS @4mb_ƦsS]r7A /Ů'ziNmu spm<#s <\K<=ޫ u$zYgعqi-.hƭY]YP1n}9N{B=1l I1<6pCu7j(ZIѪ@iGq'Fy.j CF<TKOT z{dF:x]IdΑz1.P|j7GXZu =_EUȬ]tA F qh<~X~,Nִ- @NZ[gc 5yvJ|՛qU7J= 89LLTwVC{)m CE-P]B2GKZ^)up)QKnDղ`thuXu^EB|},0uH=MLeuĄn%nUDw EwKo{GG\8 ֚ww"X5xDƏON ׅF'Wɧfc9!Q@mŠ(%ag$y2E"eN0#i#Pݤ) ȗH6BJQfJH(RQ@tv4d 5NXȱbdal=pc,+S"!p%Gk( 7iGڞ>0-14xs g`)o|H;HR ?8";puR?rFCXT搘N`]L *vh!yPaF/+9 Q,^ ]"l+[I`a(:z蛢/scqUnϦU jYXg,@92 )\nn?{آ}BF )-K<A݌xddU$J(@pA`٧d-{Q!H>uC4|e'YFE QZ"',wjL߇Qkӧωuߛc.0+?76 pj'vd$a„=Sb]PI/ ):{7{b ;P5o[X-\}?㮝ㅣeiFxmqR{6;HB$),=MB ;##;ZrX ZƬ it1] ەJz'j帆&˶v9jDQTerH$ǒ|y;3/NX$5Ά"C<x,HGc '{JA0܂N}-cl*L8HFt r+3XL8NVUn"cxF^p'QWL_Sz44JU%LI< >#虙0uVrEauz .sP= wbg&Qx/$X6- (&ydؤ2? #1qY2{Fq+jEPHKoO%sXbcBLpa],`|!/A"ؓjnR% +=r!_* Hs#0DvXE 5jv]H aj(c?~dACB6WΠr^[FrG84(KXhfo:aхBF ەBL #include #include #include #include #include #include "tag.h" namespace { using std::vector; using std::string; enum TokenType { START_TAG_NAME, SCRIPT_START_TAG_NAME, STYLE_START_TAG_NAME, END_TAG_NAME, ERRONEOUS_END_TAG_NAME, SELF_CLOSING_TAG_DELIMITER, IMPLICIT_END_TAG, RAW_TEXT, COMMENT }; struct Scanner { Scanner() {} unsigned serialize(char *buffer) { uint16_t tag_count = tags.size() > UINT16_MAX ? UINT16_MAX : tags.size(); uint16_t serialized_tag_count = 0; unsigned i = sizeof(tag_count); std::memcpy(&buffer[i], &tag_count, sizeof(tag_count)); i += sizeof(tag_count); for (; serialized_tag_count < tag_count; serialized_tag_count++) { Tag &tag = tags[serialized_tag_count]; if (tag.type == CUSTOM) { unsigned name_length = tag.custom_tag_name.size(); if (name_length > UINT8_MAX) name_length = UINT8_MAX; if (i + 2 + name_length >= TREE_SITTER_SERIALIZATION_BUFFER_SIZE) break; buffer[i++] = static_cast(tag.type); buffer[i++] = name_length; tag.custom_tag_name.copy(&buffer[i], name_length); i += name_length; } else { if (i + 1 >= TREE_SITTER_SERIALIZATION_BUFFER_SIZE) break; buffer[i++] = static_cast(tag.type); } } std::memcpy(&buffer[0], &serialized_tag_count, sizeof(serialized_tag_count)); return i; } void deserialize(const char *buffer, unsigned length) { tags.clear(); if (length > 0) { unsigned i = 0; uint16_t tag_count, serialized_tag_count; std::memcpy(&serialized_tag_count, &buffer[i], sizeof(serialized_tag_count)); i += sizeof(serialized_tag_count); std::memcpy(&tag_count, &buffer[i], sizeof(tag_count)); i += sizeof(tag_count); tags.resize(tag_count); for (unsigned j = 0; j < serialized_tag_count; j++) { Tag &tag = tags[j]; tag.type = static_cast(buffer[i++]); if (tag.type == CUSTOM) { uint16_t name_length = static_cast(buffer[i++]); tag.custom_tag_name.assign(&buffer[i], &buffer[i + name_length]); i += name_length; } } } } string scan_tag_name(TSLexer *lexer) { string tag_name; while (iswalnum(lexer->lookahead) || lexer->lookahead == '-' || lexer->lookahead == ':') { tag_name += towupper(lexer->lookahead); lexer->advance(lexer, false); } return tag_name; } bool scan_comment(TSLexer *lexer) { if (lexer->lookahead != '-') return false; lexer->advance(lexer, false); if (lexer->lookahead != '-') return false; lexer->advance(lexer, false); unsigned dashes = 0; while (lexer->lookahead) { switch (lexer->lookahead) { case '-': ++dashes; break; case 
'>': if (dashes >= 2) { lexer->result_symbol = COMMENT; lexer->advance(lexer, false); lexer->mark_end(lexer); return true; } default: dashes = 0; } lexer->advance(lexer, false); } return false; } bool scan_raw_text(TSLexer *lexer) { if (!tags.size()) return false; lexer->mark_end(lexer); const string &end_delimiter = tags.back().type == SCRIPT ? "lookahead) { if (towupper(lexer->lookahead) == end_delimiter[delimiter_index]) { delimiter_index++; if (delimiter_index == end_delimiter.size()) break; lexer->advance(lexer, false); } else { delimiter_index = 0; lexer->advance(lexer, false); lexer->mark_end(lexer); } } lexer->result_symbol = RAW_TEXT; return true; } bool scan_implicit_end_tag(TSLexer *lexer) { Tag *parent = tags.empty() ? NULL : &tags.back(); bool is_closing_tag = false; if (lexer->lookahead == '/') { is_closing_tag = true; lexer->advance(lexer, false); } else { if (parent && parent->is_void()) { tags.pop_back(); lexer->result_symbol = IMPLICIT_END_TAG; return true; } } string tag_name = scan_tag_name(lexer); if (tag_name.empty()) return false; Tag next_tag = Tag::for_name(tag_name); if (is_closing_tag) { // The tag correctly closes the topmost element on the stack if (!tags.empty() && tags.back() == next_tag) return false; // Otherwise, dig deeper and queue implicit end tags (to be nice in // the case of malformed HTML) if (std::find(tags.begin(), tags.end(), next_tag) != tags.end()) { tags.pop_back(); lexer->result_symbol = IMPLICIT_END_TAG; return true; } } else if (parent && !parent->can_contain(next_tag)) { tags.pop_back(); lexer->result_symbol = IMPLICIT_END_TAG; return true; } return false; } bool scan_start_tag_name(TSLexer *lexer) { string tag_name = scan_tag_name(lexer); if (tag_name.empty()) return false; Tag tag = Tag::for_name(tag_name); tags.push_back(tag); switch (tag.type) { case SCRIPT: lexer->result_symbol = SCRIPT_START_TAG_NAME; break; case STYLE: lexer->result_symbol = STYLE_START_TAG_NAME; break; default: lexer->result_symbol = START_TAG_NAME; break; } return true; } bool scan_end_tag_name(TSLexer *lexer) { string tag_name = scan_tag_name(lexer); if (tag_name.empty()) return false; Tag tag = Tag::for_name(tag_name); if (!tags.empty() && tags.back() == tag) { tags.pop_back(); lexer->result_symbol = END_TAG_NAME; } else { lexer->result_symbol = ERRONEOUS_END_TAG_NAME; } return true; } bool scan_self_closing_tag_delimiter(TSLexer *lexer) { lexer->advance(lexer, false); if (lexer->lookahead == '>') { lexer->advance(lexer, false); if (!tags.empty()) { tags.pop_back(); lexer->result_symbol = SELF_CLOSING_TAG_DELIMITER; } return true; } return false; } bool scan(TSLexer *lexer, const bool *valid_symbols) { while (iswspace(lexer->lookahead)) { lexer->advance(lexer, true); } if (valid_symbols[RAW_TEXT] && !valid_symbols[START_TAG_NAME] && !valid_symbols[END_TAG_NAME]) { return scan_raw_text(lexer); } switch (lexer->lookahead) { case '<': lexer->mark_end(lexer); lexer->advance(lexer, false); if (lexer->lookahead == '!') { lexer->advance(lexer, false); return scan_comment(lexer); } if (valid_symbols[IMPLICIT_END_TAG]) { return scan_implicit_end_tag(lexer); } break; case '\0': if (valid_symbols[IMPLICIT_END_TAG]) { return scan_implicit_end_tag(lexer); } break; case '/': if (valid_symbols[SELF_CLOSING_TAG_DELIMITER]) { return scan_self_closing_tag_delimiter(lexer); } break; default: if ((valid_symbols[START_TAG_NAME] || valid_symbols[END_TAG_NAME]) && !valid_symbols[RAW_TEXT]) { return valid_symbols[START_TAG_NAME] ? 
scan_start_tag_name(lexer) : scan_end_tag_name(lexer); } } return false; } vector tags; }; } extern "C" { void *tree_sitter_html_external_scanner_create() { return new Scanner(); } bool tree_sitter_html_external_scanner_scan(void *payload, TSLexer *lexer, const bool *valid_symbols) { Scanner *scanner = static_cast(payload); return scanner->scan(lexer, valid_symbols); } unsigned tree_sitter_html_external_scanner_serialize(void *payload, char *buffer) { Scanner *scanner = static_cast(payload); return scanner->serialize(buffer); } void tree_sitter_html_external_scanner_deserialize(void *payload, const char *buffer, unsigned length) { Scanner *scanner = static_cast(payload); scanner->deserialize(buffer, length); } void tree_sitter_html_external_scanner_destroy(void *payload) { Scanner *scanner = static_cast(payload); delete scanner; } } hx-0.3.0+20250717/test-grammars/html/src/tag.h000066400000000000000000000124431503625671400203160ustar00rootroot00000000000000#include #include using std::string; using std::map; enum TagType { AREA, BASE, BASEFONT, BGSOUND, BR, COL, COMMAND, EMBED, FRAME, HR, IMAGE, IMG, INPUT, ISINDEX, KEYGEN, LINK, MENUITEM, META, NEXTID, PARAM, SOURCE, TRACK, WBR, END_OF_VOID_TAGS, A, ABBR, ADDRESS, ARTICLE, ASIDE, AUDIO, B, BDI, BDO, BLOCKQUOTE, BODY, BUTTON, CANVAS, CAPTION, CITE, CODE, COLGROUP, DATA, DATALIST, DD, DEL, DETAILS, DFN, DIALOG, DIV, DL, DT, EM, FIELDSET, FIGCAPTION, FIGURE, FOOTER, FORM, H1, H2, H3, H4, H5, H6, HEAD, HEADER, HGROUP, HTML, I, IFRAME, INS, KBD, LABEL, LEGEND, LI, MAIN, MAP, MARK, MATH, MENU, METER, NAV, NOSCRIPT, OBJECT, OL, OPTGROUP, OPTION, OUTPUT, P, PICTURE, PRE, PROGRESS, Q, RB, RP, RT, RTC, RUBY, S, SAMP, SCRIPT, SECTION, SELECT, SLOT, SMALL, SPAN, STRONG, STYLE, SUB, SUMMARY, SUP, SVG, TABLE, TBODY, TD, TEMPLATE, TEXTAREA, TFOOT, TH, THEAD, TIME, TITLE, TR, U, UL, VAR, VIDEO, CUSTOM, }; static const map get_tag_map() { map result; #define TAG(name) result[#name] = name TAG(AREA); TAG(BASE); TAG(BASEFONT); TAG(BGSOUND); TAG(BR); TAG(COL); TAG(COMMAND); TAG(EMBED); TAG(FRAME); TAG(HR); TAG(IMAGE); TAG(IMG); TAG(INPUT); TAG(ISINDEX); TAG(KEYGEN); TAG(LINK); TAG(MENUITEM); TAG(META); TAG(NEXTID); TAG(PARAM); TAG(SOURCE); TAG(TRACK); TAG(WBR); TAG(A); TAG(ABBR); TAG(ADDRESS); TAG(ARTICLE); TAG(ASIDE); TAG(AUDIO); TAG(B); TAG(BDI); TAG(BDO); TAG(BLOCKQUOTE); TAG(BODY); TAG(BUTTON); TAG(CANVAS); TAG(CAPTION); TAG(CITE); TAG(CODE); TAG(COLGROUP); TAG(DATA); TAG(DATALIST); TAG(DD); TAG(DEL); TAG(DETAILS); TAG(DFN); TAG(DIALOG); TAG(DIV); TAG(DL); TAG(DT); TAG(EM); TAG(FIELDSET); TAG(FIGCAPTION); TAG(FIGURE); TAG(FOOTER); TAG(FORM); TAG(H1); TAG(H2); TAG(H3); TAG(H4); TAG(H5); TAG(H6); TAG(HEAD); TAG(HEADER); TAG(HGROUP); TAG(HTML); TAG(I); TAG(IFRAME); TAG(INS); TAG(KBD); TAG(LABEL); TAG(LEGEND); TAG(LI); TAG(MAIN); TAG(MAP); TAG(MARK); TAG(MATH); TAG(MENU); TAG(METER); TAG(NAV); TAG(NOSCRIPT); TAG(OBJECT); TAG(OL); TAG(OPTGROUP); TAG(OPTION); TAG(OUTPUT); TAG(P); TAG(PICTURE); TAG(PRE); TAG(PROGRESS); TAG(Q); TAG(RB); TAG(RP); TAG(RT); TAG(RTC); TAG(RUBY); TAG(S); TAG(SAMP); TAG(SCRIPT); TAG(SECTION); TAG(SELECT); TAG(SLOT); TAG(SMALL); TAG(SPAN); TAG(STRONG); TAG(STYLE); TAG(SUB); TAG(SUMMARY); TAG(SUP); TAG(SVG); TAG(TABLE); TAG(TBODY); TAG(TD); TAG(TEMPLATE); TAG(TEXTAREA); TAG(TFOOT); TAG(TH); TAG(THEAD); TAG(TIME); TAG(TITLE); TAG(TR); TAG(U); TAG(UL); TAG(VAR); TAG(VIDEO); #undef TAG return result; } static const map TAG_TYPES_BY_TAG_NAME = get_tag_map(); static const TagType TAG_TYPES_NOT_ALLOWED_IN_PARAGRAPHS[] = { ADDRESS, ARTICLE, 
ASIDE, BLOCKQUOTE, DETAILS, DIV, DL, FIELDSET, FIGCAPTION, FIGURE, FOOTER, FORM, H1, H2, H3, H4, H5, H6, HEADER, HR, MAIN, NAV, OL, P, PRE, SECTION, }; static const TagType *TAG_TYPES_NOT_ALLOWED_IN_PARAGRAPHS_END = ( TAG_TYPES_NOT_ALLOWED_IN_PARAGRAPHS + sizeof(TAG_TYPES_NOT_ALLOWED_IN_PARAGRAPHS) / sizeof(TagType) ); struct Tag { TagType type; string custom_tag_name; // This default constructor is used in the case where there is not enough space // in the serialization buffer to store all of the tags. In that case, tags // that cannot be serialized will be treated as having an unknown type. These // tags will be closed via implicit end tags regardless of the next closing // tag is encountered. Tag() : type(END_OF_VOID_TAGS) {} Tag(TagType type, const string &name) : type(type), custom_tag_name(name) {} bool operator==(const Tag &other) const { if (type != other.type) return false; if (type == CUSTOM && custom_tag_name != other.custom_tag_name) return false; return true; } inline bool is_void() const { return type < END_OF_VOID_TAGS; } inline bool can_contain(const Tag &tag) { TagType child = tag.type; switch (type) { case LI: return child != LI; case DT: case DD: return child != DT && child != DD; case P: return std::find( TAG_TYPES_NOT_ALLOWED_IN_PARAGRAPHS, TAG_TYPES_NOT_ALLOWED_IN_PARAGRAPHS_END, tag.type ) == TAG_TYPES_NOT_ALLOWED_IN_PARAGRAPHS_END; case COLGROUP: return child == COL; case RB: case RT: case RP: return child != RB && child != RT && child != RP; case OPTGROUP: return child != OPTGROUP; case TR: return child != TR; case TD: case TH: return child != TD && child != TH && child != TR; default: return true; } } static inline Tag for_name(const string &name) { map::const_iterator type = TAG_TYPES_BY_TAG_NAME.find(name); if (type != TAG_TYPES_BY_TAG_NAME.end()) { return Tag(type->second, string()); } else { return Tag(CUSTOM, name); } } }; hx-0.3.0+20250717/test-grammars/html/src/tree_sitter/000077500000000000000000000000001503625671400217175ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/html/src/tree_sitter/parser.h000066400000000000000000000123341503625671400233670ustar00rootroot00000000000000#ifndef TREE_SITTER_PARSER_H_ #define TREE_SITTER_PARSER_H_ #ifdef __cplusplus extern "C" { #endif #include #include #include #define ts_builtin_sym_error ((TSSymbol)-1) #define ts_builtin_sym_end 0 #define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 typedef uint16_t TSStateId; #ifndef TREE_SITTER_API_H_ typedef uint16_t TSSymbol; typedef uint16_t TSFieldId; typedef struct TSLanguage TSLanguage; #endif typedef struct { TSFieldId field_id; uint8_t child_index; bool inherited; } TSFieldMapEntry; typedef struct { uint16_t index; uint16_t length; } TSFieldMapSlice; typedef struct { bool visible; bool named; bool supertype; } TSSymbolMetadata; typedef struct TSLexer TSLexer; struct TSLexer { int32_t lookahead; TSSymbol result_symbol; void (*advance)(TSLexer *, bool); void (*mark_end)(TSLexer *); uint32_t (*get_column)(TSLexer *); bool (*is_at_included_range_start)(const TSLexer *); bool (*eof)(const TSLexer *); }; typedef enum { TSParseActionTypeShift, TSParseActionTypeReduce, TSParseActionTypeAccept, TSParseActionTypeRecover, } TSParseActionType; typedef union { struct { uint8_t type; TSStateId state; bool extra; bool repetition; } shift; struct { uint8_t type; uint8_t child_count; TSSymbol symbol; int16_t dynamic_precedence; uint16_t production_id; } reduce; uint8_t type; } TSParseAction; typedef struct { uint16_t lex_state; uint16_t external_lex_state; } TSLexMode; typedef 
union { TSParseAction action; struct { uint8_t count; bool reusable; } entry; } TSParseActionEntry; struct TSLanguage { uint32_t version; uint32_t symbol_count; uint32_t alias_count; uint32_t token_count; uint32_t external_token_count; uint32_t state_count; uint32_t large_state_count; uint32_t production_id_count; uint32_t field_count; uint16_t max_alias_sequence_length; const uint16_t *parse_table; const uint16_t *small_parse_table; const uint32_t *small_parse_table_map; const TSParseActionEntry *parse_actions; const char * const *symbol_names; const char * const *field_names; const TSFieldMapSlice *field_map_slices; const TSFieldMapEntry *field_map_entries; const TSSymbolMetadata *symbol_metadata; const TSSymbol *public_symbol_map; const uint16_t *alias_map; const TSSymbol *alias_sequences; const TSLexMode *lex_modes; bool (*lex_fn)(TSLexer *, TSStateId); bool (*keyword_lex_fn)(TSLexer *, TSStateId); TSSymbol keyword_capture_token; struct { const bool *states; const TSSymbol *symbol_map; void *(*create)(void); void (*destroy)(void *); bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist); unsigned (*serialize)(void *, char *); void (*deserialize)(void *, const char *, unsigned); } external_scanner; }; /* * Lexer Macros */ #define START_LEXER() \ bool result = false; \ bool skip = false; \ bool eof = false; \ int32_t lookahead; \ goto start; \ next_state: \ lexer->advance(lexer, skip); \ start: \ skip = false; \ lookahead = lexer->lookahead; #define ADVANCE(state_value) \ { \ state = state_value; \ goto next_state; \ } #define SKIP(state_value) \ { \ skip = true; \ state = state_value; \ goto next_state; \ } #define ACCEPT_TOKEN(symbol_value) \ result = true; \ lexer->result_symbol = symbol_value; \ lexer->mark_end(lexer); #define END_STATE() return result; /* * Parse Table Macros */ #define SMALL_STATE(id) id - LARGE_STATE_COUNT #define STATE(id) id #define ACTIONS(id) id #define SHIFT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = state_value \ } \ }} #define SHIFT_REPEAT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = state_value, \ .repetition = true \ } \ }} #define SHIFT_EXTRA() \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .extra = true \ } \ }} #define REDUCE(symbol_val, child_count_val, ...) \ {{ \ .reduce = { \ .type = TSParseActionTypeReduce, \ .symbol = symbol_val, \ .child_count = child_count_val, \ __VA_ARGS__ \ }, \ }} #define RECOVER() \ {{ \ .type = TSParseActionTypeRecover \ }} #define ACCEPT_INPUT() \ {{ \ .type = TSParseActionTypeAccept \ }} #ifdef __cplusplus } #endif #endif // TREE_SITTER_PARSER_H_ hx-0.3.0+20250717/test-grammars/markdown-inline/000077500000000000000000000000001503625671400207315ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/markdown-inline/LICENSE000066400000000000000000000020571503625671400217420ustar00rootroot00000000000000MIT License Copyright (c) 2021 Matthias Deiml Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. hx-0.3.0+20250717/test-grammars/markdown-inline/highlights.scm000066400000000000000000000012361503625671400235710ustar00rootroot00000000000000;; From nvim-treesitter/nvim-treesitter [ (code_span) (link_title) ] @markup.raw.inline [ (emphasis_delimiter) (code_span_delimiter) ] @punctuation.bracket (emphasis) @markup.italic (strong_emphasis) @markup.bold (strikethrough) @markup.strikethrough [ (link_destination) (uri_autolink) ] @markup.link.url [ (link_text) (image_description) ] @markup.link.text (link_label) @markup.link.label [ (backslash_escape) (hard_line_break) ] @constant.character.escape (image ["[" "]" "(" ")"] @punctuation.bracket) (image "!" @punctuation.special) (inline_link ["[" "]" "(" ")"] @punctuation.bracket) (shortcut_link ["[" "]"] @punctuation.bracket) hx-0.3.0+20250717/test-grammars/markdown-inline/injections.scm000066400000000000000000000004031503625671400235770ustar00rootroot00000000000000 ((html_tag) @injection.content (#set! injection.language "html") (#set! injection.include-unnamed-children) (#set! injection.combined)) ((latex_block) @injection.content (#set! injection.language "latex") (#set! injection.include-unnamed-children)) hx-0.3.0+20250717/test-grammars/markdown-inline/metadata.json000066400000000000000000000002531503625671400234040ustar00rootroot00000000000000{ "repo": "https://github.com/tree-sitter-grammars/tree-sitter-markdown", "rev": "62516e8c78380e3b51d5b55727995d2c511436d8", "license": "MIT", "compressed": true }hx-0.3.0+20250717/test-grammars/markdown-inline/src/000077500000000000000000000000001503625671400215205ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/markdown-inline/src/grammar.json000066400000000000000000000247061503625671400240520ustar00rootroot00000000000000(/J~+6KJKLv5!x=n3s6EQpaZI˼ sLrq#!c\tPiƷfE!h\iby8f88mx>~[(5*/)E wU\5vԚcE Ot wd6'rl-ᱭ$h--m]\x(oB$D""},FDDDD|B_ZTK5SGbf=OSg?M>v~F;~s^fzg-E.d.gѤϱ"N鱍CʙP~ٻ!\… Ņ ,B*T-PaRe8Zy%?(%'Ad˱bq=[J[SM|PQDl5zj<2cƯJYG̙H%S-&;tyZF#'v:!a:`~´nlllll .)ʵvX|J"QZ]ѢVK-gqeS\kYB>"ӫro-Na5㎲ol#u#1Ne34p%Bun9ikX%"NӜ~fw+̿'<qD )q{7?XRf*Iĝ Y|Zκ`*bM1{uVn (eM" R`DUZ{QfįEWt¸aQq6flc^H >I}u4קI醘E"'qVuL LУ fM΢s2T"=k}& %qHCšÃpI\ 9ъ2PRL'NxXz?JjJ/弋ږ98;% h$"11l֫o篶c*Д7pQ@vDhv< H{Ȋ%&\ -H0#i}$ wX7°>>@; B75"~lv.})l煲衐`KCP+>Ew`RKڌY$F6)ӱ+j_r^.5J7Ծh<): <̐D3⢒B*p6"%ONM`RTo*X*: QB.i5.qCJx@(B hpRfEJJm1r@?XnsB>t$< g{wy7d g:HB2l M]ꋨI?RM>jұM[d8+>=Hr:OwШ~BТ"mUTq4D U=pvĥIC MXA9:/qCIbliVݙf@05NSR8Gܗr%2eMXO^ۓWfb$ {9eAtJz UP(R[;)T0>w(ftbeKwFIF!dWXɐ[Ce%-9dѣP̑/l,9bGC.'BՕiHS\P8/0bqF䄓љ-˴[Ů,ŖY|!m"b7NK-ABM ;ޱqp*(b-Ɯ>'Qq^SAɇanMm,ҼՔ2 |&+ouZYX6CT ܇TT;jZ/ht.ez: q^V8ݜ۫̒t{nPPG.jݷ8=j:@ (JH(X'@j2 ӳہOxw Ѵ='(6e\q'Ǽ|N~ɮĨܲX $e,>M%>#ld = 3VfcsvT?YYX ~~S#)X65ƨm*DweEHՌZ;Syϰ x@Y/n cE3"0$W ^JoPB#M$ %`0?_~*`ns7qOO `6NXN`HBxfYq,Ёo,j->$$@#+3:`:`x Ady <9 P{H=yv7 ATɺ, 9[0GSeQva? 
;cbQv*fvqEaBj3\{Ӎ%c+2oH!a=BZm3-/M[U{#4a_'([/"O+IPW+PG,yC]{ӊ,y&TVqJL$@3(2,F:&.eԽLS#P:e \4,ʸ$EGk!"ʅ O9l0 -M'9Ӥs ZiitII=I}J{LSxr zE瞝2ȵ.5F-{hVJ )jS9C鋞q]a!Py퓜q7v 7ԡ-ݟ8_:920şn"w`P;h5f3-h>JO{V'uZÞ~cbԌĸR%;J!螗|[ vb UBaTIY Ur :RJILZ8,(ʷXMt|3@}" )ɍ1WkR]UM*e l?%Gm`+L'odǐCD)޷Ri+nׇ&!w?G\˫ ̍4N J)QVz;,0c…F*X\>%E#x۷yeC;5ai>.29c+2 SKoe%d F/CB|Yi _3?.atq0'ppwQsUE2LZiiA"˛+B஋t gm6̇JgM7%@t0A}Z(8z1srBAޓʌP2Rj ]Ԕġ#P^a#k#H+% /ԆcKF.;taX X8("!SPmtw`O- HVQ1v(BǗw x¬],sqHl&YZ7 R7;=4=ÐObѷC &=I T3)_6SG͎=0rW0 J?E0O]ӅIMΧ8#z]q< ,$QeMO @E=ISdµK;^V_1^ pg"IA1.FלD R*]o5 Um'rIpELJSG+AR!bGֶCN29 uYcrXb JRK͟U6@CyyD?*86 7.C4%>h2F%|.Ioj'iPs7S0::/J*#''ߥʟx,U (,"Px9t]ih܆;GCΫC9$ SX|E)Mha(9BBk"a6̙`'C(9J~mS:QkH{߈D|&{iZ! ,gҾ]tPɱR2hk$ܦ7#TAtQu_y54<|ikRמeV)ˋjO!+Ԯ! !n;5vy ˍB*DF-AabMQ'=B¶a,z*KAQCVyC |ŋo̷8[[Z𳪰DlYqPYxsp`/Iee ,WrBJ>_e"i$=zR*PPh-): J3/vhS'5>V )nlY+%rz\D6B]A-&g0dwRԛw/-9^mvY&qfxD$ a|r__bB0q gprwl DsmmAY"_}&qn{WUUU--G7Ig>?G7AR hglK) }'~M] ?2pEMR=N, 9^mZ] z9җ] ?6-. W=NҗbJFUcGi|i5`]=N@,sJܤ9 ~؄sr4SB"?E{)k۴ ,\QA07C֑p TݥY;jbuΟqaUQA07c.aaҲkAua+ji/-9^ivY5I8 srtit3a/?|kQ''>ң]4/;b|KF$k:afYq7b0 0 B! C   bQI2Iad<5`tLIg2&2L:Ƥ1Lb &L$eR<dc(<$ 룾k0mw!?mdmpjpV?e)eiw@~@^b9+/[lU6N_֮Hk ELs &8aN-'©8MNQs=ơ8l shA9 ;2pH[=zWJ=x Zf5X /O!4:P3bhpΆ -VSp<ˡM Gol*`ʡJ%`=voɺim w f~[wn#m('GagDZ m k;AN2I3iF^[9ZlET2Y f{YkCj gO:#[Xo 7titB^*$VG&s{#lVa6aOJ)Rg`ׁIӔ{Fu߬Z'Y˰Co'?"'i'F`J0HH"2`HX!0aE0Ds WI(t_~m/(bd@a!ҋb%`F~Whӊ\/ă{q ~38Z.878Y0gr 1rqN[o S)e4ذ1oid׈Z]kDccC_tY_<*`U5h-e\(oʁr)zLQe (Cʝ2)AYSFrO9%.|0T^ڦږT+2տj"sKB}&C? T5QOTش{ 4CCpE%4)8qd,#c8s1lX2qǘ1qb,#ȸaL7 f0(+m3ܑͤHE8*c|VQo ˷2Zs˭qiR١c`<)'(P-˭`"]y@kF]p dm6k9 l. -k*@.@lm#@cT^Kx*o2|M̾/hx k1@TKm:>,$24PT6}DZ$..(+@R9p өJ| 0ifQv gDH `9ӟGy=2(ԜaY&^JȪ<prj-ˆ` ̫Z$1p[FOĬί'\hx/R)4kyr@đdӋia۵&\aˌf)%ԾtΞX66#-b0#yrV}LȒmJ@ClRWCW`"J/If,@Wc.=q|ɴWց1n/4(W0/LZiC/p>kʵQ*x2GB<9Q #6nܽ7VJL:bwZFlJD ;oQSAH,H$[L YVD-S:$\ߠ%,8dC v?{ͼBʏ0k=P-(L pN+2_hx-0.3.0+20250717/test-grammars/markdown-inline/src/parser.c000066400000000000000000001660611503625671400231720ustar00rootroot00000000000000(/"d:-y.=|o1;׽aKx k> @ʅ R0ץR(O&$Y1]k+BEkYTe j2nLfmHJ6\ #XXp̉88. rԨ]~s~}t&!#E|r[385FG_[|0A'u`"f"-5wsT" O>h105TO=S)8A9=7 N.szogpv.C{EM[s|CrkE7%~UkuE^}΂hYvE3#Cl3 gsfV  -mzť r {L( ±d]\8N?ߎo@; 73`b2!H yuY.`tGX NwU-D %aF»]wbHv 1N 7b|2tA&yA5D kb?|=o{"ŗFyj%ͥ+8鴬_rRzp]4-{Rdx|({d!CNDC&>L6Xm"Wv0h Λ!8buCZe .x|N]xmE2Nj- I0bh~B@.WUY7D4"s.mNs;<_ b/!c@`PBA PdbX32R]56B x.Y؆&6N*9͝ ll]_4|je:jy'j֘Y ?$[cH(P_]BW_M 0uWs$j/jhڬAZBjRz;â&#+EA]C+xwfN'6i~; i״Yii0{KKi9I|)*\I,dF$GS @[n4(uFx9[m6?Gff^ ϥat% eoO&ߚNe-C\ : 6&4V̱N=qbXA DP³~L-I`{_Rߔj5kz֬:}j\rmz Rcj9-a4i̽hʼnaٰ=\{Ұ I-K9Ԝ_g/?qh(z5 f î+ *77am)r4;r& }gB9SL4#4>4-uCK~|о"H/>wfMU`;R~"č|C1)uwgrٽʹ&`@l Lጇ52XX*LG6B+ 2]>A i[G@dY!5 gR:!fYn^P_X昺k@Nm,kALȒF . 
IkS(#>!E-z;*톷6&5 q$A1Տx3Q&xQHEL^Ч~ix$K"0} =k,Cog~@?NWgН}y0;/x>X/wa+(8X"2ǃǀN O3]ܞ̃9\bwXҝ)Xʝ7UrgK78m|m,v|;k?YB ږ-djN5RwLyvWb&""{&ޙ]m+&D3m捻gwNq 5RF!r$T+(V(4G#&xRH,b,P DT(GHd`~`_Sy\_?'Ҙ6cm=T30s_EX3ŋH-?W0KZĐtN+wmhz^{z*zӽYCW|G6cl-!HoN_#%Ս( U 啨Zr6勒iluwt%iʎ<2Y^e嫁Hf?hכm"[jc찚ƃCuapZY06۶E,96j@Vȣ$9~V7ϐZx3`"+ b!K+`ns κ!v0E #bɏ4h֬<wLae::8Џ+I@W) >&)$9 K n(DN"&gV$tS N3j-Ld2DDDC1.+ $p:Yp3 7}\ rgJW|\ $ #[$vUɿO3K0cۭ8lo)m[iˮ ;pII\9IIr8$E$I2Fjxx"+EDg3@h4 /#NM~$XK B=XA}B'Rl*g& jRHa<+L47+B;iJYhUE.&>aooоD10ai ~ }cS;B8qotc *,u~R0 tgKx 6`X3Kq:kkw"RެA?}goNJ6VQ+̿K#U3\ )\>jDRsTo Y2ɊLGGY]ލ"ژ{lcK4ծ%&jYZ|X>*_Ыnh"PQpw*a|2 #boBc,[;#_!XwbQuS (CNؕ 5&yOȼeNά=q^]K;93R1FCoqqKb N+O(RdFBFvoЁbP%$aN*$[LՒP66@f23rP`yraY*|̃n3Dhtvwk0f lJ"8IحDpJ$({Ƒ@[C_)\$h@>2tEd8q%8_EDDD>ΎJa] vQ;Lu'T8xµ <ص431q˖]UGl e"&K',bi`[4c$2r3v+1?c"6ӁS[u1  J'1&$9o< m;v+AN"ϸ .q%N~ƭDp`W$8I n% [t?T"Mg ܤi ED"ȉh㈈`BgV(Q@?)1B4\Q0/) @ (4DqNe#m d;tշ z*p2I@W"B.(߼u MǂD$Ll~Q៙V$ܞem% xc # tu1"q\0?BZg(xqmS6a?K֬/E{@ƐPΌ9>s2lcLIJb7^GPq F:׀;n"-HQP5HDܒ-:*sy2:qKʾ2>6,Q}Z𤮇&}|G#KRIyN:ί.u6!5O"uw>LmHcD.`%%N"bL/:<5`I;% $:@Z2`kOu*?(QtxF3mL1lFE *rKR j>@fEtkWv)wjB.ş'+Աa-L,IDQ}?ǤM' `b6 Ya>Fcޜ3x+[xi‹4>'~-I'C|| JZP_pA!>&D%uSHlc uسz OEkWQ~=3Y4<;Xg=I~vbZ{IB ʏU"C{0@-=:tǔ-xCN n%(ݩ ?xL-jsJKac?K?Ro$cvҤ,a.!?gR(adU{= QK>fD!&G6|厞'^OZdRLa8HBT6flW4+}:_nѿ9wnގN޼+Sws`ua˘a٣<I2bWߥt,ha#B̄,U}*r>s8U.),F^!E;9+9G/PAq'L3z9QM/$ ܑkΟ92wzލ$8s}[DP! ǭ|*K_841!"j:B(G 0Р0B"0"! D(9p$ C2L~ aNɤ\79 rh:?ր$YrwPWi7g~8¤ Ѱm&) 2%ӄыZ eUİJ cn :Ql y͋TC ,c/3JiS7 4&\t*lgDTkUs~zK(*kqV:bpB:@A/Taij"Lk=%L.bZ7lrP2UZBm~Zt\⾤0uua}8b^G~,_΂#.PWJ(YnX/W@O^26#6tABe&DUmRCכ҇(éY+ѹBWr0 --iܞ/#rX H fs IvņF#wH k}9[Vn˙@uZ+3D$ r- 8pa f]C\sNӴa3"# ǧ\ qn;&PkA 5#)vX#nf>}9Ipg p?8>Eշ"8o @D^hѫ-`kd4vp^)^EQQJ蒠#Bn-jWbKKMKdo1vH'h3?/b|W)7[TB9~;yK1J3I/a|~H'h /_"_")r/[T:~;0ՔH䫖r|6!ʵG(ȿ^",RV(jt1q(c|@fV|pfUΑzP>MEt3d7ڄr gS\B?GL7!" ᦌzQlm"[u~1R -ÍMp(sS)0̀[RlbLqcD!;yg^F]="k=|6(8{,ۂ2M#&;v85uXI:֫76_$Qօ@00F\n:JjqU`ZX.KK0Dl,lj3[@5u4T2*fCڎVeP/& [(w~H\XTVqDg'xfɉԎJBE:?duZ8*JV! %,^ްOAi+?]8((WRu[ Ndz1Ƽq]:8E"zv!a/?xZU-S*lAST=8폰F鍏}~E cVQThE9oi2^7EL|P@"w3˔/[B'JL3ԎIyPah%( ޱcxά^ۈR phd6] Kr0uy=wy  0Rew VW0uacEFSVRefq <}g7 $Md<{f 1R; [{hɑqL.,r39΁d-BO>m@%!j9\_]\8@E>ѺUd7LͰ>냛$,x =,ݤ [#YDS43ֵ嫳{L9` ,8}eΰ5KrE9=h\0 ##41JU3 'ϺʎnT#$)w9.q9B'esrt&+W $],$_rE 4!Qe^BBm`CdHʌL){SΡQqrȎr7v7#ث.^(V|b%,HR$1 ƊHk4HM '" TNk$L( QSǻjn&SY Ph[<ĵq+jTol 8H5C/ȯ)уH$f-M'0QDJ8-|sȥqKP`(fxOR4yP   P0 A*a#pe A"!rJb4H \X(BUgF"'h)fXRD{$ҲAFLjC^)BnF ۷PZ&ʰ cqb[!JWeI]$BTGMa.R ύPEHQfNt< h/+_ա&z,NLJTəE%BCJ I9"2tFmK]oRr ՅdWx"XGXi1y,TeGdB-BmtL$)\B1.n${;A1q'?eŮo>/vhq%*3rtW[Qb#o\֪Q/PXb!#3 auQtq \s::t#','kw)h҇BӦBag!ڿVaR"iT3e'(H-pnČm"WqCJbuf&迺zԈ լ&sYPfwKG,kC 6|A`Br$`@ꆟlK^\,ΦyMϜkA YFtNu]h_`b!3dFNaK(t+"*$8f&Haʩ[UP/w"'/ 3_ZBI613p{sLϑ y&*C;W3K52\KT"/mK v1jYfNڼː򵶉Xc 3UN$EESVL,%b. V!W\$L*PjB32ٛBSBДLIɑ>>7n@R"IwxTCEhA&Jq |Q4Ъ.7J2a؃8.FS܈D[Q/FBMKLmSE6.s9kg) 0-2gD]Hd~a\IReO3H͕5!:9̥Yb"OEe86bC⻣h.&nC !W?z]jU؊VhriBpSTp$L塗S+FLSTn*XLi+1GHf\$<[ncVYoJ5:R#B=AVn5E'O-~faVEMdIdgi3ɪ.R:!t"&l%EiNbNJ`/4ӻ7Bj".L jQ#/+jfuq'TĚ9L|IC7PT1_*>UK8R5;>"{>r0 ;bT*JDMHT*.}$F ,B6X^ ESHu "zıoUϏcy+hk3U}s $dQ]U YW)#ZZ̅;ccTdfVIrOBj>D`I-  ``@p@00@ˊXe)PD)LVfuJ njtOz@=o?>S1|a{ k]W < pQ%a&E086 9CzńlX s]Р/ǺңOoy3ta6rI7` C_%y˗V1!ǣ O{"3.ѵ+:B)DﯷէRuˁ` =hLCKɩmtg9dܹ!:Jof1 7l<}db w1%BOA/ ej# HD>!9q;LpVPM0gKmB.u}i'h>ŸZ9[Uၮ]0!Aϩ,63*4$f]Y l97y0zn̩Fosi .(/0@Q$ }e cr$bb{oZSގz{ƌèO/(e8r4^ yyxKI/t^JO>+4*ԴrPE3Q:Uh?e%NMLZ7<ۜ'e[Ro*忴bdakT3&!F*;9Ar `t\^}VVezphւ”؆;M(iaT-u6B=nLK3PgTnyگ1"U Hdk6xPX䗹~T-W@:Ȭ񁈶a%Bt': >]ALy?񬇢ENyGSU6AhϑUr'.8SN_7UuAeG=Vɾ*̧nx=Π +|v7EϴPLU D7#olF^pXcё|™88F/tb#O2CRD9W_,3XC8CW·Kh3ؓ M&Vrj%g0S'h&H©Ri`7q%B}:c{GA36'U13\W#t=fe&ǷBe&8=hI,Orh[mPzpZQdD2[NK۷~%~OgM#)&-y] *}X1GFuv jN4B-zhm͟ R >#$? 
q',%ZAב2ɾӇ[QⴭSmQ݇x`ԙ,*~=6og'Jg&\A2.0ʻj'idmlAO@N#dQeu/Uq!RpK LL:g |r[9+StW}]~ʎӐڡRώ(4j9|38g ]8hSgM)ҿYuC[n wX '[HU:SK>E{U̔SH[TQHl[Mv/;H%΄QM%g2.r`ݥϬd;Kgnz}wɫmb F7/g✝Fȟ'y?$ ܓM`H/@(  4q&=hT UuPjFB%f:s+Iy~(T0ǦDC&⨮NQ%G|3 b~1ښpT^ƆS*e>EqaPFoL0`,cCp>41. #n5"_nh-_+[޸;BL!DOҴLZ(DxǗW0}hzFS2PeJ̫0 AQS /$:]Ҍ xTNq.9Tsyfp^y.c9D*qgoՑJtݠu0z{o~@e GK=%r}0$Kgz+Haw/'!y]a Rȗe g2VxW7AAcPE #Bٰό&4GhdR $(QTvrlv[ ZXABiUKu̮o$a^S!GK % [P-ud0j8A;5 rH^wADJ1 Nsvx3C?8Iq3IH3NmOC(gHo^A5?kxQG:(pMh֥xS 1Cp"H(@xoὓ]oӠHq,֧Xʶ(QiI{%L)| F'HV< vğF+uGVU?N [D [JLi@˘dr'c$TR\r O<6;.xi >I &V_2>fn4cEch k*#DO1^VESFNzXyUp-6F"wWlV(]Y]ͺ(z}#KO8vHIV:qϤp"Jqy&J /*|QYq`r|Z6VTA.?'$ @#s8koBsϐްopbדy05B,0BU}/sJlH^;p-PtOzcÄ́:=y]l1]3NwVHp[8J^,3A60iR/,!ٿ$ a1B:K3$I?d{@; "EAڕ31ղuFdmZt=/;ilZܔ`a[| WRv-{Q{pl<<[E4Lox,# 3Gܫ)=iǙ+Uw.#U I@[P,yfq̐G*@.$ԥ~N߰@;NC M.R0!ȵK议^=٫֛Lie+xqHlB_7W*v$ǖJtU5-<2CPje_gZ[JxMoRE/qLIlj=N{{5oD><j`JJA|):gt/U6^ qtH6څy9IXHLHF|0 8v؅(|A;(l"0?[T'Rbm?̒PyԂ4f`~``bbByUS%:0sy&aEg-D2!1۴u5vi2w8sx\w?BiEY6 FUXQHX4Chp1*g=tFZ+*ELU8I@" vWV3'-N;:x'=@#̸p#2 19McpbԒ5n|6ՊqB$ pW6MUү|/. уF ,qY>F7(Rb*Ř JJK>5%jzP O>!-͛+W[[^CAAN"t&ʨZMߎ~&G+ ̣zV+s+|=9iv~#`㰣^䡦-t~?MgapCS/сcy/Jr V\u0.Y@:I'#m~}rIFEsG$+E'B%-y._=HOJ`sCc]EmL&ūdLCrKȣ͜"<|bW6hS7Vሬ&*GH-&Js|NM'=J2GN l0Tq/nSb(k=*2Xp_jHf̶;TUGҨ)EV.]fCL uy ,Z^g8v!Ջ (X$.GQP}m(mN$e7cfH U% )YGp#w9'ur]qb? Ճ:bɢT8Up?%BhQJsQLAIP=ۚ(eo3 E}+cFnoWg6 QV{HT3U}ͅ%GPXp-zCxc57QBh6=mjH>"R[NyR;ËEڋ(ʾyH6Lq4ucÎlh։-,קbz\M,nj*h>q>ʹGbdUyTm?,r0TY7k)ET}Ë 5^:V8K+`/D)SN hU%CʇF}zh=.Qd/ۀU{:^FGgՈSXhL[[2ϣc.]9Qqa-ip^ a/ש^ج}nynTZf|+TB:$,dpؒ#vҠQt%UW'vIR TDnIOl@ P6Ws2ox.̃4L!M}/nA~:i?;//]0ZD(?O\}x4F Lm:9{ByDPR`ox={wquanF]3ͮ+5B@)η fxκ  ҟT.1#[sJgo)G}N:TӮ0hP\*Px_TA5A@,h] t Q4%8A۰/K@x9)~( |_h 22(G KQc#+7_xN&Ggg-$EƧÄj #BsMk!+4[5h`x̄?ܦV~IP ֠gԆ`&3$<#`i6,_l[J|b`XxrcL|*]Y!0!x:ZS- ][aчAk$"وƙ@HgkeRU6ܮl;Ňg+6K5-~m²#[+R]T21v|Q&% j$1iMK9x0@'&a<5Aҝ7='͒Zu&NKo#!јkFR (T|R\R8'.葞#蹥koDpZ}"e _"˳KQ ySTWoRp̘V\H].3{qMcL,gfZ h6x"T QR):Ds##:GExU|gfꛟaM[0K_sըciHG'EG_53Tn99IzT_sDJ/(!zV1syPH F ]W.RڭI;LЅu{=1O?cWX]yxML )}#AQ'0,.$ >qspk0b^ !A+98a5:9z$P R N2љ0=ȇ%.Fx!(ڙYqb}aF JC-O2C9+|JϱLTw)z2"0+pkwу@2ض(`rv*-3F %Ġ8"8UyʤL& bn'ކ;c'a96GjzjjJ?CeP,''u'd1Lb<:bina3uvTSQ; ZU#ryovH$p֣Qns$Ǜe]8J1YWqex^61P pMOCn [j5Uj 0JZe B%W)ă\id~hl=uN=w[v[[>ͧڦY.UPB2HD @H:'%+V6^RfL%/F qKAyx !@" P  p0H0$&B2.~Dbg:oVgαS){9.4f;v9i$ ݊:sH S< Sq w9MqwMSqH$=ϕP" DBB&f/)GđFrf`lۼus4-]#˅4]~uUc|FлG~.`wI#;FҀŕiD2qhL]N#0w9 . 
]N#i$w9.4oӈ*N#q4 wi$.64eJ4w9k$i pbMTNմH E A +Ar7O glp94V͋^HZmVp[0"3bVViV[ZqFVu-z] q`r}eGi3̨_G9g,9( yzei+(7z-^*m+v8" D#im, b~»%QI2ƈ0f#7LDI.ȹrϸ,JrM Mx'1oqDMHyz$Ѯ]Kp[ ;q-?M@VRh`GI3eKzƒ2lXAWƓQBHgTuRQ ҟ~!Lۗ23%}-?q*Unk$L fn9 (؆4/I*-8gi@?ْ`(1']5H2$O -?[4FRi$`r F"4xw!߯wmnv$w90!\N#!i4@lffB\N#rH@4w9<"%ieTл5F[iQ̼qht"4$`8UB!E>A2 U)MmuL$тclZվK1m0o|W?k@?|XҶ` 3gx.=*I TM:)0N6'vk]mtI 'wezONK2T![GF Mh|2|k|v {3Byk(Jz#f3d&ĝzU>.Ή%Xr%eﴌ)=-dP"E-*jjd2TbHF:ݱ@%>HK u" 17J8%pSy吁O-۞_[P/Lim 2<50GJB5NȲ2CcF>A!w#L(e2sm>99-MX 6|J]3ɻ!:mzhC##= {!^F^,K@J{ žԞ# `O݃fѦ=Y vyfvpQt!O|wbBG)~q(#,RO=o K5'd  O̎BMٔ9=RXYTI|{K4XN1KT{˲OEó?|2*U>G }" L+;BBԝ (tuuEjO#K:ڟ2c>P(7Xz|Fq;ܧa£RGSs _Lxԝ9!V-kA&pͮg:.e 0]٠%h$Q 4kw_Y١N̬jjǎ9r\gf6v]WNĥ2~[uhNLE#"""b.JpD E^(p8QFep9sbZjժ\VZjժUVkժUV:QG]T( BPI*he8yB$eYeYYeYeYeieYeY${|@Q Jl6fpl6flf6Ƴ((* d ϱw gncwڵk]vڵk׮]v۵kW '$Irq["C Q }p89cc9c9c9c9c9o߼'M4M$:b E"Z6-E~뺮뺮ku]u]u]׬뺮UקM-߶m̜5ly_.x````60r>~Tڐm[E%c1p.Vfh"H$D"LSm۶#kB!BP( "i@ǰob F`0}y{߿߿)߯/_?E%r/[VktR8?p+\.rG.r\.7r\>r>~T$ XlZ&˪&01?O>O>O>O>O>(_ӨҀ~ҞL"$  B B`4)%U^],S{\ /o}zV0Jy\eo}4{=N)EKm//ExߡeakJC]#!Ul5^ Q;Ap!y#Dxu\ vAo » pZp(ڡqiwU@(Z[?= ۢ=G`ҏ,%՝'M$HnKGB |_rYep{ ,cf10 i#ZK/f1-x 󈪠I=YBfLNJvln}H x&#@Qꢂ4IЗ]=1 ~ޕ [KEacu:T;d9Fnt8kT_M1G@4dS˴,t XBv> B Ɇ TA÷{ (5yΖ}8jd`~Bf*1A3edm##=fx21rK>{@L홽 .Qਟ]Fm9ZW/MgM PX1] `0AP69cbʂf@&Hs=Y2a&-&]%3B&8'Rr`{-,l-b>7>cYdD F6DC\'UCk&?1ul231/ \ە\\œ\9\e$Wkȕޑ Ő #?E~[E~Z";Kl$bX]ZgK b&HfrrSX훞6TE]y'.N4^T%1Y9?^Wzi)Q]IFNi zիW^W^zիW^ZիW^UPGSdĄ)?1d($rlL,]#Q-KjɚZ%KBuԩSN:u"թSN:uԩSNԩSO4MD-Y6Cpv8+،y/Al۶ٶm۶mf88J۶mK A $H AFh4Ogi&EVY'!b:;vرcǎvرcǎ;vرc;vXcUVf&KXè*ՠ>B'X8뺮z]u]u]_uG, BpAh? A"Fw=)%K*0'Kѣ1A b 111A b 1A A bH1l`P'\0?Mt=.B. M7tM74.zu}."C#"}}5}wP An{KAP9Qf  %PB %PB&PB %PB %PBIL(JDBI<ȃ4 хoV S}SO-m۶m۶Ŷm۶m۶mb۶mSv]u:ӥEavav;;;+avx/;u_ϳYw|'>";GyGy'#<#<#<# }<<=<<<<}I 1?NC m@@ @ L"hzDY쌸 2P#MX y"Ή>APR gSU>2VF,ì`˥1 5o8]4P~31Z+5\q ͅ=D*\=Cs%E=E4z:: i'/Z1iW]|БVPR.Ė/SKCJ(o!T/ as38%
2rr\7zzpe+զ-pnދV +ϫN ԟ Y, E煄!{^,E5dRam s|8/ >|Oe.K" aޯdUwy.?/Ūaio\X0,=ԇrTڧ*).:N26@::WO f="@kHQC`?:C>LoJp%><=B|2|4nZFC/5 Q^ZKኽWTAǺs G0XbPz|t܀$EzhsGdLbu:t2|9(@k*ib *QRib1;ʜoc`[LROF<\fθSSB)z=7i )΃^D|s94` JmwW ? ¦.J#*V xgbb*RY% QCMtdzH{< '6&Bp@޺S(,:C/m#ɘ؟^>q#X|\|M} O_s?SmnLڬSeySt2pR\VQ Dd7V3OMLAư;h~{{{{{{{{{{{{{{l4-hsdh`ф.چ("Z@: L---------=ZZZZZZZZZZZZZZZZZ: ͞9dAbX,&Zk6C/fEcu SS3ƹ7g52'?O?~ӟ?Ox탾. @"""<<*L0 `۶$DDdZk$ITMWP;AS T`jj Ι 0;1K((@նҾJ@m ]Ar{r>hQ,laS*2Ʒo\rSs&T ظ1,W-5 ]@ȶ%53xF{ʕK4k;7^P/#aqzRC{͏zK="ۀH!y]q#,vyj}xF9bd#)HZԇ"Qd{ 7]kWxBSgsauw~ʥA]xX&BAK })A#K_G0aSs`㏓+?le֋

    TUXK5N!|1 zS%[(d:Y L13`-/jwJrhgdh4FDħL2$(AT||OI>'$fOI>'I>e)~L||>|>k+۶mUߨ([>1OH"$ Ao @QRA'mS0;ae<ױepJg'z X'BYJpCaSۭT s:;'v z~,/í*Ƃ̗z׻q rSYfpm oQgf/QchЌ$egpo8 G`nqYQ_' (9ppoFpW~ʙ p%|*D؋ J'_f  2ǁ1Uĕ}C6S|,W՘Xp1a?VBc|=4߁B~ۿj1o'2ָ{CL5SFю'\ +ufsa)JNèRDPG oHSf/tadD3HZZdg^ѱznʢ*ImK)lW{xxʈբZ 2]Iif60SQ=ǝBx~8 `B&(}M@ϭD?)Sfb&H')Dة xU\}6xy~ӼoNVpSfl\č0&c`ar8opǙqRițk-UW\Cz6 U7nnPW_ y0G:/&2@P).!&M<cqW $92U;8EAf.&!.fP+0Yd&QcPdn1o!Ք%qwID;*f_ Y䵔`}嘷bnӒ#"`57 iYݔ'?XD8(l`ɌlcuɪDG7G@O!"SЁ*ΪRAS #~0%cP>Z渷 (U BmDY0DZoPd:x:m1FpW +"TQS@d#x C͐V&<4P*Ґ NCvN62IAS#VhsH[$C3nNDsd/G<9[GSw[A&AW],/r3^ ?&ɭ"x8$ϾptO9O|'jz#ҫI?=ZOŤ8Ww0.>_aІ  caN  īxہ^ՓoX qp\[2n^D{HgW"$t{=1~Xk#&xfB/?f::}@' x:ǵ9lNcHˋſ2WMu9) C2V6ח@ Múam.wc`";;@$ )#n!ԓi!ǝ!Yd$GQz|h~!yR9fh2T7, m .;rɅ*M8L`SXmak0X `X:m. BBsV\̛kdfy";LGLaJT$˩T09LM_L a6Xn(sOBit%nA4 t;'4nA" O!@!ATT8n8W7֍(lѷ<ZguLh:uwtuiaIh@ 霆`6TBccq~a?EmuuV5N#9o{]}NEq ?IVh,@(gEz:θgM6 r9y$A!<2pmRu;8yԱ%Q͑D9dWб;t@ͥ训 Xt@'3b\\ei$- mjȱEčCi|PO,K :JcAAVKYV&q *rVsKOV8BK_S$mSY3d}ges jOܧ(յ9{&;I̊F"^HG@@+,tJohqG9=!,i=hA~ e6Hj5WmCy'2Q/T[ ~I_~潐ZZ֣-͍llÎZ2#<3э  m\SɰfkBë/^,< Hw?[=9  YG TFmЛq\?i#Nqdi#EVFn\9+QϛLX}}(qIԦ.vF P0 6 8+,,*QAU|O݂1kk8ǃx-d]&R3<ѻQ0d`D_Lʥ&$ Ñ(BJ!XY!aȫh\V6nLP@ mJ=#*G.i|:!r p_myowխ [k C"rXج1#FE03Bú' [8qQGdttl/(TeCzP&ڇG:@9+LGzxШ#{H3QǠ4MqMyA {耚[C Mѱp۞=:~f;/~=^k =iyHч[}&(az21z~:8=YxGALt pt1gyJϿHGFFWCD:rB;7W|C=_ć[9C>劮Ǵi"yTqc]XzcoSC-j:ts.; FF{xԁQzݦ **HϚ1ѡTt(3vqA|+tD7y]!.B0 r,*.{vl #[*Lb3b'PwrCmUQ)J! ' V^fyh,'( $$XFc0.U9 Vhϋ|1h@Ŷ Vy-YQ&fMFL( "J&T @w(?!@ !@FUuIEo ;+)64 s@PXQ'Ԏz5BWH B>ThhԨ*I)y0H~V[Exf˴KקD@JtU~py6l^EM̪ 4tx|NMVLZm@}4)^ 5D)ҫk sήB%"xKH-s]0P=%c Zo&R[&KJ7o{eE[ya"seT)׋KBt(OFr;&Q#$hG9%3#!y eV4ӉG{=uPODa qYm!xֈ@2~'lƳЯHfR ʡt\P=}=b%2npzEN= ǝÐΦDwv%G=TǴzN'i܎:ѭ/g#-"t9&~*w9; !s %L7XilWNY 0Z> 6fHRTen݈bYc+V8 /hdKaY䙣z[ a慫:Mb3x!9|.Ȣ >Sj4 a9Ojc=y Tnc =ͼ IVV1@EńV<ڣΩ ~hqeG'&%&"-"BNJE".&GfBRmAQ =%@0@\r+[ǔDS V!R PKH*b.NihBh2Dh',A>",@_$+O1D54CÉL(ZA5@04衉BҕA# !%qKA-?hJoV&p Ea&.<߶L>m&0(@G@%ЦkЧ՞tÈ!lJj"x˘{FЇbXio :Z#, L=+ =\)w'ϦFEmiOQS_@m1*ڗ|[#i݁sD`|ɇ|~*A<ʎ !"By{(C3 Sb}bM%;K9@&VU C4֩(d5”3 4B'v88D{8UwnRR+c*{bQ[ΤO*L=\HPDl2㷴 ٯejVYwrg' U9з u> Ka百7XSqVokT[曛\* ltQEUϹ=R1!ܝ3;T)qT-* sՃgDR&rۻ"" # )T$!QxL`+3*~86Tx~ ~/QVXLi} ZgDSKe~ʼn/8xcr & 'R8<u x'""N#F;]5КAB3IkU`zXGE@9( (L(BR7ϊ-PAw!0",Wٔ1 yȢSQ2xBUZ-So4$Q2V12B UT2hv^3V,!#n5An#)~*$^x)gVb#?4,j|{Snߋ.vhfGU\.W-\ #u#mkM:ibF KoZHQ jI*DrRv@BzߺSXzJrJ@FyZw,j:)TdWrU6C14.U$6wtg V 3̖g?rQ DvsCiY!wB@f[[ {P׫SdCY) AJrt(s JBߐRoP"$<JD rϔhV8vtwG9̚8ȯ8<)D${wQ̞6tU즚4L 0ڜ0s ,H 4PUD{ ѽaT 1/ O?|ؼRGao<.~ 1mO87prͧmڨ|YUYd"{G 2W> !Byy0A aPҏ ACjS#L 6.`Ј6#`E2:->TѱN[iB14a ^%pNFH0a8LXc> $T(&Ċ$pT>$Eb0BH`00"hGԭ[(Kɔr$"$ĂNS2 B( 1#B B@˒MlU^wTϪӲ%h˙)yA0Jgy.s>Ua]p.DaFY?mla&_Hg+ͳ 1 Р^w*BڱL;ƀ jқL?TϼAḬ뿙G+NuulM=`Q=i8ꫂN)6Վeg)Z`AÎ 9ti?rPseIP3]5Ù Y&]UgV,{ړ i0]f!ֲ M`B6b٬eًXpmd8`6a%Ė^r=Xlz^X$;1X/z8I@bf/R`_) eWq%2 转/}Tf|9? 
-@B ߿8 J)섃j,` g3.D+\BI2M]X Xo9%g,zA;Fq%vSp^p`UyjoI">ӦU˂$]jb5H9G"u쓨k=.`v\&ªdH5_v/UED=)v[0\T5Qg.+&Y wh|pO`&QϷ^|tL-%M}xaY1鷮&K;@#U >&H'*dਿO:bFLxi3ؖƚF۶&'_]-KKE /Q4KYM~`ɑ9e}kfAyoՏy/(3%%cduUlk>=^×M-6&{>^ ۂ=, Ȭf@z{LX!JC-QbLZq?:*vczpLao/K/C=O5'>TR&5 ͮ9u3ΞV2f8:GsmGPK0En;gFrHx@גZIŐPu_˲Wiv%љkA[s&&`w!7#8quBK-^\z4P@Nx-Qd8'P&8*)),OV~O<#D攼)WVC U`{bVJb*+qr DR# 6r1*lEEKҬB>RTRsb;jUv4Vm%9 Ӓ*Ijgh)=]Gd| akJa;30lbkDZ6cLJ3 F~yCz6syܰːVC\B5+" X kg(:B9D:Q5ŽDy^ض `.s4+1>*3Y ^ kEzpEOkkt*I", 9]ronXCX+hJԎZT0REc5iF#):SYÄa8gAޒ| I CTu+4`֜fN <(b V*G\[p& ˆ+1 bga"iMgǞ-Z0eZ Al- *AvYmqPEkܨ,щbbj "0 GB D '0Š FSb0f[Hio>~9A%H>QANv2`̙AJ4X@%Ւ%)YYذ C]tӱF*hdzR5 17ֈ\S5(gfI@IA;Yձ`a/9/sES?lInNvA69S-{`+c~v 2 GZZ̓ ^:e=){\P6yeuuIlYLފ\\SzQ[\>yg nM亾WGBNj݅r:QQ/^vYR<\ըwQb(Yo&x~;am7ƹKa\%Kda+30jgK[F3ETEĢszgʌ?O]r*;uwjJxa&vK@R=O~|z l۰kU{T[*4Cd1X44Fu7_]˕RduE`nE Xk_w 0JlӀ6.cԋn˧@A> ߫j{5CXEJV&F';DWt5ABo9V̖!rFfMqed? UwC,9Fֹͱ͛Wl'O{n6ϖwM!4HU{ Ҥ)`"^ N!@Fcn%k*T<rk6vsxED3tŚٸ+g~+ 4VҲ~ETltx9ܷ27)gǨV}˯LfL5DYwәs\cd,Z}hy13Pdݍlŕ!#(ZJ!$:|wH+(迶ey Vll4v&ޘ5Y}p#-vsd@B6}~ ds >F.]asﺚvn̼={NTJgc>إh㗋ni 9S;!os,1Ç$ߕwj~2\[bubbO|FW %ʊ-U'CCՌ4. 3BbL*7v tpzC, D*WEUrB}W~y=\!naxtXB¥T9|hyQeIʥMYq|wqQ@#F!WSH3, vrh`mZ80B%{/DAr3?-fnNlwn?ך.;Tm$Ô xmSsQ4 zOXDC[Fg:-ӇPgLw3b6M}ԇO4}ژƋqhPZ rhh #iC.Tè=KFj(V6 Aɳ[ahICn!}1Gb<_oQŰ[XaoFBt39.67ScX;xYZE) + ]e )&QLJkWu&Æ+.j/~MFSGffea&,ÅYa2hªo(8hdC:C2bB "ִRV0@X3t& 0K"LbXL(r]ObJgoDneGiu?Cw<~Vw\AqT*<,O升'EL+#(o3:%~Zʄ-щ]j PAH/e âh }BuK B5_NrVH~>UAlu+ոp%O7}Ei2t)a"D?zT8CE-^^6Z"tA?jil2T\}xk@;@HIqY 0AC}%A/ͫ M X}o̡IAZ1P&Cza_IY>+ߜKRjN7-Cu>bh?d Ӌi^p爸"=]y϶*H)1Uu EDj[nA\ݴX,?y^qB\>ns<z3k~pu*@kkeYK z+/8iVve솕*͚ Jsjg;\9{Nm mKQcE9&,0-hnS]dej^hy z(Uz;X)ۍ@Iw5y1K6j^ Wn~+VăqɎ"}=\w" Vȁx1U ߝ w(l1w#x!܇uy|J]1Hds0NC\rhTu\Ē2="ЅcW@Pt MRP'lW i2+ΐa9T:c }-&h`p9+A1h3stƒ8fC 3n-?k3kW< ` ec"JGpfGkBչCΌ|YĀ ¿EW;5WwLmajhw;JG )R4Cf;qdgS!rj[ˮW܌Qs9˞(|/J2y3EVQoMpEqއs#Xe {KkK P.'TЎijy 3,*h}ʤ@Sb1!X(9;XLyA~2r kOTcMC;bMl1B%6ջ v69GaFaĵE^|'l X w[Ei#s-SY,fHV'FCȟfZQqQaWcv{M`^&= A3bYv%0Z\MĽ^ 3KhNJ#(m_&t#&g HsG&2) C.OFHZs:Mpٖ֜2.mB?'NXABA\X~ 6"\ӛ$Dc%N9REOq3 "ԋ30LeSd>C2y[fc Q_C$,H`f*`, }o ".DI#:adYBz&z|IBen{p$]̭ u M`0C]ʼ3EJ+`&e!!0:$ةKß>ks${ ņb+dLJ"|jM.v)r=$zI >' [%! H3( NzڄFPHj\'iĴ>H&`xh LخBtu? T{BW"= i(p BG@-@> '=3I "艟g? HPu B`ª=. 7尋/tk06yT06Fk^_Q-=+%`wFPk +$4>y^[ 5Y&$U6 `v6;'{YΫ'{˔wNǶE#[#~ԏ zz}ov8Dԥ Ƴ .5gXN ~_3'hm3^e 1@5s㪓*I|sQ8]0^~uAH]MDbc/j mchM`p n)-e@I.a$jU%l^I 󮺄 k" |h|3텡k ''CBAA#{V&&`?)@Fjnk26LQCV]D\\ѳK`ܖ 'dݭcޛM+4@ۡur)0}jU`퓧-oE.V~宄a>Pt&5 ?OkO "J#H6!Ғ[glBz gLkRF de8t\<=*f!3([P}3{,Tskڑ̼auXz+Nzz|OZX'-yXȩL-y{,ML=zf?iIBIc!IiL$z6dFVh$p 7Fשf4 oṚ7;G5<^(UtX[Nx&f' )/'dU^KF|vV㢦wW5:h (h4,à "P Tx Ȋ#qH\ʬѴLqd,ri/K1%RsNhbV "˫䜉F/:_uVh#l'Ȇ))Co0 +N^$Tۙ5~eYI'X$-dKJ816O=Vrk(F殘PeElF^Rc*\UUۜNL?~zFq_u[YT(JHq`Ck1@Dnh&Ie @0DA!H;b_*O 2Wu!w. MD 0/*EAc(@c.wYX q)#kRcgG"xc>i"'`˹^qDTI1|.B_v$i^V}nˑca362>-dc|VH ,B_76|UjGOX o;V a@%s£)B$"5*4 *HҮBK>.v%"[pژ;B8BFD-hW J:C$> +\T"gQxpMfBAH)zb,8/EZIb;mU\b*8USvA\,lBV]"S"<&d7@ 2|kGx9 J] U9u¹fZ$ { "!HS[. ج"Ab>Ȭ+SZL w t3?KXhȃuE왐iar>E&݊{Ex 1!J11El٤ Q mQ~DaCq\t ni-^Ōa<;0Q*:iaZ0C? 
udC=oĖ[&ԺCg2uib.VO86 qf ӵ s029hBgbfb&# ȼ2'3L>w$l$z KCSqiѩ)DYe0R(AdNNHIzXJQYDDYI:QcӘmd_POgw,9{a1#(jgl ʹEbQL(zԩԇUQLiFpq vl[DVU԰(By-TQ0aϼҹr2At˄5B1HrէGq4=&RU!82g -L/רݽCvNS`xX,!IpCk G6a}rq[Ѧ;X*F>D@a%b5AbJ#Uq|g8/[RdpL~zw/q}7;<p.|bfzF`119ʋ:P5C`2;2K\uMȷ̛FѵfPxg ʘnOT|}'I /J˺ _8f}QIGKm x2@nMÁdޙ59'O%[3_+HsдT~Hp8'#p&b~'XO{?Ku^8 **} ߞN $πSN`>z%B#i_mIC,UGAG"<qe#(*^mHKj]`%7+`$*X2>#xxzFxl َo\T=E: UtxSz yA-$AE"V(wm?AM!()WTp,0X\RµDBFJpZܾԤ̰ѢghK8*$G)IݍCmDqU0TkFb,-‡wr[IH QP$bY.H-<20g_b/m̥ Špp%l楚mJl"3 P\|QMn;'xv}>]ܝ ȫRϟӴhoىy_l߼<=@bcK?qb feiOwm8=xڭ l h(0}ǵ[پޝY[]}c} xաT(nQN4^>7 z⨭46'ŮShTa-Qef1FFZ\w 9FS7.u|./Ã䬏(Ye4 a66tC,8 K\jdd4}Z :sD̜G LK' =I5)"PcjkwֱG h[҇|v-Ͱ ՒQ+7ft}6AP\xT(hӧdC N*M]Qjה&Q1PR$5 ,:`PMcؓR!hlܘ?sn/ѯ37ެ\_9Dw3ewÍ +U^j;^ [(vOYBXovJhfRydƂEnmkr mj"ή\YÆ!_v- NJ*] RC`!'}}yTNPu*f? m6ArY31ZG-PTf'\,vTSn|TmO%N-] ؈bs-ݖ{~w$QDNp2s mч  }26ςcb|Ό䢥,=RZy$.G iA %i*em|Y+5v 7jhC<=r{҇g0R).(%H"C݅(jNSĴh">X~b L!QqkM\_7$+qa?~`U?a2_O[vxO0rh{ )VV%,?/̇3Î ZfJnhڬ$QW%'lM#-+j 1RDֵ KʅnԢJzQ"yiM_kQʹ6V*u2mCKUk }XVXXPWR9i&eHz؂HHC cd''#逄Gu"*vZ ֵ[U;Z;G[{]i\y\%ũM﯐=&uUݱ[bp"f"RZEI-В:k/f4@rq?2w*N,bFM9'&!+ձTtSR 7Z)ڢw+ljiQ lv@흵r͈:COVծֺvG޺Qg(jdS6 AR']4.+EzzxsCkl_(Ĵa*w@+rg!" ds2b%q!TB'( X2+oI:"POC!#<3a^$ 0xmۮ%M?o0zps$||¸ 3j62ă_$R˸ewHJ" 8ޘ)bc(Q* +oz忄? *:7ԉCKjti4P@'sD",XJuk ez#8`q~Nxiࡄi^ DܾvK$N [E-64|0HXŵ\19 ̹!w.nm(N.Mg\MߴF86Ib&LƕPԛKwXo1+)ɓqN cXҀlԞmH7Ǹ;F&1֑:K(*4jʫ+tqd@_9ЩK `S:9c ϐ709!VK4dm/Cq^XG]`C`DGOI;jኹ4N)w٭WJ@,p^$tDk\25%=ˌb+$[zU3C"4#絬 Gف9b܎;ΙI\GFJNܹ)U@AA|<;/EwEAacΪ#HjgDGfIPqURWJҜ9)Gp.=1${߇4AT(IEt8[3"{N9~`&hq2 -l#4 3+©9ti!nK kH_ D2􆕹lTsB;n#S<3P`B0BpCC9 u ButVm׵BuS lG/gN) bq[4#<3LmmŒހh#y"PP3$*:okC 4S"e=ф5xOĊs+z"tC퓤vZ 4Z$JDF:Pzᵜtյ-ZVC|gbdkc*B2 TW4+xHWQCڡ]yr,SRH: ӈ*?P=.#E[Xe@[>ek{*`|vyVu)DAA8cf+Wt4kQ`')3)3bV6](b &i3 !UL#+_Ks_6_"}?O7F6DG1HKd%9hBRR-q#w ObՋ c9dCd'! SU[Wĵ f舁K@&q!$] s<=OlLH4P-LH-&0\`BpV?@`&&ʮL|ЉCKAFBՎ"i7|z\xL׏Odt'A:H\E _BH#efm.GqѓH93 5JG -K]72)xĤ(s6A_hx-0.3.0+20250717/test-grammars/markdown-inline/src/scanner.c000066400000000000000000000370241503625671400233230ustar00rootroot00000000000000#include "tree_sitter/parser.h" #ifdef _MSC_VER #define UNUSED __pragma(warning(suppress : 4101)) #else #define UNUSED __attribute__((unused)) #endif // For explanation of the tokens see grammar.js typedef enum { ERROR, TRIGGER_ERROR, CODE_SPAN_START, CODE_SPAN_CLOSE, EMPHASIS_OPEN_STAR, EMPHASIS_OPEN_UNDERSCORE, EMPHASIS_CLOSE_STAR, EMPHASIS_CLOSE_UNDERSCORE, LAST_TOKEN_WHITESPACE, LAST_TOKEN_PUNCTUATION, STRIKETHROUGH_OPEN, STRIKETHROUGH_CLOSE, LATEX_SPAN_START, LATEX_SPAN_CLOSE, UNCLOSED_SPAN } TokenType; // Determines if a character is punctuation as defined by the markdown spec. static bool is_punctuation(char chr) { return (chr >= '!' && chr <= '/') || (chr >= ':' && chr <= '@') || (chr >= '[' && chr <= '`') || (chr >= '{' && chr <= '~'); } // State bitflags used with `Scanner.state` // TODO static UNUSED const uint8_t STATE_EMPHASIS_DELIMITER_MOD_3 = 0x3; // Current delimiter run is opening static const uint8_t STATE_EMPHASIS_DELIMITER_IS_OPEN = 0x1 << 2; // Convenience function to emit the error token. This is done to stop invalid // parse branches. Specifically: // 1. When encountering a newline after a line break that ended a paragraph, and // no new block // has been opened. // 2. When encountering a new block after a soft line break. // 3. When a `$._trigger_error` token is valid, which is used to stop parse // branches through // normal tree-sitter grammar rules. 
// // See also the `$._soft_line_break` and `$._paragraph_end_newline` tokens in // grammar.js static bool error(TSLexer *lexer) { lexer->result_symbol = ERROR; return true; } typedef struct { // Parser state flags uint8_t state; uint8_t code_span_delimiter_length; uint8_t latex_span_delimiter_length; // The number of characters remaining in the currrent emphasis delimiter // run. uint8_t num_emphasis_delimiters_left; } Scanner; // Write the whole state of a Scanner to a byte buffer static unsigned serialize(Scanner *s, char *buffer) { unsigned size = 0; buffer[size++] = (char)s->state; buffer[size++] = (char)s->code_span_delimiter_length; buffer[size++] = (char)s->latex_span_delimiter_length; buffer[size++] = (char)s->num_emphasis_delimiters_left; return size; } // Read the whole state of a Scanner from a byte buffer // `serizalize` and `deserialize` should be fully symmetric. static void deserialize(Scanner *s, const char *buffer, unsigned length) { s->state = 0; s->code_span_delimiter_length = 0; s->latex_span_delimiter_length = 0; s->num_emphasis_delimiters_left = 0; if (length > 0) { size_t size = 0; s->state = (uint8_t)buffer[size++]; s->code_span_delimiter_length = (uint8_t)buffer[size++]; s->latex_span_delimiter_length = (uint8_t)buffer[size++]; s->num_emphasis_delimiters_left = (uint8_t)buffer[size++]; } } static bool parse_leaf_delimiter(TSLexer *lexer, uint8_t *delimiter_length, const bool *valid_symbols, const char delimiter, const TokenType open_token, const TokenType close_token) { uint8_t level = 0; while (lexer->lookahead == delimiter) { lexer->advance(lexer, false); level++; } lexer->mark_end(lexer); if (level == *delimiter_length && valid_symbols[close_token]) { *delimiter_length = 0; lexer->result_symbol = close_token; return true; } if (valid_symbols[open_token]) { // Parse ahead to check if there is a closing delimiter size_t close_level = 0; while (!lexer->eof(lexer)) { if (lexer->lookahead == delimiter) { close_level++; } else { if (close_level == level) { // Found a matching delimiter break; } close_level = 0; } lexer->advance(lexer, false); } if (close_level == level) { *delimiter_length = level; lexer->result_symbol = open_token; return true; } if (valid_symbols[UNCLOSED_SPAN]) { lexer->result_symbol = UNCLOSED_SPAN; return true; } } return false; } static bool parse_backtick(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { return parse_leaf_delimiter(lexer, &s->code_span_delimiter_length, valid_symbols, '`', CODE_SPAN_START, CODE_SPAN_CLOSE); } static bool parse_dollar(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { return parse_leaf_delimiter(lexer, &s->latex_span_delimiter_length, valid_symbols, '$', LATEX_SPAN_START, LATEX_SPAN_CLOSE); } static bool parse_star(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { lexer->advance(lexer, false); // If `num_emphasis_delimiters_left` is not zero then we already decided // that this should be part of an emphasis delimiter run, so interpret it as // such. if (s->num_emphasis_delimiters_left > 0) { // The `STATE_EMPHASIS_DELIMITER_IS_OPEN` state flag tells us wether it // should be open or close. 
if ((s->state & STATE_EMPHASIS_DELIMITER_IS_OPEN) && valid_symbols[EMPHASIS_OPEN_STAR]) { s->state &= (~STATE_EMPHASIS_DELIMITER_IS_OPEN); lexer->result_symbol = EMPHASIS_OPEN_STAR; s->num_emphasis_delimiters_left--; return true; } if (valid_symbols[EMPHASIS_CLOSE_STAR]) { lexer->result_symbol = EMPHASIS_CLOSE_STAR; s->num_emphasis_delimiters_left--; return true; } } lexer->mark_end(lexer); // Otherwise count the number of stars uint8_t star_count = 1; while (lexer->lookahead == '*') { star_count++; lexer->advance(lexer, false); } bool line_end = lexer->lookahead == '\n' || lexer->lookahead == '\r' || lexer->eof(lexer); if (valid_symbols[EMPHASIS_OPEN_STAR] || valid_symbols[EMPHASIS_CLOSE_STAR]) { // The desicion made for the first star also counts for all the // following stars in the delimiter run. Rembemer how many there are. s->num_emphasis_delimiters_left = star_count - 1; // Look ahead to the next symbol (after the last star) to find out if it // is whitespace punctuation or other. bool next_symbol_whitespace = line_end || lexer->lookahead == ' ' || lexer->lookahead == '\t'; bool next_symbol_punctuation = is_punctuation((char)lexer->lookahead); // Information about the last token is in valid_symbols. See grammar.js // for these tokens for how this is done. if (valid_symbols[EMPHASIS_CLOSE_STAR] && !valid_symbols[LAST_TOKEN_WHITESPACE] && (!valid_symbols[LAST_TOKEN_PUNCTUATION] || next_symbol_punctuation || next_symbol_whitespace)) { // Closing delimiters take precedence s->state &= ~STATE_EMPHASIS_DELIMITER_IS_OPEN; lexer->result_symbol = EMPHASIS_CLOSE_STAR; return true; } if (!next_symbol_whitespace && (!next_symbol_punctuation || valid_symbols[LAST_TOKEN_PUNCTUATION] || valid_symbols[LAST_TOKEN_WHITESPACE])) { s->state |= STATE_EMPHASIS_DELIMITER_IS_OPEN; lexer->result_symbol = EMPHASIS_OPEN_STAR; return true; } } return false; } static bool parse_tilde(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { lexer->advance(lexer, false); // If `num_emphasis_delimiters_left` is not zero then we already decided // that this should be part of an emphasis delimiter run, so interpret it as // such. if (s->num_emphasis_delimiters_left > 0) { // The `STATE_EMPHASIS_DELIMITER_IS_OPEN` state flag tells us wether it // should be open or close. if ((s->state & STATE_EMPHASIS_DELIMITER_IS_OPEN) && valid_symbols[STRIKETHROUGH_OPEN]) { s->state &= (~STATE_EMPHASIS_DELIMITER_IS_OPEN); lexer->result_symbol = STRIKETHROUGH_OPEN; s->num_emphasis_delimiters_left--; return true; } if (valid_symbols[STRIKETHROUGH_CLOSE]) { lexer->result_symbol = STRIKETHROUGH_CLOSE; s->num_emphasis_delimiters_left--; return true; } } lexer->mark_end(lexer); // Otherwise count the number of tildes uint8_t star_count = 1; while (lexer->lookahead == '~') { star_count++; lexer->advance(lexer, false); } bool line_end = lexer->lookahead == '\n' || lexer->lookahead == '\r' || lexer->eof(lexer); if (valid_symbols[STRIKETHROUGH_OPEN] || valid_symbols[STRIKETHROUGH_CLOSE]) { // The desicion made for the first star also counts for all the // following stars in the delimiter run. Rembemer how many there are. s->num_emphasis_delimiters_left = star_count - 1; // Look ahead to the next symbol (after the last star) to find out if it // is whitespace punctuation or other. bool next_symbol_whitespace = line_end || lexer->lookahead == ' ' || lexer->lookahead == '\t'; bool next_symbol_punctuation = is_punctuation((char)lexer->lookahead); // Information about the last token is in valid_symbols. 
See grammar.js // for these tokens for how this is done. if (valid_symbols[STRIKETHROUGH_CLOSE] && !valid_symbols[LAST_TOKEN_WHITESPACE] && (!valid_symbols[LAST_TOKEN_PUNCTUATION] || next_symbol_punctuation || next_symbol_whitespace)) { // Closing delimiters take precedence s->state &= ~STATE_EMPHASIS_DELIMITER_IS_OPEN; lexer->result_symbol = STRIKETHROUGH_CLOSE; return true; } if (!next_symbol_whitespace && (!next_symbol_punctuation || valid_symbols[LAST_TOKEN_PUNCTUATION] || valid_symbols[LAST_TOKEN_WHITESPACE])) { s->state |= STATE_EMPHASIS_DELIMITER_IS_OPEN; lexer->result_symbol = STRIKETHROUGH_OPEN; return true; } } return false; } static bool parse_underscore(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { lexer->advance(lexer, false); // If `num_emphasis_delimiters_left` is not zero then we already decided // that this should be part of an emphasis delimiter run, so interpret it as // such. if (s->num_emphasis_delimiters_left > 0) { // The `STATE_EMPHASIS_DELIMITER_IS_OPEN` state flag tells us wether it // should be open or close. if ((s->state & STATE_EMPHASIS_DELIMITER_IS_OPEN) && valid_symbols[EMPHASIS_OPEN_UNDERSCORE]) { s->state &= (~STATE_EMPHASIS_DELIMITER_IS_OPEN); lexer->result_symbol = EMPHASIS_OPEN_UNDERSCORE; s->num_emphasis_delimiters_left--; return true; } if (valid_symbols[EMPHASIS_CLOSE_UNDERSCORE]) { lexer->result_symbol = EMPHASIS_CLOSE_UNDERSCORE; s->num_emphasis_delimiters_left--; return true; } } lexer->mark_end(lexer); // Otherwise count the number of stars uint8_t underscore_count = 1; while (lexer->lookahead == '_') { underscore_count++; lexer->advance(lexer, false); } bool line_end = lexer->lookahead == '\n' || lexer->lookahead == '\r' || lexer->eof(lexer); if (valid_symbols[EMPHASIS_OPEN_UNDERSCORE] || valid_symbols[EMPHASIS_CLOSE_UNDERSCORE]) { // The desicion made for the first underscore also counts for all the // following underscores in the delimiter run. Rembemer how many there are. s->num_emphasis_delimiters_left = underscore_count - 1; // Look ahead to the next symbol (after the last underscore) to find out if it // is whitespace punctuation or other. bool next_symbol_whitespace = line_end || lexer->lookahead == ' ' || lexer->lookahead == '\t'; bool next_symbol_punctuation = is_punctuation((char)lexer->lookahead); // Information about the last token is in valid_symbols. See grammar.js // for these tokens for how this is done. if (valid_symbols[EMPHASIS_CLOSE_UNDERSCORE] && !valid_symbols[LAST_TOKEN_WHITESPACE] && (!valid_symbols[LAST_TOKEN_PUNCTUATION] || next_symbol_punctuation || next_symbol_whitespace)) { // Closing delimiters take precedence s->state &= ~STATE_EMPHASIS_DELIMITER_IS_OPEN; lexer->result_symbol = EMPHASIS_CLOSE_UNDERSCORE; return true; } if (!next_symbol_whitespace && (!next_symbol_punctuation || valid_symbols[LAST_TOKEN_PUNCTUATION] || valid_symbols[LAST_TOKEN_WHITESPACE])) { s->state |= STATE_EMPHASIS_DELIMITER_IS_OPEN; lexer->result_symbol = EMPHASIS_OPEN_UNDERSCORE; return true; } } return false; } static bool scan(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { // A normal tree-sitter rule decided that the current branch is invalid and // now "requests" an error to stop the branch if (valid_symbols[TRIGGER_ERROR]) { return error(lexer); } // Decide which tokens to consider based on the first non-whitespace // character switch (lexer->lookahead) { case '`': // A backtick could mark the beginning or ending of a code span or a // fenced code block. 
return parse_backtick(s, lexer, valid_symbols); case '$': return parse_dollar(s, lexer, valid_symbols); case '*': // A star could either mark the beginning or ending of emphasis, a // list item or thematic break. This code is similar to the code for // '_' and '+'. return parse_star(s, lexer, valid_symbols); case '_': return parse_underscore(s, lexer, valid_symbols); case '~': return parse_tilde(s, lexer, valid_symbols); } return false; } void *tree_sitter_markdown_inline_external_scanner_create() { Scanner *s = (Scanner *)malloc(sizeof(Scanner)); deserialize(s, NULL, 0); return s; } bool tree_sitter_markdown_inline_external_scanner_scan( void *payload, TSLexer *lexer, const bool *valid_symbols) { Scanner *scanner = (Scanner *)payload; return scan(scanner, lexer, valid_symbols); } unsigned tree_sitter_markdown_inline_external_scanner_serialize(void *payload, char *buffer) { Scanner *scanner = (Scanner *)payload; return serialize(scanner, buffer); } void tree_sitter_markdown_inline_external_scanner_deserialize(void *payload, char *buffer, unsigned length) { Scanner *scanner = (Scanner *)payload; deserialize(scanner, buffer, length); } void tree_sitter_markdown_inline_external_scanner_destroy(void *payload) { Scanner *scanner = (Scanner *)payload; free(scanner); } hx-0.3.0+20250717/test-grammars/markdown-inline/src/tree_sitter/000077500000000000000000000000001503625671400240515ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/markdown-inline/src/tree_sitter/alloc.h000066400000000000000000000016761503625671400253260ustar00rootroot00000000000000#ifndef TREE_SITTER_ALLOC_H_ #define TREE_SITTER_ALLOC_H_ #ifdef __cplusplus extern "C" { #endif #include #include #include // Allow clients to override allocation functions #ifdef TREE_SITTER_REUSE_ALLOCATOR extern void *(*ts_current_malloc)(size_t); extern void *(*ts_current_calloc)(size_t, size_t); extern void *(*ts_current_realloc)(void *, size_t); extern void (*ts_current_free)(void *); #ifndef ts_malloc #define ts_malloc ts_current_malloc #endif #ifndef ts_calloc #define ts_calloc ts_current_calloc #endif #ifndef ts_realloc #define ts_realloc ts_current_realloc #endif #ifndef ts_free #define ts_free ts_current_free #endif #else #ifndef ts_malloc #define ts_malloc malloc #endif #ifndef ts_calloc #define ts_calloc calloc #endif #ifndef ts_realloc #define ts_realloc realloc #endif #ifndef ts_free #define ts_free free #endif #endif #ifdef __cplusplus } #endif #endif // TREE_SITTER_ALLOC_H_ hx-0.3.0+20250717/test-grammars/markdown-inline/src/tree_sitter/array.h000066400000000000000000000241711503625671400253450ustar00rootroot00000000000000#ifndef TREE_SITTER_ARRAY_H_ #define TREE_SITTER_ARRAY_H_ #ifdef __cplusplus extern "C" { #endif #include "./alloc.h" #include #include #include #include #include #ifdef _MSC_VER #pragma warning(disable : 4101) #elif defined(__GNUC__) || defined(__clang__) #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wunused-variable" #endif #define Array(T) \ struct { \ T *contents; \ uint32_t size; \ uint32_t capacity; \ } /// Initialize an array. #define array_init(self) \ ((self)->size = 0, (self)->capacity = 0, (self)->contents = NULL) /// Create an empty array. #define array_new() \ { NULL, 0, 0 } /// Get a pointer to the element at a given `index` in the array. #define array_get(self, _index) \ (assert((uint32_t)(_index) < (self)->size), &(self)->contents[_index]) /// Get a pointer to the first element in the array. 
#define array_front(self) array_get(self, 0) /// Get a pointer to the last element in the array. #define array_back(self) array_get(self, (self)->size - 1) /// Clear the array, setting its size to zero. Note that this does not free any /// memory allocated for the array's contents. #define array_clear(self) ((self)->size = 0) /// Reserve `new_capacity` elements of space in the array. If `new_capacity` is /// less than the array's current capacity, this function has no effect. #define array_reserve(self, new_capacity) \ _array__reserve((Array *)(self), array_elem_size(self), new_capacity) /// Free any memory allocated for this array. Note that this does not free any /// memory allocated for the array's contents. #define array_delete(self) _array__delete((Array *)(self)) /// Push a new `element` onto the end of the array. #define array_push(self, element) \ (_array__grow((Array *)(self), 1, array_elem_size(self)), \ (self)->contents[(self)->size++] = (element)) /// Increase the array's size by `count` elements. /// New elements are zero-initialized. #define array_grow_by(self, count) \ (_array__grow((Array *)(self), count, array_elem_size(self)), \ memset((self)->contents + (self)->size, 0, (count) * array_elem_size(self)), \ (self)->size += (count)) /// Append all elements from one array to the end of another. #define array_push_all(self, other) \ array_extend((self), (other)->size, (other)->contents) /// Append `count` elements to the end of the array, reading their values from the /// `contents` pointer. #define array_extend(self, count, contents) \ _array__splice( \ (Array *)(self), array_elem_size(self), (self)->size, \ 0, count, contents \ ) /// Remove `old_count` elements from the array starting at the given `index`. At /// the same index, insert `new_count` new elements, reading their values from the /// `new_contents` pointer. #define array_splice(self, _index, old_count, new_count, new_contents) \ _array__splice( \ (Array *)(self), array_elem_size(self), _index, \ old_count, new_count, new_contents \ ) /// Insert one `element` into the array at the given `index`. #define array_insert(self, _index, element) \ _array__splice((Array *)(self), array_elem_size(self), _index, 0, 1, &(element)) /// Remove one element from the array at the given `index`. #define array_erase(self, _index) \ _array__erase((Array *)(self), array_elem_size(self), _index) /// Pop the last element off the array, returning the element by value. #define array_pop(self) ((self)->contents[--(self)->size]) /// Assign the contents of one array to another, reallocating if necessary. #define array_assign(self, other) \ _array__assign((Array *)(self), (const Array *)(other), array_elem_size(self)) /// Swap one array with another #define array_swap(self, other) \ _array__swap((Array *)(self), (Array *)(other)) /// Get the size of the array contents #define array_elem_size(self) (sizeof *(self)->contents) /// Search a sorted array for a given `needle` value, using the given `compare` /// callback to determine the order. /// /// If an existing element is found to be equal to `needle`, then the `index` /// out-parameter is set to the existing value's index, and the `exists` /// out-parameter is set to true. Otherwise, `index` is set to an index where /// `needle` should be inserted in order to preserve the sorting, and `exists` /// is set to false. 
#define array_search_sorted_with(self, compare, needle, _index, _exists) \ _array__search_sorted(self, 0, compare, , needle, _index, _exists) /// Search a sorted array for a given `needle` value, using integer comparisons /// of a given struct field (specified with a leading dot) to determine the order. /// /// See also `array_search_sorted_with`. #define array_search_sorted_by(self, field, needle, _index, _exists) \ _array__search_sorted(self, 0, _compare_int, field, needle, _index, _exists) /// Insert a given `value` into a sorted array, using the given `compare` /// callback to determine the order. #define array_insert_sorted_with(self, compare, value) \ do { \ unsigned _index, _exists; \ array_search_sorted_with(self, compare, &(value), &_index, &_exists); \ if (!_exists) array_insert(self, _index, value); \ } while (0) /// Insert a given `value` into a sorted array, using integer comparisons of /// a given struct field (specified with a leading dot) to determine the order. /// /// See also `array_search_sorted_by`. #define array_insert_sorted_by(self, field, value) \ do { \ unsigned _index, _exists; \ array_search_sorted_by(self, field, (value) field, &_index, &_exists); \ if (!_exists) array_insert(self, _index, value); \ } while (0) // Private typedef Array(void) Array; /// This is not what you're looking for, see `array_delete`. static inline void _array__delete(Array *self) { if (self->contents) { ts_free(self->contents); self->contents = NULL; self->size = 0; self->capacity = 0; } } /// This is not what you're looking for, see `array_erase`. static inline void _array__erase(Array *self, size_t element_size, uint32_t index) { assert(index < self->size); char *contents = (char *)self->contents; memmove(contents + index * element_size, contents + (index + 1) * element_size, (self->size - index - 1) * element_size); self->size--; } /// This is not what you're looking for, see `array_reserve`. static inline void _array__reserve(Array *self, size_t element_size, uint32_t new_capacity) { if (new_capacity > self->capacity) { if (self->contents) { self->contents = ts_realloc(self->contents, new_capacity * element_size); } else { self->contents = ts_malloc(new_capacity * element_size); } self->capacity = new_capacity; } } /// This is not what you're looking for, see `array_assign`. static inline void _array__assign(Array *self, const Array *other, size_t element_size) { _array__reserve(self, element_size, other->size); self->size = other->size; memcpy(self->contents, other->contents, self->size * element_size); } /// This is not what you're looking for, see `array_swap`. static inline void _array__swap(Array *self, Array *other) { Array swap = *other; *other = *self; *self = swap; } /// This is not what you're looking for, see `array_push` or `array_grow_by`. static inline void _array__grow(Array *self, uint32_t count, size_t element_size) { uint32_t new_size = self->size + count; if (new_size > self->capacity) { uint32_t new_capacity = self->capacity * 2; if (new_capacity < 8) new_capacity = 8; if (new_capacity < new_size) new_capacity = new_size; _array__reserve(self, element_size, new_capacity); } } /// This is not what you're looking for, see `array_splice`. 
static inline void _array__splice(Array *self, size_t element_size, uint32_t index, uint32_t old_count, uint32_t new_count, const void *elements) { uint32_t new_size = self->size + new_count - old_count; uint32_t old_end = index + old_count; uint32_t new_end = index + new_count; assert(old_end <= self->size); _array__reserve(self, element_size, new_size); char *contents = (char *)self->contents; if (self->size > old_end) { memmove( contents + new_end * element_size, contents + old_end * element_size, (self->size - old_end) * element_size ); } if (new_count > 0) { if (elements) { memcpy( (contents + index * element_size), elements, new_count * element_size ); } else { memset( (contents + index * element_size), 0, new_count * element_size ); } } self->size += new_count - old_count; } /// A binary search routine, based on Rust's `std::slice::binary_search_by`. /// This is not what you're looking for, see `array_search_sorted_with` or `array_search_sorted_by`. #define _array__search_sorted(self, start, compare, suffix, needle, _index, _exists) \ do { \ *(_index) = start; \ *(_exists) = false; \ uint32_t size = (self)->size - *(_index); \ if (size == 0) break; \ int comparison; \ while (size > 1) { \ uint32_t half_size = size / 2; \ uint32_t mid_index = *(_index) + half_size; \ comparison = compare(&((self)->contents[mid_index] suffix), (needle)); \ if (comparison <= 0) *(_index) = mid_index; \ size -= half_size; \ } \ comparison = compare(&((self)->contents[*(_index)] suffix), (needle)); \ if (comparison == 0) *(_exists) = true; \ else if (comparison < 0) *(_index) += 1; \ } while (0) /// Helper macro for the `_sorted_by` routines below. This takes the left (existing) /// parameter by reference in order to work with the generic sorting function above. #define _compare_int(a, b) ((int)*(a) - (int)(b)) #ifdef _MSC_VER #pragma warning(default : 4101) #elif defined(__GNUC__) || defined(__clang__) #pragma GCC diagnostic pop #endif #ifdef __cplusplus } #endif #endif // TREE_SITTER_ARRAY_H_ hx-0.3.0+20250717/test-grammars/markdown-inline/src/tree_sitter/parser.h000066400000000000000000000126361503625671400255260ustar00rootroot00000000000000#ifndef TREE_SITTER_PARSER_H_ #define TREE_SITTER_PARSER_H_ #ifdef __cplusplus extern "C" { #endif #include #include #include #define ts_builtin_sym_error ((TSSymbol)-1) #define ts_builtin_sym_end 0 #define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 #ifndef TREE_SITTER_API_H_ typedef uint16_t TSStateId; typedef uint16_t TSSymbol; typedef uint16_t TSFieldId; typedef struct TSLanguage TSLanguage; #endif typedef struct { TSFieldId field_id; uint8_t child_index; bool inherited; } TSFieldMapEntry; typedef struct { uint16_t index; uint16_t length; } TSFieldMapSlice; typedef struct { bool visible; bool named; bool supertype; } TSSymbolMetadata; typedef struct TSLexer TSLexer; struct TSLexer { int32_t lookahead; TSSymbol result_symbol; void (*advance)(TSLexer *, bool); void (*mark_end)(TSLexer *); uint32_t (*get_column)(TSLexer *); bool (*is_at_included_range_start)(const TSLexer *); bool (*eof)(const TSLexer *); }; typedef enum { TSParseActionTypeShift, TSParseActionTypeReduce, TSParseActionTypeAccept, TSParseActionTypeRecover, } TSParseActionType; typedef union { struct { uint8_t type; TSStateId state; bool extra; bool repetition; } shift; struct { uint8_t type; uint8_t child_count; TSSymbol symbol; int16_t dynamic_precedence; uint16_t production_id; } reduce; uint8_t type; } TSParseAction; typedef struct { uint16_t lex_state; uint16_t external_lex_state; } TSLexMode; 
typedef union { TSParseAction action; struct { uint8_t count; bool reusable; } entry; } TSParseActionEntry; struct TSLanguage { uint32_t version; uint32_t symbol_count; uint32_t alias_count; uint32_t token_count; uint32_t external_token_count; uint32_t state_count; uint32_t large_state_count; uint32_t production_id_count; uint32_t field_count; uint16_t max_alias_sequence_length; const uint16_t *parse_table; const uint16_t *small_parse_table; const uint32_t *small_parse_table_map; const TSParseActionEntry *parse_actions; const char * const *symbol_names; const char * const *field_names; const TSFieldMapSlice *field_map_slices; const TSFieldMapEntry *field_map_entries; const TSSymbolMetadata *symbol_metadata; const TSSymbol *public_symbol_map; const uint16_t *alias_map; const TSSymbol *alias_sequences; const TSLexMode *lex_modes; bool (*lex_fn)(TSLexer *, TSStateId); bool (*keyword_lex_fn)(TSLexer *, TSStateId); TSSymbol keyword_capture_token; struct { const bool *states; const TSSymbol *symbol_map; void *(*create)(void); void (*destroy)(void *); bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist); unsigned (*serialize)(void *, char *); void (*deserialize)(void *, const char *, unsigned); } external_scanner; const TSStateId *primary_state_ids; }; /* * Lexer Macros */ #ifdef _MSC_VER #define UNUSED __pragma(warning(suppress : 4101)) #else #define UNUSED __attribute__((unused)) #endif #define START_LEXER() \ bool result = false; \ bool skip = false; \ UNUSED \ bool eof = false; \ int32_t lookahead; \ goto start; \ next_state: \ lexer->advance(lexer, skip); \ start: \ skip = false; \ lookahead = lexer->lookahead; #define ADVANCE(state_value) \ { \ state = state_value; \ goto next_state; \ } #define SKIP(state_value) \ { \ skip = true; \ state = state_value; \ goto next_state; \ } #define ACCEPT_TOKEN(symbol_value) \ result = true; \ lexer->result_symbol = symbol_value; \ lexer->mark_end(lexer); #define END_STATE() return result; /* * Parse Table Macros */ #define SMALL_STATE(id) ((id) - LARGE_STATE_COUNT) #define STATE(id) id #define ACTIONS(id) id #define SHIFT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = (state_value) \ } \ }} #define SHIFT_REPEAT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = (state_value), \ .repetition = true \ } \ }} #define SHIFT_EXTRA() \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .extra = true \ } \ }} #define REDUCE(symbol_val, child_count_val, ...) 
\ {{ \ .reduce = { \ .type = TSParseActionTypeReduce, \ .symbol = symbol_val, \ .child_count = child_count_val, \ __VA_ARGS__ \ }, \ }} #define RECOVER() \ {{ \ .type = TSParseActionTypeRecover \ }} #define ACCEPT_INPUT() \ {{ \ .type = TSParseActionTypeAccept \ }} #ifdef __cplusplus } #endif #endif // TREE_SITTER_PARSER_H_ hx-0.3.0+20250717/test-grammars/markdown/000077500000000000000000000000001503625671400174555ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/markdown/LICENSE000066400000000000000000000020571503625671400204660ustar00rootroot00000000000000MIT License Copyright (c) 2021 Matthias Deiml Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. hx-0.3.0+20250717/test-grammars/markdown/highlights.scm000066400000000000000000000026311503625671400223150ustar00rootroot00000000000000 (setext_heading (paragraph) @markup.heading.1 (setext_h1_underline) @markup.heading.marker) (setext_heading (paragraph) @markup.heading.2 (setext_h2_underline) @markup.heading.marker) (atx_heading (atx_h1_marker) @markup.heading.marker) @markup.heading.1 (atx_heading (atx_h2_marker) @markup.heading.marker) @markup.heading.2 (atx_heading (atx_h3_marker) @markup.heading.marker) @markup.heading.3 (atx_heading (atx_h4_marker) @markup.heading.marker) @markup.heading.4 (atx_heading (atx_h5_marker) @markup.heading.marker) @markup.heading.5 (atx_heading (atx_h6_marker) @markup.heading.marker) @markup.heading.6 [ (indented_code_block) (fenced_code_block) ] @markup.raw.block (info_string) @label [ (fenced_code_block_delimiter) ] @punctuation.bracket [ (link_destination) ] @markup.link.url [ (link_label) ] @markup.link.label [ (list_marker_plus) (list_marker_minus) (list_marker_star) ] @markup.list.unnumbered [ (list_marker_dot) (list_marker_parenthesis) ] @markup.list.numbered (task_list_marker_checked) @markup.list.checked (task_list_marker_unchecked) @markup.list.unchecked (thematic_break) @punctuation.special [ (block_continuation) (block_quote_marker) ] @punctuation.special [ (backslash_escape) ] @string.escape (block_quote) @markup.quote (pipe_table_row "|" @punctuation.special) (pipe_table_header "|" @punctuation.special) (pipe_table_delimiter_row) @punctuation.special hx-0.3.0+20250717/test-grammars/markdown/injections.scm000066400000000000000000000016561503625671400223360ustar00rootroot00000000000000; From nvim-treesitter/nvim-treesitter (fenced_code_block (code_fence_content) @injection.shebang @injection.content (#set! 
injection.include-unnamed-children)) (fenced_code_block (info_string (language) @injection.language) (code_fence_content) @injection.content (#set! injection.include-unnamed-children)) ((html_block) @injection.content (#set! injection.language "html") (#set! injection.include-unnamed-children) (#set! injection.combined)) ((pipe_table_cell) @injection.content (#set! injection.language "markdown-inline") (#set! injection.include-unnamed-children)) ((minus_metadata) @injection.content (#set! injection.language "yaml") (#set! injection.include-unnamed-children)) ((plus_metadata) @injection.content (#set! injection.language "toml") (#set! injection.include-unnamed-children)) ((inline) @injection.content (#set! injection.language "markdown-inline") (#set! injection.include-unnamed-children)) hx-0.3.0+20250717/test-grammars/markdown/metadata.json000066400000000000000000000002531503625671400221300ustar00rootroot00000000000000{ "repo": "https://github.com/tree-sitter-grammars/tree-sitter-markdown", "rev": "62516e8c78380e3b51d5b55727995d2c511436d8", "license": "MIT", "compressed": true }hx-0.3.0+20250717/test-grammars/markdown/src/000077500000000000000000000000001503625671400202445ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/markdown/src/grammar.json000066400000000000000000000247171503625671400226000ustar00rootroot00000000000000
hx-0.3.0+20250717/test-grammars/markdown/src/parser.c000066400000000000000000001724261503625671400217160ustar00rootroot00000000000000
'f^G^b)I$hMQ7rӽ|1MpBT!=ޟ:Wz+ixQY1, {iPX\ߊ_y '4O oҰs븖ґarQ~P g(AՁKp}Ydo_zz$ h"k4# :_jypg]粓 csIaqsa)vz~kb}55$f'dŞ墛+ XFA(ר2;$XX7S-A8Xwj︧OgH M/ fD8VἧVp잷9zGuSyՋE}wi.VAȈ,]  ØM4 (ҡNPÍ#bh){٫,AǜPCh2_rQucuw]nLIMHjPG&c8F4->50*BvFĽEIC>\yf7ސd!c!+lĬ(EѬ!%X!0Cc.(Ղ+R&RjNzP`Bxvv??H.ä;әpCsH2mFLE(4eЋFF857꥖Dfʗ.YF=Fw\x1KBځ>sC͸ra1wX(79~󄱓b(FȉY"Јdk\7fxJ؞(B-[cTF"y5i%e/8 7ʯb]e54ZKjPiᴍ)_HzҮ]VJO:jfюtYFD&CFiD>Uې?ŧE%-\8AB2$LLI3\rNK[h(+X4qbCLg6S_qHbS*˱ T0`ǚR qcUjbey|,,6)jI<˨t7;²hGt-rٝ\ &Ǘ&wH|S2͘b~L*}S_l銬1H컈W2# UU.hEuCH5ohidd{FfLȣ&펧M,u*,$95=pTCQ~(gDŨ(I]aIHTDiL=rGDҪh IlTDBy>5fHHH!AWY8 +fED"7Ig323tO)?Tܔqy6MzΊx37cÝb>T8.N=H1(j'47|*\Wr>EKkltqgaQ!CBfyof.k"WaUt#"#mEQ UL#ռz Y[N$QD3EJc>J/' 3HI:]Oa{T/FȠǬ1SGۊƦH9? B;W&~H~ݨE* [н$6{Y@ # ]ys|C3VۋJ&Oy'NWdHSDZ;cop@.%a3_Ex~Sovr/Uŭn>Kdi.iICwO6<0ګ!qǰ dwΕISW\,vt&7"h{(W;Udg $mL*xde+L"mo C-\8&Xkw1r˙d\aWmY>S j ZqS#945 qW+N'B{_eYabA]՛=6JL󽴜.NO0' ^ݬDOefZX!1}xk^^v  <= wq;5}}§X)^y񲞟w_~'P XoAϷtyQgi>vliϖ8G dD<‘Ҵ_D7y>/peaiu2 PRHy4xP| |'Nb\{%{3_>=}hG9NbKO.t.q`GHר%[XSkE| e:Bʶ#J j%a-h}Eբvv |{sqr5Pv ?j$ب^Xf))swڋ%KC+,.X-#IWJsϟg%iجD{iE˱nNc]>IRw,}S6\ZU .]bL tAϷKm}d}BcӽVs-8VMVTHf&2 3'ްH7v={梚U a@:҂VvUN".dCnl;38U/G2Mͬ$^ڑU|995l4pBO=G7%M7^n­bDgf`VcJJf*\ő*v%'便R*ͮ[l΂"n{['ga,)i ;s`}ͿTAf:=,@5N pW {u̜8H:2*y_W>Ƙ/k]ʗbJ^]a C|_!q$pEWhVH&TÃjx.^ U7c0ݴd*Rxa}yw <8vLy*uZf+ygdymqv9'P3CX4 OaW@^ :U'o"! *d;rzmG=}x@*Pj\&dӾRZXbY(fPNnfTHYjl28ÍOLcE_bh;ڊ]7ϑPS4S340j6XΚ Gge(2[Vژ-*KA\ ׅjG&XbeoʳgZm!xh;P-k۽32Jn9P [`1=t? +-(]Lnm"J]~b좑b #5Aar21^?Qn"C<^*!f Pkzz;3RcɆmNn0<ȰaV{N?ӑJK^4_M J''$]PBӮ ~"`7 ۈP`545{1 T] ʿy\¦ce;ohJP#xQt ^A$PC6{F}1&+b_9䆅Y  8s$TB'`X qU焘qpᨰǽn$:dīȿڎ c9#k筝7_QIiy9 ;*YCrnԍ&]j3GМ)fab۸&y nrBQد;.yqŶB}@xYp|Z9ٝSbG 0 @tcp0L= >d($IpOl,(9 !po5$fa\.mwf*&\}YM-'t.߽U]0$qF4W-LQ:tr# aq$Dr :!6Gu?M`< I8Dw/ ) c7šA0tN/ޑs\$oluKG1!w 4°`4JxB\Q}~kMsd N6.;RlH^66oBB,^Xg\`5K))̃~Pćʃ5׈ >^V]Ja7<~fΰ7a.Ѵ)h![ 9휗&/'ࠥ(a18/JUT7e1빃@>LQNOCs#N*@$C9Wp$(:}J׃m0حA3t~qk\><=pAkϐ:[ tl.a(s17_65[aU)*j>fSdz9ЦM o`B)Vv 6ݎ@@Ϻ@@,FNlS_6 u5OO3fwD妟._4AdV'xX&8dڍ@JϹs|DW/p6"`ir!L(|0d|qӗ Ɇv=T8cw 3:|)nA:0LeR!ڸ t/܂D=fWs9e%yZF"Vb 然k-qw/;MЅ=Pر\jPjYQ;;Ԝq_ c +Ƙḡymafa_3a77xVНh\ۘ;C9@8HM.8 Gtabn*|ΗnQ˵+LtPzt7Y(4'V.M|6M2Icً{9҇%i-%ȵ">s B<韬=؈x8게H4_u5M  $tK3J0#L[]i#A5T #I><IifMmWHcj>R~%e$yD8Dꜜ?tٍqͫƐ+C:Y&E<Yo}ddC%ЌɯaIEϞ6(Wu:i^ BnL|3qaw?>0Ws7 #include #include #include #include // For explanation of the tokens see grammar.js typedef enum { LINE_ENDING, SOFT_LINE_ENDING, BLOCK_CLOSE, BLOCK_CONTINUATION, BLOCK_QUOTE_START, INDENTED_CHUNK_START, ATX_H1_MARKER, ATX_H2_MARKER, ATX_H3_MARKER, ATX_H4_MARKER, ATX_H5_MARKER, ATX_H6_MARKER, SETEXT_H1_UNDERLINE, SETEXT_H2_UNDERLINE, THEMATIC_BREAK, LIST_MARKER_MINUS, LIST_MARKER_PLUS, LIST_MARKER_STAR, LIST_MARKER_PARENTHESIS, LIST_MARKER_DOT, LIST_MARKER_MINUS_DONT_INTERRUPT, LIST_MARKER_PLUS_DONT_INTERRUPT, LIST_MARKER_STAR_DONT_INTERRUPT, LIST_MARKER_PARENTHESIS_DONT_INTERRUPT, LIST_MARKER_DOT_DONT_INTERRUPT, FENCED_CODE_BLOCK_START_BACKTICK, FENCED_CODE_BLOCK_START_TILDE, BLANK_LINE_START, FENCED_CODE_BLOCK_END_BACKTICK, FENCED_CODE_BLOCK_END_TILDE, HTML_BLOCK_1_START, HTML_BLOCK_1_END, HTML_BLOCK_2_START, HTML_BLOCK_3_START, HTML_BLOCK_4_START, HTML_BLOCK_5_START, HTML_BLOCK_6_START, HTML_BLOCK_7_START, CLOSE_BLOCK, NO_INDENTED_CHUNK, ERROR, TRIGGER_ERROR, TOKEN_EOF, MINUS_METADATA, PLUS_METADATA, PIPE_TABLE_START, PIPE_TABLE_LINE_ENDING, } TokenType; // Description of a block on the block stack. // // LIST_ITEM is a list item with minimal indentation (content begins at indent // level 2) while LIST_ITEM_MAX_INDENTATION represents a list item with maximal // indentation without being considered a indented code block. 
// // ANONYMOUS represents any block that whose close is not handled by the // external s. typedef enum { BLOCK_QUOTE, INDENTED_CODE_BLOCK, LIST_ITEM, LIST_ITEM_1_INDENTATION, LIST_ITEM_2_INDENTATION, LIST_ITEM_3_INDENTATION, LIST_ITEM_4_INDENTATION, LIST_ITEM_5_INDENTATION, LIST_ITEM_6_INDENTATION, LIST_ITEM_7_INDENTATION, LIST_ITEM_8_INDENTATION, LIST_ITEM_9_INDENTATION, LIST_ITEM_10_INDENTATION, LIST_ITEM_11_INDENTATION, LIST_ITEM_12_INDENTATION, LIST_ITEM_13_INDENTATION, LIST_ITEM_14_INDENTATION, LIST_ITEM_MAX_INDENTATION, FENCED_CODE_BLOCK, ANONYMOUS, } Block; // Determines if a character is punctuation as defined by the markdown spec. static bool is_punctuation(char chr) { return (chr >= '!' && chr <= '/') || (chr >= ':' && chr <= '@') || (chr >= '[' && chr <= '`') || (chr >= '{' && chr <= '~'); } // Returns the indentation level which lines of a list item should have at // minimum. Should only be called with blocks for which `is_list_item` returns // true. static uint8_t list_item_indentation(Block block) { return (uint8_t)(block - LIST_ITEM + 2); } #define NUM_HTML_TAG_NAMES_RULE_1 3 static const char *const HTML_TAG_NAMES_RULE_1[NUM_HTML_TAG_NAMES_RULE_1] = { "pre", "script", "style"}; #define NUM_HTML_TAG_NAMES_RULE_7 62 static const char *const HTML_TAG_NAMES_RULE_7[NUM_HTML_TAG_NAMES_RULE_7] = { "address", "article", "aside", "base", "basefont", "blockquote", "body", "caption", "center", "col", "colgroup", "dd", "details", "dialog", "dir", "div", "dl", "dt", "fieldset", "figcaption", "figure", "footer", "form", "frame", "frameset", "h1", "h2", "h3", "h4", "h5", "h6", "head", "header", "hr", "html", "iframe", "legend", "li", "link", "main", "menu", "menuitem", "nav", "noframes", "ol", "optgroup", "option", "p", "param", "section", "source", "summary", "table", "tbody", "td", "tfoot", "th", "thead", "title", "tr", "track", "ul"}; // For explanation of the tokens see grammar.js static const bool paragraph_interrupt_symbols[] = { false, // LINE_ENDING, false, // SOFT_LINE_ENDING, false, // BLOCK_CLOSE, false, // BLOCK_CONTINUATION, true, // BLOCK_QUOTE_START, false, // INDENTED_CHUNK_START, true, // ATX_H1_MARKER, true, // ATX_H2_MARKER, true, // ATX_H3_MARKER, true, // ATX_H4_MARKER, true, // ATX_H5_MARKER, true, // ATX_H6_MARKER, true, // SETEXT_H1_UNDERLINE, true, // SETEXT_H2_UNDERLINE, true, // THEMATIC_BREAK, true, // LIST_MARKER_MINUS, true, // LIST_MARKER_PLUS, true, // LIST_MARKER_STAR, true, // LIST_MARKER_PARENTHESIS, true, // LIST_MARKER_DOT, false, // LIST_MARKER_MINUS_DONT_INTERRUPT, false, // LIST_MARKER_PLUS_DONT_INTERRUPT, false, // LIST_MARKER_STAR_DONT_INTERRUPT, false, // LIST_MARKER_PARENTHESIS_DONT_INTERRUPT, false, // LIST_MARKER_DOT_DONT_INTERRUPT, true, // FENCED_CODE_BLOCK_START_BACKTICK, true, // FENCED_CODE_BLOCK_START_TILDE, true, // BLANK_LINE_START, false, // FENCED_CODE_BLOCK_END_BACKTICK, false, // FENCED_CODE_BLOCK_END_TILDE, true, // HTML_BLOCK_1_START, false, // HTML_BLOCK_1_END, true, // HTML_BLOCK_2_START, true, // HTML_BLOCK_3_START, true, // HTML_BLOCK_4_START, true, // HTML_BLOCK_5_START, true, // HTML_BLOCK_6_START, false, // HTML_BLOCK_7_START, false, // CLOSE_BLOCK, false, // NO_INDENTED_CHUNK, false, // ERROR, false, // TRIGGER_ERROR, false, // EOF, false, // MINUS_METADATA, false, // PLUS_METADATA, true, // PIPE_TABLE_START, false, // PIPE_TABLE_LINE_ENDING, }; // State bitflags used with `Scanner.state` // Currently matching (at the beginning of a line) static const uint8_t STATE_MATCHING = 0x1 << 0; // Last line break was inside a 
paragraph static const uint8_t STATE_WAS_SOFT_LINE_BREAK = 0x1 << 1; // Block should be closed after next line break static const uint8_t STATE_CLOSE_BLOCK = 0x1 << 4; static size_t roundup_32(size_t x) { x--; x |= x >> 1; x |= x >> 2; x |= x >> 4; x |= x >> 8; x |= x >> 16; x++; return x; } typedef struct { // A stack of open blocks in the current parse state struct { size_t size; size_t capacity; Block *items; } open_blocks; // Parser state flags uint8_t state; // Number of blocks that have been matched so far. Only changes during // matching and is reset after every line ending uint8_t matched; // Consumed but "unused" indentation. Sometimes a tab needs to be "split" to // be used in multiple tokens. uint8_t indentation; // The current column. Used to decide how many spaces a tab should equal uint8_t column; // The delimiter length of the currently open fenced code block uint8_t fenced_code_block_delimiter_length; bool simulate; } Scanner; static void push_block(Scanner *s, Block b) { if (s->open_blocks.size == s->open_blocks.capacity) { s->open_blocks.capacity = s->open_blocks.capacity ? s->open_blocks.capacity << 1 : 8; void *tmp = realloc(s->open_blocks.items, sizeof(Block) * s->open_blocks.capacity); assert(tmp != NULL); s->open_blocks.items = tmp; } s->open_blocks.items[s->open_blocks.size++] = b; } static inline Block pop_block(Scanner *s) { return s->open_blocks.items[--s->open_blocks.size]; } // Write the whole state of a Scanner to a byte buffer static unsigned serialize(Scanner *s, char *buffer) { unsigned size = 0; buffer[size++] = (char)s->state; buffer[size++] = (char)s->matched; buffer[size++] = (char)s->indentation; buffer[size++] = (char)s->column; buffer[size++] = (char)s->fenced_code_block_delimiter_length; size_t blocks_count = s->open_blocks.size; if (blocks_count > 0) { memcpy(&buffer[size], s->open_blocks.items, blocks_count * sizeof(Block)); size += blocks_count * sizeof(Block); } return size; } // Read the whole state of a Scanner from a byte buffer // `serizalize` and `deserialize` should be fully symmetric. static void deserialize(Scanner *s, const char *buffer, unsigned length) { s->open_blocks.size = 0; s->open_blocks.capacity = 0; s->state = 0; s->matched = 0; s->indentation = 0; s->column = 0; s->fenced_code_block_delimiter_length = 0; if (length > 0) { size_t size = 0; s->state = (uint8_t)buffer[size++]; s->matched = (uint8_t)buffer[size++]; s->indentation = (uint8_t)buffer[size++]; s->column = (uint8_t)buffer[size++]; s->fenced_code_block_delimiter_length = (uint8_t)buffer[size++]; size_t blocks_size = length - size; if (blocks_size > 0) { size_t blocks_count = blocks_size / sizeof(Block); // ensure open blocks has enough room if (s->open_blocks.capacity < blocks_count) { size_t capacity = roundup_32(blocks_count); void *tmp = realloc(s->open_blocks.items, sizeof(Block) * capacity); assert(tmp != NULL); s->open_blocks.items = tmp; s->open_blocks.capacity = capacity; } memcpy(s->open_blocks.items, &buffer[size], blocks_size); s->open_blocks.size = blocks_count; } } } static void mark_end(Scanner *s, TSLexer *lexer) { if (!s->simulate) { lexer->mark_end(lexer); } } // Convenience function to emit the error token. This is done to stop invalid // parse branches. Specifically: // 1. When encountering a newline after a line break that ended a paragraph, and // no new block // has been opened. // 2. When encountering a new block after a soft line break. // 3. 
When a `$._trigger_error` token is valid, which is used to stop parse // branches through // normal tree-sitter grammar rules. // // See also the `$._soft_line_break` and `$._paragraph_end_newline` tokens in // grammar.js static bool error(TSLexer *lexer) { lexer->result_symbol = ERROR; return true; } // Advance the lexer one character // Also keeps track of the current column, counting tabs as spaces with tab stop // 4 See https://github.github.com/gfm/#tabs static size_t advance(Scanner *s, TSLexer *lexer) { size_t size = 1; if (lexer->lookahead == '\t') { size = 4 - s->column; s->column = 0; } else { s->column = (s->column + 1) % 4; } lexer->advance(lexer, false); return size; } // Try to match the given block, i.e. consume all tokens that belong to the // block. These are // 1. indentation for list items and indented code blocks // 2. '>' for block quotes // Returns true if the block is matched and false otherwise static bool match(Scanner *s, TSLexer *lexer, Block block) { switch (block) { case INDENTED_CODE_BLOCK: while (s->indentation < 4) { if (lexer->lookahead == ' ' || lexer->lookahead == '\t') { s->indentation += advance(s, lexer); } else { break; } } if (s->indentation >= 4 && lexer->lookahead != '\n' && lexer->lookahead != '\r') { s->indentation -= 4; return true; } break; case LIST_ITEM: case LIST_ITEM_1_INDENTATION: case LIST_ITEM_2_INDENTATION: case LIST_ITEM_3_INDENTATION: case LIST_ITEM_4_INDENTATION: case LIST_ITEM_5_INDENTATION: case LIST_ITEM_6_INDENTATION: case LIST_ITEM_7_INDENTATION: case LIST_ITEM_8_INDENTATION: case LIST_ITEM_9_INDENTATION: case LIST_ITEM_10_INDENTATION: case LIST_ITEM_11_INDENTATION: case LIST_ITEM_12_INDENTATION: case LIST_ITEM_13_INDENTATION: case LIST_ITEM_14_INDENTATION: case LIST_ITEM_MAX_INDENTATION: while (s->indentation < list_item_indentation(block)) { if (lexer->lookahead == ' ' || lexer->lookahead == '\t') { s->indentation += advance(s, lexer); } else { break; } } if (s->indentation >= list_item_indentation(block)) { s->indentation -= list_item_indentation(block); return true; } if (lexer->lookahead == '\n' || lexer->lookahead == '\r') { s->indentation = 0; return true; } break; case BLOCK_QUOTE: while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { s->indentation += advance(s, lexer); } if (lexer->lookahead == '>') { advance(s, lexer); s->indentation = 0; if (lexer->lookahead == ' ' || lexer->lookahead == '\t') { s->indentation += advance(s, lexer) - 1; } return true; } break; case FENCED_CODE_BLOCK: case ANONYMOUS: return true; } return false; } static bool parse_fenced_code_block(Scanner *s, const char delimiter, TSLexer *lexer, const bool *valid_symbols) { // count the number of backticks uint8_t level = 0; while (lexer->lookahead == delimiter) { advance(s, lexer); level++; } mark_end(s, lexer); // If this is able to close a fenced code block then that is the only valid // interpretation. It can only close a fenced code block if the number of // backticks is at least the number of backticks of the opening delimiter. // Also it cannot be indented more than 3 spaces. if ((delimiter == '`' ? valid_symbols[FENCED_CODE_BLOCK_END_BACKTICK] : valid_symbols[FENCED_CODE_BLOCK_END_TILDE]) && s->indentation < 4 && level >= s->fenced_code_block_delimiter_length && (lexer->lookahead == '\n' || lexer->lookahead == '\r')) { s->fenced_code_block_delimiter_length = 0; lexer->result_symbol = delimiter == '`' ? 
FENCED_CODE_BLOCK_END_BACKTICK : FENCED_CODE_BLOCK_END_TILDE; return true; } // If this could be the start of a fenced code block, check if the info // string contains any backticks. if ((delimiter == '`' ? valid_symbols[FENCED_CODE_BLOCK_START_BACKTICK] : valid_symbols[FENCED_CODE_BLOCK_START_TILDE]) && level >= 3) { bool info_string_has_backtick = false; if (delimiter == '`') { while (lexer->lookahead != '\n' && lexer->lookahead != '\r' && !lexer->eof(lexer)) { if (lexer->lookahead == '`') { info_string_has_backtick = true; break; } advance(s, lexer); } } // If it does not then choose to interpret this as the start of a fenced // code block. if (!info_string_has_backtick) { lexer->result_symbol = delimiter == '`' ? FENCED_CODE_BLOCK_START_BACKTICK : FENCED_CODE_BLOCK_START_TILDE; if (!s->simulate) push_block(s, FENCED_CODE_BLOCK); // Remember the length of the delimiter for later, since we need it // to decide whether a sequence of backticks can close the block. s->fenced_code_block_delimiter_length = level; s->indentation = 0; return true; } } return false; } static bool parse_star(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { advance(s, lexer); mark_end(s, lexer); // Otherwise count the number of stars permitting whitespaces between them. size_t star_count = 1; // Also remember how many stars there are before the first whitespace... // ...and how many spaces follow the first star. uint8_t extra_indentation = 0; for (;;) { if (lexer->lookahead == '*') { if (star_count == 1 && extra_indentation >= 1 && valid_symbols[LIST_MARKER_STAR]) { // If we get to this point then the token has to be at least // this long. We need to call `mark_end` here in case we decide // later that this is a list item. mark_end(s, lexer); } star_count++; advance(s, lexer); } else if (lexer->lookahead == ' ' || lexer->lookahead == '\t') { if (star_count == 1) { extra_indentation += advance(s, lexer); } else { advance(s, lexer); } } else { break; } } bool line_end = lexer->lookahead == '\n' || lexer->lookahead == '\r'; bool dont_interrupt = false; if (star_count == 1 && line_end) { extra_indentation = 1; // line is empty so don't interrupt paragraphs if this is a list marker dont_interrupt = s->matched == s->open_blocks.size; } // If there were at least 3 stars then this could be a thematic break bool thematic_break = star_count >= 3 && line_end; // If there was a star and at least one space after that star then this // could be a list marker. bool list_marker_star = star_count >= 1 && extra_indentation >= 1; if (valid_symbols[THEMATIC_BREAK] && thematic_break && s->indentation < 4) { // If a thematic break is valid then it takes precedence lexer->result_symbol = THEMATIC_BREAK; mark_end(s, lexer); s->indentation = 0; return true; } if ((dont_interrupt ? valid_symbols[LIST_MARKER_STAR_DONT_INTERRUPT] : valid_symbols[LIST_MARKER_STAR]) && list_marker_star) { // List markers take precedence over emphasis markers // If star_count > 1 then we already called mark_end at the right point. // Otherwise the token should go until this point. if (star_count == 1) { mark_end(s, lexer); } // Not counting one space... extra_indentation--; // ... check if the list item begins with an indented code block if (extra_indentation <= 3) { // If not then calculate the indentation level of the list item // content as indentation of list marker + indentation after list // marker - 1 extra_indentation += s->indentation; s->indentation = 0; } else { // Otherwise the indentation level is just the indentation of the // list marker. 
We keep the indentation after the list marker for // later blocks. uint8_t temp = s->indentation; s->indentation = extra_indentation; extra_indentation = temp; } if (!s->simulate) push_block(s, (Block)(LIST_ITEM + extra_indentation)); lexer->result_symbol = dont_interrupt ? LIST_MARKER_STAR_DONT_INTERRUPT : LIST_MARKER_STAR; return true; } return false; } static bool parse_thematic_break_underscore(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { advance(s, lexer); mark_end(s, lexer); size_t underscore_count = 1; for (;;) { if (lexer->lookahead == '_') { underscore_count++; advance(s, lexer); } else if (lexer->lookahead == ' ' || lexer->lookahead == '\t') { advance(s, lexer); } else { break; } } bool line_end = lexer->lookahead == '\n' || lexer->lookahead == '\r'; if (underscore_count >= 3 && line_end && valid_symbols[THEMATIC_BREAK]) { lexer->result_symbol = THEMATIC_BREAK; mark_end(s, lexer); s->indentation = 0; return true; } return false; } static bool parse_block_quote(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { if (valid_symbols[BLOCK_QUOTE_START]) { advance(s, lexer); s->indentation = 0; if (lexer->lookahead == ' ' || lexer->lookahead == '\t') { s->indentation += advance(s, lexer) - 1; } lexer->result_symbol = BLOCK_QUOTE_START; if (!s->simulate) push_block(s, BLOCK_QUOTE); return true; } return false; } static bool parse_atx_heading(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { if (valid_symbols[ATX_H1_MARKER] && s->indentation <= 3) { mark_end(s, lexer); uint16_t level = 0; while (lexer->lookahead == '#' && level <= 6) { advance(s, lexer); level++; } if (level <= 6 && (lexer->lookahead == ' ' || lexer->lookahead == '\t' || lexer->lookahead == '\n' || lexer->lookahead == '\r')) { lexer->result_symbol = ATX_H1_MARKER + (level - 1); s->indentation = 0; mark_end(s, lexer); return true; } } return false; } static bool parse_setext_underline(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { if (valid_symbols[SETEXT_H1_UNDERLINE] && s->matched == s->open_blocks.size) { mark_end(s, lexer); while (lexer->lookahead == '=') { advance(s, lexer); } while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { advance(s, lexer); } if (lexer->lookahead == '\n' || lexer->lookahead == '\r') { lexer->result_symbol = SETEXT_H1_UNDERLINE; mark_end(s, lexer); return true; } } return false; } static bool parse_plus(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { if (s->indentation <= 3 && (valid_symbols[LIST_MARKER_PLUS] || valid_symbols[LIST_MARKER_PLUS_DONT_INTERRUPT] || valid_symbols[PLUS_METADATA])) { advance(s, lexer); if (valid_symbols[PLUS_METADATA] && lexer->lookahead == '+') { advance(s, lexer); if (lexer->lookahead != '+') { return false; } advance(s, lexer); while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { advance(s, lexer); } if (lexer->lookahead != '\n' && lexer->lookahead != '\r') { return false; } for (;;) { // advance over newline if (lexer->lookahead == '\r') { advance(s, lexer); if (lexer->lookahead == '\n') { advance(s, lexer); } } else { advance(s, lexer); } // check for pluses size_t plus_count = 0; while (lexer->lookahead == '+') { plus_count++; advance(s, lexer); } if (plus_count == 3) { // if exactly 3 check if next symbol (after eventual // whitespace) is newline while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { advance(s, lexer); } if (lexer->lookahead == '\r' || lexer->lookahead == '\n') { // if so also consume newline if (lexer->lookahead == '\r') { advance(s, lexer); if (lexer->lookahead == '\n') { 
advance(s, lexer); } } else { advance(s, lexer); } mark_end(s, lexer); lexer->result_symbol = PLUS_METADATA; return true; } } // otherwise consume rest of line while (lexer->lookahead != '\n' && lexer->lookahead != '\r' && !lexer->eof(lexer)) { advance(s, lexer); } // if end of file is reached, then this is not metadata if (lexer->eof(lexer)) { break; } } } else { uint8_t extra_indentation = 0; while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { extra_indentation += advance(s, lexer); } bool dont_interrupt = false; if (lexer->lookahead == '\r' || lexer->lookahead == '\n') { extra_indentation = 1; dont_interrupt = true; } dont_interrupt = dont_interrupt && s->matched == s->open_blocks.size; if (extra_indentation >= 1 && (dont_interrupt ? valid_symbols[LIST_MARKER_PLUS_DONT_INTERRUPT] : valid_symbols[LIST_MARKER_PLUS])) { lexer->result_symbol = dont_interrupt ? LIST_MARKER_PLUS_DONT_INTERRUPT : LIST_MARKER_PLUS; extra_indentation--; if (extra_indentation <= 3) { extra_indentation += s->indentation; s->indentation = 0; } else { uint8_t temp = s->indentation; s->indentation = extra_indentation; extra_indentation = temp; } if (!s->simulate) push_block(s, (Block)(LIST_ITEM + extra_indentation)); return true; } } } return false; } static bool parse_ordered_list_marker(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { if (s->indentation <= 3 && (valid_symbols[LIST_MARKER_PARENTHESIS] || valid_symbols[LIST_MARKER_DOT] || valid_symbols[LIST_MARKER_PARENTHESIS_DONT_INTERRUPT] || valid_symbols[LIST_MARKER_DOT_DONT_INTERRUPT])) { size_t digits = 1; bool dont_interrupt = lexer->lookahead != '1'; advance(s, lexer); while (isdigit(lexer->lookahead)) { dont_interrupt = true; digits++; advance(s, lexer); } if (digits >= 1 && digits <= 9) { bool dot = false; bool parenthesis = false; if (lexer->lookahead == '.') { advance(s, lexer); dot = true; } else if (lexer->lookahead == ')') { advance(s, lexer); parenthesis = true; } if (dot || parenthesis) { uint8_t extra_indentation = 0; while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { extra_indentation += advance(s, lexer); } bool line_end = lexer->lookahead == '\n' || lexer->lookahead == '\r'; if (line_end) { extra_indentation = 1; dont_interrupt = true; } dont_interrupt = dont_interrupt && s->matched == s->open_blocks.size; if (extra_indentation >= 1 && (dot ? (dont_interrupt ? valid_symbols[LIST_MARKER_DOT_DONT_INTERRUPT] : valid_symbols[LIST_MARKER_DOT]) : (dont_interrupt ? valid_symbols [LIST_MARKER_PARENTHESIS_DONT_INTERRUPT] : valid_symbols[LIST_MARKER_PARENTHESIS]))) { lexer->result_symbol = dot ? 
LIST_MARKER_DOT : LIST_MARKER_PARENTHESIS; extra_indentation--; if (extra_indentation <= 3) { extra_indentation += s->indentation; s->indentation = 0; } else { uint8_t temp = s->indentation; s->indentation = extra_indentation; extra_indentation = temp; } if (!s->simulate) push_block( s, (Block)(LIST_ITEM + extra_indentation + digits)); return true; } } } } return false; } static bool parse_minus(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { if (s->indentation <= 3 && (valid_symbols[LIST_MARKER_MINUS] || valid_symbols[LIST_MARKER_MINUS_DONT_INTERRUPT] || valid_symbols[SETEXT_H2_UNDERLINE] || valid_symbols[THEMATIC_BREAK] || valid_symbols[MINUS_METADATA])) { mark_end(s, lexer); bool whitespace_after_minus = false; bool minus_after_whitespace = false; size_t minus_count = 0; uint8_t extra_indentation = 0; for (;;) { if (lexer->lookahead == '-') { if (minus_count == 1 && extra_indentation >= 1) { mark_end(s, lexer); } minus_count++; advance(s, lexer); minus_after_whitespace = whitespace_after_minus; } else if (lexer->lookahead == ' ' || lexer->lookahead == '\t') { if (minus_count == 1) { extra_indentation += advance(s, lexer); } else { advance(s, lexer); } whitespace_after_minus = true; } else { break; } } bool line_end = lexer->lookahead == '\n' || lexer->lookahead == '\r'; bool dont_interrupt = false; if (minus_count == 1 && line_end) { extra_indentation = 1; dont_interrupt = true; } dont_interrupt = dont_interrupt && s->matched == s->open_blocks.size; bool thematic_break = minus_count >= 3 && line_end; bool underline = minus_count >= 1 && !minus_after_whitespace && line_end && s->matched == s->open_blocks .size; // setext heading can not break lazy continuation bool list_marker_minus = minus_count >= 1 && extra_indentation >= 1; bool success = false; if (valid_symbols[SETEXT_H2_UNDERLINE] && underline) { lexer->result_symbol = SETEXT_H2_UNDERLINE; mark_end(s, lexer); s->indentation = 0; success = true; } else if (valid_symbols[THEMATIC_BREAK] && thematic_break) { // underline is false if list_marker_minus // is true lexer->result_symbol = THEMATIC_BREAK; mark_end(s, lexer); s->indentation = 0; success = true; } else if ((dont_interrupt ? valid_symbols[LIST_MARKER_MINUS_DONT_INTERRUPT] : valid_symbols[LIST_MARKER_MINUS]) && list_marker_minus) { if (minus_count == 1) { mark_end(s, lexer); } extra_indentation--; if (extra_indentation <= 3) { extra_indentation += s->indentation; s->indentation = 0; } else { uint8_t temp = s->indentation; s->indentation = extra_indentation; extra_indentation = temp; } if (!s->simulate) push_block(s, (Block)(LIST_ITEM + extra_indentation)); lexer->result_symbol = dont_interrupt ? 
LIST_MARKER_MINUS_DONT_INTERRUPT : LIST_MARKER_MINUS; return true; } if (minus_count == 3 && (!minus_after_whitespace) && line_end && valid_symbols[MINUS_METADATA]) { for (;;) { // advance over newline if (lexer->lookahead == '\r') { advance(s, lexer); if (lexer->lookahead == '\n') { advance(s, lexer); } } else { advance(s, lexer); } // check for minuses minus_count = 0; while (lexer->lookahead == '-') { minus_count++; advance(s, lexer); } if (minus_count == 3) { // if exactly 3 check if next symbol (after eventual // whitespace) is newline while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { advance(s, lexer); } if (lexer->lookahead == '\r' || lexer->lookahead == '\n') { // if so also consume newline if (lexer->lookahead == '\r') { advance(s, lexer); if (lexer->lookahead == '\n') { advance(s, lexer); } } else { advance(s, lexer); } mark_end(s, lexer); lexer->result_symbol = MINUS_METADATA; return true; } } // otherwise consume rest of line while (lexer->lookahead != '\n' && lexer->lookahead != '\r' && !lexer->eof(lexer)) { advance(s, lexer); } // if end of file is reached, then this is not metadata if (lexer->eof(lexer)) { break; } } } if (success) { return true; } } return false; } static bool parse_html_block(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { if (!(valid_symbols[HTML_BLOCK_1_START] || valid_symbols[HTML_BLOCK_1_END] || valid_symbols[HTML_BLOCK_2_START] || valid_symbols[HTML_BLOCK_3_START] || valid_symbols[HTML_BLOCK_4_START] || valid_symbols[HTML_BLOCK_5_START] || valid_symbols[HTML_BLOCK_6_START] || valid_symbols[HTML_BLOCK_7_START])) { return false; } advance(s, lexer); if (lexer->lookahead == '?' && valid_symbols[HTML_BLOCK_3_START]) { advance(s, lexer); lexer->result_symbol = HTML_BLOCK_3_START; if (!s->simulate) push_block(s, ANONYMOUS); return true; } if (lexer->lookahead == '!') { // could be block 2 advance(s, lexer); if (lexer->lookahead == '-') { advance(s, lexer); if (lexer->lookahead == '-' && valid_symbols[HTML_BLOCK_2_START]) { advance(s, lexer); lexer->result_symbol = HTML_BLOCK_2_START; if (!s->simulate) push_block(s, ANONYMOUS); return true; } } else if ('A' <= lexer->lookahead && lexer->lookahead <= 'Z' && valid_symbols[HTML_BLOCK_4_START]) { advance(s, lexer); lexer->result_symbol = HTML_BLOCK_4_START; if (!s->simulate) push_block(s, ANONYMOUS); return true; } else if (lexer->lookahead == '[') { advance(s, lexer); if (lexer->lookahead == 'C') { advance(s, lexer); if (lexer->lookahead == 'D') { advance(s, lexer); if (lexer->lookahead == 'A') { advance(s, lexer); if (lexer->lookahead == 'T') { advance(s, lexer); if (lexer->lookahead == 'A') { advance(s, lexer); if (lexer->lookahead == '[' && valid_symbols[HTML_BLOCK_5_START]) { advance(s, lexer); lexer->result_symbol = HTML_BLOCK_5_START; if (!s->simulate) push_block(s, ANONYMOUS); return true; } } } } } } } } bool starting_slash = lexer->lookahead == '/'; if (starting_slash) { advance(s, lexer); } char name[11]; size_t name_length = 0; while (iswalpha((wint_t)lexer->lookahead)) { if (name_length < 10) { name[name_length++] = (char)towlower((wint_t)lexer->lookahead); } else { name_length = 12; } advance(s, lexer); } if (name_length == 0) { return false; } bool tag_closed = false; if (name_length < 11) { name[name_length] = 0; bool next_symbol_valid = lexer->lookahead == ' ' || lexer->lookahead == '\t' || lexer->lookahead == '\n' || lexer->lookahead == '\r' || lexer->lookahead == '>'; if (next_symbol_valid) { // try block 1 names for (size_t i = 0; i < NUM_HTML_TAG_NAMES_RULE_1; i++) { if 
(strcmp(name, HTML_TAG_NAMES_RULE_1[i]) == 0) { if (starting_slash) { if (valid_symbols[HTML_BLOCK_1_END]) { lexer->result_symbol = HTML_BLOCK_1_END; return true; } } else if (valid_symbols[HTML_BLOCK_1_START]) { lexer->result_symbol = HTML_BLOCK_1_START; if (!s->simulate) push_block(s, ANONYMOUS); return true; } } } } if (!next_symbol_valid && lexer->lookahead == '/') { advance(s, lexer); if (lexer->lookahead == '>') { advance(s, lexer); tag_closed = true; } } if (next_symbol_valid || tag_closed) { // try block 2 names for (size_t i = 0; i < NUM_HTML_TAG_NAMES_RULE_7; i++) { if (strcmp(name, HTML_TAG_NAMES_RULE_7[i]) == 0 && valid_symbols[HTML_BLOCK_6_START]) { lexer->result_symbol = HTML_BLOCK_6_START; if (!s->simulate) push_block(s, ANONYMOUS); return true; } } } } if (!valid_symbols[HTML_BLOCK_7_START]) { return false; } if (!tag_closed) { // tag name (continued) while (iswalnum((wint_t)lexer->lookahead) || lexer->lookahead == '-') { advance(s, lexer); } if (!starting_slash) { // attributes bool had_whitespace = false; for (;;) { // whitespace while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { had_whitespace = true; advance(s, lexer); } if (lexer->lookahead == '/') { advance(s, lexer); break; } if (lexer->lookahead == '>') { break; } // attribute name if (!had_whitespace) { return false; } if (!iswalpha((wint_t)lexer->lookahead) && lexer->lookahead != '_' && lexer->lookahead != ':') { return false; } had_whitespace = false; advance(s, lexer); while (iswalnum((wint_t)lexer->lookahead) || lexer->lookahead == '_' || lexer->lookahead == '.' || lexer->lookahead == ':' || lexer->lookahead == '-') { advance(s, lexer); } // attribute value specification // optional whitespace while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { had_whitespace = true; advance(s, lexer); } // = if (lexer->lookahead == '=') { advance(s, lexer); had_whitespace = false; // optional whitespace while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { advance(s, lexer); } // attribute value if (lexer->lookahead == '\'' || lexer->lookahead == '"') { char delimiter = (char)lexer->lookahead; advance(s, lexer); while (lexer->lookahead != delimiter && lexer->lookahead != '\n' && lexer->lookahead != '\r' && !lexer->eof(lexer)) { advance(s, lexer); } if (lexer->lookahead != delimiter) { return false; } advance(s, lexer); } else { // unquoted attribute value bool had_one = false; while (lexer->lookahead != ' ' && lexer->lookahead != '\t' && lexer->lookahead != '"' && lexer->lookahead != '\'' && lexer->lookahead != '=' && lexer->lookahead != '<' && lexer->lookahead != '>' && lexer->lookahead != '`' && lexer->lookahead != '\n' && lexer->lookahead != '\r' && !lexer->eof(lexer)) { advance(s, lexer); had_one = true; } if (!had_one) { return false; } } } } } else { while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { advance(s, lexer); } } if (lexer->lookahead != '>') { return false; } advance(s, lexer); } while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { advance(s, lexer); } if (lexer->lookahead == '\r' || lexer->lookahead == '\n') { lexer->result_symbol = HTML_BLOCK_7_START; if (!s->simulate) push_block(s, ANONYMOUS); return true; } return false; } static bool parse_pipe_table(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { // unused (void)(valid_symbols); // PIPE_TABLE_START is zero width mark_end(s, lexer); // count number of cells size_t cell_count = 0; // also remember if we see starting and ending pipes, as empty headers have // to have both bool starting_pipe = false; 
bool ending_pipe = false; bool empty = true; if (lexer->lookahead == '|') { starting_pipe = true; advance(s, lexer); } while (lexer->lookahead != '\r' && lexer->lookahead != '\n' && !lexer->eof(lexer)) { if (lexer->lookahead == '|') { cell_count++; ending_pipe = true; advance(s, lexer); } else { if (lexer->lookahead != ' ' && lexer->lookahead != '\t') { ending_pipe = false; } if (lexer->lookahead == '\\') { advance(s, lexer); if (is_punctuation((char)lexer->lookahead)) { advance(s, lexer); } } else { advance(s, lexer); } } } if (empty && cell_count == 0 && !(starting_pipe && ending_pipe)) { return false; } if (!ending_pipe) { cell_count++; } // check the following line for a delimiter row // parse a newline if (lexer->lookahead == '\n') { advance(s, lexer); } else if (lexer->lookahead == '\r') { advance(s, lexer); if (lexer->lookahead == '\n') { advance(s, lexer); } } else { return false; } s->indentation = 0; s->column = 0; for (;;) { if (lexer->lookahead == ' ' || lexer->lookahead == '\t') { s->indentation += advance(s, lexer); } else { break; } } s->simulate = true; uint8_t matched_temp = 0; while (matched_temp < (uint8_t)s->open_blocks.size) { if (match(s, lexer, s->open_blocks.items[matched_temp])) { matched_temp++; } else { return false; } } // check if delimiter row has the same number of cells and at least one pipe size_t delimiter_cell_count = 0; if (lexer->lookahead == '|') { advance(s, lexer); } for (;;) { while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { advance(s, lexer); } if (lexer->lookahead == '|') { delimiter_cell_count++; advance(s, lexer); continue; } if (lexer->lookahead == ':') { advance(s, lexer); if (lexer->lookahead != '-') { return false; } } bool had_one_minus = false; while (lexer->lookahead == '-') { had_one_minus = true; advance(s, lexer); } if (had_one_minus) { delimiter_cell_count++; } if (lexer->lookahead == ':') { if (!had_one_minus) { return false; } advance(s, lexer); } while (lexer->lookahead == ' ' || lexer->lookahead == '\t') { advance(s, lexer); } if (lexer->lookahead == '|') { if (!had_one_minus) { delimiter_cell_count++; } advance(s, lexer); continue; } if (lexer->lookahead != '\r' && lexer->lookahead != '\n') { return false; } else { break; } } // if the cell counts are not equal then this is not a table if (cell_count != delimiter_cell_count) { return false; } lexer->result_symbol = PIPE_TABLE_START; return true; } static bool scan(Scanner *s, TSLexer *lexer, const bool *valid_symbols) { // A normal tree-sitter rule decided that the current branch is invalid and // now "requests" an error to stop the branch if (valid_symbols[TRIGGER_ERROR]) { return error(lexer); } // Close the inner most block after the next line break as requested. See // `$._close_block` in grammar.js if (valid_symbols[CLOSE_BLOCK]) { s->state |= STATE_CLOSE_BLOCK; lexer->result_symbol = CLOSE_BLOCK; return true; } // if we are at the end of the file and there are still open blocks close // them all if (lexer->eof(lexer)) { if (valid_symbols[TOKEN_EOF]) { lexer->result_symbol = TOKEN_EOF; return true; } if (s->open_blocks.size > 0) { lexer->result_symbol = BLOCK_CLOSE; if (!s->simulate) pop_block(s); return true; } return false; } if (!(s->state & STATE_MATCHING)) { // Parse any preceeding whitespace and remember its length. This makes a // lot of parsing quite a bit easier. for (;;) { if (lexer->lookahead == ' ' || lexer->lookahead == '\t') { s->indentation += advance(s, lexer); } else { break; } } // We are not matching. 
This is where the parsing logic for most // "normal" token is. Most importantly parsing logic for the start of // new blocks. if (valid_symbols[INDENTED_CHUNK_START] && !valid_symbols[NO_INDENTED_CHUNK]) { if (s->indentation >= 4 && lexer->lookahead != '\n' && lexer->lookahead != '\r') { lexer->result_symbol = INDENTED_CHUNK_START; if (!s->simulate) push_block(s, INDENTED_CODE_BLOCK); s->indentation -= 4; return true; } } // Decide which tokens to consider based on the first non-whitespace // character switch (lexer->lookahead) { case '\r': case '\n': if (valid_symbols[BLANK_LINE_START]) { // A blank line token is actually just 0 width, so do not // consume the characters lexer->result_symbol = BLANK_LINE_START; return true; } break; case '`': // A backtick could mark the beginning or ending of a fenced // code block. return parse_fenced_code_block(s, '`', lexer, valid_symbols); case '~': // A tilde could mark the beginning or ending of a fenced code // block. return parse_fenced_code_block(s, '~', lexer, valid_symbols); case '*': // A star could either mark a list item or a thematic break. // This code is similar to the code for '_' and '+'. return parse_star(s, lexer, valid_symbols); case '_': return parse_thematic_break_underscore(s, lexer, valid_symbols); case '>': // A '>' could mark the beginning of a block quote return parse_block_quote(s, lexer, valid_symbols); case '#': // A '#' could mark a atx heading return parse_atx_heading(s, lexer, valid_symbols); case '=': // A '=' could mark a setext underline return parse_setext_underline(s, lexer, valid_symbols); case '+': // A '+' could be a list marker return parse_plus(s, lexer, valid_symbols); case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': // A number could be a list marker (if followed by a dot or a // parenthesis) return parse_ordered_list_marker(s, lexer, valid_symbols); case '-': // A minus could mark a list marker, a thematic break or a // setext underline return parse_minus(s, lexer, valid_symbols); case '<': // A < could mark the beginning of a html block return parse_html_block(s, lexer, valid_symbols); } if (lexer->lookahead != '\r' && lexer->lookahead != '\n' && valid_symbols[PIPE_TABLE_START]) { return parse_pipe_table(s, lexer, valid_symbols); } } else { // we are in the state of trying to match all currently open blocks bool partial_success = false; while (s->matched < (uint8_t)s->open_blocks.size) { if (s->matched == (uint8_t)s->open_blocks.size - 1 && (s->state & STATE_CLOSE_BLOCK)) { if (!partial_success) s->state &= ~STATE_CLOSE_BLOCK; break; } if (match(s, lexer, s->open_blocks.items[s->matched])) { partial_success = true; s->matched++; } else { if (s->state & STATE_WAS_SOFT_LINE_BREAK) { s->state &= (~STATE_MATCHING); } break; } } if (partial_success) { if (s->matched == s->open_blocks.size) { s->state &= (~STATE_MATCHING); } lexer->result_symbol = BLOCK_CONTINUATION; return true; } if (!(s->state & STATE_WAS_SOFT_LINE_BREAK)) { lexer->result_symbol = BLOCK_CLOSE; pop_block(s); if (s->matched == s->open_blocks.size) { s->state &= (~STATE_MATCHING); } return true; } } // The parser just encountered a line break. 
Setup the state correspondingly if ((valid_symbols[LINE_ENDING] || valid_symbols[SOFT_LINE_ENDING] || valid_symbols[PIPE_TABLE_LINE_ENDING]) && (lexer->lookahead == '\n' || lexer->lookahead == '\r')) { if (lexer->lookahead == '\r') { advance(s, lexer); if (lexer->lookahead == '\n') { advance(s, lexer); } } else { advance(s, lexer); } s->indentation = 0; s->column = 0; if (!(s->state & STATE_CLOSE_BLOCK) && (valid_symbols[SOFT_LINE_ENDING] || valid_symbols[PIPE_TABLE_LINE_ENDING])) { lexer->mark_end(lexer); for (;;) { if (lexer->lookahead == ' ' || lexer->lookahead == '\t') { s->indentation += advance(s, lexer); } else { break; } } s->simulate = true; uint8_t matched_temp = s->matched; s->matched = 0; bool one_will_be_matched = false; while (s->matched < (uint8_t)s->open_blocks.size) { if (match(s, lexer, s->open_blocks.items[s->matched])) { s->matched++; one_will_be_matched = true; } else { break; } } bool all_will_be_matched = s->matched == s->open_blocks.size; if (!lexer->eof(lexer) && !scan(s, lexer, paragraph_interrupt_symbols)) { s->matched = matched_temp; // If the last line break ended a paragraph and no new block // opened, the last line break should have been a soft line // break Reset the counter for matched blocks s->matched = 0; s->indentation = 0; s->column = 0; // If there is at least one open block, we should be in the // matching state. Also set the matching flag if a // `$._soft_line_break_marker` can be emitted so it does get // emitted. if (one_will_be_matched) { s->state |= STATE_MATCHING; } else { s->state &= (~STATE_MATCHING); } if (valid_symbols[PIPE_TABLE_LINE_ENDING]) { if (all_will_be_matched) { lexer->result_symbol = PIPE_TABLE_LINE_ENDING; return true; } } else { lexer->result_symbol = SOFT_LINE_ENDING; // reset some state variables s->state |= STATE_WAS_SOFT_LINE_BREAK; return true; } } else { s->matched = matched_temp; } s->indentation = 0; s->column = 0; } if (valid_symbols[LINE_ENDING]) { // If the last line break ended a paragraph and no new block opened, // the last line break should have been a soft line break Reset the // counter for matched blocks s->matched = 0; // If there is at least one open block, we should be in the matching // state. Also set the matching flag if a // `$._soft_line_break_marker` can be emitted so it does get // emitted. 
if (s->open_blocks.size > 0) { s->state |= STATE_MATCHING; } else { s->state &= (~STATE_MATCHING); } // reset some state variables s->state &= (~STATE_WAS_SOFT_LINE_BREAK); lexer->result_symbol = LINE_ENDING; return true; } } return false; } void *tree_sitter_markdown_external_scanner_create(void) { Scanner *s = (Scanner *)malloc(sizeof(Scanner)); s->open_blocks.items = (Block *)calloc(1, sizeof(Block)); #if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L) _Static_assert(ATX_H6_MARKER == ATX_H1_MARKER + 5, ""); #else assert(ATX_H6_MARKER == ATX_H1_MARKER + 5); #endif deserialize(s, NULL, 0); return s; } bool tree_sitter_markdown_external_scanner_scan(void *payload, TSLexer *lexer, const bool *valid_symbols) { Scanner *scanner = (Scanner *)payload; scanner->simulate = false; return scan(scanner, lexer, valid_symbols); } unsigned tree_sitter_markdown_external_scanner_serialize(void *payload, char *buffer) { Scanner *scanner = (Scanner *)payload; return serialize(scanner, buffer); } void tree_sitter_markdown_external_scanner_deserialize(void *payload, char *buffer, unsigned length) { Scanner *scanner = (Scanner *)payload; deserialize(scanner, buffer, length); } void tree_sitter_markdown_external_scanner_destroy(void *payload) { Scanner *scanner = (Scanner *)payload; free(scanner->open_blocks.items); free(scanner); } hx-0.3.0+20250717/test-grammars/markdown/src/tree_sitter/000077500000000000000000000000001503625671400225755ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/markdown/src/tree_sitter/alloc.h000066400000000000000000000016761503625671400240520ustar00rootroot00000000000000#ifndef TREE_SITTER_ALLOC_H_ #define TREE_SITTER_ALLOC_H_ #ifdef __cplusplus extern "C" { #endif #include #include #include // Allow clients to override allocation functions #ifdef TREE_SITTER_REUSE_ALLOCATOR extern void *(*ts_current_malloc)(size_t); extern void *(*ts_current_calloc)(size_t, size_t); extern void *(*ts_current_realloc)(void *, size_t); extern void (*ts_current_free)(void *); #ifndef ts_malloc #define ts_malloc ts_current_malloc #endif #ifndef ts_calloc #define ts_calloc ts_current_calloc #endif #ifndef ts_realloc #define ts_realloc ts_current_realloc #endif #ifndef ts_free #define ts_free ts_current_free #endif #else #ifndef ts_malloc #define ts_malloc malloc #endif #ifndef ts_calloc #define ts_calloc calloc #endif #ifndef ts_realloc #define ts_realloc realloc #endif #ifndef ts_free #define ts_free free #endif #endif #ifdef __cplusplus } #endif #endif // TREE_SITTER_ALLOC_H_ hx-0.3.0+20250717/test-grammars/markdown/src/tree_sitter/array.h000066400000000000000000000241711503625671400240710ustar00rootroot00000000000000#ifndef TREE_SITTER_ARRAY_H_ #define TREE_SITTER_ARRAY_H_ #ifdef __cplusplus extern "C" { #endif #include "./alloc.h" #include #include #include #include #include #ifdef _MSC_VER #pragma warning(disable : 4101) #elif defined(__GNUC__) || defined(__clang__) #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wunused-variable" #endif #define Array(T) \ struct { \ T *contents; \ uint32_t size; \ uint32_t capacity; \ } /// Initialize an array. #define array_init(self) \ ((self)->size = 0, (self)->capacity = 0, (self)->contents = NULL) /// Create an empty array. #define array_new() \ { NULL, 0, 0 } /// Get a pointer to the element at a given `index` in the array. #define array_get(self, _index) \ (assert((uint32_t)(_index) < (self)->size), &(self)->contents[_index]) /// Get a pointer to the first element in the array. 
#define array_front(self) array_get(self, 0) /// Get a pointer to the last element in the array. #define array_back(self) array_get(self, (self)->size - 1) /// Clear the array, setting its size to zero. Note that this does not free any /// memory allocated for the array's contents. #define array_clear(self) ((self)->size = 0) /// Reserve `new_capacity` elements of space in the array. If `new_capacity` is /// less than the array's current capacity, this function has no effect. #define array_reserve(self, new_capacity) \ _array__reserve((Array *)(self), array_elem_size(self), new_capacity) /// Free any memory allocated for this array. Note that this does not free any /// memory allocated for the array's contents. #define array_delete(self) _array__delete((Array *)(self)) /// Push a new `element` onto the end of the array. #define array_push(self, element) \ (_array__grow((Array *)(self), 1, array_elem_size(self)), \ (self)->contents[(self)->size++] = (element)) /// Increase the array's size by `count` elements. /// New elements are zero-initialized. #define array_grow_by(self, count) \ (_array__grow((Array *)(self), count, array_elem_size(self)), \ memset((self)->contents + (self)->size, 0, (count) * array_elem_size(self)), \ (self)->size += (count)) /// Append all elements from one array to the end of another. #define array_push_all(self, other) \ array_extend((self), (other)->size, (other)->contents) /// Append `count` elements to the end of the array, reading their values from the /// `contents` pointer. #define array_extend(self, count, contents) \ _array__splice( \ (Array *)(self), array_elem_size(self), (self)->size, \ 0, count, contents \ ) /// Remove `old_count` elements from the array starting at the given `index`. At /// the same index, insert `new_count` new elements, reading their values from the /// `new_contents` pointer. #define array_splice(self, _index, old_count, new_count, new_contents) \ _array__splice( \ (Array *)(self), array_elem_size(self), _index, \ old_count, new_count, new_contents \ ) /// Insert one `element` into the array at the given `index`. #define array_insert(self, _index, element) \ _array__splice((Array *)(self), array_elem_size(self), _index, 0, 1, &(element)) /// Remove one element from the array at the given `index`. #define array_erase(self, _index) \ _array__erase((Array *)(self), array_elem_size(self), _index) /// Pop the last element off the array, returning the element by value. #define array_pop(self) ((self)->contents[--(self)->size]) /// Assign the contents of one array to another, reallocating if necessary. #define array_assign(self, other) \ _array__assign((Array *)(self), (const Array *)(other), array_elem_size(self)) /// Swap one array with another #define array_swap(self, other) \ _array__swap((Array *)(self), (Array *)(other)) /// Get the size of the array contents #define array_elem_size(self) (sizeof *(self)->contents) /// Search a sorted array for a given `needle` value, using the given `compare` /// callback to determine the order. /// /// If an existing element is found to be equal to `needle`, then the `index` /// out-parameter is set to the existing value's index, and the `exists` /// out-parameter is set to true. Otherwise, `index` is set to an index where /// `needle` should be inserted in order to preserve the sorting, and `exists` /// is set to false. 
#define array_search_sorted_with(self, compare, needle, _index, _exists) \ _array__search_sorted(self, 0, compare, , needle, _index, _exists) /// Search a sorted array for a given `needle` value, using integer comparisons /// of a given struct field (specified with a leading dot) to determine the order. /// /// See also `array_search_sorted_with`. #define array_search_sorted_by(self, field, needle, _index, _exists) \ _array__search_sorted(self, 0, _compare_int, field, needle, _index, _exists) /// Insert a given `value` into a sorted array, using the given `compare` /// callback to determine the order. #define array_insert_sorted_with(self, compare, value) \ do { \ unsigned _index, _exists; \ array_search_sorted_with(self, compare, &(value), &_index, &_exists); \ if (!_exists) array_insert(self, _index, value); \ } while (0) /// Insert a given `value` into a sorted array, using integer comparisons of /// a given struct field (specified with a leading dot) to determine the order. /// /// See also `array_search_sorted_by`. #define array_insert_sorted_by(self, field, value) \ do { \ unsigned _index, _exists; \ array_search_sorted_by(self, field, (value) field, &_index, &_exists); \ if (!_exists) array_insert(self, _index, value); \ } while (0) // Private typedef Array(void) Array; /// This is not what you're looking for, see `array_delete`. static inline void _array__delete(Array *self) { if (self->contents) { ts_free(self->contents); self->contents = NULL; self->size = 0; self->capacity = 0; } } /// This is not what you're looking for, see `array_erase`. static inline void _array__erase(Array *self, size_t element_size, uint32_t index) { assert(index < self->size); char *contents = (char *)self->contents; memmove(contents + index * element_size, contents + (index + 1) * element_size, (self->size - index - 1) * element_size); self->size--; } /// This is not what you're looking for, see `array_reserve`. static inline void _array__reserve(Array *self, size_t element_size, uint32_t new_capacity) { if (new_capacity > self->capacity) { if (self->contents) { self->contents = ts_realloc(self->contents, new_capacity * element_size); } else { self->contents = ts_malloc(new_capacity * element_size); } self->capacity = new_capacity; } } /// This is not what you're looking for, see `array_assign`. static inline void _array__assign(Array *self, const Array *other, size_t element_size) { _array__reserve(self, element_size, other->size); self->size = other->size; memcpy(self->contents, other->contents, self->size * element_size); } /// This is not what you're looking for, see `array_swap`. static inline void _array__swap(Array *self, Array *other) { Array swap = *other; *other = *self; *self = swap; } /// This is not what you're looking for, see `array_push` or `array_grow_by`. static inline void _array__grow(Array *self, uint32_t count, size_t element_size) { uint32_t new_size = self->size + count; if (new_size > self->capacity) { uint32_t new_capacity = self->capacity * 2; if (new_capacity < 8) new_capacity = 8; if (new_capacity < new_size) new_capacity = new_size; _array__reserve(self, element_size, new_capacity); } } /// This is not what you're looking for, see `array_splice`. 
static inline void _array__splice(Array *self, size_t element_size, uint32_t index, uint32_t old_count, uint32_t new_count, const void *elements) { uint32_t new_size = self->size + new_count - old_count; uint32_t old_end = index + old_count; uint32_t new_end = index + new_count; assert(old_end <= self->size); _array__reserve(self, element_size, new_size); char *contents = (char *)self->contents; if (self->size > old_end) { memmove( contents + new_end * element_size, contents + old_end * element_size, (self->size - old_end) * element_size ); } if (new_count > 0) { if (elements) { memcpy( (contents + index * element_size), elements, new_count * element_size ); } else { memset( (contents + index * element_size), 0, new_count * element_size ); } } self->size += new_count - old_count; } /// A binary search routine, based on Rust's `std::slice::binary_search_by`. /// This is not what you're looking for, see `array_search_sorted_with` or `array_search_sorted_by`. #define _array__search_sorted(self, start, compare, suffix, needle, _index, _exists) \ do { \ *(_index) = start; \ *(_exists) = false; \ uint32_t size = (self)->size - *(_index); \ if (size == 0) break; \ int comparison; \ while (size > 1) { \ uint32_t half_size = size / 2; \ uint32_t mid_index = *(_index) + half_size; \ comparison = compare(&((self)->contents[mid_index] suffix), (needle)); \ if (comparison <= 0) *(_index) = mid_index; \ size -= half_size; \ } \ comparison = compare(&((self)->contents[*(_index)] suffix), (needle)); \ if (comparison == 0) *(_exists) = true; \ else if (comparison < 0) *(_index) += 1; \ } while (0) /// Helper macro for the `_sorted_by` routines below. This takes the left (existing) /// parameter by reference in order to work with the generic sorting function above. #define _compare_int(a, b) ((int)*(a) - (int)(b)) #ifdef _MSC_VER #pragma warning(default : 4101) #elif defined(__GNUC__) || defined(__clang__) #pragma GCC diagnostic pop #endif #ifdef __cplusplus } #endif #endif // TREE_SITTER_ARRAY_H_ hx-0.3.0+20250717/test-grammars/markdown/src/tree_sitter/parser.h000066400000000000000000000126361503625671400242520ustar00rootroot00000000000000#ifndef TREE_SITTER_PARSER_H_ #define TREE_SITTER_PARSER_H_ #ifdef __cplusplus extern "C" { #endif #include #include #include #define ts_builtin_sym_error ((TSSymbol)-1) #define ts_builtin_sym_end 0 #define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024 #ifndef TREE_SITTER_API_H_ typedef uint16_t TSStateId; typedef uint16_t TSSymbol; typedef uint16_t TSFieldId; typedef struct TSLanguage TSLanguage; #endif typedef struct { TSFieldId field_id; uint8_t child_index; bool inherited; } TSFieldMapEntry; typedef struct { uint16_t index; uint16_t length; } TSFieldMapSlice; typedef struct { bool visible; bool named; bool supertype; } TSSymbolMetadata; typedef struct TSLexer TSLexer; struct TSLexer { int32_t lookahead; TSSymbol result_symbol; void (*advance)(TSLexer *, bool); void (*mark_end)(TSLexer *); uint32_t (*get_column)(TSLexer *); bool (*is_at_included_range_start)(const TSLexer *); bool (*eof)(const TSLexer *); }; typedef enum { TSParseActionTypeShift, TSParseActionTypeReduce, TSParseActionTypeAccept, TSParseActionTypeRecover, } TSParseActionType; typedef union { struct { uint8_t type; TSStateId state; bool extra; bool repetition; } shift; struct { uint8_t type; uint8_t child_count; TSSymbol symbol; int16_t dynamic_precedence; uint16_t production_id; } reduce; uint8_t type; } TSParseAction; typedef struct { uint16_t lex_state; uint16_t external_lex_state; } TSLexMode; typedef 
union { TSParseAction action; struct { uint8_t count; bool reusable; } entry; } TSParseActionEntry; struct TSLanguage { uint32_t version; uint32_t symbol_count; uint32_t alias_count; uint32_t token_count; uint32_t external_token_count; uint32_t state_count; uint32_t large_state_count; uint32_t production_id_count; uint32_t field_count; uint16_t max_alias_sequence_length; const uint16_t *parse_table; const uint16_t *small_parse_table; const uint32_t *small_parse_table_map; const TSParseActionEntry *parse_actions; const char * const *symbol_names; const char * const *field_names; const TSFieldMapSlice *field_map_slices; const TSFieldMapEntry *field_map_entries; const TSSymbolMetadata *symbol_metadata; const TSSymbol *public_symbol_map; const uint16_t *alias_map; const TSSymbol *alias_sequences; const TSLexMode *lex_modes; bool (*lex_fn)(TSLexer *, TSStateId); bool (*keyword_lex_fn)(TSLexer *, TSStateId); TSSymbol keyword_capture_token; struct { const bool *states; const TSSymbol *symbol_map; void *(*create)(void); void (*destroy)(void *); bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist); unsigned (*serialize)(void *, char *); void (*deserialize)(void *, const char *, unsigned); } external_scanner; const TSStateId *primary_state_ids; }; /* * Lexer Macros */ #ifdef _MSC_VER #define UNUSED __pragma(warning(suppress : 4101)) #else #define UNUSED __attribute__((unused)) #endif #define START_LEXER() \ bool result = false; \ bool skip = false; \ UNUSED \ bool eof = false; \ int32_t lookahead; \ goto start; \ next_state: \ lexer->advance(lexer, skip); \ start: \ skip = false; \ lookahead = lexer->lookahead; #define ADVANCE(state_value) \ { \ state = state_value; \ goto next_state; \ } #define SKIP(state_value) \ { \ skip = true; \ state = state_value; \ goto next_state; \ } #define ACCEPT_TOKEN(symbol_value) \ result = true; \ lexer->result_symbol = symbol_value; \ lexer->mark_end(lexer); #define END_STATE() return result; /* * Parse Table Macros */ #define SMALL_STATE(id) ((id) - LARGE_STATE_COUNT) #define STATE(id) id #define ACTIONS(id) id #define SHIFT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = (state_value) \ } \ }} #define SHIFT_REPEAT(state_value) \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .state = (state_value), \ .repetition = true \ } \ }} #define SHIFT_EXTRA() \ {{ \ .shift = { \ .type = TSParseActionTypeShift, \ .extra = true \ } \ }} #define REDUCE(symbol_val, child_count_val, ...) 
\ {{ \ .reduce = { \ .type = TSParseActionTypeReduce, \ .symbol = symbol_val, \ .child_count = child_count_val, \ __VA_ARGS__ \ }, \ }} #define RECOVER() \ {{ \ .type = TSParseActionTypeRecover \ }} #define ACCEPT_INPUT() \ {{ \ .type = TSParseActionTypeAccept \ }} #ifdef __cplusplus } #endif #endif // TREE_SITTER_PARSER_H_ hx-0.3.0+20250717/test-grammars/rust/000077500000000000000000000000001503625671400166305ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/rust/LICENSE000066400000000000000000000020701503625671400176340ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2017 Maxim Sokolov Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. hx-0.3.0+20250717/test-grammars/rust/highlights.scm000066400000000000000000000202161503625671400214670ustar00rootroot00000000000000; ------- ; Basic identifiers ; ------- ; We do not style ? as an operator on purpose as it allows styling ? differently, as many highlighters do. @operator.special might have been a better scope, but @special is already documented so the change would break themes (including the intent of the default theme) "?" @special (type_identifier) @type (identifier) @variable (field_identifier) @variable.other.member ; ------- ; Operators ; ------- [ "*" "'" "->" "=>" "<=" "=" "==" "!" "!=" "%" "%=" "&" "&=" "&&" "|" "|=" "||" "^" "^=" "*" "*=" "-" "-=" "+" "+=" "/" "/=" ">" "<" ">=" ">>" "<<" ">>=" "<<=" "@" ".." "..=" "'" ] @operator ; ------- ; Paths ; ------- (use_declaration argument: (identifier) @namespace) (use_wildcard (identifier) @namespace) (extern_crate_declaration name: (identifier) @namespace alias: (identifier)? @namespace) (mod_item name: (identifier) @namespace) (scoped_use_list path: (identifier)? @namespace) (use_list (identifier) @namespace) (use_as_clause path: (identifier)? @namespace alias: (identifier) @namespace) ; ------- ; Types ; ------- (type_parameters (type_identifier) @type.parameter) (constrained_type_parameter left: (type_identifier) @type.parameter) (optional_type_parameter name: (type_identifier) @type.parameter) ((type_arguments (type_identifier) @constant) (#match? @constant "^[A-Z_]+$")) (type_arguments (type_identifier) @type) (tuple_struct_pattern "_" @comment.unused) ((type_arguments (type_identifier) @comment.unused) (#eq? 
@comment.unused "_")) ; --- ; Primitives ; --- (escape_sequence) @constant.character.escape (primitive_type) @type.builtin (boolean_literal) @constant.builtin.boolean (integer_literal) @constant.numeric.integer (float_literal) @constant.numeric.float (char_literal) @constant.character [ (string_literal) (raw_string_literal) ] @string (outer_doc_comment_marker "/" @comment) (inner_doc_comment_marker "!" @comment) [ (line_comment) (block_comment) ] @comment ; --- ; Extraneous ; --- (self) @variable.builtin (field_initializer (field_identifier) @variable.other.member) (shorthand_field_initializer (identifier) @variable.other.member) (shorthand_field_identifier) @variable.other.member (lifetime "'" @label (identifier) @label) (label "'" @label (identifier) @label) ; --- ; Punctuation ; --- [ "::" "." ";" "," ":" ] @punctuation.delimiter [ "(" ")" "[" "]" "{" "}" "#" ] @punctuation.bracket (type_arguments [ "<" ">" ] @punctuation.bracket) (type_parameters [ "<" ">" ] @punctuation.bracket) (for_lifetimes ["<" ">"] @punctuation.bracket) (closure_parameters "|" @punctuation.bracket) (bracketed_type ["<" ">"] @punctuation.bracket) ; --- ; Variables ; --- (let_declaration pattern: [ ((identifier) @variable) ((tuple_pattern (identifier) @variable)) ]) ; It needs to be anonymous to not conflict with `call_expression` further below. (_ value: (field_expression value: (identifier)? @variable field: (field_identifier) @variable.other.member)) (parameter pattern: (identifier) @variable.parameter) (closure_parameters (identifier) @variable.parameter) ; ------- ; Keywords ; ------- (for_expression "for" @keyword.control.repeat) (gen_block "gen" @keyword.control) "in" @keyword.control [ "match" "if" "else" "try" ] @keyword.control.conditional [ "while" "loop" ] @keyword.control.repeat [ "break" "continue" "return" "await" "yield" ] @keyword.control.return "use" @keyword.control.import (mod_item "mod" @keyword.control.import !body) (use_as_clause "as" @keyword.control.import) (type_cast_expression "as" @keyword.operator) ((generic_type type: (type_identifier) @keyword) (#eq? @keyword "use")) [ (crate) (super) "as" "pub" "mod" "extern" "impl" "where" "trait" "for" "default" "async" ] @keyword [ "struct" "enum" "union" "type" ] @keyword.storage.type "let" @keyword.storage "fn" @keyword.function "unsafe" @keyword.special "macro_rules!" @function.macro (mutable_specifier) @keyword.storage.modifier.mut (reference_type "&" @keyword.storage.modifier.ref) (self_parameter "&" @keyword.storage.modifier.ref) [ "static" "const" "raw" "ref" "move" "dyn" ] @keyword.storage.modifier ; TODO: variable.mut to highlight mutable identifiers via locals.scm ; --- ; Remaining Paths ; --- (scoped_identifier path: (identifier)? @namespace name: (identifier) @namespace) (scoped_type_identifier path: (identifier) @namespace) ; ------- ; Functions ; ------- (call_expression function: [ ((identifier) @function) (scoped_identifier name: (identifier) @function) (field_expression field: (field_identifier) @function) ]) (generic_function function: [ ((identifier) @function) (scoped_identifier name: (identifier) @function) (field_expression field: (field_identifier) @function.method) ]) (function_item name: (identifier) @function) (function_signature_item name: (identifier) @function) ; ------- ; Guess Other Types ; ------- ; Other PascalCase identifiers are assumed to be structs. ((identifier) @type (#match? @type "^[A-Z]")) (never_type "!" @type) ((identifier) @constant (#match? 
@constant "^[A-Z][A-Z\\d_]*$")) ; --- ; PascalCase identifiers in call_expressions (e.g. `Ok()`) ; are assumed to be enum constructors. ; --- (call_expression function: [ ((identifier) @constructor (#match? @constructor "^[A-Z]")) (scoped_identifier name: ((identifier) @constructor (#match? @constructor "^[A-Z]"))) ]) ; --- ; PascalCase identifiers under a path which is also PascalCase ; are assumed to be constructors if they have methods or fields. ; --- (field_expression value: (scoped_identifier path: [ (identifier) @type (scoped_identifier name: (identifier) @type) ] name: (identifier) @constructor (#match? @type "^[A-Z]") (#match? @constructor "^[A-Z]"))) (enum_variant (identifier) @type.enum.variant) ; ------- ; Constructors ; ------- ; TODO: this is largely guesswork, remove it once we get actual info from locals.scm or r-a (struct_expression name: (type_identifier) @constructor) (tuple_struct_pattern type: [ (identifier) @constructor (scoped_identifier name: (identifier) @constructor) ]) (struct_pattern type: [ ((type_identifier) @constructor) (scoped_type_identifier name: (type_identifier) @constructor) ]) (match_pattern ((identifier) @constructor) (#match? @constructor "^[A-Z]")) (or_pattern ((identifier) @constructor) ((identifier) @constructor) (#match? @constructor "^[A-Z]")) ; --- ; Macros ; --- (attribute (identifier) @function.macro) (inner_attribute_item "!" @punctuation) (attribute [ (identifier) @function.macro (scoped_identifier name: (identifier) @function.macro) ] (token_tree (identifier) @function.macro)?) (inner_attribute_item) @attribute (macro_definition name: (identifier) @function.macro) (macro_invocation macro: [ ((identifier) @function.macro) (scoped_identifier name: (identifier) @function.macro) ] "!" @function.macro) (metavariable) @variable.parameter (fragment_specifier) @type (attribute (identifier) @special arguments: (token_tree (identifier) @type) (#eq? @special "derive") ) ; --- ; Prelude ; --- ((identifier) @type.enum.variant.builtin (#any-of? @type.enum.variant.builtin "Some" "None" "Ok" "Err")) (call_expression (identifier) @function.builtin (#any-of? @function.builtin "drop" "size_of" "size_of_val" "align_of" "align_of_val")) ((type_identifier) @type.builtin (#any-of? @type.builtin "Send" "Sized" "Sync" "Unpin" "Drop" "Fn" "FnMut" "FnOnce" "AsMut" "AsRef" "From" "Into" "DoubleEndedIterator" "ExactSizeIterator" "Extend" "IntoIterator" "Iterator" "Option" "Result" "Clone" "Copy" "Debug" "Default" "Eq" "Hash" "Ord" "PartialEq" "PartialOrd" "ToOwned" "Box" "String" "ToString" "Vec" "FromIterator" "TryFrom" "TryInto")) hx-0.3.0+20250717/test-grammars/rust/injections.scm000066400000000000000000000047131503625671400215060ustar00rootroot00000000000000([(line_comment !doc) (block_comment !doc)] @injection.content (#set! injection.language "comment")) ((doc_comment) @injection.content (#set! injection.language "markdown") (#set! injection.combined)) ((macro_invocation macro: [ (scoped_identifier name: (_) @_macro_name) (identifier) @_macro_name ] (token_tree) @injection.content) (#eq? @_macro_name "html") (#set! injection.language "html") (#set! injection.include-children)) ((macro_invocation macro: [ (scoped_identifier name: (_) @_macro_name) (identifier) @_macro_name ] (token_tree) @injection.content) (#eq? @_macro_name "slint") (#set! injection.language "slint") (#set! injection.include-children)) ((macro_invocation (token_tree) @injection.content) (#set! injection.language "rust") (#set! 
injection.include-children)) ((macro_rule (token_tree) @injection.content) (#set! injection.language "rust") (#set! injection.include-children)) (call_expression function: (scoped_identifier path: (identifier) @_regex (#eq? @_regex "Regex") name: (identifier) @_new (#eq? @_new "new")) arguments: (arguments (raw_string_literal) @injection.content) (#set! injection.language "regex")) (call_expression function: (scoped_identifier path: (scoped_identifier (identifier) @_regex (#eq? @_regex "Regex") .) name: (identifier) @_new (#eq? @_new "new")) arguments: (arguments (raw_string_literal) @injection.content) (#set! injection.language "regex")) ; Highlight SQL in `sqlx::query!()`, `sqlx::query_scalar!()`, and `sqlx::query_scalar_unchecked!()` (macro_invocation macro: (scoped_identifier path: (identifier) @_sqlx (#eq? @_sqlx "sqlx") name: (identifier) @_query (#match? @_query "^query(_scalar|_scalar_unchecked)?$")) (token_tree ; Only the first argument is SQL . [(string_literal) (raw_string_literal)] @injection.content ) (#set! injection.language "sql")) ; Highlight SQL in `sqlx::query_as!()` and `sqlx::query_as_unchecked!()` (macro_invocation macro: (scoped_identifier path: (identifier) @_sqlx (#eq? @_sqlx "sqlx") name: (identifier) @_query_as (#match? @_query_as "^query_as(_unchecked)?$")) (token_tree ; Only the second argument is SQL . ; Allow anything as the first argument in case the user has lower case type ; names for some reason (_) [(string_literal) (raw_string_literal)] @injection.content ) (#set! injection.language "sql")) hx-0.3.0+20250717/test-grammars/rust/locals.scm000066400000000000000000000006151503625671400206130ustar00rootroot00000000000000; Scopes [ (function_item) (struct_item) (enum_item) (union_item) (type_item) (trait_item) (impl_item) (closure_expression) (block) ] @local.scope ; Definitions (parameter (identifier) @local.definition.variable.parameter) (closure_parameters (identifier) @local.definition.variable.parameter) ; References (identifier) @local.reference (type_identifier) @local.reference hx-0.3.0+20250717/test-grammars/rust/metadata.json000066400000000000000000000002361503625671400213040ustar00rootroot00000000000000{ "repo": "https://github.com/tree-sitter/tree-sitter-rust", "rev": "1f63b33efee17e833e0ea29266dd3d713e27e321", "license": "MIT", "compressed": true }hx-0.3.0+20250717/test-grammars/rust/src/000077500000000000000000000000001503625671400174175ustar00rootroot00000000000000hx-0.3.0+20250717/test-grammars/rust/src/grammar.json000066400000000000000000000177261503625671400217550ustar00rootroot00000000000000(/PF &t5nJ gwgwӲjAE`#?eOϬ})yVʑ,9d[ >Ki/.|U8*b>qx~\쾽,wq?J "D2Q" EViI extdLӤ[$t~cnҩi@܋/jPQ: vGsg> 5jQ W\&_5ߟm B|P 1O.<@.}twwyD/WlD  %P0(PIFrEUʭ#\nZM1An"}K2"@DM4M4M"w׷HcCck[.>̮}eK+k |[q7JӚbQjk$ַr[+n/mqoMZFщc(qD9ޫg9ں;?%KBGz~\ &pf^1]풚 7) q{SvFݮŊ$W"xfb`e೙@!Ь |6#~ԋ6EPt&<";(#%Ƥ>n`' =u]q J˜r8ւ$P"FDsd]8wH,pߪP0OĶXHF~ n2";ŗZO#~] }B4JMk#k$*Fn4SVږ\SE3I vP @* B]}A "` l@@@@`AH!g9LhCC/ǟ\m 83A2W٬I]Ua~}_p^abe}埬iһԂ"ee^) ?P])6ᘭL@R94DpC p56{V3` iwL@88LCzo4t.!+XӭQnIX7r3C^ggn2%)9Vh7p-̭yw`&F15,s ۭqvFJa^g 4,zhR8^O&Z |# [k} ս"/IK ݎ܇dC/GvKzPb_c᱀Ԕ:ζhdSvhE}e'0&#u<靅'}.[4gjbGl|?N qDNNvrI.eWu(~?4(C#nZ|1SFbJƂ Kˠ%k@EUa5Zцh "; tV+ʰ"ףD~ndžJscS-;fr@([rSp!|dERĝ &Nۣ iR<-nWThQkN}Smк(X}/$PVXx qWC;(̪3Y[5GE-4;;F+0 7vdfcǶܲ%S \X| X'm]2 ܏ ǃ\lĵ<ؚ m_'#jԭG@\9<ƑqAPd2O[h C`+֐XSdyxөfk70U/^uQ5  ٱ#yx6=/Cc\E>P[Ϸ"b~0sq¶CC pEP&`jYzON}~ވ a TvBx#\82kzC|>s;08 q\q]S8"Y)((W&Wc-k#[քzgϛkDe$Q)&W 1uol iWdeUA О#H#mCw)}dlMۊQ|~!^1+r} &(lŒ࠼ߩ̱11ϤeN4rg#B8ꨦ3#:zqIYXP5^'K qF(LU^Q0jQ '(p2fo}[RH=6ƻ؟7wj'^z*xԞW!"@9XfTnIP  % J@ u^e: 
1THF5}jcgw` |1#=#jo([2͡o9pָw ̍=idmKR"W!_VljQ U9wR lP?>)^#1R0Lӄ1pqAFH$T 1)5)T!NMc*"59͞I $9Ba4+:g ZS,i'sgYbN"K`>ap3TmK:W#Λ_/V #h A]B3tAu m =?bVn%G*O4bRau!ep7N+{%UvMhyҭ|D'M];?jZ$/H\~ԥ&ql `~$˭TGfWb|wgW@0Yսۂ_mgwxS= IfN?S7kc0O3HqNLzv \>+A@OnN a5*rj2 3`K SQNEf;db"~褰t)yjnEZs=w3YEiVtw0^ۯ9Of OrDx$J[T5pHڎ r |N-UǭJ$]3q3ݸ3>aMj^9HS7Bj>Kp /Hg6ͨ5c2X //ư1_͘ebXP=S*5QfeHHGOhuÚv'U]oݶmi'UNS,8BH,@ AX*Px%Ĥ.rRL:'3m6c`f4c1eVOŭ֕wbt\ܚq6fKnj[MeԻӺ{k-)aP뺎65 rdD)N%eV[S e<㡯Of2Rր0[ߺkqoڼU^Mryq ބHLOZJ/ 標ξAtTRV4$I9rA(Jb"AA"H! C7ɀ<+[A8>D>9-9a j/'ylT6`T *O@Ռ =X[ $L(ڤ 'V4z- c+1Wx ߹\QFWcږB][$V ,pѴesoҩ1'RVF,04ȶ^GSslV=ae32E (Av,oLROeE;9M_Fe+}5 I}^5fBsֳ:;dxHQc7b~o>`/))Ԝh\年e$zrHN,+uhlpX2UlZyX};"œ^9)gi!@(!FQ e{~A͗rx:Mb7]~ڔPfx]{"kgwZn m,=">iej01S8! ^{1]p)8\ $an)Zr7z`٨%YK]|WA _m 8"İLL!?c7YNo.R~ mhNm!@G(S,FIЫ`zh2<'8IΝTuUDij .ّ{ő5qV#4ѩ5D(bMYEPtn|k鉴HӐteCA.} s6l͆(UŐ_kn t"lRõ0ooZG/ã9S9BPVv$ djw/`k)#; fWCzj~ڑv8Yd<@y +;[Lr8- Aۛ/CEWC5~\`њIXr;̭-Y$#{'i>>=if|nYNbCB | yk#Y6 T *MH@9heհY2J41c9?e S! c tX؜°}(J w V)[h^YZZe 6o2!vJƴSw-И+<ߢ.+5!2j9 渀$W"owi: :1k%O`5MdH?R.8DFCȴQ#b;luO{M:%̚+%,j"b8|dJG$?T D:MiW@/YR&D#z'3X9vΉ}3P!>biMb9.wi3Z&vKm@6kJ*53Xl-q$h$T'=䣫(H6N@<$fKo4r7U髲[dzb{r ~2 ! g,vBbbY><8:UeA'AmCaJ*ӭxף_Ba2(qO"7`.# . %w$CfUŏY87P|+g5Ps?(tB15j~m=  j>a>@:, u9,C@0ʉ"AmAFmƗًhԕ>m9.`!ocuJԂBT[LO:# 3gA4#Q^b%~T<"rSxͯgT0y,F qku8dO ;͵c11f~,aQ\F̟H3:_Q;mu9\,8Goa;aE.G i*MG:"Q#N,o)\-4(`0O~bB_Jcb=//6&@%Y^{#"HS225Yq o$LѐSCέp)Z# O"?J>^7cOKϓNag;y] m-r:d0n@t=GGXA\1jE  ^yL\vg^vS3<3P9϶Ӄ3ٸ(_(r=%=<cX mL#=yc|zj!EOqog|Gl-zQD4Khx-0.3.0+20250717/test-grammars/rust/src/parser.c000066400000000000000000004373041503625671400210720ustar00rootroot00000000000000(/N^^Xd+bt&#\>+|E^QPvV蝒V1Ը׼MarE蜲-fc3r֤Ss:ZȣL.I#s1eQz.ilōrJCxS` cw DEϦ+e]W\W Wm&Y_/s+5N'#t9O߷UiM$x@ܛM"p8@@Jp:l0P)AaC%ܔr} kl=:e5斣SF\kl5:ed>ט[SFx]3~sj,*SF67FS 5Z*'ޙI,p]U#/J|]9T0m]S>;VX2QPPL(FaGArI #)PL4)|??5I|K Dp@ !NzTMD&G$V(WV*].$]{!L/:uQʏV|҆^ڼޚvNNױ>ុ:>[ƺqBpր0qd'2`j%arGtf'>-mCəaDZ5NϛU[(%]m|1ݲoh yQf|*QZ*ۦ19j k6PA.'Q7\<At QUo:ƟVOըO1=(qi3wN QeMtVRE].%A =.i`bpXnT;.XfYwXfT4n"Ȏ Ո} fR `4YsשL:lVCn|Ls@'֥ۿ/7#F 05 5q}tu EmJ~A/lbw}nuMH,j(PB| .MgD$6 KZi 1*H2̹^ h_K C?Y!+|JWʣi'2rc#<'w-dU۶9$//o4(!.mQ)"ђ'岐0\Php:cgM%p ܟbs`%|Z4бhca  w3<.A(Ϋ #t"C8 ߮C=_LO(ODN< Ȗ;8 ji*(e,EŸ7έ}'VgTm5!B )e!R-vZ';|U~@Q̩!*hLG pga5f6,!8}g=$@vOFi s(Z& 4q-gM &|3$ZYIuRz/Xbۋ!X*lX M1&P =5w9pBpb;fCxwgĞ C%#I!O!92(HyTU ٣3A6zA%&v-`4&b,A?3VaA]ɴR@V,V̺~ɞճ?)$sX O(b \+Ex]K g_8; 6 r=!GcZ,_y%C)Rx0Y<^L q#2E,;`<޸R;J.Z&@6CƢ߉„xQ# X-$3;:m$Db'm4"2:j9tXJ{CAK$7]=*.{FFNDˮ>bpD^)( =s pX/RG9'".z#V:}z8h s OCh1;3spQN1 #0A@PS!FHDJ((HBÝrXiyuCʞ+GRI *ckX6kxkzrHӢ,ݙW"QPXF he,3JɌ06S'.Noz?ukK|ԇ)_VPFq%9:Ma>"#A&aˆc q.c yU.89e;+:7CµWb ^N p ޲®Bt!`m-T-b@u0\Xf"v?hѽVbG-,鹹k n@&ZNAH/ѽ:V i&$Yۜ[ؑØhĭqK +,#W*贅^(_YopG.;͈} XGb u!{]t7a(BZd\pŃNA锕\dpu&,x.ݑE 9`6)A]Rvcc;"mZ qӢH76~iRQ[JC$KU)Q[y%:TnFX娬{e1j3w9(a-yjΓ$9!z8Ƿ$|J06'u6=SGpJe# 1Ը> ٩iP.=Rj|X!FXa~Z䈉0d* {l|#r?IEG0~>3%D3!5Dvxup}*6GC3cũO G<9DЇ=#[,8C"%r 1ښ;Auro9fWj":d#6D=>c)x![j,NEG+W]veOaMb|)HWqYyF̱'dLI~kBWZID`s| G֓YF9ʀy [i$l,hu+*f0*Ny,u eNL}&:%a9x3po3O ż陈{#Ct͋J=OqӋYocx*n MiܾIb&Th127 =WH 8!Fۿֽ)Õ^Xl^GECa foGQf'*}Yh Y[Zz8N6욪?.IbX!@G3$2v7;{}[P2(1㐬H#(e4 (Uޓ.MU`wݵW QVIQ`gEnU 5'pZ+T wɢB يP<Wo^Z1B)#hRZo s\~%yJ4 g  ĉF*a.F:KO*AFAWUvf33 фe"9ޱ>pEr(KĞἤTj̀}P$šmGC,P &P{,I:LҾd:s0l?5-C/+ӚmK* (/=]󈒄–O0`q  ]ؤZQ3*LݲIc$-I5b= +M JbZwK qv.k[ε$[PxֆXn\i7/mfA E*댞P4Gz hV38@0 8 #9ĘbΙIo9% &>B`37~vaߩ{{\_~Ϻ|07띮aXy3{; znv'eu1=px3螡z>g}]=iKq/..Rneʐ=vw}c.@ubw8-? {u;v㯓Xשײټ:pN]8qc[wkmZGk`)zY/V 4٧:kvu⡭nJ+?w_uI%PizFTOxj.vR͢.zxt9 M)t0-1ܣ[fOIYJ֒c&!"?:sJGG6zcF7HoEΙq}=+yZ ]Sd'$tiP< ql.IMѰxqƉ;jsA|{b9_;API݂Q 2A?wLh_;:. 
1{=] N{w|ۍ3uoP7$7tښhropaȻ-և]];^ls6@$ՎoڝFGnP1#Seʑ]sY2Zr\'Sdz^(aEHsK*HQ"Rb,z>B83zx,MXXh*;$K]g)dܳ ֹ8Z9ۗynt8%)R}$Yr+x5#i( "[! [ujhV"#؎ jMB@iCQTlҲGS TR 3، Up6\E{vV8ŕ_ ;t2 d%HV{piIӦD<{F0JWN*.$/Ik*GCmjðNx9RCD*L3U.SH? }./82IgܒGRH;]DphÆrvNF{7-rH'F'% -uCX3Gk)6#t~HE83D4 тLȥlZ$K ^D[lD /1l׾(K0a) PbΉJ]@fQVBh:zESkq^2'BQ҂6̆/3%30h O1 GAI;H*~Yg8eq\Sj}I0q~)я? Y1A >yU|Y@7MF #&>Ø9׺R _AU&}}Gos1yB.orj&TKwlm -A?v\8F;z!LQ\B"t4iLB.>{H&՟M9Oc mG!?qB4^HP ۭ{D`11t/D>i$j!!>8o= cS۬z&J=E_?=L[sdz(Mz&=zz`r<: f|Soc3mZB9': F&la`97" 5h)xmPOBլ.ͱ]Ut.}fΪLkZt@@pPAAp`` Q!=jXr ؐ Qq"B]FVSo:lO{Vt_K֐dȥ cEffs~e=̊Tz=NgsI*T/+KFT&ZEX[)ys"-r?+K.CV.M]NIE㫑?^6ֺrY Lk 7! P>( <26,Ƌ}׳"[)BedgEC:<ӷ3g (0" QTHL3`( CG8!J8IDCŵt #I}\!vboK@>;n&hÒ_ f@ZV'aص`0BAq'T@ אcv"D_{.y;hwx|["|1n2Յ\!Z#o_(aN{_hx^wayI{*nbK۩zr*޺/2kvwԇ ;}ÅK@O42p. w {2@u k]}XO*XS獭qr6uΈ|C*/]d QQoKjV8|+@I{X: ew_sFX+K؈sNaqXݢЂڝJ{նnRdqsxHzl'$pβoΉG`usj, MrJ.ƾK=Mt z_֒qN"C;6rk1.nQW羦 (|ƀ⹄_K?ߴ0T#"@Ӈvn8N' ;J/Y >8-aQ*|3Jd7p.Shѫl u]Qޕŝ "#"R`ou@uϔs(W̃)АmI.d32 LI9[>:E]hP3۟Y,gylmMuD>{e"Dp `Щk`<B& yݘ# )BU/r 8'Iak (f5-2 SJNt?(AiuT1mG )ΣJMmSG"U2|`/QPgyj`̊( +,`GZwl:K/Z]p A4ɱ+RƏp4Fa} P}8̗*'sxU׍WZB^{LKXƚ^^)EA4ȅ$^q#T)X.-,.&#Oѵ͝H9\+7檢Iϝ_ednB %mٕ)jڋթFZ"v*TZ#U~ngVU~U_iS1Xt69?ms7b7&HO(@&ڡuyDg ܡ2wX؝.aok>)5J}ӐMTqfO؉!΅`OFl^=};7YM FTf?Ph$@Pu+x3H Iaviqݤ "W m8 r0s>'Ϣd(!}8|8+\o((@B$! HqpZ+FyrU 7$mDl8IٹS 3&g0 sd3ִ v Ooe_)֪Ubkr3d)*[gcƔVO&.jg(r*aaDI=86 kd(B[Y*_v)n5sA0ƢEi93`K\sJ)%)%̐sH<=bwYEi'IRjg421SҊLHfDD%"oD3"i6m۶U-ьY,pf[lUNoDC@U< e o۶m۶mj߈fD"Z7hF$%#j߈fDUoD3"U-ьHĪZ7UoD3"oD3"DEU-BXm $I$YՒ͈D$I$I$I$I$I$I$Iӌ팴mK|gAY?Mp#$mjь0" ɌTF4#!U-#HJ͈D@AU|D3"PhUKF4#V|#"I%#jɄ<#2hZ8S 0'^"J`LYfe"tv|`;ːٱf. ?`MW^(!`Uod/3we([,(A/WF/IX &?p9YaCXS6(k(@ߊtVFsQ) 9Tj DK~!]MCis<=Wv;V z]l 7Z^ !7};ͅwӰeUtǷ; L3<ӲxeUc< Q|JOhY31 9ݙ7Aeoe`u] H&cu{<Ӈ9u#Rꑆ%. H^=t=}<+^ .\(ʘT?T yFN:-DrUƅ/tBLs86c ֹ} $$ tZJDɤi|֝+Bٗ*>q<̆!2EV8DME;!7fj\qM{`Ų*+0dF\]`hҧxOPg~rjEEci2ȘN2=qKlZy5rVC`p Xl wn')D"K;_Lܶm6dB0yfǙF$`n$3I"ӫ5nIo$ Pv3H .\8~v](mۦ4m۶$I#)^$܌D"`-I"pB$m5'of$dpCբ+eFm4QݒN$e4'mH(.ItU$Or$b֑&1[rF {n_~zvKd$9`xDvKn3-ܗ3ʢ-%D" iy$nmF KZD"n9#ڿnɁ4Ǫ0 lp c0sYcUeifj2!yH\*1eib|.KuɄ0>e1P]ܒ3I^3DסlQLįqq&I$@.""1=4f$8oH+y'@ B A)Ә@\DZ0o9ř xʻ*rr&f5ZDG&pF&f梄-  @ncQC,<&PQw q%l8:UcJn3@)Em: c0[mF,A-A"!1-I" ~%I$v$qݒی$vKj%qۖ,#q$""aըB D]M5?b*'B.[./@^Kn:b(Y|45gG8 =b@r"q@r#D@@rD#<7k*x#DݒD"ߜVHhY~.&rL[N9S ƐdKn3DD@v$ấ@b05,q( ~ֻU"+ Id``:rQ =mz: ~wt%scK1hhT:y` {3ɫ}6G1;6E#k!{C 5 HJ} 8W0Et&럦I[a }9o.TT ɧh&>$@*=+rYlxIi.$>Hzf Jt:Cv>ey@ݐEI-6mkE3gx :QZ|, 5X|H[na8q'\(l;]=7L'Sb4Jrbb+tY1ELn.VۭzRT'jY$z=IBTKPD06 XIcqX+ۅe><`ls'c Q0N*1cTQ$w,EG!4Q|г8M@2)֓1WB҇,v9"cÞ*.%$1é=ә(aͺԈ|x> 'b3 Ҭ%;-ǤAxWjV)Htᾗ^a.^M-hJ4$FPp4SyƸmUN@m~6)i/&R:CC2ͩpg-Ū.~ظa.Oӛl+K69IGZIJqsӆg^[\ED,3f [w\䴤:xf=C8gӃ熊[ZTllI8/ eϣ$#v/CwJ-M.DCg+zG&1Gxn1ʤTs0tVHl i[a}Nh"]ӓ~)c(}B+t:*@\|a%E`d*͐\oa㜄C. 8rÔ)#'|ra+E;4 %8"BjD`H9ʻl;~,39͓D!1GۆHruslvmFFY Iw>"t8Io3hewGs[ +Oi,ҍtL Js!g b&4"z- hSkDz QXEMOo%͓69~S!$ض} DZs1[!" s4!?S[$bۥk۳ZLOZsaU_(}u*q7 Vu!wVeK`\>y vHqnk W=u vt0w{{N-&], 1i3(zWo! 
]Mw)wꪉCMj-;]!l êC"-,XR .pGېXRasms,%Yg/Shbjs0wޡŒB@aEVCmf雠}V'GKnX'$0jDDQӖSupk*W |oQ j#vM▁^+3 o (dt)a7:~슩v=;%ipϻ٠yBYsD1H-$7gD0IffH2f>^h &Z8In^!guQU*Eݑ4JI7#T~jOpېHZK0l)h![̜N2IdGg:ucKkVx&!vTTyRuP`5jzKJ~xYך2">[s'k`/ԳUs kA4p @UA[ wh/u7L12[鱿^0[rQ o6#DԞ)MGFpo)'Db ]"ܦH-H"@Wn3H yȝpH2f*nmF vKn3ASrH0q.."H_r3HD@@|%6ƅYrD~"eXe_¶ 䄭%%Ւ0$H%m%aLl{w6#D6_rH" of$nɭDی$`oGKnas8[mF nmFR4vKn3H&ilǔimC*cTrHHE_U5ߓRHG|D2*m?r F $oȠ"0 цY}.+5"(5&3عιn2<ʁ9k|V(s6I e9,)K7k'4(Q ͩЮhbIr MLᄐ.IM#,UeA`),5 \ YIHZ ovmn܋zJYG^"f-tQƎYQ9LHTS BáYSu&|?EhT]FmHa(14{&Bho8ˁ*4%Rń7E{hLDJG!NIŗOMy[.ߜ0Vз eA;ᬮh ղW9ͫ4#7dr2"h2,HsZǒU±[וvZBD6 _‡,vJjgx_y~i.cy]KV4@8DDLIZdYB8#*rܰ\WcensV,ogVTíR<>^fk]|uj;ğI +Ո##NHX˿FvK̪Ms,"?=oHS`klm\]{5c g=[ aܝP@8 =yeB1|YJ;z9HpwcU*B}HB Q@s)ӒOe;?*9:/Ĉr=/[ۃ9`SԵ(Ӵ͝XX1!xO`d,QD㡪fj s)H:3@АUva?vp3XB[=wRk^;bn޽LeOcRML+929(e9n_UwUS{,('`" nrW{>!lq K'}cld]I1.;KnqяĚR'S z%'xLޡX}S z~X.V)OrKvRQ|G*pf{19۪J'G-*{7L8TYF{b^$,(>,8NWVqmoǬETEy%,/Z|Aqj.!flj*߿kDRSvjn -3ޡ=/2>'6I*a/bݫbJTpgC ^G;~ EX٤*!0@>NX{R+e)k=HbjWVczh,C2)Heס@!S TRa"'@:@hDAQ)_y4dQT}Z(KM||>aJ<1Jax VZY3ʷx'rqn߄D iI3)!00Ў̎ j og0rW=/ NIE?<9*b&$$Q wF/6 F,ζx_]]YWPT_ V?߫}ZOM# :D3[TbɻtV Yx(|}=pwfWEgs`9zEYWWcS'U4T=<ÚSW4-10Т)B@XPͣb }z, iV!׽{ww3q,ُP=M]<kDCA@Ãcq1a٧m;z ,ƥ( i" ̝kzv`K$D( F,KgF0)޻+8$ 5ٙPP2;z3V1+hDq HDd/fخUw\L tDCPPY#2 S0ދ(@?+ ̭f'uGADaUrq7NE?r#̲H rV5ỷʧW y#E򎞈v`Xx5{CNh~~Դ΢)NNsmRL|ªx4cqϭ j&fD@aUPýiiEE ԙ|jwFVUP#"5]͈x9P_w76D/ {EfDFAB3?THF$# ;#A/xe`FF0hЏr H$"*$WC&Py[ąD똖WDx@ai]wS\ =7nSE!6sKM`u{}خeEzCt^jF;UgZzzwMܺdVSJ5,qƒJ;/=e8G(7g̵0jqiLE dž[ܥ%l+c6'W"-ŗ%[B#r֥\pC""?΅oi9#u[O^UMP)T<9iu$`ȳu(jpU"d 3! Ҋ*\@3nV7/gu4퉜Oa FyxYҪפM'8]JzW2tHmM.oSeL}x ?~9KbOCSlmMz<;{㰒mf@5lkDGGP$([:M_:i0GΖ;;!s0rɖMFLJGLTyTf>*r}Jʞy]B:|?93+gwQzNqocc*ɊSHpv߭@As\Q~С$f +l/pe$LwuW)SFS]W}ʼnrse>Qo6Ջ5rI`<3GBF (MVPN~Q!~ZSHEB%ȇFd?:H  7(tx0Z@TDJ{Sp3~y\#JI*6ą٘iݰa]H1Ѭ9aNb&#ŦtOV>[1s)]R( -DoŋXg=(a%F;hќtgBXWGnw)UFlw)HTXE 6M]8p $ccc?SV6E=<ӭDq8)FʘmmRBɞ,=l^1c.5U9=9GғϾeʑwYWUCtݽLwtfV[YWUkcXOMLKKwEWYml*HumkjLP:)!a!=9KMup+==9զ $VUQ'ÑY8U? oyw޲xsscCIODt*"zBg&|;ޫp2 AX0 &8j Xp bA&`ЁC(F%E%`| ~-l8"FCT;3KWu>?țٱE++p WRwXfA7ng?__[6v4ˬwffOiglЃJ',c" QDD2 *AD !B!D$#"ȈȬFSG?@$eL~>k i\wD3f| =h1hGwf19Zȹ}BiL;9 0 \MQBd3)Xk(=Ķ:Lssʖx8 %~BL;()/ΈwIKO2Xpqi;_s/\1f﵊\dwԒ\˟k2k;=cykq)`#Nj!oxck0s+<<.l8ӟ+l<4HǪ:=IU &Vv S{5vW<НUjyBmQ^^}"Mu 44U~ )\(5q(lc!wt:ߨRO sLDiT<]Ҍ"fp4;WԩwfsbhLE+'3)\:< :M>˼S;JfxJwC"9fp$Ϊ4'_νes+MՀt14KCRL2w:XxC]&Qr>(B~C><@gP##&"oXoK2c8 c-[08>`ꦈQw=1T  5E`v^w[6M2- Iڻ=RI#I&)>3˺{WCcpu35&[4|źկl,VK8^Lx ;kmj k[ӎQ ce]-h'\$:9@%f`ɧ]SW$Ŕ]Șvusw|][[ǵ} N`-- ijb'Ւd#`YpRqR0  ÐQ!BS G,M 0:ĿDwhn;By$0w8aZlSWԱNFmf/lDבO);1t#o2P Ee`=Y?*w4^.!r}])=% X,@AATm1  br(@~#~1Sd1g;ԩ!yTcnCt;.U7UJܨop~Ş_Ψ%D `5yY 2SG3qAT[WR-tuvid+)BZzaUtqs^p $<%'fĩ"MƵ2Tv~fչtSt@t) z t ߏ9Ь@P8= pO8r oCO $R4`Nw97%x v6[0'D^? 
}!"~ yO]4>LK`\9CNȏ 5y !7ktSQE؛|4wH,y.iOuC25"q|' hP<.z}̸<=zM6?Hcym}:߼y6uE(2s♔Q"\% /Ae>JL+ ס+S0=K6;FTl #OY6e0F9Ӝ_@P8K)մ&=LI)p.=dWf{% !M׸GG7=NfAA^$I5'8j:]0eh *Ϛ}bӱֺ_;9;j?77UwdJJ\e5."vc49Ÿ >K|2Yѓ -[du|!*{K'nS/ǡ?ぴ4,9w$i:+k-"(Db%$otYԧPA!JZNW)sK *e[-q݀"1;@L7T,F.4e2}FLdmقTC>#"{7~2}񛰵~l[&ݞbS뚙nN(:&`M F= Qy%O*`T+26!{:<`O 7>Q u"7m)P%qfwјV4d x!݄%zBmE#%̗ν^50@rլz Rob۔C_ߔUdԙVT[PɸηNRI퍈zq) $x%/=q,D̯8gtBveQCjDb<]AY«nLD9(s:yJY)e)gu³:yؖpp8HMZ瑧5 [I/%<"i\#rf%u1eAC٦z PCuRvpsb8AY@AUx JvGuMEo >z`SP =ѧ[IA>U ( `:taCH-&m U+X7ljBTFCǵ}O"kwxy,CU]MnQ 7Eۏ.[ lomxPɓ/`\XL$KL*A pM4n'/~ŋW{J$9+IԈ>w3|Uw0LI0`]]axLd IY I|0`06I$I0`+ ` ]a]a4sW0}^a+ :w]aĹ+ b #0`>sW0j €sW0y ]a$sW0q ]a]!L3w> hs+ ]a4sW0} &]aԹ+ d &]aL1w]a2w0` 0[a+ 4 '0 3waϞV C(s) Ź+ 0y sW0L€Q1w]aY€I00`+ >wn۶LBx?h&I:@  `B@`HG nieO{K`37qӃtim_p9,u`Sa]aɇ+ XB6w` 63+|v}nWܰg ۩#" ]aIL1w(L0w04sW ]agy 0uW0L€sW]&sW03`ܵ0`EsW0`0 `]aLG3?`\nny])tUPQ$sW7qWbNWFsWuZ]a] '`߮omsK0u>p d &#P+ p#]:"I# ,@ 0h5v} ¼;fR;t KCf0Caԩ!j /Cg5U!?!)}lۂ]pk灎1-}d'q[C~]ƋP_; ő)((HBh}XW}2[F EQi_^*Q`wV73o{|Jz0˵^/ f@ELՕ$"H]n&(ʨbb25aVUJU|LTochaZ D/ew;Z.`2V3;f؅~^ Fp/7ZCgF"Z 3Ǥp@KSʭ&hǵ獑A{O][Sny`r\qJ̺wyn9DL p+֚4<*vNfAYp ޞvqvb^ͤIIz-Sb z34J!Ѡrv<{g쬭\8n\˟6 xCbY )l?ݭ @(_x?HOLEhs&"zcԲSu$,B[Oٴѳe';⛠"[+9ϩ, 8(P^^D-aDSߖM@;_bLLߖ!Yxe$Q R 2;O|¶JL2w9+`&ظL lsOymvkG+d. ܆ bVx+ l҅b c`3* Bba߶v2]!Npwa<7!bpH"I殰xF,@ swf6V0|F3f raܭۀN]a&Ya)Ӈ+2\¦ܕƀI333#\CGw*ݞlh44€yWaa:T&.0` &3Ķ]a?wU0} sW+€ `ߕ`"176mP%N4w@=0]aGq0b€ 0Z]aF3쟻BL0`2H8wI8qn"n|sۨ]O0I1wL3w]ۦWЩsW %Iģ' b0 os( ԘSF #C@F0~ o@k} KA Ede c KLż-2+5dbcl=Ql%f`ЛOϗ:>S6 0DHxo)XXS|yG9e^'h:t8tG #34 L#w++̏#0bz5ݳ(lTh~!N=ZQ?UAܥ$ t '@'1Q?c6eHoL:[2jvhA_js.dj)Upі+f\iP],}D;B9IIQȹ|bJD^q T]<+Hqoji$ձ;SUdDwO8ny[3F @GldE zW3C7/E0It,ZjBwhNP0Ȥ|0٭"""콃@sG8ʃ,YЍR?tkp |+kPA>Ue6C6,r]5C`;[El v]t: ar\$G'$Qߝ7JBqh?pe;q, o  ji_ھVٝui>%QW=gG/A6!,rDzQw}(}}uˡevhɵ,p@%PGj; ' &< 88NOzR]@崁3$MKB НA2!oC@DCDCam[m p܍bn0&€m `6#|6f]a]asw, hf+ 箅sWL3a d+ Q`B1w 00`T8 f ɶŦ]ǀI2w33~}{Vap0sW#H2C]sWM2msWWa 6msΫ '`fq>w6x-Ȑ.ɀqܫ0`90 r"(`!H|VIq:Ґ S nd_2C>ѭ3v&|z}C\J`$'xBDe \~ptTg 5}3QHQ3֞ t Pqs,

hx-0.3.0+20250717/test-grammars/rust/src/scanner.c
#include "tree_sitter/alloc.h"
#include "tree_sitter/parser.h"

#include <wctype.h>

enum TokenType {
    STRING_CONTENT,
    RAW_STRING_LITERAL_START,
    RAW_STRING_LITERAL_CONTENT,
    RAW_STRING_LITERAL_END,
    FLOAT_LITERAL,
    BLOCK_OUTER_DOC_MARKER,
    BLOCK_INNER_DOC_MARKER,
    BLOCK_COMMENT_CONTENT,
    LINE_DOC_CONTENT,
    ERROR_SENTINEL
};

typedef struct {
    uint8_t opening_hash_count;
} Scanner;

void *tree_sitter_rust_external_scanner_create() { return ts_calloc(1, sizeof(Scanner)); }

void tree_sitter_rust_external_scanner_destroy(void *payload) { ts_free((Scanner *)payload); }

unsigned tree_sitter_rust_external_scanner_serialize(void *payload, char *buffer) {
    Scanner *scanner = (Scanner *)payload;
    buffer[0] = (char)scanner->opening_hash_count;
    return 1;
}

void tree_sitter_rust_external_scanner_deserialize(void *payload, const char *buffer, unsigned length) {
    Scanner *scanner = (Scanner *)payload;
    scanner->opening_hash_count = 0;
    if (length == 1) {
        Scanner *scanner = (Scanner *)payload;
        scanner->opening_hash_count = buffer[0];
    }
}

static inline bool is_num_char(int32_t c) { return c == '_' || iswdigit(c); }

static inline void advance(TSLexer *lexer) { lexer->advance(lexer, false); }

static inline void skip(TSLexer *lexer) { lexer->advance(lexer, true); }

static inline bool process_string(TSLexer *lexer) {
    bool has_content = false;
    for (;;) {
        if (lexer->lookahead == '\"' || lexer->lookahead == '\\') {
            break;
        }
        if (lexer->eof(lexer)) {
            return false;
        }
        has_content = true;
        advance(lexer);
    }
    lexer->result_symbol = STRING_CONTENT;
    lexer->mark_end(lexer);
    return has_content;
}

static inline bool scan_raw_string_start(Scanner *scanner, TSLexer *lexer) {
    if (lexer->lookahead == 'b' || lexer->lookahead == 'c') {
        advance(lexer);
    }
    if (lexer->lookahead != 'r') {
        return false;
    }
    advance(lexer);

    uint8_t opening_hash_count = 0;
    while (lexer->lookahead == '#') {
        advance(lexer);
        opening_hash_count++;
    }

    if (lexer->lookahead != '"') {
        return false;
    }
    advance(lexer);
    scanner->opening_hash_count = opening_hash_count;

    lexer->result_symbol = RAW_STRING_LITERAL_START;
    return true;
}

static inline bool scan_raw_string_content(Scanner *scanner, TSLexer *lexer) {
    for (;;) {
        if (lexer->eof(lexer)) {
            return false;
        }
        if (lexer->lookahead == '"') {
            lexer->mark_end(lexer);
            advance(lexer);
            unsigned hash_count = 0;
            while (lexer->lookahead == '#' && hash_count < scanner->opening_hash_count) {
                advance(lexer);
                hash_count++;
            }
            if (hash_count == scanner->opening_hash_count) {
                lexer->result_symbol = RAW_STRING_LITERAL_CONTENT;
                return true;
            }
        } else {
            advance(lexer);
        }
    }
}

static inline bool scan_raw_string_end(Scanner *scanner, TSLexer *lexer) {
    advance(lexer);
    for (unsigned i = 0; i < scanner->opening_hash_count; i++) {
        advance(lexer);
    }
    lexer->result_symbol = RAW_STRING_LITERAL_END;
    return true;
}

static inline bool process_float_literal(TSLexer *lexer) {
    lexer->result_symbol = FLOAT_LITERAL;

    advance(lexer);
    while (is_num_char(lexer->lookahead)) {
        advance(lexer);
    }

    bool has_fraction = false, has_exponent = false;

    if (lexer->lookahead == '.') {
        has_fraction = true;
        advance(lexer);
        if (iswalpha(lexer->lookahead)) {
            // The dot is followed by a letter: 1.max(2) => not a float but an integer
            return false;
        }

        if (lexer->lookahead == '.') {
            return false;
        }
        while (is_num_char(lexer->lookahead)) {
            advance(lexer);
        }
    }

    lexer->mark_end(lexer);

    if (lexer->lookahead == 'e' || lexer->lookahead == 'E') {
        has_exponent = true;
        advance(lexer);
        if (lexer->lookahead == '+' || lexer->lookahead == '-') {
            advance(lexer);
        }
        if (!is_num_char(lexer->lookahead)) {
            return true;
        }
        advance(lexer);
        while (is_num_char(lexer->lookahead)) {
            advance(lexer);
        }
        lexer->mark_end(lexer);
    }

    if (!has_exponent && !has_fraction) {
        return false;
    }

    if (lexer->lookahead != 'u' && lexer->lookahead != 'i' && lexer->lookahead != 'f') {
        return true;
    }
    advance(lexer);

    if (!iswdigit(lexer->lookahead)) {
        return true;
    }

    while (iswdigit(lexer->lookahead)) {
        advance(lexer);
    }
    lexer->mark_end(lexer);
    return true;
}

static inline bool process_line_doc_content(TSLexer *lexer) {
    lexer->result_symbol = LINE_DOC_CONTENT;
    for (;;) {
        if (lexer->eof(lexer)) {
            return true;
        }
        if (lexer->lookahead == '\n') {
            // Include the newline in the doc content node.
            // Line endings are useful for markdown injection.
            advance(lexer);
            return true;
        }
        advance(lexer);
    }
}

typedef enum {
    LeftForwardSlash,
    LeftAsterisk,
    Continuing,
} BlockCommentState;

typedef struct {
    BlockCommentState state;
    unsigned nestingDepth;
} BlockCommentProcessing;

static inline void process_left_forward_slash(BlockCommentProcessing *processing, char current) {
    if (current == '*') {
        processing->nestingDepth += 1;
    }
    processing->state = Continuing;
};

static inline void process_left_asterisk(BlockCommentProcessing *processing, char current, TSLexer *lexer) {
    if (current == '*') {
        lexer->mark_end(lexer);
        processing->state = LeftAsterisk;
        return;
    }
    if (current == '/') {
        processing->nestingDepth -= 1;
    }
    processing->state = Continuing;
}

static inline void process_continuing(BlockCommentProcessing *processing, char current) {
    switch (current) {
        case '/':
            processing->state = LeftForwardSlash;
            break;
        case '*':
            processing->state = LeftAsterisk;
            break;
    }
}

static inline bool process_block_comment(TSLexer *lexer, const bool *valid_symbols) {
    char first = (char)lexer->lookahead;

    // The first character is stored so we can safely advance inside
    // these if blocks. However, because we only store one, we can only
    // safely advance 1 time. Since there's a chance that an advance could
    // happen in one state, we must advance in all states to ensure that
    // the program ends up in a sane state prior to processing the block
    // comment if need be.
    if (valid_symbols[BLOCK_INNER_DOC_MARKER] && first == '!') {
        lexer->result_symbol = BLOCK_INNER_DOC_MARKER;
        advance(lexer);
        return true;
    }

    if (valid_symbols[BLOCK_OUTER_DOC_MARKER] && first == '*') {
        advance(lexer);
        lexer->mark_end(lexer);
        // If the next token is a / that means that it's an empty block comment.
        if (lexer->lookahead == '/') {
            return false;
        }

        // If the next token is a * that means that this isn't a BLOCK_OUTER_DOC_MARKER
        // as BLOCK_OUTER_DOC_MARKER's only have 2 * not 3 or more.
        if (lexer->lookahead != '*') {
            lexer->result_symbol = BLOCK_OUTER_DOC_MARKER;
            return true;
        }
    } else {
        advance(lexer);
    }

    if (valid_symbols[BLOCK_COMMENT_CONTENT]) {
        BlockCommentProcessing processing = {Continuing, 1};

        // Manually set the current state based on the first character
        switch (first) {
            case '*':
                processing.state = LeftAsterisk;
                if (lexer->lookahead == '/') {
                    // This case can happen in an empty doc block comment
                    // like /*!*/. The comment has no contents, so bail.
                    return false;
                }
                break;
            case '/':
                processing.state = LeftForwardSlash;
                break;
            default:
                processing.state = Continuing;
                break;
        }

        // For the purposes of actually parsing rust code, this
        // is incorrect as it considers an unterminated block comment
        // to be an error.
        // However, for the purposes of syntax highlighting
        // this should be considered successful as otherwise you are not able
        // to syntax highlight a block of code prior to closing the
        // block comment
        while (!lexer->eof(lexer) && processing.nestingDepth != 0) {
            // Set first to the current lookahead as that is the second character
            // as we force an advance in the above code when we are checking if we
            // need to handle a block comment inner or outer doc comment signifier
            // node
            first = (char)lexer->lookahead;
            switch (processing.state) {
                case LeftForwardSlash:
                    process_left_forward_slash(&processing, first);
                    break;
                case LeftAsterisk:
                    process_left_asterisk(&processing, first, lexer);
                    break;
                case Continuing:
                    lexer->mark_end(lexer);
                    process_continuing(&processing, first);
                    break;
                default:
                    break;
            }

            advance(lexer);

            if (first == '/' && processing.nestingDepth != 0) {
                lexer->mark_end(lexer);
            }
        }

        lexer->result_symbol = BLOCK_COMMENT_CONTENT;
        return true;
    }

    return false;
}

bool tree_sitter_rust_external_scanner_scan(void *payload, TSLexer *lexer, const bool *valid_symbols) {
    // The documentation states that if the lexical analysis fails for some reason
    // they will mark every state as valid and pass it to the external scanner
    // However, we can't do anything to help them recover in that case so we
    // should just fail.
    /*
      link: https://tree-sitter.github.io/tree-sitter/creating-parsers#external-scanners

      If a syntax error is encountered during regular parsing, Tree-sitter’s
      first action during error recovery will be to call the external scanner’s
      scan function with all tokens marked valid. The scanner should detect this
      case and handle it appropriately. One simple method of detection is to add
      an unused token to the end of the externals array, for example

      externals: $ => [$.token1, $.token2, $.error_sentinel],

      then check whether that token is marked valid to determine whether
      Tree-sitter is in error correction mode.
    */
    if (valid_symbols[ERROR_SENTINEL]) {
        return false;
    }

    Scanner *scanner = (Scanner *)payload;

    if (valid_symbols[BLOCK_COMMENT_CONTENT] || valid_symbols[BLOCK_INNER_DOC_MARKER] ||
        valid_symbols[BLOCK_OUTER_DOC_MARKER]) {
        return process_block_comment(lexer, valid_symbols);
    }

    if (valid_symbols[STRING_CONTENT] && !valid_symbols[FLOAT_LITERAL]) {
        return process_string(lexer);
    }

    if (valid_symbols[LINE_DOC_CONTENT]) {
        return process_line_doc_content(lexer);
    }

    while (iswspace(lexer->lookahead)) {
        skip(lexer);
    }

    if (valid_symbols[RAW_STRING_LITERAL_START] &&
        (lexer->lookahead == 'r' || lexer->lookahead == 'b' || lexer->lookahead == 'c')) {
        return scan_raw_string_start(scanner, lexer);
    }

    if (valid_symbols[RAW_STRING_LITERAL_CONTENT]) {
        return scan_raw_string_content(scanner, lexer);
    }

    if (valid_symbols[RAW_STRING_LITERAL_END] && lexer->lookahead == '"') {
        return scan_raw_string_end(scanner, lexer);
    }

    if (valid_symbols[FLOAT_LITERAL] && iswdigit(lexer->lookahead)) {
        return process_float_literal(lexer);
    }

    return false;
}

hx-0.3.0+20250717/test-grammars/rust/src/tree_sitter/000077500000000000000000000000001503625671400217505ustar00rootroot00000000000000
hx-0.3.0+20250717/test-grammars/rust/src/tree_sitter/alloc.h000066400000000000000000000017311503625671400232150ustar00rootroot00000000000000
#ifndef TREE_SITTER_ALLOC_H_
#define TREE_SITTER_ALLOC_H_

#ifdef __cplusplus
extern "C" {
#endif

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

// Allow clients to override allocation functions
#ifdef TREE_SITTER_REUSE_ALLOCATOR

extern void *(*ts_current_malloc)(size_t size);
extern void *(*ts_current_calloc)(size_t count, size_t size);
extern void *(*ts_current_realloc)(void *ptr, size_t size);
extern void (*ts_current_free)(void *ptr);

#ifndef ts_malloc
#define ts_malloc ts_current_malloc
#endif
#ifndef ts_calloc
#define ts_calloc ts_current_calloc
#endif
#ifndef ts_realloc
#define ts_realloc ts_current_realloc
#endif
#ifndef ts_free
#define ts_free ts_current_free
#endif

#else

#ifndef ts_malloc
#define ts_malloc malloc
#endif
#ifndef ts_calloc
#define ts_calloc calloc
#endif
#ifndef ts_realloc
#define ts_realloc realloc
#endif
#ifndef ts_free
#define ts_free free
#endif

#endif

#ifdef __cplusplus
}
#endif

#endif // TREE_SITTER_ALLOC_H_

hx-0.3.0+20250717/test-grammars/rust/src/tree_sitter/array.h000066400000000000000000000242641503625671400232470ustar00rootroot00000000000000
#ifndef TREE_SITTER_ARRAY_H_
#define TREE_SITTER_ARRAY_H_

#ifdef __cplusplus
extern "C" {
#endif

#include "./alloc.h"

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#ifdef _MSC_VER
#pragma warning(disable : 4101)
#elif defined(__GNUC__) || defined(__clang__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-variable"
#endif

#define Array(T) \
  struct { \
    T *contents; \
    uint32_t size; \
    uint32_t capacity; \
  }

/// Initialize an array.
#define array_init(self) \
  ((self)->size = 0, (self)->capacity = 0, (self)->contents = NULL)

/// Create an empty array.
#define array_new() \
  { NULL, 0, 0 }

/// Get a pointer to the element at a given `index` in the array.
#define array_get(self, _index) \
  (assert((uint32_t)(_index) < (self)->size), &(self)->contents[_index])

/// Get a pointer to the first element in the array.
#define array_front(self) array_get(self, 0)

/// Get a pointer to the last element in the array.
#define array_back(self) array_get(self, (self)->size - 1)

/// Clear the array, setting its size to zero. Note that this does not free any
/// memory allocated for the array's contents.
#define array_clear(self) ((self)->size = 0)

/// Reserve `new_capacity` elements of space in the array. If `new_capacity` is
/// less than the array's current capacity, this function has no effect.
#define array_reserve(self, new_capacity) \
  _array__reserve((Array *)(self), array_elem_size(self), new_capacity)

/// Free any memory allocated for this array. Note that this does not free any
/// memory allocated for the array's contents.
#define array_delete(self) _array__delete((Array *)(self))

/// Push a new `element` onto the end of the array.
#define array_push(self, element) \
  (_array__grow((Array *)(self), 1, array_elem_size(self)), \
   (self)->contents[(self)->size++] = (element))

/// Increase the array's size by `count` elements.
/// New elements are zero-initialized.
#define array_grow_by(self, count) \
  do { \
    if ((count) == 0) break; \
    _array__grow((Array *)(self), count, array_elem_size(self)); \
    memset((self)->contents + (self)->size, 0, (count) * array_elem_size(self)); \
    (self)->size += (count); \
  } while (0)

/// Append all elements from one array to the end of another.
#define array_push_all(self, other) \
  array_extend((self), (other)->size, (other)->contents)

/// Append `count` elements to the end of the array, reading their values from the
/// `contents` pointer.
#define array_extend(self, count, contents) \
  _array__splice( \
    (Array *)(self), array_elem_size(self), (self)->size, \
    0, count, contents \
  )

/// Remove `old_count` elements from the array starting at the given `index`. At
/// the same index, insert `new_count` new elements, reading their values from the
/// `new_contents` pointer.
#define array_splice(self, _index, old_count, new_count, new_contents) \
  _array__splice( \
    (Array *)(self), array_elem_size(self), _index, \
    old_count, new_count, new_contents \
  )

/// Insert one `element` into the array at the given `index`.
#define array_insert(self, _index, element) \
  _array__splice((Array *)(self), array_elem_size(self), _index, 0, 1, &(element))

/// Remove one element from the array at the given `index`.
#define array_erase(self, _index) \
  _array__erase((Array *)(self), array_elem_size(self), _index)

/// Pop the last element off the array, returning the element by value.
#define array_pop(self) ((self)->contents[--(self)->size])

/// Assign the contents of one array to another, reallocating if necessary.
#define array_assign(self, other) \
  _array__assign((Array *)(self), (const Array *)(other), array_elem_size(self))

/// Swap one array with another
#define array_swap(self, other) \
  _array__swap((Array *)(self), (Array *)(other))

/// Get the size of the array contents
#define array_elem_size(self) (sizeof *(self)->contents)

/// Search a sorted array for a given `needle` value, using the given `compare`
/// callback to determine the order.
///
/// If an existing element is found to be equal to `needle`, then the `index`
/// out-parameter is set to the existing value's index, and the `exists`
/// out-parameter is set to true. Otherwise, `index` is set to an index where
/// `needle` should be inserted in order to preserve the sorting, and `exists`
/// is set to false.
#define array_search_sorted_with(self, compare, needle, _index, _exists) \
  _array__search_sorted(self, 0, compare, , needle, _index, _exists)

/// Search a sorted array for a given `needle` value, using integer comparisons
/// of a given struct field (specified with a leading dot) to determine the order.
///
/// See also `array_search_sorted_with`.
#define array_search_sorted_by(self, field, needle, _index, _exists) \
  _array__search_sorted(self, 0, _compare_int, field, needle, _index, _exists)

/// Insert a given `value` into a sorted array, using the given `compare`
/// callback to determine the order.
#define array_insert_sorted_with(self, compare, value) \
  do { \
    unsigned _index, _exists; \
    array_search_sorted_with(self, compare, &(value), &_index, &_exists); \
    if (!_exists) array_insert(self, _index, value); \
  } while (0)

/// Insert a given `value` into a sorted array, using integer comparisons of
/// a given struct field (specified with a leading dot) to determine the order.
///
/// See also `array_search_sorted_by`.
#define array_insert_sorted_by(self, field, value) \
  do { \
    unsigned _index, _exists; \
    array_search_sorted_by(self, field, (value) field, &_index, &_exists); \
    if (!_exists) array_insert(self, _index, value); \
  } while (0)

// Private

typedef Array(void) Array;

/// This is not what you're looking for, see `array_delete`.
static inline void _array__delete(Array *self) {
  if (self->contents) {
    ts_free(self->contents);
    self->contents = NULL;
    self->size = 0;
    self->capacity = 0;
  }
}

/// This is not what you're looking for, see `array_erase`.
static inline void _array__erase(Array *self, size_t element_size, uint32_t index) {
  assert(index < self->size);
  char *contents = (char *)self->contents;
  memmove(contents + index * element_size, contents + (index + 1) * element_size,
          (self->size - index - 1) * element_size);
  self->size--;
}

/// This is not what you're looking for, see `array_reserve`.
static inline void _array__reserve(Array *self, size_t element_size, uint32_t new_capacity) {
  if (new_capacity > self->capacity) {
    if (self->contents) {
      self->contents = ts_realloc(self->contents, new_capacity * element_size);
    } else {
      self->contents = ts_malloc(new_capacity * element_size);
    }
    self->capacity = new_capacity;
  }
}

/// This is not what you're looking for, see `array_assign`.
static inline void _array__assign(Array *self, const Array *other, size_t element_size) {
  _array__reserve(self, element_size, other->size);
  self->size = other->size;
  memcpy(self->contents, other->contents, self->size * element_size);
}

/// This is not what you're looking for, see `array_swap`.
static inline void _array__swap(Array *self, Array *other) {
  Array swap = *other;
  *other = *self;
  *self = swap;
}

/// This is not what you're looking for, see `array_push` or `array_grow_by`.
static inline void _array__grow(Array *self, uint32_t count, size_t element_size) {
  uint32_t new_size = self->size + count;
  if (new_size > self->capacity) {
    uint32_t new_capacity = self->capacity * 2;
    if (new_capacity < 8) new_capacity = 8;
    if (new_capacity < new_size) new_capacity = new_size;
    _array__reserve(self, element_size, new_capacity);
  }
}

/// This is not what you're looking for, see `array_splice`.
static inline void _array__splice(Array *self, size_t element_size,
                                  uint32_t index, uint32_t old_count,
                                  uint32_t new_count, const void *elements) {
  uint32_t new_size = self->size + new_count - old_count;
  uint32_t old_end = index + old_count;
  uint32_t new_end = index + new_count;
  assert(old_end <= self->size);

  _array__reserve(self, element_size, new_size);

  char *contents = (char *)self->contents;
  if (self->size > old_end) {
    memmove(
      contents + new_end * element_size,
      contents + old_end * element_size,
      (self->size - old_end) * element_size
    );
  }
  if (new_count > 0) {
    if (elements) {
      memcpy(
        (contents + index * element_size),
        elements,
        new_count * element_size
      );
    } else {
      memset(
        (contents + index * element_size),
        0,
        new_count * element_size
      );
    }
  }
  self->size += new_count - old_count;
}

/// A binary search routine, based on Rust's `std::slice::binary_search_by`.
/// This is not what you're looking for, see `array_search_sorted_with` or `array_search_sorted_by`.
#define _array__search_sorted(self, start, compare, suffix, needle, _index, _exists) \
  do { \
    *(_index) = start; \
    *(_exists) = false; \
    uint32_t size = (self)->size - *(_index); \
    if (size == 0) break; \
    int comparison; \
    while (size > 1) { \
      uint32_t half_size = size / 2; \
      uint32_t mid_index = *(_index) + half_size; \
      comparison = compare(&((self)->contents[mid_index] suffix), (needle)); \
      if (comparison <= 0) *(_index) = mid_index; \
      size -= half_size; \
    } \
    comparison = compare(&((self)->contents[*(_index)] suffix), (needle)); \
    if (comparison == 0) *(_exists) = true; \
    else if (comparison < 0) *(_index) += 1; \
  } while (0)

/// Helper macro for the `_sorted_by` routines below. This takes the left (existing)
/// parameter by reference in order to work with the generic sorting function above.
#define _compare_int(a, b) ((int)*(a) - (int)(b))

#ifdef _MSC_VER
#pragma warning(default : 4101)
#elif defined(__GNUC__) || defined(__clang__)
#pragma GCC diagnostic pop
#endif

#ifdef __cplusplus
}
#endif

#endif // TREE_SITTER_ARRAY_H_

hx-0.3.0+20250717/test-grammars/rust/src/tree_sitter/parser.h000066400000000000000000000155771503625671400234300ustar00rootroot00000000000000
#ifndef TREE_SITTER_PARSER_H_
#define TREE_SITTER_PARSER_H_

#ifdef __cplusplus
extern "C" {
#endif

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ts_builtin_sym_error ((TSSymbol)-1)
#define ts_builtin_sym_end 0
#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024

#ifndef TREE_SITTER_API_H_
typedef uint16_t TSStateId;
typedef uint16_t TSSymbol;
typedef uint16_t TSFieldId;
typedef struct TSLanguage TSLanguage;
#endif

typedef struct {
  TSFieldId field_id;
  uint8_t child_index;
  bool inherited;
} TSFieldMapEntry;

typedef struct {
  uint16_t index;
  uint16_t length;
} TSFieldMapSlice;

typedef struct {
  bool visible;
  bool named;
  bool supertype;
} TSSymbolMetadata;

typedef struct TSLexer TSLexer;

struct TSLexer {
  int32_t lookahead;
  TSSymbol result_symbol;
  void (*advance)(TSLexer *, bool);
  void (*mark_end)(TSLexer *);
  uint32_t (*get_column)(TSLexer *);
  bool (*is_at_included_range_start)(const TSLexer *);
  bool (*eof)(const TSLexer *);
  void (*log)(const TSLexer *, const char *, ...);
};

typedef enum {
  TSParseActionTypeShift,
  TSParseActionTypeReduce,
  TSParseActionTypeAccept,
  TSParseActionTypeRecover,
} TSParseActionType;

typedef union {
  struct {
    uint8_t type;
    TSStateId state;
    bool extra;
    bool repetition;
  } shift;
  struct {
    uint8_t type;
    uint8_t child_count;
    TSSymbol symbol;
    int16_t dynamic_precedence;
    uint16_t production_id;
  } reduce;
  uint8_t type;
} TSParseAction;

typedef struct {
  uint16_t lex_state;
  uint16_t external_lex_state;
} TSLexMode;

typedef union {
  TSParseAction action;
  struct {
    uint8_t count;
    bool reusable;
  } entry;
} TSParseActionEntry;

typedef struct {
  int32_t start;
  int32_t end;
} TSCharacterRange;

struct TSLanguage {
  uint32_t version;
  uint32_t symbol_count;
  uint32_t alias_count;
  uint32_t token_count;
  uint32_t external_token_count;
  uint32_t state_count;
  uint32_t large_state_count;
  uint32_t production_id_count;
  uint32_t field_count;
  uint16_t max_alias_sequence_length;
  const uint16_t *parse_table;
  const uint16_t *small_parse_table;
  const uint32_t *small_parse_table_map;
  const TSParseActionEntry *parse_actions;
  const char * const *symbol_names;
  const char * const *field_names;
  const TSFieldMapSlice *field_map_slices;
  const TSFieldMapEntry *field_map_entries;
  const TSSymbolMetadata *symbol_metadata;
  const TSSymbol *public_symbol_map;
  const uint16_t *alias_map;
  const TSSymbol *alias_sequences;
  const TSLexMode *lex_modes;
  bool (*lex_fn)(TSLexer *, TSStateId);
  bool (*keyword_lex_fn)(TSLexer *, TSStateId);
  TSSymbol keyword_capture_token;
  struct {
    const bool *states;
    const TSSymbol *symbol_map;
    void *(*create)(void);
    void (*destroy)(void *);
    bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist);
    unsigned (*serialize)(void *, char *);
    void (*deserialize)(void *, const char *, unsigned);
  } external_scanner;
  const TSStateId *primary_state_ids;
};

static inline bool set_contains(TSCharacterRange *ranges, uint32_t len, int32_t lookahead) {
  uint32_t index = 0;
  uint32_t size = len - index;
  while (size > 1) {
    uint32_t half_size = size / 2;
    uint32_t mid_index = index + half_size;
    TSCharacterRange *range = &ranges[mid_index];
    if (lookahead >= range->start && lookahead <= range->end) {
      return true;
    } else if (lookahead > range->end) {
      index = mid_index;
    }
    size -= half_size;
  }
  TSCharacterRange *range = &ranges[index];
  return (lookahead >= range->start && lookahead <= range->end);
}

/*
 * Lexer Macros
 */

#ifdef _MSC_VER
#define UNUSED __pragma(warning(suppress : 4101))
#else
#define UNUSED __attribute__((unused))
#endif

#define START_LEXER() \
  bool result = false; \
  bool skip = false; \
  UNUSED \
  bool eof = false; \
  int32_t lookahead; \
  goto start; \
  next_state: \
  lexer->advance(lexer, skip); \
  start: \
  skip = false; \
  lookahead = lexer->lookahead;

#define ADVANCE(state_value) \
  { \
    state = state_value; \
    goto next_state; \
  }

#define ADVANCE_MAP(...) \
  { \
    static const uint16_t map[] = { __VA_ARGS__ }; \
    for (uint32_t i = 0; i < sizeof(map) / sizeof(map[0]); i += 2) { \
      if (map[i] == lookahead) { \
        state = map[i + 1]; \
        goto next_state; \
      } \
    } \
  }

#define SKIP(state_value) \
  { \
    skip = true; \
    state = state_value; \
    goto next_state; \
  }

#define ACCEPT_TOKEN(symbol_value) \
  result = true; \
  lexer->result_symbol = symbol_value; \
  lexer->mark_end(lexer);

#define END_STATE() return result;

/*
 * Parse Table Macros
 */

#define SMALL_STATE(id) ((id) - LARGE_STATE_COUNT)

#define STATE(id) id

#define ACTIONS(id) id

#define SHIFT(state_value) \
  {{ \
    .shift = { \
      .type = TSParseActionTypeShift, \
      .state = (state_value) \
    } \
  }}

#define SHIFT_REPEAT(state_value) \
  {{ \
    .shift = { \
      .type = TSParseActionTypeShift, \
      .state = (state_value), \
      .repetition = true \
    } \
  }}

#define SHIFT_EXTRA() \
  {{ \
    .shift = { \
      .type = TSParseActionTypeShift, \
      .extra = true \
    } \
  }}

#define REDUCE(symbol_name, children, precedence, prod_id) \
  {{ \
    .reduce = { \
      .type = TSParseActionTypeReduce, \
      .symbol = symbol_name, \
      .child_count = children, \
      .dynamic_precedence = precedence, \
      .production_id = prod_id \
    }, \
  }}

#define RECOVER() \
  {{ \
    .type = TSParseActionTypeRecover \
  }}

#define ACCEPT_INPUT() \
  {{ \
    .type = TSParseActionTypeAccept \
  }}

#ifdef __cplusplus
}
#endif

#endif // TREE_SITTER_PARSER_H_