knuffel-3.2.0/.cargo_vcs_info.json0000644000000001360000000000100124700ustar { "git": { "sha1": "c44c6b0c0f31ea6d1174d5d2ed41064922ea44ca" }, "path_in_vcs": "" }knuffel-3.2.0/.editorconfig000064400000000000000000000002101046102023000137260ustar 00000000000000root = true [*] tab_width = 4 trim_trailing_whitespace = true insert_final_newline = true [*.rs] indent_style = space indent_size = 4 knuffel-3.2.0/.github/workflows/tests.yml000064400000000000000000000016041046102023000165430ustar 00000000000000on: [pull_request, push] name: Tests jobs: build_and_test: name: Rust tests runs-on: ubuntu-latest strategy: matrix: rust_version: [1.62, stable] steps: - uses: actions/checkout@master - uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: ${{ matrix.rust_version }} default: true - name: All feature tests uses: actions-rs/cargo@v1 with: command: test args: --workspace --all-features - name: Build without features uses: actions-rs/cargo@v1 with: command: build args: --workspace --no-default-features - name: Normal tests without extra features (but derive is enabled) uses: actions-rs/cargo@v1 with: command: test args: --workspace --no-default-features --features=derive knuffel-3.2.0/.gitignore000064400000000000000000000000401046102023000132420ustar 00000000000000/.vagga /target /tmp Cargo.lock knuffel-3.2.0/Cargo.lock0000644000000405470000000000100104550ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "addr2line" version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" dependencies = [ "gimli", ] [[package]] name = "adler" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" dependencies = [ "getrandom", "once_cell", "version_check", ] [[package]] name = "aho-corasick" version = "0.7.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" dependencies = [ "memchr", ] [[package]] name = "assert-json-diff" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" dependencies = [ "serde", "serde_json", ] [[package]] name = "backtrace" version = "0.3.67" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" dependencies = [ "addr2line", "cc", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", ] [[package]] name = "base64" version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" [[package]] name = "bitflags" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "cc" version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" [[package]] name = "cfg-if" version = "1.0.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chumsky" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23170228b96236b5a7299057ac284a321457700bc8c41a4476052f0f4ba5349d" dependencies = [ "hashbrown", ] [[package]] name = "errno" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" dependencies = [ "errno-dragonfly", "libc", "winapi", ] [[package]] name = "errno-dragonfly" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" dependencies = [ "cc", "libc", ] [[package]] name = "getrandom" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" dependencies = [ "cfg-if", "libc", "wasi", ] [[package]] name = "gimli" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4" [[package]] name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ "ahash", ] [[package]] name = "heck" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" dependencies = [ "unicode-segmentation", ] [[package]] name = "hermit-abi" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" [[package]] name = "io-lifetimes" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfa919a82ea574332e2de6e74b4c36e74d41982b335080fa59d4ef31be20fdf3" dependencies = [ "libc", "windows-sys", ] [[package]] name = "is-terminal" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21b6b32576413a8e69b90e952e4a026476040d81017b80445deda5f2d3921857" dependencies = [ "hermit-abi", "io-lifetimes", "rustix", "windows-sys", ] [[package]] name = "is_ci" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "616cde7c720bb2bb5824a224687d8f77bfd38922027f01d825cd7453be5099fb" [[package]] name = "itoa" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" [[package]] name = "knuffel" version = "3.2.0" dependencies = [ "assert-json-diff", "base64", "chumsky", "knuffel-derive", "miette", "minicbor", "serde_json", "thiserror", "unicode-width", ] [[package]] name = "knuffel-derive" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91977f56c49cfb961e3d840e2e7c6e4a56bde7283898cf606861f1421348283d" dependencies = [ "heck", "proc-macro-error", "proc-macro2", "quote", "syn", ] [[package]] name = "libc" version = "0.2.140" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c" [[package]] name = "linux-raw-sys" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" [[package]] name = "memchr" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "miette" version = "5.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07749fb52853e739208049fb513287c6f448de9103dfa78b05ae01f2fc5809bb" dependencies = [ "backtrace", "is-terminal", "miette-derive", "once_cell", "owo-colors", "supports-color", "supports-hyperlinks", "supports-unicode", "terminal_size", "textwrap", "thiserror", "unicode-width", ] [[package]] name = "miette-derive" version = "5.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a07ad93a80d1b92bb44cb42d7c49b49c9aab1778befefad49cceb5e4c5bf460" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "minicbor" version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7005aaf257a59ff4de471a9d5538ec868a21586534fff7f85dd97d4043a6139" dependencies = [ "minicbor-derive", ] [[package]] name = "minicbor-derive" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1154809406efdb7982841adb6311b3d095b46f78342dd646736122fe6b19e267" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "miniz_oxide" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" dependencies = [ "adler", ] [[package]] name = "object" version = "0.30.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" dependencies = [ "memchr", ] [[package]] name = "once_cell" version = "1.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" [[package]] name = "owo-colors" version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" [[package]] name = "proc-macro-error" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", "proc-macro2", "quote", "syn", "version_check", ] [[package]] name = "proc-macro-error-attr" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ "proc-macro2", "quote", "version_check", ] [[package]] name = "proc-macro2" version = "1.0.52" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d0e1ae9e836cc3beddd63db0df682593d7e2d3d891ae8c9083d2113e1744224" dependencies = [ "unicode-ident", ] [[package]] name = "quote" version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" dependencies = [ "proc-macro2", ] [[package]] name = "regex" version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.6.28" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" [[package]] name = "rustc-demangle" version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" [[package]] name = "rustix" version = "0.36.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd5c6ff11fecd55b40746d1995a02f2eb375bf8c00d192d521ee09f42bef37bc" dependencies = [ "bitflags", "errno", "io-lifetimes", "libc", "linux-raw-sys", "windows-sys", ] [[package]] name = "ryu" version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" [[package]] name = "serde" version = "1.0.156" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "314b5b092c0ade17c00142951e50ced110ec27cea304b1037c6969246c2469a4" [[package]] name = "serde_json" version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea" dependencies = [ "itoa", "ryu", "serde", ] [[package]] name = "smawk" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f67ad224767faa3c7d8b6d91985b78e70a1324408abcb1cfcc2be4c06bc06043" [[package]] name = "supports-color" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4950e7174bffabe99455511c39707310e7e9b440364a2fcb1cc21521be57b354" dependencies = [ "is-terminal", "is_ci", ] [[package]] name = "supports-hyperlinks" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b4806e0b03b9906e76b018a5d821ebf198c8e9dc0829ed3328eeeb5094aed60" dependencies = [ "is-terminal", ] [[package]] name = "supports-unicode" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b6c2cb240ab5dd21ed4906895ee23fe5a48acdbd15a3ce388e7b62a9b66baf7" dependencies = [ "is-terminal", ] [[package]] name = "syn" version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "terminal_size" version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df" dependencies = [ "libc", "winapi", ] [[package]] name = "textwrap" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7b3e525a49ec206798b40326a44121291b530c963cfb01018f63e135bac543d" dependencies = [ "smawk", "unicode-linebreak", "unicode-width", ] [[package]] name = "thiserror" version = "1.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a5ab016db510546d856297882807df8da66a16fb8c4101cb8b30054b0d5b2d9c" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" version = "1.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5420d42e90af0c38c3290abcca25b9b3bdf379fc9f55c528f53a269d9c9a267e" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "unicode-ident" version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" [[package]] 
name = "unicode-linebreak" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c5faade31a542b8b35855fff6e8def199853b2da8da256da52f52f1316ee3137" dependencies = [ "hashbrown", "regex", ] [[package]] name = "unicode-segmentation" version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] name = "version_check" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-sys" version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" [[package]] name = "windows_aarch64_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" [[package]] name = "windows_i686_gnu" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" [[package]] name = "windows_i686_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" [[package]] name = "windows_x86_64_gnu" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" [[package]] name = "windows_x86_64_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" knuffel-3.2.0/Cargo.toml0000644000000031440000000000100104700ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" rust-version = "1.62.0" name = "knuffel" version = "3.2.0" description = """ Another KDL language implementation """ homepage = "https://github.com/tailhook/knuffel" documentation = "https://docs.rs/knuffel" readme = "README.md" keywords = [ "kdl", "configuration", "parser", ] categories = [ "parser-implementations", "config", "encoding", ] license = "MIT/Apache-2.0" [dependencies.base64] version = "0.21.0" optional = true [dependencies.chumsky] version = "0.9.2" default-features = false [dependencies.knuffel-derive] version = "^3.2.0" optional = true [dependencies.miette] version = "5.1.1" [dependencies.minicbor] version = "0.19.1" features = [ "std", "derive", ] optional = true [dependencies.thiserror] version = "1.0.31" [dependencies.unicode-width] version = "0.1.9" optional = true [dev-dependencies.assert-json-diff] version = "2.0.1" [dev-dependencies.miette] version = "5.1.1" features = ["fancy"] [dev-dependencies.serde_json] version = "1.0" [features] default = [ "derive", "base64", "line-numbers", ] derive = ["knuffel-derive"] line-numbers = ["unicode-width"] knuffel-3.2.0/Cargo.toml.orig000064400000000000000000000017741046102023000141600ustar 00000000000000[workspace] members = [ ".", "derive", ] [package] name = "knuffel" version = "3.2.0" edition = "2021" description = """ Another KDL language implementation """ license = "MIT/Apache-2.0" keywords = ["kdl", "configuration", "parser"] categories = ["parser-implementations", "config", "encoding"] homepage = "https://github.com/tailhook/knuffel" documentation = "https://docs.rs/knuffel" rust-version = "1.62.0" readme = "README.md" [dependencies] chumsky = {version="0.9.2", default-features=false} knuffel-derive = {path="./derive", version= "^3.2.0", optional=true} base64 = {version="0.21.0", optional=true} unicode-width = {version="0.1.9", optional=true} minicbor = {version="0.19.1", optional=true, features=["std", "derive"]} miette = "5.1.1" thiserror = "1.0.31" [dev-dependencies] miette = { version="5.1.1", features=["fancy"] } assert-json-diff = "2.0.1" serde_json = "1.0" [features] default = ["derive", "base64", "line-numbers"] derive = ["knuffel-derive"] line-numbers = ["unicode-width"] knuffel-3.2.0/LICENSE-APACHE000064400000000000000000000261361046102023000132140ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
knuffel-3.2.0/LICENSE-MIT000064400000000000000000000020521046102023000127130ustar 00000000000000Copyright (c) 2021 The knuffel Developers Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. knuffel-3.2.0/README.md000064400000000000000000000124421046102023000125420ustar 00000000000000A [KDL](https://kdl.dev) file format parser with great error reporting and convenient derive macros. # About KDL To give you some background on the KDL format. Here is a small example: ```kdl foo 1 key="val" "three" { bar (role)baz 1 2 } ``` Here is what are annotations for all the datum as described by the [specification] and this guide: ```text foo 1 "three" key="val" { ╮ ─┬─ ┬ ───┬─── ────┬──── │ │ │ │ ╰───── property (can be multiple) │ │ │ │ │ │ ╰────┴────────────── arguments │ │ │ └── node name ├─ node "foo", with │ "bar" and "baz" bar │ being children (role)baz 1 2 │ ──┬─ │ └────── type name for node named "baz" │ } ╯ ``` (note, the order of properties doesn't matter as well as the order of properties with respect to arguments, so I've moved arguments to have less intersections for the arrows) # Usage Most common usage of this library is using `derive` and [parse] function: ```rust #[derive(knuffel::Decode)] enum TopLevelNode { Route(Route), Plugin(Plugin), } #[derive(knuffel::Decode)] struct Route { #[knuffel(argument)] path: String, #[knuffel(children(name="route"))] subroutes: Vec, } #[derive(knuffel::Decode)] struct Plugin { #[knuffel(argument)] name: String, #[knuffel(property)] url: String, } # fn main() -> miette::Result<()> { let config = knuffel::parse::>("example.kdl", r#" route "/api" { route "/api/v1" } plugin "http" url="https://example.org/http" "#)?; # Ok(()) # } ``` This parses into a vector of nodes as enums `TopLevelNode`, but you also use some node as a root of the document if there is no properties and arguments declared: ```rust,ignore #[derive(knuffel::Decode)] struct Document { #[knuffel(child, unwrap(argument))] version: Option, #[knuffel(children(name="route"))] routes: Vec, #[knuffel(children(name="plugin"))] plugins: Vec, } let config = parse::("example.kdl", r#" version "2.0" route "/api" { route "/api/v1" } plugin "http" url="https://example.org/http" "#)?; ``` See description of [Decode](derive@Decode) and [DecodeScalar](derive@DecodeScalar) for the full reference on allowed attributes and parse modes. # Errors This crate publishes nice errors, like this: 
![Screenshot of error](images/error.png)

Here is how the narratable printer would print the error:
```text
Error: single char expected after `Alt+`
    Diagnostic severity: error
Begin snippet for test.kdl starting at line 17, column 1

snippet line 17:     }
snippet line 18:     key "Alt+" mode="normal" {
    label starting at line 18, column 10: invalid value
snippet line 19:         move-focus "left"
```
To make them working, [miette]'s "fancy" feature must be enabled in the final application's `Cargo.toml`: ```toml [dependencies] miette = { version="4.3.0", features=["fancy"] } ``` And the error returned from parser should be converted to [miette::Report] and printed with debugging handler. The most manual way to do that is: ```rust # #[derive(knuffel::Decode, Debug)] # struct Config {} # let file_name = "1.kdl"; # let text = ""; let config = match knuffel::parse::(file_name, text) { Ok(config) => config, Err(e) => { println!("{:?}", miette::Report::new(e)); std::process::exit(1); } }; ``` But usually function that returns `miette::Result` is good enough: ```rust,no_run # use std::fs; # #[derive(knuffel::Decode)] # struct Config {} use miette::{IntoDiagnostic, Context}; fn parse_config(path: &str) -> miette::Result { let text = fs::read_to_string(path).into_diagnostic() .wrap_err_with(|| format!("cannot read {:?}", path))?; Ok(knuffel::parse(path, &text)?) } fn main() -> miette::Result<()> { let config = parse_config("my.kdl")?; # Ok(()) } ``` See [miette guide] for other ways of configuring error output. # The Name KDL is pronounced as cuddle. "Knuffel" means the same as cuddle in Dutch. License ======= Licensed under either of * Apache License, Version 2.0, (./LICENSE-APACHE or ) * MIT license (./LICENSE-MIT or ) at your option. Contribution ------------ Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. [specification]: https://github.com/kdl-org/kdl/blob/main/SPEC.md [miette]: https://docs.rs/miette/ [miette guide]: https://docs.rs/miette/latest/miette/#-handler-options knuffel-3.2.0/examples/simple.rs000064400000000000000000000014051046102023000147350ustar 00000000000000use std::io::Read; use miette::IntoDiagnostic; #[derive(knuffel::Decode, Debug)] #[allow(dead_code)] struct Plugin { #[knuffel(argument)] name: String, #[knuffel(property)] url: String, #[knuffel(child, unwrap(argument))] version: String, } #[derive(knuffel::Decode, Debug)] #[allow(dead_code)] struct Config { #[knuffel(child, unwrap(argument))] version: String, #[knuffel(children(name="plugin"))] plugins: Vec, } fn main() -> miette::Result<()> { let mut buf = String::new(); println!("Please type KDL document, press Return, Ctrl+D to finish"); std::io::stdin().read_to_string(&mut buf).into_diagnostic()?; let cfg: Config = knuffel::parse("", &buf)?; println!("{:#?}", cfg); Ok(()) } knuffel-3.2.0/images/error.png000064400000000000000000000753521046102023000144000ustar 00000000000000PNG  IHDRYgAMA a cHRMz&u0`:pQ<bKGDcaNvp\QDzGIDATxg\,{G(tA(%5k5jnQMԛ%&Fo-1K `AAPiҤtX`y^uq lc}?8;;;̜333?,dA`޽{8qi޽[SSflA>+Y}ܹҒ޼y9Ю B۷-g@t_666%}.{n +DFFFGGR ܰa޴iӝ;wpHgӦMT?qF7=gg &xxxPULUvEllǨQ!ZJJJWΓy˨HX|6nٳgZZZd ҥKǎK'N I&͟?fd߾}}qvv&,_<33s_2gFt_uuu_|ENN+..233-j# `]š]<ouuu==5kt.utt\䤯m>%Ԏwow޽{n7,Yp!J2k,:\[[+N^^27B?s#Gtt#T|Y]]M100 xxx$&&HOI>%4 SO !7nؽ{7D~n޼tӔ,E-ZcUdjjJҐ!*nTɓ'?m>jTG755!CT#~~YHaaa}555a͵7oތmhhqf/ia+WlwӋ fUUU/_~L=//gϞ%&&9s9И ptt444$TUUQo5.b``Q[[ q8ڢ_pB}݈BHcci!...&Mrww766nll,--{KKKϝ.w;wR>/^\UU%rMH,/MM1c;88hhh}'N8֦"?~#S>mڴǏ?~\!;v.uDDDxK :{CC3,k±M6),4446l6lbIEG ]Fg |gg}6fرc?[Vϲ*%K,^X V`p8fZ|N8 N2[oVXXH<<<ӧtYY١C$͌ř gq8ϲ=U:p86mbL.^_vvv>b~!UoFSKMMx @`ljhlMNhh|2ëWoXPPРAtkkkllljjjuu5իň#!\.>Xdo3gbpB쪪bbbZ[[mllFaooO5fqS5Ϗ~آ"6}-ZDSRR!fffT?w6p¬,y޿E/cccZZZ#SNIS^111#FvkܸqǏ_~7 z+<</_nMfccc Y,Sxx8̙3޽{rJ&/zdɒ˗+Xe˖~ܴLWVVVKKUHH@uԩǎPx, &׾`r=)S0՜gϞWWW___~}EDDHh8a„ 
&|K`F,/UoȖ>%NQxZZuJk'+-wP0 =MM\B$''59ψnXa5jȐ!tz׮]W^X!**jʕTjcc-p~~~~~ >x`۶m̍>}zѢE믿.. ~E\~ԩS7ntwwWX/YBZ[[+gΜϞ=O?1ûs[ӧ?. 80##>c<{#G\^,kƌT޽[ o//?{ƌ7::z޽tׯ=c z0 &:u︺~ H;!D&r'MD<~oƼ6l"Ei*X^*xߐ-4 驇ǵk-wP0lr<3󻗗DGO/jkh|L-|7.|B-?teIIp,Nikk;x๗Qzzz۷o|}>}J-d۷K~/L۷/>qȆz7oj/tC qջwo##樨(s=Ur>}Nx]vUA; d[!3f̰ƳgϦҕftttˋrر$*m``Я_?WUUQ %o3==][ْKJG%/rJ9ᣖ Lݻ.] 'xyy577_. ʝy0/%p !d#!DG֯_okkݯ_?GGG.cs6=~x]]]v촆;vH&=]O0gf .dff 7nffFlH5kΨfDž뫛]PTMOOO݋چ±xla!!b-$LttXmOYXXXXXHfmll8fzDKCCE0LJav8͘@étUUڵk={&rMq>ٖOHkllljj244~>S8.\_@Aw\\,N9=??d:l0d( {¦s>dJB_~s?C\ɣGMϟ?_Qioooj2)1Q h'KCW}a-\_ʺLZcqG=k)?#]VWNrw߿=[<88X`,+$$D1@%944jV^M=Cx!sVv211ӏ?&T r755}wtee'|~DD5;s)*siq\*uP1c{LLL٩>,™4i҂ !666*21ߛv\d,N)=}_Z!$*77Jٱ8Si m?TGNN>tЀuz-4F9!{T~l@IR.($$ϯy9n4 }ٲeXollܿ?DWWwٲeR-*r47YN(b6fF)@㜨RW̪qê%MmzeUx7HHx?!!V?>|ڵ-II>[$n5 [P2;__~\.WKK}Ѵi$L'2*%cζ!n^sCTɓ&MwK3f 0ib1[M[[{bm>ϏJߺu~sڵd*G7,,,L ǏOʰ޺uk)PNnllűg{HL]SS~zoSEP]]-2q<,e];vVVǕA?U1c ZZZk֬Q4@})O<a|>yNʪA qƐ!C!666˗ڸ\Űa7Udk;;;KtttG<%ddd0gP>l͖"Wػwof1KK]v;wT/+n{ٹs'w zjqq1211c(yrpСm۶Q^{5__اO0رcjjjxq!\.?sNRRRYY6558p`@@9{䁺T}ɒ%tkÇ ,++;rTOOoٲe[nc&Lطo5]kkUHHqETToqY<Ô:|rkk낂yyyr:g͚xʕ"r䈼=--m/^ZYY͜9SO믃 ^x+,,vL=$$$00J?~9s&44jA/ahooo\\KNa~~~bb"NOOoDFFLDV LuLݝ^*~}Byf}Ç%,|)~;=(rS**ׯo Jkk믿zutuuu?ٳ.]qϝ;wժUN<---ݲeKXbbb>Jq+477?~|۶m"[n?Gٵklܸ~`*,33?fXэt YZ[[[… %֭[]RRR>S硬gy}Z[[7o,iMYY֭[#K~̶(0[Hn+߃*wP$Ԏ+ȳgV^=xaÆPç477Ĕ]edkK,8q55!RRR-Zlhhkkk>}9oݺu޽P__>}r8ڂ _rUQQnݺSU,&+++>>>!![XXt3+dΩSĭE500XdŭfWX1~CijjVUU=~8..Nؠ;e~|GF vtthG1^?<w,Q t}}ww%nڴW@W7uc:;;[<///FM%Z[[c_}YJ7諯$4#oG"[]gg[}/>] rO}fᆱ~P`^I؎~?lmhPQX}Pa1'NJJRC8tXͣG6l`.10+:$͆ s/?_I[[̹٭Ǐ뭧BoՋ:-ⶶ6ȑPm:f +}<9\##~SSQTTÇ^̆5 ~5~S@{!*,,dV$fggiOWtTC8V%'3OV@ /1}:9^ѣ&K,Y"0_>=|ּy՞V?46UVlPBY#1OS 1~?_:yRhnn޼y8p@֊pFA+++o߾8(eŨtl9k@4R[{s tD9::ո̗YQPop %z g% OUul\Np!3': C766җ/_q\  KKKG3ZT\FU35xQqf ,ŵ(K DlIѲbV{?/tIxǕ2fK/"C@+߿/TrGJSy5ʰM__znۖIetSzpfCsBH}~>XS\LMM}^6JOO/,,D !W 6BJKKo޼Y[[ۣh?kY}Rpf99U))/RRjj33ҪZZuLJPϘujV.ݻx&15uzU<`r~35KJ|z& MlO>Lmk6=sN TWWXR|i}uq|"\ۿ]_?>/O`z?˭Ǐw1WBHSY//f~m7ޠ_3&5ٹsUii;; z1?6Rq'zK=ܹK*ܡp ]]#"د6.OHq 0^hŜo,.ؾczÜ9On_N+@NNcҊؚAAÇ7s 9@?0 O %EEUݼy {{˗7+if"mmaÆ4J3RT@!W]]]BHiizN[KKc̊gww׮ e}v^Μ){ׅo/%v2QXw  WǏo,.gZZƏf I: 6lH|M ^(e)Z?y3vذj(x\L? 0}}'zW[Sxn.܆;رyR HǏ Z䬬Bl̎/>=~+*u\*i,.75SVwܕ+W3 bnnnnnnÇy<d m#@"22rĉSN}Z[[塡-npΝ;ydetttddd:q№VGҥK pB)//%%%.;772[ncq-www_v1!$;;ٹ[())9vXEDDPG/=NX,dȖ*&M7o!9sѣ[G7RSSs%hii-ZSZZ醆dȖj+V2d!o~z'Kiذa/^SDե̚rPp/tpp رHnԨQVL RpҒrŽ{ʻ1m !nݪƩ"1zdȖj<o߾}#GR T \V8nݺ.ndzl !UUUwy0ZdȖj]ʼnt |}}}MLL!/_Crtss32dKu+b@W)B>^ ӳCjkkBbccP1o<*}M6Ym18tT ?XdVVV"=HyRdV q 7Bli)1b!]zRT)ḷ!ի (xspz]]]dVO ǩ8 :qB1kutt! [=" Azq s=" rK.a: W="T|2:U@ThVN]]@T6!6<<\ݴb uqqqrr"`h(G555! [*XB <>Lwc&N kkk: dBhjjBQб˃͞5k2''yB'N6y<3hȑ# - TADDرcbiTqA848 PJp@i( qA848h Clll444'p5lذ~kd4>.nݺdl8oɒ%;;#GRs]p E*s8sNsi&*߸q2!sΞ=3AVT7766^v{Я_>֖nС?CJJ NU)᥂-]tرTA!‚N_r"C*]ؘ-~\ZZfnnbeee˖~y\:::}a.qrrׯAzBHmm-2DT14iҼyH̙3G:[[uq8.!!Ç>%Kܻw@| /N諫 !DqٿB "r%j{CCѩÙ +kkkXbȐ!T7ׯ_v~mS۷os ۸q--E-] gLQMSX,PB8!πrG˽N"!/b񜜜?؈?!$%%Ed,N),,f[YYI^ƆJ<{ t{G[[[qWZE D3333339sݻ߿/111t8>|pxpp0wе8{H@TˣRϷ#xzz* We2gڼy3~͚5wY~=ݸǧ_~Wp? ]h@u]Ϛ5k޽^k׮erF'Hw:Urݲe<$$֭[;wl7Iy1;vp]]]''''';Ϟ=[݅iӎ?~q.S]s44V- ǻSgG?lذCIg̘1Tyw=''GjG^t)sp(&&&}ٞ={DA8R^^*זmOYjj*3M,ӦMb ٳl@I;[l)S̟?_[z)ni&777:88|ǿ4bggG"|Ɔ 8S@@ >S LʋŅ3q܅ r\ɛydɒqI/g֬Y|Mϼ0*.nJgcǎm޼wݳgȻ㽽 !G! 
=z4~E֧OE_9)))IIIeeeJ?f/\0+++33Sx#/^N8!nfqqq ˑUG}}}'''*MHKK& z,cM0ϏJM(J%ӧ¨,rvv0aBrrr\\3j3___#8pܹ˧O^r,dmmBs hʔ)X<'''..ٳgl6yԨQLӈz()IgZ>x`;;V#FS+xyy3F*/iӦBNf3gν{Us<5j3MMM}}}\\8BzAҨڵkr=1kU9ej󐔔DIHH믩P>~}G6(͝;}_N5kө-̛7oժU{r'MD<~oFodž HZ2\\|mۘ:}Ex_ˤ8ott޽{Dׯ_?yu먁8osN9,kƌT޽[o//?1cqlBdG=qB[ZZ|͑#G4СCT???9s888Bbbb"##18tܹs!!NYUF۷/>q7oo߾P߆VVVݨdU^L\.733Sx<ޮ]Gl`` '硑`<**Jd߻w/**7ߤە ijj:KtReee˓چmmmNMM]xիgϞ]Ġ_{5*Q k .[-&&'xxx_p~l:|ppח~NEJKKqf9(Wcc#i9 ǣ2a̙3tM9n555W\/X^Z(W||v|>?&&_^8Uy 8{^'O&hjj:99youݣqBbqtR ޽~39 )wYn֭[ 6C455}*[ }x<***x<<>E$''WVVRc>8p@ƈSwhp344 t?>5a,2;C'Oĭ DA422ǏKex,z atZ*/"[nHA__'\UVVhV ss8ɓ'O>}>?h ++#G9sCB7gzp̨( _rmmm*b<|>?66_'޺u~W^tyJJJzȬ{Ǚ/===f*oҥV1422R\]5)OBW0f5Fzi S%!7dU^$tfnJFfX= xٲeϟGnȖچׯ߹sȑ#/{gg9s,X `ǎ̋ޞ4660a.**_%wGFGP8!dĈp< M]KxQQϙoUTTS%v1ǶP|Kpx̋Evڭpu5VdyIK555Oqyb9FNΜ9s e^,,,榥x⦦.3744/lllx<޷~||iӦÇ/[?_~=&L@L~-s(,,LOOfvե;6J4662oc08F 77OT׭[Gw|QQQ~)UE5+WU :A=+W477o޼Y/^OWXuV jfKx"s`m=ԍ7W\Q] !MMMñ511>WA"ۜر*u"P.m۶;u70]pQ_1AJjPOg4xYYwz^R]+,, ﭷA;c ???*QTT$ z3ff͚v1:Ey 3n81#֭[;Z-B8.?jX͛ruuϿ曼PQ$TWzZYYI>ׯB,--wu9flM|x֭T'ӧQ1uHH`:tHF/:J5kV``+W8QCBB$Sjjjxq!\.?€26mjj:psYv\~T( ^N7r4m_|񅓓FLnnn%;;[ʱfhԠmmE}«ݻwٲe*n #GHߓ ݺjC+ 4ѣGLvpp 8q'&&Bϟ/sL^jjT͝;Wk?uww^:;; 4P,**zjḜS*_ dccC0::CḬ?*9 *XB Ow-ׯ[n֬Y'Nd­|~TToֽ&2q!CPc2«Ĕ\XvO:u1thmm-((qwڵn{tqf[q,ri-[lܸ9C'|fffUŋժz߾}tCq͛׮]K`*++ۺu4#jjj6nܸn:!m۶ubKYTSuaaaD PgYZZB^xQRR(4x<?@W/Z(%%Ed[E8;;ڧOvtw333juBHRRRw|bH㹹?ד:d)q/Ydĉ̱rWSSG5*88:e+>>֭[ιTR]]{K.ISQg뷴$&&;wAp\)KUTT[n...,&+++>>>!!KZ(fڵkWBeaR%9s̙3=*ajN:{l*u[n!O@tuuG!I~~ŋKJJ Pqu5b*Q]]-n`٪駟vvvvthz_at''LHH$0.]b.5 q'sLbbbFmooۉ> 8@\54tQQ ŋ'O>ʕ+۷ookkC@W/^ܹPJp@i( qA848 PJp@i( qA848 PJp@i( qA848 PJp@i( qA84FY0jrd~'ii 3tq0wfBt54Vw.r p:l #Ҧ0lD^$3ӅfzMH,oj2Byu:mX,2 2p:&>d~> p:,zҵkIϟ׵|h3tڎH⨣L{x&|𡞆7lY^p@<!QWY|)0 @zh ڽ/ ڐ *о)St5$}elMOwj0kiOB}G[Yڿy򤸱Qx c_65YEEI+o50rrpaamdd֫]ܜSWwקOoWVv|jk2QWWΫ))QN]r6u*%ɮFkhKKw^!<ޣCO^WVF 57GVHKSC @{ك-sqsϗKKkZZ-z71ȍS/kZZbKKWcX+4`@l֫Y^&&_DBPYo;8a\7/ CyIE}np˭B?0`NMmRO)P$4V+ BH"r:/2X_C㏠p <>lQsIfm|yHJf j@u㭭GF 7 @Ajh lo6@yII:;lAbq&6_p.w5p@,xPP2a߾F 6u_`NJBiI R>xwoiF[;jPNwǼW}Vo߆V K_ 4^|0y< uh\Mii=<+jd쪯?ѱ+ۜhcCI(/O(+kmk56keŌ3ttfe~gϚ|:Lddd__/q.=ʊ~L`G]ݕ}24?iNazβӧ*ϋG=f53 63{ںϗP++]^21WW={R[欧7ʌ4c[@zǕ@__۷o8pN>w#<~,dUWq] W=i' ff0ckkpRI *q,퇘1Ėִ3x1ՂzY^1Lk$|jqN]L붶'Sê%xtq7%zG'0pI> (5kFahhhhh8bĈ5k OT׏B>0-enOV3'$ 16@#OxupPBs[|3#75-s12R\pu텷ogZs[Q#p K!u--ndℐږ7o1~ 61ё PԎ+s3r ~$bMlXXn]"jz UnG^Pc@+J[XjhԽZ:1 ?xN>a4WHȮbV!?.v4X66Slm} 44cfʉp8̗gKA N+ZAAAFF$--֓r~x*/_fBcƝhj?jP^ WWWWWW_remTU/ 54Bnӫibbn~94 @P;/^عs'+Nz/'X[SdNZ栊VU LB6u*ru&Ζ- myCi𺭭2׮^]Y$E;8tKw_xe5*H zaQCC"?>qegբݠ@p!#WկZ[/{E>Ba 8tWCpYX׷cԡ.*jk3_k8thQjX 2v::"nef%0@:}^gq 6"0uT C ↰CS* đVȬ66 5t;;W+,0ՏB3"f:fLرߍp tN@wP^άnjf e^47ǕlɫͬpuuYW+RzuFEZ ϼ%ՑIyvͲ>}P^],/N72jU~%fH巵E3jyiq8 Sybfwn M%.憚g̨@ɫl}l#9)gϚ//~ih4rn=^L]f>:⟒--=taQEERY/tuƌ**JdLM[X76r ~u07w; !MMG\Pn|F>zzcƜ{,b3ڌQ?}I vlqwowFUㄐ? bYii1ߒ==qCVK(/իC] z_W=5֒RᝬyOV WW#.^ҋ͞ڻTFwd%u*-~\gX++B!>!$]\b hvuL^X %%̦&>_ ޕf>ثw^"YJܾ-v*zUk{FoyyҬ?;'OĽ=ɇO;]X|n8tc%\*)17?)Ix9J]Kˌ7wb2kkrČ9f>Arrʋ(W!o߼:;_ҲE OHXsaZ[7λ)BIiёO8:[XxkjB*WU]()9[! (Y3g ! 
,PJp@i( qA848 PJ;pΝ;ydetttdddrCBB !UUUى8qvv0a%%eee ,OTzĉܽ{-T*& Ǎ׮]-wݺuI133333 5k֎;?~I&͟?SMҥKǎK4 ׮]kllLvvvBBBVXr !III%%%mmmVVV_|7|T\@@!Pp|ҤIp83g=z~/_r~#FXd˳ p**5kKNNX'//"|߯#f…8eIqmm+V 27\~]KKl۷oOJJxҥK[lWZST>ƍwFF;~( q6TjA/X<''?~z'6Iuiҳڼy3I~͚5wY~=ǧ_~Wp].ZHOOO*6335k޽{ HぁbqV8pUao̙w~+3{gbb\hlllll>>"?W^^.|1Eroݮ]X4Hj&&&6mڸqȾ2'R4w|y ujVWWSbeddD =mm>}߿:,,Ϗ/999..xƌ___}}}lsΥ_>}ʕ+Tkk됐'''ꭹsfff Wp655ݾ};((jjjZQQ!r9e/N>}-ZD.)))IIIeee ˋf.\%EEDDГ0a„ D%]GҿE/cccZZZSN$ cj>߫W/ #FPr?%K(^u4w|y(< qӦM"7"9vԌϟ?wppҒRKKRZZS@z[lF.JHH믩>>fsұ-:uj֬YӧO0o<&&&R8!dϟoB!,YBfimmꫯV;s׮]r\ŋW^- K&L?>5!ٳ?trܹuQy8}k׮W rJ9;w۪ӈBug:W(hjF 4ooo#aPE11ɽ{D[[۷p<))镩dL{vuuw9| mmmGquu\]]]{-Y3))ŅtKfKO8!2dyɓ'iPkggnqU>ںQ锔Q֭[G3g!D[[ʕ+;4;;;*QRR"SvAIj^~̙3g(l7|V|5 ǯ^oBfΜy q Q gΜI޽S@z/^P %<C{y pXWWw}oooBY߃DN/\ nbbbY =<<DFFFFF2thy3K_%GLL B'pW^Ty}z*Y!HxQQ-[#rCCO? ??ɓ'89 ٞUHT8pVprr4 T<O\tBHEEyަȣo7MV#q۩ Ъ"X=@sD={ٳӷnݢ> - !(l U/!id{>g8ŋ 6lڴ`Μ9SJRR]yyy '$w|KxmUUUyRXzzzUUȍX,*a`` n^q ,v  <877yyyʺ'P@]xyyy+W={vhh(st䲲SN-^o1@wHZZZ^^^nݢ255uvvTٳg~˩f;;;eM)))IJJnw^XRo^xw}155mnn.--۳g!$;;ҥK8 _T:00-U!Z\tt4z)@lllQQѼy=i&L?~|ttZZZc6ƴ4YfQ8p@E@WeggS5l6Soxff&5d ;v"ѣGׯׯ 56f?^WWw׮]=0p>t =nrx{{qBSSSqׯSḁ[GGOܘ*0Y Vaaaaaٳg !l6fcƌ' =|zzzOf?еDb2[̏c +J%2zIs'ӡ̷sJ/ Ο?2{6\_z>2dРA3g<{ ,ŭF+B* چR[[[["Y3s$ !^O>O|M.w |,=㚚'TWW755i S=VO MMMNJü?JWXXAz-oƌ~~~T:##H6cx{{{yyQ钒,ɟ-(('$$d8;;xw|fns…M MFcccʕ+B455W^mllLxȚ|kkk[[[[[۹s犬'tɓnwNshtC2ُ?(C(ÇnJM8}tj~{* tf'N$_GGZxMi>gϞ;wR;|bebbKǠQQQ̉{B>:th۶mv^{5__اO+;vLC?#,,LOObFeffFY]]HEEō7 B/_U]]r-,, ֿj+WtkKK]v;w҄OQ9%%N/_ں@n^^zSp\WW˚;JLiii,0`@HH!$))y%JMM=x sfa3''g޽˖-"NWWWq99--m|Ƅ++3g\3>>SQQs΍7RvKK˷~[x5_H~nnnTU;vv?z_/K.H2ӣu \ZsP8nllb +xzz ?ڽ{pb-\?T_UYYxb*V]]o߾)7ê*CCCzIUUMĔ\n,!ԩSǎ{ϟ///_l9qĉ'$ջsƍWZEVXXW_eggKׯ_zj ]T !G=qD7FJKKlٲqF t733\v=7V3f (8h"oݺHչo߾tRcc[kkkKLL;v,$11C8,Z(,,, А>}4%%%66=9oݺu޽P__>}r8ڂJixb*]\\_PW^UgV^=xaÆP}C RSS[_#K,8q555}ѨQѡ`̙QQQ@fP8 PJp@i( qA848 PJp@i( qA848 PJp@i( qA842YZZ"z2QWWG@VdǕ#''r = Spanned>, S>; /// KDL names with span information are represented using this type pub type SpannedName = Spanned, S>; /// A KDL node with span of the whole node (including children) pub type SpannedNode = Spanned, S>; /// Single node of the KDL document #[derive(Debug, Clone)] #[cfg_attr(feature="minicbor", derive(minicbor::Encode, minicbor::Decode))] pub struct Node { /// A type name if specified in parenthesis #[cfg_attr(feature="minicbor", n(0))] pub type_name: Option>, /// A node name #[cfg_attr(feature="minicbor", n(1))] pub node_name: SpannedName, /// Positional arguments #[cfg_attr(feature="minicbor", n(2))] pub arguments: Vec>, /// Named properties #[cfg_attr(feature="minicbor", n(3))] pub properties: BTreeMap, Value>, /// Node's children. 
This field is not none if there are braces `{..}` #[cfg_attr(feature="minicbor", n(4))] pub children: Option>, } /// KDL document root #[derive(Debug, Clone)] #[cfg_attr(feature="minicbor", derive(minicbor::Encode, minicbor::Decode))] pub struct Document { /// Nodes of the document #[cfg_attr(feature="minicbor", n(0))] pub nodes: Vec>, } #[derive(Debug, Clone, PartialEq, Eq)] #[cfg_attr(feature="minicbor", derive(minicbor::Encode, minicbor::Decode))] #[cfg_attr(feature="minicbor", cbor(index_only))] pub(crate) enum Radix { #[cfg_attr(feature="minicbor", n(2))] Bin, #[cfg_attr(feature="minicbor", n(16))] Hex, #[cfg_attr(feature="minicbor", n(8))] Oct, #[cfg_attr(feature="minicbor", n(10))] Dec, } /// Potentially unlimited size integer value #[derive(Debug, Clone, PartialEq, Eq)] #[cfg_attr(feature="minicbor", derive(minicbor::Encode, minicbor::Decode))] pub struct Integer( #[cfg_attr(feature="minicbor", n(0))] pub(crate) Radix, #[cfg_attr(feature="minicbor", n(1))] pub(crate) Box, ); /// Potentially unlimited precision decimal value #[derive(Debug, Clone, PartialEq, Eq)] #[cfg_attr(feature="minicbor", derive(minicbor::Encode, minicbor::Decode))] #[cfg_attr(feature="minicbor", cbor(transparent))] pub struct Decimal( #[cfg_attr(feature="minicbor", n(0))] pub(crate) Box, ); /// Possibly typed KDL scalar value #[derive(Debug, Clone)] #[cfg_attr(feature="minicbor", derive(minicbor::Encode, minicbor::Decode))] pub struct Value { /// A type name if specified in parenthesis #[cfg_attr(feature="minicbor", n(0))] pub type_name: Option>, /// The actual value literal #[cfg_attr(feature="minicbor", n(1))] pub literal: Spanned, } /// Type identifier #[derive(Debug, Clone, PartialEq, Eq)] pub struct TypeName(TypeNameInner); #[derive(Debug, Clone, PartialEq, Eq)] enum TypeNameInner { Builtin(BuiltinType), Custom(Box), } /// Known type identifier described by the KDL specification #[non_exhaustive] #[derive(Debug, Clone, PartialEq, Eq)] pub enum BuiltinType { /// `u8`: 8-bit unsigned integer type U8, /// `i8`: 8-bit signed integer type I8, /// `u16`: 16-bit unsigned integer type U16, /// `i16`: 16-bit signed integer type I16, /// `u32`: 32-bit unsigned integer type U32, /// `i32`: 32-bit signed integer type I32, /// `u64`: 64-bit unsigned integer type U64, /// `i64`: 64-bit signed integer type I64, /// `usize`: platform-dependent unsigned integer type Usize, /// `isize`: platform-dependent signed integer type Isize, /// `f32`: 32-bit floating point number F32, /// `f64`: 64-bit floating point number F64, /// `base64` denotes binary bytes type encoded using base64 encoding Base64, } /// Scalar KDL value #[derive(Debug, Clone, PartialEq, Eq)] #[cfg_attr(feature="minicbor", derive(minicbor::Encode, minicbor::Decode))] pub enum Literal { /// Null value #[cfg_attr(feature="minicbor", n(0))] Null, /// Boolean value #[cfg_attr(feature="minicbor", n(1))] Bool( #[cfg_attr(feature="minicbor", n(0))] bool ), /// Integer value #[cfg_attr(feature="minicbor", n(2))] Int( #[cfg_attr(feature="minicbor", n(0))] Integer ), /// Decimal (or floating point) value #[cfg_attr(feature="minicbor", n(3))] Decimal( #[cfg_attr(feature="minicbor", n(0))] Decimal ), /// String value #[cfg_attr(feature="minicbor", n(4))] String( #[cfg_attr(feature="minicbor", n(0))] Box ), } impl Node { /// Returns node children pub fn children(&self) -> impl Iterator, S>> + ExactSizeIterator { self.children.as_ref().map(|c| c.iter()).unwrap_or_else(|| [].iter()) } } impl BuiltinType { /// Returns string representation of the builtin type as defined by 
KDL /// specification pub const fn as_str(&self) -> &'static str { use BuiltinType::*; match self { U8 => "u8", I8 => "i8", U16 => "u16", I16 => "i16", U32 => "u32", I32 => "i32", U64 => "u64", I64 => "i64", Usize => "usize", Isize => "isize", F32 => "f32", F64 => "f64", Base64 => "base64", } } /// Returns `TypeName` structure for the builtin type pub const fn as_type(self) -> TypeName { TypeName(TypeNameInner::Builtin(self)) } } impl TypeName { pub(crate) fn from_string(val: Box) -> TypeName { use TypeNameInner::*; match BuiltinType::from_str(&val[..]) { Ok(b) => TypeName(Builtin(b)), _ => TypeName(Custom(val)), } } /// Returns string represenation of the type name pub fn as_str(&self) -> &str { match &self.0 { TypeNameInner::Builtin(t) => t.as_str(), TypeNameInner::Custom(t) => t.as_ref(), } } /// Returns `BuiltinType` enum for the type if typename matches builtin /// type /// /// Note: checking for `is_none()` is not forward compatible. In future we /// may add additional builtin type. Always use `as_str` for types that /// aren't yet builtin. pub const fn as_builtin(&self) -> Option<&BuiltinType> { match &self.0 { TypeNameInner::Builtin(t) => Some(t), TypeNameInner::Custom(_) => None, } } } impl FromStr for BuiltinType { type Err = (); fn from_str(s: &str) -> Result { use BuiltinType::*; match s { "u8" => Ok(U8), "i8" => Ok(I8), "u16" => Ok(U16), "i16" => Ok(I16), "u32" => Ok(U32), "i32" => Ok(I32), "u64" => Ok(U64), "i64" => Ok(I64), "f32" => Ok(F32), "f64" => Ok(F64), "base64" => Ok(Base64), _ => Err(()) } } } impl FromStr for TypeName { type Err = Infallible; fn from_str(s: &str) -> Result { use TypeNameInner::*; match BuiltinType::from_str(s) { Ok(b) => Ok(TypeName(Builtin(b))), Err(()) => Ok(TypeName(Custom(s.into()))), } } } impl std::ops::Deref for TypeName { type Target = str; fn deref(&self) -> &str { self.as_str() } } impl fmt::Display for TypeName { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.as_str().fmt(f) } } impl Into for BuiltinType { fn into(self) -> TypeName { self.as_type() } } #[cfg(feature="minicbor")] mod cbor { use super::TypeName; use minicbor::{Decoder, Encoder}; use minicbor::encode::Encode; use minicbor::decode::Decode; impl<'d, C> Decode<'d, C> for TypeName { fn decode(d: &mut Decoder<'d>, _ctx: &mut C) -> Result { d.str().and_then(|s| s.parse().map_err(|e| match e {})) } } impl Encode for TypeName { fn encode(&self, e: &mut Encoder, ctx: &mut C) -> Result<(), minicbor::encode::Error> where W: minicbor::encode::write::Write { self.as_str().encode(e, ctx) } } } knuffel-3.2.0/src/containers.rs000064400000000000000000000140101046102023000145560ustar 00000000000000use std::sync::Arc; use std::rc::Rc; use crate::ast::{SpannedNode, Literal, Value, TypeName}; use crate::decode::Context; use crate::errors::DecodeError; use crate::span::Spanned; use crate::traits::{Decode, DecodeChildren, DecodeScalar, DecodePartial}; use crate::traits::{ErrorSpan, DecodeSpan, Span}; impl> Decode for Box { fn decode_node(node: &SpannedNode, ctx: &mut Context) -> Result> { Decode::decode_node(node, ctx).map(Box::new) } } impl> DecodeChildren for Box { fn decode_children(nodes: &[SpannedNode], ctx: &mut Context) -> Result> { DecodeChildren::decode_children(nodes, ctx).map(Box::new) } } impl> DecodePartial for Box { fn insert_child(&mut self, node: &SpannedNode, ctx: &mut Context) -> Result> { (**self).insert_child(node, ctx) } fn insert_property(&mut self, name: &Spanned, S>, value: &Value, ctx: &mut Context) -> Result> { (**self).insert_property(name, value, ctx) } } 
impl> DecodeScalar for Box { fn type_check(type_name: &Option>, ctx: &mut Context) { T::type_check(type_name, ctx) } fn raw_decode(value: &Spanned, ctx: &mut Context) -> Result> { DecodeScalar::raw_decode(value, ctx).map(Box::new) } } impl> Decode for Arc { fn decode_node(node: &SpannedNode, ctx: &mut Context) -> Result> { Decode::decode_node(node, ctx).map(Arc::new) } } impl> DecodeChildren for Arc { fn decode_children(nodes: &[SpannedNode], ctx: &mut Context) -> Result> { DecodeChildren::decode_children(nodes, ctx).map(Arc::new) } } impl> DecodePartial for Arc { fn insert_child(&mut self, node: &SpannedNode, ctx: &mut Context) -> Result> { Arc::get_mut(self).expect("no Arc clone yet") .insert_child(node, ctx) } fn insert_property(&mut self, name: &Spanned, S>, value: &Value, ctx: &mut Context) -> Result> { Arc::get_mut(self).expect("no Arc clone yet") .insert_property(name, value, ctx) } } impl> DecodeScalar for Arc { fn type_check(type_name: &Option>, ctx: &mut Context) { T::type_check(type_name, ctx) } fn raw_decode(value: &Spanned, ctx: &mut Context) -> Result> { DecodeScalar::raw_decode(value, ctx).map(Arc::new) } } impl> Decode for Rc { fn decode_node(node: &SpannedNode, ctx: &mut Context) -> Result> { Decode::decode_node(node, ctx).map(Rc::new) } } impl> DecodeChildren for Rc { fn decode_children(nodes: &[SpannedNode], ctx: &mut Context) -> Result> { DecodeChildren::decode_children(nodes, ctx).map(Rc::new) } } impl> DecodePartial for Rc { fn insert_child(&mut self, node: &SpannedNode, ctx: &mut Context) -> Result> { Rc::get_mut(self).expect("no Rc clone yet") .insert_child(node, ctx) } fn insert_property(&mut self, name: &Spanned, S>, value: &Value, ctx: &mut Context) -> Result> { Rc::get_mut(self).expect("no Rc clone yet") .insert_property(name, value, ctx) } } impl> DecodeScalar for Rc { fn type_check(type_name: &Option>, ctx: &mut Context) { T::type_check(type_name, ctx) } fn raw_decode(value: &Spanned, ctx: &mut Context) -> Result> { DecodeScalar::raw_decode(value, ctx).map(Rc::new) } } impl> DecodeChildren for Vec { fn decode_children(nodes: &[SpannedNode], ctx: &mut Context) -> Result> { let mut result = Vec::with_capacity(nodes.len()); for node in nodes { match Decode::decode_node(node, ctx) { Ok(node) => result.push(node), Err(e) => ctx.emit_error(e), } } Ok(result) } } impl> DecodeScalar for Option { fn type_check(type_name: &Option>, ctx: &mut Context) { T::type_check(type_name, ctx) } fn raw_decode(value: &Spanned, ctx: &mut Context) -> Result> { match &**value { Literal::Null => Ok(None), _ => DecodeScalar::raw_decode(value, ctx).map(Some), } } } impl, S, Q> DecodeScalar for Spanned where S: Span, Q: DecodeSpan { fn type_check(type_name: &Option>, ctx: &mut Context) { T::type_check(type_name, ctx) } fn raw_decode(value: &Spanned, ctx: &mut Context) -> Result> { let decoded = T::raw_decode(value, ctx)?; Ok(Spanned { span: DecodeSpan::decode_span(&value.span, ctx), value: decoded, }) } } knuffel-3.2.0/src/convert.rs000064400000000000000000000210741046102023000141010ustar 00000000000000use std::default::Default; use std::path::{Path, PathBuf}; use std::str::FromStr; use std::sync::Arc; use crate::ast::{Literal, Integer, Decimal, Radix, TypeName, BuiltinType}; use crate::decode::{Context, Kind}; use crate::errors::{DecodeError, ExpectedType}; use crate::span::{Spanned}; use crate::traits::{ErrorSpan, DecodeScalar}; macro_rules! 
impl_integer { ($typ: ident, $marker: ident) => { impl TryFrom<&Integer> for $typ { type Error = <$typ as FromStr>::Err; fn try_from(val: &Integer) -> Result<$typ, <$typ as FromStr>::Err> { match val.0 { Radix::Bin => <$typ>::from_str_radix(&val.1, 2), Radix::Oct => <$typ>::from_str_radix(&val.1, 8), Radix::Dec => <$typ>::from_str(&val.1), Radix::Hex => <$typ>::from_str_radix(&val.1, 16), } } } impl DecodeScalar for $typ { fn raw_decode(val: &Spanned, ctx: &mut Context) -> Result<$typ, DecodeError> { match &**val { Literal::Int(ref value) => { match value.try_into() { Ok(val) => Ok(val), Err(e) => { ctx.emit_error(DecodeError::conversion(val, e)); Ok(0) } } } _ => { ctx.emit_error(DecodeError::scalar_kind( Kind::String, val)); Ok(0) } } } fn type_check(type_name: &Option>, ctx: &mut Context) { if let Some(typ) = type_name { if typ.as_builtin() != Some(&BuiltinType::$marker) { ctx.emit_error(DecodeError::TypeName { span: typ.span().clone(), found: Some(typ.value.clone()), expected: ExpectedType::optional( BuiltinType::$marker), rust_type: stringify!($typ), }); } } } } } } impl_integer!(i8, I8); impl_integer!(u8, U8); impl_integer!(i16, I16); impl_integer!(u16, U16); impl_integer!(i32, I32); impl_integer!(u32, U32); impl_integer!(i64, I64); impl_integer!(u64, U64); impl_integer!(isize, Isize); impl_integer!(usize, Usize); macro_rules! impl_float { ($typ: ident, $marker: ident) => { impl TryFrom<&Decimal> for $typ { type Error = <$typ as FromStr>::Err; fn try_from(val: &Decimal) -> Result<$typ, <$typ as FromStr>::Err> { <$typ>::from_str(&val.0) } } impl DecodeScalar for $typ { fn raw_decode(val: &Spanned, ctx: &mut Context) -> Result<$typ, DecodeError> { match &**val { Literal::Decimal(ref value) => { match value.try_into() { Ok(val) => Ok(val), Err(e) => { ctx.emit_error(DecodeError::conversion(val, e)); Ok(0.0) } } } _ => { ctx.emit_error(DecodeError::scalar_kind( Kind::String, val)); Ok(0.0) } } } fn type_check(type_name: &Option>, ctx: &mut Context) { if let Some(typ) = type_name { if typ.as_builtin() != Some(&BuiltinType::$marker) { ctx.emit_error(DecodeError::TypeName { span: typ.span().clone(), found: Some(typ.value.clone()), expected: ExpectedType::optional( BuiltinType::$marker), rust_type: stringify!($typ), }); } } } } } } impl_float!(f32, F32); impl_float!(f64, F64); impl DecodeScalar for String { fn raw_decode(val: &Spanned, ctx: &mut Context) -> Result> { match &**val { Literal::String(ref s) => Ok(s.clone().into()), _ => { ctx.emit_error(DecodeError::scalar_kind(Kind::String, val)); Ok(String::new()) } } } fn type_check(type_name: &Option>, ctx: &mut Context) { if let Some(typ) = type_name { ctx.emit_error(DecodeError::TypeName { span: typ.span().clone(), found: Some(typ.value.clone()), expected: ExpectedType::no_type(), rust_type: "String", }); } } } impl DecodeScalar for PathBuf { fn raw_decode(val: &Spanned, ctx: &mut Context) -> Result> { match &**val { Literal::String(ref s) => Ok(String::from(s.clone()).into()), _ => { ctx.emit_error(DecodeError::scalar_kind(Kind::String, val)); Ok(Default::default()) } } } fn type_check(type_name: &Option>, ctx: &mut Context) { if let Some(typ) = type_name { ctx.emit_error(DecodeError::TypeName { span: typ.span().clone(), found: Some(typ.value.clone()), expected: ExpectedType::no_type(), rust_type: "PathBuf", }); } } } impl DecodeScalar for Arc { fn raw_decode(val: &Spanned, ctx: &mut Context) -> Result, DecodeError> { match &**val { Literal::String(ref s) => Ok(PathBuf::from(&(**s)[..]).into()), _ => { 
ctx.emit_error(DecodeError::scalar_kind(Kind::String, val)); Ok(PathBuf::default().into()) } } } fn type_check(type_name: &Option>, ctx: &mut Context) { if let Some(typ) = type_name { ctx.emit_error(DecodeError::TypeName { span: typ.span().clone(), found: Some(typ.value.clone()), expected: ExpectedType::no_type(), rust_type: "Arc", }); } } } impl DecodeScalar for Arc { fn raw_decode(val: &Spanned, ctx: &mut Context) -> Result, DecodeError> { match &**val { Literal::String(ref s) => Ok(s.clone().into()), _ => { ctx.emit_error(DecodeError::scalar_kind(Kind::String, val)); Ok(String::default().into()) } } } fn type_check(type_name: &Option>, ctx: &mut Context) { if let Some(typ) = type_name { ctx.emit_error(DecodeError::TypeName { span: typ.span().clone(), found: Some(typ.value.clone()), expected: ExpectedType::no_type(), rust_type: "Arc", }); } } } impl DecodeScalar for bool { fn raw_decode(val: &Spanned, ctx: &mut Context) -> Result> { match &**val { Literal::Bool(value) => Ok(*value), _ => { ctx.emit_error(DecodeError::scalar_kind(Kind::Bool, &val)); Ok(Default::default()) } } } fn type_check(type_name: &Option>, ctx: &mut Context) { if let Some(typ) = type_name { ctx.emit_error(DecodeError::TypeName { span: typ.span().clone(), found: Some(typ.value.clone()), expected: ExpectedType::no_type(), rust_type: "bool", }); } } } knuffel-3.2.0/src/convert_ast.rs000064400000000000000000000053141046102023000147470ustar 00000000000000use crate::ast::{Node, SpannedNode, TypeName, Literal, Value}; use crate::decode::Context; use crate::errors::DecodeError; use crate::span::Spanned; use crate::traits::{Decode, DecodeScalar, DecodeSpan, Span}; impl Decode for Node where S: Span, T: DecodeSpan { fn decode_node(node: &SpannedNode, ctx: &mut Context) -> Result> { Ok(Node { type_name: node.type_name.as_ref().map(|n| n.clone_as(ctx)), node_name: node.node_name.clone_as(ctx), arguments: node.arguments.iter() .map(|v| DecodeScalar::decode(v, ctx)) .collect::>()?, properties: node.properties.iter() .map(|(k, v)| { Ok((k.clone_as(ctx), DecodeScalar::decode(v, ctx)?)) }) .collect::>()?, children: node.children.as_ref().map(|sc| { Ok(Spanned { span: DecodeSpan::decode_span(&sc.span, ctx), value: sc.iter() .map(|node| Ok(Spanned { span: DecodeSpan::decode_span(&node.span, ctx), value: Decode::decode_node(node, ctx)?, })) .collect::>()?, }) }).transpose()?, }) } } impl Decode for SpannedNode where S: Span, T: DecodeSpan { fn decode_node(node: &SpannedNode, ctx: &mut Context) -> Result> { Ok(Spanned { span: DecodeSpan::decode_span(&node.span, ctx), value: Decode::decode_node(node, ctx)?, }) } } impl DecodeScalar for Value where S: Span, T: DecodeSpan { fn type_check(_type_name: &Option>, _ctx: &mut Context) { } fn raw_decode(_value: &Spanned, _ctx: &mut Context) -> Result> { panic!("called `raw_decode` directly on the `Value`"); } fn decode(value: &Value, ctx: &mut Context) -> Result> { Ok(Value { type_name: value.type_name.as_ref().map(|n| n.clone_as(ctx)), literal: value.literal.clone_as(ctx), }) } } impl DecodeScalar for Literal where S: Span, { fn type_check(_type_name: &Option>, _ctx: &mut Context) { } fn raw_decode(value: &Spanned, _ctx: &mut Context) -> Result> { Ok((**value).clone()) } } knuffel-3.2.0/src/decode.rs000064400000000000000000000172541046102023000136510ustar 00000000000000//! Decode support stuff //! //! Mostly useful for manual implementation of various `Decode*` traits. 
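// A minimal sketch, not taken from this crate, of the kind of manual trait
// implementation this module is meant to support: a `DecodeScalar` impl for a
// hypothetical `Ip` newtype that accepts only plain (untyped) string scalars
// and reports problems through the `Context` so decoding can continue.
#[cfg(test)]
mod manual_decode_scalar_example {
    use std::net::Ipv4Addr;

    use crate::ast::{Literal, TypeName};
    use crate::decode::{Context, Kind};
    use crate::errors::{DecodeError, ExpectedType};
    use crate::span::Spanned;
    use crate::traits::{DecodeScalar, ErrorSpan};

    struct Ip(Ipv4Addr);

    impl<S: ErrorSpan> DecodeScalar<S> for Ip {
        fn type_check(type_name: &Option<Spanned<TypeName, S>>,
                      ctx: &mut Context<S>)
        {
            // no `(type)` annotation is expected for this scalar
            if let Some(t) = type_name {
                ctx.emit_error(DecodeError::TypeName {
                    span: t.span().clone(),
                    found: Some(t.value.clone()),
                    expected: ExpectedType::no_type(),
                    rust_type: "Ip",
                });
            }
        }
        fn raw_decode(value: &Spanned<Literal, S>, ctx: &mut Context<S>)
            -> Result<Self, DecodeError<S>>
        {
            match &**value {
                Literal::String(s) => s.parse().map(Ip)
                    .map_err(|e| DecodeError::conversion(value, e)),
                _ => {
                    // emit and return a placeholder so other errors still surface
                    ctx.emit_error(DecodeError::scalar_kind(Kind::String, value));
                    Ok(Ip(Ipv4Addr::UNSPECIFIED))
                }
            }
        }
    }
}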
use std::any::{Any, TypeId}; use std::collections::HashMap; use std::default::Default; use std::fmt; use crate::ast::{Literal, BuiltinType, Value, SpannedNode}; use crate::errors::{DecodeError, ExpectedType}; use crate::traits::{ErrorSpan, Decode}; /// Context is passed through all the decode operations and can be used for: /// /// 1. To emit error and proceed (so multiple errors presented to user) /// 2. To store and retrieve data in decoders of nodes, scalars and spans #[derive(Debug)] pub struct Context { errors: Vec>, extensions: HashMap>, } /// Scalar value kind /// /// Currently used only for error reporting #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub enum Kind { /// An unquoted integer value, signed or unsigned. Having no decimal point. /// Can be of virtually unlimited length. Can be expressed in binary, octal, /// decimal, or hexadecimal notation. Int, /// A number that has either decimal point or exponential part. Can be only /// in decimal notation. Can represent either decimal or floating value /// value. No quotes. Decimal, /// A string in `"double quotes"` or `r##"raw quotes"##` String, /// A boolean value of `true` or `false` Bool, /// The null value (usually corresponds to `None` in Rust) Null, } /// Decodes KDL value as bytes /// /// Used internally by `#[knuffel(..., bytes)]` attribute. But can be used /// manually for implementing [`DecodeScalar`](crate::traits::DecodeScalar). pub fn bytes(value: &Value, ctx: &mut Context) -> Vec { if let Some(typ) = &value.type_name { match typ.as_builtin() { Some(&BuiltinType::Base64) => { #[cfg(feature="base64")] { use base64::{Engine, engine::general_purpose::STANDARD}; match &*value.literal { Literal::String(s) => { match STANDARD.decode(s.as_bytes()) { Ok(vec) => vec, Err(e) => { ctx.emit_error(DecodeError::conversion( &value.literal, e)); Default::default() } } } _ => { ctx.emit_error(DecodeError::scalar_kind( Kind::String, &value.literal)); Default::default() } } } #[cfg(not(feature="base64"))] { ctx.emit_error(DecodeError::unsupported( &value.literal, "base64 support is not compiled in")); Default::default() } } _ => { ctx.emit_error(DecodeError::TypeName { span: typ.span().clone(), found: Some(typ.value.clone()), expected: ExpectedType::optional(BuiltinType::Base64), rust_type: "bytes", }); Default::default() } } } else { match &*value.literal { Literal::String(s) => s.as_bytes().to_vec(), _ => { ctx.emit_error(DecodeError::scalar_kind( Kind::String, &value.literal)); Default::default() } } } } /// Emits error(s) if node is not a flag node /// /// Flag node is a node that has no arguments, properties or children. /// /// Used internally by `#[knuffel(child)] x: bool,`. But can be used /// manually for implementing [`DecodeScalar`](crate::traits::DecodeScalar). 
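// A minimal sketch (an assumption, not code from this crate) of using the
// `bytes` helper above manually: a `DecodeScalar` impl for a hypothetical
// `Blob` newtype that overrides the provided `decode` method so it can see the
// type annotation (e.g. `(base64)"aGVsbG8="`) on the whole `Value`:
//
//     struct Blob(Vec<u8>);
//
//     impl<S: ErrorSpan> DecodeScalar<S> for Blob {
//         fn decode(value: &Value<S>, ctx: &mut Context<S>)
//             -> Result<Self, DecodeError<S>>
//         {
//             Ok(Blob(bytes(value, ctx)))
//         }
//         fn type_check(_: &Option<Spanned<TypeName, S>>, _: &mut Context<S>) {}
//         fn raw_decode(value: &Spanned<Literal, S>, ctx: &mut Context<S>)
//             -> Result<Self, DecodeError<S>>
//         {
//             match &**value {
//                 Literal::String(s) => Ok(Blob(s.as_bytes().to_vec())),
//                 _ => {
//                     ctx.emit_error(DecodeError::scalar_kind(Kind::String, value));
//                     Ok(Blob(Vec::new()))
//                 }
//             }
//         }
//     }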
pub fn check_flag_node( node: &SpannedNode, ctx: &mut Context) { for arg in &node.arguments { ctx.emit_error(DecodeError::unexpected( &arg.literal, "argument", "unexpected argument")); } for (name, _) in &node.properties { ctx.emit_error(DecodeError::unexpected( name, "property", format!("unexpected property `{}`", name.escape_default()))); } if let Some(children) = &node.children { for child in children.iter() { ctx.emit_error( DecodeError::unexpected( child, "node", format!("unexpected node `{}`", child.node_name.escape_default()) )); } } } /// Parse single KDL node from AST pub fn node(ast: &SpannedNode) -> Result>> where T: Decode, S: ErrorSpan, { let mut ctx = Context::new(); match Decode::decode_node(ast, &mut ctx) { Ok(_) if ctx.has_errors() => { Err(ctx.into_errors()) } Err(e) => { ctx.emit_error(e); Err(ctx.into_errors()) } Ok(v) => Ok(v) } } impl Context { pub(crate) fn new() -> Context { Context { errors: Vec::new(), extensions: HashMap::new(), } } /// Add error /// /// This fails decoding operation similarly to just returning error value. /// But unlike result allows returning some dummy value and allows decoder /// to proceed so multiple errors are presented to user at the same time. pub fn emit_error(&mut self, err: impl Into>) { self.errors.push(err.into()); } /// Returns `true` if any errors was emitted into the context pub fn has_errors(&self) -> bool { !self.errors.is_empty() } pub(crate) fn into_errors(self) -> Vec> { self.errors } /// Set context value /// /// These values aren't used by the knuffel itself. But can be used by /// user-defined decoders to get some value. Each type can have a single but /// separate value set. So users are encouraged to use [new type idiom /// ](https://doc.rust-lang.org/rust-by-example/generics/new_types.html) /// to avoid conflicts with other libraries. /// /// It's also discourated to use `set` in the decoder. It's expeced that /// context will be filled in using /// [`parse_with_context`](crate::parse_with_context) function. pub fn set(&mut self, value: T) { self.extensions.insert(TypeId::of::(), Box::new(value)); } /// Get context value /// /// Returns a value previously set in context pub fn get(&self) -> Option<&T> { self.extensions.get(&TypeId::of::()) .and_then(|b| b.downcast_ref()) } } impl fmt::Display for Kind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(self.as_str()) } } impl From<&'_ Literal> for Kind { fn from(lit: &Literal) -> Kind { use Literal as L; use Kind as K; match lit { L::Int(_) => K::Int, L::Decimal(_) => K::Decimal, L::String(_) => K::String, L::Bool(_) => K::Bool, L::Null => K::Null, } } } impl Kind { /// Returns the string representation of `Kind` /// /// This is currently used in error messages. pub const fn as_str(&self) -> &'static str { use Kind::*; match self { Int => "integer", Decimal => "decimal", String => "string", Bool => "boolean", Null => "null", } } } knuffel-3.2.0/src/errors.rs000064400000000000000000000413211046102023000137320ustar 00000000000000//! Error types for the knuffel library //! //! You only need [`Error`](enum@Error) exposed as `knuffel::Error` unless you //! do manual implementations of any of the `Decode*` traits. 
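// A minimal sketch, not taken from this crate, of the `Context` value-passing
// described above: a hand-written scalar decoder reads a caller-provided value
// back out with `Context::get`. The `BasePath` / `ResolvedPath` newtypes are
// hypothetical, and the value itself is expected to be seeded by the caller,
// e.g. through `parse_with_context`, as the documentation above recommends.
#[cfg(test)]
mod context_value_example {
    use std::path::PathBuf;

    use crate::ast::{Literal, TypeName};
    use crate::decode::{Context, Kind};
    use crate::errors::DecodeError;
    use crate::span::Spanned;
    use crate::traits::{DecodeScalar, ErrorSpan};

    struct BasePath(PathBuf);
    struct ResolvedPath(PathBuf);

    impl<S: ErrorSpan> DecodeScalar<S> for ResolvedPath {
        fn type_check(_: &Option<Spanned<TypeName, S>>, _: &mut Context<S>) {}

        fn raw_decode(value: &Spanned<Literal, S>, ctx: &mut Context<S>)
            -> Result<Self, DecodeError<S>>
        {
            let relative = match &**value {
                Literal::String(s) => PathBuf::from(&(**s)[..]),
                _ => {
                    ctx.emit_error(DecodeError::scalar_kind(Kind::String, value));
                    PathBuf::new()
                }
            };
            // `BasePath` is looked up from the context; fall back to an empty
            // base when the caller did not provide one.
            let base = ctx.get::<BasePath>()
                .map(|b| b.0.clone())
                .unwrap_or_default();
            Ok(ResolvedPath(base.join(relative)))
        }
    }
}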
use std::borrow::Cow; use std::collections::BTreeSet; use std::fmt::{self, Write}; use thiserror::Error; use miette::{Diagnostic, NamedSource}; use crate::ast::{TypeName, Literal, SpannedNode}; use crate::span::{Spanned}; use crate::decode::Kind; use crate::traits::{ErrorSpan, Span}; /// Main error that is returned from KDL parsers /// /// Implements [`miette::Diagnostic`] so can be used to print nice error /// output with code snippets. /// /// See [crate documentation](crate#Errors) and [miette} documentation to /// find out how deal with them. #[derive(Debug, Diagnostic, Error)] #[error("error parsing KDL")] pub struct Error { #[source_code] pub(crate) source_code: NamedSource, #[related] pub(crate) errors: Vec, } /// An error type that is returned by decoder traits and emitted to the context /// /// These are elements of the #[derive(Debug, Diagnostic, Error)] #[non_exhaustive] pub enum DecodeError { /// Unexpected type name encountered /// /// Type names are identifiers and strings in parenthesis before node names /// or values. #[error("{} for {}, found {}", expected, rust_type, found.as_ref().map(|x| x.as_str()).unwrap_or("no type name"))] #[diagnostic()] TypeName { /// Position of the type name #[label="unexpected type name"] span: S, /// Type name contained in the source code found: Option, /// Expected type name or type names expected: ExpectedType, /// Rust type that is being decoded when error is encountered rust_type: &'static str, }, /// Different scalar kind was encountered than expected /// /// This is emitted when integer is used instead of string, and similar. It /// may also be encountered when `null` is used for non-optional field. #[diagnostic()] #[error("expected {} scalar, found {}", expected, found)] ScalarKind { /// Position of the unexpected scalar #[label("unexpected {}", found)] span: S, /// Scalar kind (or multiple) expected at this position expected: ExpectedKind, /// Kind of scalar that is found found: Kind, }, /// Some required element is missing /// /// This is emitted on missing required attributes, properties, or children. /// (missing type names are emitted using [`DecodeError::TypeName`]) #[diagnostic()] #[error("{}", message)] Missing { /// Position of the node name of which has missing element #[label("node starts here")] span: S, /// Description of what's missing message: String, }, /// Missing named node at top level /// /// This is similar to `Missing` but is only emitted for nodes on the /// document level. This is separate error because there is no way to show /// span where missing node is expected (end of input is not very helpful). #[diagnostic()] #[error("{}", message)] MissingNode { /// Descriptino of what's missing message: String, }, /// Unexpected entity encountered /// /// This is emitted for entities (arguments, properties, children) that have /// to matching structure field to put into, and also for nodes that aren /// expected to be encountered twice. #[diagnostic()] #[error("{}", message)] Unexpected { /// Position of the unexpected element #[label("unexpected {}", kind)] span: S, /// Kind of element that was found kind: &'static str, /// Description of the error message: String, }, /// Bad scalar conversion /// /// This error is emitted when some scalar value of right kind cannot be /// converted to the Rust value. Including, but not limited to: /// 1. Integer value out of range /// 2. 
`FromStr` returned error for the value parse by /// `#[knuffel(.., str)]` #[error("{}", source)] #[diagnostic()] Conversion { /// Position of the scalar that could not be converted #[label("invalid value")] span: S, /// Original error source: Box, }, /// Unsupported value /// /// This is currently used to error out on `(base64)` values when `base64` /// feature is not enabled. #[error("{}", message)] #[diagnostic()] Unsupported { /// Position of the value that is unsupported #[label="unsupported value"] span: S, /// Description of why the value is not supported message: Cow<'static, str>, }, /// Custom error that can be emitted during decoding /// /// This is not used by the knuffel itself. Note most of the time it's /// better to use [`DecodeError::Conversion`] as that will associate /// source code span to the error. #[error(transparent)] Custom(Box), } #[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)] pub(crate) enum TokenFormat { Char(char), Token(&'static str), Kind(&'static str), OpenRaw(usize), CloseRaw(usize), Eoi, } struct FormatUnexpected<'x>(&'x TokenFormat, &'x BTreeSet); #[derive(Debug, Diagnostic, Error)] pub(crate) enum ParseError { #[error("{}", FormatUnexpected(found, expected))] #[diagnostic()] Unexpected { label: Option<&'static str>, #[label("{}", label.unwrap_or("unexpected token"))] span: S, found: TokenFormat, expected: BTreeSet, }, #[error("unclosed {} {}", label, opened)] #[diagnostic()] Unclosed { label: &'static str, #[label="opened here"] opened_at: S, opened: TokenFormat, #[label("expected {}", expected)] expected_at: S, expected: TokenFormat, found: TokenFormat, }, #[error("{}", message)] #[diagnostic()] Message { label: Option<&'static str>, #[label("{}", label.unwrap_or("unexpected token"))] span: S, message: String, }, #[error("{}", message)] #[diagnostic(help("{}", help))] MessageWithHelp { label: Option<&'static str>, #[label("{}", label.unwrap_or("unexpected token"))] span: S, message: String, help: &'static str, }, } impl From> for TokenFormat { fn from(chr: Option) -> TokenFormat { if let Some(chr) = chr { TokenFormat::Char(chr) } else { TokenFormat::Eoi } } } impl From for TokenFormat { fn from(chr: char) -> TokenFormat { TokenFormat::Char(chr) } } impl From<&'static str> for TokenFormat { fn from(s: &'static str) -> TokenFormat { TokenFormat::Token(s) } } impl fmt::Display for TokenFormat { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use TokenFormat::*; match self { // do not escape quotes as we use backticks Char('"') => write!(f, "`\"`"), Char('\'') => write!(f, "`\'`"), // also single backslash should not confuse anybody in this context Char('\\') => write!(f, r"`\`"), Char(c) => write!(f, "`{}`", c.escape_default()), Token(s) => write!(f, "`{}`", s.escape_default()), Kind(s) => write!(f, "{}", s), Eoi => write!(f, "end of input"), OpenRaw(0) => { f.write_str("`r\"`") } OpenRaw(n) => { f.write_str("`r")?; for _ in 0..*n { f.write_char('#')?; } f.write_str("\"`") } CloseRaw(0) => { f.write_str("`\"`") } CloseRaw(n) => { f.write_str("`\"")?; for _ in 0..*n { f.write_char('#')?; } f.write_char('`') } } } } impl fmt::Display for FormatUnexpected<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "found {}", self.0)?; let mut iter = self.1.iter(); if let Some(item) = iter.next() { write!(f, ", expected {}", item)?; let back = iter.next_back(); for item in iter { write!(f, ", {}", item)?; } if let Some(item) = back { write!(f, " or {}", item)?; } } Ok(()) } } impl ParseError { pub(crate) fn with_expected_token(mut 
self, token: &'static str) -> Self { use ParseError::*; match &mut self { Unexpected { ref mut expected, .. } => { *expected = [TokenFormat::Token(token)].into_iter().collect(); } _ => {}, } self } pub(crate) fn with_expected_kind(mut self, token: &'static str) -> Self { use ParseError::*; match &mut self { Unexpected { ref mut expected, .. } => { *expected = [TokenFormat::Kind(token)].into_iter().collect(); } _ => {}, } self } pub(crate) fn with_no_expected(mut self) -> Self { use ParseError::*; match &mut self { Unexpected { ref mut expected, .. } => { *expected = BTreeSet::new(); } _ => {}, } self } #[allow(dead_code)] pub(crate) fn map_span(self, f: impl Fn(S) -> T) -> ParseError where T: ErrorSpan, { use ParseError::*; match self { Unexpected { label, span, found, expected } => Unexpected { label, span: f(span), found, expected }, Unclosed { label, opened_at, opened, expected_at, expected, found } => Unclosed { label, opened_at: f(opened_at), opened, expected_at: f(expected_at), expected, found }, Message { label, span, message } => Message { label, span: f(span), message }, MessageWithHelp { label, span, message, help } => MessageWithHelp { label, span: f(span), message, help }, } } } impl chumsky::Error for ParseError { type Span = S; type Label = &'static str; fn expected_input_found(span: Self::Span, expected: Iter, found: Option) -> Self where Iter: IntoIterator> { ParseError::Unexpected { label: None, span, found: found.into(), expected: expected.into_iter().map(Into::into).collect(), } } fn with_label(mut self, new_label: Self::Label) -> Self { use ParseError::*; match self { Unexpected { ref mut label, .. } => *label = Some(new_label), Unclosed { ref mut label, .. } => *label = new_label, Message { ref mut label, .. } => *label = Some(new_label), MessageWithHelp { ref mut label, .. } => *label = Some(new_label), } self } fn merge(mut self, other: Self) -> Self { use ParseError::*; match (&mut self, other) { (Unclosed { .. }, _) => self, (_, other@Unclosed { .. }) => other, (Unexpected { expected: ref mut dest, .. }, Unexpected { expected, .. 
}) => { dest.extend(expected.into_iter()); self } (_, other) => todo!("{} -> {}", self, other), } } fn unclosed_delimiter( unclosed_span: Self::Span, unclosed: char, span: Self::Span, expected: char, found: Option ) -> Self { ParseError::Unclosed { label: "delimited", opened_at: unclosed_span, opened: unclosed.into(), expected_at: span, expected: expected.into(), found: found.into(), } } } impl DecodeError { /// Construct [`DecodeError::Conversion`] error pub fn conversion(span: &Spanned, err: E) -> Self where E: Into>, { DecodeError::Conversion { span: span.span().clone(), source: err.into(), } } /// Construct [`DecodeError::ScalarKind`] error pub fn scalar_kind(expected: Kind, found: &Spanned) -> Self { DecodeError::ScalarKind { span: found.span().clone(), expected: expected.into(), found: (&found.value).into(), } } /// Construct [`DecodeError::Missing`] error pub fn missing(node: &SpannedNode, message: impl Into) -> Self { DecodeError::Missing { span: node.node_name.span().clone(), message: message.into(), } } /// Construct [`DecodeError::Unexpected`] error pub fn unexpected(elem: &Spanned, kind: &'static str, message: impl Into) -> Self { DecodeError::Unexpected { span: elem.span().clone(), kind, message: message.into(), } } /// Construct [`DecodeError::Unsupported`] error pub fn unsupported(span: &Spanned, message: M)-> Self where M: Into>, { DecodeError::Unsupported { span: span.span().clone(), message: message.into(), } } #[allow(dead_code)] pub(crate) fn map_span(self, mut f: impl FnMut(S) -> T) -> DecodeError where T: ErrorSpan, { use DecodeError::*; match self { TypeName { span, found, expected, rust_type } => TypeName { span: f(span), found, expected, rust_type }, ScalarKind { span, expected, found } => ScalarKind { span: f(span), expected, found }, Missing { span, message } => Missing { span: f(span), message}, MissingNode { message } => MissingNode { message }, Unexpected { span, kind, message } => Unexpected { span: f(span), kind, message}, Conversion { span, source } => Conversion { span: f(span), source }, Unsupported { span, message } => Unsupported { span: f(span), message }, Custom(e) => Custom(e), } } } /// Wrapper around expected type that is used in [`DecodeError::TypeName`]. #[derive(Debug)] pub struct ExpectedType { types: Vec, no_type: bool, } impl ExpectedType { /// Declare that decoder expects no type (no parens at all) for the value pub fn no_type() -> Self { ExpectedType { types: [].into(), no_type: true, } } /// Declare the type that has to be attached to the value pub fn required(ty: impl Into) -> Self { ExpectedType { types: vec![ty.into()], no_type: false, } } /// Declare the type that can be attached to the value /// /// But no type is also okay in this case (although, "no type" and specified /// type can potentially have different meaning). pub fn optional(ty: impl Into) -> Self { ExpectedType { types: vec![ty.into()], no_type: true, } } } impl fmt::Display for ExpectedType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.types.is_empty() { write!(f, "no type") } else { let mut iter = self.types.iter(); if let Some(first) = iter.next() { write!(f, "{}", first)?; } let last = if self.no_type { None } else { iter.next_back() }; for item in iter { write!(f, ", {}", item)?; } if self.no_type { write!(f, " or no type")?; } else if let Some(last) = last { write!(f, " or {}", last)?; } Ok(()) } } } /// Declares kind of value expected for the scalar value /// /// Use [`Kind`](crate::decode::Kind) and `.into()` to create this value. 
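// A minimal sketch (an assumption, not code from this crate) showing how the
// constructor helpers above are typically used from a hand-written `Decode`
// implementation; the `Listen` node and its `addr` argument are hypothetical:
//
//     struct Listen { addr: String }
//
//     impl<S: ErrorSpan> Decode<S> for Listen {
//         fn decode_node(node: &SpannedNode<S>, ctx: &mut Context<S>)
//             -> Result<Self, DecodeError<S>>
//         {
//             let mut args = node.arguments.iter();
//             let addr = match args.next() {
//                 Some(value) => DecodeScalar::decode(value, ctx)?,
//                 None => return Err(DecodeError::missing(
//                     node, "additional argument `addr` is required")),
//             };
//             // report, but do not abort on, stray entities
//             for extra in args {
//                 ctx.emit_error(DecodeError::unexpected(
//                     &extra.literal, "argument", "unexpected argument"));
//             }
//             for (name, _) in &node.properties {
//                 ctx.emit_error(DecodeError::unexpected(
//                     name, "property",
//                     format!("unexpected property `{}`", name.escape_default())));
//             }
//             Ok(Listen { addr })
//         }
//     }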
#[derive(Debug)] pub struct ExpectedKind(Kind); impl From for ExpectedKind { fn from(kind: Kind) -> ExpectedKind { ExpectedKind(kind) } } impl fmt::Display for ExpectedKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0.as_str()) } } knuffel-3.2.0/src/grammar.rs000064400000000000000000001470611046102023000140540ustar 00000000000000use std::collections::{BTreeSet, BTreeMap}; use chumsky::prelude::*; use crate::ast::{Literal, TypeName, Node, Value, Integer, Decimal, Radix}; use crate::ast::{SpannedName, SpannedNode, Document}; use crate::span::{Spanned}; use crate::traits::{Span}; use crate::errors::{ParseError as Error, TokenFormat}; use chumsky::combinator::{Map, Then}; use chumsky::chain::Chain; trait ChainChar { type Error; fn chain_c( self, other: P ) -> Map, fn(_: (O, U)) -> Vec, (O, U)>where Self: Sized, U: Chain, O: Chain, P: Parser; } impl> ChainChar for R { type Error = >::Error; fn chain_c( self, other: P ) -> Map, fn(_: (O, U)) -> Vec, (O, U)>where Self: Sized, U: Chain, O: Chain, P: Parser { Parser::chain(self, other) } } fn begin_comment(which: char) -> impl Parser> + Clone { just('/') .map_err(|e: Error| e.with_no_expected()) .ignore_then(just(which).ignored()) } fn newline() -> impl Parser> { just('\r') .or_not() .ignore_then(just('\n')) .or(just('\r')) // Carriage return .or(just('\x0C')) // Form feed .or(just('\u{0085}')) // Next line .or(just('\u{2028}')) // Line separator .or(just('\u{2029}')) // Paragraph separator .ignored() .map_err(|e: Error| e.with_expected_kind("newline")) } fn ws_char() -> impl Parser> { filter(|c| matches!(c, '\t' | ' ' | '\u{00a0}' | '\u{1680}' | '\u{2000}'..='\u{200A}' | '\u{202F}' | '\u{205F}' | '\u{3000}' | '\u{FEFF}' )) .ignored() } fn id_char() -> impl Parser> { filter(|c| !matches!(c, '\u{0000}'..='\u{0021}' | '\\'|'/'|'('|')'|'{'|'}'|'<'|'>'|';'|'['|']'|'='|','|'"' | // whitespace, excluding 0x20 '\u{00a0}' | '\u{1680}' | '\u{2000}'..='\u{200A}' | '\u{202F}' | '\u{205F}' | '\u{3000}' | // newline (excluding <= 0x20) '\u{0085}' | '\u{2028}' | '\u{2029}' )) .map_err(|e: Error| e.with_expected_kind("letter")) } fn id_sans_dig() -> impl Parser> { filter(|c| !matches!(c, '0'..='9' | '\u{0000}'..='\u{0020}' | '\\'|'/'|'('|')'|'{'|'}'|'<'|'>'|';'|'['|']'|'='|','|'"' | // whitespace, excluding 0x20 '\u{00a0}' | '\u{1680}' | '\u{2000}'..='\u{200A}' | '\u{202F}' | '\u{205F}' | '\u{3000}' | // newline (excluding <= 0x20) '\u{0085}' | '\u{2028}' | '\u{2029}' )) .map_err(|e: Error| e.with_expected_kind("letter")) } fn id_sans_sign_dig() -> impl Parser> { filter(|c| !matches!(c, '-'| '+' | '0'..='9' | '\u{0000}'..='\u{0020}' | '\\'|'/'|'('|')'|'{'|'}'|'<'|'>'|';'|'['|']'|'='|','|'"' | // whitespace, excluding 0x20 '\u{00a0}' | '\u{1680}' | '\u{2000}'..='\u{200A}' | '\u{202F}' | '\u{205F}' | '\u{3000}' | // newline (excluding <= 0x20) '\u{0085}' | '\u{2028}' | '\u{2029}' )) .map_err(|e: Error| e.with_expected_kind("letter")) } fn ws() -> impl Parser> { ws_char().repeated().at_least(1).ignored().or(ml_comment()) .map_err(|e| e.with_expected_kind("whitespace")) } fn comment() -> impl Parser> { begin_comment('/') .then(take_until(newline().or(end()))).ignored() } fn ml_comment() -> impl Parser> { recursive::<_, _, _, _, Error>(|comment| { choice(( comment, none_of('*').ignored(), just('*').then_ignore(none_of('/').rewind()).ignored(), )).repeated().ignored() .delimited_by(begin_comment('*'), just("*/")) }) .map_err_with_span(|e, span| { if matches!(&e, Error::Unexpected { found: TokenFormat::Eoi, .. 
}) && span.length() > 2 { e.merge(Error::Unclosed { label: "comment", opened_at: span.at_start(2), opened: "/*".into(), expected_at: span.at_end(), expected: "*/".into(), found: None.into(), }) } else { // otherwise opening /* is not matched e } }) } fn raw_string() -> impl Parser, Error=Error> { just('r') .ignore_then(just('#').repeated().map(|v| v.len())) .then_ignore(just('"')) .then_with(|sharp_num| take_until( just('"') .ignore_then(just('#').repeated().exactly(sharp_num) .ignored())) .map_err_with_span(move |e: Error, span| { if matches!(&e, Error::Unexpected { found: TokenFormat::Eoi, .. }) { e.merge(Error::Unclosed { label: "raw string", opened_at: span.before_start(sharp_num+2), opened: TokenFormat::OpenRaw(sharp_num), expected_at: span.at_end(), expected: TokenFormat::CloseRaw(sharp_num), found: None.into(), }) } else { e } }) ) .map(|(text, ())| { text.into_iter().collect::().into() }) } fn string() -> impl Parser, Error=Error> { raw_string().or(escaped_string()) } fn expected_kind(s: &'static str) -> BTreeSet { [TokenFormat::Kind(s)].into_iter().collect() } fn esc_char() -> impl Parser> { filter_map(|span, c| match c { '"'|'\\'|'/' => Ok(c), 'b' => Ok('\u{0008}'), 'f' => Ok('\u{000C}'), 'n' => Ok('\n'), 'r' => Ok('\r'), 't' => Ok('\t'), c => Err(Error::Unexpected { label: Some("invalid escape char"), span, found: c.into(), expected: "\"\\/bfnrt".chars().map(|c| c.into()).collect(), }) }) .or(just('u').ignore_then( filter_map(|span, c: char| c.is_digit(16).then(|| c) .ok_or_else(|| Error::Unexpected { label: Some("unexpected character"), span, found: c.into(), expected: expected_kind("hexadecimal digit"), })) .repeated() .at_least(1) .at_most(6) .delimited_by(just('{'), just('}')) .try_map(|hex_chars, span| { let s = hex_chars.into_iter().collect::(); let c = u32::from_str_radix(&s, 16).map_err(|e| e.to_string()) .and_then(|n| char::try_from(n).map_err(|e| e.to_string())) .map_err(|e| Error::Message { label: Some("invalid character code"), span, message: e.to_string(), })?; Ok(c) }) .recover_with(skip_until(['}', '"', '\\'], |_| '\0')))) } fn escaped_string() -> impl Parser, Error=Error> { just('"') .ignore_then( filter(|&c| c != '"' && c != '\\') .or(just('\\').ignore_then(esc_char())) .repeated() .then_ignore(just('"')) .map(|val| val.into_iter().collect::().into()) .map_err_with_span(|e: Error, span| { if matches!(&e, Error::Unexpected { found: TokenFormat::Eoi, .. }) { e.merge(Error::Unclosed { label: "string", opened_at: span.before_start(1), opened: '"'.into(), expected_at: span.at_end(), expected: '"'.into(), found: None.into(), }) } else { e } }) ) } fn bare_ident() -> impl Parser, Error=Error> { let sign = just('+').or(just('-')); choice(( sign.chain(id_sans_dig().chain(id_char().repeated())), sign.repeated().exactly(1), id_sans_sign_dig().chain(id_char().repeated()) )) .map(|v| v.into_iter().collect()).try_map(|s: String, span| { match &s[..] 
{ "true" => Err(Error::Unexpected { label: Some("keyword"), span, found: TokenFormat::Token("true"), expected: expected_kind("identifier"), }), "false" => Err(Error::Unexpected { label: Some("keyword"), span, found: TokenFormat::Token("false"), expected: expected_kind("identifier"), }), "null" => Err(Error::Unexpected { label: Some("keyword"), span, found: TokenFormat::Token("null"), expected: expected_kind("identifier"), }), _ => Ok(s.into()), } }) } fn ident() -> impl Parser, Error=Error> { choice(( // match -123 so `-` will not be treated as an ident by backtracking number().map(Err), bare_ident().map(Ok), string().map(Ok), )) // when backtracking is not already possible, // throw error for numbers (mapped to `Result::Err`) .try_map(|res, span| res.map_err(|_| Error::Unexpected { label: Some("unexpected number"), span, found: TokenFormat::Kind("number"), expected: expected_kind("identifier"), })) } fn keyword() -> impl Parser> { choice(( just("null") .map_err(|e: Error| e.with_expected_token("null")) .to(Literal::Null), just("true") .map_err(|e: Error| e.with_expected_token("true")) .to(Literal::Bool(true)), just("false") .map_err(|e: Error| e.with_expected_token("false")) .to(Literal::Bool(false)), )) } fn digit(radix: u32) -> impl Parser> { filter(move |c: &char| c.is_digit(radix)) } fn digits(radix: u32) -> impl Parser, Error=Error> { filter(move |c: &char| c == &'_' || c.is_digit(radix)).repeated() } fn decimal_number() -> impl Parser> { just('-').or(just('+')).or_not() .chain_c(digit(10)).chain_c(digits(10)) .chain_c( just('.').chain_c(digit(10)).chain_c(digits(10)).or_not().flatten()) .chain_c(just('e').or(just('E')) .chain_c(just('-').or(just('+')).or_not()) .chain_c(digits(10)).or_not().flatten()) .map(|v| { let is_decimal = v.iter().any(|c| matches!(c, '.'|'e'|'E')); let s: String = v.into_iter().filter(|c| c != &'_').collect(); if is_decimal { Literal::Decimal(Decimal(s.into())) } else { Literal::Int(Integer(Radix::Dec, s.into())) } }) } fn radix_number() -> impl Parser> { just('-').or(just('+')).or_not() .then_ignore(just('0')) .then(choice(( just('b').ignore_then( digit(2).chain(digits(2)).map(|s| (Radix::Bin, s))), just('o').ignore_then( digit(8).chain(digits(8)).map(|s| (Radix::Oct, s))), just('x').ignore_then( digit(16).chain(digits(16)).map(|s| (Radix::Hex, s))), ))) .map(|(sign, (radix, value))| { let mut s = String::with_capacity(value.len() + sign.map_or(0, |_| 1)); sign.map(|c| s.push(c)); s.extend(value.into_iter().filter(|&c| c != '_')); Literal::Int(Integer(radix, s.into())) }) } fn number() -> impl Parser> { radix_number().or(decimal_number()) } fn literal() -> impl Parser> { choice(( string().map(Literal::String), keyword(), number(), )) } fn type_name() -> impl Parser> { ident().delimited_by(just('('), just(')')).map(TypeName::from_string) } fn spanned(p: P) -> impl Parser, Error=Error> where P: Parser>, S: Span, { p.map_with_span(|value, span| Spanned { span, value }) } fn esc_line() -> impl Parser> { just('\\') .ignore_then(ws().repeated()) .ignore_then(comment().or(newline())) } fn node_space() -> impl Parser> { ws().or(esc_line()) } fn node_terminator() -> impl Parser> { choice((newline(), comment(), just(';').ignored(), end())) } enum PropOrArg { Prop(SpannedName, Value), Arg(Value), Ignore, } fn type_name_value() -> impl Parser, Error=Error> { spanned(type_name()).then(spanned(literal())) .map(|(type_name, literal)| Value { type_name: Some(type_name), literal }) } fn value() -> impl Parser, Error=Error> { type_name_value() 
.or(spanned(literal()).map(|literal| Value { type_name: None, literal })) } fn prop_or_arg_inner() -> impl Parser, Error=Error> { use PropOrArg::*; choice(( spanned(literal()).then(just('=').ignore_then(value()).or_not()) .try_map(|(name, value), _| { let name_span = name.span; match (name.value, value) { (Literal::String(s), Some(value)) => { let name = Spanned { span: name_span, value: s, }; Ok(Prop(name, value)) } (Literal::Bool(_) | Literal::Null, Some(_)) => { Err(Error::Unexpected { label: Some("unexpected keyword"), span: name_span, found: TokenFormat::Kind("keyword"), expected: [ TokenFormat::Kind("identifier"), TokenFormat::Kind("string"), ].into_iter().collect(), }) } (Literal::Int(_) | Literal::Decimal(_), Some(_)) => { Err(Error::MessageWithHelp { label: Some("unexpected number"), span: name_span, message: "numbers cannot be used as property names" .into(), help: "consider enclosing in double quotes \"..\"", }) } (value, None) => Ok(Arg(Value { type_name: None, literal: Spanned { span: name_span, value, }, })), } }), spanned(bare_ident()).then(just('=').ignore_then(value()).or_not()) .validate(|(name, value), span, emit| { if value.is_none() { emit(Error::MessageWithHelp { label: Some("unexpected identifier"), span, message: "identifiers cannot be used as arguments" .into(), help: "consider enclosing in double quotes \"..\"", }); } (name, value) }) .map(|(name, value)| { if let Some(value) = value { Prop(name, value) } else { // this is invalid, but we already emitted error // in validate() above, so doing a sane fallback Arg(Value { type_name: None, literal: name.map(Literal::String), }) } }), type_name_value().map(Arg), )) } fn prop_or_arg() -> impl Parser, Error=Error> { begin_comment('-') .ignore_then(node_space().repeated()) .ignore_then(prop_or_arg_inner()) .map(|_| PropOrArg::Ignore) .or(prop_or_arg_inner()) } fn line_space() -> impl Parser> { newline().or(ws()).or(comment()) } fn nodes() -> impl Parser>, Error=Error> { use PropOrArg::*; recursive(|nodes: chumsky::recursive::Recursive>| { let braced_nodes = just('{') .ignore_then(nodes .then_ignore(just('}')) .map_err_with_span(|e, span| { if matches!(&e, Error::Unexpected { found: TokenFormat::Eoi, .. 
}) { e.merge(Error::Unclosed { label: "curly braces", // we know it's `{` at the start of the span opened_at: span.before_start(1), opened: '{'.into(), expected_at: span.at_end(), expected: '}'.into(), found: None.into(), }) } else { e } })); let node = spanned(type_name()).or_not() .then(spanned(ident())) .then( node_space() .repeated().at_least(1) .ignore_then(prop_or_arg()) .repeated() ) .then(node_space().repeated() .ignore_then(begin_comment('-') .then_ignore(node_space().repeated()) .or_not()) .then(spanned(braced_nodes)) .or_not()) .then_ignore(node_space().repeated().then(node_terminator())) .map(|(((type_name, node_name), line_items), opt_children)| { let mut node = Node { type_name, node_name, properties: BTreeMap::new(), arguments: Vec::new(), children: match opt_children { Some((Some(_comment), _)) => None, Some((None, children)) => Some(children), None => None, }, }; for item in line_items { match item { Prop(name, value) => { node.properties.insert(name, value); } Arg(value) => { node.arguments.push(value); } Ignore => {} } } node }); begin_comment('-').then_ignore(node_space().repeated()).or_not() .then(spanned(node)) .separated_by(line_space().repeated()) .allow_leading().allow_trailing() .map(|vec| vec.into_iter().filter_map(|(comment, node)| { if comment.is_none() { Some(node) } else { None } }).collect()) }) } pub(crate) fn document() -> impl Parser, Error=Error> { nodes().then_ignore(end()).map(|nodes| Document { nodes }) } #[cfg(test)] mod test { use chumsky::prelude::*; use miette::NamedSource; use crate::errors::{ParseError, Error}; use crate::span::Span; use crate::ast::{Literal, TypeName, Radix, Decimal, Integer}; use crate::traits::sealed::Sealed; use super::{ws, comment, ml_comment, string, ident, literal, type_name}; use super::{nodes, number}; macro_rules! 
err_eq { ($left: expr, $right: expr) => { let left = $left.unwrap_err(); let left: serde_json::Value = serde_json::from_str(&left).unwrap(); let right: serde_json::Value = serde_json::from_str($right).unwrap(); assert_json_diff::assert_json_include!( actual: left, expected: right); //assert_json_diff::assert_json_eq!(left, right); } } fn parse<'x, P, T>(p: P, text: &'x str) -> Result where P: Parser> { p.then_ignore(end()) .parse(Span::stream(text)).map_err(|errors| { let source = text.to_string() + " "; let e = Error { source_code: NamedSource::new("", source), errors: errors.into_iter().map(Into::into).collect(), }; let mut buf = String::with_capacity(512); miette::GraphicalReportHandler::new() .render_report(&mut buf, &e).unwrap(); println!("{}", buf); buf.truncate(0); miette::JSONReportHandler::new() .render_report(&mut buf, &e).unwrap(); return buf; }) } #[test] fn parse_ws() { parse(ws(), " ").unwrap(); parse(ws(), "text").unwrap_err(); } #[test] fn parse_comments() { parse(comment(), "//hello").unwrap(); parse(comment(), "//hello\n").unwrap(); parse(ml_comment(), "/*nothing*/").unwrap(); parse(ml_comment(), "/*nothing**/").unwrap(); parse(ml_comment(), "/*no*thing*/").unwrap(); parse(ml_comment(), "/*no/**/thing*/").unwrap(); parse(ml_comment(), "/*no/*/**/*/thing*/").unwrap(); parse(ws().then(comment()), " // hello").unwrap(); parse(ws().then(comment()).then(ws()).then(comment()), " // hello\n //world").unwrap(); } #[test] fn parse_comment_err() { err_eq!(parse(ws(), r#"/* comment"#), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "unclosed comment `/*`", "severity": "error", "filename": "", "labels": [ {"label": "opened here", "span": {"offset": 0, "length": 2}}, {"label": "expected `*/`", "span": {"offset": 10, "length": 0}} ], "related": [] }] }"#); err_eq!(parse(ws(), r#"/* com/*ment *"#), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "unclosed comment `/*`", "severity": "error", "filename": "", "labels": [ {"label": "opened here", "span": {"offset": 0, "length": 2}}, {"label": "expected `*/`", "span": {"offset": 14, "length": 0}} ], "related": [] }] }"#); err_eq!(parse(ws(), r#"/* com/*me*/nt *"#), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "unclosed comment `/*`", "severity": "error", "filename": "", "labels": [ {"label": "opened here", "span": {"offset": 0, "length": 2}}, {"label": "expected `*/`", "span": {"offset": 16, "length": 0}} ], "related": [] }] }"#); err_eq!(parse(ws(), r#"/* comment *"#), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "unclosed comment `/*`", "severity": "error", "filename": "", "labels": [ {"label": "opened here", "span": {"offset": 0, "length": 2}}, {"label": "expected `*/`", "span": {"offset": 12, "length": 0}} ], "related": [] }] }"#); err_eq!(parse(ws(), r#"/*/"#), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "unclosed comment `/*`", "severity": "error", "filename": "", "labels": [ {"label": "opened here", "span": {"offset": 0, "length": 2}}, {"label": "expected `*/`", "span": {"offset": 3, "length": 0}} ], "related": [] }] }"#); // nothing is expected for comment or whitespace err_eq!(parse(ws(), r#"xxx"#), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "found `x`, expected whitespace", "severity": "error", "filename": "", "labels": [ 
{"label": "unexpected token", "span": {"offset": 0, "length": 1}} ], "related": [] }] }"#); } #[test] fn parse_str() { assert_eq!(&*parse(string(), r#""hello""#).unwrap(), "hello"); assert_eq!(&*parse(string(), r#""""#).unwrap(), ""); assert_eq!(&*parse(string(), r#""hel\"lo""#).unwrap(),"hel\"lo"); assert_eq!(&*parse(string(), r#""hello\nworld!""#).unwrap(), "hello\nworld!"); assert_eq!(&*parse(string(), r#""\u{1F680}""#).unwrap(), "🚀"); } #[test] fn parse_raw_str() { assert_eq!(&*parse(string(), r#"r"hello""#).unwrap(), "hello"); assert_eq!(&*parse(string(), r##"r#"world"#"##).unwrap(), "world"); assert_eq!(&*parse(string(), r##"r#"world"#"##).unwrap(), "world"); assert_eq!(&*parse(string(), r####"r###"a\n"##b"###"####).unwrap(), "a\\n\"##b"); } #[test] fn parse_str_err() { err_eq!(parse(string(), r#""hello"#), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "unclosed string `\"`", "severity": "error", "filename": "", "labels": [ {"label": "opened here", "span": {"offset": 0, "length": 1}}, {"label": "expected `\"`", "span": {"offset": 6, "length": 0}} ], "related": [] }] }"#); err_eq!(parse(string(), r#""he\u{FFFFFF}llo""#), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "converted integer out of range for `char`", "severity": "error", "filename": "", "labels": [ {"label": "invalid character code", "span": {"offset": 5, "length": 8}} ], "related": [] }] }"#); err_eq!(parse(string(), r#""he\u{1234567}llo""#), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "found `7`, expected `}`", "severity": "error", "filename": "", "labels": [ {"label": "unexpected token", "span": {"offset": 12, "length": 1}} ], "related": [] }] }"#); err_eq!(parse(string(), r#""he\u{1gh}llo""#), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "found `g`, expected `}` or hexadecimal digit", "severity": "error", "filename": "", "labels": [ {"label": "unexpected token", "span": {"offset": 7, "length": 1}} ], "related": [] }] }"#); err_eq!(parse(string(), r#""he\x01llo""#), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "found `x`, expected `\"`, `/`, `\\`, `b`, `f`, `n`, `r`, `t` or `u`", "severity": "error", "filename": "", "labels": [ {"label": "invalid escape char", "span": {"offset": 4, "length": 1}} ], "related": [] }] }"#); // Tests error recovery err_eq!(parse(string(), r#""he\u{FFFFFF}l\!lo""#), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "converted integer out of range for `char`", "severity": "error", "filename": "", "labels": [ {"label": "invalid character code", "span": {"offset": 5, "length": 8}} ], "related": [] }, { "message": "found `!`, expected `\"`, `/`, `\\`, `b`, `f`, `n`, `r`, `t` or `u`", "severity": "error", "filename": "", "labels": [ {"label": "invalid escape char", "span": {"offset": 15, "length": 1}} ], "related": [] }] }"#); } #[test] fn parse_raw_str_err() { err_eq!(parse(string(), r#"r"hello"#), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "unclosed raw string `r\"`", "severity": "error", "filename": "", "labels": [ {"label": "opened here", "span": {"offset": 0, "length": 2}}, {"label": "expected `\"`", "span": {"offset": 7, "length": 0}} ], "related": [] }] }"#); err_eq!(parse(string(), r###"r#"hello""###), r###"{ "message": "error parsing 
KDL", "severity": "error", "labels": [], "related": [{ "message": "unclosed raw string `r#\"`", "severity": "error", "filename": "", "labels": [ {"label": "opened here", "span": {"offset": 0, "length": 3}}, {"label": "expected `\"#`", "span": {"offset": 9, "length": 0}} ], "related": [] }] }"###); err_eq!(parse(string(), r####"r###"hello"####), r####"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "unclosed raw string `r###\"`", "severity": "error", "filename": "", "labels": [ {"label": "opened here", "span": {"offset": 0, "length": 5}}, {"label": "expected `\"###`", "span": {"offset": 10, "length": 0}} ], "related": [] }] }"####); err_eq!(parse(string(), r####"r###"hello"#world"####), r####"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "unclosed raw string `r###\"`", "severity": "error", "filename": "", "labels": [ {"label": "opened here", "span": {"offset": 0, "length": 5}}, {"label": "expected `\"###`", "span": {"offset": 17, "length": 0}} ], "related": [] }] }"####); } #[test] fn parse_ident() { assert_eq!(&*parse(ident(), "abcdef").unwrap(), "abcdef"); assert_eq!(&*parse(ident(), "xx_cd$yy").unwrap(), "xx_cd$yy"); assert_eq!(&*parse(ident(), "-").unwrap(), "-"); assert_eq!(&*parse(ident(), "--hello").unwrap(), "--hello"); assert_eq!(&*parse(ident(), "--hello1234").unwrap(), "--hello1234"); assert_eq!(&*parse(ident(), "--1").unwrap(), "--1"); assert_eq!(&*parse(ident(), "++1").unwrap(), "++1"); assert_eq!(&*parse(ident(), "-hello").unwrap(), "-hello"); assert_eq!(&*parse(ident(), "+hello").unwrap(), "+hello"); assert_eq!(&*parse(ident(), "-A").unwrap(), "-A"); assert_eq!(&*parse(ident(), "+b").unwrap(), "+b"); assert_eq!(&*parse(ident().then_ignore(ws()), "adef ").unwrap(), "adef"); assert_eq!(&*parse(ident().then_ignore(ws()), "a123@ ").unwrap(), "a123@"); parse(ident(), "1abc").unwrap_err(); parse(ident(), "-1").unwrap_err(); parse(ident(), "-1test").unwrap_err(); parse(ident(), "+1").unwrap_err(); } #[test] fn parse_literal() { assert_eq!(parse(literal(), "true").unwrap(), Literal::Bool(true)); assert_eq!(parse(literal(), "false").unwrap(), Literal::Bool(false)); assert_eq!(parse(literal(), "null").unwrap(), Literal::Null); } #[test] fn exclude_keywords() { parse(nodes(), "item true").unwrap(); err_eq!(parse(nodes(), "true \"item\""), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "found `true`, expected identifier", "severity": "error", "filename": "", "labels": [ {"label": "keyword", "span": {"offset": 0, "length": 4}} ], "related": [] }] }"#); err_eq!(parse(nodes(), "item false=true"), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "found keyword, expected identifier or string", "severity": "error", "filename": "", "labels": [ {"label": "unexpected keyword", "span": {"offset": 5, "length": 5}} ], "related": [] }] }"#); err_eq!(parse(nodes(), "item 2=2"), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "numbers cannot be used as property names", "severity": "error", "filename": "", "labels": [ {"label": "unexpected number", "span": {"offset": 5, "length": 1}} ], "help": "consider enclosing in double quotes \"..\"", "related": [] }] }"#); } #[test] fn parse_type() { assert_eq!(parse(type_name(), "(abcdef)").unwrap(), TypeName::from_string("abcdef".into())); assert_eq!(parse(type_name(), "(xx_cd$yy)").unwrap(), 
TypeName::from_string("xx_cd$yy".into())); parse(type_name(), "(1abc)").unwrap_err(); parse(type_name(), "( abc)").unwrap_err(); parse(type_name(), "(abc )").unwrap_err(); } #[test] fn parse_type_err() { err_eq!(parse(type_name(), "(123)"), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "found number, expected identifier", "severity": "error", "filename": "", "labels": [ {"label": "unexpected number", "span": {"offset": 1, "length": 3}} ], "related": [] }] }"#); err_eq!(parse(type_name(), "(-1)"), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "found number, expected identifier", "severity": "error", "filename": "", "labels": [ {"label": "unexpected number", "span": {"offset": 1, "length": 2}} ], "related": [] }] }"#); } fn single(r: Result, E>) -> T { let mut v = r.unwrap(); assert_eq!(v.len(), 1); v.remove(0) } #[test] fn parse_node() { let nval = single(parse(nodes(), "hello")); assert_eq!(nval.node_name.as_ref(), "hello"); assert_eq!(nval.type_name.as_ref(), None); let nval = single(parse(nodes(), "\"123\"")); assert_eq!(nval.node_name.as_ref(), "123"); assert_eq!(nval.type_name.as_ref(), None); let nval = single(parse(nodes(), "(typ)other")); assert_eq!(nval.node_name.as_ref(), "other"); assert_eq!(nval.type_name.as_ref().map(|x| &***x), Some("typ")); let nval = single(parse(nodes(), "(\"std::duration\")\"timeout\"")); assert_eq!(nval.node_name.as_ref(), "timeout"); assert_eq!(nval.type_name.as_ref().map(|x| &***x), Some("std::duration")); let nval = single(parse(nodes(), "hello \"arg1\"")); assert_eq!(nval.node_name.as_ref(), "hello"); assert_eq!(nval.type_name.as_ref(), None); assert_eq!(nval.arguments.len(), 1); assert_eq!(nval.properties.len(), 0); assert_eq!(&*nval.arguments[0].literal, &Literal::String("arg1".into())); let nval = single(parse(nodes(), "node \"true\"")); assert_eq!(nval.node_name.as_ref(), "node"); assert_eq!(nval.type_name.as_ref(), None); assert_eq!(nval.arguments.len(), 1); assert_eq!(nval.properties.len(), 0); assert_eq!(&*nval.arguments[0].literal, &Literal::String("true".into())); let nval = single(parse(nodes(), "hello (string)\"arg1\"")); assert_eq!(nval.node_name.as_ref(), "hello"); assert_eq!(nval.type_name.as_ref(), None); assert_eq!(nval.arguments.len(), 1); assert_eq!(nval.properties.len(), 0); assert_eq!(&***nval.arguments[0].type_name.as_ref().unwrap(), "string"); assert_eq!(&*nval.arguments[0].literal, &Literal::String("arg1".into())); let nval = single(parse(nodes(), "hello key=(string)\"arg1\"")); assert_eq!(nval.node_name.as_ref(), "hello"); assert_eq!(nval.type_name.as_ref(), None); assert_eq!(nval.arguments.len(), 0); assert_eq!(nval.properties.len(), 1); assert_eq!(&***nval.properties.get("key").unwrap() .type_name.as_ref().unwrap(), "string"); assert_eq!(&*nval.properties.get("key").unwrap().literal, &Literal::String("arg1".into())); let nval = single(parse(nodes(), "hello key=\"arg1\"")); assert_eq!(nval.node_name.as_ref(), "hello"); assert_eq!(nval.type_name.as_ref(), None); assert_eq!(nval.arguments.len(), 0); assert_eq!(nval.properties.len(), 1); assert_eq!(&*nval.properties.get("key").unwrap().literal, &Literal::String("arg1".into())); let nval = single(parse(nodes(), "parent {\nchild\n}")); assert_eq!(nval.node_name.as_ref(), "parent"); assert_eq!(nval.children().len(), 1); assert_eq!(nval.children.as_ref().unwrap()[0].node_name.as_ref(), "child"); let nval = single(parse(nodes(), "parent {\nchild1\nchild2\n}")); 
assert_eq!(nval.node_name.as_ref(), "parent"); assert_eq!(nval.children().len(), 2); assert_eq!(nval.children.as_ref().unwrap()[0].node_name.as_ref(), "child1"); assert_eq!(nval.children.as_ref().unwrap()[1].node_name.as_ref(), "child2"); let nval = single(parse(nodes(), "parent{\nchild3\n}")); assert_eq!(nval.node_name.as_ref(), "parent"); assert_eq!(nval.children().len(), 1); assert_eq!(nval.children.as_ref().unwrap()[0].node_name.as_ref(), "child3"); let nval = single(parse(nodes(), "parent \"x\"=1 {\nchild4\n}")); assert_eq!(nval.node_name.as_ref(), "parent"); assert_eq!(nval.properties.len(), 1); assert_eq!(nval.children().len(), 1); assert_eq!(nval.children.as_ref().unwrap()[0].node_name.as_ref(), "child4"); let nval = single(parse(nodes(), "parent \"x\" {\nchild4\n}")); assert_eq!(nval.node_name.as_ref(), "parent"); assert_eq!(nval.arguments.len(), 1); assert_eq!(nval.children().len(), 1); assert_eq!(nval.children.as_ref().unwrap()[0].node_name.as_ref(), "child4"); let nval = single(parse(nodes(), "parent \"x\"{\nchild5\n}")); assert_eq!(nval.node_name.as_ref(), "parent"); assert_eq!(nval.arguments.len(), 1); assert_eq!(nval.children().len(), 1); assert_eq!(nval.children.as_ref().unwrap()[0].node_name.as_ref(), "child5"); let nval = single(parse(nodes(), "hello /-\"skip_arg\" \"arg2\"")); assert_eq!(nval.node_name.as_ref(), "hello"); assert_eq!(nval.type_name.as_ref(), None); assert_eq!(nval.arguments.len(), 1); assert_eq!(nval.properties.len(), 0); assert_eq!(&*nval.arguments[0].literal, &Literal::String("arg2".into())); let nval = single(parse(nodes(), "hello /- \"skip_arg\" \"arg2\"")); assert_eq!(nval.node_name.as_ref(), "hello"); assert_eq!(nval.type_name.as_ref(), None); assert_eq!(nval.arguments.len(), 1); assert_eq!(nval.properties.len(), 0); assert_eq!(&*nval.arguments[0].literal, &Literal::String("arg2".into())); let nval = single(parse(nodes(), "hello prop1=\"1\" /-prop1=\"2\"")); assert_eq!(nval.node_name.as_ref(), "hello"); assert_eq!(nval.type_name.as_ref(), None); assert_eq!(nval.arguments.len(), 0); assert_eq!(nval.properties.len(), 1); assert_eq!(&*nval.properties.get("prop1").unwrap().literal, &Literal::String("1".into())); let nval = single(parse(nodes(), "parent /-{\nchild\n}")); assert_eq!(nval.node_name.as_ref(), "parent"); assert_eq!(nval.children().len(), 0); } #[test] fn parse_node_whitespace() { let nval = single(parse(nodes(), "hello { }")); assert_eq!(nval.node_name.as_ref(), "hello"); assert_eq!(nval.type_name.as_ref(), None); let nval = single(parse(nodes(), "hello { } ")); assert_eq!(nval.node_name.as_ref(), "hello"); assert_eq!(nval.type_name.as_ref(), None); let nval = single(parse(nodes(), "hello ")); assert_eq!(nval.node_name.as_ref(), "hello"); assert_eq!(nval.type_name.as_ref(), None); let nval = single(parse(nodes(), "hello ")); assert_eq!(nval.node_name.as_ref(), "hello"); assert_eq!(nval.type_name.as_ref(), None); } #[test] fn parse_node_err() { err_eq!(parse(nodes(), "hello{"), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "unclosed curly braces `{`", "severity": "error", "filename": "", "labels": [ {"label": "opened here", "span": {"offset": 5, "length": 1}}, {"label": "expected `}`", "span": {"offset": 6, "length": 0}} ], "related": [] }] }"#); err_eq!(parse(nodes(), "hello world"), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "identifiers cannot be used as arguments", "severity": "error", "filename": "", "labels": [ {"label": "unexpected 
        err_eq!(parse(nodes(), "hello world"), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "identifiers cannot be used as arguments", "severity": "error", "filename": "", "labels": [ {"label": "unexpected identifier", "span": {"offset": 6, "length": 5}} ], "help": "consider enclosing in double quotes \"..\"", "related": [] }] }"#);
        err_eq!(parse(nodes(), "hello world {"), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "identifiers cannot be used as arguments", "severity": "error", "filename": "", "labels": [ {"label": "unexpected identifier", "span": {"offset": 6, "length": 5}} ], "help": "consider enclosing in double quotes \"..\"", "related": [] }, { "message": "unclosed curly braces `{`", "severity": "error", "filename": "", "labels": [ {"label": "opened here", "span": {"offset": 12, "length": 1}}, {"label": "expected `}`", "span": {"offset": 13, "length": 0}} ], "related": [] }] }"#);
        err_eq!(parse(nodes(), "1 + 2"), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "found number, expected identifier", "severity": "error", "filename": "", "labels": [ {"label": "unexpected number", "span": {"offset": 0, "length": 1}} ], "related": [] }] }"#);
        err_eq!(parse(nodes(), "-1 +2"), r#"{ "message": "error parsing KDL", "severity": "error", "labels": [], "related": [{ "message": "found number, expected identifier", "severity": "error", "filename": "", "labels": [ {"label": "unexpected number", "span": {"offset": 0, "length": 2}} ], "related": [] }] }"#);
    }

    #[test]
    fn parse_nodes() {
        let nval = parse(nodes(), "parent {\n/- child\n}").unwrap();
        assert_eq!(nval.len(), 1);
        assert_eq!(nval[0].node_name.as_ref(), "parent");
        assert_eq!(nval[0].children().len(), 0);
        let nval = parse(nodes(), "/-parent {\n child\n}\nsecond").unwrap();
        assert_eq!(nval.len(), 1);
        assert_eq!(nval[0].node_name.as_ref(), "second");
        assert_eq!(nval[0].children().len(), 0);
    }

    #[test]
    fn parse_number() {
        assert_eq!(parse(number(), "12").unwrap(), Literal::Int(Integer(Radix::Dec, "12".into())));
        assert_eq!(parse(number(), "012").unwrap(), Literal::Int(Integer(Radix::Dec, "012".into())));
        assert_eq!(parse(number(), "0").unwrap(), Literal::Int(Integer(Radix::Dec, "0".into())));
        assert_eq!(parse(number(), "-012").unwrap(), Literal::Int(Integer(Radix::Dec, "-012".into())));
        assert_eq!(parse(number(), "+0").unwrap(), Literal::Int(Integer(Radix::Dec, "+0".into())));
        assert_eq!(parse(number(), "123_555").unwrap(), Literal::Int(Integer(Radix::Dec, "123555".into())));
        assert_eq!(parse(number(), "123.555").unwrap(), Literal::Decimal(Decimal("123.555".into())));
        assert_eq!(parse(number(), "+1_23.5_55E-17").unwrap(), Literal::Decimal(Decimal("+123.555E-17".into())));
        assert_eq!(parse(number(), "123e+555").unwrap(), Literal::Decimal(Decimal("123e+555".into())));
    }

    #[test]
    fn parse_radix_number() {
        assert_eq!(parse(number(), "0x12").unwrap(), Literal::Int(Integer(Radix::Hex, "12".into())));
        assert_eq!(parse(number(), "0xab_12").unwrap(), Literal::Int(Integer(Radix::Hex, "ab12".into())));
        assert_eq!(parse(number(), "-0xab_12").unwrap(), Literal::Int(Integer(Radix::Hex, "-ab12".into())));
        assert_eq!(parse(number(), "0o17").unwrap(), Literal::Int(Integer(Radix::Oct, "17".into())));
        assert_eq!(parse(number(), "+0o17").unwrap(), Literal::Int(Integer(Radix::Oct, "+17".into())));
        assert_eq!(parse(number(), "0b1010_101").unwrap(), Literal::Int(Integer(Radix::Bin, "1010101".into())));
    }
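
    // Illustrative sketch, not part of the original test suite: a single node
    // combining an argument, a property, and a child block, using only the
    // `parse`/`nodes`/`single` helpers and the assertion patterns already
    // exercised by the tests above.
    #[test]
    fn parse_node_combined_example() {
        let nval = single(parse(nodes(), "parent \"arg\" key=\"val\" {\nchild\n}"));
        assert_eq!(nval.node_name.as_ref(), "parent");
        assert_eq!(nval.arguments.len(), 1);
        assert_eq!(nval.properties.len(), 1);
        assert_eq!(&*nval.arguments[0].literal, &Literal::String("arg".into()));
        assert_eq!(&*nval.properties.get("key").unwrap().literal, &Literal::String("val".into()));
        assert_eq!(nval.children().len(), 1);
        assert_eq!(nval.children.as_ref().unwrap()[0].node_name.as_ref(), "child");
    }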
    #[test]
    fn parse_dashes() {
        let nval = parse(nodes(), "-").unwrap();
        assert_eq!(nval.len(), 1);
        assert_eq!(nval[0].node_name.as_ref(), "-");
        assert_eq!(nval[0].children().len(), 0);
        let nval = parse(nodes(), "--").unwrap();
        assert_eq!(nval.len(), 1);
        assert_eq!(nval[0].node_name.as_ref(), "--");
        assert_eq!(nval[0].children().len(), 0);
        let nval = parse(nodes(), "--1").unwrap();
        assert_eq!(nval.len(), 1);
        assert_eq!(nval[0].node_name.as_ref(), "--1");
        assert_eq!(nval[0].children().len(), 0);
        let nval = parse(nodes(), "-\n-").unwrap();
        assert_eq!(nval.len(), 2);
        assert_eq!(nval[0].node_name.as_ref(), "-");
        assert_eq!(nval[0].children().len(), 0);
        assert_eq!(nval[1].node_name.as_ref(), "-");
        assert_eq!(nval[1].children().len(), 0);
        let nval = parse(nodes(), "node -1 --x=2").unwrap();
        assert_eq!(nval.len(), 1);
        assert_eq!(nval[0].arguments.len(), 1);
        assert_eq!(nval[0].properties.len(), 1);
        assert_eq!(&*nval[0].arguments[0].literal, &Literal::Int(Integer(Radix::Dec, "-1".into())));
        assert_eq!(&*nval[0].properties.get("--x").unwrap().literal, &Literal::Int(Integer(Radix::Dec, "2".into())));
    }
}
knuffel-3.2.0/src/lib.rs000064400000000000000000000007201046102023000131620ustar 00000000000000
#![doc = include_str!("../README.md")]
#![warn(missing_docs)]
#![warn(missing_debug_implementations)]

mod containers;
mod convert;
mod convert_ast;
mod grammar;
mod wrappers;

pub mod ast;
pub mod decode;
pub mod errors;
pub mod span;
pub mod traits;

#[cfg(feature="derive")]
pub use knuffel_derive::{Decode, DecodeScalar};

pub use wrappers::{parse_ast, parse, parse_with_context};
pub use traits::{Decode, DecodeScalar, DecodeChildren};
pub use errors::Error;
knuffel-3.2.0/src/span.rs000064400000000000000000000250211046102023000133560ustar 00000000000000
//! Knuffel supports two kinds of spans for parsing:
//!
//! 1. [`Span`], which only tracks the byte offset from the start of the
//!    source code
//! 2. [`LineSpan`], which also tracks line and column numbers
//!
//! This distinction matters during the parsing stage, as [`Span`] is normally
//! faster. [`LineSpan`] is still faster than computing the line/column number
//! for each span separately, and is more convenient if you need that
//! information.
//!
//! On the other hand, at the decode stage you can convert your span type into
//! something more elaborate that includes the file name, or that can refer to
//! the defaults as a separate kind of span. See [`traits::DecodeSpan`].
use std::fmt;
use std::ops::Range;

use crate::traits;
use crate::decode::Context;

/// Reexport of the [miette::SourceSpan] type that we use for parsing
pub use miette::SourceSpan as ErrorSpan;

/// Wraps the structure to keep the source code span, but also dereferences to `T`
#[derive(Clone, Debug)]
#[cfg_attr(feature="minicbor", derive(minicbor::Encode, minicbor::Decode))]
pub struct Spanned<T, S> {
    #[cfg_attr(feature="minicbor", n(0))]
    pub(crate) span: S,
    #[cfg_attr(feature="minicbor", n(1))]
    pub(crate) value: T,
}

/// Normal byte offset span
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature="minicbor", derive(minicbor::Encode, minicbor::Decode))]
pub struct Span(
    #[cfg_attr(feature="minicbor", n(0))] pub usize,
    #[cfg_attr(feature="minicbor", n(1))] pub usize,
);

/// Line and column position of the datum in the source code
// TODO(tailhook) optimize Eq to check only offset
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature="minicbor", derive(minicbor::Encode, minicbor::Decode))]
pub struct LinePos {
    /// Zero-based byte offset
    #[cfg_attr(feature="minicbor", n(0))]
    pub offset: usize,
    /// Zero-based line number
    #[cfg_attr(feature="minicbor", n(1))]
    pub line: usize,
    /// Zero-based column number
    #[cfg_attr(feature="minicbor", n(2))]
    pub column: usize,
}

/// Span with line and column number
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature="minicbor", derive(minicbor::Encode, minicbor::Decode))]
pub struct LineSpan(
    #[cfg_attr(feature="minicbor", n(0))] pub LinePos,
    #[cfg_attr(feature="minicbor", n(1))] pub LinePos,
);

#[allow(missing_debug_implementations)]
mod sealed {
    pub struct OffsetTracker {
        pub(crate) offset: usize,
    }
    #[cfg(feature="line-numbers")]
    pub struct LineTracker {
        pub(crate) offset: usize,
        pub(crate) caret_return: bool,
        pub(crate) line: usize,
        pub(crate) column: usize,
    }
}

impl Span {
    /// Length of the span in bytes
    pub fn length(&self) -> usize {
        self.1.saturating_sub(self.0)
    }
}

impl Into<ErrorSpan> for Span {
    fn into(self) -> ErrorSpan {
        (self.0, self.1.saturating_sub(self.0)).into()
    }
}

impl Into<ErrorSpan> for LineSpan {
    fn into(self) -> ErrorSpan {
        (self.0.offset, self.1.offset.saturating_sub(self.0.offset)).into()
    }
}

impl chumsky::Span for Span {
    type Context = ();
    type Offset = usize;
    fn new(_context: (), range: std::ops::Range<usize>) -> Self {
        Span(range.start, range.end)
    }
    fn context(&self) -> () { () }
    fn start(&self) -> usize { self.0 }
    fn end(&self) -> usize { self.1 }
}

impl traits::sealed::SpanTracker for sealed::OffsetTracker {
    type Span = Span;
    fn next_span(&mut self, c: char) -> Span {
        let start = self.offset;
        self.offset += c.len_utf8();
        Span(start, self.offset)
    }
}

impl traits::sealed::Sealed for Span {
    type Tracker = sealed::OffsetTracker;
    fn at_start(&self, chars: usize) -> Self {
        Span(self.0, self.0+chars)
    }
    fn at_end(&self) -> Self {
        Span(self.1, self.1)
    }
    fn before_start(&self, chars: usize) -> Self {
        Span(self.0.saturating_sub(chars), self.0)
    }
    fn length(&self) -> usize {
        self.1.saturating_sub(self.0)
    }
    fn stream(text: &str) -> traits::sealed::Stream<'_, Self, Self::Tracker>
        where Self: chumsky::Span
    {
        chumsky::Stream::from_iter(
            Span(text.len(), text.len()),
            traits::sealed::Map(text.chars(), sealed::OffsetTracker { offset: 0 }),
        )
    }
}

impl traits::Span for Span {}

impl chumsky::Span for LineSpan {
    type Context = ();
    type Offset = LinePos;
    fn new(_context: (), range: std::ops::Range<LinePos>) -> Self {
        LineSpan(range.start, range.end)
    }
    fn context(&self) -> () { () }
    fn start(&self) -> LinePos { self.0 }
    fn end(&self) -> LinePos { self.1 }
}

#[cfg(feature="line-numbers")]
impl traits::sealed::SpanTracker for sealed::LineTracker {
    type Span = LineSpan;
    fn next_span(&mut self, c: char) -> LineSpan {
        let offset = self.offset;
        let line = self.line;
        let column = self.column;
        self.offset += c.len_utf8();
        match c {
            '\n' if self.caret_return => {}
            '\r'|'\n'|'\x0C'|'\u{0085}'|'\u{2028}'|'\u{2029}' => {
                self.line += 1;
                self.column = 0;
            }
            '\t' => self.column += 8,
            c => {
                self.column += unicode_width::UnicodeWidthChar::width(c)
                    .unwrap_or(0);  // treat control chars as zero-length
            }
        }
        self.caret_return = c == '\r';
        LineSpan(
            LinePos {
                line,
                column,
                offset,
            },
            LinePos {
                line: self.line,
                column: self.column,
                offset: self.offset,
            },
        )
    }
}

#[cfg(feature="line-numbers")]
impl traits::sealed::Sealed for LineSpan {
    type Tracker = sealed::LineTracker;
    /// Note assuming ascii, single-width, non-newline chars here
    fn at_start(&self, chars: usize) -> Self {
        LineSpan(self.0, LinePos {
            offset: self.0.offset + chars,
            column: self.0.column + chars,
            .. self.0
        })
    }
    fn at_end(&self) -> Self {
        LineSpan(self.1, self.1)
    }
    /// Note assuming ascii, single-width, non-newline chars here
    fn before_start(&self, chars: usize) -> Self {
        LineSpan(LinePos {
            offset: self.0.offset.saturating_sub(chars),
            column: self.0.column.saturating_sub(chars),
            .. self.0
        }, self.0)
    }
    fn length(&self) -> usize {
        self.1.offset.saturating_sub(self.0.offset)
    }
    fn stream(text: &str) -> traits::sealed::Stream<'_, Self, Self::Tracker>
        where Self: chumsky::Span
    {
        let mut caret_return = false;
        let mut line = 0;
        let mut last_line = text;
        let mut iter = text.chars();
        while let Some(c) = iter.next() {
            match c {
                '\n' if caret_return => {}
                '\r'|'\n'|'\x0C'|'\u{0085}'|'\u{2028}'|'\u{2029}' => {
                    line += 1;
                    last_line = iter.as_str();
                }
                _ => {}
            }
            caret_return = c == '\r';
        }
        let column = unicode_width::UnicodeWidthStr::width(last_line);
        let eoi = LinePos {
            line,
            column,
            offset: text.len(),
        };
        chumsky::Stream::from_iter(
            LineSpan(eoi, eoi),
            traits::sealed::Map(
                text.chars(),
                sealed::LineTracker {
                    caret_return: false,
                    offset: 0,
                    line: 0,
                    column: 0,
                },
            ),
        )
    }
}

#[cfg(feature="line-numbers")]
impl traits::Span for LineSpan {}

#[cfg(feature="line-numbers")]
impl traits::DecodeSpan<LineSpan> for Span {
    fn decode_span(span: &LineSpan, _: &mut Context<LineSpan>) -> Self {
        Span(span.0.offset, span.1.offset)
    }
}

impl<T, S> Spanned<T, S> {
    /// Converts value but keeps the same span attached
    pub fn map<R>(self, f: impl FnOnce(T) -> R) -> Spanned<R, S> {
        Spanned {
            span: self.span,
            value: f(self.value),
        }
    }
    /// Converts span but keeps the same value attached
    pub fn map_span<U>(self, f: impl FnOnce(S) -> U) -> Spanned<T, U> {
        Spanned {
            span: f(self.span),
            value: self.value,
        }
    }
    pub(crate) fn clone_as<U>(&self, ctx: &mut Context<S>) -> Spanned<T, U>
        where U: traits::DecodeSpan<S>,
              T: Clone,
              S: traits::ErrorSpan,
    {
        Spanned {
            span: traits::DecodeSpan::decode_span(&self.span, ctx),
            value: self.value.clone(),
        }
    }
}

impl<U: ?Sized, T: AsRef<U>, S> AsRef<U> for Spanned<T, S> {
    fn as_ref(&self) -> &U {
        self.value.as_ref()
    }
}

impl<U: ?Sized, T: AsMut<U>, S> AsMut<U> for Spanned<T, S> {
    fn as_mut(&mut self) -> &mut U {
        self.value.as_mut()
    }
}

impl<T, S> std::ops::Deref for Spanned<T, S> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.value
    }
}

impl<T, S> std::ops::DerefMut for Spanned<T, S> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.value
    }
}

impl<T, S> std::borrow::Borrow<T> for Spanned<T, S> {
    fn borrow(&self) -> &T {
        self.value.borrow()
    }
}

impl<T: ?Sized, S> std::borrow::Borrow<T> for Spanned<Box<T>, S> {
    fn borrow(&self) -> &T {
        self.value.borrow()
    }
}

impl<T, S> Spanned<T, S> {
    /// Returns the span of the value
    pub fn span(&self) -> &S {
        &self.span
    }
}
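
// Illustrative sketch, not part of the crate source: `Spanned<T, S>`
// dereferences to the inner value and (per the impls below) compares and
// hashes by value only, while the span stays available via `span()`.
// The concrete offsets used here are made up for the example.
#[test]
fn spanned_acts_like_inner_value_example() {
    let a = Spanned { span: Span(0, 5), value: String::from("hello") };
    let b = Spanned { span: Span(7, 12), value: String::from("hello") };
    assert_eq!(*a, *b);                 // Deref to the inner String
    assert_eq!(a, b);                   // PartialEq ignores the span
    assert_eq!(a.span(), &Span(0, 5));  // the span is still accessible
    let s: &str = a.as_ref();           // AsRef forwards to the inner value
    assert_eq!(s, "hello");
}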

impl<S, T: PartialEq<T>> PartialEq for Spanned<T, S> {
    fn eq(&self, other: &Spanned<T, S>) -> bool {
        self.value == other.value
    }
}

impl<S, T: PartialOrd<T>> PartialOrd for Spanned<T, S> {
    fn partial_cmp(&self, other: &Spanned<T, S>) -> Option<std::cmp::Ordering> {
        self.value.partial_cmp(&other.value)
    }
}

impl<S, T: Ord> Ord for Spanned<T, S> {
    fn cmp(&self, other: &Spanned<T, S>) -> std::cmp::Ordering {
        self.value.cmp(&other.value)
    }
}

impl<S, T: Eq> Eq for Spanned<T, S> {}

impl<S, T: std::hash::Hash> std::hash::Hash for Spanned<T, S> {
    fn hash<H>(&self, state: &mut H)
        where H: std::hash::Hasher,
    {
        self.value.hash(state)
    }
}

impl fmt::Display for Span {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)?;
        "..".fmt(f)?;
        self.1.fmt(f)?;
        Ok(())
    }
}

impl From<Range<usize>> for Span {
    fn from(r: Range<usize>) -> Span {
        Span(r.start, r.end)
    }
}
knuffel-3.2.0/src/traits.rs000064400000000000000000000125471046102023000137340ustar 00000000000000
//! Traits used for the library
//!
//! Most users will never implement these manually. See
//! [`Decode`](derive@crate::Decode) and
//! [`DecodeScalar`](derive@crate::DecodeScalar) for
//! documentation of the derives that implement these traits.
use std::fmt;

use crate::ast::{SpannedNode, Literal, Value, TypeName};
use crate::span::Spanned;
use crate::errors::DecodeError;
use crate::decode::Context;

/// Trait to decode a KDL node from the AST
pub trait Decode<S: ErrorSpan>: Sized {
    /// Decodes the node from the AST
    fn decode_node(node: &SpannedNode<S>, ctx: &mut Context<S>)
        -> Result<Self, DecodeError<S>>;
}

/// Trait to decode children of a KDL node, mostly used for the root document
pub trait DecodeChildren<S: ErrorSpan>: Sized {
    /// Decodes from a list of children ASTs
    fn decode_children(nodes: &[SpannedNode<S>], ctx: &mut Context<S>)
        -> Result<Self, DecodeError<S>>;
}

/// The trait is implemented for structures that can be used as part of other
/// structs
///
/// The type of field that `#[knuffel(flatten)]` is used for should implement
/// this trait. It is automatically implemented by `#[derive(knuffel::Decode)]`
/// for structures that have only optional properties and children (no
/// arguments).
pub trait DecodePartial<S: ErrorSpan>: Sized {
    /// The method is called when an unknown child is encountered by the
    /// parent structure
    ///
    /// Returns `Ok(true)` if the child is "consumed" (i.e. stored in this
    /// structure).
    fn insert_child(&mut self, node: &SpannedNode<S>, ctx: &mut Context<S>)
        -> Result<bool, DecodeError<S>>;
    /// The method is called when an unknown property is encountered by the
    /// parent structure
    ///
    /// Returns `Ok(true)` if the property is "consumed" (i.e. stored in this
    /// structure).
    fn insert_property(&mut self,
                       name: &Spanned<Box<str>, S>, value: &Value<S>,
                       ctx: &mut Context<S>)
        -> Result<bool, DecodeError<S>>;
}

/// The trait that decodes a scalar value and checks its type
pub trait DecodeScalar<S: ErrorSpan>: Sized {
    /// Typecheck the value
    ///
    /// This method can only emit errors to the context in the type-mismatch
    /// case. Errors emitted to the context are considered fatal once the
    /// whole data is processed, but are non-fatal when encountered. So even
    /// if there is a typo in the type name, we can proceed and try parsing
    /// the actual value.
    fn type_check(type_name: &Option<Spanned<TypeName, S>>,
                  ctx: &mut Context<S>);
    /// Decode the value without a typecheck
    ///
    /// This can be used by wrappers to parse some known value but use a
    /// different type name (a kind of emulated subclassing)
    fn raw_decode(value: &Spanned<Literal, S>, ctx: &mut Context<S>)
        -> Result<Self, DecodeError<S>>;
    /// Decode the value and typecheck
    ///
    /// This should not be overridden; it uses `type_check` in combination
    /// with `raw_decode`.
    fn decode(value: &Value<S>, ctx: &mut Context<S>)
        -> Result<Self, DecodeError<S>>
    {
        Self::type_check(&value.type_name, ctx);
        Self::raw_decode(&value.literal, ctx)
    }
}
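
// Illustrative sketch, not part of the crate source: a newtype wrapper that
// delegates scalar decoding to `String`, in the "emulated subclassing" style
// hinted at by the `raw_decode` docs above.  It assumes the crate provides
// `impl DecodeScalar<S> for String` (not shown in this file); the `UserName`
// type itself is hypothetical.
#[allow(dead_code)]
mod decode_scalar_example {
    use super::{DecodeScalar, ErrorSpan};
    use crate::ast::{Literal, TypeName};
    use crate::decode::Context;
    use crate::errors::DecodeError;
    use crate::span::Spanned;

    struct UserName(String);

    impl<S: ErrorSpan> DecodeScalar<S> for UserName {
        fn type_check(type_name: &Option<Spanned<TypeName, S>>,
                      ctx: &mut Context<S>)
        {
            // Accept exactly the type annotations that `String` accepts.
            <String as DecodeScalar<S>>::type_check(type_name, ctx)
        }
        fn raw_decode(value: &Spanned<Literal, S>, ctx: &mut Context<S>)
            -> Result<Self, DecodeError<S>>
        {
            // Parse as a plain string, then wrap it in the newtype.
            <String as DecodeScalar<S>>::raw_decode(value, ctx).map(UserName)
        }
    }
}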

/// The trait that decodes a span into the final structure
pub trait DecodeSpan<S: ErrorSpan>: Sized {
    /// Decode the span
    ///
    /// This method can use some extra data (say, a file name) from the
    /// context. Note that by default the context is empty, and end users are
    /// expected to use [`parse_with_context`](crate::parse_with_context) to
    /// add values to it.
    fn decode_span(span: &S, ctx: &mut Context<S>) -> Self;
}

impl<T: ErrorSpan> DecodeSpan<T> for T {
    fn decode_span(span: &T, _: &mut Context<T>) -> Self {
        span.clone()
    }
}

/// A span must implement this trait to be used in error messages
///
/// Unlike [`Span`], this trait can be implemented for custom span types.
pub trait ErrorSpan: Into<miette::SourceSpan> + Clone + fmt::Debug + Send + Sync + 'static {}
impl<T> ErrorSpan for T
    where T: Into<miette::SourceSpan>,
          T: Clone + fmt::Debug + Send + Sync + 'static,
{}

/// Span trait used for parsing source code
///
/// It's sealed because it needs tight interoperation with the parser. Use
/// [`DecodeSpan`] to convert spans whenever needed.
pub trait Span: sealed::Sealed + chumsky::Span + ErrorSpan {}

#[allow(missing_debug_implementations)]
pub(crate) mod sealed {
    pub type Stream<'a, S, T> = chumsky::Stream< 'a, char, S, Map<std::str::Chars<'a>, T> >;

    pub struct Map<I, F>(pub(crate) I, pub(crate) F);

    pub trait SpanTracker {
        type Span;
        fn next_span(&mut self, c: char) -> Self::Span;
    }

    impl<I, T> Iterator for Map<I, T>
        where I: Iterator<Item=char>,
              T: SpanTracker,
    {
        type Item = (char, T::Span);
        fn next(&mut self) -> Option<(char, T::Span)> {
            self.0.next().map(|c| (c, self.1.next_span(c)))
        }
    }

    pub trait Sealed {
        type Tracker: SpanTracker;
        /// Note assuming ascii, single-width, non-newline chars here
        fn at_start(&self, chars: usize) -> Self;
        fn at_end(&self) -> Self;
        /// Note assuming ascii, single-width, non-newline chars here
        fn before_start(&self, chars: usize) -> Self;
        fn length(&self) -> usize;
        fn stream(s: &str) -> Stream<'_, Self, Self::Tracker>
            where Self: chumsky::Span;
    }
}
knuffel-3.2.0/src/wrappers.rs000064400000000000000000000034331046102023000142630ustar 00000000000000
use chumsky::Parser;
use miette::NamedSource;

use crate::ast::Document;
use crate::decode::Context;
use crate::errors::Error;
use crate::grammar;
use crate::span::{Span};
use crate::traits::{self, DecodeChildren};

/// Parse KDL text and return the AST
pub fn parse_ast<S: traits::Span>(file_name: &str, text: &str)
    -> Result<Document<S>, Error>
{
    grammar::document()
        .parse(S::stream(text))
        .map_err(|errors| {
            Error {
                source_code: NamedSource::new(file_name, text.to_string()),
                errors: errors.into_iter().map(Into::into).collect(),
            }
        })
}

/// Parse KDL text and decode a Rust object
pub fn parse<T>(file_name: &str, text: &str) -> Result<T, Error>
    where T: DecodeChildren<Span>,
{
    parse_with_context(file_name, text, |_| {})
}

/// Parse KDL text and decode a Rust object, providing extra context for the
/// decoder
pub fn parse_with_context<T, S, F>(file_name: &str, text: &str, set_ctx: F)
    -> Result<T, Error>
    where F: FnOnce(&mut Context<S>),
          T: DecodeChildren<S>,
          S: traits::Span,
{
    let ast = parse_ast(file_name, text)?;

    let mut ctx = Context::new();
    set_ctx(&mut ctx);
    let errors = match DecodeChildren::decode_children(&ast.nodes, &mut ctx) {
        Ok(_) if ctx.has_errors() => {
            ctx.into_errors()
        }
        Err(e) => {
            ctx.emit_error(e);
            ctx.into_errors()
        }
        Ok(v) => return Ok(v)
    };
    return Err(Error {
        source_code: NamedSource::new(file_name, text.to_string()),
        errors: errors.into_iter().map(Into::into).collect(),
    });
}

#[test]
fn normal() {
    let doc = parse_ast::<Span>("embedded.kdl", r#"node "hello""#).unwrap();
    assert_eq!(doc.nodes.len(), 1);
    assert_eq!(&**doc.nodes[0].node_name, "node");
}
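
// Illustrative sketch, not part of the crate source: the same parse as the
// test above, but tracking line/column positions with `LineSpan`.  It assumes
// the `line-numbers` feature is enabled (only then does `LineSpan` implement
// `traits::Span`, see src/span.rs).
#[cfg(feature="line-numbers")]
#[test]
fn normal_with_line_numbers() {
    use crate::span::LineSpan;
    let doc = parse_ast::<LineSpan>("embedded.kdl", r#"node "hello""#).unwrap();
    assert_eq!(doc.nodes.len(), 1);
    assert_eq!(&**doc.nodes[0].node_name, "node");
    // the single node starts on the first (zero-based) line
    assert_eq!(doc.nodes[0].span().0.line, 0);
}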
knuffel-3.2.0/vagga.yaml000064400000000000000000000043501046102023000132330ustar 00000000000000
commands:

  make: !Command
    description: Build the library and CLI
    container: ubuntu
    run: [cargo, build]

  cargo: !Command
    description: Run arbitrary cargo command
    symlink-name: cargo
    container: ubuntu
    run: [cargo]

  nightly-build: !Command
    description: Run cargo build on nightly
    symlink-name: cargo
    container: nightly
    run: [cargo, build]
    environ:
      RUSTFLAGS: "-Z macro-backtrace"

  expand: !Command
    description: Run cargo expand
    symlink-name: cargo
    container: nightly
    run: [cargo, expand]

  test-rust: !Command
    description: Run test suite
    container: ubuntu
    run: [cargo, test, --workspace]
    volumes:
      /tmp: !Tmpfs
        size: 1Gi

  test-rust-all-features: !Command
    description: Run tests with all features enabled
    container: ubuntu
    run: [cargo, test, --workspace, --all-features]
    volumes:
      /tmp: !Tmpfs
        size: 1Gi

  test: !Command
    description: Run all tests
    container: ubuntu
    prerequisites: [test-rust, test-rust-all-features]
    run: [echo, Ok]

containers:

  ubuntu:
    setup:
    - !Ubuntu jammy
    - !Install [ca-certificates, git, build-essential, vim, pkg-config, libssl-dev]
    - !TarInstall
      url: "https://static.rust-lang.org/dist/rust-1.68.0-x86_64-unknown-linux-gnu.tar.gz"
      script: "./install.sh --prefix=/usr \
               --components=rustc,rust-std-x86_64-unknown-linux-gnu,cargo"
    - !TarInstall
      url: "https://static.rust-lang.org/dist/rust-std-1.68.0-wasm32-unknown-unknown.tar.gz"
      script: "./install.sh --prefix=/usr --components=rust-std-wasm32-unknown-unknown"
    - !Sh 'cargo install cargo-release cargo-audit cargo-outdated --root=/usr'
    environ:
      HOME: /work/target
      LANG: C.UTF-8
      RUST_BACKTRACE: 1

  nightly:
    setup:
    - !Ubuntu focal
    - !Install [ca-certificates, git, build-essential, vim]
    - !TarInstall
      url: "https://static.rust-lang.org/dist/rust-nightly-x86_64-unknown-linux-gnu.tar.gz"
      script: "./install.sh --prefix=/usr \
               --components=rustc,rust-std-x86_64-unknown-linux-gnu,cargo"
    - !Sh 'cargo install cargo-expand --root=/usr'
    environ:
      HOME: /work/target
      RUST_BACKTRACE: 1