protox-0.9.0/.cargo_vcs_info.json0000644000000001440000000000100123740ustar { "git": { "sha1": "eaca3f3d9d4a87f4cf2145da52a62960dcd40492" }, "path_in_vcs": "protox" }protox-0.9.0/Cargo.lock0000644000000635240000000000100103620ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "addr2line" version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] [[package]] name = "adler2" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "anstream" version = "0.6.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "301af1932e46185686725e0fad2f8f2aa7da69dd70bf6ecc44d6b703844a3933" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" [[package]] name = "anstyle-parse" version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c8bdeb6047d8983be085bab0ba1472e6dc604e7041dbf6fcd5e71523014fae9" dependencies = [ "windows-sys", ] [[package]] name = "anstyle-wincon" version = "3.0.9" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "403f75924867bb1033c59fbf0797484329750cfbe3c4325cd33127941fabc882" dependencies = [ "anstyle", "once_cell_polyfill", "windows-sys", ] [[package]] name = "anyhow" version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" [[package]] name = "autocfg" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "backtrace" version = "0.3.75" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" dependencies = [ "addr2line", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", "windows-targets", ] [[package]] name = "backtrace-ext" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50" dependencies = [ "backtrace", ] [[package]] name = "base64" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "beef" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" [[package]] name = "bitflags" version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" [[package]] name = "bstr" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4" dependencies = [ "memchr", "regex-automata", "serde", ] [[package]] name = "bytes" version = "1.10.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" [[package]] name = "cfg-if" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" [[package]] name = "clap" version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f" dependencies = [ "clap_builder", "clap_derive", ] [[package]] name = "clap_builder" version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e" dependencies = [ "anstream", "anstyle", "clap_lex", "strsim", ] [[package]] name = "clap_derive" version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce" dependencies = [ "heck", "proc-macro2", "quote", "syn", ] [[package]] name = "clap_lex" version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" [[package]] name = "colorchoice" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" [[package]] name = "console" version = "0.15.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" dependencies = [ "encode_unicode", "libc", "once_cell", "windows-sys", ] [[package]] name = "either" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" [[package]] name = "encode_unicode" version = "1.0.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "equivalent" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18" dependencies = [ "libc", "windows-sys", ] [[package]] name = "fastrand" version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "fixedbitset" version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "getrandom" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", "libc", "r-efi", "wasi", ] [[package]] name = "gimli" version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "hashbrown" version = "0.15.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" [[package]] name = "heck" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "indexmap" version = "2.9.0" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" dependencies = [ "equivalent", "hashbrown", ] [[package]] name = "insta" version = "1.43.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "154934ea70c58054b556dd430b99a98c2a7ff5309ac9891597e339b5c28f4371" dependencies = [ "console", "once_cell", "serde", "similar", ] [[package]] name = "is_ci" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45" [[package]] name = "is_terminal_polyfill" version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" dependencies = [ "either", ] [[package]] name = "itoa" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" version = "0.2.173" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8cfeafaffdbc32176b64fb251369d52ea9f0a8fbc6f8759edffef7b525d64bb" [[package]] name = "linux-raw-sys" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" [[package]] name = "log" version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "logos" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab6f536c1af4c7cc81edf73da1f8029896e7e1e16a219ef09b184e76a296f3db" dependencies = [ "logos-derive", ] [[package]] name = "logos-codegen" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "189bbfd0b61330abea797e5e9276408f2edbe4f822d7ad08685d67419aafb34e" dependencies = [ "beef", "fnv", "lazy_static", "proc-macro2", "quote", "regex-syntax", "rustc_version", "syn", ] [[package]] name = "logos-derive" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebfe8e1a19049ddbfccbd14ac834b215e11b85b90bab0c2dba7c7b92fb5d5cba" dependencies = [ "logos-codegen", ] [[package]] name = "memchr" version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" [[package]] name = "miette" version = "7.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f98efec8807c63c752b5bd61f862c165c115b0a35685bdcfd9238c7aeb592b7" dependencies = [ "backtrace", "backtrace-ext", "cfg-if", "miette-derive", "owo-colors", "supports-color", "supports-hyperlinks", "supports-unicode", "terminal_size", "textwrap", "unicode-width 0.1.14", ] [[package]] name = "miette-derive" version = "7.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "miniz_oxide" version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", ] [[package]] name = "multimap" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084" [[package]] name = "num-traits" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", ] [[package]] name = "object" version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "memchr", ] [[package]] name = "once_cell" version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "once_cell_polyfill" version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" [[package]] name = "ordered-float" version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" dependencies = [ "num-traits", ] [[package]] name = "owo-colors" version = "4.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26995317201fa17f3656c36716aed4a7c81743a9634ac4c99c0eeda495db0cec" [[package]] name = "petgraph" version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" dependencies = [ "fixedbitset", "indexmap", ] [[package]] name = "prettyplease" version = "0.2.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6837b9e10d61f45f987d50808f83d1ee3d206c66acf650c3e4ae2e1f6ddedf55" dependencies = [ "proc-macro2", "syn", ] [[package]] name = "proc-macro2" version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] [[package]] name = "prost" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbe28332295ca4451b7d779aff2749b144cabe5e6e05fe86f31337831d7df232" dependencies = [ "bytes", "prost-derive", ] [[package]] name = "prost-build" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8189daf915866b24b097b7775fbdd5a105cc3a44da13637d4b0961be73e8bc2d" dependencies = [ "heck", "itertools", "log", "multimap", "once_cell", "petgraph", "prettyplease", "prost", "prost-types", "regex", "syn", "tempfile", ] [[package]] name = "prost-derive" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26ac3e3c6b0e1c219e61ceda600eaad26d7195ecc9b5c027925c904091374ab5" dependencies = [ "anyhow", "itertools", "proc-macro2", "quote", "syn", ] [[package]] name = "prost-reflect" version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6e6b545c556471033fc1099868fca468bff8cf034f7bb9153b445f24d00aa28d" dependencies = [ "base64", "logos", "miette", "prost", "prost-types", "serde", "serde-value", ] [[package]] name = "prost-types" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2bfff0dbd11dbadf180fea466aa146cdf20aed230e1c42b8bae192df8f0469a" dependencies = [ "prost", ] [[package]] name = "protox" version = "0.9.0" dependencies = [ "bytes", "clap", "insta", "miette", "once_cell", "prost", "prost-build", "prost-reflect", "prost-types", "protox-parse", "scopeguard", "serde_json", "serde_yaml", "similar-asserts", "tempfile", "thiserror", ] [[package]] name = "protox-parse" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "072eee358134396a4643dff81cfff1c255c9fbd3fb296be14bdb6a26f9156366" dependencies = [ "logos", "miette", "prost-types", "thiserror", 
] [[package]] name = "quote" version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] [[package]] name = "r-efi" version = "5.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" [[package]] name = "regex" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rustc-demangle" version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f" [[package]] name = "rustc_version" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver", ] [[package]] name = "rustix" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", "windows-sys", ] [[package]] name = "ryu" version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "scopeguard" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "semver" version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" [[package]] name = "serde" version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" dependencies = [ "serde_derive", ] [[package]] name = "serde-value" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" dependencies = [ "ordered-float", "serde", ] [[package]] name = "serde_derive" version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "serde_json" version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" dependencies = [ "itoa", "memchr", "ryu", "serde", ] [[package]] name = "serde_yaml" version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ "indexmap", "itoa", "ryu", "serde", "unsafe-libyaml", ] [[package]] name = "similar" version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" dependencies = [ "bstr", "unicode-segmentation", ] [[package]] name = "similar-asserts" version = "1.7.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "b5b441962c817e33508847a22bd82f03a30cff43642dc2fae8b050566121eb9a" dependencies = [ "console", "similar", ] [[package]] name = "strsim" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "supports-color" version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6" dependencies = [ "is_ci", ] [[package]] name = "supports-hyperlinks" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "804f44ed3c63152de6a9f90acbea1a110441de43006ea51bcce8f436196a288b" [[package]] name = "supports-unicode" version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" [[package]] name = "syn" version = "2.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e4307e30089d6fd6aff212f2da3a1f9e32f3223b1f010fb09b7c95f90f3ca1e8" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "tempfile" version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" dependencies = [ "fastrand", "getrandom", "once_cell", "rustix", "windows-sys", ] [[package]] name = "terminal_size" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed" dependencies = [ "rustix", "windows-sys", ] [[package]] name = "textwrap" version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" dependencies = [ "unicode-linebreak", "unicode-width 
0.2.1", ] [[package]] name = "thiserror" version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "unicode-ident" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "unicode-linebreak" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" [[package]] name = "unicode-segmentation" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] name = "unicode-width" version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "unicode-width" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" [[package]] name = "unsafe-libyaml" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" [[package]] name = "utf8parse" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "wasi" version = "0.14.2+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" dependencies = [ "wit-bindgen-rt", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", "windows_i686_gnullvm", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = 
"windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "wit-bindgen-rt" version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ "bitflags", ] protox-0.9.0/Cargo.toml0000644000000074210000000000100103770ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" rust-version = "1.74.0" name = "protox" version = "0.9.0" authors = ["Andrew Hickman "] build = false include = [ "LICENSE-APACHE", "LICENSE-MIT", "src/**/*.rs", "tests", ] autolib = false autobins = false autoexamples = false autotests = false autobenches = false description = "A rust implementation of the protobuf compiler." 
documentation = "https://docs.rs/protox" readme = "README.md" keywords = [ "protobuf", "serialization", ] categories = [ "compilers", "encoding", "development-tools::build-utils", ] license = "MIT OR Apache-2.0" repository = "https://github.com/andrewhickman/protox" [package.metadata.docs.rs] all-features = true rustdoc-args = [ "--cfg", "docsrs", ] [package.metadata.release] pre-release-commit-message = "(cargo-release) {{crate_name}} version {{version}}" sign-commit = true sign-tag = true tag-name = "{{version}}" [[package.metadata.release.pre-release-replacements]] exactly = 1 file = "../README.md" replace = "https://deps.rs/crate/protox/{{version}}/status.svg" search = 'https://deps\.rs/crate/protox/[^/]*/status.svg' [[package.metadata.release.pre-release-replacements]] file = "../README.md" min = 0 replace = "https://docs.rs/protox/{{version}}/protox" search = 'https://docs\.rs/protox/[^/]*/protox' [[package.metadata.release.pre-release-replacements]] exactly = 1 file = "src/lib.rs" replace = 'html_root_url = "https://docs.rs/protox/{{version}}/"' search = 'html_root_url = "https://docs.rs/protox/[^/]*/"' [[package.metadata.release.pre-release-replacements]] exactly = 1 file = "../CHANGELOG.md" replace = """ ## [Unreleased] ## [{{version}}] - {{date}}""" search = '## \[Unreleased\]' [[package.metadata.release.pre-release-replacements]] exactly = 1 file = "../CHANGELOG.md" replace = """ [Unreleased]: https://github.com/andrewhickman/protox/compare/{{version}}...HEAD [{{version}}]: https://github.com/andrewhickman/protox/compare/{{prev_version}}...{{version}}""" search = '\[Unreleased\]: https://github\.com/andrewhickman/protox/compare/.*?...HEAD' [features] bin = [ "dep:clap", "miette/fancy", ] [lib] name = "protox" path = "src/lib.rs" [[bin]] name = "protox" path = "src/main.rs" required-features = ["bin"] [[test]] name = "compare" path = "tests/compare.rs" [[test]] name = "compiler" path = "tests/compiler.rs" [[test]] name = "wkt" path = "tests/wkt.rs" 
[dependencies.bytes] version = "1.6.0" [dependencies.clap] version = "4.5.4" features = ["derive"] optional = true [dependencies.miette] version = "7.2.0" [dependencies.prost] version = "0.14.0" [dependencies.prost-reflect] version = "0.16.0" features = [ "miette", "text-format", ] [dependencies.prost-types] version = "0.14.0" [dependencies.protox-parse] version = "0.9.0" [dependencies.thiserror] version = "2.0.0" [dev-dependencies.insta] version = "1.39.0" features = ["yaml"] [dev-dependencies.once_cell] version = "1.12.0" default-features = false [dev-dependencies.prost-build] version = "0.14.0" [dev-dependencies.prost-reflect] version = "0.16.0" features = ["serde"] [dev-dependencies.scopeguard] version = "1.1.0" [dev-dependencies.serde_json] version = "1.0.117" [dev-dependencies.serde_yaml] version = "0.9.34" [dev-dependencies.similar-asserts] version = "1.2.0" [dev-dependencies.tempfile] version = "3.10.1" protox-0.9.0/Cargo.toml.orig000064400000000000000000000053551046102023000140640ustar 00000000000000[package] name = "protox" description = "A rust implementation of the protobuf compiler." 
keywords = ["protobuf", "serialization"] categories = ["compilers", "encoding", "development-tools::build-utils"] version = "0.9.0" authors = ["Andrew Hickman "] repository = "https://github.com/andrewhickman/protox" documentation = "https://docs.rs/protox" license = "MIT OR Apache-2.0" readme = "../README.md" edition = "2021" rust-version = "1.74.0" include = [ "LICENSE-APACHE", "LICENSE-MIT", "src/**/*.rs", "tests", ] [[bin]] name = "protox" path = "src/main.rs" required-features = ["bin"] [features] bin = ["dep:clap", "miette/fancy"] [dependencies] bytes = "1.6.0" clap = { version = "4.5.4", features = ["derive"], optional = true } miette = "7.2.0" prost = "0.14.0" prost-reflect = { version = "0.16.0", features = ["miette", "text-format"] } prost-types = "0.14.0" protox-parse = { version = "0.9.0", path = "../protox-parse" } thiserror = "2.0.0" [dev-dependencies] insta = { version = "1.39.0", features = ["yaml"] } prost-build = { version = "0.14.0" } prost-reflect = { version = "0.16.0", features = ["serde"] } once_cell = { version = "1.12.0", default-features = false } scopeguard = "1.1.0" serde_yaml = "0.9.34" similar-asserts = { version = "1.2.0" } tempfile = "3.10.1" serde_json = "1.0.117" # tonic-build = "0.12.3" [package.metadata.release] tag-name = "{{version}}" sign-tag = true sign-commit = true pre-release-commit-message = "(cargo-release) {{crate_name}} version {{version}}" [[package.metadata.release.pre-release-replacements]] file = "../README.md" search = "https://deps\\.rs/crate/protox/[^/]*/status.svg" replace = "https://deps.rs/crate/protox/{{version}}/status.svg" exactly = 1 [[package.metadata.release.pre-release-replacements]] file = "../README.md" search = "https://docs\\.rs/protox/[^/]*/protox" replace = "https://docs.rs/protox/{{version}}/protox" min = 0 [[package.metadata.release.pre-release-replacements]] file = "src/lib.rs" search = "html_root_url = \"https://docs.rs/protox/[^/]*/\"" replace = "html_root_url = 
\"https://docs.rs/protox/{{version}}/\"" exactly = 1 [[package.metadata.release.pre-release-replacements]] file = "../CHANGELOG.md" search = "## \\[Unreleased\\]" replace = "## [Unreleased]\n\n## [{{version}}] - {{date}}" exactly = 1 [[package.metadata.release.pre-release-replacements]] file = "../CHANGELOG.md" search = "\\[Unreleased\\]: https://github\\.com/andrewhickman/protox/compare/.*?...HEAD" replace = "[Unreleased]: https://github.com/andrewhickman/protox/compare/{{version}}...HEAD\n[{{version}}]: https://github.com/andrewhickman/protox/compare/{{prev_version}}...{{version}}" exactly = 1 [package.metadata.docs.rs] all-features = true rustdoc-args = ["--cfg", "docsrs"] protox-0.9.0/LICENSE-APACHE000064400000000000000000000251371046102023000131210ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. 
Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. protox-0.9.0/LICENSE-MIT000064400000000000000000000017771046102023000126350ustar 00000000000000Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
protox-0.9.0/README.md000064400000000000000000000064711046102023000124540ustar 00000000000000[![crates.io](https://img.shields.io/crates/v/protox.svg)](https://crates.io/crates/protox/) [![docs.rs](https://docs.rs/protox/badge.svg)](https://docs.rs/protox/) [![deps.rs](https://deps.rs/crate/protox/0.8.0/status.svg)](https://deps.rs/crate/protox) ![MSRV](https://img.shields.io/badge/rustc-1.74+-blue.svg) [![Continuous integration](https://github.com/andrewhickman/protox/actions/workflows/ci.yml/badge.svg)](https://github.com/andrewhickman/protox/actions/workflows/ci.yml) [![codecov](https://codecov.io/gh/andrewhickman/protox/branch/main/graph/badge.svg?token=9YKHGUUPUX)](https://codecov.io/gh/andrewhickman/protox) ![Apache 2.0 OR MIT licensed](https://img.shields.io/badge/license-Apache2.0%2FMIT-blue.svg) # protox An implementation of the protobuf compiler in rust, intended for use as a library with crates such as [`prost-build`](https://crates.io/crates/prost-build) to avoid needing to build `protoc`. ## Examples Compiling a single source file: ```rust assert_eq!(protox::compile(["root.proto"], ["."]).unwrap(), FileDescriptorSet { file: vec![ FileDescriptorProto { name: Some("root.proto".to_owned()), /* ... */ } ], }); ``` Usage with [`prost-build`](https://crates.io/crates/prost-build): ```rust let file_descriptors = protox::compile(["root.proto"], ["."]).unwrap(); prost_build::compile_fds(file_descriptors).unwrap(); ``` Usage with [`tonic-build`](https://crates.io/crates/tonic-build): ```rust let file_descriptors = protox::compile(["root.proto"], ["."]).unwrap(); tonic_build::configure() .build_server(true) .compile_fds(file_descriptors) .unwrap(); ``` ### Error messages This crate uses [`miette`](https://crates.io/crates/miette) to add additional details to errors. For nice error messages, add `miette` as a dependency with the `fancy` feature enabled and return a [`miette::Result`](https://docs.rs/miette/latest/miette/type.Result.html) from your build script. 
```rust fn main() -> miette::Result<()> { let _ = protox::compile(["root.proto"], ["."])?; Ok(()) } ``` Example error message: ``` Error: × name 'Bar' is not defined ╭─[root.proto:3:1] 3 │ message Foo { 4 │ Bar bar = 1; · ─┬─ · ╰── found here 5 │ } ╰──── ``` ## Minimum Supported Rust Version Rust **1.74** or higher. The minimum supported Rust version may be changed in the future, but it will be done with a minor version bump. ## License Licensed under either of * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) at your option. This project includes code imported from the Protocol Buffers project, which is included under its original ([BSD][2]) license. [2]: https://github.com/protocolbuffers/protobuf/blob/master/LICENSE ## Contribution Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. ## Related projects * [prost](https://crates.io/crates/prost) - a protocol buffers implementation for the Rust Language * [protoxy](https://github.com/tardyp/protoxy) - python bindings for protoxprotox-0.9.0/src/compile/mod.rs000064400000000000000000000256371046102023000145460ustar 00000000000000use std::{ collections::{HashMap, HashSet}, fmt::{self, Write}, path::{Path, PathBuf}, }; use prost::Message; use prost_reflect::{DescriptorPool, DynamicMessage, ReflectMessage, Value}; use prost_types::{FileDescriptorProto, FileDescriptorSet}; use crate::{ error::{Error, ErrorKind}, file::{check_shadow, path_to_file_name, File, FileMetadata, FileResolver}, }; #[cfg(test)] mod tests; /// Options for compiling protobuf files. 
/// /// # Examples /// /// ``` /// # use std::fs; /// # use prost_types::{ /// # DescriptorProto, FieldDescriptorProto, field_descriptor_proto::{Label, Type}, FileDescriptorSet, FileDescriptorProto, /// # SourceCodeInfo, source_code_info::Location /// # }; /// # use protox::Compiler; /// # fn main() -> Result<(), protox::Error> { /// # let tempdir = tempfile::TempDir::new().unwrap(); /// # std::env::set_current_dir(&tempdir).unwrap(); /// # /// fs::write("bar.proto", " /// message Bar { } /// ").unwrap(); /// /// let file_descriptor_set = Compiler::new(["."])? /// .include_imports(true) /// .include_source_info(false) /// .open_file("bar.proto")? /// .file_descriptor_set(); /// /// assert_eq!(file_descriptor_set.file[0].message_type[0].name(), "Bar"); /// # Ok(()) /// # } /// ``` pub struct Compiler { pool: DescriptorPool, resolver: Box, files: HashMap, include_imports: bool, include_source_info: bool, } impl Compiler { /// Creates a new [`Compiler`] with default options and the given set of include paths. /// /// In addition to the given include paths, the [`Compiler`] instance will be able to import /// standard files like `google/protobuf/descriptor.proto`. pub fn new(includes: I) -> Result where I: IntoIterator, P: AsRef, { use crate::file::{ChainFileResolver, GoogleFileResolver, IncludeFileResolver}; let mut resolver = ChainFileResolver::new(); for include in includes { resolver.add(IncludeFileResolver::new(include.as_ref().to_owned())); } resolver.add(GoogleFileResolver::new()); Ok(Compiler::with_file_resolver(resolver)) } /// Creates a new [`Compiler`] with a custom [`FileResolver`] for looking up imported files. pub fn with_file_resolver(resolver: R) -> Self where R: FileResolver + 'static, { Compiler { pool: DescriptorPool::new(), resolver: Box::new(resolver), files: HashMap::new(), include_imports: false, include_source_info: false, } } /// Sets whether the output `FileDescriptorSet` should include source info. 
/// /// If set, the file descriptors returned by [`file_descriptor_set`](Compiler::file_descriptor_set) will have /// the [`FileDescriptorProto::source_code_info`](prost_types::FileDescriptorProto::source_code_info) field /// populated with source locations and comments. pub fn include_source_info(&mut self, yes: bool) -> &mut Self { self.include_source_info = yes; self } /// Sets whether the output `FileDescriptorSet` should include imported files. /// /// By default, only files explicitly added with [`open_file`](Compiler::open_file) are returned by [`file_descriptor_set`](Compiler::file_descriptor_set). /// If this option is set, imported files are included too. pub fn include_imports(&mut self, yes: bool) -> &mut Self { self.include_imports = yes; self } /// Compiles the file at the given path, and adds it to this `Compiler` instance. /// /// If the path is absolute, or relative to the current directory, it must reside under one of the /// include paths. Otherwise, it is looked up relative to the given include paths in the same way as /// `import` statements. 
pub fn open_file(&mut self, path: impl AsRef) -> Result<&mut Self, Error> { let path = path.as_ref(); let (name, is_resolved) = if let Some(name) = self.resolver.resolve_path(path) { (name, true) } else if let Some(name) = path_to_file_name(path) { (name, false) } else { return Err(Error::from_kind(ErrorKind::FileNotIncluded { path: path.to_owned(), })); }; if let Some(file_metadata) = self.files.get_mut(&name) { if is_resolved { check_shadow(&name, file_metadata.path(), path)?; } file_metadata.is_import = false; return Ok(self); } let file = self.resolver.open_file(&name).map_err(|err| { if err.is_file_not_found() { Error::from_kind(ErrorKind::FileNotIncluded { path: path.to_owned(), }) } else { err } })?; if is_resolved { check_shadow(&name, file.path(), path)?; } let mut import_stack = vec![name.clone()]; let mut already_imported = HashSet::new(); for (i, import) in file.descriptor.dependency.iter().enumerate() { if !already_imported.insert(import) { return Err(Error::duplicated_import(import.to_owned(), &file, i)); } self.add_import(import, &mut import_stack) .map_err(|e| e.into_import_error(&file, i))?; } let path = self.check_file(file)?; self.files.insert( name.clone(), FileMetadata { name, path, is_import: false, }, ); Ok(self) } /// Compiles the given files, and adds them to this `Compiler` instance. /// /// See [`open_file()`][Compiler::open_file()]. pub fn open_files( &mut self, paths: impl IntoIterator>, ) -> Result<&mut Self, Error> { for path in paths { self.open_file(path)?; } Ok(self) } /// Converts all added files into an instance of [`FileDescriptorSet`](prost_types::FileDescriptorSet). /// /// Files are sorted topologically, with dependency files ordered before the files that import them. 
pub fn file_descriptor_set(&self) -> prost_types::FileDescriptorSet { let file = self .pool .files() .filter(|f| self.include_imports || !self.files[f.name()].is_import) .map(|f| { if self.include_source_info { f.file_descriptor_proto().clone() } else { prost_types::FileDescriptorProto { source_code_info: None, ..f.file_descriptor_proto().clone() } } }) .collect(); prost_types::FileDescriptorSet { file } } /// Converts all added files into an instance of [`FileDescriptorSet`](prost_types::FileDescriptorSet) and encodes it. /// /// This is equivalent to `file_descriptor_set()?.encode_to_vec()`, with the exception that extension /// options are included. pub fn encode_file_descriptor_set(&self) -> Vec { if self.include_imports && self.include_source_info { // Avoid reflection if possible. return self.pool.encode_to_vec(); } let file_desc = FileDescriptorProto::default().descriptor(); let files = self .pool .files() .filter(|f| self.include_imports || !self.files[f.name()].is_import) .map(|f| { let file_buf = f.encode_to_vec(); let mut file_msg = DynamicMessage::decode(file_desc.clone(), file_buf.as_slice()).unwrap(); if !self.include_source_info { file_msg.clear_field_by_name("source_code_info"); } Value::Message(file_msg) }) .collect(); let mut file_descriptor_set = FileDescriptorSet::default().transcode_to_dynamic(); file_descriptor_set.set_field_by_name("file", Value::List(files)); file_descriptor_set.encode_to_vec() } /// Gets a copy of the [`DescriptorPool`] containing all referenced files. pub fn descriptor_pool(&self) -> DescriptorPool { self.pool.clone() } /// Gets a reference to all imported source files. /// /// The files will appear in topological order, so each file appears before any file that imports it. 
pub fn files(&self) -> impl ExactSizeIterator { self.pool.files().map(|f| &self.files[f.name()]) } fn add_import(&mut self, file_name: &str, import_stack: &mut Vec) -> Result<(), Error> { if import_stack.iter().any(|name| name == file_name) { let mut cycle = String::new(); for import in import_stack { write!(&mut cycle, "{} -> ", import).unwrap(); } write!(&mut cycle, "{}", file_name).unwrap(); return Err(Error::from_kind(ErrorKind::CircularImport { name: file_name.to_owned(), cycle, })); } if self.files.contains_key(file_name) { return Ok(()); } let file = self.resolver.open_file(file_name)?; import_stack.push(file_name.to_owned()); let mut already_imported = HashSet::new(); for (i, import) in file.descriptor.dependency.iter().enumerate() { if !already_imported.insert(import) { return Err(Error::duplicated_import(import.to_owned(), &file, i)); } self.add_import(import, import_stack) .map_err(|e| e.into_import_error(&file, i))?; } import_stack.pop(); let path = self.check_file(file)?; self.files.insert( file_name.to_owned(), FileMetadata { name: file_name.to_owned(), path, is_import: true, }, ); Ok(()) } fn check_file( &mut self, File { path, source, descriptor, encoded, }: File, ) -> Result, Error> { if let Some(encoded) = &encoded { self.pool.decode_file_descriptor_proto(encoded.clone()) } else { self.pool.add_file_descriptor_proto(descriptor) } .map_err(|mut err| { if let Some(source) = source { err = err.with_source_code(&source); } err })?; Ok(path) } } impl fmt::Debug for Compiler { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Compiler") .field("include_imports", &self.include_imports) .field("include_source_info", &self.include_source_info) .finish_non_exhaustive() } } protox-0.9.0/src/compile/tests.rs000064400000000000000000000701401046102023000151160ustar 00000000000000use std::{fs, iter::once}; use tempfile::TempDir; use super::*; const EMPTY: &[u8] = &[]; const INVALID_UTF8: &[u8] = &[255]; fn with_current_dir(path: impl 
AsRef, f: impl FnOnce()) { use std::{ env::{current_dir, set_current_dir}, sync::Mutex, }; use once_cell::sync::Lazy; use scopeguard::defer; static CURRENT_DIR_LOCK: Lazy> = Lazy::new(Default::default); let _lock = CURRENT_DIR_LOCK .lock() .unwrap_or_else(|err| err.into_inner()); let prev_dir = current_dir().unwrap(); defer!({ let _ = set_current_dir(prev_dir); }); set_current_dir(path).unwrap(); f(); } fn test_compile_success(include: impl AsRef, file: impl AsRef, name: &str) { let include = include.as_ref(); let file = file.as_ref(); std::fs::create_dir_all(include).unwrap(); if let Some(parent) = include.join(name).parent() { std::fs::create_dir_all(parent).unwrap(); } std::fs::write(include.join(name), EMPTY).unwrap(); let mut compiler = Compiler::new(once(include)).unwrap(); compiler.open_file(file).unwrap(); assert_eq!(compiler.files().len(), 1); assert_eq!(compiler.descriptor_pool().files().len(), 1); assert_eq!( compiler.file_descriptor_set().file[0], prost_types::FileDescriptorProto { name: Some(name.to_owned()), ..Default::default() } ); assert_eq!( compiler.files[name].path(), Some(include.join(name).as_ref()) ); } fn test_compile_error( include: impl AsRef, file: impl AsRef, name: &str, expected_err: ErrorKind, ) { let include = include.as_ref(); let file = file.as_ref(); std::fs::create_dir_all(include).unwrap(); if let Some(parent) = include.join(name).parent() { std::fs::create_dir_all(parent).unwrap(); } std::fs::write(include.join(name), EMPTY).unwrap(); let mut compiler = Compiler::new(once(include)).unwrap(); let err = compiler.open_file(file).unwrap_err(); match (err.kind(), &expected_err) { ( ErrorKind::FileNotIncluded { path: lpath }, ErrorKind::FileNotIncluded { path: rpath }, ) => assert_eq!(lpath, rpath), (err, _) => panic!("unexpected error: {}", err), } assert_eq!(compiler.files().len(), 0); } #[test] fn abs_include_simple_file() { let dir = TempDir::new().unwrap(); test_compile_success(dir.path(), "foo.proto", "foo.proto"); } #[test] fn 
abs_include_simple_subdir_file() { let dir = TempDir::new().unwrap(); test_compile_success(dir.path(), "dir/foo.proto", "dir/foo.proto"); } #[test] fn abs_include_rel_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success(dir.path(), "foo.proto", "foo.proto"); }) } #[test] fn abs_include_rel_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success( dir.path(), Path::new("dir").join("foo.proto"), "dir/foo.proto", ); }) } #[test] fn abs_include_abs_file() { let dir = TempDir::new().unwrap(); test_compile_success(dir.path(), dir.path().join("foo.proto"), "foo.proto"); } #[test] fn abs_include_abs_subdir_file() { let dir = TempDir::new().unwrap(); test_compile_success( dir.path(), dir.path().join("dir").join("foo.proto"), "dir/foo.proto", ); } #[test] fn abs_include_dot_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( dir.path(), Path::new(".").join("foo.proto"), "foo.proto", ErrorKind::FileNotIncluded { path: Path::new(".").join("foo.proto"), }, ) }) } #[test] fn abs_include_dot_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( dir.path(), Path::new(".").join("dir").join("foo.proto"), "dir/foo.proto", ErrorKind::FileNotIncluded { path: Path::new(".").join("dir").join("foo.proto"), }, ) }) } #[test] fn abs_subdir_include_simple_file() { let dir = TempDir::new().unwrap(); test_compile_success(dir.path().join("include"), "foo.proto", "foo.proto"); } #[test] fn abs_subdir_include_simple_subdir_file() { let dir = TempDir::new().unwrap(); test_compile_success(dir.path().join("include"), "dir/foo.proto", "dir/foo.proto"); } #[test] fn abs_subdir_include_rel_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( dir.path().join("include"), Path::new("include").join("foo.proto"), "foo.proto", ErrorKind::FileNotIncluded { path: Path::new("include").join("foo.proto"), }, 
); }); } #[test] fn abs_subdir_include_rel_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( dir.path().join("include"), Path::new("include").join("dir").join("foo.proto"), "dir/foo.proto", ErrorKind::FileNotIncluded { path: Path::new("include").join("dir").join("foo.proto"), }, ); }); } #[test] fn abs_subdir_include_abs_file() { let dir = TempDir::new().unwrap(); test_compile_success(&dir, dir.path().join("foo.proto"), "foo.proto"); } #[test] fn abs_subdir_include_abs_subdir_file() { let dir = TempDir::new().unwrap(); test_compile_success( dir.path().join("include"), dir.path().join("include").join("foo.proto"), "foo.proto", ); } #[test] fn abs_subdir_include_dot_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( dir.path().join("include"), Path::new(".").join("include").join("foo.proto"), "foo.proto", ErrorKind::FileNotIncluded { path: Path::new(".").join("include").join("foo.proto"), }, ); }); } #[test] fn abs_subdir_include_dot_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( dir.path().join("include"), Path::new(".").join("include").join("dir").join("foo.proto"), "dir/foo.proto", ErrorKind::FileNotIncluded { path: Path::new(".").join("include").join("dir").join("foo.proto"), }, ); }); } #[test] fn abs_include_complex_file() { let dir = TempDir::new().unwrap(); test_compile_error( &dir, dir.path() .join("dir") .join("..") .join("dir") .join("foo.proto"), "dir/foo.proto", ErrorKind::FileNotIncluded { path: dir .path() .join("dir") .join("..") .join("dir") .join("foo.proto"), }, ); } #[test] fn abs_subdir_include_complex_file() { let dir = TempDir::new().unwrap(); test_compile_error( dir.path().join("include"), dir.path() .join("include") .join("..") .join("include") .join("foo.proto"), "foo.proto", ErrorKind::FileNotIncluded { path: dir .path() .join("include") .join("..") .join("include") .join("foo.proto"), }, ); } #[test] 
fn rel_subdir_include_simple_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success("include", "foo.proto", "foo.proto"); }); } #[test] fn rel_subdir_include_simple_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success("include", "dir/foo.proto", "dir/foo.proto"); }); } #[test] fn rel_subdir_include_rel_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success( "include", Path::new("include").join("foo.proto"), "foo.proto", ); }); } #[test] fn rel_subdir_include_rel_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success( "include", Path::new("include").join("dir").join("foo.proto"), "dir/foo.proto", ); }); } #[test] fn rel_subdir_include_abs_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( "include", dir.path().join("foo.proto"), "foo.proto", ErrorKind::FileNotIncluded { path: dir.path().join("foo.proto"), }, ); }); } #[test] fn rel_subdir_include_abs_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( "include", dir.path().join("dir").join("foo.proto"), "dir/foo.proto", ErrorKind::FileNotIncluded { path: dir.path().join("dir").join("foo.proto"), }, ); }); } #[test] fn rel_subdir_include_dot_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success( "include", Path::new(".").join("include").join("foo.proto"), "foo.proto", ); }); } #[test] fn rel_subdir_include_dot_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success( "include", Path::new(".").join("include").join("dir").join("foo.proto"), "dir/foo.proto", ); }); } #[test] fn rel_subdir_include_complex_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( "include", Path::new("include") .join("..") .join("include") .join("foo.proto"), "foo.proto", 
ErrorKind::FileNotIncluded { path: Path::new("include") .join("..") .join("include") .join("foo.proto"), }, ); }); } #[test] fn dot_include_simple_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success(".", "foo.proto", "foo.proto"); }); } #[test] fn dot_include_simple_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success(".", "dir/foo.proto", "dir/foo.proto"); }); } #[test] fn dot_include_rel_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success(".", "foo.proto", "foo.proto"); }); } #[test] fn dot_include_rel_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success(".", Path::new("dir").join("foo.proto"), "dir/foo.proto"); }); } #[test] fn dot_include_abs_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( ".", dir.path().join("foo.proto"), "foo.proto", ErrorKind::FileNotIncluded { path: dir.path().join("foo.proto"), }, ); }); } #[test] fn dot_include_abs_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( ".", dir.path().join("dir").join("foo.proto"), "dir/foo.proto", ErrorKind::FileNotIncluded { path: dir.path().join("dir").join("foo.proto"), }, ); }); } #[test] fn dot_include_dot_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success(".", Path::new(".").join("foo.proto"), "foo.proto"); }); } #[test] fn dot_include_dot_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success( ".", Path::new(".").join("dir").join("foo.proto"), "dir/foo.proto", ); }); } #[test] fn dot_subdir_include_simple_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success(Path::new(".").join("include"), "foo.proto", "foo.proto"); }); } #[test] fn dot_subdir_include_simple_subdir_file() { let dir = TempDir::new().unwrap(); 
with_current_dir(&dir, || { test_compile_success( Path::new(".").join("include"), "dir/foo.proto", "dir/foo.proto", ); }); } #[test] fn dot_subdir_include_rel_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success( Path::new(".").join("include"), Path::new("include").join("foo.proto"), "foo.proto", ); }); } #[test] fn dot_subdir_include_rel_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success( Path::new(".").join("include"), Path::new("include").join("dir").join("foo.proto"), "dir/foo.proto", ); }); } #[test] fn dot_subdir_include_abs_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( Path::new(".").join("include"), dir.path().join("include").join("foo.proto"), "dir/foo.proto", ErrorKind::FileNotIncluded { path: dir.path().join("include").join("foo.proto"), }, ); }); } #[test] fn dot_subdir_include_abs_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( Path::new(".").join("include"), dir.path().join("include").join("dir").join("foo.proto"), "dir/foo.proto", ErrorKind::FileNotIncluded { path: dir.path().join("include").join("dir").join("foo.proto"), }, ); }); } #[test] fn dot_subdir_include_dot_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success( Path::new(".").join("include"), Path::new(".").join("include").join("foo.proto"), "foo.proto", ); }); } #[test] fn dot_subdir_include_dot_subdir_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success( Path::new(".").join("include"), Path::new(".").join("include").join("dir").join("foo.proto"), "dir/foo.proto", ); }); } #[test] fn dot_subdir_include_complex_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_error( Path::new(".").join("include"), Path::new("include") .join("..") .join("include") .join("foo.proto"), "foo.proto", 
ErrorKind::FileNotIncluded { path: Path::new("include") .join("..") .join("include") .join("foo.proto"), }, ); }); } #[test] fn complex_include_complex_file() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { test_compile_success( Path::new(".").join("include").join("..").join("include"), Path::new(".") .join("include") .join("..") .join("include") .join("foo.proto"), "foo.proto", ); }); } #[test] fn invalid_file() { let dir = TempDir::new().unwrap(); std::fs::write(dir.path().join("foo.proto"), INVALID_UTF8).unwrap(); let mut compiler = Compiler::new(once(&dir)).unwrap(); let err = compiler.open_file("foo.proto").unwrap_err(); match err.kind() { ErrorKind::FileInvalidUtf8 { name } => { assert_eq!(name, "foo.proto"); } kind => panic!("unexpected error: {}", kind), } } #[test] fn shadow_file_rel() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { std::fs::write("foo.proto", EMPTY).unwrap(); fs::create_dir_all("include").unwrap(); std::fs::write(Path::new("include").join("foo.proto"), EMPTY).unwrap(); let mut compiler = Compiler::new(["include", "."]).unwrap(); let err = compiler.open_file("foo.proto").unwrap_err(); match err.kind() { ErrorKind::FileShadowed { name, path, shadow } => { assert_eq!(name, "foo.proto"); assert_eq!(path, Path::new("foo.proto")); assert_eq!(shadow, &Path::new("include").join("foo.proto")); } kind => panic!("unexpected error: {}", kind), } }); } #[test] fn shadow_file_rel_subdir() { let dir = TempDir::new().unwrap(); with_current_dir(&dir, || { fs::create_dir_all("include1").unwrap(); std::fs::write(Path::new("include1").join("foo.proto"), EMPTY).unwrap(); fs::create_dir_all("include2").unwrap(); std::fs::write(Path::new("include2").join("foo.proto"), EMPTY).unwrap(); let mut compiler = Compiler::new(["include1", "include2"]).unwrap(); let err = compiler .open_file(Path::new("include2").join("foo.proto")) .unwrap_err(); match err.kind() { ErrorKind::FileShadowed { name, path, shadow } => { assert_eq!(name, 
"foo.proto"); assert_eq!(path, &Path::new("include2").join("foo.proto")); assert_eq!(shadow, &Path::new("include1").join("foo.proto")); } kind => panic!("unexpected error: {}", kind), } }); } #[test] fn shadow_file_abs() { let dir = TempDir::new().unwrap(); std::fs::write(dir.path().join("foo.proto"), EMPTY).unwrap(); fs::create_dir_all(dir.path().join("include")).unwrap(); std::fs::write(dir.path().join("include").join("foo.proto"), EMPTY).unwrap(); let mut compiler = Compiler::new([dir.path().join("include").as_ref(), dir.path()]).unwrap(); let err = compiler .open_file(dir.path().join("foo.proto")) .unwrap_err(); match err.kind() { ErrorKind::FileShadowed { name, path, shadow } => { assert_eq!(name, "foo.proto"); assert_eq!(path, &dir.path().join("foo.proto")); assert_eq!(shadow, &dir.path().join("include").join("foo.proto")); } kind => panic!("unexpected error: {}", kind), } } #[test] fn shadow_file_abs_subdir() { let dir = TempDir::new().unwrap(); fs::create_dir_all(dir.path().join("include1")).unwrap(); std::fs::write(dir.path().join("include1").join("foo.proto"), EMPTY).unwrap(); fs::create_dir_all(dir.path().join("include2")).unwrap(); std::fs::write(dir.path().join("include2").join("foo.proto"), EMPTY).unwrap(); let mut compiler = Compiler::new([dir.path().join("include1"), dir.path().join("include2")]).unwrap(); let err = compiler .open_file(dir.path().join("include2").join("foo.proto")) .unwrap_err(); match err.kind() { ErrorKind::FileShadowed { name, path, shadow } => { assert_eq!(name, "foo.proto"); assert_eq!(path, &dir.path().join("include2").join("foo.proto")); assert_eq!(shadow, &dir.path().join("include1").join("foo.proto")); } kind => panic!("unexpected error: {}", kind), } } #[test] fn shadow_invalid_file() { let dir = TempDir::new().unwrap(); fs::create_dir_all(dir.path().join("include1")).unwrap(); std::fs::write(dir.path().join("include1").join("foo.proto"), INVALID_UTF8).unwrap(); fs::create_dir_all(dir.path().join("include2")).unwrap(); 
std::fs::write(dir.path().join("include2").join("foo.proto"), EMPTY).unwrap(); let mut compiler = Compiler::new([dir.path().join("include1"), dir.path().join("include2")]).unwrap(); let err = compiler .open_file(dir.path().join("include2").join("foo.proto")) .unwrap_err(); match err.kind() { ErrorKind::FileInvalidUtf8 { name } => { assert_eq!(name, "foo.proto"); } kind => panic!("unexpected error: {}", kind), } } #[test] fn shadow_already_imported_file() { let dir = TempDir::new().unwrap(); fs::create_dir_all(dir.path().join("include1")).unwrap(); std::fs::write(dir.path().join("include1").join("foo.proto"), EMPTY).unwrap(); fs::create_dir_all(dir.path().join("include2")).unwrap(); std::fs::write(dir.path().join("include2").join("foo.proto"), EMPTY).unwrap(); let mut compiler = Compiler::new([dir.path().join("include1"), dir.path().join("include2")]).unwrap(); compiler.open_file("foo.proto").unwrap(); let err = compiler .open_file(dir.path().join("include2").join("foo.proto")) .unwrap_err(); match err.kind() { ErrorKind::FileShadowed { name, path, shadow } => { assert_eq!(name, "foo.proto"); assert_eq!(path, &dir.path().join("include2").join("foo.proto")); assert_eq!(shadow, &dir.path().join("include1").join("foo.proto")); } kind => panic!("unexpected error: {}", kind), } } #[test] fn import_files() { let dir = TempDir::new().unwrap(); fs::create_dir(dir.path().join("include")).unwrap(); std::fs::write( dir.path().join("include").join("dep.proto"), "import 'dep2.proto';", ) .unwrap(); std::fs::write(dir.path().join("root.proto"), "import 'dep.proto';").unwrap(); std::fs::write(dir.path().join("dep2.proto"), EMPTY).unwrap(); let mut compiler = Compiler::new([dir.path().to_owned(), dir.path().join("include")]).unwrap(); compiler.open_file("root.proto").unwrap(); assert_eq!(compiler.files().len(), 3); assert_eq!(compiler.files().next().unwrap().name(), "dep2.proto"); assert_eq!( compiler.files["dep2.proto"].path(), Some(dir.path().join("dep2.proto").as_ref()) ); 
assert_eq!(compiler.files().nth(1).unwrap().name(), "dep.proto"); assert_eq!( compiler.files["dep.proto"].path(), Some(dir.path().join("include").join("dep.proto").as_ref()) ); assert_eq!(compiler.files().nth(2).unwrap().name(), "root.proto"); assert_eq!( compiler.files["root.proto"].path(), Some(dir.path().join("root.proto").as_ref()) ); let file_descriptor_set = compiler.file_descriptor_set(); assert_eq!(file_descriptor_set.file.len(), 1); assert_eq!(file_descriptor_set.file[0].name(), "root.proto"); compiler.include_imports(true); let file_descriptor_set = compiler.file_descriptor_set(); assert_eq!(file_descriptor_set.file.len(), 3); assert_eq!(file_descriptor_set.file[0].name(), "dep2.proto"); assert_eq!(file_descriptor_set.file[1].name(), "dep.proto"); assert_eq!(file_descriptor_set.file[2].name(), "root.proto"); } #[test] fn import_files_include_imports_path_already_imported() { let dir = TempDir::new().unwrap(); std::fs::write(dir.path().join("root1.proto"), "import 'root2.proto';").unwrap(); std::fs::write(dir.path().join("root2.proto"), EMPTY).unwrap(); let mut compiler = Compiler::new([dir.path().to_owned()]).unwrap(); compiler.open_file("root1.proto").unwrap(); let file_descriptor_set = compiler.file_descriptor_set(); assert_eq!(file_descriptor_set.file.len(), 1); assert_eq!(file_descriptor_set.file[0].name(), "root1.proto"); compiler.open_file("root2.proto").unwrap(); let file_descriptor_set = compiler.file_descriptor_set(); assert_eq!(file_descriptor_set.file.len(), 2); assert_eq!(file_descriptor_set.file[0].name(), "root2.proto"); assert_eq!(file_descriptor_set.file[1].name(), "root1.proto"); } #[test] fn import_cycle() { let dir = TempDir::new().unwrap(); fs::create_dir(dir.path().join("include")).unwrap(); std::fs::write( dir.path().join("include").join("dep.proto"), "import 'dep2.proto';", ) .unwrap(); std::fs::write(dir.path().join("root.proto"), "import 'dep.proto';").unwrap(); std::fs::write(dir.path().join("dep2.proto"), "import 
'root.proto';").unwrap(); let mut compiler = Compiler::new([dir.path().to_owned(), dir.path().join("include")]).unwrap(); let err = compiler.open_file("root.proto").unwrap_err(); match err.kind() { ErrorKind::CircularImport { name, cycle } => { assert_eq!(name, "root.proto"); assert_eq!(cycle, "root.proto -> dep.proto -> dep2.proto -> root.proto") } kind => panic!("unexpected error: {}", kind), } } #[test] fn import_cycle_short() { let dir = TempDir::new().unwrap(); std::fs::write(dir.path().join("root.proto"), "import 'dep.proto';").unwrap(); std::fs::write(dir.path().join("dep.proto"), "import 'dep.proto';").unwrap(); let mut compiler = Compiler::new([dir.path()]).unwrap(); let err = compiler.open_file("root.proto").unwrap_err(); match err.kind() { ErrorKind::CircularImport { name, cycle } => { assert_eq!(name, "dep.proto"); assert_eq!(cycle, "root.proto -> dep.proto -> dep.proto") } kind => panic!("unexpected error: {}", kind), } } #[test] fn import_cycle_nested() { let dir = TempDir::new().unwrap(); std::fs::write(dir.path().join("root.proto"), "import 'root.proto';").unwrap(); let mut compiler = Compiler::new([dir.path().to_owned(), dir.path().join("include")]).unwrap(); let err = compiler.open_file("root.proto").unwrap_err(); match err.kind() { ErrorKind::CircularImport { name, cycle } => { assert_eq!(name, "root.proto"); assert_eq!(cycle, "root.proto -> root.proto") } kind => panic!("unexpected error: {}", kind), } } #[test] fn duplicated_import() { let dir = TempDir::new().unwrap(); fs::create_dir(dir.path().join("include")).unwrap(); std::fs::write( dir.path().join("include").join("dep.proto"), "import 'dep2.proto';", ) .unwrap(); std::fs::write( dir.path().join("root.proto"), "import 'dep.proto'; import 'dep2.proto';", ) .unwrap(); std::fs::write(dir.path().join("dep2.proto"), EMPTY).unwrap(); let mut compiler = Compiler::new([dir.path().to_owned(), dir.path().join("include")]).unwrap(); compiler.open_file("root.proto").unwrap(); 
assert_eq!(compiler.files().len(), 3); assert_eq!(compiler.files().next().unwrap().name(), "dep2.proto"); assert_eq!( compiler.files["dep2.proto"].path(), Some(dir.path().join("dep2.proto").as_ref()) ); assert_eq!(compiler.files().nth(1).unwrap().name(), "dep.proto"); assert_eq!( compiler.files["dep.proto"].path(), Some(dir.path().join("include").join("dep.proto").as_ref()) ); assert_eq!(compiler.files().nth(2).unwrap().name(), "root.proto"); assert_eq!( compiler.files["root.proto"].path(), Some(dir.path().join("root.proto").as_ref()) ); } #[test] fn import_file_absolute_path() { let dir = TempDir::new().unwrap(); fs::create_dir(dir.path().join("include")).unwrap(); std::fs::write(dir.path().join("include").join("dep.proto"), EMPTY).unwrap(); std::fs::write( dir.path().join("root.proto"), format!( "import '{}';", dir.path() .join("include") .join("dep.proto") .display() .to_string() .replace('\\', "/") .escape_default() ), ) .unwrap(); let mut compiler = Compiler::new([dir.path().to_owned(), dir.path().join("include")]).unwrap(); compiler.open_file("root.proto").unwrap_err(); } #[cfg(windows)] #[test] fn open_file_case_insensitive() { let dir = TempDir::new().unwrap(); test_compile_success( dir.path().join("include"), dir.path().join("INCLUDE").join("foo.proto"), "foo.proto", ); } protox-0.9.0/src/error.rs000064400000000000000000000221531046102023000134560ustar 00000000000000use std::{fmt, io, path::PathBuf}; use miette::{Diagnostic, NamedSource, SourceCode, SourceOffset, SourceSpan}; use prost_reflect::DescriptorError; use protox_parse::ParseError; use thiserror::Error; use crate::file::File; /// An error that can occur when compiling protobuf files. 
#[derive(Diagnostic, Error)] #[error(transparent)] #[diagnostic(transparent)] pub struct Error { kind: Box, } #[derive(Debug, Diagnostic, Error)] pub(crate) enum ErrorKind { #[error("{}", err)] #[diagnostic(forward(err))] Parse { err: ParseError }, #[error("{}", err)] #[diagnostic(forward(err))] Check { err: DescriptorError }, #[error("error opening file '{path}'")] OpenFile { name: String, path: PathBuf, #[source] err: io::Error, }, #[error("file '{name}' is too large")] #[diagnostic(help("the maximum file length is 2,147,483,647 bytes"))] FileTooLarge { name: String }, #[error("file '{name}' is not valid utf-8")] FileInvalidUtf8 { name: String }, #[error("file '{name}' not found")] FileNotFound { name: String }, #[error("import '{name}' not found")] ImportNotFound { #[label("imported here")] span: Option, #[source_code] source_code: NamedSource, name: String, }, #[error("import cycle detected: {cycle}")] CircularImport { name: String, cycle: String }, #[error("file '{path}' is not in any include path")] FileNotIncluded { path: PathBuf }, #[error("path '{path}' is shadowed by '{shadow}' in the include paths")] #[diagnostic(help("either pass '{}' as the input file, or re-order the include paths so that '{}' comes first", shadow.display(), path.display()))] FileShadowed { name: String, path: PathBuf, shadow: PathBuf, }, /// This variant is intermediate and should not be present in the final error. #[error("import '{name}' was listed twice")] DuplicateImport { #[label("imported here")] span: Option, #[source_code] source_code: NamedSource, name: String, }, #[error(transparent)] Custom(Box), } impl Error { /// Creates an instance of [`struct@Error`] with an arbitrary payload. pub fn new(error: E) -> Self where E: Into>, { Error::from_kind(ErrorKind::Custom(error.into())) } /// Creates an instance of [`struct@Error`] indicating that an imported file could not be found. 
    ///
    /// This error should be returned by [`FileResolver`](crate::file::FileResolver) instances if a file is not found.
    pub fn file_not_found(name: &str) -> Self {
        Error::from_kind(ErrorKind::FileNotFound {
            name: name.to_owned(),
        })
    }

    /// The file in which this error occurred, if available.
    pub fn file(&self) -> Option<&str> {
        match &*self.kind {
            // Parse errors always know their file; check errors may not.
            ErrorKind::Parse { err } => Some(err.file()),
            ErrorKind::Check { err } => err.file(),
            // All of these variants carry the logical file name directly.
            ErrorKind::OpenFile { name, .. }
            | ErrorKind::FileTooLarge { name }
            | ErrorKind::FileInvalidUtf8 { name }
            | ErrorKind::FileNotFound { name }
            | ErrorKind::CircularImport { name, .. }
            | ErrorKind::FileShadowed { name, .. } => Some(name),
            // FileNotIncluded only has a filesystem path, not a resolved file name.
            ErrorKind::FileNotIncluded { .. } => None,
            ErrorKind::Custom(_) => None,
            // Import errors attach the *importing* file as miette source code.
            ErrorKind::ImportNotFound { source_code, .. }
            | ErrorKind::DuplicateImport { source_code, .. } => Some(source_code.name()),
        }
    }

    // Single construction point: keeps the `Box<ErrorKind>` wrapping in one place.
    pub(crate) fn from_kind(kind: ErrorKind) -> Self {
        Error {
            kind: Box::new(kind),
        }
    }

    // Test-only accessor so unit tests can match on the concrete variant.
    #[cfg(test)]
    pub(crate) fn kind(&self) -> &ErrorKind {
        &self.kind
    }

    /// Returns true if this is an instance of [`Error::file_not_found()`]
    pub fn is_file_not_found(&self) -> bool {
        matches!(
            &*self.kind,
            ErrorKind::FileNotFound { .. }
                | ErrorKind::ImportNotFound { .. }
                | ErrorKind::FileNotIncluded { .. }
        )
    }

    /// Returns true if this error is caused by an invalid protobuf source file.
    pub fn is_parse(&self) -> bool {
        matches!(
            &*self.kind,
            ErrorKind::Parse { .. }
                | ErrorKind::FileTooLarge { .. }
                | ErrorKind::FileInvalidUtf8 { .. }
        )
    }

    /// Returns true if this error is caused by an IO error while opening a file.
    pub fn is_io(&self) -> bool {
        match &*self.kind {
            ErrorKind::OpenFile { ..
} => true,
            // A Custom payload that wraps an io::Error also counts as an IO error.
            // NOTE(review): turbofish `::<io::Error>` restored — extraction stripped
            // angle brackets here; confirm against upstream.
            ErrorKind::Custom(err) if err.downcast_ref::<io::Error>().is_some() => true,
            _ => false,
        }
    }

    /// Upgrades a plain `FileNotFound` raised while resolving an import into an
    /// `ImportNotFound` carrying the importing file's source and the import's span,
    /// so miette can render a labelled snippet. Other error kinds pass through.
    pub(crate) fn into_import_error(self, file: &File, import_idx: usize) -> Self {
        match *self.kind {
            ErrorKind::FileNotFound { name } => {
                let source_code: NamedSource<String> =
                    NamedSource::new(file.name(), file.source().unwrap_or_default().to_owned());
                let span = find_import_span(file, import_idx);
                Error::from_kind(ErrorKind::ImportNotFound {
                    span,
                    source_code,
                    name,
                })
            }
            _ => self,
        }
    }

    /// Builds the intermediate `DuplicateImport` error, pointing at the
    /// `import_idx`-th import statement of `file`.
    pub(crate) fn duplicated_import(name: String, file: &File, import_idx: usize) -> Error {
        let source_code: NamedSource<String> =
            NamedSource::new(file.name(), file.source().unwrap_or_default().to_owned());
        let span = find_import_span(file, import_idx);
        Error::from_kind(ErrorKind::DuplicateImport {
            span,
            source_code,
            name,
        })
    }
}

/// Locates the source span of the `import_idx`-th import statement using the
/// descriptor's `source_code_info`. Path `[3, i]` is FileDescriptorProto field 3
/// (`dependency`), index `i`. Returns `None` when no source info is available.
fn find_import_span(file: &File, import_idx: usize) -> Option<SourceSpan> {
    if let Some(sci) = &file.descriptor.source_code_info {
        if let Some(source) = file.source() {
            for location in &sci.location {
                if location.path == [3, import_idx as i32] {
                    // A 3-element span means start-line/start-col/end-col on one line;
                    // multi-line spans (4 elements) are skipped.
                    if location.span.len() != 3 {
                        continue;
                    }
                    // Spans in source_code_info are zero-based; miette is one-based.
                    let start_line = location.span[0] as usize + 1;
                    let start_col = location.span[1] as usize + 1;
                    let end_col = location.span[2] as usize + 1;
                    return Some(SourceSpan::new(
                        SourceOffset::from_location(source, start_line, start_col),
                        end_col - start_col,
                    ));
                }
            }
        }
    }
    None
}

impl From<DescriptorError> for Error {
    fn from(err: DescriptorError) -> Self {
        Error::from_kind(ErrorKind::Check { err })
    }
}

impl From<ParseError> for Error {
    fn from(err: ParseError) -> Self {
        Error::from_kind(ErrorKind::Parse { err })
    }
}

impl From<io::Error> for Error {
    fn from(err: io::Error) -> Self {
        Error::new(err)
    }
}

// Debug is hand-written to produce protoc-style one-line messages instead of
// the derived struct dump.
impl fmt::Debug for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &*self.kind {
            ErrorKind::Parse { err } => err.fmt(f),
            ErrorKind::Check { err } => err.fmt(f),
            ErrorKind::OpenFile { err, .. } => write!(f, "{}: {}", self, err),
            ErrorKind::FileTooLarge { .. }
            | ErrorKind::FileInvalidUtf8 { .. }
            | ErrorKind::FileNotFound { ..
} | ErrorKind::CircularImport { .. }
            | ErrorKind::FileNotIncluded { .. }
            | ErrorKind::FileShadowed { .. } => write!(f, "{}", self),
            ErrorKind::Custom(err) => err.fmt(f),
            // Import errors render as "file:line:col: message" when span info exists.
            ErrorKind::DuplicateImport {
                span, source_code, ..
            }
            | ErrorKind::ImportNotFound {
                span, source_code, ..
            } => {
                write!(f, "{}:", source_code.name())?;
                if let Some(span) = span {
                    if let Ok(span_contents) = source_code.read_span(span, 0, 0) {
                        // miette line/column are zero-based; display one-based.
                        write!(
                            f,
                            "{}:{}: ",
                            span_contents.line() + 1,
                            span_contents.column() + 1
                        )?;
                    }
                }
                write!(f, "{}", self)
            }
        }
    }
}

#[test]
fn fmt_debug_io() {
    let err = Error::from_kind(ErrorKind::OpenFile {
        name: "file.proto".into(),
        path: "path/to/file.proto".into(),
        err: io::Error::new(io::ErrorKind::Other, "io error"),
    });
    assert!(err.is_io());
    assert_eq!(err.file(), Some("file.proto"));
    assert_eq!(
        format!("{:?}", err),
        "error opening file 'path/to/file.proto': io error"
    );
}

#[test]
fn fmt_debug_parse() {
    let err = Error::from(protox_parse::parse("file.proto", "invalid").unwrap_err());
    assert!(err.is_parse());
    assert_eq!(err.file(), Some("file.proto"));
    assert_eq!(
        format!("{:?}", err),
        "file.proto:1:1: expected 'enum', 'extend', 'import', 'message', 'option', 'service', 'package' or ';', but found 'invalid'"
    );
}
protox-0.9.0/src/file/chain.rs000064400000000000000000000031011046102023000143210ustar 00000000000000use std::{fmt, path::Path};

use super::{File, FileResolver};
use crate::Error;

/// An implementation of [`FileResolver`] which chains together several other resolvers.
///
/// When opening files, each resolver is searched in turn until the file is found.
#[derive(Default)]
pub struct ChainFileResolver {
    // NOTE(review): `Vec<Box<dyn FileResolver>>` restored — extraction stripped the
    // angle-bracket tokens; confirm against upstream. Trait objects allow mixing
    // resolver implementations in one chain.
    resolvers: Vec<Box<dyn FileResolver>>,
}

impl ChainFileResolver {
    /// Creates a new, empty [`ChainFileResolver`].
    pub fn new() -> Self {
        Default::default()
    }

    /// Adds a new resolver.
    ///
    /// The new resolver will be searched after all previously-added resolvers.
pub fn add<F>(&mut self, resolver: F)
    where
        F: FileResolver + 'static,
    {
        // Appended, so earlier resolvers keep priority.
        self.resolvers.push(Box::new(resolver))
    }
}

impl FileResolver for ChainFileResolver {
    // First resolver that recognizes the path wins.
    fn resolve_path(&self, path: &Path) -> Option<String> {
        for resolver in &self.resolvers {
            if let Some(name) = resolver.resolve_path(path) {
                return Some(name);
            }
        }
        None
    }

    // "Not found" falls through to the next resolver; any other error aborts
    // immediately so e.g. IO or parse errors are not masked.
    fn open_file(&self, name: &str) -> Result<File, Error> {
        for resolver in &self.resolvers {
            match resolver.open_file(name) {
                Ok(file) => return Ok(file),
                Err(err) if err.is_file_not_found() => continue,
                Err(err) => return Err(err),
            }
        }
        Err(Error::file_not_found(name))
    }
}

impl fmt::Debug for ChainFileResolver {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The boxed resolvers are not Debug, hence non_exhaustive output.
        f.debug_struct("ChainFileResolver").finish_non_exhaustive()
    }
}
protox-0.9.0/src/file/descriptor_set.rs000064400000000000000000000056301046102023000162760ustar 00000000000000use bytes::{Buf, Bytes};
use prost::{
    encoding::{check_wire_type, decode_key, decode_varint, skip_field, DecodeContext, WireType},
    DecodeError, Message,
};
use prost_types::FileDescriptorProto;

use crate::{
    file::{File, FileResolver},
    Error,
};

/// An implementation of [`FileResolver`] which resolves files from a compiled [`FileDescriptorSet`](prost_types::FileDescriptorSet).
#[derive(Debug)]
pub struct DescriptorSetFileResolver {
    set: Vec<FileDescriptor>,
}

#[derive(Debug, Clone, Default, PartialEq)]
struct FileDescriptor {
    file: FileDescriptorProto,
    // Raw bytes of the descriptor, kept (when decoded from bytes) so that
    // extension options unknown to prost_types are preserved.
    encoded: Option<Bytes>,
}

impl DescriptorSetFileResolver {
    /// Creates an instance of [`DescriptorSetFileResolver`] from the file descriptor set.
    pub fn new(set: prost_types::FileDescriptorSet) -> Self {
        DescriptorSetFileResolver {
            set: set
                .file
                .into_iter()
                .map(|file| FileDescriptor {
                    encoded: None,
                    file,
                })
                .collect(),
        }
    }

    /// Creates an instance of [`DescriptorSetFileResolver`] by deserializing a [`FileDescriptorSet`](prost_types::FileDescriptorSet)
    /// from the given bytes.
    ///
    /// Unlike when going through [`new()`](DescriptorSetFileResolver::new), extension options are preserved.
pub fn decode<B>(mut buf: B) -> Result<Self, DecodeError>
    where
        B: Buf,
    {
        // FileDescriptorSet has a single field: `repeated FileDescriptorProto file = 1`.
        const FILE_TAG: u32 = 1;
        let mut set = Vec::new();
        // Hand-rolled decode loop so each file's raw bytes can be captured
        // verbatim (preserving extension options) instead of round-tripping
        // through prost_types.
        while buf.has_remaining() {
            let (key, wire_type) = decode_key(&mut buf)?;
            if key == FILE_TAG {
                check_wire_type(WireType::LengthDelimited, wire_type)?;
                let len = decode_varint(&mut buf)? as usize;
                if len > buf.remaining() {
                    return Err(DecodeError::new("buffer underflow"));
                }
                set.push(FileDescriptor::decode((&mut buf).take(len))?);
            } else {
                // Unknown fields are skipped, not rejected.
                skip_field(wire_type, key, &mut buf, DecodeContext::default())?;
            }
        }
        Ok(DescriptorSetFileResolver { set })
    }
}

impl FileResolver for DescriptorSetFileResolver {
    fn open_file(&self, name: &str) -> Result<File, Error> {
        // Linear scan; descriptor sets are typically small.
        for file in &self.set {
            if file.file.name() == name {
                return Ok(File {
                    path: None,
                    source: None,
                    descriptor: file.file.clone(),
                    encoded: file.encoded.clone(),
                });
            }
        }
        Err(Error::file_not_found(name))
    }
}

impl FileDescriptor {
    fn decode(mut buf: impl Buf) -> Result<Self, DecodeError> {
        // Copy the raw bytes first, then parse them, so `encoded` is byte-exact.
        let encoded = buf.copy_to_bytes(buf.remaining());
        let file = FileDescriptorProto::decode(&mut encoded.as_ref())?;
        Ok(FileDescriptor {
            file,
            encoded: Some(encoded),
        })
    }
}
protox-0.9.0/src/file/google.rs000064400000000000000000000030321046102023000145130ustar 00000000000000use prost_reflect::DescriptorPool;

use super::{File, FileResolver};
use crate::Error;

/// An implementation of [`FileResolver`] which resolves well-known imports such as `google/protobuf/descriptor.proto`.
#[derive(Debug, Default)]
pub struct GoogleFileResolver {
    pool: DescriptorPool,
}

impl GoogleFileResolver {
    /// Creates a new instance of [`GoogleFileResolver`].
pub fn new() -> Self {
        GoogleFileResolver {
            // The global pool is pre-populated with the well-known types.
            pool: DescriptorPool::global(),
        }
    }
}

impl FileResolver for GoogleFileResolver {
    fn open_file(&self, name: &str) -> Result<File, Error> {
        match name {
            // Only this fixed allowlist of well-known files is served; anything
            // else falls through to the next resolver in the chain.
            "google/protobuf/any.proto"
            | "google/protobuf/api.proto"
            | "google/protobuf/descriptor.proto"
            | "google/protobuf/duration.proto"
            | "google/protobuf/empty.proto"
            | "google/protobuf/field_mask.proto"
            | "google/protobuf/source_context.proto"
            | "google/protobuf/struct.proto"
            | "google/protobuf/timestamp.proto"
            | "google/protobuf/type.proto"
            | "google/protobuf/wrappers.proto"
            | "google/protobuf/compiler/plugin.proto" => {
                let file = self
                    .pool
                    .get_file_by_name(name)
                    .expect("well-known file not found");
                Ok(File::from_file_descriptor_proto(
                    file.file_descriptor_proto().clone(),
                ))
            }
            _ => Err(Error::file_not_found(name)),
        }
    }
}
protox-0.9.0/src/file/include.rs000064400000000000000000000131211046102023000146620ustar 00000000000000use std::path::{self, Path, PathBuf};

use crate::{error::ErrorKind, Error};

use super::{File, FileResolver};

/// An implementation of [`FileResolver`] which searches an include path on the file system.
#[derive(Debug)]
pub struct IncludeFileResolver {
    include: PathBuf,
}

impl IncludeFileResolver {
    /// Constructs a `IncludeFileResolver` that searches the given include path.
    pub fn new(include: PathBuf) -> Self {
        IncludeFileResolver { include }
    }
}

impl FileResolver for IncludeFileResolver {
    /// Converts a file system path to a unique file name.
///
    /// # Examples
    ///
    /// ```
    /// # use std::path::{Path, PathBuf};
    /// # use protox::file::{IncludeFileResolver, FileResolver};
    /// let resolver = IncludeFileResolver::new(PathBuf::from("/path/to/include"));
    /// assert_eq!(resolver.resolve_path(Path::new("/path/to/include/dir/foo.proto")), Some("dir/foo.proto".to_owned()));
    /// assert_eq!(resolver.resolve_path(Path::new("notincluded.proto")), None);
    /// ```
    fn resolve_path(&self, path: &Path) -> Option<String> {
        // Only paths underneath this include directory resolve; the remainder
        // is normalized to a '/'-separated protobuf file name.
        if let Some(relative_path) = strip_prefix(path, &self.include) {
            if let Some(name) = path_to_file_name(relative_path) {
                return Some(name);
            }
        }
        None
    }

    /// Opens a file by its unique name.
    ///
    /// If the include path contains a file with the given name, it is parsed and returned.
    ///
    /// # Errors
    ///
    /// Returns an error if there is an IO error opening the file, or it is not
    /// a valid protobuf source file.
    ///
    /// If the file does not exist, [`Error::file_not_found()`] is returned
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::{fs, path::PathBuf};
    /// # use protox::file::{IncludeFileResolver, FileResolver};
    /// # let tempdir = tempfile::TempDir::new().unwrap();
    /// # std::env::set_current_dir(&tempdir).unwrap();
    /// fs::write("./foo.proto", "/* hello! */").unwrap();
    ///
    /// let resolver = IncludeFileResolver::new(PathBuf::from("."));
    /// let file = resolver.open_file("foo.proto").unwrap();
    /// assert_eq!(file.path(), Some("./foo.proto".as_ref()));
    /// assert_eq!(file.source(), Some("/* hello! 
*/"));
    /// ```
    fn open_file(&self, name: &str) -> Result<File, Error> {
        // `name` is already '/'-separated; joining onto the include root yields
        // the on-disk path.
        File::open(name, &self.include.join(name))
    }
}

/// Converts a relative path into a '/'-separated protobuf file name.
/// Returns `None` for empty paths, non-UTF-8 components, or any non-normal
/// component (`..`, `.`, root, prefix), since file names must be plain
/// relative paths.
pub(crate) fn path_to_file_name(path: &Path) -> Option<String> {
    let mut name = String::new();
    for component in path.components() {
        match component {
            std::path::Component::Normal(component) => {
                if let Some(component) = component.to_str() {
                    if !name.is_empty() {
                        name.push('/');
                    }
                    name.push_str(component);
                } else {
                    return None;
                }
            }
            _ => return None,
        }
    }

    if name.is_empty() {
        None
    } else {
        Some(name)
    }
}

/// Errors if `file` was opened through one include path but `expected_path`
/// names it through a different one — the first include path "shadows" the
/// requested file, matching protoc's behavior.
pub(crate) fn check_shadow(
    file: &str,
    actual_path: Option<&Path>,
    expected_path: &Path,
) -> Result<(), Error> {
    // actual_path is expected to be an include path concatenated with `expected_path`
    if let Some(actual_path) = actual_path {
        if !path_eq(actual_path, expected_path) {
            return Err(Error::from_kind(ErrorKind::FileShadowed {
                name: file.to_string(),
                path: expected_path.to_owned(),
                shadow: actual_path.to_owned(),
            }));
        }
    }
    Ok(())
}

// Like `Path::strip_prefix`, but skips '.' components and compares
// case-insensitively on windows (see `iter_after`).
fn strip_prefix<'a>(path: &'a Path, prefix: &Path) -> Option<&'a Path> {
    Some(iter_after(path.components(), prefix.components())?.as_path())
}

/// Naive path equality
fn path_eq(l: &Path, r: &Path) -> bool {
    let (mut lhs, mut rhs) = (l.components(), r.components());
    loop {
        // Clone before advancing so a non-matching component is not consumed.
        let (mut lhs_next, mut rhs_next) = (lhs.clone(), rhs.clone());
        match (lhs_next.next(), rhs_next.next()) {
            (None, None) => return true,
            // '.' components are transparent on either side.
            (Some(path::Component::CurDir), _) => {
                lhs = lhs_next;
            }
            (_, Some(path::Component::CurDir)) => {
                rhs = rhs_next;
            }
            (Some(ref l), Some(ref r)) if path_component_eq(l, r) => {
                lhs = lhs_next;
                rhs = rhs_next;
            }
            _ => return false,
        }
    }
}

/// Comparison of paths which ignores '.' components and is case-insensitive on windows.
fn iter_after<'a, 'b, I, J>(mut iter: I, mut prefix: J) -> Option where I: Iterator> + Clone, J: Iterator> + Clone, { loop { let mut path_next = iter.clone(); let mut prefix_next = prefix.clone(); match (path_next.next(), prefix_next.next()) { (Some(path::Component::CurDir), _) => { iter = path_next; } (_, Some(path::Component::CurDir)) => { prefix = prefix_next; } (Some(ref l), Some(ref r)) if path_component_eq(l, r) => { iter = path_next; prefix = prefix_next; } (Some(_), Some(_)) => return None, (Some(_), None) => return Some(iter), (None, None) => return Some(iter), (None, Some(_)) => return None, } } } #[cfg(windows)] fn path_component_eq(l: &path::Component, r: &path::Component) -> bool { l.as_os_str().eq_ignore_ascii_case(r.as_os_str()) } #[cfg(not(windows))] fn path_component_eq(l: &path::Component, r: &path::Component) -> bool { l == r } protox-0.9.0/src/file/mod.rs000064400000000000000000000222771046102023000140320ustar 00000000000000//! Interfaces for customizing resolution of protobuf source files. mod chain; mod descriptor_set; mod google; mod include; #[cfg(test)] mod tests; pub use chain::ChainFileResolver; pub use descriptor_set::DescriptorSetFileResolver; pub use google::GoogleFileResolver; pub use include::IncludeFileResolver; use prost_types::FileDescriptorProto; use std::{ fs, io::{self, Read}, path::{Path, PathBuf}, }; use bytes::{Buf, Bytes}; pub(crate) use include::{check_shadow, path_to_file_name}; use prost::{DecodeError, Message}; use crate::error::{Error, ErrorKind}; const MAX_FILE_LEN: u64 = i32::MAX as u64; /// A strategy for locating protobuf source files. /// /// The main implementation is [`IncludeFileResolver`] which uses the file system, but /// this trait allows sourcing files from other places as well. pub trait FileResolver { /// Converts a file system path to a unique file name. fn resolve_path(&self, _path: &Path) -> Option { None } /// Opens a file by its unique name. 
/// /// # Errors /// /// If the file is not found, the implementation should return [`Error::file_not_found`]. fn open_file(&self, name: &str) -> Result; } impl FileResolver for Box where T: FileResolver + ?Sized, { fn resolve_path(&self, path: &Path) -> Option { (**self).resolve_path(path) } fn open_file(&self, name: &str) -> Result { (**self).open_file(name) } } /// An opened protobuf source file, returned by [`FileResolver::open_file`]. #[derive(Debug, Clone)] pub struct File { pub(crate) path: Option, pub(crate) source: Option, pub(crate) descriptor: FileDescriptorProto, pub(crate) encoded: Option, } /// Information about a [`File`] after it has been added to a [`Compiler`](crate::Compiler) instance. #[derive(Debug, Clone)] pub struct FileMetadata { pub(crate) name: String, pub(crate) path: Option, pub(crate) is_import: bool, } impl File { /// Read a protobuf source file from the filesystem into a new instance of [`File`] /// /// # Errors /// /// Returns an error if there is an IO error opening the file, or it is not /// a valid protobuf source file. 
/// /// If the file does not exist, [`Error::file_not_found()`] is returned /// /// # Examples /// /// ``` /// # use std::{fs, path::PathBuf}; /// # use protox::file::File; /// # use prost_types::{DescriptorProto, FileDescriptorProto, SourceCodeInfo, source_code_info::Location}; /// # let tempdir = tempfile::TempDir::new().unwrap(); /// # std::env::set_current_dir(&tempdir).unwrap(); /// fs::write("foo.proto", "message Foo { }").unwrap(); /// /// let file = File::open("foo.proto", "foo.proto".as_ref()).unwrap(); /// assert_eq!(file.path(), Some("foo.proto".as_ref())); /// assert_eq!(file.source(), Some("message Foo { }")); /// assert_eq!(file.file_descriptor_proto(), &FileDescriptorProto { /// name: Some("foo.proto".to_owned()), /// message_type: vec![DescriptorProto { /// name: Some("Foo".to_owned()), /// ..Default::default() /// }], /// source_code_info: Some(SourceCodeInfo { /// location: vec![ /// Location { path: vec![], span: vec![0, 0, 15], ..Default::default() }, /// Location { path: vec![4, 0], span: vec![0, 0, 15], ..Default::default() }, /// Location { path: vec![4, 0, 1], span: vec![0, 8, 11], ..Default::default() } /// ] /// }), /// ..Default::default() /// }); /// /// assert!(File::open("notfound.proto", "notfound.proto".as_ref()).unwrap_err().is_file_not_found()); /// ``` pub fn open(name: &str, path: &Path) -> Result { let map_io_err = |err: io::Error| -> Error { match err.kind() { io::ErrorKind::NotFound => Error::file_not_found(name), io::ErrorKind::InvalidData => Error::from_kind(ErrorKind::FileInvalidUtf8 { name: name.to_owned(), }), _ => Error::from_kind(ErrorKind::OpenFile { name: name.to_owned(), path: path.to_owned(), err, }), } }; let file = fs::File::open(path).map_err(map_io_err)?; let metadata = file.metadata().map_err(map_io_err)?; if metadata.len() > MAX_FILE_LEN { return Err(Error::from_kind(ErrorKind::FileTooLarge { name: name.to_owned(), })); } let mut buf = String::with_capacity(metadata.len() as usize); file.take(MAX_FILE_LEN) 
.read_to_string(&mut buf) .map_err(map_io_err)?; let descriptor = protox_parse::parse(name, &buf)?; Ok(File { path: Some(path.to_owned()), source: Some(buf), descriptor, encoded: None, }) } /// Read a protobuf source file from a string into a new instance of [`File`] /// /// # Errors /// /// Returns an error the string is not a valid protobuf source file. /// /// # Examples /// /// ``` /// # use std::{fs, path::PathBuf}; /// # use protox::file::File; /// # use prost_types::{DescriptorProto, FileDescriptorProto, SourceCodeInfo, source_code_info::Location}; /// let file = File::from_source("foo.proto", "message Foo { }").unwrap(); /// assert_eq!(file.path(), None); /// assert_eq!(file.source(), Some("message Foo { }")); /// assert_eq!(file.file_descriptor_proto(), &FileDescriptorProto { /// name: Some("foo.proto".to_owned()), /// message_type: vec![DescriptorProto { /// name: Some("Foo".to_owned()), /// ..Default::default() /// }], /// source_code_info: Some(SourceCodeInfo { /// location: vec![ /// Location { path: vec![], span: vec![0, 0, 15], ..Default::default() }, /// Location { path: vec![4, 0], span: vec![0, 0, 15], ..Default::default() }, /// Location { path: vec![4, 0, 1], span: vec![0, 8, 11], ..Default::default() } /// ] /// }), /// ..Default::default() /// }); /// ``` pub fn from_source(name: &str, source: &str) -> Result { let descriptor = protox_parse::parse(name, source)?; Ok(File { path: None, source: Some(source.to_owned()), descriptor, encoded: None, }) } /// Create a new instance of [`File`] from a parsed [`FileDescriptorProto`]. /// /// The file does not need to have type names or imports resolved. Typically, it would be returned by the [`parse()`](protox_parse::parse()) method. pub fn from_file_descriptor_proto(file: prost_types::FileDescriptorProto) -> Self { File { path: None, source: None, descriptor: file, encoded: None, } } /// Create an instance of [`File`] by deserializing a [`FileDescriptorProto`] /// from the given bytes. 
/// /// Unlike when going through [`from_file_descriptor_proto()`](File::from_file_descriptor_proto), extension options are preserved. /// /// The file does not need to have type names or imports resolved. pub fn decode_file_descriptor_proto(mut buf: B) -> Result where B: Buf, { let encoded = buf.copy_to_bytes(buf.remaining()); Ok(File { path: None, source: None, descriptor: FileDescriptorProto::decode(encoded.as_ref())?, encoded: Some(encoded), }) } /// Returns the name of this file. pub fn name(&self) -> &str { self.descriptor.name() } /// Returns the filesystem path, if this source is backed by a physical file. pub fn path(&self) -> Option<&Path> { self.path.as_deref() } /// Returns the full content of the source file if available. pub fn source(&self) -> Option<&str> { self.source.as_deref() } /// Returns the parsed value of the source file. /// /// This is typically equivalent to calling [`parse()`](protox_parse::parse()) on the string returned by [`source()`](File::source). pub fn file_descriptor_proto(&self) -> &FileDescriptorProto { &self.descriptor } } impl FileMetadata { /// Returns the name of this file. pub fn name(&self) -> &str { self.name.as_str() } /// Returns the filesystem path, if this source is backed by a physical file. pub fn path(&self) -> Option<&Path> { self.path.as_deref() } /// Returns `true` if this file was added explicitly by [`open_file()`](crate::Compiler::open_file), or `false` if it /// is was added as an import of some other file. 
pub fn is_import(&self) -> bool { self.is_import } } impl From for File { fn from(file: FileDescriptorProto) -> Self { File::from_file_descriptor_proto(file) } } impl From for FileDescriptorProto { fn from(file: File) -> Self { file.descriptor } } protox-0.9.0/src/file/tests.rs000064400000000000000000000261411046102023000144070ustar 00000000000000use std::{ io::{self, Seek, Write}, path::{Path, PathBuf}, }; use prost_types::{source_code_info::Location, FileDescriptorProto, SourceCodeInfo}; use crate::{file::FileResolver, Error}; use super::{ ChainFileResolver, DescriptorSetFileResolver, File, GoogleFileResolver, IncludeFileResolver, }; struct EmptyFileResolver; impl FileResolver for EmptyFileResolver { fn open_file(&self, name: &str) -> Result { Err(Error::file_not_found(name)) } } struct SingleFileResolver(File); impl FileResolver for SingleFileResolver { fn resolve_path(&self, path: &Path) -> Option { if self.0.path.as_deref() == Some(path) { Some(self.0.name().to_owned()) } else { None } } fn open_file(&self, name: &str) -> Result { if name == self.0.name() { Ok(File::from_file_descriptor_proto( self.0.file_descriptor_proto().clone(), )) } else { Err(Error::file_not_found(name)) } } } #[test] fn chain_file_resolver() { let source = "syntax = 'proto3';"; let mut resolver = ChainFileResolver::new(); resolver.add(EmptyFileResolver); resolver.add(SingleFileResolver( File::from_source("foo.proto", source).unwrap(), )); resolver.add(SingleFileResolver(File { path: Some(PathBuf::from("./bar.proto")), source: Some(source.to_owned()), descriptor: protox_parse::parse("bar.proto", source).unwrap(), encoded: None, })); assert_eq!(resolver.resolve_path("./notfound.proto".as_ref()), None); assert_eq!( resolver.resolve_path("./bar.proto".as_ref()).as_deref(), Some("bar.proto") ); assert!(resolver .open_file("notfound.proto") .unwrap_err() .is_file_not_found()); assert_eq!(resolver.open_file("foo.proto").unwrap().name(), "foo.proto"); } #[test] fn descriptor_set_file_resolver() 
{ let mut encoded_files: Vec = vec![ 0x0a, 0x16, 0x0a, 0x09, 0x66, 0x6f, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, ]; let unknown_field = &[0x90, 0x03, 0x05]; encoded_files.extend_from_slice(unknown_field); let resolver = DescriptorSetFileResolver::decode(encoded_files.as_slice()).unwrap(); let file = resolver.open_file("foo.proto").unwrap(); assert_eq!(file.name(), "foo.proto"); assert_eq!(file.source(), None); assert_eq!(file.path(), None); assert_eq!( file.file_descriptor_proto(), &FileDescriptorProto { name: Some("foo.proto".to_owned()), syntax: Some("proto3".to_owned()), ..Default::default() } ); assert!(file .encoded .unwrap() .as_ref() .ends_with(unknown_field.as_ref())); assert!(resolver .open_file("notfound.proto") .unwrap_err() .is_file_not_found()); } #[test] fn google_resolver() { let resolver = GoogleFileResolver::new(); assert_eq!( resolver .open_file("google/protobuf/any.proto") .unwrap() .name(), "google/protobuf/any.proto" ); assert_eq!( resolver .open_file("google/protobuf/api.proto") .unwrap() .name(), "google/protobuf/api.proto" ); assert_eq!( resolver .open_file("google/protobuf/descriptor.proto") .unwrap() .name(), "google/protobuf/descriptor.proto" ); assert_eq!( resolver .open_file("google/protobuf/duration.proto") .unwrap() .name(), "google/protobuf/duration.proto" ); assert_eq!( resolver .open_file("google/protobuf/empty.proto") .unwrap() .name(), "google/protobuf/empty.proto" ); assert_eq!( resolver .open_file("google/protobuf/field_mask.proto") .unwrap() .name(), "google/protobuf/field_mask.proto" ); assert_eq!( resolver .open_file("google/protobuf/source_context.proto") .unwrap() .name(), "google/protobuf/source_context.proto" ); assert_eq!( resolver .open_file("google/protobuf/struct.proto") .unwrap() .name(), "google/protobuf/struct.proto" ); assert_eq!( resolver .open_file("google/protobuf/timestamp.proto") .unwrap() .name(), "google/protobuf/timestamp.proto" ); assert_eq!( resolver 
.open_file("google/protobuf/type.proto") .unwrap() .name(), "google/protobuf/type.proto" ); assert_eq!( resolver .open_file("google/protobuf/wrappers.proto") .unwrap() .name(), "google/protobuf/wrappers.proto" ); assert_eq!( resolver .open_file("google/protobuf/compiler/plugin.proto") .unwrap() .name(), "google/protobuf/compiler/plugin.proto" ); assert!(resolver .open_file("otherfile") .unwrap_err() .is_file_not_found()); } #[test] fn include_resolver() { let include = IncludeFileResolver::new("/path/to/include".into()); #[cfg(unix)] fn non_utf8_path() -> PathBuf { use std::{ffi::OsStr, os::unix::ffi::OsStrExt}; OsStr::from_bytes(&[0, 159, 146, 150]).into() } #[cfg(windows)] fn non_utf8_path() -> PathBuf { use std::{ffi::OsString, os::windows::ffi::OsStringExt}; OsString::from_wide(&[0x61, 0xE9, 0x20, 0xD83D, 0xD83D, 0xDCA9]).into() } assert_eq!( include .resolve_path(Path::new("/path/to/include/foo.proto")) .as_deref(), Some("foo.proto") ); assert_eq!( include.resolve_path(Path::new("/path/nope/include/foo.proto")), None ); assert_eq!( include .resolve_path(Path::new("/path/./to/include/foo.proto")) .as_deref(), Some("foo.proto") ); assert_eq!(include.resolve_path(Path::new("/path/to/include")), None); assert_eq!( include.resolve_path(Path::new("/path/to/../to/include/foo.proto")), None ); assert_eq!(include.resolve_path(Path::new("/path/to")), None); assert_eq!( include .resolve_path(Path::new("/path/to/include/dir/foo.proto")) .as_deref(), Some("dir/foo.proto") ); assert_eq!( include .resolve_path(Path::new("/path/to/include/./foo.proto")) .as_deref(), Some("foo.proto") ); assert_eq!( include .resolve_path(Path::new("/path/to/include/dir/./foo.proto")) .as_deref(), Some("dir/foo.proto") ); assert_eq!( include.resolve_path(Path::new("/path/to/include/dir/../foo.proto")), None ); assert_eq!( include.resolve_path(&Path::new("/path/to/include").join(non_utf8_path())), None ); let include_non_utf8 = 
IncludeFileResolver::new(Path::new("/path/to/include").join(non_utf8_path())); assert_eq!( include_non_utf8 .resolve_path( &Path::new("/path/to/include") .join(non_utf8_path()) .join("foo.proto") ) .as_deref(), Some("foo.proto") ); } #[test] fn file_open() { let mut tempfile = tempfile::NamedTempFile::new().unwrap(); tempfile.write_all("syntax = 'proto3';".as_bytes()).unwrap(); tempfile.seek(io::SeekFrom::Start(0)).unwrap(); let file = File::open("foo.proto", tempfile.path()).unwrap(); assert_eq!(file.name(), "foo.proto"); assert_eq!(file.path(), Some(tempfile.path())); assert_eq!(file.source(), Some("syntax = 'proto3';")); assert_eq!( file.file_descriptor_proto(), &FileDescriptorProto { name: Some("foo.proto".to_owned()), syntax: Some("proto3".to_owned()), source_code_info: Some(SourceCodeInfo { location: vec![ Location { path: vec![], span: vec![0, 0, 18], ..Default::default() }, Location { path: vec![12], span: vec![0, 0, 18], ..Default::default() }, ], }), ..Default::default() } ); } #[test] fn file_from_source() { let file = File::from_source("foo.proto", "syntax = 'proto3';").unwrap(); assert_eq!(file.name(), "foo.proto"); assert_eq!(file.path(), None); assert_eq!(file.source(), Some("syntax = 'proto3';")); assert_eq!( file.file_descriptor_proto(), &FileDescriptorProto { name: Some("foo.proto".to_owned()), syntax: Some("proto3".to_owned()), source_code_info: Some(SourceCodeInfo { location: vec![ Location { path: vec![], span: vec![0, 0, 18], ..Default::default() }, Location { path: vec![12], span: vec![0, 0, 18], ..Default::default() }, ], }), ..Default::default() } ); } #[test] fn file_from_file_descriptor_proto() { let file = File::from(FileDescriptorProto { name: Some("foo.proto".to_owned()), syntax: Some("proto3".to_owned()), ..Default::default() }); assert_eq!(file.name(), "foo.proto"); assert_eq!(file.path(), None); assert_eq!(file.source(), None); assert_eq!( file.file_descriptor_proto(), &FileDescriptorProto { name: Some("foo.proto".to_owned()), 
syntax: Some("proto3".to_owned()), ..Default::default() } ); assert_eq!( FileDescriptorProto::from(file), FileDescriptorProto { name: Some("foo.proto".to_owned()), syntax: Some("proto3".to_owned()), ..Default::default() } ); } #[test] fn file_decode_file_descriptor_proto() { let file = File::decode_file_descriptor_proto( [ 0x0a, 0x09, 0x66, 0x6f, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, ] .as_ref(), ) .unwrap(); assert_eq!(file.name(), "foo.proto"); assert_eq!(file.path(), None); assert_eq!(file.source(), None); assert_eq!( file.file_descriptor_proto(), &FileDescriptorProto { name: Some("foo.proto".to_owned()), syntax: Some("proto3".to_owned()), ..Default::default() } ); } #[test] fn file_decode_file_descriptor_proto_err() { let invalid = b"invalid"; assert!(File::decode_file_descriptor_proto(invalid.as_ref()).is_err()); } protox-0.9.0/src/lib.rs000064400000000000000000000153271046102023000131000ustar 00000000000000//! A rust implementation of the protobuf compiler. //! //! For convenient compilation of protobuf source files in a single function, see //! [`compile()`]. For more options see [`Compiler`]. //! //! # Examples //! //! Usage with [`prost-build`](https://crates.io/crates/prost-build): //! //! ``` //! # use std::{env, fs, path::PathBuf}; //! # use prost::Message; //! # let tempdir = tempfile::TempDir::new().unwrap(); //! # env::set_current_dir(&tempdir).unwrap(); //! # env::set_var("OUT_DIR", tempdir.path()); //! # fs::write("root.proto", "").unwrap(); //! let file_descriptors = protox::compile(["root.proto"], ["."]).unwrap(); //! prost_build::compile_fds(file_descriptors).unwrap(); //! ``` //! //! Usage with [`tonic-build`](https://crates.io/crates/tonic-build): //! //! ```rust,ignore //! # use std::{env, fs, path::PathBuf}; //! # let tempdir = tempfile::TempDir::new().unwrap(); //! # env::set_current_dir(&tempdir).unwrap(); //! # env::set_var("OUT_DIR", tempdir.path()); //! 
# fs::write("root.proto", "").unwrap(); //! use protox::prost::Message; //! //! let file_descriptors = protox::compile(["root.proto"], ["."]).unwrap(); //! //! tonic_build::configure() //! .build_server(true) //! .compile_fds(file_descriptors) //! .unwrap(); //! ``` //! //! ### Error messages //! //! This crate uses [`miette`](https://crates.io/crates/miette) to add additional details to errors. For nice error messages, add `miette` as a dependency with the `fancy` feature enabled and return a [`miette::Result`](https://docs.rs/miette/latest/miette/type.Result.html) from your build script. //! //! ```rust //! # use std::{env, fs, path::PathBuf}; //! fn main() -> miette::Result<()> { //! # let tempdir = tempfile::TempDir::new().unwrap(); //! # env::set_current_dir(&tempdir).unwrap(); //! # env::set_var("OUT_DIR", tempdir.path()); //! # fs::write("root.proto", "").unwrap(); //! let _ = protox::compile(["root.proto"], ["."])?; //! //! Ok(()) //! } //! ``` //! //! Example error message: //! //! ```text //! Error: //! × name 'Bar' is not defined //! ╭─[root.proto:3:1] //! 3 │ message Foo { //! 4 │ Bar bar = 1; //! · ─┬─ //! · ╰── found here //! 5 │ } //! ╰──── //! ``` #![warn(missing_debug_implementations, missing_docs)] #![deny(unsafe_code)] #![doc(html_root_url = "https://docs.rs/protox/0.8.0/")] pub mod file; mod compile; mod error; use std::path::Path; pub use {prost, prost_reflect}; pub use self::compile::Compiler; pub use self::error::Error; /// Compiles a set of protobuf files using the given include paths. /// /// For more control over how files are compiled, see [`Compiler`]. This function is equivalent to: /// /// ```rust /// # use protox::Compiler; /// # fn main() -> Result<(), protox::Error> { /// # let files: Vec = vec![]; /// # let includes: Vec = vec![".".into()]; /// let file_descriptor_set = Compiler::new(includes)? /// .include_source_info(true) /// .include_imports(true) /// .open_files(files)? 
/// .file_descriptor_set(); /// # Ok(()) /// # } /// ``` /// /// # Examples /// /// ``` /// # use std::fs; /// # use prost_types::{ /// # DescriptorProto, FieldDescriptorProto, field_descriptor_proto::{Label, Type}, FileDescriptorSet, FileDescriptorProto, /// # SourceCodeInfo, source_code_info::Location /// # }; /// # use protox::compile; /// # let tempdir = tempfile::TempDir::new().unwrap(); /// # std::env::set_current_dir(&tempdir).unwrap(); /// # /// fs::write("bar.proto", " /// message Bar { } /// ").unwrap(); /// fs::write("root.proto", " /// import 'bar.proto'; /// /// message Foo { /// optional Bar bar = 1; /// } /// ").unwrap(); /// /// assert_eq!(compile(["root.proto"], ["."]).unwrap(), FileDescriptorSet { /// file: vec![ /// FileDescriptorProto { /// name: Some("bar.proto".to_owned()), /// message_type: vec![DescriptorProto { /// name: Some("Bar".to_owned()), /// ..Default::default() /// }], /// source_code_info: Some(SourceCodeInfo { /// location: vec![ /// Location { path: vec![], span: vec![1, 4, 19], ..Default::default() }, /// Location { path: vec![4, 0], span: vec![1, 4, 19], ..Default::default() }, /// Location { path: vec![4, 0, 1], span: vec![1, 12, 15], ..Default::default() }, /// ], /// }), /// ..Default::default() /// }, /// FileDescriptorProto { /// name: Some("root.proto".to_owned()), /// dependency: vec!["bar.proto".to_owned()], /// message_type: vec![DescriptorProto { /// name: Some("Foo".to_owned()), /// field: vec![FieldDescriptorProto { /// name: Some("bar".to_owned()), /// number: Some(1), /// label: Some(Label::Optional as _), /// r#type: Some(Type::Message as _), /// type_name: Some(".Bar".to_owned()), /// json_name: Some("bar".to_owned()), /// ..Default::default() /// }], /// ..Default::default() /// }], /// source_code_info: Some(SourceCodeInfo { /// location: vec![ /// Location { path: vec![], span: vec![1, 4, 5, 5], ..Default::default() }, /// Location { path: vec![3, 0], span: vec![1, 4, 23], ..Default::default() }, /// Location 
{ path: vec![4, 0], span: vec![3, 4, 5, 5], ..Default::default() }, /// Location { path: vec![4, 0, 1], span: vec![3, 12, 15], ..Default::default() }, /// Location { path: vec![4, 0, 2, 0], span: vec![4, 8, 29], ..Default::default() }, /// Location { path: vec![4, 0, 2, 0, 1], span: vec![4, 21, 24], ..Default::default() }, /// Location { path: vec![4, 0, 2, 0, 3], span: vec![4, 27, 28], ..Default::default() }, /// Location { path: vec![4, 0, 2, 0, 4], span: vec![4, 8, 16], ..Default::default() }, /// Location { path: vec![4, 0, 2, 0, 6], span: vec![4, 17, 20], ..Default::default() }, /// ], /// }), /// ..Default::default() /// }, /// ], /// ..Default::default() /// }); /// ``` pub fn compile( files: impl IntoIterator>, includes: impl IntoIterator>, ) -> Result { Ok(Compiler::new(includes)? .include_source_info(true) .include_imports(true) .open_files(files)? .file_descriptor_set()) } protox-0.9.0/src/main.rs000064400000000000000000000031671046102023000132550ustar 00000000000000use std::{fs, path::PathBuf}; use clap::Parser; use miette::Result; use protox::Compiler; #[derive(Debug, Parser)] pub struct Args { /// The source file(s) to compile #[clap(value_name = "PROTO_FILES", required = true, value_parser)] files: Vec, /// The directory in which to search for imports. #[clap( short = 'I', long = "include", visible_alias = "proto_path", value_name = "PATH", default_value = ".", value_parser )] includes: Vec, /// The output path to write a file descriptor set to. #[clap( short = 'o', long = "output", visible_alias = "descriptor_set_out", value_name = "PATH", value_parser )] output: Option, /// If set, includes source code information in the output file descriptor set. #[clap(long, visible_alias = "include_source_info")] include_source_info: bool, /// If set, all dependencies of the input files are output, so that the file descriptor set is self-contained. 
#[clap(long, visible_alias = "include_imports")] include_imports: bool, } pub fn main() -> Result<()> { miette::set_panic_hook(); let args = Args::parse(); let mut compiler = Compiler::new(args.includes)?; compiler.include_imports(args.include_imports); compiler.include_source_info(args.include_source_info); for file in args.files { compiler.open_file(file)?; } if let Some(output) = args.output { fs::write(output, compiler.encode_file_descriptor_set()) .map_err(|err| miette::miette!(err))?; } Ok(()) } protox-0.9.0/tests/compare.rs000064400000000000000000000213551046102023000143310ustar 00000000000000use std::{ env, fs, path::PathBuf, process::{Command, Stdio}, }; use prost_reflect::{DescriptorPool, DynamicMessage, SerializeOptions, Value}; use prost_types::{field_descriptor_proto::Type, source_code_info::Location}; use tempfile::TempDir; fn test_data_dir() -> PathBuf { PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()).join("tests/data") } fn google_proto_dir() -> PathBuf { PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()) .join("tests/protobuf/src/google/protobuf") } fn google_src_dir() -> PathBuf { PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()).join("tests/protobuf/src") } fn compare(name: &str) { let files = if name == "descriptor" { vec![format!("{}.proto", name)] } else { vec![ // Ensure we use a consistent version of descriptor.proto "google/protobuf/descriptor.proto".to_owned(), format!("{}.proto", name), ] }; let expected = to_yaml(&protoc(&files)); let actual = to_yaml(&protox(&files)); similar_asserts::assert_eq!(expected, actual); } fn to_yaml(message: &DynamicMessage) -> String { let mut serializer = serde_yaml::Serializer::new(Vec::new()); message .serialize_with_options( &mut serializer, &SerializeOptions::new() .skip_default_fields(true) .stringify_64_bit_integers(false), ) .unwrap(); String::from_utf8(serializer.into_inner().unwrap()).unwrap() } fn protoc(files: &[String]) -> DynamicMessage { let tempdir = 
TempDir::new().unwrap(); let result = tempdir.path().join("desc.bin"); let output = Command::new(prost_build::protoc_from_env()) .arg("--proto_path") .arg(test_data_dir()) .arg("--proto_path") .arg(google_proto_dir()) .arg("--proto_path") .arg(google_src_dir()) .arg("--include_imports") .arg("--include_source_info") .arg(format!("--descriptor_set_out={}", result.display())) .args(files) .stderr(Stdio::piped()) .output() .unwrap(); if !output.status.success() { panic!( "protoc did not succeed: {}", String::from_utf8_lossy(&output.stderr) ); } let bytes = fs::read(result).unwrap(); decode_file_descriptor(bytes) } fn protox(files: &[String]) -> DynamicMessage { let descriptor = protox::Compiler::new([test_data_dir(), google_proto_dir(), google_src_dir()]) .unwrap() .include_imports(true) .include_source_info(true) .open_files(files) .unwrap() .encode_file_descriptor_set(); decode_file_descriptor(descriptor) } fn decode_file_descriptor(bytes: Vec) -> DynamicMessage { let pool = DescriptorPool::decode(bytes.as_slice()).unwrap(); let desc = pool .get_message_by_name("google.protobuf.FileDescriptorSet") .unwrap(); let mut file_set = DynamicMessage::decode(desc, bytes.as_slice()).unwrap(); let files = file_set .get_field_by_name_mut("file") .unwrap() .as_list_mut() .unwrap(); // We can't compare google.protobuf files directly since they are baked into protoc and may be a different version to // what we are using. 
(The google_protobuf_* tests ensures we are compiling these files correctly) files.retain(|f| { !f.as_message() .unwrap() .get_field_by_name("name") .unwrap() .as_str() .unwrap() .starts_with("google/protobuf/") }); debug_assert!(!files.is_empty()); for file in files { let file = file.as_message_mut().unwrap(); // Normalize ordering of spans let locations = file .get_field_by_name_mut("source_code_info") .unwrap() .as_message_mut() .unwrap() .get_field_by_name_mut("location") .unwrap() .as_list_mut() .unwrap(); locations.sort_unstable_by_key(|location| { let location = location .as_message() .unwrap() .transcode_to::() .unwrap(); (location.path, location.span) }); // Our formatting of floats is slightly different to protoc (and exact conformance is tricky), so we normalize // them in default values visit_messages( file.get_field_by_name_mut("message_type") .unwrap() .as_list_mut() .unwrap(), &|message| { for field in message .get_field_by_name_mut("field") .unwrap() .as_list_mut() .unwrap() { let field = field.as_message_mut().unwrap(); let ty = field .get_field_by_name("type") .unwrap() .as_enum_number() .unwrap(); let default_value = field .get_field_by_name_mut("default_value") .unwrap() .as_string_mut() .unwrap(); if !default_value.is_empty() && matches!(Type::try_from(ty), Ok(Type::Float | Type::Double)) { *default_value = default_value.parse::().unwrap().to_string(); } } }, ) } file_set } fn visit_messages(messages: &mut [Value], f: &impl Fn(&mut DynamicMessage)) { for message in messages { let message = message.as_message_mut().unwrap(); f(message); visit_messages( message .get_field_by_name_mut("nested_type") .unwrap() .as_list_mut() .unwrap(), f, ); } } macro_rules! 
compare { ($name:ident) => { #[test] fn $name() { compare(stringify!($name)); } }; } compare!(empty_file); compare!(empty_file_with_comment); compare!(field_defaults); compare!(generate_map_entry_message); compare!(generate_group_message); compare!(generate_synthetic_oneof_ordering); compare!(generate_synthetic_oneof); compare!(generated_message_ordering); compare!(multiple_extends); compare!(name_resolution); compare!(option_merge_message); compare!(custom_json_name); compare!(reserved_ranges); compare!(oneof_group_field); compare!(service); compare!(option_group_field); compare!(message_name_field_name_conflict); compare!(package_name_field_name_conflict); #[test] fn google_protobuf_any() { compare("any"); } #[test] fn google_protobuf_api() { compare("api"); } #[test] fn google_protobuf_descriptor() { compare("descriptor"); } #[test] fn google_protobuf_duration() { compare("duration"); } #[test] fn google_protobuf_empty() { compare("empty"); } #[test] fn google_protobuf_field_mask() { compare("field_mask"); } #[test] fn google_protobuf_source_context() { compare("source_context"); } #[test] fn google_protobuf_struct() { compare("struct"); } #[test] fn google_protobuf_timestamp() { compare("timestamp"); } #[test] fn google_protobuf_type() { compare("type"); } #[test] fn google_protobuf_wrappers() { compare("wrappers"); } #[test] fn google_protobuf_compiler_plugin() { compare("compiler/plugin"); } #[test] fn google_map_proto2_unittest() { compare("map_proto2_unittest"); } #[test] fn google_map_unittest() { compare("map_unittest"); } #[test] fn google_test_messages_proto2() { compare("test_messages_proto2"); } #[test] fn google_test_messages_proto3() { compare("test_messages_proto3"); } #[test] fn google_unittest_custom_options() { compare("google/google_unittest_custom_options"); } #[test] fn google_unittest_empty() { compare("unittest_empty"); } #[test] fn google_unittest_enormous_descriptor() { compare("unittest_enormous_descriptor"); } #[test] fn 
google_unittest_import() { compare("unittest_import"); } #[test] fn google_unittest_no_field_presence() { compare("google/unittest_no_field_presence"); } #[test] fn google_unittest_preserve_unknown_enum() { compare("google/unittest_preserve_unknown_enum"); } #[test] fn google_unittest_preserve_unknown_enum2() { compare("google/unittest_preserve_unknown_enum2"); } #[test] fn google_unittest_proto3_optional() { compare("unittest_proto3_optional"); } #[test] fn google_unittest_proto3() { compare("unittest_proto3"); } #[test] fn google_unittest_well_known_types() { compare("unittest_well_known_types"); } #[test] fn google_unittest() { compare("unittest"); } protox-0.9.0/tests/compiler.rs000064400000000000000000000304301046102023000145070ustar 00000000000000use std::{env, fs, io, path::PathBuf}; use insta::assert_yaml_snapshot; use miette::{Diagnostic, JSONReportHandler}; use prost::Message; use prost_reflect::{DescriptorPool, Value}; use prost_types::{ source_code_info::Location, FileDescriptorProto, FileDescriptorSet, SourceCodeInfo, }; use protox::{ compile, file::{ChainFileResolver, DescriptorSetFileResolver, File, FileResolver, GoogleFileResolver}, Compiler, Error, }; use tempfile::TempDir; struct TestFileResolver { files: &'static [(&'static str, &'static str)], } impl FileResolver for TestFileResolver { fn open_file(&self, name: &str) -> Result { if name == "customerror.proto" { return Err(Error::new(io::Error::new( io::ErrorKind::Other, "failed to load file!", ))); } for file in self.files { if file.0 == name { return File::from_source(name, file.1); } } Err(Error::file_not_found(name)) } } fn check(files: &'static [(&'static str, &'static str)]) -> Result { let tempdir = tempfile::tempdir().unwrap(); for (file, source) in files { fs::write(tempdir.path().join(file), source).unwrap(); } let mut compiler = Compiler::with_file_resolver(TestFileResolver { files }); // Only compile last file. 
// Imports may have errors that must be correctly listed by compilation of root. compiler.open_file(files[files.len() - 1].0)?; Ok(compiler) } fn check_err(files: &'static [(&'static str, &'static str)]) -> serde_json::Value { error_to_json(&check(files).unwrap_err()) } fn error_to_json(err: &dyn Diagnostic) -> serde_json::Value { let mut json = String::new(); JSONReportHandler::new() .render_report(&mut json, err) .unwrap(); serde_json::from_str(&json).unwrap() } #[test] fn import_not_found() { assert_yaml_snapshot!(check_err(&[("root.proto", "import 'notfound.proto';")])); } #[test] fn import_error() { assert_yaml_snapshot!(check_err(&[("root.proto", "import 'customerror.proto';")])); } #[test] fn double_import_error() { assert_yaml_snapshot!(check_err(&[ ("existing.proto", ""), ( "root.proto", "import 'existing.proto'; import 'existing.proto'; " ), ])); } #[test] fn double_import_branch_error() { assert_yaml_snapshot!(check_err(&[ ("existing.proto", ""), ( "branch.proto", "import 'existing.proto'; import 'existing.proto'; " ), ( "root.proto", "import 'branch.proto'; " ), ])); } #[test] fn type_not_found() { assert_yaml_snapshot!(check_err(&[( "root.proto", " message Foo { optional NotFound foo = 1; } " )])); } #[test] fn default_options() { let mut compiler = Compiler::with_file_resolver(TestFileResolver { files: &[("dep.proto", ""), ("root.proto", "import 'dep.proto';")], }); compiler.open_file("root.proto").unwrap(); let files = compiler.file_descriptor_set(); assert_eq!( files, FileDescriptorSet { file: vec![FileDescriptorProto { name: Some("root.proto".to_owned()), dependency: vec!["dep.proto".to_owned()], ..Default::default() },], } ); let encoded = compiler.encode_file_descriptor_set(); assert_eq!( FileDescriptorSet::decode(encoded.as_slice()).unwrap(), files ); } #[test] fn include_imports() { let mut compiler = Compiler::with_file_resolver(TestFileResolver { files: &[("dep.proto", ""), ("root.proto", "import 'dep.proto';")], }); 
compiler.include_imports(true); compiler.open_file("root.proto").unwrap(); let files = compiler.file_descriptor_set(); assert_eq!( files, FileDescriptorSet { file: vec![ FileDescriptorProto { name: Some("dep.proto".to_owned()), ..Default::default() }, FileDescriptorProto { name: Some("root.proto".to_owned()), dependency: vec!["dep.proto".to_owned()], ..Default::default() }, ], } ); let encoded = compiler.encode_file_descriptor_set(); assert_eq!( FileDescriptorSet::decode(encoded.as_slice()).unwrap(), files ); } #[test] fn include_source_info() { let mut compiler = Compiler::with_file_resolver(TestFileResolver { files: &[("dep.proto", ""), ("root.proto", "import 'dep.proto';")], }); compiler.include_source_info(true); compiler.open_file("root.proto").unwrap(); let files = compiler.file_descriptor_set(); assert_eq!( files, FileDescriptorSet { file: vec![FileDescriptorProto { name: Some("root.proto".to_owned()), dependency: vec!["dep.proto".to_owned()], source_code_info: Some(SourceCodeInfo { location: vec![ Location { path: vec![], span: vec![0, 0, 19], ..Default::default() }, Location { path: vec![3, 0], span: vec![0, 0, 19], ..Default::default() } ] }), ..Default::default() },], } ); let encoded = compiler.encode_file_descriptor_set(); assert_eq!( FileDescriptorSet::decode(encoded.as_slice()).unwrap(), files ); } #[test] fn include_source_info_and_imports() { let mut compiler = Compiler::with_file_resolver(TestFileResolver { files: &[("dep.proto", ""), ("root.proto", "import 'dep.proto';")], }); compiler.include_imports(true); compiler.include_source_info(true); compiler.open_file("root.proto").unwrap(); let files = compiler.file_descriptor_set(); assert_eq!( files, FileDescriptorSet { file: vec![ FileDescriptorProto { name: Some("dep.proto".to_owned()), source_code_info: Some(SourceCodeInfo { location: vec![Location { path: vec![], span: vec![0, 0, 0], ..Default::default() },] }), ..Default::default() }, FileDescriptorProto { name: Some("root.proto".to_owned()), 
dependency: vec!["dep.proto".to_owned()], source_code_info: Some(SourceCodeInfo { location: vec![ Location { path: vec![], span: vec![0, 0, 19], ..Default::default() }, Location { path: vec![3, 0], span: vec![0, 0, 19], ..Default::default() } ] }), ..Default::default() }, ], } ); let encoded = compiler.encode_file_descriptor_set(); assert_eq!( FileDescriptorSet::decode(encoded.as_slice()).unwrap(), files ); } #[test] fn pass_through_extension_options() { let mut resolver = ChainFileResolver::new(); resolver.add(TestFileResolver { files: &[( "root.proto", " import 'google/protobuf/descriptor.proto'; extend google.protobuf.FileOptions { optional int32 ext = 1001; } option (ext) = 1; ", )], }); resolver.add(GoogleFileResolver::new()); let mut compiler = Compiler::with_file_resolver(resolver); compiler.include_imports(true); compiler.open_file("root.proto").unwrap(); let dyn_set = DescriptorPool::decode(compiler.encode_file_descriptor_set().as_slice()).unwrap(); let ext = dyn_set.get_extension_by_name("ext").unwrap(); assert_eq!( dyn_set .get_file_by_name("root.proto") .unwrap() .options() .get_extension(&ext) .as_ref(), &Value::I32(1) ); let roundtripped_resolver = DescriptorSetFileResolver::decode(compiler.encode_file_descriptor_set().as_slice()) .unwrap(); let mut roundtripped_compiler = Compiler::with_file_resolver(roundtripped_resolver); roundtripped_compiler.include_imports(true); roundtripped_compiler.open_file("root.proto").unwrap(); let roundtripped_dyn_set = DescriptorPool::decode( roundtripped_compiler .encode_file_descriptor_set() .as_slice(), ) .unwrap(); let roundtripped_ext = roundtripped_dyn_set.get_extension_by_name("ext").unwrap(); assert_eq!( roundtripped_dyn_set .get_file_by_name("root.proto") .unwrap() .options() .get_extension(&roundtripped_ext) .as_ref(), &Value::I32(1) ); } #[test] fn error_fmt_debug() { let parse_err = check(&[("root.proto", "message {")]).unwrap_err(); let check_err = check(&[("root.proto", "message Foo {} service Foo 
{}")]).unwrap_err(); let import_err = check(&[("root.proto", "// comment \nimport 'notfound.proto';")]).unwrap_err(); let open_err = check(&[("root.proto", "import 'customerror.proto';")]).unwrap_err(); assert!(parse_err.is_parse()); assert_eq!(parse_err.file(), Some("root.proto")); assert_eq!( parse_err.to_string(), "expected an identifier, but found '{'" ); assert_eq!( format!("{:?}", parse_err), "root.proto:1:9: expected an identifier, but found '{'" ); assert!(!check_err.is_io() && !check_err.is_parse()); assert_eq!(check_err.file(), Some("root.proto")); assert_eq!(check_err.to_string(), "name 'Foo' is defined twice"); assert_eq!( format!("{:?}", check_err), "root.proto:1:24: name 'Foo' is defined twice" ); assert!(import_err.is_file_not_found()); assert_eq!(import_err.file(), Some("root.proto")); assert_eq!(import_err.to_string(), "import 'notfound.proto' not found"); assert_eq!( format!("{:?}", import_err), "root.proto:2:1: import 'notfound.proto' not found" ); assert!(open_err.is_io()); assert!(open_err.file().is_none()); assert_eq!(open_err.to_string(), "failed to load file!"); assert_eq!( format!("{:?}", open_err), "Custom { kind: Other, error: \"failed to load file!\" }" ); } #[test] fn error_invalid_utf8() { let dir = TempDir::new().unwrap(); fs::write(dir.path().join("foo.proto"), b"message \xF0\x90\x80Foo {}").unwrap(); let err = compile([dir.path().join("foo.proto")], [dir.path()]).unwrap_err(); assert!(err.is_parse()); assert_eq!(err.file(), Some("foo.proto")); assert_eq!(err.to_string(), "file 'foo.proto' is not valid utf-8"); assert_eq!(format!("{:?}", err), "file 'foo.proto' is not valid utf-8"); } #[test] fn name_resolution_incorrect() { let test_data_dir = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()).join("tests/data"); let error = protox::Compiler::new([test_data_dir]) .unwrap() .include_imports(true) .include_source_info(true) .open_files(["name_resolution_incorrect.proto"]) .unwrap_err(); assert_eq!( error.to_string(), "'foo.Foo' 
resolves to 'com.foo.Foo', which is not defined" ); assert_eq!( format!("{:?}", error), "name_resolution_incorrect.proto:10:5: 'foo.Foo' resolves to 'com.foo.Foo', which is not defined" ); assert_eq!(format!("{}", error.help().unwrap()), "The innermost scope is searched first in name resolution. Consider using a leading '.' (i.e., '.foo.Foo') to start from the outermost scope."); } protox-0.9.0/tests/data/custom_json_name.proto000064400000000000000000000001041046102023000176630ustar 00000000000000message Foo { repeated uint64 foo = 1 [json_name = "BAR_quz"]; }protox-0.9.0/tests/data/empty_file.proto000064400000000000000000000000021046102023000164520ustar 00000000000000 protox-0.9.0/tests/data/empty_file_with_comment.proto000064400000000000000000000000131046102023000212310ustar 00000000000000// comment protox-0.9.0/tests/data/field_defaults.proto000064400000000000000000000023201046102023000172740ustar 00000000000000syntax = "proto2"; message Foo { optional double double = 1 [default = 4.2]; optional float float = 2 [default = 2.4e-2]; optional int32 int32 = 3 [default = -1]; optional int64 int64 = 4 [default = -3221225470]; optional uint32 uint32 = 5 [default = 0]; optional uint64 uint64 = 6 [default = 3221225470]; optional sint32 sint32 = 7 [default = -5]; optional sint64 sint64 = 8 [default = 42]; optional fixed32 fixed32 = 9 [default = 5]; optional fixed64 fixed64 = 10 [default = 7]; optional sfixed32 sfixed32 = 11 [default = -100]; optional sfixed64 sfixed64 = 12 [default = 10]; optional bool bool = 13 [default = true]; optional string string = 14 [default = "hello"]; optional bytes bytes = 15 [default = "abc\366\xFE\a\b\f\n\r\t\v\\\'\"\x00"]; optional Enum enum = 16 [default = ZERO]; optional double double_int = 17 [default = 42]; optional double double_int_negative = 18 [default = -24]; optional double double_nan = 19 [default = nan]; optional double double_inf = 20 [default = inf]; optional double double_nan_negative = 21 [default = -nan]; optional double 
double_inf_negative = 22 [default = -inf]; } enum Enum { ZERO = 0; } protox-0.9.0/tests/data/generate_group_message.proto000064400000000000000000000001151046102023000210340ustar 00000000000000syntax = "proto2"; message Foo { // bar optional group Bar = 1 {}; }protox-0.9.0/tests/data/generate_map_entry_message.proto000064400000000000000000000001031046102023000216730ustar 00000000000000syntax = "proto3"; message Foo { map bar = 1; }protox-0.9.0/tests/data/generate_synthetic_oneof.proto000064400000000000000000000002361046102023000214000ustar 00000000000000syntax = 'proto3'; message Foo { optional fixed64 val = 1; } message Bar { optional sfixed64 _val = 1; } message Quz { optional Bar val = 1; } protox-0.9.0/tests/data/generate_synthetic_oneof_ordering.proto000064400000000000000000000002241046102023000232660ustar 00000000000000syntax = 'proto3'; message Foo { oneof o1 { float a = 1; } optional bool o3 = 2; oneof o2 { string b = 3; } }protox-0.9.0/tests/data/generated_message_ordering.proto000064400000000000000000000003161046102023000216600ustar 00000000000000syntax = "proto2"; extend Bar { optional group Baz = 1 {} } message Bar { extensions 1; map x = 5; oneof foo { // foo group Quz = 3 {} } message Nest {} }protox-0.9.0/tests/data/google/google_unittest_custom_options.proto000064400000000000000000000322331046102023000241640ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. 
// * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Author: benjy@google.com (Benjy Weinberger) // Based on original Protocol Buffers design by // Sanjay Ghemawat, Jeff Dean, and others. // // A proto file used to test the "custom options" feature of google.protobuf. // Modified to remove references to the message set encoding in custom options syntax = "proto2"; // Some generic_services option(s) added automatically. // See: http://go/proto2-generic-services-default option cc_generic_services = true; // auto-added option java_generic_services = true; // auto-added option py_generic_services = true; // A custom file option (defined below). 
option (file_opt1) = 9876543210; import "google/protobuf/any.proto"; import "google/protobuf/descriptor.proto"; // We don't put this in a package within proto2 because we need to make sure // that the generated code doesn't depend on being in the proto2 namespace. package protobuf_unittest; // Some simple test custom options of various types. extend google.protobuf.FileOptions { optional uint64 file_opt1 = 7736974; } extend google.protobuf.MessageOptions { optional int32 message_opt1 = 7739036; } extend google.protobuf.FieldOptions { optional fixed64 field_opt1 = 7740936; // This is useful for testing that we correctly register default values for // extension options. optional int32 field_opt2 = 7753913 [default = 42]; } extend google.protobuf.OneofOptions { optional int32 oneof_opt1 = 7740111; } extend google.protobuf.EnumOptions { optional sfixed32 enum_opt1 = 7753576; } extend google.protobuf.EnumValueOptions { optional int32 enum_value_opt1 = 1560678; } extend google.protobuf.ServiceOptions { optional sint64 service_opt1 = 7887650; } enum MethodOpt1 { METHODOPT1_VAL1 = 1; METHODOPT1_VAL2 = 2; } extend google.protobuf.MethodOptions { optional MethodOpt1 method_opt1 = 7890860; } // A test message with custom options at all possible locations (and also some // regular options, to make sure they interact nicely). message TestMessageWithCustomOptions { option message_set_wire_format = false; option (message_opt1) = -56; optional string field1 = 1 [ctype = CORD, (field_opt1) = 8765432109]; oneof AnOneof { option (oneof_opt1) = -99; int32 oneof_field = 2; } map map_field = 3 [(field_opt1) = 12345]; enum AnEnum { option (enum_opt1) = -789; ANENUM_VAL1 = 1; ANENUM_VAL2 = 2 [(enum_value_opt1) = 123]; } } // A test RPC service with custom options at all possible locations (and also // some regular options, to make sure they interact nicely). 
message CustomOptionFooRequest {} message CustomOptionFooResponse {} message CustomOptionFooClientMessage {} message CustomOptionFooServerMessage {} service TestServiceWithCustomOptions { option (service_opt1) = -9876543210; rpc Foo(CustomOptionFooRequest) returns (CustomOptionFooResponse) { option (method_opt1) = METHODOPT1_VAL2; } } // Options of every possible field type, so we can test them all exhaustively. message DummyMessageContainingEnum { enum TestEnumType { TEST_OPTION_ENUM_TYPE1 = 22; TEST_OPTION_ENUM_TYPE2 = -23; } } message DummyMessageInvalidAsOptionType {} extend google.protobuf.MessageOptions { optional bool bool_opt = 7706090; optional int32 int32_opt = 7705709; optional int64 int64_opt = 7705542; optional uint32 uint32_opt = 7704880; optional uint64 uint64_opt = 7702367; optional sint32 sint32_opt = 7701568; optional sint64 sint64_opt = 7700863; optional fixed32 fixed32_opt = 7700307; optional fixed64 fixed64_opt = 7700194; optional sfixed32 sfixed32_opt = 7698645; optional sfixed64 sfixed64_opt = 7685475; optional float float_opt = 7675390; optional double double_opt = 7673293; optional string string_opt = 7673285; optional bytes bytes_opt = 7673238; optional DummyMessageContainingEnum.TestEnumType enum_opt = 7673233; optional DummyMessageInvalidAsOptionType message_type_opt = 7665967; } message CustomOptionMinIntegerValues { option (bool_opt) = false; option (int32_opt) = -0x80000000; option (int64_opt) = -0x8000000000000000; option (uint32_opt) = 0; option (uint64_opt) = 0; option (sint32_opt) = -0x80000000; option (sint64_opt) = -0x8000000000000000; option (fixed32_opt) = 0; option (fixed64_opt) = 0; option (sfixed32_opt) = -0x80000000; option (sfixed64_opt) = -0x8000000000000000; } message CustomOptionMaxIntegerValues { option (bool_opt) = true; option (int32_opt) = 0x7FFFFFFF; option (int64_opt) = 0x7FFFFFFFFFFFFFFF; option (uint32_opt) = 0xFFFFFFFF; option (uint64_opt) = 0xFFFFFFFFFFFFFFFF; option (sint32_opt) = 0x7FFFFFFF; option 
(sint64_opt) = 0x7FFFFFFFFFFFFFFF; option (fixed32_opt) = 0xFFFFFFFF; option (fixed64_opt) = 0xFFFFFFFFFFFFFFFF; option (sfixed32_opt) = 0x7FFFFFFF; option (sfixed64_opt) = 0x7FFFFFFFFFFFFFFF; } message CustomOptionOtherValues { option (int32_opt) = -100; // To test sign-extension. option (float_opt) = 12.3456789; option (double_opt) = 1.234567890123456789; option (string_opt) = "Hello, \"World\""; option (bytes_opt) = "Hello\0World"; option (enum_opt) = TEST_OPTION_ENUM_TYPE2; } message SettingRealsFromPositiveInts { option (float_opt) = 12; option (double_opt) = 154; } message SettingRealsFromNegativeInts { option (float_opt) = -12; option (double_opt) = -154; } // Options of complex message types, themselves combined and extended in // various ways. message ComplexOptionType1 { optional int32 foo = 1; optional int32 foo2 = 2; optional int32 foo3 = 3; repeated int32 foo4 = 4; extensions 100 to max; } message ComplexOptionType2 { optional ComplexOptionType1 bar = 1; optional int32 baz = 2; message ComplexOptionType4 { optional int32 waldo = 1; extend google.protobuf.MessageOptions { optional ComplexOptionType4 complex_opt4 = 7633546; } } optional ComplexOptionType4 fred = 3; repeated ComplexOptionType4 barney = 4; extensions 100 to max; } message ComplexOptionType3 { optional int32 moo = 1; optional group ComplexOptionType5 = 2 { optional int32 plugh = 3; } } extend ComplexOptionType1 { optional int32 mooo = 7663707; optional ComplexOptionType3 corge = 7663442; } extend ComplexOptionType2 { optional int32 grault = 7650927; optional ComplexOptionType1 garply = 7649992; } extend google.protobuf.MessageOptions { optional protobuf_unittest.ComplexOptionType1 complex_opt1 = 7646756; optional ComplexOptionType2 complex_opt2 = 7636949; optional ComplexOptionType3 complex_opt3 = 7636463; optional group ComplexOpt6 = 7595468 { optional int32 xyzzy = 7593951; } } // Note that we try various different ways of naming the same extension. 
message VariousComplexOptions { option (.protobuf_unittest.complex_opt1).foo = 42; option (protobuf_unittest.complex_opt1).(.protobuf_unittest.mooo) = 324; option (.protobuf_unittest.complex_opt1).(protobuf_unittest.corge).moo = 876; option (protobuf_unittest.complex_opt1).foo4 = 99; option (protobuf_unittest.complex_opt1).foo4 = 88; option (complex_opt2).baz = 987; option (complex_opt2).(grault) = 654; option (complex_opt2).bar.foo = 743; option (complex_opt2).bar.(mooo) = 1999; option (complex_opt2).bar.(protobuf_unittest.corge).moo = 2008; option (complex_opt2).(garply).foo = 741; option (complex_opt2).(garply).(.protobuf_unittest.mooo) = 1998; option (complex_opt2).(protobuf_unittest.garply).(corge).moo = 2121; option (ComplexOptionType2.ComplexOptionType4.complex_opt4).waldo = 1971; option (complex_opt2).fred.waldo = 321; option (complex_opt2).barney = { waldo: 101 }; option (complex_opt2).barney = { waldo: 212 }; option (protobuf_unittest.complex_opt3).moo = 9; option (complex_opt3).complexoptiontype5.plugh = 22; option (complexopt6).xyzzy = 24; } // ------------------------------------------------------ // Definitions for testing aggregate option parsing. // See descriptor_unittest.cc. 
message AggregateMessageSet { // Modified since protox doesn't support message set wire format option message_set_wire_format = false; extensions 4 to max; } message AggregateMessageSetElement { extend AggregateMessageSet { optional AggregateMessageSetElement message_set_extension = 15447542; } optional string s = 1; } // A helper type used to test aggregate option parsing message Aggregate { optional int32 i = 1; optional string s = 2; // A nested object optional Aggregate sub = 3; // To test the parsing of extensions inside aggregate values optional google.protobuf.FileOptions file = 4; extend google.protobuf.FileOptions { optional Aggregate nested = 15476903; } // An embedded message set optional AggregateMessageSet mset = 5; // An any optional google.protobuf.Any any = 6; } // Allow Aggregate to be used as an option at all possible locations // in the .proto grammar. extend google.protobuf.FileOptions { optional Aggregate fileopt = 15478479; } extend google.protobuf.MessageOptions { optional Aggregate msgopt = 15480088; } extend google.protobuf.FieldOptions { optional Aggregate fieldopt = 15481374; } extend google.protobuf.EnumOptions { optional Aggregate enumopt = 15483218; } extend google.protobuf.EnumValueOptions { optional Aggregate enumvalopt = 15486921; } extend google.protobuf.ServiceOptions { optional Aggregate serviceopt = 15497145; } extend google.protobuf.MethodOptions { optional Aggregate methodopt = 15512713; } // Try using AggregateOption at different points in the proto grammar option (fileopt) = { s: 'FileAnnotation' // Also test the handling of comments /* of both types */ i: 100 sub { s: 'NestedFileAnnotation' } // Include a google.protobuf.FileOptions and recursively extend it with // another fileopt. 
file { [protobuf_unittest.fileopt] { s: 'FileExtensionAnnotation' } } // A message set inside an option value mset { [protobuf_unittest.AggregateMessageSetElement.message_set_extension] { s: 'EmbeddedMessageSetElement' } } any { [type.googleapis.com/protobuf_unittest.AggregateMessageSetElement] { s: 'EmbeddedMessageSetElement' } } }; message AggregateMessage { option (msgopt) = { i: 101 s: 'MessageAnnotation' }; optional int32 fieldname = 1 [(fieldopt) = { s: 'FieldAnnotation' }]; } service AggregateService { option (serviceopt) = { s: 'ServiceAnnotation' }; rpc Method(AggregateMessage) returns (AggregateMessage) { option (methodopt) = { s: 'MethodAnnotation' }; } } enum AggregateEnum { option (enumopt) = { s: 'EnumAnnotation' }; VALUE = 1 [(enumvalopt) = { s: 'EnumValueAnnotation' }]; } // Test custom options for nested type. message NestedOptionType { message NestedMessage { option (message_opt1) = 1001; optional int32 nested_field = 1 [(field_opt1) = 1002]; } enum NestedEnum { option (enum_opt1) = 1003; NESTED_ENUM_VALUE = 1 [(enum_value_opt1) = 1004]; } extend google.protobuf.FileOptions { optional int32 nested_extension = 7912573 [(field_opt2) = 1005]; } } // Custom message option that has a required enum field. // WARNING: this is strongly discouraged! message OldOptionType { enum TestEnum { OLD_VALUE = 0; } required TestEnum value = 1; } // Updated version of the custom option above. message NewOptionType { enum TestEnum { OLD_VALUE = 0; NEW_VALUE = 1; } required TestEnum value = 1; } extend google.protobuf.MessageOptions { optional OldOptionType required_enum_opt = 106161807; } // Test message using the "required_enum_opt" option defined above. message TestMessageWithRequiredEnumOption { option (required_enum_opt) = { value: OLD_VALUE }; } protox-0.9.0/tests/data/google/unittest_no_field_presence.proto000064400000000000000000000070601046102023000232060ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. 
All rights reserved. // // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file or at // https://developers.google.com/open-source/licenses/bsd // A proto file used to test a message type with no explicit field presence. // Modified due to lack of edition support in protox syntax = "proto3"; // Treat all fields as implicit present by default (proto3 behavior). option features.field_presence = IMPLICIT; // We want to test embedded proto2 messages, so include some proto2 types. package proto2_nofieldpresence_unittest; import "google/protobuf/unittest.proto"; // This proto includes every type of field in both singular and repeated // forms. message TestAllTypes { message NestedMessage { int32 bb = 1; } enum NestedEnum { FOO = 0; BAR = 1; BAZ = 2; } // Singular // TODO: remove 'optional' labels as soon as CL 69188077 is LGTM'd to make // 'optional' optional. int32 optional_int32 = 1; int64 optional_int64 = 2; uint32 optional_uint32 = 3; uint64 optional_uint64 = 4; sint32 optional_sint32 = 5; sint64 optional_sint64 = 6; fixed32 optional_fixed32 = 7; fixed64 optional_fixed64 = 8; sfixed32 optional_sfixed32 = 9; sfixed64 optional_sfixed64 = 10; float optional_float = 11; double optional_double = 12; bool optional_bool = 13; string optional_string = 14; bytes optional_bytes = 15; NestedMessage optional_nested_message = 18; ForeignMessage optional_foreign_message = 19; protobuf_unittest.TestAllTypes optional_proto2_message = 20; NestedEnum optional_nested_enum = 21; ForeignEnum optional_foreign_enum = 22; // N.B.: proto2-enum-type fields not allowed, because their default values // might not be zero. 
// optional protobuf_unittest.ForeignEnum optional_proto2_enum = // 23; string optional_string_piece = 24 [ctype = STRING_PIECE]; string optional_cord = 25 [ctype = CORD]; NestedMessage optional_lazy_message = 30 [lazy = true]; // Repeated repeated int32 repeated_int32 = 31; repeated int64 repeated_int64 = 32; repeated uint32 repeated_uint32 = 33; repeated uint64 repeated_uint64 = 34; repeated sint32 repeated_sint32 = 35; repeated sint64 repeated_sint64 = 36; repeated fixed32 repeated_fixed32 = 37; repeated fixed64 repeated_fixed64 = 38; repeated sfixed32 repeated_sfixed32 = 39; repeated sfixed64 repeated_sfixed64 = 40; repeated float repeated_float = 41; repeated double repeated_double = 42; repeated bool repeated_bool = 43; repeated string repeated_string = 44; repeated bytes repeated_bytes = 45; repeated NestedMessage repeated_nested_message = 48; repeated ForeignMessage repeated_foreign_message = 49; repeated protobuf_unittest.TestAllTypes repeated_proto2_message = 50; repeated NestedEnum repeated_nested_enum = 51; repeated ForeignEnum repeated_foreign_enum = 52; repeated string repeated_string_piece = 54 [ctype = STRING_PIECE]; repeated string repeated_cord = 55 [ctype = CORD]; repeated NestedMessage repeated_lazy_message = 57 [lazy = true]; oneof oneof_field { uint32 oneof_uint32 = 111; NestedMessage oneof_nested_message = 112; string oneof_string = 113; NestedEnum oneof_enum = 114; } } message TestProto2Required { protobuf_unittest.TestRequired proto2 = 1; } // Define these after TestAllTypes to make sure the compiler can handle // that. message ForeignMessage { int32 c = 1; } enum ForeignEnum { FOREIGN_FOO = 0; FOREIGN_BAR = 1; FOREIGN_BAZ = 2; } protox-0.9.0/tests/data/google/unittest_preserve_unknown_enum.proto000064400000000000000000000024211046102023000241750ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. 
// // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file or at // https://developers.google.com/open-source/licenses/bsd // Modified due to lack of edition support in protox syntax = "proto3"; package proto3_preserve_unknown_enum_unittest; // Treat all fields as implicit present by default (proto3 behavior). option features.field_presence = IMPLICIT; option objc_class_prefix = "UnknownEnums"; option csharp_namespace = "Google.Protobuf.TestProtos"; enum MyEnum { FOO = 0; BAR = 1; BAZ = 2; } enum MyEnumPlusExtra { E_FOO = 0; E_BAR = 1; E_BAZ = 2; E_EXTRA = 3; } message MyMessage { MyEnum e = 1; repeated MyEnum repeated_e = 2; repeated MyEnum repeated_packed_e = 3; repeated MyEnumPlusExtra repeated_packed_unexpected_e = 4; // not packed oneof o { MyEnum oneof_e_1 = 5; MyEnum oneof_e_2 = 6; } } message MyMessagePlusExtra { MyEnumPlusExtra e = 1; repeated MyEnumPlusExtra repeated_e = 2; repeated MyEnumPlusExtra repeated_packed_e = 3; repeated MyEnumPlusExtra repeated_packed_unexpected_e = 4; oneof o { MyEnumPlusExtra oneof_e_1 = 5; MyEnumPlusExtra oneof_e_2 = 6; } } protox-0.9.0/tests/data/google/unittest_preserve_unknown_enum2.proto000064400000000000000000000016611046102023000242640ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file or at // https://developers.google.com/open-source/licenses/bsd // Modified due to lack of edition support in protox syntax = "proto3"; package proto2_preserve_unknown_enum_unittest; // Treat all enums as closed and use expanded encoding for repeated fields by // default (proto2 behavior). 
option features.enum_type = CLOSED; option features.repeated_field_encoding = EXPANDED; enum MyEnum { FOO = 0; BAR = 1; BAZ = 2; } message MyMessage { MyEnum e = 1; repeated MyEnum repeated_e = 2; repeated MyEnum repeated_packed_e = 3 [features.repeated_field_encoding = PACKED]; repeated MyEnum repeated_packed_unexpected_e = 4; // not packed oneof o { MyEnum oneof_e_1 = 5; MyEnum oneof_e_2 = 6; } } protox-0.9.0/tests/data/include/no_package.proto000064400000000000000000000000421046102023000200330ustar 00000000000000syntax = "proto3"; message Bar {}protox-0.9.0/tests/data/include/package.proto000064400000000000000000000000601046102023000173370ustar 00000000000000syntax = "proto3"; package foo; message Foo {}protox-0.9.0/tests/data/message_name_field_name_conflict.proto000064400000000000000000000004341046102023000227760ustar 00000000000000// Regression test for https://github.com/andrewhickman/protox/issues/57 syntax = "proto3"; message Parent { ChildMessage ChildMessage = 1; ChildEnum ChildEnum = 2; } message ChildMessage { string field = 1; } enum ChildEnum { UNKNOWN = 0; A = 1; B = 2; }protox-0.9.0/tests/data/multiple_extends.proto000064400000000000000000000003461046102023000177150ustar 00000000000000syntax = "proto2"; message Foo { extensions 1, 2 to 3; } // Foo1 extend Foo { // Foo1.a optional int32 a = 1; } // Foo2 extend Foo { // Foo2.b optional int32 b = 2; // Foo2.c optional int32 c = 3; } protox-0.9.0/tests/data/name_resolution.proto000064400000000000000000000012211046102023000175240ustar 00000000000000syntax = "proto3"; import "include/package.proto"; import "include/no_package.proto"; package foo.bar; message FooBar {} message Names { .foo.bar.FooBar field1 = 1; foo.bar.FooBar field2 = 2; bar.FooBar field3 = 3; FooBar field4 = 4; .foo.Foo field5 = 5; foo.Foo field6 = 6; Foo field7 = 7; .Bar field8 = 8; Bar field9 = 9; message Nested { .foo.bar.FooBar field1 = 1; foo.bar.FooBar field2 = 2; bar.FooBar field3 = 3; FooBar field4 = 4; .foo.Foo field5 = 5; 
foo.Foo field6 = 6; Foo field7 = 7; .Bar field8 = 8; Bar field9 = 9; } } protox-0.9.0/tests/data/name_resolution_incorrect.proto000064400000000000000000000002751046102023000216040ustar 00000000000000syntax = "proto3"; import "include/package.proto"; package com.foo.bar; message Names { .foo.Foo field5 = 5; // correct foo.Foo field6 = 6; // incorrect: not found in com.foo } protox-0.9.0/tests/data/oneof_group_field.proto000064400000000000000000000002071046102023000200110ustar 00000000000000syntax = "proto2"; message Message { oneof oneof { group Group = 1 { optional int32 bar = 1; } } }protox-0.9.0/tests/data/option_group_field.proto000064400000000000000000000007131046102023000202150ustar 00000000000000syntax = "proto2"; import "google/protobuf/descriptor.proto"; message Composite { optional string str = 1001; optional group CompositeGroup = 1006 { required int32 grouped_int32 = 1; optional string grouped_str = 2; } } extend google.protobuf.MessageOptions { optional Composite compo = 30000; } message Message { option (compo) = { str: "a string" CompositeGroup: { grouped_int32: 1 grouped_str: "hello" } }; }protox-0.9.0/tests/data/option_merge_message.proto000064400000000000000000000000071046102023000205150ustar 00000000000000// TODOprotox-0.9.0/tests/data/package_name_field_name_conflict.proto000064400000000000000000000002641046102023000227460ustar 00000000000000// Regression test for https://github.com/andrewhickman/protox/issues/86 syntax = "proto3"; package sample; import "include/package.proto"; message Sample { foo.Foo foo = 2; }protox-0.9.0/tests/data/reserved_ranges.proto000064400000000000000000000004521046102023000175040ustar 00000000000000syntax = "proto2"; message Message { optional int32 field = 2; // name reserved "foo"; // reserved reserved 1, 3 to 4; // extensions extensions 5 to max; } enum Enum { ZERO = 0; // name reserved "ONE"; // reserved reserved 1, 2 to 3, 4 to max; } 
protox-0.9.0/tests/data/service.proto000064400000000000000000000003671046102023000157730ustar 00000000000000syntax = "proto3"; message Foo {} service Service { rpc unary(Foo) returns (Foo); rpc client_streaming(stream Foo) returns (Foo); rpc server_streaming(Foo) returns (stream Foo); rpc streaming(stream Foo) returns (stream Foo); } protox-0.9.0/tests/snapshots/compiler__double_import_branch_error.snap000064400000000000000000000007141046102023000246610ustar 00000000000000--- source: protox/tests/compiler.rs assertion_line: 91 expression: "check_err(&[(\"existing.proto\", \"\"),\n(\"branch.proto\",\n\"import 'existing.proto';\n import 'existing.proto';\n \"),\n(\"root.proto\", \"import 'branch.proto';\n \"),])" --- causes: [] filename: branch.proto labels: - label: imported here span: length: 24 offset: 33 message: "import 'existing.proto' was listed twice" related: [] severity: error protox-0.9.0/tests/snapshots/compiler__double_import_error.snap000064400000000000000000000006171046102023000233460ustar 00000000000000--- source: protox/tests/compiler.rs assertion_line: 79 expression: "check_err(&[(\"existing.proto\", \"\"),\n(\"root.proto\",\n\"import 'existing.proto';\n import 'existing.proto';\n \"),])" --- causes: [] filename: root.proto labels: - label: imported here span: length: 24 offset: 33 message: "import 'existing.proto' was listed twice" related: [] severity: error protox-0.9.0/tests/snapshots/compiler__import_error.snap000064400000000000000000000003101046102023000220020ustar 00000000000000--- source: protox/tests/compiler.rs expression: "check_err(&[(\"root.proto\", \"import 'customerror.proto';\")])" --- causes: [] labels: [] message: failed to load file! 
related: [] severity: error protox-0.9.0/tests/snapshots/compiler__import_not_found.snap000064400000000000000000000004521046102023000226530ustar 00000000000000--- source: protox/tests/compiler.rs expression: "check_err(&[(\"root.proto\", \"import 'notfound.proto';\")])" --- causes: [] filename: root.proto labels: - label: imported here span: length: 24 offset: 0 message: "import 'notfound.proto' not found" related: [] severity: error protox-0.9.0/tests/snapshots/compiler__type_not_found.snap000064400000000000000000000005611046102023000223230ustar 00000000000000--- source: protox/tests/compiler.rs expression: "check_err(&[(\"root.proto\",\n \"\n message Foo {\n optional NotFound foo = 1;\n }\n \")])" --- causes: [] filename: root.proto labels: - label: found here span: length: 8 offset: 44 message: "name 'NotFound' is not defined" related: [] severity: error protox-0.9.0/tests/wkt.rs000064400000000000000000000050551046102023000135070ustar 00000000000000use prost_reflect::ReflectMessage; use prost_types::FileDescriptorSet; use protox::{file::GoogleFileResolver, Compiler}; #[test] fn prost_reflect_wkt_matches_compiled_wkt() { use prost::Message; let desc = FileDescriptorSet::decode(expected_well_known_types().as_slice()).unwrap(); let prost_wkt_desc = FileDescriptorSet { file: ().descriptor().parent_pool().file_descriptor_protos().cloned().collect(), }; if desc != prost_wkt_desc { let actual = format!("{prost_wkt_desc:#?}"); let expected = format!("{desc:#?}"); let diff = similar_asserts::SimpleDiff::from_str(&actual, &expected, "actual", "expected"); // If this fails with a non-trivial diff it's reasonable to just dump `desc` via // the debug representation and afterwards adjust the output to be valid rust code // The following steps were done for the intial version: // // * replace `[` with `vec![` // * Call `.into()` on strings and enum variants // * Wrap all options field into `Options::from_prost` // // The first two steps can be easily done with a bunch of 
search and // replace queries for almost all instances. There are a few cases // that need to be manually adjusted afterwards // // The last step requires manually going through these fields, but // that's only ~10 instances. panic!( "The well known file descriptor returned by `make_description()` \ does not match the expected file descriptor parsed from `src/well_known_types_bytes.bin`: \ {diff}" ); } } fn expected_well_known_types() -> Vec { // protox can output a FileDescriptorSet directly, but by going through bytes, this should still work // when upgrading to a newer prost-types version. Compiler::with_file_resolver(GoogleFileResolver::new()) .include_source_info(false) .open_files([ "google/protobuf/any.proto", "google/protobuf/api.proto", "google/protobuf/descriptor.proto", "google/protobuf/duration.proto", "google/protobuf/empty.proto", "google/protobuf/field_mask.proto", "google/protobuf/source_context.proto", "google/protobuf/struct.proto", "google/protobuf/timestamp.proto", "google/protobuf/type.proto", "google/protobuf/wrappers.proto", "google/protobuf/compiler/plugin.proto", ]) .unwrap() .encode_file_descriptor_set() }