protobuf-parse-3.7.2/.cargo_vcs_info.json0000644000000001540000000000100140150ustar { "git": { "sha1": "4cb84f305c05f0376ff51b555a2740c5251c1280" }, "path_in_vcs": "protobuf-parse" }protobuf-parse-3.7.2/Cargo.lock0000644000000221370000000000100117750ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 4 [[package]] name = "anyhow" version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" [[package]] name = "bitflags" version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "either" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" [[package]] name = "equivalent" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", "windows-sys", ] [[package]] name = "fastrand" version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "getrandom" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" dependencies = [ "cfg-if", "libc", "wasi", "windows-targets", ] 
[[package]] name = "hashbrown" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" [[package]] name = "home" version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" dependencies = [ "windows-sys", ] [[package]] name = "indexmap" version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" dependencies = [ "equivalent", "hashbrown", ] [[package]] name = "libc" version = "0.2.170" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828" [[package]] name = "linux-raw-sys" version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "linux-raw-sys" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6db9c683daf087dc577b7506e9695b3d556a9f3849903fa28186283afd6809e9" [[package]] name = "log" version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" [[package]] name = "once_cell" version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" [[package]] name = "proc-macro2" version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" dependencies = [ "unicode-ident", ] [[package]] name = "protobuf" version = "3.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d65a1d4ddae7d8b5de68153b48f6aa3bba8cb002b243dbdbc55a5afbc98f99f4" dependencies = [ "once_cell", "protobuf-support", "thiserror", ] [[package]] name = "protobuf-parse" version = "3.7.2" dependencies = [ "anyhow", "indexmap", "log", "protobuf", "protobuf-support", "tempfile", "thiserror", "which", ] [[package]] name = "protobuf-support" version = "3.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e36c2f31e0a47f9280fb347ef5e461ffcd2c52dd520d8e216b52f93b0b0d7d6" dependencies = [ "thiserror", ] [[package]] name = "quote" version = "1.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1f1914ce909e1658d9907913b4b91947430c7d9be598b15a1912935b8c04801" dependencies = [ "proc-macro2", ] [[package]] name = "rustix" version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys 0.4.15", "windows-sys", ] [[package]] name = "rustix" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7178faa4b75a30e269c71e61c353ce2748cf3d76f0c44c393f4e60abf49b825" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys 0.9.2", "windows-sys", ] [[package]] name = "syn" version = "2.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "tempfile" version = "3.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c317e0a526ee6120d8dabad239c8dadca62b24b6f168914bbbc8e2fb1f0e567" dependencies = [ "cfg-if", "fastrand", "getrandom", "once_cell", "rustix 1.0.2", "windows-sys", ] [[package]] name = "thiserror" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "unicode-ident" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "wasi" version = "0.13.3+wasi-0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" dependencies = [ "wit-bindgen-rt", ] [[package]] name = "which" version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" dependencies = [ "either", "home", "once_cell", "rustix 0.38.44", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", "windows_i686_gnullvm", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "wit-bindgen-rt" version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" dependencies = [ "bitflags", ] protobuf-parse-3.7.2/Cargo.toml0000644000000035410000000000100120160ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. 
# # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" name = "protobuf-parse" version = "3.7.2" authors = ["Stepan Koltsov "] build = false autolib = false autobins = false autoexamples = false autotests = false autobenches = false description = """ Parse `.proto` files. Files are parsed into a `protobuf::descriptor::FileDescriptorSet` object using either: * pure rust parser (no dependencies) * `protoc` binary (more reliable and compatible with Google's implementation) """ homepage = "https://github.com/stepancheg/rust-protobuf/tree/master/protobuf-parse/" readme = "README.md" license = "MIT" repository = "https://github.com/stepancheg/rust-protobuf/tree/master/protobuf-parse/" [package.metadata.docs.rs] all-features = true [lib] name = "protobuf_parse" path = "src/lib.rs" doctest = false [[bin]] name = "parse-and-typecheck" path = "src/bin/parse-and-typecheck.rs" test = false [[example]] name = "file-descriptor-out-compare" path = "examples/file-descriptor-out-compare.rs" [[test]] name = "bundled_proto_consistent" path = "tests/bundled_proto_consistent.rs" [dependencies.anyhow] version = "1.0.53" [dependencies.indexmap] version = "2.0.0" [dependencies.log] version = "0.4" [dependencies.protobuf] version = "=3.7.2" [dependencies.protobuf-support] version = "=3.7.2" [dependencies.tempfile] version = "3.2.0" [dependencies.thiserror] version = "1.0.30" [dependencies.which] version = "4.0" protobuf-parse-3.7.2/Cargo.toml.orig000064400000000000000000000020251046102023000154730ustar 00000000000000[package] name = "protobuf-parse" version = "3.7.2" edition = "2021" authors = ["Stepan Koltsov "] license = "MIT" homepage = "https://github.com/stepancheg/rust-protobuf/tree/master/protobuf-parse/" repository = "https://github.com/stepancheg/rust-protobuf/tree/master/protobuf-parse/" description = """ Parse `.proto` files. 
Files are parsed into a `protobuf::descriptor::FileDescriptorSet` object using either: * pure rust parser (no dependencies) * `protoc` binary (more reliable and compatible with Google's implementation) """ [dependencies] tempfile = "3.2.0" log = "0.4" which = "4.0" anyhow = "1.0.53" thiserror = "1.0.30" indexmap = "2.0.0" protobuf = { path = "../protobuf", version = "=3.7.2" } protobuf-support = { path = "../protobuf-support", version = "=3.7.2" } [lib] # TODO: figure out what to do with bundled linked_hash_map doctest = false [[bin]] name = "parse-and-typecheck" path = "src/bin/parse-and-typecheck.rs" test = false [package.metadata.docs.rs] all-features = true protobuf-parse-3.7.2/LICENSE.txt000064400000000000000000000054321046102023000144340ustar 00000000000000Copyright (c) 2019 Stepan Koltsov Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ----- The following applies to src/proto/google/ Copyright 2008 Google Inc. All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Code generated by the Protocol Buffer compiler is owned by the owner of the input file used when generating it. This code is not standalone and requires a support library to be linked with it. This support library is itself covered by the above license. protobuf-parse-3.7.2/README.md000064400000000000000000000010751046102023000140670ustar 00000000000000 # Parse `.proto` files Parse `.proto` file definitions, **not** the protobuf text format serialization. Files can be parsed using pure Rust parser (mod `pure`) or using the `protoc` command (mod `protoc`). 
This crate is not meant to be used directly, but rather through the `protobuf-codegen` crate. If you think this crate might be useful to you, please [consider creating an issue](https://github.com/stepancheg/rust-protobuf/issues/new), until that this crate is considered to have **no stable API**. protobuf-parse-3.7.2/examples/file-descriptor-out-compare.rs000064400000000000000000000020701046102023000223140ustar 00000000000000use std::env; use std::fs; use protobuf::text_format; use protobuf_parse::Parser; enum Which { Protoc, Pure, } fn main() { let args = env::args().skip(1).collect::>(); let args = args.iter().map(|s| s.as_str()).collect::>(); let (path, out_protoc, out_pure) = match args.as_slice() { // Just invoke protoc. [path, out_protoc, out_pure] => (path, out_protoc, out_pure), _ => panic!("wrong args"), }; for which in [Which::Pure, Which::Protoc] { let mut parser = Parser::new(); match which { Which::Protoc => { parser.protoc(); } Which::Pure => { parser.pure(); } } parser.input(path); parser.include("."); let fds = parser.file_descriptor_set().unwrap(); let fds = text_format::print_to_string_pretty(&fds); let out = match which { Which::Protoc => out_protoc, Which::Pure => out_pure, }; fs::write(out, fds).unwrap(); } } protobuf-parse-3.7.2/src/bin/parse-and-typecheck.rs000064400000000000000000000014371046102023000203460ustar 00000000000000use std::env; use std::path::PathBuf; use std::process::exit; use protobuf_parse::Parser; fn main() { let args = env::args_os() .skip(1) .map(PathBuf::from) .collect::>(); if args.len() != 2 { eprintln!( "usage: {} ", env::args().next().unwrap() ); exit(1); } eprintln!( "{} is not a part of public interface", env::args().next().unwrap() ); assert!(args.len() >= 2); let (input, includes) = args.split_at(1); let t = Parser::new() .pure() .includes(includes) .inputs(input) .parse_and_typecheck() .expect("parse_and_typecheck"); for fd in t.file_descriptors { println!("{:#?}", fd); } } 
protobuf-parse-3.7.2/src/case_convert.rs000064400000000000000000000024611046102023000164200ustar 00000000000000// copy-paste from Google Protobuf // must be kept in sync with Google for JSON interop #[doc(hidden)] pub fn camel_case(input: &str) -> String { let mut capitalize_next = true; let mut result = String::new(); result.reserve(input.len()); for c in input.chars() { if c == '_' { capitalize_next = true; } else if capitalize_next { result.push(c.to_ascii_uppercase()); capitalize_next = false; } else { result.push(c); } } result } #[doc(hidden)] pub fn snake_case(input: &str) -> String { let mut result = String::new(); let mut last_lower = false; for c in input.chars() { if c.is_ascii_uppercase() && last_lower { result.push('_'); } result.push(c.to_ascii_lowercase()); last_lower = c.is_lowercase(); } result } #[cfg(test)] mod test { use super::*; #[test] fn test_camel_case() { assert_eq!("FooBarBazQuxQUUX", camel_case("foo_barBaz_QuxQUUX")); assert_eq!("FooBarBazQuxQUUX", camel_case("Foo_barBaz_QuxQUUX")); } #[test] fn test_snake_case() { assert_eq!("foo_bar_baz_qux_quux", snake_case("foo_barBaz_QuxQUUX")); assert_eq!("foo_bar_baz_qux_quux", snake_case("Foo_barBaz_QuxQUUX")); } } protobuf-parse-3.7.2/src/lib.rs000064400000000000000000000025501046102023000145120ustar 00000000000000//! # Parse `.proto` files //! //! Parse `.proto` file definitions, **not** the protobuf text format serialization. //! //! Files can be parsed using pure Rust parser (mod `pure`) //! or using the `protoc` command (mod `protoc`). //! //! This crate is not meant to be used directly, but rather through the `protobuf-codegen` crate. //! If you think this crate might be useful to you, //! please [consider creating an issue](https://github.com/stepancheg/rust-protobuf/issues/new), //! until that this crate is considered to have **no stable API**. 
extern crate core; mod case_convert; mod parse_and_typecheck; mod parser; mod path; mod proto; mod proto_path; mod protobuf_abs_path; mod protobuf_ident; mod protobuf_path; mod protobuf_rel_path; pub(crate) mod protoc; pub mod pure; mod rel_path; mod test_against_protobuf_protos; mod which_parser; // Public API // Non-public API used by codegen crate. pub use case_convert::*; pub use parse_and_typecheck::*; pub use parser::Parser; pub use proto_path::*; use protobuf::reflect::FileDescriptor; pub use protobuf_abs_path::*; pub use protobuf_ident::*; pub use protobuf_rel_path::*; use crate::pure::model; #[derive(Clone)] pub(crate) struct FileDescriptorPair { pub(crate) parsed: model::FileDescriptor, pub(crate) descriptor_proto: protobuf::descriptor::FileDescriptorProto, pub(crate) descriptor: FileDescriptor, } protobuf-parse-3.7.2/src/parse_and_typecheck.rs000064400000000000000000000047021046102023000177400ustar 00000000000000use crate::ProtoPathBuf; /// Result of parsing `.proto` files. #[doc(hidden)] pub struct ParsedAndTypechecked { /// One entry for each input `.proto` file. pub relative_paths: Vec, /// All parsed `.proto` files including dependencies of input files. pub file_descriptors: Vec, /// Description of the parser (e.g. to include in generated files). 
pub parser: String, } #[cfg(test)] mod test { use std::collections::HashSet; use std::fs; use crate::Parser; #[test] fn parse_and_typecheck() { let dir = tempfile::tempdir().unwrap(); let a_proto = dir.path().join("a.proto"); let b_proto = dir.path().join("b.proto"); fs::write(&a_proto, "syntax = 'proto3'; message Apple {}").unwrap(); fs::write( &b_proto, "syntax = 'proto3'; import 'a.proto'; message Banana { Apple a = 1; }", ) .unwrap(); let pure = Parser::new() .pure() .include(dir.path()) .input(&b_proto) .parse_and_typecheck() .unwrap(); let protoc = Parser::new() .protoc() .include(dir.path()) .input(&b_proto) .parse_and_typecheck() .unwrap(); assert_eq!(pure.relative_paths, protoc.relative_paths); assert_eq!(2, pure.file_descriptors.len()); assert_eq!(2, protoc.file_descriptors.len()); // TODO: make result more deterministic assert_eq!( HashSet::from(["a.proto", "b.proto"]), pure.file_descriptors.iter().map(|d| d.name()).collect() ); assert_eq!( HashSet::from(["a.proto", "b.proto"]), protoc.file_descriptors.iter().map(|d| d.name()).collect() ); assert_eq!(1, protoc.file_descriptors[0].message_type.len()); assert_eq!(1, pure.file_descriptors[0].message_type.len()); assert_eq!( "Banana", pure.file_descriptors .iter() .find(|d| d.name() == "b.proto") .unwrap() .message_type[0] .name() ); assert_eq!( "Banana", protoc .file_descriptors .iter() .find(|d| d.name() == "b.proto") .unwrap() .message_type[0] .name() ); } } protobuf-parse-3.7.2/src/parser.rs000064400000000000000000000074541046102023000152500ustar 00000000000000use std::collections::HashSet; use std::ffi::OsStr; use std::ffi::OsString; use std::path::Path; use std::path::PathBuf; use anyhow::Context; use protobuf::descriptor::FileDescriptorSet; use crate::protoc; use crate::pure; use crate::which_parser::WhichParser; use crate::ParsedAndTypechecked; /// Configure and invoke `.proto` parser. 
#[derive(Default, Debug)] pub struct Parser { which_parser: WhichParser, pub(crate) includes: Vec, pub(crate) inputs: Vec, pub(crate) protoc: Option, pub(crate) protoc_extra_args: Vec, pub(crate) capture_stderr: bool, } impl Parser { /// Create new default configured parser. pub fn new() -> Parser { Parser::default() } /// Use pure rust parser. pub fn pure(&mut self) -> &mut Self { self.which_parser = WhichParser::Pure; self } /// Use `protoc` for parsing. pub fn protoc(&mut self) -> &mut Self { self.which_parser = WhichParser::Protoc; self } /// Add an include directory. pub fn include(&mut self, include: impl AsRef) -> &mut Self { self.includes.push(include.as_ref().to_owned()); self } /// Add include directories. pub fn includes(&mut self, includes: impl IntoIterator>) -> &mut Self { for include in includes { self.include(include); } self } /// Append a `.proto` file path to compile pub fn input(&mut self, input: impl AsRef) -> &mut Self { self.inputs.push(input.as_ref().to_owned()); self } /// Append multiple `.proto` file paths to compile pub fn inputs(&mut self, inputs: impl IntoIterator>) -> &mut Self { for input in inputs { self.input(input); } self } /// Specify `protoc` path used for parsing. /// /// This is ignored if pure rust parser is used. pub fn protoc_path(&mut self, protoc: &Path) -> &mut Self { self.protoc = Some(protoc.to_owned()); self } /// Extra arguments to pass to `protoc` command (like experimental options). /// /// This is ignored if pure rust parser is used. pub fn protoc_extra_args( &mut self, args: impl IntoIterator>, ) -> &mut Self { self.protoc_extra_args = args.into_iter().map(|s| s.as_ref().to_owned()).collect(); self } /// Capture stderr and return it in error. /// /// This option applies only to `protoc` parser. /// By default `protoc` stderr is inherited from this process stderr. 
pub fn capture_stderr(&mut self) -> &mut Self { self.capture_stderr = true; self } /// Parse `.proto` files and typecheck them using pure Rust parser of `protoc` command. pub fn parse_and_typecheck(&self) -> anyhow::Result { match &self.which_parser { WhichParser::Pure => { pure::parse_and_typecheck::parse_and_typecheck(&self).context("using pure parser") } WhichParser::Protoc => protoc::parse_and_typecheck::parse_and_typecheck(&self) .context("using protoc parser"), } } /// Parse and convert result to `FileDescriptorSet`. pub fn file_descriptor_set(&self) -> anyhow::Result { let mut generated = self.parse_and_typecheck()?; let relative_paths: HashSet<_> = generated .relative_paths .iter() .map(|path| path.to_string()) .collect(); generated .file_descriptors .retain(|fd| relative_paths.contains(fd.name())); let mut fds = FileDescriptorSet::new(); fds.file = generated.file_descriptors; Ok(fds) } } protobuf-parse-3.7.2/src/path.rs000064400000000000000000000012251046102023000146760ustar 00000000000000use std::path::is_separator; use crate::proto_path::ProtoPath; pub(crate) fn fs_path_to_proto_path(path: &ProtoPath) -> String { path.to_str() .chars() .map(|c| if is_separator(c) { '/' } else { c }) .collect() } #[cfg(test)] mod test { use crate::path::fs_path_to_proto_path; use crate::ProtoPath; #[test] fn test_fs_path_to_proto_path() { assert_eq!( "foo.proto", fs_path_to_proto_path(ProtoPath::new("foo.proto").unwrap()) ); assert_eq!( "bar/foo.proto", fs_path_to_proto_path(ProtoPath::new("bar/foo.proto").unwrap()) ); } } protobuf-parse-3.7.2/src/proto/google/protobuf/any.proto000064400000000000000000000134341046102023000215340ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. 
// https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
syntax = "proto3"; package google.protobuf; option csharp_namespace = "Google.Protobuf.WellKnownTypes"; option go_package = "google.golang.org/protobuf/types/known/anypb"; option java_package = "com.google.protobuf"; option java_outer_classname = "AnyProto"; option java_multiple_files = true; option objc_class_prefix = "GPB"; // `Any` contains an arbitrary serialized protocol buffer message along with a // URL that describes the type of the serialized message. // // Protobuf library provides support to pack/unpack Any values in the form // of utility functions or additional generated methods of the Any type. // // Example 1: Pack and unpack a message in C++. // // Foo foo = ...; // Any any; // any.PackFrom(foo); // ... // if (any.UnpackTo(&foo)) { // ... // } // // Example 2: Pack and unpack a message in Java. // // Foo foo = ...; // Any any = Any.pack(foo); // ... // if (any.is(Foo.class)) { // foo = any.unpack(Foo.class); // } // // Example 3: Pack and unpack a message in Python. // // foo = Foo(...) // any = Any() // any.Pack(foo) // ... // if any.Is(Foo.DESCRIPTOR): // any.Unpack(foo) // ... // // Example 4: Pack and unpack a message in Go // // foo := &pb.Foo{...} // any, err := anypb.New(foo) // if err != nil { // ... // } // ... // foo := &pb.Foo{} // if err := any.UnmarshalTo(foo); err != nil { // ... // } // // The pack methods provided by protobuf library will by default use // 'type.googleapis.com/full.type.name' as the type URL and the unpack // methods only use the fully qualified type name after the last '/' // in the type URL, for example "foo.bar.com/x/y.z" will yield type // name "y.z". // // // JSON // ==== // The JSON representation of an `Any` value uses the regular // representation of the deserialized, embedded message, with an // additional field `@type` which contains the type URL. 
Example: // // package google.profile; // message Person { // string first_name = 1; // string last_name = 2; // } // // { // "@type": "type.googleapis.com/google.profile.Person", // "firstName": , // "lastName": // } // // If the embedded message type is well-known and has a custom JSON // representation, that representation will be embedded adding a field // `value` which holds the custom JSON in addition to the `@type` // field. Example (for message [google.protobuf.Duration][]): // // { // "@type": "type.googleapis.com/google.protobuf.Duration", // "value": "1.212s" // } // message Any { // A URL/resource name that uniquely identifies the type of the serialized // protocol buffer message. This string must contain at least // one "/" character. The last segment of the URL's path must represent // the fully qualified name of the type (as in // `path/google.protobuf.Duration`). The name should be in a canonical form // (e.g., leading "." is not accepted). // // In practice, teams usually precompile into the binary all types that they // expect it to use in the context of Any. However, for URLs which use the // scheme `http`, `https`, or no scheme, one can optionally set up a type // server that maps type URLs to message definitions as follows: // // * If no scheme is provided, `https` is assumed. // * An HTTP GET on the URL must yield a [google.protobuf.Type][] // value in binary format, or produce an error. // * Applications are allowed to cache lookup results based on the // URL, or have them precompiled into a binary to avoid any // lookup. Therefore, binary compatibility needs to be preserved // on changes to types. (Use versioned type names to manage // breaking changes.) // // Note: this functionality is not currently available in the official // protobuf release, and it is not used for type URLs beginning with // type.googleapis.com. // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. 
// string type_url = 1; // Must be a valid serialized protocol buffer of the above specified type. bytes value = 2; } protobuf-parse-3.7.2/src/proto/google/protobuf/api.proto000064400000000000000000000170661046102023000215230ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
syntax = "proto3"; package google.protobuf; import "google/protobuf/source_context.proto"; import "google/protobuf/type.proto"; option csharp_namespace = "Google.Protobuf.WellKnownTypes"; option java_package = "com.google.protobuf"; option java_outer_classname = "ApiProto"; option java_multiple_files = true; option objc_class_prefix = "GPB"; option go_package = "google.golang.org/protobuf/types/known/apipb"; // Api is a light-weight descriptor for an API Interface. // // Interfaces are also described as "protocol buffer services" in some contexts, // such as by the "service" keyword in a .proto file, but they are different // from API Services, which represent a concrete implementation of an interface // as opposed to simply a description of methods and bindings. They are also // sometimes simply referred to as "APIs" in other contexts, such as the name of // this message itself. See https://cloud.google.com/apis/design/glossary for // detailed terminology. message Api { // The fully qualified name of this interface, including package name // followed by the interface's simple name. string name = 1; // The methods of this interface, in unspecified order. repeated Method methods = 2; // Any metadata attached to the interface. repeated Option options = 3; // A version string for this interface. If specified, must have the form // `major-version.minor-version`, as in `1.10`. If the minor version is // omitted, it defaults to zero. If the entire version field is empty, the // major version is derived from the package name, as outlined below. If the // field is not empty, the version in the package name will be verified to be // consistent with what is provided here. // // The versioning schema uses [semantic // versioning](http://semver.org) where the major version number // indicates a breaking change and the minor version an additive, // non-breaking change. 
Both version numbers are signals to users // what to expect from different versions, and should be carefully // chosen based on the product plan. // // The major version is also reflected in the package name of the // interface, which must end in `v`, as in // `google.feature.v1`. For major versions 0 and 1, the suffix can // be omitted. Zero major versions must only be used for // experimental, non-GA interfaces. // // string version = 4; // Source context for the protocol buffer service represented by this // message. SourceContext source_context = 5; // Included interfaces. See [Mixin][]. repeated Mixin mixins = 6; // The source syntax of the service. Syntax syntax = 7; } // Method represents a method of an API interface. message Method { // The simple name of this method. string name = 1; // A URL of the input message type. string request_type_url = 2; // If true, the request is streamed. bool request_streaming = 3; // The URL of the output message type. string response_type_url = 4; // If true, the response is streamed. bool response_streaming = 5; // Any metadata attached to the method. repeated Option options = 6; // The source syntax of this method. Syntax syntax = 7; } // Declares an API Interface to be included in this interface. The including // interface must redeclare all the methods from the included interface, but // documentation and options are inherited as follows: // // - If after comment and whitespace stripping, the documentation // string of the redeclared method is empty, it will be inherited // from the original method. // // - Each annotation belonging to the service config (http, // visibility) which is not set in the redeclared method will be // inherited. // // - If an http annotation is inherited, the path pattern will be // modified as follows. Any version prefix will be replaced by the // version of the including interface plus the [root][] path if // specified. 
// // Example of a simple mixin: // // package google.acl.v1; // service AccessControl { // // Get the underlying ACL object. // rpc GetAcl(GetAclRequest) returns (Acl) { // option (google.api.http).get = "/v1/{resource=**}:getAcl"; // } // } // // package google.storage.v2; // service Storage { // rpc GetAcl(GetAclRequest) returns (Acl); // // // Get a data record. // rpc GetData(GetDataRequest) returns (Data) { // option (google.api.http).get = "/v2/{resource=**}"; // } // } // // Example of a mixin configuration: // // apis: // - name: google.storage.v2.Storage // mixins: // - name: google.acl.v1.AccessControl // // The mixin construct implies that all methods in `AccessControl` are // also declared with same name and request/response types in // `Storage`. A documentation generator or annotation processor will // see the effective `Storage.GetAcl` method after inheriting // documentation and annotations as follows: // // service Storage { // // Get the underlying ACL object. // rpc GetAcl(GetAclRequest) returns (Acl) { // option (google.api.http).get = "/v2/{resource=**}:getAcl"; // } // ... // } // // Note how the version in the path pattern changed from `v1` to `v2`. // // If the `root` field in the mixin is specified, it should be a // relative path under which inherited HTTP paths are placed. Example: // // apis: // - name: google.storage.v2.Storage // mixins: // - name: google.acl.v1.AccessControl // root: acls // // This implies the following inherited HTTP annotation: // // service Storage { // // Get the underlying ACL object. // rpc GetAcl(GetAclRequest) returns (Acl) { // option (google.api.http).get = "/v2/acls/{resource=**}:getAcl"; // } // ... // } message Mixin { // The fully qualified name of the interface which is included. string name = 1; // If non-empty specifies a path under which inherited HTTP paths // are rooted. 
string root = 2; } protobuf-parse-3.7.2/src/proto/google/protobuf/compiler/plugin.proto000064400000000000000000000210621046102023000240510ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Author: kenton@google.com (Kenton Varda) // // WARNING: The plugin interface is currently EXPERIMENTAL and is subject to // change. 
// // protoc (aka the Protocol Compiler) can be extended via plugins. A plugin is // just a program that reads a CodeGeneratorRequest from stdin and writes a // CodeGeneratorResponse to stdout. // // Plugins written using C++ can use google/protobuf/compiler/plugin.h instead // of dealing with the raw protocol defined here. // // A plugin executable needs only to be placed somewhere in the path. The // plugin should be named "protoc-gen-$NAME", and will then be used when the // flag "--${NAME}_out" is passed to protoc. syntax = "proto2"; package google.protobuf.compiler; option java_package = "com.google.protobuf.compiler"; option java_outer_classname = "PluginProtos"; option go_package = "google.golang.org/protobuf/types/pluginpb"; import "google/protobuf/descriptor.proto"; // The version number of protocol compiler. message Version { optional int32 major = 1; optional int32 minor = 2; optional int32 patch = 3; // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should // be empty for mainline stable releases. optional string suffix = 4; } // An encoded CodeGeneratorRequest is written to the plugin's stdin. message CodeGeneratorRequest { // The .proto files that were explicitly listed on the command-line. The // code generator should generate code only for these files. Each file's // descriptor will be included in proto_file, below. repeated string file_to_generate = 1; // The generator parameter passed on the command-line. optional string parameter = 2; // FileDescriptorProtos for all files in files_to_generate and everything // they import. The files will appear in topological order, so each file // appears before any file that imports it. // // protoc guarantees that all proto_files will be written after // the fields above, even though this is not technically guaranteed by the // protobuf wire format. 
This theoretically could allow a plugin to stream // in the FileDescriptorProtos and handle them one by one rather than read // the entire set into memory at once. However, as of this writing, this // is not similarly optimized on protoc's end -- it will store all fields in // memory at once before sending them to the plugin. // // Type names of fields and extensions in the FileDescriptorProto are always // fully qualified. repeated FileDescriptorProto proto_file = 15; // The version number of protocol compiler. optional Version compiler_version = 3; } // The plugin writes an encoded CodeGeneratorResponse to stdout. message CodeGeneratorResponse { // Error message. If non-empty, code generation failed. The plugin process // should exit with status code zero even if it reports an error in this way. // // This should be used to indicate errors in .proto files which prevent the // code generator from generating correct code. Errors which indicate a // problem in protoc itself -- such as the input CodeGeneratorRequest being // unparseable -- should be reported by writing a message to stderr and // exiting with a non-zero status code. optional string error = 1; // A bitmask of supported features that the code generator supports. // This is a bitwise "or" of values from the Feature enum. optional uint64 supported_features = 2; // Sync with code_generator.h. enum Feature { FEATURE_NONE = 0; FEATURE_PROTO3_OPTIONAL = 1; } // Represents a single generated file. message File { // The file name, relative to the output directory. The name must not // contain "." or ".." components and must be relative, not be absolute (so, // the file cannot lie outside the output directory). "/" must be used as // the path separator, not "\". // // If the name is omitted, the content will be appended to the previous // file. 
This allows the generator to break large files into small chunks, // and allows the generated text to be streamed back to protoc so that large // files need not reside completely in memory at one time. Note that as of // this writing protoc does not optimize for this -- it will read the entire // CodeGeneratorResponse before writing files to disk. optional string name = 1; // If non-empty, indicates that the named file should already exist, and the // content here is to be inserted into that file at a defined insertion // point. This feature allows a code generator to extend the output // produced by another code generator. The original generator may provide // insertion points by placing special annotations in the file that look // like: // @@protoc_insertion_point(NAME) // The annotation can have arbitrary text before and after it on the line, // which allows it to be placed in a comment. NAME should be replaced with // an identifier naming the point -- this is what other generators will use // as the insertion_point. Code inserted at this point will be placed // immediately above the line containing the insertion point (thus multiple // insertions to the same point will come out in the order they were added). // The double-@ is intended to make it unlikely that the generated code // could contain things that look like insertion points by accident. // // For example, the C++ code generator places the following line in the // .pb.h files that it generates: // // @@protoc_insertion_point(namespace_scope) // This line appears within the scope of the file's package namespace, but // outside of any particular class. Another plugin can then specify the // insertion_point "namespace_scope" to generate additional classes or // other declarations that should be placed in this scope. // // Note that if the line containing the insertion point begins with // whitespace, the same whitespace will be added to every line of the // inserted text. 
This is useful for languages like Python, where // indentation matters. In these languages, the insertion point comment // should be indented the same amount as any inserted code will need to be // in order to work correctly in that context. // // The code generator that generates the initial file and the one which // inserts into it must both run as part of a single invocation of protoc. // Code generators are executed in the order in which they appear on the // command line. // // If |insertion_point| is present, |name| must also be present. optional string insertion_point = 2; // The file contents. optional string content = 15; // Information describing the file content being inserted. If an insertion // point is used, this information will be appropriately offset and inserted // into the code generation metadata for the generated files. optional GeneratedCodeInfo generated_code_info = 16; } repeated File file = 15; } protobuf-parse-3.7.2/src/proto/google/protobuf/descriptor.proto000064400000000000000000001122331046102023000231200ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. 
// // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Author: kenton@google.com (Kenton Varda) // Based on original Protocol Buffers design by // Sanjay Ghemawat, Jeff Dean, and others. // // The messages in this file describe the definitions found in .proto files. // A valid .proto file can be translated directly to a FileDescriptorProto // without any other information (e.g. without reading its imports). syntax = "proto2"; package google.protobuf; option go_package = "google.golang.org/protobuf/types/descriptorpb"; option java_package = "com.google.protobuf"; option java_outer_classname = "DescriptorProtos"; option csharp_namespace = "Google.Protobuf.Reflection"; option objc_class_prefix = "GPB"; option cc_enable_arenas = true; // descriptor.proto must be optimized for speed because reflection-based // algorithms don't work during bootstrapping. option optimize_for = SPEED; // The protocol compiler can output a FileDescriptorSet containing the .proto // files it parses. message FileDescriptorSet { repeated FileDescriptorProto file = 1; } // Describes a complete .proto file. message FileDescriptorProto { optional string name = 1; // file name, relative to root of source tree optional string package = 2; // e.g. "foo", "foo.bar", etc. 
// Names of files imported by this file. repeated string dependency = 3; // Indexes of the public imported files in the dependency list above. repeated int32 public_dependency = 10; // Indexes of the weak imported files in the dependency list. // For Google-internal migration only. Do not use. repeated int32 weak_dependency = 11; // All top-level definitions in this file. repeated DescriptorProto message_type = 4; repeated EnumDescriptorProto enum_type = 5; repeated ServiceDescriptorProto service = 6; repeated FieldDescriptorProto extension = 7; optional FileOptions options = 8; // This field contains optional information about the original source code. // You may safely remove this entire field without harming runtime // functionality of the descriptors -- the information is needed only by // development tools. optional SourceCodeInfo source_code_info = 9; // The syntax of the proto file. // The supported values are "proto2" and "proto3". optional string syntax = 12; } // Describes a message type. message DescriptorProto { optional string name = 1; repeated FieldDescriptorProto field = 2; repeated FieldDescriptorProto extension = 6; repeated DescriptorProto nested_type = 3; repeated EnumDescriptorProto enum_type = 4; message ExtensionRange { optional int32 start = 1; // Inclusive. optional int32 end = 2; // Exclusive. optional ExtensionRangeOptions options = 3; } repeated ExtensionRange extension_range = 5; repeated OneofDescriptorProto oneof_decl = 8; optional MessageOptions options = 7; // Range of reserved tag numbers. Reserved tag numbers may not be used by // fields or extension ranges in the same message. Reserved ranges may // not overlap. message ReservedRange { optional int32 start = 1; // Inclusive. optional int32 end = 2; // Exclusive. } repeated ReservedRange reserved_range = 9; // Reserved field names, which may not be used by fields in the same message. // A given name may only be reserved once. 
repeated string reserved_name = 10; } message ExtensionRangeOptions { // The parser stores options it doesn't recognize here. See above. repeated UninterpretedOption uninterpreted_option = 999; // Clients can define custom options in extensions of this message. See above. extensions 1000 to max; } // Describes a field within a message. message FieldDescriptorProto { enum Type { // 0 is reserved for errors. // Order is weird for historical reasons. TYPE_DOUBLE = 1; TYPE_FLOAT = 2; // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if // negative values are likely. TYPE_INT64 = 3; TYPE_UINT64 = 4; // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if // negative values are likely. TYPE_INT32 = 5; TYPE_FIXED64 = 6; TYPE_FIXED32 = 7; TYPE_BOOL = 8; TYPE_STRING = 9; // Tag-delimited aggregate. // Group type is deprecated and not supported in proto3. However, Proto3 // implementations should still be able to parse the group wire format and // treat group fields as unknown fields. TYPE_GROUP = 10; TYPE_MESSAGE = 11; // Length-delimited aggregate. // New in version 2. TYPE_BYTES = 12; TYPE_UINT32 = 13; TYPE_ENUM = 14; TYPE_SFIXED32 = 15; TYPE_SFIXED64 = 16; TYPE_SINT32 = 17; // Uses ZigZag encoding. TYPE_SINT64 = 18; // Uses ZigZag encoding. } enum Label { // 0 is reserved for errors LABEL_OPTIONAL = 1; LABEL_REQUIRED = 2; LABEL_REPEATED = 3; } optional string name = 1; optional int32 number = 3; optional Label label = 4; // If type_name is set, this need not be set. If both this and type_name // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. optional Type type = 5; // For message and enum types, this is the name of the type. If the name // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping // rules are used to find the type (i.e. first the nested types within this // message are searched, then within the parent, on up to the root // namespace). 
optional string type_name = 6; // For extensions, this is the name of the type being extended. It is // resolved in the same manner as type_name. optional string extendee = 2; // For numeric types, contains the original text representation of the value. // For booleans, "true" or "false". // For strings, contains the default text contents (not escaped in any way). // For bytes, contains the C escaped value. All bytes >= 128 are escaped. // TODO(kenton): Base-64 encode? optional string default_value = 7; // If set, gives the index of a oneof in the containing type's oneof_decl // list. This field is a member of that oneof. optional int32 oneof_index = 9; // JSON name of this field. The value is set by protocol compiler. If the // user has set a "json_name" option on this field, that option's value // will be used. Otherwise, it's deduced from the field's name by converting // it to camelCase. optional string json_name = 10; optional FieldOptions options = 8; // If true, this is a proto3 "optional". When a proto3 field is optional, it // tracks presence regardless of field type. // // When proto3_optional is true, this field must be belong to a oneof to // signal to old proto3 clients that presence is tracked for this field. This // oneof is known as a "synthetic" oneof, and this field must be its sole // member (each proto3 optional field gets its own synthetic oneof). Synthetic // oneofs exist in the descriptor only, and do not generate any API. Synthetic // oneofs must be ordered after all "real" oneofs. // // For message fields, proto3_optional doesn't create any semantic change, // since non-repeated message fields always track presence. However it still // indicates the semantic detail of whether the user wrote "optional" or not. // This can be useful for round-tripping the .proto file. For consistency we // give message fields a synthetic oneof also, even though it is not required // to track presence. 
This is especially important because the parser can't // tell if a field is a message or an enum, so it must always create a // synthetic oneof. // // Proto2 optional fields do not set this flag, because they already indicate // optional with `LABEL_OPTIONAL`. optional bool proto3_optional = 17; } // Describes a oneof. message OneofDescriptorProto { optional string name = 1; optional OneofOptions options = 2; } // Describes an enum type. message EnumDescriptorProto { optional string name = 1; repeated EnumValueDescriptorProto value = 2; optional EnumOptions options = 3; // Range of reserved numeric values. Reserved values may not be used by // entries in the same enum. Reserved ranges may not overlap. // // Note that this is distinct from DescriptorProto.ReservedRange in that it // is inclusive such that it can appropriately represent the entire int32 // domain. message EnumReservedRange { optional int32 start = 1; // Inclusive. optional int32 end = 2; // Inclusive. } // Range of reserved numeric values. Reserved numeric values may not be used // by enum values in the same enum declaration. Reserved ranges may not // overlap. repeated EnumReservedRange reserved_range = 4; // Reserved enum value names, which may not be reused. A given name may only // be reserved once. repeated string reserved_name = 5; } // Describes a value within an enum. message EnumValueDescriptorProto { optional string name = 1; optional int32 number = 2; optional EnumValueOptions options = 3; } // Describes a service. message ServiceDescriptorProto { optional string name = 1; repeated MethodDescriptorProto method = 2; optional ServiceOptions options = 3; } // Describes a method of a service. message MethodDescriptorProto { optional string name = 1; // Input and output type names. These are resolved in the same way as // FieldDescriptorProto.type_name, but must refer to a message type. 
optional string input_type = 2; optional string output_type = 3; optional MethodOptions options = 4; // Identifies if client streams multiple client messages optional bool client_streaming = 5 [default = false]; // Identifies if server streams multiple server messages optional bool server_streaming = 6 [default = false]; } // =================================================================== // Options // Each of the definitions above may have "options" attached. These are // just annotations which may cause code to be generated slightly differently // or may contain hints for code that manipulates protocol messages. // // Clients may define custom options as extensions of the *Options messages. // These extensions may not yet be known at parsing time, so the parser cannot // store the values in them. Instead it stores them in a field in the *Options // message called uninterpreted_option. This field must have the same name // across all *Options messages. We then use this field to populate the // extensions when we build a descriptor, at which point all protos have been // parsed and so all extensions are known. // // Extension numbers for custom options may be chosen as follows: // * For options which will only be used within a single application or // organization, or for experimental options, use field numbers 50000 // through 99999. It is up to you to ensure that you do not use the // same number for multiple options. // * For options which will be published and used publicly by multiple // independent entities, e-mail protobuf-global-extension-registry@google.com // to reserve extension numbers. Simply provide your project name (e.g. // Objective-C plugin) and your project website (if available) -- there's no // need to explain how you intend to use them. Usually you only need one // extension number. You can declare multiple options with only one extension // number by putting them in a sub-message. 
See the Custom Options section of // the docs for examples: // https://developers.google.com/protocol-buffers/docs/proto#options // If this turns out to be popular, a web service will be set up // to automatically assign option numbers. message FileOptions { // Sets the Java package where classes generated from this .proto will be // placed. By default, the proto package is used, but this is often // inappropriate because proto packages do not normally start with backwards // domain names. optional string java_package = 1; // Controls the name of the wrapper Java class generated for the .proto file. // That class will always contain the .proto file's getDescriptor() method as // well as any top-level extensions defined in the .proto file. // If java_multiple_files is disabled, then all the other classes from the // .proto file will be nested inside the single wrapper outer class. optional string java_outer_classname = 8; // If enabled, then the Java code generator will generate a separate .java // file for each top-level message, enum, and service defined in the .proto // file. Thus, these types will *not* be nested inside the wrapper class // named by java_outer_classname. However, the wrapper class will still be // generated to contain the file's getDescriptor() method as well as any // top-level extensions defined in the file. optional bool java_multiple_files = 10 [default = false]; // This option does nothing. optional bool java_generate_equals_and_hash = 20 [deprecated=true]; // If set true, then the Java2 code generator will generate code that // throws an exception whenever an attempt is made to assign a non-UTF-8 // byte sequence to a string field. // Message reflection will do the same. // However, an extension field still accepts non-UTF-8 byte sequences. // This option has no effect on when used with the lite runtime. optional bool java_string_check_utf8 = 27 [default = false]; // Generated classes can be optimized for speed or code size. 
enum OptimizeMode { SPEED = 1; // Generate complete code for parsing, serialization, // etc. CODE_SIZE = 2; // Use ReflectionOps to implement these methods. LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. } optional OptimizeMode optimize_for = 9 [default = SPEED]; // Sets the Go package where structs generated from this .proto will be // placed. If omitted, the Go package will be derived from the following: // - The basename of the package import path, if provided. // - Otherwise, the package statement in the .proto file, if present. // - Otherwise, the basename of the .proto file, without extension. optional string go_package = 11; // Should generic services be generated in each language? "Generic" services // are not specific to any particular RPC system. They are generated by the // main code generators in each language (without additional plugins). // Generic services were the only kind of service generation supported by // early versions of google.protobuf. // // Generic services are now considered deprecated in favor of using plugins // that generate code specific to your particular RPC system. Therefore, // these default to false. Old code which depends on generic services should // explicitly set them to true. optional bool cc_generic_services = 16 [default = false]; optional bool java_generic_services = 17 [default = false]; optional bool py_generic_services = 18 [default = false]; optional bool php_generic_services = 42 [default = false]; // Is this file deprecated? // Depending on the target platform, this can emit Deprecated annotations // for everything in the file, or it will be completely ignored; in the very // least, this is a formalization for deprecating files. optional bool deprecated = 23 [default = false]; // Enables the use of arenas for the proto messages in this file. This applies // only to generated classes for C++. 
optional bool cc_enable_arenas = 31 [default = true]; // Sets the objective c class prefix which is prepended to all objective c // generated classes from this .proto. There is no default. optional string objc_class_prefix = 36; // Namespace for generated classes; defaults to the package. optional string csharp_namespace = 37; // By default Swift generators will take the proto package and CamelCase it // replacing '.' with underscore and use that to prefix the types/symbols // defined. When this options is provided, they will use this value instead // to prefix the types/symbols defined. optional string swift_prefix = 39; // Sets the php class prefix which is prepended to all php generated classes // from this .proto. Default is empty. optional string php_class_prefix = 40; // Use this option to change the namespace of php generated classes. Default // is empty. When this option is empty, the package name will be used for // determining the namespace. optional string php_namespace = 41; // Use this option to change the namespace of php generated metadata classes. // Default is empty. When this option is empty, the proto file name will be // used for determining the namespace. optional string php_metadata_namespace = 44; // Use this option to change the package of ruby generated classes. Default // is empty. When this option is not set, the package name will be used for // determining the ruby package. optional string ruby_package = 45; // The parser stores options it doesn't recognize here. // See the documentation for the "Options" section above. repeated UninterpretedOption uninterpreted_option = 999; // Clients can define custom options in extensions of this message. // See the documentation for the "Options" section above. extensions 1000 to max; reserved 38; } message MessageOptions { // Set true to use the old proto1 MessageSet wire format for extensions. // This is provided for backwards-compatibility with the MessageSet wire // format. 
You should not use this for any other reason: It's less // efficient, has fewer features, and is more complicated. // // The message must be defined exactly as follows: // message Foo { // option message_set_wire_format = true; // extensions 4 to max; // } // Note that the message cannot have any defined fields; MessageSets only // have extensions. // // All extensions of your type must be singular messages; e.g. they cannot // be int32s, enums, or repeated messages. // // Because this is an option, the above two restrictions are not enforced by // the protocol compiler. optional bool message_set_wire_format = 1 [default = false]; // Disables the generation of the standard "descriptor()" accessor, which can // conflict with a field of the same name. This is meant to make migration // from proto1 easier; new code should avoid fields named "descriptor". optional bool no_standard_descriptor_accessor = 2 [default = false]; // Is this message deprecated? // Depending on the target platform, this can emit Deprecated annotations // for the message, or it will be completely ignored; in the very least, // this is a formalization for deprecating messages. optional bool deprecated = 3 [default = false]; reserved 4, 5, 6; // Whether the message is an automatically generated map entry type for the // maps field. // // For maps fields: // map map_field = 1; // The parsed descriptor looks like: // message MapFieldEntry { // option map_entry = true; // optional KeyType key = 1; // optional ValueType value = 2; // } // repeated MapFieldEntry map_field = 1; // // Implementations may choose not to generate the map_entry=true message, but // use a native map in the target language to hold the keys and values. // The reflection APIs in such implementations still need to work as // if the field is a repeated message field. // // NOTE: Do not set the option in .proto files. Always use the maps syntax // instead. The option should only be implicitly set by the proto compiler // parser. 
optional bool map_entry = 7; reserved 8; // javalite_serializable reserved 9; // javanano_as_lite // The parser stores options it doesn't recognize here. See above. repeated UninterpretedOption uninterpreted_option = 999; // Clients can define custom options in extensions of this message. See above. extensions 1000 to max; } message FieldOptions { // The ctype option instructs the C++ code generator to use a different // representation of the field than it normally would. See the specific // options below. This option is not yet implemented in the open source // release -- sorry, we'll try to include it in a future version! optional CType ctype = 1 [default = STRING]; enum CType { // Default mode. STRING = 0; CORD = 1; STRING_PIECE = 2; } // The packed option can be enabled for repeated primitive fields to enable // a more efficient representation on the wire. Rather than repeatedly // writing the tag and type for each element, the entire array is encoded as // a single length-delimited blob. In proto3, only explicit setting it to // false will avoid using packed encoding. optional bool packed = 2; // The jstype option determines the JavaScript type used for values of the // field. The option is permitted only for 64 bit integral and fixed types // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING // is represented as JavaScript string, which avoids loss of precision that // can happen when a large value is converted to a floating point JavaScript. // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to // use the JavaScript "number" type. The behavior of the default option // JS_NORMAL is implementation dependent. // // This option is an enum to permit additional types to be added, e.g. // goog.math.Integer. optional JSType jstype = 6 [default = JS_NORMAL]; enum JSType { // Use the default type. JS_NORMAL = 0; // Use JavaScript strings. JS_STRING = 1; // Use JavaScript numbers. 
JS_NUMBER = 2; } // Should this field be parsed lazily? Lazy applies only to message-type // fields. It means that when the outer message is initially parsed, the // inner message's contents will not be parsed but instead stored in encoded // form. The inner message will actually be parsed when it is first accessed. // // This is only a hint. Implementations are free to choose whether to use // eager or lazy parsing regardless of the value of this option. However, // setting this option true suggests that the protocol author believes that // using lazy parsing on this field is worth the additional bookkeeping // overhead typically needed to implement it. // // This option does not affect the public interface of any generated code; // all method signatures remain the same. Furthermore, thread-safety of the // interface is not affected by this option; const methods remain safe to // call from multiple threads concurrently, while non-const methods continue // to require exclusive access. // // // Note that implementations may choose not to check required fields within // a lazy sub-message. That is, calling IsInitialized() on the outer message // may return true even if the inner message has missing required fields. // This is necessary because otherwise the inner message would have to be // parsed in order to perform the check, defeating the purpose of lazy // parsing. An implementation which chooses not to check required fields // must be consistent about it. That is, for any particular sub-message, the // implementation must either *always* check its required fields, or *never* // check its required fields, regardless of whether or not the message has // been parsed. optional bool lazy = 5 [default = false]; // Is this field deprecated? // Depending on the target platform, this can emit Deprecated annotations // for accessors, or it will be completely ignored; in the very least, this // is a formalization for deprecating fields. 
optional bool deprecated = 3 [default = false]; // For Google-internal migration only. Do not use. optional bool weak = 10 [default = false]; // The parser stores options it doesn't recognize here. See above. repeated UninterpretedOption uninterpreted_option = 999; // Clients can define custom options in extensions of this message. See above. extensions 1000 to max; reserved 4; // removed jtype } message OneofOptions { // The parser stores options it doesn't recognize here. See above. repeated UninterpretedOption uninterpreted_option = 999; // Clients can define custom options in extensions of this message. See above. extensions 1000 to max; } message EnumOptions { // Set this option to true to allow mapping different tag names to the same // value. optional bool allow_alias = 2; // Is this enum deprecated? // Depending on the target platform, this can emit Deprecated annotations // for the enum, or it will be completely ignored; in the very least, this // is a formalization for deprecating enums. optional bool deprecated = 3 [default = false]; reserved 5; // javanano_as_lite // The parser stores options it doesn't recognize here. See above. repeated UninterpretedOption uninterpreted_option = 999; // Clients can define custom options in extensions of this message. See above. extensions 1000 to max; } message EnumValueOptions { // Is this enum value deprecated? // Depending on the target platform, this can emit Deprecated annotations // for the enum value, or it will be completely ignored; in the very least, // this is a formalization for deprecating enum values. optional bool deprecated = 1 [default = false]; // The parser stores options it doesn't recognize here. See above. repeated UninterpretedOption uninterpreted_option = 999; // Clients can define custom options in extensions of this message. See above. extensions 1000 to max; } message ServiceOptions { // Note: Field numbers 1 through 32 are reserved for Google's internal RPC // framework. 
We apologize for hoarding these numbers to ourselves, but // we were already using them long before we decided to release Protocol // Buffers. // Is this service deprecated? // Depending on the target platform, this can emit Deprecated annotations // for the service, or it will be completely ignored; in the very least, // this is a formalization for deprecating services. optional bool deprecated = 33 [default = false]; // The parser stores options it doesn't recognize here. See above. repeated UninterpretedOption uninterpreted_option = 999; // Clients can define custom options in extensions of this message. See above. extensions 1000 to max; } message MethodOptions { // Note: Field numbers 1 through 32 are reserved for Google's internal RPC // framework. We apologize for hoarding these numbers to ourselves, but // we were already using them long before we decided to release Protocol // Buffers. // Is this method deprecated? // Depending on the target platform, this can emit Deprecated annotations // for the method, or it will be completely ignored; in the very least, // this is a formalization for deprecating methods. optional bool deprecated = 33 [default = false]; // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, // or neither? HTTP based RPC implementation may choose GET verb for safe // methods, and PUT verb for idempotent methods instead of the default POST. enum IdempotencyLevel { IDEMPOTENCY_UNKNOWN = 0; NO_SIDE_EFFECTS = 1; // implies idempotent IDEMPOTENT = 2; // idempotent, but may have side effects } optional IdempotencyLevel idempotency_level = 34 [default = IDEMPOTENCY_UNKNOWN]; // The parser stores options it doesn't recognize here. See above. repeated UninterpretedOption uninterpreted_option = 999; // Clients can define custom options in extensions of this message. See above. extensions 1000 to max; } // A message representing a option the parser does not recognize. 
This only // appears in options protos created by the compiler::Parser class. // DescriptorPool resolves these when building Descriptor objects. Therefore, // options protos in descriptor objects (e.g. returned by Descriptor::options(), // or produced by Descriptor::CopyTo()) will never have UninterpretedOptions // in them. message UninterpretedOption { // The name of the uninterpreted option. Each string represents a segment in // a dot-separated name. is_extension is true iff a segment represents an // extension (denoted with parentheses in options specs in .proto files). // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents // "foo.(bar.baz).qux". message NamePart { required string name_part = 1; required bool is_extension = 2; } repeated NamePart name = 2; // The value of the uninterpreted option, in whatever type the tokenizer // identified it as during parsing. Exactly one of these should be set. optional string identifier_value = 3; optional uint64 positive_int_value = 4; optional int64 negative_int_value = 5; optional double double_value = 6; optional bytes string_value = 7; optional string aggregate_value = 8; } // =================================================================== // Optional source code info // Encapsulates information about the original source file from which a // FileDescriptorProto was generated. message SourceCodeInfo { // A Location identifies a piece of source code in a .proto file which // corresponds to a particular definition. This information is intended // to be useful to IDEs, code indexers, documentation generators, and similar // tools. // // For example, say we have a file like: // message Foo { // optional string foo = 1; // } // Let's look at just the field definition: // optional string foo = 1; // ^ ^^ ^^ ^ ^^^ // a bc de f ghi // We have the following locations: // span path represents // [a,i) [ 4, 0, 2, 0 ] The whole field definition. // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). 
// [c,d) [ 4, 0, 2, 0, 5 ] The type (string). // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). // // Notes: // - A location may refer to a repeated field itself (i.e. not to any // particular index within it). This is used whenever a set of elements are // logically enclosed in a single code segment. For example, an entire // extend block (possibly containing multiple extension definitions) will // have an outer location whose path refers to the "extensions" repeated // field without an index. // - Multiple locations may have the same path. This happens when a single // logical declaration is spread out across multiple places. The most // obvious example is the "extend" block again -- there may be multiple // extend blocks in the same scope, each of which will have the same path. // - A location's span is not always a subset of its parent's span. For // example, the "extendee" of an extension declaration appears at the // beginning of the "extend" block and is shared by all extensions within // the block. // - Just because a location's span is a subset of some other location's span // does not mean that it is a descendant. For example, a "group" defines // both a type and a field in a single declaration. Thus, the locations // corresponding to the type and field and their components will overlap. // - Code which tries to interpret locations should probably be designed to // ignore those that it doesn't understand, as more types of locations could // be recorded in the future. repeated Location location = 1; message Location { // Identifies which part of the FileDescriptorProto was defined at this // location. // // Each element is a field number or an index. They form a path from // the root FileDescriptorProto to the place where the definition. 
For // example, this path: // [ 4, 3, 2, 7, 1 ] // refers to: // file.message_type(3) // 4, 3 // .field(7) // 2, 7 // .name() // 1 // This is because FileDescriptorProto.message_type has field number 4: // repeated DescriptorProto message_type = 4; // and DescriptorProto.field has field number 2: // repeated FieldDescriptorProto field = 2; // and FieldDescriptorProto.name has field number 1: // optional string name = 1; // // Thus, the above path gives the location of a field name. If we removed // the last element: // [ 4, 3, 2, 7 ] // this path refers to the whole field declaration (from the beginning // of the label to the terminating semicolon). repeated int32 path = 1 [packed = true]; // Always has exactly three or four elements: start line, start column, // end line (optional, otherwise assumed same as start line), end column. // These are packed into a single field for efficiency. Note that line // and column numbers are zero-based -- typically you will want to add // 1 to each before displaying to a user. repeated int32 span = 2 [packed = true]; // If this SourceCodeInfo represents a complete declaration, these are any // comments appearing before and after the declaration which appear to be // attached to the declaration. // // A series of line comments appearing on consecutive lines, with no other // tokens appearing on those lines, will be treated as a single comment. // // leading_detached_comments will keep paragraphs of comments that appear // before (but not connected to) the current element. Each paragraph, // separated by empty lines, will be one comment element in the repeated // field. // // Only the comment content is provided; comment markers (e.g. //) are // stripped out. For block comments, leading whitespace and an asterisk // will be stripped from the beginning of each line other than the first. // Newlines are included in the output. // // Examples: // // optional int32 foo = 1; // Comment attached to foo. // // Comment attached to bar. 
// optional int32 bar = 2; // // optional string baz = 3; // // Comment attached to baz. // // Another line attached to baz. // // // Comment attached to qux. // // // // Another line attached to qux. // optional double qux = 4; // // // Detached comment for corge. This is not leading or trailing comments // // to qux or corge because there are blank lines separating it from // // both. // // // Detached comment for corge paragraph 2. // // optional string corge = 5; // /* Block comment attached // * to corge. Leading asterisks // * will be removed. */ // /* Block comment attached to // * grault. */ // optional int32 grault = 6; // // // ignored detached comments. optional string leading_comments = 3; optional string trailing_comments = 4; repeated string leading_detached_comments = 6; } } // Describes the relationship between generated code and its original source // file. A GeneratedCodeInfo message is associated with only one generated // source file, but may contain references to different source .proto files. message GeneratedCodeInfo { // An Annotation connects some span of text in generated code to an element // of its generating .proto file. repeated Annotation annotation = 1; message Annotation { // Identifies the element in the original source .proto file. This field // is formatted the same as SourceCodeInfo.Location.path. repeated int32 path = 1 [packed = true]; // Identifies the filesystem path to the original source .proto. optional string source_file = 2; // Identifies the starting offset in bytes in the generated code // that relates to the identified object. optional int32 begin = 3; // Identifies the ending offset in bytes in the generated code that // relates to the identified offset. The end offset should be one past // the last relevant byte (so the length of the text = end - begin). 
optional int32 end = 4; } } protobuf-parse-3.7.2/src/proto/google/protobuf/duration.proto000064400000000000000000000114371046102023000225730ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
syntax = "proto3"; package google.protobuf; option csharp_namespace = "Google.Protobuf.WellKnownTypes"; option cc_enable_arenas = true; option go_package = "google.golang.org/protobuf/types/known/durationpb"; option java_package = "com.google.protobuf"; option java_outer_classname = "DurationProto"; option java_multiple_files = true; option objc_class_prefix = "GPB"; // A Duration represents a signed, fixed-length span of time represented // as a count of seconds and fractions of seconds at nanosecond // resolution. It is independent of any calendar and concepts like "day" // or "month". It is related to Timestamp in that the difference between // two Timestamp values is a Duration and it can be added or subtracted // from a Timestamp. Range is approximately +-10,000 years. // // # Examples // // Example 1: Compute Duration from two Timestamps in pseudo code. // // Timestamp start = ...; // Timestamp end = ...; // Duration duration = ...; // // duration.seconds = end.seconds - start.seconds; // duration.nanos = end.nanos - start.nanos; // // if (duration.seconds < 0 && duration.nanos > 0) { // duration.seconds += 1; // duration.nanos -= 1000000000; // } else if (duration.seconds > 0 && duration.nanos < 0) { // duration.seconds -= 1; // duration.nanos += 1000000000; // } // // Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. // // Timestamp start = ...; // Duration duration = ...; // Timestamp end = ...; // // end.seconds = start.seconds + duration.seconds; // end.nanos = start.nanos + duration.nanos; // // if (end.nanos < 0) { // end.seconds -= 1; // end.nanos += 1000000000; // } else if (end.nanos >= 1000000000) { // end.seconds += 1; // end.nanos -= 1000000000; // } // // Example 3: Compute Duration from datetime.timedelta in Python. 
// // td = datetime.timedelta(days=3, minutes=10) // duration = Duration() // duration.FromTimedelta(td) // // # JSON Mapping // // In JSON format, the Duration type is encoded as a string rather than an // object, where the string ends in the suffix "s" (indicating seconds) and // is preceded by the number of seconds, with nanoseconds expressed as // fractional seconds. For example, 3 seconds with 0 nanoseconds should be // encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should // be expressed in JSON format as "3.000000001s", and 3 seconds and 1 // microsecond should be expressed in JSON format as "3.000001s". // // message Duration { // Signed seconds of the span of time. Must be from -315,576,000,000 // to +315,576,000,000 inclusive. Note: these bounds are computed from: // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years int64 seconds = 1; // Signed fractions of a second at nanosecond resolution of the span // of time. Durations less than one second are represented with a 0 // `seconds` field and a positive or negative `nanos` field. For durations // of one second or more, a non-zero value for the `nanos` field must be // of the same sign as the `seconds` field. Must be from -999,999,999 // to +999,999,999 inclusive. int32 nanos = 2; } protobuf-parse-3.7.2/src/proto/google/protobuf/empty.proto000064400000000000000000000045751046102023000221110ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. 
// * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. syntax = "proto3"; package google.protobuf; option csharp_namespace = "Google.Protobuf.WellKnownTypes"; option go_package = "google.golang.org/protobuf/types/known/emptypb"; option java_package = "com.google.protobuf"; option java_outer_classname = "EmptyProto"; option java_multiple_files = true; option objc_class_prefix = "GPB"; option cc_enable_arenas = true; // A generic empty message that you can re-use to avoid defining duplicated // empty messages in your APIs. A typical example is to use it as the request // or the response type of an API method. For instance: // // service Foo { // rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); // } // // The JSON representation for `Empty` is empty JSON object `{}`. 
message Empty {} protobuf-parse-3.7.2/src/proto/google/protobuf/field_mask.proto000064400000000000000000000177711046102023000230530ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
syntax = "proto3"; package google.protobuf; option csharp_namespace = "Google.Protobuf.WellKnownTypes"; option java_package = "com.google.protobuf"; option java_outer_classname = "FieldMaskProto"; option java_multiple_files = true; option objc_class_prefix = "GPB"; option go_package = "google.golang.org/protobuf/types/known/fieldmaskpb"; option cc_enable_arenas = true; // `FieldMask` represents a set of symbolic field paths, for example: // // paths: "f.a" // paths: "f.b.d" // // Here `f` represents a field in some root message, `a` and `b` // fields in the message found in `f`, and `d` a field found in the // message in `f.b`. // // Field masks are used to specify a subset of fields that should be // returned by a get operation or modified by an update operation. // Field masks also have a custom JSON encoding (see below). // // # Field Masks in Projections // // When used in the context of a projection, a response message or // sub-message is filtered by the API to only contain those fields as // specified in the mask. For example, if the mask in the previous // example is applied to a response message as follows: // // f { // a : 22 // b { // d : 1 // x : 2 // } // y : 13 // } // z: 8 // // The result will not contain specific values for fields x,y and z // (their value will be set to the default, and omitted in proto text // output): // // // f { // a : 22 // b { // d : 1 // } // } // // A repeated field is not allowed except at the last position of a // paths string. // // If a FieldMask object is not present in a get operation, the // operation applies to all fields (as if a FieldMask of all fields // had been specified). // // Note that a field mask does not necessarily apply to the // top-level response message. In case of a REST get operation, the // field mask applies directly to the response, but in case of a REST // list operation, the mask instead applies to each individual message // in the returned resource list. 
In case of a REST custom method, // other definitions may be used. Where the mask applies will be // clearly documented together with its declaration in the API. In // any case, the effect on the returned resource/resources is required // behavior for APIs. // // # Field Masks in Update Operations // // A field mask in update operations specifies which fields of the // targeted resource are going to be updated. The API is required // to only change the values of the fields as specified in the mask // and leave the others untouched. If a resource is passed in to // describe the updated values, the API ignores the values of all // fields not covered by the mask. // // If a repeated field is specified for an update operation, new values will // be appended to the existing repeated field in the target resource. Note that // a repeated field is only allowed in the last position of a `paths` string. // // If a sub-message is specified in the last position of the field mask for an // update operation, then new value will be merged into the existing sub-message // in the target resource. // // For example, given the target message: // // f { // b { // d: 1 // x: 2 // } // c: [1] // } // // And an update message: // // f { // b { // d: 10 // } // c: [2] // } // // then if the field mask is: // // paths: ["f.b", "f.c"] // // then the result will be: // // f { // b { // d: 10 // x: 2 // } // c: [1, 2] // } // // An implementation may provide options to override this default behavior for // repeated and message fields. // // In order to reset a field's value to the default, the field must // be in the mask and set to the default value in the provided resource. // Hence, in order to reset all fields of a resource, provide a default // instance of the resource and set all fields in the mask, or do // not provide a mask as described below. // // If a field mask is not present on update, the operation applies to // all fields (as if a field mask of all fields has been specified). 
// Note that in the presence of schema evolution, this may mean that // fields the client does not know and has therefore not filled into // the request will be reset to their default. If this is unwanted // behavior, a specific service may require a client to always specify // a field mask, producing an error if not. // // As with get operations, the location of the resource which // describes the updated values in the request message depends on the // operation kind. In any case, the effect of the field mask is // required to be honored by the API. // // ## Considerations for HTTP REST // // The HTTP kind of an update operation which uses a field mask must // be set to PATCH instead of PUT in order to satisfy HTTP semantics // (PUT must only be used for full updates). // // # JSON Encoding of Field Masks // // In JSON, a field mask is encoded as a single string where paths are // separated by a comma. Fields name in each path are converted // to/from lower-camel naming conventions. // // As an example, consider the following message declarations: // // message Profile { // User user = 1; // Photo photo = 2; // } // message User { // string display_name = 1; // string address = 2; // } // // In proto a field mask for `Profile` may look as such: // // mask { // paths: "user.display_name" // paths: "photo" // } // // In JSON, the same mask is represented as below: // // { // mask: "user.displayName,photo" // } // // # Field Masks and Oneof Fields // // Field masks treat fields in oneofs just as regular fields. Consider the // following message: // // message SampleMessage { // oneof test_oneof { // string name = 4; // SubMessage sub_message = 9; // } // } // // The field mask can be: // // mask { // paths: "name" // } // // Or: // // mask { // paths: "sub_message" // } // // Note that oneof type names ("test_oneof" in this case) cannot be used in // paths. 
// // ## Field Mask Verification // // The implementation of any API method which has a FieldMask type field in the // request should verify the included field paths, and return an // `INVALID_ARGUMENT` error if any path is unmappable. message FieldMask { // The set of field mask paths. repeated string paths = 1; } protobuf-parse-3.7.2/src/proto/google/protobuf/source_context.proto000064400000000000000000000044451046102023000240130ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. syntax = "proto3"; package google.protobuf; option csharp_namespace = "Google.Protobuf.WellKnownTypes"; option java_package = "com.google.protobuf"; option java_outer_classname = "SourceContextProto"; option java_multiple_files = true; option objc_class_prefix = "GPB"; option go_package = "google.golang.org/protobuf/types/known/sourcecontextpb"; // `SourceContext` represents information about the source of a // protobuf element, like the file in which it is defined. message SourceContext { // The path-qualified name of the .proto file that contained the associated // protobuf element. For example: `"google/protobuf/source_context.proto"`. string file_name = 1; } protobuf-parse-3.7.2/src/proto/google/protobuf/struct.proto000064400000000000000000000073031046102023000222670ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. 
// * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. syntax = "proto3"; package google.protobuf; option csharp_namespace = "Google.Protobuf.WellKnownTypes"; option cc_enable_arenas = true; option go_package = "google.golang.org/protobuf/types/known/structpb"; option java_package = "com.google.protobuf"; option java_outer_classname = "StructProto"; option java_multiple_files = true; option objc_class_prefix = "GPB"; // `Struct` represents a structured data value, consisting of fields // which map to dynamically typed values. In some languages, `Struct` // might be supported by a native representation. For example, in // scripting languages like JS a struct is represented as an // object. The details of that representation are described together // with the proto support for the language. 
// // The JSON representation for `Struct` is JSON object. message Struct { // Unordered map of dynamically typed values. map fields = 1; } // `Value` represents a dynamically typed value which can be either // null, a number, a string, a boolean, a recursive struct value, or a // list of values. A producer of value is expected to set one of these // variants. Absence of any variant indicates an error. // // The JSON representation for `Value` is JSON value. message Value { // The kind of value. oneof kind { // Represents a null value. NullValue null_value = 1; // Represents a double value. double number_value = 2; // Represents a string value. string string_value = 3; // Represents a boolean value. bool bool_value = 4; // Represents a structured value. Struct struct_value = 5; // Represents a repeated `Value`. ListValue list_value = 6; } } // `NullValue` is a singleton enumeration to represent the null value for the // `Value` type union. // // The JSON representation for `NullValue` is JSON `null`. enum NullValue { // Null value. NULL_VALUE = 0; } // `ListValue` is a wrapper around a repeated field of values. // // The JSON representation for `ListValue` is JSON array. message ListValue { // Repeated field of dynamically typed values. repeated Value values = 1; } protobuf-parse-3.7.2/src/proto/google/protobuf/timestamp.proto000064400000000000000000000144731046102023000227540ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. 
// * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. syntax = "proto3"; package google.protobuf; option csharp_namespace = "Google.Protobuf.WellKnownTypes"; option cc_enable_arenas = true; option go_package = "google.golang.org/protobuf/types/known/timestamppb"; option java_package = "com.google.protobuf"; option java_outer_classname = "TimestampProto"; option java_multiple_files = true; option objc_class_prefix = "GPB"; // A Timestamp represents a point in time independent of any time zone or local // calendar, encoded as a count of seconds and fractions of seconds at // nanosecond resolution. The count is relative to an epoch at UTC midnight on // January 1, 1970, in the proleptic Gregorian calendar which extends the // Gregorian calendar backwards to year one. // // All minutes are 60 seconds long. 
Leap seconds are "smeared" so that no leap // second table is needed for interpretation, using a [24-hour linear // smear](https://developers.google.com/time/smear). // // The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By // restricting to that range, we ensure that we can convert to and from [RFC // 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. // // # Examples // // Example 1: Compute Timestamp from POSIX `time()`. // // Timestamp timestamp; // timestamp.set_seconds(time(NULL)); // timestamp.set_nanos(0); // // Example 2: Compute Timestamp from POSIX `gettimeofday()`. // // struct timeval tv; // gettimeofday(&tv, NULL); // // Timestamp timestamp; // timestamp.set_seconds(tv.tv_sec); // timestamp.set_nanos(tv.tv_usec * 1000); // // Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. // // FILETIME ft; // GetSystemTimeAsFileTime(&ft); // UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; // // // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z // // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. // Timestamp timestamp; // timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); // timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); // // Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. // // long millis = System.currentTimeMillis(); // // Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) // .setNanos((int) ((millis % 1000) * 1000000)).build(); // // // Example 5: Compute Timestamp from Java `Instant.now()`. // // Instant now = Instant.now(); // // Timestamp timestamp = // Timestamp.newBuilder().setSeconds(now.getEpochSecond()) // .setNanos(now.getNano()).build(); // // // Example 6: Compute Timestamp from current time in Python. 
// // timestamp = Timestamp() // timestamp.GetCurrentTime() // // # JSON Mapping // // In JSON format, the Timestamp type is encoded as a string in the // [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the // format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" // where {year} is always expressed using four digits while {month}, {day}, // {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional // seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), // are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone // is required. A proto3 JSON serializer should always use UTC (as indicated by // "Z") when printing the Timestamp type and a proto3 JSON parser should be // able to accept both UTC and other timezones (as indicated by an offset). // // For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past // 01:30 UTC on January 15, 2017. // // In JavaScript, one can convert a Date object to this format using the // standard // [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) // method. In Python, a standard `datetime.datetime` object can be converted // to this format using // [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with // the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use // the Joda Time's [`ISODateTimeFormat.dateTime()`]( // http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D // ) to obtain a formatter capable of generating timestamps in this format. // // message Timestamp { // Represents seconds of UTC time since Unix epoch // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to // 9999-12-31T23:59:59Z inclusive. int64 seconds = 1; // Non-negative fractions of a second at nanosecond resolution. 
Negative // second values with fractions must still have non-negative nanos values // that count forward in time. Must be from 0 to 999,999,999 // inclusive. int32 nanos = 2; } protobuf-parse-3.7.2/src/proto/google/protobuf/type.proto000064400000000000000000000137561046102023000217350ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
syntax = "proto3"; package google.protobuf; import "google/protobuf/any.proto"; import "google/protobuf/source_context.proto"; option csharp_namespace = "Google.Protobuf.WellKnownTypes"; option cc_enable_arenas = true; option java_package = "com.google.protobuf"; option java_outer_classname = "TypeProto"; option java_multiple_files = true; option objc_class_prefix = "GPB"; option go_package = "google.golang.org/protobuf/types/known/typepb"; // A protocol buffer message type. message Type { // The fully qualified message name. string name = 1; // The list of fields. repeated Field fields = 2; // The list of types appearing in `oneof` definitions in this type. repeated string oneofs = 3; // The protocol buffer options. repeated Option options = 4; // The source context. SourceContext source_context = 5; // The source syntax. Syntax syntax = 6; } // A single field of a message type. message Field { // Basic field types. enum Kind { // Field type unknown. TYPE_UNKNOWN = 0; // Field type double. TYPE_DOUBLE = 1; // Field type float. TYPE_FLOAT = 2; // Field type int64. TYPE_INT64 = 3; // Field type uint64. TYPE_UINT64 = 4; // Field type int32. TYPE_INT32 = 5; // Field type fixed64. TYPE_FIXED64 = 6; // Field type fixed32. TYPE_FIXED32 = 7; // Field type bool. TYPE_BOOL = 8; // Field type string. TYPE_STRING = 9; // Field type group. Proto2 syntax only, and deprecated. TYPE_GROUP = 10; // Field type message. TYPE_MESSAGE = 11; // Field type bytes. TYPE_BYTES = 12; // Field type uint32. TYPE_UINT32 = 13; // Field type enum. TYPE_ENUM = 14; // Field type sfixed32. TYPE_SFIXED32 = 15; // Field type sfixed64. TYPE_SFIXED64 = 16; // Field type sint32. TYPE_SINT32 = 17; // Field type sint64. TYPE_SINT64 = 18; } // Whether a field is optional, required, or repeated. enum Cardinality { // For fields with unknown cardinality. CARDINALITY_UNKNOWN = 0; // For optional fields. CARDINALITY_OPTIONAL = 1; // For required fields. Proto2 syntax only. 
CARDINALITY_REQUIRED = 2; // For repeated fields. CARDINALITY_REPEATED = 3; } // The field type. Kind kind = 1; // The field cardinality. Cardinality cardinality = 2; // The field number. int32 number = 3; // The field name. string name = 4; // The field type URL, without the scheme, for message or enumeration // types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. string type_url = 6; // The index of the field type in `Type.oneofs`, for message or enumeration // types. The first type has index 1; zero means the type is not in the list. int32 oneof_index = 7; // Whether to use alternative packed wire representation. bool packed = 8; // The protocol buffer options. repeated Option options = 9; // The field JSON name. string json_name = 10; // The string value of the default value of this field. Proto2 syntax only. string default_value = 11; } // Enum type definition. message Enum { // Enum type name. string name = 1; // Enum value definitions. repeated EnumValue enumvalue = 2; // Protocol buffer options. repeated Option options = 3; // The source context. SourceContext source_context = 4; // The source syntax. Syntax syntax = 5; } // Enum value definition. message EnumValue { // Enum value name. string name = 1; // Enum value number. int32 number = 2; // Protocol buffer options. repeated Option options = 3; } // A protocol buffer option, which can be attached to a message, field, // enumeration, etc. message Option { // The option's name. For protobuf built-in options (options defined in // descriptor.proto), this is the short name. For example, `"map_entry"`. // For custom options, it should be the fully-qualified name. For example, // `"google.api.http"`. string name = 1; // The option's value packed in an Any message. If the value is a primitive, // the corresponding wrapper type defined in google/protobuf/wrappers.proto // should be used. If the value is an enum, it should be stored as an int32 // value using the google.protobuf.Int32Value type. 
Any value = 2; } // The syntax in which a protocol buffer element is defined. enum Syntax { // Syntax `proto2`. SYNTAX_PROTO2 = 0; // Syntax `proto3`. SYNTAX_PROTO3 = 1; } protobuf-parse-3.7.2/src/proto/google/protobuf/wrappers.proto000064400000000000000000000077121046102023000226120ustar 00000000000000// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Wrappers for primitive (non-message) types. 
These types are useful // for embedding primitives in the `google.protobuf.Any` type and for places // where we need to distinguish between the absence of a primitive // typed field and its default value. // // These wrappers have no meaningful use within repeated fields as they lack // the ability to detect presence on individual elements. // These wrappers have no meaningful use within a map or a oneof since // individual entries of a map or fields of a oneof can already detect presence. syntax = "proto3"; package google.protobuf; option csharp_namespace = "Google.Protobuf.WellKnownTypes"; option cc_enable_arenas = true; option go_package = "google.golang.org/protobuf/types/known/wrapperspb"; option java_package = "com.google.protobuf"; option java_outer_classname = "WrappersProto"; option java_multiple_files = true; option objc_class_prefix = "GPB"; // Wrapper message for `double`. // // The JSON representation for `DoubleValue` is JSON number. message DoubleValue { // The double value. double value = 1; } // Wrapper message for `float`. // // The JSON representation for `FloatValue` is JSON number. message FloatValue { // The float value. float value = 1; } // Wrapper message for `int64`. // // The JSON representation for `Int64Value` is JSON string. message Int64Value { // The int64 value. int64 value = 1; } // Wrapper message for `uint64`. // // The JSON representation for `UInt64Value` is JSON string. message UInt64Value { // The uint64 value. uint64 value = 1; } // Wrapper message for `int32`. // // The JSON representation for `Int32Value` is JSON number. message Int32Value { // The int32 value. int32 value = 1; } // Wrapper message for `uint32`. // // The JSON representation for `UInt32Value` is JSON number. message UInt32Value { // The uint32 value. uint32 value = 1; } // Wrapper message for `bool`. // // The JSON representation for `BoolValue` is JSON `true` and `false`. message BoolValue { // The bool value. 
bool value = 1; } // Wrapper message for `string`. // // The JSON representation for `StringValue` is JSON string. message StringValue { // The string value. string value = 1; } // Wrapper message for `bytes`. // // The JSON representation for `BytesValue` is JSON string. message BytesValue { // The bytes value. bytes value = 1; } protobuf-parse-3.7.2/src/proto/mod.rs000064400000000000000000000023021046102023000156610ustar 00000000000000//! This folder contains copy of .proto files //! needed for pure codegen. //! //! Files are copied here because when publishing to crates, //! referencing files from outside is not allowed. pub(crate) const RUSTPROTO_PROTO: &str = include_str!("rustproto.proto"); pub(crate) const ANY_PROTO: &str = include_str!("google/protobuf/any.proto"); pub(crate) const API_PROTO: &str = include_str!("google/protobuf/api.proto"); pub(crate) const DESCRIPTOR_PROTO: &str = include_str!("google/protobuf/descriptor.proto"); pub(crate) const DURATION_PROTO: &str = include_str!("google/protobuf/duration.proto"); pub(crate) const EMPTY_PROTO: &str = include_str!("google/protobuf/empty.proto"); pub(crate) const FIELD_MASK_PROTO: &str = include_str!("google/protobuf/field_mask.proto"); pub(crate) const SOURCE_CONTEXT_PROTO: &str = include_str!("google/protobuf/source_context.proto"); pub(crate) const STRUCT_PROTO: &str = include_str!("google/protobuf/struct.proto"); pub(crate) const TIMESTAMP_PROTO: &str = include_str!("google/protobuf/timestamp.proto"); pub(crate) const TYPE_PROTO: &str = include_str!("google/protobuf/type.proto"); pub(crate) const WRAPPERS_PROTO: &str = include_str!("google/protobuf/wrappers.proto"); protobuf-parse-3.7.2/src/proto/rustproto.proto000064400000000000000000000050051046102023000177050ustar 00000000000000syntax = "proto2"; import "google/protobuf/descriptor.proto"; // see https://github.com/gogo/protobuf/blob/master/gogoproto/gogo.proto // for the original idea // Generated files can be customized using this proto // or 
using `Customize` struct when codegen is invoked programmatically. package rustproto; extend google.protobuf.FileOptions { // When false, `get_`, `set_`, `mut_` etc. accessors are not generated optional bool generate_accessors_all = 17004; // When false, `get_` is not generated even if `syntax = "proto2"` optional bool generate_getter_all = 17005; // Use `bytes::Bytes` for `bytes` fields optional bool tokio_bytes_all = 17011; // Use `bytes::Bytes` for `string` fields optional bool tokio_bytes_for_string_all = 17012; // When false, `#[non_exhaustive]` is not generated for `oneof` fields. optional bool oneofs_non_exhaustive_all = 17013; // When true, generate `BTreeMap` instead of `HashMap` for map fields. optional bool btreemap_all = 17014; // When true, will only generate codes that works with lite runtime. optional bool lite_runtime_all = 17035; } extend google.protobuf.MessageOptions { // When false, `get_`, `set_`, `mut_` etc. accessors are not generated optional bool generate_accessors = 17004; // When false, `get_` is not generated even if `syntax = "proto2"` optional bool generate_getter = 17005; // Use `bytes::Bytes` for `bytes` fields optional bool tokio_bytes = 17011; // Use `bytes::Bytes` for `string` fields optional bool tokio_bytes_for_string = 17012; // When false, `#[non_exhaustive]` is not generated for `oneof` fields. optional bool oneofs_non_exhaustive = 17013; // When true, generate `BTreeMap` instead of `HashMap` for map fields. optional bool btreemap = 17014; } extend google.protobuf.FieldOptions { // When false, `get_`, `set_`, `mut_` etc. 
accessors are not generated optional bool generate_accessors_field = 17004; // When false, `get_` is not generated even if `syntax = "proto2"` optional bool generate_getter_field = 17005; // Use `bytes::Bytes` for `bytes` fields optional bool tokio_bytes_field = 17011; // Use `bytes::Bytes` for `string` fields optional bool tokio_bytes_for_string_field = 17012; // When false, `#[non_exhaustive]` is not generated for `oneof` fields. optional bool oneofs_non_exhaustive_field = 17013; // When true, generate `BTreeMap` instead of `HashMap` for map fields. optional bool btreemap_field = 17014; } protobuf-parse-3.7.2/src/proto_path.rs000064400000000000000000000102251046102023000161210ustar 00000000000000#![doc(hidden)] use std::borrow::Borrow; use std::fmt; use std::hash::Hash; use std::ops::Deref; use std::path::Component; use std::path::Path; use std::path::PathBuf; #[derive(Debug, thiserror::Error)] enum Error { #[error("path is empty")] Empty, #[error("backslashes in path: {0:?}")] Backslashes(String), #[error("path contains empty components: {0:?}")] EmptyComponent(String), #[error("dot in path: {0:?}")] Dot(String), #[error("dot-dot in path: {0:?}")] DotDot(String), #[error("path is absolute: `{}`", _0.display())] Absolute(PathBuf), #[error("non-UTF-8 component in path: `{}`", _0.display())] NotUtf8(PathBuf), } /// Protobuf file relative normalized file path. #[repr(transparent)] #[derive(Eq, PartialEq, Hash, Debug)] pub struct ProtoPath { path: str, } /// Protobuf file relative normalized file path. 
#[derive(Debug, Clone, PartialEq, Eq, Default)] pub struct ProtoPathBuf { path: String, } impl Hash for ProtoPathBuf { fn hash(&self, state: &mut H) { self.as_path().hash(state); } } impl Borrow for ProtoPathBuf { fn borrow(&self) -> &ProtoPath { self.as_path() } } impl Deref for ProtoPathBuf { type Target = ProtoPath; fn deref(&self) -> &ProtoPath { self.as_path() } } impl fmt::Display for ProtoPath { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", &self.path) } } impl fmt::Display for ProtoPathBuf { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", &self.path) } } impl PartialEq for ProtoPath { fn eq(&self, other: &str) -> bool { &self.path == other } } impl PartialEq for ProtoPathBuf { fn eq(&self, other: &str) -> bool { &self.path == other } } impl ProtoPath { fn unchecked_new(path: &str) -> &ProtoPath { unsafe { &*(path as *const str as *const ProtoPath) } } pub fn new(path: &str) -> anyhow::Result<&ProtoPath> { if path.is_empty() { return Err(Error::Empty.into()); } if path.contains('\\') { return Err(Error::Backslashes(path.to_owned()).into()); } for component in path.split('/') { if component.is_empty() { return Err(Error::EmptyComponent(path.to_owned()).into()); } if component == "." { return Err(Error::Dot(path.to_owned()).into()); } if component == ".." { return Err(Error::DotDot(path.to_owned()).into()); } } Ok(Self::unchecked_new(path)) } pub fn to_str(&self) -> &str { &self.path } pub fn to_path(&self) -> &Path { Path::new(&self.path) } pub fn to_proto_path_buf(&self) -> ProtoPathBuf { ProtoPathBuf { path: self.path.to_owned(), } } } impl ProtoPathBuf { pub fn as_path(&self) -> &ProtoPath { ProtoPath::unchecked_new(&self.path) } pub fn new(path: String) -> anyhow::Result { ProtoPath::new(&path)?; Ok(ProtoPathBuf { path }) } pub fn from_path(path: &Path) -> anyhow::Result { let mut path_str = String::new(); for component in path.components() { match component { Component::Prefix(..) 
=> return Err(Error::Absolute(path.to_owned()).into()), Component::RootDir => return Err(Error::Absolute(path.to_owned()).into()), Component::CurDir if path_str.is_empty() => {} Component::CurDir => return Err(Error::Dot(path.display().to_string()).into()), Component::ParentDir => { return Err(Error::DotDot(path.display().to_string()).into()) } Component::Normal(c) => { if !path_str.is_empty() { path_str.push('/'); } path_str.push_str(c.to_str().ok_or_else(|| Error::NotUtf8(path.to_owned()))?); } } } Ok(ProtoPathBuf { path: path_str }) } } protobuf-parse-3.7.2/src/protobuf_abs_path.rs000064400000000000000000000223011046102023000174410ustar 00000000000000#![doc(hidden)] use std::fmt; use std::mem; use std::ops::Deref; use protobuf::descriptor::FileDescriptorProto; use protobuf::reflect::FileDescriptor; use protobuf::reflect::MessageDescriptor; use crate::protobuf_ident::ProtobufIdent; use crate::protobuf_rel_path::ProtobufRelPath; use crate::ProtobufIdentRef; use crate::ProtobufRelPathRef; /// Protobuf absolute name (e. g. `.foo.Bar`). 
#[derive(Clone, Eq, PartialEq, Debug, Hash)] #[doc(hidden)] pub struct ProtobufAbsPath { pub path: String, } #[doc(hidden)] #[derive(Eq, PartialEq, Debug, Hash)] #[repr(C)] pub struct ProtobufAbsPathRef(str); impl Default for ProtobufAbsPath { fn default() -> ProtobufAbsPath { ProtobufAbsPath::root() } } impl Deref for ProtobufAbsPathRef { type Target = str; fn deref(&self) -> &str { &self.0 } } impl Deref for ProtobufAbsPath { type Target = ProtobufAbsPathRef; fn deref(&self) -> &ProtobufAbsPathRef { ProtobufAbsPathRef::new(&self.path) } } impl ProtobufAbsPathRef { pub fn is_root(&self) -> bool { self.0.is_empty() } pub fn root() -> &'static ProtobufAbsPathRef { Self::new("") } pub fn new(path: &str) -> &ProtobufAbsPathRef { assert!(ProtobufAbsPath::is_abs(path), "{:?} is not absolute", path); // SAFETY: repr(transparent) unsafe { mem::transmute(path) } } pub fn remove_prefix(&self, prefix: &ProtobufAbsPathRef) -> Option<&ProtobufRelPathRef> { if self.0.starts_with(&prefix.0) { let rem = &self.0[prefix.0.len()..]; if rem.is_empty() { return Some(ProtobufRelPathRef::empty()); } if rem.starts_with('.') { return Some(ProtobufRelPathRef::new(&rem[1..])); } } None } pub fn starts_with(&self, that: &ProtobufAbsPathRef) -> bool { self.remove_prefix(that).is_some() } pub fn as_str(&self) -> &str { &self.0 } pub fn to_owned(&self) -> ProtobufAbsPath { ProtobufAbsPath { path: self.0.to_owned(), } } pub fn parent(&self) -> Option<&ProtobufAbsPathRef> { match self.0.rfind('.') { Some(pos) => Some(ProtobufAbsPathRef::new(&self.0[..pos])), None => { if self.0.is_empty() { None } else { Some(ProtobufAbsPathRef::root()) } } } } pub fn self_and_parents(&self) -> Vec<&ProtobufAbsPathRef> { let mut tmp = self; let mut r: Vec<&ProtobufAbsPathRef> = Vec::new(); r.push(&self); while let Some(parent) = tmp.parent() { r.push(parent); tmp = parent; } r } } impl ProtobufAbsPath { pub fn root() -> ProtobufAbsPath { ProtobufAbsPathRef::root().to_owned() } pub fn as_ref(&self) -> 
&ProtobufAbsPathRef { ProtobufAbsPathRef::new(&self.path) } /// If given name is an fully quialified protobuf name. pub fn is_abs(path: &str) -> bool { path.is_empty() || (path.starts_with(".") && path != ".") } pub fn try_new(path: &str) -> Option { if ProtobufAbsPath::is_abs(path) { Some(ProtobufAbsPath::new(path)) } else { None } } pub fn new>(path: S) -> ProtobufAbsPath { let path = path.into(); assert!( ProtobufAbsPath::is_abs(&path), "path is not absolute: `{}`", path ); assert!(!path.ends_with("."), "{}", path); ProtobufAbsPath { path } } pub fn new_from_rel(path: &str) -> ProtobufAbsPath { assert!( !path.starts_with("."), "rel path must not start with dot: {:?}", path ); ProtobufAbsPath { path: if path.is_empty() { String::new() } else { format!(".{}", path) }, } } pub fn package_from_file_proto(file: &FileDescriptorProto) -> ProtobufAbsPath { Self::new_from_rel(file.package()) } pub fn package_from_file_descriptor(file: &FileDescriptor) -> ProtobufAbsPath { Self::package_from_file_proto(file.proto()) } pub fn from_message(message: &MessageDescriptor) -> ProtobufAbsPath { Self::new_from_rel(&message.full_name()) } pub fn concat(a: &ProtobufAbsPathRef, b: &ProtobufRelPathRef) -> ProtobufAbsPath { let mut a = a.to_owned(); a.push_relative(b); a } pub fn from_path_without_dot(path: &str) -> ProtobufAbsPath { assert!(!path.is_empty()); assert!(!path.starts_with(".")); assert!(!path.ends_with(".")); ProtobufAbsPath::new(format!(".{}", path)) } pub fn from_path_maybe_dot(path: &str) -> ProtobufAbsPath { if path.starts_with(".") { ProtobufAbsPath::new(path.to_owned()) } else { ProtobufAbsPath::from_path_without_dot(path) } } pub fn push_simple(&mut self, simple: &ProtobufIdentRef) { self.path.push('.'); self.path.push_str(&simple); } pub fn push_relative(&mut self, relative: &ProtobufRelPathRef) { if !relative.is_empty() { self.path.push_str(&format!(".{}", relative)); } } pub fn remove_suffix(&self, suffix: &ProtobufRelPathRef) -> Option<&ProtobufAbsPathRef> { if 
suffix.is_empty() { return Some(ProtobufAbsPathRef::new(&self.path)); } if self.path.ends_with(suffix.as_str()) { let rem = &self.path[..self.path.len() - suffix.as_str().len()]; if rem.is_empty() { return Some(ProtobufAbsPathRef::root()); } if rem.ends_with('.') { return Some(ProtobufAbsPathRef::new(&rem[..rem.len() - 1])); } } None } /// Pop the last name component pub fn pop(&mut self) -> Option { match self.path.rfind('.') { Some(dot) => { let ident = ProtobufIdent::new(&self.path[dot + 1..]); self.path.truncate(dot); Some(ident) } None => None, } } pub fn to_root_rel(&self) -> ProtobufRelPath { if self == &Self::root() { ProtobufRelPath::empty() } else { ProtobufRelPath::new(&self.path[1..]) } } pub fn ends_with(&self, that: &ProtobufRelPath) -> bool { self.remove_suffix(that).is_some() } } impl From<&'_ str> for ProtobufAbsPath { fn from(s: &str) -> Self { ProtobufAbsPath::new(s.to_owned()) } } impl From for ProtobufAbsPath { fn from(s: String) -> Self { ProtobufAbsPath::new(s) } } impl fmt::Display for ProtobufAbsPathRef { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", &self.0) } } impl fmt::Display for ProtobufAbsPath { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", ProtobufAbsPathRef::new(&self.0)) } } #[cfg(test)] mod test { use super::*; #[test] fn absolute_path_push_simple() { let mut foo = ProtobufAbsPath::new(".foo".to_owned()); foo.push_simple(ProtobufIdentRef::new("bar")); assert_eq!(ProtobufAbsPath::new(".foo.bar".to_owned()), foo); let mut foo = ProtobufAbsPath::root(); foo.push_simple(ProtobufIdentRef::new("bar")); assert_eq!(ProtobufAbsPath::new(".bar".to_owned()), foo); } #[test] fn absolute_path_remove_prefix() { assert_eq!( Some(ProtobufRelPathRef::empty()), ProtobufAbsPath::new(".foo".to_owned()) .remove_prefix(&ProtobufAbsPath::new(".foo".to_owned())) ); assert_eq!( Some(ProtobufRelPathRef::new("bar")), ProtobufAbsPath::new(".foo.bar".to_owned()) 
.remove_prefix(&ProtobufAbsPath::new(".foo".to_owned())) ); assert_eq!( Some(ProtobufRelPathRef::new("baz.qux")), ProtobufAbsPath::new(".foo.bar.baz.qux".to_owned()) .remove_prefix(&ProtobufAbsPath::new(".foo.bar".to_owned())) ); assert_eq!( None, ProtobufAbsPath::new(".foo.barbaz".to_owned()) .remove_prefix(ProtobufAbsPathRef::new(".foo.bar")) ); } #[test] fn self_and_parents() { assert_eq!( vec![ ProtobufAbsPathRef::new(".ab.cde.fghi"), ProtobufAbsPathRef::new(".ab.cde"), ProtobufAbsPathRef::new(".ab"), ProtobufAbsPathRef::root(), ], ProtobufAbsPath::new(".ab.cde.fghi".to_owned()).self_and_parents() ); } #[test] fn ends_with() { assert!(ProtobufAbsPath::new(".foo.bar").ends_with(&ProtobufRelPath::new(""))); assert!(ProtobufAbsPath::new(".foo.bar").ends_with(&ProtobufRelPath::new("bar"))); assert!(ProtobufAbsPath::new(".foo.bar").ends_with(&ProtobufRelPath::new("foo.bar"))); assert!(!ProtobufAbsPath::new(".foo.bar").ends_with(&ProtobufRelPath::new("foo.bar.baz"))); } } protobuf-parse-3.7.2/src/protobuf_ident.rs000064400000000000000000000036251046102023000167730ustar 00000000000000#![doc(hidden)] use std::fmt; use std::mem; use std::ops::Deref; /// Identifier in `.proto` file #[derive(Eq, PartialEq, Debug, Clone, Hash)] #[doc(hidden)] pub struct ProtobufIdent(String); #[derive(Eq, PartialEq, Debug, Hash)] #[doc(hidden)] #[repr(transparent)] pub struct ProtobufIdentRef(str); impl Deref for ProtobufIdentRef { type Target = str; fn deref(&self) -> &str { &self.0 } } impl Deref for ProtobufIdent { type Target = ProtobufIdentRef; fn deref(&self) -> &ProtobufIdentRef { ProtobufIdentRef::new(&self.0) } } impl From<&'_ str> for ProtobufIdent { fn from(s: &str) -> Self { ProtobufIdent::new(s) } } impl From for ProtobufIdent { fn from(s: String) -> Self { ProtobufIdent::new(&s) } } impl Into for ProtobufIdent { fn into(self) -> String { self.0 } } impl fmt::Display for ProtobufIdent { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.get(), f) } 
}

impl ProtobufIdentRef {
    /// Wrap a borrowed `str` as an identifier reference.
    ///
    /// Panics if `ident` is empty.
    pub fn new<'a>(ident: &'a str) -> &'a ProtobufIdentRef {
        assert!(!ident.is_empty());
        // SAFETY: ProtobufIdentRef is repr(transparent)
        unsafe { mem::transmute(ident) }
    }

    pub fn as_str(&self) -> &str {
        &*self
    }

    pub fn to_owned(&self) -> ProtobufIdent {
        ProtobufIdent(self.0.to_owned())
    }
}

impl ProtobufIdent {
    pub fn as_ref(&self) -> &ProtobufIdentRef {
        ProtobufIdentRef::new(&self.0)
    }

    /// Construct from a string; panics if it is empty or contains any
    /// separator character (`/`, `.`, `:`, `(`, `)`).
    pub fn new(s: &str) -> ProtobufIdent {
        assert!(!s.is_empty());
        assert!(!s.contains("/"));
        assert!(!s.contains("."));
        assert!(!s.contains(":"));
        assert!(!s.contains("("));
        assert!(!s.contains(")"));
        ProtobufIdent(s.to_owned())
    }

    pub fn get(&self) -> &str {
        &self.0
    }

    pub fn into_string(self) -> String {
        self.0
    }
}
protobuf-parse-3.7.2/src/protobuf_path.rs000064400000000000000000000022241046102023000166160ustar 00000000000000use std::fmt;

use crate::protobuf_abs_path::ProtobufAbsPath;
use crate::protobuf_rel_path::ProtobufRelPath;

/// Protobuf identifier can be absolute or relative.
#[derive(Debug, Eq, PartialEq, Clone, Hash)]
pub(crate) enum ProtobufPath {
    Abs(ProtobufAbsPath),
    Rel(ProtobufRelPath),
}

impl ProtobufPath {
    /// Parse a path: a leading `.` makes it absolute, otherwise relative.
    pub fn new<S: Into<String>>(path: S) -> ProtobufPath {
        let path = path.into();
        if path.starts_with('.') {
            ProtobufPath::Abs(ProtobufAbsPath::new(path))
        } else {
            ProtobufPath::Rel(ProtobufRelPath::new(path))
        }
    }

    /// Resolve against `package`: absolute paths are returned unchanged,
    /// relative paths are appended to the package.
    pub fn _resolve(&self, package: &ProtobufAbsPath) -> ProtobufAbsPath {
        match self {
            ProtobufPath::Abs(p) => p.clone(),
            ProtobufPath::Rel(p) => {
                let mut package = package.clone();
                package.push_relative(p);
                package
            }
        }
    }
}

impl fmt::Display for ProtobufPath {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ProtobufPath::Abs(p) => write!(f, "{}", p),
            ProtobufPath::Rel(p) => write!(f, "{}", p),
        }
    }
}
protobuf-parse-3.7.2/src/protobuf_rel_path.rs000064400000000000000000000141171046102023000174640ustar 00000000000000#![doc(hidden)]

use std::fmt;
use std::iter;
use std::mem;
use std::ops::Deref;

use
crate::protobuf_abs_path::ProtobufAbsPath; use crate::protobuf_ident::ProtobufIdent; use crate::ProtobufIdentRef; impl From for ProtobufRelPath { fn from(s: String) -> ProtobufRelPath { ProtobufRelPath::new(s) } } impl From<&'_ str> for ProtobufRelPath { fn from(s: &str) -> ProtobufRelPath { ProtobufRelPath::from(s.to_owned()) } } impl ProtobufRelPathRef { pub fn as_str(&self) -> &str { &self } pub fn empty() -> &'static ProtobufRelPathRef { Self::new("") } pub fn new(path: &str) -> &ProtobufRelPathRef { assert!(!path.starts_with('.')); // SAFETY: repr(transparent) unsafe { mem::transmute(path) } } pub fn is_empty(&self) -> bool { self.0.is_empty() } pub fn split_first_rem(&self) -> Option<(&ProtobufIdentRef, &ProtobufRelPathRef)> { if self.is_empty() { None } else { match self.0.find('.') { Some(i) => Some(( ProtobufIdentRef::new(&self.0[..i]), ProtobufRelPathRef::new(&self.0[i + 1..]), )), None => Some((ProtobufIdentRef::new(&self.0), ProtobufRelPathRef::empty())), } } } pub fn components(&self) -> impl Iterator { iter::once(&self.0) .filter(|s| !s.is_empty()) .flat_map(|p| p.split('.').map(|s| ProtobufIdentRef::new(s))) } fn parent(&self) -> Option<&ProtobufRelPathRef> { if self.0.is_empty() { None } else { match self.0.rfind('.') { Some(i) => Some(ProtobufRelPathRef::new(&self.0[..i])), None => Some(ProtobufRelPathRef::empty()), } } } pub fn self_and_parents(&self) -> Vec<&ProtobufRelPathRef> { let mut tmp = self; let mut r = Vec::new(); r.push(self); while let Some(parent) = tmp.parent() { r.push(parent); tmp = parent; } r } pub fn append(&self, simple: &ProtobufRelPathRef) -> ProtobufRelPath { if self.is_empty() { simple.to_owned() } else if simple.is_empty() { self.to_owned() } else { ProtobufRelPath { path: format!("{}.{}", &self.0, &simple.0), } } } pub fn append_ident(&self, simple: &ProtobufIdentRef) -> ProtobufRelPath { self.append(&ProtobufRelPath::from(simple.to_owned())) } pub fn to_absolute(&self) -> ProtobufAbsPath { self.to_owned().into_absolute() 
} pub fn to_owned(&self) -> ProtobufRelPath { ProtobufRelPath { path: self.0.to_owned(), } } } impl ProtobufRelPath { pub fn as_ref(&self) -> &ProtobufRelPathRef { &self } pub fn empty() -> ProtobufRelPath { ProtobufRelPath { path: String::new(), } } pub fn new>(path: S) -> ProtobufRelPath { let path = path.into(); // Validate ProtobufRelPathRef::new(&path); ProtobufRelPath { path } } pub fn from_components<'a, I: IntoIterator>( i: I, ) -> ProtobufRelPath { let v: Vec<&str> = i.into_iter().map(|c| c.as_str()).collect(); ProtobufRelPath::from(v.join(".")) } pub fn into_absolute(self) -> ProtobufAbsPath { if self.is_empty() { ProtobufAbsPath::root() } else { ProtobufAbsPath::from(format!(".{}", self)) } } } #[doc(hidden)] #[derive(Debug, Eq, PartialEq, Clone, Hash)] pub struct ProtobufRelPath { pub(crate) path: String, } #[doc(hidden)] #[derive(Debug, Eq, PartialEq, Hash)] #[repr(transparent)] pub struct ProtobufRelPathRef(str); impl Deref for ProtobufRelPathRef { type Target = str; fn deref(&self) -> &str { &self.0 } } impl Deref for ProtobufRelPath { type Target = ProtobufRelPathRef; fn deref(&self) -> &ProtobufRelPathRef { ProtobufRelPathRef::new(&self.path) } } impl From for ProtobufRelPath { fn from(s: ProtobufIdent) -> ProtobufRelPath { ProtobufRelPath { path: s.into() } } } impl fmt::Display for ProtobufRelPathRef { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", &self.0) } } impl fmt::Display for ProtobufRelPath { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.path) } } #[cfg(test)] mod test { use super::*; #[test] fn parent() { assert_eq!(None, ProtobufRelPathRef::empty().parent()); assert_eq!( Some(ProtobufRelPathRef::empty()), ProtobufRelPath::new("aaa".to_owned()).parent() ); assert_eq!( Some(ProtobufRelPathRef::new("abc")), ProtobufRelPath::new("abc.def".to_owned()).parent() ); assert_eq!( Some(ProtobufRelPathRef::new("abc.def")), ProtobufRelPath::new("abc.def.gh".to_owned()).parent() ); } #[test] fn 
self_and_parents() { assert_eq!( vec![ ProtobufRelPathRef::new("ab.cde.fghi"), ProtobufRelPathRef::new("ab.cde"), ProtobufRelPathRef::new("ab"), ProtobufRelPathRef::empty(), ], ProtobufRelPath::new("ab.cde.fghi".to_owned()).self_and_parents() ); } #[test] fn components() { assert_eq!( Vec::<&ProtobufIdentRef>::new(), ProtobufRelPath::empty().components().collect::>() ); assert_eq!( vec![ProtobufIdentRef::new("ab")], ProtobufRelPath::new("ab").components().collect::>() ); assert_eq!( vec![ProtobufIdentRef::new("ab"), ProtobufIdentRef::new("cd")], ProtobufRelPath::new("ab.cd") .components() .collect::>() ); } } protobuf-parse-3.7.2/src/protoc/command.rs000064400000000000000000000227461046102023000167010ustar 00000000000000//! API to invoke `protoc` command. //! //! `protoc` command must be in `$PATH`, along with `protoc-gen-LANG` command. //! //! Note that to generate `rust` code from `.proto` files, `protoc-rust` crate //! can be used, which does not require `protoc-gen-rs` present in `$PATH`. 
#![deny(missing_docs)] #![deny(rustdoc::broken_intra_doc_links)] use std::ffi::OsStr; use std::ffi::OsString; use std::fmt; use std::io; use std::path::Path; use std::path::PathBuf; use std::process; use std::process::Stdio; use log::info; #[derive(Debug, thiserror::Error)] enum Error { #[error("protoc command exited with non-zero code")] ProtocNonZero, #[error("protoc command {0} exited with non-zero code")] ProtocNamedNonZero(String), #[error("protoc command {0} exited with non-zero code; stderr: {1:?}")] ProtocNamedNonZeroStderr(String, String), #[error("input is empty")] InputIsEmpty, #[error("output is empty")] OutputIsEmpty, #[error("output does not start with prefix")] OutputDoesNotStartWithPrefix, #[error("version is empty")] VersionIsEmpty, #[error("version does not start with digit")] VersionDoesNotStartWithDigit, #[error("failed to spawn command `{0}`")] FailedToSpawnCommand(String, #[source] io::Error), #[error("protoc output is not UTF-8")] ProtocOutputIsNotUtf8, } /// `Protoc --descriptor_set_out...` args #[derive(Debug)] pub(crate) struct DescriptorSetOutArgs { protoc: Protoc, /// `--file_descriptor_out=...` param out: Option, /// `-I` args includes: Vec, /// List of `.proto` files to compile inputs: Vec, /// `--include_imports` include_imports: bool, /// Extra command line flags (like `--experimental_allow_proto3_optional`) extra_args: Vec, /// Capture stderr instead of inheriting it. 
capture_stderr: bool, } impl DescriptorSetOutArgs { /// Set `--file_descriptor_out=...` param pub fn out(&mut self, out: impl AsRef) -> &mut Self { self.out = Some(out.as_ref().to_owned()); self } /// Append a path to `-I` args pub fn include(&mut self, include: impl AsRef) -> &mut Self { self.includes.push(include.as_ref().to_owned()); self } /// Append multiple paths to `-I` args pub fn includes(&mut self, includes: impl IntoIterator>) -> &mut Self { for include in includes { self.include(include); } self } /// Append a `.proto` file path to compile pub fn input(&mut self, input: impl AsRef) -> &mut Self { self.inputs.push(input.as_ref().to_owned()); self } /// Append multiple `.proto` file paths to compile pub fn inputs(&mut self, inputs: impl IntoIterator>) -> &mut Self { for input in inputs { self.input(input); } self } /// Set `--include_imports` pub fn include_imports(&mut self, include_imports: bool) -> &mut Self { self.include_imports = include_imports; self } /// Add command line flags like `--experimental_allow_proto3_optional`. pub fn extra_arg(&mut self, arg: impl Into) -> &mut Self { self.extra_args.push(arg.into()); self } /// Add command line flags like `--experimental_allow_proto3_optional`. pub fn extra_args(&mut self, args: impl IntoIterator>) -> &mut Self { for arg in args { self.extra_arg(arg); } self } /// Capture stderr instead of inheriting it. 
pub(crate) fn capture_stderr(&mut self, capture_stderr: bool) -> &mut Self { self.capture_stderr = capture_stderr; self } /// Execute `protoc --descriptor_set_out=` pub fn write_descriptor_set(&self) -> anyhow::Result<()> { if self.inputs.is_empty() { return Err(Error::InputIsEmpty.into()); } let out = self.out.as_ref().ok_or_else(|| Error::OutputIsEmpty)?; // -I{include} let include_flags = self.includes.iter().map(|include| { let mut flag = OsString::from("-I"); flag.push(include); flag }); // --descriptor_set_out={out} let mut descriptor_set_out_flag = OsString::from("--descriptor_set_out="); descriptor_set_out_flag.push(out); // --include_imports let include_imports_flag = match self.include_imports { false => None, true => Some("--include_imports".into()), }; let mut cmd_args = Vec::new(); cmd_args.extend(include_flags); cmd_args.push(descriptor_set_out_flag); cmd_args.extend(include_imports_flag); cmd_args.extend(self.inputs.iter().map(|path| path.as_os_str().to_owned())); cmd_args.extend(self.extra_args.iter().cloned()); self.protoc.run_with_args(cmd_args, self.capture_stderr) } } /// Protoc command. 
#[derive(Clone, Debug)] pub(crate) struct Protoc { exec: OsString, } impl Protoc { /// New `protoc` command from `$PATH` pub(crate) fn from_env_path() -> Protoc { match which::which("protoc") { Ok(path) => Protoc { exec: path.into_os_string(), }, Err(e) => { panic!("protoc binary not found: {}", e); } } } /// New `protoc` command from specified path /// /// # Examples /// /// ```no_run /// # mod protoc_bin_vendored { /// # pub fn protoc_bin_path() -> Result { /// # unimplemented!() /// # } /// # } /// /// // Use a binary from `protoc-bin-vendored` crate /// let protoc = protoc::Protoc::from_path( /// protoc_bin_vendored::protoc_bin_path().unwrap()); /// ``` pub(crate) fn from_path(path: impl AsRef) -> Protoc { Protoc { exec: path.as_ref().to_owned(), } } /// Check `protoc` command found and valid pub(crate) fn _check(&self) -> anyhow::Result<()> { self.version()?; Ok(()) } fn spawn(&self, cmd: &mut process::Command) -> anyhow::Result { info!("spawning command {:?}", cmd); cmd.spawn() .map_err(|e| Error::FailedToSpawnCommand(format!("{:?}", cmd), e).into()) } /// Obtain `protoc` version pub(crate) fn version(&self) -> anyhow::Result { let child = self.spawn( process::Command::new(&self.exec) .stdin(process::Stdio::null()) .stdout(process::Stdio::piped()) .stderr(process::Stdio::piped()) .args(&["--version"]), )?; let output = child.wait_with_output()?; if !output.status.success() { return Err(Error::ProtocNonZero.into()); } let output = String::from_utf8(output.stdout).map_err(|_| Error::ProtocOutputIsNotUtf8)?; let output = match output.lines().next() { None => return Err(Error::OutputIsEmpty.into()), Some(line) => line, }; let prefix = "libprotoc "; if !output.starts_with(prefix) { return Err(Error::OutputDoesNotStartWithPrefix.into()); } let output = &output[prefix.len()..]; if output.is_empty() { return Err(Error::VersionIsEmpty.into()); } let first = output.chars().next().unwrap(); if !first.is_digit(10) { return Err(Error::VersionDoesNotStartWithDigit.into()); 
} Ok(Version { version: output.to_owned(), }) } /// Execute `protoc` command with given args, check it completed correctly. fn run_with_args(&self, args: Vec, capture_stderr: bool) -> anyhow::Result<()> { let mut cmd = process::Command::new(&self.exec); cmd.stdin(process::Stdio::null()); cmd.args(args); if capture_stderr { cmd.stderr(Stdio::piped()); } let mut child = self.spawn(&mut cmd)?; if capture_stderr { let output = child.wait_with_output()?; if !output.status.success() { let stderr = String::from_utf8_lossy(&output.stderr); let stderr = stderr.trim_end().to_owned(); return Err(Error::ProtocNamedNonZeroStderr(format!("{:?}", cmd), stderr).into()); } } else { if !child.wait()?.success() { return Err(Error::ProtocNamedNonZero(format!("{:?}", cmd)).into()); } } Ok(()) } /// Get default DescriptorSetOutArgs for this command. pub(crate) fn descriptor_set_out_args(&self) -> DescriptorSetOutArgs { DescriptorSetOutArgs { protoc: self.clone(), out: None, includes: Vec::new(), inputs: Vec::new(), include_imports: false, extra_args: Vec::new(), capture_stderr: false, } } } /// Protobuf (protoc) version. pub(crate) struct Version { version: String, } impl Version { /// `true` if the protoc major version is 3. pub fn _is_3(&self) -> bool { self.version.starts_with("3") } } impl fmt::Display for Version { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.version, f) } } #[cfg(test)] mod test { use super::*; #[test] fn version() { Protoc::from_env_path().version().expect("version"); } } protobuf-parse-3.7.2/src/protoc/mod.rs000064400000000000000000000001561046102023000160310ustar 00000000000000//! Parse `.proto` files using `protoc` command. 
pub(crate) mod command; pub(crate) mod parse_and_typecheck; protobuf-parse-3.7.2/src/protoc/parse_and_typecheck.rs000064400000000000000000000027631046102023000212530ustar 00000000000000use std::fs; use protobuf::descriptor::FileDescriptorSet; use protobuf::Message; use crate::protoc::command::Protoc; use crate::pure::parse_and_typecheck::path_to_proto_path; use crate::ParsedAndTypechecked; use crate::Parser; use crate::ProtoPathBuf; /// Parse `.proto` files using `protoc` command. pub(crate) fn parse_and_typecheck(parser: &Parser) -> anyhow::Result { let temp_dir = tempfile::Builder::new() .prefix("protobuf-parse") .tempdir()?; let temp_file = temp_dir.path().join("descriptor.pbbin"); let relative_paths: Vec = parser .inputs .iter() .map(|p| path_to_proto_path(p, &parser.includes)) .collect::>()?; let protoc = match &parser.protoc { Some(protoc) => Protoc::from_path(protoc), None => Protoc::from_env_path(), }; protoc .descriptor_set_out_args() .inputs(&parser.inputs) .includes(&parser.includes) .out(&temp_file) .include_imports(true) .extra_args(&parser.protoc_extra_args) .capture_stderr(parser.capture_stderr) .write_descriptor_set()?; let version = protoc.version()?; let fds = fs::read(temp_file)?; drop(temp_dir); let fds: protobuf::descriptor::FileDescriptorSet = FileDescriptorSet::parse_from_bytes(&fds)?; Ok(ParsedAndTypechecked { relative_paths, file_descriptors: fds.file, parser: format!("protoc {}", version), }) } protobuf-parse-3.7.2/src/pure/convert/mod.rs000064400000000000000000000574761046102023000171770ustar 00000000000000//! 
Convert parser model to rust-protobuf model mod option_resolver; mod type_resolver; use protobuf::descriptor::descriptor_proto::ReservedRange; use protobuf::descriptor::enum_descriptor_proto::EnumReservedRange; use protobuf::descriptor::field_descriptor_proto; use protobuf::descriptor::field_descriptor_proto::Type; use protobuf::descriptor::FieldDescriptorProto; use protobuf::descriptor::OneofDescriptorProto; use protobuf::reflect::FileDescriptor; use protobuf_support::json_name::json_name; use protobuf_support::text_format::escape_bytes_to; use crate::case_convert::camel_case; use crate::path::fs_path_to_proto_path; use crate::proto_path::ProtoPath; use crate::protobuf_abs_path::ProtobufAbsPath; use crate::protobuf_ident::ProtobufIdent; use crate::pure::convert::option_resolver::OptionResoler; use crate::pure::convert::option_resolver::ProtobufOptions; use crate::pure::convert::type_resolver::MessageOrEnum; use crate::pure::convert::type_resolver::TypeResolver; use crate::pure::model; use crate::FileDescriptorPair; use crate::ProtobufAbsPathRef; use crate::ProtobufIdentRef; #[derive(Debug, thiserror::Error)] enum ConvertError { #[error("default value is not a string literal")] DefaultValueIsNotStringLiteral, #[error("expecting a message for name {0}")] ExpectingMessage(ProtobufAbsPath), #[error("expecting an enum for name {0}")] ExpectingEnum(ProtobufAbsPath), } pub struct WithFullName { full_name: ProtobufAbsPath, t: T, } #[derive(Debug, PartialEq)] enum TypeResolved { Int32, Int64, Uint32, Uint64, Sint32, Sint64, Bool, Fixed64, Sfixed64, Double, String, Bytes, Fixed32, Sfixed32, Float, Message(ProtobufAbsPath), Enum(ProtobufAbsPath), Group(ProtobufAbsPath), } impl TypeResolved { fn from_field(field: &FieldDescriptorProto) -> TypeResolved { match field.type_() { Type::TYPE_DOUBLE => TypeResolved::Double, Type::TYPE_FLOAT => TypeResolved::Float, Type::TYPE_INT64 => TypeResolved::Int64, Type::TYPE_UINT64 => TypeResolved::Uint64, Type::TYPE_INT32 => 
TypeResolved::Int32, Type::TYPE_FIXED64 => TypeResolved::Fixed64, Type::TYPE_FIXED32 => TypeResolved::Fixed32, Type::TYPE_UINT32 => TypeResolved::Uint32, Type::TYPE_SFIXED32 => TypeResolved::Sfixed32, Type::TYPE_SFIXED64 => TypeResolved::Sfixed64, Type::TYPE_SINT32 => TypeResolved::Sint32, Type::TYPE_SINT64 => TypeResolved::Sint64, Type::TYPE_BOOL => TypeResolved::Bool, Type::TYPE_STRING => TypeResolved::String, Type::TYPE_BYTES => TypeResolved::Bytes, Type::TYPE_GROUP => { assert!(!field.type_name().is_empty()); TypeResolved::Group(ProtobufAbsPath::new(field.type_name())) } Type::TYPE_ENUM => { assert!(!field.type_name().is_empty()); TypeResolved::Enum(ProtobufAbsPath::new(field.type_name())) } Type::TYPE_MESSAGE => { assert!(!field.type_name().is_empty()); TypeResolved::Message(ProtobufAbsPath::new(field.type_name())) } } } fn type_enum(&self) -> Type { match self { TypeResolved::Bool => Type::TYPE_BOOL, TypeResolved::Int32 => Type::TYPE_INT32, TypeResolved::Int64 => Type::TYPE_INT64, TypeResolved::Uint32 => Type::TYPE_UINT32, TypeResolved::Uint64 => Type::TYPE_UINT64, TypeResolved::Sint32 => Type::TYPE_SINT32, TypeResolved::Sint64 => Type::TYPE_SINT64, TypeResolved::Fixed32 => Type::TYPE_FIXED32, TypeResolved::Fixed64 => Type::TYPE_FIXED64, TypeResolved::Sfixed32 => Type::TYPE_SFIXED32, TypeResolved::Sfixed64 => Type::TYPE_SFIXED64, TypeResolved::Float => Type::TYPE_FLOAT, TypeResolved::Double => Type::TYPE_DOUBLE, TypeResolved::String => Type::TYPE_STRING, TypeResolved::Bytes => Type::TYPE_BYTES, TypeResolved::Message(_) => Type::TYPE_MESSAGE, TypeResolved::Enum(_) => Type::TYPE_ENUM, TypeResolved::Group(_) => Type::TYPE_GROUP, } } fn type_name(&self) -> Option<&ProtobufAbsPath> { match self { TypeResolved::Message(t) | TypeResolved::Enum(t) | TypeResolved::Group(t) => Some(t), _ => None, } } } pub(crate) struct Resolver<'a> { type_resolver: TypeResolver<'a>, current_file: &'a model::FileDescriptor, } impl<'a> Resolver<'a> { fn 
map_entry_name_for_field_name(field_name: &str) -> ProtobufIdent { // Field name and message name must match, otherwise // Google's validation fails. // https://git.io/JeOvF ProtobufIdent::from(format!("{}Entry", camel_case(field_name))) } fn map_entry_field( &self, scope: &ProtobufAbsPath, name: &str, number: i32, field_type: &model::FieldType, ) -> anyhow::Result { // should be consisent with DescriptorBuilder::ValidateMapEntry let mut output = protobuf::descriptor::FieldDescriptorProto::new(); output.set_name(name.to_owned()); output.set_number(number); let t = self.field_type(&scope, name, field_type)?; output.set_type(t.type_enum()); if let Some(t_name) = t.type_name() { output.set_type_name(t_name.path.clone()); } output.set_label(field_descriptor_proto::Label::LABEL_OPTIONAL); output.set_json_name(json_name(&name)); Ok(output) } fn map_entry_message( &self, scope: &ProtobufAbsPath, field_name: &str, key: &model::FieldType, value: &model::FieldType, ) -> anyhow::Result { let mut output = protobuf::descriptor::DescriptorProto::new(); output.options.mut_or_insert_default().set_map_entry(true); output.set_name(Resolver::map_entry_name_for_field_name(field_name).into_string()); output .field .push(self.map_entry_field(&scope, "key", 1, key)?); output .field .push(self.map_entry_field(&scope, "value", 2, value)?); Ok(output) } fn group_message( &self, scope: &ProtobufAbsPath, name: &str, fields: &[model::WithLoc], ) -> anyhow::Result { let mut output = protobuf::descriptor::DescriptorProto::new(); output.set_name(name.to_owned()); for f in fields { output.field.push(self.field(scope, f, None)?); } Ok(output) } fn message( &self, scope: &ProtobufAbsPathRef, input: &model::Message, ) -> anyhow::Result { let mut nested_scope = scope.to_owned(); nested_scope.push_simple(ProtobufIdentRef::new(&input.name)); let mut output = protobuf::descriptor::DescriptorProto::new(); output.set_name(input.name.clone()); let mut nested_messages = Vec::new(); for m in &input.messages { 
let message = self.message(&nested_scope, &m.t)?; nested_messages.push(model::WithLoc { t: message, loc: m.loc, }); } for f in input.regular_fields_including_in_oneofs() { match &f.t.typ { model::FieldType::Map(t) => { let message = self.map_entry_message(&nested_scope, &f.t.name, &t.0, &t.1)?; nested_messages.push(model::WithLoc { t: message, loc: f.loc, }); } model::FieldType::Group(model::Group { name: group_name, fields, .. }) => { let message = self.group_message(&nested_scope, group_name, fields)?; nested_messages.push(model::WithLoc { t: message, loc: f.loc, }); } _ => (), } } // Preserve declaration order nested_messages.sort_by_key(|m| m.loc); output.nested_type = nested_messages .into_iter() .map(|model::WithLoc { t, .. }| t) .collect(); output.enum_type = input .enums .iter() .map(|e| self.enumeration(scope, e)) .collect::>()?; { let mut fields = Vec::new(); for fo in &input.fields { match &fo.t { model::FieldOrOneOf::Field(f) => { let oneof_index = if self.is_proto3_optional(f) { let oneof_index = output.oneof_decl.len() as i32; let mut oneof = OneofDescriptorProto::new(); oneof.set_name(format!("_{}", f.name)); output.oneof_decl.push(oneof); Some(oneof_index) } else { None }; fields.push(self.field(&nested_scope, f, oneof_index)?); } model::FieldOrOneOf::OneOf(o) => { let oneof_index = output.oneof_decl.len(); for f in &o.fields { fields.push(self.field(&nested_scope, f, Some(oneof_index as i32))?); } output.oneof_decl.push(self.oneof(scope, o)?); } } } output.field = fields; } for ext in &input.extension_ranges { let mut extension_range = protobuf::descriptor::descriptor_proto::ExtensionRange::new(); extension_range.set_start(*ext.start()); extension_range.set_end(*ext.end() + 1); output.extension_range.push(extension_range); } for ext in &input.extensions { let mut extension = self.field(scope, &ext.t.field, None)?; extension.set_extendee( self.type_resolver .resolve_message_or_enum(scope, &ext.t.extendee)? 
.full_name .path, ); output.extension.push(extension); } for reserved in &input.reserved_nums { let mut reserved_range = ReservedRange::new(); reserved_range.set_start(*reserved.start()); reserved_range.set_end(*reserved.end() + 1); output.reserved_range.push(reserved_range); } output.reserved_name = input.reserved_names.clone().into(); Ok(output) } fn service_method( &self, input: &model::Method, ) -> anyhow::Result { let scope = &self.current_file.package; let mut output = protobuf::descriptor::MethodDescriptorProto::new(); output.set_name(input.name.clone()); output.set_input_type( self.type_resolver .resolve_message_or_enum(scope, &input.input_type)? .full_name .to_string(), ); output.set_output_type( self.type_resolver .resolve_message_or_enum(scope, &input.output_type)? .full_name .to_string(), ); if input.client_streaming { output.set_client_streaming(input.client_streaming); } if input.server_streaming { output.set_server_streaming(input.server_streaming); } Ok(output) } fn service( &self, input: &model::Service, ) -> anyhow::Result { let mut output = protobuf::descriptor::ServiceDescriptorProto::new(); output.set_name(input.name.clone()); output.method = input .methods .iter() .map(|m| self.service_method(m)) .collect::>()?; Ok(output) } fn is_proto3_optional(&self, input: &model::WithLoc) -> bool { (self.current_file.syntax, input.t.rule) == (model::Syntax::Proto3, Some(model::Rule::Optional)) } fn field( &self, scope: &ProtobufAbsPathRef, input: &model::WithLoc, oneof_index: Option, ) -> anyhow::Result { let mut output = protobuf::descriptor::FieldDescriptorProto::new(); output.set_name(input.t.name.clone()); if let model::FieldType::Map(..) 
= input.t.typ { output.set_label(protobuf::descriptor::field_descriptor_proto::Label::LABEL_REPEATED); } else { output.set_label(label(input.t.rule)); if self.is_proto3_optional(input) { output.set_proto3_optional(true); } } let t = self.field_type(scope, &input.t.name, &input.t.typ)?; output.set_type(t.type_enum()); if let Some(t_name) = t.type_name() { output.set_type_name(t_name.path.clone()); } output.set_number(input.t.number); // TODO: move default to option parser if let Some(ref default) = input.t.options.as_slice().by_name("default") { let default = match output.type_() { protobuf::descriptor::field_descriptor_proto::Type::TYPE_STRING => { if let &model::ProtobufConstant::String(ref s) = default { s.decode_utf8()? } else { return Err(ConvertError::DefaultValueIsNotStringLiteral.into()); } } protobuf::descriptor::field_descriptor_proto::Type::TYPE_BYTES => { if let &model::ProtobufConstant::String(ref s) = default { let mut buf = String::new(); escape_bytes_to(&s.decode_bytes()?, &mut buf); buf } else { return Err(ConvertError::DefaultValueIsNotStringLiteral.into()); } } _ => default.format(), }; output.set_default_value(default); } if let Some(oneof_index) = oneof_index { output.set_oneof_index(oneof_index); } if let Some(json_name) = input.t.options.as_slice().by_name_string("json_name")? { output.set_json_name(json_name); } else { output.set_json_name(json_name(&input.t.name)); } Ok(output) } fn find_message_by_abs_name( &self, abs_path: &ProtobufAbsPath, ) -> anyhow::Result> { let with_full_name = self .type_resolver .find_message_or_enum_by_abs_name(abs_path)?; match with_full_name.t { MessageOrEnum::Message(m) => Ok(WithFullName { t: m, full_name: with_full_name.full_name, }), MessageOrEnum::Enum(..) => Err(ConvertError::ExpectingMessage(abs_path.clone()).into()), } } fn find_enum_by_abs_name( &self, abs_path: &ProtobufAbsPath, ) -> anyhow::Result<&'a model::Enumeration> { match self .type_resolver .find_message_or_enum_by_abs_name(abs_path)? 
.t { MessageOrEnum::Enum(e) => Ok(e), MessageOrEnum::Message(..) => Err(ConvertError::ExpectingEnum(abs_path.clone()).into()), } } fn field_type( &self, scope: &ProtobufAbsPathRef, name: &str, input: &model::FieldType, ) -> anyhow::Result { Ok(match *input { model::FieldType::Bool => TypeResolved::Bool, model::FieldType::Int32 => TypeResolved::Int32, model::FieldType::Int64 => TypeResolved::Int64, model::FieldType::Uint32 => TypeResolved::Uint32, model::FieldType::Uint64 => TypeResolved::Uint64, model::FieldType::Sint32 => TypeResolved::Sint32, model::FieldType::Sint64 => TypeResolved::Sint64, model::FieldType::Fixed32 => TypeResolved::Fixed32, model::FieldType::Fixed64 => TypeResolved::Fixed64, model::FieldType::Sfixed32 => TypeResolved::Sfixed32, model::FieldType::Sfixed64 => TypeResolved::Sfixed64, model::FieldType::Float => TypeResolved::Float, model::FieldType::Double => TypeResolved::Double, model::FieldType::String => TypeResolved::String, model::FieldType::Bytes => TypeResolved::Bytes, model::FieldType::MessageOrEnum(ref name) => { let t = self.type_resolver.resolve_message_or_enum(scope, &name)?; match t.t { MessageOrEnum::Message(..) => TypeResolved::Message(t.full_name), MessageOrEnum::Enum(..) => TypeResolved::Enum(t.full_name), } } model::FieldType::Map(..) => { let mut type_name = scope.to_owned(); type_name.push_simple(&Resolver::map_entry_name_for_field_name(name)); TypeResolved::Message(type_name) } model::FieldType::Group(model::Group { name: ref group_name, .. 
}) => { let mut type_name = scope.to_owned(); type_name.push_simple(ProtobufIdentRef::new(group_name)); TypeResolved::Group(type_name) } }) } fn enum_value( &self, _scope: &ProtobufAbsPathRef, input: &model::EnumValue, ) -> anyhow::Result { let mut output = protobuf::descriptor::EnumValueDescriptorProto::new(); output.set_name(input.name.clone()); output.set_number(input.number); Ok(output) } fn enumeration( &self, scope: &ProtobufAbsPathRef, input: &model::Enumeration, ) -> anyhow::Result { let mut output = protobuf::descriptor::EnumDescriptorProto::new(); output.set_name(input.name.clone()); output.value = input .values .iter() .map(|v| self.enum_value(scope, &v)) .collect::>()?; for reserved in &input.reserved_nums { let mut reserved_range = EnumReservedRange::new(); reserved_range.set_start(*reserved.start()); // EnumReservedRange is inclusive, not like ExtensionRange and // ReservedRange, which are exclusive. reserved_range.set_end(*reserved.end()); output.reserved_range.push(reserved_range); } output.reserved_name = input.reserved_names.clone().into(); Ok(output) } fn oneof( &self, _scope: &ProtobufAbsPathRef, input: &model::OneOf, ) -> anyhow::Result { let mut output = protobuf::descriptor::OneofDescriptorProto::new(); output.set_name(input.name.clone()); Ok(output) } fn extension( &self, scope: &ProtobufAbsPath, input: &model::Extension, ) -> anyhow::Result<( protobuf::descriptor::FieldDescriptorProto, Option, )> { let mut field = self.field(scope, &input.field, None)?; field.set_extendee( self.type_resolver .resolve_message_or_enum(scope, &input.extendee)? .full_name .to_string(), ); let group_messages = if let model::FieldType::Group(g) = &input.field.t.typ { Some(self.group_message(scope, &g.name, &g.fields)?) 
} else { None }; Ok((field, group_messages)) } } fn syntax(input: model::Syntax) -> String { match input { model::Syntax::Proto2 => "proto2".to_owned(), model::Syntax::Proto3 => "proto3".to_owned(), } } fn label(input: Option) -> protobuf::descriptor::field_descriptor_proto::Label { match input { Some(model::Rule::Optional) => { protobuf::descriptor::field_descriptor_proto::Label::LABEL_OPTIONAL } Some(model::Rule::Required) => { protobuf::descriptor::field_descriptor_proto::Label::LABEL_REQUIRED } Some(model::Rule::Repeated) => { protobuf::descriptor::field_descriptor_proto::Label::LABEL_REPEATED } None => protobuf::descriptor::field_descriptor_proto::Label::LABEL_OPTIONAL, } } pub(crate) fn populate_dependencies( input: &model::FileDescriptor, output: &mut protobuf::descriptor::FileDescriptorProto, ) { for import in &input.imports { if import.vis == model::ImportVis::Public { output .public_dependency .push(output.dependency.len() as i32); } else if import.vis == model::ImportVis::Weak { output.weak_dependency.push(output.dependency.len() as i32); } output.dependency.push(import.path.to_string()); } } pub(crate) fn file_descriptor( name: &ProtoPath, input: &model::FileDescriptor, deps: &[FileDescriptorPair], ) -> anyhow::Result { let resolver = Resolver { current_file: &input, type_resolver: TypeResolver { current_file: &input, deps, }, }; let mut output = protobuf::descriptor::FileDescriptorProto::new(); output.set_name(fs_path_to_proto_path(name)); output.set_syntax(syntax(input.syntax)); if input.package != ProtobufAbsPath::root() { output.set_package(input.package.to_root_rel().to_string()); } populate_dependencies(&input, &mut output); let mut messages = Vec::new(); let mut services = Vec::new(); let mut extensions = Vec::new(); for e in &input.extensions { let (ext, group_messages) = resolver.extension(&resolver.current_file.package, &e.t)?; extensions.push(ext); messages.extend(group_messages.map(model::WithLoc::with_loc(e.loc))); } output.extension = 
extensions; for m in &input.messages { let message = resolver.message(&resolver.current_file.package, &m.t)?; messages.push(model::WithLoc { t: message, loc: m.loc, }); } for s in &input.services { let service = resolver.service(&s.t)?; services.push(model::WithLoc { t: service, loc: s.loc, }) } // Preserve declaration order messages.sort_by_key(|m| m.loc); output.message_type = messages .into_iter() .map(|model::WithLoc { t, .. }| t) .collect(); output.enum_type = input .enums .iter() .map(|e| resolver.enumeration(&resolver.current_file.package, e)) .collect::>()?; output.service = services .into_iter() .map(|model::WithLoc { t, .. }| t) .collect(); let descriptor_without_options = FileDescriptor::new_dynamic( output.clone(), &deps .iter() .map(|d| d.descriptor.clone()) .collect::>(), )?; let option_resolver = OptionResoler { resolver: &resolver, descriptor_without_options, }; option_resolver.file(&mut output)?; Ok(output) } protobuf-parse-3.7.2/src/pure/convert/option_resolver.rs000064400000000000000000001071551046102023000216370ustar 00000000000000use protobuf::descriptor::DescriptorProto; use protobuf::descriptor::EnumDescriptorProto; use protobuf::descriptor::EnumValueDescriptorProto; use protobuf::descriptor::FieldDescriptorProto; use protobuf::descriptor::FileDescriptorProto; use protobuf::descriptor::MethodDescriptorProto; use protobuf::descriptor::OneofDescriptorProto; use protobuf::descriptor::ServiceDescriptorProto; use protobuf::reflect::FieldDescriptor; use protobuf::reflect::FileDescriptor; use protobuf::reflect::MessageDescriptor; use protobuf::MessageFull; use protobuf::UnknownFields; use protobuf::UnknownValue; use protobuf_support::lexer::str_lit::StrLitDecodeError; use crate::model; use crate::model::ProtobufConstant; use crate::model::ProtobufConstantMessage; use crate::model::ProtobufConstantMessageFieldName; use crate::model::ProtobufOptionName; use crate::model::ProtobufOptionNameExt; use crate::model::ProtobufOptionNamePart; use 
crate::model::WithLoc; use crate::protobuf_path::ProtobufPath; use crate::pure::convert::Resolver; use crate::pure::convert::TypeResolved; use crate::ProtobufAbsPath; use crate::ProtobufAbsPathRef; use crate::ProtobufIdent; use crate::ProtobufIdentRef; use crate::ProtobufRelPath; use crate::ProtobufRelPathRef; #[derive(Debug, thiserror::Error)] enum OptionResolverError { #[error(transparent)] OtherError(anyhow::Error), #[error("extension is not a message: {0}")] ExtensionIsNotMessage(String), #[error("unknown field name: {0}")] UnknownFieldName(String), #[error("wrong extension type: option {0} extendee {1} expected extendee {2}")] WrongExtensionType(String, String, String), #[error("extension not found: {0}")] ExtensionNotFound(String), #[error("unknown enum value: {0}")] UnknownEnumValue(String), #[error("unsupported extension type: {0} {1} {2}")] UnsupportedExtensionType(String, String, model::ProtobufConstant), #[error("builtin option {0} not found for options {1}")] BuiltinOptionNotFound(String, String), #[error("builtin option {0} points to a non-singular field of {1}")] BuiltinOptionPointsToNonSingularField(String, String), #[error("incorrect string literal: {0}")] StrLitDecodeError(#[source] StrLitDecodeError), #[error("wrong option type, expecting {0}, got `{1}`")] WrongOptionType(&'static str, String), #[error("Message field requires a message constant")] MessageFieldRequiresMessageConstant, #[error("message not found by name {0}")] MessageNotFound(ProtobufAbsPath), #[error("message not found by name {0}")] MessageFoundMoreThanOnce(ProtobufAbsPath), } #[derive(Clone)] enum LookupScope2 { File(FileDescriptor), Message(MessageDescriptor, ProtobufAbsPath), } impl LookupScope2 { fn current_path(&self) -> ProtobufAbsPath { match self { LookupScope2::File(f) => ProtobufAbsPath::package_from_file_descriptor(f), LookupScope2::Message(_, p) => p.clone(), } } fn messages(&self) -> Vec { match self { LookupScope2::File(file) => file.messages().collect(), 
LookupScope2::Message(message, _) => message.nested_messages().collect(), } } fn down(&self, name: &ProtobufIdentRef) -> Option { match self.messages().iter().find(|m| m.name() == name.as_str()) { Some(m) => { let mut path = self.current_path(); path.push_simple(name); Some(LookupScope2::Message(m.clone(), path)) } None => None, } } fn extensions(&self) -> Vec { match self { LookupScope2::File(f) => f.extensions().collect(), LookupScope2::Message(m, _) => m.extensions().collect(), } } } #[derive(Clone)] pub(crate) struct LookupScopeUnion2 { path: ProtobufAbsPath, scopes: Vec, partial_scopes: Vec, } impl LookupScopeUnion2 { fn down(&self, name: &ProtobufIdentRef) -> LookupScopeUnion2 { let mut path: ProtobufAbsPath = self.path.clone(); path.push_simple(name); let mut scopes: Vec<_> = self.scopes.iter().flat_map(|f| f.down(name)).collect(); let mut partial_scopes = Vec::new(); for partial_scope in &self.partial_scopes { let package = ProtobufAbsPath::package_from_file_descriptor(partial_scope); if package.as_ref() == path.as_ref() { scopes.push(LookupScope2::File(partial_scope.clone())); } else if package.starts_with(&path) { partial_scopes.push(partial_scope.clone()); } } LookupScopeUnion2 { path, scopes, partial_scopes, } } fn lookup(&self, path: &ProtobufRelPath) -> LookupScopeUnion2 { let mut scope = self.clone(); for c in path.components() { scope = scope.down(c); } scope } fn extensions(&self) -> Vec { self.scopes.iter().flat_map(|s| s.extensions()).collect() } fn as_message(&self) -> anyhow::Result { let mut messages: Vec = self .scopes .iter() .filter_map(|s| match s { LookupScope2::Message(m, _) => Some(m.clone()), _ => None, }) .collect(); let message = match messages.pop() { Some(m) => m, None => return Err(OptionResolverError::MessageNotFound(self.path.clone()).into()), }; if !messages.is_empty() { return Err(OptionResolverError::MessageFoundMoreThanOnce(self.path.clone()).into()); } Ok(message) } } pub(crate) trait ProtobufOptions { fn by_name(&self, 
name: &str) -> Option<&model::ProtobufConstant>; fn _by_name_bool(&self, name: &str) -> anyhow::Result> { match self.by_name(name) { Some(model::ProtobufConstant::Bool(b)) => Ok(Some(*b)), Some(c) => Err(OptionResolverError::WrongOptionType("bool", c.to_string()).into()), None => Ok(None), } } fn by_name_string(&self, name: &str) -> anyhow::Result> { match self.by_name(name) { Some(model::ProtobufConstant::String(s)) => s .decode_utf8() .map(Some) .map_err(|e| OptionResolverError::StrLitDecodeError(e).into()), Some(c) => Err(OptionResolverError::WrongOptionType("string", c.to_string()).into()), None => Ok(None), } } } impl<'a> ProtobufOptions for &'a [model::ProtobufOption] { fn by_name(&self, name: &str) -> Option<&model::ProtobufConstant> { let option_name = ProtobufOptionName::simple(name); for model::ProtobufOption { name, value } in *self { if name == &option_name { return Some(&value); } } None } } pub(crate) struct OptionResoler<'a> { pub(crate) resolver: &'a Resolver<'a>, pub(crate) descriptor_without_options: FileDescriptor, } impl<'a> OptionResoler<'a> { fn all_files(&self) -> Vec { let mut files = Vec::new(); files.push(self.descriptor_without_options.clone()); files.extend( self.resolver .type_resolver .deps .iter() .map(|p| p.descriptor.clone()), ); files } fn root_scope(&self) -> LookupScopeUnion2 { let (scopes, partial_scopes) = self .all_files() .into_iter() .partition::, _>(|f| ProtobufAbsPath::package_from_file_descriptor(f).is_root()); LookupScopeUnion2 { path: ProtobufAbsPath::root(), scopes: scopes.into_iter().map(LookupScope2::File).collect(), partial_scopes, } } fn lookup(&self, path: &ProtobufAbsPath) -> LookupScopeUnion2 { self.root_scope().lookup(&path.to_root_rel()) } fn find_message_by_abs_name( &self, path: &ProtobufAbsPath, ) -> anyhow::Result { let scope = self.lookup(path); scope.as_message() } fn scope_resolved_candidates_rel( scope: &ProtobufAbsPathRef, rel: &ProtobufRelPathRef, ) -> Vec { scope .self_and_parents() .into_iter() 
.map(|a| { let mut a = a.to_owned(); a.push_relative(rel); a }) .collect() } fn scope_resolved_candidates( scope: &ProtobufAbsPathRef, path: &ProtobufPath, ) -> Vec { match path { ProtobufPath::Abs(p) => vec![p.clone()], ProtobufPath::Rel(p) => Self::scope_resolved_candidates_rel(scope, p), } } fn find_extension_by_abs_path( &self, path: &ProtobufAbsPathRef, ) -> anyhow::Result> { let mut path = path.to_owned(); let extension = path.pop().unwrap(); let scope = self.lookup(&path); for ext in scope.extensions() { if ext.name() == extension.get() { return Ok(Some(ext.clone())); } } Ok(None) } fn find_extension_by_path( &self, scope: &ProtobufAbsPathRef, path: &ProtobufPath, ) -> anyhow::Result { for candidate in Self::scope_resolved_candidates(scope, path) { if let Some(e) = self.find_extension_by_abs_path(&candidate)? { return Ok(e); } } Err(OptionResolverError::ExtensionNotFound(path.to_string()).into()) } fn ext_resolve_field_ext( &self, scope: &ProtobufAbsPathRef, message: &MessageDescriptor, field_name: &ProtobufPath, ) -> anyhow::Result { let expected_extendee = ProtobufAbsPath::from_message(message); let field = self.find_extension_by_path(scope, field_name)?; if ProtobufAbsPath::new(field.proto().extendee()) != expected_extendee { return Err(OptionResolverError::WrongExtensionType( format!("{}", field_name), format!("{}", field.proto().extendee()), format!("{}", expected_extendee), ) .into()); } Ok(field) } fn ext_resolve_field( &self, scope: &ProtobufAbsPathRef, message: &MessageDescriptor, field: &ProtobufOptionNamePart, ) -> anyhow::Result { match field { ProtobufOptionNamePart::Direct(field) => match message.field_by_name(field.get()) { Some(field) => Ok(field), None => Err(OptionResolverError::UnknownFieldName(field.to_string()).into()), }, ProtobufOptionNamePart::Ext(field) => { Ok(self.ext_resolve_field_ext(scope, message, field)?) 
} } } fn custom_option_ext_step( &self, scope: &ProtobufAbsPathRef, options_type: &MessageDescriptor, unknown_fields: &mut UnknownFields, option_name: &ProtobufOptionNamePart, option_name_rem: &[ProtobufOptionNamePart], option_value: &ProtobufConstant, ) -> anyhow::Result<()> { let field = self.ext_resolve_field(scope, options_type, option_name)?; let field_type = TypeResolved::from_field(field.proto()); match option_name_rem.split_first() { Some((first, rem)) => { match field_type { TypeResolved::Message(message_name) => { let m = self.find_message_by_abs_name(&message_name)?; let mut message_unknown_fields = UnknownFields::new(); self.custom_option_ext_step( scope, &m, &mut message_unknown_fields, first, rem, option_value, )?; unknown_fields.add_length_delimited( field.proto().number() as u32, message_unknown_fields.write_to_bytes(), ); Ok(()) } TypeResolved::Group(..) => { // TODO: implement Ok(()) } _ => Err(OptionResolverError::ExtensionIsNotMessage(format!( "scope: {}, option name: {}", scope, option_name )) .into()), } } None => self .add_option_value_to_unknown_fields( &field_type, field.number() as u32, unknown_fields, option_value, &format!("{}", option_name), ) .map_err(|err| { err.context(format!( "parsing custom option `{}` value `{}` at `{}`", option_name, option_value, scope )) }), } } fn add_option_value_to_unknown_fields( &self, field_type: &TypeResolved, field_num: u32, unknown_fields: &mut UnknownFields, option_value: &ProtobufConstant, option_name_for_diag: &str, ) -> anyhow::Result<()> { let error = || { OptionResolverError::UnsupportedExtensionType( option_name_for_diag.to_owned(), format!("{:?}", field_type), option_value.clone(), ) }; match option_value { ProtobufConstant::U64(v) => match field_type { TypeResolved::Fixed64 => unknown_fields.add_value(field_num, Self::fixed64(*v)?), TypeResolved::Sfixed64 => unknown_fields.add_value(field_num, Self::sfixed64(*v)?), TypeResolved::Fixed32 => unknown_fields.add_value(field_num, 
Self::fixed32(*v)?), TypeResolved::Sfixed32 => unknown_fields.add_value(field_num, Self::sfixed32(*v)?), TypeResolved::Int32 => unknown_fields.add_value(field_num, Self::int32(*v)?), TypeResolved::Int64 => unknown_fields.add_value(field_num, Self::int64(*v)?), TypeResolved::Uint64 => unknown_fields.add_value(field_num, Self::uint64(*v)?), TypeResolved::Uint32 => unknown_fields.add_value(field_num, Self::uint32(*v)?), TypeResolved::Sint64 => unknown_fields.add_value(field_num, Self::sint64(*v)?), TypeResolved::Sint32 => unknown_fields.add_value(field_num, Self::sint32(*v)?), TypeResolved::Float => { unknown_fields.add_value(field_num, UnknownValue::float(*v as f32)) } TypeResolved::Double => { unknown_fields.add_value(field_num, UnknownValue::double(*v as f64)) } _ => return Err(error().into()), }, ProtobufConstant::I64(v) => match field_type { TypeResolved::Fixed64 => unknown_fields.add_value(field_num, Self::fixed64(*v)?), TypeResolved::Sfixed64 => unknown_fields.add_value(field_num, Self::sfixed64(*v)?), TypeResolved::Fixed32 => unknown_fields.add_value(field_num, Self::fixed32(*v)?), TypeResolved::Sfixed32 => unknown_fields.add_value(field_num, Self::sfixed32(*v)?), TypeResolved::Int32 => unknown_fields.add_value(field_num, Self::int32(*v)?), TypeResolved::Int64 => unknown_fields.add_value(field_num, Self::int64(*v)?), TypeResolved::Uint64 => unknown_fields.add_value(field_num, Self::uint64(*v)?), TypeResolved::Uint32 => unknown_fields.add_value(field_num, Self::uint32(*v)?), TypeResolved::Sint64 => unknown_fields.add_value(field_num, Self::sint64(*v)?), TypeResolved::Sint32 => unknown_fields.add_value(field_num, Self::sint32(*v)?), TypeResolved::Float => { unknown_fields.add_value(field_num, UnknownValue::float(*v as f32)) } TypeResolved::Double => { unknown_fields.add_value(field_num, UnknownValue::double(*v as f64)) } _ => return Err(error().into()), }, ProtobufConstant::F64(f) => match field_type { TypeResolved::Float => { unknown_fields.add_value(field_num, 
UnknownValue::float(*f as f32)) } TypeResolved::Double => { unknown_fields.add_value(field_num, UnknownValue::double(*f)) } TypeResolved::Fixed32 => { unknown_fields.add_value(field_num, UnknownValue::Fixed32(*f as u32)) } TypeResolved::Fixed64 => { unknown_fields.add_value(field_num, UnknownValue::Fixed64(*f as u64)) } TypeResolved::Sfixed32 => { unknown_fields.add_value(field_num, UnknownValue::sfixed32(*f as i32)) } TypeResolved::Sfixed64 => { unknown_fields.add_value(field_num, UnknownValue::sfixed64(*f as i64)) } TypeResolved::Sint64 => { unknown_fields.add_value(field_num, UnknownValue::sint64(*f as i64)) } TypeResolved::Sint32 => { unknown_fields.add_value(field_num, UnknownValue::sint32(*f as i32)) } TypeResolved::Int32 | TypeResolved::Int64 => { unknown_fields.add_value(field_num, UnknownValue::int64(*f as i64)) } TypeResolved::Uint32 | TypeResolved::Uint64 => { unknown_fields.add_value(field_num, UnknownValue::Varint(*f as u64)) } _ => return Err(error().into()), }, ProtobufConstant::Bool(b) => match field_type { TypeResolved::Bool => unknown_fields .add_value(field_num, UnknownValue::Varint(if *b { 1 } else { 0 })), _ => return Err(error().into()), }, ProtobufConstant::Ident(ident) => match field_type { TypeResolved::Enum(abs_path) => { let n = self .resolver .find_enum_by_abs_name(abs_path) .map_err(OptionResolverError::OtherError)? 
.values .iter() .find(|v| v.name == ident.to_string()) .map(|v| v.number) .ok_or_else(|| OptionResolverError::UnknownEnumValue(ident.to_string()))?; unknown_fields.add_value(field_num, UnknownValue::int32(n)); } _ => return Err(error().into()), }, ProtobufConstant::String(s) => match field_type { TypeResolved::String => unknown_fields.add_value( field_num, UnknownValue::LengthDelimited(s.decode_utf8()?.into_bytes()), ), TypeResolved::Bytes => unknown_fields .add_value(field_num, UnknownValue::LengthDelimited(s.decode_bytes()?)), _ => return Err(error().into()), }, ProtobufConstant::Message(message) => self.add_option_value_message_to_unknown_fields( field_type, field_num, unknown_fields, message, &option_name_for_diag, )?, ProtobufConstant::Repeated(list) => { for v in list { self.add_option_value_to_unknown_fields( field_type, field_num, unknown_fields, v, option_name_for_diag, )? } } } Ok(()) } fn custom_option_ext( &self, scope: &ProtobufAbsPathRef, options: &mut M, option_name: &ProtobufOptionNameExt, option_value: &ProtobufConstant, ) -> anyhow::Result<()> where M: MessageFull, { self.custom_option_ext_step( scope, &M::descriptor(), options.mut_unknown_fields(), &option_name.0[0], &option_name.0[1..], option_value, ) } fn fixed32( v: impl TryInto, ) -> anyhow::Result { Ok(UnknownValue::Fixed32(v.try_into()?)) } fn sfixed32( v: impl TryInto, ) -> anyhow::Result { Ok(UnknownValue::sfixed32(v.try_into()?)) } fn fixed64( v: impl TryInto, ) -> anyhow::Result { Ok(UnknownValue::Fixed64(v.try_into()?)) } fn sfixed64( v: impl TryInto, ) -> anyhow::Result { Ok(UnknownValue::sfixed64(v.try_into()?)) } fn int32( v: impl TryInto, ) -> anyhow::Result { Ok(UnknownValue::int32(v.try_into()?)) } fn int64( v: impl TryInto, ) -> anyhow::Result { Ok(UnknownValue::int64(v.try_into()?)) } fn uint32( v: impl TryInto, ) -> anyhow::Result { Ok(UnknownValue::Varint(v.try_into()? 
as u64)) } fn uint64( v: impl TryInto, ) -> anyhow::Result { Ok(UnknownValue::Varint(v.try_into()?)) } fn sint32( v: impl TryInto, ) -> anyhow::Result { Ok(UnknownValue::sint32(v.try_into()?)) } fn sint64( v: impl TryInto, ) -> anyhow::Result { Ok(UnknownValue::sint64(v.try_into()?)) } fn add_option_value_message_to_unknown_fields( &self, field_type: &TypeResolved, field_num: u32, options: &mut UnknownFields, option_value: &ProtobufConstantMessage, option_name_for_diag: &str, ) -> anyhow::Result<()> { match &field_type { TypeResolved::Message(ma) => { let m = self .resolver .find_message_by_abs_name(ma) .map_err(OptionResolverError::OtherError)? .t; let mut unknown_fields = UnknownFields::new(); for (n, v) in &option_value.fields { match n { ProtobufConstantMessageFieldName::Regular(n) => { let field = m .field_by_name(n.as_str()) .ok_or_else(|| OptionResolverError::UnknownFieldName(n.clone()))?; let field_type = self.resolver.field_type(&ma, n, &field.typ)?; self.add_option_value_to_unknown_fields( &field_type, field.number as u32, &mut unknown_fields, v, option_name_for_diag, ) .map_err(OptionResolverError::OtherError)?; } ProtobufConstantMessageFieldName::Extension(..) => { // TODO: implement extension fields in constants } ProtobufConstantMessageFieldName::AnyTypeUrl(..) 
=> { // TODO: implement any type url in constants } } } options.add_value( field_num, UnknownValue::LengthDelimited(unknown_fields.write_to_bytes()), ); Ok(()) } _ => Err(OptionResolverError::MessageFieldRequiresMessageConstant.into()), } } fn custom_option_builtin( &self, _scope: &ProtobufAbsPathRef, options: &mut M, option: &ProtobufIdent, option_value: &ProtobufConstant, ) -> anyhow::Result<()> where M: MessageFull, { if M::descriptor().full_name() == "google.protobuf.FieldOptions" { if option.get() == "default" || option.get() == "json_name" { // some options are written to non-options message and handled outside return Ok(()); } } match M::descriptor().field_by_name(option.get()) { Some(field) => { if field.is_repeated_or_map() { return Err(OptionResolverError::BuiltinOptionPointsToNonSingularField( M::descriptor().full_name().to_owned(), option.get().to_owned(), ) .into()); } field.set_singular_field( options, option_value.as_type(field.singular_runtime_type())?, ); return Ok(()); } None => { return Err(OptionResolverError::BuiltinOptionNotFound( M::descriptor().full_name().to_owned(), option.get().to_owned(), ) .into()) } } } fn custom_option( &self, scope: &ProtobufAbsPathRef, options: &mut M, option: &model::ProtobufOption, ) -> anyhow::Result<()> where M: MessageFull, { match &option.name { ProtobufOptionName::Builtin(simple) => { self.custom_option_builtin(scope, options, simple, &option.value) } ProtobufOptionName::Ext(e) => self.custom_option_ext(scope, options, e, &option.value), } } fn custom_options( &self, scope: &ProtobufAbsPathRef, input: &[model::ProtobufOption], ) -> anyhow::Result> where M: MessageFull, { if input.is_empty() { // Empty options do not have to represented to unset message field, // but this is what Google's parser does. 
return Ok(None); } let mut options = M::new(); for option in input { self.custom_option(scope, &mut options, option)?; } Ok(Some(options)) } fn file_options( &self, scope: &ProtobufAbsPath, input: &[model::ProtobufOption], ) -> anyhow::Result> { self.custom_options(scope, input) } fn enum_options( &self, scope: &ProtobufAbsPathRef, input: &[model::ProtobufOption], ) -> anyhow::Result> { self.custom_options(scope, input) } fn enum_value_options( &self, scope: &ProtobufAbsPathRef, input: &[model::ProtobufOption], ) -> anyhow::Result> { self.custom_options(scope, input) } fn field_options( &self, scope: &ProtobufAbsPathRef, input: &[model::ProtobufOption], ) -> anyhow::Result> { self.custom_options(scope, input) } fn message_options( &self, scope: &ProtobufAbsPathRef, input: &[model::ProtobufOption], ) -> anyhow::Result> { self.custom_options(scope, input) } fn oneof_options( &self, scope: &ProtobufAbsPathRef, input: &[model::ProtobufOption], ) -> anyhow::Result> { self.custom_options(scope, input) } fn method( &self, method_proto: &mut MethodDescriptorProto, method_model: &model::Method, ) -> anyhow::Result<()> { method_proto.options = self.service_method_options(&method_model.options)?.into(); Ok(()) } fn service_options( &self, input: &[model::ProtobufOption], ) -> anyhow::Result> { self.custom_options(&self.resolver.current_file.package, input) } fn service_method_options( &self, input: &[model::ProtobufOption], ) -> anyhow::Result> { self.custom_options(&self.resolver.current_file.package, input) } fn service( &self, service_proto: &mut ServiceDescriptorProto, service_model: &WithLoc, ) -> anyhow::Result<()> { service_proto.options = self.service_options(&service_model.options)?.into(); for service_method_model in &service_model.methods { let mut method_proto = service_proto .method .iter_mut() .find(|method| method.name() == service_method_model.name) .unwrap(); self.method(&mut method_proto, service_method_model)?; } Ok(()) } fn enum_value( &self, scope: 
&ProtobufAbsPathRef, enum_value_proto: &mut EnumValueDescriptorProto, enum_value_model: &model::EnumValue, ) -> anyhow::Result<()> { enum_value_proto.options = self .enum_value_options(scope, &enum_value_model.options)? .into(); Ok(()) } fn enumeration( &self, scope: &ProtobufAbsPathRef, enum_proto: &mut EnumDescriptorProto, enum_model: &WithLoc, ) -> anyhow::Result<()> { enum_proto.options = self.enum_options(scope, &enum_model.options)?.into(); for enum_value_model in &enum_model.values { let mut enum_value_proto = enum_proto .value .iter_mut() .find(|v| v.name() == enum_value_model.name) .unwrap(); self.enum_value(scope, &mut enum_value_proto, enum_value_model)?; } Ok(()) } fn oneof( &self, scope: &ProtobufAbsPathRef, oneof_proto: &mut OneofDescriptorProto, oneof_model: &model::OneOf, ) -> anyhow::Result<()> { oneof_proto.options = self.oneof_options(scope, &oneof_model.options)?.into(); Ok(()) } fn field( &self, scope: &ProtobufAbsPathRef, field_proto: &mut FieldDescriptorProto, field_model: &model::Field, ) -> anyhow::Result<()> { field_proto.options = self.field_options(scope, &field_model.options)?.into(); Ok(()) } fn message( &self, scope: &ProtobufAbsPathRef, message_proto: &mut DescriptorProto, message_model: &WithLoc, ) -> anyhow::Result<()> { message_proto.options = self.message_options(scope, &message_model.options)?.into(); let mut nested_scope = scope.to_owned(); nested_scope.push_simple(ProtobufIdentRef::new(&message_proto.name())); for field_model in &message_model.regular_fields_including_in_oneofs() { let mut field_proto = message_proto .field .iter_mut() .find(|field| field.name() == field_model.name) .unwrap(); self.field(&nested_scope, &mut field_proto, field_model)?; } for field_model in &message_model.extensions { let field_proto = message_proto .extension .iter_mut() .find(|field| field.name() == field_model.field.name) .unwrap(); self.field(&nested_scope, field_proto, &field_model.field)?; } for nested_message_model in 
&message_model.messages { let nested_message_proto = message_proto .nested_type .iter_mut() .find(|nested_message_proto| { nested_message_proto.name() == nested_message_model.name }) .unwrap(); self.message(&nested_scope, nested_message_proto, nested_message_model)?; } for nested_enum_model in &message_model.enums { let nested_enum_proto = message_proto .enum_type .iter_mut() .find(|nested_enum_proto| nested_enum_proto.name() == nested_enum_model.name) .unwrap(); self.enumeration(&nested_scope, nested_enum_proto, nested_enum_model)?; } for oneof_model in &message_model.oneofs() { let oneof_proto = message_proto .oneof_decl .iter_mut() .find(|oneof_proto| oneof_proto.name() == oneof_model.name) .unwrap(); self.oneof(&nested_scope, oneof_proto, oneof_model)?; } Ok(()) } pub(crate) fn file(&self, output: &mut FileDescriptorProto) -> anyhow::Result<()> { // TODO: use it to resolve messages. let _ = &self.descriptor_without_options; for message_model in &self.resolver.current_file.messages { let message_proto = output .message_type .iter_mut() .find(|m| m.name() == message_model.name) .unwrap(); self.message( &self.resolver.current_file.package, message_proto, message_model, )?; } for enum_model in &self.resolver.current_file.enums { let enum_proto = output .enum_type .iter_mut() .find(|e| e.name() == enum_model.name) .unwrap(); self.enumeration(&self.resolver.current_file.package, enum_proto, enum_model)?; } for service_proto in &mut output.service { let service_model = self .resolver .current_file .services .iter() .find(|s| s.name == service_proto.name()) .unwrap(); self.service(service_proto, service_model)?; } for extension_model in &self.resolver.current_file.extensions { let extension_proto = output .extension .iter_mut() .find(|e| e.name() == extension_model.field.name) .unwrap(); self.field( &self.resolver.current_file.package, extension_proto, &extension_model.field, )?; } output.options = self .file_options( &self.resolver.current_file.package, 
&self.resolver.current_file.options, )? .into(); Ok(()) } } protobuf-parse-3.7.2/src/pure/convert/type_resolver.rs000064400000000000000000000136271046102023000213100ustar 00000000000000use std::iter; use crate::model; use crate::model::WithLoc; use crate::protobuf_path::ProtobufPath; use crate::pure::convert::WithFullName; use crate::FileDescriptorPair; use crate::ProtobufAbsPath; use crate::ProtobufAbsPathRef; use crate::ProtobufIdent; use crate::ProtobufIdentRef; use crate::ProtobufRelPath; use crate::ProtobufRelPathRef; #[derive(thiserror::Error, Debug)] enum TypeResolverError { #[error("object is not found by path: {0}")] NotFoundByAbsPath(ProtobufAbsPath), #[error("object is not found by path `{0}` in scope `{1}`")] NotFoundByRelPath(ProtobufRelPath, ProtobufAbsPath), } pub(crate) enum MessageOrEnum<'a> { Message(&'a model::Message), Enum(&'a model::Enumeration), } impl MessageOrEnum<'_> { fn _descriptor_type(&self) -> protobuf::descriptor::field_descriptor_proto::Type { match *self { MessageOrEnum::Message(..) => { protobuf::descriptor::field_descriptor_proto::Type::TYPE_MESSAGE } MessageOrEnum::Enum(..) 
=> { protobuf::descriptor::field_descriptor_proto::Type::TYPE_ENUM } } } } #[derive(Clone)] enum LookupScope<'a> { File(&'a model::FileDescriptor), Message(&'a model::Message, ProtobufAbsPath), } impl<'a> LookupScope<'a> { fn current_path(&self) -> ProtobufAbsPath { match self { LookupScope::File(f) => f.package.clone(), LookupScope::Message(_, p) => p.clone(), } } fn messages(&self) -> &'a [model::WithLoc] { match self { &LookupScope::File(file) => &file.messages, &LookupScope::Message(messasge, _) => &messasge.messages, } } fn find_message(&self, simple_name: &ProtobufIdentRef) -> Option<&'a model::Message> { self.messages() .into_iter() .find(|m| m.t.name == simple_name.as_str()) .map(|m| &m.t) } fn enums(&self) -> &'a [WithLoc] { match self { &LookupScope::File(file) => &file.enums, &LookupScope::Message(messasge, _) => &messasge.enums, } } fn members(&self) -> Vec<(ProtobufIdent, MessageOrEnum<'a>)> { let mut r = Vec::new(); r.extend( self.enums() .into_iter() .map(|e| (ProtobufIdent::from(&e.name[..]), MessageOrEnum::Enum(e))), ); r.extend(self.messages().into_iter().map(|m| { ( ProtobufIdent::from(&m.t.name[..]), MessageOrEnum::Message(&m.t), ) })); r } fn find_member(&self, simple_name: &ProtobufIdentRef) -> Option> { self.members() .into_iter() .filter_map(|(member_name, message_or_enum)| { if member_name.as_ref() == simple_name { Some(message_or_enum) } else { None } }) .next() } pub(crate) fn find_message_or_enum( &self, path: &ProtobufRelPathRef, ) -> Option>> { let current_path = self.current_path(); let (first, rem) = match path.split_first_rem() { Some(x) => x, None => return None, }; if rem.is_empty() { match self.find_member(first) { Some(message_or_enum) => { let mut result_path = current_path.clone(); result_path.push_simple(first); Some(WithFullName { full_name: result_path, t: message_or_enum, }) } None => None, } } else { match self.find_message(first) { Some(message) => { let mut message_path = current_path.clone(); 
message_path.push_simple(ProtobufIdentRef::new(&message.name)); let message_scope = LookupScope::Message(message, message_path); message_scope.find_message_or_enum(rem) } None => None, } } } } pub(crate) struct TypeResolver<'a> { pub(crate) current_file: &'a model::FileDescriptor, pub(crate) deps: &'a [FileDescriptorPair], } impl<'a> TypeResolver<'a> { pub(crate) fn all_files(&self) -> Vec<&'a model::FileDescriptor> { iter::once(self.current_file) .chain(self.deps.iter().map(|p| &p.parsed)) .collect() } pub(crate) fn find_message_or_enum_by_abs_name( &self, absolute_path: &ProtobufAbsPath, ) -> anyhow::Result>> { for file in self.all_files() { if let Some(relative) = absolute_path.remove_prefix(&file.package) { if let Some(w) = LookupScope::File(file).find_message_or_enum(&relative) { return Ok(w); } } } return Err(TypeResolverError::NotFoundByAbsPath(absolute_path.clone()).into()); } pub(crate) fn resolve_message_or_enum( &self, scope: &ProtobufAbsPathRef, name: &ProtobufPath, ) -> anyhow::Result> { match name { ProtobufPath::Abs(name) => Ok(self.find_message_or_enum_by_abs_name(&name)?), ProtobufPath::Rel(name) => { // find message or enum in current package for p in scope.self_and_parents() { let mut fq = p.to_owned(); fq.push_relative(&name); if let Ok(me) = self.find_message_or_enum_by_abs_name(&fq) { return Ok(me); } } Err(TypeResolverError::NotFoundByRelPath(name.clone(), scope.to_owned()).into()) } } } } protobuf-parse-3.7.2/src/pure/mod.rs000064400000000000000000000003771046102023000155030ustar 00000000000000//! Pure rust `.proto` file parser. pub(crate) mod convert; pub(crate) mod model; pub(crate) mod parse_and_typecheck; pub(crate) mod parse_dependencies; mod parser; pub use parse_and_typecheck::parse_and_typecheck_custom; pub use parse_dependencies::*; protobuf-parse-3.7.2/src/pure/model.rs000064400000000000000000000411641046102023000160230ustar 00000000000000//! A nom-based protobuf file parser //! //! 
This crate can be seen as a rust transcription of the //! [descriptor.proto](https://github.com/google/protobuf/blob/master/src/google/protobuf/descriptor.proto) file use std::fmt; use std::fmt::Write; use std::ops::Deref; use std::ops::RangeInclusive; use indexmap::IndexMap; use protobuf::reflect::ReflectValueBox; use protobuf::reflect::RuntimeType; use protobuf_support::lexer::float::format_protobuf_float; use protobuf_support::lexer::loc::Loc; use protobuf_support::lexer::str_lit::StrLit; use crate::model; use crate::proto_path::ProtoPathBuf; use crate::protobuf_abs_path::ProtobufAbsPath; use crate::protobuf_ident::ProtobufIdent; use crate::protobuf_path::ProtobufPath; use crate::pure::parser::Parser; pub use crate::pure::parser::ParserErrorWithLocation; #[derive(thiserror::Error, Debug)] enum ModelError { #[error("cannot convert value `{1}` to type `{0}`")] InconvertibleValue(RuntimeType, model::ProtobufConstant), } #[derive(Debug, Clone, PartialEq)] pub(crate) struct WithLoc { pub loc: Loc, pub t: T, } impl Deref for WithLoc { type Target = T; fn deref(&self) -> &Self::Target { &self.t } } impl WithLoc { pub fn with_loc(loc: Loc) -> impl FnOnce(T) -> WithLoc { move |t| WithLoc { t, loc: loc.clone(), } } } /// Protobuf syntax. #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub(crate) enum Syntax { /// Protobuf syntax [2](https://developers.google.com/protocol-buffers/docs/proto) (default) Proto2, /// Protobuf syntax [3](https://developers.google.com/protocol-buffers/docs/proto3) Proto3, } impl Default for Syntax { fn default() -> Syntax { Syntax::Proto2 } } /// A field rule #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] pub(crate) enum Rule { /// A well-formed message can have zero or one of this field (but not more than one). Optional, /// This field can be repeated any number of times (including zero) in a well-formed message. /// The order of the repeated values will be preserved. Repeated, /// A well-formed message must have exactly one of this field. 
Required, } impl Rule { pub(crate) const ALL: [Rule; 3] = [Rule::Optional, Rule::Repeated, Rule::Required]; pub(crate) const fn as_str(&self) -> &'static str { match self { Rule::Optional => "optional", Rule::Repeated => "repeated", Rule::Required => "required", } } } /// Protobuf group #[derive(Debug, Clone, PartialEq)] pub(crate) struct Group { /// Group name pub name: String, pub fields: Vec>, } /// Protobuf supported field types #[derive(Debug, Clone, PartialEq)] pub(crate) enum FieldType { /// Protobuf int32 /// /// # Remarks /// /// Uses variable-length encoding. Inefficient for encoding negative numbers – if /// your field is likely to have negative values, use sint32 instead. Int32, /// Protobuf int64 /// /// # Remarks /// /// Uses variable-length encoding. Inefficient for encoding negative numbers – if /// your field is likely to have negative values, use sint64 instead. Int64, /// Protobuf uint32 /// /// # Remarks /// /// Uses variable-length encoding. Uint32, /// Protobuf uint64 /// /// # Remarks /// /// Uses variable-length encoding. Uint64, /// Protobuf sint32 /// /// # Remarks /// /// Uses ZigZag variable-length encoding. Signed int value. These more efficiently /// encode negative numbers than regular int32s. Sint32, /// Protobuf sint64 /// /// # Remarks /// /// Uses ZigZag variable-length encoding. Signed int value. These more efficiently /// encode negative numbers than regular int32s. Sint64, /// Protobuf bool Bool, /// Protobuf fixed64 /// /// # Remarks /// /// Always eight bytes. More efficient than uint64 if values are often greater than 2^56. Fixed64, /// Protobuf sfixed64 /// /// # Remarks /// /// Always eight bytes. Sfixed64, /// Protobuf double Double, /// Protobuf string /// /// # Remarks /// /// A string must always contain UTF-8 encoded or 7-bit ASCII text. String, /// Protobuf bytes /// /// # Remarks /// /// May contain any arbitrary sequence of bytes. Bytes, /// Protobut fixed32 /// /// # Remarks /// /// Always four bytes. 
More efficient than uint32 if values are often greater than 2^28. Fixed32, /// Protobut sfixed32 /// /// # Remarks /// /// Always four bytes. Sfixed32, /// Protobut float Float, /// Protobuf message or enum (holds the name) MessageOrEnum(ProtobufPath), /// Protobut map Map(Box<(FieldType, FieldType)>), /// Protobuf group (deprecated) Group(Group), } /// A Protobuf Field #[derive(Debug, Clone, PartialEq)] pub(crate) struct Field { /// Field name pub name: String, /// Field `Rule` pub rule: Option, /// Field type pub typ: FieldType, /// Tag number pub number: i32, /// Non-builtin options pub options: Vec, } /// A Protobuf field of oneof group #[derive(Debug, Clone, PartialEq)] pub(crate) enum FieldOrOneOf { Field(WithLoc), OneOf(OneOf), } /// A protobuf message #[derive(Debug, Clone, Default)] pub(crate) struct Message { /// Message name pub name: String, /// Message fields and oneofs pub fields: Vec>, /// Message reserved numbers pub reserved_nums: Vec>, /// Message reserved names pub reserved_names: Vec, /// Nested messages pub messages: Vec>, /// Nested enums pub enums: Vec>, /// Non-builtin options pub options: Vec, /// Extension field numbers pub extension_ranges: Vec>, /// Extensions pub extensions: Vec>, } impl Message { pub fn regular_fields_including_in_oneofs(&self) -> Vec<&WithLoc> { self.fields .iter() .flat_map(|fo| match &fo.t { FieldOrOneOf::Field(f) => vec![f], FieldOrOneOf::OneOf(o) => o.fields.iter().collect(), }) .collect() } /** Find a field by name. 
*/ pub fn field_by_name(&self, name: &str) -> Option<&Field> { self.regular_fields_including_in_oneofs() .iter() .find(|f| f.t.name == name) .map(|f| &f.t) } pub fn _nested_extensions(&self) -> Vec<&Group> { self.regular_fields_including_in_oneofs() .into_iter() .flat_map(|f| match &f.t.typ { FieldType::Group(g) => Some(g), _ => None, }) .collect() } #[cfg(test)] pub fn regular_fields_for_test(&self) -> Vec<&Field> { self.fields .iter() .flat_map(|fo| match &fo.t { FieldOrOneOf::Field(f) => Some(&f.t), FieldOrOneOf::OneOf(_) => None, }) .collect() } pub(crate) fn oneofs(&self) -> Vec<&OneOf> { self.fields .iter() .flat_map(|fo| match &fo.t { FieldOrOneOf::Field(_) => None, FieldOrOneOf::OneOf(o) => Some(o), }) .collect() } } /// A protobuf enumeration field #[derive(Debug, Clone)] pub(crate) struct EnumValue { /// enum value name pub name: String, /// enum value number pub number: i32, /// enum value options pub options: Vec, } /// A protobuf enumerator #[derive(Debug, Clone)] pub(crate) struct Enumeration { /// enum name pub name: String, /// enum values pub values: Vec, /// enum options pub options: Vec, /// enum reserved numbers pub reserved_nums: Vec>, /// enum reserved names pub reserved_names: Vec, } /// A OneOf #[derive(Debug, Clone, Default, PartialEq)] pub(crate) struct OneOf { /// OneOf name pub name: String, /// OneOf fields pub fields: Vec>, /// oneof options pub options: Vec, } #[derive(Debug, Clone)] pub(crate) struct Extension { /// Extend this type with field pub extendee: ProtobufPath, /// Extension field pub field: WithLoc, } /// Service method #[derive(Debug, Clone)] pub(crate) struct Method { /// Method name pub name: String, /// Input type pub input_type: ProtobufPath, /// Output type pub output_type: ProtobufPath, /// If this method is client streaming #[allow(dead_code)] // TODO pub client_streaming: bool, /// If this method is server streaming #[allow(dead_code)] // TODO pub server_streaming: bool, /// Method options pub options: Vec, } /// 
Service definition #[derive(Debug, Clone)] pub(crate) struct Service { /// Service name pub name: String, pub methods: Vec, pub options: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub(crate) struct AnyTypeUrl { pub(crate) prefix: String, pub(crate) full_type_name: ProtobufPath, } impl fmt::Display for AnyTypeUrl { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}/{}", self.prefix, self.full_type_name) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub(crate) enum ProtobufConstantMessageFieldName { Regular(String), Extension(ProtobufPath), AnyTypeUrl(AnyTypeUrl), } impl fmt::Display for ProtobufConstantMessageFieldName { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ProtobufConstantMessageFieldName::Regular(s) => write!(f, "{}", s), ProtobufConstantMessageFieldName::Extension(p) => write!(f, "[{}]", p), ProtobufConstantMessageFieldName::AnyTypeUrl(a) => write!(f, "[{}]", a), } } } #[derive(Debug, Clone, PartialEq, Default)] pub(crate) struct ProtobufConstantMessage { pub(crate) fields: IndexMap, } /// constant = fullIdent | /// ( [ "-" | "+" ] intLit ) | /// ( [ "-" | "+" ] floatLit ) | /// strLit | /// boolLit | /// messageValue /// /// https://protobuf.dev/reference/protobuf/proto2-spec/#constant /// https://protobuf.dev/reference/protobuf/proto3-spec/#constant /// https://protobuf.dev/reference/protobuf/textformat-spec/#fields #[derive(Debug, Clone, PartialEq)] pub(crate) enum ProtobufConstant { U64(u64), I64(i64), F64(f64), // TODO: eq Bool(bool), Ident(ProtobufPath), String(StrLit), Message(ProtobufConstantMessage), Repeated(Vec), } impl fmt::Display for ProtobufConstant { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ProtobufConstant::U64(v) => write!(f, "{}", v), ProtobufConstant::I64(v) => write!(f, "{}", v), ProtobufConstant::F64(v) => write!(f, "{}", format_protobuf_float(*v)), ProtobufConstant::Bool(v) => write!(f, "{}", v), ProtobufConstant::Ident(v) => write!(f, "{}", 
v), ProtobufConstant::String(v) => write!(f, "{}", v), // TODO: text format explicitly ProtobufConstant::Message(v) => write!(f, "{:?}", v), ProtobufConstant::Repeated(v) => write!(f, "{:?}", v), } } } impl ProtobufConstantMessage { pub fn format(&self) -> String { let mut s = String::new(); write!(s, "{{ ").unwrap(); for (n, v) in &self.fields { match v { ProtobufConstant::Message(m) => write!(s, "{} {}", n, m.format()).unwrap(), v => write!(s, "{}: {} ", n, v.format()).unwrap(), } } write!(s, "}}").unwrap(); s } } impl ProtobufConstant { pub fn format(&self) -> String { match *self { ProtobufConstant::U64(u) => u.to_string(), ProtobufConstant::I64(i) => i.to_string(), ProtobufConstant::F64(f) => format_protobuf_float(f), ProtobufConstant::Bool(b) => b.to_string(), ProtobufConstant::Ident(ref i) => format!("{}", i), ProtobufConstant::String(ref s) => s.quoted(), ProtobufConstant::Message(ref s) => s.format(), ProtobufConstant::Repeated(ref l) => { let mut s = String::from("["); let mut it = l.iter().peekable(); while let Some(constant) = it.next() { s.push_str(&constant.format()); if it.peek().is_some() { s.push(','); } } s.push(']'); s } } } /** Interpret .proto constant as an reflection value. */ pub fn as_type(&self, ty: RuntimeType) -> anyhow::Result { match (self, &ty) { (ProtobufConstant::Ident(ident), RuntimeType::Enum(e)) => { if let Some(v) = e.value_by_name(&ident.to_string()) { return Ok(ReflectValueBox::Enum(e.clone(), v.value())); } } (ProtobufConstant::Bool(b), RuntimeType::Bool) => return Ok(ReflectValueBox::Bool(*b)), (ProtobufConstant::String(lit), RuntimeType::String) => { return Ok(ReflectValueBox::String(lit.decode_utf8()?)) } _ => {} } Err(ModelError::InconvertibleValue(ty.clone(), self.clone()).into()) } } /// Equivalent of `UninterpretedOption.NamePart`. 
#[derive(Debug, Clone, PartialEq)] pub(crate) enum ProtobufOptionNamePart { Direct(ProtobufIdent), Ext(ProtobufPath), } impl fmt::Display for ProtobufOptionNamePart { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ProtobufOptionNamePart::Direct(n) => write!(f, "{}", n), ProtobufOptionNamePart::Ext(n) => write!(f, "({})", n), } } } #[derive(Debug, Clone, PartialEq)] pub(crate) struct ProtobufOptionNameExt(pub Vec); #[derive(Debug, Clone, PartialEq)] pub(crate) enum ProtobufOptionName { Builtin(ProtobufIdent), Ext(ProtobufOptionNameExt), } impl ProtobufOptionName { pub fn simple(name: &str) -> ProtobufOptionName { ProtobufOptionName::Builtin(ProtobufIdent::new(name)) } } impl fmt::Display for ProtobufOptionNameExt { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { for (index, comp) in self.0.iter().enumerate() { if index != 0 { write!(f, ".")?; } write!(f, "{}", comp)?; } Ok(()) } } impl fmt::Display for ProtobufOptionName { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ProtobufOptionName::Builtin(n) => write!(f, "{}", n), ProtobufOptionName::Ext(n) => write!(f, "{}", n), } } } #[derive(Debug, Clone, PartialEq)] pub(crate) struct ProtobufOption { pub name: ProtobufOptionName, pub value: ProtobufConstant, } /// Visibility of import statement #[derive(Debug, Clone, Eq, PartialEq)] pub(crate) enum ImportVis { Default, Public, Weak, } impl Default for ImportVis { fn default() -> Self { ImportVis::Default } } /// Import statement #[derive(Debug, Default, Clone)] pub(crate) struct Import { pub path: ProtoPathBuf, pub vis: ImportVis, } /// A File descriptor representing a whole .proto file #[derive(Debug, Default, Clone)] pub(crate) struct FileDescriptor { /// Imports pub imports: Vec, /// Package pub package: ProtobufAbsPath, /// Protobuf Syntax pub syntax: Syntax, /// Top level messages pub messages: Vec>, /// Enums pub enums: Vec>, /// Extensions pub extensions: Vec>, /// Services pub services: Vec>, /// 
Non-builtin options pub options: Vec, } impl FileDescriptor { /// Parses a .proto file content into a `FileDescriptor` pub fn parse>(file: S) -> Result { let mut parser = Parser::new(file.as_ref()); match parser.next_proto() { Ok(r) => Ok(r), Err(error) => { let Loc { line, col } = parser.tokenizer.loc(); Err(ParserErrorWithLocation { error, line, col }) } } } } protobuf-parse-3.7.2/src/pure/parse_and_typecheck.rs000064400000000000000000000254421046102023000207170ustar 00000000000000use std::fmt; use std::fs; use std::io; use std::path::Path; use std::path::PathBuf; use std::str; use indexmap::IndexMap; use protobuf::descriptor::FileDescriptorProto; use protobuf::reflect::FileDescriptor; use crate::parse_and_typecheck::ParsedAndTypechecked; use crate::proto; use crate::proto_path::ProtoPath; use crate::proto_path::ProtoPathBuf; use crate::pure::convert; use crate::pure::model; use crate::FileDescriptorPair; use crate::Parser; #[derive(Debug, thiserror::Error)] enum ParseAndTypeckError { #[error("file `{0}` content is not UTF-8")] FileContentIsNotUtf8(String), #[error("protobuf path `{0}` is not found in import path {1}")] FileNotFoundInImportPath(String, String), #[error("file `{0}` must reside in include path {1}")] FileMustResideInImportPath(String, String), #[error("could not read file `{0}`: {1}")] CouldNotReadFile(String, io::Error), } #[derive(Debug, thiserror::Error)] #[error("error in `{file}`: {error}")] struct WithFileError { file: String, #[source] error: anyhow::Error, } /// Resolve `.proto` files. `Display` is used for error messages. pub trait ProtoPathResolver: fmt::Display { /// Resolve a `.proto` file. /// /// Return `None` if a path is unknown, and if a path is a built-in protobuf file, /// like `google/protobuf/descriptor.proto`, it will be handled by the library. 
fn resolve(&self, path: &ProtoPath) -> anyhow::Result>; } struct Run where R: ProtoPathResolver, { parsed_files: IndexMap, resolver: R, } impl Run where R: ProtoPathResolver, { fn file_and_all_deps_already_parsed( &self, protobuf_path: &ProtoPath, result: &mut IndexMap, ) { if let Some(_) = result.get(protobuf_path) { return; } let pair = self .parsed_files .get(protobuf_path) .expect("must be already parsed"); result.insert(protobuf_path.to_proto_path_buf(), pair.clone()); self.all_deps_already_parsed(&pair.parsed, result); } fn all_deps_already_parsed( &self, parsed: &model::FileDescriptor, result: &mut IndexMap, ) { for import in &parsed.imports { self.file_and_all_deps_already_parsed(&import.path, result); } } fn add_file_content( &mut self, protobuf_path: &ProtoPath, resolved: &ResolvedProtoFile, ) -> anyhow::Result<()> { let content = str::from_utf8(&resolved.content) .map_err(|_| ParseAndTypeckError::FileContentIsNotUtf8(protobuf_path.to_string()))?; let parsed = model::FileDescriptor::parse(&content).map_err(|e| WithFileError { file: resolved.path.clone(), error: e.into(), })?; for import in &parsed.imports { self.add_imported_file(&import.path)?; } let mut this_file_deps = IndexMap::new(); self.all_deps_already_parsed(&parsed, &mut this_file_deps); let this_file_deps: Vec<_> = this_file_deps.into_iter().map(|(_, v)| v).collect(); let descriptor_proto = convert::file_descriptor(protobuf_path, &parsed, &this_file_deps) .map_err(|e| WithFileError { file: resolved.path.clone(), error: e.into(), })?; let deps: Vec = self .parsed_files .values() .map(|v| v.descriptor.clone()) .collect(); let descriptor = FileDescriptor::new_dynamic(descriptor_proto.clone(), &deps)?; self.parsed_files.insert( protobuf_path.to_proto_path_buf(), FileDescriptorPair { parsed, descriptor_proto, descriptor, }, ); Ok(()) } fn add_imported_file(&mut self, protobuf_path: &ProtoPath) -> anyhow::Result<()> { if let Some(_) = self.parsed_files.get(protobuf_path) { return Ok(()); } let 
resolved = self.resolver.resolve(protobuf_path)?; if let Some(resolved) = resolved { return self.add_file_content(protobuf_path, &resolved); } let embedded = match protobuf_path.to_str() { "rustproto.proto" => Some(proto::RUSTPROTO_PROTO), "google/protobuf/any.proto" => Some(proto::ANY_PROTO), "google/protobuf/api.proto" => Some(proto::API_PROTO), "google/protobuf/descriptor.proto" => Some(proto::DESCRIPTOR_PROTO), "google/protobuf/duration.proto" => Some(proto::DURATION_PROTO), "google/protobuf/empty.proto" => Some(proto::EMPTY_PROTO), "google/protobuf/field_mask.proto" => Some(proto::FIELD_MASK_PROTO), "google/protobuf/source_context.proto" => Some(proto::SOURCE_CONTEXT_PROTO), "google/protobuf/struct.proto" => Some(proto::STRUCT_PROTO), "google/protobuf/timestamp.proto" => Some(proto::TIMESTAMP_PROTO), "google/protobuf/type.proto" => Some(proto::TYPE_PROTO), "google/protobuf/wrappers.proto" => Some(proto::WRAPPERS_PROTO), _ => None, }; match embedded { Some(content) => self.add_file_content( protobuf_path, &ResolvedProtoFile { path: protobuf_path.to_string(), content: content.as_bytes().to_vec(), }, ), None => Err(ParseAndTypeckError::FileNotFoundInImportPath( protobuf_path.to_string(), format!("{}", self.resolver), ) .into()), } } } pub(crate) fn path_to_proto_path( path: &Path, includes: &[PathBuf], ) -> anyhow::Result { for include in includes { if include == Path::new(".") && path.is_relative() { // Special handling of `.` to allow using `.` as an include path // and `foo.proto` as input. return ProtoPathBuf::from_path(path); } match path.strip_prefix(include) { Ok(stripped) => return ProtoPathBuf::from_path(stripped), Err(_) => continue, } } Err(ParseAndTypeckError::FileMustResideInImportPath( path.display().to_string(), format!("{:?}", includes), ) .into()) } /// `.proto` file result provided from the [`ProtoPathResolver`]. pub struct ResolvedProtoFile { /// For error reporting. pub path: String, /// File content. 
pub content: Vec, } fn fs_resolver(includes: &[PathBuf]) -> impl ProtoPathResolver { struct Impl { includes: Vec, } impl fmt::Display for Impl { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:?}", self.includes) } } impl ProtoPathResolver for Impl { fn resolve(&self, proto_path: &ProtoPath) -> anyhow::Result> { for include_dir in &self.includes { let fs_path = include_dir.join(proto_path.to_path()); match fs::read_to_string(&fs_path) { Ok(content) => { return Ok(Some(ResolvedProtoFile { path: fs_path.display().to_string(), content: content.into_bytes(), })) } Err(e) if e.kind() == io::ErrorKind::NotFound => continue, Err(e) => { return Err(ParseAndTypeckError::CouldNotReadFile( fs_path.display().to_string(), e, ) .into()) } } } Ok(None) } } Impl { includes: includes.to_vec(), } } /// Parse `.proto` files using pure Rust implementation. pub fn parse_and_typecheck(parser: &Parser) -> anyhow::Result { let mut run = Run { parsed_files: IndexMap::new(), resolver: fs_resolver(&parser.includes), }; let relative_paths = parser .inputs .iter() .map(|input| Ok((path_to_proto_path(input, &parser.includes)?, input))) .collect::>>()?; for (proto_path, path) in &relative_paths { let content = fs::read_to_string(path) .map_err(|e| ParseAndTypeckError::CouldNotReadFile(path.display().to_string(), e))?; run.add_file_content( proto_path, &ResolvedProtoFile { path: path.display().to_string(), content: content.into_bytes(), }, )?; } let file_descriptors: Vec<_> = run .parsed_files .into_iter() .map(|(_, v)| v.descriptor_proto) .collect(); Ok(ParsedAndTypechecked { relative_paths: relative_paths.into_iter().map(|(p, _)| p).collect(), file_descriptors, parser: "pure".to_owned(), }) } /// TODO: this API is to be refactored. 
pub fn parse_and_typecheck_custom( input: &[ProtoPathBuf], resolver: impl ProtoPathResolver, ) -> anyhow::Result> { let mut run = Run { parsed_files: IndexMap::new(), resolver, }; for proto_path in input { run.add_imported_file(proto_path)?; } Ok(run .parsed_files .into_iter() .map(|(_, v)| v.descriptor_proto) .collect()) } #[cfg(test)] mod test { use std::fmt; use crate::proto_path::ProtoPath; use crate::pure::parse_and_typecheck::ProtoPathResolver; use crate::pure::parse_and_typecheck::ResolvedProtoFile; use crate::ProtoPathBuf; #[test] fn parse_and_typecheck_custom() { struct ResolverImpl; impl fmt::Display for ResolverImpl { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ResolverImpl") } } impl ProtoPathResolver for ResolverImpl { fn resolve(&self, proto_path: &ProtoPath) -> anyhow::Result> { if proto_path == "xx.proto" { Ok(Some(ResolvedProtoFile { path: "xx.proto".to_string(), content: "syntax = 'proto3'; message Foo {}".as_bytes().to_vec(), })) } else { Ok(None) } } } let resolved = super::parse_and_typecheck_custom( &[ProtoPathBuf::new("xx.proto".to_owned()).unwrap()], ResolverImpl, ) .unwrap(); assert_eq!(1, resolved.len()); assert_eq!("Foo", resolved[0].message_type[0].name()); } } protobuf-parse-3.7.2/src/pure/parse_dependencies.rs000064400000000000000000000021721046102023000205370ustar 00000000000000use protobuf::descriptor::FileDescriptorProto; use crate::pure::convert::populate_dependencies; use crate::pure::model; use crate::pure::parser::ParserErrorWithLocation; /// Parse imports from a `.proto` file. /// /// The result is [`FileDescriptorProto`] object with only `*dependency` fields filled. 
pub fn parse_dependencies(content: &str) -> Result { let input = model::FileDescriptor::parse(content)?; let mut output = FileDescriptorProto::new(); populate_dependencies(&input, &mut output); Ok(output) } #[cfg(test)] mod test { #[test] fn parse_dependencies() { let deps = crate::pure::parse_dependencies::parse_dependencies( r" syntax = 'proto3'; import 'google/protobuf/field_mask.proto'; import public 'google/protobuf/struct.proto'; message IgnoreMe {} ", ) .unwrap(); assert_eq!( &[ "google/protobuf/field_mask.proto", "google/protobuf/struct.proto", ], &deps.dependency[..] ); assert_eq!(&[1], &deps.public_dependency[..]); } } protobuf-parse-3.7.2/src/pure/parser.rs000064400000000000000000001533411046102023000162200ustar 00000000000000use std::ops::RangeInclusive; use std::str; use protobuf_support::lexer::int; use protobuf_support::lexer::lexer_impl::LexerError; use protobuf_support::lexer::num_lit::NumLit; use protobuf_support::lexer::parser_language::ParserLanguage; use protobuf_support::lexer::str_lit::StrLitDecodeError; use protobuf_support::lexer::token::Token; use protobuf_support::lexer::tokenizer::Tokenizer; use protobuf_support::lexer::tokenizer::TokenizerError; use crate::model::AnyTypeUrl; use crate::model::ProtobufConstantMessageFieldName; use crate::proto_path::ProtoPathBuf; use crate::protobuf_abs_path::ProtobufAbsPath; use crate::protobuf_ident::ProtobufIdent; use crate::protobuf_path::ProtobufPath; use crate::protobuf_rel_path::ProtobufRelPath; use crate::pure::model; use crate::pure::model::EnumValue; use crate::pure::model::Enumeration; use crate::pure::model::Extension; use crate::pure::model::Field; use crate::pure::model::FieldOrOneOf; use crate::pure::model::FieldType; use crate::pure::model::FileDescriptor; use crate::pure::model::Group; use crate::pure::model::ImportVis; use crate::pure::model::Message; use crate::pure::model::Method; use crate::pure::model::OneOf; use crate::pure::model::ProtobufConstant; use 
crate::pure::model::ProtobufConstantMessage; use crate::pure::model::ProtobufOption; use crate::pure::model::ProtobufOptionName; use crate::pure::model::ProtobufOptionNameExt; use crate::pure::model::ProtobufOptionNamePart; use crate::pure::model::Rule; use crate::pure::model::Service; use crate::pure::model::Syntax; use crate::pure::model::WithLoc; /// Basic information about parsing error. #[derive(Debug, thiserror::Error)] pub(crate) enum ParserError { #[error("{0}")] TokenizerError(#[source] TokenizerError), // TODO #[error("incorrect input")] IncorrectInput, #[error("expecting a constant")] ExpectConstant, #[error("unknown syntax")] UnknownSyntax, #[error("integer overflow")] IntegerOverflow, #[error("label not allowed")] LabelNotAllowed, #[error("label required")] LabelRequired, #[error("group name should start with upper case")] GroupNameShouldStartWithUpperCase, #[error("map field not allowed")] MapFieldNotAllowed, #[error("string literal decode error: {0}")] StrLitDecodeError(#[source] StrLitDecodeError), #[error("lexer error: {0}")] LexerError(#[source] LexerError), #[error("oneof in group")] OneOfInGroup, #[error("oneof in oneof")] OneOfInOneOf, #[error("oneof in extend")] OneOfInExtend, } impl From for ParserError { fn from(e: TokenizerError) -> Self { ParserError::TokenizerError(e) } } impl From for ParserError { fn from(e: StrLitDecodeError) -> Self { ParserError::StrLitDecodeError(e) } } impl From for ParserError { fn from(e: LexerError) -> Self { ParserError::LexerError(e) } } impl From for ParserError { fn from(_: int::Overflow) -> Self { ParserError::IntegerOverflow } } #[derive(Debug, thiserror::Error)] #[error("at {line}:{col}: {error}")] pub struct ParserErrorWithLocation { #[source] pub error: anyhow::Error, /// 1-based pub line: u32, /// 1-based pub col: u32, } trait ToI32 { fn to_i32(&self) -> anyhow::Result; } trait ToI64 { fn to_i64(&self) -> anyhow::Result; } impl ToI32 for u64 { fn to_i32(&self) -> anyhow::Result { if *self <= 
i32::max_value() as u64 { Ok(*self as i32) } else { Err(ParserError::IntegerOverflow.into()) } } } impl ToI32 for i64 { fn to_i32(&self) -> anyhow::Result { if *self <= i32::max_value() as i64 && *self >= i32::min_value() as i64 { Ok(*self as i32) } else { Err(ParserError::IntegerOverflow.into()) } } } impl ToI64 for u64 { fn to_i64(&self) -> anyhow::Result { if *self <= i64::max_value() as u64 { Ok(*self as i64) } else { Err(ParserError::IntegerOverflow.into()) } } } #[derive(Clone)] pub(crate) struct Parser<'a> { pub tokenizer: Tokenizer<'a>, syntax: Syntax, } #[derive(Copy, Clone)] enum MessageBodyParseMode { MessageProto2, MessageProto3, Oneof, ExtendProto2, ExtendProto3, } impl MessageBodyParseMode { fn label_allowed(&self, label: Rule) -> bool { match label { Rule::Repeated => match *self { MessageBodyParseMode::MessageProto2 | MessageBodyParseMode::MessageProto3 | MessageBodyParseMode::ExtendProto2 | MessageBodyParseMode::ExtendProto3 => true, MessageBodyParseMode::Oneof => false, }, Rule::Optional => match *self { MessageBodyParseMode::MessageProto2 | MessageBodyParseMode::ExtendProto2 => true, MessageBodyParseMode::MessageProto3 | MessageBodyParseMode::ExtendProto3 => true, MessageBodyParseMode::Oneof => false, }, Rule::Required => match *self { MessageBodyParseMode::MessageProto2 | MessageBodyParseMode::ExtendProto2 => true, MessageBodyParseMode::MessageProto3 | MessageBodyParseMode::ExtendProto3 => false, MessageBodyParseMode::Oneof => false, }, } } fn some_label_required(&self) -> bool { match *self { MessageBodyParseMode::MessageProto2 | MessageBodyParseMode::ExtendProto2 => true, MessageBodyParseMode::MessageProto3 | MessageBodyParseMode::ExtendProto3 | MessageBodyParseMode::Oneof => false, } } fn map_allowed(&self) -> bool { match *self { MessageBodyParseMode::MessageProto2 | MessageBodyParseMode::MessageProto3 | MessageBodyParseMode::ExtendProto2 | MessageBodyParseMode::ExtendProto3 => true, MessageBodyParseMode::Oneof => false, } } fn 
is_most_non_fields_allowed(&self) -> bool { match *self { MessageBodyParseMode::MessageProto2 | MessageBodyParseMode::MessageProto3 => true, MessageBodyParseMode::ExtendProto2 | MessageBodyParseMode::ExtendProto3 | MessageBodyParseMode::Oneof => false, } } fn is_option_allowed(&self) -> bool { match *self { MessageBodyParseMode::MessageProto2 | MessageBodyParseMode::MessageProto3 | MessageBodyParseMode::Oneof => true, MessageBodyParseMode::ExtendProto2 | MessageBodyParseMode::ExtendProto3 => false, } } fn is_extensions_allowed(&self) -> bool { match self { MessageBodyParseMode::MessageProto2 => true, _ => false, } } } #[derive(Default)] pub(crate) struct MessageBody { pub fields: Vec>, pub reserved_nums: Vec>, pub reserved_names: Vec, pub messages: Vec>, pub enums: Vec>, pub options: Vec, pub extension_ranges: Vec>, pub extensions: Vec>, } trait NumLitEx { fn to_option_value(&self, sign_is_plus: bool) -> anyhow::Result; } impl NumLitEx for NumLit { fn to_option_value(&self, sign_is_plus: bool) -> anyhow::Result { Ok(match (*self, sign_is_plus) { (NumLit::U64(u), true) => ProtobufConstant::U64(u), (NumLit::F64(f), true) => ProtobufConstant::F64(f), (NumLit::U64(u), false) => { ProtobufConstant::I64(int::neg(u).map_err(|_| ParserError::IntegerOverflow)?) } (NumLit::F64(f), false) => ProtobufConstant::F64(-f), }) } } impl<'a> Parser<'a> { pub(crate) fn new(input: &'a str) -> Parser<'a> { Parser { tokenizer: Tokenizer::new(input, ParserLanguage::Proto), syntax: Syntax::Proto2, } } // Protobuf grammar // fullIdent = ident { "." ident } fn next_full_ident(&mut self) -> anyhow::Result { let mut full_ident = String::new(); // https://github.com/google/protobuf/issues/4563 if self.tokenizer.next_symbol_if_eq('.')? { full_ident.push('.'); } full_ident.push_str(&self.tokenizer.next_ident()?); while self.tokenizer.next_symbol_if_eq('.')? { full_ident.push('.'); full_ident.push_str(&self.tokenizer.next_ident()?); } Ok(ProtobufPath::new(full_ident)) } // fullIdent = ident { "." 
ident } fn next_full_ident_rel(&mut self) -> anyhow::Result { let mut full_ident = String::new(); full_ident.push_str(&self.tokenizer.next_ident()?); while self.tokenizer.next_symbol_if_eq('.')? { full_ident.push('.'); full_ident.push_str(&self.tokenizer.next_ident()?); } Ok(ProtobufRelPath::new(full_ident)) } // emptyStatement = ";" fn next_empty_statement_opt(&mut self) -> anyhow::Result> { if self.tokenizer.next_symbol_if_eq(';')? { Ok(Some(())) } else { Ok(None) } } // messageName = ident // enumName = ident // messageType = [ "." ] { ident "." } messageName // enumType = [ "." ] { ident "." } enumName fn next_message_or_enum_type(&mut self) -> anyhow::Result { self.next_full_ident() } // groupName = capitalLetter { letter | decimalDigit | "_" } fn next_group_name(&mut self) -> anyhow::Result { // lexer cannot distinguish between group name and other ident let mut clone = self.clone(); let ident = clone.tokenizer.next_ident()?; if !ident.chars().next().unwrap().is_ascii_uppercase() { return Err(ParserError::GroupNameShouldStartWithUpperCase.into()); } *self = clone; Ok(ident) } // Boolean // boolLit = "true" | "false" fn next_bool_lit_opt(&mut self) -> anyhow::Result> { Ok(if self.tokenizer.next_ident_if_eq("true")? { Some(true) } else if self.tokenizer.next_ident_if_eq("false")? { Some(false) } else { None }) } // Constant fn next_num_lit(&mut self) -> anyhow::Result { self.tokenizer .next_token_check_map(|token| Ok(token.to_num_lit()?)) } fn next_message_constant_field_name( &mut self, ) -> anyhow::Result { if self.tokenizer.next_symbol_if_eq('[')? { let n = self.next_full_ident()?; if self.tokenizer.next_symbol_if_eq('/')? 
{ let prefix = format!("{}", n); let full_type_name = self.next_full_ident()?; self.tokenizer .next_symbol_expect_eq(']', "message constant")?; Ok(ProtobufConstantMessageFieldName::AnyTypeUrl(AnyTypeUrl { prefix, full_type_name, })) } else { self.tokenizer .next_symbol_expect_eq(']', "message constant")?; Ok(ProtobufConstantMessageFieldName::Extension(n)) } } else { let n = self.tokenizer.next_ident()?; Ok(ProtobufConstantMessageFieldName::Regular(n)) } } fn next_message_constant(&mut self) -> anyhow::Result { let mut r = ProtobufConstantMessage::default(); self.tokenizer .next_symbol_expect_eq('{', "message constant")?; while !self.tokenizer.lookahead_is_symbol('}')? { let n = self.next_message_constant_field_name()?; let v = self.next_field_value()?; // Consume the comma or semicolon if present. Commas and semicolons // between message fields are optional, all these are valid: // // {foo: 1,bar: 2,baz: 3,} // {foo: 1;bar: 2;baz: 3;} // {foo: 1 bar: 2 baz: 3} // {foo: 1,bar: 2;baz: 3} // {foo: 1,bar: 2 baz: 3} // self.tokenizer.next_symbol_if_in(&[',', ';'])?; r.fields.insert(n, v); } self.tokenizer .next_symbol_expect_eq('}', "message constant")?; Ok(r) } fn next_list_constant(&mut self) -> anyhow::Result> { self.tokenizer.next_symbol_expect_eq('[', "list constant")?; let mut list = Vec::new(); // The list may be empty. if self.tokenizer.next_symbol_if_eq(']')? { return Ok(list); } list.push(self.next_constant()?); while self.tokenizer.next_symbol_if_eq(',')? { list.push(self.next_constant()?); } self.tokenizer.next_symbol_expect_eq(']', "list constant")?; Ok(list) } // constant = fullIdent | ( [ "-" | "+" ] intLit ) | ( [ "-" | "+" ] floatLit ) | // strLit | boolLit | MessageValue fn next_constant(&mut self) -> anyhow::Result { // https://github.com/google/protobuf/blob/a21f225824e994ebd35e8447382ea4e0cd165b3c/src/google/protobuf/unittest_custom_options.proto#L350 if self.tokenizer.lookahead_is_symbol('{')? 
{ return Ok(ProtobufConstant::Message(self.next_message_constant()?)); } if self.tokenizer.lookahead_is_symbol('[')? { return Ok(ProtobufConstant::Repeated(self.next_list_constant()?)); } if let Some(b) = self.next_bool_lit_opt()? { return Ok(ProtobufConstant::Bool(b)); } if let &Token::Symbol(c) = self.tokenizer.lookahead_some()? { if c == '+' || c == '-' { self.tokenizer.advance()?; let sign = c == '+'; return Ok(self.next_num_lit()?.to_option_value(sign)?); } } if let Some(r) = self.tokenizer.next_token_if_map(|token| match token { &Token::StrLit(ref s) => Some(ProtobufConstant::String(s.clone())), _ => None, })? { return Ok(r); } match self.tokenizer.lookahead_some()? { &Token::IntLit(..) | &Token::FloatLit(..) => { return self.next_num_lit()?.to_option_value(true); } &Token::Ident(..) => { return Ok(ProtobufConstant::Ident(self.next_full_ident()?)); } _ => {} } Err(ParserError::ExpectConstant.into()) } fn next_field_value(&mut self) -> anyhow::Result { if self.tokenizer.next_symbol_if_eq(':')? { // Colon is optional when reading message constant. self.next_constant() } else { Ok(ProtobufConstant::Message(self.next_message_constant()?)) } } fn next_int_lit(&mut self) -> anyhow::Result { self.tokenizer.next_token_check_map(|token| match token { &Token::IntLit(i) => Ok(i), _ => Err(ParserError::IncorrectInput.into()), }) } // Syntax // syntax = "syntax" "=" quote "proto2" quote ";" // syntax = "syntax" "=" quote "proto3" quote ";" fn next_syntax(&mut self) -> anyhow::Result> { if self.tokenizer.next_ident_if_eq("syntax")? 
{ self.tokenizer.next_symbol_expect_eq('=', "syntax")?; let syntax_str = self.tokenizer.next_str_lit()?.decode_utf8()?; let syntax = if syntax_str == "proto2" { Syntax::Proto2 } else if syntax_str == "proto3" { Syntax::Proto3 } else { return Err(ParserError::UnknownSyntax.into()); }; self.tokenizer.next_symbol_expect_eq(';', "syntax")?; Ok(Some(syntax)) } else { Ok(None) } } // Import Statement // import = "import" [ "weak" | "public" ] strLit ";" fn next_import_opt(&mut self) -> anyhow::Result> { if self.tokenizer.next_ident_if_eq("import")? { let vis = if self.tokenizer.next_ident_if_eq("weak")? { ImportVis::Weak } else if self.tokenizer.next_ident_if_eq("public")? { ImportVis::Public } else { ImportVis::Default }; let path = self.tokenizer.next_str_lit()?.decode_utf8()?; self.tokenizer.next_symbol_expect_eq(';', "import")?; let path = ProtoPathBuf::new(path)?; Ok(Some(model::Import { path, vis })) } else { Ok(None) } } // Package // package = "package" fullIdent ";" fn next_package_opt(&mut self) -> anyhow::Result> { if self.tokenizer.next_ident_if_eq("package")? { let package = self.next_full_ident_rel()?; self.tokenizer.next_symbol_expect_eq(';', "package")?; Ok(Some(package.into_absolute())) } else { Ok(None) } } // Option fn next_ident(&mut self) -> anyhow::Result { Ok(ProtobufIdent::from(self.tokenizer.next_ident()?)) } fn next_option_name_component(&mut self) -> anyhow::Result { if self.tokenizer.next_symbol_if_eq('(')? { let comp = self.next_full_ident()?; self.tokenizer .next_symbol_expect_eq(')', "option name component")?; Ok(ProtobufOptionNamePart::Ext(comp)) } else { Ok(ProtobufOptionNamePart::Direct(self.next_ident()?)) } } // https://github.com/google/protobuf/issues/4563 // optionName = ( ident | "(" fullIdent ")" ) { "." ident } fn next_option_name(&mut self) -> anyhow::Result { let mut components = Vec::new(); components.push(self.next_option_name_component()?); while self.tokenizer.next_symbol_if_eq('.')? 
{ components.push(self.next_option_name_component()?); } if components.len() == 1 { if let ProtobufOptionNamePart::Direct(n) = &components[0] { return Ok(ProtobufOptionName::Builtin(n.clone())); } } Ok(ProtobufOptionName::Ext(ProtobufOptionNameExt(components))) } // option = "option" optionName "=" constant ";" fn next_option_opt(&mut self) -> anyhow::Result> { if self.tokenizer.next_ident_if_eq("option")? { let name = self.next_option_name()?; self.tokenizer.next_symbol_expect_eq('=', "option")?; let value = self.next_constant()?; self.tokenizer.next_symbol_expect_eq(';', "option")?; Ok(Some(ProtobufOption { name, value })) } else { Ok(None) } } // Fields // label = "required" | "optional" | "repeated" fn next_label(&mut self, mode: MessageBodyParseMode) -> anyhow::Result> { for rule in Rule::ALL { let mut clone = self.clone(); if clone.tokenizer.next_ident_if_eq(rule.as_str())? { if !mode.label_allowed(rule) { return Err(ParserError::LabelNotAllowed.into()); } *self = clone; return Ok(Some(rule)); } } if mode.some_label_required() { Err(ParserError::LabelRequired.into()) } else { Ok(None) } } fn next_field_type(&mut self) -> anyhow::Result { let simple = &[ ("int32", FieldType::Int32), ("int64", FieldType::Int64), ("uint32", FieldType::Uint32), ("uint64", FieldType::Uint64), ("sint32", FieldType::Sint32), ("sint64", FieldType::Sint64), ("fixed32", FieldType::Fixed32), ("sfixed32", FieldType::Sfixed32), ("fixed64", FieldType::Fixed64), ("sfixed64", FieldType::Sfixed64), ("bool", FieldType::Bool), ("string", FieldType::String), ("bytes", FieldType::Bytes), ("float", FieldType::Float), ("double", FieldType::Double), ]; for &(ref n, ref t) in simple { if self.tokenizer.next_ident_if_eq(n)? { return Ok(t.clone()); } } if let Some(t) = self.next_map_field_type_opt()? 
{ return Ok(t); } let message_or_enum = self.next_message_or_enum_type()?; Ok(FieldType::MessageOrEnum(message_or_enum)) } fn next_field_number(&mut self) -> anyhow::Result { // TODO: not all integers are valid field numbers self.tokenizer.next_token_check_map(|token| match token { &Token::IntLit(i) => i.to_i32(), _ => Err(ParserError::IncorrectInput.into()), }) } // fieldOption = optionName "=" constant fn next_field_option(&mut self) -> anyhow::Result { let name = self.next_option_name()?; self.tokenizer.next_symbol_expect_eq('=', "field option")?; let value = self.next_constant()?; Ok(ProtobufOption { name, value }) } // fieldOptions = fieldOption { "," fieldOption } fn next_field_options(&mut self) -> anyhow::Result> { let mut options = Vec::new(); options.push(self.next_field_option()?); while self.tokenizer.next_symbol_if_eq(',')? { options.push(self.next_field_option()?); } Ok(options) } // field = label type fieldName "=" fieldNumber [ "[" fieldOptions "]" ] ";" // group = label "group" groupName "=" fieldNumber messageBody fn next_field(&mut self, mode: MessageBodyParseMode) -> anyhow::Result> { let loc = self.tokenizer.lookahead_loc(); let rule = if self.clone().tokenizer.next_ident_if_eq("map")? { if !mode.map_allowed() { return Err(ParserError::MapFieldNotAllowed.into()); } None } else { self.next_label(mode)? }; if self.tokenizer.next_ident_if_eq("group")? { let name = self.next_group_name()?.to_owned(); self.tokenizer.next_symbol_expect_eq('=', "group")?; let number = self.next_field_number()?; let mode = match self.syntax { Syntax::Proto2 => MessageBodyParseMode::MessageProto2, Syntax::Proto3 => MessageBodyParseMode::MessageProto3, }; let MessageBody { fields, .. 
} = self.next_message_body(mode)?; let fields = fields .into_iter() .map(|fo| match fo.t { FieldOrOneOf::Field(f) => Ok(f), FieldOrOneOf::OneOf(_) => Err(ParserError::OneOfInGroup), }) .collect::>()?; let field = Field { // The field name is a lowercased version of the type name // (which has been verified to start with an uppercase letter). // https://git.io/JvxAP name: name.to_ascii_lowercase(), rule, typ: FieldType::Group(Group { name, fields }), number, options: Vec::new(), }; Ok(WithLoc { t: field, loc }) } else { let typ = self.next_field_type()?; let name = self.tokenizer.next_ident()?.to_owned(); self.tokenizer.next_symbol_expect_eq('=', "field")?; let number = self.next_field_number()?; let mut options = Vec::new(); if self.tokenizer.next_symbol_if_eq('[')? { for o in self.next_field_options()? { options.push(o); } self.tokenizer.next_symbol_expect_eq(']', "field")?; } self.tokenizer.next_symbol_expect_eq(';', "field")?; let field = Field { name, rule, typ, number, options, }; Ok(WithLoc { t: field, loc }) } } // oneof = "oneof" oneofName "{" { oneofField | emptyStatement } "}" // oneofField = type fieldName "=" fieldNumber [ "[" fieldOptions "]" ] ";" fn next_oneof_opt(&mut self) -> anyhow::Result> { if self.tokenizer.next_ident_if_eq("oneof")? { let name = self.tokenizer.next_ident()?.to_owned(); let MessageBody { fields, options, .. 
} = self.next_message_body(MessageBodyParseMode::Oneof)?; let fields = fields .into_iter() .map(|fo| match fo.t { FieldOrOneOf::Field(f) => Ok(f), FieldOrOneOf::OneOf(_) => Err(ParserError::OneOfInOneOf), }) .collect::>()?; Ok(Some(OneOf { name, fields, options, })) } else { Ok(None) } } // mapField = "map" "<" keyType "," type ">" mapName "=" fieldNumber [ "[" fieldOptions "]" ] ";" // keyType = "int32" | "int64" | "uint32" | "uint64" | "sint32" | "sint64" | // "fixed32" | "fixed64" | "sfixed32" | "sfixed64" | "bool" | "string" fn next_map_field_type_opt(&mut self) -> anyhow::Result> { if self.tokenizer.next_ident_if_eq("map")? { self.tokenizer .next_symbol_expect_eq('<', "map field type")?; // TODO: restrict key types let key = self.next_field_type()?; self.tokenizer .next_symbol_expect_eq(',', "map field type")?; let value = self.next_field_type()?; self.tokenizer .next_symbol_expect_eq('>', "map field type")?; Ok(Some(FieldType::Map(Box::new((key, value))))) } else { Ok(None) } } // Extensions and Reserved // Extensions // range = intLit [ "to" ( intLit | "max" ) ] fn next_range(&mut self) -> anyhow::Result> { let from = self.next_field_number()?; let to = if self.tokenizer.next_ident_if_eq("to")? { if self.tokenizer.next_ident_if_eq("max")? { 0x20000000 - 1 } else { self.next_field_number()? } } else { from }; Ok(from..=to) } // ranges = range { "," range } fn next_ranges(&mut self) -> anyhow::Result>> { let mut ranges = Vec::new(); ranges.push(self.next_range()?); while self.tokenizer.next_symbol_if_eq(',')? { ranges.push(self.next_range()?); } Ok(ranges) } // extensions = "extensions" ranges ";" fn next_extensions_opt(&mut self) -> anyhow::Result>>> { if self.tokenizer.next_ident_if_eq("extensions")? 
{ Ok(Some(self.next_ranges()?)) } else { Ok(None) } } // Reserved // Grammar is incorrect: https://github.com/google/protobuf/issues/4558 // reserved = "reserved" ( ranges | fieldNames ) ";" // fieldNames = fieldName { "," fieldName } fn next_reserved_opt( &mut self, ) -> anyhow::Result>, Vec)>> { if self.tokenizer.next_ident_if_eq("reserved")? { let (ranges, names) = if let &Token::StrLit(..) = self.tokenizer.lookahead_some()? { let mut names = Vec::new(); names.push(self.tokenizer.next_str_lit()?.decode_utf8()?); while self.tokenizer.next_symbol_if_eq(',')? { names.push(self.tokenizer.next_str_lit()?.decode_utf8()?); } (Vec::new(), names) } else { (self.next_ranges()?, Vec::new()) }; self.tokenizer.next_symbol_expect_eq(';', "reserved")?; Ok(Some((ranges, names))) } else { Ok(None) } } // Top Level definitions // Enum definition // enumValueOption = optionName "=" constant fn next_enum_value_option(&mut self) -> anyhow::Result { let name = self.next_option_name()?; self.tokenizer .next_symbol_expect_eq('=', "enum value option")?; let value = self.next_constant()?; Ok(ProtobufOption { name, value }) } // https://github.com/google/protobuf/issues/4561 fn next_enum_value(&mut self) -> anyhow::Result { let minus = self.tokenizer.next_symbol_if_eq('-')?; let lit = self.next_int_lit()?; Ok(if minus { let unsigned = lit.to_i64()?; match unsigned.checked_neg() { Some(neg) => neg.to_i32()?, None => return Err(ParserError::IntegerOverflow.into()), } } else { lit.to_i32()? }) } // enumField = ident "=" intLit [ "[" enumValueOption { "," enumValueOption } "]" ]";" fn next_enum_field(&mut self) -> anyhow::Result { let name = self.tokenizer.next_ident()?.to_owned(); self.tokenizer.next_symbol_expect_eq('=', "enum field")?; let number = self.next_enum_value()?; let mut options = Vec::new(); if self.tokenizer.next_symbol_if_eq('[')? { options.push(self.next_enum_value_option()?); while self.tokenizer.next_symbol_if_eq(',')? 
{ options.push(self.next_enum_value_option()?); } self.tokenizer.next_symbol_expect_eq(']', "enum field")?; } Ok(EnumValue { name, number, options, }) } // enum = "enum" enumName enumBody // enumBody = "{" { option | enumField | emptyStatement | reserved } "}" fn next_enum_opt(&mut self) -> anyhow::Result>> { let loc = self.tokenizer.lookahead_loc(); if self.tokenizer.next_ident_if_eq("enum")? { let name = self.tokenizer.next_ident()?.to_owned(); let mut values = Vec::new(); let mut options = Vec::new(); let mut reserved_nums = Vec::new(); let mut reserved_names = Vec::new(); self.tokenizer.next_symbol_expect_eq('{', "enum")?; while self.tokenizer.lookahead_if_symbol()? != Some('}') { // emptyStatement if self.tokenizer.next_symbol_if_eq(';')? { continue; } if let Some((field_nums, field_names)) = self.next_reserved_opt()? { reserved_nums.extend(field_nums); reserved_names.extend(field_names); continue; } if let Some(o) = self.next_option_opt()? { options.push(o); continue; } values.push(self.next_enum_field()?); } self.tokenizer.next_symbol_expect_eq('}', "enum")?; let enumeration = Enumeration { name, values, options, reserved_nums, reserved_names, }; Ok(Some(WithLoc { loc, t: enumeration, })) } else { Ok(None) } } // Message definition // messageBody = "{" { field | enum | message | extend | extensions | group | // option | oneof | mapField | reserved | emptyStatement } "}" fn next_message_body(&mut self, mode: MessageBodyParseMode) -> anyhow::Result { self.tokenizer.next_symbol_expect_eq('{', "message body")?; let mut r = MessageBody::default(); while self.tokenizer.lookahead_if_symbol()? != Some('}') { let loc = self.tokenizer.lookahead_loc(); // emptyStatement if self.tokenizer.next_symbol_if_eq(';')? { continue; } if mode.is_most_non_fields_allowed() { if let Some((field_nums, field_names)) = self.next_reserved_opt()? { r.reserved_nums.extend(field_nums); r.reserved_names.extend(field_names); continue; } if let Some(oneof) = self.next_oneof_opt()? 
{ let one_of = FieldOrOneOf::OneOf(oneof); r.fields.push(WithLoc { t: one_of, loc }); continue; } if let Some(extensions) = self.next_extend_opt()? { r.extensions.extend(extensions); continue; } if let Some(nested_message) = self.next_message_opt()? { r.messages.push(nested_message); continue; } if let Some(nested_enum) = self.next_enum_opt()? { r.enums.push(nested_enum); continue; } } else { self.tokenizer.next_ident_if_eq_error("reserved")?; self.tokenizer.next_ident_if_eq_error("oneof")?; self.tokenizer.next_ident_if_eq_error("extend")?; self.tokenizer.next_ident_if_eq_error("message")?; self.tokenizer.next_ident_if_eq_error("enum")?; } if mode.is_extensions_allowed() { if let Some(extension_ranges) = self.next_extensions_opt()? { r.extension_ranges.extend(extension_ranges); continue; } } else { self.tokenizer.next_ident_if_eq_error("extensions")?; } if mode.is_option_allowed() { if let Some(option) = self.next_option_opt()? { r.options.push(option); continue; } } else { self.tokenizer.next_ident_if_eq_error("option")?; } let field = FieldOrOneOf::Field(self.next_field(mode)?); r.fields.push(WithLoc { t: field, loc }); } self.tokenizer.next_symbol_expect_eq('}', "message body")?; Ok(r) } // message = "message" messageName messageBody fn next_message_opt(&mut self) -> anyhow::Result>> { let loc = self.tokenizer.lookahead_loc(); if self.tokenizer.next_ident_if_eq("message")? 
{ let name = self.tokenizer.next_ident()?.to_owned(); let mode = match self.syntax { Syntax::Proto2 => MessageBodyParseMode::MessageProto2, Syntax::Proto3 => MessageBodyParseMode::MessageProto3, }; let MessageBody { fields, reserved_nums, reserved_names, messages, enums, options, extensions, extension_ranges, } = self.next_message_body(mode)?; let message = Message { name, fields, reserved_nums, reserved_names, messages, enums, options, extensions, extension_ranges, }; Ok(Some(WithLoc { t: message, loc })) } else { Ok(None) } } // Extend // extend = "extend" messageType "{" {field | group | emptyStatement} "}" fn next_extend_opt(&mut self) -> anyhow::Result>>> { let mut clone = self.clone(); if clone.tokenizer.next_ident_if_eq("extend")? { // According to spec `extend` is only for `proto2`, but it is used in `proto3` // https://github.com/google/protobuf/issues/4610 *self = clone; let extendee = self.next_message_or_enum_type()?; let mode = match self.syntax { Syntax::Proto2 => MessageBodyParseMode::ExtendProto2, Syntax::Proto3 => MessageBodyParseMode::ExtendProto3, }; let MessageBody { fields, .. } = self.next_message_body(mode)?; // TODO: is oneof allowed in extend? let fields: Vec> = fields .into_iter() .map(|fo| match fo.t { FieldOrOneOf::Field(f) => Ok(f), FieldOrOneOf::OneOf(_) => Err(ParserError::OneOfInExtend), }) .collect::>()?; let extensions = fields .into_iter() .map(|field| { let extendee = extendee.clone(); let loc = field.loc; let extension = Extension { extendee, field }; WithLoc { t: extension, loc } }) .collect(); Ok(Some(extensions)) } else { Ok(None) } } // Service definition fn next_options_or_colon(&mut self) -> anyhow::Result> { let mut options = Vec::new(); if self.tokenizer.next_symbol_if_eq('{')? { while self.tokenizer.lookahead_if_symbol()? != Some('}') { if let Some(option) = self.next_option_opt()? { options.push(option); continue; } if let Some(()) = self.next_empty_statement_opt()? 
{ continue; } return Err(ParserError::IncorrectInput.into()); } self.tokenizer.next_symbol_expect_eq('}', "option")?; } else { self.tokenizer.next_symbol_expect_eq(';', "option")?; } Ok(options) } // stream = "stream" streamName "(" messageType "," messageType ")" // (( "{" { option | emptyStatement } "}") | ";" ) fn next_stream_opt(&mut self) -> anyhow::Result> { assert_eq!(Syntax::Proto2, self.syntax); if self.tokenizer.next_ident_if_eq("stream")? { let name = self.tokenizer.next_ident()?; self.tokenizer.next_symbol_expect_eq('(', "stream")?; let input_type = self.next_message_or_enum_type()?; self.tokenizer.next_symbol_expect_eq(',', "stream")?; let output_type = self.next_message_or_enum_type()?; self.tokenizer.next_symbol_expect_eq(')', "stream")?; let options = self.next_options_or_colon()?; Ok(Some(Method { name, input_type, output_type, client_streaming: true, server_streaming: true, options, })) } else { Ok(None) } } // rpc = "rpc" rpcName "(" [ "stream" ] messageType ")" // "returns" "(" [ "stream" ] messageType ")" // (( "{" { option | emptyStatement } "}" ) | ";" ) fn next_rpc_opt(&mut self) -> anyhow::Result> { if self.tokenizer.next_ident_if_eq("rpc")? 
{ let name = self.tokenizer.next_ident()?; self.tokenizer.next_symbol_expect_eq('(', "rpc")?; let client_streaming = self.tokenizer.next_ident_if_eq("stream")?; let input_type = self.next_message_or_enum_type()?; self.tokenizer.next_symbol_expect_eq(')', "rpc")?; self.tokenizer.next_ident_expect_eq("returns")?; self.tokenizer.next_symbol_expect_eq('(', "rpc")?; let server_streaming = self.tokenizer.next_ident_if_eq("stream")?; let output_type = self.next_message_or_enum_type()?; self.tokenizer.next_symbol_expect_eq(')', "rpc")?; let options = self.next_options_or_colon()?; Ok(Some(Method { name, input_type, output_type, client_streaming, server_streaming, options, })) } else { Ok(None) } } // proto2: // service = "service" serviceName "{" { option | rpc | stream | emptyStatement } "}" // // proto3: // service = "service" serviceName "{" { option | rpc | emptyStatement } "}" fn next_service_opt(&mut self) -> anyhow::Result>> { let loc = self.tokenizer.lookahead_loc(); if self.tokenizer.next_ident_if_eq("service")? { let name = self.tokenizer.next_ident()?; let mut methods = Vec::new(); let mut options = Vec::new(); self.tokenizer.next_symbol_expect_eq('{', "service")?; while self.tokenizer.lookahead_if_symbol()? != Some('}') { if let Some(method) = self.next_rpc_opt()? { methods.push(method); continue; } if self.syntax == Syntax::Proto2 { if let Some(method) = self.next_stream_opt()? { methods.push(method); continue; } } if let Some(o) = self.next_option_opt()? { options.push(o); continue; } if let Some(()) = self.next_empty_statement_opt()? 
{ continue; } return Err(ParserError::IncorrectInput.into()); } self.tokenizer.next_symbol_expect_eq('}', "service")?; Ok(Some(WithLoc { loc, t: Service { name, methods, options, }, })) } else { Ok(None) } } // Proto file // proto = syntax { import | package | option | topLevelDef | emptyStatement } // topLevelDef = message | enum | extend | service pub fn next_proto(&mut self) -> anyhow::Result { let syntax = self.next_syntax()?.unwrap_or(Syntax::Proto2); self.syntax = syntax; let mut imports = Vec::new(); let mut package = ProtobufAbsPath::root(); let mut messages = Vec::new(); let mut enums = Vec::new(); let mut extensions = Vec::new(); let mut options = Vec::new(); let mut services = Vec::new(); while !self.tokenizer.syntax_eof()? { if let Some(import) = self.next_import_opt()? { imports.push(import); continue; } if let Some(next_package) = self.next_package_opt()? { package = next_package; continue; } if let Some(option) = self.next_option_opt()? { options.push(option); continue; } if let Some(message) = self.next_message_opt()? { messages.push(message); continue; } if let Some(enumeration) = self.next_enum_opt()? { enums.push(enumeration); continue; } if let Some(more_extensions) = self.next_extend_opt()? { extensions.extend(more_extensions); continue; } if let Some(service) = self.next_service_opt()? { services.push(service); continue; } if self.tokenizer.next_symbol_if_eq(';')? 
{ continue; } return Err(ParserError::IncorrectInput.into()); } Ok(FileDescriptor { imports, package, syntax, messages, enums, extensions, services, options, }) } } #[cfg(test)] mod test { use super::*; fn parse(input: &str, parse_what: P) -> R where P: FnOnce(&mut Parser) -> anyhow::Result, { let mut parser = Parser::new(input); let r = parse_what(&mut parser).expect(&format!("parse failed at {}", parser.tokenizer.loc())); let eof = parser .tokenizer .syntax_eof() .expect(&format!("check eof failed at {}", parser.tokenizer.loc())); assert!(eof, "{}", parser.tokenizer.loc()); r } fn parse_opt(input: &str, parse_what: P) -> R where P: FnOnce(&mut Parser) -> anyhow::Result>, { let mut parser = Parser::new(input); let o = parse_what(&mut parser).expect(&format!("parse failed at {}", parser.tokenizer.loc())); let r = o.expect(&format!( "parser returned none at {}", parser.tokenizer.loc() )); assert!(parser.tokenizer.syntax_eof().unwrap()); r } #[test] fn test_syntax() { let msg = r#" syntax = "proto3"; "#; let mess = parse_opt(msg, |p| p.next_syntax()); assert_eq!(Syntax::Proto3, mess); } #[test] fn test_field_default_value_int() { let msg = r#" optional int64 f = 4 [default = 12]; "#; let mess = parse(msg, |p| p.next_field(MessageBodyParseMode::MessageProto2)); assert_eq!("f", mess.t.name); assert_eq!( ProtobufOptionName::simple("default"), mess.t.options[0].name ); assert_eq!("12", mess.t.options[0].value.format()); } #[test] fn test_field_default_value_float() { let msg = r#" optional float f = 2 [default = 10.0]; "#; let mess = parse(msg, |p| p.next_field(MessageBodyParseMode::MessageProto2)); assert_eq!("f", mess.t.name); assert_eq!( ProtobufOptionName::simple("default"), mess.t.options[0].name ); assert_eq!("10", mess.t.options[0].value.format()); } #[test] fn test_field_options() { let msg = r#" (my_opt).my_field = {foo: 1 bar: 2} "#; let opt = parse(msg, |p| p.next_field_option()); assert_eq!(r#"{ foo: 1 bar: 2 }"#, opt.value.format()); let msg = r#" 
(my_opt).my_field = {foo: 1; bar:2;} "#; let opt = parse(msg, |p| p.next_field_option()); assert_eq!(r#"{ foo: 1 bar: 2 }"#, opt.value.format()); let msg = r#" (my_opt).my_field = {foo: 1, bar: 2} "#; let opt = parse(msg, |p| p.next_field_option()); assert_eq!(r#"{ foo: 1 bar: 2 }"#, opt.value.format()); let msg = r#" (my_opt).my_field = "foo" "#; let opt = parse(msg, |p| p.next_field_option()); assert_eq!(r#""foo""#, opt.value.format()); let msg = r#" (my_opt) = { my_field: "foo"} "#; let opt = parse(msg, |p| p.next_field_option()); assert_eq!(r#"{ my_field: "foo" }"#, opt.value.format()); let msg = r#" (my_opt) = { my_field: [] } "#; let opt = parse(msg, |p| p.next_field_option()); assert_eq!(r#"{ my_field: [] }"#, opt.value.format()); let msg = r#" (my_opt) = { my_field: ["foo", "bar"] } "#; let opt = parse(msg, |p| p.next_field_option()); assert_eq!(r#"{ my_field: ["foo","bar"] }"#, opt.value.format()); let msg = r#" (my_opt) = { my_field: [1, 2] } "#; let opt = parse(msg, |p| p.next_field_option()); assert_eq!(r#"{ my_field: [1,2] }"#, opt.value.format()); } #[test] fn test_message() { let msg = r#"message ReferenceData { repeated ScenarioInfo scenarioSet = 1; repeated CalculatedObjectInfo calculatedObjectSet = 2; repeated RiskFactorList riskFactorListSet = 3; repeated RiskMaturityInfo riskMaturitySet = 4; repeated IndicatorInfo indicatorSet = 5; repeated RiskStrikeInfo riskStrikeSet = 6; repeated FreeProjectionList freeProjectionListSet = 7; repeated ValidationProperty ValidationSet = 8; repeated CalcProperties calcPropertiesSet = 9; repeated MaturityInfo maturitySet = 10; }"#; let mess = parse_opt(msg, |p| p.next_message_opt()); assert_eq!(10, mess.t.fields.len()); } #[test] fn test_enum() { let msg = r#"enum PairingStatus { DEALPAIRED = 0; INVENTORYORPHAN = 1; CALCULATEDORPHAN = 2; CANCELED = 3; }"#; let enumeration = parse_opt(msg, |p| p.next_enum_opt()); assert_eq!(4, enumeration.values.len()); } #[test] fn test_ignore() { let msg = r#"option optimize_for 
= SPEED;"#; parse_opt(msg, |p| p.next_option_opt()); } #[test] fn test_import() { let msg = r#"syntax = "proto3"; import "test_import_nested_imported_pb.proto"; message ContainsImportedNested { ContainerForNested.NestedMessage m = 1; ContainerForNested.NestedEnum e = 2; } "#; let desc = parse(msg, |p| p.next_proto()); assert_eq!( vec!["test_import_nested_imported_pb.proto"], desc.imports .into_iter() .map(|i| i.path.to_str().to_owned()) .collect::>() ); } #[test] fn test_nested_message() { let msg = r#"message A { message B { repeated int32 a = 1; optional string b = 2; } optional string b = 1; }"#; let mess = parse_opt(msg, |p| p.next_message_opt()); assert_eq!(1, mess.t.messages.len()); } #[test] fn test_map() { let msg = r#"message A { optional map b = 1; }"#; let mess = parse_opt(msg, |p| p.next_message_opt()); assert_eq!(1, mess.t.fields.len()); match mess.t.regular_fields_for_test()[0].typ { FieldType::Map(ref f) => match &**f { &(FieldType::String, FieldType::Int32) => (), ref f => panic!("Expecting Map found {:?}", f), }, ref f => panic!("Expecting map, got {:?}", f), } } #[test] fn test_oneof() { let msg = r#"message A { optional int32 a1 = 1; oneof a_oneof { string a2 = 2; int32 a3 = 3; bytes a4 = 4; } repeated bool a5 = 5; }"#; let mess = parse_opt(msg, |p| p.next_message_opt()); assert_eq!(1, mess.t.oneofs().len()); assert_eq!(3, mess.t.oneofs()[0].fields.len()); } #[test] fn test_reserved_in_message() { let msg = r#"message Sample { reserved 4, 15, 17 to 20, 30; reserved "foo", "bar"; optional uint64 age =1; required bytes name =2; }"#; let mess = parse_opt(msg, |p| p.next_message_opt()); assert_eq!( vec![4..=4, 15..=15, 17..=20, 30..=30,], mess.t.reserved_nums ); assert_eq!( vec!["foo".to_string(), "bar".to_string()], mess.t.reserved_names ); assert_eq!(2, mess.t.fields.len()); } #[test] fn test_reserved_in_enum() { let msg = r#"enum Sample { reserved 4, 15, 17 to 20, 30; reserved "foo", "bar"; }"#; let enum_ = parse_opt(msg, |p| p.next_enum_opt()); 
assert_eq!( vec![4..=4, 15..=15, 17..=20, 30..=30,], enum_.t.reserved_nums ); assert_eq!( vec!["foo".to_string(), "bar".to_string()], enum_.t.reserved_names ); } #[test] fn test_default_value_int() { let msg = r#"message Sample { optional int32 x = 1 [default = 17]; }"#; let mess = parse_opt(msg, |p| p.next_message_opt()); assert_eq!( ProtobufOptionName::simple("default"), mess.t.regular_fields_for_test()[0].options[0].name ); assert_eq!( "17", mess.t.regular_fields_for_test()[0].options[0] .value .format() ); } #[test] fn test_default_value_string() { let msg = r#"message Sample { optional string x = 1 [default = "ab\nc d\"g\'h\0\"z"]; }"#; let mess = parse_opt(msg, |p| p.next_message_opt()); assert_eq!( r#""ab\nc d\"g\'h\0\"z""#, mess.t.regular_fields_for_test()[0].options[0] .value .format() ); } #[test] fn test_default_value_bytes() { let msg = r#"message Sample { optional bytes x = 1 [default = "ab\nc d\xfeE\"g\'h\0\"z"]; }"#; let mess = parse_opt(msg, |p| p.next_message_opt()); assert_eq!( r#""ab\nc d\xfeE\"g\'h\0\"z""#, mess.t.regular_fields_for_test()[0].options[0] .value .format() ); } #[test] fn test_group() { let msg = r#"message MessageWithGroup { optional string aaa = 1; repeated group Identifier = 18 { optional int32 iii = 19; optional string sss = 20; } required int bbb = 3; }"#; let mess = parse_opt(msg, |p| p.next_message_opt()); assert_eq!("identifier", mess.t.regular_fields_for_test()[1].name); if let FieldType::Group(Group { fields, .. 
}) = &mess.t.regular_fields_for_test()[1].typ { assert_eq!(2, fields.len()); } else { panic!("expecting group"); } assert_eq!("bbb", mess.t.regular_fields_for_test()[2].name); } #[test] fn test_incorrect_file_descriptor() { let msg = r#" message Foo {} dfgdg "#; let err = FileDescriptor::parse(msg).err().expect("err"); assert_eq!(4, err.line); } } protobuf-parse-3.7.2/src/rel_path.rs000064400000000000000000000023461046102023000155450ustar 00000000000000use std::ops::Deref; use std::path::Path; use std::path::PathBuf; /// Wrapper for `Path` that asserts that the path is relative. #[repr(transparent)] pub(crate) struct RelPath { path: Path, } /// Wrapper for `PathBuf` that asserts that the path is relative. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub(crate) struct _RelPathBuf { path: PathBuf, } impl RelPath { pub(crate) fn _new(path: &Path) -> &RelPath { assert!( !path.is_absolute(), "path must be relative: {}", path.display() ); unsafe { &*(path as *const Path as *const RelPath) } } pub(crate) fn _to_owned(&self) -> _RelPathBuf { _RelPathBuf { path: self.path.to_owned(), } } } impl _RelPathBuf { pub(crate) fn _new(path: PathBuf) -> _RelPathBuf { assert!( !path.is_absolute(), "path must be relative: {}", path.display() ); _RelPathBuf { path } } } impl Deref for RelPath { type Target = Path; fn deref(&self) -> &Self::Target { &self.path } } impl Deref for _RelPathBuf { type Target = RelPath; fn deref(&self) -> &Self::Target { RelPath::_new(&self.path) } } protobuf-parse-3.7.2/src/test_against_protobuf_protos.rs000064400000000000000000000017531046102023000217630ustar 00000000000000#![cfg(test)] use std::fs; use std::io::Read; use std::path::Path; use anyhow::Context; use crate::model; fn parse_recursively(path: &Path) { assert!(path.exists()); let file_name = path .file_name() .expect("file_name") .to_str() .expect("to_str"); if path.is_dir() { for entry in fs::read_dir(path).expect("read_dir") { parse_recursively(&entry.expect("entry").path()); } } else if 
file_name.ends_with(".proto") { println!("checking {}", path.display()); let mut content = String::new(); fs::File::open(path) .expect("open") .read_to_string(&mut content) .expect("read"); model::FileDescriptor::parse(&content) .with_context(|| format!("testing `{}`", path.display())) .expect("parse"); } } #[test] fn test() { let path = &Path::new("../google-protobuf-all-protos/protobuf"); parse_recursively(&Path::new(path)); } protobuf-parse-3.7.2/src/which_parser.rs000064400000000000000000000005431046102023000164220ustar 00000000000000/// Which parse to use to parse `.proto` files. #[derive(Debug, Copy, Clone)] pub(crate) enum WhichParser { /// Pure Rust parser implemented by this crate. Pure, /// Parse `.proto` files using `protoc --descriptor_set_out=...` command. Protoc, } impl Default for WhichParser { fn default() -> Self { WhichParser::Pure } } protobuf-parse-3.7.2/tests/bundled_proto_consistent.rs000064400000000000000000000034251046102023000214320ustar 00000000000000use std::fs; use std::path::Path; use std::path::PathBuf; fn list_dir(p: &Path) -> Vec { let mut children = fs::read_dir(p) .unwrap() .map(|r| r.map(|e| e.path())) .collect::, _>>() .unwrap(); children.sort(); children } fn assert_equal_recursively(a: &Path, b: &Path) { assert_eq!(a.is_dir(), b.is_dir(), "{} {}", a.display(), b.display()); assert_eq!(a.is_file(), b.is_file(), "{} {}", a.display(), b.display()); if a.is_dir() { let mut a_contents = list_dir(a).into_iter(); let mut b_contents = list_dir(b).into_iter(); loop { let a_child = a_contents.next(); let b_child = b_contents.next(); match (a_child, b_child) { (Some(a_child), Some(b_child)) => { assert_eq!(a_child.file_name(), b_child.file_name()); assert_equal_recursively(&a_child, &b_child); } (None, None) => break, _ => panic!( "mismatched directories: {} and {}", a.display(), b.display() ), } } } else { let a_contents = fs::read_to_string(a).unwrap(); let b_contents = fs::read_to_string(b).unwrap(); assert_eq!(a_contents, b_contents); 
} } #[test] fn test_bundled_google_proto_files_consistent() { let source = "../proto/google"; let our_copy = "src/proto/google"; assert_equal_recursively(Path::new(source), Path::new(our_copy)); } #[test] fn test_bundled_rustproto_proto_consistent() { let source = "../proto/rustproto.proto"; let our_copy = "src/proto/rustproto.proto"; assert_equal_recursively(Path::new(source), Path::new(our_copy)); }