summaryrefslogtreecommitdiff
path: root/vendor/prost-build
diff options
context:
space:
mode:
authormo khan <mo@mokhan.ca>2025-07-02 18:36:06 -0600
committermo khan <mo@mokhan.ca>2025-07-02 18:36:06 -0600
commit8cdfa445d6629ffef4cb84967ff7017654045bc2 (patch)
tree22f0b0907c024c78d26a731e2e1f5219407d8102 /vendor/prost-build
parent4351c74c7c5f97156bc94d3a8549b9940ac80e3f (diff)
chore: add vendor directory
Diffstat (limited to 'vendor/prost-build')
-rw-r--r--vendor/prost-build/.cargo-checksum.json1
-rw-r--r--vendor/prost-build/Cargo.toml105
-rw-r--r--vendor/prost-build/LICENSE201
-rw-r--r--vendor/prost-build/README.md27
-rw-r--r--vendor/prost-build/src/ast.rs452
-rw-r--r--vendor/prost-build/src/code_generator.rs1183
-rw-r--r--vendor/prost-build/src/code_generator/c_escaping.rs139
-rw-r--r--vendor/prost-build/src/code_generator/syntax.rs14
-rw-r--r--vendor/prost-build/src/collections.rs57
-rw-r--r--vendor/prost-build/src/config.rs1174
-rw-r--r--vendor/prost-build/src/extern_paths.rs170
-rw-r--r--vendor/prost-build/src/fixtures/alphabet/_expected_include.rs19
-rw-r--r--vendor/prost-build/src/fixtures/alphabet/a.proto8
-rw-r--r--vendor/prost-build/src/fixtures/alphabet/b.proto8
-rw-r--r--vendor/prost-build/src/fixtures/alphabet/c.proto8
-rw-r--r--vendor/prost-build/src/fixtures/alphabet/d.proto8
-rw-r--r--vendor/prost-build/src/fixtures/alphabet/e.proto8
-rw-r--r--vendor/prost-build/src/fixtures/alphabet/f.proto8
-rw-r--r--vendor/prost-build/src/fixtures/field_attributes/_expected_field_attributes.rs34
-rw-r--r--vendor/prost-build/src/fixtures/field_attributes/_expected_field_attributes_formatted.rs33
-rw-r--r--vendor/prost-build/src/fixtures/field_attributes/field_attributes.proto21
-rw-r--r--vendor/prost-build/src/fixtures/helloworld/_expected_helloworld.rs45
-rw-r--r--vendor/prost-build/src/fixtures/helloworld/_expected_helloworld_formatted.rs45
-rw-r--r--vendor/prost-build/src/fixtures/helloworld/goodbye.proto9
-rw-r--r--vendor/prost-build/src/fixtures/helloworld/hello.proto9
-rw-r--r--vendor/prost-build/src/fixtures/helloworld/types.proto17
-rw-r--r--vendor/prost-build/src/fixtures/imports_empty/_expected_include.rs10
-rw-r--r--vendor/prost-build/src/fixtures/imports_empty/imports_empty.proto40
-rw-r--r--vendor/prost-build/src/fixtures/smoke_test/smoke_test.proto18
-rw-r--r--vendor/prost-build/src/fixtures/write_includes/_.includes.rs23
-rw-r--r--vendor/prost-build/src/ident.rs256
-rw-r--r--vendor/prost-build/src/lib.rs563
-rw-r--r--vendor/prost-build/src/message_graph.rs87
-rw-r--r--vendor/prost-build/src/module.rs93
-rw-r--r--vendor/prost-build/src/path.rs246
35 files changed, 5139 insertions, 0 deletions
diff --git a/vendor/prost-build/.cargo-checksum.json b/vendor/prost-build/.cargo-checksum.json
new file mode 100644
index 00000000..6ae299d4
--- /dev/null
+++ b/vendor/prost-build/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"75a8ba143424ea17eb4a9ac461ead7ba5a4fddb33ba572795c04ad2bf03cd41a","LICENSE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","README.md":"3750e8c49e5a8e053c9987602fcd2d475dd87e22cd6424b7c84f073243b4997d","src/ast.rs":"4ef148e3d1bb36aa66eb5eb34d5ed20d0a453ffbd67f11b07f730a1cac2aef15","src/code_generator.rs":"139da145205c18e53a2dd894b99a9b405e7cf835005e072fe07b34fd8d8f5d0d","src/code_generator/c_escaping.rs":"08d6696fed3de1e8e857272a93579066b7873475cff20ad5c5d61a355b4c125b","src/code_generator/syntax.rs":"8f5f01190b4a978d287797326011767cb70722e7ef347b1b5bcc724ee5c118f3","src/collections.rs":"fc99433892b37f3686a9109723fa23a1ac35d18d74702b318a1d886e4b788b4b","src/config.rs":"23bb2b502d11e11c4c4671240e813fe6203535296be98548a8c7b4b946f855a0","src/extern_paths.rs":"467287a457f0ed4bdd2938edf12ead1cd7c83e8a5ce94ae4f14bbd3ec628f5a0","src/fixtures/alphabet/_expected_include.rs":"6512f74cbabe8454432a3f8afbc3d465e121ac3f597f340a278588e1b735c3e8","src/fixtures/alphabet/a.proto":"519aac8a62937d216eb7cdf0b3426c5c1dcc0da8c069cd379e1713c853059d8b","src/fixtures/alphabet/b.proto":"6644d5ad3134f699e6c3047595ae2e83cfd73d28f143510463120c65379a7f78","src/fixtures/alphabet/c.proto":"a33cb90823c566b0287b903871f1d675ff0fc31c3966d5183fa172a88cff8eb3","src/fixtures/alphabet/d.proto":"f72ef9346f886196986798354b974d0ce6669f90e3e6235f13a997bdc5f7291f","src/fixtures/alphabet/e.proto":"f666aad9d4c767d4221941dceb6a85b42472aa20f09ac8852de2e51efabb5475","src/fixtures/alphabet/f.proto":"0585e25be55e9dd35ee72aabde8495fb92ea763b0bf002a845e07579e0da05cf","src/fixtures/field_attributes/_expected_field_attributes.rs":"b3009038ed952992d7ad9e2f11bb68a8a4dc69a66a3c038d3b18251aefa3448e","src/fixtures/field_attributes/_expected_field_attributes_formatted.rs":"daa6dfd92b241386801cf9db6b04f30fb0fa69f4da12e195a25605be25727d59","src/fixtures/field_attributes/field_attributes.proto":"3cef78c6ab8ba62a9c8d5a7ae49a7a5d1844cfc474f5b469bdadb14adccea4d0","src/fixtures/helloworld/_
expected_helloworld.rs":"44f60faa19588d4acc13ca3ae015a5c55997241484bf77cc74de82151bd4cd2c","src/fixtures/helloworld/_expected_helloworld_formatted.rs":"10c054007a1adc0f49a4268d748c1c559131b05ccddaccb07cf581d991816aec","src/fixtures/helloworld/goodbye.proto":"ef20704f5ddf4979ba1825ae6847393e94b72fd52a2d2d9b95ab4ffc5d32c746","src/fixtures/helloworld/hello.proto":"4eeeeb58af30bb7522fbd4560009cf89540f5f314e0db49e1f9dd8f4de284064","src/fixtures/helloworld/types.proto":"e2f69b020b3115c0f0e2af46c93710d1ea6d6bb29e193536aee180c6de6aa518","src/fixtures/imports_empty/_expected_include.rs":"b3ca9ab03c1fb1d46a285b6c7488d7e3cbd66d6aa8db7fa0e99157d07cb5645d","src/fixtures/imports_empty/imports_empty.proto":"b16d690bfbc148d2eeee9bf644f7a7790705a053049687635d4014e4d330e309","src/fixtures/smoke_test/smoke_test.proto":"05068c3db3f1df2d5737eaaec730e42e3e58eb1d533df630fcb3dc138c7d8c9b","src/fixtures/write_includes/_.includes.rs":"ed15945b7fd3e281a9adf665b7d35c45899256f8b3e234b67022ee062d441e47","src/ident.rs":"f3bdd82c7f21b7fac122309fa5b664a37970eb995869516cfdd15d0f4d363333","src/lib.rs":"6473e87f4e3cc41196e8457233ea82f29c61c2eb2b8bbfacb2eab33103ae10fd","src/message_graph.rs":"51a27b5baabe6449f6030ec42b0d98360fbb7930d6293e03cf8d9dec87a213dd","src/module.rs":"e945b8100a8ea80b1460a8a1c72f2246a49f6aaa852c004ba9558fb657af4cb3","src/path.rs":"fc672c6aad805c506f2dd9b71b1329b283c3db0dced841f58d1455ae34b03b06"},"package":"22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4"} \ No newline at end of file
diff --git a/vendor/prost-build/Cargo.toml b/vendor/prost-build/Cargo.toml
new file mode 100644
index 00000000..1d5f6bab
--- /dev/null
+++ b/vendor/prost-build/Cargo.toml
@@ -0,0 +1,105 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.70"
+name = "prost-build"
+version = "0.12.6"
+authors = [
+ "Dan Burkert <dan@danburkert.com>",
+ "Lucio Franco <luciofranco14@gmail.com>",
+ "Casper Meijn <casper@meijn.net>",
+ "Tokio Contributors <team@tokio.rs>",
+]
+description = "Generate Prost annotated Rust types from Protocol Buffers files."
+documentation = "https://docs.rs/prost-build"
+readme = "README.md"
+license = "Apache-2.0"
+repository = "https://github.com/tokio-rs/prost"
+
+[dependencies.bytes]
+version = "1"
+default-features = false
+
+[dependencies.heck]
+version = ">=0.4, <=0.5"
+
+[dependencies.itertools]
+version = ">=0.10, <=0.12"
+features = ["use_alloc"]
+default-features = false
+
+[dependencies.log]
+version = "0.4.4"
+
+[dependencies.multimap]
+version = ">=0.8, <=0.10"
+default-features = false
+
+[dependencies.once_cell]
+version = "1.17.1"
+
+[dependencies.petgraph]
+version = "0.6"
+default-features = false
+
+[dependencies.prettyplease]
+version = "0.2"
+optional = true
+
+[dependencies.prost]
+version = "0.12.6"
+default-features = false
+
+[dependencies.prost-types]
+version = "0.12.6"
+default-features = false
+
+[dependencies.pulldown-cmark]
+version = "0.9.1"
+optional = true
+default-features = false
+
+[dependencies.pulldown-cmark-to-cmark]
+version = "10.0.1"
+optional = true
+
+[dependencies.regex]
+version = "1.8.1"
+features = [
+ "std",
+ "unicode-bool",
+]
+default-features = false
+
+[dependencies.syn]
+version = "2"
+features = ["full"]
+optional = true
+
+[dependencies.tempfile]
+version = "3"
+
+[dev-dependencies.env_logger]
+version = "0.10"
+default-features = false
+
+[features]
+cleanup-markdown = [
+ "dep:pulldown-cmark",
+ "dep:pulldown-cmark-to-cmark",
+]
+default = ["format"]
+format = [
+ "dep:prettyplease",
+ "dep:syn",
+]
diff --git a/vendor/prost-build/LICENSE b/vendor/prost-build/LICENSE
new file mode 100644
index 00000000..16fe87b0
--- /dev/null
+++ b/vendor/prost-build/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/vendor/prost-build/README.md b/vendor/prost-build/README.md
new file mode 100644
index 00000000..4646dbc8
--- /dev/null
+++ b/vendor/prost-build/README.md
@@ -0,0 +1,27 @@
+[![Documentation](https://docs.rs/prost-build/badge.svg)](https://docs.rs/prost-build/)
+[![Crate](https://img.shields.io/crates/v/prost-build.svg)](https://crates.io/crates/prost-build)
+
+# `prost-build`
+
+`prost-build` makes it easy to generate Rust code from `.proto` files as part of
+a Cargo build. See the crate [documentation](https://docs.rs/prost-build/) for examples
+of how to integrate `prost-build` into a Cargo project.
+
+## `protoc`
+
+`prost-build` uses `protoc` to parse the proto files. There are two ways to make `protoc`
+available for `prost-build`:
+
+* Include `protoc` in your `PATH`. This can be done by following the [`protoc` install instructions].
+* Pass the `PROTOC=<my/path/to/protoc>` environment variable with the path to
+ `protoc`.
+
+[`protoc` install instructions]: https://github.com/protocolbuffers/protobuf#protocol-compiler-installation
+
+## License
+
+`prost-build` is distributed under the terms of the Apache License (Version 2.0).
+
+See [LICENSE](../LICENSE) for details.
+
+Copyright 2017 Dan Burkert
diff --git a/vendor/prost-build/src/ast.rs b/vendor/prost-build/src/ast.rs
new file mode 100644
index 00000000..9a6a0de9
--- /dev/null
+++ b/vendor/prost-build/src/ast.rs
@@ -0,0 +1,452 @@
+use once_cell::sync::Lazy;
+use prost_types::source_code_info::Location;
+#[cfg(feature = "cleanup-markdown")]
+use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};
+use regex::Regex;
+
+/// Comments on a Protobuf item.
+#[derive(Debug, Default, Clone)]
+pub struct Comments {
+ /// Leading detached blocks of comments.
+ pub leading_detached: Vec<Vec<String>>,
+
+ /// Leading comments.
+ pub leading: Vec<String>,
+
+ /// Trailing comments.
+ pub trailing: Vec<String>,
+}
+
+impl Comments {
+ pub(crate) fn from_location(location: &Location) -> Comments {
+ let leading_detached = location
+ .leading_detached_comments
+ .iter()
+ .map(get_lines)
+ .collect();
+ let leading = location
+ .leading_comments
+ .as_ref()
+ .map_or(Vec::new(), get_lines);
+ let trailing = location
+ .trailing_comments
+ .as_ref()
+ .map_or(Vec::new(), get_lines);
+ Comments {
+ leading_detached,
+ leading,
+ trailing,
+ }
+ }
+
+ /// Appends the comments to a buffer with indentation.
+ ///
+ /// Each level of indentation corresponds to four space (' ') characters.
+ pub fn append_with_indent(&self, indent_level: u8, buf: &mut String) {
+ // Append blocks of detached comments.
+ for detached_block in &self.leading_detached {
+ for line in detached_block {
+ for _ in 0..indent_level {
+ buf.push_str(" ");
+ }
+ buf.push_str("//");
+ buf.push_str(&Self::sanitize_line(line));
+ buf.push('\n');
+ }
+ buf.push('\n');
+ }
+
+ // Append leading comments.
+ for line in &self.leading {
+ for _ in 0..indent_level {
+ buf.push_str(" ");
+ }
+ buf.push_str("///");
+ buf.push_str(&Self::sanitize_line(line));
+ buf.push('\n');
+ }
+
+ // Append an empty comment line if there are leading and trailing comments.
+ if !self.leading.is_empty() && !self.trailing.is_empty() {
+ for _ in 0..indent_level {
+ buf.push_str(" ");
+ }
+ buf.push_str("///\n");
+ }
+
+ // Append trailing comments.
+ for line in &self.trailing {
+ for _ in 0..indent_level {
+ buf.push_str(" ");
+ }
+ buf.push_str("///");
+ buf.push_str(&Self::sanitize_line(line));
+ buf.push('\n');
+ }
+ }
+
+ /// Checks whether a RustDoc line should be indented.
+ ///
+ /// Lines should be indented if:
+ /// - they are non-empty, AND
+ /// - they don't already start with a space
+ /// OR
+ /// - they start with several spaces.
+ ///
+ /// The last condition can happen in the case of multi-line Markdown lists
+ /// such as:
+ ///
+ /// - this is a list
+ /// where some elements spans multiple lines
+ /// - but not all elements
+ fn should_indent(sanitized_line: &str) -> bool {
+ let mut chars = sanitized_line.chars();
+ chars
+ .next()
+ .map_or(false, |c| c != ' ' || chars.next() == Some(' '))
+ }
+
+ /// Sanitizes the line for rustdoc by performing the following operations:
+ /// - escape urls as <http://foo.com>
+ /// - escape `[` & `]` if not already escaped and not followed by a parenthesis or bracket
+ fn sanitize_line(line: &str) -> String {
+ static RULE_URL: Lazy<Regex> = Lazy::new(|| Regex::new(r"https?://[^\s)]+").unwrap());
+ static RULE_BRACKETS: Lazy<Regex> =
+ Lazy::new(|| Regex::new(r"(^|[^\]\\])\[(([^\]]*[^\\])?)\]([^(\[]|$)").unwrap());
+
+ let mut s = RULE_URL.replace_all(line, r"<$0>").to_string();
+ s = RULE_BRACKETS.replace_all(&s, r"$1\[$2\]$4").to_string();
+ if Self::should_indent(&s) {
+ s.insert(0, ' ');
+ }
+ s
+ }
+}
+
+/// A service descriptor.
+#[derive(Debug, Clone)]
+pub struct Service {
+ /// The service name in Rust style.
+ pub name: String,
+ /// The service name as it appears in the .proto file.
+ pub proto_name: String,
+ /// The package name as it appears in the .proto file.
+ pub package: String,
+ /// The service comments.
+ pub comments: Comments,
+ /// The service methods.
+ pub methods: Vec<Method>,
+ /// The service options.
+ pub options: prost_types::ServiceOptions,
+}
+
+/// A service method descriptor.
+#[derive(Debug, Clone)]
+pub struct Method {
+ /// The name of the method in Rust style.
+ pub name: String,
+ /// The name of the method as it appears in the .proto file.
+ pub proto_name: String,
+ /// The method comments.
+ pub comments: Comments,
+ /// The input Rust type.
+ pub input_type: String,
+ /// The output Rust type.
+ pub output_type: String,
+ /// The input Protobuf type.
+ pub input_proto_type: String,
+ /// The output Protobuf type.
+ pub output_proto_type: String,
+ /// The method options.
+ pub options: prost_types::MethodOptions,
+ /// Identifies if client streams multiple client messages.
+ pub client_streaming: bool,
+ /// Identifies if server streams multiple server messages.
+ pub server_streaming: bool,
+}
+
+#[cfg(not(feature = "cleanup-markdown"))]
+fn get_lines<S>(comments: S) -> Vec<String>
+where
+ S: AsRef<str>,
+{
+ comments.as_ref().lines().map(str::to_owned).collect()
+}
+
+#[cfg(feature = "cleanup-markdown")]
+fn get_lines<S>(comments: S) -> Vec<String>
+where
+ S: AsRef<str>,
+{
+ let comments = comments.as_ref();
+ let mut buffer = String::with_capacity(comments.len() + 256);
+ let opts = pulldown_cmark_to_cmark::Options {
+ code_block_token_count: 3,
+ ..Default::default()
+ };
+ match pulldown_cmark_to_cmark::cmark_with_options(
+ Parser::new_ext(comments, Options::all() - Options::ENABLE_SMART_PUNCTUATION).map(
+ |event| {
+ fn map_codeblock(kind: CodeBlockKind) -> CodeBlockKind {
+ match kind {
+ CodeBlockKind::Fenced(s) => {
+ if s.as_ref() == "rust" {
+ CodeBlockKind::Fenced("compile_fail".into())
+ } else {
+ CodeBlockKind::Fenced(format!("text,{}", s).into())
+ }
+ }
+ CodeBlockKind::Indented => CodeBlockKind::Fenced("text".into()),
+ }
+ }
+ match event {
+ Event::Start(Tag::CodeBlock(kind)) => {
+ Event::Start(Tag::CodeBlock(map_codeblock(kind)))
+ }
+ Event::End(Tag::CodeBlock(kind)) => {
+ Event::End(Tag::CodeBlock(map_codeblock(kind)))
+ }
+ e => e,
+ }
+ },
+ ),
+ &mut buffer,
+ opts,
+ ) {
+ Ok(_) => buffer.lines().map(str::to_owned).collect(),
+ Err(_) => comments.lines().map(str::to_owned).collect(),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_comment_append_with_indent_leaves_prespaced_lines() {
+ struct TestCases {
+ name: &'static str,
+ input: String,
+ expected: String,
+ }
+
+ let tests = vec![
+ TestCases {
+ name: "existing_space",
+ input: " A line with a single leading space.".to_string(),
+ expected: "/// A line with a single leading space.\n".to_string(),
+ },
+ TestCases {
+ name: "non_existing_space",
+ input: "A line without a single leading space.".to_string(),
+ expected: "/// A line without a single leading space.\n".to_string(),
+ },
+ TestCases {
+ name: "empty",
+ input: "".to_string(),
+ expected: "///\n".to_string(),
+ },
+ TestCases {
+ name: "multiple_leading_spaces",
+ input: " a line with several leading spaces, such as in a markdown list"
+ .to_string(),
+ expected: "/// a line with several leading spaces, such as in a markdown list\n"
+ .to_string(),
+ },
+ ];
+ for t in tests {
+ let input = Comments {
+ leading_detached: vec![],
+ leading: vec![],
+ trailing: vec![t.input],
+ };
+
+ let mut actual = "".to_string();
+ input.append_with_indent(0, &mut actual);
+
+ assert_eq!(t.expected, actual, "failed {}", t.name);
+ }
+ }
+
+ #[test]
+ fn test_comment_append_with_indent_sanitizes_comment_doc_url() {
+ struct TestCases {
+ name: &'static str,
+ input: String,
+ expected: String,
+ }
+
+ let tests = vec![
+ TestCases {
+ name: "valid_http",
+ input: "See https://www.rust-lang.org/".to_string(),
+ expected: "/// See <https://www.rust-lang.org/>\n".to_string(),
+ },
+ TestCases {
+ name: "valid_https",
+ input: "See https://www.rust-lang.org/".to_string(),
+ expected: "/// See <https://www.rust-lang.org/>\n".to_string(),
+ },
+ TestCases {
+ name: "valid_https_parenthesis",
+ input: "See (https://www.rust-lang.org/)".to_string(),
+ expected: "/// See (<https://www.rust-lang.org/>)\n".to_string(),
+ },
+ TestCases {
+ name: "invalid",
+ input: "See note://abc".to_string(),
+ expected: "/// See note://abc\n".to_string(),
+ },
+ ];
+ for t in tests {
+ let input = Comments {
+ leading_detached: vec![],
+ leading: vec![],
+ trailing: vec![t.input],
+ };
+
+ let mut actual = "".to_string();
+ input.append_with_indent(0, &mut actual);
+
+ assert_eq!(t.expected, actual, "failed {}", t.name);
+ }
+ }
+
+ #[test]
+ fn test_comment_append_with_indent_sanitizes_square_brackets() {
+ struct TestCases {
+ name: &'static str,
+ input: String,
+ expected: String,
+ }
+
+ let tests = vec![
+ TestCases {
+ name: "valid_brackets",
+ input: "foo [bar] baz".to_string(),
+ expected: "/// foo \\[bar\\] baz\n".to_string(),
+ },
+ TestCases {
+ name: "invalid_start_bracket",
+ input: "foo [= baz".to_string(),
+ expected: "/// foo [= baz\n".to_string(),
+ },
+ TestCases {
+ name: "invalid_end_bracket",
+ input: "foo =] baz".to_string(),
+ expected: "/// foo =] baz\n".to_string(),
+ },
+ TestCases {
+ name: "invalid_bracket_combination",
+ input: "[0, 9)".to_string(),
+ expected: "/// [0, 9)\n".to_string(),
+ },
+ TestCases {
+ name: "valid_brackets_parenthesis",
+ input: "foo [bar](bar) baz".to_string(),
+ expected: "/// foo [bar](bar) baz\n".to_string(),
+ },
+ TestCases {
+ name: "valid_brackets_end",
+ input: "foo [bar]".to_string(),
+ expected: "/// foo \\[bar\\]\n".to_string(),
+ },
+ TestCases {
+ name: "valid_brackets_no_parenthesis",
+ input: "foo [bar]baz".to_string(),
+ expected: "/// foo \\[bar\\]baz\n".to_string(),
+ },
+ TestCases {
+ name: "valid_empty_brackets",
+ input: "foo []".to_string(),
+ expected: "/// foo \\[\\]\n".to_string(),
+ },
+ TestCases {
+ name: "valid_empty_brackets_parenthesis",
+ input: "foo []()".to_string(),
+ expected: "/// foo []()\n".to_string(),
+ },
+ TestCases {
+ name: "valid_brackets_brackets",
+ input: "foo [bar][bar] baz".to_string(),
+ expected: "/// foo [bar][bar] baz\n".to_string(),
+ },
+ TestCases {
+ name: "valid_brackets_brackets_end",
+ input: "foo [bar][baz]".to_string(),
+ expected: "/// foo [bar][baz]\n".to_string(),
+ },
+ TestCases {
+ name: "valid_brackets_brackets_all",
+ input: "[bar][baz]".to_string(),
+ expected: "/// [bar][baz]\n".to_string(),
+ },
+ TestCases {
+ name: "escaped_brackets",
+ input: "\\[bar\\]\\[baz\\]".to_string(),
+ expected: "/// \\[bar\\]\\[baz\\]\n".to_string(),
+ },
+ TestCases {
+ name: "escaped_empty_brackets",
+ input: "\\[\\]\\[\\]".to_string(),
+ expected: "/// \\[\\]\\[\\]\n".to_string(),
+ },
+ ];
+ for t in tests {
+ let input = Comments {
+ leading_detached: vec![],
+ leading: vec![],
+ trailing: vec![t.input],
+ };
+
+ let mut actual = "".to_string();
+ input.append_with_indent(0, &mut actual);
+
+ assert_eq!(t.expected, actual, "failed {}", t.name);
+ }
+ }
+
+ #[test]
+ fn test_codeblocks() {
+ struct TestCase {
+ name: &'static str,
+ input: &'static str,
+ #[allow(unused)]
+ cleanedup_expected: Vec<&'static str>,
+ }
+
+ let tests = vec![
+ TestCase {
+ name: "unlabelled_block",
+ input: " thingy\n",
+ cleanedup_expected: vec!["", "```text", "thingy", "```"],
+ },
+ TestCase {
+ name: "rust_block",
+ input: "```rust\nfoo.bar()\n```\n",
+ cleanedup_expected: vec!["", "```compile_fail", "foo.bar()", "```"],
+ },
+ TestCase {
+ name: "js_block",
+ input: "```javascript\nfoo.bar()\n```\n",
+ cleanedup_expected: vec!["", "```text,javascript", "foo.bar()", "```"],
+ },
+ ];
+
+ for t in tests {
+ let loc = Location {
+ path: vec![],
+ span: vec![],
+ leading_comments: Some(t.input.into()),
+ trailing_comments: None,
+ leading_detached_comments: vec![],
+ };
+ let comments = Comments::from_location(&loc);
+ #[cfg(feature = "cleanup-markdown")]
+ let expected = t.cleanedup_expected;
+ #[cfg(not(feature = "cleanup-markdown"))]
+ let expected: Vec<&str> = t.input.lines().collect();
+ assert_eq!(expected, comments.leading, "failed {}", t.name);
+ }
+ }
+}
diff --git a/vendor/prost-build/src/code_generator.rs b/vendor/prost-build/src/code_generator.rs
new file mode 100644
index 00000000..6ca8581a
--- /dev/null
+++ b/vendor/prost-build/src/code_generator.rs
@@ -0,0 +1,1183 @@
+use std::ascii;
+use std::borrow::Cow;
+use std::collections::{HashMap, HashSet};
+use std::iter;
+
+use itertools::{Either, Itertools};
+use log::debug;
+use multimap::MultiMap;
+use prost_types::field_descriptor_proto::{Label, Type};
+use prost_types::source_code_info::Location;
+use prost_types::{
+ DescriptorProto, EnumDescriptorProto, EnumValueDescriptorProto, FieldDescriptorProto,
+ FieldOptions, FileDescriptorProto, OneofDescriptorProto, ServiceDescriptorProto,
+ SourceCodeInfo,
+};
+
+use crate::ast::{Comments, Method, Service};
+use crate::extern_paths::ExternPaths;
+use crate::ident::{strip_enum_prefix, to_snake, to_upper_camel};
+use crate::message_graph::MessageGraph;
+use crate::Config;
+
+mod c_escaping;
+use c_escaping::unescape_c_escape_string;
+
+mod syntax;
+use syntax::Syntax;
+
/// Stateful generator that walks a single `FileDescriptorProto` and appends
/// the corresponding Rust source text to `buf`.
pub struct CodeGenerator<'a> {
    config: &'a mut Config,
    // Protobuf package of the file being generated (may be empty).
    package: String,
    // Names of the enclosing messages while generating nested types.
    type_path: Vec<String>,
    // Comment locations, filtered and sorted by `generate` so that
    // `location()` can binary-search them; `None` if protoc supplied none.
    source_info: Option<SourceCodeInfo>,
    syntax: Syntax,
    message_graph: &'a MessageGraph,
    extern_paths: &'a ExternPaths,
    // Current indentation level, in units of four spaces.
    depth: u8,
    // Current descriptor path (see `SourceCodeInfo.Location.path`).
    path: Vec<i32>,
    buf: &'a mut String,
}
+
/// Appends `depth` levels of four-space indentation to `buf`.
fn push_indent(buf: &mut String, depth: u8) {
    buf.push_str(&"    ".repeat(usize::from(depth)));
}
+
+fn prost_path(config: &Config) -> &str {
+ config.prost_path.as_deref().unwrap_or("::prost")
+}
+
/// A non-oneof message field paired with its index in the descriptor's
/// `field` list (needed to look up comments by path).
struct Field {
    descriptor: FieldDescriptorProto,
    path_index: i32,
}

impl Field {
    fn new(descriptor: FieldDescriptorProto, path_index: i32) -> Self {
        Self {
            descriptor,
            path_index,
        }
    }

    /// The snake_case Rust identifier generated for this field.
    fn rust_name(&self) -> String {
        to_snake(self.descriptor.name())
    }
}
+
/// A oneof declaration together with the fields that belong to it and its
/// index in the descriptor's `oneof_decl` list (for comment lookup).
struct OneofField {
    descriptor: OneofDescriptorProto,
    fields: Vec<Field>,
    path_index: i32,
}

impl OneofField {
    fn new(descriptor: OneofDescriptorProto, fields: Vec<Field>, path_index: i32) -> Self {
        Self {
            descriptor,
            fields,
            path_index,
        }
    }

    /// The snake_case Rust identifier generated for this oneof.
    fn rust_name(&self) -> String {
        to_snake(self.descriptor.name())
    }
}
+
+impl<'a> CodeGenerator<'a> {
    /// Entry point: generates Rust code for one `.proto` file descriptor and
    /// appends it to `buf`.
    pub fn generate(
        config: &mut Config,
        message_graph: &MessageGraph,
        extern_paths: &ExternPaths,
        file: FileDescriptorProto,
        buf: &mut String,
    ) {
        // Keep only locations with a non-empty, even-length path (complete
        // element paths), then sort by path so `location()` can binary-search.
        let source_info = file.source_code_info.map(|mut s| {
            s.location.retain(|loc| {
                let len = loc.path.len();
                len > 0 && len % 2 == 0
            });
            s.location.sort_by(|a, b| a.path.cmp(&b.path));
            s
        });

        let mut code_gen = CodeGenerator {
            config,
            package: file.package.unwrap_or_default(),
            type_path: Vec::new(),
            source_info,
            syntax: file.syntax.as_deref().into(),
            message_graph,
            extern_paths,
            depth: 0,
            path: Vec::new(),
            buf,
        };

        debug!(
            "file: {:?}, package: {:?}",
            file.name.as_ref().unwrap(),
            code_gen.package
        );

        // 4 = FileDescriptorProto.message_type field number.
        code_gen.path.push(4);
        for (idx, message) in file.message_type.into_iter().enumerate() {
            code_gen.path.push(idx as i32);
            code_gen.append_message(message);
            code_gen.path.pop();
        }
        code_gen.path.pop();

        // 5 = FileDescriptorProto.enum_type field number.
        code_gen.path.push(5);
        for (idx, desc) in file.enum_type.into_iter().enumerate() {
            code_gen.path.push(idx as i32);
            code_gen.append_enum(desc);
            code_gen.path.pop();
        }
        code_gen.path.pop();

        // Services are only emitted when a service generator is configured.
        if code_gen.config.service_generator.is_some() {
            // 6 = FileDescriptorProto.service field number.
            code_gen.path.push(6);
            for (idx, service) in file.service.into_iter().enumerate() {
                code_gen.path.push(idx as i32);
                code_gen.push_service(service);
                code_gen.path.pop();
            }

            if let Some(service_generator) = code_gen.config.service_generator.as_mut() {
                service_generator.finalize(code_gen.buf);
            }

            code_gen.path.pop();
        }
    }
+
    /// Generates a Rust struct (plus a nested module for its nested types,
    /// enums, and oneofs) for a single protobuf message.
    fn append_message(&mut self, message: DescriptorProto) {
        debug!("  message: {:?}", message.name());

        let message_name = message.name().to_string();
        let fq_message_name = self.fq_name(&message_name);

        // Skip external types.
        if self.extern_paths.resolve_ident(&fq_message_name).is_some() {
            return;
        }

        // Split the nested message types into a vector of normal nested message types, and a map
        // of the map field entry types. The path index of the nested message types is preserved so
        // that comments can be retrieved.
        type NestedTypes = Vec<(DescriptorProto, usize)>;
        type MapTypes = HashMap<String, (FieldDescriptorProto, FieldDescriptorProto)>;
        let (nested_types, map_types): (NestedTypes, MapTypes) = message
            .nested_type
            .into_iter()
            .enumerate()
            .partition_map(|(idx, nested_type)| {
                if nested_type
                    .options
                    .as_ref()
                    .and_then(|options| options.map_entry)
                    .unwrap_or(false)
                {
                    // Synthetic map-entry messages always have exactly a
                    // "key" field and a "value" field, per the spec.
                    let key = nested_type.field[0].clone();
                    let value = nested_type.field[1].clone();
                    assert_eq!("key", key.name());
                    assert_eq!("value", value.name());

                    let name = format!("{}.{}", &fq_message_name, nested_type.name());
                    Either::Right((name, (key, value)))
                } else {
                    Either::Left((nested_type, idx))
                }
            });

        // Split the fields into a vector of the normal fields, and oneof fields.
        // Path indexes are preserved so that comments can be retrieved.
        type OneofFieldsByIndex = MultiMap<i32, Field>;
        let (fields, mut oneof_map): (Vec<Field>, OneofFieldsByIndex) = message
            .field
            .into_iter()
            .enumerate()
            .partition_map(|(idx, proto)| {
                let idx = idx as i32;
                if proto.proto3_optional.unwrap_or(false) {
                    // proto3 optional fields are wrapped in a synthetic oneof;
                    // treat them as plain optional fields instead.
                    Either::Left(Field::new(proto, idx))
                } else if let Some(oneof_index) = proto.oneof_index {
                    Either::Right((oneof_index, Field::new(proto, idx)))
                } else {
                    Either::Left(Field::new(proto, idx))
                }
            });
        // Optional fields create a synthetic oneof that we want to skip
        let oneof_fields: Vec<OneofField> = message
            .oneof_decl
            .into_iter()
            .enumerate()
            .filter_map(move |(idx, proto)| {
                let idx = idx as i32;
                // Synthetic oneofs have no entry in `oneof_map` and drop out.
                oneof_map
                    .remove(&idx)
                    .map(|fields| OneofField::new(proto, fields, idx))
            })
            .collect();

        self.append_doc(&fq_message_name, None);
        self.append_type_attributes(&fq_message_name);
        self.append_message_attributes(&fq_message_name);
        self.push_indent();
        self.buf
            .push_str("#[allow(clippy::derive_partial_eq_without_eq)]\n");
        self.buf.push_str(&format!(
            "#[derive(Clone, PartialEq, {}::Message)]\n",
            prost_path(self.config)
        ));
        self.append_skip_debug(&fq_message_name);
        self.push_indent();
        self.buf.push_str("pub struct ");
        self.buf.push_str(&to_upper_camel(&message_name));
        self.buf.push_str(" {\n");

        self.depth += 1;
        // 2 = DescriptorProto.field field number.
        self.path.push(2);
        for field in &fields {
            self.path.push(field.path_index);
            // Fields whose type is a synthetic map-entry message become
            // map fields; everything else is a regular field.
            match field
                .descriptor
                .type_name
                .as_ref()
                .and_then(|type_name| map_types.get(type_name))
            {
                Some((key, value)) => self.append_map_field(&fq_message_name, field, key, value),
                None => self.append_field(&fq_message_name, field),
            }
            self.path.pop();
        }
        self.path.pop();

        // 8 = DescriptorProto.oneof_decl field number.
        self.path.push(8);
        for oneof in &oneof_fields {
            self.path.push(oneof.path_index);
            self.append_oneof_field(&message_name, &fq_message_name, oneof);
            self.path.pop();
        }
        self.path.pop();

        self.depth -= 1;
        self.push_indent();
        self.buf.push_str("}\n");

        // Nested types live in a `pub mod <snake_case_message_name>`.
        if !message.enum_type.is_empty() || !nested_types.is_empty() || !oneof_fields.is_empty() {
            self.push_mod(&message_name);
            // 3 = DescriptorProto.nested_type field number.
            self.path.push(3);
            for (nested_type, idx) in nested_types {
                self.path.push(idx as i32);
                self.append_message(nested_type);
                self.path.pop();
            }
            self.path.pop();

            // 4 = DescriptorProto.enum_type field number.
            self.path.push(4);
            for (idx, nested_enum) in message.enum_type.into_iter().enumerate() {
                self.path.push(idx as i32);
                self.append_enum(nested_enum);
                self.path.pop();
            }
            self.path.pop();

            for oneof in &oneof_fields {
                self.append_oneof(&fq_message_name, oneof);
            }

            self.pop_mod();
        }

        if self.config.enable_type_names {
            self.append_type_name(&message_name, &fq_message_name);
        }
    }
+
+ fn append_type_name(&mut self, message_name: &str, fq_message_name: &str) {
+ self.buf.push_str(&format!(
+ "impl {}::Name for {} {{\n",
+ self.config.prost_path.as_deref().unwrap_or("::prost"),
+ to_upper_camel(message_name)
+ ));
+ self.depth += 1;
+
+ self.buf.push_str(&format!(
+ "const NAME: &'static str = \"{}\";\n",
+ message_name,
+ ));
+ self.buf.push_str(&format!(
+ "const PACKAGE: &'static str = \"{}\";\n",
+ self.package,
+ ));
+
+ let prost_path = self.config.prost_path.as_deref().unwrap_or("::prost");
+ let string_path = format!("{prost_path}::alloc::string::String");
+
+ let full_name = format!(
+ "{}{}{}{}{message_name}",
+ self.package.trim_matches('.'),
+ if self.package.is_empty() { "" } else { "." },
+ self.type_path.join("."),
+ if self.type_path.is_empty() { "" } else { "." },
+ );
+ let domain_name = self
+ .config
+ .type_name_domains
+ .get_first(fq_message_name)
+ .map_or("", |name| name.as_str());
+
+ self.buf.push_str(&format!(
+ r#"fn full_name() -> {string_path} {{ "{full_name}".into() }}"#,
+ ));
+
+ self.buf.push_str(&format!(
+ r#"fn type_url() -> {string_path} {{ "{domain_name}/{full_name}".into() }}"#,
+ ));
+
+ self.depth -= 1;
+ self.buf.push_str("}\n");
+ }
+
    /// Emits configured `Config::type_attribute` lines for `fq_message_name`.
    fn append_type_attributes(&mut self, fq_message_name: &str) {
        // Fully-qualified names must start with a dot.
        assert_eq!(b'.', fq_message_name.as_bytes()[0]);
        for attribute in self.config.type_attributes.get(fq_message_name) {
            push_indent(self.buf, self.depth);
            self.buf.push_str(attribute);
            self.buf.push('\n');
        }
    }

    /// Emits configured `Config::message_attribute` lines for the message.
    fn append_message_attributes(&mut self, fq_message_name: &str) {
        assert_eq!(b'.', fq_message_name.as_bytes()[0]);
        for attribute in self.config.message_attributes.get(fq_message_name) {
            push_indent(self.buf, self.depth);
            self.buf.push_str(attribute);
            self.buf.push('\n');
        }
    }

    /// Whether the config requests skipping the `Debug` derive for this type.
    fn should_skip_debug(&self, fq_message_name: &str) -> bool {
        assert_eq!(b'.', fq_message_name.as_bytes()[0]);
        self.config.skip_debug.get(fq_message_name).next().is_some()
    }

    /// Emits `#[prost(skip_debug)]` when the config requests it.
    fn append_skip_debug(&mut self, fq_message_name: &str) {
        if self.should_skip_debug(fq_message_name) {
            push_indent(self.buf, self.depth);
            self.buf.push_str("#[prost(skip_debug)]");
            self.buf.push('\n');
        }
    }

    /// Emits configured `Config::enum_attribute` lines for the enum/oneof.
    fn append_enum_attributes(&mut self, fq_message_name: &str) {
        assert_eq!(b'.', fq_message_name.as_bytes()[0]);
        for attribute in self.config.enum_attributes.get(fq_message_name) {
            push_indent(self.buf, self.depth);
            self.buf.push_str(attribute);
            self.buf.push('\n');
        }
    }

    /// Emits configured `Config::field_attribute` lines for one field.
    fn append_field_attributes(&mut self, fq_message_name: &str, field_name: &str) {
        assert_eq!(b'.', fq_message_name.as_bytes()[0]);
        for attribute in self
            .config
            .field_attributes
            .get_field(fq_message_name, field_name)
        {
            push_indent(self.buf, self.depth);
            self.buf.push_str(attribute);
            self.buf.push('\n');
        }
    }
+
    /// Generates the `#[prost(...)]` attribute and `pub <name>: <type>`
    /// declaration for a single non-map message field.
    fn append_field(&mut self, fq_message_name: &str, field: &Field) {
        let type_ = field.descriptor.r#type();
        let repeated = field.descriptor.label == Some(Label::Repeated as i32);
        let deprecated = self.deprecated(&field.descriptor);
        let optional = self.optional(&field.descriptor);
        let boxed = self.boxed(&field.descriptor, fq_message_name, None);
        let ty = self.resolve_type(&field.descriptor, fq_message_name);

        debug!(
            "    field: {:?}, type: {:?}, boxed: {}",
            field.descriptor.name(),
            ty,
            boxed
        );

        self.append_doc(fq_message_name, Some(field.descriptor.name()));

        if deprecated {
            self.push_indent();
            self.buf.push_str("#[deprecated]\n");
        }

        self.push_indent();
        self.buf.push_str("#[prost(");
        let type_tag = self.field_type_tag(&field.descriptor);
        self.buf.push_str(&type_tag);

        // Bytes fields carry the configured container (Vec<u8> vs Bytes)
        // as an annotation, e.g. `bytes="vec"`.
        if type_ == Type::Bytes {
            let bytes_type = self
                .config
                .bytes_type
                .get_first_field(fq_message_name, field.descriptor.name())
                .copied()
                .unwrap_or_default();
            self.buf
                .push_str(&format!("={:?}", bytes_type.annotation()));
        }

        match field.descriptor.label() {
            Label::Optional => {
                if optional {
                    self.buf.push_str(", optional");
                }
            }
            Label::Required => self.buf.push_str(", required"),
            Label::Repeated => {
                self.buf.push_str(", repeated");
                // Mark explicitly unpacked only when packing is possible but
                // not requested (proto3 defaults to packed).
                if can_pack(&field.descriptor)
                    && !field
                        .descriptor
                        .options
                        .as_ref()
                        .map_or(self.syntax == Syntax::Proto3, |options| options.packed())
                {
                    self.buf.push_str(", packed=\"false\"");
                }
            }
        }

        if boxed {
            self.buf.push_str(", boxed");
        }
        self.buf.push_str(", tag=\"");
        self.buf.push_str(&field.descriptor.number().to_string());

        // Proto2 default values are embedded in the attribute.
        if let Some(ref default) = field.descriptor.default_value {
            self.buf.push_str("\", default=\"");
            if type_ == Type::Bytes {
                // Bytes defaults come C-escaped from protoc; re-escape them
                // into a Rust byte-string literal.
                self.buf.push_str("b\\\"");
                for b in unescape_c_escape_string(default) {
                    self.buf.extend(
                        ascii::escape_default(b).flat_map(|c| (c as char).escape_default()),
                    );
                }
                self.buf.push_str("\\\"");
            } else if type_ == Type::Enum {
                let mut enum_value = to_upper_camel(default);
                if self.config.strip_enum_prefix {
                    // Field types are fully qualified, so we extract
                    // the last segment and strip it from the left
                    // side of the default value.
                    let enum_type = field
                        .descriptor
                        .type_name
                        .as_ref()
                        .and_then(|ty| ty.split('.').last())
                        .unwrap();

                    enum_value = strip_enum_prefix(&to_upper_camel(enum_type), &enum_value)
                }
                self.buf.push_str(&enum_value);
            } else {
                self.buf.push_str(&default.escape_default().to_string());
            }
        }

        self.buf.push_str("\")]\n");
        self.append_field_attributes(fq_message_name, field.descriptor.name());
        self.push_indent();
        self.buf.push_str("pub ");
        self.buf.push_str(&field.rust_name());
        self.buf.push_str(": ");

        let prost_path = prost_path(self.config);

        // Wrap the resolved type: Vec<...> for repeated, Option<...> for
        // optional, Box<...> for boxed (innermost).
        if repeated {
            self.buf
                .push_str(&format!("{}::alloc::vec::Vec<", prost_path));
        } else if optional {
            self.buf.push_str("::core::option::Option<");
        }
        if boxed {
            self.buf
                .push_str(&format!("{}::alloc::boxed::Box<", prost_path));
        }
        self.buf.push_str(&ty);
        if boxed {
            self.buf.push('>');
        }
        if repeated || optional {
            self.buf.push('>');
        }
        self.buf.push_str(",\n");
    }
+
    /// Generates the declaration for a map field, using the `key`/`value`
    /// descriptors extracted from the synthetic map-entry message.
    fn append_map_field(
        &mut self,
        fq_message_name: &str,
        field: &Field,
        key: &FieldDescriptorProto,
        value: &FieldDescriptorProto,
    ) {
        let key_ty = self.resolve_type(key, fq_message_name);
        let value_ty = self.resolve_type(value, fq_message_name);

        debug!(
            "    map field: {:?}, key type: {:?}, value type: {:?}",
            field.descriptor.name(),
            key_ty,
            value_ty
        );

        self.append_doc(fq_message_name, Some(field.descriptor.name()));
        self.push_indent();

        // Container choice (HashMap vs BTreeMap) is configurable per field.
        let map_type = self
            .config
            .map_type
            .get_first_field(fq_message_name, field.descriptor.name())
            .copied()
            .unwrap_or_default();
        let key_tag = self.field_type_tag(key);
        let value_tag = self.map_value_type_tag(value);

        self.buf.push_str(&format!(
            "#[prost({}=\"{}, {}\", tag=\"{}\")]\n",
            map_type.annotation(),
            key_tag,
            value_tag,
            field.descriptor.number()
        ));
        self.append_field_attributes(fq_message_name, field.descriptor.name());
        self.push_indent();
        self.buf.push_str(&format!(
            "pub {}: {}<{}, {}>,\n",
            field.rust_name(),
            map_type.rust_type(),
            key_ty,
            value_ty
        ));
    }
+
    /// Generates the `Option<oneof-enum>` struct field for a oneof; the enum
    /// itself is emitted later by `append_oneof` inside the nested module.
    fn append_oneof_field(
        &mut self,
        message_name: &str,
        fq_message_name: &str,
        oneof: &OneofField,
    ) {
        // The oneof enum lives in the message's nested module, so the type is
        // referenced as `<snake_message>::<CamelOneof>`.
        let type_name = format!(
            "{}::{}",
            to_snake(message_name),
            to_upper_camel(oneof.descriptor.name())
        );
        self.append_doc(fq_message_name, None);
        self.push_indent();
        self.buf.push_str(&format!(
            "#[prost(oneof=\"{}\", tags=\"{}\")]\n",
            type_name,
            oneof
                .fields
                .iter()
                .map(|field| field.descriptor.number())
                .join(", "),
        ));
        self.append_field_attributes(fq_message_name, oneof.descriptor.name());
        self.push_indent();
        self.buf.push_str(&format!(
            "pub {}: ::core::option::Option<{}>,\n",
            oneof.rust_name(),
            type_name
        ));
    }
+
    /// Generates the Rust enum for a oneof (one variant per member field),
    /// emitted inside the owning message's nested module.
    fn append_oneof(&mut self, fq_message_name: &str, oneof: &OneofField) {
        // Temporarily point the descriptor path at the oneof declaration
        // (8 = DescriptorProto.oneof_decl) to fetch its comments.
        self.path.push(8);
        self.path.push(oneof.path_index);
        self.append_doc(fq_message_name, None);
        self.path.pop();
        self.path.pop();

        let oneof_name = format!("{}.{}", fq_message_name, oneof.descriptor.name());
        self.append_type_attributes(&oneof_name);
        self.append_enum_attributes(&oneof_name);
        self.push_indent();
        self.buf
            .push_str("#[allow(clippy::derive_partial_eq_without_eq)]\n");
        self.buf.push_str(&format!(
            "#[derive(Clone, PartialEq, {}::Oneof)]\n",
            prost_path(self.config)
        ));
        self.append_skip_debug(fq_message_name);
        self.push_indent();
        self.buf.push_str("pub enum ");
        self.buf.push_str(&to_upper_camel(oneof.descriptor.name()));
        self.buf.push_str(" {\n");

        // 2 = DescriptorProto.field: member fields are addressed through the
        // message's field list for comment lookup.
        self.path.push(2);
        self.depth += 1;
        for field in &oneof.fields {
            self.path.push(field.path_index);
            self.append_doc(fq_message_name, Some(field.descriptor.name()));
            self.path.pop();

            self.push_indent();
            let ty_tag = self.field_type_tag(&field.descriptor);
            self.buf.push_str(&format!(
                "#[prost({}, tag=\"{}\")]\n",
                ty_tag,
                field.descriptor.number()
            ));
            self.append_field_attributes(&oneof_name, field.descriptor.name());

            self.push_indent();
            let ty = self.resolve_type(&field.descriptor, fq_message_name);

            let boxed = self.boxed(
                &field.descriptor,
                fq_message_name,
                Some(oneof.descriptor.name()),
            );

            debug!(
                "    oneof: {:?}, type: {:?}, boxed: {}",
                field.descriptor.name(),
                ty,
                boxed
            );

            if boxed {
                self.buf.push_str(&format!(
                    "{}(::prost::alloc::boxed::Box<{}>),\n",
                    to_upper_camel(field.descriptor.name()),
                    ty
                ));
            } else {
                self.buf.push_str(&format!(
                    "{}({}),\n",
                    to_upper_camel(field.descriptor.name()),
                    ty
                ));
            }
        }
        self.depth -= 1;
        self.path.pop();

        self.push_indent();
        self.buf.push_str("}\n");
    }
+
+ fn location(&self) -> Option<&Location> {
+ let source_info = self.source_info.as_ref()?;
+ let idx = source_info
+ .location
+ .binary_search_by_key(&&self.path[..], |location| &location.path[..])
+ .unwrap();
+ Some(&source_info.location[idx])
+ }
+
+ fn append_doc(&mut self, fq_name: &str, field_name: Option<&str>) {
+ let append_doc = if let Some(field_name) = field_name {
+ self.config
+ .disable_comments
+ .get_first_field(fq_name, field_name)
+ .is_none()
+ } else {
+ self.config.disable_comments.get(fq_name).next().is_none()
+ };
+ if append_doc {
+ if let Some(comments) = self.location().map(Comments::from_location) {
+ comments.append_with_indent(self.depth, self.buf);
+ }
+ }
+ }
+
    /// Generates a Rust enum for a protobuf enum, plus an `impl` block with
    /// `as_str_name` / `from_str_name` conversions to the proto value names.
    fn append_enum(&mut self, desc: EnumDescriptorProto) {
        debug!("  enum: {:?}", desc.name());

        let proto_enum_name = desc.name();
        let enum_name = to_upper_camel(proto_enum_name);

        let enum_values = &desc.value;
        let fq_proto_enum_name = self.fq_name(proto_enum_name);

        // Skip enums mapped to external Rust types.
        if self
            .extern_paths
            .resolve_ident(&fq_proto_enum_name)
            .is_some()
        {
            return;
        }

        self.append_doc(&fq_proto_enum_name, None);
        self.append_type_attributes(&fq_proto_enum_name);
        self.append_enum_attributes(&fq_proto_enum_name);
        self.push_indent();
        // `Debug` is omitted when the config requests skip_debug.
        let dbg = if self.should_skip_debug(&fq_proto_enum_name) {
            ""
        } else {
            "Debug, "
        };
        self.buf.push_str(&format!(
            "#[derive(Clone, Copy, {}PartialEq, Eq, Hash, PartialOrd, Ord, {}::Enumeration)]\n",
            dbg,
            prost_path(self.config),
        ));
        self.push_indent();
        self.buf.push_str("#[repr(i32)]\n");
        self.push_indent();
        self.buf.push_str("pub enum ");
        self.buf.push_str(&enum_name);
        self.buf.push_str(" {\n");

        // Deduplicated (allow_alias) variants with prefix-stripped names.
        let variant_mappings =
            build_enum_value_mappings(&enum_name, self.config.strip_enum_prefix, enum_values);

        self.depth += 1;
        // 2 = EnumDescriptorProto.value field number.
        self.path.push(2);
        for variant in variant_mappings.iter() {
            self.path.push(variant.path_idx as i32);

            self.append_doc(&fq_proto_enum_name, Some(variant.proto_name));
            self.append_field_attributes(&fq_proto_enum_name, variant.proto_name);
            self.push_indent();
            self.buf.push_str(&variant.generated_variant_name);
            self.buf.push_str(" = ");
            self.buf.push_str(&variant.proto_number.to_string());
            self.buf.push_str(",\n");

            self.path.pop();
        }

        self.path.pop();
        self.depth -= 1;

        self.push_indent();
        self.buf.push_str("}\n");

        self.push_indent();
        self.buf.push_str("impl ");
        self.buf.push_str(&enum_name);
        self.buf.push_str(" {\n");
        self.depth += 1;
        self.path.push(2);

        self.push_indent();
        self.buf.push_str(
            "/// String value of the enum field names used in the ProtoBuf definition.\n",
        );
        self.push_indent();
        self.buf.push_str("///\n");
        self.push_indent();
        self.buf.push_str(
            "/// The values are not transformed in any way and thus are considered stable\n",
        );
        self.push_indent();
        self.buf.push_str(
            "/// (if the ProtoBuf definition does not change) and safe for programmatic use.\n",
        );
        self.push_indent();
        self.buf
            .push_str("pub fn as_str_name(&self) -> &'static str {\n");
        self.depth += 1;

        self.push_indent();
        self.buf.push_str("match self {\n");
        self.depth += 1;

        for variant in variant_mappings.iter() {
            self.push_indent();
            self.buf.push_str(&enum_name);
            self.buf.push_str("::");
            self.buf.push_str(&variant.generated_variant_name);
            self.buf.push_str(" => \"");
            self.buf.push_str(variant.proto_name);
            self.buf.push_str("\",\n");
        }

        self.depth -= 1;
        self.push_indent();
        self.buf.push_str("}\n"); // End of match

        self.depth -= 1;
        self.push_indent();
        self.buf.push_str("}\n"); // End of as_str_name()

        self.push_indent();
        self.buf
            .push_str("/// Creates an enum from field names used in the ProtoBuf definition.\n");

        self.push_indent();
        self.buf
            .push_str("pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {\n");
        self.depth += 1;

        self.push_indent();
        self.buf.push_str("match value {\n");
        self.depth += 1;

        for variant in variant_mappings.iter() {
            self.push_indent();
            self.buf.push('\"');
            self.buf.push_str(variant.proto_name);
            self.buf.push_str("\" => Some(Self::");
            self.buf.push_str(&variant.generated_variant_name);
            self.buf.push_str("),\n");
        }
        self.push_indent();
        self.buf.push_str("_ => None,\n");

        self.depth -= 1;
        self.push_indent();
        self.buf.push_str("}\n"); // End of match

        self.depth -= 1;
        self.push_indent();
        self.buf.push_str("}\n"); // End of from_str_name()

        self.path.pop();
        self.depth -= 1;
        self.push_indent();
        self.buf.push_str("}\n"); // End of impl
    }
+
    /// Converts a `ServiceDescriptorProto` into the crate's `Service` AST and
    /// hands it to the configured service generator.
    fn push_service(&mut self, service: ServiceDescriptorProto) {
        let name = service.name().to_owned();
        debug!("  service: {:?}", name);

        let comments = self
            .location()
            .map(Comments::from_location)
            .unwrap_or_default();

        // 2 = ServiceDescriptorProto.method field number.
        self.path.push(2);
        let methods = service
            .method
            .into_iter()
            .enumerate()
            .map(|(idx, mut method)| {
                debug!("   method: {:?}", method.name());

                self.path.push(idx as i32);
                let comments = self
                    .location()
                    .map(Comments::from_location)
                    .unwrap_or_default();
                self.path.pop();

                let name = method.name.take().unwrap();
                let input_proto_type = method.input_type.take().unwrap();
                let output_proto_type = method.output_type.take().unwrap();
                let input_type = self.resolve_ident(&input_proto_type);
                let output_type = self.resolve_ident(&output_proto_type);
                let client_streaming = method.client_streaming();
                let server_streaming = method.server_streaming();

                Method {
                    name: to_snake(&name),
                    proto_name: name,
                    comments,
                    input_type,
                    output_type,
                    input_proto_type,
                    output_proto_type,
                    options: method.options.unwrap_or_default(),
                    client_streaming,
                    server_streaming,
                }
            })
            .collect();
        self.path.pop();

        let service = Service {
            name: to_upper_camel(&name),
            proto_name: name,
            package: self.package.clone(),
            comments,
            methods,
            options: service.options.unwrap_or_default(),
        };

        if let Some(service_generator) = self.config.service_generator.as_mut() {
            service_generator.generate(service, self.buf)
        }
    }
+
    /// Appends indentation for the generator's current nesting depth.
    fn push_indent(&mut self) {
        push_indent(self.buf, self.depth);
    }
+
    /// Opens a `pub mod <snake_case(module)>` block for nested types and
    /// records the module on `type_path` for name resolution.
    fn push_mod(&mut self, module: &str) {
        self.push_indent();
        self.buf.push_str("/// Nested message and enum types in `");
        self.buf.push_str(module);
        self.buf.push_str("`.\n");

        self.push_indent();
        self.buf.push_str("pub mod ");
        self.buf.push_str(&to_snake(module));
        self.buf.push_str(" {\n");

        self.type_path.push(module.into());

        self.depth += 1;
    }
+
    /// Closes the module opened by `push_mod`, undoing its depth and
    /// `type_path` changes.
    fn pop_mod(&mut self) {
        self.depth -= 1;

        self.type_path.pop();

        self.push_indent();
        self.buf.push_str("}\n");
    }
+
    /// Maps a protobuf field type to the Rust type used in generated code.
    fn resolve_type(&self, field: &FieldDescriptorProto, fq_message_name: &str) -> String {
        match field.r#type() {
            Type::Float => String::from("f32"),
            Type::Double => String::from("f64"),
            Type::Uint32 | Type::Fixed32 => String::from("u32"),
            Type::Uint64 | Type::Fixed64 => String::from("u64"),
            // Enums are represented as raw i32 in struct fields.
            Type::Int32 | Type::Sfixed32 | Type::Sint32 | Type::Enum => String::from("i32"),
            Type::Int64 | Type::Sfixed64 | Type::Sint64 => String::from("i64"),
            Type::Bool => String::from("bool"),
            Type::String => format!("{}::alloc::string::String", prost_path(self.config)),
            // Bytes container (Vec<u8> vs Bytes) is per-field configurable.
            Type::Bytes => self
                .config
                .bytes_type
                .get_first_field(fq_message_name, field.name())
                .copied()
                .unwrap_or_default()
                .rust_type()
                .to_owned(),
            // Message/group types resolve to a Rust path.
            Type::Group | Type::Message => self.resolve_ident(field.type_name()),
        }
    }
+
    /// Resolves a fully-qualified protobuf identifier (starting with `.`)
    /// to a Rust path relative to the module currently being generated,
    /// emitting `super::` components as needed.
    fn resolve_ident(&self, pb_ident: &str) -> String {
        // protoc should always give fully qualified identifiers.
        assert_eq!(".", &pb_ident[..1]);

        // Externally-mapped types win outright.
        if let Some(proto_ident) = self.extern_paths.resolve_ident(pb_ident) {
            return proto_ident;
        }

        // Module path of the code being generated: package segments plus
        // the enclosing message modules.
        let mut local_path = self
            .package
            .split('.')
            .chain(self.type_path.iter().map(String::as_str))
            .peekable();

        // If no package is specified the start of the package name will be '.'
        // and split will return an empty string ("") which breaks resolution
        // The fix to this is to ignore the first item if it is empty.
        if local_path.peek().map_or(false, |s| s.is_empty()) {
            local_path.next();
        }

        // Separate the target's module path from its final type name.
        let mut ident_path = pb_ident[1..].split('.');
        let ident_type = ident_path.next_back().unwrap();
        let mut ident_path = ident_path.peekable();

        // Skip path elements in common.
        while local_path.peek().is_some() && local_path.peek() == ident_path.peek() {
            local_path.next();
            ident_path.next();
        }

        // One `super` per remaining local segment, then descend into the
        // target's remaining modules, ending with the CamelCase type name.
        local_path
            .map(|_| "super".to_string())
            .chain(ident_path.map(to_snake))
            .chain(iter::once(to_upper_camel(ident_type)))
            .join("::")
    }
+
    /// Returns the `#[prost(...)]` type tag for a field; enums carry their
    /// resolved Rust path in the tag so it only needs allocation there.
    fn field_type_tag(&self, field: &FieldDescriptorProto) -> Cow<'static, str> {
        match field.r#type() {
            Type::Float => Cow::Borrowed("float"),
            Type::Double => Cow::Borrowed("double"),
            Type::Int32 => Cow::Borrowed("int32"),
            Type::Int64 => Cow::Borrowed("int64"),
            Type::Uint32 => Cow::Borrowed("uint32"),
            Type::Uint64 => Cow::Borrowed("uint64"),
            Type::Sint32 => Cow::Borrowed("sint32"),
            Type::Sint64 => Cow::Borrowed("sint64"),
            Type::Fixed32 => Cow::Borrowed("fixed32"),
            Type::Fixed64 => Cow::Borrowed("fixed64"),
            Type::Sfixed32 => Cow::Borrowed("sfixed32"),
            Type::Sfixed64 => Cow::Borrowed("sfixed64"),
            Type::Bool => Cow::Borrowed("bool"),
            Type::String => Cow::Borrowed("string"),
            Type::Bytes => Cow::Borrowed("bytes"),
            Type::Group => Cow::Borrowed("group"),
            Type::Message => Cow::Borrowed("message"),
            // {:?} quotes the path, e.g. enumeration="super::Foo".
            Type::Enum => Cow::Owned(format!(
                "enumeration={:?}",
                self.resolve_ident(field.type_name())
            )),
        }
    }
+
    /// Like `field_type_tag`, but map values embed the enum path in
    /// parentheses (the map tag already supplies the surrounding quotes).
    fn map_value_type_tag(&self, field: &FieldDescriptorProto) -> Cow<'static, str> {
        match field.r#type() {
            Type::Enum => Cow::Owned(format!(
                "enumeration({})",
                self.resolve_ident(field.type_name())
            )),
            _ => self.field_type_tag(field),
        }
    }
+
+ fn optional(&self, field: &FieldDescriptorProto) -> bool {
+ if field.proto3_optional.unwrap_or(false) {
+ return true;
+ }
+
+ if field.label() != Label::Optional {
+ return false;
+ }
+
+ match field.r#type() {
+ Type::Message => true,
+ _ => self.syntax == Syntax::Proto2,
+ }
+ }
+
+ /// Returns whether the Rust type for this field needs to be `Box<_>`.
+ ///
+ /// This can be explicitly configured with `Config::boxed`, or necessary
+ /// to prevent an infinitely sized type definition in case when the type of
+ /// a non-repeated message field transitively contains the message itself.
+ fn boxed(
+ &self,
+ field: &FieldDescriptorProto,
+ fq_message_name: &str,
+ oneof: Option<&str>,
+ ) -> bool {
+ let repeated = field.label == Some(Label::Repeated as i32);
+ let fd_type = field.r#type();
+ if !repeated
+ && (fd_type == Type::Message || fd_type == Type::Group)
+ && self
+ .message_graph
+ .is_nested(field.type_name(), fq_message_name)
+ {
+ return true;
+ }
+ let config_path = match oneof {
+ None => Cow::Borrowed(fq_message_name),
+ Some(ooname) => Cow::Owned(format!("{fq_message_name}.{ooname}")),
+ };
+ if self
+ .config
+ .boxed
+ .get_first_field(&config_path, field.name())
+ .is_some()
+ {
+ if repeated {
+ println!(
+ "cargo:warning=\
+ Field X is repeated and manually marked as boxed. \
+ This is deprecated and support will be removed in a later release"
+ );
+ }
+ return true;
+ }
+ false
+ }
+
+ /// Returns `true` if the field options includes the `deprecated` option.
+ fn deprecated(&self, field: &FieldDescriptorProto) -> bool {
+ field
+ .options
+ .as_ref()
+ .map_or(false, FieldOptions::deprecated)
+ }
+
    /// Returns the fully-qualified name, starting with a dot
    ///
    /// Shape: `.[package.][type_path.]message_name`; separators are only
    /// emitted when the preceding component is non-empty.
    fn fq_name(&self, message_name: &str) -> String {
        format!(
            "{}{}{}{}.{}",
            if self.package.is_empty() { "" } else { "." },
            self.package.trim_matches('.'),
            if self.type_path.is_empty() { "" } else { "." },
            self.type_path.join("."),
            message_name,
        )
    }
+}
+
/// Returns `true` if the repeated field type can be packed.
///
/// Packing applies to all scalar numeric types, bools, and enums —
/// everything except length-delimited types (strings, bytes, messages,
/// groups), per the protobuf encoding rules.
fn can_pack(field: &FieldDescriptorProto) -> bool {
    matches!(
        field.r#type(),
        Type::Float
            | Type::Double
            | Type::Int32
            | Type::Int64
            | Type::Uint32
            | Type::Uint64
            | Type::Sint32
            | Type::Sint64
            | Type::Fixed32
            | Type::Fixed64
            | Type::Sfixed32
            | Type::Sfixed64
            | Type::Bool
            | Type::Enum
    )
}
+
/// One generated enum variant: the proto value it came from (name, number,
/// index for comment lookup) and the Rust identifier chosen for it.
struct EnumVariantMapping<'a> {
    path_idx: usize,
    proto_name: &'a str,
    proto_number: i32,
    generated_variant_name: String,
}
+
/// Builds the variant list for a generated enum: deduplicates aliased
/// values, CamelCases names, optionally strips the enum-name prefix, and
/// panics if two proto values would collide on the same Rust identifier.
fn build_enum_value_mappings<'a>(
    generated_enum_name: &str,
    do_strip_enum_prefix: bool,
    enum_values: &'a [EnumValueDescriptorProto],
) -> Vec<EnumVariantMapping<'a>> {
    let mut numbers = HashSet::new();
    let mut generated_names = HashMap::new();
    let mut mappings = Vec::new();

    for (idx, value) in enum_values.iter().enumerate() {
        // Skip duplicate enum values. Protobuf allows this when the
        // 'allow_alias' option is set.
        if !numbers.insert(value.number()) {
            continue;
        }

        let mut generated_variant_name = to_upper_camel(value.name());
        if do_strip_enum_prefix {
            generated_variant_name =
                strip_enum_prefix(generated_enum_name, &generated_variant_name);
        }

        // Prefix stripping can make two distinct proto names collide; fail
        // loudly rather than emit an enum that will not compile.
        if let Some(old_v) = generated_names.insert(generated_variant_name.to_owned(), value.name())
        {
            panic!("Generated enum variant names overlap: `{}` variant name to be used both by `{}` and `{}` ProtoBuf enum values",
                generated_variant_name, old_v, value.name());
        }

        mappings.push(EnumVariantMapping {
            path_idx: idx,
            proto_name: value.name(),
            proto_number: value.number(),
            generated_variant_name,
        })
    }
    mappings
}
diff --git a/vendor/prost-build/src/code_generator/c_escaping.rs b/vendor/prost-build/src/code_generator/c_escaping.rs
new file mode 100644
index 00000000..58b2ede4
--- /dev/null
+++ b/vendor/prost-build/src/code_generator/c_escaping.rs
@@ -0,0 +1,139 @@
+use log::debug;
+
+/// Based on [`google::protobuf::UnescapeCEscapeString`][1]
+/// [1]: https://github.com/google/protobuf/blob/3.3.x/src/google/protobuf/stubs/strutil.cc#L312-L322
+///
+/// Decodes a C-style escaped string (as used for `bytes` field defaults in
+/// descriptors) into raw bytes.
+///
+/// # Panics
+/// Panics on a trailing bare backslash, an incomplete or invalid `\x` hex
+/// escape, or an unknown escape character.
+pub(super) fn unescape_c_escape_string(s: &str) -> Vec<u8> {
+    let src = s.as_bytes();
+    let len = src.len();
+    let mut dst = Vec::new();
+
+    let mut p = 0;
+
+    while p < len {
+        if src[p] != b'\\' {
+            // Ordinary byte: copy through unchanged.
+            dst.push(src[p]);
+            p += 1;
+            continue;
+        }
+        // Consume the backslash; an escape character must follow.
+        p += 1;
+        if p == len {
+            // BUGFIX: the message used `'\'`, where `\'` merely escapes the
+            // quote, so it printed "ends with ''"; `\\` prints the intended
+            // backslash.
+            panic!(
+                "invalid c-escaped default binary value ({}): ends with '\\'",
+                s
+            )
+        }
+        match src[p] {
+            // Octal escape: up to three octal digits, e.g. `\012`.
+            b'0'..=b'7' => {
+                debug!("another octal: {}, offset: {}", s, &s[p..]);
+                let mut octal = 0;
+                for _ in 0..3 {
+                    if p < len && src[p] >= b'0' && src[p] <= b'7' {
+                        debug!("\toctal: {}", octal);
+                        octal = octal * 8 + (src[p] - b'0');
+                        p += 1;
+                    } else {
+                        break;
+                    }
+                }
+                dst.push(octal);
+            }
+            // Hex escape: exactly two hex digits, e.g. `\x7F`.
+            b'x' | b'X' => {
+                if p + 3 > len {
+                    panic!(
+                        "invalid c-escaped default binary value ({}): incomplete hex value",
+                        s
+                    )
+                }
+                match u8::from_str_radix(&s[p + 1..p + 3], 16) {
+                    Ok(b) => dst.push(b),
+                    _ => panic!(
+                        "invalid c-escaped default binary value ({}): invalid hex value",
+                        &s[p..p + 2]
+                    ),
+                }
+                p += 3;
+            }
+            // Single-character escapes, consolidated from one arm per
+            // character into a byte-value lookup.
+            c => {
+                dst.push(match c {
+                    b'a' => 0x07,  // bell
+                    b'b' => 0x08,  // backspace
+                    b'f' => 0x0C,  // form feed
+                    b'n' => 0x0A,  // newline
+                    b'r' => 0x0D,  // carriage return
+                    b't' => 0x09,  // horizontal tab
+                    b'v' => 0x0B,  // vertical tab
+                    b'\\' => 0x5C,
+                    b'?' => 0x3F,
+                    b'\'' => 0x27,
+                    b'"' => 0x22,
+                    _ => panic!(
+                        "invalid c-escaped default binary value ({}): invalid escape",
+                        s
+                    ),
+                });
+                p += 1;
+            }
+        }
+    }
+    dst
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_unescape_c_escape_string() {
+        // No escapes: input passes through byte-for-byte.
+        assert_eq!(
+            &b"hello world"[..],
+            &unescape_c_escape_string("hello world")[..]
+        );
+
+        // `\0` is handled by the octal branch (single digit).
+        assert_eq!(&b"\0"[..], &unescape_c_escape_string(r#"\0"#)[..]);
+
+        // Three-digit octal escapes.
+        assert_eq!(
+            &[0o012, 0o156],
+            &unescape_c_escape_string(r#"\012\156"#)[..]
+        );
+        // Two-digit hex escapes.
+        assert_eq!(&[0x01, 0x02], &unescape_c_escape_string(r#"\x01\x02"#)[..]);
+
+        // All single-character escapes plus octal and hex in one input.
+        assert_eq!(
+            &b"\0\x01\x07\x08\x0C\n\r\t\x0B\\\'\"\xFE"[..],
+            &unescape_c_escape_string(r#"\0\001\a\b\f\n\r\t\v\\\'\"\xfe"#)[..]
+        );
+    }
+
+    // `\x` followed by fewer than two hex digits must panic.
+    #[test]
+    #[should_panic(expected = "incomplete hex value")]
+    fn test_unescape_c_escape_string_incomplete_hex_value() {
+        unescape_c_escape_string(r#"\x1"#);
+    }
+}
diff --git a/vendor/prost-build/src/code_generator/syntax.rs b/vendor/prost-build/src/code_generator/syntax.rs
new file mode 100644
index 00000000..4fbe9919
--- /dev/null
+++ b/vendor/prost-build/src/code_generator/syntax.rs
@@ -0,0 +1,14 @@
+/// Protobuf syntax of a `.proto` file.
+// IMPROVEMENT: derive the standard traits (`Debug` especially) so the enum can
+// be logged and copied; a two-variant fieldless enum is trivially `Copy`/`Eq`.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub(super) enum Syntax {
+    Proto2,
+    Proto3,
+}
+
+impl From<Option<&str>> for Syntax {
+    /// Parses the `syntax` field of a file descriptor; a missing value is
+    /// treated as proto2. Panics on any other string.
+    fn from(optional_str: Option<&str>) -> Self {
+        match optional_str {
+            None | Some("proto2") => Syntax::Proto2,
+            Some("proto3") => Syntax::Proto3,
+            Some(s) => panic!("unknown syntax: {}", s),
+        }
+    }
+}
diff --git a/vendor/prost-build/src/collections.rs b/vendor/prost-build/src/collections.rs
new file mode 100644
index 00000000..63be4d62
--- /dev/null
+++ b/vendor/prost-build/src/collections.rs
@@ -0,0 +1,57 @@
+/// The map collection type to output for Protobuf `map` fields.
+///
+/// Defaults to [`MapType::HashMap`].
+#[non_exhaustive]
+#[derive(Default, Clone, Copy, Debug, PartialEq)]
+pub(crate) enum MapType {
+    /// The [`std::collections::HashMap`] type.
+    #[default]
+    HashMap,
+    /// The [`std::collections::BTreeMap`] type.
+    BTreeMap,
+}
+
+/// The bytes collection type to output for Protobuf `bytes` fields.
+///
+/// Defaults to [`BytesType::Vec`].
+#[non_exhaustive]
+#[derive(Default, Clone, Copy, Debug, PartialEq)]
+pub(crate) enum BytesType {
+    /// The [`alloc::vec::Vec<u8>`] type.
+    #[default]
+    Vec,
+    /// The [`bytes::Bytes`] type.
+    Bytes,
+}
+
+impl MapType {
+    /// The `prost-derive` annotation token for this map type.
+    pub fn annotation(&self) -> &'static str {
+        match *self {
+            Self::BTreeMap => "btree_map",
+            Self::HashMap => "map",
+        }
+    }
+
+    /// Fully-qualified path of the Rust type generated for this map type.
+    pub fn rust_type(&self) -> &'static str {
+        match *self {
+            Self::BTreeMap => "::prost::alloc::collections::BTreeMap",
+            Self::HashMap => "::std::collections::HashMap",
+        }
+    }
+}
+
+impl BytesType {
+    /// The `prost-derive` annotation token for this bytes type.
+    pub fn annotation(&self) -> &'static str {
+        match *self {
+            Self::Bytes => "bytes",
+            Self::Vec => "vec",
+        }
+    }
+
+    /// Fully-qualified path of the Rust type generated for this bytes type.
+    pub fn rust_type(&self) -> &'static str {
+        match *self {
+            Self::Bytes => "::prost::bytes::Bytes",
+            Self::Vec => "::prost::alloc::vec::Vec<u8>",
+        }
+    }
+}
diff --git a/vendor/prost-build/src/config.rs b/vendor/prost-build/src/config.rs
new file mode 100644
index 00000000..a696e404
--- /dev/null
+++ b/vendor/prost-build/src/config.rs
@@ -0,0 +1,1174 @@
+use std::collections::HashMap;
+use std::default;
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fmt;
+use std::fs;
+use std::io::{Error, ErrorKind, Result, Write};
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use log::debug;
+use log::trace;
+
+use prost::Message;
+use prost_types::{FileDescriptorProto, FileDescriptorSet};
+
+use crate::code_generator::CodeGenerator;
+use crate::extern_paths::ExternPaths;
+use crate::message_graph::MessageGraph;
+use crate::path::PathMap;
+use crate::BytesType;
+use crate::MapType;
+use crate::Module;
+use crate::ServiceGenerator;
+
+/// Configuration options for Protobuf code generation.
+///
+/// This configuration builder can be used to set non-default code generation options.
+pub struct Config {
+ // Set via `file_descriptor_set_path`.
+ pub(crate) file_descriptor_set_path: Option<PathBuf>,
+ // Set via `service_generator`; at most one generator is held.
+ pub(crate) service_generator: Option<Box<dyn ServiceGenerator>>,
+ // Paths configured for `BTreeMap` via `btree_map`.
+ pub(crate) map_type: PathMap<MapType>,
+ // Paths configured for `bytes::Bytes` via `bytes`.
+ pub(crate) bytes_type: PathMap<BytesType>,
+ // Attributes added via `type_attribute` (messages, enums, and oneofs).
+ pub(crate) type_attributes: PathMap<String>,
+ // Attributes added via `message_attribute`.
+ pub(crate) message_attributes: PathMap<String>,
+ // Attributes added via `enum_attribute`.
+ pub(crate) enum_attributes: PathMap<String>,
+ // Attributes added via `field_attribute`.
+ pub(crate) field_attributes: PathMap<String>,
+ // Fields to wrap in `Box`, set via `boxed`.
+ pub(crate) boxed: PathMap<()>,
+ // Cleared by `compile_well_known_types` to generate WKTs from source.
+ pub(crate) prost_types: bool,
+ // Cleared by `retain_enum_prefix` to keep enum-name prefixes on variants.
+ pub(crate) strip_enum_prefix: bool,
+ // Output directory override; falls back to the OUT_DIR env var when `None`.
+ pub(crate) out_dir: Option<PathBuf>,
+ // (proto path, Rust path) pairs added via `extern_path`.
+ pub(crate) extern_paths: Vec<(String, String)>,
+ // Filename (without `.rs`) for protos that declare no package.
+ pub(crate) default_package_filename: String,
+ // Set via `enable_type_names`.
+ pub(crate) enable_type_names: bool,
+ // Type-URL domain prefixes, set via `type_name_domain`.
+ pub(crate) type_name_domains: PathMap<String>,
+ // Extra CLI arguments passed through to `protoc`.
+ pub(crate) protoc_args: Vec<OsString>,
+ // Paths whose doc comments are suppressed, set via `disable_comments`.
+ pub(crate) disable_comments: PathMap<()>,
+ // Paths whose types skip `impl Debug` generation.
+ pub(crate) skip_debug: PathMap<()>,
+ // Set via `skip_protoc_run`; descriptor set is then read, not generated.
+ pub(crate) skip_protoc_run: bool,
+ // Optional path of the aggregate include file, set via `include_file`.
+ pub(crate) include_file: Option<PathBuf>,
+ // Override for the `::prost` path used in generated code.
+ pub(crate) prost_path: Option<String>,
+ // Whether to run `prettyplease` on the output (only with the `format` feature).
+ #[cfg(feature = "format")]
+ pub(crate) fmt: bool,
+}
+
+impl Config {
+ /// Creates a code generator configuration populated with the default options.
+ pub fn new() -> Config {
+ Default::default()
+ }
+
+ /// Configure the code generator to generate Rust [`BTreeMap`][1] fields for Protobuf
+ /// [`map`][2] type fields.
+ ///
+ /// # Arguments
+ ///
+ /// **`paths`** - paths to specific fields, messages, or packages which should use a Rust
+ /// `BTreeMap` for Protobuf `map` fields. Paths are specified in terms of the Protobuf type
+ /// name (not the generated Rust type name). Paths with a leading `.` are treated as fully
+ /// qualified names. Paths without a leading `.` are treated as relative, and are suffix
+ /// matched on the fully qualified field name. If a Protobuf map field matches any of the
+ /// paths, a Rust `BTreeMap` field is generated instead of the default [`HashMap`][3].
+ ///
+ /// The matching is done on the Protobuf names, before converting to Rust-friendly casing
+ /// standards.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # let mut config = prost_build::Config::new();
+ /// // Match a specific field in a message type.
+ /// config.btree_map(&[".my_messages.MyMessageType.my_map_field"]);
+ ///
+ /// // Match all map fields in a message type.
+ /// config.btree_map(&[".my_messages.MyMessageType"]);
+ ///
+ /// // Match all map fields in a package.
+ /// config.btree_map(&[".my_messages"]);
+ ///
+ /// // Match all map fields. Specially useful in `no_std` contexts.
+ /// config.btree_map(&["."]);
+ ///
+ /// // Match all map fields in a nested message.
+ /// config.btree_map(&[".my_messages.MyMessageType.MyNestedMessageType"]);
+ ///
+ /// // Match all fields named 'my_map_field'.
+ /// config.btree_map(&["my_map_field"]);
+ ///
+ /// // Match all fields named 'my_map_field' in messages named 'MyMessageType', regardless of
+ /// // package or nesting.
+ /// config.btree_map(&["MyMessageType.my_map_field"]);
+ ///
+ /// // Match all fields named 'my_map_field', and all fields in the 'foo.bar' package.
+ /// config.btree_map(&["my_map_field", ".foo.bar"]);
+ /// ```
+ ///
+ /// [1]: https://doc.rust-lang.org/std/collections/struct.BTreeMap.html
+ /// [2]: https://developers.google.com/protocol-buffers/docs/proto3#maps
+ /// [3]: https://doc.rust-lang.org/std/collections/struct.HashMap.html
+ pub fn btree_map<I, S>(&mut self, paths: I) -> &mut Self
+ where
+ I: IntoIterator<Item = S>,
+ S: AsRef<str>,
+ {
+ // A new call replaces (does not extend) any earlier `btree_map` configuration.
+ self.map_type.clear();
+ for matcher in paths {
+ self.map_type
+ .insert(matcher.as_ref().to_string(), MapType::BTreeMap);
+ }
+ self
+ }
+
+ /// Configure the code generator to generate Rust [`bytes::Bytes`][1] fields for Protobuf
+ /// [`bytes`][2] type fields.
+ ///
+ /// # Arguments
+ ///
+ /// **`paths`** - paths to specific fields, messages, or packages which should use a Rust
+ /// `Bytes` for Protobuf `bytes` fields. Paths are specified in terms of the Protobuf type
+ /// name (not the generated Rust type name). Paths with a leading `.` are treated as fully
+ /// qualified names. Paths without a leading `.` are treated as relative, and are suffix
+ /// matched on the fully qualified field name. If a Protobuf map field matches any of the
+ /// paths, a Rust `Bytes` field is generated instead of the default [`Vec<u8>`][3].
+ ///
+ /// The matching is done on the Protobuf names, before converting to Rust-friendly casing
+ /// standards.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # let mut config = prost_build::Config::new();
+ /// // Match a specific field in a message type.
+ /// config.bytes(&[".my_messages.MyMessageType.my_bytes_field"]);
+ ///
+ /// // Match all bytes fields in a message type.
+ /// config.bytes(&[".my_messages.MyMessageType"]);
+ ///
+ /// // Match all bytes fields in a package.
+ /// config.bytes(&[".my_messages"]);
+ ///
+ /// // Match all bytes fields. Specially useful in `no_std` contexts.
+ /// config.bytes(&["."]);
+ ///
+ /// // Match all bytes fields in a nested message.
+ /// config.bytes(&[".my_messages.MyMessageType.MyNestedMessageType"]);
+ ///
+ /// // Match all fields named 'my_bytes_field'.
+ /// config.bytes(&["my_bytes_field"]);
+ ///
+ /// // Match all fields named 'my_bytes_field' in messages named 'MyMessageType', regardless of
+ /// // package or nesting.
+ /// config.bytes(&["MyMessageType.my_bytes_field"]);
+ ///
+ /// // Match all fields named 'my_bytes_field', and all fields in the 'foo.bar' package.
+ /// config.bytes(&["my_bytes_field", ".foo.bar"]);
+ /// ```
+ ///
+ /// [1]: https://docs.rs/bytes/latest/bytes/struct.Bytes.html
+ /// [2]: https://developers.google.com/protocol-buffers/docs/proto3#scalar
+ /// [3]: https://doc.rust-lang.org/std/vec/struct.Vec.html
+ pub fn bytes<I, S>(&mut self, paths: I) -> &mut Self
+ where
+ I: IntoIterator<Item = S>,
+ S: AsRef<str>,
+ {
+ // A new call replaces (does not extend) any earlier `bytes` configuration.
+ self.bytes_type.clear();
+ for matcher in paths {
+ self.bytes_type
+ .insert(matcher.as_ref().to_string(), BytesType::Bytes);
+ }
+ self
+ }
+
+ /// Add additional attribute to matched fields.
+ ///
+ /// # Arguments
+ ///
+ /// **`path`** - a path matching any number of fields. These fields get the attribute.
+ /// For details about matching fields see [`btree_map`](#method.btree_map).
+ ///
+ /// **`attribute`** - an arbitrary string that'll be placed before each matched field. The
+ /// expected usage are additional attributes, usually in concert with whole-type
+ /// attributes set with [`type_attribute`](method.type_attribute), but it is not
+ /// checked and anything can be put there.
+ ///
+ /// Note that the calls to this method are cumulative ‒ if multiple paths from multiple calls
+ /// match the same field, the field gets all the corresponding attributes.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # let mut config = prost_build::Config::new();
+ /// // Prost renames fields named `in` to `in_`. But if serialized through serde,
+ /// // they should serialize as `in`.
+ /// config.field_attribute("in", "#[serde(rename = \"in\")]");
+ /// ```
+ pub fn field_attribute<P, A>(&mut self, path: P, attribute: A) -> &mut Self
+ where
+ P: AsRef<str>,
+ A: AsRef<str>,
+ {
+ // Cumulative: each call adds another (path, attribute) pair.
+ self.field_attributes
+ .insert(path.as_ref().to_string(), attribute.as_ref().to_string());
+ self
+ }
+
+ /// Add additional attribute to matched messages, enums and one-ofs.
+ ///
+ /// # Arguments
+ ///
+ /// **`paths`** - a path matching any number of types. It works the same way as in
+ /// [`btree_map`](#method.btree_map), just with the field name omitted.
+ ///
+ /// **`attribute`** - an arbitrary string to be placed before each matched type. The
+ /// expected usage are additional attributes, but anything is allowed.
+ ///
+ /// The calls to this method are cumulative. They don't overwrite previous calls and if a
+ /// type is matched by multiple calls of the method, all relevant attributes are added to
+ /// it.
+ ///
+ /// For things like serde it might be needed to combine with [field
+ /// attributes](#method.field_attribute).
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # let mut config = prost_build::Config::new();
+ /// // Nothing around uses floats, so we can derive real `Eq` in addition to `PartialEq`.
+ /// config.type_attribute(".", "#[derive(Eq)]");
+ /// // Some messages want to be serializable with serde as well.
+ /// config.type_attribute("my_messages.MyMessageType",
+ /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]");
+ /// config.type_attribute("my_messages.MyMessageType.MyNestedMessageType",
+ /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]");
+ /// ```
+ ///
+ /// # Oneof fields
+ ///
+ /// The `oneof` fields don't have a type name of their own inside Protobuf. Therefore, the
+ /// field name can be used both with `type_attribute` and `field_attribute` ‒ the first is
+ /// placed before the `enum` type definition, the other before the field inside corresponding
+ /// message `struct`.
+ ///
+ /// In other words, to place an attribute on the `enum` implementing the `oneof`, the match
+ /// would look like `my_messages.MyMessageType.oneofname`.
+ pub fn type_attribute<P, A>(&mut self, path: P, attribute: A) -> &mut Self
+ where
+ P: AsRef<str>,
+ A: AsRef<str>,
+ {
+ // Cumulative: each call adds another (path, attribute) pair.
+ self.type_attributes
+ .insert(path.as_ref().to_string(), attribute.as_ref().to_string());
+ self
+ }
+
+ /// Add additional attribute to matched messages.
+ ///
+ /// # Arguments
+ ///
+ /// **`paths`** - a path matching any number of types. It works the same way as in
+ /// [`btree_map`](#method.btree_map), just with the field name omitted.
+ ///
+ /// **`attribute`** - an arbitrary string to be placed before each matched type. The
+ /// expected usage are additional attributes, but anything is allowed.
+ ///
+ /// The calls to this method are cumulative. They don't overwrite previous calls and if a
+ /// type is matched by multiple calls of the method, all relevant attributes are added to
+ /// it.
+ ///
+ /// For things like serde it might be needed to combine with [field
+ /// attributes](#method.field_attribute).
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # let mut config = prost_build::Config::new();
+ /// // Nothing around uses floats, so we can derive real `Eq` in addition to `PartialEq`.
+ /// config.message_attribute(".", "#[derive(Eq)]");
+ /// // Some messages want to be serializable with serde as well.
+ /// config.message_attribute("my_messages.MyMessageType",
+ /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]");
+ /// config.message_attribute("my_messages.MyMessageType.MyNestedMessageType",
+ /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]");
+ /// ```
+ pub fn message_attribute<P, A>(&mut self, path: P, attribute: A) -> &mut Self
+ where
+ P: AsRef<str>,
+ A: AsRef<str>,
+ {
+ // Cumulative: each call adds another (path, attribute) pair.
+ self.message_attributes
+ .insert(path.as_ref().to_string(), attribute.as_ref().to_string());
+ self
+ }
+
+ /// Add additional attribute to matched enums and one-ofs.
+ ///
+ /// # Arguments
+ ///
+ /// **`paths`** - a path matching any number of types. It works the same way as in
+ /// [`btree_map`](#method.btree_map), just with the field name omitted.
+ ///
+ /// **`attribute`** - an arbitrary string to be placed before each matched type. The
+ /// expected usage are additional attributes, but anything is allowed.
+ ///
+ /// The calls to this method are cumulative. They don't overwrite previous calls and if a
+ /// type is matched by multiple calls of the method, all relevant attributes are added to
+ /// it.
+ ///
+ /// For things like serde it might be needed to combine with [field
+ /// attributes](#method.field_attribute).
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # let mut config = prost_build::Config::new();
+ /// // Nothing around uses floats, so we can derive real `Eq` in addition to `PartialEq`.
+ /// config.enum_attribute(".", "#[derive(Eq)]");
+ /// // Some messages want to be serializable with serde as well.
+ /// config.enum_attribute("my_messages.MyEnumType",
+ /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]");
+ /// config.enum_attribute("my_messages.MyMessageType.MyNestedEnumType",
+ /// "#[derive(Serialize)] #[serde(rename_all = \"snake_case\")]");
+ /// ```
+ ///
+ /// # Oneof fields
+ ///
+ /// The `oneof` fields don't have a type name of their own inside Protobuf. Therefore, the
+ /// field name can be used both with `enum_attribute` and `field_attribute` ‒ the first is
+ /// placed before the `enum` type definition, the other before the field inside corresponding
+ /// message `struct`.
+ ///
+ /// In other words, to place an attribute on the `enum` implementing the `oneof`, the match
+ /// would look like `my_messages.MyNestedMessageType.oneofname`.
+ pub fn enum_attribute<P, A>(&mut self, path: P, attribute: A) -> &mut Self
+ where
+ P: AsRef<str>,
+ A: AsRef<str>,
+ {
+ // Cumulative: each call adds another (path, attribute) pair.
+ self.enum_attributes
+ .insert(path.as_ref().to_string(), attribute.as_ref().to_string());
+ self
+ }
+
+ /// Wrap matched fields in a `Box`.
+ ///
+ /// # Arguments
+ ///
+ /// **`path`** - a path matching any number of fields; each matching field is
+ /// generated as a boxed value. For details about path matching see
+ /// [`btree_map`](#method.btree_map).
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # let mut config = prost_build::Config::new();
+ /// config.boxed(".my_messages.MyMessageType.my_field");
+ /// ```
+ pub fn boxed<P>(&mut self, path: P) -> &mut Self
+ where
+ P: AsRef<str>,
+ {
+ // Unit payload: only the presence of a matching path matters.
+ let key = path.as_ref().to_owned();
+ self.boxed.insert(key, ());
+ self
+ }
+
+ /// Configures the code generator to use the provided service generator.
+ ///
+ /// Only one service generator is held; a later call replaces the earlier one.
+ pub fn service_generator(&mut self, service_generator: Box<dyn ServiceGenerator>) -> &mut Self {
+ self.service_generator = Some(service_generator);
+ self
+ }
+
+ /// Configures the code generator to not use the `prost_types` crate for Protobuf well-known
+ /// types, and instead generate Protobuf well-known types from their `.proto` definitions.
+ pub fn compile_well_known_types(&mut self) -> &mut Self {
+ // When false, well-known types are generated from source instead of
+ // referencing the `prost-types` crate.
+ self.prost_types = false;
+ self
+ }
+
+ /// Configures the code generator to omit documentation comments on generated Protobuf types.
+ ///
+ /// # Example
+ ///
+ /// Occasionally `.proto` files contain code blocks which are not valid Rust. To avoid doctest
+ /// failures, annotate the invalid code blocks with an [`ignore` or `no_run` attribute][1], or
+ /// disable doctests for the crate with a [Cargo.toml entry][2]. If neither of these options
+ /// are possible, then omit comments on generated code during doctest builds:
+ ///
+ /// ```rust,no_run
+ /// # fn main() -> std::io::Result<()> {
+ /// let mut config = prost_build::Config::new();
+ /// config.disable_comments(&["."]);
+ /// config.compile_protos(&["src/frontend.proto", "src/backend.proto"], &["src"])?;
+ /// # Ok(())
+ /// # }
+ /// ```
+ ///
+ /// As with other options which take a set of paths, comments can be disabled on a per-package
+ /// or per-symbol basis.
+ ///
+ /// [1]: https://doc.rust-lang.org/rustdoc/documentation-tests.html#attributes
+ /// [2]: https://doc.rust-lang.org/cargo/reference/cargo-targets.html#configuring-a-target
+ pub fn disable_comments<I, S>(&mut self, paths: I) -> &mut Self
+ where
+ I: IntoIterator<Item = S>,
+ S: AsRef<str>,
+ {
+ // A new call replaces any previously disabled paths.
+ self.disable_comments.clear();
+ for matcher in paths {
+ self.disable_comments
+ .insert(matcher.as_ref().to_string(), ())
+ }
+ self
+ }
+
+ /// Skips generating `impl Debug` for types
+ ///
+ /// Paths are matched as described in [`btree_map`](#method.btree_map).
+ pub fn skip_debug<I, S>(&mut self, paths: I) -> &mut Self
+ where
+ I: IntoIterator<Item = S>,
+ S: AsRef<str>,
+ {
+ // A new call replaces any previously configured paths.
+ self.skip_debug.clear();
+ for matcher in paths {
+ self.skip_debug.insert(matcher.as_ref().to_string(), ());
+ }
+ self
+ }
+
+ /// Declare an externally provided Protobuf package or type.
+ ///
+ /// `extern_path` allows `prost` types in external crates to be referenced in generated code.
+ ///
+ /// When `prost` compiles a `.proto` which includes an import of another `.proto`, it will
+ /// automatically recursively compile the imported file as well. `extern_path` can be used
+ /// to instead substitute types from an external crate.
+ ///
+ /// # Example
+ ///
+ /// As an example, consider a crate, `uuid`, with a `prost`-generated `Uuid` type:
+ ///
+ /// ```proto
+ /// // uuid.proto
+ ///
+ /// syntax = "proto3";
+ /// package uuid;
+ ///
+ /// message Uuid {
+ /// string uuid_str = 1;
+ /// }
+ /// ```
+ ///
+ /// The `uuid` crate implements some traits for `Uuid`, and publicly exports it:
+ ///
+ /// ```rust,ignore
+ /// // lib.rs in the uuid crate
+ ///
+ /// include!(concat!(env!("OUT_DIR"), "/uuid.rs"));
+ ///
+ /// pub trait DoSomething {
+ /// fn do_it(&self);
+ /// }
+ ///
+ /// impl DoSomething for Uuid {
+ /// fn do_it(&self) {
+ /// println!("Done");
+ /// }
+ /// }
+ /// ```
+ ///
+ /// A separate crate, `my_application`, uses `prost` to generate message types which reference
+ /// `Uuid`:
+ ///
+ /// ```proto
+ /// // my_application.proto
+ ///
+ /// syntax = "proto3";
+ /// package my_application;
+ ///
+ /// import "uuid.proto";
+ ///
+ /// message MyMessage {
+ /// uuid.Uuid message_id = 1;
+ /// string some_payload = 2;
+ /// }
+ /// ```
+ ///
+ /// Additionally, `my_application` depends on the trait impls provided by the `uuid` crate:
+ ///
+ /// ```rust,ignore
+ /// // `main.rs` of `my_application`
+ ///
+ /// use uuid::{DoSomething, Uuid};
+ ///
+ /// include!(concat!(env!("OUT_DIR"), "/my_application.rs"));
+ ///
+ /// pub fn process_message(msg: MyMessage) {
+ /// if let Some(uuid) = msg.message_id {
+ /// uuid.do_it();
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Without configuring `uuid` as an external path in `my_application`'s `build.rs`, `prost`
+ /// would compile a completely separate version of the `Uuid` type, and `process_message` would
+ /// fail to compile. However, if `my_application` configures `uuid` as an extern path with a
+ /// call to `.extern_path(".uuid", "::uuid")`, `prost` will use the external type instead of
+ /// compiling a new version of `Uuid`. Note that the configuration could also be specified as
+ /// `.extern_path(".uuid.Uuid", "::uuid::Uuid")` if only the `Uuid` type were externally
+ /// provided, and not the whole `uuid` package.
+ ///
+ /// # Usage
+ ///
+ /// `extern_path` takes a fully-qualified Protobuf path, and the corresponding Rust path that
+ /// it will be substituted with in generated code. The Protobuf path can refer to a package or
+ /// a type, and the Rust path should correspondingly refer to a Rust module or type.
+ ///
+ /// ```rust
+ /// # let mut config = prost_build::Config::new();
+ /// // Declare the `uuid` Protobuf package and all nested packages and types as externally
+ /// // provided by the `uuid` crate.
+ /// config.extern_path(".uuid", "::uuid");
+ ///
+ /// // Declare the `foo.bar.baz` Protobuf package and all nested packages and types as
+ /// // externally provided by the `foo_bar_baz` crate.
+ /// config.extern_path(".foo.bar.baz", "::foo_bar_baz");
+ ///
+ /// // Declare the `uuid.Uuid` Protobuf type (and all nested types) as externally provided
+ /// // by the `uuid` crate's `Uuid` type.
+ /// config.extern_path(".uuid.Uuid", "::uuid::Uuid");
+ /// ```
+ pub fn extern_path<P1, P2>(&mut self, proto_path: P1, rust_path: P2) -> &mut Self
+ where
+ P1: Into<String>,
+ P2: Into<String>,
+ {
+ // Every mapping is kept; later calls add to the list rather than replace it.
+ self.extern_paths
+ .push((proto_path.into(), rust_path.into()));
+ self
+ }
+
+ /// When set, the `FileDescriptorSet` generated by `protoc` is written to the provided
+ /// filesystem path.
+ ///
+ /// Setting this again replaces the previously configured path.
+ ///
+ /// This option can be used in conjunction with the [`include_bytes!`] macro and the types in
+ /// the `prost-types` crate for implementing reflection capabilities, among other things.
+ ///
+ /// ## Example
+ ///
+ /// In `build.rs`:
+ ///
+ /// ```rust, no_run
+ /// # use std::env;
+ /// # use std::path::PathBuf;
+ /// # let mut config = prost_build::Config::new();
+ /// config.file_descriptor_set_path(
+ /// PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR environment variable not set"))
+ /// .join("file_descriptor_set.bin"));
+ /// ```
+ ///
+ /// In `lib.rs`:
+ ///
+ /// ```rust,ignore
+ /// let file_descriptor_set_bytes = include_bytes!(concat!(env!("OUT_DIR"), "/file_descriptor_set.bin"));
+ /// let file_descriptor_set = prost_types::FileDescriptorSet::decode(&file_descriptor_set_bytes[..]).unwrap();
+ /// ```
+ pub fn file_descriptor_set_path<P>(&mut self, path: P) -> &mut Self
+ where
+ P: Into<PathBuf>,
+ {
+ self.file_descriptor_set_path = Some(path.into());
+ self
+ }
+
+ /// In combination with `file_descriptor_set_path`, this can be used to provide a file
+ /// descriptor set as an input file, rather than having prost-build generate the file by calling
+ /// protoc.
+ ///
+ /// In `build.rs`:
+ ///
+ /// ```rust
+ /// # let mut config = prost_build::Config::new();
+ /// config.file_descriptor_set_path("path/from/build/system")
+ /// .skip_protoc_run()
+ /// .compile_protos(&["src/items.proto"], &["src/"]);
+ /// ```
+ ///
+ pub fn skip_protoc_run(&mut self) -> &mut Self {
+ self.skip_protoc_run = true;
+ self
+ }
+
+ /// Configures the code generator to not strip the enum name from variant names.
+ ///
+ /// Protobuf enum definitions commonly include the enum name as a prefix of every variant name.
+ /// This style is non-idiomatic in Rust, so by default `prost` strips the enum name prefix from
+ /// variants which include it. Configuring this option prevents `prost` from stripping the
+ /// prefix.
+ pub fn retain_enum_prefix(&mut self) -> &mut Self {
+ // Clearing the flag disables the default prefix stripping.
+ self.strip_enum_prefix = false;
+ self
+ }
+
+ /// Configures the output directory where generated Rust files will be written.
+ ///
+ /// If unset, defaults to the `OUT_DIR` environment variable. `OUT_DIR` is set by Cargo when
+ /// executing build scripts, so `out_dir` typically does not need to be configured.
+ pub fn out_dir<P>(&mut self, path: P) -> &mut Self
+ where
+ P: Into<PathBuf>,
+ {
+ self.out_dir = Some(path.into());
+ self
+ }
+
+ /// Configures what filename protobufs with no package definition are written to.
+ /// The filename will be appended with the `.rs` extension.
+ pub fn default_package_filename<S>(&mut self, filename: S) -> &mut Self
+ where
+ S: Into<String>,
+ {
+ self.default_package_filename = filename.into();
+ self
+ }
+
+ /// Configures the code generator to include type names.
+ ///
+ /// Message types will implement `Name` trait, which provides type and package name.
+ /// This is needed for encoding messages as `Any` type.
+ pub fn enable_type_names(&mut self) -> &mut Self {
+ self.enable_type_names = true;
+ self
+ }
+
+ /// Specify domain names to use with message type URLs.
+ ///
+ /// # Domains
+ ///
+ /// **`paths`** - a path matching any number of types. It works the same way as in
+ /// [`btree_map`](#method.btree_map), just with the field name omitted.
+ ///
+ /// **`domain`** - an arbitrary string to be used as a prefix for type URLs.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # let mut config = prost_build::Config::new();
+ /// // Full type URL of the message `google.profile.Person`,
+ /// // will be `type.googleapis.com/google.profile.Person`.
+ /// config.type_name_domain(&["."], "type.googleapis.com");
+ /// ```
+ pub fn type_name_domain<I, S, D>(&mut self, paths: I, domain: D) -> &mut Self
+ where
+ I: IntoIterator<Item = S>,
+ S: AsRef<str>,
+ D: AsRef<str>,
+ {
+ // A new call replaces any previously configured domains.
+ self.type_name_domains.clear();
+ for matcher in paths {
+ self.type_name_domains
+ .insert(matcher.as_ref().to_string(), domain.as_ref().to_string());
+ }
+ self
+ }
+
+ /// Configures the path that's used for deriving `Message` for generated messages.
+ /// This is mainly useful for generating crates that wish to re-export prost.
+ /// Defaults to `::prost::Message` if not specified.
+ pub fn prost_path<S>(&mut self, path: S) -> &mut Self
+ where
+ S: Into<String>,
+ {
+ self.prost_path = Some(path.into());
+ self
+ }
+
+ /// Add an argument to the `protoc` protobuf compilation invocation.
+ ///
+ /// # Example `build.rs`
+ ///
+ /// ```rust,no_run
+ /// # use std::io::Result;
+ /// fn main() -> Result<()> {
+ /// let mut prost_build = prost_build::Config::new();
+ /// // Enable a protoc experimental feature.
+ /// prost_build.protoc_arg("--experimental_allow_proto3_optional");
+ /// prost_build.compile_protos(&["src/frontend.proto", "src/backend.proto"], &["src"])?;
+ /// Ok(())
+ /// }
+ /// ```
+ pub fn protoc_arg<S>(&mut self, arg: S) -> &mut Self
+ where
+ S: AsRef<OsStr>,
+ {
+ // Arguments accumulate in call order.
+ self.protoc_args.push(arg.as_ref().to_owned());
+ self
+ }
+
+ /// Configures the optional module filename for easy inclusion of all generated Rust files
+ ///
+ /// If set, generates a file (inside the `OUT_DIR` or `out_dir()` as appropriate) which contains
+ /// a set of `pub mod XXX` statements combining to load all Rust files generated. This can allow
+ /// for a shortcut where multiple related proto files have been compiled together resulting in
+ /// a semi-complex set of includes.
+ ///
+ /// Turning a need for:
+ ///
+ /// ```rust,no_run,ignore
+ /// pub mod Foo {
+ /// pub mod Bar {
+ /// include!(concat!(env!("OUT_DIR"), "/foo.bar.rs"));
+ /// }
+ /// pub mod Baz {
+ /// include!(concat!(env!("OUT_DIR"), "/foo.baz.rs"));
+ /// }
+ /// }
+ /// ```
+ ///
+ /// Into the simpler:
+ ///
+ /// ```rust,no_run,ignore
+ /// include!(concat!(env!("OUT_DIR"), "/_includes.rs"));
+ /// ```
+ pub fn include_file<P>(&mut self, path: P) -> &mut Self
+ where
+ P: Into<PathBuf>,
+ {
+ // The path is joined onto the output directory when the file is written
+ // (see `compile_fds`).
+ self.include_file = Some(path.into());
+ self
+ }
+
+ // IMPROVEMENT: https://github.com/tokio-rs/prost/pull/1022/files#r1563818651
+ /// Configures the code generator to format the output code via `prettyplease`.
+ ///
+ /// By default, this is enabled but if the `format` feature is not enabled this does
+ /// nothing.
+ #[cfg(feature = "format")]
+ pub fn format(&mut self, enabled: bool) -> &mut Self {
+ self.fmt = enabled;
+ self
+ }
+
+ /// Compile a [`FileDescriptorSet`] into Rust files during a Cargo build with
+ /// additional code generator configuration options.
+ ///
+ /// This method is like `compile_protos` function except it does not invoke `protoc`
+ /// and instead requires the user to supply a [`FileDescriptorSet`].
+ ///
+ /// # Example `build.rs`
+ ///
+ /// ```rust,no_run
+ /// # use prost_types::FileDescriptorSet;
+ /// # fn fds() -> FileDescriptorSet { todo!() }
+ /// fn main() -> std::io::Result<()> {
+ /// let file_descriptor_set = fds();
+ ///
+ /// prost_build::Config::new()
+ /// .compile_fds(file_descriptor_set)
+ /// }
+ /// ```
+ pub fn compile_fds(&mut self, fds: FileDescriptorSet) -> Result<()> {
+ // Resolve the output directory: an explicit `out_dir` wins, otherwise fall
+ // back to Cargo's OUT_DIR env var. `target_is_env` records which source was
+ // used; when the target is OUT_DIR, `write_includes` below is given no
+ // explicit base path (None).
+ let mut target_is_env = false;
+ let target: PathBuf = self.out_dir.clone().map(Ok).unwrap_or_else(|| {
+ env::var_os("OUT_DIR")
+ .ok_or_else(|| {
+ Error::new(ErrorKind::Other, "OUT_DIR environment variable is not set")
+ })
+ .map(|val| {
+ target_is_env = true;
+ Into::into(val)
+ })
+ })?;
+
+ // Pair each file descriptor with the Rust module derived from its package.
+ let requests = fds
+ .file
+ .into_iter()
+ .map(|descriptor| {
+ (
+ Module::from_protobuf_package_name(descriptor.package()),
+ descriptor,
+ )
+ })
+ .collect::<Vec<_>>();
+
+ // Module -> output filename; packageless protos use `default_package_filename`.
+ let file_names = requests
+ .iter()
+ .map(|req| {
+ (
+ req.0.clone(),
+ req.0.to_file_name_or(&self.default_package_filename),
+ )
+ })
+ .collect::<HashMap<Module, String>>();
+
+ let modules = self.generate(requests)?;
+ for (module, content) in &modules {
+ let file_name = file_names
+ .get(module)
+ .expect("every module should have a filename");
+ let output_path = target.join(file_name);
+
+ let previous_content = fs::read(&output_path);
+
+ // Skip rewriting files whose contents are unchanged, so on-disk
+ // files are only touched when the generated output actually differs.
+ if previous_content
+ .map(|previous_content| previous_content == content.as_bytes())
+ .unwrap_or(false)
+ {
+ trace!("unchanged: {:?}", file_name);
+ } else {
+ trace!("writing: {:?}", file_name);
+ fs::write(output_path, content)?;
+ }
+ }
+
+ // Optionally emit a single include file covering all generated modules
+ // (see `include_file` for the shape of its contents).
+ if let Some(ref include_file) = self.include_file {
+ trace!("Writing include file: {:?}", target.join(include_file));
+ let mut file = fs::File::create(target.join(include_file))?;
+ self.write_line(&mut file, 0, "// This file is @generated by prost-build.")?;
+ self.write_includes(
+ modules.keys().collect(),
+ &mut file,
+ if target_is_env { None } else { Some(&target) },
+ &file_names,
+ )?;
+ file.flush()?;
+ }
+
+ Ok(())
+ }
+
    /// Compile `.proto` files into Rust files during a Cargo build with additional code generator
    /// configuration options.
    ///
    /// This method is like the `prost_build::compile_protos` function, with the added ability to
    /// specify non-default code generation options. See that function for more information about
    /// the arguments and generated outputs.
    ///
    /// The `protos` and `includes` arguments are ignored if `skip_protoc_run` is specified.
    ///
    /// # Example `build.rs`
    ///
    /// ```rust,no_run
    /// # use std::io::Result;
    /// fn main() -> Result<()> {
    ///   let mut prost_build = prost_build::Config::new();
    ///   prost_build.btree_map(&["."]);
    ///   prost_build.compile_protos(&["src/frontend.proto", "src/backend.proto"], &["src"])?;
    ///   Ok(())
    /// }
    /// ```
    pub fn compile_protos(
        &mut self,
        protos: &[impl AsRef<Path>],
        includes: &[impl AsRef<Path>],
    ) -> Result<()> {
        // TODO: This should probably emit 'rerun-if-changed=PATH' directives for cargo, however
        // according to [1] if any are output then those paths replace the default crate root,
        // which is undesirable. Figure out how to do it in an additive way; perhaps gcc-rs has
        // this figured out.
        // [1]: http://doc.crates.io/build-script.html#outputs-of-the-build-script

        // `tmp` is declared here (not inside the `else` arm) so the temporary
        // directory is not dropped/deleted before `fs::read` below consumes
        // the descriptor set written into it.
        let tmp;
        let file_descriptor_set_path = if let Some(path) = &self.file_descriptor_set_path {
            path.clone()
        } else {
            // Without a protoc run there is no descriptor set to read unless
            // the caller supplied one explicitly.
            if self.skip_protoc_run {
                return Err(Error::new(
                    ErrorKind::Other,
                    "file_descriptor_set_path is required with skip_protoc_run",
                ));
            }
            tmp = tempfile::Builder::new().prefix("prost-build").tempdir()?;
            tmp.path().join("prost-descriptor-set")
        };

        if !self.skip_protoc_run {
            let protoc = protoc_from_env();

            let mut cmd = Command::new(protoc.clone());
            cmd.arg("--include_imports")
                .arg("--include_source_info")
                .arg("-o")
                .arg(&file_descriptor_set_path);

            // Missing include paths would make protoc fail outright, so they
            // are skipped (with a debug log) instead of being passed through.
            for include in includes {
                if include.as_ref().exists() {
                    cmd.arg("-I").arg(include.as_ref());
                } else {
                    debug!(
                        "ignoring {} since it does not exist.",
                        include.as_ref().display()
                    )
                }
            }

            // Set the protoc include after the user includes in case the user wants to
            // override one of the built-in .protos.
            if let Some(protoc_include) = protoc_include_from_env() {
                cmd.arg("-I").arg(protoc_include);
            }

            for arg in &self.protoc_args {
                cmd.arg(arg);
            }

            for proto in protos {
                cmd.arg(proto.as_ref());
            }

            debug!("Running: {:?}", cmd);

            // A NotFound spawn error gets the friendlier "how to install
            // protoc" message; other spawn failures keep their original kind.
            let output = match cmd.output() {
                Err(err) if ErrorKind::NotFound == err.kind() => return Err(Error::new(
                    err.kind(),
                    error_message_protoc_not_found()
                )),
                Err(err) => return Err(Error::new(
                    err.kind(),
                    format!("failed to invoke protoc (hint: https://docs.rs/prost-build/#sourcing-protoc): (path: {:?}): {}", &protoc, err),
                )),
                Ok(output) => output,
            };

            if !output.status.success() {
                return Err(Error::new(
                    ErrorKind::Other,
                    format!("protoc failed: {}", String::from_utf8_lossy(&output.stderr)),
                ));
            }
        }

        // Decode the descriptor set (whether freshly generated or supplied)
        // and hand it to the protoc-free compilation path.
        let buf = fs::read(&file_descriptor_set_path).map_err(|e| {
            Error::new(
                e.kind(),
                format!(
                    "unable to open file_descriptor_set_path: {:?}, OS: {}",
                    &file_descriptor_set_path, e
                ),
            )
        })?;
        let file_descriptor_set = FileDescriptorSet::decode(buf.as_slice()).map_err(|error| {
            Error::new(
                ErrorKind::InvalidInput,
                format!("invalid FileDescriptorSet: {}", error),
            )
        })?;

        self.compile_fds(file_descriptor_set)
    }
+
    /// Writes the nested `pub mod … { include!(…); }` tree covering `modules`
    /// to `outfile`.
    ///
    /// When `basepath` is `Some`, files are included by file name (relative to
    /// that directory); otherwise the generated code falls back to
    /// `concat!(env!("OUT_DIR"), …)`.
    pub(crate) fn write_includes(
        &self,
        mut modules: Vec<&Module>,
        outfile: &mut impl Write,
        basepath: Option<&PathBuf>,
        file_names: &HashMap<Module, String>,
    ) -> Result<()> {
        // Sorting groups sibling modules together so each `pub mod` block is
        // opened and closed exactly once.
        modules.sort();

        // Invariant: `stack` holds the name parts of the currently-open
        // module path; its length is also the current indentation depth.
        let mut stack = Vec::new();

        for module in modules {
            // Close modules that are not an ancestor of the next module.
            while !module.starts_with(&stack) {
                stack.pop();
                self.write_line(outfile, stack.len(), "}")?;
            }
            // Open any not-yet-open ancestors of the current module.
            while stack.len() < module.len() {
                self.write_line(
                    outfile,
                    stack.len(),
                    &format!("pub mod {} {{", module.part(stack.len())),
                )?;
                stack.push(module.part(stack.len()).to_owned());
            }

            let file_name = file_names
                .get(module)
                .expect("every module should have a filename");

            if basepath.is_some() {
                self.write_line(
                    outfile,
                    stack.len(),
                    &format!("include!(\"{}\");", file_name),
                )?;
            } else {
                self.write_line(
                    outfile,
                    stack.len(),
                    &format!("include!(concat!(env!(\"OUT_DIR\"), \"/{}\"));", file_name),
                )?;
            }
        }

        // Close whatever modules remain open after the last include.
        for depth in (0..stack.len()).rev() {
            self.write_line(outfile, depth, "}")?;
        }

        Ok(())
    }
+
+ fn write_line(&self, outfile: &mut impl Write, depth: usize, line: &str) -> Result<()> {
+ outfile.write_all(format!("{}{}\n", (" ").to_owned().repeat(depth), line).as_bytes())
+ }
+
    /// Processes a set of modules and file descriptors, returning a map of modules to generated
    /// code contents.
    ///
    /// This is generally used when control over the output should not be managed by Prost,
    /// such as in a flow for a `protoc` code generating plugin. When compiling as part of a
    /// `build.rs` file, instead use [`compile_protos()`].
    pub fn generate(
        &mut self,
        requests: Vec<(Module, FileDescriptorProto)>,
    ) -> Result<HashMap<Module, String>> {
        let mut modules = HashMap::new();
        let mut packages = HashMap::new();

        // Build the shared analyses consumed by the per-file code generator.
        let message_graph = MessageGraph::new(requests.iter().map(|x| &x.1))
            .map_err(|error| Error::new(ErrorKind::InvalidInput, error))?;
        let extern_paths = ExternPaths::new(&self.extern_paths, self.prost_types)
            .map_err(|error| Error::new(ErrorKind::InvalidInput, error))?;

        for (request_module, request_fd) in requests {
            // Only record packages that have services
            if !request_fd.service.is_empty() {
                packages.insert(request_module.clone(), request_fd.package().to_string());
            }
            // Multiple files in the same protobuf package append to a shared
            // per-module buffer.
            let buf = modules
                .entry(request_module.clone())
                .or_insert_with(String::new);
            CodeGenerator::generate(self, &message_graph, &extern_paths, request_fd, buf);
            if buf.is_empty() {
                // Did not generate any code, remove from list to avoid inclusion in include file or output file list
                modules.remove(&request_module);
            }
        }

        if let Some(ref mut service_generator) = self.service_generator {
            for (module, package) in packages {
                // NOTE(review): assumes every service-bearing module produced
                // non-empty code above; if a module was removed as empty, this
                // `unwrap` would panic — confirm against CodeGenerator.
                let buf = modules.get_mut(&module).unwrap();
                service_generator.finalize_package(&package, buf);
            }
        }

        #[cfg(feature = "format")]
        if self.fmt {
            for buf in modules.values_mut() {
                // Generated code is expected to parse; a failure here would
                // indicate a code-generator bug rather than bad user input.
                let file = syn::parse_file(buf).unwrap();
                let formatted = prettyplease::unparse(&file);
                *buf = formatted;
            }
        }

        self.add_generated_modules(&mut modules);

        Ok(modules)
    }
+
+ fn add_generated_modules(&mut self, modules: &mut HashMap<Module, String>) {
+ for buf in modules.values_mut() {
+ let with_generated = "// This file is @generated by prost-build.\n".to_string() + buf;
+ *buf = with_generated;
+ }
+ }
+}
+
+impl default::Default for Config {
+ fn default() -> Config {
+ Config {
+ file_descriptor_set_path: None,
+ service_generator: None,
+ map_type: PathMap::default(),
+ bytes_type: PathMap::default(),
+ type_attributes: PathMap::default(),
+ message_attributes: PathMap::default(),
+ enum_attributes: PathMap::default(),
+ field_attributes: PathMap::default(),
+ boxed: PathMap::default(),
+ prost_types: true,
+ strip_enum_prefix: true,
+ out_dir: None,
+ extern_paths: Vec::new(),
+ default_package_filename: "_".to_string(),
+ enable_type_names: false,
+ type_name_domains: PathMap::default(),
+ protoc_args: Vec::new(),
+ disable_comments: PathMap::default(),
+ skip_debug: PathMap::default(),
+ skip_protoc_run: false,
+ include_file: None,
+ prost_path: None,
+ #[cfg(feature = "format")]
+ fmt: true,
+ }
+ }
+}
+
+impl fmt::Debug for Config {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt.debug_struct("Config")
+ .field("file_descriptor_set_path", &self.file_descriptor_set_path)
+ .field("service_generator", &self.service_generator.is_some())
+ .field("map_type", &self.map_type)
+ .field("bytes_type", &self.bytes_type)
+ .field("type_attributes", &self.type_attributes)
+ .field("field_attributes", &self.field_attributes)
+ .field("prost_types", &self.prost_types)
+ .field("strip_enum_prefix", &self.strip_enum_prefix)
+ .field("out_dir", &self.out_dir)
+ .field("extern_paths", &self.extern_paths)
+ .field("default_package_filename", &self.default_package_filename)
+ .field("enable_type_names", &self.enable_type_names)
+ .field("type_name_domains", &self.type_name_domains)
+ .field("protoc_args", &self.protoc_args)
+ .field("disable_comments", &self.disable_comments)
+ .field("skip_debug", &self.skip_debug)
+ .field("prost_path", &self.prost_path)
+ .finish()
+ }
+}
+
/// Builds the user-facing error message shown when the `protoc` binary cannot
/// be located, including a platform-appropriate installation hint.
pub fn error_message_protoc_not_found() -> String {
    // Pick an installation hint matching the build platform.
    let os_specific_hint = if cfg!(target_os = "macos") {
        "To install it on macOS, run `brew install protobuf`."
    } else if cfg!(target_os = "linux") {
        "To install it on Debian, run `apt-get install protobuf-compiler`."
    } else {
        "Try installing `protobuf-compiler` or `protobuf` using your package manager."
    };

    format!(
        "{} {} {} For more information: https://docs.rs/prost-build/#sourcing-protoc",
        "Could not find `protoc`. If `protoc` is installed, try setting the `PROTOC` environment variable to the path of the `protoc` binary.",
        os_specific_hint,
        "It is also available at https://github.com/protocolbuffers/protobuf/releases",
    )
}
+
/// Returns the path to the `protoc` binary.
///
/// Uses the `PROTOC` environment variable when set; otherwise falls back to
/// plain `"protoc"`, which is resolved via `PATH` at invocation time.
pub fn protoc_from_env() -> PathBuf {
    env::var_os("PROTOC")
        .map(PathBuf::from)
        // `unwrap_or_else` defers building the fallback `PathBuf` so no
        // allocation happens when PROTOC is set (clippy: or_fun_call).
        .unwrap_or_else(|| PathBuf::from("protoc"))
}
+
/// Returns the path to the Protobuf include directory.
///
/// Reads the `PROTOC_INCLUDE` environment variable, returning `None` when it
/// is unset. Panics when the variable points at a missing path or at
/// something that is not a directory, since silently ignoring it would hide a
/// misconfiguration.
pub fn protoc_include_from_env() -> Option<PathBuf> {
    let protoc_include = PathBuf::from(env::var_os("PROTOC_INCLUDE")?);

    assert!(
        protoc_include.exists(),
        "PROTOC_INCLUDE environment variable points to non-existent directory ({:?})",
        protoc_include
    );
    assert!(
        protoc_include.is_dir(),
        "PROTOC_INCLUDE environment variable points to a non-directory file ({:?})",
        protoc_include
    );

    Some(protoc_include)
}
diff --git a/vendor/prost-build/src/extern_paths.rs b/vendor/prost-build/src/extern_paths.rs
new file mode 100644
index 00000000..27c8d6d7
--- /dev/null
+++ b/vendor/prost-build/src/extern_paths.rs
@@ -0,0 +1,170 @@
+use std::collections::{hash_map, HashMap};
+
+use itertools::Itertools;
+
+use crate::ident::{to_snake, to_upper_camel};
+
/// Checks that `path` is a fully-qualified Protobuf path: it must begin with
/// a leading `'.'` and contain no empty segments after it.
fn validate_proto_path(path: &str) -> Result<(), String> {
    if !path.starts_with('.') {
        return Err(format!(
            "Protobuf paths must be fully qualified (begin with a leading '.'): {}",
            path
        ));
    }

    let mut segments = path.split('.');
    // The text before the leading '.' is an empty segment by construction.
    segments.next();
    if segments.any(str::is_empty) {
        return Err(format!("invalid fully-qualified Protobuf path: {}", path));
    }

    Ok(())
}
+
/// Maps fully-qualified Protobuf paths (e.g. `.google.protobuf.Empty`) to the
/// Rust paths that generated code should reference instead of a
/// prost-generated type.
#[derive(Debug)]
pub struct ExternPaths {
    // Keyed by fully-qualified Protobuf path (leading '.'); values are Rust
    // paths such as `::prost_types` or `()`.
    extern_paths: HashMap<String, String>,
}
+
impl ExternPaths {
    /// Builds the lookup table from user-supplied `(proto_path, rust_path)`
    /// pairs. When `prost_types` is true, mappings for the Google well-known
    /// types are added so they resolve to `prost-types` or to native Rust
    /// types instead of generated structs.
    pub fn new(paths: &[(String, String)], prost_types: bool) -> Result<ExternPaths, String> {
        let mut extern_paths = ExternPaths {
            extern_paths: HashMap::new(),
        };

        for (proto_path, rust_path) in paths {
            extern_paths.insert(proto_path.clone(), rust_path.clone())?;
        }

        if prost_types {
            // The wrapper well-known types map directly to native Rust types.
            extern_paths.insert(".google.protobuf".to_string(), "::prost_types".to_string())?;
            extern_paths.insert(".google.protobuf.BoolValue".to_string(), "bool".to_string())?;
            extern_paths.insert(
                ".google.protobuf.BytesValue".to_string(),
                "::prost::alloc::vec::Vec<u8>".to_string(),
            )?;
            extern_paths.insert(
                ".google.protobuf.DoubleValue".to_string(),
                "f64".to_string(),
            )?;
            extern_paths.insert(".google.protobuf.Empty".to_string(), "()".to_string())?;
            extern_paths.insert(".google.protobuf.FloatValue".to_string(), "f32".to_string())?;
            extern_paths.insert(".google.protobuf.Int32Value".to_string(), "i32".to_string())?;
            extern_paths.insert(".google.protobuf.Int64Value".to_string(), "i64".to_string())?;
            extern_paths.insert(
                ".google.protobuf.StringValue".to_string(),
                "::prost::alloc::string::String".to_string(),
            )?;
            extern_paths.insert(
                ".google.protobuf.UInt32Value".to_string(),
                "u32".to_string(),
            )?;
            extern_paths.insert(
                ".google.protobuf.UInt64Value".to_string(),
                "u64".to_string(),
            )?;
        }

        Ok(extern_paths)
    }

    /// Validates `proto_path` and records the mapping, rejecting duplicates.
    fn insert(&mut self, proto_path: String, rust_path: String) -> Result<(), String> {
        validate_proto_path(&proto_path)?;
        match self.extern_paths.entry(proto_path) {
            hash_map::Entry::Occupied(occupied) => {
                return Err(format!(
                    "duplicate extern Protobuf path: {}",
                    occupied.key()
                ));
            }
            hash_map::Entry::Vacant(vacant) => vacant.insert(rust_path),
        };
        Ok(())
    }

    /// Resolves a fully-qualified Protobuf identifier (e.g. `.foo.bar.Baz`)
    /// to its configured Rust path, if any mapping applies.
    ///
    /// An exact match wins. Otherwise the longest mapped ancestor package is
    /// used: its Rust path segments plus the remaining intermediate segments
    /// are snake_cased (except a leading `crate`), and the final segment is
    /// converted to an UpperCamelCase type name.
    pub fn resolve_ident(&self, pb_ident: &str) -> Option<String> {
        // protoc should always give fully qualified identifiers.
        assert_eq!(".", &pb_ident[..1]);

        if let Some(rust_path) = self.extern_paths.get(pb_ident) {
            return Some(rust_path.clone());
        }

        // TODO(danburkert): there must be a more efficient way to do this, maybe a trie?
        // `rmatch_indices` walks '.' positions right-to-left, so the first
        // hit is the longest mapped prefix.
        for (idx, _) in pb_ident.rmatch_indices('.') {
            if let Some(rust_path) = self.extern_paths.get(&pb_ident[..idx]) {
                let mut segments = pb_ident[idx + 1..].split('.');
                // The last segment is the type; the rest become modules.
                let ident_type = segments.next_back().map(to_upper_camel);

                return Some(
                    rust_path
                        .split("::")
                        .chain(segments)
                        .enumerate()
                        .map(|(idx, segment)| {
                            if idx == 0 && segment == "crate" {
                                // If the first segment of the path is 'crate', then do not escape
                                // it into a raw identifier, since it's being used as the keyword.
                                segment.to_owned()
                            } else {
                                to_snake(segment)
                            }
                        })
                        .chain(ident_type.into_iter())
                        .join("::"),
                );
            }
        }

        None
    }
}
+
#[cfg(test)]
mod tests {

    use super::*;

    #[test]
    fn test_extern_paths() {
        // Mix of package-level, type-level, and deeply nested mappings.
        let paths = ExternPaths::new(
            &[
                (".foo".to_string(), "::foo1".to_string()),
                (".foo.bar".to_string(), "::foo2".to_string()),
                (".foo.baz".to_string(), "::foo3".to_string()),
                (".foo.Fuzz".to_string(), "::foo4::Fuzz".to_string()),
                (".a.b.c.d.e.f".to_string(), "::abc::def".to_string()),
            ],
            false,
        )
        .unwrap();

        let case = |proto_ident: &str, resolved_ident: &str| {
            assert_eq!(paths.resolve_ident(proto_ident).unwrap(), resolved_ident);
        };

        // Exact matches and single-segment fallbacks under `.foo`.
        case(".foo", "::foo1");
        case(".foo.Foo", "::foo1::Foo");
        case(".foo.bar", "::foo2");
        case(".foo.Bas", "::foo1::Bas");

        // The longest mapped prefix wins; trailing segments are converted.
        case(".foo.bar.Bar", "::foo2::Bar");
        case(".foo.Fuzz.Bar", "::foo4::fuzz::Bar");

        case(".a.b.c.d.e.f", "::abc::def");
        case(".a.b.c.d.e.f.g.FooBar.Baz", "::abc::def::g::foo_bar::Baz");

        // Unmapped packages (including prefixes of mapped ones) resolve to None.
        assert!(paths.resolve_ident(".a").is_none());
        assert!(paths.resolve_ident(".a.b").is_none());
        assert!(paths.resolve_ident(".a.c").is_none());
    }

    #[test]
    fn test_well_known_types() {
        // `prost_types = true` installs the built-in well-known-type mappings.
        let paths = ExternPaths::new(&[], true).unwrap();

        let case = |proto_ident: &str, resolved_ident: &str| {
            assert_eq!(paths.resolve_ident(proto_ident).unwrap(), resolved_ident);
        };

        case(".google.protobuf.Value", "::prost_types::Value");
        case(".google.protobuf.Duration", "::prost_types::Duration");
        case(".google.protobuf.Empty", "()");
    }
}
diff --git a/vendor/prost-build/src/fixtures/alphabet/_expected_include.rs b/vendor/prost-build/src/fixtures/alphabet/_expected_include.rs
new file mode 100644
index 00000000..87de4fee
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/alphabet/_expected_include.rs
@@ -0,0 +1,19 @@
+// This file is @generated by prost-build.
+pub mod a {
+ include!("a.rs");
+}
+pub mod b {
+ include!("b.rs");
+}
+pub mod c {
+ include!("c.rs");
+}
+pub mod d {
+ include!("d.rs");
+}
+pub mod e {
+ include!("e.rs");
+}
+pub mod f {
+ include!("f.rs");
+}
diff --git a/vendor/prost-build/src/fixtures/alphabet/a.proto b/vendor/prost-build/src/fixtures/alphabet/a.proto
new file mode 100644
index 00000000..a9d4aab0
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/alphabet/a.proto
@@ -0,0 +1,8 @@
+syntax = "proto3";
+
+package a;
+
+message Alpha {
+ string sign = 1;
+ string pronunciation = 2;
+}
diff --git a/vendor/prost-build/src/fixtures/alphabet/b.proto b/vendor/prost-build/src/fixtures/alphabet/b.proto
new file mode 100644
index 00000000..decd9fb6
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/alphabet/b.proto
@@ -0,0 +1,8 @@
+syntax = "proto3";
+
+package b;
+
+message Bravo {
+ string sign = 1;
+ string pronunciation = 2;
+}
diff --git a/vendor/prost-build/src/fixtures/alphabet/c.proto b/vendor/prost-build/src/fixtures/alphabet/c.proto
new file mode 100644
index 00000000..023a8a39
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/alphabet/c.proto
@@ -0,0 +1,8 @@
+syntax = "proto3";
+
+package c;
+
+message Charlie {
+ string sign = 1;
+ string pronunciation = 2;
+}
diff --git a/vendor/prost-build/src/fixtures/alphabet/d.proto b/vendor/prost-build/src/fixtures/alphabet/d.proto
new file mode 100644
index 00000000..b86c875e
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/alphabet/d.proto
@@ -0,0 +1,8 @@
+syntax = "proto3";
+
+package d;
+
+message Delta {
+ string sign = 1;
+ string pronunciation = 2;
+}
diff --git a/vendor/prost-build/src/fixtures/alphabet/e.proto b/vendor/prost-build/src/fixtures/alphabet/e.proto
new file mode 100644
index 00000000..d0ed3379
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/alphabet/e.proto
@@ -0,0 +1,8 @@
+syntax = "proto3";
+
+package e;
+
+message Echo {
+ string sign = 1;
+ string pronunciation = 2;
+} \ No newline at end of file
diff --git a/vendor/prost-build/src/fixtures/alphabet/f.proto b/vendor/prost-build/src/fixtures/alphabet/f.proto
new file mode 100644
index 00000000..0ebd85b0
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/alphabet/f.proto
@@ -0,0 +1,8 @@
+syntax = "proto3";
+
+package f;
+
+message Foxtrot {
+ string sign = 1;
+ string pronunciation = 2;
+} \ No newline at end of file
diff --git a/vendor/prost-build/src/fixtures/field_attributes/_expected_field_attributes.rs b/vendor/prost-build/src/fixtures/field_attributes/_expected_field_attributes.rs
new file mode 100644
index 00000000..95fb05d8
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/field_attributes/_expected_field_attributes.rs
@@ -0,0 +1,34 @@
+// This file is @generated by prost-build.
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct Container {
+ #[prost(oneof="container::Data", tags="1, 2")]
+ pub data: ::core::option::Option<container::Data>,
+}
+/// Nested message and enum types in `Container`.
+pub mod container {
+ #[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Oneof)]
+ pub enum Data {
+ #[prost(message, tag="1")]
+ Foo(::prost::alloc::boxed::Box<super::Foo>),
+ #[prost(message, tag="2")]
+ Bar(super::Bar),
+ }
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct Foo {
+ #[prost(string, tag="1")]
+ pub foo: ::prost::alloc::string::String,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct Bar {
+ #[prost(message, optional, boxed, tag="1")]
+ pub qux: ::core::option::Option<::prost::alloc::boxed::Box<Qux>>,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct Qux {
+}
diff --git a/vendor/prost-build/src/fixtures/field_attributes/_expected_field_attributes_formatted.rs b/vendor/prost-build/src/fixtures/field_attributes/_expected_field_attributes_formatted.rs
new file mode 100644
index 00000000..f1eaee75
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/field_attributes/_expected_field_attributes_formatted.rs
@@ -0,0 +1,33 @@
+// This file is @generated by prost-build.
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct Container {
+ #[prost(oneof = "container::Data", tags = "1, 2")]
+ pub data: ::core::option::Option<container::Data>,
+}
+/// Nested message and enum types in `Container`.
+pub mod container {
+ #[allow(clippy::derive_partial_eq_without_eq)]
+ #[derive(Clone, PartialEq, ::prost::Oneof)]
+ pub enum Data {
+ #[prost(message, tag = "1")]
+ Foo(::prost::alloc::boxed::Box<super::Foo>),
+ #[prost(message, tag = "2")]
+ Bar(super::Bar),
+ }
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct Foo {
+ #[prost(string, tag = "1")]
+ pub foo: ::prost::alloc::string::String,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct Bar {
+ #[prost(message, optional, boxed, tag = "1")]
+ pub qux: ::core::option::Option<::prost::alloc::boxed::Box<Qux>>,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct Qux {}
diff --git a/vendor/prost-build/src/fixtures/field_attributes/field_attributes.proto b/vendor/prost-build/src/fixtures/field_attributes/field_attributes.proto
new file mode 100644
index 00000000..9ef5aa89
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/field_attributes/field_attributes.proto
@@ -0,0 +1,21 @@
+syntax = "proto3";
+
+package field_attributes;
+
+message Container {
+ oneof data {
+ Foo foo = 1;
+ Bar bar = 2;
+ }
+}
+
+message Foo {
+ string foo = 1;
+}
+
+message Bar {
+ Qux qux = 1;
+}
+
+message Qux {
+}
diff --git a/vendor/prost-build/src/fixtures/helloworld/_expected_helloworld.rs b/vendor/prost-build/src/fixtures/helloworld/_expected_helloworld.rs
new file mode 100644
index 00000000..401ee90c
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/helloworld/_expected_helloworld.rs
@@ -0,0 +1,45 @@
+// This file is @generated by prost-build.
+#[derive(derive_builder::Builder)]
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct Message {
+ #[prost(string, tag="1")]
+ pub say: ::prost::alloc::string::String,
+}
+#[derive(derive_builder::Builder)]
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct Response {
+ #[prost(string, tag="1")]
+ pub say: ::prost::alloc::string::String,
+}
+#[some_enum_attr(u8)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
+#[repr(i32)]
+pub enum ServingStatus {
+ Unknown = 0,
+ Serving = 1,
+ NotServing = 2,
+}
+impl ServingStatus {
+ /// String value of the enum field names used in the ProtoBuf definition.
+ ///
+ /// The values are not transformed in any way and thus are considered stable
+ /// (if the ProtoBuf definition does not change) and safe for programmatic use.
+ pub fn as_str_name(&self) -> &'static str {
+ match self {
+ ServingStatus::Unknown => "UNKNOWN",
+ ServingStatus::Serving => "SERVING",
+ ServingStatus::NotServing => "NOT_SERVING",
+ }
+ }
+ /// Creates an enum from field names used in the ProtoBuf definition.
+ pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
+ match value {
+ "UNKNOWN" => Some(Self::Unknown),
+ "SERVING" => Some(Self::Serving),
+ "NOT_SERVING" => Some(Self::NotServing),
+ _ => None,
+ }
+ }
+}
diff --git a/vendor/prost-build/src/fixtures/helloworld/_expected_helloworld_formatted.rs b/vendor/prost-build/src/fixtures/helloworld/_expected_helloworld_formatted.rs
new file mode 100644
index 00000000..3f688c7e
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/helloworld/_expected_helloworld_formatted.rs
@@ -0,0 +1,45 @@
+// This file is @generated by prost-build.
+#[derive(derive_builder::Builder)]
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct Message {
+ #[prost(string, tag = "1")]
+ pub say: ::prost::alloc::string::String,
+}
+#[derive(derive_builder::Builder)]
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct Response {
+ #[prost(string, tag = "1")]
+ pub say: ::prost::alloc::string::String,
+}
+#[some_enum_attr(u8)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
+#[repr(i32)]
+pub enum ServingStatus {
+ Unknown = 0,
+ Serving = 1,
+ NotServing = 2,
+}
+impl ServingStatus {
+ /// String value of the enum field names used in the ProtoBuf definition.
+ ///
+ /// The values are not transformed in any way and thus are considered stable
+ /// (if the ProtoBuf definition does not change) and safe for programmatic use.
+ pub fn as_str_name(&self) -> &'static str {
+ match self {
+ ServingStatus::Unknown => "UNKNOWN",
+ ServingStatus::Serving => "SERVING",
+ ServingStatus::NotServing => "NOT_SERVING",
+ }
+ }
+ /// Creates an enum from field names used in the ProtoBuf definition.
+ pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
+ match value {
+ "UNKNOWN" => Some(Self::Unknown),
+ "SERVING" => Some(Self::Serving),
+ "NOT_SERVING" => Some(Self::NotServing),
+ _ => None,
+ }
+ }
+}
diff --git a/vendor/prost-build/src/fixtures/helloworld/goodbye.proto b/vendor/prost-build/src/fixtures/helloworld/goodbye.proto
new file mode 100644
index 00000000..4527d7d1
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/helloworld/goodbye.proto
@@ -0,0 +1,9 @@
+syntax = "proto3";
+
+import "types.proto";
+
+package helloworld;
+
+service Farewell {
+ rpc Goodbye (Message) returns (Response) {}
+}
diff --git a/vendor/prost-build/src/fixtures/helloworld/hello.proto b/vendor/prost-build/src/fixtures/helloworld/hello.proto
new file mode 100644
index 00000000..8661cc3e
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/helloworld/hello.proto
@@ -0,0 +1,9 @@
+syntax = "proto3";
+
+import "types.proto";
+
+package helloworld;
+
+service Greeting {
+ rpc Hello (Message) returns (Response) {}
+}
diff --git a/vendor/prost-build/src/fixtures/helloworld/types.proto b/vendor/prost-build/src/fixtures/helloworld/types.proto
new file mode 100644
index 00000000..5bf84aa6
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/helloworld/types.proto
@@ -0,0 +1,17 @@
+syntax = "proto3";
+
+package helloworld;
+
+message Message {
+ string say = 1;
+}
+
+message Response {
+ string say = 1;
+}
+
+enum ServingStatus {
+ UNKNOWN = 0;
+ SERVING = 1;
+ NOT_SERVING = 2;
+}
diff --git a/vendor/prost-build/src/fixtures/imports_empty/_expected_include.rs b/vendor/prost-build/src/fixtures/imports_empty/_expected_include.rs
new file mode 100644
index 00000000..a9dcc938
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/imports_empty/_expected_include.rs
@@ -0,0 +1,10 @@
+// This file is @generated by prost-build.
+pub mod com {
+ pub mod prost_test {
+ pub mod test {
+ pub mod v1 {
+ include!("com.prost_test.test.v1.rs");
+ }
+ }
+ }
+}
diff --git a/vendor/prost-build/src/fixtures/imports_empty/imports_empty.proto b/vendor/prost-build/src/fixtures/imports_empty/imports_empty.proto
new file mode 100644
index 00000000..30413421
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/imports_empty/imports_empty.proto
@@ -0,0 +1,40 @@
+syntax = "proto3";
+
+/*******************************************************************************
+ * 1. Package */
+package com.prost_test.test.v1;
+
+/*******************************************************************************
+ * 2. Imports */
+import "google/protobuf/empty.proto";
+
+/*******************************************************************************
+ * 3. File Options */
+
+/*******************************************************************************
+ * 4. service */
+
+/* test service */
+service Test {
+ /* test method */
+ rpc GetTest(google.protobuf.Empty) returns (GetTestResponse);
+}
+
+/******************************************************************************
+ * 5. resource "message" definitions */
+
+/* Test application configuration */
+message TestConfig {
+}
+
+/******************************************************************************
+ * 6. request & response "message" definitions */
+
+/* Test response */
+message GetTestResponse {
+ /* Test config */
+ TestConfig conf = 1;
+}
+
+/******************************************************************************
+ * 7. enum */ \ No newline at end of file
diff --git a/vendor/prost-build/src/fixtures/smoke_test/smoke_test.proto b/vendor/prost-build/src/fixtures/smoke_test/smoke_test.proto
new file mode 100644
index 00000000..04679a27
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/smoke_test/smoke_test.proto
@@ -0,0 +1,18 @@
+syntax = "proto2";
+
+package smoke_test;
+
+message SmokeRequest {
+}
+
+message SmokeResponse {
+}
+
+// Just a smoke test service.
+service SmokeService {
+
+ // A detached comment block.
+
+ // Blow some smoke.
+ rpc BlowSmoke(SmokeRequest) returns (SmokeResponse);
+}
diff --git a/vendor/prost-build/src/fixtures/write_includes/_.includes.rs b/vendor/prost-build/src/fixtures/write_includes/_.includes.rs
new file mode 100644
index 00000000..99b55563
--- /dev/null
+++ b/vendor/prost-build/src/fixtures/write_includes/_.includes.rs
@@ -0,0 +1,23 @@
+include!(concat!(env!("OUT_DIR"), "/_.default.rs"));
+pub mod bar {
+ include!(concat!(env!("OUT_DIR"), "/bar.rs"));
+}
+pub mod foo {
+ include!(concat!(env!("OUT_DIR"), "/foo.rs"));
+ pub mod bar {
+ include!(concat!(env!("OUT_DIR"), "/foo.bar.rs"));
+ pub mod a {
+ pub mod b {
+ pub mod c {
+ include!(concat!(env!("OUT_DIR"), "/foo.bar.a.b.c.rs"));
+ }
+ }
+ }
+ pub mod baz {
+ include!(concat!(env!("OUT_DIR"), "/foo.bar.baz.rs"));
+ }
+ pub mod qux {
+ include!(concat!(env!("OUT_DIR"), "/foo.bar.qux.rs"));
+ }
+ }
+}
diff --git a/vendor/prost-build/src/ident.rs b/vendor/prost-build/src/ident.rs
new file mode 100644
index 00000000..9d7deae8
--- /dev/null
+++ b/vendor/prost-build/src/ident.rs
@@ -0,0 +1,256 @@
+//! Utility functions for working with identifiers.
+
+use heck::{ToSnakeCase, ToUpperCamelCase};
+
/// Makes a protobuf identifier usable as a Rust identifier.
///
/// Three strategies are applied, in order:
/// 1. Keywords that Rust allows as raw identifiers are prefixed with `r#`.
/// 2. Keywords that cannot be raw identifiers (plus `_`) get a trailing `_`.
/// 3. Identifiers starting with a digit get a leading `_`.
///
/// Anything else is returned unchanged.
/// Keyword list: https://doc.rust-lang.org/reference/keywords.html.
pub fn sanitize_identifier(s: impl AsRef<str>) -> String {
    // Keywords that may be used as raw identifiers (2015/2018 strict + reserved).
    const RAW_CAPABLE_KEYWORDS: &[&str] = &[
        // 2015 strict keywords.
        "as", "break", "const", "continue", "else", "enum", "false", "fn", "for", "if", "impl",
        "in", "let", "loop", "match", "mod", "move", "mut", "pub", "ref", "return", "static",
        "struct", "trait", "true", "type", "unsafe", "use", "where", "while",
        // 2018 strict keywords.
        "dyn",
        // 2015 reserved keywords.
        "abstract", "become", "box", "do", "final", "macro", "override", "priv", "typeof",
        "unsized", "virtual", "yield",
        // 2018 reserved keywords.
        "async", "await", "try",
    ];
    // Keywords not supported as raw identifiers; suffixed with an underscore instead.
    const UNDERSCORE_SUFFIXED: &[&str] = &["_", "super", "self", "Self", "extern", "crate"];

    let ident = s.as_ref();
    if RAW_CAPABLE_KEYWORDS.contains(&ident) {
        format!("r#{}", ident)
    } else if UNDERSCORE_SUFFIXED.contains(&ident) {
        format!("{}_", ident)
    } else if ident.starts_with(|c: char| c.is_numeric()) {
        // Identifiers can't begin with a number; prefix with an underscore.
        format!("_{}", ident)
    } else {
        ident.to_string()
    }
}
+
+/// Converts a `camelCase` or `SCREAMING_SNAKE_CASE` identifier to a `lower_snake` case Rust field
+/// identifier.
+pub fn to_snake(s: impl AsRef<str>) -> String {
+ sanitize_identifier(s.as_ref().to_snake_case())
+}
+
+/// Converts a `snake_case` identifier to an `UpperCamel` case Rust type identifier.
+pub fn to_upper_camel(s: impl AsRef<str>) -> String {
+ sanitize_identifier(s.as_ref().to_upper_camel_case())
+}
+
+/// Strip an enum's type name from the prefix of an enum value.
+///
+/// This function assumes that both have been formatted to Rust's
+/// upper camel case naming conventions.
+///
+/// It also tries to handle cases where the stripped name would be
+/// invalid - for example, if it were to begin with a number.
+///
+/// If the stripped name is `"Self"`, it will be replaced with `"Self_"`
+pub fn strip_enum_prefix(prefix: &str, name: &str) -> String {
+ let stripped = name.strip_prefix(prefix).unwrap_or(name);
+
+ // If the next character after the stripped prefix is not
+ // uppercase, then it means that we didn't have a true prefix -
+ // for example, "Foo" should not be stripped from "Foobar".
+ let stripped = if stripped
+ .chars()
+ .next()
+ .map(char::is_uppercase)
+ .unwrap_or(false)
+ {
+ stripped
+ } else {
+ name
+ };
+ sanitize_identifier(stripped)
+}
+
#[cfg(test)]
mod tests {
    use super::*;

    // Covers all three sanitization strategies: `r#` raw identifiers for
    // keywords, `_` suffix for keywords that cannot be raw identifiers, and a
    // `_` prefix for identifiers beginning with a digit.
    #[test]
    fn test_sanitize_identifier() {
        assert_eq!(sanitize_identifier("as"), "r#as");
        assert_eq!(sanitize_identifier("break"), "r#break");
        assert_eq!(sanitize_identifier("const"), "r#const");
        assert_eq!(sanitize_identifier("continue"), "r#continue");
        assert_eq!(sanitize_identifier("else"), "r#else");
        assert_eq!(sanitize_identifier("enum"), "r#enum");
        assert_eq!(sanitize_identifier("false"), "r#false");
        assert_eq!(sanitize_identifier("fn"), "r#fn");
        assert_eq!(sanitize_identifier("for"), "r#for");
        assert_eq!(sanitize_identifier("if"), "r#if");
        assert_eq!(sanitize_identifier("impl"), "r#impl");
        assert_eq!(sanitize_identifier("in"), "r#in");
        assert_eq!(sanitize_identifier("let"), "r#let");
        assert_eq!(sanitize_identifier("loop"), "r#loop");
        assert_eq!(sanitize_identifier("match"), "r#match");
        assert_eq!(sanitize_identifier("mod"), "r#mod");
        assert_eq!(sanitize_identifier("move"), "r#move");
        assert_eq!(sanitize_identifier("mut"), "r#mut");
        assert_eq!(sanitize_identifier("pub"), "r#pub");
        assert_eq!(sanitize_identifier("ref"), "r#ref");
        assert_eq!(sanitize_identifier("return"), "r#return");
        assert_eq!(sanitize_identifier("static"), "r#static");
        assert_eq!(sanitize_identifier("struct"), "r#struct");
        assert_eq!(sanitize_identifier("trait"), "r#trait");
        assert_eq!(sanitize_identifier("true"), "r#true");
        assert_eq!(sanitize_identifier("type"), "r#type");
        assert_eq!(sanitize_identifier("unsafe"), "r#unsafe");
        assert_eq!(sanitize_identifier("use"), "r#use");
        assert_eq!(sanitize_identifier("where"), "r#where");
        assert_eq!(sanitize_identifier("while"), "r#while");
        assert_eq!(sanitize_identifier("dyn"), "r#dyn");
        assert_eq!(sanitize_identifier("abstract"), "r#abstract");
        assert_eq!(sanitize_identifier("become"), "r#become");
        assert_eq!(sanitize_identifier("box"), "r#box");
        assert_eq!(sanitize_identifier("do"), "r#do");
        assert_eq!(sanitize_identifier("final"), "r#final");
        assert_eq!(sanitize_identifier("macro"), "r#macro");
        assert_eq!(sanitize_identifier("override"), "r#override");
        assert_eq!(sanitize_identifier("priv"), "r#priv");
        assert_eq!(sanitize_identifier("typeof"), "r#typeof");
        assert_eq!(sanitize_identifier("unsized"), "r#unsized");
        assert_eq!(sanitize_identifier("virtual"), "r#virtual");
        assert_eq!(sanitize_identifier("yield"), "r#yield");
        assert_eq!(sanitize_identifier("async"), "r#async");
        assert_eq!(sanitize_identifier("await"), "r#await");
        assert_eq!(sanitize_identifier("try"), "r#try");
        // Keywords that cannot be raw identifiers are suffixed instead.
        assert_eq!(sanitize_identifier("self"), "self_");
        assert_eq!(sanitize_identifier("super"), "super_");
        assert_eq!(sanitize_identifier("extern"), "extern_");
        assert_eq!(sanitize_identifier("crate"), "crate_");
        // Ordinary identifiers pass through unchanged.
        assert_eq!(sanitize_identifier("foo"), "foo");
        assert_eq!(sanitize_identifier("bar"), "bar");
        assert_eq!(sanitize_identifier("baz"), "baz");
        assert_eq!(sanitize_identifier("0foo"), "_0foo");
        assert_eq!(sanitize_identifier("foo0"), "foo0");
        assert_eq!(sanitize_identifier("foo_"), "foo_");
        assert_eq!(sanitize_identifier("_foo"), "_foo");
    }

    // Verifies snake-case conversion for a variety of input casings, including
    // the field names exercised by protobuf conformance protos.
    #[test]
    fn test_to_snake() {
        assert_eq!("foo_bar", &to_snake("FooBar"));
        assert_eq!("foo_bar_baz", &to_snake("FooBarBAZ"));
        // NOTE(review): duplicated assertion — the line below repeats the one
        // above; it may have been intended to cover a different input.
        assert_eq!("foo_bar_baz", &to_snake("FooBarBAZ"));
        assert_eq!("xml_http_request", &to_snake("XMLHttpRequest"));
        assert_eq!("r#while", &to_snake("While"));
        assert_eq!("fuzz_buster", &to_snake("FUZZ_BUSTER"));
        assert_eq!("foo_bar_baz", &to_snake("foo_bar_baz"));
        assert_eq!("fuzz_buster", &to_snake("FUZZ_buster"));
        assert_eq!("fuzz", &to_snake("_FUZZ"));
        assert_eq!("fuzz", &to_snake("_fuzz"));
        assert_eq!("fuzz", &to_snake("_Fuzz"));
        assert_eq!("fuzz", &to_snake("FUZZ_"));
        assert_eq!("fuzz", &to_snake("fuzz_"));
        assert_eq!("fuzz", &to_snake("Fuzz_"));
        assert_eq!("fuz_z", &to_snake("FuzZ_"));

        // From test_messages_proto3.proto.
        assert_eq!("fieldname1", &to_snake("fieldname1"));
        assert_eq!("field_name2", &to_snake("field_name2"));
        assert_eq!("field_name3", &to_snake("_field_name3"));
        assert_eq!("field_name4", &to_snake("field__name4_"));
        assert_eq!("field0name5", &to_snake("field0name5"));
        assert_eq!("field_0_name6", &to_snake("field_0_name6"));
        assert_eq!("field_name7", &to_snake("fieldName7"));
        assert_eq!("field_name8", &to_snake("FieldName8"));
        assert_eq!("field_name9", &to_snake("field_Name9"));
        assert_eq!("field_name10", &to_snake("Field_Name10"));

        assert_eq!("field_name11", &to_snake("FIELD_NAME11"));
        assert_eq!("field_name12", &to_snake("FIELD_name12"));
        assert_eq!("field_name13", &to_snake("__field_name13"));
        assert_eq!("field_name14", &to_snake("__Field_name14"));
        assert_eq!("field_name15", &to_snake("field__name15"));
        assert_eq!("field_name16", &to_snake("field__Name16"));
        assert_eq!("field_name17", &to_snake("field_name17__"));
        assert_eq!("field_name18", &to_snake("Field_name18__"));
    }

    // Keywords lowered by `to_snake` must come back as raw identifiers.
    #[test]
    fn test_to_snake_raw_keyword() {
        assert_eq!("r#as", &to_snake("as"));
        assert_eq!("r#break", &to_snake("break"));
        assert_eq!("r#const", &to_snake("const"));
        assert_eq!("r#continue", &to_snake("continue"));
        assert_eq!("r#else", &to_snake("else"));
        assert_eq!("r#enum", &to_snake("enum"));
        assert_eq!("r#false", &to_snake("false"));
        assert_eq!("r#fn", &to_snake("fn"));
        assert_eq!("r#for", &to_snake("for"));
        assert_eq!("r#if", &to_snake("if"));
        assert_eq!("r#impl", &to_snake("impl"));
        assert_eq!("r#in", &to_snake("in"));
        assert_eq!("r#let", &to_snake("let"));
        assert_eq!("r#loop", &to_snake("loop"));
        assert_eq!("r#match", &to_snake("match"));
        assert_eq!("r#mod", &to_snake("mod"));
        assert_eq!("r#move", &to_snake("move"));
        assert_eq!("r#mut", &to_snake("mut"));
        assert_eq!("r#pub", &to_snake("pub"));
        assert_eq!("r#ref", &to_snake("ref"));
        assert_eq!("r#return", &to_snake("return"));
        assert_eq!("r#static", &to_snake("static"));
        assert_eq!("r#struct", &to_snake("struct"));
        assert_eq!("r#trait", &to_snake("trait"));
        assert_eq!("r#true", &to_snake("true"));
        assert_eq!("r#type", &to_snake("type"));
        assert_eq!("r#unsafe", &to_snake("unsafe"));
        assert_eq!("r#use", &to_snake("use"));
        assert_eq!("r#where", &to_snake("where"));
        assert_eq!("r#while", &to_snake("while"));
        assert_eq!("r#dyn", &to_snake("dyn"));
        assert_eq!("r#abstract", &to_snake("abstract"));
        assert_eq!("r#become", &to_snake("become"));
        assert_eq!("r#box", &to_snake("box"));
        assert_eq!("r#do", &to_snake("do"));
        assert_eq!("r#final", &to_snake("final"));
        assert_eq!("r#macro", &to_snake("macro"));
        assert_eq!("r#override", &to_snake("override"));
        assert_eq!("r#priv", &to_snake("priv"));
        assert_eq!("r#typeof", &to_snake("typeof"));
        assert_eq!("r#unsized", &to_snake("unsized"));
        assert_eq!("r#virtual", &to_snake("virtual"));
        assert_eq!("r#yield", &to_snake("yield"));
        assert_eq!("r#async", &to_snake("async"));
        assert_eq!("r#await", &to_snake("await"));
        assert_eq!("r#try", &to_snake("try"));
    }

    // Keywords that cannot be raw identifiers get an underscore suffix.
    #[test]
    fn test_to_snake_non_raw_keyword() {
        assert_eq!("self_", &to_snake("self"));
        assert_eq!("super_", &to_snake("super"));
        assert_eq!("extern_", &to_snake("extern"));
        assert_eq!("crate_", &to_snake("crate"));
    }

    // Upper-camel conversion, including underscore trimming and the special
    // `self` -> `Self_` sanitization.
    #[test]
    fn test_to_upper_camel() {
        assert_eq!("", &to_upper_camel(""));
        assert_eq!("F", &to_upper_camel("F"));
        assert_eq!("Foo", &to_upper_camel("FOO"));
        assert_eq!("FooBar", &to_upper_camel("FOO_BAR"));
        assert_eq!("FooBar", &to_upper_camel("_FOO_BAR"));
        assert_eq!("FooBar", &to_upper_camel("FOO_BAR_"));
        assert_eq!("FooBar", &to_upper_camel("_FOO_BAR_"));
        assert_eq!("FuzzBuster", &to_upper_camel("fuzzBuster"));
        assert_eq!("FuzzBuster", &to_upper_camel("FuzzBuster"));
        assert_eq!("Self_", &to_upper_camel("self"));
    }

    // Prefix stripping only happens at a camel-case word boundary, and the
    // stripped result is re-sanitized.
    #[test]
    fn test_strip_enum_prefix() {
        assert_eq!(strip_enum_prefix("Foo", "FooBar"), "Bar");
        assert_eq!(strip_enum_prefix("Foo", "Foobar"), "Foobar");
        assert_eq!(strip_enum_prefix("Foo", "Foo"), "Foo");
        assert_eq!(strip_enum_prefix("Foo", "Bar"), "Bar");
        assert_eq!(strip_enum_prefix("Foo", "Foo1"), "Foo1");
        // NOTE(review): duplicated assertion — same case as the first line of
        // this test.
        assert_eq!(strip_enum_prefix("Foo", "FooBar"), "Bar");
        assert_eq!(strip_enum_prefix("Foo", "FooSelf"), "Self_");
    }
}
diff --git a/vendor/prost-build/src/lib.rs b/vendor/prost-build/src/lib.rs
new file mode 100644
index 00000000..e3659a2c
--- /dev/null
+++ b/vendor/prost-build/src/lib.rs
@@ -0,0 +1,563 @@
+#![doc(html_root_url = "https://docs.rs/prost-build/0.12.6")]
+#![allow(clippy::option_as_ref_deref, clippy::format_push_string)]
+
+//! `prost-build` compiles `.proto` files into Rust.
+//!
+//! `prost-build` is designed to be used for build-time code generation as part of a Cargo
+//! build-script.
+//!
+//! ## Example
+//!
+//! Let's create a small crate, `snazzy`, that defines a collection of
+//! snazzy new items in a protobuf file.
+//!
+//! ```bash
+//! $ cargo new snazzy && cd snazzy
+//! ```
+//!
+//! First, add `prost-build`, `prost` and its public dependencies to `Cargo.toml`
+//! (see [crates.io](https://crates.io/crates/prost) for the current versions):
+//!
+//! ```toml
+//! [dependencies]
+//! bytes = <bytes-version>
+//! prost = <prost-version>
+//!
+//! [build-dependencies]
+//! prost-build = { version = <prost-version> }
+//! ```
+//!
+//! Next, add `src/items.proto` to the project:
+//!
+//! ```proto
+//! syntax = "proto3";
+//!
+//! package snazzy.items;
+//!
+//! // A snazzy new shirt!
+//! message Shirt {
+//! enum Size {
+//! SMALL = 0;
+//! MEDIUM = 1;
+//! LARGE = 2;
+//! }
+//!
+//! string color = 1;
+//! Size size = 2;
+//! }
+//! ```
+//!
+//! To generate Rust code from `items.proto`, we use `prost-build` in the crate's
+//! `build.rs` build-script:
+//!
+//! ```rust,no_run
+//! use std::io::Result;
+//! fn main() -> Result<()> {
+//! prost_build::compile_protos(&["src/items.proto"], &["src/"])?;
+//! Ok(())
+//! }
+//! ```
+//!
+//! And finally, in `lib.rs`, include the generated code:
+//!
+//! ```rust,ignore
+//! // Include the `items` module, which is generated from items.proto.
+//! // It is important to maintain the same structure as in the proto.
+//! pub mod snazzy {
+//! pub mod items {
+//! include!(concat!(env!("OUT_DIR"), "/snazzy.items.rs"));
+//! }
+//! }
+//!
+//! use snazzy::items;
+//!
+//! pub fn create_large_shirt(color: String) -> items::Shirt {
+//! let mut shirt = items::Shirt::default();
+//! shirt.color = color;
+//! shirt.set_size(items::shirt::Size::Large);
+//! shirt
+//! }
+//! ```
+//!
+//! That's it! Run `cargo doc` to see documentation for the generated code. The full
+//! example project can be found on [GitHub](https://github.com/danburkert/snazzy).
+//!
+//! ## Feature Flags
+//! - `format`: Format the generated output. This feature is enabled by default.
+//! - `cleanup-markdown`: Clean up Markdown in protobuf docs. Enable this to clean up protobuf files from third parties.
+//!
+//! ### Cleaning up Markdown in code docs
+//!
+//! If you are using protobuf files from third parties, where the author of the protobuf
+//! is not treating comments as Markdown, or is, but has codeblocks in their docs,
+//! then you may need to clean up the documentation in order that `cargo test --doc`
+//! will not fail spuriously, and that `cargo doc` doesn't attempt to render the
+//! codeblocks as Rust code.
+//!
+//! To do this, in your `Cargo.toml`, add `features = ["cleanup-markdown"]` to the inclusion
+//! of the `prost-build` crate and when your code is generated, the code docs will automatically
+//! be cleaned up a bit.
+//!
+//! ## Sourcing `protoc`
+//!
+//! `prost-build` depends on the Protocol Buffers compiler, `protoc`, to parse `.proto` files into
+//! a representation that can be transformed into Rust. If set, `prost-build` uses the `PROTOC`
+//! environment variable for locating `protoc`. For example, on a macOS system where Protobuf is
+//! installed with Homebrew, set the environment variable to:
+//!
+//! ```bash
+//! PROTOC=/usr/local/bin/protoc
+//! ```
+//!
+//! and in a typical Linux installation:
+//!
+//! ```bash
+//! PROTOC=/usr/bin/protoc
+//! ```
+//!
+//! If no `PROTOC` environment variable is set then `prost-build` will search the
+//! current path for `protoc` or `protoc.exe`. If `prost-build` can not find `protoc`
+//! via these methods the `compile_protos` method will fail.
+//!
+//! ### Compiling `protoc` from source
+//!
+//! To compile `protoc` from source you can use the `protobuf-src` crate and
+//! set the correct environment variables.
+//! ```rust,no_run,ignore
+//! std::env::set_var("PROTOC", protobuf_src::protoc());
+//!
+//! // Now compile your proto files via prost-build
+//! ```
+//!
+//! [`protobuf-src`]: https://docs.rs/protobuf-src
+
+use std::io::Result;
+use std::path::Path;
+
+use prost_types::FileDescriptorSet;
+
+mod ast;
+pub use crate::ast::{Comments, Method, Service};
+
+mod collections;
+pub(crate) use collections::{BytesType, MapType};
+
+mod code_generator;
+mod extern_paths;
+mod ident;
+mod message_graph;
+mod path;
+
+mod config;
+pub use config::{
+ error_message_protoc_not_found, protoc_from_env, protoc_include_from_env, Config,
+};
+
+mod module;
+pub use module::Module;
+
/// A service generator takes a service descriptor and generates Rust code.
///
/// `ServiceGenerator` can be used to generate application-specific interfaces
/// or implementations for Protobuf service definitions.
///
/// Service generators are registered with a code generator using the
/// `Config::service_generator` method.
///
/// A viable scenario is that an RPC framework provides a service generator. It generates a trait
/// describing methods of the service and some glue code to call the methods of the trait, defining
/// details like how errors are handled or if it is asynchronous. Then the user provides an
/// implementation of the generated trait in the application code and plugs it into the framework.
///
/// Such a framework isn't part of Prost at present.
pub trait ServiceGenerator {
    /// Generates a Rust interface or implementation for a service, writing the
    /// result to `buf`.
    fn generate(&mut self, service: Service, buf: &mut String);

    /// Finalizes the generation process.
    ///
    /// In case there's something that needs to be output at the end of the generation process, it
    /// goes here. Similar to [`generate`](#method.generate), the output should be appended to
    /// `buf`.
    ///
    /// An example can be a module or other thing that needs to appear just once, not for each
    /// service generated.
    ///
    /// This still can be called multiple times in a lifetime of the service generator, because it
    /// is called once per `.proto` file.
    ///
    /// The default implementation is empty and does nothing.
    fn finalize(&mut self, _buf: &mut String) {}

    /// Finalizes the generation process for an entire protobuf package.
    ///
    /// This differs from [`finalize`](#method.finalize) by where (and how often) it is called
    /// during the service generator life cycle. This method is called once per protobuf package,
    /// making it ideal for grouping services within a single package spread across multiple
    /// `.proto` files.
    ///
    /// The default implementation is empty and does nothing.
    fn finalize_package(&mut self, _package: &str, _buf: &mut String) {}
}
+
+/// Compile `.proto` files into Rust files during a Cargo build.
+///
+/// The generated `.rs` files are written to the Cargo `OUT_DIR` directory, suitable for use with
+/// the [include!][1] macro. See the [Cargo `build.rs` code generation][2] example for more info.
+///
+/// This function should be called in a project's `build.rs`.
+///
+/// # Arguments
+///
+/// **`protos`** - Paths to `.proto` files to compile. Any transitively [imported][3] `.proto`
+/// files are automatically included.
+///
+/// **`includes`** - Paths to directories in which to search for imports. Directories are searched
+/// in order. The `.proto` files passed in **`protos`** must be found in one of the provided
+/// include directories.
+///
+/// # Errors
+///
+/// This function can fail for a number of reasons:
+///
+/// - Failure to locate or download `protoc`.
+/// - Failure to parse the `.proto`s.
+/// - Failure to locate an imported `.proto`.
+/// - Failure to compile a `.proto` without a [package specifier][4].
+///
+/// It's expected that this function call be `unwrap`ed in a `build.rs`; there is typically no
+/// reason to gracefully recover from errors during a build.
+///
+/// # Example `build.rs`
+///
+/// ```rust,no_run
+/// # use std::io::Result;
+/// fn main() -> Result<()> {
+/// prost_build::compile_protos(&["src/frontend.proto", "src/backend.proto"], &["src"])?;
+/// Ok(())
+/// }
+/// ```
+///
+/// [1]: https://doc.rust-lang.org/std/macro.include.html
+/// [2]: http://doc.crates.io/build-script.html#case-study-code-generation
+/// [3]: https://developers.google.com/protocol-buffers/docs/proto3#importing-definitions
+/// [4]: https://developers.google.com/protocol-buffers/docs/proto#packages
+pub fn compile_protos(protos: &[impl AsRef<Path>], includes: &[impl AsRef<Path>]) -> Result<()> {
+ Config::new().compile_protos(protos, includes)
+}
+
+/// Compile a [`FileDescriptorSet`] into Rust files during a Cargo build.
+///
+/// The generated `.rs` files are written to the Cargo `OUT_DIR` directory, suitable for use with
+/// the [include!][1] macro. See the [Cargo `build.rs` code generation][2] example for more info.
+///
+/// This function should be called in a project's `build.rs`.
+///
+/// This function can be combined with a crate like [`protox`] which outputs a
+/// [`FileDescriptorSet`] and is a pure Rust implementation of `protoc`.
+///
+/// [`protox`]: https://github.com/andrewhickman/protox
+///
+/// # Example
+/// ```rust,no_run
+/// # use prost_types::FileDescriptorSet;
+/// # fn fds() -> FileDescriptorSet { todo!() }
+/// fn main() -> std::io::Result<()> {
+/// let file_descriptor_set = fds();
+///
+/// prost_build::compile_fds(file_descriptor_set)
+/// }
+/// ```
+pub fn compile_fds(fds: FileDescriptorSet) -> Result<()> {
+ Config::new().compile_fds(fds)
+}
+
#[cfg(test)]
mod tests {
    use std::cell::RefCell;
    use std::fs::File;
    use std::io::Read;
    use std::rc::Rc;

    use super::*;

    /// An example service generator that generates a trait with methods corresponding to the
    /// service methods.
    struct ServiceTraitGenerator;

    impl ServiceGenerator for ServiceTraitGenerator {
        fn generate(&mut self, service: Service, buf: &mut String) {
            // Generate a trait for the service.
            service.comments.append_with_indent(0, buf);
            buf.push_str(&format!("trait {} {{\n", &service.name));

            // Generate the service methods.
            for method in service.methods {
                method.comments.append_with_indent(1, buf);
                buf.push_str(&format!(
                    " fn {}(_: {}) -> {};\n",
                    method.name, method.input_type, method.output_type
                ));
            }

            // Close out the trait.
            buf.push_str("}\n");
        }
        fn finalize(&mut self, buf: &mut String) {
            // Needs to be present only once, no matter how many services there are
            buf.push_str("pub mod utils { }\n");
        }
    }

    /// Implements `ServiceGenerator` and provides some state for assertions.
    struct MockServiceGenerator {
        state: Rc<RefCell<MockState>>,
    }

    /// Holds state for `MockServiceGenerator`
    #[derive(Default)]
    struct MockState {
        // Service names seen by `generate`, in call order.
        service_names: Vec<String>,
        // Package names seen by `finalize_package`, in call order.
        package_names: Vec<String>,
        // Number of times `finalize` was invoked.
        finalized: u32,
    }

    impl MockServiceGenerator {
        fn new(state: Rc<RefCell<MockState>>) -> Self {
            Self { state }
        }
    }

    impl ServiceGenerator for MockServiceGenerator {
        fn generate(&mut self, service: Service, _buf: &mut String) {
            let mut state = self.state.borrow_mut();
            state.service_names.push(service.name);
        }

        fn finalize(&mut self, _buf: &mut String) {
            let mut state = self.state.borrow_mut();
            state.finalized += 1;
        }

        fn finalize_package(&mut self, package: &str, _buf: &mut String) {
            let mut state = self.state.borrow_mut();
            state.package_names.push(package.to_string());
        }
    }

    // End-to-end check that a proto with a service compiles cleanly through
    // protoc and the service generator. Requires `protoc` on the test host.
    #[test]
    fn smoke_test() {
        let _ = env_logger::try_init();
        let tempdir = tempfile::tempdir().unwrap();

        Config::new()
            .service_generator(Box::new(ServiceTraitGenerator))
            .out_dir(tempdir.path())
            .compile_protos(&["src/fixtures/smoke_test/smoke_test.proto"], &["src"])
            .unwrap();
    }

    // Verifies per-service, per-file, and per-package generator callbacks when
    // two .proto files share a single package.
    #[test]
    fn finalize_package() {
        let _ = env_logger::try_init();
        let tempdir = tempfile::tempdir().unwrap();

        let state = Rc::new(RefCell::new(MockState::default()));
        let gen = MockServiceGenerator::new(Rc::clone(&state));

        Config::new()
            .service_generator(Box::new(gen))
            .include_file("_protos.rs")
            .out_dir(tempdir.path())
            .compile_protos(
                &[
                    "src/fixtures/helloworld/hello.proto",
                    "src/fixtures/helloworld/goodbye.proto",
                ],
                &["src/fixtures/helloworld"],
            )
            .unwrap();

        let state = state.borrow();
        assert_eq!(&state.service_names, &["Greeting", "Farewell"]);
        assert_eq!(&state.package_names, &["helloworld"]);
        // NOTE(review): 3 finalize calls for 2 input files — presumably one
        // extra invocation comes from the include-file pass; confirm against
        // Config's generation pipeline.
        assert_eq!(state.finalized, 3);
    }

    // Generated output with message/enum attributes must match the checked-in
    // fixture (formatted or not, depending on the `format` feature).
    #[test]
    fn test_generate_message_attributes() {
        let _ = env_logger::try_init();
        let tempdir = tempfile::tempdir().unwrap();

        Config::new()
            .out_dir(tempdir.path())
            .message_attribute(".", "#[derive(derive_builder::Builder)]")
            .enum_attribute(".", "#[some_enum_attr(u8)]")
            .compile_protos(
                &["src/fixtures/helloworld/hello.proto"],
                &["src/fixtures/helloworld"],
            )
            .unwrap();

        let out_file = tempdir.path().join("helloworld.rs");
        #[cfg(feature = "format")]
        let expected_content =
            read_all_content("src/fixtures/helloworld/_expected_helloworld_formatted.rs")
                .replace("\r\n", "\n");
        #[cfg(not(feature = "format"))]
        let expected_content = read_all_content("src/fixtures/helloworld/_expected_helloworld.rs")
            .replace("\r\n", "\n");
        let content = read_all_content(out_file).replace("\r\n", "\n");
        assert_eq!(
            expected_content, content,
            "Unexpected content: \n{}",
            content
        );
    }

    // Empty outputs (e.g. google.protobuf with nothing generated) must not be
    // written, and the include file must not reference them.
    #[test]
    fn test_generate_no_empty_outputs() {
        let _ = env_logger::try_init();
        let state = Rc::new(RefCell::new(MockState::default()));
        let gen = MockServiceGenerator::new(Rc::clone(&state));
        let include_file = "_include.rs";
        let tempdir = tempfile::tempdir().unwrap();
        let previously_empty_proto_path = tempdir.path().join(Path::new("google.protobuf.rs"));

        Config::new()
            .service_generator(Box::new(gen))
            .include_file(include_file)
            .out_dir(tempdir.path())
            .compile_protos(
                &["src/fixtures/imports_empty/imports_empty.proto"],
                &["src/fixtures/imports_empty"],
            )
            .unwrap();

        // Prior to PR introducing this test, the generated include file would have the file
        // google.protobuf.rs which was an empty file. Now that file should only exist if it has content
        if let Ok(mut f) = File::open(previously_empty_proto_path) {
            // Since this file was generated, it should not be empty.
            let mut contents = String::new();
            f.read_to_string(&mut contents).unwrap();
            assert!(!contents.is_empty());
        } else {
            // The file wasn't generated so the result include file should not reference it
            let expected = read_all_content("src/fixtures/imports_empty/_expected_include.rs");
            let actual = read_all_content(tempdir.path().join(Path::new(include_file)));
            // Normalizes windows and Linux-style EOL
            let expected = expected.replace("\r\n", "\n");
            let actual = actual.replace("\r\n", "\n");
            assert_eq!(expected, actual);
        }
    }

    // Boxed-field configuration must produce output matching the fixture.
    #[test]
    fn test_generate_field_attributes() {
        let _ = env_logger::try_init();
        let tempdir = tempfile::tempdir().unwrap();

        Config::new()
            .out_dir(tempdir.path())
            .boxed("Container.data.foo")
            .boxed("Bar.qux")
            .compile_protos(
                &["src/fixtures/field_attributes/field_attributes.proto"],
                &["src/fixtures/field_attributes"],
            )
            .unwrap();

        let out_file = tempdir.path().join("field_attributes.rs");

        let content = read_all_content(out_file).replace("\r\n", "\n");

        #[cfg(feature = "format")]
        let expected_content = read_all_content(
            "src/fixtures/field_attributes/_expected_field_attributes_formatted.rs",
        )
        .replace("\r\n", "\n");
        #[cfg(not(feature = "format"))]
        let expected_content =
            read_all_content("src/fixtures/field_attributes/_expected_field_attributes.rs")
                .replace("\r\n", "\n");

        assert_eq!(
            expected_content, content,
            "Unexpected content: \n{}",
            content
        );
    }

    // Repeatedly regenerates the include file and checks the output is
    // byte-identical each time (module ordering must be deterministic).
    #[test]
    fn deterministic_include_file() {
        let _ = env_logger::try_init();

        for _ in 1..10 {
            let state = Rc::new(RefCell::new(MockState::default()));
            let gen = MockServiceGenerator::new(Rc::clone(&state));
            let include_file = "_include.rs";
            let tempdir = tempfile::tempdir().unwrap();

            Config::new()
                .service_generator(Box::new(gen))
                .include_file(include_file)
                .out_dir(tempdir.path())
                .compile_protos(
                    &[
                        "src/fixtures/alphabet/a.proto",
                        "src/fixtures/alphabet/b.proto",
                        "src/fixtures/alphabet/c.proto",
                        "src/fixtures/alphabet/d.proto",
                        "src/fixtures/alphabet/e.proto",
                        "src/fixtures/alphabet/f.proto",
                    ],
                    &["src/fixtures/alphabet"],
                )
                .unwrap();

            let expected = read_all_content("src/fixtures/alphabet/_expected_include.rs");
            let actual = read_all_content(tempdir.path().join(Path::new(include_file)));
            // Normalizes windows and Linux-style EOL
            let expected = expected.replace("\r\n", "\n");
            let actual = actual.replace("\r\n", "\n");

            assert_eq!(expected, actual);
        }
    }

    // Test helper: reads a whole file to a String, panicking on any error.
    fn read_all_content(filepath: impl AsRef<Path>) -> String {
        let mut f = File::open(filepath).unwrap();
        let mut content = String::new();
        f.read_to_string(&mut content).unwrap();
        content
    }

    // Exercises include-file generation directly (no protoc) against a fixture
    // covering nested, sibling, and default (empty-package) modules.
    #[test]
    fn write_includes() {
        let modules = [
            Module::from_protobuf_package_name("foo.bar.baz"),
            Module::from_protobuf_package_name(""),
            Module::from_protobuf_package_name("foo.bar"),
            Module::from_protobuf_package_name("bar"),
            Module::from_protobuf_package_name("foo"),
            Module::from_protobuf_package_name("foo.bar.qux"),
            Module::from_protobuf_package_name("foo.bar.a.b.c"),
        ];

        let file_names = modules
            .iter()
            .map(|m| (m.clone(), m.to_file_name_or("_.default")))
            .collect();

        let mut buf = Vec::new();
        Config::new()
            .default_package_filename("_.default")
            .write_includes(modules.iter().collect(), &mut buf, None, &file_names)
            .unwrap();
        let expected =
            read_all_content("src/fixtures/write_includes/_.includes.rs").replace("\r\n", "\n");
        let actual = String::from_utf8(buf).unwrap();
        assert_eq!(expected, actual);
    }
}
diff --git a/vendor/prost-build/src/message_graph.rs b/vendor/prost-build/src/message_graph.rs
new file mode 100644
index 00000000..ac0ad152
--- /dev/null
+++ b/vendor/prost-build/src/message_graph.rs
@@ -0,0 +1,87 @@
+use std::collections::HashMap;
+
+use petgraph::algo::has_path_connecting;
+use petgraph::graph::NodeIndex;
+use petgraph::Graph;
+
+use prost_types::{field_descriptor_proto, DescriptorProto, FileDescriptorProto};
+
/// `MessageGraph` builds a graph of messages whose edges correspond to nesting.
/// The goal is to recognize when message types are recursively nested, so
/// that fields can be boxed when necessary.
pub struct MessageGraph {
    // Maps a fully-qualified message name (always with a leading '.') to its
    // node in `graph`.
    index: HashMap<String, NodeIndex>,
    // Nodes carry message names; an edge A -> B means message A has a
    // non-repeated field of message type B.
    graph: Graph<String, ()>,
}
+
+impl MessageGraph {
+ pub fn new<'a>(
+ files: impl Iterator<Item = &'a FileDescriptorProto>,
+ ) -> Result<MessageGraph, String> {
+ let mut msg_graph = MessageGraph {
+ index: HashMap::new(),
+ graph: Graph::new(),
+ };
+
+ for file in files {
+ let package = format!(
+ "{}{}",
+ if file.package.is_some() { "." } else { "" },
+ file.package.as_ref().map(String::as_str).unwrap_or("")
+ );
+ for msg in &file.message_type {
+ msg_graph.add_message(&package, msg);
+ }
+ }
+
+ Ok(msg_graph)
+ }
+
+ fn get_or_insert_index(&mut self, msg_name: String) -> NodeIndex {
+ let MessageGraph {
+ ref mut index,
+ ref mut graph,
+ } = *self;
+ assert_eq!(b'.', msg_name.as_bytes()[0]);
+ *index
+ .entry(msg_name.clone())
+ .or_insert_with(|| graph.add_node(msg_name))
+ }
+
+ /// Adds message to graph IFF it contains a non-repeated field containing another message.
+ /// The purpose of the message graph is detecting recursively nested messages and co-recursively nested messages.
+ /// Because prost does not box message fields, recursively nested messages would not compile in Rust.
+ /// To allow recursive messages, the message graph is used to detect recursion and automatically box the recursive field.
+ /// Since repeated messages are already put in a Vec, boxing them isn’t necessary even if the reference is recursive.
+ fn add_message(&mut self, package: &str, msg: &DescriptorProto) {
+ let msg_name = format!("{}.{}", package, msg.name.as_ref().unwrap());
+ let msg_index = self.get_or_insert_index(msg_name.clone());
+
+ for field in &msg.field {
+ if field.r#type() == field_descriptor_proto::Type::Message
+ && field.label() != field_descriptor_proto::Label::Repeated
+ {
+ let field_index = self.get_or_insert_index(field.type_name.clone().unwrap());
+ self.graph.add_edge(msg_index, field_index, ());
+ }
+ }
+
+ for msg in &msg.nested_type {
+ self.add_message(&msg_name, msg);
+ }
+ }
+
+ /// Returns true if message type `inner` is nested in message type `outer`.
+ pub fn is_nested(&self, outer: &str, inner: &str) -> bool {
+ let outer = match self.index.get(outer) {
+ Some(outer) => *outer,
+ None => return false,
+ };
+ let inner = match self.index.get(inner) {
+ Some(inner) => *inner,
+ None => return false,
+ };
+
+ has_path_connecting(&self.graph, outer, inner, None)
+ }
+}
diff --git a/vendor/prost-build/src/module.rs b/vendor/prost-build/src/module.rs
new file mode 100644
index 00000000..02715c16
--- /dev/null
+++ b/vendor/prost-build/src/module.rs
@@ -0,0 +1,93 @@
+use std::fmt;
+
+use crate::ident::to_snake;
+
/// A Rust module path for a Protobuf package.
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Module {
    // Ordered path segments, e.g. `["foo", "bar"]` renders as `foo::bar`.
    components: Vec<String>,
}
+
+impl Module {
+ /// Construct a module path from an iterator of parts.
+ pub fn from_parts<I>(parts: I) -> Self
+ where
+ I: IntoIterator,
+ I::Item: Into<String>,
+ {
+ Self {
+ components: parts.into_iter().map(|s| s.into()).collect(),
+ }
+ }
+
+ /// Construct a module path from a Protobuf package name.
+ ///
+ /// Constituent parts are automatically converted to snake case in order to follow
+ /// Rust module naming conventions.
+ pub fn from_protobuf_package_name(name: &str) -> Self {
+ Self {
+ components: name
+ .split('.')
+ .filter(|s| !s.is_empty())
+ .map(to_snake)
+ .collect(),
+ }
+ }
+
+ /// An iterator over the parts of the path.
+ pub fn parts(&self) -> impl Iterator<Item = &str> {
+ self.components.iter().map(|s| s.as_str())
+ }
+
+ #[must_use]
+ #[inline(always)]
+ pub(crate) fn starts_with(&self, needle: &[String]) -> bool
+ where
+ String: PartialEq,
+ {
+ self.components.starts_with(needle)
+ }
+
+ /// Format the module path into a filename for generated Rust code.
+ ///
+ /// If the module path is empty, `default` is used to provide the root of the filename.
+ pub fn to_file_name_or(&self, default: &str) -> String {
+ let mut root = if self.components.is_empty() {
+ default.to_owned()
+ } else {
+ self.components.join(".")
+ };
+
+ root.push_str(".rs");
+
+ root
+ }
+
+ /// The number of parts in the module's path.
+ pub fn len(&self) -> usize {
+ self.components.len()
+ }
+
+ /// Whether the module's path contains any components.
+ pub fn is_empty(&self) -> bool {
+ self.components.is_empty()
+ }
+
+ pub(crate) fn part(&self, idx: usize) -> &str {
+ self.components[idx].as_str()
+ }
+}
+
+impl fmt::Display for Module {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut parts = self.parts();
+ if let Some(first) = parts.next() {
+ f.write_str(first)?;
+ }
+ for part in parts {
+ f.write_str("::")?;
+ f.write_str(part)?;
+ }
+ Ok(())
+ }
+}
diff --git a/vendor/prost-build/src/path.rs b/vendor/prost-build/src/path.rs
new file mode 100644
index 00000000..f6897005
--- /dev/null
+++ b/vendor/prost-build/src/path.rs
@@ -0,0 +1,246 @@
+//! Utilities for working with Protobuf paths.
+
+use std::iter;
+
/// Maps a fully-qualified Protobuf path to a value using path matchers.
#[derive(Debug, Default)]
pub(crate) struct PathMap<T> {
    // insertion order might actually matter (to avoid warning about legacy-derive-helpers)
    // see: https://doc.rust-lang.org/rustc/lints/listing/warn-by-default.html#legacy-derive-helpers
    // Stored as (matcher pattern, value) pairs and scanned linearly on lookup.
    pub(crate) matchers: Vec<(String, T)>,
}
+
+impl<T> PathMap<T> {
+ /// Inserts a new matcher and associated value to the path map.
+ pub(crate) fn insert(&mut self, matcher: String, value: T) {
+ self.matchers.push((matcher, value));
+ }
+
+ /// Returns a iterator over all the value matching the given fd_path and associated suffix/prefix path
+ pub(crate) fn get(&self, fq_path: &str) -> Iter<'_, T> {
+ Iter::new(self, fq_path.to_string())
+ }
+
+ /// Returns a iterator over all the value matching the path `fq_path.field` and associated suffix/prefix path
+ pub(crate) fn get_field(&self, fq_path: &str, field: &str) -> Iter<'_, T> {
+ Iter::new(self, format!("{}.{}", fq_path, field))
+ }
+
+ /// Returns the first value found matching the given path
+ /// If nothing matches the path, suffix paths will be tried, then prefix paths, then the global path
+ #[allow(unused)]
+ pub(crate) fn get_first<'a>(&'a self, fq_path: &'_ str) -> Option<&'a T> {
+ self.find_best_matching(fq_path)
+ }
+
+ /// Returns the first value found matching the path `fq_path.field`
+ /// If nothing matches the path, suffix paths will be tried, then prefix paths, then the global path
+ pub(crate) fn get_first_field<'a>(&'a self, fq_path: &'_ str, field: &'_ str) -> Option<&'a T> {
+ self.find_best_matching(&format!("{}.{}", fq_path, field))
+ }
+
+ /// Removes all matchers from the path map.
+ pub(crate) fn clear(&mut self) {
+ self.matchers.clear();
+ }
+
+ /// Returns the first value found best matching the path
+ /// See [sub_path_iter()] for paths test order
+ fn find_best_matching(&self, full_path: &str) -> Option<&T> {
+ sub_path_iter(full_path).find_map(|path| {
+ self.matchers
+ .iter()
+ .find(|(p, _)| p == path)
+ .map(|(_, v)| v)
+ })
+ }
+}
+
/// Iterator inside a PathMap that only returns values that matches a given path
pub(crate) struct Iter<'a, T> {
    // Remaining (matcher, value) pairs, walked in insertion order.
    iter: std::slice::Iter<'a, (String, T)>,
    // The fully-qualified path each matcher is tested against.
    path: String,
}
+
+impl<'a, T> Iter<'a, T> {
+ fn new(map: &'a PathMap<T>, path: String) -> Self {
+ Self {
+ iter: map.matchers.iter(),
+ path,
+ }
+ }
+
+ fn is_match(&self, path: &str) -> bool {
+ sub_path_iter(self.path.as_str()).any(|p| p == path)
+ }
+}
+
+impl<'a, T> std::iter::Iterator for Iter<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ loop {
+ match self.iter.next() {
+ Some((p, v)) => {
+ if self.is_match(p) {
+ return Some(v);
+ }
+ }
+ None => return None,
+ }
+ }
+ }
+}
+
// Fusedness holds: `next` only returns `None` once the inner `slice::Iter`
// is exhausted, and `slice::Iter` is itself fused, so every subsequent call
// also returns `None`.
impl<'a, T> std::iter::FusedIterator for Iter<'a, T> {}
+
+/// Given a fully-qualified path, returns a sequence of paths:
+/// - the path itself
+/// - the sequence of suffix paths
+/// - the sequence of prefix paths
+/// - the global path
+///
+/// Example: sub_path_iter(".a.b.c") -> [".a.b.c", "a.b.c", "b.c", "c", ".a.b", ".a", "."]
+fn sub_path_iter(full_path: &str) -> impl Iterator<Item = &str> {
+ // First, try matching the path.
+ iter::once(full_path)
+ // Then, try matching path suffixes.
+ .chain(suffixes(full_path))
+ // Then, try matching path prefixes.
+ .chain(prefixes(full_path))
+ // Then, match the global path.
+ .chain(iter::once("."))
+}
+
/// Given a fully-qualified path, returns a sequence of fully-qualified paths which match a prefix
/// of the input path, in decreasing path-length order.
///
/// Example: prefixes(".a.b.c.d") -> [".a.b.c", ".a.b", ".a"]
fn prefixes(fq_path: &str) -> impl Iterator<Item = &str> {
    let mut current = fq_path;
    std::iter::from_fn(move || {
        // Drop the last `.`-separated segment; stop once nothing (or only the
        // leading dot) remains.
        let cut = current.rfind('.')?;
        let head = &current[..cut];
        if head.is_empty() {
            return None;
        }
        current = head;
        Some(head)
    })
}
+
/// Given a fully-qualified path, returns a sequence of paths which match the suffix of the input
/// path, in decreasing path-length order.
///
/// Example: suffixes(".a.b.c.d") -> ["a.b.c.d", "b.c.d", "c.d", "d"]
fn suffixes(fq_path: &str) -> impl Iterator<Item = &str> {
    let mut current = fq_path;
    std::iter::from_fn(move || {
        // Drop everything up to and including the first `.`; stop once the
        // remainder is empty.
        let cut = current.find('.')?;
        let tail = &current[cut + 1..];
        if tail.is_empty() {
            return None;
        }
        current = tail;
        Some(tail)
    })
}
+
#[cfg(test)]
mod tests {

    use super::*;

    // `prefixes`/`suffixes` never yield the input itself, the empty string,
    // or a bare leading dot.
    #[test]
    fn test_prefixes() {
        assert_eq!(
            prefixes(".a.b.c.d").collect::<Vec<_>>(),
            vec![".a.b.c", ".a.b", ".a"],
        );
        assert_eq!(prefixes(".a").count(), 0);
        assert_eq!(prefixes(".").count(), 0);
    }

    #[test]
    fn test_suffixes() {
        assert_eq!(
            suffixes(".a.b.c.d").collect::<Vec<_>>(),
            vec!["a.b.c.d", "b.c.d", "c.d", "d"],
        );
        assert_eq!(suffixes(".a").collect::<Vec<_>>(), vec!["a"]);
        assert_eq!(suffixes(".").collect::<Vec<_>>(), Vec::<&str>::new());
    }

    // Each matcher category (exact, suffix, prefix, global) matches both
    // `get` and `get_field` lookups.
    #[test]
    fn test_get_matches_sub_path() {
        let mut path_map = PathMap::default();

        // full path
        path_map.insert(".a.b.c.d".to_owned(), 1);
        assert_eq!(Some(&1), path_map.get(".a.b.c.d").next());
        assert_eq!(Some(&1), path_map.get_field(".a.b.c", "d").next());

        // suffix
        path_map.clear();
        path_map.insert("c.d".to_owned(), 1);
        assert_eq!(Some(&1), path_map.get(".a.b.c.d").next());
        assert_eq!(Some(&1), path_map.get("b.c.d").next());
        assert_eq!(Some(&1), path_map.get_field(".a.b.c", "d").next());

        // prefix
        path_map.clear();
        path_map.insert(".a.b".to_owned(), 1);
        assert_eq!(Some(&1), path_map.get(".a.b.c.d").next());
        assert_eq!(Some(&1), path_map.get_field(".a.b.c", "d").next());

        // global
        path_map.clear();
        path_map.insert(".".to_owned(), 1);
        assert_eq!(Some(&1), path_map.get(".a.b.c.d").next());
        assert_eq!(Some(&1), path_map.get("b.c.d").next());
        assert_eq!(Some(&1), path_map.get_field(".a.b.c", "d").next());
    }

    // `get_first*` prefers exact > suffix > prefix > global, regardless of
    // insertion order.
    #[test]
    fn test_get_best() {
        let mut path_map = PathMap::default();

        // worst is global
        path_map.insert(".".to_owned(), 1);
        assert_eq!(Some(&1), path_map.get_first(".a.b.c.d"));
        assert_eq!(Some(&1), path_map.get_first("b.c.d"));
        assert_eq!(Some(&1), path_map.get_first_field(".a.b.c", "d"));

        // then prefix
        path_map.insert(".a.b".to_owned(), 2);
        assert_eq!(Some(&2), path_map.get_first(".a.b.c.d"));
        assert_eq!(Some(&2), path_map.get_first_field(".a.b.c", "d"));

        // then suffix
        path_map.insert("c.d".to_owned(), 3);
        assert_eq!(Some(&3), path_map.get_first(".a.b.c.d"));
        assert_eq!(Some(&3), path_map.get_first("b.c.d"));
        assert_eq!(Some(&3), path_map.get_first_field(".a.b.c", "d"));

        // best is full path
        path_map.insert(".a.b.c.d".to_owned(), 4);
        assert_eq!(Some(&4), path_map.get_first(".a.b.c.d"));
        assert_eq!(Some(&4), path_map.get_first_field(".a.b.c", "d"));
    }

    // `get` (the Iter form) yields matches in insertion order, not priority order.
    #[test]
    fn test_get_keep_order() {
        let mut path_map = PathMap::default();
        path_map.insert(".".to_owned(), 1);
        path_map.insert(".a.b".to_owned(), 2);
        path_map.insert(".a.b.c.d".to_owned(), 3);

        let mut iter = path_map.get(".a.b.c.d");
        assert_eq!(Some(&1), iter.next());
        assert_eq!(Some(&2), iter.next());
        assert_eq!(Some(&3), iter.next());
        assert_eq!(None, iter.next());

        path_map.clear();

        path_map.insert(".a.b.c.d".to_owned(), 1);
        path_map.insert(".a.b".to_owned(), 2);
        path_map.insert(".".to_owned(), 3);

        let mut iter = path_map.get(".a.b.c.d");
        assert_eq!(Some(&1), iter.next());
        assert_eq!(Some(&2), iter.next());
        assert_eq!(Some(&3), iter.next());
        assert_eq!(None, iter.next());
    }
}